Example #1
def on_connect(mqtt_client, userdata, flags, rc):
    return_code = {
        0: "Connection successful",
        1: "Connection refused – incorrect protocol version",
        2: "Connection refused – invalid client identifier",
        3: "Connection refused – server unavailable",
        4: "Connection refused – bad username or password",
        5: "Connection refused – not authorised"
    }
    if rc == 0:
        app_logger.info("Broker connection was successful")
        mqtt_client.subscribe(topic, int(qos))
    else:
        app_logger.error(
            return_code.get(rc, "Unable to identify return code error!"))
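A minimal sketch of how this callback might be wired up, assuming the paho-mqtt 1.x client API (matching the four-argument callback signature above) and illustrative module-level broker_host, topic, and qos values, none of which come from the snippet itself:

import paho.mqtt.client as mqtt

client = mqtt.Client()
client.on_connect = on_connect          # register the callback before connecting
client.connect(broker_host, 1883, 60)   # host, port, keepalive
client.loop_forever()                   # network loop; triggers on_connect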
Example #2
    def add_demo_transitions_to_mem(self, dset):
        """Add transitions from expert demonstration trajectories to memory"""
        # Ensure the replay buffer is empty, as demos need to come first
        assert self.num_entries == 0 and self.num_demos == 0
        logger.info("adding demonstrations to memory")
        # Zip transition atoms
        transitions = zipsame(dset.obs0, dset.acs, dset.env_rews, dset.obs1,
                              dset.dones1)
        # Note: careful w/ the order; it should match the order of the `append` signature
        for transition in transitions:
            self.append(*transition, is_demo=True)
            self.num_demos += 1
        assert self.num_demos == self.num_entries
        logger.info("  num entries in memory after addition: {}".format(
            self.num_entries))
Example #3
def convert_pages(username, apikey, pages, out):
    if not os.path.isdir(out):
        os.makedirs(out)

    try:
        client = pdfcrowd.HtmlToPdfClient(username, apikey)
        client.setFailOnMainUrlError(True)

        for i, url in enumerate(gen_urls(pages)):
            file_name = os.path.join(out, 'generated_{}.pdf'.format(i))
            logger.info('creating %s from %s', file_name, url)
            client.convertUrlToFile(url, file_name)

    except pdfcrowd.Error as why:
        logger.error('Pdfcrowd Error: %s', why)
        sys.exit(1)
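The snippet relies on a gen_urls helper that is not shown; a plausible minimal version (hypothetical, assuming `pages` is an iterable of URLs or bare page paths) could look like:

def gen_urls(pages):
    # Hypothetical helper: yield one absolute URL per requested page
    for page in pages:
        yield page if page.startswith('http') else 'http://' + page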
Example #4
    def execute(self, command, params):
        """Execute a Timer command."""

        if command == COMMAND_TIMER_START:
            logger.info(
                'Make sure you have the dzVents Dzga_Timer script installed and active'
            )
            url = (DOMOTICZ_URL
                   + '/json.htm?type=command&param=customevent&event=TIMER&data={"idx":'
                   + self.state.id + ',"time":'
                   + str(params['timerTimeSec']) + ',"on":true}')
            requests.get(url, auth=CREDITS)

        if command == COMMAND_TIMER_CANCEL:
            url = (DOMOTICZ_URL
                   + '/json.htm?type=command&param=customevent&event=TIMER&data={"idx":'
                   + self.state.id + ',"cancel":true}')
            requests.get(url, auth=CREDITS)
Example #5
def init_network(**_kwargs):
    rules = INIT_RULES
    add_rules(rules)
    add_drop_rules()

    needs_forwarding = False
    with open('/proc/sys/net/ipv4/ip_forward', 'r') as f:
        # f.read() returns a string (e.g. '1\n'), so compare against '1', not the int 1
        if f.read().strip() != '1':
            needs_forwarding = True

    if needs_forwarding:
        logger.info('Enabling ip forwarding')

        if not DRY_RUN:
            with open('/proc/sys/net/ipv4/ip_forward', 'w') as f:
                f.write('1')
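For context, the /proc sysctl files read back as text, which is why the comparison must be against the string '1' rather than the integer 1 (Linux-only sketch):

with open('/proc/sys/net/ipv4/ip_forward') as f:
    raw = f.read()  # e.g. '1\n' -- always a str, never an int
print(repr(raw), raw.strip() == '1')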
Example #6
def on_message(mqtt_client, userdata, msg):
    app_logger.info("MQTT Data Received...")
    app_logger.info("MQTT Topic: " + str(msg.topic))
    app_logger.info("Data: " + str(msg.payload))
    database_instance = Database(sensor_topic=msg.topic,
                                 sensor_data=convert_from_byte_literal(
                                     msg.payload))
    database_instance.save()
    app_logger.info("Data saved to Influx database!")
Example #7
    def forceDevicesSync(self):
        userAgent = self.getUserAgent()
        enableReport = ReportState.enable_report_state()
        if userAgent is None:
            return 500  # internal error

        r = None
        data = {"agentUserId": userAgent}
        if enableReport:
            r = ReportState.call_homegraph_api(REQUEST_SYNC_BASE_URL, data)
            logger.info('Device synchronization sent')
        elif 'Homegraph_API_Key' in configuration and configuration['Homegraph_API_Key'] != 'ADD_YOUR HOMEGRAPH_API_KEY_HERE':
            r = ReportState.call_homegraph_api_key(REQUEST_SYNC_BASE_URL, data)
            logger.info('Device synchronization sent')
        else:
            logger.error("No configuration for request_sync available")

        return r
Example #8
    def populate_tables(self):
        for table in TABLE_NAMES:
            table_exists = self.curs.execute(sql_table_exist.format(table_name=table))
            file_name = f"{CSV_FOLDER}/{table.capitalize()}.csv"
            headers = self.headers_dict[table]
            logger.info(f'Filling out table {table}')
            if table_exists.fetchone()[0] and table_cols.get(table):
                with open(file_name, newline='') as csvfile:
                    reader = csv.reader(csvfile)
                    for row in reader:
                        if row[0] == 'source_id':
                            continue

                        if table == 'encounter':
                            if self.first_enc:
                                self.first_enc = False

                                patient_raw = self.curs.execute(sql_select_all.format(
                                    column_names='id, source_id',
                                    table_name='patient'))
                                self.patient_dict = {v: k for (k, v) in patient_raw.fetchall()}

                            pat_id = row[1].split('/')[1]
                            row[1] = str(self.patient_dict[pat_id])

                        if table in ('procedure', 'observation'):
                            if self.first_proc:
                                self.first_proc = False

                                enc_raw = self.curs.execute(sql_select_all.format(
                                    column_names='id, source_id',
                                    table_name='encounter'))
                                self.enc_dict = {v: k for (k, v) in enc_raw.fetchall()}

                            pat_id = row[1].split('/')[1]
                            row[1] = str(self.patient_dict[pat_id])
                            if row[2]:
                                enc_id = row[2].split('/')[1]
                                row[2] = str(self.enc_dict.get(enc_id, ''))

                        self.curs.execute(sql_insert_values.format(col_names=', '.join(headers),
                                                                   table_name=table,
                                                                   csv_row=self.preprocessing(','.join(row))))

            self.db_conection.commit()
Example #9
def checkupdate():
    if repo is not None and 'CheckForUpdates' in configuration and configuration['CheckForUpdates'] == True:
        try:
            r = requests.get(
                'https://raw.githubusercontent.com/DewGew/Domoticz-Google-Assistant/' + branch + '/const.py')
            response = r.text
            if VERSION not in response:
                update = 1
                logger.info("========")
                logger.info("   New version is available on Github!")
            else:
                update = 0
            return update
        except Exception as e:
            logger.error('Connection to Github refused! Check configuration. (%s)', e)
            return 0
    else:
        return 0
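The requests.get call above has no timeout, so a broken network path can hang the update check indefinitely; a hardened variant of just the fetch (same endpoint, with an assumed 10-second timeout) might look like:

import requests

def fetch_remote_const(branch):
    """Fetch const.py from the repo; raises on network failure or timeout."""
    url = ('https://raw.githubusercontent.com/DewGew/'
           'Domoticz-Google-Assistant/' + branch + '/const.py')
    r = requests.get(url, timeout=10)  # assumed timeout value
    r.raise_for_status()
    return r.text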
Example #10
    def create_tables(self):
        logger.info('Table creation started here')
        for table in self.table_names:
            table_exists = self.curs.execute(sql_table_exist.format(table_name=table))
            headers = self.headers_dict[table]
            if table_exists.fetchone()[0]:
                self.curs.execute(sql_truncate_table.format(table_name=table))
            else:
                if table == 'patient':
                    self.curs.execute(sql_create_table.format(table_name=table, columns=', '.join(table_cols[table])))
                if table == 'encounter':
                    self.curs.execute(sql_create_table_1fk.format(
                        table_name=table,
                        columns=', '.join(self.table_cols[table]),
                        key_name=headers[1],
                        other_table='patient',
                        other_pk='id'
                    ))
                if table == 'procedure':
                    self.curs.execute(sql_create_table_2fk.format(
                        table_name=table,
                        columns=', '.join(self.table_cols[table]),
                        key_name1=headers[1],
                        other_table1='patient',
                        other_pk1='id',
                        key_name2=headers[2],
                        other_table2='encounter',
                        other_pk2='id'
                    ))
                if table == 'observation':
                    self.curs.execute(sql_create_table_2fk.format(
                        table_name=table,
                        columns=', '.join(self.table_cols[table]),
                        key_name1=headers[1],
                        other_table1='patient',
                        other_pk1='id',
                        key_name2=headers[2],
                        other_table2='encounter',
                        other_pk2='id'
                    ))

            self.db_conection.commit()
Example #11
    def setup_replay_buffer(self):
        """Set up the experiential memory unit"""
        logger.info("setting up replay buffer")
        if self.hps.prioritized_replay:
            if self.hps.unreal:  # Unreal prioritized experience replay
                self.replay_buffer = UnrealReplayBuffer(
                    self.hps.mem_size, self.ob_shape, self.ac_shape)
            else:  # Vanilla prioritized experience replay
                self.replay_buffer = PrioritizedReplayBuffer(
                    self.hps.mem_size,
                    self.ob_shape,
                    self.ac_shape,
                    alpha=self.hps.alpha,
                    beta=self.hps.beta,
                    ranked=self.hps.ranked)
        else:  # Vanilla experience replay
            self.replay_buffer = ReplayBuffer(self.hps.mem_size, self.ob_shape,
                                              self.ac_shape)
        # Summarize replay buffer creation (relies on `__repr__` method)
        logger.info("  {} configured".format(self.replay_buffer))
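The final log line leans on the buffer's __repr__; a plausible (hypothetical) convention consistent with that log format, with assumed attribute names:

class ReplayBuffer:
    """Sketch: only the pieces needed to show the __repr__ convention."""

    def __init__(self, capacity, ob_shape, ac_shape):
        self.capacity = capacity  # assumed attribute name
        self.ob_shape = ob_shape
        self.ac_shape = ac_shape

    def __repr__(self):
        # Rendered by: logger.info("  {} configured".format(self.replay_buffer))
        return "ReplayBuffer(capacity={})".format(self.capacity)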
Example #12
    def download_button(self):
        """
        When download button is pressed
        """
        # get selected stream quality (itag)

        logger.info(
            f"itag of quality selected is "
            f"{self.comboBoxQuality.itemData(self.comboBoxQuality.currentIndex())}"
        )

        itag = self.comboBoxQuality.itemData(
            self.comboBoxQuality.currentIndex())

        if self.ytube is not None:
            self.ytube.download(location=self.user_directory, itag=itag)

            # Only report success when a video object actually exists
            self.showPopUp(
                f"{self.ytube.videoTitle} - has been downloaded successfully to:"
                f"\n{os.path.abspath(self.user_directory)}")
Example #13
    def enterURL(self):
        """ When OK button is pressed.
        Use the given URL to retrieve video information and process it
        """

        start_time = time.time()

        link = self.lineEditURL.text()
        self.ytube = YouTubeVideo(link,
                                  progress_callback=self.download_progress)
        if self.ytube.error:
            self.showPopUp(self.ytube.error)
            return

        # Display video title
        self.labelVideoTitle.setText(self.ytube.videoTitle)

        # Display thumbnail image
        pixmap = QPixmap()
        pixmap.loadFromData(
            urllib.request.urlopen(self.ytube.videoThumbnail).read())
        pixmap = pixmap.scaled(230, 230, Qt.KeepAspectRatio,
                               Qt.FastTransformation)
        self.labelThumbnail.setPixmap(pixmap)

        # Populate combo box
        self.populateComboBox()

        # enable download button
        self.btnDownload.setEnabled(True)

        # TODO get the rest of useful data

        # debug information
        # self.logger.info(f"URL: {self.ytube.url}")
        # self.logger.info(f"Video Title: {self.ytube.videoTitle}")
        # self.logger.info(
        #     f"Video Thumbnail: {self.ytube.videoThumbnail}")

        final_time = round(time.time() - start_time)
        logger.info(f"It took {final_time}s to get the data")
Example #14
    def compile_sensor_data(self):
        """Compiles sensor related data into a single object

        Return:
             db_data (list): compiled sensor data object
        """
        try:
            sensor_type = parser.get_sensor_type(self._sensor_topic)
            db_data = [{
                "measurement":
                sensor_type,
                "tags": {
                    "id": uuid.uuid4()
                },
                "fields":
                parser.parse_multiple_sensor_data_to_dict(self._sensor_data)
            }]
            app_logger.info("Sensor data compiled successfully.")
            return db_data
        except Exception as e:
            app_logger.error(str(e))
Example #15
    def smarthome_post(self, s):
        a = s.headers.get('Authorization', None)
        token = None
        if a is not None:
            auth_type, tokenH = a.split()
            if auth_type.lower() == 'bearer':
                token = Auth['tokens'].get(tokenH, None)

        if token is None:
            raise SmartHomeError(ERR_PROTOCOL_ERROR, 'not authorized access!!')

        message = json.loads(s.body)

        logger.info("Request: " +
                    json.dumps(message, indent=2, sort_keys=False))
        response = self.smarthome_process(message, token)

        try:
            if 'errorCode' in response['payload']:
                logger.info('Error handling message %s: %s' %
                            (message, response['payload']))
        except (KeyError, TypeError):
            pass
        s.send_json(200,
                    json.dumps(response, ensure_ascii=False).encode('utf-8'),
                    True)

        logger.info("Response: " +
                    json.dumps(response, indent=2, sort_keys=False))
Example #16
def restartServer():
    """Restart."""
    logger.info(' ')
    logger.info("Restart server")
    logger.info(' ')

    os.execv(sys.executable, ['python'] + sys.argv)
Example #17
    def encounters(self, table='encounter', col_name='start_date'):
        grouping = self.curs.execute(f"""select strftime('%w', start_date), count(*)
          from {table}
          group by strftime('%w', {col_name})""").fetchall()

        logger.info('* DAYS OF ENCOUNTERS *')
        grouping_sorted = sorted(grouping, reverse=True, key=lambda x: x[1])
        # strftime('%w') is Sunday-first (0..6) while calendar.day_name is
        # Monday-first; the -1 shift (negative indexing for Sunday) aligns them
        logger.info("The most popular Week day: " + calendar.day_name[int(grouping_sorted[0][0])-1]
                    + '; Number of Encounters: ' + str(grouping_sorted[0][1]))
        logger.info("The least popular Week day: " + calendar.day_name[int(grouping_sorted[-1][0])-1]
                    + '; Number of Encounters: ' + str(grouping_sorted[-1][1]))
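A quick standalone check of that index arithmetic (pure stdlib; the only assumption is SQLite's Sunday-first %w convention):

import calendar

# SQLite strftime('%w'): 0=Sunday, 1=Monday, ..., 6=Saturday
for w in range(7):
    # w=0 becomes index -1, i.e. the last entry of Monday-first day_name: Sunday
    print(w, calendar.day_name[w - 1])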
Example #18
def restartServer():
    """Restart."""
    logger.info(' ')
    logger.info("Restart server")
    logger.info(' ')
    
    time.sleep(5)
    
    pidfile.close()

    os.execv(sys.executable, ['python'] + sys.argv)
Example #19
    def parse_noise_type(self, noise_type):
        """Parse the `noise_type` hyperparameter"""
        ac_noise = None
        param_noise = None
        logger.info("parsing noise type")
        # Parse the comma-separated (with possible whitespaces) list of noise params
        for cur_noise_type in noise_type.split(','):
            # Remove leading and trailing whitespace
            cur_noise_type = cur_noise_type.strip()
            # If the specified noise type is literally 'none'
            if cur_noise_type == 'none':
                pass
            # If 'adaptive-param' is in the specified string for noise type
            elif 'adaptive-param' in cur_noise_type:
                # Set parameter noise
                _, std = cur_noise_type.split('_')
                std = float(std)
                param_noise = AdaptiveParamNoise(initial_std=std, delta=std)
                logger.info("  {} configured".format(param_noise))
            elif 'normal' in cur_noise_type:
                _, std = cur_noise_type.split('_')
                # Spherical (isotropic) gaussian action noise
                ac_noise = NormalAcNoise(mu=np.zeros(self.ac_dim),
                                         sigma=float(std) * np.ones(self.ac_dim))
                logger.info("  {} configured".format(ac_noise))
            elif 'ou' in cur_noise_type:
                _, std = cur_noise_type.split('_')
                # Ornstein-Uhlenbeck action noise
                ac_noise = OUAcNoise(mu=np.zeros(self.ac_dim),
                                     sigma=(float(std) * np.ones(self.ac_dim)))
                logger.info("  {} configured".format(ac_noise))
            else:
                raise RuntimeError("unknown specified noise type: '{}'".format(
                    cur_noise_type))
        return param_noise, ac_noise
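A hypothetical call, assuming the '<name>_<std>' comma-separated format parsed above and an `agent` instance exposing this method:

# Configure adaptive parameter noise plus Ornstein-Uhlenbeck action noise
param_noise, ac_noise = agent.parse_noise_type("adaptive-param_0.2, ou_0.2")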
Example #20
def read(data_dir, subject_id, split=False):
    logger.info('Loading Florence Data')
    logger.info('Data directory: %s' % data_dir)
    data, labels, lens, subjects = [], [], [], []
    florence = np.loadtxt(data_dir)

    frame_len = florence[:, 0:1].flatten()
    subject_array = florence[:, 1:2].flatten()
    label_array = florence[:, 2:3].flatten()
    counts = collections.Counter(frame_len)

    first, second = 0, 0
    for frame_num in counts:
        second += counts[frame_num]

        # frame normalizer
        action = florence[first:second][:, 3:]
        new_action = []
        for frame in action:
            frame = helpers.frame_normalizer(
                frame=frame, frame_size=frame_size)

            new_action.append(frame)

        data.append(new_action)

        lens.append(counts[frame_num])
        labels.append(int(label_array[first]))
        subjects.append(int(subject_array[first]))
        first = second

    data = np.asarray(data)
    labels = np.asarray(labels) - 1
    lens = np.asarray(lens)
    subjects = np.asarray(subjects)
    logger.info('initial shapes data-label-len: %s %s %s' %
                (data.shape, labels.shape, lens.shape))
    data = helpers.dataset_normalizer(data)

    if split:
        return helpers.test_train_splitter(subject_id, data, labels, lens,
                                           subjects)

    else:
        return data, labels, lens
Example #21
def gen_pdfs(username, apikey, max_pages, urls):
    # create the API client instance
    client = pdfcrowd.HtmlToPdfClient(username, apikey)
    client.setFailOnMainUrlError(True)

    for url in urls:
        if max_pages > 0:
            logger.info('converting max %s pages from %s', max_pages, url)
            client.setPrintPageRange('-{}'.format(max_pages))
        else:
            logger.info('converting %s', url)
        yield client.convertUrl(url)
        logger.info('%s pages converted', client.getPageCount())
        if max_pages > 0:
            max_pages -= client.getPageCount()
            if max_pages <= 0:
                break
Example #22
    def __init__(self, expert_path, num_demos):
        self.num_demos = num_demos

        with np.load(expert_path, allow_pickle=True) as data:
            self.data_map, self.stat_map = {}, {}
            for k, v in data.items():
                if k in ['ep_env_rets', 'ep_lens']:
                    self.stat_map[k] = v
                elif k in ['obs0', 'acs', 'env_rews', 'dones1', 'obs1']:
                    self.data_map[k] = np.concatenate(v[:num_demos])

        fmtstr = "[DEMOS] >>>> extracted {} transitions, from {} trajectories"
        logger.info(fmtstr.format(len(self), self.num_demos))
        rets_, lens_ = self.stat_map['ep_env_rets'], self.stat_map['ep_lens']
        logger.info("  episodic return: {}({})".format(np.mean(rets_),
                                                       np.std(rets_)))
        logger.info("  episodic length: {}({})".format(np.mean(lens_),
                                                       np.std(lens_)))
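len(self) in the log line implies a __len__ on this dataset class; a plausible (hypothetical) definition consistent with the data layout above:

    def __len__(self):
        # One transition per row of any concatenated per-step array (hypothetical)
        return len(self.data_map['obs0'])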
Example #23
    def __init__(self, exe_name: str):
        """
        Gets the process ID for the executable, then a handle for that process,
        then we get the base memory address for our process using the handle.

        With the base memory address known, we can then perform our standard
        memory calls (read_int, etc) to get data from memory.

        :param exe_name: The executable name of the program we want to read
        memory from
        """
        self.exe = exe_name
        try:
            self.pid = self._get_process_id()
            self.handle = self._get_process_handle()
            self.base_address = self._get_base_address()

            # There is definitely a better way to get lots of base memory data, but
            # this is v1 of automated pattern searching
            bulk_scan = self.read_bytes(self.base_address, 1000000000)
            self.u_world_base = search_data_for_pattern(
                bulk_scan, UWORLDPATTERN)
            self.g_object_base = search_data_for_pattern(
                bulk_scan, GOBJECTPATTERN)
            self.g_name_base = search_data_for_pattern(bulk_scan, GNAMEPATTERN)
            del bulk_scan

            g_name_offset = self.read_ulong(self.base_address +
                                            self.g_name_base + 3)
            g_name_ptr = self.base_address + self.g_name_base + g_name_offset + 7
            self.g_name_start_address = self.read_ptr(g_name_ptr)

            logger.info(f"gObject offset: {hex(self.g_object_base)}")
            logger.info(f"uWorld offset: {hex(self.u_world_base)}")
            logger.info(f"gName offset: {hex(self.g_name_base)}")
        except Exception as e:
            logger.error(f"Error initializing memory reader: {e}")
Example #24
    def write_files(self):
        logger.info('Saving of required files started here')
        if not self._check(self.dest_folder):
            os.mkdir(self.dest_folder)

        for file in self.file_names:
            if file.startswith('Encounter'):
                with open(self.dest_folder + file.split('.')[0] + '.csv',
                          'w',
                          newline='') as csvfile:
                    writer = csv.DictWriter(csvfile, fieldnames=enc_headers)
                    writer.writeheader()
                    with open(self.src_folder + '/' + file) as f:
                        for line in f:
                            loaded_data = json.loads(line)
                            writer.writerow({
                                enc_headers[0]:
                                loaded_data['id'],
                                enc_headers[1]:
                                loaded_data['subject']['reference'],
                                enc_headers[2]:
                                loaded_data['period']['start'],
                                enc_headers[3]:
                                loaded_data['period']['end'],
                                enc_headers[4]:
                                loaded_data['type'][0]['coding'][0]["code"],
                                enc_headers[5]:
                                loaded_data['type'][0]['coding'][0]["system"]
                            })

            if file.startswith('Patient'):
                with open(self.dest_folder + file.split('.')[0] + '.csv',
                          'w',
                          newline='') as csvfile:
                    writer = csv.DictWriter(csvfile, fieldnames=pat_headers)
                    writer.writeheader()
                    with open(self.src_folder + '/' + file) as f:
                        for line in f:
                            loaded_data = json.loads(line)

                            race_code = ''
                            race_code_system = ''
                            ethnicity_code = ''
                            ethnicity_code_system = ''
                            if loaded_data.get('extension'):
                                for ext in loaded_data.get('extension'):
                                    if ext['url'] == 'http://hl7.org/fhir/us/core/StructureDefinition/us-core-race':
                                        race_code = ext[
                                            'valueCodeableConcept']['coding'][
                                                0]['code']
                                        race_code_system = ext[
                                            'valueCodeableConcept']['coding'][
                                                0]['system']
                                    if ext['url'] == 'http://hl7.org/fhir/us/core/StructureDefinition/us-core-ethnicity':
                                        ethnicity_code = ext[
                                            'valueCodeableConcept']['coding'][
                                                0]['code']
                                        ethnicity_code_system = ext[
                                            'valueCodeableConcept']['coding'][
                                                0]['system']

                            writer.writerow({
                                pat_headers[0]:
                                loaded_data['id'],
                                pat_headers[1]:
                                loaded_data['birthDate'],
                                pat_headers[2]:
                                loaded_data['gender'],
                                pat_headers[3]:
                                race_code,
                                pat_headers[4]:
                                race_code_system,
                                pat_headers[5]:
                                ethnicity_code,
                                pat_headers[6]:
                                ethnicity_code_system,
                                pat_headers[7]:
                                loaded_data['address'][0]['country'] if
                                (loaded_data.get('address')
                                 and loaded_data['address'][0].get('country'))
                                else '',
                            })

            if file.startswith('Procedure'):
                with open(self.dest_folder + file.split('.')[0] + '.csv',
                          'w',
                          newline='') as csvfile:
                    writer = csv.DictWriter(csvfile, fieldnames=proc_headers)
                    writer.writeheader()
                    with open(self.src_folder + '/' + file) as f:
                        for line in f:
                            loaded_data = json.loads(line)
                            writer.writerow({
                                proc_headers[0]:
                                loaded_data['id'],
                                proc_headers[1]:
                                loaded_data['subject']['reference']
                                if loaded_data.get('subject') else '',
                                proc_headers[2]:
                                loaded_data['context']['reference']
                                if loaded_data.get('context') else '',
                                proc_headers[3]:
                                loaded_data['performedDateTime']
                                if loaded_data.get('performedDateTime') else
                                loaded_data['performedPeriod']['start'],
                                proc_headers[4]:
                                loaded_data['code']['coding'][0]['code'],
                                proc_headers[5]:
                                loaded_data['code']['coding'][0]['system'],
                            })

            if file.startswith('Observation'):
                with open(self.dest_folder + file.split('.')[0] + '.csv',
                          'w',
                          newline='') as csvfile:
                    writer = csv.DictWriter(csvfile, fieldnames=obs_headers)
                    writer.writeheader()
                    with open(self.src_folder + '/' + file) as f:
                        for line in f:
                            loaded_data = json.loads(line)
                            if loaded_data.get('component'):
                                for comp in loaded_data.get('component'):
                                    writer.writerow({
                                        obs_headers[0]:
                                        loaded_data['id'],
                                        obs_headers[1]:
                                        loaded_data['subject']['reference']
                                        if loaded_data.get('subject') else '',
                                        obs_headers[2]:
                                        loaded_data['context']['reference']
                                        if loaded_data.get('context') else '',
                                        obs_headers[3]:
                                        loaded_data['effectiveDateTime']
                                        if loaded_data.get('effectiveDateTime')
                                        else '',
                                        obs_headers[4]:
                                        comp['code']['coding'][0]['code'],
                                        obs_headers[5]:
                                        comp['code']['coding'][0]['system'],
                                        obs_headers[6]:
                                        comp['valueQuantity']['value']
                                        if comp.get('valueQuantity') else '',
                                        obs_headers[7]:
                                        comp['valueQuantity']['unit']
                                        if comp.get('valueQuantity') else '',
                                        obs_headers[8]:
                                        comp['valueQuantity']['system']
                                        if comp.get('valueQuantity') else '',
                                    })
                            else:
                                writer.writerow({
                                    obs_headers[0]:
                                    loaded_data['id'],
                                    obs_headers[1]:
                                    loaded_data['subject']['reference']
                                    if loaded_data.get('subject') else '',
                                    obs_headers[2]:
                                    loaded_data['context']['reference']
                                    if loaded_data.get('context') else '',
                                    obs_headers[3]:
                                    loaded_data['effectiveDateTime']
                                    if loaded_data.get('effectiveDateTime')
                                    else '',
                                    obs_headers[4]:
                                    loaded_data['code']['coding'][0]['code'],
                                    obs_headers[5]:
                                    loaded_data['code']['coding'][0]['system'],
                                    obs_headers[6]:
                                    loaded_data['valueQuantity']['value'] if
                                    loaded_data.get('valueQuantity') else '',
                                    obs_headers[7]:
                                    loaded_data['valueQuantity']['unit']
                                    if loaded_data.get('valueQuantity') and
                                    loaded_data['valueQuantity'].get('unit')
                                    else '',
                                    obs_headers[8]:
                                    loaded_data['valueQuantity']['system']
                                    if loaded_data.get('valueQuantity') and
                                    loaded_data['valueQuantity'].get('system')
                                    else '',
                                })

        logger.info('Saving of required files ended here')
Example #25


if __name__ == '__main__':
    logger.info('Reading and translating to CSV started here')
    r = Reader()
    r.valid_files()
    r.write_files()
Example #26
def run(args):
    """Spawn jobs"""

    # Create directory for spawned jobs
    os.makedirs("spawn", exist_ok=True)
    if CLUSTER == 'local':
        os.makedirs("tmux", exist_ok=True)

    # Get the hyperparameter set(s)
    if args.sweep:
        hpmaps_ = [
            get_hps(sweep=True)
            for _ in range(CONFIG['parameters']['num_trials'])
        ]
        # Flatten into a 1-dim list
        hpmaps = [x for hpmap in hpmaps_ for x in hpmap]
    else:
        hpmaps = get_hps(sweep=False)

    # Create associated task strings
    commands = [
        "python main.py \\\n{}".format(unroll_options(hpmap))
        for hpmap in hpmaps
    ]
    if not len(commands) == len(set(commands)):
        # Terminate in case of duplicate experiment (extremely unlikely though)
        raise ValueError("bad luck, there are dupes -> Try again (:")
    # Create the job maps
    names = [
        "{}{}_{}".format(TYPE,
                         str(i).zfill(3), hpmap['uuid'])
        for i, hpmap in enumerate(hpmaps)
    ]
    # Finally get all the required job strings
    jobs = [
        create_job_str(name, command)
        for name, command in zipsame(names, commands)
    ]

    # Spawn the jobs
    for i, (name, job) in enumerate(zipsame(names, jobs)):
        logger.info(
            ">>>>>>>>>>>>>>>>>>>> Job #{} ready to submit. Config below.".
            format(i))
        logger.info(job + "\n")
        dir_ = name.split('.')[2]
        os.makedirs("spawn/{}".format(dir_), exist_ok=True)
        job_name = "spawn/{}/{}.sh".format(dir_, name)
        with open(job_name, 'w') as f:
            f.write(job)
        if args.call and not CLUSTER == 'local':
            # Spawn the job!
            check_output(["sbatch", "spawn/{}/{}".format(dir_, job_name)])
            logger.info(">>>>>>>>>>>>>>>>>>>> Job #{} submitted.".format(i))
    # Summarize the number of jobs spawned
    logger.info(">>>>>>>>>>>>>>>>>>>> {} jobs were spawned.".format(len(jobs)))

    if CLUSTER == 'local':
        dir_ = hpmaps[0]['uuid'].split('.')[2]  # arbitrarily picked index 0
        session_name = "{}_{}seeds_{}".format(TYPE,
                                              str(NUM_SEEDS).zfill(2), dir_)
        yaml_content = {'session_name': session_name, 'windows': []}
        for i, name in enumerate(names):
            executable = "{}.sh".format(name)
            single_pane = {
                'shell_command': [
                    "source activate {}".format(CONDA),
                    "chmod u+x spawn/{}/{}".format(dir_, executable),
                    "spawn/{}/{}".format(dir_, executable)
                ]
            }
            yaml_content['windows'].append({
                'window_name':
                "job{}".format(str(i).zfill(2)),
                'panes': [single_pane]
            })
        # Dump the assembled tmux config into a yaml file
        job_config = "tmux/{}.yaml".format(session_name)
        with open(job_config, "w") as f:
            yaml.dump(yaml_content, f, default_flow_style=False)
        if args.call:
            # Spawn all the jobs in the tmux session!
            check_output(["tmuxp", "load", "{}".format(job_config)])
Example #27
                        required=True,
                        type=int)
    parser.add_argument('--batch_size',
                        help='the number of training examples '
                             'in one forward/backward pass',
                        required=True,
                        type=int)

    args = parser.parse_args()
    dataset = args.dataset
    epochs = args.epochs
    batch_size = args.batch_size

    is_valid_dataset(dataset)

    logger.info("Reading in preprocessed training {0} dataset".format(dataset))
    file_path = "{dir}{dataset}{suffix}".format(dir=DATADIR,
                                                dataset=dataset,
                                                suffix=TRAIN_FILE_SUFFIX)
    train_df = read(file_path)
    logger.info("Finished reading in preprocessed {0} dataset".format(dataset))

    logger.info("preparing training data")
    split = descriptor_activation_split(train_df)

    logger.info("Generating model")
    model = generate_model(split.shape)

    logger.info("fitting model")
    model.fit(split.descriptors,
              split.act,
Example #28
def AogGetDomain(device):
    if device["Type"] in ['Light/Switch', 'Lighting 1', 'Lighting 2', 'RFY']:
        if device["SwitchType"] in [
                'Blinds', 'Blinds Inverted', 'Venetian Blinds EU',
                'Venetian Blinds US', 'Blinds Percentage',
                'Blinds Percentage Inverted'
        ]:
            return blindsDOMAIN
        elif 'Door Lock' == device["SwitchType"]:
            return lockDOMAIN
        elif 'Door Lock Inverted' == device["SwitchType"]:
            return invlockDOMAIN
        elif "Door Contact" == device["SwitchType"]:
            return doorDOMAIN
        elif device["SwitchType"] in ['Push On Button', 'Push Off Button']:
            return pushDOMAIN
        elif 'Motion Sensor' == device["SwitchType"]:
            return sensorDOMAIN
        elif 'Selector' == device["SwitchType"]:
            return selectorDOMAIN
        elif 'Fan' == device["Image"]:
            return fanDOMAIN
        elif 'Camera_Stream' in configuration and True == device[
                "UsedByCamera"] and True == configuration['Camera_Stream'][
                    'Enabled']:
            return cameraDOMAIN
        elif 'Image_Override' in configuration and device[
                "Image"] in configuration['Image_Override']['Switch']:
            return switchDOMAIN
        elif 'Image_Override' in configuration and device[
                "Image"] in configuration['Image_Override']['Light']:
            return lightDOMAIN
        elif 'Image_Override' in configuration and device[
                "Image"] in configuration['Image_Override']['Media']:
            return mediaDOMAIN
        elif 'Image_Override' in configuration and device[
                "Image"] in configuration['Image_Override']['Outlet']:
            return outletDOMAIN
        elif 'Image_Override' in configuration and device[
                "Image"] in configuration['Image_Override']['Speaker']:
            return speakerDOMAIN
        else:
            return lightDOMAIN
    elif 'Group' == device["Type"]:
        return groupDOMAIN
    elif 'Scene' == device["Type"]:
        return sceneDOMAIN
    elif 'Temp' == device["Type"]:
        return tempDOMAIN
    elif 'Thermostat' == device['Type']:
        return climateDOMAIN
    elif 'Temp + Humidity' == device['Type']:
        return tempDOMAIN
    elif 'Temp + Humidity + Baro' == device['Type']:
        return tempDOMAIN
    elif 'Color Switch' == device["Type"] and "Dimmer" == device["SwitchType"]:
        return colorDOMAIN
    elif 'Color Switch' == device["Type"] and "On/Off" == device["SwitchType"]:
        logger.info(
            device["Name"] + " (Idx: " + device["idx"] +
            ") is a color switch. To get all functions, set this device as Dimmer in Domoticz"
        )
        return lightDOMAIN
    elif 'Security' == device["Type"]:
        return securityDOMAIN
    return None
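The repeated Image_Override branches could be table-driven; a hedged sketch of an equivalent lookup (same configuration keys and precedence, behavior preserved for those branches only):

# Hypothetical refactor of the Image_Override chain above
IMAGE_OVERRIDE_DOMAINS = [
    ('Switch', switchDOMAIN),
    ('Light', lightDOMAIN),
    ('Media', mediaDOMAIN),
    ('Outlet', outletDOMAIN),
    ('Speaker', speakerDOMAIN),
]

def image_override_domain(device, configuration):
    overrides = configuration.get('Image_Override', {})
    for key, domain in IMAGE_OVERRIDE_DOMAINS:
        if device["Image"] in overrides.get(key, []):
            return domain
    return None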
Example #29
    def settings_post(self, s):

        if (s.form.get("save")):
            textToSave = s.form.get("save", None)
            codeToSave = textToSave.replace("+", " ")
            saveFile(CONFIGFILE, codeToSave)

            message = 'Config saved'
            logger.info(message)
            meta = '<!-- <meta http-equiv="refresh" content="5"> -->'
            code = readFile(CONFIGFILE)
            logs = readFile(LOGFILE)
            template = TEMPLATE.format(message=message,
                                       uptime=uptime(),
                                       list=deviceList,
                                       meta=meta,
                                       code=code,
                                       conf=confJSON,
                                       public_url=public_url,
                                       logs=logs,
                                       update=update)

            s.send_message(200, template)

        if (s.form.get("backup")):
            codeToSave = readFile(CONFIGFILE)
            saveFile('config.yaml.bak', codeToSave)

            message = 'Backup saved'
            logger.info(message)
            meta = '<!-- <meta http-equiv="refresh" content="5"> -->'
            code = readFile(CONFIGFILE)
            logs = readFile(LOGFILE)
            template = TEMPLATE.format(message=message,
                                       uptime=uptime(),
                                       list=deviceList,
                                       meta=meta,
                                       code=code,
                                       conf=confJSON,
                                       public_url=public_url,
                                       logs=logs,
                                       update=update)

            s.send_message(200, template)

        if (s.form.get("restart")):
            message = 'Restart Server, please wait!'
            meta = '<meta http-equiv="refresh" content="5">'
            code = ''
            logs = ''
            template = TEMPLATE.format(message=message,
                                       uptime=uptime(),
                                       list=deviceList,
                                       meta=meta,
                                       code=code,
                                       conf=confJSON,
                                       public_url=public_url,
                                       logs=logs,
                                       update=update)

            s.send_message(200, template)
            restartServer()

        if (s.form.get("sync")):
            r = self.forceDevicesSync()
            time.sleep(0.5)
            message = 'Devices synchronized'
            meta = '<!-- <meta http-equiv="refresh" content="10"> -->'
            code = readFile(CONFIGFILE)
            logs = readFile(LOGFILE)
            template = TEMPLATE.format(message=message,
                                       uptime=uptime(),
                                       list=deviceList,
                                       meta=meta,
                                       code=code,
                                       conf=confJSON,
                                       public_url=public_url,
                                       logs=logs,
                                       update=update)
            s.send_message(200, template)

        if (s.form.get("reload")):
            message = ''
            meta = '<!-- <meta http-equiv="refresh" content="10"> -->'
            code = readFile(CONFIGFILE)
            logs = readFile(LOGFILE)
            template = TEMPLATE.format(message=message,
                                       uptime=uptime(),
                                       list=deviceList,
                                       meta=meta,
                                       code=code,
                                       conf=confJSON,
                                       public_url=public_url,
                                       logs=logs,
                                       update=update)
            s.send_message(200, template)

        if (s.form.get("deletelogs")):
            logfile = os.path.join(FILE_DIR, LOGFILE)
            if os.path.exists(logfile):
                f = open(logfile, 'w')
                f.close()
            logger.info('Logs removed by user')
            message = ''
            meta = '<!-- <meta http-equiv="refresh" content="10"> -->'
            code = readFile(CONFIGFILE)
            logs = readFile(LOGFILE)
            template = TEMPLATE.format(message=message,
                                       uptime=uptime(),
                                       list=deviceList,
                                       meta=meta,
                                       code=code,
                                       conf=confJSON,
                                       public_url=public_url,
                                       logs=logs,
                                       update=update)
            s.send_message(200, template)

        if (s.form.get("update")):
            os.system('bash ~/Domoticz-Google-Assistant/scripts/update.sh')
            message = 'Updated, Restarting Server, please wait!'
            meta = '<meta http-equiv="refresh" content="5">'
            code = readFile(CONFIGFILE)
            logs = readFile(LOGFILE)
            template = TEMPLATE.format(message=message,
                                       uptime=uptime(),
                                       list=deviceList,
                                       meta=meta,
                                       code=code,
                                       conf=confJSON,
                                       public_url=public_url,
                                       logs=logs,
                                       update=update)
            s.send_message(200, template)
            restartServer()
Example #30
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Evaluate QSAR data')
    parser.add_argument('--dataset',
                        help='Enter one of available datasets: {}'.format(
                            ", ".join(DATASETS)),
                        required=True)

    args = parser.parse_args()
    dataset = args.dataset

    is_valid_dataset(dataset)

    file_path = "{dir}{dataset}{suffix}".format(dir=DATADIR,
                                                dataset=dataset,
                                                suffix=TEST_FILE_SUFFIX)
    logger.info("Reading in preprocessed testing {0} dataset".format(dataset))
    test_df = read(file_path)
    logger.info("Finished reading in preprocessed {0} dataset".format(dataset))

    logger.info("Preparing testing data")
    split = descriptor_activation_split(test_df)

    logger.info("Loading {0} model".format(dataset))
    model = load_model('{0}{1}.h5'.format('/data/', dataset),
                       custom_objects={'r2': r2})

    logger.info("Evaluating {0} model".format(dataset))
    score = model.evaluate(split.descriptors, split.act)

    logger.info("R2 score {0}".format(score[1]))