def set_duty_cycle(self, pin, value):
        """Set the PWM duty cycle of a registered output pin.

        :param pin: pin number previously registered as an output with PWM
        :param value: duty cycle in percent; clamped into the range [0, 100]
        :return: None
        """
        if pin not in self.output_pins:
            log.error("pin %s was not registered as an output" % pin)
            return

        output_pin = self.output_pins[pin]

        if not output_pin['pwm']:
            log.error("pwm was not registered at pin %d" % pin)
            # Bug fix: without this return, the code below would call
            # .start()/.ChangeDutyCycle() on a missing PWM object.
            return

        # Clamp the requested duty cycle to the valid 0..100 range.
        if value > 100:
            log.warning("Given dutycycle (%d) is bigger than 100. Has been set to 100" % value)
            value = 100
        if value < 0:
            log.warning("Given dutycycle (%d) is lower than 0. Has been set to 0" % value)
            value = 0

        # The PWM object must be started once; afterwards only the duty
        # cycle is changed.  (Key 'pwm_startet' kept as-is: shared state.)
        if not output_pin['pwm_startet']:
            output_pin['pwm'].start(value)
            output_pin['pwm_startet'] = True
        else:
            output_pin['pwm'].ChangeDutyCycle(value)

        output_pin['dutycycle'] = value
        log.info("Dutycycle of pin %d has been set to %d" % (pin, value))
# Example 2
def crawler():
    """Download lyrics for every artist page listed in config.FULL_URLS.

    For each artist page, collects all song links, then downloads each
    lyric into <LYRICS_FOLDER>/<writer>/<song>.txt, skipping files that
    already exist.  Aborts entirely on IndexError, which signals that the
    site has started denying access.
    """
    counter = 1  # running count of downloaded songs, for log output only
    for url_ref in config.FULL_URLS:
        resp = requests.get(url_ref)
        if resp.status_code != 200:
            LOG.warning(f"Unable to load: {url_ref}")
            continue

        _, name = get_name(url_ref)
        # Ensure the artist folder exists (typo fixed: folter -> folder)
        folder_path = create_folder([config.LYRICS_FOLDER, name])
        # Collect all song links on the artist page
        parsed_html = BeautifulSoup(resp.content, features='html.parser')
        lyrics_links = parsed_html.select('.listalbum-item a')
        LOG.info(f"Number of {name.upper()} songs: {len(lyrics_links)}")

        lyric_paths = [extract_link(link) for link in lyrics_links]

        for lyric_path in lyric_paths:
            try:
                writer, song_name = get_name(lyric_path)
                # Songs credited to a different writer go into that
                # writer's own folder instead of the page artist's.
                if name != writer:
                    alt_folder = create_folder(
                        [config.LYRICS_FOLDER, writer])
                    lyrics_file = alt_folder.joinpath(song_name + '.txt')
                else:
                    lyrics_file = folder_path.joinpath(song_name + '.txt')

                if not lyrics_file.is_file():
                    text = get_lyrics(lyric_path).strip()
                    LOG.info("Downloading (" + str(counter).zfill(3) +
                             f") [{writer}]: {song_name}")
                    counter += 1

                    with open(lyrics_file, "w") as f:
                        f.write(text)
                    # Randomized delay to avoid hammering the server.
                    time.sleep(config.CRAWLER_WAIT +
                               config.CRAWLER_WAIT * random.random())

            except IndexError:
                LOG.error(
                    f"Access denied while scraping: {lyric_path} \n"
                    f"Try increasing the waiting time.\n"
                    f"Finishing the scrapping for the moment. Try to access on your browser to unblock access"
                )
                return
            except Exception as err:
                # Best-effort: log and keep going on any other failure.
                print(f"ERROR: {lyric_path}: {err}")
# Example 3
def parse_input(json_text):
    """Extract and trim the "Text" field from a request payload.

    :param json_text: mapping expected to contain a string under "Text"
    :return: the stripped text on success; on malformed input, the
        exception *class* (``KeyError`` / ``ValueError``).  Callers
        compare the return value against these classes, so this unusual
        contract is deliberately preserved.
    """
    try:
        # Keep the try body minimal: only the lookup can raise KeyError.
        text_body = json_text["Text"]
    except KeyError:
        LOG.warning(
            f"Wrong user input. User posted us the following: {json_text}")
        return KeyError
    if not isinstance(text_body, str):
        LOG.warning(
            f"Wrong user input. User posted us the following input {text_body} of type: {type(text_body)}"
        )
        return ValueError
    # Already verified to be a str; no need to re-wrap in str().
    return text_body.strip()