Example no. 1
    def set_duty_cycle(self, pin, value):
        """

        :param pin:
        :param value:
        :return:
        """
        if pin not in self.output_pins:
            log.error("pin %s was not registered as an output" % pin)
            return

        output_pin = self.output_pins[pin]

        if not output_pin['pwm']:
            log.error("pwm was not registered at pin %d" % pin)
            return
        if value > 100:
            log.warning("Given duty cycle (%d) is greater than 100; clamping to 100" % value)
            value = 100
        if value < 0:
            log.warning("Given duty cycle (%d) is lower than 0; clamping to 0" % value)
            value = 0

        if not output_pin['pwm_started']:
            output_pin['pwm'].start(value)
            output_pin['pwm_started'] = True
        else:
            output_pin['pwm'].ChangeDutyCycle(value)

        output_pin['dutycycle'] = value
        log.info("Dutycycle of pin %d has been set to %d" % (pin, value))
Example no. 2
    def _decode_single_body(self):
        self.body = self.body.strip()
        cte = self.headers.get('Content-Transfer-Encoding', '').lower()
        if 'quoted-printable' in cte:
            LOG.debug("Detected quoted-printable encoding, decoding")
            self.body = quopri.decodestring(self.body)
        if 'base64' in cte:
            LOG.debug("Detected base64 encoding, decoding")
            try:
                self.body = base64.decodebytes(self.body)  # decodestring was removed in Python 3.9
            except base64.binascii.Error:
                LOG.info("base64 decoder failed, trying partial decoding")
                self.body = base64_partial_decode(self.body)

        LOG.debug("Detected charset: %s", self.charset)
        charset = self.charset if validate_charset(self.charset) else 'ascii'
        try:
            self.body = self.body.decode(charset, 'strict')
        except UnicodeDecodeError:
            LOG.info('Error during strict decoding')
            self.email_stats['charset_errors'] = 1
            self.body = self.body.decode(charset, 'ignore')

        if self._guess_html():
            LOG.debug("Message recognized as HTML")
            self._parse_html()
        else:
            LOG.debug("Message recognized as plaintext")
Example no. 3
    def cleanup(self):
        """



        """
        log.info("Cleanup")
        self._all_output_off()
        GPIO.cleanup()
Example no. 4
    def send_msg(self, sender, target, data):
        if self._check_in_black(sender, target):
            LOG.info('Blocked packet %s->%s' % (sender, target))
            return

        queue = self.queues.get(target)
        if queue is not None:
            queue.put(data)
        else:
            raise Exception('Cannot send data: no queue registered for %s' % target)
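
For context, a sketch of how the queue registry above might be used; the dispatcher object and target names are illustrative only:

from queue import Queue

# Hypothetical setup: one Queue per known target.
dispatcher.queues['node-b'] = Queue()
dispatcher.send_msg('node-a', 'node-b', b'payload')  # enqueued for node-b
dispatcher.send_msg('node-a', 'node-c', b'payload')  # raises: no queue for node-c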
Example no. 5
def crawler():
    counter = 1
    for url_ref in config.FULL_URLS:
        resp = requests.get(url_ref)
        if resp.status_code == 200:
            _, name = get_name(url_ref)
            # Ensure folder exists
            folder_path = create_folder([config.LYRICS_FOLDER, name])
            # Get all links
            parsed_html = BeautifulSoup(resp.content, features='html.parser')
            lyrics_links = parsed_html.select('.listalbum-item a')
            LOG.info(f"Number of {name.upper()} songs: {len(lyrics_links)}")

            lyric_paths = [extract_link(link) for link in lyrics_links]

            for lyric_path in lyric_paths:

                try:
                    writer, song_name = get_name(lyric_path)
                    if name != writer:
                        alt_folder = create_folder(
                            [config.LYRICS_FOLDER, writer])
                        lyrics_file = alt_folder.joinpath(song_name + '.txt')
                        file_found = lyrics_file.is_file()
                    else:
                        lyrics_file = folder_path.joinpath(song_name + '.txt')
                        file_found = lyrics_file.is_file()

                    if not file_found:
                        # url = config.BASE_URL + lyric_path
                        text = get_lyrics(lyric_path).strip()
                        LOG.info("Downloading (" + str(counter).zfill(3) +
                                 f") [{writer}]: {song_name}")
                        counter += 1

                        with open(lyrics_file, "w") as f:
                            f.write(text)
                        time.sleep(config.CRAWLER_WAIT +
                                   config.CRAWLER_WAIT * random.random())

                except IndexError:
                    LOG.error(
                        f"Access denied while scraping: {lyric_path}\n"
                        "Try increasing the waiting time.\n"
                        "Stopping the scraping for now. Open the page in your browser to unblock access."
                    )
                    return
                except Exception as err:
                    print(f"ERROR: {lyric_path}: {err}")

        else:
            LOG.warning(f"Unable to load: {url_ref}")
Example no. 6
def load_model(model_dir: str, model_filename: str):
    LOG.info("Loading...")
    if model_filename in listdir(model_dir):
        with open(path.join(model_dir, model_filename), 'rb') as f:
            model = joblib.load(f)
        LOG.info(
            f"Successfully loaded model {model_filename} from {model_dir}!")
        return model
    else:
        LOG.error(
            f"Trained model {model_filename} is not in the expected {model_dir} directory! Please retrain the model."
        )
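
A short usage sketch; the directory and file names are illustrative, not taken from the project:

# Hypothetical call; the function returns None (after logging an error)
# when the file is missing, so guard before using the model.
model = load_model("models", "classifier.joblib")
if model is not None:
    predictions = model.predict(X_test)  # X_test assumed to exist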
Example no. 7
def load_tfidf_vect(vect_dir: str):
    LOG.info("Loading vectorizer...")
    filename = "tfidf_vect.joblib"
    if filename in listdir(vect_dir):
        with open(path.join(vect_dir, filename), 'rb') as f:
            vect = joblib.load(f)
        LOG.info(f"Successfully loaded vectorizer {filename} from {vect_dir}!")
        return vect
    else:
        LOG.error(
            f"Fitted vectorizer {filename} is not in the expected {vect_dir} directory! Please regenerate."
        )
Example no. 8
    def process_view(self, request, view_func, *view_args, **view_kwargs):
        if request.path == '/token/refresh/' and JWT_AUTH_REFRESH_COOKIE in request.COOKIES:
            if request.body != b'':
                data = json.loads(request.body)
                data['refresh'] = request.COOKIES[JWT_AUTH_REFRESH_COOKIE]
                request._body = json.dumps(data).encode('utf-8')
            else:
                LOG.info(
                    f"\n{L.FAIL} Error in api/users/middleware.py: the incoming request body must contain at least an empty JSON object.{L.ENDC}\n"
                )

        return None
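
For context, a hedged sketch of the middleware skeleton this hook would live in and how it is registered; the class name is an assumption (the module path comes from the log message above):

# api/users/middleware.py (class name is hypothetical)
class RefreshTokenMiddleware:
    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        return self.get_response(request)

    # process_view as shown above

# settings.py
MIDDLEWARE = [
    # ...
    'api.users.middleware.RefreshTokenMiddleware',
]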
Example no. 9
    def _decode_body(self):
        if self.mime_type and (self.mime_type.startswith('image/') or
                               self.mime_type.startswith('application/')):
            LOG.info("Body marked as image, skipping body")
            self.email_stats['attached_images'] += 1
            self.body = ""
            return

        if self.is_multipart:
            LOG.debug("Detected multipart/* content-type")
            self._decode_multipart_body()
        else:
            self._decode_single_body()
Example no. 10
def analyze_galera(net: Network):
    '''Fig2. Analysis of the Galera protocol on the `net` Network.

    This function builds a specific `filter` with the CIDR of the
    `net` and prints TCP `packets` that are related to the Galera
    communications.

    '''
    LOG.info(f'Listening for packets on {ifname(net)}...')
    scapy.sniff(
        iface=ifname(net),
        count=10,  # Stop analysis after 10 packets
        filter=f'net {net["cidr"]} and tcp and port 4567',
        prn=lambda packet: packet.summary())
Example no. 11
    def find(self):
        LOG.info("%s walk start", self.__class__.__name__)
        walker = Walker()
        self.__update_step(walker)
        file_instances = walker.walk(self.path_list)
        LOG.info("%s walk end", self.__class__.__name__)
        prev_filter = self.filter_list[0]
        prev_filter.set_files(file_instances)
        self.total = len(file_instances)
        self.__update_step(prev_filter)
        prev_filter.find()
        for _filter in self.filter_list[1:]:
            _filter.set_files(prev_filter.filtered_files)
            self.__update_step(_filter)
            _filter.find()
            prev_filter = _filter
Example no. 12
    def set_output(self, pin, output=1):
        """


        :param pin:
        :param output:
        """
        if pin not in self.output_pins:
            log.error("Pin %s was not registered as an output" % pin)
            return

        output_pin = self.output_pins[pin]

        if output_pin['pwm_started']:
            output_pin['pwm'].stop()
            output_pin['pwm_started'] = False

        GPIO.output(pin, output)
        log.info("Output of pin %d has been set to %d" % (pin, output))
Example no. 13
def run():
    LOG.info(f"\n{L.SUCCESS} Cleaning all job entries...{L.ENDC}")
    Job.objects.all().delete()
    LOG.info(f"{L.SUCCESS} Running the API crawler...{L.ENDC}")
    crawler()
    LOG.info(f"{L.SUCCESS} Running the scraper...{L.ENDC}\n")
    scraper()

    earliest_job = datetime.now(tzlocal()) - timedelta(days=45)
    Job.objects.filter(date__lt=earliest_job).delete()
    LOG.info(f"{L.SUCCESS} Done{L.ENDC}\n")
Example no. 14
def run_training():
    df = read_data_as_df(DATA_PATH)

    new_df = get_feature_df(df)
    tfidf_df = get_tfidf(new_df)

    X, y = preprocess_data(tfidf_df)

    X_test, y_test = X.loc[X.index == 'TEST'], y.loc[y.index == 'TEST'].values
    train_mask = (X.index == 'TRAIN') | (X.index == 'VALIDATION')
    X_train = X.loc[train_mask]
    y_train = y.loc[(y.index == 'TRAIN') | (y.index == 'VALIDATION')].values
    LOG.info(f"Training set: {X_train.shape}, Testing set: {X_test.shape}")
    LOG.info(
        f"Training set positive examples: {y_train.sum()}, Testing set positive examples: {y_test.sum()}"
    )

    clf_d = get_trained_models(["RF", "SGD", "LR", "SVM"], X_train, y_train)
    evaluate_models(clf_d, X_train, X_test, y_train, y_test)
Example no. 15
def main():
    path = sys.argv[1]
    LOG.info("Start to find duplicated files on {0}".format(path))

    if os.path.isfile(path):
        start_time = time.time()
        f = File(path)
        print(f.md5sum)
        print(f.size)
        end_time = time.time()
        print(end_time - start_time)
    else:
        start_time = time.time()
        filters = [
            core.algorithm.SizeFilter(),
            core.algorithm.CharacterFilter()
        ]
        dup_finder = core.dup_finder.DupFinder([path], filters)
        dup_finder.find()
        end_time = time.time()
        #dup_finder.dump2file("output.txt")
        dup_finder.dump2csv("output.csv")
        print(end_time - start_time)
        print(utils.size_renderer(dup_finder.dup_size))
Example no. 16
def run():
    """Script to create two test users, one for Guest and one for Admin."""
    LOG.info(f"\nCreating test users...")
    if not User.objects.filter(username='******').exists():
        user=User.objects.create_user(username='******', email='*****@*****.**', first_name="Guest", password='******')
        user.is_superuser=False
        user.is_staff=False
        user.save()
        LOG.info(f"\n{L.SUCCESS} Normal User '*****@*****.**' created.{L.ENDC}\n")

    if not User.objects.filter(username='******').exists():
        user = User.objects.create_user(username='******', email='*****@*****.**',
                                        password=os.environ.get("DJANGO_ADMIN_PASSWORD"))
        user.is_superuser = True
        user.is_staff = True
        user.save()
        LOG.info(f"\n{L.SUCCESS} Superuser 'admin' created.{L.ENDC}\n")
    LOG.info("Done.")
Example no. 17
def monitor(rs: Roles, nets: List[Network]):
    '''Fig4. Reusable function for monitoring.

    Collect metrics on `monitored` hosts. Store and see metrics on
    `aggregator` hosts. Use the `monitor` network to send metrics.

    '''
    # Discover networks to use net info in telegraf.conf.j2
    discover_networks(rs, nets)

    # Install Docker
    with play_on(pattern_hosts="all", roles=rs) as ansible:
        ansible.shell(
            "which docker || (curl -sSL https://get.docker.com/ | sh)",
            display_name="Install docker")
        ansible.apt(
            display_name="Install python-docker (for ansible docker_container)",
            name="python-docker", update_cache=True)

    # Install Telegraf on monitored machines
    with play_on(pattern_hosts="monitored", roles=rs, gather_facts="all") as ansible:
        ansible.template(
            display_name="Generating Telegraf conf",
            src="misc/telegraf.conf.j2",
            dest="/root/telegraf.conf")
        ansible.docker_container(
            display_name="Installing Telegraf",
            name="telegraf", image="telegraf:1.12-alpine",
            detach=True, network_mode="host", state="started",
            volumes=['/root/telegraf.conf:/etc/telegraf/telegraf.conf'])

    # Install InfluxDB and Grafana on `aggregator` machines
    with play_on(pattern_hosts="aggregator", roles=rs) as ansible:
        ansible.docker_container(
            display_name="Install InfluxDB",
            name="influxdb", image="influxdb:1.7-alpine",
            detach=True, state="started", network_mode="host",
            exposed_ports="8086:8086")
        ansible.wait_for(
            display_name="Waiting for InfluxDB to be ready",
            host="localhost", port="8086", state="started",
            delay=2, timeout=120,)

        ansible.docker_container(
            display_name="Install Grafana",
            name="grafana", image="grafana/grafana:5.4.3",
            detach=True, state="started", network_mode="host",
            exposed_ports="3000:3000")
        ansible.wait_for(
            display_name="Waiting for Grafana to be ready",
            host="localhost", port="3000", state="started",
            delay=2, timeout=120,)
        ansible.uri(
            display_name="Add InfluxDB in Grafana",
            url="http://localhost:3000/api/datasources",
            user="******", password="******", force_basic_auth=True,
            body_format="json", method="POST",
            status_code=[200, 409],  # 409 when the datasource already exists
            body=json.dumps({
                "name": "telegraf", "type": "influxdb",
                "url": "http://localhost:8086",
                "access": "proxy", "database": "telegraf",
                "isDefault": True}))
        ansible.uri(
            display_name="Import dashboard in Grafana",
            url="http://localhost:3000/api/dashboards/import",
            user="******", password="******", force_basic_auth=True,
            body_format="json", method="POST",
            status_code=[200],
            src="misc/grafana-dashboard.json")

    # Display UI URLs to view metrics
    ui_urls = [f'http://{h.extra["monitor_ip"]}:3000' for h in rs['aggregator']]
    LOG.info(f'View UI on {ui_urls}')
    LOG.info('Connect with `admin` as login and password, '
             'then skip the change password, '
             'and finally select `Host Dashboard`.')
Example no. 18
# -*- coding: utf-8 -*-

# Imports
from pprint import pformat
import yaml

from enoslib.infra.enos_vagrant.configuration import Configuration

from utils import infra, LOG


# Fig Code (Load the yaml file)
YAML_PATH = 'fig5.yaml'
with open(YAML_PATH) as yaml_file:
    YAML_DICT = yaml.safe_load(yaml_file)


# Test It!

# Define the infrastructure: 2 database machines, 2
# database/client machines, 1 net
CONF = Configuration.from_dictionnary(YAML_DICT)

# Setup the infra and call the `contextualize` function
LOG.info(f'Provisioning of {YAML_PATH}:\n{pformat(CONF.to_dict())}')
with infra(CONF):
    pass
Example no. 19
def analyze_galera(net: Network):
    '''Fig2. Analysis of the Galera protocol on the `net` Network.

    This function builds a specific `filter` with the CIDR of the
    `net` and prints TCP `packets` that are related to the Galera
    communications.

    '''
    LOG.info(f'Listening for packets on {ifname(net)}...')
    scapy.sniff(
        iface=ifname(net),
        count=10,  # Stop analysis after 10 packets
        filter=f'net {net["cidr"]} and tcp and port 4567',
        prn=lambda packet: packet.summary())


# Test it!

# Define the infrastructure: 2 machines, 1 net
CONF = (Configuration()
        .from_settings(backend="libvirt")
        .add_machine(flavour="tiny", number=2, roles=["database"])
        .add_network(cidr="192.168.42.0/24", roles=["database"])
        .finalize())

# Setup the infra and call the `analyze_galera` function
with infra(CONF) as (hosts, roles, networks):
    # First, install and configure active/active Galera
    setup_galera(roles, networks)

    # Then, analyze
    LOG.info(inspect.getsource(analyze_galera))
    analyze_galera(lookup_net(networks, "database"))
Example no. 20
def bench(parameter, env=None):
    LOG.info(f"Running bench with {parameter} on {env['roles']}")
Example no. 21
def signal_handler(signum, frame):
    print("")
    log.info("Shutting down ...")
    # do some cleanup work
    gpio_controller.cleanup()
    sys.exit(0)
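
The handler only takes effect once it is registered; a minimal sketch using the standard library:

import signal

# signal_handler receives (signum, frame) when Ctrl-C is pressed.
signal.signal(signal.SIGINT, signal_handler)
signal.pause()  # block until a signal arrives (Unix only)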
Example no. 22
def destroy(env=None):
    LOG.info(f"Running destroy on {env['roles']}")
Example no. 23
def delete(id):
    LOG.info('delete blog #%s', id)
    execute_sql('delete from blog where id = ?', (id,))
    redirect('/')
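
execute_sql is a project helper that is not shown; a minimal sketch of one plausible implementation, assuming sqlite3 and a DB_PATH constant:

import sqlite3

DB_PATH = 'blog.db'  # assumed; the real path is not shown

def execute_sql(sql, params=()):
    # Hypothetical helper: run one statement and commit on success.
    with sqlite3.connect(DB_PATH) as conn:
        cur = conn.execute(sql, params)
        return cur.fetchall()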
Example no. 24
def on_connect():
    log.info('Client connected!')
Example no. 25
def on_disconnect():
    log.info('Client disconnected!')
Example no. 26
def initial_message(msg):
    log.info('Initial Message from connected Client: %s' % msg['data'])
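
The three handlers above follow Flask-SocketIO conventions (note the msg['data'] access). A hedged wiring sketch, assuming that library; the 'initial_message' event name is an assumption:

from flask import Flask
from flask_socketio import SocketIO

app = Flask(__name__)
socketio = SocketIO(app)

# 'connect' and 'disconnect' are built-in events; the last name is assumed.
socketio.on_event('connect', on_connect)
socketio.on_event('disconnect', on_disconnect)
socketio.on_event('initial_message', initial_message)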
Example no. 27
def backup(env=None):
    LOG.info(f"Running backup on {env['roles']}")
Example no. 28
import inspect
from typing import List

from enoslib.host import Host
from enoslib.api import run as run_command
from enoslib.infra.enos_vagrant.configuration import Configuration

from utils import infra, LOG


# Fig Code
def contextualize(hosts: List[Host]):
    '''Fig1. Install MariaDB and Galera on a list of `hosts`.

    A `Host` is an abstract notion of unit of computation that can be
    bound to bare-metal machines, virtual machines or containers.

    '''
    run_command("apt install -y mariadb-server galera", hosts)


# Test it!

# Define the infrastructure: 2 machines
CONF = (Configuration()
        .from_settings(backend="virtualbox")
        .add_machine(flavour="tiny", number=2, roles=["database"])
        .finalize())

# Setup the infra and call the `contextualize` function
with infra(CONF) as (hosts, _, _):
    LOG.info(inspect.getsource(contextualize))
    contextualize(hosts)
Example no. 29
            body_format="json", method="POST",
            status_code=[200],
            src="misc/grafana-dashboard.json")

    # Display UI URLs to view metrics
    ui_urls = [f'http://{h.extra["monitor_ip"]}:3000' for h in rs['aggregator']]
    LOG.info(f'View UI on {ui_urls}')
    LOG.info('Connect with `admin` as login and password, '
             'then skip the change password, '
             'and finally select `Host Dashboard`.')


# Test it!

# Define the infrastructure: 2 database/monitored machines, 2
# database/client/monitored machines, 1 aggregator machine, 1 net for
# database, 1 net for monitoring.
CONF = (Configuration()
        .from_settings(backend="virtualbox")
        .add_machine(flavour='tiny', number=2, roles=['database', 'monitored'])
        .add_machine(flavour='tiny', number=2, roles=['database', 'client', 'monitored'])
        .add_machine(flavour='tiny', number=1, roles=['aggregator'])
        .add_network(cidr='192.168.43.0/24', roles=['database'])
        .add_network(cidr='192.168.44.0/24', roles=['monitor'])
        .finalize())

# Setup the infra and call the `monitor` function
with infra(CONF) as (_, roles, networks):
    LOG.info(inspect.getsource(monitor))
    monitor(roles, networks)