Example #1
def install(vpn_opts: ClientOpts, svc_opts: UnixServiceOpts,
            auto_startup: bool, auto_dnsmasq: bool, dnsmasq: bool,
            auto_connman_dhcp: bool, force: bool):
    executor = VPNClientExecutor(vpn_opts).probe(log_lvl=logger.INFO)
    dns_resolver = executor.device.dns_resolver
    if not dnsmasq and not dns_resolver.is_connman():
        logger.error('Only support dnsmasq as DNS resolver in first version')
        sys.exit(ErrorCode.NOT_YET_SUPPORTED)
    if executor.is_installed(silent=True):
        if force:
            logger.warn(
                'VPN service is already installed. Try to remove then reinstall...'
            )
            executor.do_uninstall(keep_vpn=False, keep_dnsmasq=True)
        else:
            logger.error('VPN service is already installed')
            sys.exit(ErrorCode.VPN_ALREADY_INSTALLED)
    if dnsmasq and not dns_resolver.is_dnsmasq_available(
    ) and not dns_resolver.is_connman():
        executor.device.install_dnsmasq(auto_dnsmasq)
    logger.info(
        f'Installing VPN client into [{vpn_opts.vpn_dir}] and register service[{svc_opts.service_name}]...'
    )
    executor.do_install(svc_opts, auto_startup, auto_connman_dhcp)
    logger.done()
Example #2
def detail(vpn_opts: ClientOpts, accounts):
    if accounts is None or len(accounts) == 0:
        logger.error('Must provide at least account')
        sys.exit(ErrorCode.INVALID_ARGUMENT)
    VPNClientExecutor(vpn_opts,
                      adhoc_task=True).require_install().probe().exec_command(
                          'AccountGet', params=accounts, log_lvl=logger.INFO)
Example #3
 def wrapper(*args, **kwargs):
     try:
         return func(*args, **kwargs)
     except Warning as w:
         logger.warning(str(w), exc_info=True)
     except Exception as e:
         logger.error(str(e), exc_info=True)
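The snippet above shows only the inner wrapper. A minimal, hypothetical sketch of the full decorator such a wrapper usually lives in (assuming a module-level logger; the name log_errors is not from the source) could look like:

import functools
import logging

logger = logging.getLogger(__name__)

def log_errors(func):
    """Hypothetical decorator: log warnings and exceptions from func instead of letting them propagate."""
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Warning as w:
            logger.warning(str(w), exc_info=True)
        except Exception as e:
            logger.error(str(e), exc_info=True)
    return wrapper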
Example #4
def __with_bit_depth(audio_segment, bit_depth):
    """Return a version of <audio_segment> with updated bit depth."""
    if bit_depth % 8 != 0:
        log.error(
            "Bit depth should be a multiple of 8, used value here is {0}".
            format(bit_depth))

    return audio_segment.set_sample_width(bit_depth // 8)
Example #5
def generate_sequence(oui: list, seq: list, quantity: int):
    length = len(oui) + len(seq)
    if length < 6:
        logger.error(
            f'Combination between OUI and next MAC sequence is invalid [{oui + seq}]'
        )
        sys.exit(10)
    mac = oui + (seq if length == 6 else seq[len(oui):6])
    return (increase(mac, -1) for _ in range(quantity))
Example #6
 def query_data(self):
     """Retrieve data points from database"""
     try:
         self.check_db()
         qr = QueryBuilder(measurement=self._measurement)
         rs = InfluxService.db_client.query(query=qr.generate_query())
         return list(rs.get_points())
     except Exception as e:
         app_logger.error(str(e))
Example #7
def delete(vpn_opts: ClientOpts, accounts):
    logger.info(
        f'Delete VPN account [{accounts}] and stop/disable VPN service if it\'s a current VPN connection...'
    )
    if accounts is None or len(accounts) == 0:
        logger.error('Must provide at least account')
        sys.exit(ErrorCode.INVALID_ARGUMENT)
    VPNClientExecutor(vpn_opts).require_install().probe(
        log_lvl=logger.INFO).do_delete(accounts)
    logger.done()
Example #8
def parseYamlFile(absoluteFilePath):

    with open(absoluteFilePath) as f:
        try:
            inputConfig = yaml.safe_load(f)  # yaml.load() without an explicit Loader is unsafe and deprecated
        except yaml.YAMLError as e:
            logger.error(str(e))
            sys.exit(1)

    logger.debug("Parsed config:\n{}".format(json.dumps(inputConfig)))

    return inputConfig
Example #9
def write_data():
    _dict = jsn.load()
    if not _dict:
        log.error("{0} is empty".format(tml.value('json', section='data', subkey='fname')))

    data = []
    for key in _dict:
        data.append(list(map(__convert, map(_read, (_dict[key], key)), repeat(DTYPE))))
    
    log.debug("{0} couples data/label have been retrieved".format(len(data)))

    npz.write(np.asarray(data))
Example #10
def _path_to_best_model(dpath):
    """Return path to best model amongst models in <dpath>."""
    fpaths = pth.__list_files(dpath)
    if not fpaths:
        log.error("\"{0}\" does not contain any file or is not a directory".format(dpath))
        return
    
    get_vloss = lambda fpath: float(pth.__no_extension(fpath).split('-')[-1])
    vlosses = list(map(get_vloss, fpaths))
    bmdl_idx = vlosses.index(min(vlosses))

    return fpaths[bmdl_idx]
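Note the assumption baked into get_vloss above: the validation loss must be the last dash-separated token of the file name (before the extension). Hypothetical file names that would satisfy it:

# "weights-010-0.0412.h5"  ->  val_loss 0.0412
# "weights-003-0.0978.h5"  ->  val_loss 0.0978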
Example #11
 def pre_exec(self, silent=False, log_lvl=logger.DEBUG, **kwargs):
     logger.log(log_lvl, 'Start VPN Client if not yet running...')
     if not self.is_installed(silent, log_lvl):
         return
     if self.pid_handler.is_running():
         self._prev_is_run = True
         return
     SystemHelper.exec_command(f'{self.opts.vpnclient} start',
                               log_lvl=logger.down_lvl(log_lvl))
     time.sleep(1)
     if not self.pid_handler.is_running(log_lvl=logger.down_lvl(log_lvl)):
         logger.error('Unable start VPN Client')
         sys.exit(ErrorCode.VPN_START_FAILED)
Example #12
File: index.py Project: play-iot/iot-vpn
 def get_command(self, ctx, cmd_name):
     name = cmd_name.encode(
         'ascii', 'replace') if sys.version_info[0] == 2 else cmd_name
     try:
         module = self.commands.get(name)
         if module is None:
             logger.error(f'Unsupported command "{name}"')
             click.echo(click.get_current_context().get_help())
             sys.exit(10)
         return __import__(to_module([module, 'cmd_' + name]), None, None,
                           ['cli']).cli
     except ImportError as err:
         logger.error("Load command failed {}::{}".format(name, str(err)))
         sys.exit(10)
Example #13
def __validate(file):
    keys, n, d = {}, 0, 0
    for row in file:
        n += 1
        keys[row] = keys[row] + [n] if row in keys else [n]
    for k, v in keys.items():
        if len(v) > 1:
            logger.warn(f'Duplicated key: {k.strip()} in lines {v}')
            d += 1
    if d == 0:
        logger.success('No duplication')
        sys.exit(0)
    logger.error(f'Duplicated {d} keys')
    sys.exit(20)
Example #14
def loop_interval(condition: Callable[[], bool], error_if_timeout: str, pre_func: Callable[[], None] = lambda: None,
                  max_retries: int = 5, interval: float = 1, throttle=0, exit_if_error=False):
    for c in range(max_retries + 1):
        pre_func()
        time.sleep(throttle)
        if condition():
            return
        time.sleep(interval)
    msg = f'{error_if_timeout} after {max_retries * interval}(s)'
    if exit_if_error:
        logger.error(msg)
        sys.exit(ErrorCode.TIMEOUT)
    else:
        raise TimeoutError(msg)
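A hypothetical call, assuming a service_is_ready() predicate defined elsewhere:

loop_interval(condition=service_is_ready,
              error_if_timeout='Service did not become ready',
              max_retries=10,
              interval=2,
              exit_if_error=True)

With exit_if_error=True the helper logs the timeout message and exits with ErrorCode.TIMEOUT; otherwise it raises TimeoutError.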
Example #15
def __copy(nic, asix1, asix2, oui, byte_fmt, sep):
    check = len(list(filter(lambda x: x, [asix1, asix2, oui])))
    if check > 1:
        logger.error(
            'Option [asix1, asix2, oui] is mutually exclusive with each another'
        )
        sys.exit(2)
    if check == 0:
        logger.error('Provide at least one option [asix1, asix2, oui]')
        sys.exit(2)
    try:
        nic_mac = netifaces.ifaddresses(nic)[netifaces.AF_LINK][0]['addr']
    except:
        logger.error(f'Not found NIC {nic} or MAC address of NIC {nic}')
        sys.exit(10)
    in_oui = asix1 or asix2 or oui
    oui = [int(c, base=16) for c in in_oui.split(sep)
           ] if type(in_oui) == str else (in_oui or [])
    mac = [int(c, base=16) for c in nic_mac.split(':')
           ] if type(nic_mac) == str else (nic_mac or [])
    mac = oui + mac[len(oui):6]
    if len(mac) != 6:
        logger.error(
            f'Combination between OUI and MAC is invalid [{in_oui},{nic_mac}]')
        sys.exit(10)
    logger.success(sep.join(byte_fmt % b for b in mac))
Example #16
def check_db():
    """Check if database exists"""
    # get all databases
    all_dbs_list = InfluxService.db_client.get_list_database()

    # check whether the configured database exists; warn if it does not
    if InfluxService.cnf.INFLUX_DB not in [
            str(x['name']) for x in all_dbs_list
    ]:
        try:
            app_logger.warning("Database {0} does not exist".format(
                InfluxService.cnf.INFLUX_DB))
        except exceptions.InfluxDBClientError as e:
            app_logger.error(str(e))
        except exceptions.InfluxDBServerError as e1:
            app_logger.error(str(e1))
    else:
        try:
            app_logger.info("Using db {0}".format(
                InfluxService.cnf.INFLUX_DB))
            InfluxService.db_client.switch_database(
                InfluxService.cnf.INFLUX_DB)
        except exceptions.InfluxDBClientError as e:
            app_logger.error(str(e))
        except exceptions.InfluxDBServerError as e1:
            app_logger.error(str(e1))
Example #17
 def install_dnsmasq(self, auto_install: bool = False):
     if not auto_install:
         logger.error(
             'dnsmasq is not yet installed. Please install [dnsmasq] depends on your distro'
         )
         sys.exit(ErrorCode.MISSING_REQUIREMENT)
     logger.info('Try to install [dnsmasq]...')
     pm = self.pm
     if not pm:
         logger.error(
             'Unknown package manager. Please install [dnsmasq] by yourself'
         )
         sys.exit(ErrorCode.MISSING_REQUIREMENT)
     pm.install('dnsmasq')
     self._dns_resolver(self.dns_resolver.probe())
Example #18
 def do_connect(self, account: str, log_lvl: int = logger.INFO):
     if not account:
         logger.error(f'VPN account is not correct')
         sys.exit(ErrorCode.INVALID_ARGUMENT)
     acc = self.storage.find(account)
     if not acc:
         logger.error(f'Not found VPN account')
         sys.exit(ErrorCode.VPN_ACCOUNT_NOT_FOUND)
     logger.log(log_lvl, f'Connect VPN account [{account}]...')
     self.storage.create_or_update(acc, _connect=True)
     self.exec_command(['AccountConnect'], params=account)
     self.lease_vpn_service(is_enable=acc.is_default,
                            is_restart=acc.is_default,
                            is_lease_ip=not acc.is_default,
                            account=acc.account)
Example #19
def __pcm2float(npy_array, _type='float64'):
    """Convert <npy_array> from pcm to float.
    Default conversion type is 'float64'.
    """
    if npy_array.dtype.kind != 'i':
        log.error(
            "\'__pcm2float\' takes an array of integers, forcing conversion to int16"
        )
        npy_array = npy_array.astype('int16')

    info = np.iinfo(npy_array.dtype)
    amp = 2**(info.bits - 1)
    offset = info.min + amp

    npy_array = (npy_array - offset) / amp

    return npy_array.clip(-1., 1.).astype(_type)
Example #20
File: handler.py Project: gtaylor/dott
    def _handle_other_errors(self, failure, invoker):
        """
        If execution reaches this point, we're probably dealing with an
        actual code issue. We'll print the traceback out for the user,
        then fall through to the logging error handler.
        """

        invoker.emit_to(
            'ERROR: A critical error has occurred. Please notify the staff.'
        )
        invoker.emit_to(failure.getTraceback())
        logger.error('Command handler encountered an error')
        logger.error('Invoker: %s' % invoker.get_appearance_name(None, force_admin_view=True))

        # Fall through to the last (and final) logging error handler so the
        # console can get a copy of the traceback.
        failure.trap()
Example #21
def shape(data):
    """Shape <data> to fit input of neural network."""
    log.debug("Shaping data")
    
    if data.ndim != 2:
        log.error("\'shape\' expects a two-dimensional array : (n_samples, sample_len)")
        return data

    _dtype = 'int{0}'.format(tml.value('bit_depth', section='audio'))
    
    if data.dtype != _dtype:
        log.warning("\'shape\' expects an {0} array".format(_dtype))
        
    data = data.astype('float64')
    for i in range(data.shape[0]):
        data[i] = __pcm2float(data[i].astype(_dtype))

    return data.reshape(*data.shape, 1)    
Example #22
def unshape(data):
    """Unshape <data> output of neural network."""
    log.debug("Unshaping data")
    
    if data.ndim != 3:
        log.error("\'unshape\' expects a three-dimensional array : (n_samples, sample_len, n_channels)")
        return data

    data = data.reshape(*data.shape[:-1])

    if data.dtype.kind != 'f':
        log.warning("\'unshape\' expects a float array")
        data = data.astype('float64')

    for i in range(data.shape[0]):
        data[i] = __float2pcm(data[i])

    return data.astype('int{0}'.format(tml.value('bit_depth', section='audio')))
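unshape calls a __float2pcm helper that is not shown in this listing. A minimal sketch of what such an inverse of __pcm2float might look like (an assumption, not the project's actual code):

def __float2pcm(npy_array, _type='int16'):
    """Hypothetical inverse of __pcm2float: map floats in [-1., 1.] back to integer PCM."""
    info = np.iinfo(_type)
    amp = 2**(info.bits - 1)
    offset = info.min + amp
    # scale back to the integer range and clamp to avoid overflow at +1.0
    return np.clip(npy_array * amp + offset, info.min, info.max).astype(_type)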
Example #23
 def setup(self, vpn_service: str, origin_resolv_conf: Path,
           vpn_resolv_conf: Path, vpn_nameserver_hook_conf: Path):
     if not self._available:
         logger.error('[dnsmasq] is not yet installed or is corrupted')
         sys.exit(ErrorCode.MISSING_REQUIREMENT)
     logger.info('Setup DNS resolver[dnsmasq]...')
     dnsmasq_vpn_cfg = self._dnsmasq_vpn_cfg(vpn_service)
     runtime_resolv_cfg = self.adapt_dnsmasq(origin_resolv_conf,
                                             vpn_service)
     dnsmasq_opts = {
         '{{DNS_RESOLVED_FILE}}':
         self.__build_dnsmasq_conf('resolv-file', runtime_resolv_cfg),
         '{{PORT}}':
         self.__build_dnsmasq_conf('port',
                                   self.dnsmasq_options().get('port',
                                                              None)),
         '{{CACHE_SIZE}}':
         self.__build_dnsmasq_conf(
             'cache-size',
             self.dnsmasq_options().get('cache_size', None))
     }
     logger.debug(
         f'Add [dnsmasq] config for {vpn_service}[{dnsmasq_vpn_cfg}]...')
     FileHelper.copy(self.resource_dir.joinpath(self.DNSMASQ_CONFIG_TMPL),
                     dnsmasq_vpn_cfg,
                     force=True)
     FileHelper.replace_in_file(dnsmasq_vpn_cfg, dnsmasq_opts, backup='')
     FileHelper.chmod(dnsmasq_vpn_cfg, mode=0o0644)
     logger.debug(
         f'Symlink [dnsmasq] VPN nameserver runtime configuration [{vpn_nameserver_hook_conf}]...'
     )
     FileHelper.create_symlink(vpn_nameserver_hook_conf,
                               self._dnsmasq_vpn_hook_cfg,
                               force=True)
     logger.info(f'Generate System DNS config file from VPN service...')
     FileHelper.write_file(vpn_resolv_conf,
                           self.__dnsmasq_resolv(vpn_service),
                           mode=0o0644)
     FileHelper.create_symlink(vpn_resolv_conf,
                               DNSResolver.DNS_SYSTEM_FILE,
                               force=True)
     self.service.enable(self.config.identity)
Example #24
 def create_config(self, vpn_service: str, auto_connman_dhcp: bool):
     if self.is_connman():
         FileHelper.write_file(self.connman_dhcp, str(auto_connman_dhcp))
         return
     if not FileHelper.is_readable(self.origin_resolv_cfg):
         logger.info(
             f'Backup System DNS config file to [{self.origin_resolv_cfg}]...'
         )
         FileHelper.backup(DNSResolver.DNS_SYSTEM_FILE,
                           self.origin_resolv_cfg,
                           remove=False)
     if not FileHelper.is_readable(self.origin_resolv_cfg):
         logger.error(
             f'Not found origin DNS config file [{self.origin_resolv_cfg}]')
         sys.exit(ErrorCode.FILE_CORRUPTED)
     if not FileHelper.is_readable(self.vpn_hook_cfg):
         FileHelper.touch(self.vpn_hook_cfg, 0o0644)
     self._resolver().setup(vpn_service, self.origin_resolv_cfg,
                            self.vpn_resolv_cfg, self.vpn_hook_cfg)
     self._resolver().restart(_all=True)
Example #25
def __generate(quantity, seq, rand, output, overwrite, asix1, asix2, oui, uaa,
               multicast, byte_fmt, sep):
    check = len(list(filter(lambda x: x, [asix1, asix2, oui])))
    if check > 1:
        logger.error(
            'Option [asix1, asix2, oui] is mutually exclusive with each another'
        )
        sys.exit(2)
    if seq and rand:
        logger.error(
            'Option [seq, rand] is mutually exclusive with each another')
        sys.exit(2)
    oui = asix1 or asix2 or oui
    oui = [int(c, base=16)
           for c in oui.split(sep)] if type(oui) == str else (oui or [])
    seq = [0 for _ in range(5 - len(oui))] + [-1] if seq is None else [
        int(c, base=16) for c in seq.split(sep)
    ]
    gen = generate_random(oui, quantity, uaa,
                          multicast) if rand else generate_sequence(
                              oui, seq, quantity)
    out((sep.join(byte_fmt % b for b in each) for each in gen), overwrite,
        output)
Example #26
 def exec_command(command: str,
                  shell=False,
                  silent=False,
                  log_lvl=logger.DEBUG) -> str:
     logger.decrease(log_lvl, "Execute command: %s", command)
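      # When not running through a shell, emulate a pipeline: split the command on " | "
      # and feed each segment's stdout into the next segment via subprocess input.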
     list_cmd = command.split(" | ") if not shell else [command]
     length = len(list_cmd)
     prev = None
     for idx, cmd in enumerate(list_cmd, 1):
         logger.trace("\tsub_command::%s::%s", cmd, prev)
         kwargs = {} if EnvHelper.is_py3_5() else {"encoding": "utf-8"}
         complete = subprocess.run(cmd.split() if not shell else cmd,
                                   input=prev,
                                   env=TWEAK_ENV,
                                   shell=shell,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE,
                                   **kwargs)
         ret = complete.returncode
         lvl = (logger.TRACE if idx < length else
                logger.DEBUG) if ret == 0 or silent else logger.ERROR
         try:
             prev = SystemHelper.__handle_command_result(
                 complete, silent, lvl)
         except RuntimeError as _:
             if not silent:
                 logger.error('Failed when executing command %s', cmd)
                 sys.exit(ret)
         finally:
             ret_val = ("0. Actual: %s" %
                        ret) if silent and ret != 0 else ret
             logger.decrease(log_lvl, "%sReturn code: %s",
                             "\t" if idx < length else "", ret_val)
     if prev:
         logger.log(log_lvl, "\t%s", prev)
     return prev
Example #27
def parseOptions(argv):

    # Intialize defaults
    inputFile = None

    try:
        options, args = getopt.getopt(argv, "hi:", ["inputFile="])
    except getopt.GetoptError as e:
        logger.error(str(e))
        sys.exit(1)

    for option, arg in options:
        if option == "-h":
            print(getCliHelpText())
            sys.exit()
        elif option in ("-i", "--inputFile"):
            inputFile = arg

    if not inputFile:
        raise Exception('-i/--inputFile is required')

    logger.debug("Input file: {}".format(inputFile))

    return (inputFile)
Example #28
def mse(x, y):
    len_x, len_y = x.shape[0], y.shape[0]
    if len_x == len_y:
        return sum((x - y)**2) / len_x

    log.error("Can't compute MSE, arrays have different lengths: {0} and {1}".format(len_x, len_y))
Example #29
 def wrapper(*args, **kwargs):
     if os.getuid() != 0:
         logger.error("You need root privileges to run this script")
         sys.exit(100)
     return func(*args, **kwargs)
Example #30
import sys, json, MySQLdb

from src.utils import logger, getCliHelpText, parseOptions, parseYamlFile
from src.data_gen import DataGen


if __name__ == "__main__":
    try:
        inputFile = parseOptions(sys.argv[1:])
    except Exception as e:
        logger.error(str(e))
        sys.exit(1)
    inputConfig = parseYamlFile(inputFile)

    if inputConfig["engine"] == "mysql":
        from src.schema_builders.mysql import MysqlSchemaBuilder as SchemaBuilder
        from src.writers.mysql import MysqlWriter as Writer
        from src.contexts.mysql import MysqlContext as Context
        conn = MySQLdb.connect(
            inputConfig["host"],
            inputConfig["user"],
            inputConfig["password"],
            inputConfig["database"],
            inputConfig["port"]
        )
        ctx = Context(conn, inputConfig)

    else:
        logger.error("Engine - {} not supported".format(inputConfig["engine"]))
        sys.exit(1)
Example #31
def main():

    if args.download:
        # Download all files
        downloader = Downloader()
        all_committees = downloader.committees_urls_list()
        logger.info("Starting to process %i committees" % len(all_committees))

        for idx, committee_url in enumerate([
                c for c in all_committees
        ]):  #  if 'foreign-affairs-committee' in c]):
            downloader.capture_committee_documents(committee_url)
            logger.info('Completed committee %i / %i' %
                        (idx, len(all_committees)))
        pass

    if args.parse:
        # Parse all HTML files, store to JSON, and summarise in XLSX
        html_filenames = glob.glob(
            os.path.expanduser('~/projects_data/parliament-text/*.html'))
        df = pd.DataFrame(columns=[
            'members', 'witnesses', 'speakers_dict', 'Q&A', 'plain_text'
        ],
                          index=[])
        for i, html_filename in enumerate(html_filenames[100:200]):
            with open(html_filename, 'r') as f:
                html_text = f.read()
            logger.info('%i / %i: %s' %
                        (i, len(html_filenames), html_filename))
            trscrpt = transcript(html_text, {
                'status': 'read html from directory',
                'html_file_location': f.name
            },
                                 html_filename=html_filename)
            trscrpt.process_raw_html()
            key_data, hyperlink = trscrpt.key_data_summary()
            df.loc[hyperlink] = key_data

        xlsx_filename = 'summary.xlsx'
        key_data_to_xlsx(df, xlsx_filename)

    if False:
        # Parse selected files
        key_html_documents = [
            '45429.html', '74822.html'
            # 'http://data.parliament.uk/writtenevidence/committeeevidence.svc/evidencedocument/treasury-committee/monetary-policy-forward-guidance/oral/3245.html'
        ]
        for d in key_html_documents:
            with open(
                    os.path.join(
                        os.path.expanduser('~/projects_data/parliament-text'),
                        d), 'r') as f:
                html_text = f.read()

            trscrpt = transcript(html_text, {'status': 'debugging case'},
                                 html_filename=d)
            trscrpt.process_raw_html()

    if args.diagnostic:
        # generate diagnostic spreadsheet summarising success of parsing
        all_summaries = []
        json_filenames = glob.glob(
            os.path.expanduser('~/projects_data/parliament-text/*.json'))
        # df_analysis = pd.DataFrame(columns=['members','witnesses',
        #                                     'speakers_dict','Q&A','plain_text'],
        #                            index=[])
        for i, json_filename in enumerate(json_filenames):
            with open(json_filename, 'r') as f:
                json_text = f.read()
            logger.info('%i / %i: %s' %
                        (i, len(json_filenames), json_filename))
            transcript_summary = analyse_json(json_text)
            all_summaries.append(transcript_summary)
            # key_data, hyperlink = trscrpt.key_data_summary()
            # df.loc[hyperlink] = key_data
        df_all_speaker_stats = pd.concat(all_summaries, ignore_index=True)
        speaker_data_to_xlsx(df_all_speaker_stats, 'speaker_summary.xlsx')

    elif args.analyse:
        # a quick chart to show how we might use the data
        df_all_speaker_stats = pd.read_excel(
            'speaker_summary.xlsx',
            sheet_name='speaker_stats',
            # skiprows=3,
            header=0)
        df_all_speaker_stats['title'] = df_all_speaker_stats[
            'witness_name'].str.extract(
                '(MP|Professor|Rt Hon|Lord QC|Dr |Brigadier|Commodore|Colonel|General|Marshal|Sir|Reverend|Rev\.|Rt Rev)',
                expand=False).str.strip()
        df_all_speaker_stats.loc[df_all_speaker_stats.title.isna(),
                                 'title'] = 'other'
        df_all_speaker_stats.title = df_all_speaker_stats.title.str.replace(
            re.compile('(Rev\.|Rt Rev)'), 'Reverend')
        df_all_speaker_stats.title = df_all_speaker_stats.title.str.replace(
            re.compile('(Brigadier|Commodore|Colonel|General|Marshal)'),
            '[military]')
        df_all_speaker_stats.title = pd.Categorical(df_all_speaker_stats.title)
        df_all_speaker_stats.title.value_counts()

        df_speakers_agg = df_all_speaker_stats[[
            'title', 'gunning_fog'
        ]].groupby(['title']).agg(['mean', 'count']).reset_index()
        df_speakers_agg.columns = [
            '_'.join(col).strip() for col in df_speakers_agg.columns.values
        ]
        df_speakers_agg.to_csv('test.csv')

        import matplotlib.pyplot as plt
        import seaborn as sns
        ax = sns.barplot(
            x="syllable_count_mean",
            y="witness_name_",
            data=df_speakers_agg[df_speakers_agg['syllable_count_count'] > 10])
        fig = ax.get_figure()
        fig.savefig('./img/barplot.png')

        ax2 = sns.barplot(x="title",
                          y="gunning_fog",
                          data=df_all_speaker_stats)
        plt.ylim(15, 18)
        fig = ax2.get_figure()
        fig.savefig('./img/barplot_fog_by_title.png')

    else:
        logger.error('No options selected')