Exemple #1
0
def main():
    """Command-line entry point for the PyEmail COM server / mailer.

    Dispatches on ``sys.argv``:
      --register/--unregister  register the PyEmail COM server with Windows
      py2exe                   build a frozen executable + NSIS installer
      /Automate                serve COM class factories (launched by Windows)
      /prueba                  interactive SMTP login/send test against Gmail
      otherwise                send one mail using the [MAIL] section of rece.ini
    """
    global DEBUG
    if "--register" in sys.argv or "--unregister" in sys.argv:
        import win32com.server.register

        win32com.server.register.UseCommandLine(PyEmail)
    elif "py2exe" in sys.argv:
        from distutils.core import setup
        from .nsis import build_installer, Target
        import py2exe

        setup(
            name="PyEmail",
            version=__version__,
            description="Interfaz PyAfipWs Email %s",
            long_description=__doc__,
            author="Mariano Reingart",
            author_email="*****@*****.**",
            url="http://www.sistemasagiles.com.ar",
            license="GNU GPL v3",
            com_server=["pyemail"],
            console=[],
            options={
                "py2exe": {
                    "includes": [
                        "email.generator",
                        "email.iterators",
                        "email.message",
                        "email.utils",
                    ],
                    "optimize": 2,
                    "excludes": [
                        "pywin",
                        "pywin.dialogs",
                        "pywin.dialogs.list",
                        "win32ui",
                        "distutils.core",
                        "py2exe",
                        "nsis",
                    ],
                    #'skip_archive': True,
                }
            },
            data_files=[
                (".", ["licencia.txt"]),
            ],
            cmdclass={"py2exe": build_installer},
        )
    elif "/Automate" in sys.argv:
        # MS seems to like /automate to run the class factories.
        import win32com.server.localserver

        # start the server.
        win32com.server.localserver.serve([PyEmail._reg_clsid_])
    elif "/prueba" in sys.argv:
        pyemail = PyEmail()
        import getpass

        i = sys.argv.index("/prueba")
        if i + 2 < len(sys.argv):
            # Reuse the index computed above instead of re-scanning argv twice.
            usuario = sys.argv[i + 1]
            clave = sys.argv[i + 2]
        else:
            usuario = input("usuario:")
            clave = getpass.getpass("clave:")
        ok = pyemail.Conectar("smtp.gmail.com",
                              usuario=usuario,
                              clave=clave,
                              puerto=587)
        print("login ok?", ok, pyemail.Excepcion)
        print(pyemail.Traceback)
        ok = pyemail.Enviar(usuario, "prueba", usuario, "prueba!", None)
        print("mail enviado?", ok, pyemail.Excepcion)
        ok = pyemail.Salir()
    else:
        config = SafeConfigParser()
        config.read("rece.ini")

        if "/debug" in sys.argv:
            DEBUG = True
            print("VERSION", __version__)
            sys.argv.remove("/debug")

        if len(sys.argv) < 3:
            print("Parámetros: motivo destinatario [mensaje] [archivo]")
            sys.exit(1)

        conf_mail = dict(config.items("MAIL"))
        motivo = sys.argv[1]
        destinatario = sys.argv[2]
        # Proper conditional expressions instead of the `x and a or b` idiom,
        # which would silently discard a falsy (empty-string) explicit argument.
        mensaje = sys.argv[3] if len(sys.argv) > 3 else conf_mail["cuerpo"]
        archivo = sys.argv[4] if len(sys.argv) > 4 else None

        print("Motivo: ", motivo)
        print("Destinatario: ", destinatario)
        print("Mensaje: ", mensaje)
        print("Archivo: ", archivo)

        pyemail = PyEmail()
        ok = pyemail.Conectar(
            conf_mail["servidor"],
            conf_mail["usuario"],
            conf_mail["clave"],
            conf_mail.get("puerto", 25),
        )
        if ok:
            pyemail.Enviar(conf_mail["remitente"], motivo, destinatario,
                           mensaje, archivo)
        else:
            print(pyemail.Traceback)
Exemple #2
0

def getlist(self, section, option):
    """Parse a comma-separated option value into a list of trimmed strings."""
    raw_value = self.get(section, option)
    return list(map(str.strip, raw_value.split(',')))


# Monkey-patch: attach getlist() as a method so every SafeConfigParser
# instance can read comma-separated option values as lists.
SafeConfigParser.getlist = getlist

# Candidate config files in read order; files missing on disk are simply
# skipped by ConfigParser.read(), and options from files read later take
# precedence over earlier ones.
files = [
    os.path.join(os.path.dirname(os.path.realpath(__file__)),
                 '../default-config.ini'),
    os.path.join(os.path.dirname(os.path.realpath(__file__)), '../config.ini'),
    '/etc/voctomix/voctocore.ini',
    '/etc/voctomix.ini',  # deprecated
    '/etc/voctocore.ini',
    os.path.expanduser('~/.voctomix.ini'),  # deprecated
    os.path.expanduser('~/.voctocore.ini'),
]

# An explicitly supplied ini file (CLI argument) gets highest precedence.
if Args.ini_file is not None:
    files.append(Args.ini_file)

Config = SafeConfigParser()
# read() returns only the names of files it could actually open and parse.
readfiles = Config.read(files)

log = logging.getLogger('ConfigParser')
log.debug('considered config-files: \n%s',
          "\n".join(["\t\t" + os.path.normpath(file) for file in files]))
log.debug('successfully parsed config-files: \n%s',
          "\n".join(["\t\t" + os.path.normpath(file) for file in readfiles]))
Exemple #3
0
from configparser import SafeConfigParser
import pythoncom
import wmi
from utils import conf_filename, wait_for_result, log_fw
from PyQt5.QtWidgets import QMessageBox, QDialog, QLabel, QVBoxLayout
from PyQt5.QtCore import Qt
from threading import Thread
from copyer import run

parser = SafeConfigParser()


def is_removable_drive(drive_letter):
    """Return True if *drive_letter* refers to a removable disk.

    Parameters
    ----------
    drive_letter : str
        Drive letter such as ``"E:"`` (any backslashes are stripped before
        the WMI query).
    """
    # COM must be initialized on the calling thread before using WMI.
    pythoncom.CoInitialize()
    c = wmi.WMI()
    drive_letter = drive_letter.replace('\\', '')
    # The query result's truthiness replaces the original pointless
    # list-comprehension copy followed by len(...) > 0.
    return bool(
        c.Win32_LogicalDisk(Description='Removable Disk',
                            DeviceID=drive_letter))


def get_serial_by_drive_letter(drive_letter):
    """Find the partition device ID backing the given logical drive letter.

    NOTE(review): ``logical_drive`` is computed but never returned and the
    function ends here — the body appears truncated (the name suggests a
    serial-number lookup should follow); confirm against the original source.
    """
    # COM must be initialized on the calling thread before using WMI.
    pythoncom.CoInitialize()
    c = wmi.WMI()
    # First logical-disk→partition mapping whose dependent (logical disk)
    # device ID appears in the requested drive letter, case-insensitively.
    # Raises IndexError if no mapping matches.
    logical_drive = [
        logicalDisk.Antecedent.deviceID
        for logicalDisk in c.Win32_LogicalDiskToPartition()
        if logicalDisk.Dependent.deviceID.lower() in drive_letter.lower()
    ][0]
Exemple #4
0
    def fromFile(cls, initFile, initList=None):
        """Loads specified features from file.

        Every option is read from the ``[constants]`` section of the init
        file, falling back to the corresponding ``dlac`` default when the
        option is absent.

        Parameters
        ----------
        initFile : str
            Path to file.

        initList : list
            List of classes to load; overrides any ``init`` option found
            in the file.

        Returns
        -------
        An instance of ``cls`` built from the parsed settings.
        """
        parser = SafeConfigParser()
        parser.read(initFile)

        def _get(option, default):
            # Raw-string [constants] option, or the given default.
            if parser.has_option('constants', option):
                return parser.get('constants', option)
            return default

        def _get_list(option, default):
            # [constants] option split on commas and stripped, or default.
            if parser.has_option('constants', option):
                return [
                    o.strip()
                    for o in parser.get('constants', option).split(",")
                ]
            return default

        corpdb = _get('corpdb', dlac.DEF_CORPDB)
        corptable = _get('corptable', dlac.DEF_CORPTABLE)
        correl_field = _get('correl_field', dlac.DEF_CORREL_FIELD)
        mysql_host = _get('mysql_host', dlac.MYSQL_HOST)
        message_field = _get('message_field', dlac.DEF_MESSAGE_FIELD)
        messageid_field = _get('messageid_field', dlac.DEF_MESSAGEID_FIELD)
        encoding = _get('encoding', dlac.DEF_ENCODING)
        if parser.has_option('constants', 'use_unicode'):
            use_unicode = parser.get('constants', 'use_unicode') == "True"
        else:
            use_unicode = dlac.DEF_UNICODE_SWITCH
        lexicondb = _get('lexicondb', dlac.DEF_LEXICON_DB)
        # str.split(",") always yields at least one element, so the original
        # "empty split" fallback branch was dead code; a plain list-or-default
        # read covers the same behavior.
        featureTable = _get_list('feattable', dlac.DEF_FEAT_TABLE)
        featNames = _get('featnames', dlac.DEF_FEAT_NAMES)
        date_field = _get('date_field', dlac.DEF_DATE_FIELD)
        outcome_table = _get('outcometable', dlac.DEF_OUTCOME_TABLE)
        outcome_value_fields = _get_list('outcomefields',
                                         [dlac.DEF_OUTCOME_FIELD])  # possible list
        outcome_controls = _get_list('outcomecontrols',
                                     dlac.DEF_OUTCOME_CONTROLS)  # possible list
        # NOTE(review): the original also defaulted outcome_interaction to
        # DEF_OUTCOME_CONTROLS (there is no DEF_OUTCOME_INTERACTION in use
        # here); preserved as-is — confirm this is intended.
        outcome_interaction = _get_list('outcomeinteraction',
                                        dlac.DEF_OUTCOME_CONTROLS)  # possible list
        # Default computed lazily so getGroupFreqThresh() only runs when the
        # option is absent (matches the original conditional expression).
        if parser.has_option('constants', 'groupfreqthresh'):
            group_freq_thresh = int(parser.get('constants', 'groupfreqthresh'))
        else:
            group_freq_thresh = dlac.getGroupFreqThresh(correl_field)
        featureMappingTable = _get('featlabelmaptable', '')
        featureMappingLex = _get('featlabelmaplex', '')
        output_name = _get('outputname', '')
        wordTable = _get('wordTable', None)
        model = _get('model', dlac.DEF_MODEL)
        # Mapped through DEF_FEATURE_SELECTION_MAPPING only when present;
        # a missing key in that mapping raises KeyError, as before.
        if parser.has_option('constants', 'featureselection'):
            feature_selection = dlac.DEF_FEATURE_SELECTION_MAPPING[
                parser.get('constants', 'featureselection')]
        else:
            feature_selection = ''
        feature_selection_string = _get('featureselectionstring', '')
        if initList:
            init = initList
        else:
            init = _get_list('init',
                             ['fw', 'fg', 'fe', 'fr', 'og', 'oa', 'rp', 'cp'])
        return cls(corpdb=corpdb,
                   corptable=corptable,
                   correl_field=correl_field,
                   mysql_host=mysql_host,
                   message_field=message_field,
                   messageid_field=messageid_field,
                   encoding=encoding,
                   use_unicode=use_unicode,
                   lexicondb=lexicondb,
                   featureTable=featureTable,
                   featNames=featNames,
                   date_field=date_field,
                   outcome_table=outcome_table,
                   outcome_value_fields=outcome_value_fields,
                   outcome_controls=outcome_controls,
                   outcome_interaction=outcome_interaction,
                   group_freq_thresh=group_freq_thresh,
                   featureMappingTable=featureMappingTable,
                   featureMappingLex=featureMappingLex,
                   output_name=output_name,
                   wordTable=wordTable,
                   model=model,
                   feature_selection=feature_selection,
                   feature_selection_string=feature_selection_string,
                   init=init)
Exemple #5
0
def main():
    """Generate a diceware passphrase from config-file and CLI options.

    Reads defaults from ~/.diceware.py/config, parses command-line options
    (word count, special characters, word-list language or file, separator,
    grid mode), loads the word list, and prints either a single passphrase
    or an NxN grid of words.
    """

    def ensure_dir(path):
        """Ensure that path is a directory creating it if necessary.

        If path already exists and is not a directory, print an error
        message and quit with sys.exit().

        Parameters:
          path   String specifying the path to ensure

        Return value:
          path

        """
        if not os.path.exists(path):
            os.makedirs(path)
        elif not os.path.isdir(path):
            print("error: '%s' is not a directory" % path)
            sys.exit(1)
        return path

    def config_default(config, section, option, default):
        """Set default values for options that do not have a value."""
        try:
            config.get(section, option)
        except NoSectionError:
            config.add_section(section)
            config.set(section, option, default)
        except NoOptionError:
            config.set(section, option, default)

    config_dir = ensure_dir(os.path.expanduser("~/.diceware.py"))
    cache_dir = ensure_dir(os.path.join(config_dir, "cache"))

    # Parse config file
    config_file = os.path.join(config_dir, "config")
    config = SafeConfigParser()
    config.read(config_file)

    config_default(config, "defaults", "lang", "en")
    config_default(config, "defaults", "words", "5")
    config_default(config, "defaults", "special", "0")
    config_default(config, "defaults", "file", "")
    config_default(config, "defaults", "separator", " ")

    # Sanity checks for config options.  Membership tests on the dict
    # itself instead of the redundant .keys() call.
    if config.get("defaults", "lang") not in WORD_LIST_URLS:
        print("error: '%s' is not a valid value for option 'lang'" %
              config.get("defaults", "lang"))
        sys.exit(1)
    try:
        config.getint("defaults", "words")
        config.getint("defaults", "special")
    except ValueError:
        print("error: 'words' and 'special' options must have integer values")
        sys.exit(1)

    # Parse command line arguments
    parser = OptionParser()
    parser.add_option("-g",
                      "--grid",
                      dest="grid",
                      action="store_true",
                      help="Instead of a single line, generate NxN grid of " +
                      "words. This makes eavesdropping harder")
    parser.add_option("-n",
                      "--words",
                      dest="words",
                      type="int",
                      metavar="N",
                      help="generate N words (default: %default)",
                      default=config.getint("defaults", "words"))
    parser.add_option("-s",
                      "--special",
                      dest="special",
                      type="int",
                      metavar="M",
                      help="insert M special characters (default: %default)",
                      default=config.getint("defaults", "special"))
    parser.add_option(
        "-f",
        "--file",
        dest="file",
        metavar="FILE",
        help="override the `lang' option and read the word list " +
        "from FILE",
        default=config.get("defaults", "file"))
    parser.add_option(
        "-p",
        "--separator",
        dest="separator",
        type="string",
        metavar="P",
        help="specify the separator between words (default: %default)",
        default=config.get("defaults", "separator"))
    linguas = sorted(WORD_LIST_URLS)
    parser.add_option("-l",
                      "--lang",
                      dest="lang",
                      metavar="LANG",
                      type="choice",
                      choices=linguas,
                      help="use the word list for LANG (" +
                      ", ".join(linguas) + ") (default: %default)",
                      default=config.get("defaults", "lang"))

    options, args = parser.parse_args()
    if args or options.words < 1 or options.special < 0:
        parser.print_help()
        sys.exit(0)

    parser.destroy()

    # --file has higher precedence than --lang
    if options.file:
        try:
            fobj = open(options.file)
        except IOError:
            print("error: unable to open word list file '%s'" % options.file)
            sys.exit(1)
        try:
            # `with fobj` closes the handle after reading — the original
            # leaked it.  The ValueError scope matches the original: it
            # only covers read_word_list(), not open().
            with fobj:
                word_list = read_word_list(fobj)
        except ValueError as e:
            print("error: %s" % e)
            sys.exit(1)
    else:
        word_list = get_word_list(cache_dir, options.lang)

    if not options.grid:
        words, with_specials = generate(word_list, options.words,
                                        options.special)
        print("passphrase   : %s" % options.separator.join(words))
        if options.special > 0:
            print("with specials: %s" % options.separator.join(with_specials))
    else:
        words, length = generate_grid(word_list, options.words,
                                      options.special)
        for word_row in words:
            print(" ".join([word.ljust(length) for word in word_row]))
Exemple #6
0
    def create_from_config_file(cls,
                                config_file="~/.hpecp.conf",
                                profile=None):
        """Create a ContainerPlatformClient object from a configuration file.

        Parameters
        ----------
        config_file : str
            The configuration filename and path
        profile : str
            If the configuration file has multiple profile sections, you
            can select the profile to use.

        Returns
        -------
        ContainerPlatformClient:
            An instance of ContainerPlatformClient is returned.

        Example
        -------
        Below is an example configuration file.

        [default]
        api_host = 127.0.0.1
        api_port = 8080
        use_ssl = True
        verify_ssl = False
        warn_ssl = False

        [demoserver]
        username = admin
        password = admin123
        tenant = /api/v1/tenant/2
        """
        _log = Logger.get_logger()

        if profile is None:
            profile = "default"

        # Expand a leading "~" manually so "~user" style paths are not
        # mis-expanded; only the current user's home is substituted.
        if config_file.startswith("~"):
            file_path = config_file[1:]
            file_path = file_path.lstrip("/")
            config_file = os.path.join(os.path.expanduser("~"), file_path)

        if not os.path.exists(config_file):
            raise ContainerPlatformClientException(
                "Could not find configuration file '{}'".format(config_file))

        config = SafeConfigParser()
        # readfp() was removed in Python 3.12; read_file() is the
        # long-standing replacement.  The context manager also closes the
        # file handle, which the original code leaked.
        with codecs.open(config_file, "r", "utf8") as fp:
            config.read_file(fp)

        # NOTE: assert-based validation is stripped under "python -O";
        # kept as asserts so callers catching AssertionError still work.
        assert (profile
                in config.sections()), "'{}' section not found in '{}'".format(
                    profile, config_file)
        assert ("username" in config[profile]
                or "username" in config["default"]), (
                    "'username' not found in section '{}' or in "
                    "the default section".format(profile))
        assert ("password" in config[profile]
                or "password" in config["default"]), (
                    "'password' not found in section '{}' "
                    "or in the default section".format(profile))
        assert ("api_host" in config[profile]
                or "api_host" in config["default"]), (
                    "'api_host' not found in section '{}' or in "
                    "the default section".format(profile))
        assert ("api_port" in config[profile]
                or "api_port" in config["default"]), (
                    "'api_port' not found in section '{}' or in "
                    "the default section".format(profile))
        assert ("use_ssl" in config[profile]
                or "use_ssl" in config["default"]), (
                    "'use_ssl' not found in section '{}' or in"
                    "the default section".format(profile))
        assert ("verify_ssl" in config[profile]
                or "verify_ssl" in config["default"]), (
                    "'verify_ssl' not found in section '{}' or in"
                    "the default section".format(profile))
        assert ("warn_ssl" in config[profile]
                or "warn_ssl" in config["default"]), (
                    "'warn_ssl' not found in section '{}' or in"
                    "the default section".format(profile))

        # tenant parameter is optional

        def get_config_value(key, profile):
            # Profile section wins; fall back to [default]; None if absent.
            if key in config[profile]:
                _log.debug("Found '{}' in profile '{}'".format(key, profile))
                return config[profile][key]
            else:
                try:
                    val = config["default"][key]
                    _log.debug("Found '{}' in profile '{}'".format(
                        key, "default"))
                    return val
                except Exception:
                    _log.debug("Could not find '{}' in profile '{}'".format(
                        key, profile))
                    return None

        username = str(get_config_value("username", profile))
        password = str(get_config_value("password", profile))
        api_host = str(get_config_value("api_host", profile))
        api_port = int(get_config_value("api_port", profile))
        use_ssl = str(get_config_value("use_ssl", profile))
        verify_ssl = str(get_config_value("verify_ssl", profile))
        warn_ssl = str(get_config_value("warn_ssl", profile))

        # optional parameter
        tenant = get_config_value("tenant", profile)
        if tenant:
            assert isinstance(tenant, str) and re.match(
                r"\/api\/v1\/tenant\/[0-9]+", tenant), (
                    "'tenant' must have format '/api/v1/tenant/[0-9]+' in '{}'"
                ).format(config_file)

        # Anything other than the literal string "False" counts as True.
        if use_ssl == "False":
            use_ssl = False
        else:
            use_ssl = True

        # verify_ssl could be a path (to a CA bundle), so only the literal
        # "False" is converted to a boolean.
        if verify_ssl == "False":
            verify_ssl = False

        if warn_ssl == "False":
            warn_ssl = False
        else:
            warn_ssl = True

        return cls(
            username,
            password,
            api_host,
            api_port,
            use_ssl,
            verify_ssl,
            warn_ssl,
            tenant,
        )
Exemple #7
0
 def read_config(self):
     '''
     Gets info from config file.

     Creates a fresh SafeConfigParser on ``self.config`` and loads
     ``self.config_file`` into it; a missing file is silently ignored
     by ``read()``.
     '''
     self.config = SafeConfigParser()
     self.config.read(self.config_file)
    def __init__(self, file):
        """Parse a VHDL entity's port declaration and write connection_info.txt.

        Reads nghdl's ~/.nghdl/config.ini for tool paths, scans *file* (a
        VHDL source) for its ``port ( ... )`` block, and records each port
        as ``name direction width`` in connection_info.txt in the current
        directory.

        Parameters
        ----------
        file : str
            Path to the VHDL source file.  (The name shadows the builtin
            but is kept unchanged for interface compatibility.)
        """
        # Script starts from here
        print("Arguement is : ", file)
        self.fname = os.path.basename(file)
        print("VHDL filename is : ", self.fname)
        self.home = os.path.expanduser("~")
        self.parser = SafeConfigParser()
        self.parser.read(
            os.path.join(self.home, os.path.join('.nghdl', 'config.ini')))
        self.ngspice_home = self.parser.get('NGSPICE', 'NGSPICE_HOME')
        self.release_dir = self.parser.get('NGSPICE', 'RELEASE')
        self.src_home = self.parser.get('SRC', 'SRC_HOME')
        self.licensefile = self.parser.get('SRC', 'LICENSE')

        # #### Creating connection_info.txt file from vhdl file #### #
        # Context manager guarantees the handle is closed even on error.
        with open(file, 'r') as read_vhdl:
            vhdl_data = read_vhdl.readlines()

        start_flag = -1  # -1: before port block, 1: inside it, 0: past "end"
        scan_data = []

        for item in vhdl_data:
            if re.search('port', item, re.I):
                start_flag = 1
            elif re.search("end", item, re.I):
                start_flag = 0

            if start_flag == 1:
                # Strip the "port" keyword, parentheses and semicolons so
                # only "name : direction type" text remains.  Raw strings
                # avoid the invalid-escape warnings the original silenced
                # with noqa.
                item = re.sub("port", " ", item, flags=re.I)
                item = re.sub(r"\(", " ", item, flags=re.I)
                item = re.sub(r"\)", " ", item, flags=re.I)
                item = re.sub(";", " ", item, flags=re.I)

                scan_data.append(item.rstrip())
            elif start_flag == 0:
                break
        # Drop empty entries once, after the scan; the original re-filtered
        # the whole list on every iteration (quadratic, same final result).
        scan_data = [_f for _f in scan_data if _f]

        port_info = []
        self.port_vector_info = []

        for item in scan_data:
            print("Scan Data :", item)
            if re.search("in", item, flags=re.I):
                if re.search("std_logic_vector", item, flags=re.I):
                    temp = re.compile(r"\s*std_logic_vector\s*", flags=re.I)
                elif re.search("std_logic", item, flags=re.I):
                    temp = re.compile(r"\s*std_logic\s*", flags=re.I)
                else:
                    raise ValueError("Please check your vhdl " +
                                     "code for datatype of input port")
            elif re.search("out", item, flags=re.I):
                if re.search("std_logic_vector", item, flags=re.I):
                    temp = re.compile(r"\s*std_logic_vector\s*", flags=re.I)
                elif re.search("std_logic", item, flags=re.I):
                    temp = re.compile(r"\s*std_logic\s*", flags=re.I)
                else:
                    raise ValueError("Please check your vhdl " +
                                     "code for datatype of output port")
            else:
                raise ValueError(
                    "Please check the in/out direction of your port")

            # lhs keeps "name : direction"; rhs keeps any "(N downto 0)" tail.
            lhs = temp.split(item)[0]
            rhs = temp.split(item)[1]
            bit_info = re.compile(r"\s*downto\s*", flags=re.I).split(rhs)[0]
            if bit_info:
                # Vector port: width is MSB index + 1.
                port_info.append(lhs + ":" + str(int(bit_info) + int(1)))
                self.port_vector_info.append(1)
            else:
                # Scalar std_logic port: width 1.
                port_info.append(lhs + ":" + str(int(1)))
                self.port_vector_info.append(0)

        print("Port Info :", port_info)

        # Each entry is "name : direction :width" (lhs already contains a
        # colon), so splitting on ':' yields name / direction / width.
        with open('connection_info.txt', 'w') as con_ifo:
            for item in port_info:
                word = item.split(':')
                con_ifo.write(word[0].strip() + ' ' + word[1].strip() + ' ' +
                              word[2].strip())
                con_ifo.write("\n")
Exemple #9
0
 def parse_ini(self):
     """Read ini file for visar setup and analysis settings.

     Returns a SafeConfigParser seeded with ``self.DEFAULTS``; when
     ``self.filenames['ini']`` is set, that file is parsed into it
     (``read()`` silently skips missing files).
     """
     cp = SafeConfigParser(defaults=self.DEFAULTS)
     if self.filenames['ini'] is not None:
         cp.read(self.filenames['ini'])
     return cp
Exemple #10
0
 def __init__(self, path=CONFIGPATH):
     """Load configuration from *path* and cache parsed values.

     Creates the parser, stores the path, delegates file loading to
     ``self.read()``, then fills ``self.data`` via ``self.get_values()``.
     """
     self.this_parser = SafeConfigParser()
     self.path = path
     self.read()
     # Parsed key/value cache, populated by get_values() below.
     self.data = {}
     self.get_values()
Exemple #11
0
    def upload_books(self,
                     files,
                     names,
                     on_card=None,
                     end_session=True,
                     metadata=None):
        """Process sending the book to the Kobo device.

        Before delegating to the base-class upload, this method:
          1. if CSS modification is enabled, copies a device-model-specific
             extra CSS file from the calibre config dir onto the device
             (falling back to the generic KOBO_EXTRA_CSSFILE name), and
          2. rewrites "Kobo eReader.conf" so FeatureSettings.FullBookPageNumbers
             matches the plugin's full_page_numbers option.

        Parameters and return value follow the calibre device-driver
        ``upload_books`` contract; all work is finally delegated to
        ``super().upload_books``.
        """
        if self.modifying_css():
            common.log.info(
                "KoboTouchExtended:upload_books:Searching for device-specific "
                "CSS file")
            device_css_file_name = self.KOBO_EXTRA_CSSFILE
            # Map the detected device model to its CSS filename.  Older
            # calibre versions lack some isXxx() helpers, hence the
            # AttributeError fallback below.
            try:
                if self.isAuraH2O():
                    device_css_file_name = "kobo_extra_AURAH2O.css"
                elif self.isAuraHD():
                    device_css_file_name = "kobo_extra_AURAHD.css"
                elif self.isAura():
                    device_css_file_name = "kobo_extra_AURA.css"
                elif self.isClaraHD():
                    device_css_file_name = "kobo_extra_CLARA.css"
                elif self.isForma():
                    device_css_file_name = "kobo_extra_FORMA.css"
                elif self.isGlo():
                    device_css_file_name = "kobo_extra_GLO.css"
                elif self.isGloHD():
                    device_css_file_name = "kobo_extra_GLOHD.css"
                elif self.isLibraH2O():
                    device_css_file_name = "kobo_extra_LIBRA.css"
                elif self.isMini():
                    device_css_file_name = "kobo_extra_MINI.css"
                elif self.isTouch():
                    device_css_file_name = "kobo_extra_TOUCH.css"
            except AttributeError:
                common.log.warning(
                    "KoboTouchExtended:upload_books:Calibre version too old "
                    "to handle some specific devices, falling back to "
                    "generic file {0}".format(device_css_file_name))
            device_css_file_name = os.path.join(self.configdir,
                                                device_css_file_name)
            if os.path.isfile(device_css_file_name):
                common.log.info(
                    "KoboTouchExtended:upload_books:Found device-specific "
                    "file {0}".format(device_css_file_name))
                # Copy onto the device root under the generic extra-CSS name.
                shutil.copy(
                    device_css_file_name,
                    os.path.join(self._main_prefix, self.KOBO_EXTRA_CSSFILE),
                )
            else:
                common.log.info(
                    "KoboTouchExtended:upload_books:No device-specific CSS "
                    "file found (expecting {0})".format(device_css_file_name))

        kobo_config_file = os.path.join(self._main_prefix, ".kobo", "Kobo",
                                        "Kobo eReader.conf")
        if os.path.isfile(kobo_config_file):
            cfg = SafeConfigParser(allow_no_value=True)
            # Keep option-name case: optionxform=str disables the default
            # lowercasing so FullBookPageNumbers round-trips unchanged.
            cfg.optionxform = str
            cfg.read(kobo_config_file)

            if not cfg.has_section("FeatureSettings"):
                cfg.add_section("FeatureSettings")
            common.log.info(
                "KoboTouchExtended:upload_books:Setting FeatureSettings."
                "FullBookPageNumbers to {0}".format(
                    "true" if self.full_page_numbers else "false"))
            cfg.set(
                "FeatureSettings",
                "FullBookPageNumbers",
                "true" if self.full_page_numbers else "false",
            )
            with open(kobo_config_file, "w") as cfgfile:
                cfg.write(cfgfile)

        return super(KOBOTOUCHEXTENDED,
                     self).upload_books(files, names, on_card, end_session,
                                        metadata)
Exemple #12
0
    def __init__(self):
        """Build the command-line interface for the trading tool.

        A minimal pre-parser first extracts ``-f/--from-config`` so that
        values from the named INI file (section ``[Defaults]``) can be
        applied as argparse defaults before the real parsing happens.
        The resulting parser is exposed as ``self.parser``.
        """
        # First pass: only look for -f/--from-config; everything else is
        # left for the main parser.
        base = argparse.ArgumentParser(add_help=False)
        base.add_argument("-f", "--from-config",
                          help="Specify config file", metavar="FILE")
        pre_args, _ = base.parse_known_args()

        cfg_defaults = {}
        if pre_args.from_config:
            cfg = SafeConfigParser()
            cfg.read([pre_args.from_config])
            cfg_defaults = dict(cfg.items("Defaults"))

        # Main parser inherits the pre-parser so -f still appears in help.
        self.parser = argparse.ArgumentParser(
            formatter_class=argparse.ArgumentDefaultsHelpFormatter,
            parents=[base],
            description=__doc__)

        add = self.parser.add_argument
        add("--data-provider", "-d", type=str, default="static")
        add("--input-data-path", "-n", type=str,
            default="data/input/coinbase-1h-btc-usd.csv")
        add("--reward-strategy", "-r", type=str,
            default="incremental-profit", dest="reward_strat")
        add("--pair", "-p", type=str, default="BTC/USD")
        # NOTE(review): store_false means passing --debug *disables* the
        # flag (it defaults to True) -- looks inverted; confirm intent.
        add("--debug", "-D", action='store_false')
        add('--mini-batches', type=int, default=1,
            help='Mini batches', dest='n_minibatches')
        add('--train-split-percentage', type=float, default=0.8,
            help='Train set percentage')
        add('--verbose-model', type=int, default=1,
            help='Verbose model', dest='model_verbose')
        add('--params-db-path', type=str,
            default='sqlite:///data/params.db', help='Params path')
        add('--tensorboard-path', type=str,
            default=os.path.join('data', 'tensorboard'),
            help='Tensorboard path')
        add('--parallel-jobs', type=int,
            default=multiprocessing.cpu_count(),
            help='How many processes in parallel')

        commands = self.parser.add_subparsers(help='Command', dest="command")

        # optimize: hyper-parameter search
        optimize_cmd = commands.add_parser(
            'optimize', description='Optimize model parameters')
        optimize_cmd.add_argument('--trials', type=int, default=1,
                                  help='Number of trials')
        optimize_cmd.add_argument('--prune-evals', type=int, default=2,
                                  help='Number of pruning evaluations per trial')
        optimize_cmd.add_argument('--eval-tests', type=int, default=1,
                                  help='Number of tests per pruning evaluation')

        # train: model fitting
        train_cmd = commands.add_parser('train', description='Train model')
        train_cmd.add_argument('--epochs', type=int, default=10,
                               help='Number of epochs to train')
        train_cmd.add_argument('--save-every', type=int, default=1,
                               help='Save the trained model every n epochs')
        train_cmd.add_argument('--no-test', dest="test_trained",
                               action="store_false",
                               help='Test each saved model')
        train_cmd.add_argument('--render-test', dest="render_test",
                               action="store_true",
                               help='Render the test environment')
        train_cmd.add_argument('--no-report', dest="render_report",
                               action="store_false",
                               help='Render the performance report')
        train_cmd.add_argument('--save-report', dest="save_report",
                               action="store_true",
                               help='Save the performance report as .html')

        # test: evaluate a saved model
        test_cmd = commands.add_parser('test', description='Test model')
        test_cmd.add_argument('--model-epoch', type=int, default=0,
                              help='Model epoch index')
        test_cmd.add_argument('--no-render', dest="render_env",
                              action="store_false",
                              help='Render the test environment')
        test_cmd.add_argument('--no-report', dest="render_report",
                              action="store_false",
                              help='Render the performance report')
        test_cmd.add_argument('--save-report', dest="save_report",
                              action="store_true",
                              help='Save the performance report as .html')

        # trade: run a saved model live
        trade_cmd = commands.add_parser(
            'trade', description='Trade with saved model')
        trade_cmd.add_argument("--data-provider", "-d", type=str,
                               default="static")
        trade_cmd.add_argument('--model-epoch', type=int, default=0,
                               help='Model epoch index')
        trade_cmd.add_argument('--no-render', dest="render_env",
                               action="store_false",
                               help='Render the test environment')
        trade_cmd.add_argument('--no-report', dest="render_report",
                               action="store_false",
                               help='Render the performance report')
        trade_cmd.add_argument('--save-report', dest="save_report",
                               action="store_true",
                               help='Save the performance report as .html')

        commands.add_parser('update-static-data',
                            description='Update static data')

        # Config-file values override the hard-coded argparse defaults.
        self.parser.set_defaults(**cfg_defaults)
Exemple #13
0
def continuity_error(ini_fname, delta_t, cell_id, X, channel_indices):
    """Run a PyTOPKAPI simulation and report its water-balance error.

    Parameters
    ----------
    ini_fname :
        Path to the model configuration (INI) file.
    delta_t :
        Simulation time-step, passed through to the runoff/drainage
        volume helpers.
    cell_id :
        Identifier of the outlet cell.
    X :
        Cell lateral dimension used for volume conversions.
    channel_indices :
        Indices of the channel cells (for open-water evaporation).

    Returns
    -------
    tuple
        ``(error, precip_error, stor_error)`` -- the absolute continuity
        error, the error as a percentage of precipitation (``None`` when
        there was no precipitation) and as a percentage of initial
        storage.
    """
    # run the model using the supplied configuration
    pytopkapi.run(ini_fname)

    # parse the config file to locate forcing and result files
    config = SafeConfigParser()
    config.read(ini_fname)

    precip_fname = config.get('input_files', 'file_rain')
    # NOTE(review): ET_fname is never used below; the lookup only
    # validates that the option exists.
    ET_fname = config.get('input_files', 'file_ET')
    group_name = config.get('groups', 'group_name')
    result_fname = config.get('output_files', 'file_out')

    # write model version
    print('PyTOPKAPI version = %s' % pytopkapi.__version__)

    # compute the terms in the continuity eqn.
    initial_storage, final_storage = compute_storage(result_fname)
    print('Initial storage = ', initial_storage)
    print('Final storage = ', final_storage)

    precip_vol = compute_precip_volume(precip_fname, group_name, X)
    print('Precipitation = ', precip_vol)

    evapo_vol = compute_evapot_volume(result_fname, X)
    print('Evapotranspiration = ', evapo_vol)

    open_water_evap_vol = compute_evap_volume(result_fname, channel_indices)
    print('Channel evaporation = ', open_water_evap_vol)

    channel_runoff_vol = compute_channel_runoff(result_fname, delta_t, cell_id)
    print('Channel runoff (outlet) = ', channel_runoff_vol)

    overland_runoff_vol = compute_overland_runoff(result_fname, delta_t,
                                                  cell_id)
    print('Overland runoff (outlet) = ', overland_runoff_vol)

    soil_drainage_vol = compute_soil_drainage(result_fname, delta_t, cell_id)
    print('Soil drainage (outlet) = ', soil_drainage_vol)

    down_drainage_vol = compute_down_drainage(result_fname, delta_t, cell_id)
    print('Non-channel drainage (outlet) = ', down_drainage_vol)

    # Renamed from ``input``/``output`` to stop shadowing the builtins.
    # NOTE(review): overland runoff and soil drainage are computed and
    # printed above but excluded from the outflow sum -- confirm this
    # matches the model's continuity definition.
    inflow = precip_vol
    outflow = evapo_vol \
              + open_water_evap_vol \
              + channel_runoff_vol \
              + down_drainage_vol

    delta_storage = final_storage - initial_storage
    error = delta_storage - (inflow - outflow)

    if precip_vol > 0:
        precip_error = abs((error / precip_vol) * 100.0)
    else:
        precip_error = None
    # NOTE(review): raises ZeroDivisionError when initial_storage == 0;
    # kept as-is to preserve existing behaviour.
    stor_error = abs((error / initial_storage) * 100.0)

    print('Continuity error = ', error)
    print('Error as % precip. = ', precip_error)
    print('Error as % initial storage = ', stor_error)

    os.remove(result_fname)

    return error, precip_error, stor_error
Exemple #14
0
def plot_as_wires(cfg_file, db_name, syn_db):
    """Render neuron morphologies from an SWC sqlite database as 3D wires.

    Each distinct name in ``swc_data`` is drawn in its own colour: the
    soma as a sphere and all segments as a single NaN-separated line
    strip (one plot call per neuron, which is much faster than one call
    per segment).  When ``syn_db`` is non-empty, synapse locations are
    overlaid as a scatter.  The figure is saved as
    ``<db_name stem>_wire.pdf``.

    Returns the summed wall-clock time of the plot and save calls.
    """
    parser = SafeConfigParser()
    parser.read(cfg_file)
    colors = ['r', 'g', 'b', 'c', 'm', 'k']
    c_mapping = {}

    conn = sqlite3.connect(db_name)
    cursor = conn.cursor()
    cursor.execute("select distinct name from swc_data order by name")
    rets = cursor.fetchall()
    names = []
    c = 0
    # Assign a cycling colour to each neuron name.
    for entity in rets:
        print("entity: %s with color %s" %
              (str(entity[0]), colors[c % len(colors)]))
        names.append(entity[0])
        c_mapping[str(entity[0])] = colors[c % len(colors)]
        c = c + 1
    times = []
    fig = plt.figure()
    ax = fig.add_subplot(111, projection='3d')
    ax.set_xlabel("X")
    ax.set_ylabel("Y")
    ax.set_zlabel("Z")
    xmin, ymin, zmin = 0.0, 0.0, 0.0
    # SECURITY: eval() on a config value executes arbitrary code; prefer
    # ast.literal_eval if the config file is not fully trusted.
    dim_xyz = eval(parser.get("substrate", "dim_xyz"))
    xmax = dim_xyz[0]
    ymax = dim_xyz[1]
    zmax = dim_xyz[2]
    ax.set_xlim([xmin, xmax])
    ax.set_ylim([ymin, ymax])
    ax.set_zlim([zmin, zmax])
    # e_start / a_start come from module-level globals defined elsewhere.
    ax.view_init(elev=e_start, azim=a_start)
    for name in names:
        cursor.execute("select * from swc_data where name=? order by id",
                       (str(name), ))
        rets = cursor.fetchall()

        # Draw the soma (first row) as a sphere of the recorded radius.
        u = np.linspace(0, 2 * np.pi, 20)
        v = np.linspace(0, np.pi, 20)
        soma = rets[0]
        soma_x = soma[3]
        soma_y = soma[4]
        soma_z = soma[5]
        radius = soma[9]
        x = soma_x + (radius * np.outer(np.cos(u), np.sin(v)))
        y = soma_y + (radius * np.outer(np.sin(u), np.sin(v)))
        z = soma_z + (radius * np.outer(np.ones(np.size(u)), np.cos(v)))
        ax.plot_surface(x, y, z, rstride=4, cstride=4, color=c_mapping[name])

        # Build one NaN-separated polyline covering every segment; the
        # NaNs break the strokes so distinct segments stay disconnected.
        xlist = []
        ylist = []
        zlist = []
        for entity in rets:
            from_point = np.array([entity[3], entity[4], entity[5]])
            to_point = np.array([entity[6], entity[7], entity[8]])

            xlist.extend([from_point[0], to_point[0]])  #xpairs[-1])
            xlist.append(np.nan)  #None)
            ylist.extend([from_point[1], to_point[1]])  #ypairs[-1])
            ylist.append(np.nan)  #None)
            zlist.extend([from_point[2], to_point[2]])  #zpairs[-1])
            zlist.append(np.nan)  #None)

        t0 = timer()
        plt.plot(xlist, ylist, zlist, color=c_mapping[name], alpha=0.5)
        t1 = timer()
        times.append(t1 - t0)

    if len(syn_db) > 0:
        syn_conn = sqlite3.connect(syn_db)
        syn_cursor = syn_conn.cursor()
        syn_cursor.execute("select *  from synapses order by pre_syn_entity")
        rets = syn_cursor.fetchall()
        xs = []
        ys = []
        zs = []
        count = 0
        for ret in rets:
            xs.append(ret[2])
            ys.append(ret[3])
            zs.append(ret[4])
            count = count + 1
        ax.scatter(xs, ys, zs, c="b", marker="o")
        print("no_synapses (count)=", count)
        syn_conn.close()  # fix: release the synapse DB handle
    conn.close()  # fix: release the SWC DB handle (was leaked)
    t0 = timer()
    if db_name.startswith(".."):
        out_name = db_name.split("/")[-1].split(".")[0] + "_wire.pdf"
    else:
        out_name = db_name.split(".")[0] + "_wire.pdf"
    plt.savefig(out_name)
    t1 = timer()
    print("writing the figure took: %fs" % (t1 - t0))
    times.append(t1 - t0)
    return np.sum(times)
 def __init__(self):
     """Load settings from ``config.ini`` in the current working directory."""
     parser = SafeConfigParser()
     parser.read('config.ini')
     self.config = parser
def main():
    """Interactive tuner: adjust HSV thresholds, blur and ball size with
    trackbars while stepping through a video, preview the circle
    detection in a 2x2 mosaic, and persist the chosen values back to
    ``config.ini``.

    Keys: q = quit, r = reset to the first frame, s = save values.
    Relies on the module-level trackbar callbacks ``frame_change`` /
    ``sliders_update`` (which set ``posicao`` / ``new``).
    """
    config = configparser.ConfigParser()
    config.read('config.ini')

    # Video file path from the [default] section.
    arquivo = config['default']['video']

    font = cv2.FONT_HERSHEY_SIMPLEX

    posicao = 1
    new = False

    capture = cv2.VideoCapture(arquivo)
    _, image = capture.read()

    # Initial 2x2 mosaic (same frame four times); replaced by processed
    # views once a slider/frame callback raises `new`.
    image_line1 = np.hstack((image, image))
    image_line2 = np.hstack((image, image))
    image = np.vstack((image_line1, image_line2))

    blur = int(config['default']['blur'])
    Bsize = int(config['default']['Bsize'])
    Hmin = int(config['default']['Hmin'])
    Hmax = int(config['default']['Hmax'])
    Smin = int(config['default']['Smin'])
    Smax = int(config['default']['Smax'])
    Vmin = int(config['default']['Vmin'])
    Vmax = int(config['default']['Vmax'])

    cv2.namedWindow("image")
    cv2.createTrackbar('Frame','image',0,int(capture.get(cv2.CAP_PROP_FRAME_COUNT)),frame_change)
    cv2.createTrackbar('Blur','image',blur,30,sliders_update)
    cv2.createTrackbar('Hmin','image',Hmin,100,sliders_update)
    cv2.createTrackbar('Hmax','image',Hmax,179,sliders_update)
    cv2.createTrackbar('Smin','image',Smin,255,sliders_update)
    cv2.createTrackbar('Smax','image',Smax,255,sliders_update)
    cv2.createTrackbar('Vmin','image',Vmin,255,sliders_update)
    cv2.createTrackbar('Vmax','image',Vmax,255,sliders_update)
    cv2.createTrackbar('Bsize','image',Bsize,50,sliders_update)

    # Radius window for a "circular" object: ball size +/- a third.
    # NOTE(review): recomputed below, but HoughCircles still uses the
    # hard-coded minRadius/maxRadius (5..20) -- confirm which is intended.
    rmin =  Bsize - int(Bsize/3)
    rmax =  Bsize + int(Bsize/3)

    while True:
        if new:
            new = False
            capture.set(cv2.CAP_PROP_POS_FRAMES, posicao)
            _, image_raw = capture.read()
            blur = int(cv2.getTrackbarPos('Blur', 'image'))
            if blur%2 == 0:
                blur += 1  # blur kernel size must be odd
            Hmin = int(cv2.getTrackbarPos('Hmin', 'image'))
            Hmax = int(cv2.getTrackbarPos('Hmax', 'image'))
            Smin = int(cv2.getTrackbarPos('Smin', 'image'))
            Smax = int(cv2.getTrackbarPos('Smax', 'image'))
            Vmin = int(cv2.getTrackbarPos('Vmin', 'image'))
            Vmax = int(cv2.getTrackbarPos('Vmax', 'image'))
            Bsize = int(cv2.getTrackbarPos('Bsize', 'image'))
            # (fix: removed a duplicated second read of 'Hmax' here)
            # TODO: max must not be smaller than min for H, S and V

            image_blur = cv2.blur(image_raw, (blur, blur))

            image_hsv = cv2.cvtColor(image_blur, cv2.COLOR_BGR2HSV)
            image_thresh = cv2.inRange(image_hsv,np.array((Hmin, Smin, Vmin)), np.array((Hmax, Smax, Vmax)))
            image_thresh = cv2.blur(image_thresh,(blur, blur))
            try:
                rmin =  Bsize - int(Bsize/3)
                rmax =  Bsize + int(Bsize/3)
                print(rmin, rmax)
                cir = cv2.HoughCircles(image_thresh,cv2.HOUGH_GRADIENT,1,200,
                                param1=25,param2=25,minRadius=5,maxRadius=20)
                if cir is not None:
                    for i in cir:
                        for j in i:
                            if j[0] > 0:
                                cv2.circle(image_raw,(j[0],j[1]), int(j[2]), (255,255,0),5)
                                print(f'x={j[0]} \t y={j[1]}')
            except Exception as e:
                print('Exception: ', e)

            # On-screen help text (user-facing strings kept verbatim).
            cv2.putText(image_raw,'sair:   q',(10,30), font, 1, (0, 0, 255), 2, cv2.LINE_AA)
            cv2.putText(image_raw,'reset:  r',(10,90), font, 1, (0, 0, 255), 2, cv2.LINE_AA)
            cv2.putText(image_raw,'Salvar: s',(10,150), font, 1, (0, 0, 255), 2, cv2.LINE_AA)
            image_line1 = np.hstack((image_raw, image_blur))
            image_thresh = cv2.cvtColor(image_thresh, cv2.COLOR_GRAY2BGR)
            image_line2 = np.hstack((image_thresh, image_blur))
            image = np.vstack((image_line1, image_line2))

        cv2.imshow("image", image)
        key = cv2.waitKey(1) & 0xFF

        # r: reset back to the first frame
        if key == ord("r"):
            posicao = 1
            capture.set(cv2.CAP_PROP_POS_FRAMES, posicao)
            _, image = capture.read()
        elif key == ord('s'):
            # s: persist the current trackbar values to config.ini
            blur = int(cv2.getTrackbarPos('Blur', 'image'))
            if blur%2 == 0:
                blur += 1
            Hmin = int(cv2.getTrackbarPos('Hmin', 'image'))
            Hmax = int(cv2.getTrackbarPos('Hmax', 'image'))
            Smin = int(cv2.getTrackbarPos('Smin', 'image'))
            Smax = int(cv2.getTrackbarPos('Smax', 'image'))
            Vmin = int(cv2.getTrackbarPos('Vmin', 'image'))
            Vmax = int(cv2.getTrackbarPos('Vmax', 'image'))
            Bsize = int(cv2.getTrackbarPos('Bsize', 'image'))

            parser = SafeConfigParser()
            parser.read('config.ini')
            parser.set('atual', 'blur', str(blur))
            parser.set('atual', 'Hmin', str(Hmin))
            parser.set('atual', 'Hmax', str(Hmax))  # fix: Hmax was read but never saved
            parser.set('atual', 'Smin', str(Smin))
            parser.set('atual', 'Smax', str(Smax))
            parser.set('atual', 'Vmin', str(Vmin))
            parser.set('atual', 'Vmax', str(Vmax))
            parser.set('atual', 'Bsize', str(Bsize))
            # NOTE(review): values are read from [default] at startup but
            # saved to [atual] -- confirm the section names are intended.

            with open('config.ini', 'w+') as configfile:
                parser.write(configfile)

        elif key == ord("q"):
            break

    cv2.destroyAllWindows()
def test_start_and_kill_faraday_server():
    """
        Starts the server and then send a signal to kill the
        process gracefully.
        The idea is to catch a broken faraday-server.py
        After sending the signal we wait for 15 seconds and
        if the server didn't stop we fail the test also.
    """
    # Find a free port, scanning upward from 5988.
    server_port = 5988
    while daemonize.is_server_running(server_port) and server_port < 6500:
        server_port += 1

    # Fix: ">=" instead of ">".  The loop above exits with
    # server_port == 6500 when every port was busy, which the old
    # "> 6500" check could never detect.
    if server_port >= 6500:
        raise Exception('No free ports could be found')

    if 'POSTGRES_DB' in os.environ:
        # I'm on gitlab ci runner
        # I will overwrite server.ini
        connection_string = 'postgresql+psycopg2://{username}:{password}@postgres/{database}'.format(
            username=os.environ['POSTGRES_USER'],
            password=os.environ['POSTGRES_PASSWORD'],
            database=os.environ['POSTGRES_DB'],
        )
        faraday_config = SafeConfigParser()
        config_path = os.path.expanduser('~/.faraday/config/server.ini')
        faraday_config.read(config_path)
        try:
            faraday_config.add_section('database')
        except DuplicateSectionError:
            # Section already exists; just overwrite the option below.
            pass
        faraday_config.set('database', 'connection_string', connection_string)
        with open(config_path, 'w') as faraday_config_file:
            faraday_config.write(faraday_config_file)

        command = ['faraday-manage', 'create-tables']
        subproc = subprocess.Popen(command,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE)
        subproc.wait()
        std, err = subproc.communicate()
        assert subproc.returncode == 0, ('Create tables failed!', std, err)

    command = [
        'faraday-server', '--port', '{0}'.format(server_port), '--debug'
    ]
    subproc = subprocess.Popen(command,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    start = datetime.now()
    # Poll until the server exits; ask it to stop after 30s, give up
    # entirely after 140s.
    while subproc.returncode is None:
        now = datetime.now()
        delta = now - start
        if delta.seconds > 140:
            raise UserWarning('Faraday server test timeout!')
        if delta.seconds > 30:
            subproc.send_signal(signal.SIGTERM)
            subproc.wait()
        subproc.poll()  # fix: removed a duplicated second poll() call
        time.sleep(0.1)
    out, err = subproc.communicate()
    if subproc.returncode != 0:
        # Dump the server log to help diagnose the failure.
        log_path = os.path.expanduser('~/.faraday/logs/faraday-server.log')
        with open(log_path, 'r') as log_file:
            print(log_file.read())
    assert subproc.returncode == 0, (out, err, command)
Exemple #18
0
    def _sendAdminEmail(self, user, seller, topic, prod_num, message,
                        request_file, restricted_active, request, product_obj):
        """Provision MQTT credentials/ACLs and notify buyer and seller
        after a purchase.

        Creates a broker account for the buyer on first purchase, records
        a Flow, and then either grants topic access immediately
        (unrestricted product) or files a RestrictedRequest for the
        seller to approve.  Every email sent is mirrored as a
        NotificationItem.
        """
        # Broker credentials/ACLs live in a MySQL DB configured via
        # /code/config.ini, separate from the Django ORM.
        config = SafeConfigParser()
        config.read('/code/config.ini')
        db = MySQLdb.connect(
            host=config.get('main',
                            'mysql_host'),  # your host, usually localhost
            user=config.get('main', 'mysql_name'),  # your username
            passwd=config.get('main', 'mysql_pw'),  # your password
            db=config.get('main', 'mysql_db'))  # your database
        cur = db.cursor()
        log = config.get('main', 'checkout_log_path')
        NP_PATH = config.get('main', 'np_path')  # password-hashing tool path
        username = user.username
        user_email = user.email
        topic = topic  # NOTE(review): no-op self-assignment

        # Password with 6 characters (lower case + number)
        # NOTE(review): comment says 6 characters but 12 are generated.
        original_password = ''.join(
            random.choice(string.ascii_lowercase + string.digits)
            for _ in range(12))

        # First purchase ever: create a broker account for this user.
        if not cur.execute("select (1) from users where username = %s limit 1",
                           (username, )):

            # Hash the plaintext password with the external NP tool.
            # SECURITY(review): shell=True with a concatenated command
            # string, and the plaintext password is emailed to the user
            # -- both worth revisiting.
            command = NP_PATH + ' ' + '-p' + ' ' + original_password
            command_bytes = command.encode('utf-8')
            pw_bytes = subprocess.Popen(command_bytes,
                                        stdout=subprocess.PIPE,
                                        shell=True).communicate()[0]
            password = pw_bytes.decode().rstrip('\n')

            cur.execute(
                "insert into users (username,pw,user_id) values (%s,%s,%s)",
                (username, password, user.id))  # stdout: ignore '\n'

            # Send password to email
            subject = 'Your new password'
            msg = "Your password to I3  is: " + original_password
            email = EmailMessage(subject, msg, to=[user_email])
            email.send()

            # Record email as notification
            notification_box = request.user.get_notification_box()
            notification_item = NotificationItem(
                notification_box=notification_box, subject=subject, body=msg)
            notification_item.save()

        # To do: make topic as a product obj that can be linked to
        flow_obj = Flow.objects.create(user=request.user,
                                       topic=topic,
                                       direction='in',
                                       state='inactive')
        flow_obj.save()

        # send to the user which topic is able to pub/sub
        # when the topic is unrestricted: insert to acls and send confirmation back to buyer
        if not restricted_active:
            subject = 'New product purchased'
            msg = 'Now you can subscribe to topic: ' + topic + '.'
            email = EmailMessage(subject, msg, to=[user_email])
            email.send()

            # Record email as notification
            notification_box = request.user.get_notification_box()
            notification_item = NotificationItem(
                notification_box=notification_box, subject=subject, body=msg)
            notification_item.save()

            subject = 'New buyer of an unrestricted topic'
            msg = 'Buyer ' + username + ' just bought product ' + topic + '.'
            email = EmailMessage(subject, msg, to=[seller.email])
            email.send()

            # Record email as notification
            notification_box = seller.get_notification_box()
            notification_item = NotificationItem(
                notification_box=notification_box, subject=subject, body=msg)
            notification_item.save()

            # insert into acls table
            rw = 1  # seller: can read and write
            if product_obj.sensor_type >= 2:
                rw = 2
            cur.execute(
                "insert into acls (username,topic,rw,user_id, topic_id) values (%s,%s,%s,%s,%s)",
                (username, topic, str(rw), user.id, product_obj.id))

            # write new sub info to log
            with open(log, 'a') as f:
                f.write(
                    str(time.time()) + ': New Sub ' + username + ' ' + topic +
                    ' ' + str(prod_num) + '\n')

        else:

            # Restricted topic: queue the purchase for seller approval
            # instead of granting access now.
            restricted_request_obj = RestrictedRequest(
                seller=product_obj.seller,
                requester=request.user,
                product=product_obj,
                price=product_obj.price,
                quantity=prod_num,
                intention=message,
                attachment=request_file)
            restricted_request_obj.save()

            subject = 'New product purchased (to be confirmed)'
            msg = 'Waiting seller to confirm purchase of ' + topic + '.'
            email = EmailMessage(subject, msg, to=[user_email])
            email.send()

            # Record email as notification
            notification_box = request.user.get_notification_box()
            notification_item = NotificationItem(
                notification_box=notification_box, subject=subject, body=msg)
            notification_item.save()

            subject = 'New buyer of a restricted topic'
            msg = 'Buyer ' + username + ' just bought product ' + topic + '. You need to approve the purchase.'
            email = EmailMessage(subject, msg, to=[seller.email])
            email.send()

            # Record email as notification
            notification_box = seller.get_notification_box()
            notification_item = NotificationItem(
                notification_box=notification_box, subject=subject, body=msg)
            notification_item.save()

        # NOTE(review): db/cur are never closed and commit only happens
        # on the success path -- consider try/finally.
        db.commit()
Exemple #19
0
    def get_options(self, test_args=None):
        """Parse command-line arguments passed to the desiInstall script.

        Parameters
        ----------
        test_args : :class:`list`
            Normally, this method is called without arguments, and
            :data:`sys.argv` is parsed.  Arguments should only be passed for
            testing purposes.

        Returns
        -------
        :class:`argparse.Namespace`
            A simple object containing the parsed options.  Also, the
            attribute `options` is set.
        """
        from argparse import ArgumentParser
        log = logging.getLogger(__name__ + '.DesiInstall.get_options')
        # Environment variables used for option defaults; missing ones
        # are warned about (except DESI_PRODUCT_ROOT at NERSC).
        check_env = {'MODULESHOME': None,
                     'DESI_PRODUCT_ROOT': None,
                     'USER': None,
                     'LANG': None}
        for e in check_env:
            try:
                check_env[e] = environ[e]
            except KeyError:
                if e == 'DESI_PRODUCT_ROOT' and 'NERSC_HOST' in environ:
                    log.debug('The environment variable DESI_PRODUCT_ROOT ' +
                              'is not set, but this is probably not a ' +
                              'problem at NERSC.')
                else:
                    log.warning(('The environment variable {0} is not ' +
                                'set!').format(e))
        parser = ArgumentParser(description="Install DESI software.",
                                prog=self.executable)
        parser.add_argument('-a', '--anaconda', action='store', dest='anaconda',
                            default=self.anaconda_version(), metavar='VERSION',
                            help="Set the version of the DESI+Anaconda software stack.")
        parser.add_argument('-b', '--bootstrap', action='store_true',
                            dest='bootstrap',
                            help=("Run in bootstrap mode to install the " +
                                  "desiutil product."))
        parser.add_argument('-C', '--compile-c', action='store_true',
                            dest='force_build_type',
                            help=("Force C/C++ install mode, even if a " +
                                  "setup.py file is detected (WARNING: " +
                                  "this is for experts only)."))
        parser.add_argument('-c', '--configuration', action='store',
                            dest='config_file', default='',
                            metavar='FILE',
                            help=("Override built-in configuration with " +
                                  "data from FILE."))
        parser.add_argument('-d', '--default', action='store_true',
                            dest='default',
                            help='Make this version the default version.')
        parser.add_argument('-F', '--force', action='store_true',
                            dest='force',
                            help=('Overwrite any existing installation of ' +
                                  'this product/version.'))
        parser.add_argument('-k', '--keep', action='store_true',
                            dest='keep',
                            help='Keep the exported build directory.')
        parser.add_argument('-m', '--module-home', action='store',
                            dest='moduleshome',
                            default=check_env['MODULESHOME'],
                            metavar='DIR',
                            help='Set or override the value of $MODULESHOME')
        parser.add_argument('-M', '--module-dir', action='store',
                            dest='moduledir',
                            default='',
                            metavar='DIR',
                            help="Install module files in DIR.")
        parser.add_argument('-r', '--root', action='store',
                            dest='root',
                            default=check_env['DESI_PRODUCT_ROOT'],
                            metavar='DIR',
                            help=('Set or override the value of ' +
                                  '$DESI_PRODUCT_ROOT'))
        parser.add_argument('-t', '--test', action='store_true',
                            dest='test',
                            help=('Test Mode..  Do not actually install ' +
                                  'anything.'))
        parser.add_argument('-U', '--username', action='store',
                            dest='username',
                            default=check_env['USER'],
                            metavar='USER',
                            help="Set svn username to USER.")
        parser.add_argument('-v', '--verbose', action='store_true',
                            dest='verbose',
                            help='Print extra information.')
        parser.add_argument('-V', '--version', action='version',
                            version='%(prog)s ' + desiutilVersion)
        parser.add_argument('-x', '--cross-install', action='store_true',
                            dest='cross_install',
                            help=('Make the install available on multiple ' +
                                  'systems (e.g. NERSC).'))
        parser.add_argument('product', nargs='?',
                            default='NO PACKAGE',
                            help='Name of product to install.')
        parser.add_argument('product_version', nargs='?',
                            default='NO VERSION',
                            help='Version of product to install.')
        if test_args is None:  # pragma: no cover
            self.options = parser.parse_args()
        else:
            self.options = parser.parse_args(test_args)
        # Verbose/test modes (from the CLI or the instance) raise the
        # log level to DEBUG for this package's logger.
        self.ll = logging.INFO
        if self.options.verbose or self.options.test or self.test:
            self.ll = logging.DEBUG
        logging.getLogger(__name__).setLevel(self.ll)
        if test_args is not None:
            log.debug('Called parse_args() with: {0}'.format(
                      ' '.join(test_args)))
        log.debug('Set log level to {0}.'.format(
                  logging.getLevelName(self.ll)))
        # Optionally load an override configuration file; self.config
        # stays None when no file was given or parsing failed.
        self.config = None
        if self.options.config_file:
            log.debug("Detected configuration file: {0}.".format(self.options.config_file))
            c = SafeConfigParser()
            # read() returns the list of successfully parsed files.
            status = c.read([self.options.config_file])
            if status[0] == self.options.config_file:
                self.config = c
                log.debug("Successfully parsed {0}.".format(self.options.config_file))
        return self.options
Exemple #20
0
python nodotjs/server.py <MODE> <CONFIG_PATH>

Look at `config.ini` for defined modes. Defaults are `production`,
`staging`, and `test`.""")
    exit(1)

# Deployment mode chosen on the command line; must match a section in the
# config file (e.g. production, staging, test).
MODE = sys.argv[1]

# Optional second argument overrides the default config file location.
if len(sys.argv) > 2:
    CONFIG_PATH = sys.argv[2]
else:
    CONFIG_PATH = 'config.ini'

print(CONFIG_PATH)

PARSER = SafeConfigParser()

# ConfigParser.read() returns the list of files it parsed; an empty list
# means no config exists yet, so generate one with default sections.
if not len(PARSER.read(CONFIG_PATH)):
    print("No config.ini file found in this directory.  Writing a config...")

    modes = ['production', 'staging', 'test']
    for i in range(0, len(modes)):  # ew redis dbs made me loop like this
        mode = modes[i]
        PARSER.add_section(mode)
        # Each mode gets its own redis db index (the loop counter).
        PARSER.set(mode, 'db', str(i))
        PARSER.set(mode, 'cookie_secret', str(uuid.uuid4()))
        PARSER.set(mode, 'timeout', '30')
        PARSER.set(mode, 'port', '7000')
        PARSER.set(mode, 'templates_dir', './templates')
    try:
import sys, os
import time
import json
import driver
import shutil
import boto3
import botocore
from boto3.dynamodb.conditions import Key, Attr

# # Import utility helpers
# sys.path.insert(1, os.path.realpath(os.path.pardir))
# import helpers

# Get configuration.
# SafeConfigParser was a deprecated alias of ConfigParser and was removed
# in Python 3.12; ConfigParser is the drop-in replacement with identical
# behavior on Python 3.  os.environ is passed as the DEFAULT section so
# values in ann_config.ini can interpolate environment variables via
# %(name)s (keys are lower-cased by optionxform).
from configparser import ConfigParser
config = ConfigParser(os.environ)
config.read('ann_config.ini')

# Add utility code here
# NOTE(review): these lookups raise KeyError when ann_config.ini is absent
# or lacks an [aws] section -- presumably the file is deployed alongside
# this module; verify in the deployment layout.
AwsRegionName = config['aws']['AwsRegionName']
TableName = config['aws']['TableName']
CnetId = config['aws']['CnetId']
OutputBucket = config['aws']['OutputBucket']
SNS_JobResults = config['aws']['SNS_JobResults']
SNS_Archive = config['aws']['SNS_Archive']
"""A rudimentary timer for coarse-grained profiling
"""


class Timer(object):
    def __init__(self, verbose=True):
Exemple #22
0
def get_conf(conf_path):
    """Read the configuration file at *conf_path* and return the parser.

    Validates that the mandatory ``settings`` section and its ``var_path``
    option exist, derives the database/lock/info/web-data file paths under
    a ``paths`` section, and auto-detects system information (``arch``,
    ``dist``, ``vers``) via the ``isit`` library when not supplied.

    Parameters
    ----------
    conf_path : str
        Path of the INI configuration file to read.

    Returns
    -------
    SafeConfigParser
        The populated configuration parser.

    Raises
    ------
    ConfigException
        If the ``settings`` section or its ``var_path`` option is missing.
    """
    # Removed the former no-op "conf_path = conf_path" self-assignment.
    parser = SafeConfigParser()
    parser.read(conf_path)
    # Guard the add_section()/set() calls so configuration files that
    # already define [paths] or omit [packages] no longer raise
    # DuplicateSectionError / NoSectionError (consistent with how the
    # [system] section is handled below).
    if not parser.has_section('paths'):
        parser.add_section('paths')

    # Default
    if not parser.has_section('packages'):
        parser.add_section('packages')
    parser.set('packages', 'safe_conf', 'True')

    if not parser.has_section('settings'):
        raise ConfigException('Missing settings section in configuration file')
    if not parser.has_option('settings', 'var_path'):
        raise ConfigException(
            'Missing var_path option in settings section in configuration file'
        )

    # Create paths derived from the configured var_path
    parser.set('paths', 'local_db',
               os.path.join(parser.get('settings', 'var_path'), 'packages.db'))
    parser.set('paths', 'lock',
               os.path.join(parser.get('settings', 'var_path'), 'db.lock'))
    parser.set('paths', 'infos',
               os.path.join(parser.get('settings', 'var_path'), 'infos'))
    parser.set(
        'paths', 'web_data',
        os.path.join(parser.get('settings', 'var_path'), 'ubik-web.dat'))

    # Detect system info
    if not parser.has_section('system'):
        parser.add_section('system')

    if not parser.has_option('system', 'arch'):
        if isit.bit32:
            parser.set('system', 'arch', 'i386')
        elif isit.bit64:
            parser.set('system', 'arch', 'x86_64')
    else:
        # Normalize a user-supplied architecture string
        parser.set('system', 'arch', parser.get('system', 'arch').lower())

    if not parser.has_option('system', 'dist'):
        dist = 'unknown'
        vers = 'unknown'

        if isit.osx:
            dist = 'osx'
            vers = isit.osx_version
        elif isit.linux:
            if isit.debian:
                dist = "debian"
                if isit.debian_version:
                    vers = isit.debian_version
            elif isit.ubuntu:
                dist = "ubuntu"
                if isit.ubuntu_version:
                    # e.g. '14.04' -> '1404'
                    vers = isit.ubuntu_version.replace('.', '')
            elif isit.centos:
                dist = "centos"
                if isit.centos_version:
                    vers = isit.centos_version
            elif isit.redhat:
                dist = "redhat"
                if isit.redhat_version:
                    vers = isit.redhat_version
            elif isit.archlinux:
                dist = "archlinux"
                if isit.archlinux_version:
                    vers = isit.archlinux_version

        parser.set('system', 'dist', dist)
        parser.set('system', 'vers', vers)
    else:
        parser.set('system', 'dist', parser.get('system', 'dist').lower())

    if parser.has_option('system', 'vers'):
        parser.set('system', 'vers', parser.get('system', 'vers').lower())

    return parser
Exemple #23
0
def run(ini_file='TOPKAPI.ini',
        verbose=False,
        quiet=False,
        parallel_exec=True,
        nworkers=int(mp.cpu_count() - 1)):
    """Run the model.

    Parameters
    ----------
    ini_file : str
       The name of the PyTOPKAPI initialization file. This file describes the
       locations of the parameter files and model setup options. Default is to
       use a file named `TOPKAPI.ini` in the current directory.
    verbose : bool
        Prints runtime information [default False - don't display runtime
        info]. Is independent of the `quiet` keyword argument.
    quiet : bool
        Toggles whether to display an informational banner at runtime [default
        False - display banner]. Is independent of the `verbose` keyword
        argument.
    parallel_exec : bool
        Solve each cell's time-series in parallel worker processes when True
        (the default); otherwise solve time-step by time-step in a single
        process.
    nworkers : int
        Number of worker processes to spawn for solving each cell's time-series
        in parallel. Default is one fewer than CPU count reported by
        multiprocessing.

    """

    ##================================##
    ##  Read the input file (*.ini)   ##
    ##================================##
    config = SafeConfigParser()
    config.read(ini_file)

    ##~~~~~~ Numerical_options ~~~~~~##
    # Flags selecting the solver used for soil, overland and channel stores.
    solve_s = config.getint('numerical_options', 'solve_s')
    solve_o = config.getint('numerical_options', 'solve_o')
    solve_c = config.getint('numerical_options', 'solve_c')

    ##~~~~~~~~~~~ input files ~~~~~~~~~~~##
    #Param
    file_global_param = config.get('input_files', 'file_global_param')
    file_cell_param = config.get('input_files', 'file_cell_param')
    #Rain
    file_rain = config.get('input_files', 'file_rain')
    #ETP
    file_ET = config.get('input_files', 'file_ET')

    #~~~~~~~~~~~ Group (simulated event) ~~~~~~~~~~~##
    group_name = config.get('groups', 'group_name')

    ##~~~~~~ Calibration ~~~~~~##
    # Multiplicative calibration factors applied to the cell parameters below.
    fac_L = config.getfloat('calib_params', 'fac_L')
    fac_Ks = config.getfloat('calib_params', 'fac_Ks')
    fac_n_o = config.getfloat('calib_params', 'fac_n_o')
    fac_n_c = config.getfloat('calib_params', 'fac_n_c')

    ##~~~~~~ External flows ~~~~~~##
    external_flow = config.getboolean('external_flow', 'external_flow')
    if external_flow:
        file_Qexternal_flow = config.get('external_flow',
                                         'file_Qexternal_flow')
        Xexternal_flow = config.getfloat('external_flow', 'Xexternal_flow')
        Yexternal_flow = config.getfloat('external_flow', 'Yexternal_flow')

    ##~~~~~~~~~~~ output files ~~~~~~~~~~##
    file_out = config.get('output_files', 'file_out')
    ut.check_file_exist(file_out)  #create path_out if it doesn't exist
    # A pre-existing output file means we may be resuming a simulation.
    if os.path.exists(file_out):
        first_run = False
    else:
        first_run = True

    append_output = config.getboolean('output_files', 'append_output')
    if append_output is True:
        fmode = 'a'
    else:
        fmode = 'w'

    ##============================##
    ##   Read the forcing data    ##
    ##============================##
    if verbose:
        print('Read the forcing data')

    #~~~~Rainfall
    # NOTE(review): h5py.File is opened without an explicit mode; older h5py
    # defaulted to 'a' -- confirm read-only intent on newer h5py versions.
    h5_rain = h5py.File(file_rain)
    dset_name = '/{}/rainfall'.format(group_name)
    rainfall_forcing = h5_rain[dset_name][...]
    h5_rain.close()

    #~~~~ETr - Reference crop ET
    h5_ET = h5py.File(file_ET)
    dset_name = '/{}/ETr'.format(group_name)
    ETr_forcing = h5_ET[dset_name][...]

    #~~~~ETo - Open water potential evap.
    dset_name = '/{}/ETo'.format(group_name)
    ET0_forcing = h5_ET[dset_name][...]
    h5_ET.close()

    #~~~~external_flow flows
    if external_flow:
        # Column 5 of the external-flow file holds the flow record.
        external_flow_records = np.loadtxt(file_Qexternal_flow)[:, 5]
    else:
        external_flow_records = None

    ##============================##
    ## Pretreatment of input data ##
    ##============================##
    if verbose:
        print('Pretreatment of input data')

    #~~~~Read Global parameters file
    X, Dt, alpha_s, \
    alpha_o, alpha_c, \
    A_thres, W_min, W_max = pm.read_global_parameters(file_global_param)

    #~~~~Read Cell parameters file
    ar_cell_label, ar_coorx, \
    ar_coory, channel_flag, \
    Xc, ar_dam, \
    ar_tan_beta, ar_tan_beta_channel, \
    ar_L, Ks, \
    ar_theta_r, ar_theta_s, \
    ar_n_o, ar_n_c, \
    ar_cell_down, ar_pVs_t0, \
    ar_Vo_t0, ar_Qc_t0, \
    Kc, psi_b, lamda = pm.read_cell_parameters(file_cell_param)

    #~~~~Number of cell in the catchment
    nb_cell = len(ar_cell_label)

    #~~~~Computation of cell order
    node_hierarchy = pm.compute_node_hierarchy(ar_cell_label, ar_cell_down)
    ar_label_sort = pm.sort_cell(ar_cell_label, ar_cell_down)

    #~~~~Computation of upcells
    li_cell_up = pm.direct_up_cell(ar_cell_label, ar_cell_down, ar_label_sort)

    #~~~~Computation of drained area
    ar_A_drained = pm.drained_area(ar_label_sort, li_cell_up, X)

    #~~~~Apply calibration factors to the parameter values
    ar_L = ar_L * fac_L
    Ks = Ks * fac_Ks
    ar_n_o = ar_n_o * fac_n_o
    ar_n_c = ar_n_c * fac_n_c

    if verbose:
        print('Max L=', max(ar_L))
        print('Max Ks=', max(Ks))
        print('Max n_o=', max(ar_n_o))
        print('Max n_c=', max(ar_n_c))

    #~~~~Computation of model parameters from physical parameters
    Vsm, b_s, b_o, \
    W, b_c = pm.compute_cell_param(X, Xc, Dt, alpha_s,
                                         alpha_o, alpha_c, nb_cell,
                                         A_thres, W_max, W_min,
                                         channel_flag, ar_tan_beta,
                                         ar_tan_beta_channel, ar_L,
                                         Ks, ar_theta_r, ar_theta_s,
                                         ar_n_o, ar_n_c, ar_A_drained)

    #~~~~Look for the cell of external_flow tunnel
    if external_flow:
        cell_external_flow = ut.find_cell_coordinates(ar_cell_label,
                                                      Xexternal_flow,
                                                      Yexternal_flow, ar_coorx,
                                                      ar_coory, channel_flag)

        if verbose:
            print('external flows will be taken into account for cell no',\
                  cell_external_flow, ' coordinates ('\
                  ,Xexternal_flow,',',Yexternal_flow,')')
    else:
        cell_external_flow = None

    #~~~~Number of simulation time steps
    nb_time_step = rainfall_forcing.shape[0]

    ##=============================##
    ##  Variable array definition  ##
    ##=============================##

    ## Initialisation of the reservoirs
    #Matrix of soil,overland and channel store at the begining of the time step
    if append_output and not first_run:
        # Resume: take the last saved state from the previous simulation file.
        if verbose:
            print('Initialize from simulation file')

        h5file_in = h5py.File(file_out)

        Vs_t0 = h5file_in['/Soil/V_s'][-1, :]
        Vc_t0 = h5file_in['/Channel/V_c'][-1, :]
        Vo_t0 = h5file_in['/Overland/V_o'][-1, :]

        h5file_in.close()
    else:
        if verbose:
            print('Initialize from parameters')
        Vs_t0 = fl.initial_volume_soil(ar_pVs_t0, Vsm)
        Vo_t0 = ar_Vo_t0
        Vc_t0 = fl.initial_volume_channel(ar_Qc_t0, W, X, ar_n_c)

    ##=============================##
    ## HDF5 output file definition ##
    ##=============================##
    # NOTE(review): no_data is not defined in this function -- it is
    # presumably a module-level constant; verify at module scope.
    h5file, dset_Vs, dset_Vo, dset_Vc,     \
    dset_Qs_out, dset_Qo_out, dset_Qc_out, \
    dset_Q_down, dset_ET_out, dset_Ec_out  \
                                    = ut.open_simulation_file(file_out, fmode,
                                                   Vs_t0, Vo_t0, Vc_t0, no_data,
                                                   nb_cell, nb_time_step,
                                                   append_output, first_run)

    # Effective (drainable) porosity of each cell.
    eff_theta = ar_theta_s - ar_theta_r

    ##===========================##
    ##     Core of the Model     ##
    ##===========================##
    if not quiet:
        ut.show_banner(ini_file, nb_cell, nb_time_step)
        progress_desc = 'Simulation'
    else:
        progress_desc = 'PyTOPKAPI v{}'.format(pytopkapi.__version__)

    # prepare parameter dict shared by both execution strategies
    exec_params = {
        'nb_cell': nb_cell,
        'nb_time_step': nb_time_step,
        'progress_desc': progress_desc,
        'Dt': Dt,
        'rainfall_forcing': rainfall_forcing,
        'ETr_forcing': ETr_forcing,
        'ET0_forcing': ET0_forcing,
        'psi_b': psi_b,
        'lamda': lamda,
        'eff_theta': eff_theta,
        'Ks': Ks,
        'X': X,
        'b_s': b_s,
        'b_o': b_o,
        'b_c': b_c,
        'alpha_s': alpha_s,
        'alpha_o': alpha_o,
        'alpha_c': alpha_c,
        'Vs_t0': Vs_t0,
        'Vo_t0': Vo_t0,
        'Vc_t0': Vc_t0,
        'Vsm': Vsm,
        'dset_Vs': dset_Vs,
        'dset_Vo': dset_Vo,
        'dset_Vc': dset_Vc,
        'dset_Qs_out': dset_Qs_out,
        'dset_Qo_out': dset_Qo_out,
        'dset_Qc_out': dset_Qc_out,
        'dset_Q_down': dset_Q_down,
        'dset_ET_out': dset_ET_out,
        'dset_Ec_out': dset_Ec_out,
        'solve_s': solve_s,
        'solve_o': solve_o,
        'solve_c': solve_c,
        'channel_flag': channel_flag,
        'W': W,
        'Xc': Xc,
        'Kc': Kc,
        'cell_external_flow': cell_external_flow,
        'external_flow_records': external_flow_records,
        'node_hierarchy': node_hierarchy,
        'li_cell_up': li_cell_up,
        'nworkers': nworkers
    }

    if not parallel_exec:
        # Serial execution. Solve by timestep in a single process.
        # Outer loop timesteps - inner loop cells
        _serial_execute(exec_params)
    else:
        # Parallel execution. Solve by cell using multiple processes.
        # Outer loop cells - inner loop timesteps
        _parallel_execute(exec_params)

    h5file.close()
Exemple #24
0
def get_default_config(work_dir='.', section='recon-neonatal-cortex'):
    """Get default configuration.

    Builds a parser pre-populated with the default session directory
    layout, input/intermediate/output file paths, and Draw-EM label sets
    for the requested configuration *section*.

    Parameters
    ----------
    work_dir : str
        Working directory substituted for ``%(WorkDir)s`` in path templates.
    section : str
        Name of the configuration section to create and populate.

    Returns
    -------
    SafeConfigParser
        Parser with the single populated *section*.  Path options keep the
        ``%(SubjectId)s``/``%(SessionId)s`` placeholders, which the caller
        must define before performing interpolated lookups.
    """
    # directories
    session_dir = os.path.join('%(WorkDir)s', '%(SubjectId)s-%(SessionId)s')
    input_dir   = os.path.join(session_dir, 'input')
    temp_dir    = os.path.join(session_dir, 'temp')
    mesh_dir    = os.path.join(session_dir, 'meshes')
    logs_dir    = os.path.join(session_dir, 'logs')
    # configuration
    config  = SafeConfigParser(defaults={'work_dir': work_dir, 'temp_dir': temp_dir})
    # BUG FIX: the 'section' parameter was previously overwritten with
    # 'args.section', referencing an undefined global (NameError) and
    # making the parameter useless.  Use the parameter as documented.
    config.add_section(section)
    config.set(section, 'logs_dir', logs_dir)
    # input file paths
    config.set(section, 'input_t1w_image',     os.path.join(input_dir, 't1w-image.nii.gz'))
    config.set(section, 'input_t2w_image',     os.path.join(input_dir, 't2w-image.nii.gz'))
    config.set(section, 'input_brain_mask',    os.path.join(input_dir, 'brain-mask.nii.gz'))
    config.set(section, 'input_labels_image',  os.path.join(input_dir, 'brain-labels.nii.gz'))
    config.set(section, 'input_tissues_image', os.path.join(input_dir, 'tissue-labels.nii.gz'))
    # intermediate file paths
    config.set(section, 't1w_image',             os.path.join(temp_dir, 't1w-image.nii.gz'))
    config.set(section, 't2w_image',             os.path.join(temp_dir, 't2w-image.nii.gz'))
    config.set(section, 'brain_mask',            os.path.join(temp_dir, 'brain-mask.nii.gz'))
    config.set(section, 'white_matter_mask',     os.path.join(temp_dir, 'white-matter-mask.nii.gz'))
    config.set(section, 'gray_matter_mask',      os.path.join(temp_dir, 'gray-matter-mask.nii.gz'))
    config.set(section, 'deep_gray_matter_mask', os.path.join(temp_dir, 'deep-gray-matter-mask.nii.gz'))
    config.set(section, 'corpus_callosum_mask',  os.path.join(temp_dir, 'corpus-callosum-mask.nii.gz'))
    config.set(section, 'ventricles_mask',       os.path.join(temp_dir, 'ventricles-mask.nii.gz'))
    config.set(section, 'ventricles_dmap',       os.path.join(temp_dir, 'ventricles-dmap.nii.gz'))
    config.set(section, 'regions_mask',          os.path.join(temp_dir, 'regions.nii.gz'))
    config.set(section, 'cortical_hull_dmap',    os.path.join(temp_dir, 'cortical-hull-dmap.nii.gz'))
    # output file paths
    config.set(section, 'brain_mesh',            os.path.join(mesh_dir, 'brain.vtp'))
    config.set(section, 'bs_cb_mesh',            os.path.join(mesh_dir, 'bs+cb.vtp'))
    config.set(section, 'internal_mesh',         os.path.join(mesh_dir, 'internal.vtp'))
    config.set(section, 'cerebrum_mesh',         os.path.join(mesh_dir, 'cerebrum.vtp'))
    config.set(section, 'right_cerebrum_mesh',   os.path.join(mesh_dir, 'cerebrum-rh.vtp'))
    config.set(section, 'left_cerebrum_mesh',    os.path.join(mesh_dir, 'cerebrum-lh.vtp'))
    config.set(section, 'white_mesh',            os.path.join(mesh_dir, 'white.vtp'))
    config.set(section, 'right_white_mesh',      os.path.join(mesh_dir, 'white-rh.vtp'))
    config.set(section, 'left_white_mesh',       os.path.join(mesh_dir, 'white-lh.vtp'))
    config.set(section, 'pial_mesh',             os.path.join(mesh_dir, 'pial.vtp'))
    config.set(section, 'right_pial_mesh',       os.path.join(mesh_dir, 'pial-rh.vtp'))
    config.set(section, 'left_pial_mesh',        os.path.join(mesh_dir, 'pial-lh.vtp'))
    # parameters of subdivide-brain-image step to create regions mask
    config.set(section, 'subcortex_closing', '5')
    config.set(section, 'brainstem_closing', '5')
    config.set(section, 'cerebellum_closing', '5')
    # default labels used when none specified are those of Draw-EM (all_labels)
    config.set(section, 'white_matter_labels', '51..82')
    config.set(section, 'gray_matter_labels', '5..16,20..39')
    config.set(section, 'deep_gray_matter_labels', '1..4,40..47,85..87')
    config.set(section, 'lateral_ventricles_labels', '49,50')
    config.set(section, 'corpus_callosum_labels', '48')
    config.set(section, 'inter_hemisphere_labels', '40..47,85..87')
    config.set(section, 'brainstem_labels', '19')
    config.set(section, 'cerebellum_labels', '17,18')
    rh_labels = []  # right hemisphere structures
    rh_labels.extend(range(2,  48, 2))
    rh_labels.extend(range(50, 63, 2))
    rh_labels.extend(range(63, 82, 2))
    rh_labels.append(86)
    config.set(section, 'right_hemisphere_labels', ','.join([str(x) for x in rh_labels]))
    lh_labels = []  # left hemisphere structures
    lh_labels.extend(range(1,  18, 2))
    lh_labels.extend(range(21, 62, 2))
    lh_labels.extend(range(64, 83, 2))
    lh_labels.append(87)
    config.set(section, 'left_hemisphere_labels', ','.join([str(x) for x in lh_labels]))
    return config
Exemple #25
0
 def __init__(self, filename):
     """Attach a fresh settings parser for *filename* and load it.

     Stores the backing file name, creates an empty SafeConfigParser,
     then delegates to load() to populate the settings from disk.
     """
     self.settings = SafeConfigParser()
     self.filename = filename
     self.load()
Exemple #26
0
 def initialise(**kwargs):
     """Create the shared parser and read ``project.config``.

     The configuration file is resolved relative to this module's
     directory.  *kwargs* is accepted for interface compatibility but
     is not used.
     """
     Config.__parser_config = SafeConfigParser()
     base_dir = os.path.dirname(__file__)
     config_path = os.path.join(base_dir, 'project.config')
     Config.__parser_config.read(config_path)
Exemple #27
0
def generate_substitutions_from_package(package,
                                        os_name,
                                        os_version,
                                        ros_distro,
                                        installation_prefix='/usr',
                                        deb_inc=0,
                                        peer_packages=None,
                                        releaser_history=None,
                                        fallback_resolver=None,
                                        native=False):
    """Build the template-substitution dict for a Debian release of *package*.

    Collects name/version/description, resolved dependencies, changelog
    entries, maintainers, dates and license texts into a single dict whose
    keys match the placeholders of the Debian packaging templates.  All
    values are normalized to unicode before being returned.  Emits
    warnings for missing homepage/changelog entries and calls error()
    (possibly exiting) on unsupported build types, missing license files,
    or inconsistent changelog versions.
    """
    peer_packages = peer_packages or []
    data = {}
    # Name, Version, Description
    data['Name'] = package.name
    data['Version'] = package.version
    data['Description'] = format_description(package.description)
    # Websites: first URL of type 'website' wins, empty string otherwise
    websites = [str(url) for url in package.urls if url.type == 'website']
    homepage = websites[0] if websites else ''
    if homepage == '':
        warning("No homepage set, defaulting to ''")
    data['Homepage'] = homepage
    # Debian Increment Number (native packages carry no increment suffix)
    data['DebianInc'] = '' if native else '-{0}'.format(deb_inc)
    # Debian Package Format
    data['format'] = 'native' if native else 'quilt'
    # Package name
    data['Package'] = sanitize_package_name(package.name)
    # Installation prefix
    data['InstallationPrefix'] = installation_prefix
    # Resolve dependencies
    depends = package.run_depends + package.buildtool_export_depends
    build_depends = package.build_depends + package.buildtool_depends + package.test_depends

    unresolved_keys = depends + build_depends + package.replaces + package.conflicts
    # The installer key is not considered here, but it is checked when the keys are checked before this
    resolved_deps = resolve_dependencies(
        unresolved_keys, os_name, os_version, ros_distro,
        peer_packages + [d.name for d in package.replaces + package.conflicts],
        fallback_resolver)
    data['Depends'] = sorted(set(format_depends(depends, resolved_deps)))
    data['BuildDepends'] = sorted(
        set(format_depends(build_depends, resolved_deps)))
    data['Replaces'] = sorted(
        set(format_depends(package.replaces, resolved_deps)))
    data['Conflicts'] = sorted(
        set(format_depends(package.conflicts, resolved_deps)))

    # Build-type specific substitutions.
    build_type = package.get_build_type()
    if build_type == 'catkin':
        pass
    elif build_type == 'cmake':
        pass
    elif build_type == 'ament_cmake':
        pass
    elif build_type == 'ament_python':
        # Don't set the install-scripts flag if it's already set in setup.cfg.
        package_path = os.path.abspath(os.path.dirname(package.filename))
        setup_cfg_path = os.path.join(package_path, 'setup.cfg')
        data['pass_install_scripts'] = True
        if os.path.isfile(setup_cfg_path):
            setup_cfg = SafeConfigParser()
            setup_cfg.read([setup_cfg_path])
            if (setup_cfg.has_option('install', 'install-scripts')
                    or setup_cfg.has_option('install', 'install_scripts')):
                data['pass_install_scripts'] = False
    else:
        error("Build type '{}' is not supported by this version of bloom.".
              format(build_type),
              exit=True)

    # Set the distribution
    data['Distribution'] = os_version
    # Use the time stamp to set the date strings
    stamp = datetime.datetime.now(tz.tzlocal())
    data['Date'] = stamp.strftime('%a, %d %b %Y %T %z')
    data['YYYY'] = stamp.strftime('%Y')
    # Maintainers (first maintainer is the primary one)
    maintainers = []
    for m in package.maintainers:
        maintainers.append(str(m))
    data['Maintainer'] = maintainers[0]
    data['Maintainers'] = ', '.join(maintainers)
    # Changelog
    changelogs = get_changelogs(package, releaser_history)
    if changelogs and package.version not in [x[0] for x in changelogs]:
        warning("")
        warning(
            "A CHANGELOG.rst was found, but no changelog for this version was found."
        )
        warning(
            "You REALLY should have a entry (even a blank one) for each version of your package."
        )
        warning("")
    if not changelogs:
        # Ensure at least a minimal changelog
        changelogs = []
    if package.version not in [x[0] for x in changelogs]:
        # Synthesize a placeholder entry for the version being released.
        changelogs.insert(0, (
            package.version, get_rfc_2822_date(datetime.datetime.now()),
            '  * Autogenerated, no changelog for this version found in CHANGELOG.rst.',
            package.maintainers[0].name, package.maintainers[0].email))
    bad_changelog = False
    # Make sure that the first change log is the version being released
    if package.version != changelogs[0][0]:
        error("")
        error("The version of the first changelog entry '{0}' is not the "
              "same as the version being currently released '{1}'.".format(
                  package.version, changelogs[0][0]))
        bad_changelog = True
    # Make sure that the current version is the latest in the changelog
    for changelog in changelogs:
        if parse_version(package.version) < parse_version(changelog[0]):
            error("")
            error(
                "There is at least one changelog entry, '{0}', which has a "
                "newer version than the version of package '{1}' being released, '{2}'."
                .format(changelog[0], package.name, package.version))
            bad_changelog = True
    if bad_changelog:
        error("This is almost certainly by mistake, you should really take a "
              "look at the changelogs for the package you are releasing.")
        error("")
        if not maybe_continue('n', 'Continue anyways'):
            sys.exit("User quit.")
    data['changelogs'] = changelogs
    # Use debhelper version 7 for oneric, otherwise 9
    data['debhelper_version'] = 7 if os_version in ['oneiric'] else 9
    # Summarize dependencies
    summarize_dependency_mapping(data, depends, build_depends, resolved_deps)
    # Copyright: concatenate all referenced license files, separated by a rule
    licenses = []
    separator = '\n' + '=' * 80 + '\n\n'
    for l in package.licenses:
        if hasattr(l, 'file') and l.file is not None:
            license_file = os.path.join(os.path.dirname(package.filename),
                                        l.file)
            if not os.path.exists(license_file):
                error("License file '{}' is not found.".format(license_file),
                      exit=True)
            license_text = open(license_file, 'r').read()
            if not license_text.endswith('\n'):
                license_text += '\n'
            licenses.append(license_text)
    data['Copyright'] = separator.join(licenses)

    def convertToUnicode(obj):
        # Recursively coerce strings/bytes (and containers of them) to
        # unicode text on both Python 2 and Python 3.
        if sys.version_info.major == 2:
            if isinstance(obj, str):
                return unicode(obj.decode('utf8'))
            elif isinstance(obj, unicode):
                return obj
        else:
            if isinstance(obj, bytes):
                return str(obj.decode('utf8'))
            elif isinstance(obj, str):
                return obj
        if isinstance(obj, list):
            for i, val in enumerate(obj):
                obj[i] = convertToUnicode(val)
            return obj
        elif isinstance(obj, type(None)):
            return None
        elif isinstance(obj, tuple):
            obj_tmp = list(obj)
            for i, val in enumerate(obj_tmp):
                obj_tmp[i] = convertToUnicode(obj_tmp[i])
            return tuple(obj_tmp)
        elif isinstance(obj, int):
            return obj
        raise RuntimeError('need to deal with type %s' % (str(type(obj))))

    for item in data.items():
        data[item[0]] = convertToUnicode(item[1])

    return data
Exemple #28
0
    def confParser(self):
        '''Parse a config file to find potential conf file settings.

        Reads ``macbuild.conf`` from [stonixroot]/src/MacBuild.  On a
        successful parse the class build variables (app names, icons,
        versions, library paths, codesign options) are initialized from
        the file; if reading the parsed sections fails, built-in defaults
        are used instead.  As a side effect the process CWD is changed
        and self.STONIX_ROOT is recorded.

        @author: rsn

        @return: bool -- True when a configuration (or the fallback
                 defaults) was processed; False when macbuild.conf does
                 not exist.
        '''
        success = False
        # This script should be run from [stonixroot]/src/MacBuild. We must
        os.chdir("../..")
        self.STONIX_ROOT = os.getcwd()
        os.chdir("src/MacBuild")
        macbuild_root = os.getcwd()
        myconf = os.path.join(macbuild_root, 'macbuild.conf')
        print(myconf)
        if os.path.isfile(myconf):
            self.parser = SafeConfigParser()
            # The second candidate intentionally does not exist;
            # parser.read() silently skips unreadable entries.
            candidates = [myconf, 'not_a_real_conf.conf']
            found = self.parser.read(candidates)
            missing = set(candidates) - set(found)

            try:
                dict1 = {}
                for section in self.parser.sections():
                    dict1[section] = self.configSectionMap(section)
                print(dict1)
            except Exception:
                # Narrowed from a bare "except:" so SystemExit and
                # KeyboardInterrupt are no longer swallowed.
                #####
                # happens if there was a problem attempting to read the config
                # file, Initializing class variables.
                self.STONIX = "stonix"
                self.STONIXICON = "stonix_icon"
                self.STONIXVERSION = self.APPVERSION
                self.STONIX4MAC = "stonix4mac"
                self.STONIX4MACICON = "stonix_icon"
                self.STONIX4MACVERSION = self.APPVERSION
                #-- Internal libraries
                from .macbuildlib import macbuildlib
                self.mbl = macbuildlib(self.logger)
                self.PYUIC = self.mbl.getpyuicpath()
                self.codesignVerbose = 'vvvv'
                self.codesignDeep = True
            else:
                #####
                # Config file read, initializing class variables.
                self.STONIX = dict1['stonix']['app']
                self.STONIXICON = dict1['stonix']['app_icon']
                self.STONIXVERSION = dict1['stonix']['app_version']
                self.STONIX4MAC = dict1['stonix']['wrapper']
                self.STONIX4MACICON = dict1['stonix']['wrapper_icon']
                self.STONIX4MACVERSION = dict1['stonix']['wrapper_version']
                self.PYUIC = dict1['libpaths']['pyuic']
                self.PYPATHS = dict1['libpaths']['pythonpath'].split(':')
                self.logger.log(
                    lp.INFO, 'attempting to get codesigning information...')
                self.codesignVerbose = dict1['codesign']['verbose']
                if re.match('^True$', dict1['codesign']['deep']):
                    self.codesignDeep = True
                else:
                    self.codesignDeep = False
                self.logger.log(lp.INFO, "Grabbed codesign info...")
                for path in self.PYPATHS:
                    sys.path.append(path)
                #-- Internal libraries
                try:
                    from .macbuildlib import macbuildlib
                    self.mbl = macbuildlib(self.logger, self.PYPATHS)
                except Exception as err:
                    raise
                self.logger.log(lp.INFO, "... macbuildlib loaded ...")
            finally:
                # Runs after either branch above; self.mbl was set in both.
                self.hiddenimports = self.mbl.getHiddenImports()
                self.logger.log(lp.DEBUG,
                                "Hidden imports: " + str(self.hiddenimports))
                success = True

        return success
Exemple #29
0
    def _parse(paths, prepos, ignored_map, ignored_location_map, local_config,
               portdir):
        """Parse files in paths to load config"""
        # NOTE(review): ignored_map, ignored_location_map and portdir are
        # accepted but not referenced in this body -- presumably kept for
        # interface compatibility with callers; verify before removing.
        parser = SafeConfigParser()

        # use read_file/readfp in order to control decoding of unicode
        try:
            # Python >=3.2
            read_file = parser.read_file
            source_kwarg = 'source'
        except AttributeError:
            # Python 2 / <3.2 fallback: readfp takes 'filename' instead
            read_file = parser.readfp
            source_kwarg = 'filename'

        # Expand string paths to every file found beneath them; pass
        # file-like objects (e.g. io.StringIO) through unchanged.
        recursive_paths = []
        for p in paths:
            if isinstance(p, basestring):
                recursive_paths.extend(_recursive_file_list(p))
            else:
                recursive_paths.append(p)

        for p in recursive_paths:
            if isinstance(p, basestring):
                f = None
                try:
                    f = io.open(_unicode_encode(p,
                                                encoding=_encodings['fs'],
                                                errors='strict'),
                                mode='r',
                                encoding=_encodings['repo.content'],
                                errors='replace')
                except EnvironmentError:
                    # Unreadable/missing files are silently skipped.
                    pass
                else:
                    # The 'source' keyword argument is needed since otherwise
                    # ConfigParser in Python <3.3.3 may throw a TypeError
                    # because it assumes that f.name is a native string rather
                    # than binary when constructing error messages.
                    kwargs = {source_kwarg: p}
                    read_file(f, **portage._native_kwargs(kwargs))
                finally:
                    if f is not None:
                        f.close()
            elif isinstance(p, io.StringIO):
                kwargs = {source_kwarg: "<io.StringIO>"}
                read_file(p, **portage._native_kwargs(kwargs))
            else:
                raise TypeError(
                    "Unsupported type %r of element %r of 'paths' argument" %
                    (type(p), p))

        # The parser's DEFAULT section seeds the fallback repo config.
        prepos['DEFAULT'] = RepoConfig("DEFAULT",
                                       parser.defaults(),
                                       local_config=local_config)

        # One RepoConfig per section; sections with inconsistent sync
        # attributes are reported and dropped.
        for sname in parser.sections():
            optdict = {}
            for oname in parser.options(sname):
                optdict[oname] = parser.get(sname, oname)

            repo = RepoConfig(sname, optdict, local_config=local_config)

            if repo.sync_type is not None and repo.sync_uri is None:
                writemsg_level("!!! %s\n" % _(
                    "Repository '%s' has sync-type attribute, but is missing sync-uri attribute"
                ) % sname,
                               level=logging.ERROR,
                               noiselevel=-1)
                continue

            if repo.sync_uri is not None and repo.sync_type is None:
                writemsg_level("!!! %s\n" % _(
                    "Repository '%s' has sync-uri attribute, but is missing sync-type attribute"
                ) % sname,
                               level=logging.ERROR,
                               noiselevel=-1)
                continue

            if repo.sync_type not in (None, "cvs", "git", "rsync"):
                writemsg_level("!!! %s\n" % _(
                    "Repository '%s' has sync-type attribute set to unsupported value: '%s'"
                ) % (sname, repo.sync_type),
                               level=logging.ERROR,
                               noiselevel=-1)
                continue

            if repo.sync_type == "cvs" and repo.sync_cvs_repo is None:
                writemsg_level("!!! %s\n" % _(
                    "Repository '%s' has sync-type=cvs, but is missing sync-cvs-repo attribute"
                ) % sname,
                               level=logging.ERROR,
                               noiselevel=-1)
                continue

            # For backward compatibility with locations set via PORTDIR and
            # PORTDIR_OVERLAY, delay validation of the location and repo.name
            # until after PORTDIR and PORTDIR_OVERLAY have been processed.
            prepos[sname] = repo
def build_erfs_survey_collection():
    """
    Build a SurveyCollection describing the ERFS (ERF/EEC) tables for the
    survey years 2006 to 2009.

    The configured ``input_directory`` (read from ``config_local.ini``,
    falling back to ``config.ini``, both in ``CONFIG_DIR``) is used to turn
    every relative ``RData_filename`` into an absolute path.

    Returns
    -------
    SurveyCollection
        the collection that was built
    """
    # self.hdf5_filename = os.path.join(os.path.dirname(ERF_HDF5_DATA_DIR),'erf','erf.h5')

    # Read the configuration once: the data directory is year-independent,
    # so there is no point re-parsing it on every loop iteration.
    # Fix: SafeConfigParser is deprecated (removed in Python 3.12).
    # Fix: ConfigParser.read() takes an *iterable* of filenames -- the
    # original passed config_ini as the ``encoding`` argument.
    from configparser import ConfigParser

    parser = ConfigParser()
    config_local_ini = os.path.join(CONFIG_DIR, 'config_local.ini')
    config_ini = os.path.join(CONFIG_DIR, 'config.ini')
    parser.read([config_local_ini, config_ini])
    data_directory = parser.get('data', 'input_directory')

    erfs_survey_collection = SurveyCollection()
    for year in range(2006, 2010):
        yr = str(year)[2:]
        yr1 = str(year + 1)[2:]

        # Variables kept from the EEC tables (core + RSA + AAH related).
        eec_variables = [
            'noi', 'noicon', 'noindiv', 'noiper', 'noimer', 'ident', 'naia',
            'naim', 'lien', 'acteu', 'stc', 'contra', 'titc', 'mrec', 'forter',
            'rstg', 'retrai', 'lpr', 'cohab', 'sexe', 'agepr', 'rga', 'statut',
            'txtppb', 'encadr', 'prosa', 'nbsala', 'chpub', 'dip11'
        ]
        eec_rsa_variables = (["sp0" + str(i) for i in range(0, 10)]
                             + ["sp10", "sp11"]
                             + ['sitant', 'adeben', 'datant', 'raistp',
                                'amois', 'adfdap', 'ancentr', 'ancchom',
                                'dimtyp', 'rabsp', 'raistp', 'rdem',
                                'ancinatm'])
        eec_aah_variables = ["rc1rev", "maahe"]
        eec_variables += eec_rsa_variables + eec_aah_variables

        # table name -> {RData file name (no extension), year, variables to
        # keep (None means keep everything)}
        erf_tables = {
            "erf_menage": {
                "RData_filename": "menage" + yr,
                "year": year,
                "variables": None,
            },
            "eec_menage": {
                "RData_filename": "mrf" + yr + "e" + yr + "t4",
                "year": year,
                "variables": None,
            },
            "foyer": {
                "RData_filename": "foyer" + yr,
                "year": year,
                "variables": None,
            },
            "erf_indivi": {
                "RData_filename": "indivi" + yr,
                "year": year,
                "variables": [
                    'noi', 'noindiv', 'ident', 'declar1', 'quelfic',
                    'persfip', 'declar2', 'persfipd', 'wprm',
                    "zsali", "zchoi", "ztsai", "zreti", "zperi", "zrsti",
                    "zalri", "zrtoi", "zragi", "zrici", "zrnci",
                    "zsalo", "zchoo", "ztsao", "zreto", "zpero", "zrsto",
                    "zalro", "zrtoo", "zrago", "zrico", "zrnco",
                ],
            },
            "eec_indivi": {
                "RData_filename": "irf" + yr + "e" + yr + "t4",
                "year": year,
                "variables": eec_variables,
            },
            "eec_cmp_1": {
                "RData_filename": "icomprf" + yr + "e" + yr1 + "t1",
                "year": year,
                "variables": eec_variables,
            },
            "eec_cmp_2": {
                "RData_filename": "icomprf" + yr + "e" + yr1 + "t2",
                "year": year,
                "variables": eec_variables,
            },
            "eec_cmp_3": {
                "RData_filename": "icomprf" + yr + "e" + yr1 + "t3",
                "year": year,
                "variables": eec_variables,
            },
        }

        # Build absolute paths for RData_filename.
        # Fix: the original iterated the dict *keys* (plain strings) and then
        # indexed them, a guaranteed TypeError; it also dropped the file name
        # from the joined path.  TODO(review): confirm the expected layout is
        # <input_directory>/R/erf/<RData_filename>.
        for table in erf_tables.values():
            table["RData_filename"] = os.path.join(
                os.path.dirname(data_directory), 'R', 'erf',
                table["RData_filename"])

        # NOTE(review): the per-year descriptions are built but never
        # attached to the collection here -- confirm how surveys get
        # registered (the original read ``erfs_survey_collection.surveys``
        # into an unused local and did nothing with it).

    return erfs_survey_collection

    def initialize(self):
        """
        Initialize survey data
        """

        self.initialize_erf(tables=tables)
#        self.initialize_logement()

    def initialize_erf(self, tables=None):
        """
        Register the ERF/EEC tables of ``self.year`` in an "erf"
        SurveyDescription.

        Parameters
        ----------
        tables : iterable of strings, default None
                 names of the tables to register; when None every known
                 ERF/EEC table is registered.
        """
        year = self.year
        erf = SurveyDescription()
        yr = str(year)[2:]
        yr1 = str(year + 1)[2:]

        # Variables kept from the ERF individual table.
        variables = [
            'noi', 'noindiv', 'ident', 'declar1', 'quelfic', 'persfip',
            'declar2', 'persfipd', 'wprm', "zsali", "zchoi", "ztsai", "zreti",
            "zperi", "zrsti", "zalri", "zrtoi", "zragi", "zrici", "zrnci",
            "zsalo", "zchoo", "ztsao", "zreto", "zpero", "zrsto", "zalro",
            "zrtoo", "zrago", "zrico", "zrnco"
        ]

        # Variables kept from the EEC tables (core + RSA + AAH related).
        variables_eec = [
            'noi', 'noicon', 'noindiv', 'noiper', 'noimer', 'ident', 'naia',
            'naim', 'lien', 'acteu', 'stc', 'contra', 'titc', 'mrec', 'forter',
            'rstg', 'retrai', 'lpr', 'cohab', 'sexe', 'agepr', 'rga', 'statut',
            'txtppb', 'encadr', 'prosa', 'nbsala', 'chpub', 'dip11'
        ]
        variables_eec_rsa = (["sp0" + str(i) for i in range(0, 10)]
                             + ["sp10", "sp11"]
                             + ['sitant', 'adeben', 'datant', 'raistp',
                                'amois', 'adfdap', 'ancentr', 'ancchom',
                                'dimtyp', 'rabsp', 'raistp', 'rdem',
                                'ancinatm'])
        variables_eec_aah = ["rc1rev", "maahe"]
        variables_eec += variables_eec_rsa + variables_eec_aah

        # table name -> {RData file name (no extension), variables to keep
        # (None means keep everything)}
        erf_tables = {
            "erf_menage": {
                "RData_filename": "menage" + yr,
                "variables": None
            },
            "eec_menage": {
                "RData_filename": "mrf" + yr + "e" + yr + "t4",
                "variables": None
            },
            "foyer": {
                "RData_filename": "foyer" + yr,
                "variables": None
            },
            "erf_indivi": {
                "RData_filename": "indivi" + yr,
                "variables": variables
            },
            "eec_indivi": {
                "RData_filename": "irf" + yr + "e" + yr + "t4",
                "variables": variables_eec
            },
            "eec_cmp_1": {
                "RData_filename": "icomprf" + yr + "e" + yr1 + "t1",
                "variables": variables_eec
            },
            "eec_cmp_2": {
                "RData_filename": "icomprf" + yr + "e" + yr1 + "t2",
                "variables": variables_eec
            },
            "eec_cmp_3": {
                "RData_filename": "icomprf" + yr + "e" + yr1 + "t3",
                "variables": variables_eec
            }
        }

        RData_dir = os.path.join(os.path.dirname(DATA_DIR), 'R', 'erf')

        # Fix: the original loop referenced the undefined name
        # ``RData_filename`` and passed the same erf_indivi ``variables``
        # list for every table; use the per-table description instead.
        # (The original also built a dead name->filename dict that was
        # immediately overwritten, and computed RData_dir twice.)
        if tables is None:
            names = erf_tables.keys()
        else:
            names = tables
        for name in names:
            description = erf_tables[name]
            erf.insert_table(name=name,
                             RData_filename=description["RData_filename"],
                             RData_dir=RData_dir,
                             variables=description["variables"])

        self.surveys["erf"] = erf

    def initialize_logement(self):
        """
        Register the housing ("logement") survey tables of ``self.year`` in
        a "lgt" SurveyDescription.

        Raises
        ------
        ValueError
            when the year is not one of the supported survey years
            (2003, 2006-2009).
        """
        year = self.year
        lgt = SurveyDescription()
        yr = str(year)[2:]

        if yr == "03":
            lgt_men = "menage"
            # Fix: this was assigned to ``lgt_logt`` (typo) while the code
            # below reads ``lgt_lgt``, so the 2003 survey crashed with a
            # NameError.  None presumably means "no logement table for
            # 2003" -- TODO confirm insert_table accepts it.
            lgt_lgt = None
        elif yr in ("06", "07", "08", "09"):
            lgt_men = "menage1"
            lgt_lgt = "logement"
        else:
            # Fix: other years previously crashed with a NameError; fail
            # with an explicit message instead.
            raise ValueError(
                "logement survey is not available for year %s" % year)

        # NOTE(review): the original also built an ident-renaming dict
        # (``renameidlgt``) and ``yr1`` that were never used; both dropped.

        lgt_tables_to_process = {
            "adresse": "adresse",
            "lgt_menage": lgt_men,
            "lgt_logt": lgt_lgt
        }

        RData_dir = os.path.join(os.path.dirname(DATA_DIR), 'R', 'logement')
        for name, RData_filename in lgt_tables_to_process.items():
            lgt.insert_table(name=name,
                             RData_filename=RData_filename,
                             RData_dir=RData_dir)

        self.surveys["lgt"] = lgt

    def initialize_patrimoine(self, year):
        """
        Describe the "patrimoine" (wealth) survey tables.

        TODO: register the description in ``self.surveys`` like the other
        initialize_* methods do.

        Parameters
        ----------
        year : int
               survey year (currently unused by this stub)

        Returns
        -------
        dict
            the survey description (name, data_dir, tables_to_process)
        """
        pat_tables_to_process = {
            "pat_individu": "individu",
            # NOTE(review): "meange" looks like a typo for "menage" -- kept
            # as-is because it must match the RData file name on disk;
            # confirm against the data directory.
            "pat_menage": "meange",
            "pat_produit": "produit",
            "pat_transmission": "transm"
        }

        # Fix: the original computed this path twice (once into an unused
        # ``pat_data_dir`` local) and then discarded the whole description;
        # build it once and return it (the original implicitly returned
        # None, so returning the dict is backward-compatible).
        pat = {
            "name": "patrimoine",
            "data_dir": os.path.join(os.path.dirname(DATA_DIR), 'R',
                                     'patrimoine'),
            "tables_to_process": pat_tables_to_process
        }
        return pat

    def set_config(self, **kwargs):
        """
        Set configuration parameters and store every registered survey table
        into the HDF5 file.

        Parameters
        ----------
        year : int, default None
               year of the survey

        Raises
        ------
        Exception
            when ``self.year`` is not defined.
        """
        if self.year is None:
            raise Exception("year should be defined")

        # Fix: the original opened an HDFStore here that was never used and
        # never closed -- a handle leak, and holding it open while
        # store_survey reopens the same file risks HDF5 locking conflicts.
        for survey_name, description in self.surveys.items():
            for destination_table_name, tables in description.tables.items():
                data_dir = tables["RData_dir"]
                R_table_name = tables["RData_filename"]
                # Fix: replaced a broad try/except around the key lookup
                # with dict.get() (returns None when absent).
                variables = tables.get("variables")
                print(variables)
                self.store_survey(survey_name, R_table_name,
                                  destination_table_name, data_dir, variables)

    def store_survey(self,
                     survey_name,
                     R_table_name,
                     destination_table_name,
                     data_dir,
                     variables=None,
                     force_recreation=True):
        """
        Store a R data table in an HDF5 file

        Parameters
        ----------

        survey_name : string
                       the name of the survey
        R_table_name : string
                       the name of the R data table
        destination_table_name : string
                                 the name of the table in the HDFStore
        data_dir : path
                   the directory where to find the RData file

        variables : list of string, default None
                    When not None, list of the variables to keep
        force_recreation : bool, default True
                           when True an existing destination table is
                           overwritten; when False it is left untouched

        Raises
        ------
        Exception
            when the RData file does not exist on disk.
        """
        # Free memory before loading a potentially large R table.
        gc.collect()
        year = self.year

        def get_survey_year(survey_name, year):
            # Map the requested year to the on-disk directory year: the
            # "logement" survey only exists for 2003 and 2006 (the latter
            # used for 2006-2009), "patrimoine" only for 2004; every other
            # survey uses the requested year directly.
            # NOTE(review): "logement" with a year outside 2003/2006-2009
            # falls through and returns ``year`` unchanged -- confirm this
            # is intended.
            if survey_name == "logement":
                if year == 2003:
                    return 2003
                elif year in range(2006, 2010):
                    return 2006
            if survey_name == "patrimoine":
                return 2004
            else:
                return year

        print("creating %s" % (destination_table_name))
        # The RData file is expected under <data_dir>/<survey year>/.
        table_Rdata = R_table_name + ".Rdata"
        filename = os.path.join(data_dir,
                                str(get_survey_year(survey_name, year)),
                                table_Rdata)
        print(filename)
        if not os.path.isfile(filename):
            raise Exception("filename do  not exists")

        # Load the table into the R session, then convert it to a pandas
        # DataFrame (``rpy`` and ``com`` are module-level dependencies).
        rpy.r.load(filename)
        stored_table = com.load_data(R_table_name)
        store = HDFStore(self.hdf5_filename)
        # Tables are stored under the key "<year>/<destination_table_name>".
        store_path = str(self.year) + "/" + destination_table_name

        if store_path in store:
            if force_recreation is not True:
                # Destination already present and re-creation not forced:
                # leave it as-is.
                print(store_path + "already exists, do not re-create and exit")
                store.close()
                return

        if variables is not None:

            print(store)
            print(store_path)
            print(variables)
            # Keep only the requested variables that actually exist in the
            # R table; print the ones that were requested but are missing.
            variables_stored = list(
                set(variables).intersection(set(stored_table.columns)))
            print(list(set(variables).difference((set(stored_table.columns)))))
            store[store_path] = stored_table[variables_stored]
        else:
            store[store_path] = stored_table
        store.close()
        del stored_table
        # Encourage prompt release of the (potentially large) DataFrame.
        gc.collect()

    def get_value(self, variable, table=None):
        """
        Get value

        Parameters
        ----------
        variable : string
                  name of the variable
        table : string, default None
                name of the table where to get variable
        Returns
        -------
        df : DataFrame, default None
             A DataFrame containing the variable
        """
        # Delegate to the list-based accessor with a one-element list.
        return self.get_values([variable], table)

    def get_values(self, variables=None, table=None):
        """
        Get values

        Parameters
        ----------
        variables : list of strings, default None
                  list of variables names, if None return the whole table
        table : string, default None
                name of the table where to get the variables
        Returns
        -------
        df : DataFrame, default None
             A DataFrame containing the variables

        Raises
        ------
        Exception
            when some requested variables are not columns of the table.
        """
        # Fix: the HDFStore handle was never closed (it leaked on every
        # call, including the early return).  Reading store[key] yields an
        # in-memory DataFrame, so it stays valid after closing.
        store = HDFStore(self.hdf5_filename)
        try:
            df = store[str(self.year) + "/" + table]
        finally:
            store.close()

        # If no variables are requested, return the whole table.
        if variables is None:
            return df

        diff = set(variables) - set(df.columns)
        if diff:
            raise Exception("The following variable(s) %s are missing" % diff)
        variables = list(set(variables).intersection(df.columns))
        return df[variables]

    def get_of_value(self, variable, table=None):
        """
        Get value

        Parameters
        ----------
        variable : string
                  name of the variable
        table : string, default None
                name of the table where to get variable
        Returns
        -------
        df : DataFrame, default None
             A DataFrame containing the variable
        """
        # Delegate to the list-based accessor with a one-element list.
        return self.get_of_values([variable], table)

    def get_of_values(self, variables=None, table=None):
        """
        Get values, translating between ERF and openfisca variable names.

        Requested openfisca names are mapped to their ERF counterparts for
        the column lookup, and the selected ERF columns are renamed back to
        their openfisca names in the returned DataFrame.

        Parameters
        ----------
        variables : list of strings, default None
                  list of variables names, if None return the whole table
        table : string, default None
                name of the table where to get the variables
        Returns
        -------
        df : DataFrame, default None
             A DataFrame containing the variables
        """
        # Fix: the HDFStore handle was never closed (leaked on every call,
        # including the early return).
        store = HDFStore(self.hdf5_filename)
        try:
            df = store[str(self.year) + "/" + table]
        finally:
            store.close()

        # If no variables are requested, return the whole table.
        if variables is None:
            return df

        from openfisca_france.data.erf import get_erf2of, get_of2erf

        # Translate openfisca names into ERF names so they can be found
        # among the table columns.
        of2erf = get_of2erf()
        to_be_renamed_variables = set(of2erf.keys()).intersection(variables)
        renamed_variables = [of2erf[variable]
                             for variable in to_be_renamed_variables]
        if renamed_variables:
            variables = list(
                set(variables).difference(
                    to_be_renamed_variables)) + renamed_variables

        variables = list(set(variables).intersection(df.columns))
        df = df[variables]

        # Rename the selected ERF columns back to their openfisca names.
        # Fix: the original intersected erf2of *values* with the ERF-named
        # columns and then used the matches as keys -- a KeyError for any
        # genuine match; intersect the keys instead and rename in one call.
        erf2of = get_erf2of()
        columns_map = {var: erf2of[var]
                       for var in set(erf2of.keys()).intersection(variables)}
        if columns_map:
            df = df.rename(columns=columns_map)
        return df