Beispiel #1
0
    def readconfig(self):
        """Reload filter rules from ``self.CONFIG_FILENAME``.

        Sections whose names match ``isfiltername`` are sorted by their
        numeric ``order`` prefix and translated into ``classlist`` filter
        groups which are appended to ``self`` (the previous contents are
        cleared first).
        """
        CONFIG = ConfigParser(inline_comment_prefixes=('#', ';'))
        # Override the option regex so key and value may be separated by
        # plain whitespace (NOTE: relies on the private ``_optcre`` attr).
        CONFIG._optcre = re.compile(r'(?P<option>[^\s]+)(?P<vi>\s+=)?\s*(?P<value>.*)')
        CONFIG.read(self.CONFIG_FILENAME)

        sections = CONFIG.sections()
        order_sections = []
        for section in sections:
            try:
                order, action = isfiltername(section).group('order', 'action')
                order_sections.append((int(order), action, section))
            # AttributeError: section name did not match (isfiltername
            # returned None); ValueError: order is not an integer.  The
            # original bare ``except:`` also swallowed KeyboardInterrupt
            # and SystemExit, hiding real errors.
            except (AttributeError, ValueError):
                continue
        order_sections.sort(key=lambda x: x[0])
        self.clear()
        for order, action, section in order_sections:
            action = action.upper()
            if action not in actToNum:
                continue
            filters = classlist()
            filters.action = actToNum[action]
            for k, v in CONFIG.items(section):
                scheme = ''
                # split an optional "scheme://" prefix off the key
                if k.find('://', 0, 9) > 0:
                    scheme, _, k = k.partition('://')
                host, _, path = k.partition('/')
                # a leading '@' marks the host/path part as a regex
                if host[:1] == '@':
                    host = re.compile(host[1:]).search
                else:
                    host = host.lower()
                if path[:1] == '@':
                    path = re.compile(path[1:]).search
                if filters.action == FAKECERT and v and '*' not in v:
                    v = v.encode()
                if filters.action in (FORWARD, DIRECT):
                    # optional "@profile" token before the target value
                    if v[:1] == '@':
                        p, _, v = v.partition(' ')
                    else:
                        p = None
                    if isempty(v):
                        v = None
                    elif '|' in v:
                        v = pickip(v.lower()) or None
                    elif isipuse(v):
                        v = [v]
                    elif isip(v) or not (v in GC.IPLIST_MAP or v.find('.') > 0):
                        v = None
                    v = v, p
                elif filters.action in (REDIRECT, IREDIRECT):
                    # a leading '!' disables host matching for this rule
                    if v[:1] == '!':
                        v = v[1:].lstrip(' \t')
                        mhost = False
                    else:
                        mhost = True
                    if '>>' in v:
                        # "pattern >> replacement [action]" rewrite rule
                        patterns, _, replaces = v.partition('>>')
                        patterns = patterns.rstrip(' \t')
                        replaces = replaces.lstrip(' \t')
                        if ' ' in replaces:
                            raction, _, replaces = replaces.partition(' ')
                            if raction in ('forward', 'direct', 'gae'):
                                raction = 'do_' + raction.upper()
                            elif raction.startswith('proxy='):
                                raction = 'do_PROXY', raction[6:]
                            else:
                                raction = None
                            replaces = replaces.rstrip(' \t')
                        else:
                            raction = None
                        unquote = replaces[:1] == '@'
                        if unquote:
                            replaces = replaces[1:].lstrip(' \t')
                        if patterns[:1] == '@':
                            patterns = patterns[1:].lstrip(' \t')
                            rule = partial(re.compile(patterns).sub, replaces)
                        else:
                            rule = patterns, replaces, 1
                        v = rule, unquote, mhost, raction
                    else:
                        v = v, None, mhost, None
                filters.append((scheme.lower(), host, path, v))
            self.append(filters)
Beispiel #2
0
# be accessible, and the documentation will not build correctly.

import os
import sys
import datetime
from importlib import import_module

try:
    # sphinx-astropy supplies the shared base Sphinx configuration; the
    # star import intentionally pulls all of its settings into this module.
    from sphinx_astropy.conf.v1 import *  # noqa
except ImportError:
    print('ERROR: the documentation requires the sphinx-astropy package to be installed')
    sys.exit(1)

# Get configuration information from setup.cfg
from configparser import ConfigParser
conf = ConfigParser()

conf.read([os.path.join(os.path.dirname(__file__), '..', 'setup.cfg')])
# Flatten the [metadata] section into a plain dict for use below.
setup_cfg = dict(conf.items('metadata'))

# -- General configuration ----------------------------------------------------

# By default, highlight as Python 3.
highlight_language = 'python3'

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.2'

# To perform a Sphinx version check that needs to be more specific than
# major.minor, call `check_sphinx_version("X.Y.Z")` here.
# check_sphinx_version("1.2.1")
Beispiel #3
0
#This tosses out the 4th sample 3 measurement of each heater value. (Due to badly implemented interpolation)
import sys
import os
import numpy as np
import pandas as pd
from configparser import ConfigParser
import matplotlib.pyplot as plt

#Get Header Names from ini file:
cfgparser = ConfigParser()
cfgparser.read("config.ini")  # NOTE(review): read() silently ignores a missing config.ini
nChannels = int(cfgparser.get('scanner',
                              'channels'))  #Number of channels holding samples
nPasses = int(cfgparser.get('scanner',
                            'scannerpasses'))  #Number of scanner passes

#CSV header names:
datehdr = "date"
timehdr = "time"
htrhdr = "heater%"
MCThdr = "MCTemp"
MCTInterphdr = MCThdr + "_Interp"
MCRhdr = "MCResist"
# Sample descriptions come from the ini file; surrounding double quotes
# are stripped so they can be used directly as CSV column headers.
s1hdr = str(cfgparser.get('sample1',
                          'description')).strip('\"')  #Sample 1 Header
s2hdr = str(cfgparser.get('sample2',
                          'description')).strip('\"')  #Sample 2 Header
s3hdr = str(cfgparser.get('sample3',
                          'description')).strip('\"')  #Sample 3 Header

#Read in CSV file as lists:
Beispiel #4
0
import os
import re
from setuptools import setup, find_packages
from configparser import ConfigParser
from setuptools.command.test import test as TestCommand

# flit.ini is parsed with [metadata] as the default section so its keys
# are visible without an explicit section lookup.
config = ConfigParser(default_section='metadata', empty_lines_in_values=False)
config.read(os.path.join(os.path.dirname(__file__), 'flit.ini'))


def read(f):
    """Return the stripped text content of file *f*, resolved relative
    to this file's directory.

    The original version never closed the file handle; the context
    manager guarantees it is released.
    """
    with open(os.path.join(os.path.dirname(__file__), f)) as fp:
        return fp.read().strip()


try:
    # Extract __version__ from aioftp/__init__.py without importing the
    # package (avoids pulling in its dependencies at setup time).
    version = re.findall(r"""^__version__ = "([^']+)"\r?$""",
                         read(os.path.join('aioftp', '__init__.py')), re.M)[0]
except IndexError:
    # findall returned no match -> [0] raised IndexError
    raise RuntimeError('Unable to determine version.')


class NoseTestCommand(TestCommand):
    def finalize_options(self):
        """Force the whole suite to run, with no extra test arguments."""
        TestCommand.finalize_options(self)
        self.test_suite = True
        self.test_args = []

    def run_tests(self):
        import pathlib
        import shutil
        import nose
# @Time    : 2019/12/2 11:17
# @Author  : Libuda
# @FileName: 加密spider.py
# @Software: PyCharm
import xlrd
import smtplib  # send mail / connect to the mail server
from email.mime.text import MIMEText  # build the mail body
from xlutils.copy import copy
from selenium import webdriver
import time
import requests
from configparser import ConfigParser
import base64

# Load runtime settings from config.cfg ([default] section).
config_parser = ConfigParser()
config_parser.read('config.cfg', encoding="utf-8-sig")
config = config_parser['default']

user_agent = "mozilla/5.0 (linux; u; android 4.1.2; zh-cn; mi-one plus build/jzo54k) applewebkit/534.30 (khtml, like gecko) version/4.0 mobile safari/534.30 micromessenger/5.0.1.352"
url = "https://feiyu.oceanengine.com/feiyu/login"

user_list = ['*****@*****.**']

phone_num = 13281890000
wait_time = 3  # seconds to wait at each stage
time_jiange = 30  # interval between script runs
start_date = time.mktime(
    time.strptime("2019-11-1 18:00:00", "%Y-%m-%d %H:%M:%S"))  # start of window (original comment said "end time" -- likely copy-paste)
end_date = time.mktime(
    time.strptime("2019-12-12 18:00:00", "%Y-%m-%d %H:%M:%S"))  # end of window
ding_num = 5  # alert threshold for the number of links
Beispiel #6
0
def main():
    """
    main function

    Rewrites the hosts/devices sections of the given gdeploy config
    files so that they match the machines listed in an ansible
    inventory file, then saves the result (or prints it, with
    --dry-run).  Returns 1 on error, None on success.
    """
    # Maps each --hosts-definition choice to a host-name transformation
    # callable (host_fqdn etc. are defined elsewhere in this module).
    host_transformation = {
        'fqdn': host_fqdn,
        'short': host_short,
        'ip': host_ip,
        'mixed': host_mixed,
        'random': host_random,
        'example.org': host_example_org,
    }

    ap = argparse.ArgumentParser(
        description="update of hosts and devices in gdeploy conf file")
    ap.add_argument(
        "-d",
        "--dry-run",
        action="store_true",
        help=
        "print to stdout instead of save to file (in place edit by default)")
    ap.add_argument("gdeployconf",
                    nargs="+",
                    help="gdeploy config files to edit")
    ap.add_argument("-i",
                    dest="inventory",
                    action="store",
                    required=True,
                    help="ansible inventory (aka hosts) file")
    ap.add_argument(
        "-p",
        dest="file_prefix",
        action="store",
        default="",
        help="output file prefix (added before the name of the input gdeploy conf file) " \
            "If empty, input file is overwritten (in place edit).")
    ap.add_argument(
        "-s",
        "--storage-devices",
        dest="storage_devices",
        help="Coma separated list of available devices for bricks.")
    ap.add_argument(
        "-H",
        "--hosts-definition",
        dest="hosts_definition",
        choices=host_transformation.keys(),
        default='fqdn',
        help=
        "How to define hosts in gdeploy config: fqdn, short, ip, mixed or random."
    )
    ap.add_argument(
        "--gluster-server-group",
        dest="gluster_server_group",
        action="store",
        default='gluster_servers',
        help="Name of ansible inventory group of gluster storage servers.")
    ap.add_argument(
        "--gluster-client-group",
        dest="gluster_client_group",
        action="store",
        default='usm_client',
        help="Name of ansible inventory group of gluster client machine.")
    args = ap.parse_args()

    # open gdeploy config files via plain config parser
    gdeploy_confs = {}
    for gdeploy_conf_file in args.gdeployconf:
        if not os.path.exists(gdeploy_conf_file):
            msg = "Specified gdeploy config file '{}' doesn't exists.".format(
                gdeploy_conf_file)
            print(msg, file=sys.stderr)
            return 1
        # allow_no_value=True: gdeploy hosts sections list bare host names
        gdeploy_confs[gdeploy_conf_file] = ConfigParser(allow_no_value=True)
        gdeploy_confs[gdeploy_conf_file].read(gdeploy_conf_file)

    # open ansible inventory file via plain config parser
    inventory = ConfigParser(allow_no_value=True)
    inventory.read(args.inventory)

    # validate the inventory file
    sections_to_validate = [args.gluster_server_group]
    # the client group is only required when some conf file has [clients]
    if any([gc.has_section("clients") for gc in gdeploy_confs.values()]):
        sections_to_validate.append(args.gluster_client_group)
    for section in sections_to_validate:
        if not inventory.has_section(section):
            msg = "inventory file {} is missing group {}".format(
                args.inventory, section)
            print(msg, file=sys.stderr)
            return 1

    # get machines from the inventory file
    servers = inventory.options(args.gluster_server_group)
    if any([gc.has_section("clients") for gc in gdeploy_confs.values()]):
        clients = inventory.options(args.gluster_client_group)
    else:
        clients = []

    # prepare list of storage devices
    storage_devices = []
    if args.storage_devices:
        storage_devices = args.storage_devices.split(",")

    print("servers: " + ", ".join(servers), file=sys.stderr)
    print("clients: " + ", ".join(clients), file=sys.stderr)
    print("devices: " + ", ".join(storage_devices), file=sys.stderr)

    # update gdeploy config files
    for gdeploy_conf_file, gdeploy_conf in gdeploy_confs.items():
        # ignore hosts section if present
        if gdeploy_conf.has_section("hosts"):
            gdeploy_conf.remove_section("hosts")
        # add servers into hosts sections
        gdeploy_conf.add_section("hosts")
        print("hosts_definition: %s" % args.hosts_definition, file=sys.stderr)
        for server in servers:
            # hosts are stored as value-less options (allow_no_value above)
            gdeploy_conf.set(
                "hosts", host_transformation[args.hosts_definition](server),
                None)

        # configure client
        if gdeploy_conf.has_section("clients"):
            gdeploy_conf.set("clients", "hosts", ",".join(clients))

        # configure storage devices
        if args.storage_devices and \
                gdeploy_conf.has_section("backend-setup") and \
                gdeploy_conf.has_option("backend-setup", "devices"):

            # number of used devices
            n_devices = len(
                gdeploy_conf.get("backend-setup", "devices").split(","))

            if n_devices > len(storage_devices):
                msg = "Not enough storage devices for {} - available: {} ({}), required: {}".format(
                    gdeploy_conf_file, len(storage_devices),
                    ",".join(storage_devices), n_devices)
                print(msg, file=sys.stderr)
                return 1
            print(gdeploy_conf_file + " was assigned devices: " + \
                    ",".join(storage_devices[:n_devices]), file=sys.stderr)
            gdeploy_conf.set("backend-setup", "devices", \
                ",".join(storage_devices[:n_devices]))
            # consume the assigned devices so the next file gets fresh ones
            storage_devices = storage_devices[n_devices:]

        # generate and save/print output
        if args.dry_run:
            print("## %s" % gdeploy_conf_file)
            gdeploy_conf.write(sys.stdout, space_around_delimiters=False)
        else:
            output_filename = os.path.join(
                os.path.dirname(gdeploy_conf_file), "%s%s" %
                (args.file_prefix, os.path.basename(gdeploy_conf_file)))
            with open(output_filename, 'w') as output_file:
                gdeploy_conf.write(output_file, space_around_delimiters=False)
 def __init__(self):
     """Create the instance lock and the backing config parser."""
     self._lock = RLock()
     # Single-element tuples now carry an explicit trailing comma; the
     # original ("=") / ("#") were just parenthesized strings, which only
     # behaved the same by accident for one-character values.
     self._config = ConfigParser(delimiters=("=",), comment_prefixes=("#",))
                        else:
                            self.mask_save_points_to_file('autosave.10.csv', True)                    

            # Update windows
            cv2.imshow(self.window_name, canvas)
            
            # Process keyboard 
            keycode = cv2.waitKey(50)
            if self.process_key(keycode):
                break

# ============================================================================
if __name__ == "__main__":
    # parser config
    config_file = "./config.ini"
    cp = ConfigParser()
    cp.read(config_file)

    # default config: all paths come from the [DEFAULT] section
    # (SectionProxy.get returns None for missing keys)
    image_source_dir = cp["DEFAULT"].get("image_source_dir")
    image_output_dir = cp["DEFAULT"].get("image_output_dir")
    data_entry_file = cp["DEFAULT"].get("data_entry_file")
    lung_masks_file = cp["DEFAULT"].get("lung_masks_file")
    bbox_list_file = cp["DEFAULT"].get("bbox_list_file")

    # Run explorer
    explorer = chestXrayExplorer("X-Ray Image", 
        data_entry_file, lung_masks_file, bbox_list_file, 
        image_source_dir, image_output_dir)
    explorer.run()
Beispiel #9
0
from utils import voc_utils
from utils.utils import *
import pandas as pd
from configparser import ConfigParser
import ast

config_object = ConfigParser()

# ConfigParser.read() does NOT raise for a missing file -- it silently
# returns the list of files it managed to parse, so the original bare
# try/except around it could never detect a missing config file.
if not config_object.read("core/Config.cfg"):
    error("Config file not found in the core folder!")

try:

    model_config = config_object["MODEL"]

    # option values are stored as Python literals inside the INI file
    classes = ast.literal_eval(model_config["classes"])
    min_overlap = ast.literal_eval(model_config["min_overlap"])

    results_config = config_object["RESULTS"]

    dataset_name = ast.literal_eval(results_config["dataset_name"])

    data_config = config_object["DATA"]

    path_gt_file = ast.literal_eval(data_config["path_gt_file"])
    path_pred_file = ast.literal_eval(data_config["path_pred_file"])

# KeyError: missing section/option; ValueError/SyntaxError/TypeError:
# a value that is not a valid Python literal (raised by literal_eval).
except (KeyError, ValueError, SyntaxError, TypeError):
    error("Verify the Config.cfg file!")
Beispiel #10
0
    def get_login_details(self):
        '''
        This parses the config file, environment variables and command line options
        and returns the config values
        Order of parsing:
            command line options, ~/.pepperrc, environment, defaults
        '''

        # setting default values
        results = {
            'SALTAPI_URL': 'https://localhost:8000/',
            'SALTAPI_USER': None,
            'SALTAPI_PASS': None,
            'SALTAPI_EAUTH': 'auto',
        }

        try:
            config = ConfigParser(interpolation=None)
        except TypeError:
            # Very old ConfigParser without the `interpolation` kwarg;
            # fall back to the raw parser (original code called the
            # nonexistent ConfigParser.RawConfigParser()).
            from configparser import RawConfigParser
            config = RawConfigParser()
        config.read(self.options.config)

        # read file
        profile = 'main'
        if config.has_section(profile):
            for key, value in config.items(profile):
                key = key.upper()
                results[key] = config.get(profile, key)

        # get environment values (environment overrides the config file)
        for key, value in list(results.items()):
            results[key] = os.environ.get(key, results[key])

        # command line options take the highest precedence
        if self.options.saltapiurl:
            results['SALTAPI_URL'] = self.options.saltapiurl

        # kerberos never uses a stored password
        if results['SALTAPI_EAUTH'] == 'kerberos':
            results['SALTAPI_PASS'] = None

        if self.options.eauth:
            results['SALTAPI_EAUTH'] = self.options.eauth

        # NOTE(review): the original source was garbled (censored) here;
        # reconstructed so explicit CLI credentials win, with interactive
        # prompting only when nothing else supplied a value.
        if self.options.username is None and results['SALTAPI_USER'] is None:
            if self.options.interactive:
                results['SALTAPI_USER'] = input('Username: ')
        elif self.options.username is not None:
            results['SALTAPI_USER'] = self.options.username

        if self.options.password is None and results['SALTAPI_PASS'] is None:
            if self.options.interactive:
                results['SALTAPI_PASS'] = getpass.getpass(prompt='Password: ')
        elif self.options.password is not None:
            results['SALTAPI_PASS'] = self.options.password

        return results
# PyQT5 imports, ignore pylint errors
from PyQt5.QtCore import QTimer, QThread, pyqtSignal, Qt, pyqtSlot
from PyQt5.QtWidgets import QApplication, QHeaderView, QTableWidgetItem, QDialog, QMainWindow, QTabWidget, QMessageBox
from PyQt5.uic import loadUi

# Package imports
from utils import warning, event
from communications import database
from communications import udp_conn as UDP
from camera import video_manager as vm

# Application-wide events fired on settings changes / restart requests.
SETTINGSEVENT = event.Event("SettingsChangedEvent")
RESTARTEVENT = event.Event("RestartAppEvent")

# Default settings
SETTINGS = ConfigParser()  # global settings store, populated elsewhere
SETTINGSWINDOW = None  # settings dialog; presumably created lazily -- set elsewhere

DEFAULT_SECTIONS = ("main", "database", "communication")
DEFAULT_MAIN_SETTINGS = {
    # Empty means no stylesheet, default look
    "stylesheet": "False",
    "multithread": "False"
}
DEFAULT_DATABASE_SETTINGS = {
    "address": "127.0.0.1",
    "port": 5432,
    "db": "rover",
    "user": "******",
    "passwd": "xyz",
    "type": "postgresql"
Beispiel #12
0
import os
import logging
from configparser import ConfigParser

# Load configs file
# NOTE(review): passing os.environ supplies every environment variable as
# a DEFAULT-section value; env values containing '%' can break
# BasicInterpolation -- confirm this is intended.
config = ConfigParser(os.environ)
app_ini_file = 'dev.app.ini'
PROFILE = os.environ.get('PROFILE')
if PROFILE and os.environ['PROFILE'] == 'prod':
    app_ini_file = 'prod.app.ini'
logging.info(f"Loading {app_ini_file}")
config.read("{current_dir}/{ini_file}".format(current_dir=os.path.dirname(__file__), ini_file=app_ini_file))

# Database connection settings from the [db] section
DB_HOST = config.get('db', 'host')
DB_PORT = config.getint('db', 'port')
DB_NAME = config.get('db', 'name')
DB_USER = config.get('db', 'user')
DB_PASS = config.get('db', 'pass')

# JWT signing settings from the [jwt] section
JWT_SECRET = config.get('jwt', 'secret')
JWT_ALGO = config.get('jwt', 'algo')
JWT_TTL = config.getint('jwt', 'ttl')
Beispiel #13
0
def _add_stream(archive, name, stream):
    """Append *stream*'s full content to *archive* as member *name*.

    The member size is measured by seeking to the end; the stream is
    rewound before being handed to :meth:`tarfile.TarFile.addfile` and
    is left at EOF afterwards (callers re-seek if they need the data).
    """
    tarinfo = tarfile.TarInfo(name)
    stream.seek(0, io.SEEK_END)
    tarinfo.size = stream.tell()
    stream.seek(0)
    archive.addfile(tarinfo, fileobj=stream)


def _add_dir(archive, name):
    """Append an empty 0o755 directory member *name* to *archive*."""
    tarinfo = tarfile.TarInfo(name)
    tarinfo.type = tarfile.DIRTYPE
    tarinfo.mode = 0o755
    archive.addfile(tarinfo)


def _conf_ini_stream(serialized, encoding):
    """Render a JSON-serialized conf mapping as an INI byte stream."""
    config = ConfigParser()
    config.read_dict(json.loads(serialized))
    text = io.StringIO()
    config.write(text)
    return io.BytesIO(text.getvalue().encode(encoding))


def create_spk(
    build,
    info=None,
    signature=None,
    with_checksum=False,
    with_package_icons=True,
    with_info_icons=False,
    with_info=True,
    with_package=True,
    with_scripts=True,
    with_conf=False,
    info_encoding="utf-8",
    license_encoding="utf-8",
    signature_encoding="ascii",
    conf_dependencies_encoding="utf-8",
    conf_conflicts_encoding="utf-8",
    conf_privilege_encoding="utf-8",
    conf_resource_encoding="utf-8",
):
    """
    Create a valid SPK file

    :param build: base build on which the SPK will be built
    :type build: :class:`~spkrepo.models.Build`
    :param info: INFO dict or `None` to use the result of :func:`create_info`
    :type info: dict or io.BytesIO
    :param signature: content of the syno_signature.asc file, if any
    :param bool with_checksum: whether to include the checksum in the INFO
    :param bool with_package_icons: whether to include the icons in the SPK
    :param bool with_info_icons: whether to include the icons in the INFO
    :param bool with_info: whether to include the INFO file
    :param bool with_package: whether to include the package.tgz file
    :param bool with_scripts: whether to include the scripts folder
    :param bool with_conf: whether to include the conf folder
    :param info_encoding: encoding for the INFO file
    :param license_encoding: encoding for the LICENSE file
    :param signature_encoding: encoding for the syno_signature.asc file
    :param conf_dependencies_encoding: encoding for the conf/PKG_DEPS file
    :param conf_conflicts_encoding: encoding for the conf/PKG_CONX file
    :param conf_privilege_encoding: encoding for the conf/privilege file
    :param conf_resource_encoding: encoding for the conf/resource file
    :return: the created SPK stream
    """
    # generate an info if none is given
    info = info or create_info(build)

    # open structure
    spk_stream = io.BytesIO()
    spk = tarfile.TarFile(fileobj=spk_stream, mode="w")

    # license (an empty-string license is skipped, same as None)
    if build.version.license:
        _add_stream(
            spk, "LICENSE", io.BytesIO(build.version.license.encode(license_encoding))
        )

    # signature
    if signature is not None:
        _add_stream(
            spk,
            "syno_signature.asc",
            io.BytesIO(signature.encode(signature_encoding)),
        )

    # conf
    if (
        with_conf
        or build.version.conf_dependencies is not None
        or build.version.conf_conflicts is not None
        or build.version.conf_privilege is not None
        or build.version.conf_resource is not None
    ):
        _add_dir(spk, "conf")
        if build.version.conf_dependencies is not None:
            _add_stream(
                spk,
                "conf/PKG_DEPS",
                _conf_ini_stream(
                    build.version.conf_dependencies, conf_dependencies_encoding
                ),
            )
        if build.version.conf_conflicts is not None:
            _add_stream(
                spk,
                "conf/PKG_CONX",
                _conf_ini_stream(build.version.conf_conflicts, conf_conflicts_encoding),
            )
        if build.version.conf_privilege is not None:
            _add_stream(
                spk,
                "conf/privilege",
                io.BytesIO(build.version.conf_privilege.encode(conf_privilege_encoding)),
            )
        if build.version.conf_resource is not None:
            _add_stream(
                spk,
                "conf/resource",
                io.BytesIO(build.version.conf_resource.encode(conf_resource_encoding)),
            )

    # wizards: placeholder uifiles whose content is just the wizard name
    wizards = []
    if build.version.install_wizard:
        wizards.append("install")
    if build.version.upgrade_wizard:
        wizards.append("upgrade")
    if wizards:
        _add_dir(spk, "WIZARD_UIFILES")
        for wizard in wizards:
            _add_stream(
                spk,
                "WIZARD_UIFILES/%s_uifile" % wizard,
                io.BytesIO(wizard.encode("utf-8")),
            )

    # scripts: placeholder scripts whose content is the script name
    if with_scripts:
        _add_dir(spk, "scripts")
        for script in (
            "preinst",
            "postinst",
            "preuninst",
            "postuninst",
            "preupgrade",
            "postupgrade",
            "start-stop-status",
        ):
            _add_stream(
                spk, "scripts/%s" % script, io.BytesIO(script.encode("utf-8"))
            )

    # package
    if with_package:
        package_stream = io.BytesIO()
        package = tarfile.TarFile(fileobj=package_stream, mode="w")
        unique = "%s-%d-%d-[%s]" % (
            build.version.package.name,
            build.version.version,
            build.firmware.build,
            "-".join(a.code for a in build.architectures),
        )
        unique_stream = io.BytesIO(unique.encode("utf-8"))
        package.addfile(
            tarfile.TarInfo("unique"), fileobj=unique_stream
        ) if False else _add_stream(package, "unique", unique_stream)
        unique_stream.close()
        package.close()
        _add_stream(spk, "package.tgz", package_stream)
        # NOTE(review): when info is an io.BytesIO the `in` test below
        # raises TypeError -- original behavior is kept; callers pass a
        # dict whenever they want a checksum.
        if "checksum" not in info and with_checksum:
            checksum = hashlib.md5()
            package_stream.seek(0)
            for chunk in iter(lambda: package_stream.read(io.DEFAULT_BUFFER_SIZE), b""):
                checksum.update(chunk)
            # hexdigest() already returns str on Python 3; the original
            # .decode("utf-8") was a py2 leftover raising AttributeError.
            info["checksum"] = checksum.hexdigest()
        package_stream.close()

    # icons
    if with_package_icons or with_info_icons:
        for size, icon in build.version.icons.items():
            with create_icon(build.version.package.name, int(size)) as f:
                suffix = "" if size == "72" else "_%s" % size
                if with_package_icons:
                    _add_stream(spk, "PACKAGE_ICON%s.PNG" % suffix, f)
                if with_info_icons:
                    f.seek(0)
                    info["package_icon%s" % suffix] = base64.b64encode(f.read()).decode(
                        "utf-8"
                    )

    # info
    if with_info:
        if isinstance(info, io.BytesIO):
            info_stream = info
        else:
            b = "\n".join(['%s="%s"' % (k, v) for k, v in info.items()]).encode(
                info_encoding
            )
            info_stream = io.BytesIO(b)
        _add_stream(spk, "INFO", info_stream)

    # close structure
    spk.close()
    spk_stream.seek(0)

    return spk_stream
Beispiel #14
0
""" p3-template dev tools

    the tasks.py module contains:
    - development tools
    - pipeline steps
"""

import os
from configparser import ConfigParser

from invoke import task

# read config: parse the project's setup.cfg once at import time;
# _get_cfg_value falls back to this parser when no cfg path is given.
SETUP_CFG = ConfigParser()
SETUP_CFG.read("setup.cfg")


def _get_cfg_value(section, field, cfg=None):
    """get the section field value from cfg

        Parse configuration file <cfg> and get the section
        field value.

    Pylint-disable:
        W0212 - OK to access the config protected attribute

    Args:
        section (str): The config file [section]
        field (str): The [section] field
        cfg (str): The configuration file path. Defaults to
            the package config 'setup.cfg'
Beispiel #15
0
# coding=utf-8
from configparser import ConfigParser
from py2neo import Graph, authenticate
import os

# ConfigParser.read() returns the list of file names it parsed, NOT the
# parser object, so the original chained one-liner left `config` bound
# to a list and every config.get*() call below raised AttributeError.
config = ConfigParser()
config.read('config.ini')
is_local = config.getboolean('ogm', 'is_local')

if is_local:
    # For Local Neo4j & Py2neo v4
    bolt_url = config.get('local', 'bolt_uri')
    neo4j_user = config.get('local', 'neo4j_user')
    neo4j_password = config.get('local', 'neo4j_password')

    graph = Graph(bolt_url, user=neo4j_user, password=neo4j_password)

    # enforce uniqueness on the core node identifiers
    graph.schema.create_uniqueness_constraint("User", "username")
    graph.schema.create_uniqueness_constraint("Tag", "name")
    graph.schema.create_uniqueness_constraint("Fact", "id")
    graph.schema.create_uniqueness_constraint("Question", "id")

elif not is_local:
    # For GrapheneDB Neo4j & Py2neo v3
    graphene_uri = config.get('remote', 'graphene_uri')
    graphene_user = config.get('remote', 'graphene_user')
    graphene_password = config.get('remote', 'graphene_password')

    authenticate(graphene_uri, graphene_user, graphene_password)
    graph = Graph("https://" + graphene_uri, bolt=False)

    def create_uniqueness_constraint(label, prop):
Beispiel #16
0
from configparser import ConfigParser
from os import environ

# Hard-coded fallback used when neither the environment variable nor the
# ini file yields a usable configuration section.
default_config_if_error = {
    'host': '0.0.0.0',
    'port': 8080,
    'debug': True,
    'reloader': True
}

config = ConfigParser(default_config_if_error)
config.read('project_root.ini')

# Select the section named by BOTTLE_ENVIRON ('dev'/'prod'), defaulting to
# 'test'.  FIX: the original had no `else` branch (any other value of
# BOTTLE_ENVIRON left `default_config` unassigned -> NameError) and crashed
# with an uncaught KeyError when the 'test' section was missing; now fall
# back to the hard-coded defaults instead.
try:
    if environ['BOTTLE_ENVIRON'] == 'dev':
        default_config = dict(config['dev'])
    elif environ['BOTTLE_ENVIRON'] == 'prod':
        default_config = dict(config['prod'])
    else:
        default_config = dict(config['test'])
except KeyError:
    try:
        default_config = dict(config['test'])
    except KeyError:
        default_config = dict(default_config_if_error)
Beispiel #17
0
                         dest='msgid',
                         help='Messageid to load')

    (opt, args) = optparser.parse_args()

    if (len(args)):
        print("No bare arguments accepted")
        optparser.print_help()
        sys.exit(1)

    if not opt.msgid:
        print("Message-id must be specified")
        optparser.print_help()
        sys.exit(1)

    cfg = ConfigParser()
    cfg.read('%s/archives.ini' %
             os.path.realpath(os.path.dirname(sys.argv[0])))
    try:
        connstr = cfg.get('db', 'connstr')
    except Exception:
        connstr = 'need_connstr'

    conn = psycopg2.connect(connstr)
    curs = conn.cursor()

    curs.execute("SELECT id, threadid FROM messages WHERE messageid=%(msgid)s",
                 {
                     'msgid': opt.msgid,
                 })
    id, threadid = curs.fetchone()
Beispiel #18
0
    def check_payload(self, response, flags, taint):
        """Return True if the injected XSS taint is reflected in an
        executable context described by the payloads ini file.

        Args:
            response: Crawler response exposing the parsed document as
                ``response.soup`` (BeautifulSoup).
            flags: Payload-section names that were injected for this probe.
            taint: Unique marker string substituted for ``__XSS__``.
        """
        config_reader = ConfigParser(interpolation=None)
        # FIX: close the payloads file deterministically -- the original
        # passed a bare open() to read_file() and leaked the handle.
        with open(path_join(self.CONFIG_DIR, self.PAYLOADS_FILE)) as payload_file:
            config_reader.read_file(payload_file)

        for section in config_reader.sections():
            if section in flags:
                expected_value = config_reader[section]["value"].replace(
                    "__XSS__", taint)
                attribute = config_reader[section]["attribute"]
                case_sensitive = config_reader[section].getboolean(
                    "case_sensitive")
                match_type = config_reader[section].get("match_type", "exact")

                for tag in response.soup.find_all(
                        config_reader[section]["tag"]):
                    # Reflections inside non-executable parents (e.g.
                    # <textarea>) cannot trigger script execution.
                    if find_non_exec_parent(tag):
                        continue

                    if attribute == "string" and tag.string:
                        if case_sensitive:
                            if expected_value in tag.string:
                                return True
                        else:
                            if expected_value.lower() in tag.string.lower():
                                return True
                    elif attribute == "full_string" and tag.string:
                        if case_sensitive:
                            if match_type == "exact" and expected_value == tag.string.strip(
                            ):
                                return True
                            elif match_type == "starts_with" and tag.string.strip(
                            ).startswith(expected_value):
                                return True
                        else:
                            if match_type == "exact" and expected_value.lower(
                            ) == tag.string.strip().lower():
                                return True
                            elif match_type == "starts_with" and \
                                    tag.string.strip().lower().startswith(expected_value.lower()):
                                return True
                    else:
                        # Found attribute specified in .ini file in attributes of the HTML tag
                        if attribute in tag.attrs:
                            if case_sensitive:
                                if match_type == "exact" and tag[
                                        attribute] == expected_value:
                                    return True
                                elif match_type == "starts_with" and tag[
                                        attribute].startswith(expected_value):
                                    return True
                            else:
                                if match_type == "exact" and tag[
                                        attribute].lower(
                                        ) == expected_value.lower():
                                    return True
                                # FIX: the operands were swapped relative to
                                # the case-sensitive branch above (it tested
                                # expected_value.startswith(tag[attribute])).
                                elif match_type == "starts_with" and \
                                        tag[attribute].lower().startswith(expected_value.lower()):
                                    return True
                break

        return False
def open_config():
    """Parse the application config file and return the parser object."""
    parser = ConfigParser()
    parser.read(CONFIG_FILE)
    return parser
# -*- coding: utf-8 -*-

import os
import polib
from pathlib import Path
from configparser import ConfigParser

# Read the translator settings once at import time.
configs = ConfigParser()
configs.read('configs.ini')

mod = configs.get('Mods', 'mod')
game_folder = configs.get('Folders', 'game folder')
translator_folder = configs.get('Folders', 'translator folder')
mods_folder = configs.get('Folders', 'mod folder')

# Per-mod working directories under the translator folder.
string_folder = f"{translator_folder}/strings/{mod}"
user_folder = f"{translator_folder}/user/{mod}"
po_folder = f"{translator_folder}\\po\\{mod}"

#### Po creating ####


po = polib.POFile()
po.metadata = {
    'POT-Creation-Date': '2021-04-13 14:00+0100',
    'Last-Translator': 'Karantras',
    'Language': 'Russian',
    'Content-Type': 'text/plain; charset=utf-8',
    'Content-Transfer-Encoding': '8bit',
    'Plural-Forms': 'nplurals=4; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<12 || n%100>14) ? 1 : n%10==0 || (n%10>=5 && n%10<=9) || (n%100>=11 && n%100<=14)? 2 : 3);'
}
Beispiel #21
0
def main():
    """Entry point: load config.cfg, authenticate to bitcoind over
    JSON-RPC, then either import wallet addresses (first run / recovery)
    or build the address history and start the Electrum server.

    Usage: ./server.py [path/to/working/dir]
    """
    global debug_fd
    if len(sys.argv) == 2:
        if sys.argv[1] == "--help":
            print("Usage: ./server.py <path/to/current/working/dir>\nRunning" +
                  " without arg defaults to the directory you're in right now")
            return
        else:
            os.chdir(sys.argv[1])
    debug_fd = open("debug.log", "w")
    debug("current working directory is: " + os.getcwd())
    try:
        config = ConfigParser()
        config.read("config.cfg")
        # Probe a mandatory section so a missing/empty file fails fast.
        config.options("master-public-keys")
    except NoSectionError:
        # FIX: corrected typo in the user-facing message ("Non-existant").
        log("Non-existent configuration file `config.cfg`")
        return
    try:
        rpc_u = config.get("bitcoin-rpc", "rpc_user")
        rpc_p = config.get("bitcoin-rpc", "rpc_password")
        debug("obtaining auth from rpc_user/pass")
    except NoOptionError:
        # Fall back to bitcoind's .cookie file in the configured datadir.
        rpc_u, rpc_p = obtain_rpc_username_password(
            config.get("bitcoin-rpc", "datadir"))
        debug("obtaining auth from .cookie")
    if rpc_u is None:  # FIX: identity test instead of `== None`
        return
    rpc = JsonRpc(host=config.get("bitcoin-rpc", "host"),
                  port=int(config.get("bitcoin-rpc", "port")),
                  user=rpc_u,
                  password=rpc_p,
                  wallet_filename=config.get("bitcoin-rpc",
                                             "wallet_filename").strip())

    #TODO somewhere here loop until rpc works and fully sync'd, to allow
    # people to run this script without waiting for their node to fully
    # catch up sync'd when getblockchaininfo blocks == headers, or use
    # verificationprogress
    printed_error_msg = False
    while bestblockhash[0] is None:  # FIX: identity test instead of `== None`
        try:
            bestblockhash[0] = rpc.call("getbestblockhash", [])
        except JsonRpcError as e:
            if not printed_error_msg:
                log("Error with bitcoin json-rpc: " + repr(e))
                printed_error_msg = True
            time.sleep(5)

    import_needed, relevant_spks_addrs, deterministic_wallets = \
        get_scriptpubkeys_to_monitor(rpc, config)
    if import_needed:
        transactionmonitor.import_addresses(rpc, relevant_spks_addrs, debug,
                                            log)
        log("Done.\nIf recovering a wallet which already has existing " +
            "transactions, then\nrun the rescan script. If you're confident " +
            "that the wallets are new\nand empty then there's no need to " +
            "rescan, just restart this script")
    else:
        txmonitor = transactionmonitor.TransactionMonitor(
            rpc, deterministic_wallets, debug, log)
        if not txmonitor.build_address_history(relevant_spks_addrs):
            return
        hostport = (config.get("electrum-server", "host"),
                    int(config.get("electrum-server", "port")))
        poll_interval_listening = int(
            config.get("bitcoin-rpc", "poll_interval_listening"))
        poll_interval_connected = int(
            config.get("bitcoin-rpc", "poll_interval_connected"))
        certfile = config.get("electrum-server", "certfile")
        keyfile = config.get("electrum-server", "keyfile")
        run_electrum_server(hostport, rpc, txmonitor, poll_interval_listening,
                            poll_interval_connected, certfile, keyfile)
Beispiel #22
0
from pkg_resources import parse_version
from configparser import ConfigParser
import setuptools
assert parse_version(setuptools.__version__) >= parse_version('36.2')

# note: all settings are in settings.ini; edit there, not here
config = ConfigParser(delimiters=['='])
config.read('settings.ini')
cfg = config['DEFAULT']

# Settings forwarded verbatim to setup(); the rest are merely required to
# be present in the ini file.
cfg_keys = 'version description keywords author author_email'.split()
expected = cfg_keys + ("lib_name user branch license status "
                      "min_python audience language").split()
for key in expected:
    assert key in cfg, f"missing expected setting: {key}"
setup_cfg = {key: cfg[key] for key in cfg_keys}

# Map of license shortname -> (license text, trove classifier).
licenses = {
    'apache2': ('Apache Software License 2.0',
                'OSI Approved :: Apache Software License'),
    'mit': ('MIT License', 'OSI Approved :: MIT License'),
    'gpl2': ('GNU General Public License v2',
             'OSI Approved :: GNU General Public License v2 (GPLv2)'),
    'gpl3': ('GNU General Public License v3',
             'OSI Approved :: GNU General Public License v3 (GPLv3)'),
    'bsd3': ('BSD License', 'OSI Approved :: BSD License'),
}
# Development-status trove classifier prefixes, indexed by status number - 1.
statuses = [
    '1 - Planning',
    '2 - Pre-Alpha',
    '3 - Alpha',
    '4 - Beta',
    '5 - Production/Stable',
    '6 - Mature',
    '7 - Inactive',
]
Beispiel #23
0
def load_config():
    """Read CONFIG_FILENAME from HERE, validate the section, and return it."""
    cfg_parser = ConfigParser()
    cfg_parser.read(join(HERE, CONFIG_FILENAME))
    section = cfg_parser[CONFIG_SECTION_NAME]
    check_config(section)
    return section
Beispiel #24
0
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
DiscordのBOT
"""

import click
import CommonConstants
import discord
from asyncio import ensure_future, get_event_loop
from CommonFunction import CommonFunction, getMyLogger
from configparser import ConfigParser

# Module-level singletons for the bot.
client = discord.Client()
ini = ConfigParser()  # settings parser; presumably loaded before startup -- TODO confirm
logger = None  # assigned before on_ready runs (used there) -- TODO confirm where
commonFunction = None  # CommonFunction helper instance; also used in on_ready


@client.event
async def on_ready():
    """
    BOT起動時の処理
    """
    try:
        # 定期実行処理を非同期で開始(discord.py側で非同期処理開始しているため、非同期処理を追加するだけでOK)
        logger.info("BOT起動")
        ensure_future(commonFunction.asyncDeleteLog())
        ensure_future(commonFunction.asyncRemindTask())
        ensure_future(commonFunction.asyncRSS())
    except Exception as e:
Beispiel #25
0
import sys
import os

from PyQt5.QtWidgets import QApplication,QMainWindow,QFileDialog,QGraphicsPixmapItem,QGraphicsScene
from PyQt5 import QtGui
from PyQt5.QtCore import QThread,pyqtSignal
from gui.ui import Ui_Form

import time
import cv2 as cv
from configparser import ConfigParser

# Settings file lives at <cwd>/cfgs/config.ini; parsed once at import time.
config_file = os.path.join(os.path.join(os.getcwd(),"cfgs"),"config.ini")
config_data = ConfigParser()
config_data.read(config_file)

class MyMainForm(QMainWindow,Ui_Form):
    def __init__(self,parent=None):  

        super(MyMainForm,self).__init__(parent)

        self.Ui = Ui_Form()
        self.Ui.setupUi(self)

        #Activate the convert thread
        self.Work = ConvertThread()

        self.Ui.File_choose_Button.clicked.connect(self.Choose_Ori_img) # choose the origianl img by the choose button
        self.Ui.File_choose_Button_2.clicked.connect(self.Choose_Style_img) # choose the style img by the choose button
        self.Ui.Start_Button.clicked.connect(self.Start_train)
        self.Ui.Save_Button.clicked.connect(self.SaveImage)
Beispiel #26
0
    def __init__(self, parent=None):
        """Build the periodic-table dialog.

        Lays out the element grid, the asterisk markers that link the main
        table to the detached lanthanide/actinide rows, a configure button,
        and two info panels showing properties of the selected element.

        Args:
            parent: Optional parent widget, forwarded to the Qt base class.
        """
        super(qtelemental, self).__init__(parent)
        self.setWindowIcon(
            QtGui.QIcon(
                QtGui.QPixmap(os.environ["pychemqt"] +
                              "/images/button/PeriodicTableIcon.png")))
        self.setWindowTitle(
            QtWidgets.QApplication.translate("pychemqt", "Periodic Table"))

        # User preferences read from the pychemqt rc file.
        self.Preferences = ConfigParser()
        self.Preferences.read(conf_dir + "pychemqtrc")

        layout = QtWidgets.QGridLayout(self)
        layout.setSpacing(2)

        # Fill the grid with the element buttons (helper defined elsewhere
        # in this class -- TODO confirm its exact behavior).
        self.populate()

        # Spacers separating the main table from the detached rows below.
        layout.addItem(
            QtWidgets.QSpacerItem(10, 10, QtWidgets.QSizePolicy.Fixed,
                                  QtWidgets.QSizePolicy.Fixed), 8, 0, 1, 20)
        layout.addItem(
            QtWidgets.QSpacerItem(10, 10, QtWidgets.QSizePolicy.Expanding,
                                  QtWidgets.QSizePolicy.Expanding), 12, 0, 1,
            20)
        # Asterisk markers: '*' and '**' in the main table point to the
        # matching detached rows (lanthanides/actinides) further down.
        asterisco = QtWidgets.QLabel("*")
        asterisco.setFont(font20)
        asterisco.setAlignment(alignment)
        layout.addWidget(asterisco, 6, 3)
        asterisco2 = QtWidgets.QLabel("**")
        asterisco2.setFont(font20)
        asterisco2.setAlignment(alignment)
        layout.addWidget(asterisco2, 7, 3)
        asterisco_ = QtWidgets.QLabel("*")
        asterisco_.setFont(font20)
        asterisco_.setAlignment(alignment)
        layout.addWidget(asterisco_, 10, 2)
        asterisco2_ = QtWidgets.QLabel("**")
        asterisco2_.setFont(font20)
        asterisco2_.setAlignment(alignment)
        layout.addWidget(asterisco2_, 11, 2)

        # Button opening the configuration dialog.
        butonConfig = QtWidgets.QToolButton()
        butonConfig.setIcon(
            QtGui.QIcon(os.environ["pychemqt"] +
                        os.path.join("images", "button", "configure.png")))
        butonConfig.clicked.connect(self.configure)
        layout.addWidget(butonConfig, 11, 1)

        # Left info panel: identity and basic physical properties of the
        # currently selected element.
        self.Info = QtWidgets.QFrame()
        layout.addWidget(self.Info, 0, 5, 3, 3)
        layoutInfo = QtWidgets.QGridLayout(self.Info)
        layoutInfo.setSpacing(1)
        layoutInfo.setContentsMargins(2, 0, 2, 0)
        self.Info.setFrameShape(QtWidgets.QFrame.StyledPanel)
        self.Info.setFrameShadow(QtWidgets.QFrame.Raised)
        self.Info.setAutoFillBackground(True)
        self.Info.setPalette(palette)
        self.numero_atomico = QtWidgets.QLabel()  # atomic number
        self.numero_atomico.setToolTip(
            QtWidgets.QApplication.translate("pychemqt", "Atomic number"))
        layoutInfo.addWidget(self.numero_atomico, 1, 1)
        self.simbolo = QtWidgets.QLabel()  # element symbol
        self.simbolo.setAlignment(alignment)
        self.simbolo.setToolTip(
            QtWidgets.QApplication.translate("pychemqt", "Symbol"))
        self.simbolo.setFont(font11)
        layoutInfo.addWidget(self.simbolo, 1, 3)
        self.nombre = QtWidgets.QLabel()  # element name
        self.nombre.setAlignment(QtCore.Qt.AlignCenter)
        self.nombre.setFont(font_title)
        layoutInfo.addWidget(self.nombre, 2, 1, 1, 3)
        font8 = QtGui.QFont()
        font8.setPointSize(8)
        self.peso_atomico = QtWidgets.QLabel()  # atomic mass
        self.peso_atomico.setFont(font8)
        self.peso_atomico.setToolTip(
            QtWidgets.QApplication.translate("pychemqt", "Atomic mass, g/mol"))
        layoutInfo.addWidget(self.peso_atomico, 3, 1)
        self.densidad = QtWidgets.QLabel()  # density (color-coded by phase)
        self.densidad.setFont(font8)
        self.densidad.setAlignment(alignment)
        self.densidad.setToolTip(
            QtWidgets.QApplication.translate(
                "pychemqt",
                "Density:\nBrown: Solid, kg/l\nBlue: Liquid, kg/l\n"
                "Green: Gas, g/l"))
        layoutInfo.addWidget(self.densidad, 3, 3)
        self.Tf = QtWidgets.QLabel()  # melting point
        self.Tf.setFont(font8)
        self.Tf.setToolTip(
            QtWidgets.QApplication.translate("pychemqt", "Melting Point, K"))
        layoutInfo.addWidget(self.Tf, 4, 1)
        self.Heat_f = QtWidgets.QLabel()  # heat of fusion
        self.Heat_f.setFont(font8)
        self.Heat_f.setToolTip(
            QtWidgets.QApplication.translate("pychemqt",
                                             "Heat of fusion, kJmol"))
        self.Heat_f.setAlignment(alignment)
        layoutInfo.addWidget(self.Heat_f, 4, 3)
        self.Tb = QtWidgets.QLabel()  # boiling point
        self.Tb.setFont(font8)
        self.Tb.setToolTip(
            QtWidgets.QApplication.translate("pychemqt", "Boiling Point, K"))
        layoutInfo.addWidget(self.Tb, 5, 1)
        self.Heat_b = QtWidgets.QLabel()  # heat of vaporization
        self.Heat_b.setFont(font8)
        self.Heat_b.setToolTip(
            QtWidgets.QApplication.translate("pychemqt",
                                             "Heat of vaporization, kJmol"))
        self.Heat_b.setAlignment(alignment)
        layoutInfo.addWidget(self.Heat_b, 5, 3)

        self.configuracion = QtWidgets.QLabel()  # electronic configuration
        self.configuracion.setFont(font7)
        self.configuracion.setAlignment(QtCore.Qt.AlignCenter)
        self.configuracion.setToolTip(
            QtWidgets.QApplication.translate("pychemqt",
                                             "Electronic configuration"))
        layoutInfo.addWidget(self.configuracion, 6, 1, 1, 3)

        # Right info panel: atomic sizes/radii plus thermal and electronic
        # properties.
        self.Info2 = QtWidgets.QFrame()
        layout.addWidget(self.Info2, 0, 8, 3, 3)
        layoutInfo2 = QtWidgets.QGridLayout(self.Info2)
        layoutInfo2.setSpacing(1)
        layoutInfo2.setContentsMargins(2, 0, 2, 0)
        self.Info2.setFrameShape(QtWidgets.QFrame.StyledPanel)
        self.Info2.setFrameShadow(QtWidgets.QFrame.Raised)
        self.Info2.setAutoFillBackground(True)
        self.Info2.setPalette(palette)
        self.atomic_volume = QtWidgets.QLabel()
        self.atomic_volume.setFont(font8)
        self.atomic_volume.setToolTip(
            QtWidgets.QApplication.translate("pychemqt", "Atomic volume") +
            ", cm³/mol")
        layoutInfo2.addWidget(self.atomic_volume, 1, 1)
        self.atomic_radius = QtWidgets.QLabel()
        self.atomic_radius.setFont(font8)
        self.atomic_radius.setToolTip(
            QtWidgets.QApplication.translate("pychemqt", "Atomic radius") +
            ", pm")
        layoutInfo2.addWidget(self.atomic_radius, 2, 1)
        self.covalent_radius = QtWidgets.QLabel()
        self.covalent_radius.setFont(font8)
        self.covalent_radius.setToolTip(
            QtWidgets.QApplication.translate("pychemqt", "Covalent radius") +
            ", pm")
        layoutInfo2.addWidget(self.covalent_radius, 3, 1)
        self.vanderWaals_radius = QtWidgets.QLabel()
        self.vanderWaals_radius.setFont(font8)
        self.vanderWaals_radius.setToolTip(
            QtWidgets.QApplication.translate("pychemqt",
                                             "Van der Waals radius") + ", pm")
        layoutInfo2.addWidget(self.vanderWaals_radius, 4, 1)
        self.ionic_radii = QtWidgets.QLabel()
        self.ionic_radii.setFont(font7)
        self.ionic_radii.setToolTip(
            QtWidgets.QApplication.translate("pychemqt", "Ionic radii") +
            ", pm")
        layoutInfo2.addWidget(self.ionic_radii, 5, 1, 1, 3)
        self.electronegativity = QtWidgets.QLabel()
        self.electronegativity.setFont(font8)
        self.electronegativity.setToolTip(
            QtWidgets.QApplication.translate(
                "pychemqt", "Electronegativity, Pauling scale"))
        self.electronegativity.setAlignment(QtCore.Qt.AlignRight
                                            | QtCore.Qt.AlignVCenter)
        layoutInfo2.addWidget(self.electronegativity, 1, 3)
        self.Cp = QtWidgets.QLabel()  # specific heat capacity
        self.Cp.setFont(font8)
        self.Cp.setToolTip(
            QtWidgets.QApplication.translate(
                "pychemqt", "Specific heat capacitiy") + ", kJ/kgK")
        self.Cp.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignVCenter)
        layoutInfo2.addWidget(self.Cp, 2, 3)
        self.k = QtWidgets.QLabel()  # thermal conductivity
        self.k.setFont(font8)
        self.k.setToolTip(
            QtWidgets.QApplication.translate(
                "pychemqt", "Thermal conductivity") + ", W/mK")
        self.k.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignVCenter)
        layoutInfo2.addWidget(self.k, 3, 3)
        self.first_ionization = QtWidgets.QLabel()
        self.first_ionization.setFont(font8)
        self.first_ionization.setToolTip(
            QtWidgets.QApplication.translate(
                "pychemqt", "First ionization energy") + ", kJ/mol")
        self.first_ionization.setAlignment(QtCore.Qt.AlignRight
                                           | QtCore.Qt.AlignVCenter)
        layoutInfo2.addWidget(self.first_ionization, 4, 3)

        self.oxidation = QtWidgets.QLabel()  # oxidation states
        self.oxidation.setFont(font8)
        self.oxidation.setToolTip(
            QtWidgets.QApplication.translate("pychemqt", "Oxidation states"))
        self.oxidation.setAlignment(QtCore.Qt.AlignCenter
                                    | QtCore.Qt.AlignVCenter)
        layoutInfo2.addWidget(self.oxidation, 6, 1, 1, 3)

        # Show hydrogen (atomic number 1) initially.
        elemento = Elemental(1)
        self.actualizar(elemento)
        logging.info(
            QtWidgets.QApplication.translate("pychemqt",
                                             "Starting periodic table tool"))
Beispiel #27
0
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    level=logging.INFO,
    handlers=[
        TimedRotatingFileHandler('logs/userbot.log',
                                 when="midnight",
                                 encoding=None,
                                 delay=False,
                                 backupCount=10),
        logging.StreamHandler()
    ])
LOGS = logging.getLogger(__name__)

# Read from config file -- the ini file is named after the bot class.
name = type(UserBot()).__name__.lower()
config_file = f"{name}.ini"
config = ConfigParser()
config.read(config_file)

# MongoDB details
MONGO_URL = config.get('mongo', 'url')
DB_NAME = config.get('mongo', 'db_name')
DB_USERNAME = config.get('mongo', 'db_username')
DB_PASSWORD = config.get('mongo', 'db_password')

# Extra details
__version__ = '0.2.0'
__author__ = 'athphane'

# PM-permit settings from the ini file
PM_PERMIT = config.get('pm_permit', 'pm_permit')
PM_LIMIT = config.getint('pm_permit', 'pm_limit')
Beispiel #28
0
# coding: utf8
import os
import time
import shutil
import filecmp
from subprocess import Popen, PIPE, TimeoutExpired
from configparser import ConfigParser

#from core.dbHandler import DbHandler
#from utils.computeMd5 import compute_md5
from utils.alarmMessage import send_alarm_mail
from utils.log import write_to_log

# Shared parser for the monitor's settings; presumably populated by a
# read() call elsewhere in this module -- TODO confirm.
CONFIG = ConfigParser()


class Monitor:
    def __init__(self,
                 website_path,
                 backup_path,
                 mail_receivers,
                 page_suffix_set=None,
                 msg_interval=30,
                 auto_cure=True):
        """Create a website tamper monitor.

        Args:
            website_path: Root directory of the live website to watch.
            backup_path: Directory holding the pristine backup copy.
            mail_receivers: Recipients for alarm mails.
            page_suffix_set: Page-file suffixes to check; not stored in the
                visible code -- TODO confirm it is consumed elsewhere.
            msg_interval: Interval (presumably seconds) between repeated
                alarm messages -- TODO confirm the unit.
            auto_cure: Whether tampered files are restored automatically;
                presumably read by later methods -- verify.
        """
        self.website_path = website_path
        self.backup_path = backup_path
        # Cached prefix length, for stripping website_path from full paths.
        self.__website_path_len = len(self.website_path)
        self.mail_receivers = mail_receivers
        self.msg_interval = msg_interval
        self.auto_cure = auto_cure
        # Cache of tampered pages; keys/values are set by later methods
        # outside this view -- TODO confirm the schema.
        self.tampered_page_cache = {}
def readandwritetoDB():
    """Poll the battery over RS-232, decode the hex reply into named fields,
    and append one row to the `Battery_Info` MySQL table.

    The byte layout of the reply comes from bytebreakdownTSPbat.csv; the
    database connection settings come from the [db] section of config.ini.
    """
    # Command from paragraph 5 of the serial-port manual
    # ("Opdrag vanaf paragraaf 5 in seriepoort handleiding").
    bytestosend = '7E3235303034363432453030324646464430360D'

    # Send at 9600 baud -- the battery expects this speed first.
    with serial.Serial('/dev/ttyUSB1', 9600, timeout=5.0) as ser:
        ser.write(unhexlify(bytestosend))  # send the command to the battery
        uitstring = ser.read(5000)
        uitstringhex = uitstring.hex()
    print(uitstringhex)
    # Sample captured reply, kept for offline testing:
    #uitstringhex = "7e3235303134363030323038363030303130463044393530443942304432363044394330444130304439443044393230443135304442343044393730443942304441323044423030444130304441353036304236343042354430423633304236303042383230423743303030304342353332373130303332373130303030343237313030303030353542413030303042323941453041440d"

    bytebreakdown = pd.read_csv("bytebreakdownTSPbat.csv")

    def haalveranderlikeuithex(uitstringhex, bytenommer, bytelengte):
        """Slice `bytelengte` bytes starting at 1-based byte `bytenommer`
        out of the hex reply and return the raw decoded bytes."""
        veranderlike = unhexlify(
            uitstringhex[(2 * bytenommer) - 2:(bytenommer * 2) +
                         (bytelengte * 2) - 2])
        return veranderlike

    # Decode each field: numeric fields are parsed as base-16 integers;
    # anything that fails to parse is kept as its raw string instead.
    for ind, row in bytebreakdown.iterrows():
        try:
            bytebreakdown.loc[ind, 'result'] = int(
                haalveranderlikeuithex(uitstringhex, row['beginbyte'],
                                       row['bytelengte']).decode("utf-8"), 16)
        except Exception:
            # FIX: was a bare `except:` (also swallowed KeyboardInterrupt/
            # SystemExit); the best-effort string fallback is deliberate.
            bytebreakdown.loc[ind, 'result'] = str(
                haalveranderlikeuithex(uitstringhex, row['beginbyte'],
                                       row['bytelengte']).decode("utf-8"))

    postdf = bytebreakdown.drop(['beginbyte', 'bytelengte'], axis=1)

    begintyd = str(datetime.datetime.now())

    # Column names of postdf: field-name column and value column.
    y = postdf.columns[0]
    x = postdf.columns[1]

    # Prepend timestamp, client id and battery id rows to the field table.
    datedf = pd.DataFrame({y: ['datetime'], x: [begintyd]})
    klientID = pd.DataFrame({y: ['klientid'], x: ['12345678']})
    batteryID = pd.DataFrame({y: ['batteryid'], x: ['987654321']})

    aangepastedb = datedf.append(klientID,
                                 ignore_index=True).append(batteryID,
                                                           ignore_index=True)

    finaltodb = aangepastedb.append(
        postdf, ignore_index=True).set_index(y).transpose()

    parser = ConfigParser()
    parser.read('config.ini')

    host = parser.get('db', 'db_host')
    port = int(parser.get('db', 'db_port'), 10)
    name = parser.get('db', 'db_name')
    user = parser.get('db', 'db_user')
    password = parser.get('db', 'db_password')

    db_url = 'mysql+mysqlconnector://{}:{}@{}:{}/{}'.format(
        user, password, host, port, name)

    engine = sqlalchemy.create_engine(db_url)

    # NOTE: removed a dead duplicate of the begintyd/y/x/datedf block that
    # recomputed values after `finaltodb` was already built.
    finaltodb.to_sql('Battery_Info',
                     con=engine,
                     if_exists='append',
                     index=False)
Beispiel #30
0
def analyze_process_severity(configini, severitbrac, targetBehavior):
    """Grade classified behavior frames by movement severity.

    For every csv in the project's machine_results folder, count the frames
    where `targetBehavior` is classified (== 1), bucketed into `severitbrac`
    brackets of the 'Scaled_movement_M1_M2' column, and write a per-video
    frames/time log to the project's logs folder.

    Args:
        configini: Path to the SimBA project config ini.
        severitbrac: Number of severity brackets (int or int-like string).
        targetBehavior: Name of the classifier column to grade.
    """
    print('Processing', targetBehavior, 'severity...')
    config = ConfigParser()
    config.read(configini)
    filesFound = []
    csv_dir = config.get('General settings', 'csv_path')
    csv_dir_in = os.path.join(csv_dir, 'machine_results')
    severity_brackets = int(severitbrac)
    vidInfPath = config.get('General settings', 'project_path')
    vidInfPath = os.path.join(vidInfPath, 'logs')
    vidInfPath = os.path.join(vidInfPath, 'video_info.csv')
    vidinfDf = pd.read_csv(vidInfPath)
    # Bracket edges on the 0-1 scaled-movement axis; the final edge is 10 so
    # the top bracket also catches any values above 1.
    severityGrades = list(np.arange(0, 1.0, ((10 / severity_brackets) / 10)))
    severityGrades.append(10)
    severityLogFrames = [0] * severity_brackets
    severityLogTime = [0] * severity_brackets

    ########### logfile path ###########
    # NOTE: `dateTime` is a module-level timestamp defined elsewhere in the
    # file -- TODO confirm.
    log_fn = 'severity_' + dateTime + '.csv'
    log_path = config.get('General settings', 'project_path')
    log_path = os.path.join(log_path, 'logs')
    log_fn = os.path.join(log_path, log_fn)
    if not os.path.exists(log_path):
        os.makedirs(log_path)
    # One frames column and one time column per severity bracket.
    headers = ['Video']
    headers += ['Grade' + str(i) + '_frames' for i in range(severity_brackets)]
    headers += ['Grade' + str(i) + '_time' for i in range(severity_brackets)]
    log_df = pd.DataFrame(columns=headers)

    ########### FIND CSV FILES ###########
    for fname in os.listdir(csv_dir_in):
        if '.csv' in fname:
            filesFound.append(os.path.join(csv_dir_in, fname))
    loopy = 0
    for currentFile in filesFound:
        CurrentVideoName = os.path.basename(currentFile)
        videoSettings = vidinfDf.loc[vidinfDf['Video'] == str(
            CurrentVideoName.replace('.csv', ''))]
        try:
            fps = int(videoSettings['fps'])
        except TypeError:
            print(
                'Error: make sure all the videos that are going to be analyzed are represented in the project_folder/logs/video_info.csv file'
            )
            # FIX: skip this video -- the original fell through and then
            # used an undefined (or stale, from a previous iteration) fps.
            continue
        csv_df = pd.read_csv(currentFile, index_col=[0])
        for pp in range(severity_brackets):
            lowerBound = severityGrades[pp]
            upperBound = severityGrades[pp + 1]
            # Frames where the behavior fired AND the scaled movement falls
            # inside this bracket (lower, upper].
            currGrade = len(
                csv_df[(csv_df[str(targetBehavior)] == 1)
                       & (csv_df['Scaled_movement_M1_M2'] > lowerBound) &
                       (csv_df['Scaled_movement_M1_M2'] <= upperBound)])
            severityLogFrames[pp] = currGrade
        log_list = [str(CurrentVideoName.replace('.csv', ''))]
        for bb in range(len(severityLogFrames)):
            severityLogTime[bb] = round(severityLogFrames[bb] / fps, 4)
        log_list.extend(severityLogFrames)
        log_list.extend(severityLogTime)
        log_df.loc[loopy] = log_list
        loopy += 1
        print('Files # processed for movement data: ' + str(loopy))
    log_df = log_df.replace('NaN', 0)
    log_df.to_csv(log_fn, index=False)
    print('All files processed for severity data: ' + 'data saved @' +
          str(log_fn))