Example #1
    def set_server_network_address(self,
                                   ip_or_host,
                                   port=443,
                                   overwrite=False):

        # actual_ip = None
        # try:
        #     actual_ip = ipaddress.ip_address(ip)
        # except Exception as ex:
        #     raise Exception(_("Invalid IP address : {}").format(ip))

        if port == 443:
            protocol = 'https'
        else:
            protocol = 'http'

        base = "{}://{}:{}".format(protocol, ip_or_host, port)
        mainlog.info("Server base address is {}.".format(base))

        if overwrite or not self.get("DownloadSite", "url_version"):
            self.set("DownloadSite", "url_version", base + "/version")
        else:
            mainlog.debug("Leaving url_version as it is")

        if overwrite or not self.get("DownloadSite", "base_url"):
            self.set("DownloadSite", "base_url", base)
        else:
            mainlog.debug("Leaving base_url as it is")

        if overwrite or not self.get("DownloadSite", "url_file"):
            self.set("DownloadSite", "url_file", base + "/file")
        else:
            mainlog.debug("Leaving url_file as it is")
Example #2
def http_download(download_url, outfile, proxy_url=None, proxy_port=None):

    if proxy_url:
        proxy = "{}:{}".format(proxy_url, proxy_port)
        mainlog.info("Using a proxy : {}".format(proxy))

        urlopener = build_opener(ProxyHandler({
            'https': proxy,
            'http': proxy
        }), HTTPRedirectHandler())
    else:
        mainlog.info("Not using a proxy")
        urlopener = build_opener(HTTPHandler(), HTTPSHandler(),
                                 HTTPRedirectHandler())

    urlopener.addheaders = [(
        'User-agent',
        'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:32.0) Gecko/20100101 Firefox/32.0'
    )]

    datasource = urlopener.open(download_url)

    # Stream the download to disk in 8 KB chunks
    with open(outfile, 'wb') as out:
        while True:
            d = datasource.read(8192)
            if not d:
                break
            out.write(d)
            out.flush()
    datasource.close()
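
A minimal usage sketch; the URL, output path and proxy settings below are placeholders, not values from the project:

# Direct download, no proxy
http_download("https://example.com/koi-1.2.3.zip", "/tmp/koi-1.2.3.zip")

# Same download routed through an HTTP proxy
http_download("https://example.com/koi-1.2.3.zip", "/tmp/koi-1.2.3.zip",
              proxy_url="10.0.0.1", proxy_port=3128)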
Example #3
def send_mail(subject, content, cfg):
    # typical values for text_subtype are plain, html, xml
    text_subtype = 'plain'

    msg = MIMEText(content, text_subtype)
    msg['Subject'] = subject
    msg['From'] = cfg.get(
        'Mail',
        'sender')  # some SMTP servers will do this automatically, not all

    if not cfg.get('Mail', 'SMTPServer'):
        mainlog.error("Mail configuration seems broken. Can't send email.")
        # Since this is backup, failing sending a mail should not stop
        # the execution => no exception thrown
        return

    conn = SMTP(cfg.get('Mail', 'SMTPServer'))
    conn.set_debuglevel(False)
    # conn.login(cfg.get('Mail','SMTPUser'), cfg.get('Mail','SMTPPassword'))
    try:
        conn.sendmail(cfg.get('Mail', 'sender'),
                      cfg.get('Mail', 'destination'), msg.as_string())
    except Exception as ex:
        mainlog.exception(ex)
        mainlog.error("Unable to send mail")
        return
    finally:
        conn.close()

    mainlog.info("Mail sent")
Example #4
def upgrade_mediafire(version):
    """ Upgrade to the given version. The upgrades
    will be downloaded from mediafire.

    The version can be higher or lower than the current one.
    This allows to downgrade (in case of a failed upgrade)
    """

    codename = configuration.get("Globals", "codename")
    filename = "{}-{}.zip".format(codename, version)
    dest = os.path.join(get_data_dir(), filename)

    mainlog.info("Downloading a new version {} into {} proxyport={}".format(
        filename, dest, configuration.get("Proxy", "proxy_port")))

    client = MediaFireClient()
    client.login(email=configuration.get("MediaFire", "email"),
                 password=configuration.get("MediaFire", "password"),
                 app_id=configuration.get("MediaFire", "appid"),
                 api_key=configuration.get("MediaFire", "sessionkey"))
    client.download_file("mf:/" + filename, dest)

    configuration.set("DownloadSite", "current_version", str(version))
    configuration.set("DownloadSite", "client_path", dest)
    configuration.save()
    return
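
A minimal usage sketch; the version strings are placeholders. As the docstring notes, passing a lower version performs a downgrade:

upgrade_mediafire("1.2.3")   # upgrade to a newer build
upgrade_mediafire("1.2.1")   # roll back after a failed upgrade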
Example #5
def init_configuration():
    p = path_to_config("server.cfg")
    if not os.path.exists(p):
        ps = os.path.join(resource_dir, "server_config_check.cfg")
        if os.path.exists(ps):
            make_empty_configuration_file(p, ps)
            load_configuration_server(p, ps)

            configuration.set_server_network_address(
                ip_or_host=guess_server_url(), port=8079, overwrite=True)

            set_default_document_root(configuration)
            configuration.save()
            mainlog.info("Configuration file created at {}".format(p))
            return True
        else:
            mainlog.error(
                "Can't find the configuration specification file at {}".format(ps))
            return False
    else:
        mainlog.error(
            "Can't initialize the configuration file because it already exists at {}"
            .format(p))
        return False
Example #6
def make_screenshot():
    global window


    from PySide.QtCore import QTimer
    from PySide.QtGui import QPixmap

    def screenshot_callback():
        # Grab the whole desktop rather than just the main window
        screenshot = QPixmap.grabWindow(QApplication.desktop().winId())
        screenshot.save("screenshot-main.png")

    def screenshot_callback2():
        window._qaction_triggered_FinancialKPIPanel()
        screenshot = QPixmap.grabWidget(window)
        screenshot.save("screenshot-financial.png")

    def screenshot_callback3a():
        window.show_presence_overview()
        window.presence_overview_widget.table_view.setCurrentIndex( window.presence_overview_widget.table_view.model().index(1,1))
        window.presence_overview_widget.table_view.setCurrentIndex( window.presence_overview_widget.table_view.model().index(4,4))

    def screenshot_callback3():
        window.show_presence_overview()

        # Grab the whole desktop rather than just the main window
        screenshot = QPixmap.grabWindow(QApplication.desktop().winId())
        screenshot.save("screenshot-presence.png")

    QTimer.singleShot(1000, screenshot_callback)
    QTimer.singleShot(3000, screenshot_callback2)
    QTimer.singleShot(4000, screenshot_callback2)
    QTimer.singleShot(6000, screenshot_callback3a)
    QTimer.singleShot(8000, screenshot_callback3)
    mainlog.info("Screenshots done")
Example #7
def make_empty_configuration_file(dest_path, spec_path):
    cfg = configobj.ConfigObj(create_empty=True,
                              configspec=spec_path,
                              default_encoding='utf-8')

    # Make sure the default values are copied
    cfg.validate(validate.Validator(), copy=True)

    mainlog.info("Writing configuration file {}".format(dest_path))
    with open(dest_path, mode="wb") as out_file:
        cfg.write(out_file)
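
A minimal usage sketch with placeholder paths (init_configuration in Example #5 shows where the real ones come from):

make_empty_configuration_file("/tmp/server.cfg", "/path/to/server_config_check.cfg")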
Example #8
    def _register_service_for_in_process(self, service):
        registered = 0
        for m_name, m in inspect.getmembers(service):
            if hasattr(m, '_json_callable'):
                self._json_caller.register_in_process_call(m)
                registered += 1

        if registered > 0:
            mainlog.info("Registered {} methods on {}".format(
                registered, service))
        else:
            mainlog.warn("Registered NO method for {}".format(service))
Example #9
def extract_database(dbName, filename, base_configuration):

    user = base_configuration.get('Database', 'user')
    pw = base_configuration.get('Database', 'password')
    db_name = base_configuration.get('Database', 'db_name')

    command1 = [
        base_configuration.get('Commands', 'pg_dump'), '--format=custom'
    ]
    if user:
        command1 += ['--username={}'.format(user)]

    # Password is supplied via .pgpass

    command1 += ['--file={}'.format(filename), db_name]

    command3 = None
    out = None

    if "aescrypt" in base_configuration.get('Commands', 'encrypt'):
        command3 = [
            base_configuration.get('Commands', 'encrypt'), '-e', '-p',
            base_configuration.get('Backup', 'encryption_key'), filename
        ]
        out = filename + ".aes"
    elif "gpg2" in base_configuration.get('Commands', 'encrypt'):
        out = filename + ".gpg"
        command3 = [
            base_configuration.get('Commands', 'encrypt'), '-c', '--batch',
            '-o', out, '--passphrase',
            base_configuration.get('Backup', 'encryption_key'), filename
        ]

    else:
        raise Exception("Unsupported encryption program")

    mainlog.info("Creating backup : {}".format(command1))
    p1 = subprocess.Popen(command1)
    p1.wait()

    try:
        os.remove(out)
    except OSError:
        pass

    mainlog.info("Encrypting backup")
    p3 = subprocess.Popen(command3)
    p3.wait()

    return out
Example #10
def alter_data():
    from koi.datalayer.employee_mapping import Employee,RoleType

    q = session().query(Employee).filter(Employee.roles != None).all()
    for employee in q:
        if RoleType.timetrack_modify in employee.roles and RoleType.view_timetrack not in employee.roles:
            mainlog.info(employee)

            # The roles attribute is not easy to work with :-(
            r = employee.roles
            r.add(RoleType.view_timetrack)
            employee.roles = r

    session().commit()
Example #11
def check_database_connection():
    # I need DB url because I didn't find a way to get that information
    # from the session(), connection()...

    # Rage hard, maxi vinyl with spoken words (super rare)

    mainlog.info(
        "check_database_connection : Trying to connect to the database")
    try:
        session().connection().execute(
            "SELECT count(*) from {}.employees".format(DATABASE_SCHEMA))
        session().commit()
        return True
    except Exception as ex:
        mainlog.exception(ex)
        return str(ex)
Example #12
def check_postgres_connection(db_url):
    """ Make sure we can connect to the server.
    We use the template1 schema for that, because it exists
    on any postgresql server.
    """

    # I need DB url because I didn't find a way to get that information
    # from the session(), connection()...

    db_url, params_from_url = parse_db_url(db_url)
    parsed_url = urlparse(db_url)
    t1_url = parsed_url.scheme + "://" + parsed_url.netloc + "/template1"

    # Rage hard, maxi vinyl with spoken words (super rare)

    mainlog.info("Trying to connect to PostgreSQL server...")

    engine = create_engine(t1_url)
    c = None
    try:
        c = engine.connect()
        c.execute("SELECT count(*) from pg_stats")
        return True
    except exc.OperationalError as ex:
        mainlog.exception(ex)
        mainlog.error("Can't query the database !!! Is it connected ?")
    finally:
        # This one is rather tricky. Somehow, I have
        # the impression that the select above opens
        # a connection and doesn't close it.
        # Because of that, in some cases, PostgreSQL
        # cannot proceed with some operations.
        # For example, if one stays connected to template1
        # one cannot do a "drop table". So it is very important
        # that the connection, transaction, whatever is
        # actually closed when leaving this function.

        mainlog.debug("Closing connection")
        if c: c.close()

        # I think this helps to better close the connection
        # although SQLA's documentation is a bit unclear.
        engine.dispose()
        del engine

    return False
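
A minimal usage sketch; the connection URL is a placeholder:

if check_postgres_connection("postgresql://admin:secret@localhost:5432/koidb"):
    mainlog.info("PostgreSQL server is reachable")
else:
    mainlog.error("PostgreSQL server is not reachable")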
Example #13
def init_base():
    init_logging()
    mainlog.setLevel(logging.INFO)
    load_configuration("server.cfg")

    parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,
                                     description='This is a Horse! migration script.',
                                     epilog="For example --db-url {}".format(
                                         configuration.get("Database", "admin_url")))

    parser.add_argument('--db-url', default=configuration.database_url, help='Database URL')
    args = parser.parse_args()

    mainlog.info("Connecting to {}".format(args.db_url))
    init_i18n()
    from koi.db_mapping import metadata
    init_db_session(args.db_url, metadata, False)  # True or configuration.echo_query)
Example #14
def upgrade_http(version, url, proxy_url=None, proxy_port=None):
    codename = configuration.get("Globals", "codename")
    filename = "{}-{}.zip".format(codename, version)
    dest = os.path.join(get_data_dir(), filename)

    mainlog.info(
        "Upgrading from {} to version {}. File will be stored in {}".format(
            url, version, dest))
    http_download(url, dest, configuration.get("Proxy", "proxy_url"),
                  configuration.get("Proxy", "proxy_port"))

    configuration.set("DownloadSite", "current_version", str(version))
    configuration.set("DownloadSite", "client_path", dest)
    configuration.save()

    return "Successfully downloaded version {} from {}. Config was updated.".format(
        str(version), url)
Example #15
def upgrade_file(path):
    global configuration
    re_file = re.compile(r'koi-delivery_slips-([0-9]+\.[0-9]+\.[0-9]+)\.zip')
    exe_filename = "{}/{}.exe".format(configuration.get("Globals", "codename"),
                                      configuration.get("Globals", "codename"))

    if os.path.exists(path):
        match = re_file.match(os.path.basename(path))

        if match:
            version = match.groups()[0]

            candidates = []
            exe_correct = False
            with zipfile.ZipFile(path, 'r') as zin:
                for item in zin.infolist():
                    if item.filename == exe_filename:
                        exe_correct = True
                        break
                    elif ".exe" in item.filename:
                        candidates.append(item.filename)

            if exe_correct:
                configuration.set("DownloadSite", "current_version",
                                  str(version))
                configuration.set("DownloadSite", "client_path", path)
                configuration.save()
                mainlog.info(
                    "Configuration successfully updated with delivery_slips version {}."
                    .format(version))
                mainlog.warning(
                    "Don't forget to restart the server to take it into account !"
                )
                return True
            else:
                mainlog.error(
                    "Didn't find {} inside the file you've given. Possible candidates {}"
                    .format(exe_filename, ", ".join(candidates)))
        else:
            mainlog.error(
                "I don't recognize the filename. It should be 'koi-delivery_slips-a.b.c.zip'."
            )
    else:
        mainlog.error("The file {} was not found.".format(path))

    return False
Example #16
def alter_data():
    # For the moment I skip this because I think that adding a role that is not
    # supported by the user session might lock people out of Horse
    return

    from koi.datalayer.employee_mapping import Employee, RoleType

    q = session().query(Employee).filter(Employee.roles != None).all()
    for employee in q:
        if RoleType.modify_parameters in employee.roles and RoleType.modify_document_templates not in employee.roles:
            mainlog.info(employee)

            # The roles attribute is not easy to work with :-(
            r = employee.roles
            r.add(RoleType.modify_document_templates)
            employee.roles = r

    session().commit()
Example #17
def backup_procedure(configuration):
    if not configuration.get('Backup', 'backup_directory'):
        raise Exception(
            "Missing Backup/backup_directory in configuration file")

    backup_dir = configuration.get('Backup', 'backup_directory')

    try:

        if not os.path.exists(backup_dir):
            os.mkdir(backup_dir)
            mainlog.info("Created backup directory because it was missing")

        mainlog.debug("Backup directory is {}".format(backup_dir))
        # We default to backup behaviour because when
        # this program is run as a scheduled task, we cannot
        # give parameters to it

        mainlog.info("Backing up the database")
        filename, bytes = dump_and_zip_database(configuration)

        mainlog.info("Backing up the documents")
        total_files, total_bytes, scanned_files = documents_copy_recurring(
            configuration.get('DocumentsDatabase', 'documents_root'),
            configuration.get('Backup', 'backup_directory'), configuration)
        mainlog.info(
            "Documents copy done. {} files copied ({} bytes), {} files scanned."
            .format(total_files, size_to_str(total_bytes), scanned_files))

        mainlog.info("Syncing the back up remotely")
        rsync_export_files(filename, mainlog)

        send_mail(
            "Backup SUCCESS",
            "The backup was done correctly. DB: {}, files: {} ({} bytes).".format(
                size_to_str(bytes), total_files, total_bytes), configuration)

    except Exception as ex:
        mainlog.error("Failed to complete backup")
        mainlog.exception(ex)
        send_mail("Backup FAILURE", "The backup of was *not* done correctly.",
                  configuration)
Example #18
    def load(self, config_path, config_spec):
        self._config_file = config_path
        self._config_spec = config_spec

        config_path = os.path.normpath(os.path.join(os.getcwd(), config_path))

        mainlog.info("Reading configuration file -> {}".format(config_path))
        mainlog.debug(
            "Reading configuration spec file -> {}".format(config_spec))

        if not os.path.exists(config_path):
            mainlog.error(
                "Configuration file not found at {}".format(config_path))
            raise Exception(
                "Configuration file not found at {}".format(config_path))

        try:
            self.base_configuration = configobj.ConfigObj(
                infile=config_path, configspec=config_spec, encoding='utf-8')
        except UnicodeDecodeError:
            mainlog.warn(
                "The encoding of the config file is not UTF-8. I'll try {}".
                format(locale.getpreferredencoding()))
            self.base_configuration = configobj.ConfigObj(
                infile=config_path,
                configspec=config_spec,
                encoding=locale.getpreferredencoding())

        self.base_configuration.validate(validate.Validator())

        if 'Programs' not in self.base_configuration or 'pdf_viewer' not in self.base_configuration[
                'Programs'] or not self.base_configuration['Programs'][
                    'pdf_viewer'] or not os.path.exists(
                        self.base_configuration['Programs']['pdf_viewer']):

            if platform.system() == 'Linux':
                self.base_configuration['Programs']['pdf_viewer'] = 'xpdf'
            else:
                self.base_configuration['Programs'][
                    'pdf_viewer'] = os.path.join(resource_dir,
                                                 'SumatraPDF.exe')
Example #19
def _updateZip(zip_path, filename, data):

    mainlog.info("Injecting into file {} located in zip {}. Data to inject is {}.".format(filename, zip_path, data))
    needs_replace = False

    with zipfile.ZipFile(zip_path, 'r') as zin:
        for item in zin.infolist():
            if item.filename == filename:

                data_in = zin.read(item.filename).decode('ascii')

                if data == data_in:
                    mainlog.info("good data already in file, nothing to do")
                    return
                else:
                    mainlog.info("File is there but its content doesn't match data")
                    needs_replace = True
                    break


    if needs_replace:
        mainlog.info("Copying zip file without the file")
        # generate a temp file
        tmpfd, tmpname = tempfile.mkstemp(dir=os.path.dirname(zip_path))
        os.close(tmpfd)

        # create a temp copy of the archive without filename
        with zipfile.ZipFile(zip_path, 'r') as zin:
            with zipfile.ZipFile(tmpname, 'w') as zout:
                zout.comment = zin.comment # preserve the comment
                for item in zin.infolist():
                    if item.filename != filename:
                        zout.writestr(item, zin.read(item.filename))

        # replace with the temp archive
        os.remove(zip_path)
        os.rename(tmpname, zip_path)

    mainlog.info("append filename with its new data")
    with zipfile.ZipFile(zip_path, mode='a', compression=zipfile.ZIP_DEFLATED) as zf:
        zf.writestr(filename, data)
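
A minimal usage sketch; the zip path, member name and data are placeholders. If the member already contains exactly this data, the function returns without rewriting the archive:

_updateZip("/tmp/koi-1.2.3.zip", "net.cfg", "server=192.168.0.10")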
Example #20
def extend_enumeration(db_engine, enumeration : DeclEnum, symbol : EnumSymbol):

    # The following statement really wants to run outside of a transaction,
    # so I have to use the raw_connection stuff to escape SQLAlchemy's
    # automated transaction management.

    # See enumeration type information
    # select enumtypid, typname, enumlabel from pg_enum join pg_type on pg_type.oid = pg_enum.enumtypid order by enumtypid, enumlabel;

    c = db_engine().raw_connection()
    cursor = c.cursor()
    cursor.execute("COMMIT") # Leave any pending transaction
    try:
        # Will fail on duplicates
        sql = "ALTER TYPE {} ADD VALUE '{}'".format(enumeration.db_type().impl.name, symbol.value)
        cursor.execute(sql)
    except Exception as ex:
        mainlog.info("Tried " + sql)
        mainlog.error(ex)
    cursor.close()
    c.close()
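
A minimal usage sketch, assuming db_engine is the project's engine factory, RoleType (seen in the alter_data examples) is a DeclEnum, and RoleType.view_prices is a hypothetical newly added symbol:

extend_enumeration(db_engine, RoleType, RoleType.view_prices)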
Example #21
    def load_database_param(self):

        # This will raise exceptions if connection fails.

        url = self.database_url_source
        mainlog.debug(
            "Loading DB connection string from server at '{}'".format(url))
        response = urlopen(url, timeout=5)
        db_url = response.read().decode('ascii')
        mainlog.debug("Connection string is ({})".format(db_url))

        if ',' in db_url:
            db_url = db_url.split(',')

        if db_url != self.base_configuration['Database']['url']:
            mainlog.debug("Replacing old DB url")
            # The DB url advertised by the server always takes
            # priority

            self.base_configuration['Database']['url'] = db_url
            mainlog.info("The DB url has changed, so I save it locally.")
            self.save()
Example #22
def load_configuration(config_file=None, config_spec='config-check.cfg'):
    """ Figure out the configuration by all possible means.

    :param config_file:
    :param config_spec:
    :return:
    """
    # The spec file is always loaded from the resource directory
    global configuration

    mainlog.debug("load_configuration : config_file is {}".format(config_file))
    spec = os.path.join(resource_dir, config_spec)
    configuration.load_version()

    if config_file:
        p = path_to_config(config_file)

        if not os.path.exists(p):
            shutil.copy(os.path.join(resource_dir, 'config.cfg'),
                        os.path.join(get_data_dir(), 'config.cfg'))

        configuration.load(p, spec)
    else:
        if not os.path.exists(get_data_dir()):
            os.mkdir(get_data_dir())

        if not os.path.exists(os.path.join(get_data_dir(), 'config.cfg')):
            mainlog.info("Creating a default configuration file")
            shutil.copy(os.path.join(resource_dir, 'config.cfg'),
                        os.path.join(get_data_dir(), 'config.cfg'))

        configuration.load(os.path.join(get_data_dir(), 'config.cfg'), spec)
        mainlog.debug("Loading backup configuration")
        configuration.load_backup(os.path.join(resource_dir, 'config.cfg'),
                                  spec)

        configuration.load_network_param()

    mainlog.debug("Done with load configuration")
Example #23
def check_active_postgres_connections():
    # I need DB url because I didn't find a way to get that information
    # from the session(), connection()...

    # Rage hard, maxi vinyl with spoken words (super rare)

    mainlog.info(
        "check_active_postgres_connections : Trying to connect to the database"
    )

    try:
        r = session().connection().execute(
            "SELECT count(*) from pg_stat_activity").scalar()
        mainlog.debug("Trying to connect to the database - 2")
        session().commit()
        mainlog.debug("Trying to connect to the database - 3")
        return r
    except exc.OperationalError as ex:
        mainlog.exception(ex)
        mainlog.error("Can't query the database !!! Is it connected ?")

    return False
Example #24
def check_db_connection(db_url):
    # I need DB url because I didn't find a way to get that information
    # from the session(), connection()...

    import subprocess
    import re

    mainlog.info("check_db_connection : Trying to connect to the database")
    try:
        session().connection().execute(
            "SELECT count(*) from {}.employees".format(DATABASE_SCHEMA))
        session().commit()
        return True
    except Exception as ex:
        mainlog.error("Can't query the database !!! Is it connected ?")

        ret = str(ex)

        # mainlog.exception(ex)
        mainlog.info("I'll try a ping")
        server_host = re.search("(@.*:)",
                                db_url).groups()[0].replace("@", "").replace(
                                    ":", "")

        try:
            # Decode so that the "Reply" check below works on text, not bytes
            r = subprocess.Popen("\\Windows\\System32\\ping -n 1 " +
                                 server_host,
                                 stdout=PIPE,
                                 shell=False).stdout.read().decode('ascii', 'replace')
            mainlog.info("Ping to {} result is : {}".format(server_host, r))

            ret += "<br/><br/>"
            if "Reply" in r:
                mainlog.info(
                    "Ping was successful, the DB server machine seems up")
                ret += _(
                    " A ping was successful (so host is up, database is down)")
            else:
                ret += _(" A ping was not successful (so host is down)")

            return ret
        except Exception as ex:
            #mainlog.error(str(ex,'ASCII','replace'))
            return _("Ping failed, the host is down.")
Example #25
def documents_copy_recurring(src_dir, dest_dir, base_configuration):
    """ This is a copy function :-) But it is optimized for recurring
    copy : copying several time from the same koi to the same
    destination. To do that it only copies file from the source
    directory if they are different than those in the backup
    directory
    """

    codename = base_configuration.get("Globals", "codename")
    mainlog.info(u"Copying documents from {} to {}".format(src_dir, dest_dir))

    if not os.path.exists(src_dir):
        raise Exception("Source directory for documents doesn't exist")

    if not os.path.exists(dest_dir):
        raise Exception("Destination directory for documents doesn't exist")

    if os.path.abspath(src_dir) == os.path.abspath(dest_dir):
        raise Exception("The source and destination directories are the same")

    total_size = total_files = 0

    nb_files = 0
    for fn in glob.glob(os.path.join(src_dir, codename + "_*")):
        nb_files += 1
        # mainlog.info("Eval {}".format(type(fn)))
        # enc = fn.encode('ascii', 'backslashreplace')
        # mainlog.info("enc")
        # mainlog.info(u"Evaluating {}".format(enc))
        dest = os.path.join(dest_dir, os.path.basename(fn))

        # Copy only if necessary
        if not os.path.exists(dest) or (os.path.getsize(fn) !=
                                        os.path.getsize(dest)):
            mainlog.info(u"Copying {}".format(
                fn.encode(sys.getfilesystemencoding(), 'ignore')))

            # Backup is critical so it must fail if something is
            # wrong. Therefore, no exception handling right now

            # Also, I let Python take care of the filename encoding issues

            copyfile(fn, dest)

            total_size += os.path.getsize(fn)
            total_files += 1

    mainlog.info("Scanned {} files".format(nb_files))
    return total_files, total_size, nb_files
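
A minimal usage sketch with placeholder directories; backup_procedure (Example #17) shows the real paths coming from the configuration:

copied, copied_bytes, scanned = documents_copy_recurring(
    "/var/koi/documents", "/mnt/backup/documents", configuration)
mainlog.info("{} of {} files copied ({} bytes)".format(copied, scanned, copied_bytes))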
Example #26
def dump_and_zip_database(base_configuration):

    # login, password, dbname, host, port = _extract_db_params_from_url(base_configuration.get("Backup","db_url"))

    error = False
    if not base_configuration.get('Commands', 'pg_dump_cmd'):
        mainlog.error("Missing Commands/pg_dump_cmd in configuration file")
        error = True
    if not base_configuration.get('Backup', 'db_url'):
        mainlog.error("Missing Backup/db_url in configuration file")
        error = True
    if not base_configuration.get('Backup', 'prefix'):
        mainlog.error("Missing Backup/prefix in configuration file")
        error = True
    if not base_configuration.get('Backup', 'backup_directory'):
        mainlog.error("Missing Backup/backup_directory in configuration file")
        error = True

    if error:
        raise Exception("Too many errors, I stop here")

    filename = base_configuration.get("Backup", "prefix") + "_" + str(
        datetime.date.today()) + ".pgbackup"
    final_destination = os.path.join(
        base_configuration.get('Backup', 'backup_directory'), filename)

    # custom format will store more and is compressed
    command1 = [
        base_configuration.get('Commands', 'pg_dump_cmd'), '--format=custom',
        '--file={}'.format(final_destination),
        base_configuration.get("Backup", "db_url")
    ]

    mainlog.info("Creating backup : {}".format(" ".join(command1)))
    p1 = subprocess.Popen(command1)
    p1.communicate()
    p1.wait()

    if p1.returncode != 0:
        raise Exception("Unable to run the backup command")
    else:
        mainlog.info("Backup seems fine")

    bytes = os.path.getsize(final_destination)
    mainlog.info("Backup complete, {} bytes saved".format(bytes))

    return final_destination, bytes
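
A minimal usage sketch; configuration is assumed to be already loaded:

backup_path, backup_size = dump_and_zip_database(configuration)
mainlog.info("Backup written to {} ({} bytes)".format(backup_path, backup_size))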
Example #27
def create_demo_database( nb_orders=50):
    mainlog.setLevel(logging.DEBUG)

    create_blank_database(configuration.get("Database", "admin_url"),
                          configuration.get("Database", "url"))
    dao = DAO()
    dao.set_session(session())

    random.seed(42)

    employees_texts  = ["Alfred Hitchcok", "Rocky Balboa", "Donald Knuth", "Ray Parker Junior", "Henry Mancini", "Nivek Ogre",
                 "Johnny Cash", "Sarah Connor"]

    nb_employees = len(employees_texts)

    for name in employees_texts:
        e = dao.employee_dao.make(name)
        e.login = (name.split(' ')[0][0:2] + name.split(' ')[1][0:2]).lower()
        e.set_encrypted_password(e.login)
        e.roles = RoleType.symbols()
        dao.employee_dao.save(e)


    for name in customers_texts:
        customer = dao.customer_dao.make(name)
        customer.address1 = u"Square Niklaus Wirth" + chr(233)
        customer.country = u"Pakistan" + chr(233)
        customer.phone = u"+494 0412 32 32 6654"
        customer.email = u"*****@*****.**"
        dao.customer_dao.save(customer)



    for name, short_name in operations_texts:
        opdef_op = dao.operation_definition_dao.make()
        opdef_op.short_id = short_name
        opdef_op.description = name
        opdef_op.imputable = True
        opdef_op.on_order = False
        opdef_op.on_operation = True
        opdef_op.XXXcost = random.random() * 50 + 50.0

        period = OperationDefinitionPeriod()
        period.start_date = date(2010, 1, 1)
        period.cost = random.randint(30, 60)
        dao.operation_definition_dao.add_period(period, opdef_op)
        dao.operation_definition_dao.save(opdef_op)

    customers = session().query(Customer).all()

    for i in range(nb_orders):
        order = dao.order_dao.make(u"Test order", customer)
        order.state = OrderStatusType.preorder_definition  # OrderStatusType.order_ready_for_production
        order.customer = customers[random.randint(0, len(customers) - 1)]
        order.creation_date = (world_begin + timedelta(days=random.randint(0, max_past_days))).date()
        dao.order_dao.save(order)

    for order in session().query(Order).all():

        position = 1
        for i in range(random.randint(3, 10)):
            order_part = dao.order_part_dao.make(order)

            texts = ["For part {}".format(random.randint(100, 999)),
                     "As plan {}".format(str(uuid.uuid4()).upper()[0:6]),
                     "Customer ref #{}".format(str(uuid.uuid4()).upper()[0:6]),
                     "#1 Bare Bright Copper Wire",
                     "#1 Copper Tubing",
                     "#1 Flashing Copper",
                     "#2 Copper Tubing",
                     "#2/3 Mix Copper",
                     "#3 Copper with Tar",
                     "#3 Roofing Copper",
                     "17-4 Stainless Steel",
                     "300 Series Stainless Steel",
                     "400 Series Stainless Steel",
                     "500/750 Insulated Cable",
                     "ACR",
                     "ACR Ends",
                     "AL Extrusion",
                     "AL Thermopane",
                     "AL/ Copper Rads w/Iron",
                     "AL/Copper Cutoffs",
                     "Alternators",
                     "Aluminum #3",
                     "Aluminum 6061",
                     "Aluminum 6063",
                     "Aluminum Boat",
                     "Aluminum Breakage",
                     "Aluminum Bumpers",
                     "Aluminum Cans",
                     "Aluminum Clips",
                     "Aluminum Copper Coil",
                     "Aluminum Copper Radiators",
                     "Aluminum Diesel Tank",
                     "Aluminum Engine Block",
                     "Aluminum Litho",
                     "Aluminum Radiators",
                     "Aluminum Rims",
                     "Aluminum Scrap",
                     "Aluminum Siding",
                     "Aluminum Thermo-Pane/Break",
                     "Aluminum Transformers",
                     "Aluminum Turnings",
                     "Aluminum Wire w/Steel",
                     "Ballasts",
                     "Bare Bright Copper",
                     "Brass Hair Wire",
                     "Brass Heater Cores",
                     "Brass Pipe",
                     "Brass Radiators",
                     "Brass Scrap",
                     "Brass Shells",
                     "Brass Turnings",
                     "Bronze",
                     "Bronze Turnings",
                     "Burnt Copper",
                     "Car/Truck Batteries",
                     "Carbide",
                     "Cast Aluminum",
                     "Catalytic Converters",
                     "CATV Wire",
                     "Christmas Lights",
                     "Circuit Breakers",
                     "Clean ACR",
                     "Clean AL Wire",
                     "Clean AL/Copper Fin",
                     "Clean Brass Radiators",
                     "Clean Brass Turnings",
                     "Clean Roofing Copper",
                     "Cobalt",
                     "Communications Wire",
                     "Composition Scrap",
                     "Compressors",
                     "Copper Scrap",
                     "Copper Transformers",
                     "Copper Turnings",
                     "Copper Yokes",
                     "Die Cast",
                     "Dirty ACR",
                     "Dirty AL Extrusion",
                     "Dirty AL Radiators",
                     "Dirty AL/Copper Fin",
                     "Dirty Aluminum Turnings",
                     "Dirty Brass",
                     "Dirty Brass Radiators",
                     "Dirty Roofing Copper",
                     "Double Insulated Cable",
                     "EC Wire",
                     "Electric Motors (Aluminum)",
                     "Electric Motors (Copper)",
                     "Elevator Wire",
                     "Enameled Copper",
                     "F 75",
                     "Fire Wire",
                     "Forktruck Battery",
                     "FSX 414",
                     "Fuses",
                     "Gold",
                     "Hastelloy Solids",
                     "Hastelloy Turnings",
                     "Heliax Wire",
                     "High Speed Steel",
                     "Housewire",
                     "Inconel",
                     "Inconel 792",
                     "Inconel 800",
                     "Inconel 825",
                     "Insulated Aluminum Wire",
                     "Insulated Copper Cable",
                     "Insulated Copper Wire",
                     "Insulated Steel BX",
                     "Invar",
                     "Junkshop Extrusion",
                     "Kovar",
                     "Lead",
                     "Lead Batteries",
                     "Lead Coated Copper",
                     "Lead Shot",
                     "Lead Wheel Weights",
                     "Light Copper",
                     "MarM247",
                     "Meatballs (Electric Motors)",
                     "Monel",
                     "Ni-Cad Batteries",
                     "Nickel",
                     "Non Magnetic Stainless Steel",
                     "Old Sheet Aluminum",
                     "Painted Aluminum",
                     "Pewter",
                     "Platinum",
                     "Plumbers Brass",
                     "Prepared Aluminum",
                     "Red Brass",
                     "Refined Rebrass & Copper",
                     "Rod Brass",
                     "Rod Brass Turnings",
                     "Romex® Wire",
                     "Sealed Units",
                     "Semi-Red Brass",
                     "Sheet Aluminum",
                     "Silver",
                     "Silver Plated Copper",
                     "Solid Core Heliax",
                     "Stainless Steel",
                     "Stainless Steel Breakage",
                     "Stainless Steel Heatsinks",
                     "Stainless Steel Kegs",
                     "Stainless Steel Sinks",
                     "Stainless Turnings",
                     "Starters",
                     "Steel BX",
                     "Steel Case Batteries",
                     "THHN Wire",
                     "Tin Babbit",
                     "Tin Coated Copper",
                     "Tin Insulated Copper Wire",
                     "Unclean Brass Radiators",
                     "Wire Scrap",
                     "Wiring Harness",
                     "Yellow Brass",
                     "Zinc",
                     "Zorba",
                     "#1 Heavy Melting Steel",
                     "#1 HMS",
                     "#1 Prepared",
                     "#1 Steel",
                     "#2 Heavy Melting Steel",
                     "#2 HMS",
                     "#2 Prepared",
                     "Automobiles",
                     "Busheling",
                     "Car w/Tires",
                     "Cast Iron",
                     "Complete Car",
                     "Crushed Cars",
                     "Dishwashers",
                     "Dry Automobile",
                     "Dryers",
                     "Incomplete Car",
                     "Light Iron",
                     "Machine Shop Turning/Iron Borings",
                     "Plate & Structural Steel",
                     "Refrigerators",
                     "Scrap Iron",
                     "Sheet Iron",
                     "Shreddable Steel",
                     "Steel Shavings",
                     "Tin",
                     "Uncleaned Auto Cast",
                     "Unprepared Cast Iron",
                     "Unprepared HMS",
                     "Unprepared P&S",
                     "Washing Machines",
                     "Water Heaters",
                     "Wet Automobile",
                     "Back Panels",
                     "Backup Batteries",
                     "Cellphones",
                     "Computer Wire",
                     "CPU Chips",
                     "CRT",
                     "Empty PC Servers",
                     "Hard Drive Boards",
                     "Hard Drives",
                     "Hard Drives without Boards",
                     "Ink Cartridges",
                     "Keyboards",
                     "Laptops",
                     "LCD Monitors (not working)",
                     "LCD Monitors (working)",
                     "Low Grade Boards",
                     "Mainframes",
                     "Memory",
                     "Mice",
                     "Motherboards",
                     "Non-Green PC Board",
                     "PC Board with Steel",
                     "PC Boards",
                     "PC Tower",
                     "Power Supplies",
                     "Printers/Fax Machines",
                     "Servers",
                     "Speakers",
                     "Telecom Equipment"]
            order_part.description = random.choice(texts)
            order_part.position = position
            order_part.priority = random.randint(1, 5)
            position += 1
            order_part.qty = random.randint(4, 4+10)
            order_part.sell_price = random.randint(100, 200)
            dao.order_part_dao.save(order_part)

            pf = dao.production_file_dao.make()
            pf.order_part = order_part
            order_part.production_file = [pf]
            session().add(pf)
            session().flush()

    operation_definitions = session().query(OperationDefinition).all()

    for pf in session().query(ProductionFile).all():
        for i in range(random.randint(3, 10)):
            operation = dao.operation_dao.make()
            operation.production_file = pf

            begin = random.randint(0, len(lorem) - 5)
            end = begin + min(6, random.randint(begin, len(lorem) - 1))
            operation.description = " ".join(lorem[begin:end])
            operation.operation_model = random.choice(operation_definitions)
            operation.planned_hours = float(random.randint(1, 20)) / 5 # per unit
            session().add(operation)

    session().commit()

    for order in session().query(Order).all():
        dao.order_dao.recompute_position_labels(order)
        session().commit()

    tasks = []

    for operation in session().query(Operation).all():
        task = TaskOnOperation()
        task.operation_id = operation.operation_id
        session().add(task)
        session().flush()
        tasks.append(task)


    order_schedules = dict()

    for order in session().query(Order).all():

        mainlog.info("populating order")

        order_start = order.creation_date
        central_clock.set_now_function(lambda: datetime( order_start.year, order_start.month, order_start.day))

        if True or random.randint(0,10) > 1:
            # a production order
            order_end = order_start + timedelta(days=(30 + order.order_id % 20))

            mainlog.debug("Interval {} {}".format( order_start, order_end))

            dao.order_dao.change_order_state(order.order_id, OrderStatusType.preorder_definition)
            dao.order_dao.change_order_state(order.order_id, OrderStatusType.order_ready_for_production)
            order_schedules[order.order_id] = (order_start, order_end)
        else:
            # a preorder

            dao.order_dao.change_order_state(order.order_id, OrderStatusType.preorder_definition)
            dao.order_dao.change_order_state(order.order_id, OrderStatusType.preorder_sent)

    mainlog.info("There are {} tasks".format(len(tasks)))
    mainlog.info("There are {} order scheduled for work".format(len(order_schedules)))

    # _make_tar(TaskActionReportType.start_task, datetime.now(), e, task)

    employees = session().query(Employee).all()

    # Build the list of tasks available on each day
    tasks_on_day = dict()
    for task in tasks:
        order = task.operation.production_file.order_part.order
        if order.order_id in order_schedules:
            order_start, order_end = order_schedules[order.order_id]

            for d in daterange( order_start, order_end):
                if d not in tasks_on_day:
                    tasks_on_day[d] = []
                tasks_on_day[d].append(task)


    for day in range( int(max_past_days)):
        d = world_begin + timedelta(days=2 + day)
        d = date( d.year, d.month, d.day)

        if d.weekday() not in (5, 6) and d in tasks_on_day:

            employees_with_work = []
            central_clock.set_now_function(lambda: datetime(d.year,d.month,d.day))

            # tasks we can work on

            workable_tasks = tasks_on_day[d]

            mainlog.debug("{} workable tasks".format(len(workable_tasks)))

            if workable_tasks:
                # Now put actual work on those tasks
                for employee in employees:
                    # Each employee may or may not work
                    if random.randint(0,10) > 2:

                        total_duration = 0

                        while total_duration < 8:
                            task = random.choice(workable_tasks)
                            duration = float(random.randint(1,4)) + float(random.randint(0,4)) / 4.0
                            tt = _make_timetrack( task.task_id, employee.employee_id,
                                                  d,
                                                  duration)
                            session().add(tt)

                            total_duration += duration


                        dts = DayTimeSynthesis()
                        dts.employee_id = tt.employee_id
                        dts.day = d
                        dts.presence_time = total_duration
                        session().add( dts)
                    else:
                        from koi.people_admin.people_admin_mapping import DayEventType, DayEvent
                        from koi.people_admin.people_admin_service import people_admin_service

                        de = DayEvent()
                        de.employee_id = employee.employee_id
                        de.event_type = random.choice(DayEventType.symbols())
                        people_admin_service.set_event_on_days( de, [ (d, 1) ])

    for i in range(3):
        for order in session().query(Order).filter(Order.state == OrderStatusType.order_ready_for_production).all():
            parts_ids_quantities = dict()

            for order_part in order.parts:
                mainlog.debug("qex = {} / {}".format(order_part.tex2, order_part.qty))
                if order_part.tex2 < order_part.qty and order_part.total_hours:
                    parts_ids_quantities[order_part.order_part_id] = random.randint(1, order_part.qty - order_part.tex2)

            if parts_ids_quantities:
                mainlog.debug("Creating delivery slip")
                order_start, order_end = order_schedules[order.order_id]

                for dsp in session().query(DeliverySlipPart).filter(DeliverySlipPart.order_part_id.in_(parts_ids_quantities.keys())).all():
                    if dsp.delivery_slip.creation > datetime( order_start.year, order_start.month, order_start.day):
                        order_start = dsp.delivery_slip.creation.date()

                mainlog.debug("{} {}".format( type(order_end), type(order_start)))
                mainlog.debug("{} {}".format( order_start, order_end))

                days_between = (order_end - order_start).days
                if days_between > 0:
                    the_now = order_start + timedelta( days=random.randint(1, 1 + int(days_between / 2)))
                    mainlog.debug("Adding slips to an order on {}".format(the_now))
                    the_now = datetime( the_now.year, the_now.month, the_now.day) + timedelta(seconds=random.randint(1,10000))

                    central_clock.set_now_function( lambda : the_now)

                    dao.delivery_slip_part_dao.make_delivery_slip_for_order( order.order_id, parts_ids_quantities,
                        the_now, False)

            session().commit()


    for order in session().query(Order).filter(Order.state == OrderStatusType.order_ready_for_production).all():
        parts_ids_quantities = dict()

        for order_part in order.parts:
            mainlog.debug("qex = {} / {}".format(order_part.tex2, order_part.qty))
            if order_part.tex2 < order_part.qty and order_part.total_hours:
                parts_ids_quantities[order_part.order_part_id] = order_part.qty - order_part.tex2


        if parts_ids_quantities:
            mainlog.debug("Creating last delivery slip")

            the_now = world_begin + timedelta(days=max_past_days + random.randint(1,grace_period))
            mainlog.debug("Adding slips to an order on {}".format(the_now))
            the_now = datetime( the_now.year, the_now.month, the_now.day) + timedelta(seconds=random.randint(1,10000))

            central_clock.set_now_function( lambda : the_now)

            dao.delivery_slip_part_dao.make_delivery_slip_for_order( order.order_id, parts_ids_quantities,
                the_now, False)


    # Now we adapt the sell price to match the costs

    TWO_PLACES = decimal.Decimal(10) ** -2
    for order_part in session().query(OrderPart).all():
        order_part.sell_price = decimal.Decimal(
            (1.0 + random.random()) * dao.order_part_dao.value_work_on_order_part_up_to_date(
                order_part.order_part_id, date.today())).quantize(TWO_PLACES)

    mainlog.info("Not completed orders = {}".format(
        session().query(Order).filter(Order.state != OrderStatusType.order_completed).count()))

    mainlog.info("Not completed parts = {}".format(
        session().query(OrderPart).filter(OrderPart.state != OrderPartStateType.completed).count()))

    mainlog.info("Maximum completion date for order parts = {}".format(
        session().query( func.max( OrderPart.completed_date)).scalar()))
Example #28
    def redo(self):
        mainlog.info("Redo")
        self._proxy_model.setData(self._index, self._new_value)
Example #29
    def undo(self):
        mainlog.info("Undo")
        self._proxy_model.setData(self._index, self._old_value)
Example #30
parser = argparse.ArgumentParser()
parser.add_argument('--reset-database',
                    action='store_const',
                    const=True,
                    help='Reset the database and return.')
parser.add_argument('--create-root-account',
                    action='store_const',
                    const=True,
                    help='Create a root account. ')
parser.add_argument('--psql', default='psql.exe', help='Full path to psql')
parser.add_argument('--configure-zip', help='Path to zip file')
parser.add_argument('--host', help='Host')

if __name__ == "__main__":

    args = parser.parse_args()

    if args.configure_zip:
        if args.host:
            mainlog.info("Configuring zip at {} with host {}".format(
                args.configure_zip, args.host))
            configure_zip(args.configure_zip, args.host)
            sys.exit(0)
        else:
            mainlog.error("Missing host")
            sys.exit(1)

    app = QApplication(sys.argv)
    window = MainWindow()
    mainlog.addHandler(LoggerHandler(window))

    # d = AskWindowsShare(None)
    # d.exec_()

    if args.reset_database:
        window.create_database()