Example #1
    def svn_update(self):
        source = File.new_instance(self.directory,
                                   self.directory.name.replace("-",
                                                               "_")).abspath
        tests = File.new_instance(self.directory, "tests").abspath

        result = self.local("git", [self.git, "checkout", "dev"])
        if File.new_instance(source, ".svn").exists:
            result = self.local("svn",
                                [self.svn, "update", "--accept", "p", source])
            result = self.local("svn",
                                [self.svn, "commit", source, "-m", "auto"])
            result = self.local("svn",
                                [self.svn, "update", "--accept", "p", tests])
            result = self.local("svn",
                                [self.svn, "commit", tests, "-m", "auto"])
        result = self.local("git", [self.git, "add", "-A"])
        process, stdout, stderr = self.local(
            "git", [self.git, "commit", "-m", "updates from other projects"],
            raise_on_error=False)
        if "nothing to commit, working directory clean" in stdout or process.returncode == 0:
            pass
        else:
            Log.error("not expected {{result}}", result=result)
        result = self.local("git", [self.git, "push", "origin", "dev"])
Example #2
    def _load_functions(self):
        global _load_extension_warning_sent
        library_loc = File.new_instance(sys.modules[__name__].__file__,
                                        "../..")
        full_path = File.new_instance(
            library_loc, "vendor/sqlite/libsqlitefunctions.so").abspath
        try:
            trace = get_stacktrace(0)[0]
            if self.upgrade:
                if os.name == "nt":
                    file = File.new_instance(
                        trace["file"],
                        "../../vendor/sqlite/libsqlitefunctions.so")
                else:
                    file = File.new_instance(
                        trace["file"],
                        "../../vendor/sqlite/libsqlitefunctions")

                full_path = file.abspath
                self.db.enable_load_extension(True)
                self.db.execute(
                    text(SQL_SELECT + "load_extension" +
                         sql_iso(quote_value(full_path))))
        except Exception as e:
            if not _load_extension_warning_sent:
                _load_extension_warning_sent = True
                Log.warning(
                    "Could not load {{file}}, doing without. (no SQRT for you!)",
                    file=full_path,
                    cause=e,
                )
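The try/except above wraps sqlite's loadable-extension mechanism. A minimal standalone sketch of the same call using only the standard sqlite3 module; the .so path is a placeholder, and enable_load_extension is an assumption about the build (CPython must be compiled with loadable-extension support for it to exist):

import sqlite3

db = sqlite3.connect(":memory:")
try:
    # Missing on builds compiled without loadable-extension support.
    db.enable_load_extension(True)
    # Placeholder path; point it at a real libsqlitefunctions build.
    db.execute("SELECT load_extension(?)", ("vendor/sqlite/libsqlitefunctions.so",))
except Exception as e:
    # Same fallback as above: warn and carry on without the extension.
    print("could not load extension, doing without:", e)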
Example #3
def _upgrade():
    global _upgraded
    global _sqlite3

    try:
        import sys
        import platform

        if "windows" in platform.system().lower():
            original_dll = File.new_instance(sys.exec_prefix,
                                             "dlls/sqlite3.dll")
            if platform.architecture()[0] == "32bit":
                source_dll = File(
                    "vendor/pyLibrary/vendor/sqlite/sqlite3_32.dll")
            else:
                source_dll = File(
                    "vendor/pyLibrary/vendor/sqlite/sqlite3_64.dll")

            if not all(a == b for a, b in zip_longest(
                    source_dll.read_bytes(), original_dll.read_bytes())):
                original_dll.backup()
                File.copy(source_dll, original_dll)
        else:
            pass
    except Exception as e:
        Log.warning("could not upgrade python's sqlite", cause=e)

    import sqlite3 as _sqlite3

    _ = _sqlite3
    _upgraded = True
Example #4
    def test_read_home(self):
        file = "~/___test_file.json"
        source = File.new_instance(
            get_stacktrace(0)[0]["file"], "../resources/simple.json")
        File.copy(File(source), File(file))
        content = mo_json_config.get("file:///" + file)

        try:
            self.assertEqual(content, {"test_key": "test_value"})
        finally:
            File(file).delete()
Example #5
def _upgrade():
    global _upgraded
    _upgraded = True
    try:
        import sys

        sqlite_dll = File.new_instance(sys.exec_prefix, "dlls/sqlite3.dll")
        python_dll = File("pyLibrary/vendor/sqlite/sqlite3.dll")
        if python_dll.read_bytes() != sqlite_dll.read_bytes():
            backup = sqlite_dll.backup()
            File.copy(python_dll, sqlite_dll)
    except Exception as e:
        Log.warning("could not upgrade python's sqlite", cause=e)
Example #7
    def _worker(self, please_stop):
        global _load_extension_warning_sent

        if DEBUG:
            Log.note("Sqlite version {{version}}", version=sqlite3.sqlite_version)
        if Sqlite.canonical:
            self.db = Sqlite.canonical
        else:
            self.db = sqlite3.connect(coalesce(self.filename, ':memory:'))

            library_loc = File.new_instance(sys.modules[__name__].__file__, "../..")
            full_path = File.new_instance(library_loc, "vendor/sqlite/libsqlitefunctions.so").abspath
            try:
                trace = extract_stack(0)[0]
                file = File.new_instance(trace["file"], "../../vendor/sqlite/libsqlitefunctions.so")
                full_path = file.abspath
                self.db.enable_load_extension(True)
                self.db.execute("SELECT load_extension(" + self.quote_value(full_path) + ")")
            except Exception as e:
                if not _load_extension_warning_sent:
                    _load_extension_warning_sent = True
                    Log.warning("Could not load {{file}}}, doing without. (no SQRT for you!)", file=full_path, cause=e)
Example #8
    def last_deploy(self):
        setup_file = File.new_instance(self.directory, 'setup.py')
        if not setup_file.exists:
            Log.note("Not a pypi project: {{dir}}", dir=self.directory)
            return Date.today()
        setup = setup_file.read()
        version = json2value(strings.between(setup, "version=",
                                             ",")).split(".")[-1]
        date = unicode2Date(version, format="%y%j")
        Log.note("PyPi last deployed {{date|datetime}}",
                 date=date,
                 dir=self.directory)
        return date
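A quick check of the "%y%j" stamp the version parsing above depends on: the last version component is a two-digit year followed by a day-of-year, so the standard library can round-trip it (the literal below is illustrative):

from datetime import datetime

# "19032" decodes as year 2019, day 32, i.e. February 1st 2019.
assert datetime.strptime("19032", "%y%j") == datetime(2019, 2, 1)
# Deploys stamp versions the same way, from the current UTC date.
stamp = datetime.utcnow().strftime("%y%j")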
Example #9
def _read_file(filename):
    try:
        file = File.new_instance(STATIC_DIRECTORY, filename)
        if not file.abspath.startswith(STATIC_DIRECTORY.abspath):
            return "", 404, "text/html"

        Log.note("Read {{file}}", file=file.abspath)
        mimetype, encoding = mimetypes.guess_type(file.extension)
        if not mimetype:
            mimetype = "text/html"
        return file.read_bytes(), 200, mimetype
    except Exception:
        return "", 404, "text/html"
Example #10
def _upgrade():
    global _upgraded
    global sqlite3

    try:
        import sys
        Log.error("Fix to work with 64bit windows too")
        original_dll = File.new_instance(sys.exec_prefix, "dlls/sqlite3.dll")
        source_dll = File("vendor/pyLibrary/vendor/sqlite/sqlite3.dll")
        if not all(a == b for a, b in zip_longest(source_dll.read_bytes(),
                                                  original_dll.read_bytes())):
            backup = original_dll.backup()
            File.copy(source_dll, original_dll)
    except Exception as e:
        Log.warning("could not upgrade python's sqlite", cause=e)

    import sqlite3
    _ = sqlite3
    _upgraded = True
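A note on the zip_longest() comparison used above: iterating bytes yields integers, and padding the shorter stream with None makes files of different lengths compare unequal, so the expression behaves like plain equality of the two byte strings:

from itertools import zip_longest

a, b = b"abc", b"abcd"
# The None padding surfaces the length mismatch as an unequal pair.
assert not all(x == y for x, y in zip_longest(a, b))
# Direct equality is the simpler equivalent of the whole expression.
assert (a == b) == all(x == y for x, y in zip_longest(a, b))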
Example #11
    def __init__(self, _file):
        """
        file - USES FILE FOR PERSISTENCE
        """
        self.file = File.new_instance(_file)
        self.lock = Lock("lock for persistent queue using file " +
                         self.file.name)
        self.please_stop = Signal()
        self.db = Data()
        self.pending = []

        if self.file.exists:
            for line in self.file:
                with suppress_exception:
                    delta = mo_json.json2value(line)
                    apply_delta(self.db, delta)
            if self.db.status.start == None:  # HAPPENS WHEN ONLY ADDED TO QUEUE, THEN CRASH
                self.db.status.start = 0
            self.start = self.db.status.start

            # SCRUB LOST VALUES
            lost = 0
            for k in self.db.keys():
                with suppress_exception:
                    if k != "status" and int(k) < self.start:
                        self.db[k] = None
                        lost += 1
                # HAPPENS FOR self.db.status, BUT MAYBE OTHER PROPERTIES TOO
            if lost:
                Log.warning("queue file had {{num}} items lost", num=lost)

            if DEBUG:
                Log.note("Persistent queue {{name}} found with {{num}} items",
                         name=self.file.abspath,
                         num=len(self))
        else:
            self.db.status = Data(start=0, end=0)
            self.start = self.db.status.start
            if DEBUG:
                Log.note("New persistent queue {{name}}",
                         name=self.file.abspath)
Example #12
    def __init__(self, _file):
        """
        file - USES FILE FOR PERSISTENCE
        """
        self.file = File.new_instance(_file)
        self.lock = Lock("lock for persistent queue using file " + self.file.name)
        self.please_stop = Signal()
        self.db = Data()
        self.pending = []

        if self.file.exists:
            for line in self.file:
                with suppress_exception:
                    delta = mo_json.json2value(line)
                    apply_delta(self.db, delta)
            if self.db.status.start == None:  # HAPPENS WHEN ONLY ADDED TO QUEUE, THEN CRASH
                self.db.status.start = 0
            self.start = self.db.status.start

            # SCRUB LOST VALUES
            lost = 0
            for k in self.db.keys():
                with suppress_exception:
                    if k!="status" and int(k) < self.start:
                        self.db[k] = None
                        lost += 1
                  # HAPPENS FOR self.db.status, BUT MAYBE OTHER PROPERTIES TOO
            if lost:
                Log.warning("queue file had {{num}} items lost",  num= lost)

            DEBUG and Log.note("Persistent queue {{name}} found with {{num}} items", name=self.file.abspath, num=len(self))
        else:
            self.db.status = Data(
                start=0,
                end=0
            )
            self.start = self.db.status.start
            DEBUG and Log.note("New persistent queue {{name}}", name=self.file.abspath)
Example #13
from jx_bigquery import bigquery
from jx_mysql.mysql import MySQL, sql_query
from jx_mysql.mysql_snowflake_extractor import MySqlSnowflakeExtractor
from jx_python import jx
from mo_files import File
from mo_json import json2value, value2json
from mo_logs import Log, constants, startup, strings
from mo_sql import SQL
from mo_times import Timer
from mo_times.dates import Date
from redis import Redis

from treeherder.config.settings import REDIS_URL

CONFIG_FILE = (File.new_instance(__file__).parent /
               "extract_jobs.json").abspath


class ExtractJobs:
    def run(self, force=False, restart=False, start=None, merge=False):
        try:
            # SETUP LOGGING
            settings = startup.read_settings(filename=CONFIG_FILE)
            constants.set(settings.constants)
            Log.start(settings.debug)

            self.extract(settings, force, restart, start, merge)
        except Exception as e:
            Log.error("could not extract jobs", cause=e)
        finally:
            Log.stop()
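The CONFIG_FILE line above resolves the settings file relative to the module itself rather than the process working directory. A standard-library sketch of the same idiom:

import os

# A sibling file of this module, independent of the caller's cwd.
CONFIG_FILE = os.path.join(
    os.path.dirname(os.path.abspath(__file__)), "extract_jobs.json")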
Example #14
    def _worker(self, please_stop):
        global _load_extension_warning_sent

        if DEBUG:
            Log.note("Sqlite version {{version}}",
                     version=sqlite3.sqlite_version)
        if Sqlite.canonical:
            self.db = Sqlite.canonical
        else:
            self.db = sqlite3.connect(coalesce(self.filename, ':memory:'))

            library_loc = File.new_instance(sys.modules[__name__].__file__,
                                            "../..")
            full_path = File.new_instance(
                library_loc, "vendor/sqlite/libsqlitefunctions.so").abspath
            try:
                trace = extract_stack(0)[0]
                if self.upgrade:
                    if os.name == 'nt':
                        file = File.new_instance(
                            trace["file"],
                            "../../vendor/sqlite/libsqlitefunctions.so")
                    else:
                        file = File.new_instance(
                            trace["file"],
                            "../../vendor/sqlite/libsqlitefunctions")

                    full_path = file.abspath
                    self.db.enable_load_extension(True)
                    self.db.execute("SELECT load_extension(" +
                                    self.quote_value(full_path) + ")")
            except Exception as e:
                if not _load_extension_warning_sent:
                    _load_extension_warning_sent = True
                    Log.warning(
                        "Could not load {{file}}}, doing without. (no SQRT for you!)",
                        file=full_path,
                        cause=e)

        try:
            while not please_stop:
                command, result, signal, trace = self.queue.pop(
                    till=please_stop)

                if DEBUG_INSERT and command.strip().lower().startswith(
                        "insert"):
                    Log.note("Running command\n{{command|indent}}",
                             command=command)
                if DEBUG and not command.strip().lower().startswith("insert"):
                    Log.note("Running command\n{{command|indent}}",
                             command=command)
                with Timer("Run command", debug=DEBUG):
                    if signal is not None:
                        try:
                            curr = self.db.execute(command)
                            self.db.commit()
                            result.meta.format = "table"
                            result.header = [d[0] for d in curr.description
                                             ] if curr.description else None
                            result.data = curr.fetchall()
                            if DEBUG and result.data:
                                text = convert.table2csv(list(result.data))
                                Log.note("Result:\n{{data}}", data=text)
                        except Exception as e:
                            e = Except.wrap(e)
                            result.exception = Except(
                                ERROR,
                                "Problem with\n{{command|indent}}",
                                command=command,
                                cause=e)
                        finally:
                            signal.go()
                    else:
                        try:
                            self.db.execute(command)
                            self.db.commit()
                        except Exception as e:
                            e = Except.wrap(e)
                            e.cause = Except(type=ERROR,
                                             template="Bad call to Sqlite",
                                             trace=trace)
                            Log.warning("Failure to execute", cause=e)

        except Exception as e:
            if not please_stop:
                Log.error("Problem with sql thread", e)
        finally:
            if DEBUG:
                Log.note("Database is closed")
            self.db.commit()
            self.db.close()
Example #15
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Author: Kyle Lahnakoski ([email protected])
#
from __future__ import absolute_import, division, unicode_literals

import flask
from werkzeug.wrappers import Response

from active_data import record_request
from mo_files import File
from mo_logs import Log
from pyLibrary.env.flask_wrappers import cors_wrapper

CONTRIBUTE = File.new_instance("contribute.json").read_bytes()


@cors_wrapper
def send_contribute():
    """
    SEND THE contribute.json
    """
    try:
        record_request(flask.request, None, flask.request.get_data(), None)
        return Response(CONTRIBUTE, status=200)
    except Exception as e:
        Log.error("Could not return contribute.json", cause=e)
Example #16
    def test_grandparent(self):
        f = File.new_instance("tests/temp", "../..")
        self.assertEqual(f.filename, ".")
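The expected value matches ordinary path normalization: climbing two levels out of "tests/temp" lands back at the current directory. A standard-library check of the same arithmetic (the equivalence to mo_files is an assumption, not taken from its source):

import os

assert os.path.normpath(os.path.join("tests/temp", "../..")) == "."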
Example #17
from __future__ import absolute_import, division, unicode_literals

import mimetypes

import flask
from werkzeug.wrappers import Response

from active_data import record_request
from mo_files import File
from mo_logs import Log
from mo_threads.threads import RegisterThread
from mo_times.durations import DAY
from pyLibrary.env.flask_wrappers import cors_wrapper
from pyLibrary.meta import cache

STATIC_DIRECTORY = File.new_instance("active_data/public")


@cors_wrapper
def download(filename):
    """
    DOWNLOAD FILE CONTENTS
    :param filename:  URL PATH
    :return: Response OBJECT WITH FILE CONTENT
    """
    with RegisterThread():
        try:
            record_request(flask.request, None, flask.request.get_data(), None)
            content, status, mimetype = _read_file(filename)
            return Response(content,
                            status=status,
                            mimetype=mimetype)  # assumed completion; the snippet was truncated here
        except Exception as e:
            Log.error("Could not read {{file}}", file=filename, cause=e)
Example #18
def _synch(settings):
    cache = File(settings.local_cache)
    if not cache.exists:
        cache.create()
    settings.destination.directory = settings.destination.directory.trim("/")

    for repo in listwrap(coalesce(settings.repo, settings.repos)):
        Log.alert("Synch {{repo}}", repo=repo.description)
        if not strings.between(repo.source.url, "/", ".git"):
            Log.error("This is not a git reference: {{git_url}}",
                      git_url=repo.source.url)
        name = coalesce(repo.source.name,
                        strings.between(repo.source.url, "/", ".git"))

        if not repo.source.branch:
            Log.note("{{name}} has not branch property", name=name)
        # DO WE HAVE A LOCAL COPY?

        local_repo = File.new_instance(cache, name)
        local_dir = File.new_instance(local_repo, repo.source.directory)
        if not local_repo.exists:
            Process("clone repo", ["git", "clone", repo.source.url, name],
                    cwd=cache,
                    shell=True,
                    debug=DEBUG).join(raise_on_error=True)
        # SWITCH TO BRANCH
        Process("checkout", ["git", "checkout", repo.source.branch],
                cwd=local_repo,
                shell=True,
                debug=DEBUG).join(raise_on_error=True)
        # UPDATE THE LOCAL COPY
        Process("update", ["git", "pull", "origin", repo.source.branch],
                cwd=local_repo,
                shell=True,
                debug=DEBUG).join(raise_on_error=True)
        # GET CURRENT LISTING OUT OF S3

        try:
            connection = connect_to_region(
                region_name=repo.destination.region,
                calling_format="boto.s3.connection.OrdinaryCallingFormat",
                aws_access_key_id=unwrap(repo.destination.aws_access_key_id),
                aws_secret_access_key=unwrap(
                    repo.destination.aws_secret_access_key))
            bucket = connection.get_bucket(repo.destination.bucket)
        except Exception as e:
            Log.error("Problem connecting to {{bucket}}",
                      bucket=repo.destination.bucket,
                      cause=e)

        remote_prefix = repo.destination.directory.strip('/') + "/"
        listing = bucket.list(prefix=remote_prefix)
        metas = {
            m.key[len(remote_prefix):]: Data(key=m.key, etag=m.etag)
            for m in listing
        }
        net_new = []
        Log.note("Look for differences")
        for local_file in local_dir.leaves:
            local_rel_file = local_file.abspath[len(local_dir.abspath):].lstrip("/")
            if "/." in local_rel_file or local_rel_file.startswith("."):
                continue
            local_rel_file = local_rel_file.replace("qb/Qb", "qb/qb")
            remote_file = metas.get(local_rel_file)
            if not repo.force and remote_file:
                if remote_file.etag != md5(local_file):
                    net_new.append(local_file)
            else:
                net_new.append(local_file)

        # SEND DIFFERENCES
        for n in net_new:
            remote_file = join_path(repo.destination.directory,
                                    n.abspath[len(local_dir.abspath):])
            remote_file = remote_file.replace("qb/Qb", "qb/qb")
            try:
                Log.note("upload {{file}} ({{type}})",
                         file=remote_file,
                         type=n.mime_type)
                storage = bucket.new_key(remote_file)
                storage.content_type = n.mime_type
                storage.set_contents_from_string(n.read_bytes())
                storage.set_acl('public-read')
            except Exception as e:
                Log.warning("can not upload {{file}} ({{type}})",
                            file=remote_file,
                            type=n.mime_type,
                            cause=e)
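The etag != md5(local_file) test above leans on an S3 detail: for a single-part upload the etag is the hex MD5 digest of the object's bytes, so hashing the local file reproduces it (multipart uploads break this assumption, and boto may report the etag wrapped in quotes, so strip them before comparing). A sketch of a compatible helper:

import hashlib

def md5(file):
    # Hex MD5 of the file's bytes; equals the S3 etag only for
    # single-part uploads.
    return hashlib.md5(file.read_bytes()).hexdigest()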
Example #19
def setup():
    global config

    config = startup.read_settings(
        default_filename=os.environ.get('ACTIVEDATA_CONFIG'),
        defs=[{
            "name": ["--process_num", "--process"],
            "help": "Additional port offset (for multiple Flask processes",
            "type": int,
            "dest": "process_num",
            "default": 0,
            "required": False
        }])

    constants.set(config.constants)
    Log.start(config.debug)

    agg_bulk.S3_CONFIG = config.bulk.s3

    File.new_instance("activedata.pid").write(text(machine_metadata.pid))

    # PIPE REQUEST LOGS TO ES DEBUG
    if config.request_logs:
        cluster = elasticsearch.Cluster(config.request_logs)
        request_logger = cluster.get_or_create_index(config.request_logs)
        active_data.request_log_queue = request_logger.threaded_queue(
            max_size=2000, period=1)

    if config.dockerflow:

        def backend_check():
            http.get_json(config.elasticsearch.host + ":" +
                          text(config.elasticsearch.port))

        dockerflow(flask_app, backend_check)
    else:
        # IF NOT USING DOCKERFLOW, THEN RESPOND WITH A SIMPLER __version__
        add_version(flask_app)

    # SETUP DEFAULT CONTAINER, SO THERE IS SOMETHING TO QUERY
    container.config.default = {
        "type": "elasticsearch",
        "settings": config.elasticsearch.copy()
    }

    # TRIGGER FIRST INSTANCE
    if config.saved_queries:
        setattr(save_query, "query_finder", SaveQueries(config.saved_queries))

    # STARTUP QUERY STATS
    QueryStats(elasticsearch.Cluster(config.elasticsearch))

    if config.flask.port and config.args.process_num:
        config.flask.port += config.args.process_num

    # TURN ON /exit FOR WINDOWS DEBUGGING
    if config.flask.debug or config.flask.allow_exit:
        config.flask.allow_exit = None
        Log.warning("ActiveData is in debug mode")
        flask_app.add_url_rule('/exit', 'exit', _exit)

    if config.flask.ssl_context:
        if config.args.process_num:
            Log.error("can not serve ssl and multiple Flask instances at once")
        setup_flask_ssl()

    # ENSURE MAIN THREAD SHUTDOWN TRIGGERS Flask SHUTDOWN
    MAIN_THREAD.stopped.then(exit)
Example #20
    def pypi(self):
        if Date.today() <= self.last_deploy():
            Log.note("Can not upload to pypi")
            return False

        lib_name = self.directory.name
        source_readme = File.new_instance(self.directory, 'README.md').abspath
        dest_readme = File.new_instance(self.directory, 'README.txt').abspath
        pypandoc.convert(source_readme, to='rst', outputfile=dest_readme)
        setup_file = File.new_instance(self.directory, 'setup.py')
        req_file = File.new_instance(self.directory, 'requirements.txt')

        if not setup_file.exists:
            Log.warning("Not a PyPi project!  No setup.py file.")

        setup = setup_file.read()
        # UPDATE THE VERSION NUMBER
        curr = (datetime.datetime.utcnow() +
                datetime.timedelta(days=1)).strftime("%y%j")
        setup = re.sub(r'(version\s*=\s*\"\d*\.\d*\.)\d*(\")',
                       r'\g<1>%s\2' % curr, setup)

        # UPDATE THE REQUIREMENTS
        if not req_file.exists:
            Log.error("Expecting a requirements.txt file")
        req = req_file.read()
        setup_req = re.findall(r'install_requires\s*=\s*\[.*\]\s*,', setup)
        setup = setup.replace(
            setup_req[0], 'install_requires=' + value2json(
                d for d in sorted(map(strings.trim, req.split("\n"))) if d))
        setup_file.write(setup)

        File.new_instance(self.directory, "build").delete()
        File.new_instance(self.directory, "dist").delete()
        File.new_instance(self.directory,
                          lib_name.replace("-", "_") + ".egg-info").delete()

        process, stdout, stderr = self.local(
            "pypi",
            ["C:/Python27/python.exe", "setup.py", "bdist_egg", "upload"],
            raise_on_error=False)
        if "Upload failed (400): File already exists." in stderr:
            Log.warning("Not uploaded")
        elif process.returncode == 0:
            pass
        else:
            Log.error("not expected")
        process, stdout, stderr = self.local(
            "pypi", ["C:/Python27/python.exe", "setup.py", "sdist", "upload"],
            raise_on_error=False)
        if "Upload failed (400): File already exists." in stderr:
            Log.warning("Not uploaded")
        elif process.returncode == 0:
            pass
        else:
            Log.error("not expected")

        File.new_instance(self.directory, "README.txt").delete()
        File.new_instance(self.directory, "build").delete()
        File.new_instance(self.directory, "dist").delete()
        File.new_instance(self.directory,
                          lib_name.replace("-", "_") + ".egg-info").delete()
        return True
Example #21
from jx_bigquery import bigquery
from jx_mysql.mysql import MySQL
from jx_mysql.mysql_snowflake_extractor import MySqlSnowflakeExtractor
from mo_files import File
from mo_json import (json2value,
                     value2json)
from mo_logs import (Log,
                     constants,
                     startup)
from mo_sql import SQL
from mo_times import Timer
from mo_times.dates import parse
from redis import Redis

from treeherder.model.models import Job

CONFIG_FILE = (File.new_instance(__file__).parent / "extract_jobs.json").abspath


class ExtractJobs:
    def run(self, force=False, restart=False, merge=False):
        # SETUP LOGGING
        settings = startup.read_settings(filename=CONFIG_FILE)
        constants.set(settings.constants)
        Log.start(settings.debug)

        if not settings.extractor.app_name:
            Log.error("Expecting an extractor.app_name in config file")

        # SETUP DESTINATION
        destination = bigquery.Dataset(
            dataset=settings.extractor.app_name, kwargs=settings.destination
        )
Example #22
    def __init__(self, *args, **kwargs):
        FuzzyTestCase.__init__(self, *args, **kwargs)
        stack = get_stacktrace(0)
        this_file = stack[0]["file"]
        self.resources = "file:///" + File.new_instance(
            this_file, "../resources").abspath
Example #23
    def _worker(self, please_stop):
        global _load_extension_warning_sent

        if DEBUG:
            Log.note("Sqlite version {{version}}", version=sqlite3.sqlite_version)
        if Sqlite.canonical:
            self.db = Sqlite.canonical
        else:
            self.db = sqlite3.connect(coalesce(self.filename, ':memory:'))

            library_loc = File.new_instance(sys.modules[__name__].__file__, "../..")
            full_path = File.new_instance(library_loc, "vendor/sqlite/libsqlitefunctions.so").abspath
            try:
                trace = extract_stack(0)[0]
                if self.upgrade:
                    if os.name == 'nt':
                        file = File.new_instance(trace["file"], "../../vendor/sqlite/libsqlitefunctions.so")
                    else:
                        file = File.new_instance(trace["file"], "../../vendor/sqlite/libsqlitefunctions")

                    full_path = file.abspath
                    self.db.enable_load_extension(True)
                    self.db.execute("SELECT load_extension(" + self.quote_value(full_path) + ")")
            except Exception as e:
                if not _load_extension_warning_sent:
                    _load_extension_warning_sent = True
                    Log.warning("Could not load {{file}}}, doing without. (no SQRT for you!)", file=full_path, cause=e)

        try:
            while not please_stop:
                command, result, signal, trace = self.queue.pop(till=please_stop)

                if DEBUG_INSERT and command.strip().lower().startswith("insert"):
                    Log.note("Running command\n{{command|indent}}", command=command)
                if DEBUG and not command.strip().lower().startswith("insert"):
                    Log.note("Running command\n{{command|indent}}", command=command)
                with Timer("Run command", debug=DEBUG):
                    if signal is not None:
                        try:
                            curr = self.db.execute(command)
                            self.db.commit()
                            result.meta.format = "table"
                            result.header = [d[0] for d in curr.description] if curr.description else None
                            result.data = curr.fetchall()
                            if DEBUG and result.data:
                                text = convert.table2csv(list(result.data))
                                Log.note("Result:\n{{data}}", data=text)
                        except Exception as e:
                            e = Except.wrap(e)
                            result.exception = Except(ERROR, "Problem with\n{{command|indent}}", command=command, cause=e)
                        finally:
                            signal.go()
                    else:
                        try:
                            self.db.execute(command)
                            self.db.commit()
                        except Exception as e:
                            e = Except.wrap(e)
                            e.cause = Except(
                                type=ERROR,
                                template="Bad call to Sqlite",
                                trace=trace
                            )
                            Log.warning("Failure to execute", cause=e)

        except Exception as e:
            if not please_stop:
                Log.error("Problem with sql thread", e)
        finally:
            if DEBUG:
                Log.note("Database is closed")
            self.db.commit()
            self.db.close()
Example #24
    def test_concat(self):
        f = File.new_instance("tests/temp") / "something" / "or" / "other"
        self.assertTrue(f.abspath.endswith("/tests/temp/something/or/other"))
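The / operator exercised here mirrors pathlib's: each operand appends one path component. The same concatenation in the standard library:

from pathlib import PurePosixPath

p = PurePosixPath("tests/temp") / "something" / "or" / "other"
assert str(p).endswith("tests/temp/something/or/other")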