def _upgrade():
    global _upgraded
    global _sqlite3

    try:
        import sys
        import platform

        if "windows" in platform.system().lower():
            # REPLACE CPYTHON'S BUNDLED sqlite3.dll WITH THE VENDORED COPY FOR THIS ARCHITECTURE
            original_dll = File.new_instance(sys.exec_prefix, "dlls/sqlite3.dll")
            if platform.architecture()[0] == "32bit":
                source_dll = File("vendor/pyLibrary/vendor/sqlite/sqlite3_32.dll")
            else:
                source_dll = File("vendor/pyLibrary/vendor/sqlite/sqlite3_64.dll")

            if not all(a == b for a, b in zip_longest(source_dll.read_bytes(), original_dll.read_bytes())):
                original_dll.backup()
                File.copy(source_dll, original_dll)
        else:
            pass  # NON-WINDOWS PLATFORMS USE THE SYSTEM sqlite AS-IS
    except Exception as e:
        Log.warning("could not upgrade python's sqlite", cause=e)

    import sqlite3 as _sqlite3
    _ = _sqlite3
    _upgraded = True
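# A minimal sketch (an assumption, not from the source) of how the _upgraded and
# _sqlite3 globals are meant to be consumed: run the DLL swap lazily, at most once,
# before handing out the sqlite3 module. The get_sqlite3() name is hypothetical,
# and a module-level `_upgraded = False` sentinel is assumed.
def get_sqlite3():
    if not _upgraded:
        _upgrade()
    return _sqlite3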
def test_small_changeset_to_json(self):
    small_patch_file = File("tests/resources/small.patch")
    j1 = diff_to_json(small_patch_file.read_bytes().decode("utf8", "replace"))
    expected = File("tests/resources/small.json").read_json(flexible=False, leaves=False)
    self.assertEqual(j1, expected)
def test_big_changeset_to_json(self):
    big_patch_file = File("tests/resources/big.patch")
    # big_patch_file.write_bytes(http.get("https://hg.mozilla.org/mozilla-central/raw-rev/e5693cea1ec944ca077c7a46c5f127c828a90f1b").content)

    # SANITY CHECK: DECODING WITH "replace" PRESERVES CARRIAGE RETURNS
    self.assertEqual(b'\r'.decode('utf8', 'replace'), u'\r')

    j1 = diff_to_json(big_patch_file.read_bytes().decode("utf8", "replace"))
    expected = File("tests/resources/big.json").read_json(flexible=False, leaves=False)
    self.assertEqual(j1, expected)
def _upgrade():
    global _upgraded
    _upgraded = True

    try:
        import sys
        sqlite_dll = File.new_instance(sys.exec_prefix, "dlls/sqlite3.dll")
        python_dll = File("pyLibrary/vendor/sqlite/sqlite3.dll")
        if python_dll.read_bytes() != sqlite_dll.read_bytes():
            backup = sqlite_dll.backup()
            File.copy(python_dll, sqlite_dll)
    except Exception as e:
        Log.warning("could not upgrade python's sqlite", cause=e)
def _restart_etl_supervisor(conn, please_stop, cpu_count):
    # READ LOCAL CONFIG FILE, ALTER IT FOR THIS MACHINE'S RESOURCES, AND PUSH TO REMOTE
    conf_file = File("./examples/config/etl_supervisor.conf")
    content = conf_file.read_bytes()
    find = between(content, "numprocs=", "\n")
    content = content.replace("numprocs=" + find + "\n", "numprocs=" + str(cpu_count) + "\n")

    with TempFile() as tempfile:
        tempfile.write(content)
        conn.sudo("rm -f /etc/supervisor/conf.d/etl_supervisor.conf")
        conn.put(tempfile.abspath, "/etc/supervisord.conf", use_sudo=True)
        conn.run("mkdir -p /home/ec2-user/logs")

        # START DAEMON (OK IF IT IS ALREADY RUNNING, THANKS TO warn=True)
        conn.sudo("supervisord -c /etc/supervisord.conf", warn=True)
        conn.sudo("supervisorctl reread")
        conn.sudo("supervisorctl update")
def _setup_etl_supervisor(self, cpu_count):
    # INSTALL supervisor
    sudo("apt-get install -y supervisor")
    with fabric_settings(warn_only=True):
        sudo("service supervisor start")

    # READ LOCAL CONFIG FILE, ALTER IT FOR THIS MACHINE'S RESOURCES, AND PUSH TO REMOTE
    conf_file = File("./examples/config/etl_supervisor.conf")
    content = conf_file.read_bytes()
    find = between(content, "numprocs=", "\n")
    content = content.replace("numprocs=" + find + "\n", "numprocs=" + str(cpu_count) + "\n")
    File("./temp/etl_supervisor.conf.alt").write_bytes(content)

    sudo("rm -f /etc/supervisor/conf.d/etl_supervisor.conf")
    put("./temp/etl_supervisor.conf.alt", '/etc/supervisor/conf.d/etl_supervisor.conf', use_sudo=True)
    run("mkdir -p /home/ubuntu/ActiveData-ETL/results/logs")

    # POKE supervisor TO NOTICE THE CHANGE
    sudo("supervisorctl reread")
    sudo("supervisorctl update")
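# The between() helper used above is assumed to return the text strictly between a
# prefix and a suffix (here, the current numprocs value in the supervisor config).
# A rough standalone equivalent, for illustration only; between_sketch() is hypothetical:
def between_sketch(text, prefix, suffix):
    start = text.find(prefix)
    if start == -1:
        return None
    start += len(prefix)
    end = text.find(suffix, start)
    if end == -1:
        return None
    return text[start:end]

# EXAMPLE: between_sketch("numprocs=4\n", "numprocs=", "\n") == "4"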
def _upgrade():
    global _upgraded
    global sqlite3

    try:
        import sys
        # Log.error RAISES, SO THE COPY BELOW IS SKIPPED UNTIL 64-BIT WINDOWS IS HANDLED
        Log.error("Fix to work with 64bit windows too")
        original_dll = File.new_instance(sys.exec_prefix, "dlls/sqlite3.dll")
        source_dll = File("vendor/pyLibrary/vendor/sqlite/sqlite3.dll")
        if not all(a == b for a, b in zip_longest(source_dll.read_bytes(), original_dll.read_bytes())):
            backup = original_dll.backup()
            File.copy(source_dll, original_dll)
    except Exception as e:
        Log.warning("could not upgrade python's sqlite", cause=e)

    import sqlite3
    _ = sqlite3
    _upgraded = True