Example #1
0
 def test_checked_hash_based_pyc(self):
     """A CHECKED_HASH pyc must be invalidated by a change of the source
     hash, even when the source mtime and size are unchanged.
     """
     with util.create_modules('_temp') as mapping:
         source = mapping['_temp']
         pyc = self.util.cache_from_source(source)
         with open(source, 'wb') as fp:
             fp.write(b'state = "old"')
         # Pin the mtime so a timestamp-based check could never detect
         # the rewrite below; only the hash check can.
         os.utime(source, (50, 50))
         py_compile.compile(
             source,
             invalidation_mode=py_compile.PycInvalidationMode.CHECKED_HASH,
         )
         loader = self.machinery.SourceFileLoader('_temp', source)
         mod = types.ModuleType('_temp')
         mod.__spec__ = self.util.spec_from_loader('_temp', loader)
         loader.exec_module(mod)
         self.assertEqual(mod.state, 'old')
         # Write a new source with the same mtime and size as before.
         with open(source, 'wb') as fp:
             fp.write(b'state = "new"')
         os.utime(source, (50, 50))
         # The loader must notice the hash mismatch and recompile.
         loader.exec_module(mod)
         self.assertEqual(mod.state, 'new')
         with open(pyc, 'rb') as fp:
             data = fp.read()
         # PEP 552: flags word 0b11 = hash-based pyc with the
         # check_source bit set.
         self.assertEqual(int.from_bytes(data[4:8], 'little'), 0b11)
         # Bytes 8-16 of the header hold the hash of the new source.
         self.assertEqual(
             self.util.source_hash(b'state = "new"'),
             data[8:16],
         )
 def test_script_compiled(self):
     """A legacy .pyc left behind after deleting its source still runs."""
     with temp_dir() as script_dir:
         source = _make_test_script(script_dir, 'script')
         py_compile.compile(source, doraise=True)
         os.remove(source)
         self._check_script(support.make_legacy_pyc(source))
Example #3
0
    def build(self, source, filename):
        """Write *source* to <self.path>/<base>.py, byte-compile it, and
        load the resulting module via self._load().

        The write is atomic: the text goes to a NamedTemporaryFile in the
        same directory first and is then renamed into place.  The global
        import lock is held for the whole operation.

        NOTE(review): the `imp` lock API and writing str to the (binary
        by default) temp file indicate Python 2 code; under Python 3
        `imp` is removed and these writes would need encoding.
        """
        imp.acquire_lock()
        try:
            base, ext = os.path.splitext(filename)
            name = os.path.join(self.path, base + ".py")
            log.debug("writing source to disk (%d bytes)." % len(source))
            # Sibling temp file so a crash never leaves a half-written
            # module at the final name.
            temp = tempfile.NamedTemporaryFile(
                prefix=base, suffix='.tmp', dir=self.path, delete=False)
            try:
                try:
                    temp.write("%s\n" % '# -*- coding: utf-8 -*-')
                    temp.write(source)
                finally:
                    temp.close()
            except:
                # On any failure, drop the orphaned temp file, then
                # re-raise unchanged.
                os.remove(temp.name)
                raise

            os.rename(temp.name, name)
            log.debug("compiling %s into byte-code..." % filename)
            py_compile.compile(name)

            return self._load(base, name)
        finally:
            imp.release_lock()
Example #4
0
 def test_overiden_unchecked_hash_based_pyc(self):
     """With _imp.check_hash_based_pycs forced to 'always', even an
     UNCHECKED_HASH pyc is validated against the source hash, so a
     source edit triggers recompilation.

     (The 'overiden' typo in the name is kept; renaming would break
     test discovery references.)
     """
     with util.create_modules('_temp') as mapping, \
          unittest.mock.patch('_imp.check_hash_based_pycs', 'always'):
         source = mapping['_temp']
         pyc = self.util.cache_from_source(source)
         with open(source, 'wb') as fp:
             fp.write(b'state = "old"')
         os.utime(source, (50, 50))
         py_compile.compile(
             source,
             invalidation_mode=py_compile.PycInvalidationMode.UNCHECKED_HASH,
         )
         loader = self.machinery.SourceFileLoader('_temp', source)
         mod = types.ModuleType('_temp')
         mod.__spec__ = self.util.spec_from_loader('_temp', loader)
         loader.exec_module(mod)
         self.assertEqual(mod.state, 'old')
         # Update the source file, which should be ignored.
         with open(source, 'wb') as fp:
             fp.write(b'state = "new"')
         # ...but the 'always' override forces the hash check, so the
         # new source is picked up anyway.
         loader.exec_module(mod)
         self.assertEqual(mod.state, 'new')
         with open(pyc, 'rb') as fp:
             data = fp.read()
         # PEP 552: flags 0b1 = hash-based pyc, check_source bit clear.
         self.assertEqual(int.from_bytes(data[4:8], 'little'), 0b1)
         self.assertEqual(
             self.util.source_hash(b'state = "new"'),
             data[8:16],
         )
Example #5
0
def redo_pyc(egg):
    """Recompile bytecode for every previously-compiled .py file under
    *egg*, at both optimization levels.

    Files with no existing .pyc/.pyo are skipped (they may simply not be
    compilable).  The "other" optimization level is produced by spawning
    a subprocess, since one interpreter runs at a single level.
    """
    if not os.path.isdir(egg):
        return
    for dirpath, _dirnames, filenames in os.walk(egg):
        for filename in filenames:
            if not filename.endswith('.py'):
                continue
            filepath = os.path.join(dirpath, filename)
            # Only recompile what had bytecode before.
            if not any(os.path.exists(filepath + suffix)
                       for suffix in 'co'):
                continue

            # Drop the stale bytecode files.
            for suffix in 'co':
                stale = filepath + suffix
                if os.path.exists(stale):
                    os.remove(stale)

            # Compile under the current optimization level.
            try:
                py_compile.compile(filepath)
            except py_compile.PyCompileError:
                logger.warning("Couldn't compile %s", filepath)
                continue

            # Recompile under the other optimization level. :)
            args = [sys.executable]
            if __debug__:
                args.append('-O')
            args.extend(['-m', 'py_compile', filepath])
            call_subprocess(args)
Example #6
0
	def codyReload(self, trigger):
		"""Reload the bot from a (possibly different) source file named in
		the IRC message, after verifying that it byte-compiles.

		NOTE(review): the bare except, the exec of an open file object,
		and executing a user-named file are all unsafe; exec of a file
		object only works on Python 2 (Python 3 would need the file's
		text).  Any filename arriving over IRC is untrusted input.
		"""

		# A word after the trigger names the file to reload.
		if len(self.MSG_BODY) > len(trigger + '\r\n'):
			try:
				newCody 				= self.MSG_BODY.split()
				newCody 				= newCody[1]

			except:
				newCody 				= 'NULL'
					
		#else if no file is supplied, reload the running version
		else:
			newCody					= self.FILE_NAME

		folderPath 				= ''
		
		if os.path.isfile(folderPath + newCody):
		
			try:
				# Refuse to restart into a file that does not compile.
				py_compile.compile(folderPath + newCody, doraise = True)
						
			except py_compile.PyCompileError as e:
				errorMessage = str(e)
				self.IRC.send ( 'PRIVMSG '+self.MSG_CHANNEL+' :'+errorMessage+'. \r\n' )

			else:
				# Make the new file executable, leave IRC, then run it.
				os.chmod(folderPath + newCody, stat.S_IRWXU)
				self.IRC.send ( 'QUIT :reloading myself\r\n' )
				exec(open(folderPath + newCody))
		

		else:
			self.IRC.send ( 'PRIVMSG '+self.MSG_CHANNEL+' :File not found! \r\n' )	
Example #7
0
def copy_python():
    """Copy the Python standard library (minus excluded directories) into
    py_dir, copy site-packages, then byte-compile everything at -OO and
    delete the sources.

    NOTE(review): `site_dest` is used below but its assignment here is
    commented out — it (along with `py_dir`, `srcdir`, `site_packages`,
    `copy_site_packages`, `create_site_py` and `ignore_in_dirs`) must be
    defined at module level for this to run; confirm before touching.
    """
    
    if not os.path.exists(py_dir):
        os.mkdir(py_dir)

    for x in os.listdir(srcdir):
        y = os.path.join(srcdir, x)
        ext = os.path.splitext(x)[1]
        # Copy stdlib subdirectories, skipping tests/tooling not shipped.
        if os.path.isdir(y) and x not in ('test', 'hotshot', 'distutils',
                'site-packages', 'idlelib', 'lib2to3', 'dist-packages', '__pycache__'):
            shutil.copytree(y, os.path.join(py_dir, x),
                    ignore=ignore_in_dirs)
        if os.path.isfile(y) and ext in ('.py', '.so'):
            shutil.copy2(y, py_dir)

    #site_dest = os.path.join(py_dir, 'site-packages')
    copy_site_packages(site_packages, site_dest)
    create_site_py()

    for x in os.walk(py_dir):
        for f in x[-1]:
            if f.endswith('.py'):
                y = os.path.join(x[0], f)
                rel = os.path.relpath(y, py_dir)
                try:
                    # Legacy-style .pyo written next to the source at
                    # optimize=2; the .py (and any stray .pyc) then goes.
                    py_compile.compile(y, cfile=y+'o',dfile=rel, doraise=True, optimize=2)
                    os.remove(y)
                    z = y+'c'
                    if os.path.exists(z):
                        os.remove(z)
                except:
                    print ('Failed to byte-compile', y)
Example #8
0
def compile_dir(dir, maxlevels = 10):
	"""Recursively byte-compile every .py file under *dir* (Python 2).

	Descends at most *maxlevels* directory levels, skipping curdir/pardir
	entries and symlinked directories.  Per-file failures are reported
	and skipped; KeyboardInterrupt aborts the current directory.
	"""
	print 'Listing', dir, '...'
	try:
		names = os.listdir(dir)
	except os.error:
		print "Can't list", dir
		names = []
	names.sort()
	for name in names:
		fullname = os.path.join(dir, name)
		if os.path.isfile(fullname):
			head, tail = name[:-3], name[-3:]
			if tail == '.py':
				print 'Compiling', fullname, '...'
				try:
					py_compile.compile(fullname)
				except KeyboardInterrupt:
					# Ctrl-C empties the worklist and stops this dir.
					del names[:]
					print '\n[interrupt]'
					break
				except:
					# Python 2 still allows old-style string exceptions,
					# hence the sys.exc_type type check.
					if type(sys.exc_type) == type(''):
						exc_type_name = sys.exc_type
					else: exc_type_name = sys.exc_type.__name__
					print 'Sorry:', exc_type_name + ':',
					print sys.exc_value
		elif maxlevels > 0 and \
		     name != os.curdir and name != os.pardir and \
		     os.path.isdir(fullname) and \
		     not os.path.islink(fullname):
			compile_dir(fullname, maxlevels - 1)
Example #9
0
    def w_temp_zipfile(self, created_paths, source=True, bytecode=True):
        """Create a temporary zip file for testing.

        Clears zipimport._zip_directory_cache.

        """
        import zipimport, os, shutil, zipfile, py_compile
        example_code = 'attr = None'
        TESTFN = '@test'
        zipimport._zip_directory_cache.clear()
        zip_path = TESTFN + '.zip'
        bytecode_suffix = 'c'  # if __debug__ else 'o'
        archive = zipfile.ZipFile(zip_path, 'w')
        for path in created_paths:
            # Create any intermediate package directories first.
            if os.sep in path:
                parent = os.path.split(path)[0]
                if not os.path.exists(parent):
                    os.makedirs(parent)
            code_path = path + '.py'
            with open(code_path, 'w') as src_file:
                src_file.write(example_code)
            if source:
                archive.write(code_path)
            if bytecode:
                py_compile.compile(code_path, doraise=True)
                archive.write(code_path + bytecode_suffix)
        archive.close()
        return os.path.abspath(zip_path)
Example #10
0
    def test_module_with_large_stack(self, module='longlist'):
        # Regression test for http://bugs.python.org/issue561858:
        # importing bytecode for a module holding a huge list literal
        # used to crash.  (Python 2 era code: note the `exec` statement
        # and `reload` below.)
        filename = module + os.extsep + 'py'

        # Create a file with a list of 65000 elements.
        with open(filename, 'w+') as f:
            f.write('d = [\n')
            for i in range(65000):
                f.write('"",\n')
            f.write(']')

        # Compile & remove .py file, we only need .pyc (or .pyo).
        with open(filename, 'r') as f:
            py_compile.compile(filename)
        if check_impl_detail(pypy=False):
            # pypy refuses to import a .pyc if the .py does not exist
            unlink(filename)

        # Need to be able to load from current dir.
        sys.path.append('')

        # This used to crash.
        exec 'import ' + module
        reload(longlist)

        # Cleanup.
        del sys.path[-1]
        unlink(filename + 'c')
        unlink(filename + 'o')
Example #11
0
def test_module_with_large_stack(module):
    """Regression test for bug #561858: importing bytecode for a module
    containing a 65000-element list literal used to crash.  (Python 2
    code: note the `exec` statement below.)"""
    # create module w/list of 65000 elements to test bug #561858
    filename = module + os.extsep + 'py'

    # create a file with a list of 65000 elements
    f = open(filename, 'w+')
    f.write('d = [\n')
    for i in range(65000):
        f.write('"",\n')
    f.write(']')
    f.close()

    # compile & remove .py file, we only need .pyc (or .pyo)
    f = open(filename, 'r')
    py_compile.compile(filename)
    f.close()
    os.unlink(filename)

    # need to be able to load from current dir
    sys.path.append('')

    # this used to crash
    exec 'import ' + module

    # cleanup
    del sys.path[-1]
    for ext in 'pyc', 'pyo':
        fname = module + os.extsep + ext
        if os.path.exists(fname):
            os.unlink(fname)
Example #12
0
    def test_module_with_large_stack(self, module='longlist'):
        # Regression test for http://bugs.python.org/issue561858.
        # IronPython variant: the pyc round-trip is skipped entirely
        # because IronPython cannot consume .pyc files.  (Python 2 era
        # code: note the `exec` statement below.)
        filename = module + os.extsep + 'py'

        # Create a file with a list of 65000 elements.
        with open(filename, 'w+') as f:
            f.write('d = [\n')
            for i in range(65000):
                f.write('"",\n')
            f.write(']')

        # Compile & remove .py file, we only need .pyc (or .pyo).
        if not due_to_ironpython_incompatibility("IronPython cannot use pyc files"):
            with open(filename, 'r') as f:
                py_compile.compile(filename)
            unlink(filename)

        # Need to be able to load from current dir.
        sys.path.append('')

        # This used to crash.
        exec 'import ' + module

        # Cleanup.
        del sys.path[-1]
        unlink(filename + 'c')
        unlink(filename + 'o')
        # On IronPython the .py was never removed above; remove it now.
        if due_to_ironpython_incompatibility("IronPython cannot use pyc files"):
            os.unlink(filename)
Example #13
0
 def precompile_site_pyc(self):
     """Byte-compile site.py inside the bundled Python and delete the
     source, keeping only the bytecode.

     NOTE(review): Python 2 code (print statements); it relies on the
     pyc being written next to the source, not into __pycache__.
     """
     print "Pre-compiling python sources"
     import py_compile
     py_compile.compile(os.path.join(self.rsrcRoot, 'lib', 'python%s' % PYTHON_VERSION, 'site.py'))
     print "Deleting python sources"
     # These can go, since we have the pyc now
     os.remove(os.path.join(self.rsrcRoot, 'lib', 'python%s' % PYTHON_VERSION, 'site.py'))
def main():
	"""Command-line entry point.

	With a directory argument, process every source file in the tree and
	byte-compile each result; with a file argument, process just that
	file.  Exits 0 after printing help, -1 on bad usage or missing path.
	"""
	if '--help' in sys.argv or '-h' in sys.argv or '--version' in sys.argv or '-v' in sys.argv:
		print(help)
		sys.exit(0)

	if len(sys.argv) != 2:
		# Fix: sys.stderr was previously passed as a *value* to print(),
		# which wrote its repr to stdout; use the file= keyword instead.
		print('Invalid parameters count. Must be 1', file=sys.stderr)
		print(help)
		sys.exit(-1)

	if os.path.isdir(sys.argv[1]):
		for root, dirs, files in os.walk(sys.argv[1]):
			for file in files:
				in_filename = os.path.join(root, file)
				if is_source(in_filename):
					out_filename = in_filename + '.py' # not ideal but it'll have to do
					process_file(in_filename, out_filename)
					# doraise=True: surface compile errors as exceptions
					# (clearer than the old positional-argument call).
					py_compile.compile(out_filename, doraise=True)

	elif os.path.isfile(sys.argv[1]):
		process_file(sys.argv[1], sys.argv[1] + '.py')
		py_compile.compile(sys.argv[1] + '.py', doraise=True)

	else:
		print('Not a file or directory', sys.argv[1], file=sys.stderr)
		sys.exit(-1)
Example #15
0
 def __init__(self, element_id, title, namespace, script_path, consolidate):
     """Script element.

     When *consolidate* is true, <script_path>.py is byte-compiled, the
     resulting <script_path>.pyc is read, pickled, base64-encoded and
     transformed into self.code, and the temporary .pyc is removed;
     otherwise the element just records the .py path.

     NOTE(review): this relies on py_compile.compile() writing the pyc
     next to the source (pre-PEP 3147 behaviour); on Python 3 the
     default output goes to __pycache__, so the open() below would fail
     unless cfile is passed explicitly — confirm the target interpreter.
     `oslib` and `ucslib` are project modules; contracts assumed.
     """
     check_namespace(namespace)
     
     self.element_id = element_id
     self.title = title
     self.namespace = namespace
     
     # Set when the element is attached to a tree.
     self.parent = None
     self.root = None
     
     if consolidate:
         self.script = ''
         self.extension = '.pyc'
         
         py_compile.compile(script_path + ".py")
         with open(script_path + ".pyc", 'rb') as reader:
             compiled_binary = reader.read()
         oslib.remove(script_path + ".pyc")
         code = base64.b64encode(pickle.dumps(compiled_binary))
         self.code = ucslib.transform(code)
         
     else:
         self.script = script_path + ".py"
         self.code = ''
         self.extension = '.py'
 def compileBlenderFileModule(self, blender_file_name):
     """Byte-compile the '_<name>.py' companion module of a .blend file,
     silently ignoring a missing module file."""
     module_path = os.path.dirname(blender_file_name)
     stem = os.path.splitext(os.path.basename(blender_file_name))[0]
     try:
         py_compile.compile(os.path.join(module_path, '_' + stem + '.py'))
     except IOError:
         pass
Example #17
0
def make_zip_pkg(zip_dir, zip_basename, pkg_name, script_basename,
                 source, depth=1, compiled=False):
    """Build <zip_dir>/<zip_basename>.zip containing a package nested
    *depth* levels deep, with an __init__ at each level and the script at
    the innermost level (as bytecode when *compiled* is true).  The
    on-disk source files are removed afterwards.  Returns the zip path
    and the script's path inside it.
    """
    to_unlink = []
    init_name = make_script(zip_dir, '__init__', '')
    to_unlink.append(init_name)
    init_basename = os.path.basename(init_name)
    script_name = make_script(zip_dir, script_basename, source)
    to_unlink.append(script_name)
    if compiled:
        # py_compile.compile returns the bytecode path; archive that
        # instead of the source.
        init_name = py_compile.compile(init_name, doraise=True)
        script_name = py_compile.compile(script_name, doraise=True)
        to_unlink += [init_name, script_name]
    pkg_names = [os.sep.join([pkg_name] * i) for i in range(1, depth + 1)]
    script_name_in_zip = os.path.join(pkg_names[-1],
                                      os.path.basename(script_name))
    zip_name = os.path.join(zip_dir, zip_basename + os.extsep + 'zip')
    with zipfile.ZipFile(zip_name, 'w') as zip_file:
        for name in pkg_names:
            zip_file.write(init_name, os.path.join(name, init_basename))
        zip_file.write(script_name, script_name_in_zip)
    for name in to_unlink:
        os.unlink(name)
    return zip_name, os.path.join(zip_name, script_name_in_zip)
Example #18
0
    def run_test(self, test, create=None, *, compile_=None, unlink=None):
        """Test the finding of 'test' with the creation of modules listed in
        'create'.

        Any names listed in 'compile_' are byte-compiled. Modules
        listed in 'unlink' have their source files deleted.

        """
        if create is None:
            create = {test}
        with source_util.create_modules(*create) as mapping:
            for name in (compile_ or ()):
                py_compile.compile(mapping[name])
            for name in (unlink or ()):
                os.unlink(mapping[name])
                try:
                    make_legacy_pyc(mapping[name])
                except OSError as error:
                    # A module not listed in compile_ has no PEP 3147
                    # pyc to rename, so a missing file is expected;
                    # anything else is a real failure.
                    if error.errno != errno.ENOENT:
                        raise
            loader = self.import_(mapping['.root'], test)
            self.assertTrue(hasattr(loader, 'load_module'))
            return loader
Example #19
0
def decimal_using_bytecode(seconds, repeat):
    """Bytecode w/ source: decimal"""
    # Ensure up-to-date bytecode exists, then time re-imports of the
    # module (popping it from sys.modules forces each reload).
    name = 'decimal'
    py_compile.compile(decimal.__file__)
    yield from bench(name, lambda: sys.modules.pop(name),
                     repeat=repeat, seconds=seconds)
Example #20
0
def copy_to_layout(target, rel_sources):
    """Copy (source_path, relative_path) pairs into *target*.

    When *target* ends in '.zip', a fresh archive is written; .py
    sources are byte-compiled at optimize=2 and stored as .pyc, falling
    back to the raw source when compilation fails.  Otherwise files are
    copied into the directory tree under *target*.  Returns the number
    of entries handled.
    """
    count = 0

    if target.suffix.lower() == '.zip':
        if target.exists():
            target.unlink()

        with ZipFile(str(target), 'w', ZIP_DEFLATED) as archive, \
                tempfile.TemporaryDirectory() as tmpdir:
            for src, rel in rel_sources:
                if rel.suffix.lower() != '.py':
                    archive.write(str(src), str(rel))
                else:
                    pyc = Path(tmpdir) / rel.with_suffix('.pyc').name
                    try:
                        py_compile.compile(str(src), str(pyc), str(rel),
                                           doraise=True, optimize=2)
                    except py_compile.PyCompileError:
                        # Uncompilable sources ship as-is.
                        archive.write(str(src), str(rel))
                    else:
                        archive.write(str(pyc),
                                      str(rel.with_suffix('.pyc')))
                count += 1
    else:
        for src, rel in rel_sources:
            try:
                (target / rel).parent.mkdir(parents=True)
            except FileExistsError:
                pass
            shutil.copy(str(src), str(target / rel))
            count += 1

    return count
Example #21
0
 def compile_srcdir(self):
     """Byte-compile every file under the src directory that matches one
     of compile_patterns, raising on any compile error."""
     src_root = self.dirs['src']
     for root, _dirs, files in os.walk(src_root):
         for pattern in compile_patterns:
             for name in fnmatch.filter(files, pattern):
                 py_compile.compile(file=os.path.join(root, name),
                                    doraise=True)
Example #22
0
    def test_missing_py_file_during_run(self):
        """A module imported from a bare .pyc is still reported by
        coverage against the (later restored) source file."""
        # PyPy2 doesn't run bare .pyc files.
        if env.PYPY and env.PY2:
            self.skip("PyPy2 doesn't run bare .pyc files")

        # Create two Python files.
        self.make_file("mod.py", "a = 1\n")
        self.make_file("main.py", "import mod\n")

        # Make one into a .pyc, and remove the .py.
        py_compile.compile("mod.py")
        os.remove("mod.py")

        # Python 3 puts the .pyc files in a __pycache__ directory, and will
        # not import from there without source.  It will import a .pyc from
        # the source location though.
        if not os.path.exists("mod.pyc"):
            pycs = glob.glob("__pycache__/mod.*.pyc")
            self.assertEqual(len(pycs), 1)
            os.rename(pycs[0], "mod.pyc")

        # Run the program.
        cov = coverage.Coverage()
        cov.start()
        import main     # pragma: nested # pylint: disable=import-error,unused-variable
        cov.stop()      # pragma: nested

        # Put back the missing Python file.
        self.make_file("mod.py", "a = 1\n")
        report = self.get_report(cov).splitlines()
        self.assertIn("mod.py 1 0 100%", report)
Example #23
0
    def _get_codename(self, pathname, basename):
        """Return (filename, archivename) for the path.

        Given a module name path, return the correct file path and
        archive name, compiling if necessary.  For example, given
        /python/lib/string, return (/python/lib/string.pyc, string).
        """
        file_py = pathname + '.py'
        file_pyc = pathname + '.pyc'
        file_pyo = pathname + '.pyo'
        # Prefer an up-to-date .pyo; otherwise (re)compile to .pyc when
        # it is missing or older than the source.  (Python 2 code: note
        # the print statement and sibling-pyc convention.)
        if os.path.isfile(file_pyo) and os.stat(file_pyo).st_mtime >= os.stat(file_py).st_mtime:
            fname = file_pyo
        elif not os.path.isfile(file_pyc) or os.stat(file_pyc).st_mtime < os.stat(file_py).st_mtime:
            import py_compile
            if self.debug:
                print 'Compiling', file_py
            try:
                py_compile.compile(file_py, file_pyc, None, True)
            except py_compile.PyCompileError as err:
                # doraise=True: report the error, but still hand back
                # the (possibly stale or missing) .pyc name.
                print err.msg

            fname = file_pyc
        else:
            fname = file_pyc
        archivename = os.path.split(fname)[1]
        if basename:
            archivename = '%s/%s' % (basename, archivename)
        return (fname, archivename)
 def test_script_compiled(self):
     """A legacy .pyc whose source was removed still runs and is loaded
     by SourcelessFileLoader."""
     with temp_dir() as script_dir:
         source = _make_test_script(script_dir, "script")
         py_compile.compile(source, doraise=True)
         os.remove(source)
         pyc_file = support.make_legacy_pyc(source)
         self._check_script(pyc_file, pyc_file, pyc_file, script_dir,
                            None, importlib.machinery.SourcelessFileLoader)
Example #25
0
def transpile(filename, namespace, outdir=None):
    """Byte-compile *filename* as a sanity check, then transpile it into
    *namespace* and write the result (into *outdir* when given)."""
    print("Compiling %s ..." % filename)
    py_compile.compile(filename)

    worker = Transpiler(namespace)
    worker.transpile(filename)
    worker.write(outdir)
Example #26
0
    def validate_file(self,z,fname,hook):
        """Validate one member *fname* of open zip archive *z*.

        Python sources (and anything when *hook* is given) are extracted
        to unpack/; .py files are then byte-compiled and .txt files are
        checked for accidental RTF content.  *hook*, when given, is
        called with the unpacked filename and any compile error message.
        Returns the number of errors found.

        NOTE(review): z.open(...).read() yields bytes, but the unpack
        file is opened in text mode and the startswith() below compares
        against str — this looks like Python 2 code; Python 3 would need
        explicit bytes handling.
        """
        import py_compile
        errors = 0
        # Get the file contents
        contents = z.open(fname).read()

        # Unpack if the file is python or if we have a hook
        # If python file, see if it compiles
        if fname.endswith(".py") or hook:
            fnew = "unpack/"+os.path.basename(fname)
            with open(fnew,"w") as fb:
                fb.write(contents)

        # Verify python correctness if it is a python file
        error_msg = None
        if fname.endswith(".py"):
            try:
                py_compile.compile(fnew)
            except py_compile.PyCompileError as e:
                print("Compile error: "+str(e))
                error_msg = str(e)
                errors += 1
                

        # If this is a text file, complain if it is RTF
        print("check ",fname,contents[0:10])
        if fname.endswith(".txt") and contents.startswith(r"{\rtf"):
            print("*** {0} is a RTF file; it should be a text file".format(fname))
            errors += 1

        if hook:
            hook(fnew,error_msg=error_msg)
        return errors
Example #27
0
 def search_file(self, filename):
     """Search one file: non-Python files go through search_text();
     Python sources are byte-compiled and their code objects searched
     via search_code(), falling back to text search when no bytecode
     can be produced.

     NOTE(review): the sibling '<name>.pyc' path and the fixed 8-byte
     header skip are pre-PEP 3147 conventions; on modern CPython the
     pyc goes to __pycache__ and the header is 16 bytes — confirm the
     target interpreter version.
     """
     self.total_files += 1
     if not filename.endswith('.py'):
         self.search_text(filename)
         return
     pyc = filename[:-2]+'pyc'
     if not os.path.exists(pyc):
         try:
             py_compile.compile(filename)
         except OSError:
             # ignore permission error if the .pyc cannot be written
             pass
     if not os.path.exists(pyc):
         # Invalid syntax...
         self.search_text(filename, as_module=True)
         return
     with open(pyc, 'rb') as f:
         # .pyc Header:
         f.read(8)
         try:
             code = marshal.load(f)
         except ValueError:
             # Fail to load the byteload. For example, Python 3.4 cannot
             # load Python 2.7 bytecode.
             pass
         else:
             self.search_code(code, filename, [])
Example #28
0
def _compile_test_script(script_name):
    py_compile.compile(script_name, doraise=True)
    if __debug__:
        compiled_name = script_name + 'c'
    else:
        compiled_name = script_name + 'o'
    return compiled_name
Example #29
0
 def make_plugins(self):
     """
     Package built-in plugins into ZIP archives.

     Each directory under @plugins/ becomes <build_exe>/plugins/<name>.zip.
     .py files are byte-compiled and stored as .pyc (the temporary .pyc
     is removed after being archived); all other files are stored
     verbatim, and __pycache__ directories are skipped.

     NOTE(review): isdir/mkdir/expandPath/rstrip/fremove appear to be
     project helpers here — in particular rstrip(path, '.py') does not
     match str.rstrip semantics; confirm their contracts before editing.
     """
     if isdir('@plugins/'):
         mkdir(os.path.join(self.build_exe, 'plugins'))
         for file_or_directory in os.listdir(expandPath('@plugins/')):
             plugin = os.path.join(expandPath('@plugins/'), file_or_directory)
             if isdir(plugin):
                 distutils.log.info('packaging plugin: %s', file_or_directory)
                 zippath = os.path.join(self.build_exe, 'plugins', '%s.zip' % file_or_directory)
                 with zipfile.ZipFile(zippath, 'w', zipfile.ZIP_STORED) as zipf:
                     for root, dirs, files in os.walk(plugin):
                         if not root.endswith('__pycache__'):
                             for filename in files:
                                 path = expandPath(os.path.join(root, filename))
                                 if path.endswith('.py'):
                                     # Archive the bytecode, not the source.
                                     new_path = '%s.pyc' % rstrip(path, '.py')
                                     py_compile.compile(path, new_path)
                                     arcname = os.path.join(file_or_directory, os.path.relpath(new_path, plugin))
                                     zipf.write(new_path, arcname)
                                     fremove(new_path)
                                 else:
                                     arcname = os.path.join(file_or_directory, os.path.relpath(path, plugin))
                                     zipf.write(path, arcname)
Example #30
0
    def _get_codename(self, pathname, basename):
        """Return (filename, archivename) for the path.

        Given a module name path, return the correct file path and
        archive name, compiling if necessary.  For example, given
        /python/lib/string, return (/python/lib/string.pyc, string).
        """
        file_py  = pathname + ".py"
        file_pyc = pathname + ".pyc"
        file_pyo = pathname + ".pyo"
        # Prefer an up-to-date .pyo (os.stat(...)[8] is st_mtime);
        # otherwise (re)compile to .pyc when missing or stale.
        # (Python 2 code: note the print statement below.)
        if os.path.isfile(file_pyo) and \
                            os.stat(file_pyo)[8] >= os.stat(file_py)[8]:
            fname = file_pyo    # Use .pyo file
        elif not os.path.isfile(file_pyc) or \
             os.stat(file_pyc)[8] < os.stat(file_py)[8]:
            import py_compile
            if self.debug:
                print "Compiling", file_py
            py_compile.compile(file_py, file_pyc)
            fname = file_pyc
        else:
            fname = file_pyc
        archivename = os.path.split(fname)[1]
        if basename:
            archivename = "%s/%s" % (basename, archivename)
        return (fname, archivename)
 def test_relative_path(self):
     """Relative source/cfile paths compile into the explicit cfile, not
     the __pycache__ location."""
     rel_source = os.path.relpath(self.source_path)
     rel_pyc = os.path.relpath(self.pyc_path)
     py_compile.compile(rel_source, rel_pyc)
     self.assertTrue(os.path.exists(self.pyc_path))
     self.assertFalse(os.path.exists(self.cache_path))
 def test_do_not_overwrite_nonregular_files(self):
     """Compiling onto a non-regular cfile (here os.devnull) must raise
     FileExistsError rather than clobber it (issue #17222)."""
     with self.assertRaises(FileExistsError):
         py_compile.compile(self.source_path, os.devnull)
 def test_absolute_path(self):
     """An explicit absolute cfile is honored; nothing lands in the
     __pycache__ location."""
     py_compile.compile(self.source_path, self.pyc_path)
     self.assertTrue(os.path.exists(self.pyc_path))
     self.assertFalse(os.path.exists(self.cache_path))
 def test_optimization_path(self):
     """The pyc path returned for optimize=2 carries the 'opt-2' tag."""
     compiled = py_compile.compile(self.source_path, optimize=2)
     self.assertIn('opt-2', compiled)
Example #35
0
 def test_pvacfuse_compiles(self):
     """The pvacfuse main module must byte-compile cleanly (compile()
     returns a truthy path on success)."""
     main_py = os.path.join(self.pVac_directory,
                            'tools', 'pvacfuse', 'main.py')
     self.assertTrue(py_compile.compile(main_py))
Example #36
0
 def test_run_compiles(self):
     """The pvacfuse run module must byte-compile cleanly (compile()
     returns a truthy path on success)."""
     run_py = os.path.join(self.pVac_directory,
                           "tools", "pvacfuse", "run.py")
     self.assertTrue(py_compile.compile(run_py))
 def test_bad_coding(self):
     """With doraise=False a file with a bad coding declaration compiles
     to None (error printed to stderr) and leaves no cached bytecode."""
     bad_coding = os.path.join(os.path.dirname(__file__), 'bad_coding2.py')
     with support.captured_stderr():
         self.assertIsNone(py_compile.compile(bad_coding, doraise=False))
     cached = importlib.util.cache_from_source(bad_coding)
     self.assertFalse(os.path.exists(cached))
 def test_cache_path(self):
     """Default compilation writes to the PEP 3147 __pycache__ path."""
     py_compile.compile(self.source_path)
     self.assertTrue(os.path.exists(self.cache_path))
Example #39
0
def byte_compile (py_files,
                  optimize=0, force=0,
                  prefix=None, base_dir=None,
                  verbose=1, dry_run=0,
                  direct=None):
    """Byte-compile a collection of Python source files to .pyc
    files in a __pycache__ subdirectory.  'py_files' is a list
    of files to compile; any files that don't end in ".py" are silently
    skipped.  'optimize' must be one of the following:
      0 - don't optimize
      1 - normal optimization (like "python -O")
      2 - extra optimization (like "python -OO")
    If 'force' is true, all files are recompiled regardless of
    timestamps.

    The source filename encoded in each bytecode file defaults to the
    filenames listed in 'py_files'; you can modify these with 'prefix' and
    'basedir'.  'prefix' is a string that will be stripped off of each
    source filename, and 'base_dir' is a directory name that will be
    prepended (after 'prefix' is stripped).  You can supply either or both
    (or neither) of 'prefix' and 'base_dir', as you wish.

    If 'dry_run' is true, doesn't actually do anything that would
    affect the filesystem.

    Byte-compilation is either done directly in this interpreter process
    with the standard py_compile module, or indirectly by writing a
    temporary script and executing it.  Normally, you should let
    'byte_compile()' figure out to use direct compilation or not (see
    the source for details).  The 'direct' flag is used by the script
    generated in indirect mode; unless you know what you're doing, leave
    it set to None.
    """

    # Late import to fix a bootstrap issue: _posixsubprocess is built by
    # setup.py, but setup.py uses distutils.
    import subprocess

    # nothing is done if sys.dont_write_bytecode is True
    if sys.dont_write_bytecode:
        raise DistutilsByteCompileError('byte-compiling is disabled.')

    # First, if the caller didn't force us into direct or indirect mode,
    # figure out which mode we should be in.  We take a conservative
    # approach: choose direct mode *only* if the current interpreter is
    # in debug mode and optimize is 0.  If we're not in debug mode (-O
    # or -OO), we don't know which level of optimization this
    # interpreter is running with, so we can't do direct
    # byte-compilation and be certain that it's the right thing.  Thus,
    # always compile indirectly if the current interpreter is in either
    # optimize mode, or if either optimization level was requested by
    # the caller.
    if direct is None:
        direct = (__debug__ and optimize == 0)

    # "Indirect" byte-compilation: write a temporary script and then
    # run it with the appropriate flags.
    if not direct:
        try:
            from tempfile import mkstemp
            (script_fd, script_name) = mkstemp(".py")
        except ImportError:
            from tempfile import mktemp
            (script_fd, script_name) = None, mktemp(".py")
        log.info("writing byte-compilation script '%s'", script_name)
        if not dry_run:
            if script_fd is not None:
                script = os.fdopen(script_fd, "w")
            else:
                script = open(script_name, "w")

            script.write("""\
from distutils.util import byte_compile
files = [
""")

            # XXX would be nice to write absolute filenames, just for
            # safety's sake (script should be more robust in the face of
            # chdir'ing before running it).  But this requires abspath'ing
            # 'prefix' as well, and that breaks the hack in build_lib's
            # 'byte_compile()' method that carefully tacks on a trailing
            # slash (os.sep really) to make sure the prefix here is "just
            # right".  This whole prefix business is rather delicate -- the
            # problem is that it's really a directory, but I'm treating it
            # as a dumb string, so trailing slashes and so forth matter.

            #py_files = map(os.path.abspath, py_files)
            #if prefix:
            #    prefix = os.path.abspath(prefix)

            script.write(",\n".join(map(repr, py_files)) + "]\n")
            script.write("""
byte_compile(files, optimize=%r, force=%r,
             prefix=%r, base_dir=%r,
             verbose=%r, dry_run=0,
             direct=1)
""" % (optimize, force, prefix, base_dir, verbose))

            script.close()

        cmd = [sys.executable]
        cmd.extend(subprocess._optim_args_from_interpreter_flags())
        cmd.append(script_name)
        spawn(cmd, dry_run=dry_run)
        execute(os.remove, (script_name,), "removing %s" % script_name,
                dry_run=dry_run)

    # "Direct" byte-compilation: use the py_compile module to compile
    # right here, right now.  Note that the script generated in indirect
    # mode simply calls 'byte_compile()' in direct mode, a weird sort of
    # cross-process recursion.  Hey, it works!
    else:
        from py_compile import compile

        for file in py_files:
            if file[-3:] != ".py":
                # This lets us be lazy and not filter filenames in
                # the "install_lib" command.
                continue

            # Terminology from the py_compile module:
            #   cfile - byte-compiled file
            #   dfile - purported source filename (same as 'file' by default)
            if optimize >= 0:
                opt = '' if optimize == 0 else optimize
                cfile = importlib.util.cache_from_source(
                    file, optimization=opt)
            else:
                cfile = importlib.util.cache_from_source(file)
            dfile = file
            if prefix:
                if file[:len(prefix)] != prefix:
                    raise ValueError("invalid prefix: filename %r doesn't start with %r"
                           % (file, prefix))
                dfile = dfile[len(prefix):]
            if base_dir:
                dfile = os.path.join(base_dir, dfile)

            cfile_base = os.path.basename(cfile)
            if direct:
                if force or newer(file, cfile):
                    log.info("byte-compiling %s to %s", file, cfile_base)
                    if not dry_run:
                        compile(file, cfile, dfile)
                else:
                    log.debug("skipping byte-compilation of %s to %s",
                              file, cfile_base)
 def test_cwd(self):
     """Compiling with relative paths from the source directory must
     honor the explicit cfile (pyc_path) and not touch __pycache__."""
     source_rel = os.path.basename(self.source_path)
     target_rel = os.path.basename(self.pyc_path)
     with support.change_cwd(self.directory):
         py_compile.compile(source_rel, target_rel)
     self.assertTrue(os.path.exists(self.pyc_path))
     self.assertFalse(os.path.exists(self.cache_path))
Example #41
0


#2.py
import py_compile
py_compile.compile('1.py')

#compile (byte-compile 1.py via 2.py)
python -O -m 2 1.py

#decompile (recover source from the compiled file)
./uncompyle2 1.pyo > 1.py












Example #42
0
def make(src_fname, debug=False):
    """Package *src_fname* into an encrypted '.lmd' container.

    A '.py' input is byte-compiled first; any other file is copied to a
    '.pyc' name.  The bytecode is zlib-compressed, RC4-encrypted with a
    random 16-byte session key, and both the key and a triple-MD5 digest
    are RSA-wrapped with the private key so a holder of the public key
    can verify and unwrap them.  On success the intermediate .pyc is
    removed and True is returned; False on any failure.

    NOTE(review): Python 2 code (print statements, str-as-bytes,
    str.decode('hex')) -- will not run under Python 3.
    """
    fname = src_fname

    # NOTE(review): split('.')[1] assumes exactly one dot in the name;
    # breaks on 'a.b.py' or extension-less names -- TODO confirm inputs.
    if fname.split('.')[1] == 'py':
        py_compile.compile(fname)
        # Legacy (py2) layout: foo.py compiles to foo.pyc alongside it.
        pyc_name = fname + 'c'
    else:
        pyc_name = fname.split('.')[0] + '.pyc'
        shutil.copy(fname, pyc_name)

    # Public ("pkr") and private ("skr") RSA key material.
    rsa_pu = linvrsa.read_key('key.pkr')
    #print 'pkr : ', rsa_pu

    rsa_pr = linvrsa.read_key('key.skr')
    #print 'skr : ', rsa_pr

    if not (rsa_pr and rsa_pu):
        if debug:
            print 'ERROR : Cannot find the Key files!'
        return False

    # Container header: 4-byte magic + date/time + 28 reserved NUL bytes.
    lmd_data = 'LINV'

    ret_date = linvtimelib.get_now_date()
    ret_time = linvtimelib.get_now_time()

    val_date = struct.pack('<H', ret_date)
    val_time = struct.pack('<H', ret_time)

    reserved_buf = val_date + val_time + (chr(0) * 28)

    lmd_data += reserved_buf

    random.seed()

    # Keep drawing session keys until both the key wrap and the digest
    # wrap round-trip correctly at the expected 32-byte size.
    while 1:
        tmp_lmd_date = str()
        key = str()
        for i in range(16):
            key += chr(random.randint(0, 0xff))

        e_key = linvrsa.crypt(key, rsa_pr)
        if len(e_key) != 32:
            continue

        # Verify the wrapped key unwraps back to the original.
        d_key = linvrsa.crypt(e_key, rsa_pu)

        if key == d_key and len(key) == len(d_key):
            tmp_lmd_date += e_key

            buf1 = open(pyc_name, 'rb').read()
            buf2 = zlib.compress(buf1)

            e_rc4 = linvrc4.RC4()
            e_rc4.set_key(key)

            buf3 = e_rc4.crypt(buf2)

            # RC4 is symmetric: re-encrypting must give back the
            # compressed payload, otherwise retry with a new key.
            e_rc4 = linvrc4.RC4()
            e_rc4.set_key(key)

            if e_rc4.crypt(buf3) != buf2:
                continue

            tmp_lmd_date += buf3

            # Integrity digest: MD5 iterated three times over
            # header + wrapped key + ciphertext.
            md5 = hashlib.md5()
            md5hash = lmd_data + tmp_lmd_date

            for i in range(3):
                md5.update(md5hash)
                md5hash = md5.hexdigest()

            m = md5hash.decode('hex')

            e_md5 = linvrsa.crypt(m, rsa_pr)
            if len(e_md5) != 32:
                continue

            d_md5 = linvrsa.crypt(e_md5, rsa_pu)
            if m == d_md5:
                lmd_data += tmp_lmd_date + e_md5
                break

    # Output name: input up to the first dot, with '.lmd' appended.
    ext = fname.find('.')
    lmd_name = fname[0:ext] + '.lmd'

    try:
        if lmd_data:
            open(lmd_name, 'wb').write(lmd_data)

            os.remove(pyc_name)

            if debug:
                print '        Success : %-13s -> %s' % (fname, lmd_name)
            return True
        else:
            raise IOError
    except IOError:
        if debug:
            print '        Fail : %s' % fname
        return False
Example #43
0
# Byte-compile a single .pyw script in place (hard-coded Windows path).
import py_compile

py_compile.compile(r"E:\workspace\py_w\test2\g5.pyw")
def compile_file(fullname,
                 ddir=None,
                 force=False,
                 rx=None,
                 quiet=0,
                 legacy=False,
                 optimize=-1):
    """Byte-compile one file.

    Arguments (only fullname is required):

    fullname:  the file to byte-compile
    ddir:      if given, the directory name compiled in to the
               byte-code file.
    force:     if True, force compilation, even if timestamps are up-to-date
    rx:        if given, a compiled regex; a file whose full path matches
               it is skipped (and counted as success)
    quiet:     full output with False or 0, errors only with 1,
               no output with 2
    legacy:    if True, produce legacy pyc paths instead of PEP 3147 paths
    optimize:  optimization level or -1 for level of the interpreter

    Returns 1 on success (including skips), 0 if compilation failed.
    """
    success = 1
    name = os.path.basename(fullname)
    if ddir is not None:
        # dfile is the "purported" source path embedded in the bytecode
        # and shown in tracebacks.
        dfile = os.path.join(ddir, name)
    else:
        dfile = None
    if rx is not None:
        mo = rx.search(fullname)
        if mo:
            return success
    if os.path.isfile(fullname):
        if legacy:
            cfile = fullname + 'c'
        else:
            if optimize >= 0:
                # Optimization level 0 maps to an empty cache tag suffix.
                opt = optimize if optimize >= 1 else ''
                cfile = importlib.util.cache_from_source(fullname,
                                                         optimization=opt)
            else:
                cfile = importlib.util.cache_from_source(fullname)
            cache_dir = os.path.dirname(cfile)  # NOTE(review): computed but unused here
        head, tail = name[:-3], name[-3:]
        if tail == '.py':
            if not force:
                try:
                    # Up-to-date check: compare the cached pyc's 8-byte
                    # header (magic number + 32-bit source mtime) against
                    # what the current source would produce.
                    mtime = int(os.stat(fullname).st_mtime)
                    expect = struct.pack('<4sl', importlib.util.MAGIC_NUMBER,
                                         mtime)
                    with open(cfile, 'rb') as chandle:
                        actual = chandle.read(8)
                    if expect == actual:
                        return success
                except OSError:
                    # Missing/unreadable pyc -- fall through and compile.
                    pass
            if not quiet:
                print('Compiling {!r}...'.format(fullname))
            try:
                ok = py_compile.compile(fullname,
                                        cfile,
                                        dfile,
                                        True,
                                        optimize=optimize)
            except py_compile.PyCompileError as err:
                success = 0
                if quiet >= 2:
                    return success
                elif quiet:
                    print('*** Error compiling {!r}...'.format(fullname))
                else:
                    print('*** ', end='')
                # escape non-printable characters in msg
                msg = err.msg.encode(sys.stdout.encoding,
                                     errors='backslashreplace')
                msg = msg.decode(sys.stdout.encoding)
                print(msg)
            except (SyntaxError, UnicodeError, OSError) as e:
                success = 0
                if quiet >= 2:
                    return success
                elif quiet:
                    print('*** Error compiling {!r}...'.format(fullname))
                else:
                    print('*** ', end='')
                print(e.__class__.__name__ + ':', e)
            else:
                # py_compile.compile with doraise=True returns the cfile
                # path on success; 0/None-like means failure.
                if ok == 0:
                    success = 0
    return success
Example #45
0
    def test_bootstrap(self):
        """Self-host the grammar: run parse/generate cycles and require
        each regenerated grammar to reproduce an earlier phase's output
        (fixed-point check).  Intermediate artifacts go under ./tmp.
        """
        print()

        # Fresh ./tmp working directory for this run.
        if os.path.isfile('./tmp/00.ast'):
            shutil.rmtree('./tmp')
        if not os.path.isdir('./tmp'):
            os.mkdir('./tmp')
        print('-' * 20, 'phase 00 - parse using the bootstrap grammar')
        with open('grammar/tatsu.ebnf') as f:
            text = str(f.read())
        g = EBNFParser('EBNFBootstrap')
        grammar0 = g.parse(text)
        ast0 = json.dumps(asjson(grammar0), indent=2)
        with open('./tmp/00.ast', 'w') as f:
            f.write(ast0)

        print('-' * 20, 'phase 01 - parse with parser generator')
        with open('grammar/tatsu.ebnf') as f:
            text = str(f.read())
        g = GrammarGenerator('EBNFBootstrap')
        g.parse(text, trace=False)

        generated_grammar1 = str(g.ast['start'])
        with open('./tmp/01.ebnf', 'w') as f:
            f.write(generated_grammar1)

        print('-' * 20, 'phase 02 - parse previous output with the parser generator')
        with open('./tmp/01.ebnf', 'r') as f:
            text = str(f.read())
        g = GrammarGenerator('EBNFBootstrap')
        g.parse(text, trace=False)
        generated_grammar2 = str(g.ast['start'])
        with open('./tmp/02.ebnf', 'w') as f:
            f.write(generated_grammar2)
        # Regenerated grammar must be a fixed point of the generator.
        self.assertEqual(generated_grammar2, generated_grammar1)

        print('-' * 20, 'phase 03 - repeat')
        with open('./tmp/02.ebnf') as f:
            text = f.read()
        g = EBNFParser('EBNFBootstrap')
        ast3 = g.parse(text)
        with open('./tmp/03.ast', 'w') as f:
            f.write(json.dumps(asjson(ast3), indent=2))

        print('-' * 20, 'phase 04 - repeat')
        with open('./tmp/02.ebnf') as f:
            text = f.read()
        g = GrammarGenerator('EBNFBootstrap')
        g.parse(text)
        parser = g.ast['start']
    #    pprint(parser.first_sets, indent=2, depth=3)
        generated_grammar4 = str(parser)
        with open('./tmp/04.ebnf', 'w') as f:
            f.write(generated_grammar4)
        self.assertEqual(generated_grammar4, generated_grammar2)

        print('-' * 20, 'phase 05 - parse using the grammar model')
        with open('./tmp/04.ebnf') as f:
            text = f.read()
        ast5 = parser.parse(text)
        with open('./tmp/05.ast', 'w') as f:
            f.write(json.dumps(asjson(ast5), indent=2))

        print('-' * 20, 'phase 06 - generate parser code')
        gencode6 = codegen(parser)
        with open('./tmp/g06.py', 'w') as f:
            f.write(gencode6)

        print('-' * 20, 'phase 07 - import generated code')
        # Only verify the generated module byte-compiles cleanly.
        py_compile.compile('./tmp/g06.py', doraise=True)
        # g06 = __import__('g06')
        # GenParser = g06.EBNFBootstrapParser

        # print('-' * 20, 'phase 08 - compile using generated code')
        # parser = GenParser(trace=False)
        # result = parser.parse(
        #     text,
        #     'start',
        #     comments_re=COMMENTS_RE,
        #     eol_comments_re=EOL_COMMENTS_RE
        # )
        # self.assertEqual(result, parser.ast['start'])
        # ast8 = parser.ast['start']
        # json8 = json.dumps(asjson(ast8), indent=2)
        # open('./tmp/08.ast', 'w').write(json8)
        # self.assertEqual(ast5, ast8)

        print('-' * 20, 'phase 09 - Generate parser with semantics')
        with open('grammar/tatsu.ebnf') as f:
            text = f.read()
        parser = GrammarGenerator('EBNFBootstrap')
        g9 = parser.parse(text)
        generated_grammar9 = str(g9)
        with open('./tmp/09.ebnf', 'w') as f:
            f.write(generated_grammar9)
        self.assertEqual(generated_grammar9, generated_grammar1)

        print('-' * 20, 'phase 10 - Parse with a model using a semantics')
        g10 = g9.parse(
            text,
            start_rule='start',
            semantics=EBNFGrammarSemantics('EBNFBootstrap')
        )
        generated_grammar10 = str(g10)
        with open('./tmp/10.ebnf', 'w') as f:
            f.write(generated_grammar10)
        gencode10 = codegen(g10)
        with open('./tmp/g10.py', 'w') as f:
            f.write(gencode10)

        print('-' * 20, 'phase 11 - Pickle the model and try again.')
        # Model must survive a pickle round-trip and still parse.
        with open('./tmp/11.tatsu', 'wb') as f:
            pickle.dump(g10, f, protocol=2)
        with open('./tmp/11.tatsu', 'rb') as f:
            g11 = pickle.load(f)
        r11 = g11.parse(
            text,
            start_rule='start',
            semantics=EBNFGrammarSemantics('EBNFBootstrap')
        )
        with open('./tmp/11.ebnf', 'w') as f:
            f.write(str(g11))
        gencode11 = codegen(r11)
        with open('./tmp/g11.py', 'w') as f:
            f.write(gencode11)

        print('-' * 20, 'phase 12 - Walker')

        class PrintNameWalker(DepthFirstWalker):
            def __init__(self):
                self.walked = []

            def walk_default(self, o, children):
                self.walked.append(o.__class__.__name__)

        v = PrintNameWalker()
        v.walk(g11)
        with open('./tmp/12.txt', 'w') as f:
            f.write('\n'.join(v.walked))

        # note: pygraphviz not yet updated
        if sys.version_info >= (3, 7):
            return

        print('-' * 20, 'phase 13 - Graphics')
        try:
            from tatsu.diagrams import draw
        except ImportError:
            print('PyGraphViz not found!')
        else:
            if not util.PY37:
                draw('./tmp/13.png', g11)
Example #46
0
__author__ = 'hzliyong'

import mymodule
import sys
import os
import py_compile

# Demo: print a module attribute, extend sys.path with a local "module"
# directory, and byte-compile mymodule.py.
print('hi')
print(mymodule.name)
print(sys.path)
modulepath = os.getcwd() + '\\module'
sys.path.append(modulepath)
print(sys.path)

# NOTE(review): output name 'mydodule.pyc' looks like a typo for
# 'mymodule.pyc' -- confirm before relying on the artifact name.
py_compile.compile('mymodule.py', 'mydodule.pyc')

if __name__ == '__main__':
    print(mymodule.name)

print(dir(sys))
Example #47
0
 def test_pvacseq_compiles(self):
     """The pvacseq tool's main.py must byte-compile without errors."""
     main_py = os.path.join(self.pVac_directory, 'tools', 'pvacseq', "main.py")
     self.assertTrue(py_compile.compile(main_py))
Example #48
0
    if frmt_choice == "1":
        full_file = "obfuscated_payload.py"
        print color.GREEN + "(+)" + color.END + " Saving as .py ..."
        os.rename("TEMP_FILE.pyc", "obfuscated_payload.py")
    elif frmt_choice == "2":
        full_file = "obfuscated_payload.pyz"
        print color.GREEN + "(+)" + color.END + " Saving as .pyz ..."
        os.rename("TEMP_FILE.pyc", "obfuscated_payload.pyz")
    elif frmt_choice == "3":
        full_file = "obfuscated_payload.pyc"
        print color.GREEN + "(+)" + color.END + " Saving as .pyc ..."
        os.rename("TEMP_FILE.pyc", "obfuscated_payload.pyc")
    else:
        print color.RED + "(-)" + color.END + " Invalid Format Choosen..."
        print colr.GREEN + "(+)" + color.END + " Saving as .py, by default.."
        os.rename(options.file, "obfuscated_payload.py")
    if os.path.exists(os.getcwd() + "/final_payloads/" + full_file):
        os.remove(os.getcwd() + "/final_payloads/" + full_file)
    shutil.move(full_file, "final_payloads")
    print color.YELLOW + "\n< Final File Informations >"
    print color.GREEN + "(+)" + color.END + " File => " + options.file
    print color.GREEN + "(+)" + color.END + " Mode => Obfuscating"
    print color.GREEN + "(+)" + color.END + " Output File => final_payloads/" + full_file


# Obfuscation driver (Python 2): byte-compile the target script into
# TEMP_FILE.pyc, then let ask_format() rename/package the result.
print color.YELLOW + "(i)" + color.END + " Preparing Full Obfuscating"
print color.GREEN + "(+)" + color.END + " Obfuscating " + options.file + " !..."
# cfile is the actual output path; dfile only sets the source name embedded
# in the bytecode (presumably to mask the original file name -- confirm).
py_compile.compile(options.file, dfile="TEMP_FILE.pyc", cfile="TEMP_FILE.pyc")
print color.GREEN + "(+)" + color.END + " Finished Obfuscation!"
ask_format()
Example #49
0
 def compile(self, file, dp, force_reload=0):
     """Translate a PSP file to Python, byte-compile and import it, and
     cache the resulting module keyed by the PSP file name.

     On a cache miss the PSP source is run through the directive
     processor and PSPConverter to produce a .py file; on a cache hit
     the .py is recompiled only if it is newer than the cached module.
     Compiler errors are captured from stderr and re-raised as
     CompileError.  (Python 2 code: StringIO, imp, os.tmpnam.)

     NOTE(review): the 'dp' parameter is overwritten on the miss path
     and unused on the hit path -- its caller-supplied value appears
     meaningless; confirm against callers.
     """
     # file.name ends in '.psp'; strip the trailing 'psp' to build
     # sibling '.py' / '.pyc' names.
     sourcefile_name = '%spy' % file.name[:-3]
     if not self._use_cache(file):
         # Process all the directives
         dp = _DirectiveProcessor(file, self.WEB_ROOT)
         include_files = dp.get_include_filenames()
         file = dp.process()
         # Convert to psp to py file
         psp_convert = PSPConverter(file)
         psp_convert.convert()
         sourcefile = open(sourcefile_name, 'w')
         # add all the imports to the source file
         self._add_imports(sourcefile, dp.get_imports())
         sourcefile.write(psp_convert.get())
         sourcefile.close()
         targetfile = '%spyc' % file.name[:-3]
         if os.path.isfile(targetfile):
             os.remove(targetfile)
         # Capture py_compile's error output (it prints to stderr
         # instead of raising) and turn it into a CompileError.
         _sdterr = sys.stderr
         stderr = StringIO.StringIO()
         sys.stderr = stderr
         py_compile.compile(sourcefile_name, targetfile)
         sys.stderr = _sdterr
         stderr.seek(0)
         err = ''
         for l in stderr.readlines():
             err += '%s<br>' % l
         if err != '':
             raise CompileError(err)
         # Import under a throwaway unique module name.
         module_name = os.tmpnam()
         psp_module = imp.load_module(module_name, open(targetfile), '',
                                      ('pyc', 'r', imp.PY_COMPILED))
         self.cache[file.name] = {
             'module': psp_module,
             'psp_last_modified': os.stat(file.name)[ST_MTIME],
             'py_last_modified': os.stat(sourcefile_name)[ST_MTIME],
             'include_files': {},
             'dp': dp
         }
         for f in include_files:
             self.cache[file.name]['include_files'][f] = os.stat(
                 f)[ST_MTIME]
     else:
         # Cache hit: recompile only when the generated .py has been
         # modified since the cached module was built.
         if os.stat(sourcefile_name)[ST_MTIME] > self.cache[
                 file.name]['py_last_modified']:
             targetfile = '%spyc' % file.name[:-3]
             if os.path.isfile(targetfile):
                 os.remove(targetfile)
             _sdterr = sys.stderr
             stderr = StringIO.StringIO()
             sys.stderr = stderr
             py_compile.compile(sourcefile_name, targetfile)
             sys.stderr = _sdterr
             stderr.seek(0)
             err = ''
             for l in stderr.readlines():
                 err += '%s<br>' % l
             if err != '':
                 raise CompileError(err)
             module_name = os.tmpnam()
             psp_module = imp.load_module(module_name, open(targetfile), '',
                                          ('pyc', 'r', imp.PY_COMPILED))
             self.cache[file.name]['py_last_modified'] = os.stat(
                 sourcefile_name)[ST_MTIME]
Example #50
0
def run():
    """CLI entry point: byte-compile a source tree into a parallel tree
    of .pyc files.

    Reads src/dst/ignore/force/verbose options from the module-level
    argparse ``parser``, walks the source tree breadth-first, compiles
    every ``.py`` module, moves the resulting ``__pycache__`` contents
    into the destination tree (stripping the interpreter tag from the
    file names), and copies non-Python support files verbatim.

    Exits with -1 if the source tree is missing, or if the destination
    already exists and ``--force`` was not given.
    """
    args = parser.parse_args()

    src_root = args.src
    dst_root = args.dst
    ignore = args.ignore
    verbose = args.verbose  # NOTE(review): currently unused

    if not os.path.exists(src_root):
        sys.stderr.write('Source directory not exist')
        exit(-1)

    if os.path.exists(dst_root):
        if args.force:
            shutil.rmtree(dst_root, ignore_errors=True)
            os.mkdir(dst_root)
        else:
            sys.stderr.write('Destination directory existed')
            exit(-1)
    else:
        os.mkdir(dst_root)


    # Breadth-first walk over package-relative paths, starting at the root.
    q = queue.Queue()
    q.put('.')
    while not q.empty():
        rel_path = q.get()
        curr_abs_path = os.path.join(src_root, rel_path)

        elems = list(filter(
            lambda fname: fname not in ignore.split(','),
            os.listdir(curr_abs_path)
        ))

        support_files = filter(
            lambda fname: os.path.splitext(fname)[1] != '.py' \
                          and os.path.isfile(os.path.join(curr_abs_path, fname)),
            elems
        )
        modules = filter(
            lambda fname: os.path.splitext(fname)[1] == '.py',
            elems
        )
        # BUG FIX: the original used ``fname is not '__pycache__'`` --
        # identity comparison with a string literal is implementation-
        # dependent (SyntaxWarning on CPython >= 3.8) and could wrongly
        # recurse into __pycache__; use value inequality instead.
        packages = filter(
            lambda fname: os.path.isdir(os.path.join(curr_abs_path, fname)) \
                          and fname != '__pycache__',
            elems
        )

        # compile source files
        for module in modules:
            py_compile.compile(
                os.path.join(curr_abs_path, module)
            )

        # move compiled files to the destination directory
        py_cache_dir = os.path.join(src_root, curr_abs_path, '__pycache__')
        if os.path.exists(py_cache_dir):
            # BUG FIX: was ``rel_path is '.'`` (identity, not equality).
            if rel_path == '.':
                # Root level: move the pyc files themselves, not the dir.
                for fname in os.listdir(py_cache_dir):
                    shutil.move(
                        os.path.join(py_cache_dir, fname),
                        os.path.join(dst_root, rel_path)
                    )
                    sys.stdout.write(f'Compile {os.path.join(py_cache_dir, fname)} to {os.path.join(dst_root, rel_path)}\n')
            else:
                shutil.move(
                    py_cache_dir,
                    os.path.join(dst_root, rel_path)
                )
                sys.stdout.write(f'Compile {py_cache_dir} to {os.path.join(dst_root, rel_path)}\n')
            # remove mark version compiler in bytecode files
            # (module.cpython-39.pyc -> module.pyc)
            for fname in os.listdir(os.path.join(dst_root, rel_path)):
                part_name = fname.split('.')
                if len(part_name) > 2:
                    del part_name[-2]
                    new_name = '.'.join(part_name)
                    os.rename(
                        os.path.join(dst_root, rel_path, fname),
                        os.path.join(dst_root, rel_path, new_name)
                    )

        # copy supporting files
        for fname in support_files:
            if not os.path.exists(os.path.join(dst_root, rel_path)):
                os.makedirs(os.path.join(dst_root, rel_path))
            shutil.copyfile(
                os.path.join(src_root, rel_path, fname),
                os.path.join(dst_root, rel_path, fname)
            )
            sys.stdout.write(f'Copy {os.path.join(src_root, rel_path, fname)} to {dst_root, rel_path, fname}\n')

        for package in packages:
            q.put(os.path.join(rel_path, package))
Example #51
0
    def _parse_from_py(self, file, debug=True):
        """Byte-compile *file* and delegate parsing to _parse_from_pyc.

        py_compile.compile returns the path of the written bytecode
        file, which is what _parse_from_pyc expects.
        """
        import py_compile

        pyc_path = py_compile.compile(file)
        return self._parse_from_pyc(pyc_path, debug)
Example #52
0
    def handle_request(self, request):
        """Serve a PSP page: open the file (through the secured filespace
        when configured), (re)compile it if needed, run the cached module
        against a fresh Response, and send the rendered page.

        Query-string flags: 'PSPForceReload' forces reparsing of the PSP
        source; 'PSPUseNewPY' recompiles and reloads the generated .py
        directly.  Compile failures are reported back to the client as a
        minimal HTML page.  (Python 2 code: dict.has_key, StringIO, imp,
        except-comma syntax.)
        """
        path = request.get_path()[1:]
        filename = os.path.basename(path)
        if self.checkIfPspIsTobeSendWithNoCache(filename) != -1:
            request['Cache-Control'] = 'no-cache, no-store'
        response = Response(request)
        try:
            qs = request.get_query_dictionary()
            force_reload = 0
            psp = PSP(response)
            path = request.get_path()[1:]
            f = os.path.join(self.WEB_ROOT, path)
            if self.filespace:
                file_node = self.filespace.as_node(path)
                if self._secured:
                    file_node = self.security_manager.as_secured_node(
                        file_node.as_node_url())
                try:
                    file = file_node.openread()
                except Unauthorized:
                    # Challenge the client once, then retry the read.
                    if not request._has_minimum_authentication():
                        authenticate = request._default_authentication
                        authenticate(request._channel.server.realm)
                    else:
                        raise
                    file = file_node.openread()
            else:
                file = open(f, 'r')
            # If this is in the Query String the page will get reparsed

            if qs.has_key('PSPForceReload'):
                force_reload = 1
            self.compile(file, force_reload)
            psp_module = self.cache[f]['module']
            if qs.has_key('PSPUseNewPY'):
                # Recompile the generated .py and hot-swap the cached
                # module, capturing py_compile's stderr chatter.
                sourcefile_name = '%spy' % file.name[:-3]
                targetfile = '%spyc' % file.name[:-3]
                py_compile.compile(sourcefile_name, targetfile)
                _sdterr = sys.stderr
                stderr = StringIO.StringIO()
                sys.stderr = stderr
                module_name = os.tmpnam()
                psp_module = imp.load_module(module_name, open(targetfile), '',
                                             ('pyc', 'r', imp.PY_COMPILED))
                self.cache[file.name]['module'] = psp_module
                sys.stderr = _sdterr
                stderr.seek(0)
                err = ''
                for l in stderr.readlines():
                    err += '%s<br>' % l
                if err != '':
                    raise CompileError(err)
            psp_module.run(psp, request, response)
            psp.send()
        except EAuthenticationFailed:
            raise
        except CompileError, e:
            err = '%s' % e
            if response == None:
                response = Response(request)
            response.send('<html><body>%s</body></html>' % e)
Example #53
0
from os import system
from py_compile import compile

# Minimal interactive byte-compiler.  Python 2 code: raw_input does not
# exist on Python 3 (the parenthesised prints work on both).
# NOTE(review): 'system' is imported but never used here.
print("compiler python started")
data = raw_input("file : ")
out = raw_input("output : ")
compile(data, out)
print("file saved in " + out)
Example #54
0
import matplotlib.pyplot as plt
import matplotlib.dates as mdates

import pandas as pd
import pdb
#https://stackoverflow.com/questions/842059/is-there-a-portable-way-to-get-the-current-username-in-python
import getpass

# Post-processing setup: make the pyduino helper modules importable,
# byte-compile them, and create the output directories.
dt_s = 3600
scale = 1
del scale
#os.path.dirname(os.path.realpath(__file__))

current_path = os.getcwd()
# Requires the 'pyduino' environment variable to point at the checkout.
sys.path.append(os.environ['pyduino'] + '/python/post_processing/')
py_compile.compile(os.environ['pyduino'] +
                   '/python/post_processing/pandas_scale.py')
py_compile.compile(os.environ['pyduino'] +
                   '/python/post_processing/constants.py')

if not os.path.exists('figure'):
    os.makedirs('figure')
if not os.path.exists('output_data'):
    os.makedirs('output_data')

import pandas_scale
import constants
# NOTE(review): bare reload() is a builtin only on Python 2; on Python 3
# this needs importlib.reload -- confirm target interpreter.
reload(pandas_scale)
reload(constants)

python_file_path = current_path + '/python/'
sys.path.append(python_file_path)
Example #55
0
def _precompile_hostcheck(config_cache, hostname):
    """Generate (and usually byte-compile) a standalone per-host check script.

    Writes <hostname>.py into the precompiled-hostchecks directory as
    Python source text, embedding the needed check plugins, resolved
    IPv4/IPv6 addresses and a main block that runs checking.do_check().
    If the generated source is identical to the existing one, nothing is
    replaced.  Unless config.delay_precompile is set, the source is
    compiled to bytecode immediately; otherwise a symlink is installed
    and the script compiles itself on first execution.

    (Python 2 code: file(), and the generated script deliberately uses
    Python 2 'except X, e' syntax and the 0755 octal literal.)
    """
    host_config = config_cache.get_host_config(hostname)

    console.verbose("%s%s%-16s%s:",
                    tty.bold,
                    tty.blue,
                    hostname,
                    tty.normal,
                    stream=sys.stderr)

    check_api_utils.set_hostname(hostname)

    # Start from a clean slate: drop any previous source/bytecode pair.
    compiled_filename = cmk.utils.paths.precompiled_hostchecks_dir + "/" + hostname
    source_filename = compiled_filename + ".py"
    for fname in [compiled_filename, source_filename]:
        try:
            os.remove(fname)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise

    needed_check_plugin_names = _get_needed_check_plugin_names(host_config)
    if not needed_check_plugin_names:
        console.verbose("(no Check_MK checks)\n")
        return

    # Write to a ".new" temp file first; it is renamed into place (or
    # discarded) only after the content comparison below.
    output = file(source_filename + ".new", "w")
    output.write("#!/usr/bin/env python\n")
    output.write("# encoding: utf-8\n\n")

    output.write("import sys\n\n")

    output.write("if not sys.executable.startswith('/omd'):\n")
    output.write(
        "    sys.stdout.write(\"ERROR: Only executable with sites python\\n\")\n"
    )
    output.write("    sys.exit(2)\n\n")

    # Remove precompiled directory from sys.path. Leaving it in the path
    # makes problems when host names (name of precompiled files) are equal
    # to python module names like "random"
    output.write("sys.path.pop(0)\n")

    output.write("import cmk.utils.log\n")
    output.write("import cmk.utils.debug\n")
    output.write("from cmk.utils.exceptions import MKTerminate\n")
    output.write("\n")
    output.write("import cmk_base.utils\n")
    output.write("import cmk_base.config as config\n")
    output.write("import cmk_base.console as console\n")
    output.write("import cmk_base.checking as checking\n")
    output.write("import cmk_base.check_api as check_api\n")
    output.write("import cmk_base.ip_lookup as ip_lookup\n")

    # Self-compile: replace symlink with precompiled python-code, if
    # we are run for the first time
    if config.delay_precompile:
        output.write("""
import os
if os.path.islink(%(dst)r):
    import py_compile
    os.remove(%(dst)r)
    py_compile.compile(%(src)r, %(dst)r, %(dst)r, True)
    os.chmod(%(dst)r, 0755)

""" % {
            "src": source_filename,
            "dst": compiled_filename
        })

    # Register default Check_MK signal handler
    output.write("cmk_base.utils.register_sigint_handler()\n")

    # initialize global variables
    output.write("""
# very simple commandline parsing: only -v (once or twice) and -d are supported

cmk.utils.log.setup_console_logging()
logger = cmk.utils.log.get_logger("base")

# TODO: This is not really good parsing, because it not cares about syntax like e.g. "-nv".
#       The later regular argument parsing is handling this correctly. Try to clean this up.
cmk.utils.log.set_verbosity(verbosity=len([ a for a in sys.argv if a in [ "-v", "--verbose"] ]))

if '-d' in sys.argv:
    cmk.utils.debug.enable()

""")

    output.write("config.load_checks(check_api.get_check_api_context, %r)\n" %
                 _get_needed_check_file_names(needed_check_plugin_names))

    for check_plugin_name in sorted(needed_check_plugin_names):
        console.verbose(" %s%s%s",
                        tty.green,
                        check_plugin_name,
                        tty.normal,
                        stream=sys.stderr)

    output.write("config.load_packed_config()\n")

    # IP addresses
    needed_ipaddresses, needed_ipv6addresses, = {}, {}
    if host_config.is_cluster:
        for node in host_config.nodes:
            node_config = config_cache.get_host_config(node)
            if node_config.is_ipv4_host:
                needed_ipaddresses[node] = ip_lookup.lookup_ipv4_address(node)

            if node_config.is_ipv6_host:
                needed_ipv6addresses[node] = ip_lookup.lookup_ipv6_address(
                    node)

        # The cluster's own addresses are best-effort: lookup failures
        # are silently ignored here.
        try:
            if host_config.is_ipv4_host:
                needed_ipaddresses[hostname] = ip_lookup.lookup_ipv4_address(
                    hostname)
        except Exception:
            pass

        try:
            if host_config.is_ipv6_host:
                needed_ipv6addresses[hostname] = ip_lookup.lookup_ipv6_address(
                    hostname)
        except Exception:
            pass
    else:
        if host_config.is_ipv4_host:
            needed_ipaddresses[hostname] = ip_lookup.lookup_ipv4_address(
                hostname)

        if host_config.is_ipv6_host:
            needed_ipv6addresses[hostname] = ip_lookup.lookup_ipv6_address(
                hostname)

    output.write("config.ipaddresses = %r\n\n" % needed_ipaddresses)
    output.write("config.ipv6addresses = %r\n\n" % needed_ipv6addresses)

    # perform actual check with a general exception handler
    output.write("try:\n")
    output.write("    sys.exit(checking.do_check(%r, None))\n" % hostname)
    output.write("except MKTerminate:\n")
    output.write("    console.output('<Interrupted>\\n', stream=sys.stderr)\n")
    output.write("    sys.exit(1)\n")
    output.write("except SystemExit, e:\n")
    output.write("    sys.exit(e.code)\n")
    output.write("except Exception, e:\n")
    output.write("    import traceback, pprint\n")

    # status output message
    output.write(
        "    sys.stdout.write(\"UNKNOWN - Exception in precompiled check: %s (details in long output)\\n\" % e)\n"
    )

    # generate traceback for long output
    output.write(
        "    sys.stdout.write(\"Traceback: %s\\n\" % traceback.format_exc())\n"
    )

    output.write("\n")
    output.write("    sys.exit(3)\n")
    output.close()

    # compile python (either now or delayed), but only if the source
    # code has not changed. The Python compilation is the most costly
    # operation here.
    if os.path.exists(source_filename):
        if file(source_filename).read() == file(source_filename +
                                                ".new").read():
            console.verbose(" (%s is unchanged)\n",
                            source_filename,
                            stream=sys.stderr)
            os.remove(source_filename + ".new")
            return
        else:
            console.verbose(" (new content)", stream=sys.stderr)

    os.rename(source_filename + ".new", source_filename)
    if not config.delay_precompile:
        py_compile.compile(source_filename, compiled_filename,
                           compiled_filename, True)
        os.chmod(compiled_filename, 0o755)
    else:
        # Delayed mode: install a symlink; the generated script replaces
        # it with real bytecode the first time it runs (see above).
        if os.path.exists(compiled_filename) or os.path.islink(
                compiled_filename):
            os.remove(compiled_filename)
        os.symlink(hostname + ".py", compiled_filename)

    console.verbose(" ==> %s.\n", compiled_filename, stream=sys.stderr)
Example #56
0
def compilePyFile(filePath='./compile_files/helloWord.py'):
    """Byte-compile a Python source file to its cached .pyc.

    Generalized from the original hard-coded demo path: *filePath* now
    defaults to that path, so existing zero-argument callers behave
    exactly as before.

    :param filePath: path of the .py file to byte-compile.
    :return: path of the byte-compiled file (py_compile.compile's
        return value), or None if compilation was skipped/failed with
        the default doraise=False behaviour.
    """
    return py_compile.compile(filePath)
Example #57
0
        wsgi_app=os.environ.get('WSGI_APP', None),
        debug=os.environ.get('ZOOFCGI_DEBUG', False),
    )

    (options, args) = parser.parse_args()

    # Module-wide debug switch, driven by the command line / environment.
    __dbg__ = options.debug

    # compile self
    # Best effort: byte-compile this script once so later startups are
    # faster.  The expected artefact name uses '.pyo' for non-debug runs,
    # which implies a Python 2 / optimized-mode environment -- confirm.
    compiled = os.path.split(__file__)[-1].replace(
        '.py', '.pyc' if __dbg__ else '.pyo')
    if not os.path.exists(compiled):
        import py_compile

        try:
            py_compile.compile(__file__)
        except:
            # NOTE(review): bare except swallows everything (including
            # KeyboardInterrupt); deliberate best-effort here, but
            # "except Exception:" would be safer.
            pass

    # enable logging
    # Debug runs log verbosely to a per-process, timestamped file placed
    # next to this script.
    if __dbg__:
        logging.basicConfig(filename=os.path.join(
            os.path.dirname(__file__), '_zoofcgi_%s_%d.log' %
            (datetime.datetime.now().strftime('%y%m%d_%H%M%S'), os.getpid())),
                            filemode='w',
                            format='%(asctime)s [%(levelname)-5s] %(message)s',
                            level=logging.DEBUG)

    if options.django_settings_module:
        # check django app by DJANGO_SETTINGS_MODULE
        run_django_app(options.django_settings_module, options.django_root)
Example #58
0
# Build script for the XVM Tank Range mod: byte-compiles the source and
# packages two distribution zips (standalone, and bundled with XVM).
# NOTE(review): glob is unused here; kept since only part of the original
# context is visible.
import py_compile, zipfile, os, glob

WOTVersion = "0.9.0"


def _write_shared_entries(archive):
    # Both distribution zips ship the compiled mod plus its config file.
    archive.write("src/currentvehicle.pyc",
                  "res_mods/" + WOTVersion + "/scripts/client/currentvehicle.pyc")
    archive.write("data/tankrange.xc", "res_mods/xvm/tankrange.xc")


def _fresh_zip(path):
    # Recreate the archive from scratch so stale entries never survive.
    if os.path.exists(path):
        os.remove(path)
    return zipfile.ZipFile(path, "w")


# Compile Source
# NOTE(review): assumes the .pyc lands next to the source (Python 2
# behaviour); Python 3 writes into __pycache__ instead -- confirm the
# target interpreter.
py_compile.compile("src/currentvehicle.py")

# Build Standalone Zip
fZip = _fresh_zip("XVMTankRange.zip")
_write_shared_entries(fZip)
fZip.close()

# Build XVM Zip: the shared entries plus the bundled XVM tree.
fZip = _fresh_zip("XVMTankRange-WithXVM.zip")
_write_shared_entries(fZip)
for root, dirnames, filenames in os.walk('xvm'):
    for filename in filenames:
        # [4:] strips the leading 'xvm/' when mapping into res_mods/.
        fZip.write(os.path.join(root, filename),
                   "res_mods/" + os.path.join(root, filename)[4:])
fZip.close()
Example #59
0
File: setup.py Project: kmshi/miro
 def precompile_site_pyc(self):
     """Byte-compile the bundled site.py inside the app's resource tree.

     Python 2 code (print statement).  Assumes self.rsrcRoot points at
     the bundle's resource root and the global PYTHON_VERSION names the
     embedded interpreter version -- neither is visible here; confirm.
     """
     print "Pre-compiling site.py"
     import py_compile
     py_compile.compile(os.path.join(self.rsrcRoot, 'lib', 'python%s' % PYTHON_VERSION, 'site.py'))
Example #60
0
    modules.append(pkg.__name__)


# The frozen executable needs the .exe suffix only on Windows.
if sys.platform == 'win32':
    targetName = 'pros.exe'
else:
    targetName = 'pros'

# cx_Freeze-style setup: build_exe_options / build_mac_options and
# install_reqs are defined earlier in the file (not visible here).
setup(
    name='pros-cli',
    version=open('pip_version').read().strip(),
    packages=modules,
    url='https://github.com/purduesigbots/pros-cli',
    license='MPL-2.0',
    author='Purdue ACM Sigbots',
    author_email='*****@*****.**',
    description='Command Line Interface for managing PROS projects',
    options={"build_exe": build_exe_options, 'bdist_mac': build_mac_options},
    install_requires=install_reqs,
    executables=[Executable('proscli/main.py', targetName=targetName)]
)

# Post-build steps for the frozen executable.
# NOTE(review): sys.argv[1] raises IndexError when the script is run with
# no command; presumably setup() above already required one -- confirm.
if sys.argv[1] == 'build_exe':
    import py_compile
    import distutils.util
    # cx_Freeze's output directory name encodes platform + Python version.
    build_dir='./build/exe.{}-{}.{}'.format(distutils.util.get_platform(),sys.version_info[0],sys.version_info[1])
    # Compile githubreleases.py straight into the build tree -- presumably
    # it is loaded dynamically at runtime and the freeze misses it; confirm.
    py_compile.compile('./prosconductor/providers/githubreleases.py', cfile='{}/githubreleases.pyc'.format(build_dir))
    import shutil
    import platform
    # Archive the whole build dir as pros_cli-<version>-<os>-<arch>.zip.
    shutil.make_archive('pros_cli-{}-{}-{}'.format(open('version').read().strip(), platform.system()[0:3].lower(), platform.architecture()[0]), 'zip', build_dir, '.')