def generate(root):
    """Build the Java bindings Maven artifact for the current changelog version."""
    def checked_call(command):
        # Run an external command; fail loudly on a non-zero exit status.
        if subprocess.call(command) != 0:
            raise Exception("Command '{0}' failed".format(' '.join(command)))

    version = common.get_changelog_version(root)
    zip_name = 'tinkerforge_java_bindings_{0}_{1}_{2}.zip'.format(*version)
    maven_dir = os.path.join(root, 'maven-tmp')
    java_dir = os.path.join(maven_dir, 'src', 'main', 'java')
    unzipped_dir = os.path.join(maven_dir, 'unzipped')
    # Start from a fresh working tree for the Maven build.
    common.recreate_directory(maven_dir)
    os.makedirs(java_dir)
    shutil.copy(zip_name, maven_dir)
    checked_call(['/usr/bin/unzip', '-q', zip_name, '-d', unzipped_dir])
    # Place the unzipped Java sources into the standard Maven layout.
    shutil.copytree(os.path.join(unzipped_dir, 'source', 'com'),
                    os.path.join(java_dir, 'com'))
    common.replace_in_file('pom.xml.template', os.path.join(maven_dir, 'pom.xml'),
                           '{{VERSION}}', '.'.join(version))
    with common.ChangedDirectory(maven_dir):
        checked_call(['/usr/bin/mvn', 'clean', 'verify'])
def __update_log4j(self):
    """Rewrite the node's log4j config: log file location, root level, class levels."""
    conf_file = os.path.join(self.get_conf_dir(), common.LOG4J_CONF)
    log_file = os.path.join(self.get_path(), 'logs', 'system.log')
    # log4j isn't partial to Windows \. I can't imagine why not.
    if common.is_win():
        log_file = re.sub("\\\\", "/", log_file)
    file_key = 'log4j.appender.R.File='
    common.replace_in_file(conf_file, file_key, file_key + log_file)
    # Replace the global (root) log level, when one was requested.
    if self.__global_log_level is not None:
        root_key = 'log4j.rootLogger='
        common.replace_in_file(
            conf_file, root_key,
            root_key + self.__global_log_level + ',stdout,R')
    # Class-specific log levels: replaced in place or appended at the tail.
    for cls, level in self.__classes_log_level.items():
        cls_key = 'log4j.logger' + '.' + cls + '='
        common.replace_or_add_into_file_tail(conf_file, cls_key, cls_key + level)
def __update_envfile(self):
    """Set the JMX port (and, if enabled, a remote-debug agent) in cassandra-env."""
    conf_file = os.path.join(self.get_conf_dir(), common.CASSANDRA_ENV)
    common.replace_in_file(conf_file, 'JMX_PORT=', 'JMX_PORT=' + self.jmx_port)
    # '0' means remote debugging is disabled.
    if self.remote_debug_port != '0':
        debug_opts = ('JVM_OPTS="$JVM_OPTS -Xdebug -Xnoagent -Xrunjdwp:transport=dt_socket,server=y,suspend=n,address='
                      + str(self.remote_debug_port) + '"')
        common.replace_in_file(conf_file, 'address=', debug_opts)
def __update_log4j(self):
    """Point log4j at this node's system.log and apply the configured log level."""
    conf_file = os.path.join(self.get_conf_dir(), common.LOG4J_CONF)
    log_file = os.path.join(self.get_path(), "logs", "system.log")
    file_key = "log4j.appender.R.File="
    common.replace_in_file(conf_file, file_key, file_key + log_file)
    # Root logger: requested level plus the stdout and rolling-file appenders.
    root_key = "log4j.rootLogger="
    common.replace_in_file(conf_file, root_key,
                           root_key + self.__log_level + ",stdout,R")
def __update_log4j(self):
    """Point the log4j appender at this node's log file and set the root log level."""
    append_pattern = 'log4j.appender.R.File='
    conf_file = os.path.join(self.get_conf_dir(), common.LOG4J_CONF)
    log_file = os.path.join(self.get_path(), 'logs', 'system.log')
    common.replace_in_file(conf_file, append_pattern, append_pattern + log_file)
    # Replace the root logger line with the requested level.
    # BUG FIX: 'sdout' was a typo for the 'stdout' appender name, which would
    # have dropped console logging (sibling implementations use 'stdout').
    append_pattern = 'log4j.rootLogger='
    level = self.__log_level
    common.replace_in_file(conf_file, append_pattern,
                           append_pattern + level + ',stdout,R')
def __update_log4j(self):
    """Configure log4j: log file destination and root logger level."""
    append_pattern = 'log4j.appender.R.File='
    conf_file = os.path.join(self.get_conf_dir(), common.LOG4J_CONF)
    log_file = os.path.join(self.get_path(), 'logs', 'system.log')
    common.replace_in_file(conf_file, append_pattern, append_pattern + log_file)
    # Setting the right log level.
    # BUG FIX: the appender list read ',sdout,R' — a typo for 'stdout' that
    # would leave the root logger pointing at a nonexistent appender.
    append_pattern = 'log4j.rootLogger='
    level = self.__log_level
    common.replace_in_file(conf_file, append_pattern,
                           append_pattern + level + ',stdout,R')
def __clean_bat(self):
    # The Windows-specific edits to cassandra.bat are extensive and brittle,
    # and entirely ccm-specific, so they are patched in here rather than
    # being pushed into the main repo.
    batch_path = os.path.join(os.path.join(self.get_path(), 'bin'), "cassandra.bat")
    # Give each node its own JMX port.
    jmx_key = "-Dcom.sun.management.jmxremote.port="
    common.replace_in_file(batch_path, jmx_key,
                           " " + jmx_key + self.jmx_port + "^")
    # Split binaries from conf.
    common.replace_in_file(
        batch_path,
        "if NOT DEFINED CASSANDRA_HOME set CASSANDRA_HOME=%CD%",
        "set CASSANDRA_HOME=" + self.get_cassandra_dir())
    common.replace_in_file(
        batch_path,
        "set CLASSPATH=\\\"%CASSANDRA_HOME%\\\\conf\\\"",
        "set CLASSPATH=\"" + self.get_conf_dir() + "\"")
    # Background the server process via wmic and capture its pid.
    java_cmd = "\"%JAVA_HOME%\\bin\\java\" %JAVA_OPTS% %CASSANDRA_PARAMS% -cp %CASSANDRA_CLASSPATH% \"%CASSANDRA_MAIN%\""
    common.replace_in_file(
        batch_path,
        ".*-cp.*",
        "wmic process call create '" + java_cmd + "' > \"" + self.get_path() + "/dirty_pid.tmp\"\n")
def update_assembly_info(config: dict) -> None:
    """Stamp the configured version into the plugin's AssemblyInfo.cs."""
    full_version = config["version"]
    parts = full_version.split(".")
    ksp_assembly = 'KSPAssembly("{}", {v[0]}, {v[1]}, {v[2]})'.format(
        common.PLUGIN_NAME, v=parts)
    assembly_info = (common.root_dir() / common.PLUGIN_NAME
                     / "Properties" / "AssemblyInfo.cs")
    common.replace_in_file(
        assembly_info,
        [
            # Any dotted 4-part number becomes the new version string.
            (r"(\d+\.){3}\d+", full_version),
            # Rewrite the KSPAssembly attribute from the split components.
            (r"KSPAssembly\([^\)]+\)", ksp_assembly),
        ],
    )
def __update_log4j(self):
    """Set log file path, global log level and per-class levels in the log4j conf."""
    conf_file = os.path.join(self.get_conf_dir(), common.LOG4J_CONF)
    log_file = os.path.join(self.get_path(), 'logs', 'system.log')
    file_key = 'log4j.appender.R.File='
    common.replace_in_file(conf_file, file_key, file_key + log_file)
    # Replace the global (root) log level when one is configured.
    if self.__global_log_level is not None:
        root_key = 'log4j.rootLogger='
        common.replace_in_file(conf_file, root_key,
                               root_key + self.__global_log_level + ',stdout,R')
    # Class-specific log levels: replaced in place or appended at the tail.
    for cls, level in self.__classes_log_level.items():
        cls_key = 'log4j.logger' + '.' + cls + '='
        common.replace_or_add_into_file_tail(conf_file, cls_key, cls_key + level)
def __update_log4j(self):
    """Set the log file path, then either the root level or one class logger's level."""
    conf_file = os.path.join(self.get_conf_dir(), common.LOG4J_CONF)
    log_file = os.path.join(self.get_path(), 'logs', 'system.log')
    file_key = 'log4j.appender.R.File='
    common.replace_in_file(conf_file, file_key, file_key + log_file)
    level = self.__log_level
    target_class = self.__class_name
    if target_class is None:
        # No class given: replace the existing root logger with the new level.
        root_key = 'log4j.rootLogger='
        common.replace_in_file(conf_file, root_key,
                               root_key + level + ',stdout,R')
    else:
        # Add a custom logger for that class, or replace the existing one.
        cls_key = 'log4j.logger' + '.' + target_class + '='
        common.replace_or_add_into_file_tail(conf_file, cls_key, cls_key + level)
def __update_logback(self):
    """Rewrite the logback conf: log file paths, global level, per-class levels."""
    conf_file = os.path.join(self.get_conf_dir(), common.LOGBACK_CONF)
    log_file = os.path.join(self.get_path(), 'logs', 'system.log')
    common.replace_in_file(conf_file, '<file>.*</file>',
                           '<file>' + log_file + '</file>')
    common.replace_in_file(conf_file, '<fileNamePattern>.*</fileNamePattern>',
                           '<fileNamePattern>' + log_file + '.%i.zip</fileNamePattern>')
    # Replace the global log level, when one is configured.
    if self.__global_log_level is not None:
        common.replace_in_file(conf_file, '<root level=".*">',
                               '<root level="' + self.__global_log_level + '">')
    # Class-specific log levels: replaced in place or appended at the tail.
    for cls, level in self.__classes_log_level.items():
        prefix = '\t<logger name="'
        common.replace_or_add_into_file_tail(
            conf_file,
            prefix + cls + '" level=".*"/>',
            prefix + cls + '" level="' + level + '"/>')
def generate_version_file(config: dict) -> None:
    """Create the <plugin>.version file from the template, filling in version numbers."""
    template = common.root_dir() / config["versionTemplate"]
    target = common.mod_dir() / f"{common.PLUGIN_NAME}.version"
    shutil.copy(template, target)
    mod_version = config["version"].split(".")
    ksp_floor = config["kspMin"].split(".")
    ksp_ceiling = config["kspMax"].split(".")
    # Substitute every $(...) placeholder with its numeric component.
    substitutions = [
        (r"\$\(VersionMajor\)", mod_version[0]),
        (r"\$\(VersionMinor\)", mod_version[1]),
        (r"\$\(VersionBuild\)", mod_version[2]),
        (r"\$\(VersionRevision\)", mod_version[3]),
        (r"\$\(KSPMajorMin\)", ksp_floor[0]),
        (r"\$\(KSPMinorMin\)", ksp_floor[1]),
        (r"\$\(KSPMajorMax\)", ksp_ceiling[0]),
        (r"\$\(KSPMinorMax\)", ksp_ceiling[1]),
    ]
    common.replace_in_file(target, substitutions)
def __clean_bat(self):
    """Patch cassandra.bat so the node runs under ccm on Windows.

    The Windows-specific changes are extensive and brittle and entirely
    ccm-specific, so they are applied here instead of upstream.
    """
    # Change the nodes to separate jmx ports.
    bin_dir = os.path.join(self.get_path(), 'bin')
    jmx_port_pattern = "-Dcom.sun.management.jmxremote.port="
    bat_file = os.path.join(bin_dir, "cassandra.bat")
    common.replace_in_file(bat_file, jmx_port_pattern,
                           " " + jmx_port_pattern + self.jmx_port + "^")
    # Split binaries from conf.
    home_pattern = "if NOT DEFINED CASSANDRA_HOME set CASSANDRA_HOME=%CD%"
    common.replace_in_file(bat_file, home_pattern,
                           "set CASSANDRA_HOME=" + self.get_cassandra_dir())
    classpath_pattern = "set CLASSPATH=\\\"%CASSANDRA_HOME%\\\\conf\\\""
    common.replace_in_file(bat_file, classpath_pattern,
                           "set CLASSPATH=\"" + self.get_conf_dir() + "\"")
    # Background the server process and grab the pid.
    run_text = "\"%JAVA_HOME%\\bin\\java\" %JAVA_OPTS% %CASSANDRA_PARAMS% -cp %CASSANDRA_CLASSPATH% \"%CASSANDRA_MAIN%\""
    run_pattern = ".*-cp.*"
    # BUG FIX: the redirect target must be quoted, otherwise the batch command
    # breaks when the node path contains spaces (common on Windows).
    common.replace_in_file(bat_file, run_pattern,
                           "wmic process call create '" + run_text + "' > \"" + self.get_path() + "/dirty_pid.tmp\"\n")
def __update_logback(self):
    """Update the logback configuration: log file paths and log levels."""
    conf_file = os.path.join(self.get_conf_dir(), common.LOGBACK_CONF)
    log_file = os.path.join(self.get_path(), 'logs', 'system.log')
    # In-place pattern rewrites, applied in order.
    replacements = [
        ('<file>.*</file>', '<file>' + log_file + '</file>'),
        ('<fileNamePattern>.*</fileNamePattern>',
         '<fileNamePattern>' + log_file + '.%i.zip</fileNamePattern>'),
    ]
    if self.__global_log_level is not None:
        # Replace the global (root) log level as well.
        replacements.append(('<root level=".*">',
                             '<root level="' + self.__global_log_level + '">'))
    for pattern, replacement in replacements:
        common.replace_in_file(conf_file, pattern, replacement)
    # Class-specific log levels: replaced in place or appended at the tail.
    for class_name in self.__classes_log_level:
        head = '\t<logger name="' + class_name + '" level="'
        common.replace_or_add_into_file_tail(
            conf_file, head + '.*"/>',
            head + self.__classes_log_level[class_name] + '"/>')
def finish(self):
    """Assemble and package the Perl bindings as a CPAN distribution.

    Copies the released binding files, IPConnection/Device/Error modules and
    docs into /tmp/generator/cpan, scaffolds a distribution with
    module-starter, builds the tarball with Makefile.PL + make dist, and
    finally produces the bindings ZIP.
    """
    root = self.get_bindings_root_directory()
    version = common.get_changelog_version(root)
    dot_version = "{0}.{1}.{2}".format(*version)
    # Copy IPConnection examples
    examples = common.find_examples(root, '^example_.*\.pl$')
    for example in examples:
        # NOTE(review): other variants copy into '/tmp/generator/cpan/examples';
        # confirm '/tmp/generator/examples' is the intended destination here.
        shutil.copy(example[1], '/tmp/generator/examples')
    # Copy bindings and readme
    for filename in released_files:
        shutil.copy(os.path.join(root, 'bindings', filename), '/tmp/generator/cpan/source/Tinkerforge')
    shutil.copy(os.path.join(root, 'IPConnection.pm'), '/tmp/generator/cpan/source/Tinkerforge')
    shutil.copy(os.path.join(root, 'Device.pm'), '/tmp/generator/cpan/source/Tinkerforge')
    shutil.copy(os.path.join(root, 'Error.pm'), '/tmp/generator/cpan/source/Tinkerforge')
    shutil.copy(os.path.join(root, 'changelog.txt'), '/tmp/generator/cpan')
    shutil.copy(os.path.join(root, 'readme.txt'), '/tmp/generator/cpan')
    # Generate the CPAN package structure: one Tinkerforge::* module name per
    # released file, plus the core modules, joined for module-starter.
    modules = []
    for filename in released_files:
        modules.append('Tinkerforge::' + filename.replace('.pm', ''))
    modules.append("Tinkerforge::IPConnection")
    modules.append("Tinkerforge::Device")
    modules.append("Tinkerforge::Error")
    modules.append("Tinkerforge")
    modules = ','.join(modules)
    if os.path.exists('/tmp/generator/cpan/Tinkerforge'):
        shutil.rmtree('/tmp/generator/cpan/Tinkerforge')
    subprocess.call("module-starter --dir=/tmp/generator/cpan/Tinkerforge --module={0} --distro=Tinkerforge"
                    " --author=\"Ishraq Ibne Ashraf\" [email protected]".format(modules), shell=True)
    # Version replacing: expand <TF_API_VERSION> in the templates.
    common.replace_in_file(os.path.join(root, 'Tinkerforge_cpan_template.pm'), os.path.join(root, 'Tinkerforge_cpan.pm'), '<TF_API_VERSION>', dot_version)
    common.replace_in_file(os.path.join(root, 'README_cpan_template'), os.path.join(root, 'README_cpan'), '<TF_API_VERSION>', dot_version)
    # Copying bindings over the module-starter stubs.
    subprocess.call("rm -rf /tmp/generator/cpan/Tinkerforge/lib/Tinkerforge/*", shell=True)
    for filename in released_files:
        subprocess.call("cp -ar {0}/bindings/{1} /tmp/generator/cpan/Tinkerforge/lib/Tinkerforge/".format(root, filename), shell=True)
    # Copying IPconnection.pm, Device.pm and Error.pm
    subprocess.call("cp -ar {0}/IPConnection.pm /tmp/generator/cpan/Tinkerforge/lib/Tinkerforge/".format(root), shell=True)
    subprocess.call("cp -ar {0}/Device.pm /tmp/generator/cpan/Tinkerforge/lib/Tinkerforge/".format(root), shell=True)
    subprocess.call("cp -ar {0}/Error.pm /tmp/generator/cpan/Tinkerforge/lib/Tinkerforge/".format(root), shell=True)
    # Copying README
    subprocess.call("rm -rf /tmp/generator/cpan/Tinkerforge/README", shell=True)
    subprocess.call("cp -ar {0}/README_cpan /tmp/generator/cpan/Tinkerforge/README".format(root), shell=True)
    # Copying Changes
    subprocess.call("rm -rf /tmp/generator/cpan/Tinkerforge/Changes", shell=True)
    subprocess.call("cp -ar {0}/changelog.txt /tmp/generator/cpan/Tinkerforge/Changes".format(root), shell=True)
    # Copying Tinkerforge.pm
    subprocess.call("rm -rf /tmp/generator/cpan/Tinkerforge/lib/Tinkerforge.pm", shell=True)
    subprocess.call("cp {0}/Tinkerforge_cpan.pm /tmp/generator/cpan/Tinkerforge/lib/Tinkerforge.pm".format(root), shell=True)
    # Copying Makefile.PL
    subprocess.call("rm -rf /tmp/generator/cpan/Tinkerforge/Makefile.PL", shell=True)
    subprocess.call("cp {0}/Makefile_cpan.PL /tmp/generator/cpan/Tinkerforge/Makefile.PL".format(root), shell=True)
    # Modifying 00-load.t test file: replace its last line with a diag() call.
    old_test_file = open('/tmp/generator/cpan/Tinkerforge/t/00-load.t')
    lines = old_test_file.readlines()
    old_test_file.close()
    subprocess.call("rm -rf /tmp/generator/cpan/Tinkerforge/t/00-load.t", shell=True)
    new_test_file = open('/tmp/generator/cpan/Tinkerforge/t/00-load.t', 'w')
    for i, line in enumerate(lines):
        if i == len(lines) - 1:
            new_test_file.write("diag( \"Testing Tinkerforge $Tinkerforge::VERSION, Perl $], $^X\" );")
        else:
            # NOTE(review): readlines() keeps the trailing '\n', so this looks
            # like it writes doubled newlines — confirm whether intentional.
            new_test_file.write(line + "\n")
    new_test_file.close()
    # Generating the CPAN package archive and cleaning up
    subprocess.call("cd /tmp/generator/cpan/Tinkerforge/ && perl /tmp/generator/cpan/Tinkerforge/Makefile.PL", shell=True)
    subprocess.call("cd /tmp/generator/cpan/Tinkerforge/ && make dist", shell=True)
    shutil.copy("/tmp/generator/cpan/Tinkerforge/Tinkerforge-{0}.{1}.{2}.tar.gz".format(*version), "/tmp/generator/cpan/Tinkerforge.tar.gz")
    shutil.copy("/tmp/generator/cpan/Tinkerforge/Tinkerforge-{0}.{1}.{2}.tar.gz".format(*version), root)
    shutil.rmtree('/tmp/generator/cpan/Tinkerforge')
    os.remove(os.path.join(root, 'Tinkerforge_cpan.pm'))
    os.remove(os.path.join(root, 'README_cpan'))
    # Make zip
    common.make_zip(self.get_bindings_name(), '/tmp/generator/cpan', root, version)
def set_log_level(self, new_level):
    """Rewrite the log4j root logger with *new_level*, keeping the stdout,R appenders."""
    conf_file = os.path.join(self.get_conf_dir(), common.LOG4J_CONF)
    root_key = 'log4j.rootLogger='
    common.replace_in_file(conf_file, root_key, root_key + new_level + ",stdout,R")
def update_envfile(self):
    """Write this node's JMX port into the cassandra-env file."""
    conf_file = os.path.join(self.get_conf_dir(), common.CASSANDRA_ENV)
    common.replace_in_file(conf_file, 'JMX_PORT=', 'JMX_PORT=' + self.jmx_port)
def update_log4j(self):
    """Point the log4j rolling-file appender at this node's system.log."""
    conf_file = os.path.join(self.get_conf_dir(), common.LOG4J_CONF)
    log_file = os.path.join(self.get_path(), 'logs', 'system.log')
    file_key = 'log4j.appender.R.File='
    common.replace_in_file(conf_file, file_key, file_key + log_file)
def start(self, join_ring=True, no_wait=False, verbose=False, update_pid=True, wait_other_notice=False, replace_token=None, replace_address=None, jvm_args=[], wait_for_binary_proto=False, profile_options=None, use_jna=False):
    """
    Start the node. Options includes:
      - join_ring: if false, start the node with -Dcassandra.join_ring=False
      - no_wait: by default, this method returns when the node is started and listening to clients.
        If no_wait=True, the method returns sooner.
      - wait_other_notice: if True, this method returns only when all other live node of the
        cluster have marked this node UP.
      - replace_token: start the node with the -Dcassandra.replace_token option.
      - replace_address: start the node with the -Dcassandra.replace_address option.
    """
    if self.is_running():
        raise NodeError("%s is already running" % self.name)
    # Ports are only checked when not replacing an address.
    for itf in self.network_interfaces.values():
        if itf is not None and replace_address is None:
            common.check_socket_available(itf)
    if wait_other_notice:
        marks = [(node, node.mark_log()) for node in self.cluster.nodes.values() if node.is_running()]
    cdir = self.get_cassandra_dir()
    cass_bin = common.join_bin(cdir, 'bin', 'cassandra')
    # Copy back the cassandra scripts since profiling may have modified it the previous time
    shutil.copy(cass_bin, self.get_bin_dir())
    cass_bin = common.join_bin(self.get_path(), 'bin', 'cassandra')
    # If Windows, change entries in .bat file to split conf from binaries
    if common.is_win():
        self.__clean_bat()
    if profile_options is not None:
        config = common.get_config()
        if not 'yourkit_agent' in config:
            raise NodeError("Cannot enable profile. You need to set 'yourkit_agent' to the path of your agent in a ~/.ccm/config")
        cmd = '-agentpath:%s' % config['yourkit_agent']
        if 'options' in profile_options:
            cmd = cmd + '=' + profile_options['options']
        print cmd
        # Yes, it's fragile as shit
        pattern = r'cassandra_parms="-Dlog4j.configuration=log4j-server.properties -Dlog4j.defaultInitOverride=true'
        common.replace_in_file(cass_bin, pattern, ' ' + pattern + ' ' + cmd + '"')
        os.chmod(cass_bin, os.stat(cass_bin).st_mode | stat.S_IEXEC)
    env = common.make_cassandra_env(cdir, self.get_path())
    pidfile = os.path.join(self.get_path(), 'cassandra.pid')
    args = [cass_bin, '-p', pidfile, '-Dcassandra.join_ring=%s' % str(join_ring)]
    if replace_token is not None:
        args.append('-Dcassandra.replace_token=%s' % str(replace_token))
    if replace_address is not None:
        args.append('-Dcassandra.replace_address=%s' % str(replace_address))
    if use_jna is False:
        args.append('-Dcassandra.boot_without_jna=true')
    args = args + jvm_args
    process = None
    if common.is_win():
        # clean up any old dirty_pid files from prior runs
        if (os.path.isfile(self.get_path() + "/dirty_pid.tmp")):
            os.remove(self.get_path() + "/dirty_pid.tmp")
        process = subprocess.Popen(args, cwd=self.get_bin_dir(), env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    else:
        process = subprocess.Popen(args, env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    # Our modified batch file writes a dirty output with more than just the pid - clean it to get in parity
    # with *nix operation here.
    if common.is_win():
        self.__clean_win_pid()
        self._update_pid(process)
    elif update_pid:
        if no_wait:
            time.sleep(2)  # waiting 2 seconds nevertheless to check for early errors and for the pid to be set
        else:
            # Drain stdout until the launcher script exits/detaches.
            for line in process.stdout:
                if verbose:
                    print line.rstrip('\n')
        self._update_pid(process)
        if not self.is_running():
            raise NodeError("Error starting node %s" % self.name, process)
    if wait_other_notice:
        for node, mark in marks:
            node.watch_log_for_alive(self, from_mark=mark)
    if wait_for_binary_proto:
        self.watch_log_for("Starting listening for CQL clients")
        # we're probably fine at that point but just wait some tiny bit more because
        # the msg is logged just before starting the binary protocol server
        time.sleep(0.2)
    return process
def start(self, join_ring=True, no_wait=False, verbose=False, update_pid=True, wait_other_notice=False, replace_token=None, replace_address=None, jvm_args=[], wait_for_binary_proto=False, profile_options=None, use_jna=False):
    """
    Start the node. Options includes:
      - join_ring: if false, start the node with -Dcassandra.join_ring=False
      - no_wait: by default, this method returns when the node is started and listening to clients.
        If no_wait=True, the method returns sooner.
      - wait_other_notice: if True, this method returns only when all other live node of the
        cluster have marked this node UP.
      - replace_token: start the node with the -Dcassandra.replace_token option.
      - replace_address: start the node with the -Dcassandra.replace_address option.
    """
    if self.is_running():
        raise NodeError("%s is already running" % self.name)
    # Ports are only checked when not replacing an address.
    for itf in list(self.network_interfaces.values()):
        if itf is not None and replace_address is None:
            common.check_socket_available(itf)
    if wait_other_notice:
        marks = [(node, node.mark_log()) for node in list(self.cluster.nodes.values()) if node.is_running()]
    cdir = self.get_cassandra_dir()
    cass_bin = common.join_bin(cdir, 'bin', 'cassandra')
    # Copy back the cassandra scripts since profiling may have modified it the previous time
    shutil.copy(cass_bin, self.get_bin_dir())
    cass_bin = common.join_bin(self.get_path(), 'bin', 'cassandra')
    # If Windows, change entries in .bat file to split conf from binaries
    if common.is_win():
        self.__clean_bat()
    if profile_options is not None:
        config = common.get_config()
        if not 'yourkit_agent' in config:
            raise NodeError("Cannot enable profile. You need to set 'yourkit_agent' to the path of your agent in a ~/.ccm/config")
        cmd = '-agentpath:%s' % config['yourkit_agent']
        if 'options' in profile_options:
            cmd = cmd + '=' + profile_options['options']
        print_(cmd)
        # Yes, it's fragile as shit
        pattern = r'cassandra_parms="-Dlog4j.configuration=log4j-server.properties -Dlog4j.defaultInitOverride=true'
        common.replace_in_file(cass_bin, pattern, ' ' + pattern + ' ' + cmd + '"')
        os.chmod(cass_bin, os.stat(cass_bin).st_mode | stat.S_IEXEC)
    env = common.make_cassandra_env(cdir, self.get_path())
    pidfile = os.path.join(self.get_path(), 'cassandra.pid')
    args = [cass_bin, '-p', pidfile, '-Dcassandra.join_ring=%s' % str(join_ring)]
    if replace_token is not None:
        args.append('-Dcassandra.replace_token=%s' % str(replace_token))
    if replace_address is not None:
        args.append('-Dcassandra.replace_address=%s' % str(replace_address))
    if use_jna is False:
        args.append('-Dcassandra.boot_without_jna=true')
    args = args + jvm_args
    process = None
    if common.is_win():
        process = subprocess.Popen(args, cwd=self.get_bin_dir(), env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    else:
        process = subprocess.Popen(args, env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    # Our modified batch file writes a dirty output with more than just the pid - clean it to get in parity
    # with *nix operation here.
    if common.is_win():
        # short delay could give us a false positive on a node being started as it could die and delete the pid file
        # after we check, however dtests have assumptions on how long the starting process takes.
        time.sleep(.5)
        self.__clean_win_pid()
        self._update_pid(process)
    elif update_pid:
        if no_wait:
            time.sleep(2)  # waiting 2 seconds nevertheless to check for early errors and for the pid to be set
        else:
            # Drain stdout until the launcher script exits/detaches.
            for line in process.stdout:
                if verbose:
                    print_(line.rstrip('\n'))
        self._update_pid(process)
        if not self.is_running():
            raise NodeError("Error starting node %s" % self.name, process)
    if wait_other_notice:
        for node, mark in marks:
            node.watch_log_for_alive(self, from_mark=mark)
    if wait_for_binary_proto:
        self.watch_log_for("Starting listening for CQL clients")
        # we're probably fine at that point but just wait some tiny bit more because
        # the msg is logged just before starting the binary protocol server
        time.sleep(0.2)
    return process
def finish(self):
    """Assemble and package the JavaScript bindings for NPM and the browser.

    Copies bindings/examples into /tmp/generator/npm, bundles the browser
    build with browserify, creates the NPM tarball with `npm pack`, and
    finally produces the bindings ZIP.
    """
    root = self.get_bindings_root_directory()
    version = common.get_changelog_version(root)
    dot_version = "{0}.{1}.{2}".format(*version)
    # Copy IPConnection examples
    examples_nodejs = common.find_examples(root, '^Example.*\.js')
    examples_browser = common.find_examples(root, '^Example.*\.html')
    for example in examples_nodejs:
        shutil.copy(example[1], '/tmp/generator/npm/nodejs/examples')
    for example in examples_browser:
        shutil.copy(example[1], '/tmp/generator/npm/browser/examples')
    # Copy bindings and readme.
    # NOTE(review): these comparisons test `filename` against a full joined
    # path — if released_files holds bare file names the first three branches
    # never match; confirm against the definition of released_files.
    for filename in released_files:
        if filename == os.path.join(root, 'bindings', 'TinkerforgeNPM.js'):
            shutil.copy(os.path.join(root, 'bindings', filename), '/tmp/generator/npm/nodejs/npm_pkg_dir/Tinkerforge.js')
        elif filename == os.path.join(root, 'bindings', 'BrowserAPI.js'):
            shutil.copy(os.path.join(root, 'bindings', filename), '/tmp/generator/npm/nodejs/source/Tinkerforge/')
        elif filename == os.path.join(root, 'bindings', 'TinkerforgeSource.js'):
            shutil.copy(os.path.join(root, 'bindings', filename), '/tmp/generator/npm/nodejs/source/Tinkerforge.js')
        else:
            shutil.copy(os.path.join(root, 'bindings', filename), '/tmp/generator/npm/nodejs/source/Tinkerforge/')
            shutil.copy(os.path.join(root, 'bindings', filename), '/tmp/generator/npm/nodejs/npm_pkg_dir/lib/')
    # Replace <TF_API_VERSION> in package.json file
    common.replace_in_file(os.path.join(root, 'package.json.template'), '/tmp/generator/npm/nodejs/npm_pkg_dir/package.json', '<TF_API_VERSION>', dot_version)
    shutil.copy(os.path.join(root, 'README.md'), '/tmp/generator/npm/nodejs/npm_pkg_dir')
    shutil.copy(os.path.join(root, 'LICENSE'), '/tmp/generator/npm/nodejs/npm_pkg_dir')
    shutil.copy(os.path.join(root, 'IPConnection.js'), '/tmp/generator/npm/nodejs/npm_pkg_dir/lib')
    shutil.copy(os.path.join(root, 'Device.js'), '/tmp/generator/npm/nodejs/npm_pkg_dir/lib')
    shutil.copy(os.path.join(root, 'IPConnection.js'), '/tmp/generator/npm/nodejs/source/Tinkerforge')
    shutil.copy(os.path.join(root, 'Device.js'), '/tmp/generator/npm/nodejs/source/Tinkerforge')
    shutil.copy(os.path.join(root, 'changelog.txt'), '/tmp/generator/npm')
    shutil.copy(os.path.join(root, 'readme.txt'), '/tmp/generator/npm/readme.txt')
    # Copy browser specific files
    shutil.copy(os.path.join(root, 'es5-shim.js'), '/tmp/generator/npm/nodejs/source/Tinkerforge')
    shutil.copy(os.path.join(root, 'es5-sham.js'), '/tmp/generator/npm/nodejs/source/Tinkerforge')
    # Make Tinkerforge.js for browser with browserify
    with common.ChangedDirectory('/tmp/generator/npm/nodejs/source/Tinkerforge/'):
        browserify_args = ['browserify']
        # Sorted for a deterministic bundle.
        browserify_args.extend(sorted(os.listdir('/tmp/generator/npm/nodejs/source/Tinkerforge/')))
        browserify_args.append('-o')
        browserify_args.append('/tmp/generator/npm/browser/source/Tinkerforge.js')
        if subprocess.call(browserify_args) != 0:
            raise Exception("Command '{0}' failed".format(' '.join(browserify_args)))
    # Remove browser specific files
    os.remove('/tmp/generator/npm/nodejs/source/Tinkerforge/BrowserAPI.js')
    os.remove('/tmp/generator/npm/nodejs/source/Tinkerforge/es5-shim.js')
    os.remove('/tmp/generator/npm/nodejs/source/Tinkerforge/es5-sham.js')
    # Generate the NPM package and put it on the root of ZIP archive
    with common.ChangedDirectory('/tmp/generator/npm/nodejs/npm_pkg_dir'):
        if subprocess.call('npm pack', shell=True) != 0:
            raise Exception("Command npm pack failed")
    shutil.copy('/tmp/generator/npm/nodejs/npm_pkg_dir/tinkerforge-{0}.tgz'.format(dot_version), '/tmp/generator/npm/nodejs/tinkerforge.tgz')
    shutil.copy('/tmp/generator/npm/nodejs/npm_pkg_dir/tinkerforge-{0}.tgz'.format(dot_version), os.path.join(root, 'tinkerforge-{0}.tgz'.format(dot_version)))
    # Remove directory npm_pkg_dir
    shutil.rmtree('/tmp/generator/npm/nodejs/npm_pkg_dir/')
    # Make zip
    version = common.get_changelog_version(root)
    common.make_zip(self.get_bindings_name(), '/tmp/generator/npm', root, version)
    # copy Tinkerforge.js to bindings root dir so copy_all.py can pick it up
    shutil.copy('/tmp/generator/npm/browser/source/Tinkerforge.js', root)
def finish(self):
    """Assemble and package the Perl bindings as a CPAN distribution.

    Copies the released binding files and core modules into
    /tmp/generator/cpan, scaffolds a distribution with module-starter,
    builds the tarball with Makefile.PL + make dist, and finally produces
    the bindings ZIP.
    """
    root = self.get_bindings_root_directory()
    version = common.get_changelog_version(root)
    dot_version = "{0}.{1}.{2}".format(*version)
    # Copy IPConnection examples
    examples = common.find_examples(root, '^example_.*\.pl$')
    for example in examples:
        shutil.copy(example[1], '/tmp/generator/cpan/examples')
    # Copy bindings and readme
    for filename in released_files:
        shutil.copy(os.path.join(root, 'bindings', filename), '/tmp/generator/cpan/source/lib/Tinkerforge')
    shutil.copy(os.path.join(root, 'IPConnection.pm'), '/tmp/generator/cpan/source/lib/Tinkerforge')
    shutil.copy(os.path.join(root, 'Device.pm'), '/tmp/generator/cpan/source/lib/Tinkerforge')
    shutil.copy(os.path.join(root, 'Error.pm'), '/tmp/generator/cpan/source/lib/Tinkerforge')
    shutil.copy(os.path.join(root, 'changelog.txt'), '/tmp/generator/cpan')
    shutil.copy(os.path.join(root, 'readme.txt'), '/tmp/generator/cpan')
    # Generate the CPAN package structure: one Tinkerforge::* module name per
    # released file, plus the core modules, joined for module-starter.
    modules = []
    for filename in released_files:
        modules.append('Tinkerforge::' + filename.replace('.pm', ''))
    modules.append("Tinkerforge::IPConnection")
    modules.append("Tinkerforge::Device")
    modules.append("Tinkerforge::Error")
    modules.append("Tinkerforge")
    modules = ','.join(modules)
    if os.path.exists('/tmp/generator/cpan/Tinkerforge'):
        shutil.rmtree('/tmp/generator/cpan/Tinkerforge')
    subprocess.call(
        "module-starter --dir=/tmp/generator/cpan/Tinkerforge --module={0} --distro=Tinkerforge"
        " --author=\"Ishraq Ibne Ashraf\" [email protected]".format(modules),
        shell=True)
    # Version replacing: expand <TF_API_VERSION> in the templates.
    common.replace_in_file(os.path.join(root, 'Tinkerforge.pm.template'), '/tmp/generator/cpan/Tinkerforge/lib/Tinkerforge.pm', '<TF_API_VERSION>', dot_version)
    shutil.copy('/tmp/generator/cpan/Tinkerforge/lib/Tinkerforge.pm', '/tmp/generator/cpan/source/lib')
    common.replace_in_file(os.path.join(root, 'README.template'), '/tmp/generator/cpan/Tinkerforge/README', '<TF_API_VERSION>', dot_version)
    # Copying bindings over the module-starter stubs.
    subprocess.call("rm -rf /tmp/generator/cpan/Tinkerforge/lib/Tinkerforge/*", shell=True)
    for filename in released_files:
        subprocess.call("cp -ar {0}/bindings/{1} /tmp/generator/cpan/Tinkerforge/lib/Tinkerforge/".format(root, filename), shell=True)
    # Copying IPconnection.pm, Device.pm and Error.pm
    subprocess.call("cp -ar {0}/IPConnection.pm /tmp/generator/cpan/Tinkerforge/lib/Tinkerforge/".format(root), shell=True)
    subprocess.call("cp -ar {0}/Device.pm /tmp/generator/cpan/Tinkerforge/lib/Tinkerforge/".format(root), shell=True)
    subprocess.call("cp -ar {0}/Error.pm /tmp/generator/cpan/Tinkerforge/lib/Tinkerforge/".format(root), shell=True)
    # Copying Changes
    subprocess.call("rm -rf /tmp/generator/cpan/Tinkerforge/Changes", shell=True)
    subprocess.call("cp -ar {0}/changelog.txt /tmp/generator/cpan/Tinkerforge/Changes".format(root), shell=True)
    # Copying Makefile.PL
    subprocess.call("rm -rf /tmp/generator/cpan/Tinkerforge/Makefile.PL", shell=True)
    subprocess.call("cp {0}/Makefile.PL /tmp/generator/cpan/Tinkerforge/Makefile.PL".format(root), shell=True)
    subprocess.call("cp {0}/Makefile.PL /tmp/generator/cpan/source/Makefile.PL".format(root), shell=True)
    # Modifying 00-load.t test file: replace its last line with a diag() call.
    old_test_file = open('/tmp/generator/cpan/Tinkerforge/t/00-load.t')
    lines = old_test_file.readlines()
    old_test_file.close()
    subprocess.call("rm -rf /tmp/generator/cpan/Tinkerforge/t/00-load.t", shell=True)
    new_test_file = open('/tmp/generator/cpan/Tinkerforge/t/00-load.t', 'w')
    for i, line in enumerate(lines):
        if i == len(lines) - 1:
            new_test_file.write("diag( \"Testing Tinkerforge $Tinkerforge::VERSION, Perl $], $^X\" );")
        else:
            # NOTE(review): readlines() keeps the trailing '\n', so this looks
            # like it writes doubled newlines — confirm whether intentional.
            new_test_file.write(line + "\n")
    new_test_file.close()
    # Generating the CPAN package archive and cleaning up
    subprocess.call("cd /tmp/generator/cpan/Tinkerforge/ && perl /tmp/generator/cpan/Tinkerforge/Makefile.PL", shell=True)
    subprocess.call("cd /tmp/generator/cpan/Tinkerforge/ && make dist", shell=True)
    shutil.copy("/tmp/generator/cpan/Tinkerforge/Tinkerforge-{0}.{1}.{2}.tar.gz".format(*version), root)
    shutil.rmtree('/tmp/generator/cpan/Tinkerforge')
    # Make zip
    common.make_zip(self.get_bindings_name(), '/tmp/generator/cpan', root, version)
def start(self, join_ring=True, no_wait=False, verbose=False, update_pid=True, wait_other_notice=False, replace_token=None, jvm_args=[], wait_for_binary_proto=False, profile_options=None):
    """
    Start the node.

    Options includes:
      - join_ring: if false, start the node with -Dcassandra.join_ring=False
      - no_wait: by default, this method returns when the node is started
        and listening to clients. If no_wait=True, the method returns sooner.
      - verbose: if True, echo each line of the node's stdout while waiting
        for startup (only read when no_wait is False).
      - update_pid: if True, read the node's pid from its output and raise
        NodeError if the node is not running afterwards.
      - wait_other_notice: if True, this method returns only when all other
        live node of the cluster have marked this node UP.
      - replace_token: start the node with the -Dcassandra.replace_token option.
      - jvm_args: extra arguments appended to the cassandra command line.
        NOTE(review): mutable default ([]) — only read here (never mutated),
        but a None default would be safer; confirm before changing callers.
      - wait_for_binary_proto: if True, also block until the log reports the
        CQL (binary protocol) server is starting.
      - profile_options: if not None, run the node under the YourKit profiling
        agent; requires 'yourkit_agent' to be set in ~/.ccm/config. An
        optional 'options' key is appended to the agent command line.

    Returns the subprocess.Popen handle of the launched cassandra process.
    Raises NodeError if the node is already running, if profiling is
    requested without 'yourkit_agent' configured, or (when update_pid) if
    the node failed to come up.
    """
    if self.is_running():
        raise NodeError("%s is already running" % self.name)
    # Fail early if any of the node's network interfaces is already taken.
    for itf in self.network_interfaces.values():
        if itf is not None:
            common.check_socket_available(itf)
    if wait_other_notice:
        # Remember each live node's current log position so that, after
        # startup, we only scan log lines written from this point on.
        marks = [ (node, node.mark_log()) for node in self.cluster.nodes.values() if node.is_running() ]
    cdir = self.get_cassandra_dir()
    cass_bin = os.path.join(cdir, 'bin', 'cassandra')
    # Copy back the cassandra script since profiling may have modified it the previous time
    shutil.copy(cass_bin, self.get_bin_dir())
    cass_bin = os.path.join(self.get_bin_dir(), 'cassandra')
    if profile_options is not None:
        config = common.get_config()
        if not 'yourkit_agent' in config:
            raise NodeError("Cannot enable profile. You need to set 'yourkit_agent' to the path of your agent in a ~/.ccm/config")
        cmd = '-agentpath:%s' % config['yourkit_agent']
        if 'options' in profile_options:
            cmd = cmd + '=' + profile_options['options']
        print cmd
        # Fragile: patches the launcher script in place by matching this exact
        # cassandra_parms line, then appending the agent option before the
        # closing quote. Breaks if the script's line ever changes.
        pattern=r'cassandra_parms="-Dlog4j.configuration=log4j-server.properties -Dlog4j.defaultInitOverride=true'
        common.replace_in_file(cass_bin, pattern, ' ' + pattern + ' ' + cmd + '"')
    # The copied script must be executable for Popen below.
    os.chmod(cass_bin, os.stat(cass_bin).st_mode | stat.S_IEXEC)
    env = common.make_cassandra_env(cdir, self.get_path())
    pidfile = os.path.join(self.get_path(), 'cassandra.pid')
    args = [ cass_bin, '-p', pidfile, '-Dcassandra.join_ring=%s' % str(join_ring) ]
    if replace_token is not None:
        args = args + [ '-Dcassandra.replace_token=%s' % str(replace_token) ]
    args = args + jvm_args
    process = subprocess.Popen(args, env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    if update_pid:
        if no_wait:
            time.sleep(2) # waiting 2 seconds nevertheless to check for early errors and for the pid to be set
        else:
            # Drain stdout until the launcher exits/EOFs; with verbose, echo
            # each line as it arrives.
            for line in process.stdout:
                if verbose:
                    print line.rstrip('\n')
        self._update_pid(process)
        if not self.is_running():
            raise NodeError("Error starting node %s" % self.name, process)
    if wait_other_notice:
        # Block until every other live node has logged this node as UP,
        # scanning only past the marks recorded before startup.
        for node, mark in marks:
            node.watch_log_for_alive(self, from_mark=mark)
    if wait_for_binary_proto:
        self.watch_log_for("Starting listening for CQL clients")
        # we're probably fine at that point but just wait some tiny bit more because
        # the msg is logged just before starting the binary protocol server
        time.sleep(0.2)
    return process
def update_readme(config: dict) -> None:
    """Refresh the generated sections of ReadMe.md.

    Rewrites the package-list block between its ``begin_packages`` /
    ``end_packages`` marker comments, and — when a Burst package cache with
    ``bcl.exe`` exists — the ``begin_bcl_usage`` / ``end_bcl_usage`` block
    as well. Does nothing if package versions cannot be determined.
    """
    versions = common.package_versions()
    if versions is None:
        # No version information available; leave the readme untouched.
        return

    doc_url = "https://docs.unity3d.com/Packages/{name}@{version}/manual/index.html"

    def render_entry(display_name: str) -> str:
        # Map the display name onto its package id and look up its version.
        package_id = f"com.{display_name.lower()}"
        full_version = versions[package_id]  # type: ignore # false None positive
        # The Unity manual URL only accepts a <major.minor> version.
        docs_version = "{}.{}".format(*full_version.split(".")[:2])
        link = doc_url.format(name=package_id, version=docs_version)
        return f"* [{display_name} {full_version}]({link}) "

    entries = "\n".join(render_entry(n) for n in config["unityPackages"])
    packages_pattern = re.compile(
        r"(\[comment\]: # \(begin_packages\)).*(\[comment\]: # \(end_packages\))",
        re.DOTALL,  # '.' must span the lines between the two markers
    )
    # \g<1>/\g<2> keep the marker comments, replacing only what is between.
    replacements = [(packages_pattern, "\\g<1>\n" + entries + "\n\\g<2>")]

    package_cache = common.unity_dir() / "Library" / "PackageCache"
    candidates = list(package_cache.glob("com.unity.burst@*/**/bcl.exe"))
    try:
        bcl_exe = candidates[0]
    except IndexError:
        # Burst not installed: skip the bcl usage section, best effort.
        pass
    else:
        help_run = subprocess.run([bcl_exe, "--help"], capture_output=True)
        # bcl.exe reports --help as an unknown argument on its first output
        # line, so drop that line.
        usage = "\n".join(help_run.stdout.decode("utf-8").splitlines()[1:])
        # Scrub machine-specific paths, and flip \ to / so the re package
        # does not try to interpret them as escape sequences.
        usage = usage.replace(os.getcwd(), "<Current working dir>").replace("\\", "/")
        # Remove private info.
        usage = re.sub(r"Users/\w+", "Users/<username>", usage)
        bcl_pattern = re.compile(
            r"(\[comment\]: # \(begin_bcl_usage\)).*(\[comment\]: # \(end_bcl_usage\))",
            re.DOTALL,
        )
        replacements.append(
            (bcl_pattern, "\\g<1>\n```text\n" + usage + "\n```\n\\g<2>")
        )

    common.replace_in_file(common.root_dir() / "ReadMe.md", replacements)