def _check_for_intervals(self):
    if not self.step % get_configuration("DEBUG_LOGGING_INTERVAL"):
        self._debug_interval()
    if not self.step % get_configuration("SLACK_NOTIFICATION_INTERVAL"):
        self._send_hypothesis_state_to_slack()
    if not self.step % get_configuration("CLEAR_MODULES_CACHING_INTERVAL"):
        self.clear_modules_caching()
def _calculate_num_of_steps(self):
    step = 0
    temp = get_configuration("INITIAL_TEMPERATURE")
    while temp > get_configuration("THRESHOLD"):
        step += 1
        temp *= get_configuration("COOLING_PARAMETER")
    return step
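# Illustrative sketch (not part of the project): the cooling loop in
# _calculate_num_of_steps above is a plain geometric decay, so the same step
# count can be computed in closed form. The three configuration values below
# are made-up assumptions used only to show the arithmetic.
import math

initial_temperature = 100.0   # assumed INITIAL_TEMPERATURE
threshold = 0.01              # assumed THRESHOLD
cooling_parameter = 0.999     # assumed COOLING_PARAMETER

# Smallest integer n with initial_temperature * cooling_parameter**n <= threshold.
num_steps = math.ceil(math.log(threshold / initial_temperature)
                      / math.log(cooling_parameter))
print(num_steps)  # matches the count returned by the while-loop above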
def __init__(self, fip, user='******', password='******', private_key=None):
    transport = SSHTransport(fip, user, password, private_key)
    config = utils.get_configuration()
    preparation_cmd = config.get('iperf_prep_string') or ['']
    transport.exec_command(preparation_cmd)

    # Install iperf using apt or a downloaded deb package
    internet_at_vms = utils.get_configuration().get("internet_at_vms")
    if internet_at_vms.lower() == 'false':
        logger.debug("Using downloaded iperf package")
        transport.put_file("/var/lib/iperf_2.0.5+dfsg1-2_amd64.deb",
                           "/home/ubuntu/iperf_2.0.5+dfsg1-2_amd64.deb")
        transport.exec_command(
            'sudo dpkg -i /home/ubuntu/iperf_2.0.5+dfsg1-2_amd64.deb')
    else:
        logger.debug("Installing iperf using apt")
        transport.exec_command(
            'sudo apt-get update; sudo apt-get install -y iperf')

    # Log whether iperf is installed, with its version
    check = transport.exec_command('dpkg -l | grep iperf')
    logger.debug(check)

    # Start the iperf server
    transport.exec_command('nohup iperf -s > file 2>&1 &')
def get_energy(self):
    data_length = self.get_data_length_given_grammar()
    grammar_length = self.grammar.get_encoding_length()
    data_multiplier = get_configuration("DATA_ENCODING_LENGTH_MULTIPLIER")
    grammar_multiplier = get_configuration("GRAMMAR_ENCODING_LENGTH_MULTIPLIER")
    self.grammar_energy = grammar_length * grammar_multiplier
    self.data_energy = data_length * data_multiplier
    self.combined_energy = self.grammar_energy + self.data_energy
    return self.combined_energy
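# Illustrative sketch (not part of the project): get_energy above computes a
# weighted MDL-style sum of two description lengths. The numbers below are
# made-up assumptions that only show how the multipliers interact.
data_length = 1200        # assumed encoding length of the data given the grammar
grammar_length = 300      # assumed encoding length of the grammar itself
data_multiplier = 1       # assumed DATA_ENCODING_LENGTH_MULTIPLIER
grammar_multiplier = 2    # assumed GRAMMAR_ENCODING_LENGTH_MULTIPLIER

grammar_energy = grammar_length * grammar_multiplier   # 600
data_energy = data_length * data_multiplier            # 1200
combined_energy = grammar_energy + data_energy         # 1800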
def make_mutation(self):
    mutation_weights = [
        (self.lexicon, get_configuration("MUTATE_LEXICON")),
        (self.constraint_set, get_configuration("MUTATE_CONSTRAINT_SET"))
    ]
    weighted_mutatable_object_list = get_weighted_list(mutation_weights)
    object_or_method = choice(weighted_mutatable_object_list)
    if inspect.ismethod(object_or_method):
        method = object_or_method
        mutation_result = method()
    else:
        object_ = object_or_method
        mutation_result = object_.make_mutation()
    return mutation_result
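# Illustrative sketch (not part of the project): make_mutation above delegates
# the weighted choice to get_weighted_list. One plausible behaviour for such a
# helper, assuming integer weights, is to repeat each candidate 'weight' times
# so that a uniform choice() becomes a weighted choice. The helper name and the
# weights below are hypothetical.
from random import choice

def get_weighted_list_sketch(weighted_pairs):
    """Repeat each object by its integer weight (assumption, not the real helper)."""
    weighted = []
    for obj, weight in weighted_pairs:
        weighted.extend([obj] * weight)
    return weighted

# With weights 2 and 1, "lexicon" is chosen twice as often as "constraint_set".
picked = choice(get_weighted_list_sketch([("lexicon", 2), ("constraint_set", 1)]))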
def by_interval_time(self, time_from_last_interval):
    number_of_remaining_steps = self.number_of_expected_steps - self.step
    number_of_remaining_intervals = int(
        number_of_remaining_steps / get_configuration("DEBUG_LOGGING_INTERVAL"))
    expected_time = number_of_remaining_intervals * time_from_last_interval
    return _pretty_runtime_str(expected_time)
def clone_state(self):
    """Pick an inner state and create a new state with the same transitions
    and emissions; if the original state has a transition to itself, the
    original and the new state will be connected."""
    if len(self.inner_states) < get_configuration("MAX_NUM_OF_INNER_STATES"):
        original_state = choice(self.inner_states)
        cloned_state = self._get_next_state()
        self.inner_states.append(cloned_state)
        self.emissions[cloned_state] = deepcopy(self.emissions[original_state])

        # Create incoming connections
        for state in self.transitions:
            if original_state in self.transitions[state]:
                self.transitions[state].append(cloned_state)

        # Copy outgoing connections
        self.transitions[cloned_state] = deepcopy(self.transitions[original_state])
        # write_to_dot(self, "runtime_hmm_{}".format(get_global_datum("step")))
        return True
    else:
        return False
def __init__(self, config_file):
    self.config_file = config_file
    self.config = utils.get_configuration(config_file)
    self.paragraph_re = r'.+\n'
    self.language = None
    self.regex = None
    self.db = None
def __init__(self):
    self.view_functions = {}
    self.map_url_list = []
    self.get_map_url = []
    self.post_map_url = []
    self.conf = get_configuration()
    self.log = self.init_logger('[APP]')
def generate_random(cls):
    bundles = list()
    for i in range(get_configuration(
            "INITIAL_NUMBER_OF_BUNDLES_IN_PHONOTACTIC_CONSTRAINT")):
        bundles.append(FeatureBundle.generate_random())
    return PhonotacticConstraint(bundles)
def generate_random(cls):
    feature_table = get_feature_table()
    if get_configuration("INITIAL_NUMBER_OF_FEATURES") > feature_table.get_number_of_features():
        raise ValueError(
            "INITIAL_NUMBER_OF_FEATURES is bigger than the number of available features")
    feature_dict = dict()
    available_feature_labels = feature_table.get_features()
    for i in range(get_configuration("INITIAL_NUMBER_OF_FEATURES")):
        feature_label = choice(available_feature_labels)
        feature_dict[feature_label] = feature_table.get_random_value(feature_label)
        available_feature_labels.remove(feature_label)
    return FeatureBundle(feature_dict)
def _make_transducer(self):
    transducer, segments, state = super(
        DepConstraint, self)._base_faithfulness_transducer()
    for segment in segments:
        transducer.add_arc(
            Arc(state, segment, segment, CostVector.get_vector(1, 0), state))
        transducer.add_arc(
            Arc(state, segment, NULL_SEGMENT, CostVector.get_vector(1, 0), state))
        if segment.has_feature_bundle(self.feature_bundle):
            transducer.add_arc(
                Arc(state, NULL_SEGMENT, segment, CostVector.get_vector(1, 1), state))
        else:
            transducer.add_arc(
                Arc(state, NULL_SEGMENT, segment, CostVector.get_vector(1, 0), state))
    if get_configuration("ALLOW_CANDIDATES_WITH_CHANGED_SEGMENTS"):
        for first_segment, second_segment in permutations(segments, 2):
            transducer.add_arc(
                Arc(state, first_segment, second_segment,
                    CostVector.get_vector(1, 0), state))
    return transducer
def test_speed_glance(create_image, openstack_clients, record_property):
    """
    Simplified Glance upload/download performance test.

    1. Create a file with random data (dd).
    2. Upload the data to Glance as an image.
    3. Download the image.
    4. Measure download/upload speed and print it to stdout.
    """
    image_size_megabytes = utils.get_configuration().get("IMAGE_SIZE_MB")
    if not is_parsable(image_size_megabytes, int):
        pytest.fail("Can't convert IMAGE_SIZE_MB={} to 'int'".format(
            image_size_megabytes))
    image_size_megabytes = int(image_size_megabytes)
    if not create_image:
        pytest.skip("Can't create image, possibly because there is not enough "
                    "disk space to create a {}MB file".format(image_size_megabytes))
    try:
        image = openstack_clients.image.images.create(name="test_image",
                                                      disk_format='iso',
                                                      container_format='bare')
    except BaseException as e:
        pytest.fail("Can't create image in Glance. Error occurred: {}".format(e))

    # FIXME: An error may happen while executing images.upload:
    # CommunicationError: Error finding address for
    # http://os-ctl-vip.harhipova-cicd-os-test.local:9292/v2/images/8bce33dd-9837-4646-b747-7f7f5ce01092/file:
    # Unable to establish connection to
    # http://os-ctl-vip.harhipova-cicd-os-test.local:9292/v2/images/8bce33dd-9837-4646-b747-7f7f5ce01092/file:
    # [Errno 32] Broken pipe
    # This may happen because of low disk space on the ctl node or an old
    # cryptography package (will be fixed after upgrading to Python 3).
    start_time = time.time()
    try:
        openstack_clients.image.images.upload(
            image.id, image_data=open("/tmp/image_mk_framework.dd", 'rb'))
    except BaseException as e:
        pytest.fail("Can't upload image to Glance. Error occurred: {}".format(e))
    end_time = time.time()

    speed_upload = image_size_megabytes / (end_time - start_time)

    start_time = time.time()
    # This creates a new file /tmp/image_mk_framework.download.
    # It should be removed in teardown.
    with open("/tmp/image_mk_framework.download", 'wb') as image_file:
        for item in openstack_clients.image.images.data(image.id):
            image_file.write(item)
    end_time = time.time()

    speed_download = image_size_megabytes / (end_time - start_time)

    openstack_clients.image.images.delete(image.id)
    record_property("Upload", speed_upload)
    record_property("Download", speed_download)

    print("++++++++++++++++++++++++++++++++++++++++")
    print('upload - {} Mb/s'.format(speed_upload))
    print('download - {} Mb/s'.format(speed_download))
def __init__(self, language, problems, config_filename, documents_path, train=True):
    self.language = language
    self.problems = problems
    self.config_filename = config_filename
    self.config = utils.get_configuration(config_filename)
    self.descriptor_path = self.config["dataset"]
    self.documents_path = documents_path
    self.train = train
def _remove_constraint(self):
    logger.debug("_remove_constraint")
    if len(self.constraints) > get_configuration("MIN_NUMBER_OF_CONSTRAINTS_IN_CONSTRAINT_SET"):
        removable_constraints = list(filter(lambda x: x.get_constraint_name() != "Faith",
                                            self.constraints))
        self.constraints.remove(choice(removable_constraints))
        return True
    else:
        # Cannot remove a constraint: the resulting constraint_set length
        # would be beneath the minimum length.
        return False
def __init__(self):
    self.results = []
    slaves_info = utils.get_slaves_info()
    self.slaves = [utils.StorageServerInfo(server[0], server[1])
                   for server in slaves_info]
    config = utils.get_configuration()
    self.mappers_num = config['mappers_num']
    self.reducers_num = config['reducers_num']
    self.is_job_finished = False
def remove_feature_bundle(self):
    if len(self.feature_bundles) > get_configuration(
            "MIN_FEATURE_BUNDLES_IN_PHONOTACTIC_CONSTRAINT"):
        self.feature_bundles.pop(randint(0, len(self.feature_bundles) - 1))
        return True
    else:
        return False
def insert_feature_bundle(self):
    if len(self.feature_bundles) < get_configuration(
            "MAX_FEATURE_BUNDLES_IN_PHONOTACTIC_CONSTRAINT"):
        new_feature_bundle = FeatureBundle.generate_random()
        self.feature_bundles.insert(randint(0, len(self.feature_bundles)),
                                    new_feature_bundle)
        return True
    else:
        return False
def __init__(self, config, language, classifier_list):
    self.config_file = config
    self.config = utils.get_configuration(config)
    self.language = language
    self.classifier = None  # no single classifier is passed in here
    self.classifier_list = list(classifier_list)
    self.rate = 0.8
    self.db = None
def os_resources(openstack_clients):
    os_actions = os_client.OSCliActions(openstack_clients)
    os_resource = {}
    config = utils.get_configuration()
    image_name = config.get('image_name', 'Ubuntu')
    flavor_name = config.get('flavor_name', 'spt-test')
    flavor_ram = config.get('flavor_ram', 1536)
    flavor_vcpus = config.get('flavor_vcpus', 1)
    flavor_disk = config.get('flavor_disk', 3)

    os_images_list = [image.id for image in openstack_clients.image.images.list(
        filters={'name': image_name})]
    if len(os_images_list) == 0:
        pytest.skip("No images with name {}. This name can be redefined "
                    "with the 'image_name' env var".format(image_name))

    os_resource['image_id'] = str(os_images_list[0])

    os_resource['flavor_id'] = [flavor.id for flavor in
                                openstack_clients.compute.flavors.list()
                                if flavor.name == flavor_name]
    flavor_is_created = False
    if not os_resource['flavor_id']:
        os_resource['flavor_id'] = os_actions.create_flavor(
            flavor_name, flavor_ram, flavor_vcpus, flavor_disk).id
        flavor_is_created = True
    else:
        os_resource['flavor_id'] = str(os_resource['flavor_id'][0])

    os_resource['sec_group'] = os_actions.create_sec_group()
    os_resource['keypair'] = openstack_clients.compute.keypairs.create(
        '{}-{}'.format('spt-key', random.randrange(100, 999)))
    os_resource['net1'] = os_actions.create_network_resources()
    os_resource['ext_net'] = os_actions.get_external_network()
    adm_tenant = os_actions.get_admin_tenant()
    os_resource['router'] = os_actions.create_router(os_resource['ext_net'],
                                                     adm_tenant.id)
    os_resource['net2'] = os_actions.create_network(adm_tenant.id)
    os_resource['subnet2'] = os_actions.create_subnet(os_resource['net2'],
                                                      adm_tenant.id, '10.2.7.0/24')
    for subnet in openstack_clients.network.list_subnets()['subnets']:
        if subnet['network_id'] == os_resource['net1']['id']:
            os_resource['subnet1'] = subnet['id']

    openstack_clients.network.add_interface_router(
        os_resource['router']['id'], {'subnet_id': os_resource['subnet1']})
    openstack_clients.network.add_interface_router(
        os_resource['router']['id'], {'subnet_id': os_resource['subnet2']['id']})

    yield os_resource

    # Teardown
    # time.sleep(5)
    openstack_clients.network.remove_interface_router(
        os_resource['router']['id'], {'subnet_id': os_resource['subnet1']})
    openstack_clients.network.remove_interface_router(
        os_resource['router']['id'], {'subnet_id': os_resource['subnet2']['id']})
    openstack_clients.network.remove_gateway_router(os_resource['router']['id'])
    time.sleep(5)
    openstack_clients.network.delete_router(os_resource['router']['id'])
    time.sleep(5)
    # openstack_clients.network.delete_subnet(subnet1['id'])
    openstack_clients.network.delete_network(os_resource['net1']['id'])
    openstack_clients.network.delete_network(os_resource['net2']['id'])
    openstack_clients.compute.security_groups.delete(os_resource['sec_group'].id)
    openstack_clients.compute.keypairs.delete(os_resource['keypair'].name)
    if flavor_is_created:
        openstack_clients.compute.flavors.delete(os_resource['flavor_id'])
def create_image():
    image_size_megabytes = utils.get_configuration().get("IMAGE_SIZE_MB")
    create_file_cmdline = (
        'dd if=/dev/zero of=/tmp/image_mk_framework.dd '
        'bs=1M count={image_size}'.format(image_size=image_size_megabytes))
    is_cmd_successful = subprocess.call(create_file_cmdline, shell=True) == 0

    yield is_cmd_successful

    # teardown
    subprocess.call('rm -f /tmp/image_mk_framework.dd', shell=True)
    subprocess.call('rm -f /tmp/image_mk_framework.download', shell=True)
def __init__(self, config, language):
    self.config_file = config
    self.config = utils.get_configuration(config)
    self.language = language
    self.feature_list = []
    self.rf_criterion = self.config["rf"][self.language]["criterion"]
    self.rf_num_estimators = self.config["rf"][self.language]["estimators"]
    self.prob_degree = 5
    self.use_adjustment = True
    self.db = None
def __init__(self, string_words):
    duplication_factor = get_configuration("CORPUS_DUPLICATION_FACTOR")
    n = len(string_words)
    duplication_factor_int = int(duplication_factor)
    duplication_factor_fraction = duplication_factor - int(duplication_factor)
    words_after_duplication = string_words * duplication_factor_int
    words_after_duplication.extend(
        string_words[:int(n * duplication_factor_fraction)])
    self.words = words_after_duplication
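# Illustrative sketch (not part of the project): the constructor above supports
# a non-integer CORPUS_DUPLICATION_FACTOR by repeating the whole corpus for the
# integer part and appending a prefix for the fractional part. The factor and
# word list below are made-up assumptions.
string_words = ["ba", "ta", "ka", "ma"]
duplication_factor = 2.5                                   # assumed CORPUS_DUPLICATION_FACTOR

duplication_factor_int = int(duplication_factor)           # 2
duplication_factor_fraction = duplication_factor - duplication_factor_int  # 0.5

words = string_words * duplication_factor_int              # two full copies, 8 words
words.extend(string_words[:int(len(string_words) * duplication_factor_fraction)])  # + first 2
assert words == ["ba", "ta", "ka", "ma", "ba", "ta", "ka", "ma", "ba", "ta"]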
def __init__(self, config, language, classifier):
    self.config_file = config
    self.config = utils.get_configuration(config)
    self.language = language
    self.prob_degree = 3
    self.rate = 0.8
    self.classifier = classifier
    self.db = None
def __init__(self, config, language, classifier):
    self.config_file = config
    self.config = utils.get_configuration(config)
    self.db = None
    self.language = language
    self.left_threshold = 0.5
    self.right_threshold = 0.5
    self.classifier = classifier
    self.rate = 0.8
def __init__(self, config_filename, dataset_filename=None, dataset_type='wiki',
             year_filter=None, parts=None):
    self.config_filename = config_filename
    self.config = utils.get_configuration(config_filename)
    self.path = self.config["dataset"]
    # Import data
    if dataset_filename:
        self.import_dataset(dataset_filename, dataset_type, year_filter, parts)
    else:
        self.load_voters()
def _insert_constraint(self):
    logger.debug("_insert_constraint")
    if len(self.constraints) < get_configuration("MAX_NUMBER_OF_CONSTRAINTS_IN_CONSTRAINT_SET"):
        mutation_weights_for_insert = [
            (DepConstraint, get_configuration("DEP_FOR_INSERT")),
            (MaxConstraint, get_configuration("MAX_FOR_INSERT")),
            (IdentConstraint, get_configuration("IDENT_FOR_INSERT")),
            (PhonotacticConstraint, get_configuration("PHONOTACTIC_FOR_INSERT"))]
        weighted_constraint_class_for_insert = get_weighted_list(mutation_weights_for_insert)
        new_constraint_class = choice(weighted_constraint_class_for_insert)
        new_constraint = new_constraint_class.generate_random()
        index_of_insertion = randrange(len(self.constraints) + 1)
        if new_constraint in self.constraints:
            # The newly generated constraint is already in the constraint_set.
            return False
        else:
            self.constraints.insert(index_of_insertion, new_constraint)
            return True
    else:
        return False
def add_state(self):
    """Adds an empty state."""
    if len(self.inner_states) < get_configuration("MAX_NUM_OF_INNER_STATES"):
        new_state = self._get_next_state()
        self.inner_states.append(new_state)
        self.emissions[new_state] = []
        self.transitions[new_state] = []
        return True
    else:
        return False
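# Illustrative sketch (not part of the project): add_state and clone_state
# above manipulate the HMM as two dicts of lists keyed by state name. The
# state names and emissions below are hypothetical; they only show the layout
# those methods assume.
inner_states = ["q1", "q2"]
transitions = {
    "q0": ["q1"],          # initial state feeds q1
    "q1": ["q1", "q2"],    # q1 loops on itself and feeds q2
    "q2": ["qf"],          # q2 reaches the final state
}
emissions = {
    "q1": ["ba", "ta"],    # segments emitted from q1
    "q2": ["ka"],
}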
def _before_loop(self):
    self.start_time = time.time()
    self.previous_interval_time = self.start_time
    logger.info("Process Id: {}".format(process_id))

    if get_configuration("RANDOM_SEED"):
        seed = choice(range(1, 1000))
        set_configuration("SEED", seed)
        logger.info("Seed: {} - randomly selected".format(seed))
    else:
        seed = get_configuration("SEED")
        logger.info("Seed: {} - specified".format(seed))
    random.seed(seed)

    configurations = Configuration()
    logger.info(configurations)
    logger.info(self.current_hypothesis.grammar.feature_table)

    self.step_limitation = get_configuration("STEPS_LIMITATION")
    if self.step_limitation != float("inf"):
        self.number_of_expected_steps = self.step_limitation
    else:
        self.number_of_expected_steps = self._calculate_num_of_steps()
    logger.info("Number of expected steps is: {:,}".format(
        self.number_of_expected_steps))

    self.current_hypothesis_energy = self.current_hypothesis.get_energy()
    if self.current_hypothesis_energy == float("INF"):
        raise ValueError("first hypothesis energy cannot be INF")
    self._log_hypothesis_state()
    self.previous_interval_energy = self.current_hypothesis_energy
    self.current_temperature = get_configuration("INITIAL_TEMPERATURE")
    self.threshold = get_configuration("THRESHOLD")
    self.cooling_parameter = get_configuration("COOLING_PARAMETER")
def __init__(self, config, language, fe, n_pca=5, n_gaussians=2,
             r=16, normals_type='diag'):
    self.config_file = config
    self.config = utils.get_configuration(config)
    self.language = language
    self.feature_list = []
    self.weights = {}
    self.threshold = 0.0
    self.n_pca = n_pca
    self.r = r
    self.tp = normals_type
    self.components = n_gaussians
    self.db = None
def __init__(self, config, language, fe, n_pca=5, n_gaussians=2,
             r=16, normals_type='diag'):
    self.config_file = config
    self.config = utils.get_configuration(config)
    self.language = language
    self.feature_list = []
    self.weights = {}
    self.threshold = 0.0
    self.n_pca = n_pca
    self.r = r
    self.tp = normals_type
    self.components = n_gaussians
    self.db = None
def test_jenkins_jobs_branch(local_salt_client, check_cicd):
    """Compare Jenkins job versions collected from the cloud against
    those collected from pillars."""
    excludes = ['upgrade-mcp-release', 'deploy-update-salt',
                'git-mirror-downstream-mk-pipelines',
                'git-mirror-downstream-pipeline-library']

    config = utils.get_configuration()
    drivetrain_version = config.get('drivetrain_version', '')
    jenkins_password = get_password(local_salt_client, 'jenkins:client')
    version_mismatch = []
    server = join_to_jenkins(local_salt_client, 'admin', jenkins_password)
    for job_instance in server.get_jobs():
        job_name = job_instance.get('name')
        if job_name in excludes:
            continue

        job_config = server.get_job_config(job_name)
        xml_data = minidom.parseString(job_config)
        BranchSpec = xml_data.getElementsByTagName('hudson.plugins.git.BranchSpec')

        # We use the master branch for pipeline-library in case of the
        # 'testing', 'stable' and 'nightly' versions, and leave the proposed
        # version as is; in other cases we get release/{drivetrain_version}
        # (e.g. release/2019.2.0).
        if drivetrain_version in ['testing', 'nightly', 'stable']:
            expected_version = 'master'
        else:
            expected_version = local_salt_client.pillar_get(
                tgt='gerrit:client',
                param='jenkins:client:job:{}:scm:branch'.format(job_name))

        if not BranchSpec:
            logging.debug("No BranchSpec was found for the {} job".format(job_name))
            continue

        actual_version = BranchSpec[0].getElementsByTagName(
            'name')[0].childNodes[0].data
        if expected_version and actual_version not in expected_version:
            version_mismatch.append("Job {0} has branch {1}. "
                                    "Expected {2}".format(job_name,
                                                          actual_version,
                                                          expected_version))
    assert len(version_mismatch) == 0, (
        "Some DriveTrain jobs have version/branch mismatch:\n{}".format(
            json.dumps(version_mismatch, indent=4)))
def _send_hypothesis_state_to_slack(self):
    log_file_name = get_configuration("LOG_FILE_NAME")
    message = f"{log_file_name}\n"
    message += "Grammar with: {}\n".format(
        self.current_hypothesis.grammar.constraint_set)
    message += "{}\n".format(self.current_hypothesis.grammar.lexicon)
    message += "Parse: {}\n".format(
        self.current_hypothesis.get_recent_data_parse())
    message += "{}\n".format(
        self.current_hypothesis.get_recent_energy_signature())
    message += "HMM:\n"
    for line in self.current_hypothesis.grammar.lexicon.hmm.get_log_lines():
        message += f"{line}:\n"
    send_to_webhook(message)
def augment_feature_bundle(self):
    if len(self.feature_dict) < get_configuration("MAX_FEATURES_IN_BUNDLE"):
        all_feature_labels = self.feature_table.get_features()
        feature_labels_in_feature_bundle = self.feature_dict.keys()
        available_feature_labels = list(
            set(all_feature_labels) - set(feature_labels_in_feature_bundle))
        if available_feature_labels:
            feature_label = choice(available_feature_labels)
            self.feature_dict[feature_label] = self.feature_table.get_random_value(
                feature_label)
            return True
    return False
""" import logging from twisted.python import log from app import MHubApp from utils import get_configuration #observer = log.PythonLoggingObserver() #observer.start() # Configuration cfg = get_configuration() app_id = cfg.get("app").get("general").get("app_id", "mhub") verbose = cfg.get("app").get("general").get("verbose", True) # Logging log_format = "%(asctime)15s %(levelname)s [%(module)s.%(name)s] %(message)s" log_level = logging.DEBUG if verbose else logging.INFO logging.basicConfig(format=log_format, level=log_level) logging.getLogger("cli").setLevel(log_level) logging.getLogger("app").setLevel(log_level) logging.getLogger("service").setLevel(log_level) logging.getLogger("plugin").setLevel(log_level) logger = logging.getLogger("cli") # Services
def __init__(self, config_filename):
    self.config_filename = config_filename
    self.config = utils.get_configuration(config_filename)
    self.path = self.config["dataset"]
    self.languages = self.get_languages()
import sqlalchemy
from sqlalchemy import *
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
import malware_sample_model
import utils
import os, sys

# Load configuration
config = utils.get_configuration("malwarehouse.cfg")
uri = config.get('database', 'uri')
engine = create_engine(uri)
engine.echo = True

# Create the root directory if it does not exist
if engine.name == "sqlite":
    database_path = os.path.dirname(engine.url.database)
    if not os.path.exists(database_path):
        try:
            os.makedirs(database_path)
        except Exception, err:
            print err
            sys.exit(1)

try:
    metadata = MetaData(engine)
    Base = malware_sample_model.Base
    Base.metadata.create_all(engine)
    print "%s successfully created" % uri
except Exception, err:
def __init__(self, config, language):
    self.config_file = config
    self.config = utils.get_configuration(config)
    self.language = language
    self.feature_list = []
    self.db = None