def run_simulated_manually(filepath, lines_to_read=sys.maxint, cluster_args=None):
    """Cluster lines from a simulated data file.

    :param filepath: resource-relative path to the input file
    :param lines_to_read: maximum number of lines to consume
    :param cluster_args: optional keyword arguments for ``Cluster``
    :return: dict with clustering ``stats`` and the ``uindex`` summary;
        labels containing a colon are treated as positives
    """
    with open(get_resource_name(filepath), 'r') as infile:
        rows = [raw.rstrip().split(' ') for raw in islice(infile, lines_to_read)]
    if cluster_args is None:
        cluster_args = dict()
    cluster = Cluster(**cluster_args)
    shingler = Shingler(span=3)
    summarizer = FeatureClusterSummarizer()
    content_dict = dict()
    for fields in rows:
        if len(fields) > 1:
            label, text = fields
        else:
            label, text = fields[0], ''
        content_dict[label] = text
        shingles = shingler.get_shingles(text)
        summarizer.add_features(label, shingles)
        cluster.add_set(shingles, label)
    clusters = cluster.get_clusters()

    def is_label_positive(lbl):
        return ':' in lbl

    return dict(stats=get_stats(clusters, is_label_positive),
                uindex=summarizer.summarize_clusters(clusters))
def test_shingler(self):
    """Character shingling: unique mode is a set, ordered mode keeps order."""
    unique_shingler = Shingler(span=5, skip=1, unique=True)
    self.assertIn(("d", "b", "a"), unique_shingler.get_shingles("abracadabra"))
    ordered_shingler = Shingler(span=5, skip=1, unique=False)
    self.assertEqual(("a", "r", "c"),
                     ordered_shingler.get_shingles("abracadabra")[0])
def test_word_shingler(self):
    """Word shingling via RegexTokenizer mirrors the character-level test."""
    sentence = "the quick brown fox jumps over a lazy dog"
    unique_shingler = Shingler(span=5, skip=1, unique=True,
                               tokenizer=RegexTokenizer())
    self.assertIn(("jumps", "a", "dog"), unique_shingler.get_shingles(sentence))
    ordered_shingler = Shingler(span=5, skip=1, unique=False,
                                tokenizer=RegexTokenizer())
    self.assertEqual(("the", "brown", "jumps"),
                     ordered_shingler.get_shingles(sentence)[0])
def __init__(self, cfg, content_filter=None, opts=None):
    """Read configuration and build signer, shinglers, and cluster builder.

    :param cfg: configuration mapping; expects keys 'min_support',
        'sig_width', 'lsh_options', 'kmin', 'shingler', and 'sketch'
    :param content_filter: optional filter object used to reject content
    :param opts: optional mapping deep-merged over the default
        normalizer/tokenizer keyword arguments
    """
    self.cfg = cfg
    # Shared normalizer/tokenizer kwargs for both shinglers; caller-supplied
    # opts override these defaults via deep merge.
    common_kwargs = dict(
        normalizer=HTMLNormalizer(),
        tokenizer=RegexTokenizer()
    )
    deepupdate(common_kwargs, opts or {})

    # Set options
    self.content_filter = content_filter
    min_support = cfg['min_support']

    # Configure minhash signer
    sig_width = cfg['sig_width']
    lsh_hasher = LSHC(width=sig_width, **cfg['lsh_options'])
    self.signer = MinHashSignature(sig_width,
                                   lsh_hasher=lsh_hasher,
                                   kmin=cfg['kmin'])

    # Configure shingler
    # NOTE(review): update() mutates the cfg['shingler'] dict in place.
    cfg_key_shingle = cfg['shingler']
    cfg_key_shingle.update(common_kwargs)
    self.shingler = Shingler(**cfg_key_shingle)

    # Configure sketch comparison algorithm
    cfg_sketch = cfg['sketch']
    self.sketch_enabled = cfg_sketch['enabled']
    sketch_dist_fn = None
    xor_threshold = None
    if self.sketch_enabled:
        algorithm_name = cfg_sketch['algorithm']
        try:
            sketch_algorithm = getattr(SketchModel, algorithm_name)
        except AttributeError:
            raise RuntimeError("Unknown sketch model specified: '%s'"
                               % algorithm_name)
        sketch_bits = cfg_sketch['size'] * 8  # cfg size is in bytes
        cfg_sketch_shingle = cfg_sketch['shingler']
        cfg_sketch_shingle.update(common_kwargs)
        self.sketch_shingler = Shingler(**cfg_sketch_shingle)
        if sketch_algorithm == SketchModel.simhash:
            self.sketch_signer = SimHashSignature(bit_depth=sketch_bits)
        elif sketch_algorithm == SketchModel.minhash:
            self.sketch_signer = MinHashSketchSignature(sketch_bits)
        # Max Hamming distance allowed for the configured resemblance level
        xor_threshold = \
            int(floor(sketch_bits * (1.0 - float(cfg_sketch['resemblance']))))
        sketch_dist_fn = hamming
    self.cluster_builder = Cluster(sketch_dist_fn=sketch_dist_fn,
                                   max_dist=xor_threshold,
                                   min_support=min_support)
def test_names(self):
    """Should return 327 clusters of names.

    NOTE(review): the docstring previously claimed 281 clusters, but the
    assertion below expects 327 for this seeded configuration.
    """
    with open(get_resource_name('data/perrys.csv'), 'r') as fhandle:
        data = set(line.rstrip() for line in fhandle)
    cluster = Cluster(width=20, bandwidth=5, seed=SEED)
    shingler = Shingler(3)
    for name in data:
        shingles = shingler.get_shingles(name)
        cluster.add_item(shingles, name)
    clusters = cluster.get_clusters()
    self.assertEqual(327, len(clusters))
def test_bills(self):
    """Should return 96 clusters of bills.

    NOTE(review): the docstring previously claimed 97 clusters, but the
    assertion below expects 96 for this seeded configuration.
    """
    with open(get_resource_name('data/bills100.txt'), 'r') as fhandle:
        data = [line.rstrip().split('|') for line in fhandle]
    cluster = Cluster(width=20, bandwidth=5, seed=SEED)
    shingler = Shingler(span=3, tokenizer=RegexTokenizer())
    for label, text in data:
        shingles = shingler.get_shingles(text)
        cluster.add_item(shingles, label)
    clusters = cluster.get_clusters()
    self.assertEqual(96, len(clusters))
def test_bills(self):
    """Should return 97 clusters of bills."""
    with open(get_resource_name('data/bills100.txt'), 'r') as source:
        records = [row.rstrip().split('|') for row in source]
    cluster = Cluster(width=20, bandwidth=5, seed=SEED)
    shingler = Shingler(span=3, tokenizer=RegexTokenizer())
    for bill_id, bill_text in records:
        cluster.add_item(shingler.get_shingles(bill_text), bill_id)
    clusters = cluster.get_clusters()
    self.assertEqual(len(clusters), 97)
def test_names(self):
    """Should return 209 clusters of names.

    NOTE(review): the docstring previously claimed 281 clusters, but the
    assertion below expects 209 for this seeded configuration.
    """
    with open(get_resource_name('data/perrys.csv'), 'r') as fhandle:
        data = set(line.rstrip() for line in fhandle)
    cluster = Cluster(width=20, bandwidth=5, seed=SEED)
    shingler = Shingler(3)
    for name in data:
        shingles = shingler.get_shingles(name)
        cluster.add_item(shingles, name)
    clusters = cluster.get_clusters()
    self.assertEqual(len(clusters), 209)
def test_names_kmin_scheme(self):
    """Should return 176 clusters of names.

    NOTE(review): the docstring previously claimed 145 clusters, but the
    assertion below expects 176 for this seeded configuration.
    """
    with open(get_resource_name('data/perrys.csv'), 'r') as fhandle:
        data = set(line.rstrip() for line in fhandle)
    cluster = Cluster(width=20, bandwidth=5, kmin=2, lsh_scheme="a1",
                      seed=SEED)
    shingler = Shingler(3)
    for name in data:
        shingles = shingler.get_shingles(name)
        cluster.add_item(shingles, name)
    clusters = cluster.get_clusters()
    self.assertEqual(len(clusters), 176)
def test_bills(self):
    """Should return 97 clusters of bills."""
    with open(get_resource_name('data/bills100.txt'), 'r') as source:
        records = [row.rstrip().split('|') for row in source]
    cluster = Cluster(width=20, bandwidth=5)
    shingler = Shingler(span=3, tokenizer=RegexTokenizer())
    summarizer = FeatureClusterSummarizer()
    for bill_id, bill_text in records:
        shingles = shingler.get_shingles(bill_text)
        summarizer.add_features(bill_id, shingles)
        cluster.add_set(shingles, bill_id)
    clusters = cluster.get_clusters()
    uindex = summarizer.summarize_clusters(clusters)
    self.assertEqual(len(clusters), 97)
    self.assertAlmostEqual(uindex, 1.0)
def test_names(self):
    """Should return 281 clusters of names."""
    with open(get_resource_name('data/perrys.csv'), 'r') as source:
        names = set(row.rstrip() for row in source)
    cluster = Cluster(width=20, bandwidth=5)
    shingler = Shingler(3)
    summarizer = FeatureClusterSummarizer()
    for name in names:
        shingles = shingler.get_shingles(name)
        summarizer.add_features(name, shingles)
        cluster.add_set(shingles, name)
    clusters = cluster.get_clusters()
    uindex = summarizer.summarize_clusters(clusters)
    self.assertEqual(len(clusters), 281)
    self.assertAlmostEqual(uindex, 0.9780512134223747)
def test_names_kmin(self):
    """Should return 252 clusters of names."""
    with open(get_resource_name('data/perrys.csv'), 'r') as source:
        names = set(row.rstrip() for row in source)
    cluster = Cluster(width=20, bandwidth=5, kmin=2)
    shingler = Shingler(3)
    summarizer = FeatureClusterSummarizer()
    for name in names:
        shingles = shingler.get_shingles(name)
        summarizer.add_features(name, shingles)
        cluster.add_set(shingles, name)
    clusters = cluster.get_clusters()
    uindex = summarizer.summarize_clusters(clusters)
    self.assertEqual(len(clusters), 252)
    self.assertAlmostEqual(uindex, 0.9732840816954408)
def get_clusters(args, data):
    """Cluster labeled texts using command-line style options.

    :param args: namespace with clustering options (``width``, ``bandwidth``,
        ``lsh_scheme``, ``kmin``, ``hashfun``) and shingling options
        (``shingle_span``, ``shingle_skip``, ``shingle_kmin``, ``shingle_uniq``)
    :param data: iterable of ``(label, text)`` pairs
    :return: clusters produced by ``Cluster.get_clusters()``
    """
    cluster = Cluster(width=args.width,
                      bandwidth=args.bandwidth,
                      lsh_scheme=args.lsh_scheme,
                      kmin=args.kmin,
                      hashfun=args.hashfun)
    shingler = Shingler(
        span=args.shingle_span,
        skip=args.shingle_skip,
        kmin=args.shingle_kmin,
        unique=bool(args.shingle_uniq)
    )
    # The previous version also accumulated a label->text dict that was
    # never read; that dead code has been removed.
    for label, text in data:
        shingles = shingler.get_shingles(text)
        cluster.add_item(shingles, label)
    return cluster.get_clusters()
def run_simulated_manually(filepath, lines_to_read=sys.maxint, cluster_args=None):
    """Cluster a simulated data file.

    :return: pair of (clusters, predicate testing whether a label is
        positive, i.e. contains a colon)
    """
    with open(get_resource_name(filepath), 'r') as infile:
        rows = [raw.rstrip().split(' ') for raw in islice(infile, lines_to_read)]
    if cluster_args is None:
        cluster_args = dict()
    cluster = Cluster(**cluster_args)
    shingler = Shingler(span=3)
    content_dict = dict()
    for fields in rows:
        if len(fields) > 1:
            label, text = fields
        else:
            label, text = fields[0], ''
        content_dict[label] = text
        cluster.add_item(shingler.get_shingles(text), label)
    clusters = cluster.get_clusters()

    def is_label_positive(lbl):
        return ':' in lbl

    return clusters, is_label_positive
def run_simulated_manually(filepath, lines_to_read=sys.maxint, cluster_args=None):
    """Cluster a simulated data file and describe the resulting clusters.

    :return: dict with a single ``stats`` entry; labels containing a colon
        count as positives
    """
    with open(get_resource_name(filepath), 'r') as infile:
        rows = [raw.rstrip().split(' ') for raw in islice(infile, lines_to_read)]
    if cluster_args is None:
        cluster_args = dict()
    cluster = Cluster(**cluster_args)
    shingler = Shingler(span=3)
    content_dict = dict()
    for fields in rows:
        if len(fields) > 1:
            label, text = fields
        else:
            label, text = fields[0], ''
        content_dict[label] = text
        cluster.add_item(shingler.get_shingles(text), label)
    clusters = cluster.get_clusters()

    def is_label_positive(lbl):
        return ':' in lbl

    return dict(stats=describe_clusters(clusters, is_label_positive))
def test_simulated(opts, data): cluster = Cluster(width=opts.width, bandwidth=opts.bandwidth, lsh_scheme=opts.lsh_scheme) shingler = Shingler(span=opts.shingle_span) s = FeatureClusterSummarizer() content_dict = dict() for label, text in data: content_dict[label] = text shingles = shingler.get_shingles(text) s.add_features(label, shingles) cluster.add_item(shingles, label) clusters = cluster.get_clusters() c = describe_clusters(clusters, lambda x: len(x.split(':')) > 1) ti = s.summarize_clusters(clusters) print json.dumps(dict( stats=c.dict(), ratios=dict( precision=c.get_precision(), recall=c.get_recall() ), ti=ti ))
def __init__(self, cfg, content_filter=None, trace_every=0,
             get_body=None, get_label=None, get_prefix=None,
             min_support=None, seed=0, normalizer=None, tokenizer=None):
    """Read configuration and wire up signer, shinglers, and cluster builder.

    :param cfg: mapping with keys 'min_support', 'sig_width', 'lsh_options',
        'kmin', 'shingler', and 'sketch'
    :param content_filter: optional filter; accepted items are skipped
    :param trace_every: log progress every N items (0 disables tracing)
    :param get_body: optional callable extracting an item's body
    :param get_label: optional callable extracting an item's label
    :param get_prefix: optional callable extracting a shingle prefix
    :param min_support: overrides cfg['min_support'] when not None
    :param seed: seed forwarded to the sketch signers
    :param normalizer: overrides the default normalizer when not None
    :param tokenizer: overrides the default tokenizer when not None
    """
    self.cfg = cfg
    self._get_body = get_body
    self._get_label = get_label
    self._get_prefix = get_prefix
    self.trace_every = trace_every

    # Set options
    self.content_filter = content_filter
    self.min_support = cfg[
        'min_support'] if min_support is None else min_support

    # normalizer and tokenizer
    self.normalizer = get_default_normalizer(
        **cfg.get('preprocessor', {}).get('normalizer', {})) \
        if normalizer is None else normalizer
    self.tokenizer = get_default_tokenizer(
    ) if tokenizer is None else tokenizer

    # Configure minhash signer
    sig_width = cfg['sig_width']
    lsh_hasher = LSHC(width=sig_width, **cfg['lsh_options'])
    self.signer = MinHashSignature(sig_width,
                                   lsh_hasher=lsh_hasher,
                                   kmin=cfg['kmin'])

    # Configure shingler
    cfg_key_shingle = cfg['shingler']
    self.shingler = get_default_shingler(**cfg_key_shingle)

    # Configure sketch comparison algorithm
    cfg_sketch = cfg['sketch']
    self.sketch_enabled = cfg_sketch['enabled']
    self.sketch_dist_fn = None
    self.max_dist = None
    if self.sketch_enabled:
        algorithm_name = cfg_sketch['algorithm']
        try:
            sketch_algorithm = getattr(SketchModel, algorithm_name)
        except AttributeError:
            raise RuntimeError("Unknown sketch model specified: '%s'"
                               % algorithm_name)
        self.sketch_bits = cfg_sketch['size']
        cfg_sketch_shingler = cfg_sketch['shingler']
        if not cfg_sketch_shingler['enabled']:
            # if sketch shingler is disabled, we also disable signer
            # as we will use default signer
            self.sketch_shingler = None
            self.sketch_signer = None
        elif sketch_algorithm == SketchModel.simhash:
            del cfg_sketch_shingler['enabled']
            self.sketch_shingler = Shingler(**cfg_sketch_shingler)
            self.sketch_signer = SimHashSignature(self.sketch_bits,
                                                  seed=seed)
        elif sketch_algorithm == SketchModel.minhash:
            del cfg_sketch_shingler['enabled']
            self.sketch_shingler = Shingler(**cfg_sketch_shingler)
            self.sketch_signer = MinHashSketchSignature(self.sketch_bits,
                                                        seed=seed)
            # minhash sketch hashes raw tokens directly, so strip the
            # shingler's tokenizer/normalizer
            self.sketch_shingler._tokenizer = None
            self.sketch_shingler._normalizer = None
        # Max Hamming distance allowed for the configured resemblance level
        self.max_dist = \
            int(floor(self.sketch_bits *
                      (1.0 - float(cfg_sketch['resemblance']))))
        self.sketch_dist_fn = hamming
    self.sketch_operator = OPERATOR_MAP[cfg_sketch.get(
        'operator', 'and')]
    self.cluster_builder = Cluster(sketch_dist_fn=self.sketch_dist_fn,
                                   max_dist=self.max_dist,
                                   min_support=self.min_support,
                                   sketch_operator=self.sketch_operator)
def get_default_shingler(**opts):
    """Build a ``Shingler`` with its normalizer and tokenizer disabled.

    :param opts: keyword arguments forwarded to ``Shingler``
    :return: configured ``Shingler`` instance
    """
    default_shingler = Shingler(**opts)
    default_shingler._tokenizer = None
    default_shingler._normalizer = None
    return default_shingler
class HDClustering(object):

    """High-dimensional clustering driven by a configuration mapping.

    Combines a MinHash signer (with LSH banding) for candidate generation
    with an optional SimHash/MinHash sketch used as a secondary filter.
    Also exposes mapper/reducer entry points for MapReduce-style use.
    """

    def __init__(self, cfg, content_filter=None, trace_every=0,
                 get_body=None, get_label=None, get_prefix=None,
                 min_support=None, seed=0, normalizer=None, tokenizer=None):
        """Read configuration.

        :param cfg: mapping with keys 'min_support', 'sig_width',
            'lsh_options', 'kmin', 'shingler', and 'sketch'
        :param content_filter: optional filter; accepted items are skipped
        :param trace_every: log progress every N items (0 disables tracing)
        :param get_body: optional callable extracting an item's body
        :param get_label: optional callable extracting an item's label
        :param get_prefix: optional callable extracting a shingle prefix
        :param min_support: overrides cfg['min_support'] when not None
        :param seed: seed forwarded to the sketch signers
        :param normalizer: overrides the default normalizer when not None
        :param tokenizer: overrides the default tokenizer when not None
        """
        self.cfg = cfg
        self._get_body = get_body
        self._get_label = get_label
        self._get_prefix = get_prefix
        self.trace_every = trace_every

        # Set options
        self.content_filter = content_filter
        self.min_support = cfg['min_support'] if min_support is None else min_support

        # normalizer and tokenizer
        self.normalizer = get_default_normalizer(
            **cfg.get('preprocessor', {}).get('normalizer', {})) \
            if normalizer is None else normalizer
        self.tokenizer = get_default_tokenizer() if tokenizer is None else tokenizer

        # Configure minhash signer
        sig_width = cfg['sig_width']
        lsh_hasher = LSHC(width=sig_width, **cfg['lsh_options'])
        self.signer = MinHashSignature(sig_width,
                                       lsh_hasher=lsh_hasher,
                                       kmin=cfg['kmin'])

        # Configure shingler
        cfg_key_shingle = cfg['shingler']
        self.shingler = get_default_shingler(**cfg_key_shingle)

        # Configure sketch comparison algorithm
        cfg_sketch = cfg['sketch']
        self.sketch_enabled = cfg_sketch['enabled']
        self.sketch_dist_fn = None
        self.max_dist = None
        if self.sketch_enabled:
            algorithm_name = cfg_sketch['algorithm']
            try:
                sketch_algorithm = getattr(SketchModel, algorithm_name)
            except AttributeError:
                raise RuntimeError("Unknown sketch model specified: '%s'"
                                   % algorithm_name)
            self.sketch_bits = cfg_sketch['size']
            cfg_sketch_shingler = cfg_sketch['shingler']
            if not cfg_sketch_shingler['enabled']:
                # if sketch shingler is disabled, we also disable signer
                # as we will use default signer
                self.sketch_shingler = None
                self.sketch_signer = None
            elif sketch_algorithm == SketchModel.simhash:
                del cfg_sketch_shingler['enabled']
                self.sketch_shingler = Shingler(**cfg_sketch_shingler)
                self.sketch_signer = SimHashSignature(self.sketch_bits,
                                                      seed=seed)
            elif sketch_algorithm == SketchModel.minhash:
                del cfg_sketch_shingler['enabled']
                self.sketch_shingler = Shingler(**cfg_sketch_shingler)
                self.sketch_signer = MinHashSketchSignature(self.sketch_bits,
                                                            seed=seed)
                # minhash sketch hashes raw tokens directly, so strip the
                # shingler's tokenizer/normalizer
                self.sketch_shingler._tokenizer = None
                self.sketch_shingler._normalizer = None
            # Max Hamming distance allowed for the configured resemblance
            self.max_dist = \
                int(floor(self.sketch_bits *
                          (1.0 - float(cfg_sketch['resemblance']))))
            self.sketch_dist_fn = hamming
        self.sketch_operator = OPERATOR_MAP[cfg_sketch.get('operator', 'and')]
        self.cluster_builder = Cluster(sketch_dist_fn=self.sketch_dist_fn,
                                       max_dist=self.max_dist,
                                       min_support=self.min_support,
                                       sketch_operator=self.sketch_operator)

    def _map_iter(self, data):
        """Find clusters in an iterable"""
        get_body = self._get_body
        get_label = self._get_label
        get_prefix = self._get_prefix
        for i, obj in enumerate(data):
            body = obj if get_body is None else get_body(obj)
            label = i if get_label is None else get_label(obj)
            prefix = None if get_prefix is None else get_prefix(obj)
            for feat in self._map_item(obj, body, label, prefix):
                yield feat

    def _map_item(self, obj, body, label, prefix=None):
        # Yield (keys, (label, sketch)) features for one item.
        # NOTE(review): the `body` parameter is currently unused here;
        # content is read from obj['content'] instead — confirm intent.
        # Extract features
        src = MessageSource.source(obj)
        obj_content = obj['content']
        normalized_content, meta = self.normalizer.normalize(obj_content)
        content_tokens = self.tokenizer.tokenize(normalized_content)
        if self.content_filter is not None:
            rule_accept, rule_score = self.content_filter.accept(
                obj,
                content_tokens=content_tokens,
                urls=meta.get('url_components', []),
                src=src)
        else:
            rule_accept = False
        if not rule_accept:
            features = self.shingler.get_shingles(content_tokens,
                                                  prefix=prefix)
            if self.sketch_enabled and (self.sketch_shingler is None or
                                        self.sketch_signer is None):
                # default signer produces the sketch as a by-product
                keys, sketch = self.signer.get_signature(features,
                                                         with_sketch=True)
            elif self.sketch_enabled and (self.sketch_shingler is not None and
                                          self.sketch_signer is not None):
                keys = self.signer.get_signature(features)
                sketch_features = self.sketch_shingler.get_shingles(content_tokens)
                sketch = self.sketch_signer.get_signature(sketch_features)
            else:
                keys = self.signer.get_signature(features)
                sketch = None
            yield (keys, (label, sketch))

    def clusters_from_iter(self, data):
        """Find clusters in an iterable"""
        cluster_builder = self.cluster_builder
        trace_every = self.trace_every
        for i, obj in enumerate(self._map_iter(data)):
            if trace_every > 0 and (not i % trace_every):
                LOG.info("Processing line " + str(i))
            keys, val = obj
            # val may be a (label, sketch) pair or a bare label
            label, sketch = val \
                if isinstance(val, tuple) \
                else (val, None)
            cluster_builder.add_item(keys, label=label, sketch=sketch)
        return cluster_builder.get_clusters()

    def mapper(self, obj):
        """Perform a mapper task in MR"""
        get_body = self._get_body
        get_label = self._get_label
        get_prefix = self._get_prefix
        body = obj if get_body is None else get_body(obj)
        # NOTE(review): falls back to the whole obj as label here, whereas
        # _map_iter falls back to the enumeration index — confirm intent.
        label = obj if get_label is None else get_label(obj)
        prefix = None if get_prefix is None else get_prefix(obj)
        for keys, val in self._map_item(obj, body, label, prefix):
            for key in keys:
                yield key, val

    def reducer(self, key, tuple_gen):
        """Perform a reducer task in MR

        If sketches enabled, data consists of:
        (key, [(lbl, sk), (lbl, sk), (lbl, sk)])
        Otherwise:
        (key, [lbl, lbl, lbl])
        """
        # If not using sketches, we are done
        if self.sketch_dist_fn is None:
            return key, list(set(tuple_gen))
        # create a dict mapping a label to a sketch
        return key, dict(tuple_gen).items()
def __init__(self, cfg, content_filter=None, trace_every=0,
             get_body=None, get_label=None, get_prefix=None,
             min_support=None, seed=0, normalizer=None, tokenizer=None):
    """Read configuration and wire up signer, shinglers, and cluster builder.

    :param cfg: mapping with keys 'min_support', 'sig_width', 'lsh_options',
        'kmin', 'shingler', and 'sketch'
    :param content_filter: optional filter; accepted items are skipped
    :param trace_every: log progress every N items (0 disables tracing)
    :param get_body: optional callable extracting an item's body
    :param get_label: optional callable extracting an item's label
    :param get_prefix: optional callable extracting a shingle prefix
    :param min_support: overrides cfg['min_support'] when not None
    :param seed: seed forwarded to the sketch signers
    :param normalizer: overrides the default normalizer when not None
    :param tokenizer: overrides the default tokenizer when not None
    """
    self.cfg = cfg
    self._get_body = get_body
    self._get_label = get_label
    self._get_prefix = get_prefix
    self.trace_every = trace_every

    # Set options
    self.content_filter = content_filter
    self.min_support = cfg['min_support'] if min_support is None else min_support

    # normalizer and tokenizer
    self.normalizer = get_default_normalizer(
        **cfg.get('preprocessor', {}).get('normalizer', {})) \
        if normalizer is None else normalizer
    self.tokenizer = get_default_tokenizer() if tokenizer is None else tokenizer

    # Configure minhash signer
    sig_width = cfg['sig_width']
    lsh_hasher = LSHC(width=sig_width, **cfg['lsh_options'])
    self.signer = MinHashSignature(sig_width,
                                   lsh_hasher=lsh_hasher,
                                   kmin=cfg['kmin'])

    # Configure shingler
    cfg_key_shingle = cfg['shingler']
    self.shingler = get_default_shingler(**cfg_key_shingle)

    # Configure sketch comparison algorithm
    cfg_sketch = cfg['sketch']
    self.sketch_enabled = cfg_sketch['enabled']
    self.sketch_dist_fn = None
    self.max_dist = None
    if self.sketch_enabled:
        algorithm_name = cfg_sketch['algorithm']
        try:
            sketch_algorithm = getattr(SketchModel, algorithm_name)
        except AttributeError:
            raise RuntimeError("Unknown sketch model specified: '%s'"
                               % algorithm_name)
        self.sketch_bits = cfg_sketch['size']
        cfg_sketch_shingler = cfg_sketch['shingler']
        if not cfg_sketch_shingler['enabled']:
            # if sketch shingler is disabled, we also disable signer
            # as we will use default signer
            self.sketch_shingler = None
            self.sketch_signer = None
        elif sketch_algorithm == SketchModel.simhash:
            del cfg_sketch_shingler['enabled']
            self.sketch_shingler = Shingler(**cfg_sketch_shingler)
            self.sketch_signer = SimHashSignature(self.sketch_bits,
                                                  seed=seed)
        elif sketch_algorithm == SketchModel.minhash:
            del cfg_sketch_shingler['enabled']
            self.sketch_shingler = Shingler(**cfg_sketch_shingler)
            self.sketch_signer = MinHashSketchSignature(self.sketch_bits,
                                                        seed=seed)
            # minhash sketch hashes raw tokens directly, so strip the
            # shingler's tokenizer/normalizer
            self.sketch_shingler._tokenizer = None
            self.sketch_shingler._normalizer = None
        # Max Hamming distance allowed for the configured resemblance level
        self.max_dist = \
            int(floor(self.sketch_bits *
                      (1.0 - float(cfg_sketch['resemblance']))))
        self.sketch_dist_fn = hamming
    self.sketch_operator = OPERATOR_MAP[cfg_sketch.get('operator', 'and')]
    self.cluster_builder = Cluster(sketch_dist_fn=self.sketch_dist_fn,
                                   max_dist=self.max_dist,
                                   min_support=self.min_support,
                                   sketch_operator=self.sketch_operator)
class HDClustering(object):

    """High-dimensional clustering driven by a configuration mapping.

    Combines a MinHash signer (with LSH banding) for candidate generation
    with an optional SimHash/MinHash sketch used as a secondary filter.
    Also exposes mapper/reducer entry points for MapReduce-style use.
    """

    def __init__(self, cfg, content_filter=None, trace_every=0,
                 get_body=None, get_label=None, get_prefix=None,
                 min_support=None, seed=0, normalizer=None, tokenizer=None):
        """Read configuration.

        :param cfg: mapping with keys 'min_support', 'sig_width',
            'lsh_options', 'kmin', 'shingler', and 'sketch'
        :param content_filter: optional filter; accepted items are skipped
        :param trace_every: log progress every N items (0 disables tracing)
        :param get_body: optional callable extracting an item's body
        :param get_label: optional callable extracting an item's label
        :param get_prefix: optional callable extracting a shingle prefix
        :param min_support: overrides cfg['min_support'] when not None
        :param seed: seed forwarded to the sketch signers
        :param normalizer: overrides the default normalizer when not None
        :param tokenizer: overrides the default tokenizer when not None
        """
        self.cfg = cfg
        self._get_body = get_body
        self._get_label = get_label
        self._get_prefix = get_prefix
        self.trace_every = trace_every

        # Set options
        self.content_filter = content_filter
        self.min_support = cfg[
            'min_support'] if min_support is None else min_support

        # normalizer and tokenizer
        self.normalizer = get_default_normalizer(
            **cfg.get('preprocessor', {}).get('normalizer', {})) \
            if normalizer is None else normalizer
        self.tokenizer = get_default_tokenizer(
        ) if tokenizer is None else tokenizer

        # Configure minhash signer
        sig_width = cfg['sig_width']
        lsh_hasher = LSHC(width=sig_width, **cfg['lsh_options'])
        self.signer = MinHashSignature(sig_width,
                                       lsh_hasher=lsh_hasher,
                                       kmin=cfg['kmin'])

        # Configure shingler
        cfg_key_shingle = cfg['shingler']
        self.shingler = get_default_shingler(**cfg_key_shingle)

        # Configure sketch comparison algorithm
        cfg_sketch = cfg['sketch']
        self.sketch_enabled = cfg_sketch['enabled']
        self.sketch_dist_fn = None
        self.max_dist = None
        if self.sketch_enabled:
            algorithm_name = cfg_sketch['algorithm']
            try:
                sketch_algorithm = getattr(SketchModel, algorithm_name)
            except AttributeError:
                raise RuntimeError("Unknown sketch model specified: '%s'"
                                   % algorithm_name)
            self.sketch_bits = cfg_sketch['size']
            cfg_sketch_shingler = cfg_sketch['shingler']
            if not cfg_sketch_shingler['enabled']:
                # if sketch shingler is disabled, we also disable signer
                # as we will use default signer
                self.sketch_shingler = None
                self.sketch_signer = None
            elif sketch_algorithm == SketchModel.simhash:
                del cfg_sketch_shingler['enabled']
                self.sketch_shingler = Shingler(**cfg_sketch_shingler)
                self.sketch_signer = SimHashSignature(self.sketch_bits,
                                                      seed=seed)
            elif sketch_algorithm == SketchModel.minhash:
                del cfg_sketch_shingler['enabled']
                self.sketch_shingler = Shingler(**cfg_sketch_shingler)
                self.sketch_signer = MinHashSketchSignature(self.sketch_bits,
                                                            seed=seed)
                # minhash sketch hashes raw tokens directly, so strip the
                # shingler's tokenizer/normalizer
                self.sketch_shingler._tokenizer = None
                self.sketch_shingler._normalizer = None
            # Max Hamming distance allowed for the configured resemblance
            self.max_dist = \
                int(floor(self.sketch_bits *
                          (1.0 - float(cfg_sketch['resemblance']))))
            self.sketch_dist_fn = hamming
        self.sketch_operator = OPERATOR_MAP[cfg_sketch.get(
            'operator', 'and')]
        self.cluster_builder = Cluster(sketch_dist_fn=self.sketch_dist_fn,
                                       max_dist=self.max_dist,
                                       min_support=self.min_support,
                                       sketch_operator=self.sketch_operator)

    def _map_iter(self, data):
        """Find clusters in an iterable"""
        get_body = self._get_body
        get_label = self._get_label
        get_prefix = self._get_prefix
        for i, obj in enumerate(data):
            body = obj if get_body is None else get_body(obj)
            label = i if get_label is None else get_label(obj)
            prefix = None if get_prefix is None else get_prefix(obj)
            for feat in self._map_item(obj, body, label, prefix):
                yield feat

    def _map_item(self, obj, body, label, prefix=None):
        # Yield (keys, (label, sketch)) features for one item.
        # NOTE(review): the `body` parameter is currently unused here;
        # content is read from obj['content'] instead — confirm intent.
        # Extract features
        src = MessageSource.source(obj)
        obj_content = obj['content']
        normalized_content, meta = self.normalizer.normalize(obj_content)
        content_tokens = self.tokenizer.tokenize(normalized_content)
        if self.content_filter is not None:
            rule_accept, rule_score = self.content_filter.accept(
                obj,
                content_tokens=content_tokens,
                urls=meta.get('url_components', []),
                src=src)
        else:
            rule_accept = False
        if not rule_accept:
            features = self.shingler.get_shingles(content_tokens,
                                                  prefix=prefix)
            if self.sketch_enabled and (self.sketch_shingler is None or
                                        self.sketch_signer is None):
                # default signer produces the sketch as a by-product
                keys, sketch = self.signer.get_signature(features,
                                                         with_sketch=True)
            elif self.sketch_enabled and (self.sketch_shingler is not None and
                                          self.sketch_signer is not None):
                keys = self.signer.get_signature(features)
                sketch_features = self.sketch_shingler.get_shingles(
                    content_tokens)
                sketch = self.sketch_signer.get_signature(sketch_features)
            else:
                keys = self.signer.get_signature(features)
                sketch = None
            yield (keys, (label, sketch))

    def clusters_from_iter(self, data):
        """Find clusters in an iterable"""
        cluster_builder = self.cluster_builder
        trace_every = self.trace_every
        for i, obj in enumerate(self._map_iter(data)):
            if trace_every > 0 and (not i % trace_every):
                LOG.info("Processing line " + str(i))
            keys, val = obj
            # val may be a (label, sketch) pair or a bare label
            label, sketch = val \
                if isinstance(val, tuple) \
                else (val, None)
            cluster_builder.add_item(keys, label=label, sketch=sketch)
        return cluster_builder.get_clusters()

    def mapper(self, obj):
        """Perform a mapper task in MR"""
        get_body = self._get_body
        get_label = self._get_label
        get_prefix = self._get_prefix
        body = obj if get_body is None else get_body(obj)
        # NOTE(review): falls back to the whole obj as label here, whereas
        # _map_iter falls back to the enumeration index — confirm intent.
        label = obj if get_label is None else get_label(obj)
        prefix = None if get_prefix is None else get_prefix(obj)
        for keys, val in self._map_item(obj, body, label, prefix):
            for key in keys:
                yield key, val

    def reducer(self, key, tuple_gen):
        """Perform a reducer task in MR

        If sketches enabled, data consists of:
        (key, [(lbl, sk), (lbl, sk), (lbl, sk)])
        Otherwise:
        (key, [lbl, lbl, lbl])
        """
        # If not using sketches, we are done
        if self.sketch_dist_fn is None:
            return key, list(set(tuple_gen))
        # create a dict mapping a label to a sketch
        return key, dict(tuple_gen).items()
class HDClustering(object):

    """High-dimensional clustering configured from a mapping.

    Older variant: MinHash signer with LSH banding plus an optional
    SimHash/MinHash sketch, consumed through ``clusters_from_iter``.
    """

    def __init__(self, cfg, content_filter=None, opts=None):
        """Read configuration.

        :param cfg: configuration mapping; expects keys 'min_support',
            'sig_width', 'lsh_options', 'kmin', 'shingler', and 'sketch'
        :param content_filter: optional filter used to reject content
        :param opts: optional mapping deep-merged over the default
            normalizer/tokenizer keyword arguments
        """
        self.cfg = cfg
        # Shared normalizer/tokenizer kwargs for both shinglers;
        # caller-supplied opts override these defaults via deep merge.
        common_kwargs = dict(
            normalizer=HTMLNormalizer(),
            tokenizer=RegexTokenizer()
        )
        deepupdate(common_kwargs, opts or {})

        # Set options
        self.content_filter = content_filter
        min_support = cfg['min_support']

        # Configure minhash signer
        sig_width = cfg['sig_width']
        lsh_hasher = LSHC(width=sig_width, **cfg['lsh_options'])
        self.signer = MinHashSignature(sig_width,
                                       lsh_hasher=lsh_hasher,
                                       kmin=cfg['kmin'])

        # Configure shingler
        # NOTE(review): update() mutates the cfg['shingler'] dict in place.
        cfg_key_shingle = cfg['shingler']
        cfg_key_shingle.update(common_kwargs)
        self.shingler = Shingler(**cfg_key_shingle)

        # Configure sketch comparison algorithm
        cfg_sketch = cfg['sketch']
        self.sketch_enabled = cfg_sketch['enabled']
        sketch_dist_fn = None
        xor_threshold = None
        if self.sketch_enabled:
            algorithm_name = cfg_sketch['algorithm']
            try:
                sketch_algorithm = getattr(SketchModel, algorithm_name)
            except AttributeError:
                raise RuntimeError("Unknown sketch model specified: '%s'"
                                   % algorithm_name)
            sketch_bits = cfg_sketch['size'] * 8  # cfg size is in bytes
            cfg_sketch_shingle = cfg_sketch['shingler']
            cfg_sketch_shingle.update(common_kwargs)
            self.sketch_shingler = Shingler(**cfg_sketch_shingle)
            if sketch_algorithm == SketchModel.simhash:
                self.sketch_signer = SimHashSignature(bit_depth=sketch_bits)
            elif sketch_algorithm == SketchModel.minhash:
                self.sketch_signer = MinHashSketchSignature(sketch_bits)
            # Max Hamming distance allowed for the configured resemblance
            xor_threshold = \
                int(floor(sketch_bits *
                          (1.0 - float(cfg_sketch['resemblance']))))
            sketch_dist_fn = hamming
        self.cluster_builder = Cluster(sketch_dist_fn=sketch_dist_fn,
                                       max_dist=xor_threshold,
                                       min_support=min_support)

    def clusters_from_iter(self, data, get_body=None, get_label=None,
                           get_prefix=None):
        """Find clusters in an iterable"""
        cluster_builder = self.cluster_builder
        for i, obj in enumerate(data):
            # Progress trace every 1000 items (Python 2 print statement)
            if not i % 1000:
                print "Processing line " + str(i)
            body = obj if get_body is None else get_body(obj)
            label = i if get_label is None else get_label(obj)
            prefix = None if get_prefix is None else get_prefix(obj)

            # Step 1: Extract features
            if self.content_filter is None or \
                    not self.content_filter.accept(obj):
                features = self.shingler.get_shingles(body, prefix=prefix)
                keys = self.signer.get_signature(features)
                if self.sketch_enabled:
                    sketch_features = self.sketch_shingler.get_shingles(body)
                    sketch = self.sketch_signer.get_signature(sketch_features)
                else:
                    sketch = None

                # Step 2: Cluster given keys, sketch
                cluster_builder.add_set(keys, label=label, sketch=sketch)

        return cluster_builder.get_clusters()