def update_oldest_lookups(options): query = Lookup.all(keys_only=True).order('timestamp') if options.days is None: print "Trying to fetch %d oldest names" % options.batch else: print "Trying to fetch %d names that are younger than %d days" % ( options.batch, options.days) days_ago = datetime.now() - timedelta(days=options.days) query.filter('timestamp >', days_ago) keys = retry(query.fetch, options.batch) if not keys: sys.exit("The datastore returned no results.") names = [key.name() for key in keys] oldest = retry(Lookup.get_by_key_name, names[0]) age = datetime.now() - oldest.timestamp hours = age.seconds / 3600 minutes = age.seconds / 60 - hours * 60 seconds = age.seconds - hours * 3600 - minutes * 60 print "Age of oldest lookup: %d days, %d:%02d:%02d" % ( age.days, hours, minutes, seconds) print "Resolving .com names:", results = resolve_parallel([name + '.com' for name in names], options, 5.0) # Delete registered .com names. registered = [name for name in names if not com_available(results, name)] print len(registered), 'registered:', ' '.join(registered) retry(db.delete, [db.Key.from_path('dns_lookup', name) for name in registered] + [db.Key.from_path('domains_domain', name) for name in registered]) # Update available .com names. available = [name for name in names if com_available(results, name)] print len(available), 'available:', ' '.join(available) print "Resolving", len(available), "of", options.batch, "names:", lookups = lookup_names(available, options) retry_objects(db.put, lookups)
def update_error(options): tld = random.choice(options.active_tld_list) query = Lookup.all(keys_only=True).filter(tld, options.retry) # prefix = random_prefix('left', length_choices=[2, 3, 4]) # start_key = db.Key.from_path('dns_lookup', prefix) # query.filter('__key__ >', start_key) print "Trying to fetch %d names where %s is %s" % ( options.batch, tld, options.retry) keys = retry(query.fetch, options.batch) if keys: names = [key.name() for key in keys] lookups = lookup_names(names, options) retry_objects(db.put, lookups) if len(keys) < options.batch: options.active_tld_list.remove(tld)
def update_best_names(position, keyword, length, options): print "Trying to fetch %d best names with" % options.batch, if keyword and position == 'left': print "prefix", keyword, "and", if keyword and position == 'right': print "suffix", keyword, "and", print "length", length query = Domain.all(keys_only=True) if keyword: query.filter('%s%d' % (position, len(keyword)), keyword) query.filter('length', length) query.order('-score') keys = retry(query.fetch, options.batch) if not keys: return lookups = lookup_names([key.name() for key in keys], options) retry_objects(db.put, lookups)
def cron_suffixes(request):
    """Cron handler: fetch one slice of Lookup entities (ordered by the
    'backwards' property) and update suffix counts for it.

    NOTE(review): the template receives ``locals()``, so every local
    variable name here (refresh_seconds, chars, resume, suffixes, ...)
    is part of the template contract -- do not rename locals.
    """
    # Presumably used by the template for an auto-refresh interval --
    # not read directly in this function; confirm against the template.
    refresh_seconds = request.GET.get('refresh', 0)
    # Resume from where the previous cron run left off.
    previous = resume_previous(request, Suffix)
    chars = previous.key().name().lstrip('.')
    resume = False
    greater = '>='
    # Default range: everything from `chars` up to (excluding) the next
    # prefix in lexicographic order.
    start = chars
    stop = increment_prefix(start)
    if hasattr(previous, 'resume'):
        # A partial batch was recorded; continue strictly after it.
        resume = previous.resume
        greater = '>'
        start = previous.resume
    query = Lookup.all().order('backwards')
    query.filter('backwards ' + greater, start)
    query.filter('backwards <', stop)
    lookups = retry(query.fetch, BATCH_SIZE)
    suffixes = count(chars, resume, lookups, Suffix)
    return render_to_response(request, 'prefixes/cron.html', locals())
def cron(request):
    """Cron handler: fetch one key-range slice of Lookup entities and
    update prefix counts for it.

    NOTE(review): the template receives ``locals()``, so every local
    variable name here (refresh_seconds, chars, resume, prefixes, ...)
    is part of the template contract -- do not rename locals.
    """
    # Presumably used by the template for an auto-refresh interval --
    # not read directly in this function; confirm against the template.
    refresh_seconds = request.GET.get('refresh', 0)
    # Resume from where the previous cron run left off.
    previous = resume_previous(request, Prefix)
    chars = previous.key().name().lstrip('.')
    resume = False
    greater = '>='
    # Default range: all keys from `chars` up to (excluding) the next
    # prefix in lexicographic order.
    start = db.Key.from_path('dns_lookup', chars)
    stop = db.Key.from_path('dns_lookup', increment_prefix(chars))
    if hasattr(previous, 'resume'):
        # A partial batch was recorded; continue strictly after it.
        resume = previous.resume
        greater = '>'
        start = db.Key.from_path('dns_lookup', previous.resume)
    query = Lookup.all().order('__key__')
    query.filter('__key__ ' + greater, start)
    query.filter('__key__ <', stop)
    lookups = retry(query.fetch, BATCH_SIZE)
    prefixes = count(chars, resume, lookups, Prefix)
    return render_to_response(request, 'prefixes/cron.html', locals())
def fetch_file(Model, length):
    """Export every Model entity of the given length to a text file.

    Writes one '<com-count> <name>' line per entity to
    .data/popular/<kind>es.<length>.txt, paging through the datastore in
    key order, 1000 entities at a time.

    Fix: the output file was opened without a ``with`` block, so it
    leaked (and could lose buffered data) if a datastore fetch or write
    raised mid-loop; it is now closed on every exit path.
    """
    kind = Model.kind()
    filename = '.data/popular/%ses.%d.txt' % (kind.split('_')[-1], length)
    batch_size = 1000
    # Start just before the first real key; '-' sorts ahead of [a-z0-9].
    start = db.Key.from_path(kind, '-')
    with open(filename, 'w') as outfile:
        while True:
            query = Model.all().filter('length', length).order('__key__')
            query.filter('__key__ >', start)
            prefixes = retry(query.fetch, batch_size)
            for prefix in prefixes:
                name = prefix.key().name()
                if name.startswith('.'):
                    # Skip marker/placeholder keys.
                    continue
                if kind == 'prefixes_suffix':
                    # Suffix keys are stored reversed; restore reading order.
                    name = name[::-1]
                outfile.write('%d %s\n' % (prefix.com, name))
            if len(prefixes) < batch_size:
                break  # final (short or empty) page reached
            start = prefixes[-1].key()
def count_existing(names):
    """Return how many of the given key names resolve to an existing
    Lookup entity (get_by_key_name yields None for missing keys)."""
    entities = retry(Lookup.get_by_key_name, names)
    return len([entity for entity in entities if entity])
'timeout=30', ]


def auth_func():
    # Return (username, password) for the remote API, preferring the
    # cached 'user:pass' file over an interactive prompt.
    if os.path.exists(PASSWORD_FILENAME):
        return open(PASSWORD_FILENAME).read().split(':')
    # NOTE(review): the line below is corrupted by credential masking
    # ('******') and is not valid Python.  It presumably prompted for the
    # username and password, returned them, and then configured the
    # remote API stub against scoretool.appspot.com -- restore the
    # original from version control before running this script.
    username = raw_input('Username:'******'Password:'******'scoretool', '/remote_api_hidden', auth_func, 'scoretool.appspot.com')


# Purge loop: repeatedly pick a random value from PURGE_VALUES and delete
# lookups (and their matching domain entities) whose 'com' property equals
# that value, until every value yields a short batch.
while PURGE_VALUES:
    query = Lookup.all(keys_only=True)
    value = random.choice(PURGE_VALUES)
    query.filter('com', value)
    keys = retry(query.fetch, BATCH_SIZE)
    if len(keys) < BATCH_SIZE:
        # Short batch: this value is (nearly) exhausted; stop selecting it.
        PURGE_VALUES.remove(value)
    if len(keys):
        print "deleting %d names (%s to %s) where com is %s" % (
            len(keys), keys[0].name(), keys[-1].name(), value)
        # Delete the paired domains_domain entity for each lookup key.
        domain_keys = [db.Key.from_path('domains_domain', key.name())
                       for key in keys]
        db.delete(keys + domain_keys)
def auth_func():
    # Return (username, password) for the remote API, preferring the
    # cached 'user:pass' file over an interactive prompt.
    if os.path.exists(PASSWORD_FILENAME):
        return open(PASSWORD_FILENAME).read().split(':')
    # NOTE(review): the line below is corrupted by credential masking
    # ('******') and is not valid Python.  It presumably prompted for the
    # username and password, returned them, and then configured the
    # remote API stub against scoretool.appspot.com -- restore the
    # original from version control before running this script.
    username = raw_input('Username:'******'Password:'******'scoretool', '/remote_api_hidden', auth_func, 'scoretool.appspot.com')


# Purge loop: repeatedly pick a random value from PURGE_VALUES and delete
# lookups (and their matching domain entities) whose 'com' property equals
# that value, until every value yields a short batch.
while PURGE_VALUES:
    query = Lookup.all(keys_only=True)
    value = random.choice(PURGE_VALUES)
    query.filter('com', value)
    keys = retry(query.fetch, BATCH_SIZE)
    if len(keys) < BATCH_SIZE:
        # Short batch: this value is (nearly) exhausted; stop selecting it.
        PURGE_VALUES.remove(value)
    if len(keys):
        print "deleting %d names (%s to %s) where com is %s" % (
            len(keys), keys[0].name(), keys[-1].name(), value)
        # Delete the paired domains_domain entity for each lookup key.
        domain_keys = [
            db.Key.from_path('domains_domain', key.name())
            for key in keys
        ]
        db.delete(keys + domain_keys)