def cmd_ios(ctx, opt_fp_jobs, opt_dir_out, opt_force):
    """Process iOS Wi-Fi scans"""
    log = ctx.obj['log']
    net_parser = NetParser()

    # Load the jobs CSV; NaN in the 'run' column means "do not run"
    log.info('opening: {}'.format(opt_fp_jobs))
    df_jobs = pd.read_csv(opt_fp_jobs)
    df_jobs['run'] = df_jobs['run'].fillna(0).astype('int')

    for idx, job in df_jobs.iterrows():
        # rows with run == 0 are disabled
        if int(job['run']) == 0:
            continue

        scan_fp = job.path_in
        scan_stem = Path(scan_fp).stem
        networks = net_parser.ios_to_networks(scan_fp)

        # metadata is the job row itself, tagged with scan type and source stem
        meta = dict(job.copy())
        meta['type'] = 'ios'  # add ios type
        meta['filepath'] = scan_stem
        payload = {'meta': meta, 'networks': networks}

        fp_out = Path(job.path_out) / f'{scan_stem}.json'
        log.info(f'filepath out: {fp_out}')

        # refuse to clobber an existing file unless -f was given
        if not Path(fp_out).exists() or opt_force:
            file_utils.write_json(payload, fp_out, minify=False)
        else:
            log.error('file exists "{}". use "-f" to overwrite'.format(fp_out))
def cmd_wigle(ctx, opt_fp_jobs, opt_force):
    """Process Wigle Export CSVs.

    Reads a jobs CSV, converts each enabled Wigle export to the project's
    network JSON format, and writes it to the job's output path.
    """
    log = ctx.obj['log']
    net_parser = NetParser()

    # NOTE(review): skiprows=0 skips nothing; confirm a header row was not
    # meant to be skipped here (skiprows=[0]).
    df_jobs = pd.read_csv(opt_fp_jobs, skiprows=0)
    # BUG FIX: these two normalizations originally assigned to the undefined
    # name `jobs` instead of `df_jobs`, raising NameError on every run.
    df_jobs['run'] = df_jobs['run'].fillna(0).astype('int')
    df_jobs['comment'] = df_jobs['comment'].fillna('').astype('str')

    for i, job in df_jobs.iterrows():
        # rows with run == 0 are disabled
        if int(job['run']) == 0:
            continue

        fp_wigle = join(job.path_in, f'{job.filename}.csv')
        networks = net_parser.wigle_export_to_networks(fp_wigle, job.path_out, job.comment)
        networks = net_parser.sort_distance(networks, 'wigle_export')

        # add metadata to context: the job row itself plus the scan type
        meta = dict(job.copy())  # copy the job variables
        meta['type'] = 'wigle_export'
        data = {'meta': meta, 'networks': networks}

        # refuse to clobber an existing file unless -f was given
        if Path(job.filename).exists() and not opt_force:
            log.error('file exists "{}". use "-f" to overwrite'.format(
                job.filename))
        else:
            file_utils.write_json(data, job.filename, minify=False)
def cmd_wigle_api(ctx, opt_fp_jobs, opt_api_name, opt_api_key, opt_force):
    """Fetches Wigle data from API.

    For each enabled job row, queries the Wigle API around (lat, lon) and
    writes the resulting networks to the job's output filepath.
    """
    log = ctx.obj['log']
    # BUG FIX: original tested `opt_api_key or opt_api_key` — the API name was
    # never validated and the duplicated operand made the `or` a no-op.
    if not (opt_api_name and opt_api_key):
        log.error('Wigle API Name and API key missing')
        return

    net_parser = NetParser()
    wigle = wigle_utils.WigleAPI(opt_api_name, opt_api_key)

    log.info('opening: {}'.format(opt_fp_jobs))
    jobs = pd.read_csv(opt_fp_jobs)
    jobs['comment'] = jobs['comment'].fillna('').astype('str')
    jobs['notes'] = jobs['notes'].fillna('').astype('str')
    jobs['venue_name'] = jobs['venue_name'].fillna('').astype('str')

    for i, job in jobs.iterrows():
        # best-effort read of the 'run' flag: malformed/trailing CSV rows are
        # skipped rather than aborting the whole batch
        try:
            if int(job['run']) == 0:
                continue
        except Exception:
            log.warn(
                'Failed to read row. You may have extra rows in your CSV file.'
            )
            continue  # failed to read 'run' value

        log.info(
            'Fetching lat: {lat}, long: {lon}, since: {since}, radius: {radius}'
            .format(**job))
        url = wigle.build_url(job.lat, job.lon, job.radius, job.since)
        networks = wigle.fetch(url, job.lat, job.lon)
        if not networks:
            # BUG FIX: original used `return` here, which silently aborted all
            # remaining jobs when a single fetch came back empty.
            continue
        networks = net_parser.sort_distance(networks, 'wigle')

        # add metadata to context: the job row itself plus the scan type
        meta = dict(job.copy())  # copy the job variables
        meta['type'] = 'wigle'
        data = {'meta': meta, 'networks': networks}

        # refuse to clobber an existing file unless -f was given
        if Path(job.filepath).exists() and not opt_force:
            log.error('file exists "{}". use "-f" to overwrite'.format(
                job.filepath))
        else:
            file_utils.write_json(data, job.filepath, minify=False)
def osx(cls, lat, lon, comment):
    """Use MacOS WiFi scan.

    Runs the airport utility, parses its plist output into WiFiNet objects
    keyed by BSSID, and assembles the project's scan-JSON structure.
    """
    print('[+] Scanning for WiFi networks...')
    proc = subprocess.Popen(args=[cfg.FP_AIRPORT, '-s', '-x'],
                            stdout=subprocess.PIPE)
    output = proc.communicate()[0]
    # BUG FIX: plistlib.readPlistFromBytes was removed in Python 3.9;
    # plistlib.loads is the drop-in replacement.
    scan_xml = plistlib.loads(output)

    networks = {}
    for network in scan_xml:
        # Ensure 2-digit hexadecimal: zero-pad each single-digit octet
        bssid = ':'.join(
            octet if len(octet) == 2 else '0' + octet
            for octet in network['BSSID'].split(':'))
        ssid = network['SSID_STR']
        channel = network['CHANNEL']
        rssi = network['RSSI']
        if bssid not in networks:
            # BUG FIX: original called self.mac2vendor inside a classmethod
            # where no `self` exists; use cls instead.
            networks[bssid] = WiFiNet(ssid, bssid, channel, rssi,
                                      vendor=cls.mac2vendor(bssid))

    # TODO handle unicode network names
    # BUG FIX: original iterated the dict itself (BSSID strings) and called
    # .as_obj() on them; iterate the WiFiNet values.
    networks = [n.as_obj() for n in networks.values()]

    json_out = {
        'meta': {
            # NOTE(review): fp_input is not defined in this scope — this
            # raises NameError at runtime; confirm the intended source path.
            'source': os.path.basename(fp_input.name),
            'type': ScanType.MACOS.value.lower(),
            'comment': comment,
            'lat_origin': lat,
            'lon_origin': lon,
        },
        'networks': networks
    }
    # BUG FIX: original passed the undefined name `data`; write the json_out
    # dict built above. NOTE(review): fp_out is also undefined in this scope —
    # confirm the intended output path.
    file_utils.write_json(json_out, fp_out, minify=False, ensure_ascii=True)
def cmd_ios(ctx, opt_fp_jobs, opt_dir_out, opt_force):
    """Process iOS Wi-Fi scans"""
    log = ctx.obj['log']
    net_parser = NetParser()

    # Load the jobs CSV
    log.info('opening: {}'.format(opt_fp_jobs))
    df_jobs = pd.read_csv(opt_fp_jobs)

    for idx, job in df_jobs.iterrows():
        # rows with run == 0 are disabled
        if int(job['run']) == 0:
            continue

        scan_fp = job.filepath
        networks = net_parser.ios_to_networks(scan_fp, job['lat'], job['lon'])

        # metadata is the job row itself, tagged with the scan type
        meta = dict(job.copy())
        meta['type'] = 'ios'  # add ios type
        payload = {'meta': meta, 'networks': networks}

        fp_out = join(job['path_out'], Path(job.filename).name)

        # refuse to clobber an existing file unless -f was given
        if not Path(fp_out).exists() or opt_force:
            file_utils.write_json(payload, fp_out, minify=False)
        else:
            log.error('file exists "{}". use "-f" to overwrite'.format(fp_out))
def cli(ctx, opt_input_glob, opt_output_fn, opt_threshold, opt_ext):
    """ Dedupe a folder of images """
    import os
    import glob
    from PIL import Image
    from app.utils.im_utils import compute_phash
    from app.utils.file_utils import write_json, sha256
    from app.utils import logger_utils

    log = logger_utils.Logger.getLogger()

    seen = []   # records for perceptually-unique images found so far
    total = 0
    for fn in tqdm(sorted(glob.iglob(join(opt_input_glob, f'*.{opt_ext}')))):
        total += 1
        im = Image.open(fn).convert('RGB')
        phash = compute_phash(im)
        # only keep the file if its perceptual hash is not within
        # opt_threshold of anything already seen
        if is_phash_new(fn, phash, seen, opt_threshold):
            # FIX: local was named `hash`, shadowing the builtin
            file_hash = sha256(fn)
            # FIX: the basename part of splitext was unused
            _, ext = os.path.splitext(fn)
            ext = ext[1:]  # drop the leading dot
            seen.append({
                'sha256': file_hash,
                'phash': phash,
                'fn': fn,
                'ext': ext,
            })

    if opt_output_fn:
        write_json(seen, opt_output_fn)
    log.info("checked {} files, found {} unique".format(total, len(seen)))