import json
import urllib2
import zipfile

import termcolor as tc  # assumed: `tc` is the termcolor module (used as tc.colored below)
import util              # assumed: local helper module providing json_dict_update


def download_model(dwnld_url, output_dir, get_thumb=True):
    # Parse the query string of the direct .skp download URL.
    base, rest = dwnld_url.split('?', 1)
    params = dict([param.split('=') for param in rest.split('&')])
    try:
        mid = params['mid']
        rtyp = params['rtyp']
        name = params['fn']
    except KeyError:
        print 'something wrong with url %s, bailing' % dwnld_url
        return
    # TODO in theory could be collada or kmz
    sub_output_dir = output_dir / mid
    if not sub_output_dir.exists():
        sub_output_dir.mkdir()
    fname = sub_output_dir / 'model.skp'
    print tc.colored('downloading ' + fname, 'green')
    data = urllib2.urlopen(dwnld_url).read()
    with open(fname, 'wb') as f:
        f.write(data)

    if get_thumb:
        thumb_fname = sub_output_dir / 'thumb.3dwarehouse.jpg'
        download_thumb(mid, thumb_fname)

    # update/create json metadata
    json_fname = sub_output_dir / 'metadata.json'
    print tc.colored('saving metadata ' + json_fname, 'green')
    data = {'mid': mid, 'rtyp': rtyp, 'name': name}
    util.json_dict_update(json_fname, data)
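# util.json_dict_update is not defined in this file. The sketch below is only an
# assumption of its behaviour, inferred from the call sites in download_model:
# merge a dict of fields into the JSON metadata file, creating it if missing.
def json_dict_update(json_fname, data):
    """Merge `data` into the JSON dict stored at `json_fname` (create if missing)."""
    existing = {}
    try:
        with open(json_fname, 'r') as f:
            existing = json.load(f)
    except IOError:
        pass  # no metadata file yet; start from an empty dict
    existing.update(data)
    with open(json_fname, 'w') as f:
        json.dump(existing, f, indent=2, sort_keys=True)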
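# download_thumb is defined elsewhere in this repo. A rough sketch of the assumed
# behaviour, based on how it is called: fetch the thumbnail image for a model id
# and write it to thumb_fname. THUMB_URL_TEMPLATE is a hypothetical placeholder,
# not a confirmed 3D Warehouse endpoint.
THUMB_URL_TEMPLATE = 'https://example.invalid/thumbnail?mid=%s'  # placeholder URL

def download_thumb(mid, thumb_fname):
    """Download the thumbnail for model `mid` into `thumb_fname`."""
    thumb_url = THUMB_URL_TEMPLATE % mid
    print(tc.colored('downloading thumbnail ' + thumb_fname, 'green'))
    img = urllib2.urlopen(thumb_url).read()
    with open(thumb_fname, 'wb') as f:
        f.write(img)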
def download_model(model_id, output_dir, get_thumb=True):
    # Variant that takes a warehouse model id and resolves its binary zip
    # download URL via the 3D Warehouse GetEntity API.
    base = "https://3dwarehouse.sketchup.com/warehouse/GetEntity"
    url = '?'.join([base, 'id=' + model_id + '&showBinaryAttributes=true'])
    response = urllib2.urlopen(url).read().decode('utf8')
    data = json.loads(response)
    if 'zip' not in data['binaries']:
        return
    print(data['binaries']['zip']['url'])
    dwnld_url = data['binaries']['zip']['url']
    base, rest = dwnld_url.split('?', 1)
    params = dict([param.split('=') for param in rest.split('&')])
    model_id = params['subjectId']
    name = data['title']

    sub_output_dir = output_dir / model_id
    if not sub_output_dir.exists():
        sub_output_dir.mkdir()
    fname = sub_output_dir / 'model.zip'
    print(tc.colored('downloading ' + fname, 'green'))
    data = urllib2.urlopen(dwnld_url).read()
    with open(fname, 'wb') as f:
        f.write(data)

    # Unpack the archive; it contains the collada mesh (model.dae).
    with zipfile.ZipFile(fname, "r") as zip_ref:
        zip_ref.extractall(sub_output_dir)
    dae_fname = sub_output_dir / 'model.dae'
    # cmd = ['meshlabserver', '-i', dae_fname, '-o', dae_fname, '-om', 'vt', 'wt']
    # print(' '.join(cmd))
    # subprocess.call(cmd)
    generate_sdf(sub_output_dir, model_id)

    if get_thumb:
        thumb_fname = sub_output_dir / 'thumb.jpg'
        download_thumb(model_id, thumb_fname)

    # update/create json metadata
    json_fname = sub_output_dir / 'metadata.json'
    print(tc.colored('saving metadata ' + json_fname, 'green'))
    data = {'mid': model_id, 'name': name}
    util.json_dict_update(json_fname, data)
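# generate_sdf is also defined elsewhere in this repo. The sketch below is one
# plausible implementation, assuming the goal is a minimal Gazebo-style SDF model
# wrapping the extracted model.dae; the SDF layout and version are assumptions,
# not the repo's confirmed output.
SDF_TEMPLATE = """<?xml version="1.0"?>
<sdf version="1.6">
  <model name="%(name)s">
    <static>true</static>
    <link name="link">
      <visual name="visual">
        <geometry>
          <mesh><uri>model.dae</uri></mesh>
        </geometry>
      </visual>
      <collision name="collision">
        <geometry>
          <mesh><uri>model.dae</uri></mesh>
        </geometry>
      </collision>
    </link>
  </model>
</sdf>
"""

def generate_sdf(sub_output_dir, model_id):
    """Write a minimal model.sdf next to the extracted model.dae."""
    sdf_fname = sub_output_dir / 'model.sdf'
    with open(sdf_fname, 'w') as f:
        f.write(SDF_TEMPLATE % {'name': model_id})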