def upload_file(upload_data, f):
    headers = utils.get_headers(upload_data['token'])
    version_id = upload_data['id']
    bg_blender.progress('uploading %s' % f['type'])

    # register the file with the API and get the S3 upload URL back
    upload_info = {
        'assetId': version_id,
        'fileType': f['type'],
        'fileIndex': f['index'],
        'originalFilename': os.path.basename(f['file_path'])
    }
    upload_create_url = paths.get_api_url() + 'uploads/'
    upload = rerequests.post(upload_create_url, json=upload_info, headers=headers, verify=True)
    upload = upload.json()

    chunk_size = 1024 * 1024 * 2
    utils.pprint(upload)

    # file gets uploaded here:
    uploaded = False
    # s3 upload is now the only option; retry up to five times
    for a in range(0, 5):
        if not uploaded:
            try:
                upload_response = requests.put(upload['s3UploadUrl'],
                                               data=upload_in_chunks(f['file_path'], chunk_size, f['type']),
                                               stream=True, verify=True)

                if upload_response.status_code == 200:
                    uploaded = True
                else:
                    print(upload_response.text)
                    bg_blender.progress(f'Upload failed, retry. {a}')
            except Exception as e:
                print(e)
                bg_blender.progress('Upload %s failed, retrying' % f['type'])
                time.sleep(1)

    # confirm single file upload to the BlenderKit server
    upload_done_url = paths.get_api_url() + 'uploads_s3/' + upload['id'] + '/upload-file/'
    upload_response = rerequests.post(upload_done_url, headers=headers, verify=True)

    bg_blender.progress('finished uploading')

    return uploaded

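
# --- Illustrative sketch, not the shipped implementation ---
# upload_file() above streams the file body through an `upload_in_chunks`
# iterable. The sketch below shows one way such a helper can look: an object
# that yields fixed-size chunks and reports progress while requests consumes
# it. The class name, constructor signature, and the progress message format
# are assumptions; only the call site above is from the original code.
class _UploadInChunksSketch(object):
    def __init__(self, filename, chunksize=2 * 1024 * 1024, report_name='file'):
        self.filename = filename
        self.chunksize = chunksize
        self.totalsize = os.path.getsize(filename)
        self.readsofar = 0
        self.report_name = report_name

    def __iter__(self):
        # yield the file in chunks so large uploads never sit in memory whole
        with open(self.filename, 'rb') as f:
            while True:
                data = f.read(self.chunksize)
                if not data:
                    break
                self.readsofar += len(data)
                percent = self.readsofar * 100 // self.totalsize
                bg_blender.progress('uploading %s - %d%%' % (self.report_name, percent))
                yield data

    def __len__(self):
        # requests uses __len__ to set the Content-Length header
        return self.totalsize
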
def get_category_name_path(categories, category):
    '''Find the category in all possible subcategories and return the path to it.'''
    category_path = []
    check_categories = categories[:]
    parents = {}
    utils.pprint(categories)
    while len(check_categories) > 0:
        ccheck = check_categories.pop()
        # print(ccheck['name'])
        if not ccheck.get('children'):
            continue

        for ch in ccheck['children']:
            # print(ch['name'])
            parents[ch['slug']] = ccheck['slug']
            if ch['slug'] == category:
                # walk back up through the recorded parents to build the path
                category_path = [ch['slug']]
                slug = ch['slug']
                while parents.get(slug):
                    slug = parents.get(slug)
                    category_path.insert(0, slug)
                return category_path
            check_categories.append(ch)

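
# Usage sketch for get_category_name_path(). The nested dict below is made-up
# demo data, not real BlenderKit categories, but it uses the same
# 'slug'/'children' keys the function reads; the helper name is hypothetical.
def _demo_category_name_path():
    demo_categories = [{
        'slug': 'furniture',
        'children': [{
            'slug': 'tables',
            'children': [{'slug': 'coffee-tables', 'children': []}],
        }],
    }]
    # expected result: ['furniture', 'tables', 'coffee-tables']
    return get_category_name_path(demo_categories, 'coffee-tables')
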
def start_upload(self, context, asset_type, reupload, upload_set):
    '''Start the upload process by preparing and validating the asset data.'''
    # fix the name first
    utils.name_update()

    props = utils.get_upload_props()
    storage_quota_ok = check_storage_quota(props)
    if not storage_quota_ok:
        self.report({'ERROR_INVALID_INPUT'}, props.report)
        return {'CANCELLED'}

    location = get_upload_location(props)
    props.upload_state = 'preparing upload'

    auto_fix(asset_type=asset_type)

    # do this to fix long tags in some upload cases
    props.tags = props.tags[:]

    props.name = props.name.strip()
    # TODO move this to a separate function
    # check for missing metadata
    if asset_type == 'MODEL':
        get_missing_data_model(props)
    if asset_type == 'SCENE':
        get_missing_data_scene(props)
    elif asset_type == 'MATERIAL':
        get_missing_data_material(props)
    elif asset_type == 'BRUSH':
        get_missing_data_brush(props)

    if props.report != '':
        self.report({'ERROR_INVALID_INPUT'}, props.report)
        return {'CANCELLED'}

    if not reupload:
        props.asset_base_id = ''
        props.id = ''

    export_data, upload_data, eval_path_computing, eval_path_state, eval_path, props = get_upload_data(self, context, asset_type)

    # utils.pprint(upload_data)
    upload_data['parameters'] = params_to_dict(
        upload_data['parameters'])  # array conversion used only for upload, not for tooltips

    binary_path = bpy.app.binary_path
    script_path = os.path.dirname(os.path.realpath(__file__))
    basename, ext = os.path.splitext(bpy.data.filepath)
    # if not basename:
    #     basename = os.path.join(basename, "temp")
    if not ext:
        ext = ".blend"
    tempdir = tempfile.mkdtemp()
    source_filepath = os.path.join(tempdir, "export_blenderkit" + ext)
    clean_file_path = paths.get_clean_filepath()
    data = {
        'clean_file_path': clean_file_path,
        'source_filepath': source_filepath,
        'temp_dir': tempdir,
        'export_data': export_data,
        'upload_data': upload_data,
        'debug_value': bpy.app.debug_value,
        'upload_set': upload_set,
    }
    datafile = os.path.join(tempdir, BLENDERKIT_EXPORT_DATA_FILE)

    # check if the thumbnail exists:
    if 'THUMBNAIL' in upload_set:
        if not os.path.exists(export_data["thumbnail_path"]):
            props.upload_state = 'Thumbnail not found'
            props.uploading = False
            return {'CANCELLED'}

    # first upload metadata to the server, so it can be saved inside the current file
    url = paths.get_api_url() + 'assets/'

    headers = utils.get_headers(upload_data['token'])

    # upload_data['license'] = 'ovejajojo'
    json_metadata = upload_data  # json.dumps(upload_data, ensure_ascii=False).encode('utf8')
    global reports
    if props.asset_base_id == '':
        try:
            r = rerequests.post(url, json=json_metadata, headers=headers, verify=True, immediate=True)  # files = files,
            ui.add_report('uploaded metadata')
            utils.p(r.text)
        except requests.exceptions.RequestException as e:
            print(e)
            props.upload_state = str(e)
            props.uploading = False
            return {'CANCELLED'}
    else:
        url += props.id + '/'
        try:
            if upload_set != ['METADATA']:
                json_metadata["verificationStatus"] = "uploading"
            r = rerequests.put(url, json=json_metadata, headers=headers, verify=True, immediate=True)  # files = files,
            ui.add_report('uploaded metadata')
            # parse the request
            # print('uploaded metadata')
            # print(r.text)
        except requests.exceptions.RequestException as e:
            print(e)
            props.upload_state = str(e)
            props.uploading = False
            return {'CANCELLED'}

    # props.upload_state = 'step 1'
    if upload_set == ['METADATA']:
        props.uploading = False
        props.upload_state = 'upload finished successfully'
        return {'FINISHED'}
    try:
        rj = r.json()
        utils.pprint(rj)
        # if r.status_code not in (200, 201):
        #     if r.status_code == 401:
        #         ui.add_report(r.detail, 5, colors.RED)
        #     return {'CANCELLED'}
        if props.asset_base_id == '':
            props.asset_base_id = rj['assetBaseId']
            props.id = rj['id']
        upload_data['assetBaseId'] = props.asset_base_id
        upload_data['id'] = props.id

        # bpy.ops.wm.save_mainfile()
        # bpy.ops.wm.save_as_mainfile(filepath=filepath, compress=False, copy=True)
        props.uploading = True
        # save a copy of the actual scene, but don't interfere with the user's models
        bpy.ops.wm.save_as_mainfile(filepath=source_filepath, compress=False, copy=True)

        with open(datafile, 'w') as s:
            json.dump(data, s)

        # run a background Blender instance that packs and uploads the asset
        proc = subprocess.Popen([
            binary_path,
            "--background",
            "-noaudio",
            clean_file_path,
            "--python", os.path.join(script_path, "upload_bg.py"),
            "--", datafile  # ,filepath, tempdir
        ], bufsize=5000, stdout=subprocess.PIPE, stdin=subprocess.PIPE)

        bg_blender.add_bg_process(eval_path_computing=eval_path_computing,
                                  eval_path_state=eval_path_state,
                                  eval_path=eval_path,
                                  process_type='UPLOAD',
                                  process=proc,
                                  location=location)

    except Exception as e:
        props.upload_state = str(e)
        props.uploading = False
        print(e)
        return {'CANCELLED'}

    return {'FINISHED'}

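
# --- Illustrative sketch, not the shipped helper ---
# start_upload() and the background uploader both convert the asset parameters
# from a plain dict into the list-of-objects form before posting metadata
# (params_to_dict / utils.dict_to_params in the calls above). A minimal
# version of that conversion could look like the function below; the exact
# handling of lists and booleans in the real helpers may differ, and the
# function name here is hypothetical.
def _params_dict_to_list(params):
    converted = []
    for key, value in params.items():
        if isinstance(value, (list, tuple)):
            # lists are flattened into a comma-separated string
            value = ','.join(str(v) for v in value)
        elif isinstance(value, bool):
            value = str(value)
        converted.append({'parameterType': key, 'value': value})
    return converted
# e.g. {'faceCount': 1200} -> [{'parameterType': 'faceCount', 'value': 1200}]
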
def run(self):
    # utils.pprint(upload_data)
    self.upload_data['parameters'] = utils.dict_to_params(
        self.upload_data['parameters'])  # array conversion used only for upload, not for tooltips

    script_path = os.path.dirname(os.path.realpath(__file__))

    # first upload metadata to the server, so it can be saved inside the current file
    url = paths.get_api_url() + 'assets/'

    headers = utils.get_headers(self.upload_data['token'])

    # self.upload_data['license'] = 'ovejajojo'
    json_metadata = self.upload_data  # json.dumps(self.upload_data, ensure_ascii=False).encode('utf8')

    # tasks_queue.add_task((ui.add_report, ('Posting metadata',)))
    self.send_message('Posting metadata')
    if self.export_data['assetBaseId'] == '':
        try:
            r = rerequests.post(url, json=json_metadata, headers=headers, verify=True, immediate=True)  # files = files,
            # tasks_queue.add_task((ui.add_report, ('uploaded metadata',)))
            utils.p(r.text)
            self.send_message('uploaded metadata')
        except requests.exceptions.RequestException as e:
            print(e)
            self.end_upload(e)
            return {'CANCELLED'}
    else:
        url += self.export_data['id'] + '/'
        try:
            if 'MAINFILE' in self.upload_set:
                json_metadata["verificationStatus"] = "uploading"
            r = rerequests.patch(url, json=json_metadata, headers=headers, verify=True, immediate=True)  # files = files,
            self.send_message('uploaded metadata')
            # tasks_queue.add_task((ui.add_report, ('uploaded metadata',)))
            # parse the request
            # print('uploaded metadata')
            print(r.text)
        except requests.exceptions.RequestException as e:
            print(e)
            self.end_upload(e)
            return {'CANCELLED'}

    if self.stopped():
        self.end_upload('Upload cancelled by user')
        return

    # props.upload_state = 'step 1'
    if self.upload_set == ['METADATA']:
        self.end_upload('Metadata posted successfully')
        return {'FINISHED'}
    try:
        rj = r.json()
        utils.pprint(rj)
        # if r.status_code not in (200, 201):
        #     if r.status_code == 401:
        #         ui.add_report(r.detail, 5, colors.RED)
        #     return {'CANCELLED'}
        # if props.asset_base_id == '':
        #     props.asset_base_id = rj['assetBaseId']
        #     props.id = rj['id']
        if self.export_data['assetBaseId'] == '':
            self.export_data['assetBaseId'] = rj['assetBaseId']
            self.export_data['id'] = rj['id']
            # here we need to send the asset IDs back into the UI to be written in asset data
            estring = f"{self.export_data['eval_path']}.blenderkit.asset_base_id = '{rj['assetBaseId']}'"
            tasks_queue.add_task((exec, (estring,)))
            estring = f"{self.export_data['eval_path']}.blenderkit.id = '{rj['id']}'"
            tasks_queue.add_task((exec, (estring,)))
            # after that, the user's file needs to be saved to keep the new asset IDs

        self.upload_data['assetBaseId'] = self.export_data['assetBaseId']
        self.upload_data['id'] = self.export_data['id']

        # props.uploading = True

        if 'MAINFILE' in self.upload_set:
            if self.upload_data['assetType'] == 'hdr':
                fpath = self.export_data['hdr_filepath']
            else:
                fpath = os.path.join(self.export_data['temp_dir'],
                                     self.upload_data['assetBaseId'] + '.blend')

                clean_file_path = paths.get_clean_filepath()

                data = {
                    'export_data': self.export_data,
                    'upload_data': self.upload_data,
                    'debug_value': self.export_data['debug_value'],
                    'upload_set': self.upload_set,
                }
                datafile = os.path.join(self.export_data['temp_dir'], BLENDERKIT_EXPORT_DATA_FILE)

                with open(datafile, 'w') as s:
                    json.dump(data, s)

                # non-waiting method - not useful here..
                # proc = subprocess.Popen([
                #     binary_path,
                #     "--background",
                #     "-noaudio",
                #     clean_file_path,
                #     "--python", os.path.join(script_path, "upload_bg.py"),
                #     "--", datafile  # ,filepath, tempdir
                # ], bufsize=5000, stdout=subprocess.PIPE, stdin=subprocess.PIPE)

                # tasks_queue.add_task((ui.add_report, ('preparing scene - running blender instance',)))
                self.send_message('preparing scene - running blender instance')

                # blocking call: the background Blender instance packs the asset file
                proc = subprocess.run([
                    self.export_data['binary_path'],
                    "--background",
                    "-noaudio",
                    clean_file_path,
                    "--python", os.path.join(script_path, "upload_bg.py"),
                    "--", datafile
                ], bufsize=1, stdout=sys.stdout, stdin=subprocess.PIPE,
                    creationflags=utils.get_process_flags())

        if self.stopped():
            self.end_upload('Upload stopped by user')
            return

        files = []
        if 'THUMBNAIL' in self.upload_set:
            files.append({
                "type": "thumbnail",
                "index": 0,
                "file_path": self.export_data["thumbnail_path"]
            })
        if 'MAINFILE' in self.upload_set:
            files.append({
                "type": "blend",
                "index": 0,
                "file_path": fpath
            })

        self.send_message('Uploading files')

        uploaded = upload_bg.upload_files(self.upload_data, files)

        if uploaded:
            # mark on the server as uploaded
            if 'MAINFILE' in self.upload_set:
                confirm_data = {
                    "verificationStatus": "uploaded"
                }

                url = paths.get_api_url() + 'assets/'

                headers = utils.get_headers(self.upload_data['token'])

                url += self.upload_data["id"] + '/'

                r = rerequests.patch(url, json=confirm_data, headers=headers, verify=True)  # files = files,

            self.end_upload('Upload finished successfully')
        else:
            self.end_upload('Upload failed')
    except Exception as e:
        self.end_upload(e)
        print(e)
        return {'CANCELLED'}

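
# --- Illustrative sketch, not necessarily the shipped upload_bg.upload_files ---
# run() above hands the prepared `files` list to upload_bg.upload_files().
# A minimal wrapper compatible with that call site would simply run
# upload_file() for each entry and report overall success; the function name
# below is hypothetical.
def _upload_files_sketch(upload_data, files):
    ok = True
    for f in files:
        # keep going even if one file fails, but remember the failure
        ok = upload_file(upload_data, f) and ok
    return ok
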
def run(self):
    maxthreads = 50
    query = self.query
    params = self.params
    global reports

    t = time.time()
    mt('search thread started')
    tempdir = paths.get_temp_dir('%s_search' % query['asset_type'])
    # json_filepath = os.path.join(tempdir, '%s_searchresult.json' % query['asset_type'])

    headers = utils.get_headers(params['api_key'])

    rdata = {}
    rdata['results'] = []

    if params['get_next']:
        urlquery = self.result['next']
    else:
        urlquery = self.query_to_url()

    try:
        utils.p(urlquery)
        r = rerequests.get(urlquery, headers=headers)  # , params = rparameters)
        # print(r.url)
        reports = ''
        # utils.p(r.text)
    except requests.exceptions.RequestException as e:
        print(e)
        reports = e
        # props.report = e
        return
    mt('response is back ')
    try:
        rdata = r.json()
    except Exception as inst:
        reports = r.text
        print(inst)

    mt('data parsed ')
    if not rdata.get('results'):
        utils.pprint(rdata)
        # if the result was converted to json and didn't return results,
        # it means it's a server error that has a clear message.
        # That's why it gets processed in the update timer, where it can be passed in messages to the user.
        self.result = rdata
        return

    # print('number of results: ', len(rdata.get('results', [])))
    if self.stopped():
        utils.p('stopping search : ' + str(query))
        return

    mt('search finished')
    i = 0

    thumb_small_urls = []
    thumb_small_filepaths = []
    thumb_full_urls = []
    thumb_full_filepaths = []
    # END OF PARSING
    for d in rdata.get('results', []):
        get_author(d)

        for f in d['files']:
            # TODO move validation of published assets to the server, too many checks here.
            if f['fileType'] == 'thumbnail' and f['fileThumbnail'] != None and f['fileThumbnailLarge'] != None:
                thumb_small_urls.append(f['fileThumbnail'])
                thumb_full_urls.append(f['fileThumbnailLarge'])

                imgname = paths.extract_filename_from_url(f['fileThumbnail'])
                imgpath = os.path.join(tempdir, imgname)
                thumb_small_filepaths.append(imgpath)

                imgname = paths.extract_filename_from_url(f['fileThumbnailLarge'])
                imgpath = os.path.join(tempdir, imgname)
                thumb_full_filepaths.append(imgpath)

    sml_thbs = zip(thumb_small_filepaths, thumb_small_urls)
    full_thbs = zip(thumb_full_filepaths, thumb_full_urls)

    # we save here because a missing-thumbnail check is in the previous loop.
    # we can also prepend previous results - these have downloaded thumbnails already...
    if params['get_next']:
        rdata['results'][0:0] = self.result['results']

    self.result = rdata
    # with open(json_filepath, 'w') as outfile:
    #     json.dump(rdata, outfile)

    killthreads_sml = []
    for k in thumb_sml_download_threads.keys():
        if k not in thumb_small_filepaths:
            killthreads_sml.append(k)  # do actual killing here?

    killthreads_full = []
    for k in thumb_full_download_threads.keys():
        if k not in thumb_full_filepaths:
            killthreads_full.append(k)  # do actual killing here?
    # TODO do the killing/stopping here! remember threads might have finished in between!

    if self.stopped():
        utils.p('stopping search : ' + str(query))
        return

    # this loop handles downloading of small thumbnails
    for imgpath, url in sml_thbs:
        if imgpath not in thumb_sml_download_threads and not os.path.exists(imgpath):
            thread = ThumbDownloader(url, imgpath)
            # thread = threading.Thread(target=download_thumbnail, args=([url, imgpath]),
            #                           daemon=True)
            thread.start()
            thumb_sml_download_threads[imgpath] = thread
            # threads.append(thread)
            if len(thumb_sml_download_threads) > maxthreads:
                # too many download threads - reap finished ones before starting more
                while len(thumb_sml_download_threads) > maxthreads:
                    threads_copy = thumb_sml_download_threads.copy()  # because the for loop can erase some of the items
                    for tk, thread in threads_copy.items():
                        if not thread.is_alive():
                            thread.join()
                            # utils.p(x)
                            del (thumb_sml_download_threads[tk])
                            # utils.p('fetched thumbnail ', i)
                            i += 1
    if self.stopped():
        utils.p('stopping search : ' + str(query))
        return

    idx = 0
    # wait for the remaining small-thumbnail downloads to finish
    while len(thumb_sml_download_threads) > 0:
        threads_copy = thumb_sml_download_threads.copy()  # because the for loop can erase some of the items
        for tk, thread in threads_copy.items():
            if not thread.is_alive():
                thread.join()
                del (thumb_sml_download_threads[tk])
                i += 1

    if self.stopped():
        utils.p('stopping search : ' + str(query))
        return

    # start downloading full thumbnails at the end
    for imgpath, url in full_thbs:
        if imgpath not in thumb_full_download_threads and not os.path.exists(imgpath):
            thread = ThumbDownloader(url, imgpath)
            # thread = threading.Thread(target=download_thumbnail, args=([url, imgpath]),
            #                           daemon=True)
            thread.start()
            thumb_full_download_threads[imgpath] = thread
    mt('thumbnails finished')

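
# --- Illustrative sketch, not necessarily the shipped ThumbDownloader ---
# The search thread above fans thumbnail downloads out to ThumbDownloader
# threads and only relies on the standard threading.Thread API
# (start/is_alive/join). A minimal compatible implementation could look like
# this; the class name below is hypothetical, and it assumes `threading` and
# `requests` are imported at module level as they are elsewhere in the file.
class _ThumbDownloaderSketch(threading.Thread):
    def __init__(self, url, path):
        super().__init__(daemon=True)
        self.url = url
        self.path = path

    def run(self):
        # stream the image to disk; failures are only printed so one bad
        # thumbnail does not break the whole search result
        try:
            r = requests.get(self.url, stream=True, verify=True)
            if r.status_code == 200:
                with open(self.path, 'wb') as f:
                    for chunk in r.iter_content(chunk_size=4096):
                        f.write(chunk)
        except requests.exceptions.RequestException as e:
            print(e)
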