def delete_temp_user_data(temp_user_id, r):
    """Remove all database rows and redis keys tied to a temporary user.

    ``temp_user_id`` is the integer id of the anonymous session user and
    ``r`` is a redis connection.
    """
    # Clear database tables that reference the temporary user, one table
    # (and one commit) at a time, matching the original ordering.
    UserDictKeys.query.filter_by(temp_user_id=temp_user_id).delete()
    db.session.commit()
    ChatLog.query.filter_by(temp_owner_id=temp_user_id).delete()
    db.session.commit()
    ChatLog.query.filter_by(temp_user_id=temp_user_id).delete()
    db.session.commit()
    GlobalObjectStorage.query.filter_by(temp_user_id=temp_user_id).delete()
    db.session.commit()
    # Collect the stored-file numbers of every e-mail attachment reachable
    # through this user's shortened URLs, then delete those files.
    doomed_files = []
    for shortener_row in Shortener.query.filter_by(temp_user_id=temp_user_id).all():
        for email_row in Email.query.filter_by(short=shortener_row.short).all():
            for attachment_row in EmailAttachment.query.filter_by(email_id=email_row.id).all():
                doomed_files.append(attachment_row.upload)
    for doomed_number in doomed_files:
        SavedFile(doomed_number).delete()
    Shortener.query.filter_by(temp_user_id=temp_user_id).delete()
    db.session.commit()
    # Delete redis keys that embed the temporary user id: both the bare
    # "...userid:tNN" form and the "...userid:tNN:*" suffixed form.
    doomed_keys = set(r.keys('*userid:t' + text_type(temp_user_id)))
    doomed_keys.update(r.keys('*userid:t' + text_type(temp_user_id) + ':*'))
    for redis_key in doomed_keys:
        r.delete(redis_key)
def absolute_filename(the_file):
    """Resolve a playground file reference to a SavedFile object.

    ``the_file`` is a reference of the form
    ``docassemble.playgroundNN:name`` or ``/playground<section>/NN/name``.
    Returns a SavedFile for the matching playground section, or None when
    the reference matches no known pattern.

    Refactored from four copy-pasted regex branches into one table-driven
    loop; behavior (including the write_ml_source call for the sources
    section) is unchanged.
    """
    section_patterns = (
        (r'^docassemble.playground([0-9]+):(.*)', 'playground'),
        (r'^/playgroundtemplate/([0-9]+)/(.*)', 'playgroundtemplate'),
        (r'^/playgroundstatic/([0-9]+)/(.*)', 'playgroundstatic'),
        (r'^/playgroundsources/([0-9]+)/(.*)', 'playgroundsources'),
    )
    for pattern, section in section_patterns:
        match = re.match(pattern, the_file)
        if not match:
            continue
        # Strip characters that are not safe in a playground filename.
        filename = re.sub(r'[^A-Za-z0-9\-\_\. ]', '', match.group(2))
        playground = SavedFile(match.group(1), section=section, fix=True, filename=filename)
        if section == 'playgroundsources':
            # Sources files may be machine-learning exports that need to be
            # regenerated before use.
            write_ml_source(playground, match.group(1), filename)
        return playground
    return None
def save_attachment(uid, yaml_filename, filename, email_id, index, content_type, extension, content):
    """Store one e-mail attachment and record it in the database.

    Allocates a new file number for the attachment, writes an
    EmailAttachment row linking it to ``email_id``, and saves the raw
    ``content`` to disk.
    """
    att_file_number = get_new_file_number(uid, filename, yaml_file_name=yaml_filename)
    # BUG FIX: the original hard-coded index=0, ignoring the caller-supplied
    # ``index`` parameter, so every attachment of a multi-attachment e-mail
    # was recorded with the same position.  Use the parameter instead.
    attachment_record = EmailAttachment(email_id=email_id, index=index, content_type=content_type, extension=extension, upload=att_file_number)
    db.session.add(attachment_record)
    db.session.commit()
    saved_file_attachment = SavedFile(att_file_number, extension=extension)
    saved_file_attachment.write_content(content)
    saved_file_attachment.finalize()
def reset_user_dict(user_code, filename, user_id=None, temp_user_id=None, force=False):
    """Delete a user's claim on an interview session, and the session data
    itself if no other user still has a claim on it.

    user_code is the session key; filename is the interview YAML name.
    When force is True the session is deleted regardless of other claims.
    Otherwise the (possibly temporary) user is determined from the
    arguments or the current request context.
    """
    #logmessage("reset_user_dict called with " + str(user_code) + " and " + str(filename))
    if force:
        the_user_id = None
    else:
        # Figure out whose claim on the session we are removing: an
        # explicit user id, an explicit temporary-user id, or the user
        # attached to the current request.
        if user_id is None and temp_user_id is None:
            if current_user.is_authenticated and not current_user.is_anonymous:
                user_type = 'user'
                the_user_id = current_user.id
            else:
                user_type = 'tempuser'
                the_user_id = session.get('tempuser', None)
        elif user_id is not None:
            user_type = 'user'
            the_user_id = user_id
        else:
            user_type = 'tempuser'
            the_user_id = temp_user_id
    if the_user_id is None:
        # Forced (or ownerless) reset: remove every claim and delete.
        UserDictKeys.query.filter_by(key=user_code, filename=filename).delete()
        db.session.commit()
        do_delete = True
    else:
        # Remove only this user's claim; delete the session data only if
        # nobody else still holds a claim on it.
        if user_type == 'user':
            UserDictKeys.query.filter_by(key=user_code, filename=filename, user_id=the_user_id).delete()
        else:
            UserDictKeys.query.filter_by(key=user_code, filename=filename, temp_user_id=the_user_id).delete()
        db.session.commit()
        existing_user_dict_key = UserDictKeys.query.filter_by(key=user_code, filename=filename).first()
        if not existing_user_dict_key:
            do_delete = True
        else:
            do_delete = False
    if do_delete:
        UserDict.query.filter_by(key=user_code, filename=filename).delete()
        db.session.commit()
        # Collect stored-file numbers to delete after the rows are gone:
        # speech synthesis files, non-persistent uploads, and e-mail
        # attachments reachable through the session's shortened URLs.
        files_to_delete = list()
        for speaklist in SpeakList.query.filter_by(key=user_code, filename=filename).all():
            if speaklist.upload is not None:
                files_to_delete.append(speaklist.upload)
        SpeakList.query.filter_by(key=user_code, filename=filename).delete()
        db.session.commit()
        for upload in Uploads.query.filter_by(key=user_code, yamlfile=filename, persistent=False).all():
            files_to_delete.append(upload.indexno)
        Uploads.query.filter_by(key=user_code, yamlfile=filename, persistent=False).delete()
        db.session.commit()
        ChatLog.query.filter_by(key=user_code, filename=filename).delete()
        db.session.commit()
        for short_code_item in Shortener.query.filter_by(uid=user_code, filename=filename).all():
            for email in Email.query.filter_by(short=short_code_item.short).all():
                for attachment in EmailAttachment.query.filter_by(email_id=email.id).all():
                    files_to_delete.append(attachment.upload)
        Shortener.query.filter_by(uid=user_code, filename=filename).delete()
        db.session.commit()
        # Finally remove the underlying stored files.
        for file_number in files_to_delete:
            the_file = SavedFile(file_number)
            the_file.delete()
    return
def __init__(self, section=''):
    """Set up a playground area for the currently logged-in user.

    Raises DAError when the current user is anonymous, since playground
    storage is tied to a real user account.
    """
    info = docassemble.base.functions.this_thread.current_info
    if info['user']['is_anonymous']:
        raise DAError("Users must be logged in to create Playground objects")
    self.user_id = info['user']['theid']
    self.current_info = info
    self.section = section
    self.area = SavedFile(self.user_id, fix=True, section='playground' + self.section)
    self._update_file_list()
def reset_user_dict(user_code, filename, user_id=None, temp_user_id=None, force=False):
    """Delete a user's claim on an interview session, and the session data
    itself if no other user still has a claim on it.

    user_code is the session key; filename is the interview YAML name.
    When force is True the session is deleted regardless of other claims.
    """
    #logmessage("reset_user_dict called with " + str(user_code) + " and " + str(filename))
    if force:
        the_user_id = None
    else:
        # Determine whose claim is being removed: explicit user id,
        # explicit temporary-user id, or the user from the current request.
        if user_id is None and temp_user_id is None:
            if current_user.is_authenticated and not current_user.is_anonymous:
                user_type = 'user'
                the_user_id = current_user.id
            else:
                user_type = 'tempuser'
                the_user_id = session.get('tempuser', None)
        elif user_id is not None:
            user_type = 'user'
            the_user_id = user_id
        else:
            user_type = 'tempuser'
            the_user_id = temp_user_id
    if the_user_id is None:
        # Forced (or ownerless) reset: remove every claim and delete.
        UserDictKeys.query.filter_by(key=user_code, filename=filename).delete()
        db.session.commit()
        do_delete = True
    else:
        # Remove only this user's claim; delete the session data only if
        # nobody else still has a claim on it.
        if user_type == 'user':
            UserDictKeys.query.filter_by(key=user_code, filename=filename, user_id=the_user_id).delete()
        else:
            UserDictKeys.query.filter_by(key=user_code, filename=filename, temp_user_id=the_user_id).delete()
        db.session.commit()
        existing_user_dict_key = UserDictKeys.query.filter_by(key=user_code, filename=filename).first()
        if not existing_user_dict_key:
            do_delete = True
        else:
            do_delete = False
    if do_delete:
        UserDict.query.filter_by(key=user_code, filename=filename).delete()
        db.session.commit()
        # Non-persistent uploads: delete the stored files, then the rows.
        for upload in Uploads.query.filter_by(key=user_code, yamlfile=filename, persistent=False).all():
            old_file = SavedFile(upload.indexno)
            old_file.delete()
        Uploads.query.filter_by(key=user_code, yamlfile=filename, persistent=False).delete()
        db.session.commit()
        # Attachments.query.filter_by(key=user_code, filename=filename).delete()
        # db.session.commit()
        SpeakList.query.filter_by(key=user_code, filename=filename).delete()
        db.session.commit()
        ChatLog.query.filter_by(key=user_code, filename=filename).delete()
        db.session.commit()
        Shortener.query.filter_by(uid=user_code, filename=filename).delete()
        db.session.commit()
    #logmessage("reset_user_dict: done")
    return
def save_numbered_file(filename, orig_path, yaml_file_name=None, uid=None):
    """Copy ``orig_path`` into numbered file storage under ``filename``.

    Returns a tuple (file_number, extension, mimetype).  Raises Exception
    when no uid can be determined.
    """
    if uid is None:
        # Prefer the uid from the Flask session; outside a request
        # context, fall back to the interview-thread uid.
        if has_request_context() and 'uid' in session:
            uid = session.get('uid', None)
        else:
            uid = docassemble.base.functions.get_uid()
    if uid is None:
        raise Exception("save_numbered_file: uid not defined")
    new_number = get_new_file_number(uid, filename, yaml_file_name=yaml_file_name)
    extension, mimetype = get_ext_and_mimetype(filename)
    stored = SavedFile(new_number, extension=extension, fix=True)
    stored.copy_from(orig_path)
    stored.save(finalize=True)
    return new_number, extension, mimetype
def install_package(package):
    """Install a docassemble add-on package using pip's in-process API.

    Supports zip uploads, git URLs, and PyPI names (optionally with a
    version limitation).  Returns (returnval, logfilecontents) where
    returnval is pip.main's exit status.
    """
    if package.type == 'zip' and package.upload is None:
        return 0, ''
    logmessage('install_package: ' + package.name)
    log_text = ''
    # Reset pip's thread-local logging state so indentation starts clean.
    pip.utils.logging._log_state = threading.local()
    pip.utils.logging._log_state.indentation = 0
    pip_log = tempfile.NamedTemporaryFile()
    if package.type == 'zip' and package.upload is not None:
        zip_file = SavedFile(package.upload, extension='zip', fix=True)
        pip_args = ['install', '--egg', '--no-index', '--src=' + tempfile.mkdtemp(), '--log-file=' + pip_log.name, '--upgrade', zip_file.path + '.zip']
    elif package.type == 'git' and package.giturl is not None:
        pip_args = ['install', '--egg', '--src=' + tempfile.mkdtemp(), '--upgrade', '--log-file=' + pip_log.name, 'git+' + package.giturl + '.git#egg=' + package.name]
    elif package.type == 'pip':
        limit = "" if package.limitation is None else str(package.limitation)
        pip_args = ['install', '--src=' + tempfile.mkdtemp(), '--upgrade', '--log-file=' + pip_log.name, package.name + limit]
    else:
        return 1, 'Unable to recognize package type: ' + package.name
    logmessage("Running pip " + " ".join(pip_args))
    log_text += "Running pip " + " ".join(pip_args) + "\n"
    returnval = pip.main(pip_args)
    # Append pip's own log file to the returned log text.
    with open(pip_log.name) as log_fp:
        log_text += log_fp.read()
    logmessage(log_text)
    logmessage('install_package: done')
    return returnval, log_text
def get_info_from_file_number(file_number, privileged=False, filename=None):
    """Look up metadata for an uploaded file by file number.

    Returns a dict with filename, extension, mimetype, savedfile, path and
    fullpath keys when the upload exists (and, unless privileged, belongs
    to the current session uid); otherwise a partial or empty dict.
    """
    uid = session['uid'] if has_request_context() else docassemble.base.functions.get_uid()
    #logmessage("get_info_from_file_number: privileged is " + str(privileged) + " and uid is " + str(uid))
    result = dict()
    # Privileged callers may fetch any upload; others only their own.
    if privileged:
        upload = Uploads.query.filter_by(indexno=file_number).first()
    else:
        upload = Uploads.query.filter_by(indexno=file_number, key=uid).first()
    if upload:
        result['filename'] = upload.filename if filename is None else filename
        result['extension'], result['mimetype'] = get_ext_and_mimetype(result['filename'])
        result['savedfile'] = SavedFile(file_number, extension=result['extension'], fix=True)
        result['path'] = result['savedfile'].path
        result['fullpath'] = result['path'] + '.' + result['extension']
    if 'path' not in result:
        logmessage("get_info_from_file_number: path is not in result for " + str(file_number))
        return result
    final_filename = result['path'] + '.' + result['extension']
    if os.path.isfile(final_filename):
        add_info_about_file(final_filename, result)
    return result
def get_info_from_file_number(file_number, privileged=False, filename=None):
    """Look up metadata for an uploaded file by file number.

    Admin/developer/advocate/trainer users are treated as privileged and
    may access any upload; others may access only their own uploads or
    non-private ones.  Returns a dict of file metadata (possibly partial).
    """
    if current_user and current_user.is_authenticated and current_user.has_role('admin', 'developer', 'advocate', 'trainer'):
        privileged = True
    else:
        # uid is only needed for the non-privileged ownership check below.
        if has_request_context() and 'uid' in session:
            uid = session['uid']
        else:
            uid = docassemble.base.functions.get_uid()
    result = dict()
    if privileged:
        upload = Uploads.query.filter_by(indexno=file_number).first()
    else:
        upload = Uploads.query.filter(and_(Uploads.indexno == file_number, or_(Uploads.key == uid, Uploads.private == False))).first()
    if upload:
        result['filename'] = filename if filename is not None else upload.filename
        result['extension'], result['mimetype'] = get_ext_and_mimetype(result['filename'])
        stored = SavedFile(file_number, extension=result['extension'], fix=True)
        result['path'] = stored.path
        result['fullpath'] = result['path'] + '.' + result['extension']
        result['private'] = upload.private
        result['persistent'] = upload.persistent
    if 'path' not in result:
        logmessage("get_info_from_file_number: path is not in result for " + str(file_number))
        return result
    final_filename = result['path'] + '.' + result['extension']
    if os.path.isfile(final_filename):
        add_info_about_file(final_filename, result)
    return result
def install_package(package):
    """Install a docassemble add-on package by running pip in a subprocess.

    Supports zip uploads, git URLs (optionally with a subdirectory), and
    PyPI names (optionally with a version limitation).  Returns a tuple
    (returnval, logfilecontents) where returnval is pip's exit status.
    """
    sys.stderr.write("install_package: " + package.name + "\n")
    if package.type == 'zip' and package.upload is None:
        return 0, ''
    from docassemble.base.config import daconfig
    PACKAGE_DIRECTORY = daconfig.get('packages', '/usr/share/docassemble/local')
    logfilecontents = ''
    pip_log = tempfile.NamedTemporaryFile()
    temp_dir = tempfile.mkdtemp()
    if package.type == 'zip' and package.upload is not None:
        saved_file = SavedFile(package.upload, extension='zip', fix=True)
        commands = ['pip', 'install', '--quiet', '--process-dependency-links', '--allow-all-external', '--prefix=' + PACKAGE_DIRECTORY, '--src=' + temp_dir, '--log-file=' + pip_log.name, '--upgrade', saved_file.path + '.zip']
    elif package.type == 'git' and package.giturl is not None:
        if package.gitsubdir is not None:
            commands = ['pip', 'install', '--quiet', '--process-dependency-links', '--allow-all-external', '--prefix=' + PACKAGE_DIRECTORY, '--src=' + temp_dir, '--upgrade', '--log-file=' + pip_log.name, 'git+' + str(package.giturl) + '.git#egg=' + package.name + '&subdirectory=' + str(package.gitsubdir)]
        else:
            commands = ['pip', 'install', '--quiet', '--process-dependency-links', '--allow-all-external', '--prefix=' + PACKAGE_DIRECTORY, '--src=' + temp_dir, '--upgrade', '--log-file=' + pip_log.name, 'git+' + str(package.giturl) + '.git#egg=' + package.name]
    elif package.type == 'pip':
        if package.limitation is None:
            limit = ""
        else:
            limit = str(package.limitation)
        commands = ['pip', 'install', '--quiet', '--process-dependency-links', '--prefix=' + PACKAGE_DIRECTORY, '--src=' + temp_dir, '--upgrade', '--log-file=' + pip_log.name, package.name + limit]
    else:
        sys.stderr.write("Wrong package type\n")
        # BUG FIX: the original leaked temp_dir on this path.
        shutil.rmtree(temp_dir)
        return 1, 'Unable to recognize package type: ' + package.name
    sys.stderr.write("install_package: running " + " ".join(commands) + "\n")
    logfilecontents += " ".join(commands) + "\n"
    # BUG FIX: subprocess.call() returns the exit status and never raises
    # CalledProcessError (that is subprocess.check_call), so the original
    # try/except always reported success.  Use the return value directly,
    # and treat a missing pip executable (OSError) as failure.
    try:
        returnval = subprocess.call(commands)
    except OSError:
        returnval = 1
    sys.stderr.flush()
    sys.stdout.flush()
    time.sleep(1)
    with open(pip_log.name, 'rU') as x:
        logfilecontents += x.read().decode('utf8')
    pip_log.close()
    try:
        sys.stderr.write(logfilecontents + "\n")
    except Exception:
        # Best-effort logging only; the log text may not be encodable.
        pass
    sys.stderr.flush()
    sys.stdout.flush()
    time.sleep(1)
    sys.stderr.write('returnval is: ' + str(returnval) + "\n")
    sys.stderr.write('install_package: done' + "\n")
    shutil.rmtree(temp_dir)
    return returnval, logfilecontents
def install_package(package):
    """Install a docassemble add-on package by running pip in a subprocess.

    Supports zip uploads, git URLs, and PyPI names (optionally with a
    version limitation).  Returns a tuple (returnval, logfilecontents)
    where returnval is pip's exit status.
    """
    sys.stderr.write("install_package: " + package.name + "\n")
    if package.type == 'zip' and package.upload is None:
        return 0, ''
    from docassemble.base.config import daconfig
    PACKAGE_DIRECTORY = daconfig.get('packages', '/usr/share/docassemble/local')
    logfilecontents = ''
    pip_log = tempfile.NamedTemporaryFile()
    temp_dir = tempfile.mkdtemp()
    if package.type == 'zip' and package.upload is not None:
        saved_file = SavedFile(package.upload, extension='zip', fix=True)
        commands = [
            'pip', 'install', '--quiet', '--process-dependency-links',
            '--allow-all-external', '--prefix=' + PACKAGE_DIRECTORY,
            '--src=' + temp_dir, '--log-file=' + pip_log.name,
            '--upgrade', saved_file.path + '.zip'
        ]
    elif package.type == 'git' and package.giturl is not None:
        commands = [
            'pip', 'install', '--quiet', '--process-dependency-links',
            '--allow-all-external', '--prefix=' + PACKAGE_DIRECTORY,
            '--src=' + temp_dir, '--upgrade',
            '--log-file=' + pip_log.name,
            'git+' + package.giturl + '.git#egg=' + package.name
        ]
    elif package.type == 'pip':
        if package.limitation is None:
            limit = ""
        else:
            limit = str(package.limitation)
        commands = [
            'pip', 'install', '--quiet', '--process-dependency-links',
            '--prefix=' + PACKAGE_DIRECTORY, '--src=' + temp_dir,
            '--upgrade', '--log-file=' + pip_log.name,
            package.name + limit
        ]
    else:
        sys.stderr.write("Wrong package type\n")
        # BUG FIX: the original leaked temp_dir on this path.
        shutil.rmtree(temp_dir)
        return 1, 'Unable to recognize package type: ' + package.name
    sys.stderr.write("install_package: running " + " ".join(commands) + "\n")
    logfilecontents += " ".join(commands) + "\n"
    # BUG FIX: subprocess.call() returns the exit status and never raises
    # CalledProcessError (that is subprocess.check_call), so the original
    # try/except always reported success.  Use the return value directly,
    # and treat a missing pip executable (OSError) as failure.
    try:
        returnval = subprocess.call(commands)
    except OSError:
        returnval = 1
    with open(pip_log.name, 'rU') as x:
        logfilecontents += x.read().decode('utf8')
    sys.stderr.write(logfilecontents + "\n")
    sys.stderr.write('install_package: done' + "\n")
    shutil.rmtree(temp_dir)
    return returnval, logfilecontents
def reset_user_dict(user_code, filename):
    """Erase every stored record tied to an interview session.

    user_code is the session key; filename is the interview YAML name.
    Deletes the session dictionary, user claims, uploads (and their
    stored files), attachments, speech entries, chat logs, and shortened
    URLs, committing after each table.
    """
    UserDict.query.filter_by(key=user_code, filename=filename).delete()
    db.session.commit()
    UserDictKeys.query.filter_by(key=user_code, filename=filename).delete()
    db.session.commit()
    # Remove the stored files behind each upload before dropping the rows.
    for upload_row in Uploads.query.filter_by(key=user_code, yamlfile=filename).all():
        SavedFile(upload_row.indexno).delete()
    Uploads.query.filter_by(key=user_code, yamlfile=filename).delete()
    db.session.commit()
    Attachments.query.filter_by(key=user_code, filename=filename).delete()
    db.session.commit()
    SpeakList.query.filter_by(key=user_code, filename=filename).delete()
    db.session.commit()
    ChatLog.query.filter_by(key=user_code, filename=filename).delete()
    db.session.commit()
    Shortener.query.filter_by(uid=user_code, filename=filename).delete()
    db.session.commit()
    return
def variables_from(self, content):
    """Analyze playground YAML ``content`` and report the variables it uses.

    Assembles the content as a temporary interview and returns a dict with
    the sets: names_used, undefined_names, fields_used, all_names, and
    all_names_reduced (all_names minus 'url_args').
    """
    the_directory = directory_for(self.get_area(), self.project)
    interview_source = docassemble.base.parse.InterviewSourceString(content=content, directory=the_directory, path="docassemble.playground" + str(self.user_id) + project_name(self.project) + ":_temp.yml", package='docassemble.playground' + str(self.user_id) + project_name(self.project), testing=True)
    interview = interview_source.get_interview()
    temp_current_info = copy.deepcopy(self.current_info)
    temp_current_info['yaml_filename'] = "docassemble.playground" + str(self.user_id) + project_name(self.project) + ":_temp.yml"
    interview_status = docassemble.base.parse.InterviewStatus(current_info=temp_current_info)
    user_dict = docassemble.base.parse.get_initial_dict()
    user_dict['_internal']['starttime'] = datetime.datetime.utcnow()
    user_dict['_internal']['modtime'] = datetime.datetime.utcnow()
    # Attempt a trial assembly; errors are recorded but analysis continues
    # with whatever was populated before the failure.
    try:
        interview.assemble(user_dict, interview_status)
        has_error = False
    except Exception as errmess:
        has_error = True
        error_message = str(errmess)
        error_type = type(errmess)
        #logmessage("Failed assembly with error type " + str(error_type) + " and message: " + error_message)
    functions = set()
    modules = set()
    classes = set()
    fields_used = set()
    names_used = set()
    names_used.update(interview.names_used)
    # Module files in the playground count as available modules, not as
    # undefined names.
    area = SavedFile(self.user_id, fix=True, section='playgroundmodules')
    the_directory = directory_for(area, self.project)
    avail_modules = set([re.sub(r'.py$', '', f) for f in os.listdir(the_directory) if os.path.isfile(os.path.join(the_directory, f))])
    for question in interview.questions_list:
        names_used.update(question.mako_names)
        names_used.update(question.names_used)
        names_used.update(question.fields_used)
        fields_used.update(question.fields_used)
    for val in interview.questions:
        names_used.add(val)
        fields_used.add(val)
    # Classify what the trial assembly left in the user dictionary.
    # (TypeType/types.ClassType: this is Python-2 era code.)
    for val in user_dict:
        if type(user_dict[val]) is types.FunctionType:
            functions.add(val)
        elif type(user_dict[val]) is TypeType or type(user_dict[val]) is types.ClassType:
            classes.add(val)
        elif type(user_dict[val]) is types.ModuleType:
            modules.add(val)
    for val in docassemble.base.functions.pickleable_objects(user_dict):
        names_used.add(val)
    for var in ['_internal']:
        names_used.discard(var)
    # A name is "used" only if it is not a function/class/module/available
    # module; it is "undefined" if additionally no question defines it and
    # it is not one of the always-defined names.
    names_used = names_used.difference( functions | classes | modules | avail_modules )
    undefined_names = names_used.difference(fields_used | always_defined )
    for var in ['_internal']:
        undefined_names.discard(var)
    names_used = names_used.difference( undefined_names )
    all_names = names_used | undefined_names | fields_used
    all_names_reduced = all_names.difference( set(['url_args']) )
    return dict(names_used=names_used, undefined_names=undefined_names, fields_used=fields_used, all_names=all_names, all_names_reduced=all_names_reduced)
def reset_user_dict(user_code, filename):
    """Erase the stored records tied to an interview session.

    user_code is the session key; filename is the interview YAML name.
    Deletes the session dictionary, user claims, non-persistent uploads
    (and their stored files), speech entries, chat logs, and shortened
    URLs, committing after each table.  Attachments rows are deliberately
    left alone.
    """
    UserDict.query.filter_by(key=user_code, filename=filename).delete()
    db.session.commit()
    UserDictKeys.query.filter_by(key=user_code, filename=filename).delete()
    db.session.commit()
    # Remove the stored files behind each non-persistent upload before
    # dropping the rows; persistent uploads survive the reset.
    for upload_row in Uploads.query.filter_by(key=user_code, yamlfile=filename, persistent=False).all():
        SavedFile(upload_row.indexno).delete()
    Uploads.query.filter_by(key=user_code, yamlfile=filename, persistent=False).delete()
    db.session.commit()
    SpeakList.query.filter_by(key=user_code, filename=filename).delete()
    db.session.commit()
    ChatLog.query.filter_by(key=user_code, filename=filename).delete()
    db.session.commit()
    Shortener.query.filter_by(uid=user_code, filename=filename).delete()
    db.session.commit()
    return
def fix_ml_files(playground_number):
    """Regenerate machine-learning source files in a playground's sources area.

    Scans the playgroundsources section of the given playground for
    ml-*.json files and rewrites each via write_ml_source, finalizing the
    area once if anything changed.
    """
    playground = SavedFile(playground_number, section='playgroundsources', fix=False)
    changed = False
    for filename in playground.list_of_files():
        if re.match(r'^ml-.*\.json', filename):
            playground.fix()
            # BUG FIX: the original used a bare ``except:``, which also
            # swallows SystemExit/KeyboardInterrupt; catch Exception only.
            try:
                if write_ml_source(playground, playground_number, filename, finalize=False):
                    changed = True
            except Exception:
                logmessage("Error writing machine learning source file " + str(filename))
    if changed:
        playground.finalize()
def get_info_from_file_number(file_number, privileged=False, filename=None, uids=None):
    """Look up metadata for an uploaded file by file number, enforcing access.

    Admin/developer/advocate/trainer users are privileged and may access
    any upload.  Otherwise a private upload is accessible only when its
    session key is in ``uids``, or when the current (possibly temporary)
    user has a claim on that session or an explicit per-user/per-role
    authorization row.  Returns a dict of file metadata (possibly partial).
    """
    if current_user and current_user.is_authenticated and current_user.has_role('admin', 'developer', 'advocate', 'trainer'):
        privileged = True
    elif uids is None or len(uids) == 0:
        # Default the allowed session keys to the current interview uid.
        new_uid = docassemble.base.functions.get_uid()
        if new_uid is not None:
            uids = [new_uid]
        else:
            uids = []
    result = dict()
    upload = db.session.execute(select(Uploads).filter_by(indexno=file_number)).scalar()
    if not privileged and upload is not None and upload.private and upload.key not in uids:
        # Private upload from another session: allow access only via a
        # session claim (UserDictKeys) or an explicit authorization row
        # (UploadsUserAuth per user, UploadsRoleAuth per role).
        has_access = False
        if current_user and current_user.is_authenticated:
            if db.session.execute(select(UserDictKeys).filter_by(key=upload.key, user_id=current_user.id)).first() or db.session.execute(select(UploadsUserAuth).filter_by(uploads_indexno=file_number, user_id=current_user.id)).first() or db.session.execute(select(UploadsRoleAuth.id).join(UserRoles, and_(UserRoles.user_id == current_user.id, UploadsRoleAuth.role_id == UserRoles.role_id)).where(UploadsRoleAuth.uploads_indexno == file_number)).first():
                has_access = True
        elif session and 'tempuser' in session:
            # Anonymous visitors are checked via their temporary user id.
            temp_user_id = int(session['tempuser'])
            if db.session.execute(select(UserDictKeys).filter_by(key=upload.key, temp_user_id=temp_user_id)).first() or db.session.execute(select(UploadsUserAuth).filter_by(uploads_indexno=file_number, temp_user_id=temp_user_id)).first():
                has_access = True
        if not has_access:
            upload = None
    if upload:
        if filename is None:
            result['filename'] = upload.filename
        else:
            result['filename'] = filename
        result['extension'], result['mimetype'] = get_ext_and_mimetype(result['filename'])
        sf = SavedFile(file_number, extension=result['extension'], fix=True)
        result['path'] = sf.path
        result['fullpath'] = result['path'] + '.' + result['extension']
        result['private'] = upload.private
        result['persistent'] = upload.persistent
        #logmessage("fullpath is " + str(result['fullpath']))
    if 'path' not in result:
        logmessage("get_info_from_file_number: path is not in result for " + str(file_number))
        return result
    final_filename = result['path'] + '.' + result['extension']
    if os.path.isfile(final_filename):
        add_info_about_file(final_filename, result['path'], result)
    # else:
    #     logmessage("Filename " + final_filename + "did not exist.")
    return(result)
def save_numbered_file(filename, orig_path, yaml_file_name=None, uid=None):
    """Copy ``orig_path`` into numbered file storage under ``filename``.

    Returns a tuple (file_number, extension, mimetype).  Raises Exception
    when no uid can be determined.
    """
    if uid is None:
        # Inside a request use the Flask session's uid; otherwise fall
        # back to the interview-thread uid.
        uid = session.get('uid', None) if has_request_context() else docassemble.base.functions.get_uid()
    if uid is None:
        raise Exception("save_numbered_file: uid not defined")
    new_number = get_new_file_number(uid, filename, yaml_file_name=yaml_file_name)
    extension, mimetype = get_ext_and_mimetype(filename)
    stored = SavedFile(new_number, extension=extension, fix=True)
    stored.copy_from(orig_path)
    stored.save(finalize=True)
    return new_number, extension, mimetype
def save_numbered_file(filename, orig_path, yaml_file_name=None, uid=None):
    """Copy ``orig_path`` into numbered file storage under ``filename``.

    Returns a tuple (file_number, extension, mimetype).  When no uid is
    supplied and none can be determined, an unattached uid is generated;
    raises Exception only if even that yields None.
    """
    if uid is None:
        # BUG FIX: the original used ``assert uid is not None`` inside a
        # bare ``except:`` to trigger the fallback; assert is stripped
        # under -O and the bare except hides unrelated errors.  Test the
        # value explicitly instead.
        try:
            uid = docassemble.base.functions.get_uid()
        except Exception:
            uid = None
        if uid is None:
            uid = unattached_uid()
    if uid is None:
        raise Exception("save_numbered_file: uid not defined")
    file_number = get_new_file_number(uid, filename, yaml_file_name=yaml_file_name)
    extension, mimetype = get_ext_and_mimetype(filename)
    new_file = SavedFile(file_number, extension=extension, fix=True)
    new_file.copy_from(orig_path)
    new_file.save(finalize=True)
    return (file_number, extension, mimetype)
def get_info_from_file_number(file_number, privileged=False, filename=None, uids=None):
    """Look up metadata for an uploaded file by file number.

    Admin/developer/advocate/trainer users are privileged and may access
    any upload; otherwise a private upload is visible only when its key
    is among ``uids`` (defaulting to the current session).  Returns a
    dict of file metadata (possibly partial).
    """
    if current_user and current_user.is_authenticated and current_user.has_role('admin', 'developer', 'advocate', 'trainer'):
        privileged = True
    elif uids is None:
        # NOTE(review): bare except kept from original — best-effort
        # lookup of the current session id; any failure means no uids.
        try:
            uids = [docassemble.base.functions.this_thread.current_info['session']]
        except:
            uids = []
    result = dict()
    upload = Uploads.query.filter_by(indexno=file_number).first()
    # Hide private uploads belonging to other sessions.
    if not privileged and upload is not None and upload.private and upload.key not in uids:
        upload = None
    if upload:
        result['filename'] = filename if filename is not None else upload.filename
        result['extension'], result['mimetype'] = get_ext_and_mimetype(result['filename'])
        stored = SavedFile(file_number, extension=result['extension'], fix=True)
        result['path'] = stored.path
        result['fullpath'] = result['path'] + '.' + result['extension']
        result['private'] = upload.private
        result['persistent'] = upload.persistent
    if 'path' not in result:
        logmessage("get_info_from_file_number: path is not in result for " + str(file_number))
        return result
    final_filename = result['path'] + '.' + result['extension']
    if os.path.isfile(final_filename):
        add_info_about_file(final_filename, result['path'], result)
    return result
def save_numbered_file(filename, orig_path, yaml_file_name=None, uid=None):
    """Copy ``orig_path`` into numbered file storage under ``filename``.

    Returns a tuple (file_number, extension, mimetype).  Raises Exception
    when no uid is supplied and none can be found in the current thread.
    """
    if uid is None:
        # NOTE(review): bare except kept from original — best-effort
        # lookup of the current session id; failure leaves uid as None.
        try:
            uid = docassemble.base.functions.this_thread.current_info['session']
        except:
            pass
    if uid is None:
        raise Exception("save_numbered_file: uid not defined")
    new_number = get_new_file_number(uid, filename, yaml_file_name=yaml_file_name)
    extension, mimetype = get_ext_and_mimetype(filename)
    stored = SavedFile(new_number, extension=extension, fix=True)
    stored.copy_from(orig_path)
    stored.save(finalize=True)
    return (new_number, extension, mimetype)
class PlaygroundSection(object):
    """One section (e.g. '', 'template', 'static', 'sources', 'modules') of
    the current user's Playground storage area, backed by a SavedFile."""

    def __init__(self, section=''):
        # Playground areas belong to logged-in users; anonymous users
        # cannot create one.
        if docassemble.base.functions.this_thread.current_info['user']['is_anonymous']:
            raise DAError("Users must be logged in to create Playground objects")
        self.user_id = docassemble.base.functions.this_thread.current_info['user']['theid']
        self.current_info = docassemble.base.functions.this_thread.current_info
        self.section = section
        self.area = SavedFile(self.user_id, fix=True, section='playground' + self.section)
        self._update_file_list()

    def _update_file_list(self):
        # Cache a sorted list of the plain files currently in the area.
        self.file_list = sorted([f for f in os.listdir(self.area.directory) if os.path.isfile(os.path.join(self.area.directory, f))])

    def image_file_list(self):
        """Return the subset of files whose MIME type is an image type."""
        out_list = list()
        for the_file in self.file_list:
            extension, mimetype = get_ext_and_mimetype(the_file)
            if re.search(r'^image', mimetype):
                out_list.append(the_file)
        return out_list

    def reduced_file_list(self):
        """Return the file list, omitting non-.md files that have a .md
        counterpart of the same base name (case-insensitive)."""
        lower_list = [f.lower() for f in self.file_list]
        out_list = [f for f in self.file_list if os.path.splitext(f)[1].lower() == '.md' or os.path.splitext(f)[0].lower() + '.md' not in lower_list]
        return out_list

    def get_file(self, filename):
        """Return the absolute path of ``filename`` within this section."""
        return os.path.join(self.area.directory, filename)

    def file_exists(self, filename):
        """Return True if ``filename`` exists as a plain file here."""
        path = self.get_file(filename)
        if os.path.isfile(path):
            return True
        return False

    def read_file(self, filename):
        """Return the UTF-8-decoded contents of ``filename``, or None."""
        path = self.get_file(filename)
        if path is None:
            return None
        with open(path, 'rU') as fp:
            content = fp.read().decode('utf8')
            return content
        return None  # unreachable; retained from original

    def write_file(self, filename, content):
        """Write ``content`` (text, UTF-8-encoded) to ``filename`` and
        finalize the area so the change is persisted."""
        path = os.path.join(self.area.directory, filename)
        with open(path, 'w') as fp:
            fp.write(content.encode('utf8'))
        self.area.finalize()

    def commit(self):
        """Persist any pending changes to the storage area."""
        self.area.finalize()

    def copy_from(self, from_file, filename=None):
        """Copy an external file into this section; returns the filename
        used (defaults to the source file's basename)."""
        if filename is None:
            filename = os.path.basename(from_file)
        to_path = self.get_file(filename)
        shutil.copyfile(from_file, to_path)
        self.area.finalize()
        return filename

    def is_markdown(self, filename):
        """Return True if the filename's extension is md."""
        extension, mimetype = get_ext_and_mimetype(filename)
        if extension == "md":
            return True
        return False

    def is_pdf(self, filename):
        """Return True if the filename's extension is pdf."""
        extension, mimetype = get_ext_and_mimetype(filename)
        if extension == "pdf":
            return True
        return False

    def get_fields(self, filename):
        """Return the fillable form fields of a PDF file in this section."""
        return docassemble.base.pdftk.read_fields(self.get_file(filename))

    def convert_file_to_md(self, filename, convert_variables=True):
        """Convert a word-processing file to Markdown alongside it.

        Returns the new .md filename, or None when the file's type is not
        convertible, the file does not exist, or conversion fails.  When
        convert_variables is True, square-bracket markup is rewritten to
        valid variable references.
        """
        extension, mimetype = get_ext_and_mimetype(filename)
        if (mimetype and mimetype in convertible_mimetypes):
            the_format = convertible_mimetypes[mimetype]
        elif extension and extension in convertible_extensions:
            the_format = convertible_extensions[extension]
        else:
            return None
        if not self.file_exists(filename):
            return None
        path = self.get_file(filename)
        temp_file = word_to_markdown(path, the_format)
        if temp_file is None:
            return None
        out_filename = os.path.splitext(filename)[0] + '.md'
        if convert_variables:
            with open(temp_file.name, 'rU') as fp:
                self.write_file(out_filename, replace_square_brackets.sub(fix_variable_name, fp.read().decode('utf8')))
        else:
            shutil.copyfile(temp_file.name, self.get_file(out_filename))
        return out_filename

    def variables_from_file(self, filename):
        """Analyze a YAML file in this section and return its variable
        usage report (see Playground.variables_from), or None."""
        content = self.read_file(filename)
        if content is None:
            return None
        return Playground().variables_from(content)
def sync_with_google_drive(user_id):
    """Two-way sync of a user's Playground sections with Google Drive.

    For each Playground section, copies newer files from Drive to local
    storage, uploads newer/missing local files to Drive, and propagates
    deletions in both directions.  Returns a worker ReturnValue whose
    `restart` flag is set when the 'modules' section changed (module
    changes require a server restart).  A modification-time skew of up to
    3 seconds is tolerated before a file is considered out of date.
    """
    sys.stderr.write("sync_with_google_drive: starting\n")
    if worker_controller is None:
        initialize_db()
    sys.stderr.write("sync_with_google_drive: continuing\n")
    # OAuth credentials are cached in Redis per user.
    storage = RedisCredStorage(worker_controller.r, user_id,
                               app='googledrive')
    credentials = storage.get()
    if not credentials or credentials.invalid:
        sys.stderr.write("sync_with_google_drive: credentials failed\n")
        return worker_controller.functions.ReturnValue(
            ok=False, error="credentials expired", restart=False)
    try:
        with worker_controller.flaskapp.app_context():
            http = credentials.authorize(httplib2.Http())
            service = worker_controller.apiclient.discovery.build('drive',
                                                                  'v3',
                                                                  http=http)
            # The id of the user's designated Drive folder is stored in
            # Redis under this key.
            key = 'da:googledrive:mapping:userid:' + str(user_id)
            the_folder = worker_controller.r.get(key)
            response = service.files().get(
                fileId=the_folder,
                fields="mimeType, id, name, trashed").execute()
            the_mime_type = response.get('mimeType', None)
            trashed = response.get('trashed', False)
            # The mapped id must still be a live folder.
            if trashed is True or the_mime_type != "application/vnd.google-apps.folder":
                return worker_controller.functions.ReturnValue(
                    ok=False, error="error accessing Google Drive",
                    restart=False)
            local_files = dict()
            local_modtimes = dict()
            gd_files = dict()
            gd_ids = dict()
            gd_modtimes = dict()
            gd_deleted = dict()
            sections_modified = set()
            commentary = ''
            for section in [
                    'static', 'templates', 'questions', 'modules', 'sources'
            ]:
                local_files[section] = set()
                local_modtimes[section] = dict()
                # Drive folder names map onto SavedFile section names;
                # 'questions' and 'templates' are special-cased.
                if section == 'questions':
                    the_section = 'playground'
                elif section == 'templates':
                    the_section = 'playgroundtemplate'
                else:
                    the_section = 'playground' + section
                area = SavedFile(user_id, fix=True, section=the_section)
                # Inventory of local files and their mtimes.
                for f in os.listdir(area.directory):
                    local_files[section].add(f)
                    local_modtimes[section][f] = os.path.getmtime(
                        os.path.join(area.directory, f))
                # Find the Drive subfolder matching this section.
                subdirs = list()
                page_token = None
                while True:
                    response = service.files().list(
                        spaces="drive",
                        fields="nextPageToken, files(id, name)",
                        q="mimeType='application/vnd.google-apps.folder' and trashed=false and name='" + section + "' and '" + str(the_folder) + "' in parents").execute()
                    for the_file in response.get('files', []):
                        if 'id' in the_file:
                            subdirs.append(the_file['id'])
                    page_token = response.get('nextPageToken', None)
                    if page_token is None:
                        break
                if len(subdirs) == 0:
                    return worker_controller.functions.ReturnValue(
                        ok=False,
                        error="error accessing " + section +
                        " in Google Drive",
                        restart=False)
                subdir = subdirs[0]
                gd_files[section] = set()
                gd_ids[section] = dict()
                gd_modtimes[section] = dict()
                gd_deleted[section] = set()
                # Inventory of Drive files: ids, mtimes, trashed status.
                page_token = None
                while True:
                    response = service.files().list(
                        spaces="drive",
                        fields=
                        "nextPageToken, files(id, name, modifiedTime, trashed)",
                        q="mimeType!='application/vnd.google-apps.folder' and '"
                        + str(subdir) + "' in parents").execute()
                    for the_file in response.get('files', []):
                        # Skip editor temp files and Google Docs stubs.
                        if re.search(r'(\.tmp|\.gdoc)$', the_file['name']):
                            continue
                        if re.search(r'^\~', the_file['name']):
                            continue
                        gd_ids[section][the_file['name']] = the_file['id']
                        gd_modtimes[section][the_file[
                            'name']] = strict_rfc3339.rfc3339_to_timestamp(
                                the_file['modifiedTime'])
                        sys.stderr.write("Google says modtime on " +
                                         unicode(the_file) + " is " +
                                         the_file['modifiedTime'] + "\n")
                        if the_file['trashed']:
                            gd_deleted[section].add(the_file['name'])
                            continue
                        gd_files[section].add(the_file['name'])
                    page_token = response.get('nextPageToken', None)
                    if page_token is None:
                        break
                # A name both live and trashed on Drive counts as live.
                gd_deleted[section] = gd_deleted[section] - gd_files[section]
                # Drive -> local: download files missing locally or more
                # than 3 seconds newer on Drive.
                for f in gd_files[section]:
                    sys.stderr.write("Considering " + f + " on GD\n")
                    if f not in local_files[section] or gd_modtimes[section][
                            f] - local_modtimes[section][f] > 3:
                        sys.stderr.write("Considering " + f +
                                         " to copy to local\n")
                        sections_modified.add(section)
                        commentary += "Copied " + f + " from Google Drive.\n"
                        the_path = os.path.join(area.directory, f)
                        with open(the_path, 'wb') as fh:
                            response = service.files().get_media(
                                fileId=gd_ids[section][f])
                            downloader = worker_controller.apiclient.http.MediaIoBaseDownload(
                                fh, response)
                            done = False
                            while done is False:
                                status, done = downloader.next_chunk()
                                #sys.stderr.write("Download %d%%." % int(status.progress() * 100) + "\n")
                        # Mirror Drive's mtime locally so future syncs
                        # compare correctly.
                        os.utime(
                            the_path,
                            (gd_modtimes[section][f], gd_modtimes[section][f]))
                # Local -> Drive: upload new files and update stale ones.
                for f in local_files[section]:
                    sys.stderr.write("Considering " + f +
                                     ", which is a local file\n")
                    if f not in gd_deleted[section]:
                        sys.stderr.write("Considering " + f +
                                         " is not in Google Drive deleted\n")
                        if f not in gd_files[section]:
                            sys.stderr.write("Considering " + f +
                                             " is not in Google Drive\n")
                            the_path = os.path.join(area.directory, f)
                            # Empty files are skipped (not uploaded).
                            if os.path.getsize(the_path) == 0:
                                sys.stderr.write("Found zero byte file: " +
                                                 the_path + "\n")
                                continue
                            sys.stderr.write("Copying " + f +
                                             " to Google Drive.\n")
                            commentary += "Copied " + f + " to Google Drive.\n"
                            extension, mimetype = worker_controller.get_ext_and_mimetype(
                                the_path)
                            the_modtime = strict_rfc3339.timestamp_to_rfc3339_utcoffset(
                                local_modtimes[section][f])
                            sys.stderr.write("Setting GD modtime on new file " +
                                             unicode(f) + " to " +
                                             unicode(the_modtime) + "\n")
                            file_metadata = {
                                'name': f,
                                'parents': [subdir],
                                'modifiedTime': the_modtime,
                                'createdTime': the_modtime
                            }
                            media = worker_controller.apiclient.http.MediaFileUpload(
                                the_path, mimetype=mimetype)
                            the_new_file = service.files().create(
                                body=file_metadata,
                                media_body=media,
                                fields='id').execute()
                            new_id = the_new_file.get('id')
                        elif local_modtimes[section][f] - gd_modtimes[section][
                                f] > 3:
                            sys.stderr.write(
                                "Considering " + f +
                                " is in Google Drive but local is more recent\n"
                            )
                            the_path = os.path.join(area.directory, f)
                            if os.path.getsize(the_path) == 0:
                                sys.stderr.write(
                                    "Found zero byte file during update: " +
                                    the_path + "\n")
                                continue
                            commentary += "Updated " + f + " on Google Drive.\n"
                            extension, mimetype = worker_controller.get_ext_and_mimetype(
                                the_path)
                            the_modtime = strict_rfc3339.timestamp_to_rfc3339_utcoffset(
                                local_modtimes[section][f])
                            sys.stderr.write("Setting GD modtime on modified " +
                                             unicode(f) + " to " +
                                             unicode(the_modtime) + "\n")
                            file_metadata = {'modifiedTime': the_modtime}
                            media = worker_controller.apiclient.http.MediaFileUpload(
                                the_path, mimetype=mimetype)
                            service.files().update(fileId=gd_ids[section][f],
                                                   body=file_metadata,
                                                   media_body=media).execute()
                # Deletions: a file trashed on Drive is either undeleted
                # (local copy is newer) or deleted locally.
                for f in gd_deleted[section]:
                    sys.stderr.write("Considering " + f +
                                     " is deleted on Google Drive\n")
                    if f in local_files[section]:
                        sys.stderr.write(
                            "Considering " + f +
                            " is deleted on Google Drive but exists locally\n")
                        if local_modtimes[section][f] - gd_modtimes[section][
                                f] > 3:
                            sys.stderr.write(
                                "Considering " + f +
                                " is deleted on Google Drive but exists locally and needs to be undeleted on GD\n"
                            )
                            commentary += "Undeleted and updated " + f + " on Google Drive.\n"
                            the_path = os.path.join(area.directory, f)
                            extension, mimetype = worker_controller.get_ext_and_mimetype(
                                the_path)
                            the_modtime = strict_rfc3339.timestamp_to_rfc3339_utcoffset(
                                local_modtimes[section][f])
                            sys.stderr.write(
                                "Setting GD modtime on undeleted file " +
                                unicode(f) + " to " + unicode(the_modtime) +
                                "\n")
                            file_metadata = {
                                'modifiedTime': the_modtime,
                                'trashed': False
                            }
                            media = worker_controller.apiclient.http.MediaFileUpload(
                                the_path, mimetype=mimetype)
                            service.files().update(fileId=gd_ids[section][f],
                                                   body=file_metadata,
                                                   media_body=media).execute()
                        else:
                            sys.stderr.write(
                                "Considering " + f +
                                " is deleted on Google Drive but exists locally and needs to deleted locally\n"
                            )
                            sections_modified.add(section)
                            commentary += "Deleted " + f + " from Playground.\n"
                            the_path = os.path.join(area.directory, f)
                            if os.path.isfile(the_path):
                                area.delete_file(f)
                area.finalize()
            # Bump interview-source version counters so cached interviews
            # are re-read after the sync.
            for key in worker_controller.r.keys(
                    'da:interviewsource:docassemble.playground' +
                    str(user_id) + ':*'):
                worker_controller.r.incr(key)
            if commentary != '':
                sys.stderr.write(commentary + "\n")
            # Module changes require a restart to take effect.
            if 'modules' in sections_modified:
                do_restart = True
            else:
                do_restart = False
            return worker_controller.functions.ReturnValue(ok=True,
                                                           summary=commentary,
                                                           restart=do_restart)
    except Exception as e:
        # Broad catch: any failure is reported back to the caller rather
        # than crashing the background worker.
        return worker_controller.functions.ReturnValue(
            ok=False,
            error="Error syncing with Google Drive: " + str(e),
            restart=False)
def reset_user_dict(user_code, filename, user_id=None, temp_user_id=None, force=False): #logmessage("reset_user_dict called with " + str(user_code) + " and " + str(filename)) if force: the_user_id = None else: if user_id is None and temp_user_id is None: if current_user.is_authenticated and not current_user.is_anonymous: user_type = 'user' the_user_id = current_user.id else: user_type = 'tempuser' the_user_id = session.get('tempuser', None) elif user_id is not None: user_type = 'user' the_user_id = user_id else: user_type = 'tempuser' the_user_id = temp_user_id if the_user_id is None: UserDictKeys.query.filter_by(key=user_code, filename=filename).delete() db.session.commit() do_delete = True else: if user_type == 'user': UserDictKeys.query.filter_by(key=user_code, filename=filename, user_id=the_user_id).delete() else: UserDictKeys.query.filter_by(key=user_code, filename=filename, temp_user_id=the_user_id).delete() db.session.commit() existing_user_dict_key = UserDictKeys.query.filter_by( key=user_code, filename=filename).first() if not existing_user_dict_key: do_delete = True else: do_delete = False if do_delete: UserDict.query.filter_by(key=user_code, filename=filename).delete() db.session.commit() for upload in Uploads.query.filter_by(key=user_code, yamlfile=filename, persistent=False).all(): old_file = SavedFile(upload.indexno) old_file.delete() Uploads.query.filter_by(key=user_code, yamlfile=filename, persistent=False).delete() db.session.commit() # Attachments.query.filter_by(key=user_code, filename=filename).delete() # db.session.commit() SpeakList.query.filter_by(key=user_code, filename=filename).delete() db.session.commit() ChatLog.query.filter_by(key=user_code, filename=filename).delete() db.session.commit() Shortener.query.filter_by(uid=user_code, filename=filename).delete() db.session.commit() #logmessage("reset_user_dict: done") return
def install_package(package):
    """Install a docassemble add-on package with pip.

    ``package`` is a package record whose ``type`` is 'zip', 'git', or
    'pip' (other attributes read: upload, name, giturl, gitbranch,
    gitsubdir, limitation).  Returns a tuple ``(returnval,
    logfilecontents)`` where ``returnval`` is pip's exit status (0 on
    success) and ``logfilecontents`` is the accumulated pip log text.
    """
    sys.stderr.write("install_package: " + package.name + "\n")
    # A zip-type package with no uploaded file: nothing to install.
    if package.type == 'zip' and package.upload is None:
        return 0, ''
    sys.stderr.write('install_package: ' + package.name + "\n")
    from docassemble.base.config import daconfig
    from docassemble.webapp.daredis import r
    from docassemble.webapp.files import SavedFile
    if PY2:
        PACKAGE_DIRECTORY = daconfig.get('packages',
                                         '/usr/share/docassemble/local')
    else:
        PACKAGE_DIRECTORY = daconfig.get('packages',
                                         '/usr/share/docassemble/local3.5')
    logfilecontents = ''
    pip_log = tempfile.NamedTemporaryFile()
    temp_dir = tempfile.mkdtemp()
    # The pip cache is deliberately disabled for all installs (a Redis
    # flag used to control this; it is now hard-coded off).
    disable_pip_cache = True
    # Build the pip command line according to the package source type.
    if package.type == 'zip' and package.upload is not None:
        saved_file = SavedFile(package.upload, extension='zip', fix=True)
        commands = ['pip', 'install']
        if disable_pip_cache:
            commands.append('--no-cache-dir')
        commands.extend([
            '--quiet', '--prefix=' + PACKAGE_DIRECTORY, '--src=' + temp_dir,
            '--log-file=' + pip_log.name, '--upgrade',
            saved_file.path + '.zip'
        ])
    elif package.type == 'git' and package.giturl is not None:
        if package.gitbranch is not None:
            branchpart = '@' + str(package.gitbranch)
        else:
            branchpart = ''
        commands = ['pip', 'install']
        if disable_pip_cache:
            commands.append('--no-cache-dir')
        if package.gitsubdir is not None:
            commands.extend([
                '--quiet', '--prefix=' + PACKAGE_DIRECTORY,
                '--src=' + temp_dir, '--upgrade',
                '--log-file=' + pip_log.name,
                'git+' + str(package.giturl) + '.git' + branchpart + '#egg=' +
                package.name + '&subdirectory=' + str(package.gitsubdir)
            ])
        else:
            commands.extend([
                '--quiet', '--prefix=' + PACKAGE_DIRECTORY,
                '--src=' + temp_dir, '--upgrade',
                '--log-file=' + pip_log.name,
                'git+' + str(package.giturl) + '.git' + branchpart + '#egg=' +
                package.name
            ])
    elif package.type == 'pip':
        if package.limitation is None:
            limit = ""
        else:
            limit = str(package.limitation)
        commands = ['pip', 'install']
        if disable_pip_cache:
            commands.append('--no-cache-dir')
        commands.extend([
            '--quiet', '--prefix=' + PACKAGE_DIRECTORY, '--src=' + temp_dir,
            '--upgrade', '--log-file=' + pip_log.name, package.name + limit
        ])
    else:
        sys.stderr.write("Wrong package type\n")
        return 1, 'Unable to recognize package type: ' + package.name
    sys.stderr.write("install_package: running " + " ".join(commands) + "\n")
    logfilecontents += " ".join(commands) + "\n"
    # BUG FIX: subprocess.call() returns the exit status and never raises
    # CalledProcessError, so the previous code unconditionally reported
    # success (returnval = 0) even when pip failed.  Capture the real exit
    # code; OSError covers a missing pip executable.
    try:
        returnval = subprocess.call(commands)
    except OSError:
        returnval = 1
    fix_fnctl()
    sys.stderr.flush()
    sys.stdout.flush()
    time.sleep(4)
    # NOTE(review): 'rU' plus encoding= works on Python 3 only (deprecated
    # there); on Python 2 this would need io.open -- confirm which open()
    # is in scope in this module.
    with open(pip_log.name, 'rU', encoding='utf-8') as x:
        logfilecontents += x.read()
    pip_log.close()
    try:
        sys.stderr.write(logfilecontents + "\n")
    except:
        pass
    sys.stderr.flush()
    sys.stdout.flush()
    time.sleep(4)
    sys.stderr.write('returnval is: ' + str(returnval) + "\n")
    sys.stderr.write('install_package: done' + "\n")
    shutil.rmtree(temp_dir)
    return returnval, logfilecontents
def delete_user_data(user_id, r, r_user):
    """Scrub all stored data for a registered user.

    Deletes the user's sessions, chat logs, stored objects, shortener
    records (and associated e-mail attachment files), roles, Playground
    areas, and Redis keys, and anonymizes (rather than deletes) the user
    row itself.  ``r`` and ``r_user`` are Redis connections: ``r`` holds
    userid-keyed entries, ``r_user`` holds email-keyed entries.
    """
    UserDict.query.filter_by(user_id=user_id).delete()
    db.session.commit()
    UserDictKeys.query.filter_by(user_id=user_id).delete()
    db.session.commit()
    ChatLog.query.filter_by(owner_id=user_id).delete()
    db.session.commit()
    ChatLog.query.filter_by(user_id=user_id).delete()
    db.session.commit()
    GlobalObjectStorage.query.filter_by(user_id=user_id).delete()
    db.session.commit()
    # Package ownership is transferred to user 1 (presumably the admin
    # account) rather than deleted.
    for package_auth in PackageAuth.query.filter_by(user_id=user_id).all():
        package_auth.user_id = 1
    db.session.commit()
    # Collect uploaded e-mail attachment files reachable through this
    # user's shortened URLs, then delete them from storage.
    files_to_delete = list()
    for short_code_item in Shortener.query.filter_by(user_id=user_id).all():
        for email in Email.query.filter_by(short=short_code_item.short).all():
            for attachment in EmailAttachment.query.filter_by(
                    email_id=email.id).all():
                files_to_delete.append(attachment.upload)
    for file_number in files_to_delete:
        the_file = SavedFile(file_number)
        the_file.delete()
    Shortener.query.filter_by(user_id=user_id).delete()
    db.session.commit()
    UserRoles.query.filter_by(user_id=user_id).delete()
    db.session.commit()
    # Blank out credentials rather than deleting the auth rows.
    for user_auth in UserAuthModel.query.filter_by(user_id=user_id):
        user_auth.password = ''
        user_auth.reset_password_token = ''
    db.session.commit()
    # Remove every Playground storage area belonging to the user.
    for section in ('playground', 'playgroundmodules', 'playgroundpackages',
                    'playgroundsources', 'playgroundstatic',
                    'playgroundtemplate'):
        the_section = SavedFile(user_id, section=section)
        the_section.delete()
    # Anonymize the user row; remember the e-mail for Redis cleanup below.
    old_email = None
    for user_object in UserModel.query.filter_by(id=user_id):
        old_email = user_object.email
        user_object.active = False
        user_object.first_name = ''
        user_object.last_name = ''
        user_object.nickname = ''
        user_object.email = None
        user_object.country = ''
        user_object.subdivisionfirst = ''
        user_object.subdivisionsecond = ''
        user_object.subdivisionthird = ''
        user_object.organization = ''
        user_object.timezone = None
        user_object.language = None
        user_object.pypi_username = None
        user_object.pypi_password = None
        user_object.otp_secret = None
        user_object.social_id = 'disabled$' + text_type(user_id)
    db.session.commit()
    # Delete Redis entries keyed by user id (exact key and prefixed keys).
    keys_to_delete = set()
    for key in r.keys('*userid:' + text_type(user_id)):
        keys_to_delete.add(key)
    for key in r.keys('*userid:' + text_type(user_id) + ':*'):
        keys_to_delete.add(key)
    for key in keys_to_delete:
        r.delete(key)
    # Delete Redis entries keyed by the user's (former) e-mail address.
    keys_to_delete = set()
    for key in r_user.keys('*:user:' + text_type(old_email)):
        keys_to_delete.add(key)
    for key in keys_to_delete:
        r_user.delete(key)
def install_package(package, start_time=None):
    """Install a docassemble add-on package with pip.

    ``package`` is a package record whose ``type`` is 'zip', 'git', or
    'pip' (other attributes read: upload, name, giturl, gitbranch,
    gitsubdir, limitation).  ``start_time`` (epoch seconds) is used only
    for elapsed-time log messages and defaults to now.  Returns a tuple
    ``(returnval, logfilecontents)`` where ``returnval`` is pip's exit
    status (0 on success) and ``logfilecontents`` is the accumulated log.
    """
    if start_time is None:
        start_time = time.time()
    invalidate_installed_distributions_cache()
    sys.stderr.write("install_package: " + package.name + " after " +
                     str(time.time() - start_time) + " seconds\n")
    # A zip-type package with no uploaded file: nothing to install.
    if package.type == 'zip' and package.upload is None:
        return 0, ''
    from docassemble.base.config import daconfig
    from docassemble.webapp.daredis import r
    from docassemble.webapp.files import SavedFile
    PACKAGE_DIRECTORY = daconfig.get(
        'packages', '/usr/share/docassemble/local' +
        str(sys.version_info.major) + '.' + str(sys.version_info.minor))
    logfilecontents = ''
    pip_log = tempfile.NamedTemporaryFile()
    temp_dir = tempfile.mkdtemp()
    # The pip cache is deliberately disabled for all installs (a Redis
    # flag used to control this; it is now hard-coded off).
    disable_pip_cache = True
    # zip and git installs first uninstall any existing copy so the new
    # code fully replaces the old.
    if package.type in ('zip', 'git'):
        sys.stderr.write("install_package: calling uninstall_package on " +
                         package.name + " after " +
                         str(time.time() - start_time) + " seconds\n")
        returnval, newlog = uninstall_package(package,
                                              sleep=False,
                                              start_time=start_time)
        logfilecontents += newlog
    # Build the pip command line according to the package source type.
    if package.type == 'zip' and package.upload is not None:
        saved_file = SavedFile(package.upload, extension='zip', fix=True)
        commands = ['pip', 'install']
        if disable_pip_cache:
            commands.append('--no-cache-dir')
        commands.extend([
            '--quiet', '--prefix=' + PACKAGE_DIRECTORY, '--src=' + temp_dir,
            '--log-file=' + pip_log.name, '--upgrade',
            saved_file.path + '.zip'
        ])
    elif package.type == 'git' and package.giturl:
        if package.gitbranch is not None:
            branchpart = '@' + str(package.gitbranch)
        else:
            branchpart = ''
        # Normalize the URL into pip's VCS form: ensure a single 'git+'
        # prefix and a single '.git' suffix.
        if str(package.giturl).endswith('.git'):
            gitsuffix = ''
        else:
            gitsuffix = '.git'
        if str(package.giturl).startswith('git+'):
            gitprefix = ''
        else:
            gitprefix = 'git+'
        commands = ['pip', 'install']
        if disable_pip_cache:
            commands.append('--no-cache-dir')
        if package.gitsubdir is not None:
            commands.extend([
                '--quiet', '--prefix=' + PACKAGE_DIRECTORY,
                '--src=' + temp_dir, '--upgrade',
                '--log-file=' + pip_log.name,
                gitprefix + str(package.giturl).rstrip('/') + gitsuffix +
                branchpart + '#egg=' + package.name + '&subdirectory=' +
                str(package.gitsubdir)
            ])
        else:
            commands.extend([
                '--quiet', '--prefix=' + PACKAGE_DIRECTORY,
                '--src=' + temp_dir, '--upgrade',
                '--log-file=' + pip_log.name,
                gitprefix + str(package.giturl).rstrip('/') + gitsuffix +
                branchpart + '#egg=' + package.name
            ])
    elif package.type == 'pip':
        if package.limitation is None:
            limit = ""
        else:
            limit = str(package.limitation)
        commands = ['pip', 'install']
        if disable_pip_cache:
            commands.append('--no-cache-dir')
        commands.extend([
            '--quiet', '--prefix=' + PACKAGE_DIRECTORY, '--src=' + temp_dir,
            '--upgrade', '--log-file=' + pip_log.name, package.name + limit
        ])
    else:
        sys.stderr.write("Wrong package type after " +
                         str(time.time() - start_time) + " seconds\n")
        return 1, 'Unable to recognize package type: ' + package.name
    sys.stderr.write("install_package: running " + " ".join(commands) +
                     " after " + str(time.time() - start_time) + " seconds\n")
    logfilecontents += "install_package: running " + " ".join(commands) + "\n"
    # BUG FIX: subprocess.run() without check=True never raises
    # CalledProcessError, so the previous code unconditionally reported
    # success (returnval = 0) even when pip failed.  Read the real exit
    # code from the CompletedProcess; OSError covers a missing pip
    # executable.
    try:
        returnval = subprocess.run(commands).returncode
    except OSError:
        returnval = 1
    pip_log.seek(0)
    with open(pip_log.name, 'r', encoding='utf-8') as x:
        logfilecontents += x.read()
    pip_log.close()
    shutil.rmtree(temp_dir)
    sys.stderr.write('returnval is: ' + str(returnval) + "\n")
    sys.stderr.write('install_package: done' + " after " +
                     str(time.time() - start_time) + " seconds\n")
    return returnval, logfilecontents
def get_area(self): return SavedFile(self.user_id, fix=True, section='playground' + self.section)