def upload():
    if 'file' not in request.files:
        flash('No file part')
        return redirect(request.url)
    f = request.files['file']
    if f.filename == '':
        flash('No selected file')
        return redirect(request.url)
    if f:
        filename = secure_filename(f.filename)
        f.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
        name = request.form['name']
        existing = File.query.filter_by(name=name).first()
        if existing:
            # Update the existing file record
            existing.path = filename
            db.session.commit()
        else:
            # Create a new record
            file = File()
            file.name = name
            file.path = filename
            db.session.add(file)
            db.session.commit()
    return redirect(url_for('manage'))
def share(path):
    is_public = False
    is_private = False
    try:
        f = File.get(File.public_share_url == path)
        is_public = True
    except peewee.DoesNotExist:
        try:
            f = File.get(File.private_share_url == path)
            is_private = True
        except peewee.DoesNotExist:
            return jsonify(message='error'), 404
    actual_filename = generate_filename(f.folder.name, f.filename)
    target_file = os.path.join(os.path.expanduser(app.config['UPLOAD_FOLDER']),
                               actual_filename)
    if not ((is_public and f.open_public_share) or
            (is_private and f.open_private_share)):
        return jsonify(message='error'), 404
    # Signed, time-limited token (24 hours); itsdangerous' plain URLSafeSerializer
    # does not accept expires_in, so the timed JWS serializer is used instead.
    s = TimedJSONWebSignatureSerializer(app.config['SECRET_KEY'],
                                        expires_in=24 * 3600)
    args = request.args
    if args.get('download') == 'true':
        token = None
        cookies = request.cookies
        if 'token' in cookies:
            token = cookies['token']
        try:
            data = s.loads(token)
            if data['path'] == path:
                if os.path.exists(target_file):
                    return send_file(target_file)
                else:
                    return jsonify(message='error'), 404
            else:
                return jsonify(message='unauthorized'), 401
        except Exception:
            return jsonify(message='unauthorized'), 401
    token = s.dumps({'path': path}).decode('utf-8')
    payload = {
        'filename': f.filename,
        'folder': f.folder.name,
        'open_public_share': f.open_public_share,
        'open_private_share': f.open_private_share,
        'token': token,
    }
    if is_private:
        if 'password' not in args or args['password'] != f.private_share_password:
            payload['token'] = ''
    return jsonify(message='OK', data=payload)
def file_put(self, _uname, _fname, _obj):
    _query = self.file_query("first", _uname, _fname)
    if _query is None:
        add_row = File()
        add_row.name = u"" + _fname
        add_row.pull_id = self.pull_add(_obj)
        add_row.user_id = self.user_query("one", _uname).id
        self.__session.add(add_row)
        self.__session.commit()
        return True
    else:
        return False
def main():
    ATM(
        menu,
        Accounts(Table(File('accounts.txt'))),
        History(Table(File('transactions.txt')))
    ).start()
def post(self):
    from google.appengine.api import users
    from model import File
    code = self.request.get('code')
    name = self.request.get('name')
    current_user = users.get_current_user()
    query = File.gql("where name=:1 and author=:2", name, current_user)
    if query.count() > 0:
        file_obj = query.fetch(1)[0]
    else:
        file_obj = File(name=name, author=current_user)
    file_obj.content = code
    file_obj.put()
    self.response.out.write('Saved successfully')
def folder(folder_name):
    try:
        folder = Folder.get(Folder.name == folder_name)
    except peewee.DoesNotExist:
        return jsonify(message='error'), 404
    if request.method == 'POST':
        f = request.files['file']
        if f:
            actual_filename = generate_filename(folder_name, f.filename)
            target_file = os.path.join(
                os.path.expanduser(app.config['UPLOAD_FOLDER']),
                actual_filename)
            if os.path.exists(target_file):
                return jsonify(message='error'), 409
            try:
                f.save(target_file)
                f2 = File.create(folder=folder, filename=f.filename)
                f2.save()
            except Exception as e:
                app.logger.exception(e)
                return jsonify(message='error'), 500
        return jsonify(message='OK'), 201
    if request.method == 'GET':
        return jsonify(message='OK', data=model_to_dict(folder, backrefs=True))
    if request.method == 'DELETE':
        try:
            folder.delete_instance()
        except peewee.IntegrityError:
            return jsonify(message='error'), 409
        return jsonify(message='OK')
def main():
    print("use git lfs to calculate the sha and save it to index.db")
    cnt = 0
    result = git_obj.execute('git lfs ls-files -l')
    for line in result.split("\n"):
        if not line:
            continue
        sha, path_str = re.split(r" [*-] ", line)
        path = pathlib.Path(path_str).absolute()
        if path.stat().st_size <= 1000:
            continue
        file_obj, created = File.get_or_create(
            path=str(path),
            defaults={
                "st_size": path.stat().st_size,
                "st_ctime": datetime.datetime.fromtimestamp(path.stat().st_ctime),
            })
        if not file_obj.sha256sum:
            file_obj.sha256sum = sha
            file_obj.save()
        if file_obj.is_del:
            file_obj.is_del = False
            file_obj.save()
        assert file_obj.sha256sum == sha
        assert file_obj.st_size == path.stat().st_size
        cnt += 1
        if cnt % 100 == 0:
            print(f" {cnt} handled")
    print(
        f"finished, {len(File.filter(sha256sum=None))} files still have no sha256sum"
    )
def get(self, id, name='some_file.bin'):
    id = int(id)
    file = File.get(db.Key.from_path('File', id))
    file.incrementDownloadCount()
    self.response.headers['Content-Type'] = "application/octet-stream"
    self.response.headers['Content-disposition'] = 'attachment; filename="%s"' % str(file.name)
    self.response.out.write(file.content)
def _save_image(db_session, scrape_result):
    """ Save the image returned by Splash to a local file. """
    if scrape_result['error'] is None:
        image_name = '{}.jpg'.format(scrape_result['site']['name'])
        content = base64.decodestring(scrape_result['image'].encode('utf8'))
        image_file = File(name=image_name, mime='image/jpeg', content=content)
        db_session.add(image_file)
        try:
            db_session.commit()
        except:
            db_session.rollback()
            raise ScrapeException('Could not save image')
    else:
        # Get the generic error image.
        image_file = (
            db_session
            .query(File)
            .filter(File.name == 'hgprofiler_error.png')
            .one()
        )
    return image_file
def _save_image(db_session, scrape_result, user_id, censor=False):
    """ Save the image returned by Splash to a local file. """
    if scrape_result['error'] is None and censor is True:
        # Get the generic censored image.
        image_file = (db_session.query(File).filter(
            File.name == _censored_image_name).one())
    elif scrape_result['error'] is None:
        image_name = '{}.jpg'.format(scrape_result['site']['name'].replace(
            ' ', ''))
        content = base64.decodestring(scrape_result['image'].encode('utf8'))
        image_file = File(name=image_name,
                          mime='image/jpeg',
                          content=content,
                          user_id=user_id)
        db_session.add(image_file)
        try:
            db_session.commit()
        except:
            db_session.rollback()
            raise ScrapeException('Could not save image')
    else:
        # Get the generic error image.
        image_file = (db_session.query(File).filter(
            File.name == _error_image_name).one())
    return image_file
def files(folder_name, filename):
    # Get the name path
    actual_filename = generate_filename(folder_name, filename)
    target_file = os.path.join(os.path.expanduser(app.config['UPLOAD_FOLDER']),
                               actual_filename)
    try:
        f = File.get(filename=filename)
    except peewee.DoesNotExist:
        return jsonify(message='error'), 404
    if request.method == 'GET':
        args = request.args
        if 'query' in args and args['query'] == 'info':
            return jsonify(message='OK', data=model_to_dict(f))
        if os.path.exists(target_file):
            return send_file(target_file)
        else:
            return jsonify(message='error'), 404
    if request.method == 'DELETE':
        if os.path.exists(target_file):
            try:
                f.delete_instance()
                os.remove(target_file)
                return jsonify(message='OK')
            except Exception as e:
                app.logger.exception(e)
                return jsonify(message='error'), 500
        else:
            return jsonify(message='error'), 404
def upload_file():
    user = g.user
    file = request.files['file']
    resp = {}
    resp['filename'] = ''
    msg = 'Upload successful'
    if file and allowed_file(file.filename):
        filename = secure_filename(file.filename)
        new_file = str(user.id) + '_' + user.username + '_' + str(
            int(time.time())) + '.' + filename.split('.')[1]
        basepath = os.path.dirname(__file__)
        if user.role == 1:
            upload_path = os.path.join(basepath,
                                       app.config['UPLOAD_FOLDER_STU'],
                                       new_file)
        else:
            upload_path = os.path.join(basepath,
                                       app.config['UPLOAD_FOLDER_TEC'],
                                       new_file)
        file_url = upload_path
        file.save(upload_path)
        resp['filename'] = file_url.split('/')[-1]
        f = File(user_id=user.id, url=file_url, name=resp['filename'])
        db.session.add(f)
        db.session.commit()
    else:
        msg = 'Filename limitation'
    resp['msg'] = msg
    return get_response(resp)
def dispatch(self, file: File) -> None:
    FileDispatcher.__LOG.info(f"[DISPATCHING] '{file.filename}'")
    # Indicates that the input file has been copied to at least one destination.
    copied = False
    for rule in self.__rules:
        if file.match_rule(rule):
            FileDispatcher.__LOG.debug(
                f"[COPYING] '{file.filename}' to '{rule.destinations}'")
            file.copy_to(rule.destinations)
            copied |= len(rule.destinations) > 0
    if copied:
        # The file matched at least one rule, so remove the original.
        FileDispatcher.__LOG.debug(f"[REMOVING] '{file.filename}'")
        file.delete()
    else:
        FileDispatcher.__LOG.debug(
            f"[SKIPPING] '{file.filename}': does not match any rule")
def show(self):
    while True:
        window = self.window
        event, values = window.read()
        if event == "submit":
            file = File(None, values['name'], values['path'], self.doc.id)
            fn = self.make_file_on_disk(self.person, self.doc, file)
            if fn:
                file.path = fn
                self.service.add_file(file)
            break
        elif event == "reset":
            window.Element("name").Update(value="")
            window.Element("doc").Update(value="")
        else:
            break
    window.Close()
def deletebill(billid):
    username = request.authorization.username
    passwordinfo = request.authorization.password
    flag = checkauthentication(username, passwordinfo)
    if flag == True:
        result = Credential.select_user_by_email(username)
        user_sc = Credentialschema()
        data = user_sc.dump(result)
        owner_id = data.get('id')
        result2 = Bills.select_user_by_billid(billid)
        bill_sc = Billschema(many=False)
        data2 = bill_sc.dump(result2)
        owner_id_test = data2.get('owner_id')
        if owner_id == owner_id_test:
            # Look up the attached file before deleting the database rows,
            # otherwise its id can no longer be resolved.
            result3 = File.select_file_by_billid(billid)
            file_sc = Fileschema(many=False)
            data3 = file_sc.dump(result3)
            file_id = data3.get('id')
            Bills.delete_bills(billid)
            File.delete_file_by_bill(billid)
            basedir = app.config['UPLOAD_FOLDER']
            filedir = basedir + file_id + "/"
            shutil.rmtree(filedir)
            return custom_http_code("deleted", 204)
        else:
            return custom_http_code("bill id invalid or not found", 404)
    else:
        return custom_http_code("unauthorized", 401)
def get_by_doc_id(cls, did):
    sql = """select * from file where doc_id=?"""
    data = (did, )
    rows = cls.db.query(sql, data)
    records = []
    for row in rows:
        print(row)
        records.append(File(row[0], row[1], row[2], row[3]))
    return records
def btopen(s, mode='r'):
    """
    Open the file (i.e. return a BtIO object)
    """
    f = getfile(s)
    if f is None:
        f = File.new(path=s)
    io = BtIO(f, mode)
    return io
def main():
    for item in File.raw(
        "select sha256sum, count(id) total from file where sha256sum is not null and is_del = False group by sha256sum having total >= 2"
    ):
        sha256sum = item.sha256sum
        files = [
            file_obj
            for file_obj in File.filter(sha256sum=sha256sum, is_del=False)
        ]
        while len(files) >= 2:
            print(f"{files[0]} VS {files[1]}")
            keep_file, delete_file = keep_and_delete(files[0], files[1], rules)
            print(f" delete: {delete_file.path}")
            print(f" keep: {keep_file.path}")
            delete_file.is_del = True
            delete_file.save()
            os.unlink(delete_file.path)
            files.remove(delete_file)
def deletefile(billid, fileid):
    bill_id = billid
    username = request.authorization.username
    passwordinfo = request.authorization.password
    bill_sc = Billschema(many=False)
    data1 = request.get_json()
    flag = checkauthentication(username, passwordinfo)
    if flag == True:
        # check if user exists
        result = Credential.select_user_by_email(username)
        user_sc = Credentialschema()
        data = user_sc.dump(result)
        owner_id = data.get('id')
        result2 = Bills.select_user_by_billid(bill_id)
        bill_sc = Billschema(many=False)
        data2 = bill_sc.dump(result2)
        owner_id2 = data2.get('owner_id')
        if owner_id == owner_id2:
            # authorized against bill and user
            file_sc = File_schema_output(many=False)
            result = File.select_file_by_file_id(fileid)
            print(result)
            if not result:
                return custom_http_code("file does not exist", 404)
            basedir = app.config['UPLOAD_FOLDER']
            filedir = root_dir + "/" + "attachments" + "/" + fileid + "/"
            shutil.rmtree(filedir)
            File.delete_file(fileid)
            return custom_http_code(data, 204)
        else:
            return custom_http_code('Unauthorised', 401)
    else:
        return custom_http_code('invalid login', 401)
def getfile(billid, fileid):
    start = time.time()
    bill_id = billid
    username = request.authorization.username
    passwordinfo = request.authorization.password
    bill_sc = Billschema(many=False)
    data1 = request.get_json()
    dbtime = time.time()
    flag = checkauthentication(username, passwordinfo)
    dur = (time.time() - dbtime) * 1000
    c.timing("dbconnect", dur)
    if flag == True:
        # check if user exists
        result = Credential.select_user_by_email(username)
        user_sc = Credentialschema()
        data = user_sc.dump(result)
        owner_id = data.get('id')
        result2 = Bills.select_user_by_billid(bill_id)
        bill_sc = Billschema(many=False)
        data2 = bill_sc.dump(result2)
        owner_id2 = data2.get('owner_id')
        if owner_id == owner_id2:
            # authorized against bill and user
            file_sc = File_schema_output(many=False)
            dbtime = time.time()
            result = File.select_file_by_file_id(fileid)
            dur = (time.time() - dbtime) * 1000
            c.timing("dbconnect", dur)
            print(result)
            data = file_sc.dump(result)
            print(data)
            if not result:
                c.incr("getfilecount")
                dur = (time.time() - start) * 1000
                c.timing("getfilecount", dur)
                return custom_http_code("file does not exist bad request", 404)
            c.incr("getfilecount")
            dur = (time.time() - start) * 1000
            c.timing("getfilecount", dur)
            return custom_http_code(data, 200)
        else:
            return custom_http_code('Unauthorised', 401)
    else:
        return custom_http_code('invalid login', 401)
def post(self):
    parser = reqparse.RequestParser()
    args = parser.parse_args()
    uploaded_file = request.files['file']
    filename = str(
        uuid.uuid4()) + '.' + uploaded_file.filename.split('.')[-1]
    print(uploaded_file.filename)
    uploaded_file.save(os.path.join('files', filename))
    file = File(filename)
    session.add(file)
    session.commit()
    return file.id, 201
def get(self):
    user = self.getAuthentificatedUser()
    forum = self.getForumInstance()
    files = File.all()
    template_values = {
        'url': users.CreateLogoutURL(self.request.uri),
        'user': user.nickname(),
        'forum': forum,
        'files': files,
    }
    path = os.path.join(os.path.dirname(__file__),
                        os.path.join('templates', 'filesList.htm'))
    self.response.out.write(template.render(path, template_values))
def handleDelete(self):
    from google.appengine.api import users
    from model import File
    name = self.request.get('name')
    current_user = users.get_current_user()
    query = File.gql("where name=:1 and author=:2", name, current_user)
    if query.count() > 0:
        file_obj = query.fetch(1)[0]
        file_obj.delete()
        return True
    else:
        return False
def insertFile(fname, filePerm, size, uId, pFolderId):
    try:
        newFile = File(fileName=fname,
                       filePerm=filePerm,
                       size=size,
                       uId=uId,
                       pFolderId=pFolderId)
        db.session.add(newFile)
        db.session.commit()
        return newFile
    except SQLAlchemyError as e:
        print(e)
        return False
def _create_fixture_images(self, config):
    '''
    Create the generic error image.

    Since this script will often run as root, it modifies the owner of the
    new file to match the owner of the data directory.
    '''
    session = app.database.get_session(self._db)
    image_name = 'hgprofiler_error.png'
    data_stat = os.stat(get_path('data'))
    img_path = os.path.join(get_path('static'), 'img', image_name)
    with open(img_path, 'rb') as img:
        img_data = img.read()
    image_file = File(name=image_name, mime='image/png', content=img_data)
    image_file.chown(data_stat.st_uid, data_stat.st_gid)
    session.add(image_file)
    session.commit()
def handleRunFile(self):
    from google.appengine.api import users
    current_user = users.get_current_user()
    name = self.request.get('name')
    from model import File
    query = File.gql("where name=:1 and author=:2", name, current_user)
    if query.count() <= 0:
        self.response.out.write(u'Cannot find this file or permission denied: %s' % name)
    else:
        file_obj = query.fetch(1)[0]
        code = file_obj.content
        results = self.runCode(code)
        self.response.out.write(results)
def __submit(self, filename) -> None:
    try:
        Validation.is_file(filename)
        Validation.has_extension(filename)
    except ValidationException.MissingExtensionError:
        FileEventHandler.__LOG.debug(
            f"[SKIPPING] file '{filename}': no extension")
        return
    except FileNotFoundError:
        FileEventHandler.__LOG.debug(
            f"[SKIPPING] file '{filename}': not a regular file")
        return
    self.__executor.submit(self.__dispatcher.execute, File(filename))
def get(self):
    url = self.request.get("url")
    if not url:
        self.response.out.write("download?url=file_url")
        return
    mixed_result = self.check_exist_and_sustainable(url)
    if mixed_result is None:
        self.response.out.write('error: url resource not found, or network error.')
        return
    if mixed_result == False:
        self.response.out.write('error: url resource cannot support continually download.')
        # todo small file download
        return
    # else result is a size
    try:
        filename, size, mimetype = mixed_result
    except:
        self.response.out.write('internal error: function "check_exist_and_sustainable" returned %s.' % str(mixed_result))
        return
    # got file size
    if size > 500 * 1024 * 1024:
        self.response.out.write('The file to download is too large (over 500MB); a single app cannot handle it (only 1GB of traffic per day). A download load balancer is planned, stay tuned.<br><a href="list">Back</a>')
        return
    # calculate block_number
    block_number = calc_block_number(size)
    # create file record
    from model import EStatus, File
    from google.appengine.api import users
    downloader = users.get_current_user()
    new_file = File.get_or_insert('%s-%s' % (downloader, url),
                                  name=filename,
                                  url=url,
                                  downloader=downloader,
                                  size=size,
                                  mimetype=mimetype,
                                  status=EStatus.DOWNLOADING)
    new_file.put()
    key = new_file.key()
    from google.appengine.api import taskqueue

    def add_task():
        # add one task per block
        for n in range(block_number):
            if n == block_number - 1:
                last = 'true'
            else:
                last = ''
            try:
                taskqueue.add(url='/downloadfileblock',
                              queue_name=conf.DOWNLOAD_FILEBLOCK_QUEUE_NAME,
                              method='GET',
                              params={'url': url, 'key': key, 'n': n, 'last': last},
                              transactional=False)
            except taskqueue.TombstonedTaskError:
                logging.warn('added a task which is already run, url: %s, block_num: %d' % (url, n))
                # added a task with that exact name before which has already run;
                # executed task names are kept around for some time to prevent
                # accidental duplicates
                pass

    add_task()
    self.response.out.write('Added successfully!<br><a href="list">Back</a>')
    echo = 'Added tasks into queue: "%s" and started downloading, url: %s' % (conf.DOWNLOAD_FILEBLOCK_QUEUE_NAME, url)
    logging.info(echo)
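# The handler above splits the download into `block_number` fixed-size chunks via a
# `calc_block_number` helper that is not part of this excerpt. A minimal sketch of
# such a helper, assuming a 1 MiB block size (the real block size is not shown here):
BLOCK_SIZE = 1024 * 1024  # assumed block size; not specified in the excerpt


def calc_block_number(size):
    """Return how many BLOCK_SIZE blocks are needed to cover `size` bytes."""
    return max(1, (size + BLOCK_SIZE - 1) // BLOCK_SIZE)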
def get(self):
    from google.appengine.api import users
    from model import File
    import urllib
    current_user = users.get_current_user()
    name = self.request.get('name')
    query = File.gql("where name=:1 and author=:2", name, current_user)
    if query.count() > 0:
        file_obj = query.fetch(1)[0]
        self.response.headers['Content-Type'] = 'application/octet-stream'
        self.response.headers['Content-Disposition'] = 'attachment; filename=%s' % urllib.quote(file_obj.name.encode('utf-8'))
        self.response.out.write(file_obj.content)
        return
    else:
        self.response.out.write("File does not exist or permission denied.")
def get(self):
    from google.appengine.api import users
    from google.appengine.ext.webapp import template
    from model import File
    current_user = users.get_current_user()
    template_values = {}
    query = File.gql('where author=:1 order by name', current_user)
    filelist = query.fetch(1000)
    for file in filelist:
        file.size = len(file.content)
    template_values['filelist'] = filelist
    template_values['logout_url'] = users.create_logout_url("")
    template_values['change_user_url'] = users.create_logout_url("list")
    template_values['nickname'] = users.get_current_user().nickname()
    self.response.out.write(template.render('templates/list.html', template_values))
def files(folder_name, filename):
    actual_filename = generate_filename(folder_name, filename)
    target_file = os.path.join(os.path.expanduser(app.config['UPLOAD_FOLDER']),
                               actual_filename)
    foreign_id = Folder.get(name=folder_name).id
    try:
        f = File.get(filename=filename, folder_id=foreign_id)
    except peewee.DoesNotExist:
        return jsonify(message='error'), 404
    if request.method == 'GET':
        args = request.args
        if 'query' in args and args['query'] == 'info':
            return jsonify(message='OK', data=model_to_dict(f)), 201
        if os.path.exists(target_file):
            return send_file(target_file)
        else:
            return jsonify(message='error'), 404
    if request.method == 'DELETE':
        if os.path.exists(target_file):
            try:
                f.delete_instance()
                os.remove(target_file)
                return jsonify(message="OK"), 201
            except Exception as e:
                app.logger.exception(e)
                return jsonify(message='error'), 500
        else:
            return jsonify(message='error'), 404
    if request.method == 'PATCH':
        share_type = request.args.get('shareType')
        if share_type == 'public':
            f.open_public_share = True
            f.open_private_share = False
        elif share_type == 'none':
            f.open_public_share = False
            f.open_private_share = False
        elif share_type == 'private':
            f.open_public_share = False
            f.open_private_share = True
        f.save()
        return jsonify(message='OK')
def files(folder_name, filename):
    actual_filename = generate_filename(folder_name, filename)
    target_file = os.path.join(os.path.expanduser(app.config['UPLOAD_FOLDER']),
                               actual_filename)
    try:
        f = File.get(filename=filename)
    except peewee.DoesNotExist:
        return jsonify(message='error'), 404
    if request.method == 'GET':
        args = request.args
        if 'query' in args and args['query'] == 'info':
            return jsonify(message='OK', data=model_to_dict(f))
        if os.path.exists(target_file):
            return send_file(target_file)
        else:
            return jsonify(message='error'), 404
    if request.method == 'DELETE':
        if os.path.exists(target_file):
            try:
                f.delete_instance()
                os.remove(target_file)
                return jsonify(message='OK')
            except Exception as e:
                app.logger.exception(e)
                return jsonify(message='error'), 500
        else:
            return jsonify(message='error'), 404
    if request.method == 'PATCH':
        share_type = request.args.get('shareType')
        if share_type == 'private':
            f.open_private_share = True
            f.open_public_share = False
        elif share_type == 'public':
            f.open_private_share = False
            f.open_public_share = True
        elif share_type == 'none':
            f.open_public_share = False
            f.open_private_share = False
        f.save()
        return jsonify(message='OK')
def post(self):
    try:
        file = self.request.get('file')
        # images.resize(self.request.get("img"), 64, 64)
        fileName = self.request.body
        fileObj = File()
        fileObj.name = fileName[fileName.rfind('+%27') + 4:fileName.rfind('%27')]
        fileObj.content = db.Blob(file)
        fileObj.owner = users.get_current_user()
        fileObj.put()
        self.redirect('/filesList')
    except RequestTooLargeError:
        self.response.out.write('The file: %s is too big! File size should be < 1MB' % fileObj.name)
    except:
        self.response.out.write('Sorry. There was an error(?)')
def adding(upload, filename, datetime):
    user = user_operations.get_user_for_model()
    current_dir = obj_of_current_dir()
    id = user.key.id() + getting_path(filename, current_dir)
    key = ndb.Key("File", id)
    if nocontain(key, current_dir.files):
        file_obj = File(id=id)
        file_obj.name = filename
        file_obj.date = datetime
        file_obj.blob = upload.key()
        file_obj.put()
        current_dir.files.append(key)
        current_dir.put()
        return "file added!"
    else:
        blobstore.delete(upload.key())
        return "A file with this name already exists in this directory!"
def create_zip(filename, results, user_id):
    '''
    Generate zip archive of results and return the file id.

    Adds screenshots and HTML for found results.
    Adds csv result summary.
    '''
    db_session = worker.get_session()
    files = []
    str_files = []
    # Get images and HTML
    for result in results:
        if result.status == 'f':
            # Add the image file
            files.append((result.image_file.name, result.image_file.relpath()))
            # Add the HTML as a string file
            html_filename = '{}.html'.format(result.site_name.replace(' ', ''))
            html_file = (html_filename, result.html)
            str_files.append(html_file)
    # Generate in-memory results csv
    csv_string = results_csv_string(results)
    str_file = ('{}.csv'.format(filename), csv_string)
    str_files.append(str_file)
    zip_file = File(name='{}.zip'.format(filename),
                    mime='application/zip',
                    zip_archive=True,
                    zip_files=files,
                    zip_str_files=str_files,
                    user_id=user_id)
    db_session.add(zip_file)
    try:
        db_session.commit()
    except Exception as e:
        raise ArchiveException(e)
    return zip_file.id
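# create_zip relies on a results_csv_string helper that is not shown in this excerpt.
# A minimal sketch of what it might look like, assuming each result exposes the
# site_name and status attributes used above; the real column set is unknown.
import csv
import io


def results_csv_string(results):
    # Hypothetical helper: serialise a few assumed attributes to an in-memory CSV.
    buffer = io.StringIO()
    writer = csv.writer(buffer)
    writer.writerow(['Site Name', 'Status'])
    for result in results:
        writer.writerow([result.site_name, result.status])
    return buffer.getvalue()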
def __do_fetch(self, directory: str):
    for file in sorted(os.listdir(directory)):
        if file in self.__excluded:
            continue
        file_path = os.path.join(directory, file)
        if os.path.isdir(file_path):
            results = [
                child_file for child_file in self.__do_fetch(file_path)
            ]
        else:
            results = [file_path]
        # Return the files
        for fp in results:
            if isinstance(fp, File):
                yield fp
                continue
            yield File(fp)
def list_or_retr(path):
    conn = get_conn()
    path = FTPClientConnection.to_full_path(path)
    # Do list if the arg indicates directory:
    if path[-1] == '/':
        conn.cwd(path)
        data = conn.list().split('\r\n')
        file_list = []
        for line in data:
            if not line:
                continue
            print(line)
            f = File(line)
            file_list.append(f.dict)
        return return_json(file_list)
    else:
        directory, file = FTPClientConnection.split_dir(path)
        print(directory)
        conn.cwd(directory)
        return Response(conn.direct_retr(file),
                        mimetype='application/octet-stream')
def get(self):
    from google.appengine.ext.webapp import template
    from model import File
    template_values = {}
    name = self.request.get('name')
    if not name:
        new_file = True
        import time
        template_values['defaultname'] = time.strftime("%Y%m%d%H%M%S.py")
    else:
        template_values['name'] = name
        query = File.gql('where name=:1', name)
        if query.count() > 0:
            new_file = False
            file_obj = query.fetch(1)[0]
            template_values['code'] = file_obj.content
        else:
            new_file = True
    if new_file:
        template_values['new_file'] = True
        with open('extra/script.tpl.py') as fp:
            template_values['code'] = fp.read()
    self.response.out.write(template.render('templates/edit.html', template_values))
def file_add(self, _name, _prog, _ver, _arch, _job=constant.init["job"]):
    # test version for file
    try:
        _vquery = self.ver_query("one", _prog, _ver)
    except:
        return False
    else:
        # find file
        _query = self.file_query("all", _name, _prog, _ver, False, _arch)
        if _query == []:
            ex = _name.split(".")[-1]
            _type = constant.ftype[ex]
            add_row = File()
            add_row.name = u"" + _name
            add_row.ver_id = _vquery.id
            add_row.const_type = u"" + _type
            add_row.const_arch = u"" + _arch
            add_row.job_id = self.job_query("one", _job).id
            self.__session.add(add_row)
            self.__session.commit()
            return True
def folder(folder_name):
    try:
        folder = Folder.get(Folder.name == folder_name)
    except peewee.DoesNotExist:
        return jsonify(message='error'), 404
    if request.method == 'POST':
        f = request.files['file']
        if f:
            actual_filename = generate_filename(folder_name, f.filename)
            target_file = os.path.join(os.path.expanduser(app.config['UPLOAD_FOLDER']),
                                       actual_filename)
            if os.path.exists(target_file):
                return jsonify(message='error'), 409
            try:
                f.save(target_file)
                f2 = File.create(folder=folder,
                                 filename=f.filename,
                                 public_share_url=generate_url(),
                                 private_share_url=generate_url(),
                                 private_share_password=generate_password(),
                                 open_public_share=False,
                                 open_private_share=False)
                f2.save()
            except Exception as e:
                app.logger.exception(e)
                return jsonify(message='error'), 500
        return jsonify(message='OK'), 201
    if request.method == 'GET':
        return jsonify(message='OK', data=model_to_dict(folder, backrefs=True))
    if request.method == 'DELETE':
        try:
            folder.delete_instance()
        except peewee.IntegrityError:
            return jsonify(message='error'), 409
        return jsonify(message='OK')
def create_zip(filename, results):
    '''
    Generate zip archive of results and return the file id.

    Adds all images for results that have screenshots.
    Adds csv result summary created on the fly (as IOString).
    '''
    db_session = worker.get_session()
    files = []
    str_files = []
    # Create list of images
    for result in results:
        # Add the name to results for the csv output
        files.append((result.image_file.name, result.image_file.relpath()))
    # Generate in-memory results csv
    csv_string = results_csv_string(results)
    str_file = ('{}.csv'.format(filename), csv_string)
    str_files.append(str_file)
    zip_file = File(name='{}.zip'.format(filename),
                    mime='application/zip',
                    zip_archive=True,
                    zip_files=files,
                    zip_str_files=str_files)
    db_session.add(zip_file)
    try:
        db_session.commit()
    except Exception as e:
        raise ArchiveException(e)
    return zip_file.id
def calculate_sums(session, message, tmpdir):
    """ Extract the file referenced by the fedmsg message, walk the sources
    of the specified package, and compute the sha256sum, sha1sum and md5sum
    of each file found in the sources.
    """
    local_filename = os.path.join(tmpdir, message['filename'])
    if not os.path.exists(local_filename):
        raise IOError('File %s not found' % local_filename)

    if local_filename.endswith('.gem'):
        cmd = ['rpmdev-extract', '-C', tmpdir, local_filename]
        proc = Popen(cmd, stdout=PIPE, stderr=PIPE)
        proc.communicate()
        # Remove not-used files
        os.unlink(os.path.join(tmpdir, 'metadata.gz'))
        os.unlink(os.path.join(tmpdir, 'checksums.yaml.gz'))
        # Remove original sources - we only keep the data archive
        os.unlink(local_filename)
        local_filename = os.path.join(tmpdir, 'data.tar.gz')

    if zipfile.is_zipfile(local_filename):
        if local_filename.endswith('.jar') or local_filename.endswith('.war'):
            log.warning('Invalid sources uploaded: %r - package: %r' % (
                local_filename, message.get('name')))
            return {'invalid': local_filename}

    cmd = ['rpmdev-extract', '-C', tmpdir, local_filename]
    proc = Popen(cmd, stdout=PIPE, stderr=PIPE)
    # communicate() must run before the return code is available
    output = proc.communicate()[0]
    if proc.returncode:
        raise IOError(
            'Something went wrong when extracting %s' % local_filename)
    filename = output.split('\n')
    # output from zip archives
    if 'Archive:' in filename[0] and 'creating:' in filename[1]:
        filename = filename[1].split('creating:')[1].strip()
    else:
        filename = filename[0]
    if filename and '/' in filename:
        filename = filename.split('/')[0]
        filename = os.path.join(tmpdir, filename)
    else:
        log.warning("No folder extracted from %r" % local_filename)
        filename = tmpdir

    if local_filename and os.path.exists(local_filename):
        os.unlink(local_filename)

    count, stored = 0, 0
    for fname, sha256sum, sha1sum, md5sum in walk_directory(filename):
        count = count + 1
        pkgobj = File.exists(session, message['md5sum'], fname)
        fname = fname.replace(tmpdir, '')
        if not pkgobj:
            pkgobj = File(
                pkg_name=message['name'],
                filename=fname,
                sha256sum=sha256sum,
                sha1sum=sha1sum,
                md5sum=md5sum,
                tar_file=message['filename'],
                tar_sum=message['md5sum'],
            )
            session.add(pkgobj)
            stored = stored + 1
        else:
            pass
    session.commit()
    log.info("Stored %i of %i files" % (stored, count))
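# calculate_sums unpacks tuples from a walk_directory helper that is not included in
# this excerpt. A minimal sketch of what such a helper might look like, assuming it
# yields (path, sha256, sha1, md5) for every regular file below a directory:
import hashlib
import os


def walk_directory(directory):
    # Hypothetical stand-in for the real helper: read every regular file once and
    # feed the same chunks to all three digests.
    for root, _dirs, files in os.walk(directory):
        for name in files:
            path = os.path.join(root, name)
            sha256 = hashlib.sha256()
            sha1 = hashlib.sha1()
            md5 = hashlib.md5()
            with open(path, 'rb') as stream:
                for chunk in iter(lambda: stream.read(8192), b''):
                    sha256.update(chunk)
                    sha1.update(chunk)
                    md5.update(chunk)
            yield path, sha256.hexdigest(), sha1.hexdigest(), md5.hexdigest()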
def deletefile(billid, fileid):
    start = time.time()
    bill_id = billid
    username = request.authorization.username
    passwordinfo = request.authorization.password
    bill_sc = Billschema(many=False)
    data1 = request.get_json()
    dbtime = time.time()
    flag = checkauthentication(username, passwordinfo)
    dur = (time.time() - dbtime) * 1000
    c.timing("dbconnect", dur)
    if flag == True:
        # check if user exists
        result = Credential.select_user_by_email(username)
        user_sc = Credentialschema()
        data = user_sc.dump(result)
        owner_id = data.get('id')
        result2 = Bills.select_user_by_billid(bill_id)
        bill_sc = Billschema(many=False)
        data2 = bill_sc.dump(result2)
        owner_id2 = data2.get('owner_id')
        if owner_id == owner_id2:
            # authorized against bill and user
            file_sc = File_schema_output(many=False)
            dbtime = time.time()
            result = File.select_file_by_file_id(fileid)
            dur = (time.time() - dbtime) * 1000
            c.timing("dbconnect", dur)
            print(result)
            if not result:
                return custom_http_code("file does not exist", 404)
            filedir = root_dir + "/" + "attachments" + "/" + fileid + "/"
            # Delete the attachment objects from S3.
            s3 = boto3.resource("s3")
            bucketobj = s3.Bucket(bucket)
            file_key = fileid + '/'
            bucketobj.objects.filter(Prefix=file_key).delete()
            if os.path.exists(filedir):
                shutil.rmtree(filedir)
            else:
                print("file id folder not found")
            File.delete_file(fileid)
            c.incr("deletefilecount")
            dur = (time.time() - start) * 1000
            c.timing("deletefilecount", dur)
            return custom_http_code(data, 204)
        else:
            c.incr("deletefilecount")
            dur = (time.time() - start) * 1000
            c.timing("deletefilecount", dur)
            return custom_http_code('Unauthorised', 401)
    else:
        return custom_http_code('invalid login', 401)
def addFiles(files):
    print "have to check %d files" % len(files)
    errors = []
    base = re.compile(r"^(.*)\.([\w]+)$")
    for file in files:
        if isinstance(file, File):
            continue
        f, revision = getFile(file)
        if not f:
            print "%s not found!" % file
            continue
        path = f['filepath']
        f['id'] = f['fid']
        f['filepath'] = f['filename']
        f['filename'] = revision['description']
        f['node'] = f['nid']
        del(f['fid'])
        del(f['nid'])
        try:
            File.get(f['id'])
            print "file %s already there" % f['id']
            continue
        except SQLObjectNotFound:
            file = File(**f)
        match = base.match(file.filepath)
        if not match:
            print "%s odd filename" % file.filename
            continue
        basename, ext = match.groups()
        if file.filemime[:5] == "video":
            download_from = "http://master/files/orbit/%s.flv" % urllib.quote(basename)
            download_to = "%s/movies/%s.flv" % (DOWNLOAD, basename)
            filename = basename + ".flv"
            alt = "%s/files/orbit/%s" % (DOWNLOAD, filename)
        elif file.filemime[:5] == "image":
            download_from = "http://master/%s" % urllib.quote(path)
            filename = basename + "." + ext
            download_to = "%s/images/%s" % (DOWNLOAD, filename)
            alt = "%s/%s" % (DOWNLOAD, path)
        elif file.filemime[:5] == "audio":
            download_from = "http://master/%s" % urllib.quote(path)
            filename = basename + "." + ext
            download_to = "%s/audio/%s" % (DOWNLOAD, filename)
            alt = "%s/%s" % (DOWNLOAD, path)
        if not os.path.isfile(download_to) and os.path.isfile(alt):
            print "copy %s" % basename
            shutil.copy(alt, download_to)
        elif not os.path.isfile(download_to):
            print "download %s" % basename
            urllib.urlretrieve(download_from, download_to)
        if not os.path.isfile(download_to):
            print "could not fetch %s!" % download_to
            errors.append(file)
            continue
        if file.filemime[:5] == "image":
            if file.filemime[6:] == "gif":
                print "convert gif"
                movie = '%s/movies/%s.flv' % (DOWNLOAD, basename)
                cmd1 = "ffmpeg -f gif -i '%s' -s 320x240 -y %s" % (download_to, movie)
                os.popen(cmd1)
                if not os.path.isfile(movie):
                    print "%s could not execute!" % cmd1
                    errors.append(file)
                    continue
                else:
                    os.unlink(download_to)
                    file.filemime = "video/flv"
                    filename = basename + ".flv"
            else:
                download_to_old = ""
                if file.filemime[6:] != "jpeg":
                    download_to_old = download_to
                    download_to = download_to + ".jpg"
                    filename = filename + ".jpg"
                    shutil.move(download_to_old, download_to)
                cmd1 = "sips --getProperty pixelWidth '%s'" % download_to
                cmd2 = "sips --setProperty format jpeg --resampleWidth 400 '%s'" % download_to
                try:
                    if int(os.popen(cmd1).readlines()[1][14:-1]) != 400:
                        os.popen(cmd2)
                    file.filemime = "image/jpeg"
                except IndexError:
                    print "%s could not execute!" % cmd2
                    errors.append(file)
                    continue
        file.filepath = filename
    for e in errors:
        e.destroySelf()
def getfile(s):
    f = File.retrieve(s)
    return f