def delete(self, id):
    """Delete the user with the given primary key.

    Aborts with 404 when no such user exists; returns 204 on success.
    """
    target = session.query(User).filter(User.id == id).first()
    if target is None:
        abort(404, message="User {} does not exist".format(id))
    session.delete(target)
    session.commit()
    return 204
def create_topic(cls, title, description):
    """Instantiate *cls*, fill in title/description, persist and return it."""
    new_topic = cls()
    new_topic.title = title
    new_topic.description = description
    session.add(new_topic)
    session.commit()
    return new_topic
def delete(uid):
    """Delete the root folder identified by *uid* together with its contents.

    Removes all tracks rooted in the folder, prunes albums/artists that
    become empty as a result, then deletes the folder subtree itself.
    Returns a FolderManager status code.
    """
    status, folder = FolderManager.get(uid)
    if status != FolderManager.SUCCESS:
        return status
    if not folder.root:
        return FolderManager.NO_SUCH_FOLDER
    # Delete associated tracks and prune empty albums/artists.
    for artist in Artist.query.all():
        for album in artist.albums[:]:
            # BUG FIX: the original iterated a lazy filter() over
            # album.tracks while removing from it, which skips elements.
            # Materialize the matching tracks first.
            doomed = [t for t in album.tracks if t.root_folder.id == folder.id]
            for track in doomed:
                album.tracks.remove(track)
                session.delete(track)
            if len(album.tracks) == 0:
                artist.albums.remove(album)
                session.delete(album)
        if len(artist.albums) == 0:
            session.delete(artist)

    def cleanup_folder(folder):
        # Depth-first delete of the folder subtree.
        for f in folder.children:
            cleanup_folder(f)
        session.delete(folder)

    cleanup_folder(folder)
    session.commit()
    return FolderManager.SUCCESS
def append_comment(self, comment):
    """Attach a new Comment with text *comment* to this topic and return it."""
    created = Comment()
    created.topic_id = self.id
    created.description = comment
    session.add(created)
    session.commit()
    return created
def on_done(self, event):
    """The done button was pressed.

    Creates a playlist via the remote API from the dialog fields, mirrors
    it locally (Playlist + PlaylistEntry rows), then closes the dialog.
    On NotLoggedIn, re-runs itself after a login prompt.
    """
    name = self.name.GetValue()
    description = self.description.GetValue()
    public = self.public.GetValue()
    logger.debug('Attempting to create playlist with name "%s", description "%s" and public %s.', name, repr(description), public)
    if not name:
        # Blank names are rejected client-side; keep focus for correction.
        do_error('Playlist names cannot be blank.')
        self.name.SetFocus()
    else:
        try:
            # Remote create first; the returned id seeds the local row.
            id = application.api.create_playlist(name, description, public)
            logger.debug('New playlist ID is %s.', id)
            p = Playlist(id = id, name = name, description = description)
            logger.debug('Created Playlist object %s.', p)
            session.add(p)
            application.frame.add_playlist(p)
            entry_ids = application.api.add_songs_to_playlist(p.id, [t.id for t in self.tracks])
            logger.debug('Entry IDs are: %s.', entry_ids)
            if len(entry_ids) == len(self.tracks):
                # All tracks accepted remotely: mirror each as a local entry.
                for pos, track in enumerate(self.tracks):
                    p.tracks.append(track)
                    e = PlaylistEntry(playlist = p, track = track, id = entry_ids[pos])
                    logger.debug('Created playlist entry %s (%s).', e, e.track)
                    session.add(e)
            else:
                # Partial remote success: report, but note that no local
                # entries are created for the tracks that did succeed.
                do_error('Only %s %s out of %s were added to the playlist.' % (len(entry_ids), 'track' if len(entry_ids) == 1 else 'tracks', len(self.tracks)))
            session.commit()
            logger.debug('Done. Closing %s.', self)
            self.Close(True)
        except NotLoggedIn:
            # Retry this handler once the user has logged in.
            do_login(callback = self.on_done, args = [event])
def on_put(self, req, resp, id):
    """Update a Post's author/body; responds with the refreshed resource.

    Non-organizer callers get the current resource back instead of an
    error (see inline note). 404 when the post does not exist.
    """
    try:
        user = req.context['user']
        if (not user.is_logged_in()) or (not user.is_org()):
            # This must stay here -- otherwise the frontend does not work.
            self.on_get(req, resp, id)
            return
        data = json.loads(req.stream.read())['post']
        post = session.query(model.Post).get(id)
        if post is None:
            resp.status = falcon.HTTP_404
            return
        post.author = data['author']
        post.body = data['body']
        session.commit()
    except SQLAlchemyError:
        session.rollback()
        raise
    finally:
        # Session is closed whether or not the update succeeded.
        session.close()
    # Render the updated post as the response body.
    self.on_get(req, resp, id)
def upsert_contributors(o_data, r_data):
    """Upsert contributors for every non-fork repo and for the organization.

    For each repository in *r_data*, each contributor is upserted as a
    Person and linked to the repository; the union of all contributors is
    then linked to the organization. Per-repo commits happen eagerly; the
    final organization commit is best-effort (rolled back on failure).
    """
    organization = session.query(Organization).filter(Organization.id==o_data.id).first()
    contributors = set()
    for r_item in r_data:
        if not r_item.fork:
            c_data = r_item.get_contributors()
            repo_contributors = []
            for c_item in c_data:
                check_rate_limit(c_item)
                contributor = p_formatter(c_item)
                contributor = upsert(model=Person, unique_key='id', item=contributor)
                contributors.add(contributor)
                repo_contributors.append(contributor)
            repository = session.query(Repository).filter(Repository.id==r_item.id).first()
            repository.contributors = repo_contributors
            session.add(repository)
            session.commit()
    contributors = list(contributors)
    organization.contributors = contributors
    try:
        session.add(organization)
        session.commit()
    except Exception:
        # BUG FIX: was a bare `except:` which also swallowed
        # SystemExit/KeyboardInterrupt. Still best-effort: roll back
        # the organization link but do not propagate.
        session.rollback()
def post(self):
    """Create a Section (with nested students) from the request JSON.

    Deserialized subjects would be inserted as duplicates with new ids,
    so each student's subjects are re-mapped onto the existing DB rows
    (matched by name) before the section is persisted.
    Returns 400 when no JSON body is supplied, otherwise 201.
    """
    sectionSchema = SectionSchema()
    studentsSchema = StudentSchema(many=True)
    subjectsFromDb = session.query(Subject).all()
    json_data = request.get_json()
    if not json_data:
        return jsonify({'message' : 'No input data provided'}), 400
    section = sectionSchema.load(json_data).data
    students = studentsSchema.load(json_data['students']).data
    # Replace each deserialized subject with its DB counterpart so that
    # committing the section does not duplicate subject rows.
    for student in students:
        student.subjects = [
            dbSubject
            for studentSubject in student.subjects
            for dbSubject in subjectsFromDb
            if dbSubject.name == studentSubject.name
        ]
    section.students = students
    session.add(section)
    session.commit()
    return sectionSchema.dump(section), 201
def put(self, id):
    """Update a team's name.

    404 when the team is missing, 400 on invalid/duplicate input,
    202 with the updated team on success.
    """
    team = session.query(Team).filter(Team.id == id).first()
    if not team:
        abort(404, message="Team {} does not exist".format(id))
    error_message = None
    try:
        # Processing input arguments
        parser.add_argument('name', type=str, required=True, trim=True)
        parsed_args = parser.parse_args()
        # Updating team instance (keep the old name if a falsy one was sent)
        team.name = parsed_args['name'] if parsed_args['name'] else team.name
        # Saving to DB
        session.merge(team)
        session.commit()
    except IntegrityError:
        error_message = 'Faulty or a duplicate record'
    except ValueError as e:
        error_message = str(e)
    except Exception as e:
        print(str(e))
        error_message = 'Bad request'
    # BUG FIX: this logic previously lived in a `finally:` block, where the
    # `abort`/`return` would mask any in-flight exception; the message also
    # wrongly said "create" for an update endpoint.
    if error_message:
        session.rollback()
        abort(400, message='Unable to update team record: ' + error_message)
    return team, 202
def __api_request(self, write, **kwargs):
    """Sign and send a LastFM API request; returns the parsed JSON or None.

    Python 2 code (`iteritems`, `unicode`, non-callable `r.json`).
    Write requests require a valid session key and active status.
    """
    if not self.__enabled:
        return
    if write:
        # Writes need an authenticated, non-disabled LastFM session.
        if not self.__user.lastfm_session or not self.__user.lastfm_status:
            return
        kwargs['sk'] = self.__user.lastfm_session
    kwargs['api_key'] = self.__api_key
    # Build the LastFM request signature: keys sorted, values concatenated,
    # md5 of the whole string plus the shared secret.
    sig_str = ''
    for k, v in sorted(kwargs.iteritems()):
        if type(v) is unicode:
            sig_str += k + v.encode('utf-8')
        else:
            sig_str += k + str(v)
    sig = hashlib.md5(sig_str + self.__api_secret).hexdigest()
    kwargs['api_sig'] = sig
    kwargs['format'] = 'json'
    if write:
        r = requests.post('http://ws.audioscrobbler.com/2.0/', data = kwargs)
    else:
        r = requests.get('http://ws.audioscrobbler.com/2.0/', params = kwargs)
    if 'error' in r.json:
        if r.json['error'] in (9, '9'):
            # Error 9 = invalid session key: flag the user as logged out.
            self.__user.lastfm_status = False
            session.commit()
        # NOTE(review): '%i' raises TypeError when the error code arrives as
        # the string '9' — confirm the API's error type before relying on this.
        self.__logger.warn('LastFM error %i: %s' % (r.json['error'], r.json['message']))
    return r.json
def add_pad(self, url, name=None):
    """Register a new pad to watch.

    Creates the Pad DB row, tries to start a PadWatcher for it, and writes
    the initial RSS output. Network failures are recorded on the row
    (status ERROR) instead of raised. The row is committed either way.
    """
    # Default the pad name to the last URL path segment.
    pad_name = name or url.split('/')[-1]
    rss_name = self.make_rss_name(pad_name)
    html_url = self.get_parse_url(url)
    pad = Pad(
        name=pad_name,
        start_url=url,
        html_url=html_url,
        rss_name=rss_name
    )
    session.add(pad)
    try:
        pad_watcher = PadWatcher(pad_name, rss_name, html_url)
    except (urllib2.URLError, urllib2.HTTPError, socket.error) as exception:
        # Keep the row, but mark it failed with the fetch error.
        pad.status = Pad.ERROR
        pad.error_msg = unicode(repr(exception))
    else:
        pad_watcher.db_pad = pad
        pad.status = Pad.OK
        self.pads.append(pad_watcher)
        self.write_rss(pad_watcher)
    #pad.delete_token = uuid4()
    session.commit()
def delete(self, id):
    """Delete the team with the given id; 404 if absent, 204 on success."""
    target = session.query(Team).filter(Team.id == id).first()
    if target is None:
        abort(404, message="Team {} does not exist".format(id))
    session.delete(target)
    session.commit()
    return 204
def SetupDB():
    """Recreate the citybus database from scratch and seed three bus routes."""
    # Delete the db file
    import os
    try:
        os.unlink('citybus.db')
    except OSError:
        pass  # It's okay if the file doesn't exist.
    # Create our database, add the models, etc.
    Model.metadata.create_all(bind=engine)
    # Add a couple bus routes
    bus201 = BusRoute()
    bus201.id = '201'
    bus201.busses = 17
    bus211 = BusRoute()
    # NOTE(review): variable is bus211 but the id is '221' — looks like a
    # typo; confirm which route id is intended before changing.
    bus211.id = '221'
    bus211.busses = 30
    bus315 = BusRoute()
    bus315.id = '315'
    bus315.busses = 12
    session.add(bus201)
    session.add(bus211)
    session.add(bus315)
    session.commit()
def editMenuItem(restaurant_id, menuitem_id):
    """Edit a menu item; only the restaurant's creator may do so.

    GET renders the edit form; POST applies any non-empty form fields
    and redirects back to the restaurant menu.
    """
    restaurant = session.query(Restaurant).filter_by(id=restaurant_id).one()
    creator = getUserInfo(restaurant.user_id)
    if creator.id != login_session['user_id']:
        return redirect(url_for('showLogin'))
    menuitem = session.query(MenuItem).filter_by(id=menuitem_id).one()
    if request.method != 'POST':
        return render_template(
            'edit-menu-item.html',
            restaurant_id=restaurant_id,
            menuitem_id=menuitem_id,
            item=menuitem,
            courses=constants.COURSES)
    # Apply each submitted field only when it is non-empty.
    for field in ('name', 'description', 'price', 'course'):
        submitted = request.form[field]
        if submitted:
            setattr(menuitem, field, submitted)
    session.add(menuitem)
    session.commit()
    flash('Menu Item {0} Updated'.format(menuitem.name))
    return redirect(url_for('restaurantMenu', restaurant_id=restaurant_id))
def newMenuItem(restaurant_id):
    """Create a new menu item for a restaurant (creator only).

    POST saves an optional uploaded image and inserts the item;
    GET renders the creation form. Unauthorized users are redirected
    to the login page.
    """
    restaurant = session.query(Restaurant).filter_by(id=restaurant_id).one()
    creator = getUserInfo(restaurant.user_id)
    if ('username' not in login_session) or (creator.id != login_session['user_id']):
        return redirect(url_for('showLogin'))
    if request.method == 'POST':
        # File upload: empty filename means no (valid) image was provided.
        filename = ""
        file = request.files['image']
        if file and allowed_file(file.filename):
            filename = secure_filename(file.filename)
            file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
        newItem = MenuItem(
            course=request.form['course'],
            description=request.form['description'],
            name=request.form['name'],
            price=request.form['price'],
            restaurant_id=restaurant_id,
            user_id=login_session['user_id'],
            image=filename
        )
        session.add(newItem)
        session.commit()
        flash('New Menu Item created!')
        return redirect(url_for('restaurantMenu', restaurant_id=restaurant_id))
    else:
        # NOTE(review): this re-queries the restaurant already fetched above.
        restaurant = session.query(Restaurant).filter_by(id=restaurant_id).one()
        return render_template(
            'new-menu-item.html',
            restaurant=restaurant,
            courses=constants.COURSES)
def on_delete(self, req, resp, id):
    """Delete an achievement by id (admin only).

    Responds 400/401-payload for non-admins, 404 when the achievement
    does not exist, and an empty result on success. DB errors roll back
    and re-raise; the session is always closed after the delete attempt.
    """
    user = req.context['user']
    try:
        achievement = session.query(model.Achievement).get(id)
    except SQLAlchemyError:
        session.rollback()
        raise
    if (not user.is_logged_in()) or (not user.is_admin()):
        # Only administrators may delete achievements.
        req.context['result'] = {
            'errors': [
                {
                    'status': '401',
                    'title': 'Unauthorized',
                    'detail': u'Smazání trofeje může provést pouze administrátor.'
                }
            ]
        }
        resp.status = falcon.HTTP_400
        return
    if not achievement:
        req.context['result'] = {
            'errors': [
                {
                    'status': '404',
                    'title': 'Not Found',
                    'detail': u'Trofej s tímto ID neexsituje.'
                }
            ]
        }
        resp.status = falcon.HTTP_404
        return
    try:
        session.delete(achievement)
        session.commit()
    except SQLAlchemyError:
        session.rollback()
        raise
    finally:
        session.close()
    req.context['result'] = {}
def post(self):
    """Create a new team from the parsed request arguments.

    Returns 201 with the created team, or aborts with 400 carrying a
    category-specific error message. Rolls back on any failure.
    """
    error_message = None
    try:
        # Processing input arguments
        parser.add_argument('name', type=str, required=True, trim=True)
        parsed_args = parser.parse_args()
        # Creating team instance.
        # NOTE(review): only 'name' is registered here, yet 'params' and
        # 'meta' are read below — assumes they were added to the shared
        # parser elsewhere; confirm, otherwise this raises KeyError.
        team = Team(
            created=datetime.datetime.utcnow(),
            name=parsed_args['name'],
            params=parsed_args['params'],
            meta=parsed_args['meta']
        )
        # Saving to DB
        session.add(team)
        session.commit()
    except IntegrityError:
        error_message = 'Faulty or a duplicate record'
    except ValueError as e:
        error_message = str(e)
    except Exception as e:
        print(str(e))
        error_message = 'Bad request'
    finally:
        # NOTE(review): abort/return inside `finally` masks any in-flight
        # exception not caught above.
        if error_message:
            session.rollback()
            abort(400, message='Unable to create team record: ' + error_message)
        else:
            return team, 201
def do_user_import():
    """Replace the entire user table with rows parsed from an uploaded CSV.

    Expected columns: id, name, mail, password, salt, admin, lfmsess,
    lfmstatus. The literal string "None" maps to None; booleans are
    parsed from "True". Re-renders the form when no file was uploaded.
    """
    if not request.files["file"]:
        return render_template("importusers.html")
    users = []
    reader = csv.reader(request.files["file"])
    for id, name, mail, password, salt, admin, lfmsess, lfmstatus in reader:
        # "None" in the CSV means a missing value.
        mail = None if mail == "None" else mail
        admin = admin == "True"
        lfmsess = None if lfmsess == "None" else lfmsess
        lfmstatus = lfmstatus == "True"
        users.append(
            User(
                id=uuid.UUID(id),
                name=name,
                password=password,
                salt=salt,
                admin=admin,
                lastfm_session=lfmsess,
                lastfm_status=lfmstatus,
            )
        )
    # Wipe existing users before inserting the imported set.
    User.query.delete()
    for u in users:
        db_sess.add(u)
    db_sess.commit()
    return redirect(url_for("user_index"))
def delete(self, year):
    """Remove the temperature record for *year*; 404 if absent, 204 on success."""
    record = session.query(Temperature).filter(Temperature.year == year).first()
    if record is None:
        abort(404, message="Temperature {} doesn't exist".format(year))
    session.delete(record)
    session.commit()
    return {}, 204
def upsert_repositories(o_data):
    """Upsert all repositories of an organization and link them to it.

    Classifies each repo's source owner as civic/government, upserts the
    Repository rows, then attaches them to the organization. Returns the
    raw repo data on success, None when the final commit fails (rolled
    back, best-effort).
    """
    organization = session.query(Organization).filter(Organization.id==o_data.id).first()
    r_data = o_data.get_repos()
    repositories = []
    for r_item in r_data:
        check_rate_limit(r_item)
        repository = r_formatter(r_item)
        source_login = repository['source_owner_login']
        if source_login:
            repository['source_civic'] = source_login.lower() in organizations_civic
            repository['source_government'] = source_login.lower() in organizations_government
        else:
            # No source owner: assume government.
            # NOTE(review): 'source_civic' is left unset on this path —
            # confirm downstream code tolerates the missing key.
            repository['source_government'] = True
        repository = upsert(model=Repository, unique_key='id', item=repository)
        repositories.append(repository)
    organization.repositories = repositories
    try:
        session.add(organization)
        session.commit()
        return r_data
    except Exception:
        # BUG FIX: was a bare `except:` which also swallowed
        # SystemExit/KeyboardInterrupt. Still best-effort: roll back only.
        session.rollback()
def on_put(self, req, resp, id):
    """Update an article (organizers only); responds with the refreshed article.

    Non-organizers get a 400 with a 401-style error payload; a missing
    article yields 404. DB errors roll back and re-raise; the session is
    always closed before delegating to on_get for the response body.
    """
    user = req.context['user']
    if (not user.is_logged_in()) or (not user.is_org()):
        # Only organizers may edit articles.
        req.context['result'] = {
            'errors': [
                {
                    'status': '401',
                    'title': 'Unauthorized',
                    'detail': u'Upravit článek může pouze organizátor.'
                }
            ]
        }
        resp.status = falcon.HTTP_400
        return
    data = json.loads(req.stream.read())['article']
    try:
        article = session.query(model.Article).get(id)
        if article is None:
            req.context['result'] = {
                'errors': [
                    {
                        'status': '404',
                        'title': 'Not Found',
                        'detail': u'Článek s tímto ID neexistuje.'
                    }
                ]
            }
            resp.status = falcon.HTTP_404
            return
        article.title = data['title']
        article.body = data['body']
        article.published = data['published']
        # NOTE(review): 'time_published' from the payload is written to
        # 'time_created' without parsing (unlike on_post) — confirm intended.
        article.time_created = data['time_published']
        article.picture = data['picture']
        session.commit()
    except SQLAlchemyError:
        session.rollback()
        raise
    finally:
        session.close()
    self.on_get(req, resp, id)
def on_post(self, req, resp):
    """Create a new article (organizers only) and return it as JSON.

    Commits twice: once to obtain the generated id, then again after
    filling the derived 'resource' path. DB errors roll back and re-raise.
    """
    user = req.context['user']
    if (not user.is_logged_in()) or (not user.is_org()):
        # Only organizers may add articles.
        req.context['result'] = {
            'errors': [
                {
                    'status': '401',
                    'title': 'Unauthorized',
                    'detail': u'Přidat článek může pouze organizátor.'
                }
            ]
        }
        resp.status = falcon.HTTP_400
        return
    data = json.loads(req.stream.read())['article']
    try:
        article = model.Article(
            author = user.id,
            title = data['title'],
            body = data['body'],
            published = data['published'],
            year = req.context['year'],
            time_created = dateutil.parser.parse(data['time_published']),
            picture = data['picture']
        )
        session.add(article)
        session.commit()
        # The resource path needs the id generated by the first commit.
        article.resource = 'articles/' + str(article.id)
        session.commit()
    except SQLAlchemyError:
        session.rollback()
        raise
    req.context['result'] = {
        'article': _artice_to_json(article)
    }
    session.close()
def post(self):
    """Create a Gene from the parsed 'symbol' argument; returns it with 201."""
    args = parser.parse_args()
    new_gene = Gene(symbol=args["symbol"])
    session.add(new_gene)
    session.commit()
    return new_gene, 201
def delete(self, id):
    """Delete the section with the given id.

    BUG FIX: previously an unknown id passed None straight to
    session.delete(), raising an unhandled error; now aborts with 404
    like the sibling resource handlers in this file.
    """
    section = session.query(Section).filter(Section.id == id).first()
    if not section:
        abort(404, message="Section {} does not exist".format(id))
    session.delete(section)
    session.commit()
    return {}, 200
def register():
    """Handle the account registration form.

    Logged-in users are bounced back; a valid submission creates the
    user (hashing the password with bcrypt) unless the name is taken.
    """
    if 'username' in session:
        flash("Cannot create new account while logged in.")
        return redirect(url_for('hello'))
    else:
        form = RegistrationForm()
        if form.validate_on_submit():
            login = form.username.data
            user = dbsession.query(User).filter_by(name=login).first()
            if user is None:
                # New name: hash the password and create the account.
                pw_hash = bcrypt.generate_password_hash(form.password.data)
                user = User(login, '', pw_hash)
                user.gender = form.gender.data
                user.species = form.species.data
                user.bio = form.bio.data
                user.email = form.email.data
                # The form asks "are you an adult?"; store the inverse flag.
                user.minorflag = not form.adult.data
                user.accepttos = True
                dbsession.add(user)
                dbsession.commit()
                flash("User Created")
                return redirect(url_for('login'))
            else:
                flash("User already exists.")
                return redirect(url_for('register'))
        return render_template('register.html', form=form)
def __api_request(self, write, **kwargs):
    """Sign and send a LastFM API request; returns the parsed JSON or None.

    Python 2 code (`iteritems`, `unicode`, non-callable `r.json`).
    Write requests require a valid session key and active status.
    """
    if not self.__enabled:
        return
    if write:
        # Writes need an authenticated, non-disabled LastFM session.
        if not self.__user.lastfm_session or not self.__user.lastfm_status:
            return
        kwargs["sk"] = self.__user.lastfm_session
    kwargs["api_key"] = self.__api_key
    # Build the LastFM request signature: keys sorted, values concatenated,
    # md5 of the whole string plus the shared secret.
    sig_str = ""
    for k, v in sorted(kwargs.iteritems()):
        if type(v) is unicode:
            sig_str += k + v.encode("utf-8")
        else:
            sig_str += k + str(v)
    sig = hashlib.md5(sig_str + self.__api_secret).hexdigest()
    kwargs["api_sig"] = sig
    kwargs["format"] = "json"
    if write:
        r = requests.post("http://ws.audioscrobbler.com/2.0/", data=kwargs)
    else:
        r = requests.get("http://ws.audioscrobbler.com/2.0/", params=kwargs)
    if "error" in r.json:
        if r.json["error"] in (9, "9"):
            # Error 9 = invalid session key: flag the user as logged out.
            self.__user.lastfm_status = False
            session.commit()
        # NOTE(review): '%i' raises TypeError when the error code arrives as
        # the string "9" — confirm the API's error type before relying on this.
        self.__logger.warn("LastFM error %i: %s" % (r.json["error"], r.json["message"]))
    return r.json
def storynew():
    """Create a new story for the logged-in user.

    Renders the story form; on a valid submission, converts the body
    from Markdown, attaches (creating as needed) lower-cased tags, and
    redirects to the author's page.
    """
    if 'username' in session:
        user = session['username']
        form = StoryForm()
        if form.validate_on_submit():
            uid = dbsession.query(User.id).filter_by(name=user).first()
            newstory = Story(form.title.data)
            # Body is stored as rendered HTML, not raw Markdown.
            newstory.text = markdown.markdown(form.body.data)
            newstory.uid = uid[0]
            newstory.adult = form.adult.data
            tagslist = form.tags.data
            tagslist = tagslist.split(',')
            for tagitem in tagslist:
                # Normalize each tag: trimmed and lower-cased.
                tagitem = tagitem.strip()
                tagitem = tagitem.lower()
                tag = dbsession.query(Tag).filter_by(tagname=tagitem).first()
                if tag is None:
                    tag = Tag(tagitem)
                newstory.tags.append(tag)
            dbsession.add(newstory)
            dbsession.commit()
            return redirect("~"+user)
        return render_template("storynew.html", form=form)
    else:
        # Anonymous visitors see the form page without a bound form object.
        return render_template("storynew.html")
def _finishAddQuestions(self, t):
    """Append topic *t* to the topics list widget and commit pending changes."""
    entry = QListWidgetItem()
    entry.setText(t.title)
    entry.setStatusTip(str(t.id))
    self.lstTopics.addItem(entry)
    session.commit()
    print('finished')
def delete(self, id):
    """Remove the todo with the given id; 404 if absent, 204 on success."""
    record = session.query(Todo).filter(Todo.id == id).first()
    if record is None:
        abort(404, message="Todo {} doesn't exist".format(id))
    session.delete(record)
    session.commit()
    return {}, 204
def on_post(self, req, resp, id):
    """Merge a task's git branch into master (admins or the wave's garant).

    Validates the task, its git configuration, caller permissions and
    git locks, then deletes the task's branch under a merge lock and
    points the task at 'master'.
    """
    try:
        user = req.context['user']
        # Check that the task exists
        task = session.query(model.Task).get(id)
        if task is None:
            req.context['result'] = 'Neexistujici uloha'
            resp.status = falcon.HTTP_404
            return
        # Check that git_branch and git_path are set
        if (task.git_path is None) or (task.git_branch is None):
            req.context['result'] = 'Uloha nema zadanou gitovskou vetev nebo adresar'
            resp.status = falcon.HTTP_400
            return
        if task.git_branch == "master":
            req.context['result'] = 'Uloha je j*z ve vetvi master'
            resp.status = falcon.HTTP_400
            return
        wave = session.query(model.Wave).get(task.wave)
        # Only administrators and the wave's garant may merge
        if (not user.is_logged_in()) or ((not user.is_admin()) and (user.id != wave.garant)):
            req.context['result'] = 'Nedostatecna opravneni'
            resp.status = falcon.HTTP_400
            return
        # Check the global git lock
        lock = util.lock.git_locked()
        if lock:
            req.context['result'] = 'GIT uzamcen zámkem '+lock + "\nNekdo momentalne provadi akci s gitem, opakujte prosim akci za 20 sekund."
            resp.status = falcon.HTTP_409
            return
        try:
            mergeLock = LockFile(util.admin.taskMerge.LOCKFILE)
            mergeLock.acquire(60)  # lock timeout is 1 minute
            # Fetch the repository
            repo = git.Repo(util.git.GIT_SEMINAR_PATH)
            if task.git_branch in repo.heads:
                # Cannot delete branch we are on
                repo.git.checkout("master")
                repo.git.branch('-D', task.git_branch)
            task.git_branch = 'master'
            session.commit()
            resp.status = falcon.HTTP_200
        finally:
            mergeLock.release()
    except SQLAlchemyError:
        session.rollback()
        raise
    finally:
        session.close()
def poll_and_save():
    """Poll WEconnect for each user's events today and sync them to the DB.

    Events already present (matched by eid) get their completion status
    refreshed; unknown eids are inserted as new Event rows. Each user's
    changes are committed together, rolling back on failure.
    """
    users = User.query.all()
    for user in users:
        logging.debug("polling for {}".format(user))
        # API call to WEconnect activities-with-events
        activity_events = weconnect.get_todays_events(user)
        logging.debug(activity_events)
        for activity in activity_events:
            for ev in activity["events"]:
                # BUG FIX: filter_by takes keyword arguments; the original
                # `filter_by(eid == ev["eid"])` raised NameError at runtime.
                event = session.query(Event).filter_by(eid=ev["eid"]).first()
                if event:
                    # update the completion status
                    event.completed = (ev["didCheckin"] == True)
                else:
                    # eid doesn't exist yet: add a new event
                    newEvent = weconnect.createNewEvent(ev)
                    session.add(newEvent)
        try:
            session.commit()
            # BUG FIX: was `print(...).format(...)`, which called .format
            # on print()'s None return value.
            print("Received {} Activity events in last poll.".format(
                len(activity_events)))
        except Exception:
            # Narrowed from a bare except; keep the best-effort rollback.
            session.rollback()
            print("Session Commit failed")
def add_price(lastPrice):
    """Fetch the latest 1-minute BTCUSDT kline from Binance, persist it,
    and reschedule itself to run again in one minute.

    The kline layout is [openTime, open, high, low, close, volume, ...].
    """
    response = requests.get("https://api.binance.com/api/v3/klines?symbol=BTCUSDT&interval=1m")
    # PERF FIX: parse the JSON body once instead of six times.
    kline = response.json()[0]
    fetchTime = kline[0]
    openPrice = kline[1]
    highPrice = kline[2]
    lowPrice = kline[3]
    closePrice = kline[4]
    volume = kline[5]
    print(closePrice)
    newentry = BTCUSDT(
        datetime=fetchTime,
        open=openPrice,
        close=closePrice,
        low=lowPrice,
        high=highPrice,
        volume=volume
    )
    session.add(newentry)
    session.commit()
    s.enter(60, 1, add_price, argument=(lastPrice,))  # run function again in 1 minute
def onRemoveL(self):
    '''Remove the selected language and its related topics and questions.

    Asks for confirmation, bulk-deletes the Language row, clears the UI
    lists, then cascades the delete through topics and their questions.
    '''
    answer = QMessageBox.question(
        self,
        'Remove Language',
        "This operation will remove this language and it's topics.")
    if answer == QMessageBox.No:
        return
    row = self.lstLangs.currentRow()
    id = self.lstLangs.currentItem().statusTip()
    # NOTE(review): this is a Query object, so `if record:` below is always
    # truthy — it does not check whether a matching row exists.
    record = session.query(Language).filter(Language.id == id)
    if record:
        # Bulk delete of the language row(s) without syncing the session.
        record.delete(synchronize_session=False)
        self.lstLangs.takeItem(row)
        self.lstTopics.clear()
        ts = session.query(Topic).filter(Topic.lang_id == id).all()
        for t in ts:
            # Delete each topic's questions before the topic itself.
            qs = session.query(Question).filter(
                Question.topic_id == t.id).all()
            for q in qs:
                session.delete(q)
            session.delete(t)
        session.commit()
    self.btnRemoveL.setEnabled(False)
def process_command_output(queue):
    """Drain *queue* of shell commands, run each with a 60s cap, store results.

    For each command: insert a placeholder row (unless one exists), run
    the command via a temp shell script, capture stdout, then either
    insert the finished row or fill in the placeholder's output/duration.
    """
    while (not queue.empty()):
        command = queue.get()
        # Placeholder row: output is filled in after the command runs.
        c = Command(command, len(command), 0, 'fetching results...')
        # Skip the insert if this command is already in the table.
        command_db = session.query(Command).filter_by(
            command_string=command).first()
        if (command_db is None):
            session.add(c)
            session.commit()
        # Write the command into a per-process temp script.
        file = open('test_' + str(os.getpid()) + '.sh', 'w')
        file.write(command)
        file.close()
        start = time.time()
        p = subprocess.Popen(['sh', 'test_' + str(os.getpid()) + '.sh'],
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        out = ''
        # Poll until the process finishes or 60 seconds elapse.
        while (p.returncode == None and time.time() - start < 60):
            p.poll()
            # Accumulate stdout line by line.
            out = out + p.stdout.readline()
        t = time.time() - start
        # Still running after the deadline: kill it.
        if (p.returncode == None):
            p.kill()
        # Timed-out commands are recorded with a duration of 0.
        if (t > 60):
            t = 0.0
        # Final row with the real duration and captured output.
        c = Command(command, len(command), math.ceil(t), out)
        # The placeholder inserted above will be found here, so this
        # branch normally updates rather than inserts.
        command_db = session.query(Command).filter_by(
            command_string=command).first()
        if (command_db is None):
            session.add(c)
            session.commit()
        else:
            if (command_db.output == 'fetching results...'):
                command_db.output = out
                command_db.duration = t
                session.commit()
        # Remove the temporary script.
        os.remove('test_' + str(os.getpid()) + '.sh')
def upload_fb():
    """Import Facebook message exports for a Google-authenticated user.

    Verifies the Google ID token, finds or creates the User, then for each
    uploaded file creates a MessageThread and scrapes its messages into
    Message rows. Returns ('Error', 401) when a per-file commit fails,
    the exception text on other failures, or a success JSON with 200.
    """
    body = request.get_json()
    token = body.get('token')
    files = body.get('files')
    i = 0
    try:
        # Validate the Google ID token and its issuer.
        idinfo = id_token.verify_oauth2_token(token, requests.Request(),
                                              environ.get("GOOGLE_CLIENT_ID"))
        if idinfo['iss'] not in [
            'accounts.google.com', 'https://accounts.google.com'
        ]:
            raise ValueError('Wrong issuer')
        userid = idinfo['sub']
        try:
            user = session.query(User).filter(User.id == userid).one()
        except:
            # No existing user (or lookup failed): create one.
            user = User(id=userid)
            session.add(user)
            session.commit()
        for text in files:
            try:
                # One thread per uploaded file; `i` doubles as thread id
                # and placeholder person name.
                new_thread = MessageThread(id=i, user_id=user.id, person=str(i))
                session.add(new_thread)
                i += 1
                try:
                    session.commit()
                except:
                    session.rollback()
                # Strip timezone suffixes before scraping the export page.
                p, msgs = scrapePage(
                    str(i), text.replace(' EDT', '').replace(' EST', ''))
                for m in msgs:
                    date = datetime.fromtimestamp(m['date'])
                    msg = Message(thread_id=new_thread.id, type='FB',
                                  body=m['body'], date=date,
                                  user_speaking=m['user_speaking'])
                    session.add(msg)
            except Exception as e:
                return str(e)
            try:
                session.commit()
            except:
                session.rollback()
                return 'Error', 401
        return json.dumps({'msg': 'Success'}), 200
    except Exception as e:
        session.rollback()
        return str(e)
def parse2(): data_dict = parseCsv( '../data/csv/DatafinitiElectronicsProductsPricingData.csv', [ 'name', 'brand', 'categories', 'dateAdded', 'manufacturer', 'primaryCategories', 'prices.amountMin' ], 'name') # Adding Products to database: products_count = 0 categories_count = 0 links_count = 0 for row in data_dict: new_product = Product(row['name'], row['brand'], row['manufacturer'], row['dateAdded'], int(float(row['prices.amountMin']))) session.add(new_product) session.commit() products_count += 1 session.refresh(new_product) # Adding categories to database: category_names = row['categories'].split(',') for category_name in category_names: checked_categories = session.query(Category).filter( Category.name == category_name).all() if len(checked_categories) == 0: new_category = Category(category_name, row['primaryCategories']) session.add(new_category) session.commit() categories_count += 1 session.refresh(new_category) # Adding link: ins = links_products_categories.insert().values( product_id=new_product.id, category_id=new_category.id) session.execute(ins) else: ins = links_products_categories.insert().values( product_id=new_product.id, category_id=checked_categories[0].id) session.execute(ins) links_count += 1 session.commit() print( f'Added {products_count} products, {categories_count} categories and {links_count} links.' ) input('\nPress any key to continue...')
def getProductsByCategory():
    """Scrape Amazon search results for a user-entered category.

    Each scraped product is inserted (with a random price when none was
    parsed), the category is created on first use, and product-category
    links are written to the association table. Raises when the scrape
    returns no products.
    """
    category = input('Enter category for scrapping: ')
    data = scrape(f'https://www.amazon.com/s?k={category}')
    if not data['products']:
        raise Exception('Null')
    products_count = 0
    categories_count = 0
    links_count = 0
    for raw in data['products']:
        print(f"Get product from {raw['url']}")
        if raw['price'] != None:
            # Parse the first decimal number out of the price string.
            price = int(float(re.findall("\d+\.\d+", raw['price'])[0]))
        else:
            # No price scraped: substitute a random placeholder value.
            price = random.randint(200, 5000)
        new_product = Product(raw['title'], '', '',
                              datetime.today().strftime('%Y-%m-%d'), price)
        session.add(new_product)
        session.commit()
        products_count += 1
        # Refresh to obtain the generated product id for the link row.
        session.refresh(new_product)
        # Adding categories to database:
        checked_categories = session.query(Category).filter(
            Category.name == category).all()
        if len(checked_categories) == 0:
            # First product in this category: create the category row.
            new_category = Category(category, category)
            session.add(new_category)
            session.commit()
            categories_count += 1
            session.refresh(new_category)
            # Adding link:
            ins = links_products_categories.insert().values(
                product_id=new_product.id, category_id=new_category.id)
            session.execute(ins)
        else:
            ins = links_products_categories.insert().values(
                product_id=new_product.id,
                category_id=checked_categories[0].id)
            session.execute(ins)
        links_count += 1
        session.commit()
    print(
        f'\nAdded {products_count} products, {categories_count} categories and {links_count} links.'
    )
    input('\nPress any key to continue...')
def post(self):
    """Create a Repo from the parsed 'creator'/'name' arguments; 201 on success."""
    args = parser.parse_args()
    new_repo = Repo(creator=args['creator'], name=args['name'])
    session.add(new_repo)
    session.commit()
    return new_repo, 201
def increment_cnt(self, repo):
    """Bump the repo's access counter by one and persist the change."""
    current = int(repo.access_cnt)
    repo.access_cnt = current + 1
    session.add(repo)
    session.commit()
def hello_2():
    """Insert a SomeTable row with val=14 and return it as JSON with 201."""
    record = SomeTable(val=14)
    session.add(record)
    session.commit()
    payload = {"id": record.id, "val": record.val}
    return jsonify(payload), 201
def grab_match(id):
    """Fetch and parse Dota match details for *id* (Python 2 code).

    Already-parsed matches are skipped (return -1) unless full crawling
    is enabled, in which case the old match and its players are dropped
    first. Returns -1 on any fetch/parse failure.
    """
    # check if the id already exists
    if session.query(Match).filter(Match.id == id).first() is not None:
        print " Match already parsed."
        if enableFullCrawl == False:
            return -1  # already parsed
        else:
            print " Dropping this match"
            # drop this match and associated players so it can be re-crawled
            session.delete(session.query(Match).filter(Match.id == id).first())
            players_in_this_match = session.query(Player).filter(
                Player.match_id == id).all()
            for p in players_in_this_match:
                session.delete(p)
            session.commit()
            print " Match dropped."
    print "Parsing new match " + str(id)
    abandons = 0
    req = get_match_details(id)
    if req == None:
        print "Got None, skipping...", id
        return -1
    # NOTE(review): req.json() is parsed twice here — the second call below
    # could reuse json_req.
    json_req = req.json()
    try:
        details = req.json()["result"]
    except KeyError:
        print "Weird results, skipping..."
        pp.pprint(json_req)
        return -1
    try:
        # Copy the fields we persist out of the API response.
        match = {}
        match["id"] = details["match_id"]
        match["barracks_status_dire"] = details["barracks_status_dire"]
        match["barracks_status_radiant"] = details["barracks_status_radiant"]
        match["cluster"] = details["cluster"]
        match["duration"] = details["duration"]
        match["first_blood_time"] = details["first_blood_time"]
        match["game_mode"] = details["game_mode"]
        match["human_players"] = details["human_players"]
        match["leagueid"] = details["leagueid"]
        match["negative_votes"] = details["negative_votes"]
        match["positive_votes"] = details["positive_votes"]
        match["radiant_win"] = details["radiant_win"]
        # apparently season got removed
        # match["season"] = details["season"]
        match["season"] = ""
        match["starttime"] = datetime.datetime.fromtimestamp(
            details["start_time"])
        match["tower_status_dire"] = details["tower_status_dire"]
        match["tower_status_radiant"] = details["tower_status_radiant"]
    except KeyError, e:
        # Log the failure with a traceback and dump the offending payload.
        print datetime.datetime.now(), "Exception:", e, sys.exc_info()[0]
        print '-' * 60
        traceback.print_exc(file=sys.stdout)
        print '-' * 60
        pp.pprint(details)
def sure_out_api(): """ user,flag,card_id,money, :return: """ post_data = request.get_json() flag = post_data['flag'] user_phone = post_data["user"] money = int(post_data["money"]) print(money) user = session.query(User).filter_by(phone_num=user_phone).all()[0] # 获取用户 user_bank_cards = user.band_cards # 获取用户对应银行信息表 if flag == "转入": card_id = post_data["card_id"] if "余额" in card_id: card_id = post_data["card_id"] user_balance_data = user.user_balance_finances # 根据表关系查询该用户下的用户余额宝表 user_balance_money = user_balance_data[0].paid_money if user_balance_data else 0 # 获取用户余额宝总额 user.user_accounts[0].user_balance -= money user_balance_data[0].paid_money += money session.commit() insert_detail(user.id, "余额宝充值", money) return jsonify({ "status": 0, "mdg": "转入成功!" }) elif "银行" in card_id: user_balance_data = user.user_balance_finances # 根据表关系查询该用户下的用户余额宝表 user_balance_money = user_balance_data[0].paid_money if user_balance_data else 0 # 获取用户余额宝总额 if user.user_accounts[0].user_balance >= money: if user_balance_money: user_balance_data[0].paid_money += money else: ubf = UserBalanceFinance() ubf.user_id = user.id ubf.paid_money = money session.add(ubf) user.user_accounts[0].user_balance -= money session.commit() insert_detail(user.id, "余额宝充值", money) return jsonify({ "status": 0, "mdg": "转入成功!" }) else: return jsonify({ "status": 1, "mdg": "账户余额不足,转入失败!" }) else: return jsonify({ "status": 3, "msg": "别胡来" }) elif flag == "转出": try: card_id = post_data["card_id"] print(card_id) user_balance_data = user.user_balance_finances # 根据表关系查询该用户下的用户余额宝表 user_balance_money = user_balance_data[0].paid_money if user_balance_data else 0 # 获取用户余额宝总额 user_balance_data[0].paid_money -= money session.commit() insert_detail(user.id, "余额宝提现", -money) return jsonify({ "status": 333, "mdg": "转出成功!" 
}) except: user_balance_data = user.user_balance_finances # 根据表关系查询该用户下的用户余额宝表 user_balance_money = user_balance_data[0].paid_money if user_balance_data else 0 # 获取用户余额宝总额 user_balance_data[0].paid_money -= money user.user_accounts[0].user_balance += money session.commit() insert_detail(user.id, "余额宝提现", money) return jsonify({ "status": 0, "mdg": "转出成功!" }) else: return jsonify({ "status": 1, 'msg': "没有此功能" })
def key_date(json_dict):
    # Sort key: the timestamp at which the SMS was sent.
    return json_dict['sent']


# Script body: fetch incoming SMS messages, record each as a Received row
# keyed to the matching Sent record, then dispatch one answer per phone.
data = get_input_sms(url)
sorted_data = sorted(data, key=key_date)
phone, message, input_id, used_phones = '', '', '', []
for dict_elem in sorted_data:
    phone = '+%s' % dict_elem['phone']
    message = dict_elem['message']
    input_id = dict_elem['id']
    # NOTE(review): `client` is looked up but never used — confirm whether
    # a client check was intended here.
    client = session.query(Clients).filter(Clients.phone == phone).first()
    sent = session.query(Sent).filter(Sent.phone == phone).first()
    sms_id = sent.sms_id
    answer = Received(sms_id=sent.sms_id, phone=phone, mes=message, input_id=input_id)
    session.add(answer)
    session.commit()
    print(dict_elem)
    print(sms_id)
    print(message)
    # Only the first (oldest) message per phone triggers an answer.
    if phone in used_phones:
        continue
    used_phones.append(phone)
    get_answers(sms_id, phone, message)
def on_delete(self, req, resp, id): try: submittedFile = self._get_submitted_file(req, resp, id) if submittedFile: # Kontrola casu (soubory lze mazat jen pred deadline) eval_id = submittedFile.evaluation task = session.query(model.Task).\ join(model.Module, model.Module.task == model.Task.id).\ join(model.Evaluation, model.Evaluation.module == model.Module.id).\ filter(model.Evaluation.id == submittedFile.evaluation).\ first() if task.time_deadline < datetime.datetime.utcnow(): req.context['result'] = { 'result': 'error', 'error': ('Nelze smazat soubory po termínu ' 'odevzdání úlohy') } return try: os.remove(submittedFile.path) evaluation = session.query(model.Evaluation).get(eval_id) if evaluation: evaluation.full_report +=\ (str(datetime.datetime.now()) + " : removed file " + submittedFile.path + '\n') session.delete(submittedFile) session.commit() # Pokud resitel odstranil vsechny soubory, odstranime # evaluation. if evaluation: files_cnt = session.query(model.SubmittedFile).\ filter(model.SubmittedFile.evaluation == eval_id).\ count() if files_cnt == 0: session.delete(evaluation) session.commit() req.context['result'] = {'status': 'ok'} except OSError: req.context['result'] = { 'status': 'error', 'error': 'Soubor se nepodařilo odstranit z filesystému' } return except exc.SQLAlchemyError: req.context['result'] = { 'status': 'error', 'error': ('Záznam o souboru se nepodařilo odstranit z' ' databáze') } return else: if resp.status == falcon.HTTP_404: req.context['result'] = { 'status': 'error', 'error': 'Soubor nenalezen na serveru' } elif resp.status == falcon.HTTP_403: req.context['result'] = { 'status': 'error', 'error': 'K tomuto souboru nemáte oprávnění' } else: req.context['result'] = { 'status': 'error', 'error': 'Soubor se nepodařilo získat' } resp.status = falcon.HTTP_200 except SQLAlchemyError: session.rollback() raise finally: session.close()
def post(self): parsed_args = parser.parse_args() todo = Todo(task=parsed_args['task']) session.add(todo) session.commit() return todo, 201
def on_post(self, req, resp):
    """Register a new user: create User, Profile and UserNotify rows and
    send a confirmation e-mail.

    Each creation step undoes the previously created rows on failure so
    registration is effectively all-or-nothing.
    """
    data = json.loads(req.stream.read().decode('utf-8'))
    try:
        # Reject duplicate e-mail addresses up front.
        existing_user = session.query(model.User).\
            filter(model.User.email == data['email']).\
            first()
        if existing_user is not None:
            req.context['result'] = {'error': "duplicate_user"}
            return
    except SQLAlchemyError:
        session.rollback()
        raise
    try:
        if 'nick_name' not in data:
            data['nick_name'] = ""
        user = model.User(email=data['email'],
                          password=auth.get_hashed_password(
                              data['password']),
                          first_name=data['first_name'],
                          last_name=data['last_name'],
                          nick_name=data['nick_name'],
                          sex=data['gender'],
                          short_info=data["short_info"])
        session.add(user)
        session.commit()
    except SQLAlchemyError:
        session.rollback()
        req.context['result'] = {
            'error': "Nelze vytvořit uživatele, kontaktuj prosím orga."
        }
        raise
    try:
        profile = model.Profile(
            user_id=user.id,
            addr_street=data['addr_street'],
            addr_city=data['addr_city'],
            addr_zip=data['addr_zip'],
            addr_country=data['addr_country'].lower(),
            school_name=data['school_name'],
            school_street=data['school_street'],
            school_city=data['school_city'],
            school_zip=data['school_zip'],
            school_country=data['school_country'].lower(),
            school_finish=int(data['school_finish']),
            tshirt_size=data['tshirt_size'].upper(),
            referral=data.get('referral', "{}"))
    except BaseException:
        # Undo the user row created above, then re-raise.
        session.delete(user)
        session.commit()
        req.context['result'] = {
            'error': "Nelze vytvořit profil, kontaktuj prosím orga."
        }
        raise
    try:
        session.add(profile)
        session.commit()
    except SQLAlchemyError:
        session.rollback()
        raise
    try:
        # Notification preferences default to True when not supplied.
        notify = model.UserNotify(
            user=user.id,
            auth_token=util.user_notify.new_token(),
            notify_eval=data['notify_eval']
            if 'notify_eval' in data else True,
            notify_response=data['notify_response']
            if 'notify_response' in data else True,
            notify_ksi=data['notify_ksi'] if 'notify_ksi' in data else True,
            notify_events=data['notify_events']
            if 'notify_events' in data else True,
        )
    except BaseException:
        # Undo profile and user, then re-raise.
        session.delete(profile)
        session.commit()
        session.delete(user)
        session.commit()
        req.context['result'] = {
            'error':
                "Nelze vytvořit notifikační záznam, kontaktuj prosím orga."
        }
        raise
    try:
        session.add(notify)
        session.commit()
    except SQLAlchemyError:
        session.rollback()
        raise
    try:
        util.mail.send(
            user.email,
            '[KSI-WEB] Potvrzení registrace do Korespondenčního semináře '
            'z informatiky',
            'Ahoj!<br/>Vítáme tě v Korespondenčním '
            'semináři z informatiky Fakulty informatiky Masarykovy '
            'univerzity. Nyní můžeš začít řešit naplno. Stačí se přihlásit'
            ' na https://ksi.fi.muni.cz pomocí e-mailu a zvoleného hesla. '
            'Přejeme ti hodně úspěchů při řešení semináře!<br/><br/>KSI')
    except SQLAlchemyError:
        # NOTE(review): mail sending is unlikely to raise SQLAlchemyError;
        # this handler probably never fires — confirm the intended
        # exception type (e.g. SMTP errors).
        exc_type, exc_value, exc_traceback = sys.exc_info()
        traceback.print_exception(exc_type, exc_value, exc_traceback,
                                  file=sys.stderr)
    session.close()
    req.context['result'] = {}
def delete_data(self, model, schema, id): session.query(model).filter(model.id == id).delete() session.commit() return f'Removed id {id}'
def update_data(self, model, schema, payload, id): session.query(model).filter(model.id == id).update(payload) session.commit() data = session.query(model).filter(model.id == id).first() return schema.dump(data)
def delete_users(user_id: int): user = session.query(UserTable).filter(UserTable.id == user_id).delete() session.commit() return read_users
async def create_user(name: str, age: int): user = UserTable() user.name = name user.age = age session.add(user) session.commit()
def _evaluate_code(self, req, module, user, resp, data): try: # Pokud neni modul autocorrrect, pridavame submitted_files # k jednomu evaluation. # Pokud je autocorrect, pridavame evaluation pro kazde vyhodnoceni # souboru. existing = util.module.existing_evaluation(module.id, user.id) if (not module.autocorrect) and (len(existing) > 0): evaluation = session.query(model.Evaluation).get(existing[0]) evaluation.time = datetime.datetime.utcnow() else: evaluation = model.Evaluation(user=user.id, module=module.id, full_report="", ok=False) session.add(evaluation) session.commit() code = model.SubmittedCode(evaluation=evaluation.id, code=data) session.add(code) session.commit() if not module.autocorrect: session.commit() req.context['result'] = {'result': 'ok'} return reporter = util.programming.Reporter(max_size=50*1000) # prevent database overflow try: result = util.programming.evaluate( module.task, module, user.id, data, evaluation.id, reporter ) except util.programming.ENoFreeBox as e: result = { 'result': 'error', 'message': ('Přesáhnut maximální počet souběžně běžících ' 'opravení, zkuste to za chvíli.') } except Exception as e: if not isinstance(e, util.programming.EIsolateError): reporter += traceback.format_exc() result = { 'result': 'error', 'message': ('Nastala chyba při vykonávání kódu, kontaktuj ' 'organizátora') } evaluation.points = result['score'] if 'score' in result else 0 evaluation.ok = (result['result'] == 'ok') evaluation.full_report += (str(datetime.datetime.now()) + " : " + reporter.report_truncated + '\n') session.commit() if 'actions' in result: for action in result['actions']: reporter += "Performing %s...\n" % (action) util.module.perform_action(module, user, action) if user.is_org(): result['report'] = reporter.report_truncated req.context['result'] = result except SQLAlchemyError: session.rollback() raise finally: session.close()
async def crawl(): dogdrip = Dogdrip() async with dogdrip: await dogdrip.run() session.commit() print('ended')
def on_post(self, req, resp, id):
    """Accept a solution submission for module *id* and dispatch it to
    the evaluator matching the module type (general upload, programming,
    text, quiz, sortable)."""
    try:
        user = req.context['user']
        if not user.is_logged_in():
            resp.status = falcon.HTTP_400
            return
        module = session.query(model.Module).get(id)
        # Check for custom assignment (per-user module data override).
        custom = session.query(model.ModuleCustom).get((id, user.id))
        if custom is not None:
            module.data = custom.data
        # NOTE(review): this None-check runs after `module.data` may have
        # been touched above — a missing module would raise
        # AttributeError before reaching it.
        if not module:
            resp.status = falcon.HTTP_404
            req.context['result'] = {
                'result': 'error',
                'error': 'Neexistující modul'
            }
            return
        # No submissions after the task deadline.
        if session.query(model.Task).get(module.task).time_deadline < \
                datetime.datetime.utcnow():
            req.context['result'] = {
                'result': 'error',
                'error': 'Nelze odevzdat po termínu odevzdání úlohy'
            }
            return
        if module.type == ModuleType.GENERAL:
            self._upload_files(req, module, user.id, resp)
            return
        # Submission-rate limit (organizers are exempt).
        if not user.is_org():
            subm_in_last_day = session.query(model.Evaluation).\
                filter(model.Evaluation.user == user.id,
                       model.Evaluation.module == id,
                       model.Evaluation.time >=
                       datetime.datetime.utcnow() -
                       datetime.timedelta(days=1)).\
                count()
            if subm_in_last_day >= 20:
                req.context['result'] = {
                    'result': 'error',
                    'error': ('Překročen limit odevzdání '
                              '(20 odevzdání / 24 hodin).')
                }
                return
        data = json.loads(req.stream.read().decode('utf-8'))['content']
        if module.type == ModuleType.PROGRAMMING:
            self._evaluate_code(req, module, user, resp, data)
            return
        elif module.type == ModuleType.TEXT:
            reporter = util.programming.Reporter()
            try:
                result = util.text.evaluate(module.task, module, data,
                                            reporter)
            except util.text.ECheckError:
                result = {
                    'result': 'error',
                    'message': ('Při opravování nastala výjimka, kontaktuj'
                                ' organizátora!')
                }
            result['report'] = reporter.report
        elif module.type == ModuleType.QUIZ:
            ok, report = util.quiz.evaluate(module.task, module, data)
            result = {
                'result': 'ok' if ok else 'nok',
                'report': report,
            }
        elif module.type == ModuleType.SORTABLE:
            ok, report = util.sortable.evaluate(module.task, module, data)
            result = {
                'result': 'ok' if ok else 'nok',
                'report': report,
            }
        # Score: explicit grader score if present, otherwise full points
        # on 'ok' and zero otherwise.
        if 'score' in result:
            score = result['score']
        else:
            score = module.max_points \
                if result['result'] == 'ok' else 0
        evaluation = model.Evaluation(
            user=user.id,
            module=module.id,
            points=score,
            full_report=result['report'],
            ok=(result['result'] == 'ok')
        )
        # Evaluator reports may embed "action ..." lines with side
        # effects to perform.
        for l in result['report'].split('\n'):
            if l.startswith('action '):
                util.module.perform_action(module, user, l.strip())
        req.context['result'] = result
        # Non-organizers must not see the full report.
        if 'report' in req.context['result'] and not user.is_org():
            del req.context['result']['report']
        session.add(evaluation)
        session.commit()
    except SQLAlchemyError:
        session.rollback()
        raise
    finally:
        session.close()
def post(self): parsed_args = parser.parse_args() account = Account(task=parsed_args['account_name']) session.add(account) session.commit() return account, 201
def _upload_files(self, req, module, user_id, resp):
    """Store uploaded multipart files for a GENERAL-module submission,
    appending to an existing evaluation when one exists."""
    # Refuse streams without a declared Content-Length.
    if not req.content_length:
        resp.status = falcon.HTTP_411
        req.context['result'] = {
            'result': 'error',
            'error': 'Nelze nahrát neukončený stream.'
        }
        return
    # Refuse oversized uploads.
    if req.content_length > util.config.MAX_UPLOAD_FILE_SIZE:
        resp.status = falcon.HTTP_413
        req.context['result'] = {
            'result': 'error',
            'error': 'Maximální velikost dávky je 20 MB.'
        }
        return
    # If submitted files already exist, do not create a new evaluation;
    # append to the existing one.
    try:
        existing = util.module.existing_evaluation(module.id, user_id)
        if len(existing) > 0:
            evaluation = session.query(model.Evaluation).get(existing[0])
            evaluation.time = datetime.datetime.utcnow()
            report = evaluation.full_report
        else:
            report = (str(datetime.datetime.now()) +
                      ' : === Uploading files for module id \'%s\' for '
                      'task id \'%s\' ===\n' % (module.id, module.task))
            evaluation = model.Evaluation(user=user_id,
                                          module=module.id,
                                          ok=True)
            session.add(evaluation)
            session.commit()
        # Only a limited number of files may be uploaded per solution.
        file_cnt = session.query(model.SubmittedFile).\
            filter(model.SubmittedFile.evaluation ==
                   evaluation.id).count()
        if file_cnt > util.config.MAX_UPLOAD_FILE_COUNT:
            resp.status = falcon.HTTP_400
            req.context['result'] = {
                'result': 'error',
                'error': 'K řešení lze nahrát nejvýše 20 souborů.'
            }
            return
    except SQLAlchemyError:
        session.rollback()
        raise
    dir = util.module.submission_dir(module.id, user_id)
    try:
        os.makedirs(dir)
    except OSError:
        # Directory probably exists already; verified just below.
        pass
    if not os.path.isdir(dir):
        resp.status = falcon.HTTP_400
        req.context['result'] = {
            'result': 'error',
            'error': 'Chyba 42, kontaktuj orga.'
        }
        return
    files = multipart.MultiDict()
    content_type, options = multipart.parse_options_header(
        req.content_type)
    boundary = options.get('boundary', '')
    if not boundary:
        raise multipart.MultipartError(
            "No boundary for multipart/form-data.")
    # Parse the multipart stream; numeric args are parser buffer/size
    # limits (memfile/disk limits) per the multipart library's API.
    for part in multipart.MultipartParser(req.stream, boundary,
                                          req.content_length,
                                          2**30, 2**20, 2**18,
                                          2**16, 'utf-8'):
        path = '%s/%s' % (dir, part.filename)
        part.save_as(path)
        mime = magic.Magic(mime=True).from_file(path)
        report += (str(datetime.datetime.now()) +
                   ' : [y] uploaded file: \'%s\' (mime: %s) to '
                   'file %s\n' % (part.filename, mime, path))
        # If this file is already in the database, do not add the
        # record again.
        try:
            file_in_db = session.query(model.SubmittedFile).\
                filter(model.SubmittedFile.evaluation == evaluation.id).\
                filter(model.SubmittedFile.path == path).scalar()
            if file_in_db is None:
                submitted_file = model.SubmittedFile(
                    evaluation=evaluation.id,
                    mime=mime,
                    path=path)
                session.add(submitted_file)
        except SQLAlchemyError:
            session.rollback()
            raise
    evaluation.full_report = report
    try:
        session.add(evaluation)
        session.commit()
    except SQLAlchemyError:
        session.rollback()
        raise
    finally:
        session.close()
    req.context['result'] = {'result': 'ok'}
def init_postgis(tmpdir, dataset, key= None):
    """Populate PostGIS geometry/raster columns for one dataset: UMI point
    geometries, convex hulls, per-segment KDE density rasters, per-dataset
    2000x2000 RGB rasters, point collections, and alpha-shape concave hulls.

    NOTE(review): the final concave-hull loop does NOT commit (see the TODO
    at the bottom), so its results are lost unless the caller commits.
    """
    print("INITIAILIZING POSTGIS SQL")
    nm = dataset["dataset"]
    dsid = getDatasetId(nm)
    dsname = dsid
    # (Dead code removed in review: an earlier scipy ConvexHull-based hull
    # computation and hull/kde_density column resets, superseded by the
    # PostGIS ST_ConvexHull query below.)
    # Set each UMI's point geometry, then the convex hull of the UMI points
    # for every segment with more than 20 UMIs.
    raw_sql = text("""
        UPDATE umi SET xumi_xy=ST_SetSRID(ST_MakePoint(x, y),4326)
        FROM segment WHERE segment.id = umi.seg AND umi.dsid=:name;
        UPDATE segment SET hull = ch.hull FROM(
            SELECT umi.seg as seg, ST_ConvexHull(ST_Collect(xumi_xy)) as hull
            FROM umi JOIN segment ON segment.id= umi.seg
            WHERE segment.n_umis>20 AND segment.dsid=:name
            GROUP BY umi.seg
        ) as ch WHERE ch.seg = segment.id AND segment.dsid=:name;
    """).params(name=dsid)
    session.execute(raw_sql)

    def array2raster(newRasterfn,rasterOrigin,pixelWidth,pixelHeight,array):
        # Write a 2-D numpy array as a single-band byte GeoTIFF (EPSG:4326).
        cols = array.shape[1]
        rows = array.shape[0]
        originX = rasterOrigin[0]
        originY = rasterOrigin[1]
        driver = gdal.GetDriverByName('GTiff')
        outRaster = driver.Create(newRasterfn, cols, rows, 1, gdal.GDT_Byte)
        outRaster.SetGeoTransform((originX, pixelWidth, 0,
                                   originY, 0, pixelHeight))
        outband = outRaster.GetRasterBand(1)
        outband.WriteArray(array)
        outRasterSRS = osr.SpatialReference()
        outRasterSRS.ImportFromEPSG(4326)
        outRaster.SetProjection(outRasterSRS.ExportToWkt())
        outband.FlushCache()

    # For every big-enough segment, fit a gaussian KDE on its UMI
    # coordinates, rasterize it over the hull's bounding box, and store the
    # GeoTIFF bytes in the segment's kde_density raster column.
    cells = sq(Segment).filter(Segment.dsid==dsname).filter(Segment.n_umis>20)
    for i,c in enumerate(cells):
        print("SETTING")
        coords = np.array([(u.x,u.y) for u in c.umis]).T
        points_kde = stats.gaussian_kde(coords)
        sf = 20
        r = max(c.eval0, c.eval1)*sf  # NOTE(review): unused
        n = 50  # raster resolution (n x n grid)
        h = c.hull
        # The hull may come back as a hex WKB string; wrap it for to_shape.
        if type(h) == type("STR"):
            h = WKBElement(h)
        pts = to_shape(h).exterior.xy
        pts_df = pd.DataFrame(pts).T.rename({0:"x",1:"y"},axis="columns")
        xrange=[pts_df.x.min(), pts_df.x.max()]
        yrange=[pts_df.y.min(), pts_df.y.max()]
        X,Y = np.meshgrid(np.linspace(xrange[0],xrange[1],n),
                          np.linspace(yrange[0],yrange[1],n))
        grid =np.vstack([X.ravel(), Y.ravel()]).T
        vals = points_kde.evaluate(grid.T)
        vsquare = vals.reshape(n,n)
        # Normalize to the 0-255 byte range for the GeoTIFF band.
        array =( vsquare / np.max(vsquare) * 255).astype(int)
        reversed_arr = array[::-1] # reverse array so the tif looks like the array
        rasterOrigin = (xrange[0], yrange[0])
        pixelWidth = (xrange[1] - xrange[0]) / n
        pixelHeight = (yrange[1] - yrange[0]) / n
        newRasterfn = 'test.tif'
        # convert array to raster
        array2raster(newRasterfn,rasterOrigin,pixelWidth,pixelHeight,array)
        # Read the GeoTIFF back as hex and feed it to ST_FromGDALRaster.
        hexbits = open(newRasterfn,"rb").read().hex()
        sub = r"\x" + f"{hexbits}"
        query = f"""UPDATE segment SET kde_density=ST_FromGDALRaster('{sub}') WHERE id={c.id}"""
        session.execute(query)
        session.commit()
    # Sample each UMI's KDE value from the raster, then recompute convex
    # hulls at three KDE thresholds plus centroid, area and per-segment
    # pseudo-random r/g/b color values.
    raw_sql = text("""
        UPDATE umi SET kde_val= ST_NearestValue(kde_density, ST_SetSRID(umi.xumi_xy,4326 ))
        FROM segment WHERE segment.id = umi.seg AND umi.dsid=:name;
        UPDATE segment SET hull1 = ch.hull FROM(
            SELECT umi.seg as seg, ST_ConvexHull(ST_Collect(xumi_xy)) as hull
            FROM umi JOIN segment ON segment.id= umi.seg
            WHERE segment.kde_density is not null AND umi.kde_val >= 1 AND segment.dsid=:name
            GROUP BY umi.seg
        ) as ch WHERE ch.seg = segment.id AND segment.dsid=:name;
        UPDATE segment SET hull128 = ch.hull FROM(
            SELECT umi.seg as seg, ST_ConvexHull(ST_Collect(xumi_xy)) as hull
            FROM umi JOIN segment ON segment.id= umi.seg
            WHERE segment.kde_density is not null AND umi.kde_val >= 128 AND segment.dsid=:name
            GROUP BY umi.seg
        ) as ch WHERE ch.seg = segment.id AND segment.dsid=:name;
        UPDATE segment SET hull12 = ch.hull FROM(
            SELECT umi.seg as seg, ST_ConvexHull(ST_Collect(xumi_xy)) as hull
            FROM umi JOIN segment ON segment.id= umi.seg
            WHERE segment.kde_density is not null AND umi.kde_val >= 12 AND segment.dsid=:name
            GROUP BY umi.seg
        ) as ch WHERE ch.seg = segment.id AND segment.dsid=:name;
        UPDATE segment SET center = ch.centroid FROM(
            SELECT umi.seg as seg, ST_Centroid(ST_Collect(xumi_xy)) as centroid
            FROM umi JOIN segment ON segment.id= umi.seg
            WHERE segment.kde_density is not null AND segment.dsid=:name
            GROUP BY umi.seg
        ) as ch WHERE ch.seg = segment.id AND segment.dsid=:name;
        UPDATE segment SET area12 = ST_Area(hull12);
        update segment set rval=(5915587277.0*id)%255/256 WHERE segment.dsid=:name;
        update segment set gval=(5915587277.0*id)/256%255/256 WHERE segment.dsid=:name;
        update segment set bval=(5915587277.0*id)/256/256%255/256 WHERE segment.dsid=:name;
    """).params(name=dsid)
    session.execute(raw_sql)
    # Build per-dataset 2000x2000 R/G/B rasters by resampling every
    # segment's KDE raster scaled by its rval/gval/bval, then stack them
    # into a 3-band raster.
    raw_sql2 = text("""
        UPDATE dataset SET raster_2k_red = ST_MapAlgebra( ST_AddBand( ST_MakeEmptyRaster(2000,2000, -20, -20, .02, .02, 0, 0, 4326), 1, '8BUI'::text, 5, 0), images.union_img, '[rast1]*0+[rast2]', null, 'UNION')
        FROM (
            SELECT rs.dsid as dsid, ST_Union(resampled, 'SUM' ) as union_img
            FROM (
                SELECT ST_MapAlgebra( ST_Resample(kde_density, ST_MakeEmptyRaster(2000,2000, -20, -20, .02, .02, 0, 0, 4326),'Bilinear'), 1,null,FORMAT('[rast] * %s ',segment.rval) ) AS resampled, dsid
                FROM segment
                WHERE segment.kde_density IS NOT null AND segment.dsid=:name
            ) AS rs GROUP BY rs.dsid
        ) AS images
        WHERE images.dsid = dataset.id AND dataset.id=:name;
        UPDATE dataset SET raster_2k_green = ST_MapAlgebra( ST_AddBand( ST_MakeEmptyRaster(2000,2000, -20, -20, .02, .02, 0, 0, 4326), 1, '8BUI'::text, 5, 0), images.union_img, '[rast1]*0+[rast2]', null, 'UNION')
        FROM (
            SELECT rs.dsid as dsid, ST_Union(resampled, 'SUM' ) as union_img
            FROM (
                SELECT ST_MapAlgebra( ST_Resample(kde_density, ST_MakeEmptyRaster(2000,2000, -20, -20, .02, .02, 0, 0, 4326),'Bilinear'), 1,null,FORMAT('[rast] * %s ',segment.gval) ) AS resampled, dsid
                FROM segment
                WHERE segment.kde_density IS NOT null AND segment.dsid=:name
            ) AS rs GROUP BY rs.dsid
        ) AS images
        WHERE images.dsid = dataset.id AND dataset.id=:name;
        UPDATE dataset SET raster_2k_blue = ST_MapAlgebra( ST_AddBand( ST_MakeEmptyRaster(2000,2000, -20, -20, .02, .02, 0, 0, 4326), 1, '8BUI'::text, 5, 0), images.union_img, '[rast1]*0+[rast2]', null, 'UNION')
        FROM (
            SELECT rs.dsid as dsid, ST_Union(resampled, 'SUM' ) as union_img
            FROM (
                SELECT ST_MapAlgebra( ST_Resample(kde_density, ST_MakeEmptyRaster(2000,2000, -20, -20, .02, .02, 0, 0, 4326),'Bilinear'), 1,null,FORMAT('[rast] * %s ',segment.bval) ) AS resampled, dsid
                FROM segment
                WHERE segment.kde_density IS NOT null AND segment.dsid=:name
            ) AS rs GROUP BY rs.dsid
        ) AS images
        WHERE images.dsid = dataset.id AND dataset.id=:name;
        UPDATE dataset SET raster_2k_all = ST_AddBand( ST_AddBand( raster_2k_red,raster_2k_green), raster_2k_blue) WHERE dataset.id=:name;
    """).params(name=dsid)
    session.execute(raw_sql2)
    # Collect per-segment multipoint geometries: plain XY points plus XYM
    # variants carrying total_reads and kde_val as the M coordinate.
    raw_sql3 = text("""
        UPDATE segment SET points = sq.new_geo FROM (
            SELECT segment.id as seg_id, ST_Collect(umi.xumi_xy) as new_geo
            FROM segment JOIN umi ON umi.seg = segment.id
            WHERE segment.dsid=:name GROUP BY segment.id
        ) AS sq WHERE segment.id=sq.seg_id;
        UPDATE segment SET points_xym_total_reads = sq.new_geo FROM (
            SELECT segment.id AS seg_id, ST_Collect(ST_MakePointM(ST_X(umi.xumi_xy),ST_Y(umi.xumi_xy),umi.total_reads)) as new_geo
            FROM segment JOIN umi ON umi.seg = segment.id
            WHERE segment.dsid=:name GROUP BY segment.id
        ) AS sq WHERE segment.id = sq.seg_id;
        UPDATE segment SET points_xym_kde = sq.new_geo FROM (
            SELECT segment.id AS seg_id, ST_Collect(ST_MakePointM(ST_X(umi.xumi_xy),ST_Y(umi.xumi_xy),umi.kde_val)) as new_geo
            FROM segment JOIN umi ON umi.seg = segment.id
            WHERE segment.dsid=:name GROUP BY segment.id
        ) AS sq WHERE segment.id = sq.seg_id;
    """).params(name=dsid)
    session.execute(raw_sql3)
    # KDE-thresholded point subsets used for the concave hulls below.
    raw_sql4 = text("""
        UPDATE segment SET points12 = sq.new_geo FROM (
            SELECT segment.id as seg_id, ST_Collect(umi.xumi_xy) as new_geo
            FROM segment JOIN umi ON umi.seg = segment.id
            WHERE umi.kde_val > 12 AND segment.dsid=:name GROUP BY segment.id
        ) AS sq WHERE segment.id=sq.seg_id;
        UPDATE segment SET points128 = sq.new_geo FROM (
            SELECT segment.id as seg_id, ST_Collect(umi.xumi_xy) as new_geo
            FROM segment JOIN umi ON umi.seg = segment.id
            WHERE umi.kde_val > 128 AND segment.dsid=:name GROUP BY segment.id
        ) AS sq WHERE segment.id=sq.seg_id;
    """).params(name=dsid)
    session.execute(raw_sql4)
    session.commit()

    def alpha_shape(points, alpha):
        """
        Compute the alpha shape (concave hull) of a set of points.

        @param points: Iterable container of points.
        @param alpha: alpha value to influence the gooeyness of the border.
                      Smaller numbers don't fall inward as much as larger
                      numbers. Too large, and you lose everything!
        """
        if len(points) < 4:
            # When you have a triangle, there is no sense
            # in computing an alpha shape.
            return geometry.MultiPoint(list(points)).convex_hull
        coords = np.array([point.coords[0] for point in points])
        tri = Delaunay(coords)
        triangles = coords[tri.vertices]
        # Side lengths of every Delaunay triangle.
        a = ((triangles[:,0,0] - triangles[:,1,0]) ** 2 + (triangles[:,0,1] - triangles[:,1,1]) ** 2) ** 0.5
        b = ((triangles[:,1,0] - triangles[:,2,0]) ** 2 + (triangles[:,1,1] - triangles[:,2,1]) ** 2) ** 0.5
        c = ((triangles[:,2,0] - triangles[:,0,0]) ** 2 + (triangles[:,2,1] - triangles[:,0,1]) ** 2) ** 0.5
        s = ( a + b + c ) / 2.0
        areas = (s*(s-a)*(s-b)*(s-c)) ** 0.5  # Heron's formula
        circums = a * b * c / (4.0 * areas)   # circumradius
        # Keep only triangles whose circumradius is below 1/alpha.
        filtered = triangles[circums < (1.0 / alpha)]
        edge1 = filtered[:,(0,1)]
        edge2 = filtered[:,(1,2)]
        edge3 = filtered[:,(2,0)]
        edge_points = np.unique(np.concatenate((edge1,edge2,edge3)),
                                axis = 0).tolist()
        m = geometry.MultiLineString(edge_points)
        triangles = list(polygonize(m))
        return cascaded_union(triangles), edge_points

    # Replace each segment's hulls with alpha-shape concave hulls at three
    # KDE thresholds (all points / kde_val>12 / kde_val>128).
    for i,c in enumerate(sq(Segment).filter(Segment.dsid== dsid).filter(Segment.n_umis>20).all()):
        shp = to_shape(c.points)
        # Heuristic alpha scaled inversely with the point spread.
        alpha = .1/np.mean(np.var(np.array([p.coords[0] for p in shp]).T,1))**.5
        if i %20 == 0:
            print(i,alpha)
        concave_hull, edge_points = alpha_shape(shp, alpha=alpha)
        if concave_hull.geom_type == 'MultiPolygon':
            # Keep only the largest polygon. NOTE(review): iterating a
            # MultiPolygon directly is removed in shapely 2.x — confirm the
            # pinned shapely version.
            concave_hull = max(concave_hull, key=lambda a: a.area)
        wkb_element = from_shape(concave_hull)
        c.hull = wkb_element
        shp = to_shape(c.points12)
        alpha = .5/np.mean(np.var(np.array([p.coords[0] for p in shp]).T,1))**.5
        concave_hull, edge_points = alpha_shape(shp, alpha=alpha)
        if concave_hull.geom_type == 'MultiPolygon':
            concave_hull = max(concave_hull, key=lambda a: a.area)
        wkb_element = from_shape(concave_hull)
        c.hull12 = wkb_element
        shp = to_shape(c.points128)
        alpha = .5/np.mean(np.var(np.array([p.coords[0] for p in shp]).T,1))**.5
        concave_hull, edge_points = alpha_shape(shp, alpha=alpha)
        if concave_hull.geom_type == 'MultiPolygon':
            concave_hull = max(concave_hull, key=lambda a: a.area)
        wkb_element = from_shape(concave_hull)
        c.hull128 = wkb_element
        session.add(c)
    # TODO: PUT BACK IN COMMITS WHEN THIS CODE IS READY TO RUN!
    #session.commit()
    return 0
def parse_comments(self, bs, content, params):
    """Scrape every comment page for *content* and persist Comment rows
    with anonymized writer names and reply threading."""
    comments = session.query(models.Comment).\
        filter(models.Comment.cid == content.id).\
        all()
    if len(comments) > 0:
        # Comments for this content were already imported.
        print('alread updated')
        return
    comment_top = bs.find('div', id='comment_top')
    last_page = self.get_comment_last_page(comment_top)
    before_comment = None  # last top-level comment, for threading replies
    for i in range(last_page + 1):
        param = params  # NOTE(review): aliases (not copies) params; unused
        params['cpage'] = i
        res = BeautifulSoup(requests.get(self.base_url, params).text,
                            'html.parser')
        comment_list = res.find('div', id='commentbox').find(
            'div', attrs={'class': 'comment-list'})
        for comment_box in comment_list.findAll(
                lambda x: x.name == 'div' and 'class' in x.attrs and
                'comment-item' in x.attrs['class']):
            box = comment_box.select('> div')[0].select('> div')[0]
            try:
                text = box.find('div', attrs={'class': 'xe_content'}).text
            except Exception as e:
                # Comment body missing (e.g. deleted comment): skip it.
                continue
            if not text:
                # Empty body: stop processing this page.
                before_comment = None
                print('continued')
                break
            # Parse the date: relative Korean forms ("N days/hours/minutes
            # ago") or an absolute YYYY.MM.DD date.
            date = box.find('div').findAll('div')[-1].find('span').text
            delta = None
            if '일 전' in date:    # "days ago"
                delta = timedelta(days=int(date[0]))
            if '시간 전' in date:  # "hours ago"
                delta = timedelta(hours=int(date[0]))
            if '분 전' in date:    # "minutes ago"
                delta = timedelta(minutes=int(date[0]))
            if delta is not None:
                # Site times are KST = UTC + 9 hours.
                date = datetime.utcnow() + timedelta(hours=9) - delta
            else:
                date = datetime.strptime(date, '%Y.%m.%d')
            selected = box.select('div.comment-bar > div')
            if len(selected) == 0:
                selected = box.select('div.comment-bar-author > div')
            writer = selected[0].text.strip()
            # Anonymize the writer name with a short shake_128 hash.
            writer = hashlib.shake_128(writer.encode()).hexdigest(length=4)
            user = session.query(models.User).\
                filter(models.User.nickname == writer).\
                first()
            if user is None:
                user = models.User(nickname=writer)
                session.add(user)
                session.flush()
            comment = models.Comment(data=text, cid=content.id,
                                     created_at=date, uid=user.id)
            # 'depth' marks a reply: prefix the anonymized target and
            # attach it under the previous top-level comment.
            if 'depth' in comment_box.attrs['class'] and before_comment:
                target = box.select('span.ed.label-primary')[0].text.strip()[1:]
                target = hashlib.shake_128(target.encode()).hexdigest(length=4)
                comment.data = f'@{target} {comment.data}'
                comment.parent_id = before_comment.id
            else:
                before_comment = comment
            # NOTE(review): this existence check filters by uid only, so a
            # comment is stored only when its (anonymized) writer has no
            # prior comment anywhere — confirm this is intended.
            exist = session.query(models.Comment).\
                filter(models.Comment.uid == user.id).\
                first()
            if not exist:
                session.add(comment)
                session.flush()
            print(text)
    session.commit()
    return content
def add_role_to_user(self, user: '******', role: Role): relation = Group_User(group_id=self.id, user_id=user.id, role=role) session.add(relation) session.commit()
def parse_content(self, bs):
    """Parse a scraped article page into a persisted Content row.

    Mirrors embedded images/videos to S3 under content-hash filenames,
    anonymizes the writer, and converts relative Korean dates. Returns the
    Content row (the existing one when the title hash was seen before), or
    None on failure.
    """
    print('parse content')
    try:
        new = bs
        print(bs)
        title = new.select('h4')[0].text
        # Stable permanent id: blake2b hash of the title.
        m = hashlib.blake2b(digest_size=12)
        m.update(title.encode())
        hashed = m.hexdigest()
        date = None
        exist = session.query(models.Content).filter(
            models.Content.permanent_id == hashed).first()
        if exist:
            # Already imported: refresh timestamp/origin and bail out.
            if date is None:
                exist.created_at = datetime.utcnow() + timedelta(hours=9)
            else:
                exist.created_at = date
            exist.origin = enums.DataOriginEnum.DOGDRIP
            session.commit()
            print('passed')
            return exist
        date = None
        # Find the first parsable date span: relative Korean forms
        # ("N days/hours/minutes ago") or an absolute YYYY.MM.DD date.
        for date_obj in new.select('div.ed.flex.flex-wrap.flex-left.flex-middle.title-toolbar span.ed.text-xsmall.text-muted'):
            text = date_obj.text
            delta = None
            if '일 전' in text:    # "days ago"
                delta = timedelta(days=int(text[0]))
            if '시간 전' in text:  # "hours ago"
                delta = timedelta(hours=int(text[0]))
            if '분 전' in text:    # "minutes ago"
                delta = timedelta(minutes=int(text[0]))
            if delta is not None:
                # Site times are KST = UTC + 9 hours.
                date = datetime.utcnow() + timedelta(hours=9) - delta
            else:
                try:
                    date = datetime.strptime(text, '%Y.%m.%d')
                except:
                    print('continued')
                    continue
            break
        '''delta = None
        if '일 전' in date: delta = timedelta(days=int(date[0]))
        if '시간 전' in date: delta = timedelta(hours=int(date[0]))
        if '분 전' in date: delta = timedelta(minutes=int(date[0]))
        if delta is not None: date = datetime.utcnow() + timedelta(hours=9) - delta
        else: breakpoint() date = datetime.strptime(date, '%Y.%m.%d')'''
        writer = new.select('div.title-toolbar span')[0].text.strip()
        # Anonymize the writer name with a short shake_128 hash.
        writer = hashlib.shake_128(writer.encode()).hexdigest(length=4)
        user = models.User(nickname=writer)
        session.add(user)
        session.flush()
        # TODO 2: fetch the writer's real id and hash that instead.
        content = new.select('div.ed.article-wrapper.inner-container > div.ed > div')[1]
        content = content.select('div')[0]
        # Mirror every embedded image to S3 and rewrite its src.
        for img in content.select('img'):
            if 'img.sscroll.net' in img['src']:
                # Already on our mirror.
                print('continued')
                continue
            # Normalize relative URLs to absolute dogdrip.net URLs.
            if './' in img['src']:
                img['src'] = img['src'].replace('./', 'http://www.dogdrip.net/')
            elif img['src'].startswith('/'):
                img['src'] = 'https://www.dogdrip.net' + img['src']
            elif not img['src'].startswith('http'):
                img['src'] = 'https://www.dogdrip.net/' + img['src']
            if 'transparent' in img['src']:
                # NOTE(review): aborts the whole parse (returns None), not
                # just this image — confirm intended.
                return
            # Content-hash filename, preserving the extension.
            for_img = hashlib.sha256(img['src'].encode())
            last = img['src'].split('.')[-1]
            rename = for_img.hexdigest()
            rename += '.' + last
            urllib.request.urlretrieve(img['src'], rename)
            s3.upload_file(rename, bucket, 'upload/' + rename,
                           ExtraArgs={'ACL': 'public-read',
                                      'CacheControl': 'max-age=2592000'})
            os.remove(rename)
            img['src'] = 'http://img.sscroll.net/upload/'+ rename
        # Same mirroring for embedded <source> videos.
        for video in content.select('source'):
            if 'img.sscroll.net' in video['src']:
                print('continued')
                continue
            if './' in video['src']:
                video['src'] = video['src'].replace('./', 'http://www.dogdrip.net/')
            elif video['src'].startswith('/'):
                video['src'] = 'https://www.dogdrip.net' + video['src']
            elif not video['src'].startswith('http'):
                video['src'] = 'https://www.dogdrip.net/' + video['src']
            if 'transparent' in video['src']:
                # NOTE(review): same whole-parse abort as above.
                return
            for_img = hashlib.sha256(video['src'].encode())
            last = video['src'].split('.')[-1]
            rename = for_img.hexdigest()
            rename += '.' + last
            urllib.request.urlretrieve(video['src'], rename)
            s3.upload_file(rename, bucket, 'upload/' + rename,
                           ExtraArgs={'ACL': 'public-read',
                                      'CacheControl': 'max-age=2592000'})
            os.remove(rename)
            video['src'] = 'http://img.sscroll.net/upload/'+ rename
        content = content.decode()
    except Exception as e:
        # Any scraping failure aborts the parse with no DB row.
        print('exit')
        traceback.print_tb(e.__traceback__)
        return
    item = models.Content(title=title, data=content, permanent_id=hashed,
                          created_at=date,
                          origin=enums.DataOriginEnum.DOGDRIP, uid=user.id)
    if item.created_at is None:
        item.created_at = datetime.utcnow() + timedelta(hours=9)
    data = new.select('script[type="text/javascript"]')[0].text
    try:
        # Up/down vote counts are embedded in inline javascript; take the
        # first two numeric matches.
        up, down = filter(lambda x: x != '',
                          re.compile('[0-9]*').findall(data))
        item.up = up
        item.down = down
    except:
        # Vote counts are best-effort only.
        pass
    session.add(item)
    session.commit()
    print('added!')
    return item
def update_availability(data_event, availability): session.query(Events.availability, Events.data_event)\ .filter(Events.data_event == data_event)\ .update({"availability": availability}) session.commit()