def setUp(self):
    """Create an empty JSON fixture file, bind a Book to it, and cache
    the file's parsed contents for the assertions that follow."""
    self.path = "test_book_standard.json"
    with open(self.path, "w") as fh:
        json.dump([], fh)
    self.book = Book(path=self.path)
    with open(self.path, "r") as fh:
        self.data = json.load(fh)
def add_books():
    """Flask endpoint: add copies of a book to a library's inventory.

    Expects a JSON body with "password" (library credential), "isbn_code"
    and "number_of_books".  Aborts with 422 (Ukrainian message) on invalid
    input, wrong password or unknown ISBN; otherwise commits the change
    and returns 200.
    """
    session = get_session(Base, engine)
    fields = request.get_json()
    # Validate presence and format of all required fields.
    if not all_fields_present(fields, ["password", "number_of_books", "isbn_code"]) or not validate_fields(fields):
        abort(422, "Невірні дані")  # "Invalid data"
    # Authenticate the library by its password.
    library = session.query(Library).filter(Library.password == fields["password"]).first()
    if library is None:
        abort(422, "Неправильний пароль")  # "Wrong password"
    # Find the book by ISBN, or create it from an external name lookup.
    book = session.query(Book).filter(Book.isbn_code == fields["isbn_code"]).first()
    if book is None:
        book_name = get_book_name(fields["isbn_code"])
        if book_name is None:
            abort(422, "Неіснуючий ISBN код")  # "Nonexistent ISBN code"
        session.add(Book(isbn_code=fields["isbn_code"], name=book_name))
        # Re-query so `book` carries its database-assigned id (the query
        # autoflushes the pending insert).
        book = session.query(Book).filter(Book.isbn_code == fields["isbn_code"]).first()
    # Create the library/book link, or bump the copy count on the existing one.
    record = session.query(LibraryBook).filter(LibraryBook.book_id == book.id).filter(
        LibraryBook.library_id == library.id).first()
    if record is None:
        session.add(LibraryBook(library_id=library.id, book_id=book.id,
                                number_of_books=fields["number_of_books"]))
    else:
        record.number_of_books += fields["number_of_books"]
    session.commit()
    return "Книги успішно додано", 200  # "Books successfully added"
def main():
    """Scrape every known author's bibliography and persist unseen books."""
    for author in Author.objects:
        for entry in scrape_author(author.name, author.year):
            year = entry[0]
            # ASCII-encode the title, dropping characters that don't fit.
            title = entry[1].encode('ascii', 'ignore')
            link = entry[2]
            already_stored = Book.objects(
                author=author.name, title=title, year=year).count()
            if not already_stored:
                Book(author=author.name, title=title, link=link,
                     year=year, read=False).save()
def test_charges_endpoint(client):
    """POST one serialized default Book to /charges and verify the payload."""
    book_payload = json.loads(Book().json())
    response = client.post(
        "/charges",
        json=[book_payload],
        headers={"Content-Type": "application/json"},
    )
    response.headers["Content-type"] = "application/json"
    body = response.get_json()
    assert response.status_code == 200
    assert isinstance(body, list)
    assert len(body) == 1
    assert body[0]["book_type"] == "REGULAR"
async def load_markets():
    """Load markets from every exchange and register the configured ones.

    Populates the module-level MKTS mapping from CONF['exchanges'] and
    attaches a Book per (exchange, market) pair.  Terminates the process
    when a configured market is not offered by its exchange.
    """
    logger.info("loading markets")
    global EXS, MKTS
    # BUG FIX: asyncio.wait() rejects bare coroutines since Python 3.11
    # (deprecated since 3.8), and it leaves exceptions unretrieved on done
    # tasks.  gather() schedules the coroutines and propagates the first
    # failure immediately.
    await asyncio.gather(*(ex.handler.load_markets() for ex in EXS.values()))
    # Register every configured market exactly once.
    for mkts in CONF['exchanges'].values():
        for mkt in mkts:
            if mkt not in MKTS:
                MKTS[mkt] = Market(mkt)
    # Wire exchanges and markets together, refusing unsupported pairs.
    for ex, mkts in CONF['exchanges'].items():
        all_markets = EXS[ex].handler.symbols
        for mkt in mkts:
            if mkt not in all_markets:
                # Lazy %-args: logging formats only if the record is emitted.
                logger.error("%s not supported by %s", mkt, ex)
                exit(1)
            book = Book(ex, mkt)
            MKTS[mkt].add_exchange(ex, book)
            EXS[ex].add_market(mkt, book)
def save_to_firebase(self):
    """Validate the form, push the book to Firebase, show a success
    dialog, clear the inputs, and navigate back to the proper page."""
    if not self.validate():
        print("Validation Error")
        return
    # Assemble the Book straight from the form fields
    # (constructor order: isbn, title, pages, genre, description, year, author).
    book = Book(
        self.lineEdit_ISBN.text(),
        self.lineEdit_titulo.text(),
        self.lineEdit_numerodepaginas.text(),
        self.lineEdit_Genero.text(),
        self.lineEdit_descricao.text(),
        self.lineEdit_ano.text(),
        self.lineEdit_autor.text(),
    )
    db.child('books').push(book.to_dict())
    # Success confirmation dialog.
    msg = QtWidgets.QMessageBox()
    msg.setIcon(QtWidgets.QMessageBox.NoIcon)
    msg.setText("Sucesso")
    msg.setInformativeText("Cadastrado com sucesso!")
    msg.setWindowTitle("Sucesso")
    msg.exec_()
    # Reset every input field.
    for field in (
        self.lineEdit_titulo,
        self.lineEdit_ISBN,
        self.lineEdit_numerodepaginas,
        self.lineEdit_ano,
        self.lineEdit_Genero,
        self.lineEdit_descricao,
        self.lineEdit_autor,
    ):
        field.setText('')
    if self.mainWindow:
        # Admins return to the admin page (index 4), others to home (index 0).
        if loggedUser != None and loggedUser.level == LevelOfAccess.ADMIN:
            self.mainWindow.stackedWidget.setCurrentIndex(4)
        else:
            self.mainWindow.stackedWidget.setCurrentIndex(0)
def readfile(name):
    """Parse a HashCode-style input file into the module-level Data holder.

    Line 0: counts (books, libraries, scanning days); line 1: book scores;
    then alternating library-descriptor / book-id lines.
    """
    with open(name, "r") as f:
        lines = f.readlines()
    header = lines[0].strip().split(" ")
    Data.nBooks = int(header[0])
    Data.nLibraries = int(header[1])
    Data.nScanningDays = int(header[2])
    Data.bookScores.extend(int(tok) for tok in lines[1].strip().split(" "))
    # Library descriptor lines: every other line starting at index 2.
    for idx, raw in enumerate(lines[2::2]):
        parts = raw.strip().split(" ")
        if parts[0] == '':
            # Skip empty (e.g. trailing) lines.
            continue
        Data.libraries.append(
            Library(int(parts[0]), int(parts[1]), int(parts[2]), idx))
    # Book-id lines: every other line starting at index 3, paired by index
    # with the library parsed just above.
    for idx, raw in enumerate(lines[3::2]):
        for tok in raw.strip().split(" "):
            book_id = int(tok)
            Data.libraries[idx].books.append(
                Book(book_id, Data.bookScores[book_id]))
# print(LevelOfAccess.COMMON_USER.value) line = '192.168.125-00' import re line = re.sub('[.-]', '', line) print(line) exit() booksRef = db.child('books') from data import Book, User, LevelOfAccess b = Book('1234567890123', 'Teste', "Editora", 'Genero', "Descrição do livro.", '2019', ['Fulano']) print('sending ', b, ' to firebase...') booksRef.push(b.to_dict()) u = User("*****@*****.**", "Fulano", "123467543091", LevelOfAccess.COMMON_USER) db.child('users').child(u.email).set(u.to_dict()) docs = booksRef.get() print("Data:") print(docs.val()) ''' for doc in docs.val(): print('Retrieved document ', doc.id, ' from Firebase: ', Book.from_dict(doc.to_dict()))
def test_charges3_fiction_1_days():
    """A fiction book due back tomorrow is charged 3."""
    due = date.today() + timedelta(days=1)
    fiction = Book(return_date=due)
    fiction.book_type = "FICTION"
    assert fiction.charges3 == 3
def test_charges3_novel_greater_than_min_days():
    """A novel due in four days (above the minimum) is charged 6."""
    due = date.today() + timedelta(days=4)
    novel = Book(return_date=due)
    novel.book_type = "NOVEL"
    assert novel.charges3 == 6
def test_charges3_novel_less_than_min_days():
    """A novel with the default return date is charged the 4.5 minimum."""
    novel = Book()
    novel.book_type = "NOVEL"
    assert novel.charges3 == 4.5
def test_charges3_regular_equal_to_min_days():
    """A regular book due in exactly two days is charged 2."""
    due = date.today() + timedelta(days=2)
    assert Book(return_date=due).charges3 == 2
def test_charges3_regular_less_than_min_days():
    """A regular book with the default return date is charged the 2 minimum."""
    assert Book().charges3 == 2
def test_charges2_0_days():
    """A book with the default return date accrues no charges2."""
    assert Book().charges2 == 0
def test_charges2_1_days():
    """A book due back tomorrow accrues charges2 of 1.5."""
    due = date.today() + timedelta(days=1)
    assert Book(return_date=due).charges2 == 1.5
# NOTE(review): this `except` closes a `try` block that begins above this
# chunk; its context is not visible here.
except RuntimeError as e:
    print(e)

# Command-line hyperparameters for the GCRN training script.
parser = argparse.ArgumentParser()
parser.add_argument("--embed_size", type=int, default=128)
parser.add_argument("--lr", type=float, default=1e-4)
parser.add_argument("--weight_decay", type=float, default=1e-4)
parser.add_argument("--epoch_size", type=int, default=10)
parser.add_argument("--batch_size", type=int, default=512)
parser.add_argument("--alpha", type=float, default=0.3)
# NOTE(review): single-dash "-beta" is inconsistent with the other
# double-dash flags — likely a typo for "--beta", but fixing it would
# change the CLI; confirm with callers first.
parser.add_argument("-beta", type=float, default=10.)
parser.add_argument("--num_sample_step", type=int, default=100)
parser.add_argument("--seq_len", type=int, default=10)
arg = parser.parse_args()

# Build the temporal graph and a random subgraph sampler over it.
book = Book()
book.g.discrete(arg.seq_len)
sample = RandomTemporalSubGraph(book.g, arg.batch_size, arg.num_sample_step)
# supervised
data = sample.supervised()

# Model inputs: node ids plus a sequence of adjacency structures.
nodes = keras.layers.Input(shape=(), batch_size=arg.batch_size)
adjs = keras.layers.Input(shape=(arg.seq_len, None), batch_size=arg.batch_size)
# Node embeddings seed the recurrent state of the graph-convolutional RNN.
nodes_embed = keras.layers.Embedding(input_dim=book.g.node_size,
                                     output_dim=arg.embed_size)(nodes)
o = keras.layers.RNN(
    GCRN2Cell(arg.embed_size, GraphAttention, {"units": arg.embed_size}))(adjs, initial_state=nodes_embed)
# Per-label sigmoid head for the supervised objective.
cls_o = keras.layers.Dense(book.g.label_size, activation="sigmoid")(o)
# Two views of the same graph: embeddings only, and embeddings + classifier.
supervised_gcrn_ = keras.Model(inputs=(nodes, adjs), outputs=o)
supervised_gcrn = keras.Model(inputs=[nodes, adjs], outputs=cls_o)