def test_set_part():
    """Confirms that the index is updated when a part is renamed."""
    initial_parts = {
        'index': trim(""" Index - Part One - Part Two """),
        'part-one': trim(""" Part One Text. """),
        'part-two': trim(""" Part Two Text. """),
    }
    document = Document(data)
    document.set_parts('user-slug', 'doc-slug', initial_parts)
    # Renaming happens implicitly: saving new text under an old slug
    # returns the slug derived from the new title.
    renamed_slug = document.set_part('part-one', trim(""" Part Three Text. """))
    assert renamed_slug == "part-three"
    expected_parts = {
        'index': trim(""" Index - Part Three - Part Two """),
        'part-three': trim(""" Part Three Text. """),
        'part-two': trim(""" Part Two Text. """),
    }
    assert document.parts == expected_parts
def test_import_and_export_document():
    """
    Generates an archive file and then turns it back into the original parts.
    """
    source_doc = Document(data)
    user_slug = random_slug('test-user-')
    doc_slug = random_slug('test-doc-')
    source_doc.set_parts(user_slug, doc_slug, minimal_document)
    # Export, then re-import into a fresh Document; the round trip must
    # reproduce an equal document, and the file name must identify it.
    file_name, file_text = source_doc.export_txt_file()
    assert user_slug in file_name
    assert doc_slug in file_name
    round_trip_doc = Document(data)
    round_trip_doc.import_txt_file(user_slug, doc_slug, file_text)
    assert source_doc == round_trip_doc
def main():
    """Benchmark each entry in TESTS against the big test image.

    Loads TEST_BIGIMAGE into a Document shown in a TiledDrawWidget, runs
    every (name, func, kwargs) benchmark from TESTS, and prints per-test
    timings.  The model is always cleaned up, even if a test raises.
    """
    tdw = TiledDrawWidget()
    tdw.zoom_max = 64.0
    tdw.zoom_min = 1.0 / 16
    model = Document()
    try:
        model.load(TEST_BIGIMAGE)
        tdw.set_model(model)
        for name, func, kwargs in TESTS:
            nframes, dt = func(tdw, model, **kwargs)
            if dt <= 0:
                # BUG FIX: the original printed the bare "%s: 0s" template
                # without interpolating the test name.
                print("%s: 0s" % name)
            else:
                print("%s: %0.3f seconds, %0.1f fps" % (name, dt, nframes / dt))
    finally:
        model.cleanup()
def status(multi, identity, jobid):
    """
    Show user info and active jobs.

    With no job id, prints the user's profile plus a table of their orders.
    With a job id, queries every registered server for signed documents
    matching that job and prints them, falling back to locally stored
    copies when no server returns anything.
    """
    (log, user, key, urls) = init(multi, identity)
    Order.update_orders(rein, Document, Document.get_user_documents)
    documents = Document.get_user_documents(rein)
    if jobid is None:
        click.echo("User: %s" % user.name)
        click.echo("Master bitcoin address: %s" % user.maddr)
        click.echo("Delegate bitcoin address: %s" % user.daddr)
        click.echo("Delegate public key: %s" % key)
        click.echo("Willing to mediate: %s" % user.will_mediate)
        if user.will_mediate:
            click.echo("Mediator fee: %s %%" % user.mediator_fee)
        click.echo("Total document count: %s" % len(documents))
        click.echo("Registered servers: ")
        for url in urls:
            click.echo(" " + url)
        click.echo("Testnet: %s" % PersistConfig.get_testnet(rein))
        click.echo('')
        click.echo('ID Job ID Status')
        click.echo('-----------------------------------------------------')
        orders = Order.get_user_orders(rein, Document)
        for order in orders:
            past_tense = order.get_past_tense(order.get_state(rein, Document))
            click.echo("%s %s %s" % (order.id, order.job_id, past_tense))
    else:
        remote_documents = []
        for url in urls:
            log.info("Querying %s for job id %s..." % (url, jobid))
            sel_url = "{0}query?owner={1}&query=by_job_id&job_ids={2}&testnet={3}"
            try:
                answer = requests.get(
                    url=sel_url.format(url, user.maddr, jobid, rein.testnet))
            # BUG FIX: a bare `except:` also swallowed KeyboardInterrupt and
            # SystemExit; only network/HTTP failures should be skipped.
            except requests.exceptions.RequestException:
                click.echo('Error connecting to server.')
                log.error('server connect error ' + url)
                continue
            data = answer.json()
            remote_documents += filter_and_parse_valid_sigs(
                rein, data['by_job_id'])
        unique_documents = unique(remote_documents)
        # BUG FIX: the de-duplicated list was computed but never used; the
        # loop echoed the raw list, repeating documents held by several
        # servers.
        for doc in unique_documents:
            click.echo(doc)
        if len(remote_documents) == 0:
            order = Order.get_by_job_id(rein, jobid)
            if order:
                documents = order.get_documents(rein, Document)
                for document in documents:
                    click.echo("\n" + document.contents)
            else:
                click.echo("Job id not found")
def refresh_metadata():
    """
    Regenerate cache and metadata entries for every document of every user.

    NOTE(review): the original docstring trails off ("This needs a ...");
    the guard below deliberately blocks execution until a real hostname can
    be supplied to set_host() — TODO confirm intent with the author.
    """
    raise RuntimeError("Must be able to document.set_host")
    # Unreachable until the guard above is removed; kept as written.
    data = get_redis_client()
    for user_slug in data.userSet_list():
        for doc_slug in data.userDocumentSet_list(user_slug):
            document = Document(data)
            document.set_host()  # <-- TODO
            document.load(user_slug, doc_slug)
            document.save()
def status(multi, identity, jobid):
    """
    Show user info and active jobs.
    """
    (log, user, key, urls) = init(multi, identity)
    Order.update_orders(rein, Document, Document.get_user_documents)
    documents = Document.get_user_documents(rein)
    if jobid is not None:
        # A job id was given: ask every registered server for its documents.
        remote_documents = []
        for url in urls:
            log.info("Querying %s for job id %s..." % (url, jobid))
            sel_url = "{0}query?owner={1}&query=by_job_id&job_ids={2}&testnet={3}"
            try:
                response = requests.get(url=sel_url.format(url, user.maddr, jobid, rein.testnet))
            except:
                click.echo('Error connecting to server.')
                log.error('server connect error ' + url)
                continue
            payload = response.json()
            remote_documents += filter_and_parse_valid_sigs(rein, payload['by_job_id'])
        unique_documents = unique(remote_documents)
        for doc in remote_documents:
            click.echo(doc)
        if not remote_documents:
            # Nothing came back from the network; try the local store.
            order = Order.get_by_job_id(rein, jobid)
            if order:
                for document in order.get_documents(rein, Document):
                    click.echo("\n" + document.contents)
            else:
                click.echo("Job id not found")
        return
    # No job id: print the user's profile and a table of their orders.
    click.echo("User: %s" % user.name)
    click.echo("Master bitcoin address: %s" % user.maddr)
    click.echo("Delegate bitcoin address: %s" % user.daddr)
    click.echo("Delegate public key: %s" % key)
    click.echo("Willing to mediate: %s" % user.will_mediate)
    if user.will_mediate:
        click.echo("Mediator fee: %s %%" % user.mediator_fee)
    click.echo("Total document count: %s" % len(documents))
    click.echo("Registered servers: ")
    for url in urls:
        click.echo(" " + url)
    click.echo("Testnet: %s" % PersistConfig.get_testnet(rein))
    click.echo('')
    click.echo('ID Job ID Status')
    click.echo('-----------------------------------------------------')
    for order in Order.get_user_orders(rein, Document):
        past_tense = order.get_past_tense(order.get_state(rein, Document))
        click.echo("%s %s %s" % (order.id, order.job_id, past_tense))
# NOTE(review): this block was flattened onto one physical line by a
# whitespace-mangling paste.  The inline `#` comments now swallow the
# statements that originally followed them on their own lines, so the text
# cannot be re-indented without guessing statement boundaries — preserved
# verbatim.
# Purpose (from the visible code): a unittest class-level fixture
# (presumably decorated @classmethod outside this view — TODO confirm) that
# builds a monkeypatched TiledDrawWidget whose renderer allocation can be
# injected, plus a painting-only Document loaded from TEST_BIGIMAGE.  It
# bails out quietly (leaving cls._tdw / cls._model as None) when no default
# Gdk display exists or gui.tileddrawwidget cannot be imported.
def setUpClass(cls): # The tdw import below just segfaults on my system right now, if # there's no X11 display available. Be careful about proceding. cls._tdw = None cls._model = None from gi.repository import Gdk if Gdk.Display.get_default() is None: return try: import gui.tileddrawwidget except Exception: return class TiledDrawWidget (gui.tileddrawwidget.TiledDrawWidget): """Monkeypatched TDW for testing purposes""" def __init__(self, *args, **kwargs): gui.tileddrawwidget.TiledDrawWidget\ .__init__(self, *args, **kwargs) self.renderer.get_allocation = self._get_allocation def set_allocation(self, alloc): self._alloc = alloc def _get_allocation(self): return self._alloc tdw = TiledDrawWidget() tdw.zoom_max = 64.0 tdw.zoom_min = 1.0 / 16 model = Document(painting_only=True) model.load(join(paths.TESTS_DIR, TEST_BIGIMAGE)) tdw.set_model(model) cls._model = model cls._tdw = tdw
def remove(self):
    """Ask for confirmation, then permanently remove the selected file."""
    selected = self.listview.selectedItems()[0].data(Qt.UserRole)
    # `doc` instead of `file` — avoids shadowing the builtin.
    doc = Document(file_name=selected.file_name, file_hash=selected.file_hash)
    answer = QMessageBox.question(
        self,
        'Remove file',
        'Are you sure you want to permanently remove this file?\nFilename: ' + selected.file_name,
        QMessageBox.Yes | QMessageBox.No)
    if answer == QMessageBox.Yes:
        doc.remove()
        doc.save_to_db()
        self.listview.takeItem(self.listview.currentRow())
        self.parent.taglist.refresh_listview()
def download_txt(user_slug, doc_slug):
    """
    Creates a single text file to download.
    """
    doc = Document(data)
    doc.set_host(domain_name(bottle.request))
    if not doc.load(user_slug, doc_slug):
        template = "Document '{:s}' not found."
        bottle.abort(HTTP_NOT_FOUND, template.format(doc_slug))
    file_name, text = doc.export_txt_file()
    # Serve as a plain-text attachment under the exported file name.
    disposition = 'attachment; filename="{:s}"'.format(file_name)
    bottle.response.set_header('Content-Type', 'text/plain')
    bottle.response.set_header('Content-Disposition', disposition)
    return text
def post_upload_txt(user_slug, doc_slug):
    """
    Create a document from an uploaded file.

    Validates the upload's size and name, reads it from a temp path, then
    imports it as user_slug/doc_slug and redirects to the document.

    @todo: Show a diff?
    """
    require_authority_for_user(user_slug)  # else 401s
    upload = bottle.request.files.get('upload')

    # Validation
    limit = int(config['UPLOAD_LIMIT_KB'])
    if upload.content_length > (limit * 1024):
        msg = "The uploaded file is too large (limit: {:d}K)."
        # BUG FIX: abort() was called without a status code, so the message
        # string landed where the HTTP code belongs.
        bottle.abort(HTTP_BAD_REQUEST, msg.format(limit))
    name = upload.filename
    prefix = 'article-wiki_{:s}_{:s}_'.format(user_slug, doc_slug)
    if not name.startswith(prefix) or not name.endswith('.txt'):
        msg = "A '{:s}*.txt' filename is expected."
        bottle.abort(400, msg.format(prefix))

    # Load contents
    filepath = '/tmp/' + upload.filename
    if os.path.exists(filepath):
        os.unlink(filepath)
    upload.save('/tmp')
    try:
        # BUG FIX: close the handle (it was opened and leaked), and make
        # sure the temp file is removed even when reading fails — abort()
        # raises, so the original unlink() was skipped on the error path.
        with codecs.open(filepath, 'r', 'utf-8') as source:
            contents = source.read()
    except Exception:
        msg = "Failed to read path '{:s}'."
        # BUG FIX: report the path that failed, not the user slug.
        bottle.abort(HTTP_NOT_FOUND, msg.format(filepath))
    finally:
        if os.path.exists(filepath):
            os.unlink(filepath)

    document = Document(data)
    host = domain_name(bottle.request)
    document.set_host(host)
    document.import_txt_file(user_slug, doc_slug, contents)
    document.save()
    uri = '/read/{:s}/{:s}'.format(user_slug, doc_slug)
    bottle.redirect(uri)
def main():
    """Command-line entry point: add, edit, or list tags for a file.

    Usage mirrors the original: positional `file`, optional -t/--tags,
    -e/--edit to edit instead of add, and --list to dump all tags.
    """
    parser = ArgumentParser(description='Tag files')
    parser.add_argument('file', help='File to tag',
                        type=lambda file: is_file_valid(parser, file))
    parser.add_argument('-t', '--tags', help='Add tags to file')
    parser.add_argument('-e', '--edit', help='Edit tags for file',
                        action='store_true')
    parser.add_argument('--list', help='List all tags', action='store_true')
    # FIX: parse the command line once — the original re-ran
    # parser.parse_args() five separate times.
    args = parser.parse_args()
    doc = Document(args.file)
    if args.edit:
        if args.tags:
            doc.edit(args.tags)
        else:
            print('Add -t to edit tags')
    else:
        doc.add(tags=args.tags)
    if args.list:
        tags = Tags()
        print(tags.get('database'))
    doc.save_to_db()
def process_collection(i):
    """Interpret the i-th query in query_list against the current collection.

    Supported commands: "doc" (descend into a document), "getDocs",
    and "put"; anything else prints an error.  Mutates the module-level
    `document` (and, for "doc", relies on `database` / `collection`).
    """
    global document
    try:
        cmd = process_command(query_list[i])
        if cmd[0] == "doc":
            global database, collection
            document = Document(cmd[1], database.enc_name,
                                database.collection_index,
                                parent=collection.parents + [collection.name])
            process_document(i + 1)
        elif cmd[0] == "getDocs":
            collection.getDocs()
        elif cmd[0] == "put":
            collection.put(cmd[1])
            database.collection_index = collection.get_collection_index()
        else:
            print("\nThe command \'" + cmd[0] + "\' is not valid\n")
    except Exception as e:
        print("\nThe command is not valid exception : " + str(e) + "\n")
def main():
    """Benchmark each entry in TESTS against the big test image.

    Same harness as the sibling benchmark main(): load TEST_BIGIMAGE,
    attach it to a TiledDrawWidget, time every test, always clean up.
    """
    tdw = TiledDrawWidget()
    tdw.zoom_max = 64.0
    tdw.zoom_min = 1.0 / 16
    model = Document()
    try:
        model.load(TEST_BIGIMAGE)
        tdw.set_model(model)
        # FIX: the result was bound to an unused local (`bbox`); the call
        # is kept in case get_effective_bbox() warms internal caches —
        # TODO confirm it is needed at all.
        model.get_effective_bbox()
        for name, func, kwargs in TESTS:
            nframes, dt = func(tdw, model, **kwargs)
            if dt <= 0:
                # BUG FIX: the original printed the bare "%s: 0s" template
                # without interpolating the test name.
                print("%s: 0s" % name)
            else:
                print("%s: %0.3f seconds, %0.1f fps" % (name, dt, nframes / dt))
    finally:
        model.cleanup()
def test_delete_part():
    """Confirms that the index is updated when a part is deleted."""
    starting_parts = {
        'index': trim(""" Index - Part One - Part Two """),
        'part-one': trim(""" Part One Text. """),
        'part-two': trim(""" Part Two Text. """),
    }
    document = Document(data)
    document.set_parts('user-slug', 'doc-slug', starting_parts)
    document.delete_part('part-one')
    # Both the part itself and its index entry must be gone.
    expected = {
        'index': trim(""" Index - Part Two """),
        'part-two': trim(""" Part Two Text. """),
    }
    assert document.parts == expected
def search(self):
    """Search documents for the query text and show the results.

    Does nothing when the search box is empty.
    """
    query = self.search_input.text()
    if query:
        print(query)
        docs = Document(query, file_hash='Dummy hash')
        # FIX: fetch once — the original called docs.get() twice, running
        # the lookup twice and risking inconsistent print/display results.
        results = docs.get()
        print(results)
        self.filelist.add_file(results)
# NOTE(review): this block was flattened onto two physical lines and
# contains redaction residue — the token run `"User: "******"No buckets
# registered..."` is not valid Python (an echo of the user's name and the
# bucket-availability check appear to have been censored out), and a string
# literal ('Document is too big. ...') is split across the two physical
# lines.  Reconstruction would be guesswork, so the text is preserved
# verbatim.
# Purpose (from the docstring and visible calls): push locally created
# documents to every registered server — obtain a fresh nonce per server,
# find documents whose remote copy is missing or stale, create/refresh
# Placement rows, sign each payload with the user's delegate key, POST it,
# then clear the nonces and report a summary.
def sync(multi, identity): """ Upload records to each registered server. Each user, bid, offer, etc. (i.e. anything except actual payments) is stored as document across a public database that is maintained across a network of paid servers. This command pushes the documents you have created to the servers from which you have purchased hosting. """ (log, user, key, urls) = init(multi, identity) click.echo("User: "******"No buckets registered. Run 'rein request' to continue.") return Placement.create_placements(rein.engine) upload = [] nonce = {} for url in urls: nonce[url] = get_new_nonce(rein, url) if nonce[url] is None: continue check = Document.get_user_documents(rein) if len(check) == 0: click.echo("Nothing to do.") for doc in check: if len(doc.contents) > 8192: click.echo('Document is too big. 8192 bytes should be enough for anyone.') log.error("Document oversized %s" % doc.doc_hash) else: placements = Placement.get_placements(rein, url, doc.id) if len(placements) == 0: upload.append([doc, url]) else: for plc in placements: if Placement.get_remote_document_hash(rein, plc) != doc.doc_hash: upload.append([doc, url]) failed = [] succeeded = 0 for doc, url in upload: placements = Placement.get_placements(rein, url, doc.id) if len(placements) == 0: remote_key = ''.join(random.SystemRandom().choice(string.ascii_uppercase + string.digits) for _ in range(32)) plc = Placement(doc.id, url, remote_key, False, rein.testnet) rein.session.add(plc) rein.session.commit() else: plc = placements[0] for p in placements[1:]: rein.session.delete(p) rein.session.commit() if len(doc.contents) > 8192: log.error("Document oversized %s" % doc.doc_hash) click.echo('Document is too big. 
8192 bytes should be enough for anyone.') elif nonce[url] is None: continue else: message = plc.remote_key + doc.contents + user.daddr + nonce[url] message = message.decode('utf8') message = message.encode('ascii') signature = sign(user.dkey, message) data = {"key": plc.remote_key, "value": doc.contents, "nonce": nonce[url], "signature": signature, "signature_address": user.daddr, "owner": user.maddr, "testnet": rein.testnet} body = json.dumps(data) headers = {'Content-Type': 'application/json'} answer = requests.post(url='{0}put'.format(url), headers=headers, data=body) res = answer.json() if 'result' not in res or res['result'] != 'success': log.error('upload failed doc=%s plc=%s url=%s' % (doc.id, plc.id, url)) failed.append(doc) else: plc.verified += 1 rein.session.commit() log.info('upload succeeded doc=%s plc=%s url=%s' % (doc.id, plc.id, url)) click.echo('uploaded %s' % doc.doc_hash) succeeded += 1 for url in urls: if nonce[url] is None: continue sel_url = url + 'nonce?address={0}&clear={1}' answer = requests.get(url=sel_url.format(user.maddr, nonce[url])) log.info('nonce cleared for %s' % (url)) click.echo('%s docs checked on %s servers, %s uploads done.' % (len(check), len(urls), str(succeeded)))
# NOTE(review): flattened ad-hoc GUI smoke test spanning two physical
# lines; the original line breaks (including the boundary between the two
# lines, which falls between statements of the same function) were lost.
# Preserved verbatim: re-wrapping the large layer_info literal and the GTK
# column setup by hand risks transcription errors, and the code is Python 2
# (`unicode(__package__)`).
# Purpose (from the visible code): builds an in-memory layer-stack
# Document, wraps its root stack in RootStackTreeModelWrapper, and shows it
# in a Gtk.TreeView with type/name/visible/locked columns inside a scrolled
# window, then runs the GTK main loop.
def _test(): """Test the custom model in an ad-hoc GUI window""" from lib.document import Document from lib.layer import PaintingLayer, LayerStack doc_model = Document() root = doc_model.layer_stack root.clear() layer_info = [ ((0, ), LayerStack(name="Layer 0")), ((0, 0), PaintingLayer(name="Layer 0:0")), ((0, 1), PaintingLayer(name="Layer 0:1")), ((0, 2), LayerStack(name="Layer 0:2")), ((0, 2, 0), PaintingLayer(name="Layer 0:2:0")), ((0, 2, 1), PaintingLayer(name="Layer 0:2:1")), ((0, 3), PaintingLayer(name="Layer 0:3")), ((1, ), LayerStack(name="Layer 1")), ((1, 0), PaintingLayer(name="Layer 1:0")), ((1, 1), PaintingLayer(name="Layer 1:1")), ((1, 2), LayerStack(name="Layer 1:2")), ((1, 2, 0), PaintingLayer(name="Layer 1:2:0")), ((1, 2, 1), PaintingLayer(name="Layer 1:2:1")), ((1, 2, 2), PaintingLayer(name="Layer 1:2:2")), ((1, 2, 3), PaintingLayer(name="Layer 1:2:3")), ((1, 3), PaintingLayer(name="Layer 1:3")), ((1, 4), PaintingLayer(name="Layer 1:4")), ((1, 5), PaintingLayer(name="Layer 1:5")), ((1, 6), PaintingLayer(name="Layer 1:6")), ((2, ), PaintingLayer(name="Layer 2")), ((3, ), PaintingLayer(name="Layer 3")), ((4, ), PaintingLayer(name="Layer 4")), ((5, ), PaintingLayer(name="Layer 5")), ((6, ), LayerStack(name="Layer 6")), ((6, 0), PaintingLayer(name="Layer 6:0")), ((6, 1), PaintingLayer(name="Layer 6:1")), ((6, 2), PaintingLayer(name="Layer 6:2")), ((6, 3), PaintingLayer(name="Layer 6:3")), ((6, 4), PaintingLayer(name="Layer 6:4")), ((6, 5), PaintingLayer(name="Layer 6:5")), ((7, ), PaintingLayer(name="Layer 7")), ] for path, layer in layer_info: root.deepinsert(path, layer) icon_theme = Gtk.IconTheme.get_default() icon_theme.append_search_path("./desktop/icons") view = Gtk.TreeView() stack_wrapper = RootStackTreeModelWrapper(doc_model) view.set_model(stack_wrapper) view.set_show_expanders(True) view.set_enable_tree_lines(True) view.set_reorderable(True) view.set_headers_visible(True) sel = view.get_selection() sel.set_mode(Gtk.SelectionMode.SINGLE) 
view_scroll = Gtk.ScrolledWindow() view_scroll.set_shadow_type(Gtk.ShadowType.ETCHED_IN) scroll_pol = Gtk.PolicyType.AUTOMATIC view_scroll.set_policy(scroll_pol, scroll_pol) view_scroll.add(view) view_scroll.set_size_request(-1, 100) cell = Gtk.CellRendererPixbuf() col = Gtk.TreeViewColumn("T") col.pack_start(cell, expand=False) col.set_cell_data_func(cell, layer_type_pixbuf_datafunc) col.set_max_width(24) col.set_sizing(Gtk.TreeViewColumnSizing.AUTOSIZE) view.append_column(col) cell = Gtk.CellRendererText() cell.set_property("ellipsize", Pango.EllipsizeMode.END) col = Gtk.TreeViewColumn("Name") col.pack_start(cell, expand=True) col.set_cell_data_func(cell, layer_name_text_datafunc) col.set_expand(True) col.set_min_width(48) col.set_sizing(Gtk.TreeViewColumnSizing.AUTOSIZE) view.append_column(col) view.set_expander_column(col) cell = Gtk.CellRendererPixbuf() col = Gtk.TreeViewColumn("V") col.pack_start(cell, expand=False) col.set_cell_data_func(cell, layer_visible_pixbuf_datafunc) col.set_max_width(24) view.append_column(col) cell = Gtk.CellRendererPixbuf() col = Gtk.TreeViewColumn("L") col.pack_start(cell, expand=False) col.set_cell_data_func(cell, layer_locked_pixbuf_datafunc) col.set_max_width(24) view.append_column(col) win = Gtk.Window() win.set_title(unicode(__package__)) win.connect("destroy", Gtk.main_quit) win.add(view_scroll) win.set_default_size(300, 500) win.show_all() Gtk.main()
def delete_part(user_slug, doc_slug, part_slug):
    """
    Delete a part from a document. Must be logged in, and be the owner.

    To Do:
    - Form and confirmation step?
    - Delete parts including unused?
    """
    require_authority_for_user(user_slug)  # or 401
    doc = Document(data)
    doc.set_host(domain_name(bottle.request))
    if not doc.load(user_slug, doc_slug):
        template = "Document '{:s}/{:s}' not found."
        bottle.abort(HTTP_NOT_FOUND, template.format(user_slug, doc_slug))
    doc.delete_part(part_slug)
    # Removing the last part deletes the whole document.
    if len(doc.parts) > 0:
        doc.save()
        bottle.redirect('/read/{:s}/{:s}'.format(user_slug, doc_slug))
    else:
        doc.delete()
        bottle.redirect('/user/{:s}'.format(user_slug))
# NOTE(review): wiki-editor POST handler flattened across two physical
# lines; the original wrapping (including the boundary between the two
# lines) was lost, so the text is preserved verbatim rather than guessing.
# Flow (from the visible code): reject blank '_' user and '_/_' doc/part
# slugs; when saving, require authority and a 'content' form field; clean
# the submitted text; create a new document (doc_slug == '_') or update an
# existing index/part; on save, persist with pregenerate=True and delete
# the old document if its slug changed; redirect (with special-case URIs
# for the 'fixtures' document) or re-render the editor for previews.
def post_edit_part(user_slug, doc_slug, part_slug): """ Wiki editor for existing doc part (or '_' if new). """ if user_slug == '_': # New user msg = "Blank user '_' not supported." bottle.abort(HTTP_BAD_REQUEST, msg) if doc_slug == '_' and part_slug == '_': msg = "Blank document and part '_/_' not supported." bottle.abort(HTTP_BAD_REQUEST, msg) if 'they_selected_save' in bottle.request.forms: require_authority_for_user(user_slug) if 'content' not in bottle.request.forms: bottle.abort(HTTP_BAD_REQUEST, "Form data was missing.") new_text = clean_text(bottle.request.forms.content) # Default slugs unless we change them new_part_slug = part_slug new_doc_slug, old_doc_slug = doc_slug, doc_slug document = Document(data) host = domain_name(bottle.request) document.set_host(host) if doc_slug == "_": # New article... new_doc_slug = document.set_index(new_text) new_doc_slug = data.userDocument_unique_slug(user_slug, new_doc_slug) document.set_slugs(user_slug, new_doc_slug) elif document.load(user_slug, doc_slug): if part_slug == 'index': new_doc_slug = document.set_index(new_text) if new_doc_slug != doc_slug: unique_slug = data.userDocument_unique_slug( user_slug, new_doc_slug) document.set_slugs(user_slug, unique_slug) else: new_part_slug = document.set_part(part_slug, new_text) else: msg = "Document '{:s}/{:s}' not found." 
bottle.abort(HTTP_NOT_FOUND, msg.format(user_slug, doc_slug)) okay_to_save = all([ 'they_selected_save' in bottle.request.forms, has_authority_for_user(user_slug) ]) if okay_to_save: new_doc_slug = document.save(pregenerate=True) old_doc = Document(data) if old_doc.load(user_slug, old_doc_slug): if old_doc.doc_slug != new_doc_slug: old_doc.delete() # Special redirects when editing fixtures uri = '/read/{:s}/{:s}'.format(user_slug, new_doc_slug) if doc_slug == 'fixtures': if part_slug == 'homepage': uri = '/' elif part_slug == 'author': uri = '/user/{:s}'.format(user_slug) bottle.redirect(uri) is_preview = 'they_selected_preview' in bottle.request.forms return show_editor(new_text, document.user_slug, document.doc_slug, new_part_slug, is_preview, can_be_saved=has_authority_for_user(user_slug))
# NOTE(review): very large API-SQL generator entry point, flattened across
# six physical lines with every boundary falling mid-statement (e.g. "for
# fn in / Util().getFileList(...)", ".e nv"), inline `#` comments that now
# swallow following code, and a dangling `'''` at the end that opens an
# unterminated string.  Preserved verbatim: any re-indentation would be
# guesswork on top of corrupted text.
# Flow (from the visible code): select source/target API configuration
# JSON from ./config and merge them; set up Home/source/target development
# environments; optionally copy postgres extension files (per-file
# confirmations); initialise docker-compose.test.yml and .env from
# templates with name substitutions ('one_db' / 'hapi-api' swapped for the
# target names), backing up before overwrites; run the static-script
# pipelines (db.sql, table.sql, base/api function and usage scripts); then
# for each 'api-definition' entry assemble POST/GET/DELETE/PUT functions
# from templates into one script per API, and for each 'api-static' entry
# copy/rename the static script with the same substitutions — always
# confirming and backing up before overwriting existing files.
def main(): combined_sql_list = [] # eventually is written to a file # [Generate API sql file] print('Generate API') print(' - load api configuration, generate funcPattern key and values') # get configuration file name {folder: "", name: ""} # get list of files of type .json in folder ./config # [Use a configuration file] config_folder = '{}'.format(os.getcwd().replace('_tasks', 'config')) # [Select API Source ] sourceConfiguration = open_api(config_folder, file_type="source") if not sourceConfiguration: print('cancel') exit(0) # [Select API Target ] targetConfiguration = open_api(config_folder, file_type="target") if not targetConfiguration: print('cancel') exit(0) # [Merge Source and Target] sourceConfiguration.update(targetConfiguration) apiConfiguration = sourceConfiguration # setup default environment homeDev = HomeDevelopment().setup() # [Create missing folders] sourceDev = HomeDevelopment().setup() targetDev = HomeDevelopment().setup() # [Scan configuration for home, source, and target environment configurations] for apiName in apiConfiguration: if apiName == 'source': # [Configure input sources from GIT repositories] sourceDev = get_environment(apiConfiguration[apiName]) elif apiName == 'target': # [Configure output targets from GIT repositories] targetDev = get_environment(apiConfiguration[apiName]) print('=================') report(homeDev, sourceDev, targetDev) print('=================') ############## # [Process Postgres Extentions] ############## if targetDev.getFolder('db'): if not Util().folder_exists(sourceDev.getFolder('db')): # [Copy first set of _tasks code and config files] Util().copyFolder(sourceDev.getFolder('db'), targetDev.getFolder('db')) else: if not Util().confirm( '* Install/Overwrite postgres code and extentions?', 'N'): print(" Overwriting postgres configuration and extentions") print(' - source ', sourceDev.getFolder('db')) print(' - target ', targetDev.getFolder('db')) # [Copy all files in extention/db folder] for fn in 
Util().getFileList(sourceDev.getFolder('db')): if Util().file_exists(targetDev.getFolder('db'), fn): if not Util().confirm(' -- Overwrite {}?'.format(fn), 'N'): Util().copy( '{}/{}'.format(sourceDev.getFolder('db'), fn), '{}/{}'.format(targetDev.getFolder('db'), fn)) else: print(' - copy', fn) Util().copy( '{}/{}'.format(sourceDev.getFolder('db'), fn), '{}/{}'.format(targetDev.getFolder('db'), fn)) ############# # [## Initalize docker-component] ############# # [Define list of words to replace in docker-compose file] replace_ = ['one_db', 'hapi-api'] replace_with = [targetDev.getName('db'), targetDev.getName('db_api')] if not Util().file_exists(targetDev.getFolder('project'), 'docker-compose.test.yml'): # [Copy docker-compose.yml to target when it doesnt exist] #print('Create new docker-compose.yml') dcDoc = Document(sourceDev.getFolder('project'), 'docker-compose.yml').load() dcDoc.replace(replace_, replace_with) dcDoc.saveAs(targetDev.getFolder('project'), 'docker-compose.test.yml') else: # [Ask to overwrite when docker-compose exists] if not Util().confirm('* Overwrite docker-compose.test.yml?', 'N'): dcDoc = Document(sourceDev.getFolder('project'), 'docker-compose.yml').load() backup = Util().makeBackupFile(targetDev.getFolder('project'), 'docker-compose.test.yml') print('backup', backup) dcDoc = Document(sourceDev.getFolder('project'), 'docker-compose.yml').load() dcDoc.replace(replace_, replace_with) dcDoc.saveAs(targetDev.getFolder('project'), 'docker-compose.test.yml') ############## # [## .env # if .env doesnt exist then create one] ############# srcDoc = EnvironmentDocument(sourceDev.getFolder('env'), '.env').load() srcDoc.replace(replace_, replace_with) if not Util().file_exists(targetDev.getFolder('env'), '.env'): # [Copy .env to target when it doesnt exist] print('Create .env') srcDoc.saveAs(targetDev.getFolder('env'), '.env') else: # [if .env exists then update and add new variables] if not Util().confirm('* Update .env?', 'N'): # [Ask to update when 
.e nv exists] print('Update .env') trgtDoc = EnvironmentDocument(targetDev.getFolder('env'), '.env').load() #pprint(trgtDoc) #print('--------') trgtDoc.backup() trgtDoc.update(srcDoc) trgtDoc.save() #trgtDoc.saveAs(targetDev.getFolder('env'), '.env') #pprint(trgtDoc) ############# # [## Process Static Scripts] ############# # [Static scripts end with .static.sql] if targetDev.getFolder('db'): if not Util().confirm('* Install/Overwrite static scripts?', 'N'): print("writing static scripts") ############# # [Process Static Db-Api] ############# process_documents_to(apiConfiguration, sourceDev, targetDev, 'db.sql', '00.db.sql') ############# # [Process Static Database Scripts] ############# process_documents_to(apiConfiguration, sourceDev, targetDev, 'table.sql', '10.base.table.sql') ############# # [Process Base Function Scripts] ############# # process_to_one(apiConfiguration, sourceDev, targetDev, 'base.function.sql','12.base.function.sql') process_documents_to(apiConfiguration, sourceDev, targetDev, 'base.function.sql', '20.base.function.sql') ############# # [Process Api Function Scripts] ############# # process_to_one(apiConfiguration, sourceDev, targetDev, 'api.function.sql','20.api.function.sql') # combine_documents(apiConfiguration, sourceDev, targetDev, 'api.function.sql', '30.api.function.sql') process_documents_to(apiConfiguration, sourceDev, targetDev, 'api.usage.sql', '30.api.usage.sql') ############# # [Process Static Test Scripts] ############# #combine_documents(apiConfiguration, sourceDev, targetDev, 'base.test.sql', '90.base.test.sql') #combine_documents(apiConfiguration, sourceDev, targetDev, 'api.test.sql', '92.api.test.sql') ############# # [Process Static Data Scripts] # retired the data scripts. data is now encpsulated in tests ############# # process_to_one(apiConfiguration, sourceDev, targetDev, 'data.sql','80.data.sql') ############# # [Process Static Cleaup Scripts] # retired the data scripts. 
data is now encpsulated in tests ############# # process_to_one(apiConfiguration, sourceDev, targetDev, 'cleanup.sql','98.test.cleanup.sql') ############# # [Process DbApi] ############# #Util().copyFolder(sourceDev.getFolder('db_api'), targetDev.getFolder('db_api'),ignore=shutil.ignore_patterns('node_modules')) # ############# # [## Process multiple API SQL Definitions] # skip api-static, _tasks, source and target ############# # [Set target _tasks name] if targetDev.getFolder('db'): apiNameList = [ nm for nm in apiConfiguration if apiConfiguration[nm]['kind'] == 'api-definition' ] for apiName in apiNameList: apiScriptFilename = '{}.{}.api.function.sql'.format( apiConfiguration[apiName]['prefix'], apiConfiguration[apiName]['name'].upper()) # [Combine all API functions into one script] combined_sql_list = [] combined_sql_list.append('-- api : {}'.format( apiName.upper())) combined_sql_list.append('-- schema : {}'.format( apiConfiguration[apiName]['schema'])) combined_sql_list.append('-- generated on: {}'.format( datetime.now())) combined_sql_list.append('-- source project: {} '.format( sourceDev.getName('project'))) #combined_sql_list.append('\c {}'.format(targetDev.getName('db'))) #combined_sql_list.append('SET search_path TO {};'.format(', '.join(apiConfiguration['_tasks']['schema']))) # [Generate POST Function] combined_sql_list.append('-- POST') combined_sql_list.extend(PostTemplate(apiName, folder='../templates', filename='post.sql.template') \ .validate(apiConfiguration)\ .apply(apiConfiguration)) # [Generate GET Function] combined_sql_list.append('-- GET') combined_sql_list.extend(GetTemplate(apiName, folder='../templates', filename='get.sql.template') \ .validate(apiConfiguration) \ .apply(apiConfiguration)) # [Generate DELETE Function] combined_sql_list.append('-- DELETE') combined_sql_list.extend(DeleteTemplate(apiName, folder='../templates', filename='delete.sql.template') \ .validate(apiConfiguration) \ .apply(apiConfiguration)) # [Generate PUT 
Function] combined_sql_list.append('-- PUT') combined_sql_list.extend(PutTemplate(apiName, folder='../templates', filename='put.sql.template') \ .validate(apiConfiguration) \ .apply(apiConfiguration)) # [Assemble API (POST, GET, PUT, and Delete) Functions into single script] newDoc = Document(targetDev.getFolder('scripts'), apiScriptFilename).load(combined_sql_list) # [Confirm overwrite of existing API files] if not Util().file_exists(targetDev.getFolder('scripts'), apiScriptFilename): print(' - Writing API {} script'.format(apiScriptFilename)) # [Create Api when Api doesnt exist] newDoc.write() else: if not Util().confirm( '* Overwrite API {} script?'.format(apiScriptFilename), 'N'): # [Confirm the overwrite of existing Api script] # [Backup Api script before overwriting] backup = Util().makeBackupFile( targetDev.getFolder('scripts'), apiScriptFilename) print(" - Overwriting API {} script".format( apiScriptFilename)) newDoc.write() apiStaticNameList = [ nm for nm in apiConfiguration if apiConfiguration[nm]['kind'] == 'api-static' ] ############# # [## Process static API scripts] # Static scripts are copied, combined and renamed to the target folder ############# if targetDev.getFolder('db'): for apiName in apiStaticNameList: sourceFilename = '{}.{}.{}.api.function.sql'.format( '24', apiConfiguration[apiName]['schema'].replace('_', '.'), apiConfiguration[apiName]['name']) targetFilename = '{}.{}.api.function.sql'.format( apiConfiguration[apiName]['prefix'], apiConfiguration[apiName]['name'].upper()) combined_sql_list = [] combined_sql_list.append('-- api : {}'.format( apiName.upper())) combined_sql_list.append('-- schema : {}'.format( apiConfiguration[apiName]['schema'])) combined_sql_list.append('-- copied on: {}'.format(datetime.now())) combined_sql_list.append('-- source project: {}'.format( sourceDev.getName('project'))) combined_sql_list.append('\c {}'.format(targetDev.getName('db'))) combined_sql_list.append('SET search_path TO {};'.format(', '.join( 
apiConfiguration['_tasks']['schema']))) # [Define list of words to replace in docker-compose file] replace_ = ['one_db', 'hapi-api'] replace_with = [ targetDev.getName('db'), targetDev.getName('db_api') ] if not Util().file_exists(targetDev.getFolder('scripts'), targetFilename): # [Copy docker-compose.yml to target when it doesnt exist] print('* Create new {}'.format(targetFilename)) print(' -- load {}'.format(apiName)) print(' -- swap out values ') print(' -- write api to target ') dcDoc = Document(sourceDev.getFolder('scripts'), sourceFilename).load() dcDoc.replace(replace_, replace_with) dcDoc.saveAs(targetDev.getFolder('scripts'), targetFilename) else: # [Ask to overwrite when docker-compose exists] if not Util().confirm('* Overwrite {}?'.format(targetFilename), 'N'): #print('* Overwrite api-static') print(' -- load {}'.format(apiName)) print(' -- replace values ') print(' -- save api to target ') dcDoc = Document(sourceDev.getFolder('scripts'), sourceFilename).load() backup = Util().makeBackupFile( targetDev.getFolder('scripts'), targetFilename) print(' -- backup', backup) dcDoc = Document(sourceDev.getFolder('scripts'), sourceFilename).load() dcDoc.replace(replace_, replace_with) dcDoc.saveAs(targetDev.getFolder('scripts'), targetFilename) ################### # [Write API tests] ################## '''
def edit_document(self):
    """Apply the tags typed into the input box to this dialog's file,
    persist the change, and close the dialog."""
    doc = Document(self.fileinfo.file_name,
                   file_hash=self.fileinfo.file_hash)
    doc.edit(self.tag_input.text())
    doc.save_to_db()
    self.close()
def sync(multi, identity):
    """
    Upload records to each registered server.

    Each user, bid, offer, etc. (i.e. anything except actual payments)
    is stored as document across a public database that is maintained
    across a network of paid servers. This command pushes the documents
    you have created to the servers from which you have purchased
    hosting.
    """
    (log, user, key, urls) = init(multi, identity)
    # NOTE(review): the next two lines appear corrupted/redacted ("******")
    # in this copy of the file -- presumably they originally echoed the user
    # name and returned early only when no servers were registered; as
    # written this is a syntax error and an unconditional return. Confirm
    # against the upstream source before relying on anything below.
    click.echo("User: "******"No buckets registered. Run 'rein request' to continue.")
    return
    Placement.create_placements(rein.engine)
    upload = []  # [doc, url] pairs that need (re-)uploading
    nonce = {}   # per-server nonce used to sign each upload request
    for url in urls:
        nonce[url] = get_new_nonce(rein, url)
        if nonce[url] is None:
            continue
    check = Document.get_user_documents(rein)
    if len(check) == 0:
        click.echo("Nothing to do.")
    # Classify each document: upload anything unplaced, or whose remote
    # hash no longer matches the local doc_hash.
    # NOTE(review): `url` here is left over from the nonce loop above (the
    # last server in `urls`), so only that one server's placements are
    # checked -- looks like a bug; verify whether this should loop over urls.
    for doc in check:
        if len(doc.contents) > 8192:
            # Hard server-side size cap on document contents.
            click.echo(
                'Document is too big. 8192 bytes should be enough for anyone.'
            )
            log.error("Document oversized %s" % doc.doc_hash)
        else:
            placements = Placement.get_placements(rein, url, doc.id)
            if len(placements) == 0:
                upload.append([doc, url])
            else:
                for plc in placements:
                    if Placement.get_remote_document_hash(
                            rein, plc) != doc.doc_hash:
                        upload.append([doc, url])
    failed = []
    succeeded = 0
    for doc, url in upload:
        placements = Placement.get_placements(rein, url, doc.id)
        if len(placements) == 0:
            # First placement on this server: mint a random 32-char
            # uppercase/digit remote key using a CSPRNG.
            remote_key = ''.join(
                random.SystemRandom().choice(string.ascii_uppercase +
                                             string.digits)
                for _ in range(32))
            plc = Placement(doc.id, url, remote_key, False, rein.testnet)
            rein.session.add(plc)
            rein.session.commit()
        else:
            # Keep the first placement and prune any duplicate rows.
            plc = placements[0]
            for p in placements[1:]:
                rein.session.delete(p)
                rein.session.commit()
        if len(doc.contents) > 8192:
            log.error("Document oversized %s" % doc.doc_hash)
            click.echo(
                'Document is too big. 8192 bytes should be enough for anyone.')
        elif nonce[url] is None:
            # No nonce was obtained from this server; skip it.
            continue
        else:
            # Sign remote_key + contents + delegate address + nonce with the
            # delegate key, then POST the document to the server's put
            # endpoint as JSON.
            message = plc.remote_key + doc.contents + user.daddr + nonce[url]
            # Python 2 round-trip: force the payload to plain ASCII before
            # signing.
            message = message.decode('utf8')
            message = message.encode('ascii')
            signature = sign(user.dkey, message)
            data = {
                "key": plc.remote_key,
                "value": doc.contents,
                "nonce": nonce[url],
                "signature": signature,
                "signature_address": user.daddr,
                "owner": user.maddr,
                "testnet": rein.testnet
            }
            body = json.dumps(data)
            headers = {'Content-Type': 'application/json'}
            answer = requests.post(url='{0}put'.format(url),
                                   headers=headers,
                                   data=body)
            res = answer.json()
            if 'result' not in res or res['result'] != 'success':
                log.error('upload failed doc=%s plc=%s url=%s' %
                          (doc.id, plc.id, url))
                failed.append(doc)
            else:
                # Server accepted the document; bump the placement's
                # verified counter.
                plc.verified += 1
                rein.session.commit()
                log.info('upload succeeded doc=%s plc=%s url=%s' %
                         (doc.id, plc.id, url))
                click.echo('uploaded %s' % doc.doc_hash)
                succeeded += 1
    # Clear the nonce on every server we obtained one from.
    for url in urls:
        if nonce[url] is None:
            continue
        sel_url = url + 'nonce?address={0}&clear={1}'
        answer = requests.get(url=sel_url.format(user.maddr, nonce[url]))
        log.info('nonce cleared for %s' % (url))
    click.echo('%s docs checked on %s servers, %s uploads done.' %
               (len(check), len(urls), str(succeeded)))
def add_document(self):
    """Create and persist a tagged Document for each selected file, then close.

    For every entry in ``self.fileinfo`` a new Document is created,
    tagged with the current text of the tag input, and saved to the
    database. The dialog is closed afterwards.
    """
    # Read the tag text once -- it is loop-invariant.
    tags = self.tag_input.text()
    # Iterate the items directly instead of the range(len(...)) index loop.
    for info in self.fileinfo:
        new_doc = Document(info)
        new_doc.add(tags)
        new_doc.save_to_db()
    self.close()
def test_repr_save_load_delete():
    """
    Confirms data in data out. Builds upon data.py.
    """
    data.redis.flushdb()
    user_slug = random_slug('test-user-')
    doc_slug = random_slug('test-doc-')
    doc = Document(data)
    doc.set_host('http://example.org')
    doc.set_parts(user_slug, doc_slug, minimal_document)

    # Create: saving stores the document under a slug derived from its title.
    saved_slug = doc.save(pregenerate=True, update_doc_slug=True)
    summary = str(doc)
    assert user_slug in summary
    assert saved_slug in summary
    assert "(3 parts)" in summary
    assert saved_slug == 'example-document'
    assert data.userSet_exists(user_slug)
    assert data.userDocument_exists(user_slug, saved_slug)
    recent_slugs = [
        entry['slug'] for entry in data.userDocumentLastChanged_list(user_slug)
    ]
    assert saved_slug in recent_slugs
    assert data.userDocumentMetadata_exists(user_slug, saved_slug)
    assert data.userDocumentCache_exists(user_slug, saved_slug)
    assert data.userDocumentSet_exists(user_slug, saved_slug)

    # Rename: a new index title moves every record to a new slug.
    doc.set_index(
        trim("""
            New Example Document

            Text Goes Here!
            """))
    saved_slug = doc.save(pregenerate=True, update_doc_slug=True)
    assert saved_slug == "new-example-document"
    assert not data.userDocumentSet_exists(user_slug, doc_slug)
    assert not data.userDocument_exists(user_slug, doc_slug)
    assert not data.userDocumentMetadata_exists(user_slug, doc_slug)
    assert not data.userDocumentCache_exists(user_slug, doc_slug)
    latest_metadata = data.userDocumentLastChanged_list(user_slug)
    assert all(entry.get('slug') != doc_slug for entry in latest_metadata)
    assert any(entry.get('slug') == saved_slug for entry in latest_metadata)
    assert data.userDocumentSet_exists(user_slug, saved_slug)
    assert data.userDocument_exists(user_slug, saved_slug)
    assert data.userDocumentMetadata_exists(user_slug, saved_slug)
    assert data.userDocumentCache_exists(user_slug, saved_slug)

    # Round-trip: a fresh Document loaded by slug matches the saved one.
    doc2 = Document(data)
    doc2.load(user_slug, saved_slug)
    assert doc.user_slug == doc2.user_slug
    assert doc.doc_slug == doc2.doc_slug
    assert doc.parts == doc2.parts

    # Delete: every per-document record disappears.
    doc.delete()
    assert not data.userDocument_exists(user_slug, saved_slug)
    assert not data.userDocumentSet_exists(user_slug, saved_slug)
    assert not data.userDocumentMetadata_exists(user_slug, saved_slug)
    assert all(
        entry.get('slug') != saved_slug
        for entry in data.userDocumentLastChanged_list(user_slug))
    assert not data.userDocumentCache_exists(user_slug, saved_slug)
def _test():
    """Test the custom model in an ad-hoc GUI window"""
    from lib.layer import PaintingLayer, LayerStack
    doc_model = Document()
    root = doc_model.layer_stack
    root.clear()
    # (insertion path, layer) pairs describing a small test tree:
    # top-level stacks and painting layers, with stacks nested up to
    # three levels deep (e.g. path (1, 2, 3)).
    layer_info = [
        ((0, ), LayerStack(name="Layer 0")),
        ((0, 0), PaintingLayer(name="Layer 0:0")),
        ((0, 1), PaintingLayer(name="Layer 0:1")),
        ((0, 2), LayerStack(name="Layer 0:2")),
        ((0, 2, 0), PaintingLayer(name="Layer 0:2:0")),
        ((0, 2, 1), PaintingLayer(name="Layer 0:2:1")),
        ((0, 3), PaintingLayer(name="Layer 0:3")),
        ((1, ), LayerStack(name="Layer 1")),
        ((1, 0), PaintingLayer(name="Layer 1:0")),
        ((1, 1), PaintingLayer(name="Layer 1:1")),
        ((1, 2), LayerStack(name="Layer 1:2")),
        ((1, 2, 0), PaintingLayer(name="Layer 1:2:0")),
        ((1, 2, 1), PaintingLayer(name="Layer 1:2:1")),
        ((1, 2, 2), PaintingLayer(name="Layer 1:2:2")),
        ((1, 2, 3), PaintingLayer(name="Layer 1:2:3")),
        ((1, 3), PaintingLayer(name="Layer 1:3")),
        ((1, 4), PaintingLayer(name="Layer 1:4")),
        ((1, 5), PaintingLayer(name="Layer 1:5")),
        ((1, 6), PaintingLayer(name="Layer 1:6")),
        ((2, ), PaintingLayer(name="Layer 2")),
        ((3, ), PaintingLayer(name="Layer 3")),
        ((4, ), PaintingLayer(name="Layer 4")),
        ((5, ), PaintingLayer(name="Layer 5")),
        ((6, ), LayerStack(name="Layer 6")),
        ((6, 0), PaintingLayer(name="Layer 6:0")),
        ((6, 1), PaintingLayer(name="Layer 6:1")),
        ((6, 2), PaintingLayer(name="Layer 6:2")),
        ((6, 3), PaintingLayer(name="Layer 6:3")),
        ((6, 4), PaintingLayer(name="Layer 6:4")),
        ((6, 5), PaintingLayer(name="Layer 6:5")),
        ((7, ), PaintingLayer(name="Layer 7")),
    ]
    # Populate the stack, then select "Layer 4" as the current layer.
    for path, layer in layer_info:
        root.deepinsert(path, layer)
    root.set_current_path([4])
    # Make the in-tree icons visible to GTK's icon lookup.
    icon_theme = Gtk.IconTheme.get_default()
    icon_theme.append_search_path("./desktop/icons")
    # Show the tree view inside a scrolled window in a throwaway toplevel.
    view = RootStackTreeView(doc_model)
    view_scroll = Gtk.ScrolledWindow()
    view_scroll.set_shadow_type(Gtk.ShadowType.ETCHED_IN)
    scroll_pol = Gtk.PolicyType.AUTOMATIC
    view_scroll.set_policy(scroll_pol, scroll_pol)
    view_scroll.add(view)
    view_scroll.set_size_request(-1, 100)
    win = Gtk.Window()
    # `unicode` is the Python 2 builtin; this module targets Python 2.
    win.set_title(unicode(__package__))
    win.connect("destroy", Gtk.main_quit)
    win.add(view_scroll)
    win.set_default_size(300, 500)
    win.show_all()
    # Blocks until the window is destroyed.
    Gtk.main()
def main(): clause2method = {'insert':'POST', 'update':'PUT', 'query':'GET', 'delete':'DELETE'} combined_sql_list = [] # eventually is written to a file # [Generate API sql file] print('Generate API') print(' - load api configuration, generate funcPattern key and values') # get configuration file name {folder: "", name: ""} # get list of files of type .json in folder ./config # [Use a configuration file] config_folder = '{}'.format(os.getcwd().replace('_tasks','config')) # [Select API Source ] sourceConfiguration = open_api(config_folder,file_type="source") if not sourceConfiguration: print('cancel') exit(0) # [Select API Target ] targetConfiguration = open_api(config_folder,file_type="target") if not targetConfiguration: print('cancel') exit(0) # [Merge Source and Target] sourceConfiguration.update(targetConfiguration) apiConfiguration = sourceConfiguration # setup default environment homeDev = HomeDevelopment().setup() # [Create missing folders] sourceDev = HomeDevelopment().setup() targetDev = HomeDevelopment().setup() # [Scan configuration for home, source, and target environment configurations] for apiName in apiConfiguration: if apiName == 'source': # [Configure input sources from GIT repositories] sourceDev = get_environment(apiConfiguration[apiName]) elif apiName == 'target': # [Configure output targets from GIT repositories] targetDev = get_environment(apiConfiguration[apiName]) print('=================') report(homeDev, sourceDev, targetDev) print('=================') print('=================') print('Base Tests Combined') print('=================') #fileList = Util().getFileList(sourceDev.getDbFolder('sql'),'static.sql') fileList = Util().getFileList(sourceDev.getFolder('scripts'),'base.test.sql') fileList.sort() # [Replace project specific values] replace_ = ['one_db', 'hapi-api'] replace_with = [targetDev.getName('db'), targetDev.getName('db_api')] targetName = '90.base.test.sql' combinedDocument = Document(targetDev.getFolder('scripts'), targetName) # 
[Backup the target file] backup = Util().makeBackupFile(targetDev.getFolder('scripts'), targetName) print(' - backup : {}'.format(backup)) # [Move Static Files to Target Folder] for fileName in fileList: # [Combine base tests into one file] # [Copy from a source folder to a target folder] staticDocument = Document(sourceDev.getFolder('scripts'), fileName) \ .load() \ .replace(replace_, replace_with) combinedDocument.extend(staticDocument) combinedDocument.save() print('=================') print('API Tests') print('=================') apiNameList = [nm for nm in apiConfiguration if apiConfiguration[nm]['kind'] == 'api-definition' or apiConfiguration[nm]['kind'] == 'api-static'] # [Move Static Files to Target Folder] #targetNames = [] #sourceNames = [] names = [] src_folder = sourceDev.getFolder('scripts') tmpl_folder = '../templates' trg_folder = targetDev.getFolder('scripts') clauses = ['insert','query','update','delete'] for apiName in apiNameList: schema = apiConfiguration[apiName]['schema'].replace('_','.') prefix = apiConfiguration[apiName]['prefix-test'] # make list of existing test files names = [{"source":'{}.test.{}.{}.{}.api.test.sql'.format(prefix,schema, apiName.upper(), clause), "target":'{}.{}.{}.api.test.sql'.format(prefix,apiName.upper(), clause), "template": '{}.test.sql.template'.format(clause), "clause": '{}'.format(clause), "method": '{}'.format(clause2method[clause])} for clause in clauses] names = [pair for pair in names if Util().file_exists(src_folder, pair['source'])] # for pair in names: backup = None #print('pair', pair, 'kind', apiConfiguration[apiName]['kind']) if apiConfiguration[apiName]['kind'] == 'api-definition': # handle backup if Util().file_exists(trg_folder, pair['target']): backup = Util().makeBackupFile(targetDev.getFolder('scripts'), pair['target']) # Use Template if Util().file_exists(tmpl_folder, pair['template']): # Use Template # Use Template print(' - generate test {} FROM {}'.format(pair['target'], pair['template'])) if 
backup: print(' -- backup ', backup) templateDoc = TestTemplate(apiName, pair['method'], pair['template']) \ .apply(apiConfiguration)\ .saveAs(trg_folder, pair['target']) elif Util().file_exists(src_folder, pair['source']): # Copy from existing file # Copy from source print(' - copy test {} FROM {} '.format(pair['target'],pair['source'])) # load # replace # save doc = Document(src_folder, pair['source']) \ .load() \ .replace(replace_, replace_with)\ .saveAs(trg_folder,pair['target']) else: # No test available print('No Test Available for ', pair['source']) elif apiConfiguration[apiName]['kind'] == 'api-static': #print('template', tmpl_folder) print(' - copy test for api-static ', pair['source']) # [Backup the target file] #print(' - backup : {}'.format(fileName)) apiNameList = [nm for nm in apiConfiguration if apiConfiguration[nm]['kind'] == 'api-static'] # [Move Static Files to Target Folder] '''