def execute(my):
    my.buffer = cStringIO.StringIO()

    try:
        try:
            # clear the main container for this thread
            Container.create()

            # clear the buffer
            WebContainer.clear_buffer()

            # initialize the web environment object and register it
            adapter = my.get_adapter()
            WebContainer.set_web(adapter)

            # get the display
            my._get_display()

        except SetupException, e:
            '''Display setup exception in the interface'''
            print "Setup exception: ", e.__str__()
            DbContainer.rollback_all()
            ExceptionLog.log(e)
            my.writeln("<h3>Tactic Setup Error</h3>")
            my.writeln("<pre>")
            my.writeln(e.__str__())
            my.writeln("</pre>")

        except DatabaseException, e:
            from tactic.ui.startup import DbConfigPanelWdg
            config_wdg = DbConfigPanelWdg()
            my.writeln("<pre>")
            my.writeln(config_wdg.get_buffer_display())
            my.writeln("</pre>")

def __init__(my, port=''):
    # It is possible on startup that the database is not running.
    from pyasm.search import DbContainer, DatabaseException, Sql
    try:
        sql = DbContainer.get("sthpw")

        if sql.get_database_type() != "MongoDb":
            # before batch, clean up the ticket with a NULL code
            if os.getenv('TACTIC_MODE') != 'production':
                sql.do_update('DELETE from "ticket" where "code" is NULL;')
            else:
                start_port = Config.get_value("services", "start_port")
                if start_port:
                    start_port = int(start_port)
                else:
                    start_port = 8081
                if port and int(port) == start_port:
                    sql.do_update('DELETE from "ticket" where "code" is NULL;')

    except DatabaseException, e:
        # TODO: need to work on this
        print "ERROR: could not connect to [sthpw] database"
        #os.environ["TACTIC_CONFIG_PATH"] = Config.get_default_config_path()
        #Sql.set_default_vendor("Sqlite")

        Config.set_tmp_config()
        Config.reload_config()

        # try connecting again
        try:
            sql = DbContainer.get("sthpw")
        except:
            print "Could not connect to the database."
            raise

def __init__(self, project_code=None, login_code=None, site=None):
    self.set_app_server("batch")

    if not site:
        # if not explicitly set, keep the current site
        site = Site.get_site()

    plugin_dir = Environment.get_plugin_dir()
    if plugin_dir not in sys.path:
        sys.path.insert(0, plugin_dir)

    super(Batch, self).__init__()

    self.login_code = login_code

    # clear the main container
    Container.create()

    if site:
        Site.set_site(site)

    # set this as the environment
    if not project_code:
        self.context = self.get_default_context()
    else:
        self.context = project_code

    Environment.set_env_object(self)

    # set up the security object
    security = Security()
    Environment.set_security(security)

    self._do_login()
    site_dir = Environment.get_site_dir()
    if site_dir not in sys.path:
        sys.path.insert(0, site_dir)

    # set the project
    from pyasm.biz import Project
    if self.context == "batch":
        Project.set_project("admin")
    else:
        Project.set_project(self.context)

    self.initialize_python_path()

    # start workflow engine
    #from pyasm.command import Workflow
    #Workflow().init()

    DbContainer.commit_thread_sql()

def start():
    scheduler = Scheduler.get()
    scheduler.start_thread()

    task = TransactionQueueManager(
        #check_interval=0.1,
        max_jobs_completed=20
    )
    task.cleanup_db_jobs()
    scheduler.add_single_task(task, mode='threaded', delay=1)

    # important to close connection after adding tasks
    DbContainer.close_all()

def execute(self):
    start = time.time()
    #Batch()
    #print "refresh caches ..."

    caches = CacheContainer.get_all_caches()
    for key, cache in caches.items():
        #print "... %s" % key, cache
        cache.init_cache()

    #print "... %s seconds" % (time.time() - start)

    DbContainer.commit_thread_sql()
    DbContainer.close_all()

def start(cls):
    task = WatchServerFolderTask()
    scheduler = Scheduler.get()
    scheduler.add_single_task(task, 3)
    #scheduler.add_interval_task(task, 1)
    scheduler.start_thread()

    # important to close connection after adding tasks
    DbContainer.close_all()

    return scheduler

def _test_time(my):
    """test timezone related behavior"""
    sobject = SearchType.create("sthpw/task")
    sobject.set_value("project_code", "unittest")
    sobject.set_value("bid_start_date", "2014-11-11 05:00:00")
    time = sobject.get_value("bid_start_date")
    my.assertEquals(time, "2014-11-11 05:00:00")

    sobject.commit()
    time = sobject.get_value("bid_start_date")
    my.assertEquals(time, "2014-11-11 05:00:00")

    from pyasm.search import DbContainer
    sql = DbContainer.get("sthpw")
    db_value = sql.do_query("SELECT bid_start_date from task where id = %s" % sobject.get_id())

    # 2014-11-11 00:00:00 is actually written to the database
    my.assertEquals(db_value[0][0].strftime("%Y-%m-%d %H:%M:%S %Z"), "2014-11-11 00:00:00 ")

    # an sType specified without a project but with an id could be a common human error,
    # but it should handle that fine
    obj1 = Search.eval('@SOBJECT(unittest/person?project=unittest["id", "%s"])' % sobject.get_id(), single=True)
    obj2 = Search.eval('@SOBJECT(unittest/person?id=2["id", "%s"])' % sobject.get_id(), single=True)
    obj3 = Search.eval('@SOBJECT(sthpw/task?id=2["id", "%s"])' % sobject.get_id(), single=True)

    task = Search.eval('@SOBJECT(sthpw/task["id", "%s"])' % sobject.get_id(), single=True)

    # EST and GMT diff is 5 hours
    my.assertEquals(task.get_value("bid_start_date"), "2014-11-11 05:00:00")

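# A standalone sketch of the 5-hour offset the assertions above rely on
# (pure standard library; "displayed" and "stored" simply mirror the values
# used in the test, so this is an illustration rather than part of it).
from datetime import datetime, timedelta

displayed = datetime(2014, 11, 11, 5, 0, 0)   # value set and returned through the API
stored = displayed - timedelta(hours=5)       # value the test expects to find in the database
assert stored.strftime("%Y-%m-%d %H:%M:%S") == "2014-11-11 00:00:00"
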
def execute(self):
    if not self.login_name:
        self.login_name = self.kwargs.get('login')

    # invalidate the ticket
    security = Environment.get_security()
    ticket = security.get_ticket()
    if ticket == None:
        return

    login_name = ticket.get_value("login")
    print "Signing out: ", login_name

    # expire the ticket
    from pyasm.security import Site
    site = Site.get()
    if site:
        Site.set_site("default")

    try:
        from pyasm.search import Sql, DbContainer
        sql = DbContainer.get("sthpw")
        ticket.set_value("expiry", sql.get_timestamp_now(), quoted=False)
        ticket.commit()
    except:
        if site:
            Site.pop_site()

def get_tables_wdg(my):
    div = DivWdg()
    div.set_name("Tables")

    div.add("In order to fully register a database, you must bind it to a TACTIC project")
    div.add("<br/>")

    project_code = "mongodb"
    database = "test_database"
    db_resource = DbResource(server='localhost', vendor='MongoDb', database=database)
    try:
        connect = DbContainer.get(db_resource)
    except Exception, e:
        div.add("Could not connect")
        div.add_style("padding: 30px")
        div.add("<br/>"*2)
        div.add(str(e))
        return div

def has_table(my, search_type):
    if isinstance(search_type, basestring):
        search_type = SearchType.get(search_type)

    # in search type, database == project
    project_code = search_type.get_project_code()

    # get the db_resource for this project
    db_resource = my.get_project_db_resource()

    # get the table
    table = search_type.get_table()
    if not table:
        return False

    try:
        # looking up a database's tables other than the current one
        sql = DbContainer.get(db_resource)
        tables = sql.get_tables()
        has_table = table in tables
    except Exception, e:
        print "WARNING: in Project.has_table(): table [%s] not found" % table
        print "Message: ", e
        has_table = False

    return has_table

def fix_notification_login_id():
    db = DbContainer.get("sthpw")
    sql = '''
    BEGIN TRANSACTION;
    CREATE TABLE t_backup (
        id integer PRIMARY KEY AUTOINCREMENT,
        notification_log_id integer,
        "login" character varying(256),
        "type" character varying(256),
        project_code character varying(256),
        "timestamp" timestamp without time zone DEFAULT CURRENT_TIMESTAMP
    );
    INSERT INTO t_backup
        SELECT id, notification_log_id, "login", "type", project_code, "timestamp"
        FROM %(table)s;
    DROP TABLE %(table)s;
    ALTER TABLE t_backup RENAME TO %(table)s;
    COMMIT;
    ''' % {"table": "notification_login"}

    conn = db.conn
    conn.executescript(sql)

def get_columns(cls, db_resource, table):
    from pyasm.search import DbResource, DbContainer
    sql = DbContainer.get(db_resource)
    conn = sql.get_connection()
    collection = conn.get_collection(table)

    # FIXME:
    # This just gets the first one to discover the columns.  This is
    # not accurate because each item in a collection can contain
    # different "attributes".  The key here is to define a location
    # for where this "schema" description is stored
    result = collection.find_one()
    if not result:
        return ['_id']
    else:
        columns = result.keys()

        # assume existence of both code and _id
        #if "code" in columns:
        #    columns.remove("code")
        #columns.insert(0, "code")

        if "_id" in columns:
            columns.remove("_id")
        columns.insert(0, "_id")

        return columns

def delete(my, log=False):
    column = my.get_value("name")
    search_type = my.get_value("search_type")

    search_type_obj = SearchType.get(search_type)
    table = search_type_obj.get_table()
    database = search_type_obj.get_database()

    # remove it from the table
    if log:
        AlterTableUndo.log_drop(database, table, column)

    sql = DbContainer.get(database)
    try:
        from pyasm.search.sql import Sql
        if Sql.get_database_type() == 'SQLServer':
            statement = 'ALTER TABLE [%s] DROP COLUMN "%s"' % (table, column)
        else:
            statement = 'ALTER TABLE "%s" DROP COLUMN "%s"' % (table, column)
        sql.do_update(statement)
    except SqlException, e:
        print("WARNING: %s" % e)

def execute(my):
    import cherrypy
    print
    print "Stopping TACTIC ..."
    print
    print " ... stopping Scheduler"
    scheduler = Scheduler.get()
    scheduler.stop()
    print " ... stopping Cherrypy"
    cherrypy.engine.stop()
    cherrypy.engine.exit()
    print " ... closing DB connections"
    DbContainer.close_all_global_connections()
    print " ... kill current process"
    Common.kill()
    print "Done."

def get_columns(my, required_only=False):
    if my.search_type == 'sthpw/virtual':
        return []

    search_type_obj = SearchType.get(my.search_type)
    table = search_type_obj.get_table()

    from pyasm.biz import Project
    db_resource = Project.get_db_resource_by_search_type(my.search_type)
    database_name = db_resource.get_database()
    db = DbContainer.get(db_resource)

    # table may not exist
    try:
        all_columns = db.get_columns(table)
        columns = []
        if required_only:
            nullables = db.get_column_nullables(table)
            for column in all_columns:
                null_ok = nullables.get(column)
                if not null_ok:
                    columns.append(column)

            # if there are no required columns
            if not columns:
                columns = all_columns
        else:
            columns = all_columns
    except SqlException:
        Environment.add_warning('missing table', 'Table [%s] does not exist in database [%s]' % (table, database_name))
        return []

    return columns

def execute(my):
    #Batch()

    dirty_caches = CacheContainer.get_dirty()
    for dirty_cache in dirty_caches:
        key = dirty_cache.get_value("key")
        #print "... caching: ", key

        cache = CacheContainer.get(key)
        if not cache:
            print "WARNING: cache [%s] does not exist in memory" % key
            continue

        cache.init_cache()

    DbContainer.commit_thread_sql()
    DbContainer.close_all()

def fix_debug_log_id():
    db = DbContainer.get("sthpw")
    sql = '''
    BEGIN TRANSACTION;
    CREATE TABLE t_backup (
        id integer PRIMARY KEY AUTOINCREMENT,
        "category" character varying(256),
        "level" character varying(256),
        "message" text,
        "timestamp" timestamp without time zone DEFAULT CURRENT_TIMESTAMP,
        "login" character varying(256),
        "s_status" character varying(30)
    );
    INSERT INTO t_backup
        SELECT "id", "category", "level", "message", "timestamp", "login", "s_status"
        FROM %(table)s;
    DROP TABLE %(table)s;
    ALTER TABLE t_backup RENAME TO %(table)s;
    COMMIT;
    ''' % {"table": "debug_log"}

    conn = db.conn
    conn.executescript(sql)

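# A generic sketch of the same SQLite "copy into t_backup, drop, rename" pattern
# used by fix_notification_login_id() and fix_debug_log_id() above, written against
# the standard sqlite3 module. The helper and its arguments are illustrative, not
# part of the original code: create_sql must create a table named t_backup.
import sqlite3

def rebuild_table(conn, table, create_sql, columns):
    # rebuild a table in place, preserving the listed columns
    script = '''
    BEGIN TRANSACTION;
    %(create)s
    INSERT INTO t_backup SELECT %(cols)s FROM %(table)s;
    DROP TABLE %(table)s;
    ALTER TABLE t_backup RENAME TO %(table)s;
    COMMIT;
    ''' % {"create": create_sql, "cols": ", ".join(columns), "table": table}
    conn.executescript(script)
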
def import_bootstrap():
    print "Importing bootstrap ..."
    vendor = "SQLServer"
    impl = DatabaseImpl.get(vendor)
    impl.create_database("sthpw")

    upgrade_dir = Environment.get_upgrade_dir()
    for category in ['bootstrap', 'sthpw', 'config']:
        f = open("%s/%s/%s_schema.sql" % (upgrade_dir, vendor.lower(), category))
        data = f.read()
        f.close()

        data = data.split(";")
        cmds = []
        for cmd in data:
            cmd = cmd.strip()
            if cmd == '':
                continue
            cmds.append(cmd)

        from pyasm.search import DbContainer
        sql = DbContainer.get("sthpw")
        for cmd in cmds:
            sql.do_update(cmd)

def __init__(my, num_processes=None):
    my.check_interval = 120
    my.num_processes = num_processes
    my.dev_mode = False

    sql = DbContainer.get("sthpw")
    # before batch, clean up the ticket with a NULL code
    sql.do_update('DELETE from "ticket" where "code" is NULL;')

def execute(my): database = "sthpw" sql = DbContainer.get(database) value_array = sql.do_query("select code, cc from (select code, count(code) as cc from file group by code order by cc desc) as X where cc > 1;") #value_array = sql.do_query("select code, cc from (select code, count(code) as cc from file group by code order by cc desc) as X;") print "found [%s] pairs" % len(value_array) for count, value_list in enumerate(value_array): if count >= BATCH: break # get the file object file_code = value_list[0] search = Search("sthpw/file") search.add_filter("code", file_code) files = search.get_sobjects() #if len(files) == 1: # continue for file in files: project_code = file.get_value("project_code") if not project_code: print "WARNING: file [%s] has no project_code" % file_code continue project = Project.get_by_code(project_code) initials = project.get_initials() id = file.get_id() new_file_code = "%s%s" % (id, initials) if file_code == new_file_code: continue print "-"*20 print "switching: ", file_code, "to", new_file_code snapshot_code = file.get_value("snapshot_code") snapshot = Snapshot.get_by_code(snapshot_code) assert snapshot snapshot_xml = snapshot.get_xml_value("snapshot") print snapshot_xml.to_string() node = snapshot_xml.get_node("snapshot/file[@file_code='%s']" % file_code) Xml.set_attribute(node, "file_code", new_file_code) print snapshot_xml.to_string() assert node # set the file_code file.set_value("code", new_file_code) file.commit() # set the snapshot snapshot.set_value("snapshot", snapshot_xml.to_string() ) snapshot.commit()
def execute(my):
    import atexit
    import time
    atexit.register(my.cleanup)

    while 1:
        my.check_existing_jobs()
        my.check_new_job()
        time.sleep(my.check_interval)

        DbContainer.close_thread_sql()

        if my.max_jobs_completed != -1 and my.jobs_completed > my.max_jobs_completed:
            Common.restart()
            while 1:
                print "Waiting to restart..."
                time.sleep(1)

def execute(self):
    import atexit
    import time
    atexit.register(self.cleanup)

    while 1:
        self.check_existing_jobs()
        self.check_new_job()
        time.sleep(self.check_interval)

        DbContainer.close_thread_sql()

        if self.max_jobs_completed != -1 and self.jobs_completed > self.max_jobs_completed:
            Common.restart()
            while 1:
                print("Waiting to restart...")
                time.sleep(1)

def __init__(my, project_code=None, login_code=None, site=None):
    my.set_app_server("batch")

    plugin_dir = Environment.get_plugin_dir()
    if plugin_dir not in sys.path:
        sys.path.insert(0, plugin_dir)

    super(Batch, my).__init__()

    my.login_code = login_code

    # clear the main container
    Container.create()

    if site:
        Site.set_site(site)

    # set this as the environment
    if not project_code:
        my.context = my.get_default_context()
    else:
        my.context = project_code

    Environment.set_env_object(my)

    # set up the security object
    security = Security()
    Environment.set_security(security)

    my._do_login()
    site_dir = Environment.get_site_dir()
    if site_dir not in sys.path:
        sys.path.insert(0, site_dir)

    # set the project
    from pyasm.biz import Project
    if my.context == "batch":
        Project.set_project("admin")
    else:
        Project.set_project(my.context)

    my.initialize_python_path()

    DbContainer.commit_thread_sql()

def execute(my):
    Batch()

    # get all of the file servers
    search = Search("sthpw/sync_server")
    search.add_filter("state", "online")
    search.add_filter("sync_mode", "file")
    my.servers = search.get_sobjects()

    my.tasks = []
    for server in my.servers:
        base_dir = server.get_value("base_dir")
        if not base_dir:
            continue

        transaction_dir = "%s/transactions" % base_dir
        ticket = server.get_value("ticket")

        task = WatchFolderTask(base_dir=transaction_dir, ticket=ticket)
        my.tasks.append(task)

    # close all the database connections
    DbContainer.close_all()

    count = 0
    while 1:
        #start = time.time()
        for task in my.tasks:
            try:
                task.execute()
            except Exception, e:
                print "WARNING: error executing task:"
                print "Reported Error: ", str(e)
        #print time.time() - start

        # catch a breather?
        time.sleep(2)
        count += 1

def get_data_type(cls, search_type, attr_type):
    search_type_obj = SearchType.get(search_type)
    db_resource = Project.get_db_resource_by_search_type(search_type)
    sql = DbContainer.get(db_resource)
    impl = sql.get_database_impl()

    # SearchType Manager and Add Widget Column use mixed upper and
    # lowercases for the following attr_type, so fix it at some point
    if not attr_type:
        attr_type = "varchar"

    if attr_type == "integer":
        data_type = impl.get_int()
    elif attr_type == "float":
        data_type = "float"
    elif attr_type == "boolean":
        data_type = impl.get_boolean()
    elif attr_type == "link":
        data_type = "text"
    elif attr_type.startswith('varchar'):
        data_type = attr_type
    elif attr_type == 'time':
        data_type = impl.get_timestamp()
    elif attr_type in ["Date", "date"]:
        data_type = impl.get_timestamp()
    elif attr_type == "Category":
        data_type = "varchar(256)"
    elif attr_type in ["text", "Text"]:
        data_type = impl.get_text()
    elif attr_type in ["Date Range", 'timestamp']:
        data_type = impl.get_timestamp()
    elif attr_type == "Checkbox":
        data_type = "varchar(256)"
    elif attr_type in ["Foreign Key", "foreign_key"]:
        data_type = "varchar(256)"
    elif attr_type in ["List", "list"]:
        data_type = "varchar(512)"
    elif attr_type == "Name/Code":
        data_type = "varchar(256)"
    elif attr_type == "Number":
        data_type = impl.get_int()
    elif attr_type in ["currency", "scientific", "percent"]:
        data_type = "float"
    elif attr_type == "timecode":
        data_type = impl.get_int()
    else:
        #data_type = "varchar(256)"
        data_type = impl.get_varchar()

    return data_type

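# Hypothetical usage of the mapping above ("ColumnCmd" stands in for whichever
# command class defines get_data_type(); the vendor-specific results come from
# the project's DatabaseImpl, so the strings shown are typical, not guaranteed):
#
#   ColumnCmd.get_data_type("unittest/person", "Number")      # -> impl.get_int(), e.g. "integer"
#   ColumnCmd.get_data_type("unittest/person", "List")        # -> "varchar(512)"
#   ColumnCmd.get_data_type("unittest/person", "Date Range")  # -> impl.get_timestamp()
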
def __init__(my, num_processes=None):
    my.check_interval = 120
    my.num_processes = num_processes
    my.dev_mode = False

    import sys
    plugin_dir = Environment.get_plugin_dir()
    sys.path.insert(0, plugin_dir)

    sql = DbContainer.get("sthpw")
    # before batch, clean up the ticket with a NULL code
    sql.do_update('DELETE from "ticket" where "code" is NULL;')

def get_table_info(self, database):
    from pyasm.search import DbResource, DbContainer
    sql = DbContainer.get(database)
    conn = sql.get_connection()
    collections = conn.collection_names()

    table_info = {}
    for collection in collections:
        table_info[collection] = {}

    return table_info

def run_sql(my, sql):
    '''Run an sql statement.  my is an instance of the dynamically created
    <project_type>Upgrade class.  If a SqlException arises, the error is
    recorded, and the user is advised to check whether it is a syntax error
    or the upgrade function doing redundant work.'''
    project = Project.get_by_code(my.project_code)
    db_resource = project.get_project_db_resource()
    db = DbContainer.get(db_resource)
    #if not my.quiet:
    #    print sql
    try:
        db.do_update(sql, quiet=my.quiet)
    except SqlException, e:
        print "Error: ", e

        # TEST for Sqlite
        if str(e).startswith("duplicate column name:"):
            pass
        elif str(e).startswith("table") and str(e).endswith("already exists"):
            pass
        elif not my.quiet:
            print
            print "WARNING: Skipping due to SqlException..."
            print "Message: ", e
            print

        members = inspect.getmembers(my, predicate=inspect.ismethod)

        key = '%s|%s' % (my.project_code, my.upgrade_class)
        Container.append_seq(key, (my.upgrade_method, str(e)))
        """
        for name, member in members:
            # there should only be 1 upgrade method
            if name.startswith('upgrade_v'):
                Container.append_seq(key, (my.upgrade_method, str(e)))
                break
        """

        # to prevent sql error affecting query that follows the Upgrade
        #DbContainer.abort_thread_sql()
        DbContainer.release_thread_sql()

def execute(self):
    # check to see the status of this job
    """
    job = self.kwargs.get('job')
    job_code = job.get_code()
    search = Search("sthpw/queue")
    search.add_filter("code", job_code)
    self.kwargs['job'] = search.get_sobject()

    if not job:
        print("Cancelling ...")
        return

    state = job.get_value("state")
    if state == "cancel":
        print("Cancelling 2 ....")
        return
    """

    subprocess_kwargs = {
        'login': login,
        'project_code': project_code,
        'command': command,
        'kwargs': kwargs
    }
    subprocess_kwargs_str = jsondumps(subprocess_kwargs)

    install_dir = Environment.get_install_dir()
    python = Config.get_value("services", "python")
    if not python:
        python = 'python'

    args = ['%s' % python, '%s/src/tactic/command/queue.py' % install_dir]
    args.append(subprocess_kwargs_str)

    import subprocess
    p = subprocess.Popen(args)

    DbContainer.close_thread_sql()

    return

def execute(self):
    # wait until KillThread is permitted
    while GlobalContainer.get("KillThreadCmd:allow") == "false":
        print "Kill locked ... waiting 5 seconds"
        time.sleep(5)
        continue

    import cherrypy
    print
    print "Stopping TACTIC ..."
    print
    print " ... stopping Scheduler"
    scheduler = Scheduler.get()
    scheduler.stop()
    print " ... stopping Cherrypy"
    cherrypy.engine.stop()
    cherrypy.engine.exit()
    print " ... closing DB connections"
    DbContainer.close_all_global_connections()
    print " ... kill current process"
    Common.kill()
    print "Done."

def get_display(my):
    widget = DivWdg()
    widget.add_style("text-align: center")

    search_type = my.get_current_sobject()
    project_code = search_type.get_value("database")
    if project_code == "{project}":
        # HACK: assumes database == project_code.  Can't seem to get
        # around the {project} variable ... need to look at this
        # sometime
        project = Project.get()
    else:
        project = Project.get_by_code(project_code)

    if not project:
        widget.add(IconWdg("Exists", IconWdg.ERROR))
        widget.add("Project does not exist")
        return widget

    table = search_type.get_table()
    if not table:
        return ""

    try:
        db_resource = project.get_project_db_resource()
        sql = DbContainer.get(db_resource)
        tables = sql.get_tables()
        has_table = table in tables
    except DatabaseException:
        has_table = False

    if has_table:
        widget.add(IconWdg("Exists", IconWdg.DOT_GREEN))
    else:
        widget.add(IconWdg("Does not Exist", IconWdg.DOT_RED))

    return widget

def import_bootstrap():
    print "Importing bootstrap ..."
    vendor = "PostgreSQL"

    plugin_dir = Environment.get_plugin_dir()
    sys.path.insert(0, plugin_dir)

    impl = DatabaseImpl.get(vendor)
    impl.create_database("sthpw")

    upgrade_dir = Environment.get_upgrade_dir()
    for category in ['bootstrap', 'sthpw', 'config']:
        f = open("%s/%s/%s_schema.sql" % (upgrade_dir, vendor.lower(), category))
        data = f.read()
        f.close()

        data = data.split(";")
        cmds = []
        for cmd in data:
            lines = cmd.split("\n")
            lines2 = []
            for line in lines:
                if line.startswith("--"):
                    continue
                lines2.append(line)
            cmd = "\n".join(lines2)

            cmd = cmd.strip()
            if cmd == '':
                continue
            cmds.append(cmd)

        from pyasm.search import DbContainer
        sql = DbContainer.get("sthpw")
        for cmd in cmds:
            sql.do_update(cmd)

def fix_work_hour_id():
    db = DbContainer.get("sthpw")
    sql = '''
    BEGIN TRANSACTION;
    CREATE TABLE t_backup (
        id integer PRIMARY KEY AUTOINCREMENT,
        code character varying(256),
        project_code character varying(256),
        description text,
        category character varying(256),
        process character varying(256),
        "login" character varying(256),
        "day" timestamp without time zone,
        start_time timestamp without time zone,
        end_time timestamp without time zone,
        straight_time double precision,
        over_time double precision,
        search_type character varying(256),
        search_id integer,
        status character varying(256),
        task_code character varying(256),
        CONSTRAINT "work_hour_code_idx" UNIQUE (code)
    );
    INSERT INTO t_backup
        SELECT id, code, project_code, description, category, process, "login", "day",
               start_time, end_time, straight_time, over_time, search_type, search_id,
               status, task_code
        FROM %(table)s;
    DROP TABLE %(table)s;
    ALTER TABLE t_backup RENAME TO %(table)s;
    COMMIT;
    ''' % {"table": "work_hour"}

    conn = db.conn
    conn.executescript(sql)

def execute(self):
    search_type_obj = SearchType.get(self.search_type)

    db_resource = Project.get_db_resource_by_search_type(self.search_type)
    sql = DbContainer.get(db_resource)
    impl = sql.get_database_impl()

    data_type = self.get_data_type(self.search_type, self.attr_type)

    # if there is no type, then no column is created for widget_config
    if self.attr_type == "Date Range":
        column1 = "%s_start_date" % self.attr_name
        column2 = "%s_end_date" % self.attr_name
        self._add_column(column1, data_type)
        self._add_column(column2, data_type)
    elif self.attr_type != "":
        self._add_column(self.attr_name, data_type)

    self.add_description("Added attribute '%s' of type '%s'" % (self.attr_name, self.attr_type))

def get_columns(self, required_only=False):
    if self.search_type == 'sthpw/virtual':
        return []

    search_type_obj = SearchType.get(self.search_type)
    table = search_type_obj.get_table()

    from pyasm.biz import Project
    db_resource = Project.get_db_resource_by_search_type(self.search_type)
    database_name = db_resource.get_database()
    db = DbContainer.get(db_resource)

    # table may not exist
    try:
        all_columns = db.get_columns(table)
        columns = []
        if required_only:
            nullables = db.get_column_nullables(table)
            for column in all_columns:
                null_ok = nullables.get(column)
                if not null_ok:
                    columns.append(column)

            # if there are no required columns
            if not columns:
                columns = all_columns
        else:
            columns = all_columns
    except SqlException:
        Environment.add_warning('missing table', 'Table [%s] does not exist in database [%s]' % (table, database_name))
        return []

    return columns

def get_sobjects(my):
    columns = ['product_name', 'sample_size', 'labor_average', 'material_average_cost', 'average_time_taken']
    sql = '''
    SELECT
        "product_name",
        "number_of_each_product_type" "sample_size",
        "average_time_taken" "labor_average",
        sum("material_cost_per_item") "material_average_cost",
        sum("vendor_cost_per_item") "vender_average_cost"
    FROM (
        SELECT
            "product_name",
            "subtask"."id" "subtask_id",
            "number_of_each_product_type",
            "average_time_taken",
            "product_quantity",
            --"total_material_cost",
            "total_vendor_costs",
            "total_material_cost" / "product_quantity" "material_cost_per_item",
            "total_vendor_costs" / "product_quantity" "vendor_cost_per_item"
        FROM "product_type"
        LEFT OUTER JOIN "product_type_aggrgt"
            ON "product_type"."id" = "product_type_aggrgt"."product_type_id"
        LEFT OUTER JOIN "subtask_product"
            ON "product_type"."id" = "subtask_product"."product_type_id"
        LEFT OUTER JOIN "subtask"
            ON "subtask"."id" = "subtask_product"."subtask_id"
        LEFT OUTER JOIN "subtask_material_aggrgt"
            ON "subtask"."id" = "subtask_material_aggrgt"."subtask_id"
        LEFT OUTER JOIN "subtask_vndrcost_aggrgt"
            ON "subtask"."id" = "subtask_vndrcost_aggrgt"."subtask_id"
    ) X
    group by X."product_name", X."number_of_each_product_type", X."average_time_taken", X."product_quantity", "subtask_id"
    order by X."product_name"
    '''
    db = DbContainer.get("MMS")

    from pyasm.search import Select, Search
    search_type = my.kwargs.get('search_type')
    select = Select()
    select.set_statement(sql)
    statement = select.get_statement()

    search = Search(search_type)
    search.select = select

    sobjects = search.get_sobjects()
    for sobject in sobjects:
        sobject.set_value("id", -1)

    return sobjects

def preprocess(my):
    my.is_preprocess_run = True

    if my.get_option('filename') == 'true':
        my.show_filename_flag = True
    if my.get_option('file_type') == 'true':
        my.show_file_type = True
    if my.get_option('original') == 'true':
        my.show_orig_icon = True
    if my.get_option('versionless') == 'true':
        my.show_versionless = True
    if my.get_option('latest_icon') == 'true':
        my.show_latest_icon = True

    context = my.get_option('icon_context')
    if not context and my.sobjects:
        sobject = my.sobjects[0]
        assert(sobject)
        if sobject:
            # protect against it being None
            context = sobject.get_icon_context(my.context)
    if context:
        my.context = context

    if not my.image_link_order:
        order = my.get_option('image_link_order')
        if order:
            order = order.split('|')
            my.set_image_link_order(order)

    # preselect all of the snapshots (don't do this for redirect right now)
    redirect = my.get_option("redirect")
    redirect_expr = my.get_option("redirect_expr")
    if not redirect and not redirect_expr and my.sobjects:
        if not my.sobjects[0]:
            # retired
            return

        snapshots = []
        # if it is a snapshot, there is no need to search for it again,
        # and we don't try to look for publish context icons for it, to save time
        if isinstance(my.sobjects[0], Snapshot):
            snapshots = my.sobjects
        else:
            if my.show_latest_icon:
                icon_context = None
            else:
                icon_context = my.sobjects[0].get_icon_context(my.context)

            try:
                if my.version == None:
                    my.data = Snapshot.get_by_sobjects(my.sobjects, icon_context, is_latest=True, return_dict=True)
                    # verify if we get icon for all
                    if len(my.data) < len(my.sobjects):
                        publish_data = Snapshot.get_by_sobjects(my.sobjects, my.DEFAULT_CONTEXT, is_latest=True, return_dict=True)
                        my._update_data(publish_data)
                    # verify if we get icon for all
                    if len(my.data) < len(my.sobjects):
                        publish_data = Snapshot.get_by_sobjects(my.sobjects, process=my.DEFAULT_PROCESS, is_latest=True, return_dict=True)
                        my._update_data(publish_data)
                    # verify if we get icon for all
                    if len(my.data) < len(my.sobjects):
                        publish_data = Snapshot.get_by_sobjects(my.sobjects, is_latest=True, return_dict=True)
                        my._update_data(publish_data)
                else:
                    my.data = Snapshot.get_by_sobjects(my.sobjects, icon_context, version=my.version, return_dict=True)
                    # verify if we get icon for all
                    if len(my.data) < len(my.sobjects):
                        publish_data = Snapshot.get_by_sobjects(my.sobjects, my.DEFAULT_CONTEXT, version=my.version, return_dict=True)
                        my._update_data(publish_data)
                    # verify if we get icon for all
                    if len(my.data) < len(my.sobjects):
                        publish_data = Snapshot.get_by_sobjects(my.sobjects, process=my.DEFAULT_PROCESS, version=my.version, return_dict=True)
                        my._update_data(publish_data)
                    # verify if we get icon for all
                    if len(my.data) < len(my.sobjects):
                        publish_data = Snapshot.get_by_sobjects(my.sobjects, version=my.version, return_dict=True)
                        my._update_data(publish_data)
            except SqlException, e:
                my.SQL_ERROR = True
                DbContainer.abort_thread_sql()
                return

            snapshots = my.data.values()

        # get all of the file objects
        file_objects = File.get_by_snapshots(snapshots)
        for file_object in file_objects:
            file_code = file_object.get_code()
            my.file_objects[file_code] = file_object

def execute(self):
    project_code = self.kwargs.get('project_code')
    project_title = self.kwargs.get('project_title')
    project_type = self.kwargs.get('project_type')
    project_description = self.kwargs.get("description")
    if not project_type:
        project_type = "simple"
    is_template = self.kwargs.get('is_template')
    project_theme = self.kwargs.get('project_theme')

    use_default_side_bar = self.kwargs.get('use_default_side_bar')
    if use_default_side_bar in [False, 'false']:
        use_default_side_bar = False
    else:
        use_default_side_bar = True

    assert project_code
    assert project_type

    if project_type:
        # check to see if it exists
        search = Search("sthpw/project_type")
        search.add_filter("code", project_type)
        project_type_sobj = search.get_sobject()
        if not project_type_sobj:
            # just create a default one in this case if it is named
            # after the project code
            if not is_template and project_type == project_code:
                project_type = 'default'

            # create a new project type
            search = Search("sthpw/project_type")
            search.add_filter("code", project_type)
            project_type_sobj = search.get_sobject()
            if not project_type_sobj:
                project_type_sobj = SearchType.create("sthpw/project_type")
                project_type_sobj.set_value("code", project_type)
                project_type_sobj.set_value("type", "simple")
                project_type_sobj.commit()

    # set the current project to Admin
    Project.set_project("admin")

    # create a new project sobject
    project = SearchType.create("sthpw/project")
    project.set_value("code", project_code)
    project.set_value("title", project_title)
    project.set_value("type", project_type)
    if project_description:
        project.set_value("description", project_description)

    # set the update of the database to current (this is obsolete)
    #project.set_value("last_db_update", "now()")
    project.set_value("last_version_update", "2.5.0.v01")

    if is_template in ['true', True, 'True']:
        project.set_value("is_template", True)
    else:
        project.set_value("is_template", False)

    if project_type != "default":
        category = Common.get_display_title(project_type)
        project.set_value("category", category)

    project.commit()

    # if there is an image, check it in
    upload_path = self.kwargs.get("project_image_path")
    if upload_path:
        if not os.path.exists(upload_path):
            raise TacticException("Cannot find upload image for project [%s]" % upload_path)
        file_type = 'main'

        file_paths = [upload_path]
        file_types = [file_type]

        source_paths = [upload_path]
        from pyasm.biz import IconCreator
        if os.path.isfile(upload_path):
            icon_creator = IconCreator(upload_path)
            icon_creator.execute()

            web_path = icon_creator.get_web_path()
            icon_path = icon_creator.get_icon_path()
            if web_path:
                file_paths = [upload_path, web_path, icon_path]
                file_types = [file_type, 'web', 'icon']

        from pyasm.checkin import FileCheckin
        checkin = FileCheckin(project, context='icon', file_paths=file_paths, file_types=file_types)
        checkin.execute()

    # find project's base_type
    base_type = project.get_base_type()
    if not base_type and project_type == 'unittest':
        base_type = 'unittest'
    elif not base_type:
        base_type = 'simple'

    # get the database for this project
    db_resource = project.get_project_db_resource()
    database = db_resource.get_database_impl()
    #database = DatabaseImpl.get()
    database_type = database.get_database_type()
    if database_type == 'Oracle':
        raise TacticException("Creation of project is not supported. Please create manually")

    # creating project database
    print "Creating database '%s' ..." % project_code
    try:
        # create the database
        database.create_database(db_resource)
    except Exception as e:
        print str(e)
        print "WARNING: Error creating database [%s]" % project_code

    # import the appropriate schema with config first
    database.import_schema(db_resource, base_type)

    self.create_schema(project_code)

    # before we upgrade, we have to commit the transaction.
    # This is because the upgrade actually runs as separate processes,
    # so if no commit has been made, the tables from importing the
    # schema will not have existed yet
    DbContainer.commit_thread_sql()

    self.upgrade()

    # import the appropriate data
    database.import_default_data(db_resource, base_type)

    # import default links
    if use_default_side_bar:
        self.import_default_side_bar()

    # create specified stypes
    self.create_search_types()

    # create theme
    if project_theme:
        self.create_theme(project_theme)

    # set as main project
    is_main_project = self.kwargs.get("is_main_project")
    if is_main_project in [True, 'true', 'on']:
        Config.set_value("install", "default_project", project_code)
        Config.save_config()
        Config.reload_config()

    # initiate the DbContainer
    DbContainer.get('sthpw')

    self.info['result'] = "Finished creating project [%s]." % project_code

    print "Done."

def clear_search_data(search_type, view=None):
    DbContainer.abort_thread_sql(force=True)

    key = SearchWdg._get_key(search_type, view)
    WidgetSettings.set_value_by_key(key, '')

def run(my):
    import time
    time.sleep(6)

    #print "Starting Timed Trigger"

    # checks are done every 60 seconds
    chunk = 60

    # FIXME: not sure why we have to do a batch here
    from pyasm.security import Batch
    Batch(login_code="admin")

    # get all of the timed triggers
    #search = Search("sthpw/timed_trigger")
    #search.add_filter("type", "timed")
    search = Search("sthpw/trigger")
    search.add_filter("event", "timed")
    timed_trigger_sobjs = search.get_sobjects()

    timed_triggers = []
    for trigger_sobj in timed_trigger_sobjs:
        trigger_class = trigger_sobj.get_value("class_name")
        try:
            timed_trigger = Common.create_from_class_path(trigger_class)
        except ImportError:
            raise Exception("WARNING: [%s] does not exist" % trigger_class)
        timed_triggers.append(timed_trigger)

    while 1:
        time.sleep(chunk)
        #print "Running timer"
        date = Date()
        #print "utc: ", date.get_display_time()

        # go through each trigger
        for timed_trigger in timed_triggers:
            print timed_trigger
            if not timed_trigger.is_ready():
                print "... not ready"
                continue

            if timed_trigger.is_in_separate_thread():
                class xxx(threading.Thread):
                    def run(my):
                        try:
                            Batch()
                            timed_trigger._do_execute()
                        finally:
                            DbContainer.close_thread_sql()
                xxx().start()
            else:
                timed_trigger._do_execute()

        DbContainer.close_thread_sql()

        if my.end:
            print "Stopping timed thread"
            break

def get_sql(my):
    db_resource = my.get_project_db_resource()

    from pyasm.search import DbContainer
    sql = DbContainer.get(db_resource)
    return sql

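# A minimal sketch of the read path get_sql() wraps: resolve the project's
# DbResource, borrow the pooled connection from DbContainer, and run a query.
# The table name in the query is illustrative only.
from pyasm.biz import Project
from pyasm.search import DbContainer

project = Project.get()
sql = DbContainer.get(project.get_project_db_resource())
rows = sql.do_query('SELECT count(*) FROM "task"')
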
def execute(my):
    from pyasm.search import DbContainer

    project_code = my.kwargs.get("project_code")
    if project_code:
        project = Project.get_by_code(project_code)
    else:
        search_key = my.kwargs.get("search_key")
        project = Search.get_by_search_key(search_key)
        project_code = project.get_code()

    assert project_code
    assert project

    # dump the database

    # remove all dependencies in the sthpw database
    related_types = my.kwargs.get("related_types")
    if related_types:
        for related_type in related_types:
            search = Search(related_type)
            if related_type == "sthpw/schema":
                search.add_filter("code", project_code)
            else:
                search.add_filter("project_code", project_code)

            count = search.get_count()
            sobjects = search.get_sobjects()
            for sobject in sobjects:
                if related_type == 'sthpw/snapshot':
                    my.delete_snapshot(sobject)
                else:
                    sobject.delete()

    sthpw_project = Project.get_by_code('sthpw')

    # delete the database
    sthpw_db_resource = sthpw_project.get_project_db_resource()
    db_resource = project.get_project_db_resource()
    impl = sthpw_db_resource.get_database_impl()
    deleted_impl = db_resource.get_database_impl()

    if not impl.database_exists(db_resource):
        # remove the project entry
        project.delete()
        return

    # close this connection to the project to be deleted
    sql = DbContainer.get(db_resource)
    sql.close()
    if sql.get_database_type() == 'Sqlite':
        DbContainer.release_thread_sql()

    result = impl.drop_database(db_resource)

    # this is just an extra check
    if result and "failed" in result:
        raise TacticException(result)

    Container.put("Sql:database_exists:%s" % db_resource.get_key(), None)
    sql = DbContainer.get(db_resource, connect=True)
    if sql:
        try:
            if sql.get_database_type() != 'Sqlite':
                if sql.get_connection() and sql.connect():
                    raise TacticException("Database [%s] still exists. There could still be connections to it." % project_code)
        except SqlException, e:
            pass

def get_tables_wdg(self):
    div = DivWdg()
    div.set_name("Tables")

    div.add("In order to fully register a database, you must bind it to a TACTIC project")
    div.add("<br/>")

    project_code = "mongodb"
    database = "test_database"
    db_resource = DbResource(server='localhost', vendor='MongoDb', database=database)
    try:
        connect = DbContainer.get(db_resource)
    except Exception as e:
        div.add("Could not connect")
        div.add_style("padding: 30px")
        div.add("<br/>" * 2)
        div.add(str(e))
        return div

    # Bind project to this resource
    database_text = TextWdg("database")
    div.add("Database: ")
    div.add(database_text)
    div.add("<br/>" * 2)

    project_text = TextWdg("project")
    div.add("Project Code: ")
    div.add(project_text)
    div.add("<br/>")
    div.add("<hr/>")

    # connect and introspect the tables in this database
    tables = connect.get_tables()

    table = Table()
    div.add(table)
    table.set_max_width()
    for table_name in tables:
        table.add_row()

        search_type = "table/%s?project=%s" % (table_name, project_code)

        td = table.add_cell()
        icon = IconWdg("View Table", IconWdg.FOLDER_GO)
        td.add(icon)
        icon.add_behavior({
            'type': 'click_up',
            'search_type': search_type,
            'cbjs_action': '''
            var class_name = 'tactic.ui.panel.ViewPanelWdg';
            var kwargs = {
                search_type: bvr.search_type
            }
            spt.panel.load_popup("table", class_name, kwargs);
            '''
        })

        td = table.add_cell()
        td.add(table_name)

        td = table.add_cell()
        search = Search(search_type)
        count = search.get_count()
        td.add(" %s item/s" % count)

        columns = search.get_columns()
        td = table.add_cell()
        td.add(columns)

        # search_type
        td = table.add_cell()
        text = TextWdg("search_type")
        td.add(text)
        new_search_type = "%s/%s" % (project_code, table_name)
        text.set_value(new_search_type)

    register_div = DivWdg()
    div.add(register_div)
    register_div.add_style("padding: 20px")

    button = ActionButtonWdg(title="Register")
    register_div.add(button)

    return div

def execute(self): database = "sthpw" sql = DbContainer.get(database) value_array = sql.do_query( "select code, cc from (select code, count(code) as cc from file group by code order by cc desc) as X where cc > 1;" ) #value_array = sql.do_query("select code, cc from (select code, count(code) as cc from file group by code order by cc desc) as X;") print "found [%s] pairs" % len(value_array) for count, value_list in enumerate(value_array): if count >= BATCH: break # get the file object file_code = value_list[0] search = Search("sthpw/file") search.add_filter("code", file_code) files = search.get_sobjects() #if len(files) == 1: # continue for file in files: project_code = file.get_value("project_code") if not project_code: print "WARNING: file [%s] has no project_code" % file_code continue project = Project.get_by_code(project_code) initials = project.get_initials() id = file.get_id() new_file_code = "%s%s" % (id, initials) if file_code == new_file_code: continue print "-" * 20 print "switching: ", file_code, "to", new_file_code snapshot_code = file.get_value("snapshot_code") snapshot = Snapshot.get_by_code(snapshot_code) assert snapshot snapshot_xml = snapshot.get_xml_value("snapshot") print snapshot_xml.to_string() node = snapshot_xml.get_node("snapshot/file[@file_code='%s']" % file_code) Xml.set_attribute(node, "file_code", new_file_code) print snapshot_xml.to_string() assert node # set the file_code file.set_value("code", new_file_code) file.commit() # set the snapshot snapshot.set_value("snapshot", snapshot_xml.to_string()) snapshot.commit()
def execute(my):
    #print "Closing all connections"
    DbContainer.close_all_global_connections()

class BaseAppServer(Base):
    '''The base application server class that handles the top level
    processing of a given page.  Different applications will derive off
    of this class to implement how the resulting html will go to the server.'''

    ONLOAD_EVENT = "body_onload"

    if PROFILE:
        profile.object = None

    def __init__(my):
        my.top = None
        my.hash = None
        super(BaseAppServer, my).__init__()

    def writeln(my, string):
        my.buffer.write(string)

    def get_display(my):
        profile_flag = False

        if profile_flag:
            BaseAppServer.profile.object = my
            if os.name == 'nt':
                path = "C:/sthpw/profile"
            else:
                path = "/tmp/sthpw/temp/profile"
            profile.run("from pyasm.web.app_server import BaseAppServer; BaseAppServer.profile()", path)
            p = pstats.Stats(path)
            p.sort_stats('cumulative').print_stats(30)
            print "*" * 30
            p.sort_stats('time').print_stats(30)
        else:
            my.execute()

        value = WebContainer.get_buffer().getvalue()
        WebContainer.clear_buffer()

        return value

    def profile():
        my = BaseAppServer.profile.object
        my.execute()
    profile = staticmethod(profile)

    def execute(my):
        my.buffer = cStringIO.StringIO()

        try:
            try:
                # clear the main container for this thread
                Container.create()

                # clear the buffer
                WebContainer.clear_buffer()

                # initialize the web environment object and register it
                adapter = my.get_adapter()
                WebContainer.set_web(adapter)

                # get the display
                my._get_display()

            except SetupException, e:
                '''Display setup exception in the interface'''
                print "Setup exception: ", e.__str__()
                DbContainer.rollback_all()
                ExceptionLog.log(e)
                my.writeln("<h3>Tactic Setup Error</h3>")
                my.writeln("<pre>")
                my.writeln(e.__str__())
                my.writeln("</pre>")

            except DatabaseException, e:
                from tactic.ui.startup import DbConfigPanelWdg
                config_wdg = DbConfigPanelWdg()
                my.writeln("<pre>")
                my.writeln(config_wdg.get_buffer_display())
                my.writeln("</pre>")

        except Exception, e:
            stack_trace = ExceptionLog.get_stack_trace(e)
            print stack_trace
            my.writeln("<pre>")
            my.writeln(stack_trace)
            my.writeln("</pre>")

            # it is possible that the security object was not set
            security = Environment.get_security()
            if not security:
                security = Security()
                WebContainer.set_security(security)

            log = None

            # ensure that database connections are rolled back
            try:
                DbContainer.rollback_all()
            except Exception, e2:
                print "Error: Could not rollback: ", e2.__str__()
                my.writeln("Error: Could not rollback: '%s'" % e2.__str__())
                stack_trace = ExceptionLog.get_stack_trace(e2)
                print stack_trace
                my.writeln("<pre>")
                my.writeln(stack_trace)
                my.writeln("</pre>")

            raise e

def execute(my):
    from pyasm.security import Batch
    Batch(login_code="admin")

    os.environ["TACTIC_MONITOR"] = "true"

    if not my.num_processes:
        my.num_processes = Config.get_value("services", "process_count")
        if my.num_processes:
            my.num_processes = int(my.num_processes)
        else:
            my.num_processes = 3

    start_port = Config.get_value("services", "start_port")

    ports_str = os.environ.get("TACTIC_PORTS")
    if not ports_str:
        ports_str = Config.get_value("services", "ports")

    if ports_str:
        ports = ports_str.split("|")
        ports = [int(x) for x in ports]
    else:
        if start_port:
            start_port = int(start_port)
        else:
            start_port = 8081
        ports = []
        for i in range(0, my.num_processes):
            ports.append(start_port + i)

    tactic_threads = []

    #start_tactic = Config.get_value("services", "tactic")
    #start_job_queue = Config.get_value("services", "job_queue")
    #start_watch_folder = Config.get_value("services", "watch_folder")
    start_tactic = False
    start_job_queue = False
    start_watch_folder = False
    start_async = False

    services = Config.get_value("services", "enable")
    custom_services = []
    if services:
        #services = services.split("|")
        services = re.split("[|,]", services)
        for service in services:
            if service == 'tactic':
                start_tactic = True
            elif service == 'job_queue':
                start_job_queue = True
            elif service == 'watch_folder':
                start_watch_folder = True
            elif service == 'async':
                start_async = True
            else:
                custom_services.append(service)
    else:
        start_tactic = True

    # create a number of processes
    if start_tactic:
        #for i in range(0, my.num_processes):
        for port in ports:
            # start cherrypy
            tactic_thread = TacticThread(port)
            tactic_thread.set_dev(my.dev_mode)
            tactic_thread.start()
            tactic_threads.append(tactic_thread)
            time.sleep(1)
            #port += 1

    # aSync Queue services
    if start_async:
        num_processes = Config.get_value("async", "process_count")
        if not num_processes:
            num_processes = 1
        else:
            num_processes = int(num_processes)

        for i in range(0, num_processes):
            job_thread = ASyncThread()
            job_thread.start()
            tactic_threads.append(job_thread)

    # Job Queue services
    if start_job_queue:
        num_processes = Config.get_value("services", "queue_process_count")
        if not num_processes:
            num_processes = 1
        else:
            num_processes = int(num_processes)

        for i in range(0, num_processes):
            job_thread = JobQueueThread()
            job_thread.start()
            tactic_threads.append(job_thread)

    # Watch Folder services
    if start_watch_folder:
        search = Search("sthpw/watch_folder")
        watch_folders = search.get_sobjects()
        for watch_folder in watch_folders:
            project_code = watch_folder.get("project_code")
            base_dir = watch_folder.get("base_dir")
            search_type = watch_folder.get("search_type")
            process = watch_folder.get("process")

            if not project_code:
                print "Watch Folder missing project_code ... skipping"
                continue

            if not base_dir:
                print "Watch Folder missing base_dir ... skipping"
                continue

            if not search_type:
                print "Watch Folder missing search_type ... skipping"
                continue

            watch_thread = WatchFolderThread(
                project_code=project_code,
                base_dir=base_dir,
                search_type=search_type,
                process=process
            )
            watch_thread.start()
            tactic_threads.append(watch_thread)

    # set up custom services
    for service in custom_services:
        kwargs = Config.get_section_values(service)
        custom_thread = CustomPythonProcessThread(**kwargs)
        custom_thread.start()
        tactic_threads.append(custom_thread)

    if len(tactic_threads) == 0:
        print
        print "No services started ..."
        print
        return

    # create a separate thread for timed processes
    # DEPRECATED
    tactic_timed_thread = TacticTimedThread()
    tactic_timed_thread.start()
    tactic_threads.append(tactic_timed_thread)

    # create a separate thread for scheduler processes
    start_scheduler = Config.get_value("services", "scheduler")
    if start_scheduler == 'true':
        tactic_scheduler_thread = TacticSchedulerThread()
        tactic_scheduler_thread.start()
        tactic_threads.append(tactic_scheduler_thread)

    DbContainer.close_thread_sql()

    # check each thread every 20 seconds
    while 1:
        end = False
        try:
            if my.check_interval:
                time.sleep(my.check_interval)
                for tactic_thread in tactic_threads:
                    tactic_thread._check()
            else:
                # FIXME: break for now (for windows service)
                break
        except KeyboardInterrupt, e:
            print "Keyboard interrupt ... exiting Tactic"
            for tactic_thread in tactic_threads:
                tactic_thread.end = True
            end = True

        if end:
            break

def run(my):
    try:
        Batch()
        timed_trigger._do_execute()
    finally:
        DbContainer.close_thread_sql()

class CherryPyStartup(object):

    def __init__(my, port=''):

        # It is possible on startup that the database is not running.
        from pyasm.common import Environment
        from pyasm.search import DbContainer, DatabaseException, Sql

        plugin_dir = Environment.get_plugin_dir()
        sys.path.insert(0, plugin_dir)

        try:
            sql = DbContainer.get("sthpw")

            if sql.get_database_type() != "MongoDb":
                # before batch, clean up the ticket with a NULL code
                if os.getenv('TACTIC_MODE') != 'production':
                    sql.do_update('DELETE from "ticket" where "code" is NULL;')
                else:
                    start_port = Config.get_value("services", "start_port")
                    if start_port:
                        start_port = int(start_port)
                    else:
                        start_port = 8081
                    if port and int(port) == start_port:
                        sql.do_update('DELETE from "ticket" where "code" is NULL;')

        except DatabaseException, e:
            # TODO: need to work on this
            print "ERROR: could not connect to [sthpw] database"
            #os.environ["TACTIC_CONFIG_PATH"] = Config.get_default_config_path()
            #Sql.set_default_vendor("Sqlite")

            Config.set_tmp_config()
            Config.reload_config()

            # try connecting again
            try:
                sql = DbContainer.get("sthpw")
            except:
                print "Could not connect to the database."
                raise

        # is it CherryPyStartup's responsibility to start batch?
        from pyasm.security import Batch
        Batch()

        my.site_dir = os.getenv("TACTIC_SITE_DIR")
        my.install_dir = os.getenv("TACTIC_INSTALL_DIR")

        # set up a simple environment.  May need a more complex one later
        my.env = Environment()

        my.setup_env()
        my.config = my.setup_sites()

        my.init_only = False

        cherrypy.startup = my

        # this initializes the web.
        # - sets up virtual implied triggers
        from web_init import WebInit
        WebInit().execute()

        # Windows should handle fine
        # start up the caching system if it's not Windows
        cache_mode = Config.get_value("install", "cache_mode")
        if not cache_mode:
            cache_mode = 'complete'
            if os.name == 'nt':
                cache_mode = 'basic'

        from cache_startup import CacheStartup
        cmd = CacheStartup(mode=cache_mode)
        cmd.execute()
        cmd.init_scheduler()

        # DEPRECATED (but keeping it around)
        """
        # start up the queue system ...
        if Config.get_value("sync", "enabled") == "true":
            # start up the sync system ...
            print "Starting Transaction Sync ..."
            from tactic.command import TransactionQueueManager
            TransactionQueueManager.start()

            # start up the sync system ...
            print "Starting Watch Folder Service ..."
            from tactic.command import WatchServerFolderTask
            WatchServerFolderTask.start()
        """

        # start up scheduled triggers
        #from tactic.command import ScheduledTriggerMonitor
        #ScheduledTriggerMonitor.start()

        #from pyasm.web import Translation
        #Translation.install()

        # close all the threads in this startup thread
        from pyasm.search import DbContainer
        DbContainer.close_thread_sql()

        version = Environment.get_release_version()
        print
        print "Starting TACTIC v%s ..." % version
        print

class CreateProjectCmd(Command):

    def is_undoable(cls):
        return False
    is_undoable = classmethod(is_undoable)

    def get_title(my):
        return "Create Project"

    def get_args_keys(my):
        return {
            'project_code': 'code of the new project',
            'project_title': 'title of the new project',
            'project_type': 'determines the type of project which specifies the initial schema and the naming conventions',
            #'copy_pipelines': 'flag to copy template site pipelines to project'
        }

    def check(my):
        project_code = my.kwargs.get('project_code')
        regexs = '^\d|\W'
        m = re.search(r'%s' % regexs, project_code)
        if m:
            if isinstance(project_code, unicode):
                project_code = project_code.encode('utf-8')
            else:
                project_code = unicode(project_code).encode('utf-8')
            raise TacticException('<project_code> [%s] cannot contain special characters or start with a number.' % project_code)

        # check to see if this project already exists
        test_project = Project.get_by_code(project_code)
        if test_project:
            if test_project.get_value('s_status') == 'retired':
                raise TacticException('Project with code [%s] already exists but is retired.' % project_code)
            else:
                raise TacticException('Project with code [%s] already exists.' % project_code)

        return True

    def execute(my):
        project_code = my.kwargs.get('project_code')
        project_title = my.kwargs.get('project_title')
        project_type = my.kwargs.get('project_type')
        if not project_type:
            project_type = "simple"
        is_template = my.kwargs.get('is_template')
        project_theme = my.kwargs.get('project_theme')

        use_default_side_bar = my.kwargs.get('use_default_side_bar')
        if use_default_side_bar in [False, 'false']:
            use_default_side_bar = False
        else:
            use_default_side_bar = True

        assert project_code
        assert project_type

        if project_type:
            # check to see if it exists
            search = Search("sthpw/project_type")
            search.add_filter("code", project_type)
            project_type_sobj = search.get_sobject()
            if not project_type_sobj:
                # just create a default one in this case if it is named
                # after the project code
                if not is_template and project_type == project_code:
                    project_type = 'default'

                # create a new project type
                search = Search("sthpw/project_type")
                search.add_filter("code", project_type)
                project_type_sobj = search.get_sobject()
                if not project_type_sobj:
                    project_type_sobj = SearchType.create("sthpw/project_type")
                    project_type_sobj.set_value("code", project_type)
                    project_type_sobj.set_value("type", "simple")
                    project_type_sobj.commit()

        # set the current project to Admin
        Project.set_project("admin")

        # create a new project sobject
        project = SearchType.create("sthpw/project")
        project.set_value("code", project_code)
        project.set_value("title", project_title)
        project.set_value("type", project_type)

        # set the update of the database to current (this is obsolete)
        #project.set_value("last_db_update", "now()")
        project.set_value("last_version_update", "2.5.0.v01")

        if is_template in ['true', True, 'True']:
            project.set_value("is_template", True)
        else:
            project.set_value("is_template", False)

        if project_type != "default":
            category = Common.get_display_title(project_type)
            project.set_value("category", category)

        project.commit()

        # if there is an image, check it in
        upload_path = my.kwargs.get("project_image_path")
        if upload_path:
            if not os.path.exists(upload_path):
                raise TacticException("Cannot find upload image for project [%s]" % upload_path)
            file_type = 'main'

            file_paths = [upload_path]
            file_types = [file_type]

            source_paths = [upload_path]
            from pyasm.biz import IconCreator
            if os.path.isfile(upload_path):
                icon_creator = IconCreator(upload_path)
                icon_creator.execute()

                web_path = icon_creator.get_web_path()
                icon_path = icon_creator.get_icon_path()
                if web_path:
                    file_paths = [upload_path, web_path, icon_path]
                    file_types = [file_type, 'web', 'icon']

            from pyasm.checkin import FileCheckin
            checkin = FileCheckin(project, context='icon', file_paths=file_paths, file_types=file_types)
            checkin.execute()

        # find project's base_type
        base_type = project.get_base_type()
        if not base_type and project_type == 'unittest':
            base_type = 'unittest'
        elif not base_type:
            base_type = 'simple'

        # get the database for this project
        db_resource = project.get_project_db_resource()
        database = db_resource.get_database_impl()
        #database = DatabaseImpl.get()
        database_type = database.get_database_type()
        if database_type == 'Oracle':
            raise TacticException("Creation of project is not supported. Please create manually")

        # creating project database
        print "Creating database '%s' ..." % project_code
        try:
            # create the database
            database.create_database(db_resource)
        except Exception, e:
            print str(e)
            print "WARNING: Error creating database [%s]" % project_code

        # import the appropriate schema with config first
        database.import_schema(db_resource, base_type)

        my.create_schema(project_code)

        # before we upgrade, we have to commit the transaction.
        # This is because the upgrade actually runs as separate processes,
        # so if no commit has been made, the tables from importing the
        # schema will not have existed yet
        DbContainer.commit_thread_sql()

        my.upgrade()

        # import the appropriate data
        database.import_default_data(db_resource, base_type)

        # import default links
        if use_default_side_bar:
            my.import_default_side_bar()

        # create specified stypes
        my.create_search_types()

        # create theme
        if project_theme:
            my.create_theme(project_theme)

        # set as main project
        is_main_project = my.kwargs.get("is_main_project")
        if is_main_project in [True, 'true', 'on']:
            Config.set_value("install", "default_project", project_code)
            Config.save_config()
            Config.reload_config()

        # initiate the DbContainer
        DbContainer.get('sthpw')

        my.info['result'] = "Finished creating project [%s]." % project_code

        print "Done."

def get_database_impl():
    project = Project.get()
    db_resource = project.get_db_resource()
    sql = DbContainer.get(db_resource)
    return sql.get_database_impl()

def import_manifest(my, nodes):
    paths_read = []

    for node in nodes:
        node_name = my.xml.get_node_name(node)

        if node_name == 'search_type':
            search_type = my.xml.get_attribute(node, 'code')

            # implicitly add the entry to the schema table.
            # Reset the cache every time to ensure that any updates to
            # the schema are reflected here.
            schema = Schema.get(reset_cache=True)
            xml = schema.get_xml()
            schema_node = xml.get_node("schema/search_type[@name='%s']" % search_type)
            parent = xml.get_node("schema")
            if schema_node == None:
                schema_node = xml.create_element("search_type")
                xml.set_attribute(schema_node, "name", search_type)
                #parent = xml.get_parent(node)
                xml.append_child(parent, schema_node)

                schema.set_value('schema', xml.to_string())
                schema.commit()

            # TODO: connections?
            path = my.xml.get_attribute(node, "path")
            if not path:
                path = "%s.spt" % search_type.replace("/", "_")

            path = "%s/%s" % (my.plugin_dir, path)
            if path in paths_read:
                continue

            if my.verbose:
                print "Reading search_type: ", path

            # NOTE: privileged knowledge of the order of return values
            jobs = my.import_data(path, commit=True)

            paths_read.append(path)

            if not jobs:
                continue

            search_type_obj = jobs[0]
            if len(jobs) == 1:
                # only the search type was defined
                table = None
            else:
                table = jobs[1]

            try:
                # check to see if the search type already exists
                search_type_chk = SearchType.get(search_type)
                if search_type_chk:
                    if my.verbose:
                        print 'WARNING: Search Type [%s] is already registered' % search_type_chk.get_value("search_type")
                else:
                    search_type_obj.commit()
            except SearchException, e:
                if e.__str__().find('not registered') != -1:
                    search_type_obj.commit()

            # check if table exists
            has_table = False
            if has_table:
                if my.verbose:
                    print 'WARNING: Table [%s] already exists'
            elif table:
                #print table.get_statement()
                if table:
                    database = table.get_database()
                    table_name = table.get_table()
                    TableUndo.log(search_type, database, table_name)

        elif node_name == 'sobject':
            path = my.xml.get_attribute(node, "path")
            search_type = my.xml.get_attribute(node, "search_type")
            seq_max = my.xml.get_attribute(node, "seq_max")
            try:
                if seq_max:
                    seq_max = int(seq_max)
            except ValueError:
                seq_max = 0

            if not path:
                if search_type:
                    path = "%s.spt" % search_type.replace("/", "_")

            if not path:
                raise TacticException("No path specified")

            path = "%s/%s" % (my.plugin_dir, path)
            if path in paths_read:
                continue

            unique = my.xml.get_attribute(node, "unique")
            if unique == 'true':
                unique = True
            else:
                unique = False

            if my.verbose:
                print "Reading: ", path

            # jobs doesn't matter for sobject node
            jobs = my.import_data(path, unique=unique)

            # compare sequence
            st_obj = SearchType.get(search_type)
            SearchType.sequence_nextval(search_type)
            cur_seq_id = SearchType.sequence_currval(search_type)

            sql = DbContainer.get("sthpw")
            if seq_max > 0 and seq_max > cur_seq_id:
                # TODO: SQL Server - Reseed the sequences instead of passing.
                if sql.get_database_type() == 'SQLServer':
                    pass
                else:
                    SearchType.sequence_setval(search_type, seq_max)
            else:
                cur_seq_id -= 1
                # TODO: SQL Server - Reseed the sequences instead of passing.
                if sql.get_database_type() == 'SQLServer':
                    pass
                else:
                    # this is a db requirement
                    if cur_seq_id > 0:
                        SearchType.sequence_setval(search_type, cur_seq_id)

            paths_read.append(path)

def get_display(self):
    from pyasm.biz import Project

    security = Environment.get_security()
    if not security.check_access("builtin", "side_bar_schema", "allow", default="deny"):
        return DivWdg()

    section_div = LabeledHidableWdg(label="Schema Views")
    section_div.set_attr('spt_class_name', Common.get_full_class_name(self) )

    palette = Palette.get()
    color = palette.color("background3")
    project_div = RoundedCornerDivWdg(hex_color_code=color, corner_size="10")
    project_div.set_dimensions( width_str='175px', content_height_str='100px' )

    project = Project.get()
    project_code = project.get_code()
    project_type = project.get_type()

    div = DivWdg()
    section_div.add(project_div)
    project_div.add(div)

    # get the project schema
    schema = Schema.get_by_code(project_code)
    if schema:
        div.add( self.get_schema_wdg(schema) )

    #if not project_type:
    #    raise SetupException("Project type not found for [%s]" % project_code)
    if project_type:
        schema = Schema.get_predefined_schema(project_type)
        if schema:
            div.add( self.get_schema_wdg(schema) )

    schema = Schema.get_predefined_schema('config')
    div.add( self.get_schema_wdg(schema) )

    schema = Schema.get_admin_schema()
    div.add( self.get_schema_wdg(schema) )

    return section_div

    # NOTE: everything below is unreachable (it follows the return above);
    # it builds a fake schema from the raw database tables and is kept as-is.

    # create a fake schema
    project = Project.get()
    db_name = project.get_database()
    sql = DbContainer.get(db_name)
    tables = sql.get_tables()
    tables.sort()

    tables_str = "\n".join( ['<search_type name="%s"/>' % x for x in tables] )

    # look at all of the search objects for mapped tables
    search = Search("sthpw/search_object")
    #search.add_where('''"namespace" = 'MMS' or namespace = '{project}' ''')
    search.add_filter("namespace", 'MMS')
    search.add_filter("namespace", '{project}')
    search.add_where("or")
    search_types = search.get_sobjects()

    schema_xml = '''
    <schema>
    %s
    </schema>
    ''' % tables_str

    schema = SearchType.create("sthpw/schema")
    schema.set_value("code", "table")
    schema.set_value("schema", schema_xml)
    #div.add( self.get_schema_wdg(schema) )

    return section_div
def get_database_type(self):
    project = Project.get_by_code(self.project_code)
    db_resource = project.get_project_db_resource()
    db = DbContainer.get(db_resource)
    return db.get_database_type()
def __init__(my, **kwargs):
    super(TimedTask, my).__init__(**kwargs)
    my.index = kwargs.get("index")
    my.project_code = kwargs.get("project_code")

def execute(my):
    try:
        #Batch()
        #Command.execute_cmd(timed_trigger)
        Project.set_project(my.project_code)
        timed_triggers[my.index].execute()
    except Exception, e:
        print "Error running trigger"
        raise
    finally:
        DbContainer.close_thread_sql()
        DbContainer.commit_thread_sql()
        DbContainer.close_all()

for idx, timed_trigger in enumerate(timed_triggers):
    data = timed_trigger.get_input()
    if not data:
        continue

    """
    data = {
        'type': 'interval',
        'interval': 10,
        'delay': 0,
def run_sql(self, sql):
    from pyasm.search import DatabaseImpl, DbContainer
    db = DbContainer.get("sthpw")
    db.do_update(sql)
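# Hypothetical usage of run_sql() above from within the same class.  The
# method name, table and column names in the statement are invented for
# illustration; do_update() simply executes the SQL string it is given
# against the central "sthpw" database.
def mark_stale_tasks(self):
    self.run_sql('''UPDATE "task" SET "status" = 'pending' WHERE "status" is NULL''')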
def __del__(self):
    # ensure that database connections are always closed
    if DbContainer:
        DbContainer.close_all()
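# __del__ is not guaranteed to run at a predictable time, and the
# `if DbContainer:` guard above likely protects against module globals having
# already been torn down at interpreter shutdown.  A sketch of the more
# explicit pattern used elsewhere in this document: close connections in a
# finally block once the work is done.  run_batch_job and job_fn are
# placeholder names, not part of the codebase.
def run_batch_job(job_fn):
    Batch()
    try:
        job_fn()
    finally:
        DbContainer.close_all()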
def dump_to_tactic(my, path=None, mode='sql'):
    from pyasm.search import SearchType, Sql, DbContainer

    search_type_obj = SearchType.get(my.search_type)
    database = search_type_obj.get_database()
    table = search_type_obj.get_table()

    db_resource = SearchType.get_db_resource_by_search_type(my.search_type)
    sql = DbContainer.get(db_resource)
    exists = sql.table_exists(table)
    if not exists:
        return

    info = sql.get_column_info(table)
    impl = sql.get_database_impl()

    columns = info.keys()
    columns.sort()

    # if the table has no columns, there is nothing to dump
    if not columns:
        return

    if path:
        import os
        f = codecs.getwriter('utf8')(open(path, 'ab'))
    else:
        import sys
        f = sys.stdout

    if not my.delimiter:
        my.delimiter = "--"
        my.end_delimiter = my.delimiter

    f.write( "%s\n" % my.delimiter )

    if mode == 'sobject':
        f.write("table = CreateTable('%s')\n" % my.search_type)
    else:
        f.write("table = CreateTable()\n")
        f.write("table.set_table('%s')\n" % table)

    # Hard-coded for now: all search types have "id" as the primary key
    primary_key_col = 'id'

    for column in columns:
        if column in my.ignore_columns:
            continue

        col_info = info[column]
        #print col_info
        space = " " * (25 - len(column))

        data_type = col_info['data_type']
        is_nullable = col_info['nullable']

        if column == "id":
            # a dump outputs a database-independent serial
            #data_type = impl.get_serial()   # <-- database dependent
            data_type = 'serial'
            f.write("table.add('%s',%s'%s', primary_key=True)\n" % (column, space, data_type) )
            continue
        elif data_type in ['varchar', 'char', 'nvarchar']:
            size = col_info.get('size')
            if size == -1:
                size = 'max'
            if not size:
                size = 256

            if is_nullable:
                f.write("table.add('%s',%s'%s', length='%s' )\n" % (column, space, data_type, size))
            else:
                f.write("table.add('%s',%s'%s', length='%s', not_null=True )\n" % (column, space, data_type, size))
            continue

        if is_nullable:
            f.write("table.add('%s',%s'%s' )\n" % (column, space, data_type))
        else:
            f.write("table.add('%s',%s'%s', not_null=True )\n" % (column, space, data_type))

    # add the constraints
    constraints = impl.get_constraints(db_resource, table)
    for constraint in constraints:
        name = constraint.get("name")
        columns = constraint.get("columns")
        mode = constraint.get("mode")
        if not name:
            name = "%s_%s_idx" % (name, "_".join(columns))
        f.write('''table.add_constraint(%s, mode="%s")\n''' % (columns, mode))
        #def add_constraint(my, columns, mode="UNIQUE"):

    #f.write("table.set_primary_key('id')\n")

    # disable commit for now
    #if mode == 'sobject':
    #    f.write("table.commit()\n")

    f.write( "%s\n" % my.end_delimiter )
    f.write("\n")
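# Illustrative output of dump_to_tactic() above in 'sobject' mode, assuming a
# hypothetical "example/asset" search type with three columns.  The column
# names and data types are invented and the alignment is approximate; the
# line format follows the f.write() calls in the method, with "--" as the
# default delimiter.
EXAMPLE_DUMP = """--
table = CreateTable('example/asset')
table.add('id',                      'serial', primary_key=True)
table.add('code',                    'varchar', length='256' )
table.add('description',             'text' )
--
"""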
class DumpTransactionCodesCmd(Command):

    def execute(my):
        project = Project.get()

        search = Search("sthpw/transaction_log")
        search.add_filter("namespace", project.get_code())
        search.add_column("code")
        transactions = search.get_sobjects()
        codes = SObject.get_values(transactions, "code")
        codes = set(codes)

        # dump out the transaction codes for this project
        f = open("/tmp/transactions_codes", 'wb')
        f.write(str(codes))
        f.close()


if __name__ == '__main__':
    Batch()
    DbContainer.close_all()

    scheduler = WatchServerFolderTask.start()
    while 1:
        try:
            time.sleep(15)
        except (KeyboardInterrupt, SystemExit), e:
            scheduler.stop()
            break
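# A hedged sketch of running DumpTransactionCodesCmd on its own, following
# the Batch() / DbContainer.close_all() pattern used in the __main__ block
# above.  Calling execute() directly is an assumption here; in the framework
# the command might normally be dispatched through a command runner instead.
Batch()
cmd = DumpTransactionCodesCmd()
cmd.execute()
DbContainer.close_all()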