def migrate():
    updates = []
    cur = db.cursor()
    cur.execute("ALTER TABLE raw_data ADD (ts DOUBLE, remote_addr VARCHAR(15))")
    cur.execute("SELECT id, ts, remote_addr, data FROM raw_data")
    for r in cur.fetchall():
        jsontxt = r["data"]
        jsonobj = json.loads(jsontxt)
        objid = r["id"]
        ts = r["ts"]
        if ts is None:
            ts = jsonobj["ts"]
        remote_addr = r["remote_addr"]
        if remote_addr is None:
            remote_addr = jsonobj["remote_addr"]
        updates.append([ts, remote_addr, objid])
    cur.executemany("UPDATE raw_data SET ts=%s, remote_addr=%s WHERE id=%s", updates)
    db.commit()
    cur.close()
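# The migration above promotes "ts" and "remote_addr" out of the stored JSON into
# real columns on raw_data, and it only fills values that are still NULL, so the
# backfill is safe to re-run. Presumably this lets the two fields be queried and
# indexed directly without parsing every JSON document.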
def migrate():
    cur = db.cursor()
    cur.execute("alter table planner_metrics add dax_api VARCHAR(15)")
    db.commit()
    cur.close()
def migrate():
    cur = db.cursor()
    cur.execute("ALTER TABLE planner_metrics ADD (application VARCHAR(256))")
    db.commit()
    cur.close()
def migrate():
    cur = db.cursor()
    cur.execute("alter table planner_metrics add uses_pmc BOOLEAN")
    cur.execute("alter table planner_metrics add planner_args TEXT")
    cur.execute("alter table planner_metrics add deleted_tasks INTEGER UNSIGNED")
    db.commit()
    cur.close()
def migrate():
    cur = db.cursor()
    cur.execute("alter table planner_metrics add dax_input_files int unsigned")
    cur.execute("alter table planner_metrics add dax_inter_files int unsigned")
    cur.execute("alter table planner_metrics add dax_output_files int unsigned")
    cur.execute("alter table planner_metrics add dax_total_files int unsigned")
    db.commit()
    cur.close()
def migrate():
    cur = db.cursor()
    cur.execute("alter table planner_metrics modify version VARCHAR(32)")
    cur.execute("alter table downloads modify version VARCHAR(32)")
    cur.execute("alter table dagman_metrics modify version VARCHAR(32)")
    cur.execute("alter table dagman_metrics modify planner_version VARCHAR(32)")
    db.commit()
    cur.close()
def migrate():
    cur = db.cursor()

    def drop_index(table, idx):
        # Drop the index only if it actually exists, so that re-running this
        # migration (or running it against a database that never had the index)
        # does not fail on the DROP INDEX statement.
        cur.execute("SHOW INDEX FROM %s WHERE KEY_NAME='%s'" % (table, idx))
        if cur.fetchone():
            cur.execute("DROP INDEX %s ON %s" % (idx, table))

    drop_index("planner_metrics", "idx_planner_metrics_root_wf_uuid")
    cur.execute("create index idx_planner_metrics_root_wf_uuid on planner_metrics(root_wf_uuid)")
    drop_index("planner_metrics", "idx_planner_metrics_ts")
    cur.execute("create index idx_planner_metrics_ts on planner_metrics(ts)")

    db.commit()
    cur.close()
def migrate():
    cur = db.cursor()
    cur.execute("""
        create table dagman_metrics (
            id INTEGER UNSIGNED NOT NULL,
            ts DOUBLE,
            remote_addr VARCHAR(15),
            hostname VARCHAR(256),
            domain VARCHAR(256),
            version VARCHAR(32),
            wf_uuid VARCHAR(36),
            root_wf_uuid VARCHAR(36),
            start_time DOUBLE,
            end_time DOUBLE,
            duration FLOAT,
            exitcode SMALLINT,
            dagman_id VARCHAR(32),
            parent_dagman_id VARCHAR(32),
            jobs INTEGER,
            jobs_failed INTEGER,
            jobs_succeeded INTEGER,
            dag_jobs INTEGER,
            dag_jobs_failed INTEGER,
            dag_jobs_succeeded INTEGER,
            dag_status INTEGER,
            planner VARCHAR(1024),
            planner_version VARCHAR(32),
            rescue_dag_number INTEGER,
            total_job_time DOUBLE,
            total_jobs INTEGER,
            total_jobs_run INTEGER,
            PRIMARY KEY (id),
            FOREIGN KEY (id) REFERENCES raw_data(id) ON DELETE CASCADE
        ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
    """)
    db.commit()
    cur.close()
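# How these migrate() functions are discovered and applied is not shown in this
# file. The runner below is a minimal sketch only: run_migrations() and the idea
# of passing the migration modules in as an ordered list are assumptions, not
# part of the original code.
def run_migrations(modules):
    # `modules` is an ordered list of migration modules, each exposing migrate().
    # Each migrate() commits its own work, so a failure stops the run at the
    # first migration that did not complete.
    for module in modules:
        module.migrate()

# Usage (hypothetical module names):
#   run_migrations([migration_0001, migration_0002, ...])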
def reprocess():
    i = loader.reprocess_raw_data()
    db.commit()
    flash("Reprocessed %d objects successfully" % i)
    return redirect(request.referrer or url_for('index'))
# The submission may already include a timestamp; don't add one if the key exists
if "ts" not in data:
    data["ts"] = time.time()
ts = data["ts"]

# Get the remote IP address. The downloads will have a remote_addr already,
# so don't add it if the key exists
if "remote_addr" not in data:
    data["remote_addr"] = request.environ["REMOTE_ADDR"]
remote_addr = data["remote_addr"]

# Store the raw data
try:
    data["id"] = db.store_raw_data(ts, remote_addr, data)
    db.commit()
except Exception, e:
    log.error("Error storing JSON data: %s", e)
    db.rollback()
    return "Error storing JSON data", 500

# Store the processed data
try:
    loader.process_raw_data(data)
    db.commit()
except Exception, e:
    log.error("Error processing JSON data: %s", e)
    db.rollback()

return "", 202
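# A minimal sketch of a client submitting a JSON document to the handler above.
# The URL (host, port, and route) and the payload fields are assumptions for
# illustration only, not taken from this file; the handler returns 202 once the
# raw document has been stored, even if later processing of it fails.
import json
import urllib2

doc = {"type": "metrics", "wf_uuid": "hypothetical-uuid"}  # illustrative payload
req = urllib2.Request("http://localhost:5000/metrics",
                      data=json.dumps(doc),
                      headers={"Content-Type": "application/json"})
resp = urllib2.urlopen(req)
print resp.getcode()  # expect 202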