def get_operation(provider_name, operation, user_rank=users.RANK_BRONZE, include_machines=False):
    """ Get an operation for a provider and user rank, optionally including
        the codes of the machines it can run on """
    g_db = core.api_util.DatabaseContext.get_conn()
    if not include_machines:
        return pg_util.row_to_dict(
            g_db.execute(
                """SELECT *
                     FROM operations
                    WHERE operation_name = %s
                      AND provider_code = %s
                      AND user_rank = %s""",
                [operation, provider_name, user_rank]).fetchone())
    req = g_db.execute(
        """SELECT o.*, ARRAY_REMOVE(ARRAY_AGG(m.machine_code), NULL) AS machines
             FROM operations AS o
        LEFT JOIN operation_machine AS om ON o.operation_name = om.operation_name
        LEFT JOIN machines AS m ON om.machine_uid = m.uid
                                AND o.provider_code = m.provider_code
            WHERE o.operation_name = %s
              AND o.provider_code = %s
              AND o.user_rank = %s
         GROUP BY o.id, o.provider_code, o.operation_name, o.user_rank""",
        [operation, provider_name, user_rank]).fetchone()
    return pg_util.row_to_dict(req)
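# Illustrative usage sketch for get_operation. The provider and operation
# names below are assumptions made for this example, not values defined in
# this module. With include_machines=True the returned dict carries an extra
# "machines" key: ARRAY_REMOVE(..., NULL) turns the LEFT JOIN's [NULL] into
# an empty list when no machine is linked to the operation.
def _example_lookup_operation_machines():
    op = get_operation("aws_eu", "new_mesh", include_machines=True)  # assumed names
    return op["machines"] if op else []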
def charge_user_computing(user_id, job_id, description):
    """ Charge a user for the computing time of a job, from the last charged
        period (or the job start) up to now, rounded up to the price
        granularity. Returns the end of the charged period. """
    log.info("Charging user %s. Reason: %s" % (str(user_id), description))
    g_db = core.api_util.DatabaseContext.get_conn()
    with pg_util.Transaction(g_db):
        with g_db.cursor() as cur:
            cur.execute("SELECT machine_price_id, start_time, nbr_machines FROM jobs WHERE id = %s",
                        [job_id])
            job = pg_util.row_to_dict(cur.fetchone())
        with g_db.cursor() as cur:
            cur.execute("SELECT * FROM machine_prices_history WHERE id = %s",
                        [job["machine_price_id"]])
            price = pg_util.row_to_dict(cur.fetchone())
        now = pg_util.get_now(g_db)
        with g_db.cursor() as cur:
            cur.execute("SELECT MAX(computing_end) FROM user_accounts WHERE job_id = %s", [job_id])
            last_comp = cur.fetchone()
            if last_comp is not None:
                last_comp = last_comp[0]
        if last_comp is None:
            # First charge for this job: bill at least the minimum granularity
            start_time = job["start_time"] if job["start_time"] else now
            end_time = start_time + datetime.timedelta(seconds=price["min_sec_granularity"])
        else:
            # Follow-up charge: resume from the end of the last charged period
            start_time = last_comp
            end_time = start_time + datetime.timedelta(seconds=price["sec_granularity"])
        # Extend the charged period in whole granularity steps until it covers now
        while end_time < now:
            end_time += datetime.timedelta(seconds=price["sec_granularity"])
        charged_price = (end_time - start_time).total_seconds() * int(price["sec_price"]) \
                        * int(job["nbr_machines"])
        price_snapshot = {"price": pg_util.row_to_dict(price),
                          "start_time": start_time,
                          "end_time": end_time}
        with g_db.cursor() as cur:
            cur.execute(
                """INSERT INTO user_accounts (user_id, amount, description, job_id,
                                              price_snapshot, computing_start, computing_end)
                        VALUES (%s, %s, %s, %s, %s, %s, %s)""",
                [user_id, charged_price * -1, description, job_id, price_snapshot,
                 start_time, end_time])
    return end_time
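# Worked sketch of the billing-window arithmetic above, with assumed numbers
# (a 300 s minimum and 60 s granularity; this mirrors the rounding rule only,
# not the SQL or the price multiplication). The first window always spans at
# least min_sec_granularity, then grows in whole sec_granularity steps until
# it reaches "now".
def _example_billing_window_seconds(start, now, min_sec_granularity=300, sec_granularity=60):
    end = start + datetime.timedelta(seconds=min_sec_granularity)
    while end < now:
        end += datetime.timedelta(seconds=sec_granularity)
    return (end - start).total_seconds()

# e.g. 430 s of elapsed compute is billed as 480 s:
# _example_billing_window_seconds(t, t + datetime.timedelta(seconds=430)) == 480.0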
def save_project_file(user_id, project_codename, filename, file_size, key=None):
    """
    Add a file to project file list

    :param user_id: The project owner id
    :type user_id: int
    :param project_codename: The project uid
    :type project_codename: str
    :param filename: The saved file name
    :type filename: str
    :param file_size: The size of the file
    :type file_size: int
    :param key: The key of the file
    :type key: int|None
    :return: The file information
    :rtype: dict[str, any]
    """
    project = get_project(user_id, project_codename)
    if not project:
        raise RuntimeError("No project " + project_codename)
    g_db = core.api_util.DatabaseContext.get_conn()
    row_id = g_db.execute(
        """INSERT INTO project_files (project_uid, filename, key, size)
                VALUES (%s, %s, %s, %s)
             RETURNING id""",
        [project_codename, filename, key, file_size]).fetchval()
    result = g_db.execute("SELECT * FROM project_files WHERE id = %s", [row_id]).fetchone()
    return pg_util.row_to_dict(result)
def get_project(user_id, project_codename, include_deleted=False):
    """ Get a project by owner and uid, falling back to the most recent
        projects_history row when include_deleted is set """
    g_db = core.api_util.DatabaseContext.get_conn()
    result = g_db.execute(
        """SELECT p.*, T1.creation_date
             FROM projects AS p
        LEFT JOIN (
                   SELECT MIN(start_time) AS creation_date, uid
                     FROM projects_history
                 GROUP BY uid
             ) AS T1 ON T1.uid = p.uid
            WHERE p.user_id = %s
              AND p.uid = %s""",
        [user_id, project_codename]).fetchone()
    if result is None and include_deleted:
        result = g_db.execute(
            """SELECT p.*, T1.creation_date
                 FROM projects_history AS p
            LEFT JOIN (
                       SELECT MIN(start_time) AS creation_date, uid
                         FROM projects_history
                     GROUP BY uid
                 ) AS T1 ON T1.uid = p.uid
                WHERE p.user_id = %s
                  AND p.uid = %s
             ORDER BY end_time DESC
                LIMIT 1""",
            [user_id, project_codename]).fetchone()
    return pg_util.row_to_dict(result)
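# Sketch of the include_deleted fallback (only this helper's name is new):
# the live `projects` table is consulted first, and only when nothing matches
# does the lookup fall back to the latest projects_history row, so a deleted
# project still resolves to its last known state.
def _example_resolve_project_or_fail(user_id, project_codename):
    project = get_project(user_id, project_codename, include_deleted=True)
    if project is None:
        raise RuntimeError("No project " + project_codename)
    return project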
def get_task_info(job_id):
    """
    Get the task from the task queue

    :param job_id: The id of the queued job
    :type job_id: int
    :return: The task info
    :rtype: dict[str, any]
    """
    g_db = core.api_util.DatabaseContext.get_conn()
    return pg_util.row_to_dict(
        g_db.execute("SELECT * FROM task_queue WHERE job_id = %s", [job_id]).fetchone())
def append_file_to_project(user_id, project_codename, file_path, filename=None, key=None, overwrite=False):
    """ Add a file to project file list and save it into distant storage """
    project = get_project(user_id, project_codename)
    if not project:
        raise RuntimeError("No project " + project_codename)
    storage = core.api_util.get_storage(project['storage'])
    if filename is None:
        # Generate a unique name, keeping the extension of the source file
        _, file_extension = os.path.splitext(file_path)
        if file_extension:
            file_extension = "." + file_extension.lstrip(".")
        filename = project_codename + "-" + str(uuid.uuid4()) + file_extension
    old_file = None
    g_db = core.api_util.DatabaseContext.get_conn()
    with storage.uploading_file(file_path, filename):
        file_size = os.path.getsize(file_path)
        if key and overwrite:
            # Replace any previous file registered under the same key, atomically
            with pg_util.Transaction(g_db):
                old_file = g_db.execute(
                    """SELECT * FROM project_files WHERE project_uid = %s AND key = %s""",
                    [project_codename, key]).fetchone()
                if old_file:
                    g_db.execute("DELETE FROM project_files WHERE id = %s", [old_file['id']])
                row_id = g_db.execute(
                    """INSERT INTO project_files (project_uid, filename, key, size)
                            VALUES (%s, %s, %s, %s)
                         RETURNING id""",
                    [project_codename, filename, key, file_size]).fetchval()
        else:
            row_id = g_db.execute(
                """INSERT INTO project_files (project_uid, filename, key, size)
                        VALUES (%s, %s, %s, %s)
                     RETURNING id""",
                [project_codename, filename, key, file_size]).fetchval()
    if old_file:
        # Only remove the replaced file from storage once the new upload succeeded
        storage.delete_file(old_file["filename"])
    result = g_db.execute("SELECT * FROM project_files WHERE id = %s", [row_id]).fetchone()
    return pg_util.row_to_dict(result)
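# Hedged usage sketch for append_file_to_project. The local path and key
# value are assumptions for this example (save_project_file's docstring
# documents key as int|None). With key and overwrite set, the old row is
# swapped inside one transaction and the replaced file is removed from the
# distant storage only after the new upload has gone through.
def _example_replace_keyed_file(user_id, project_codename):
    return append_file_to_project(user_id, project_codename,
                                  "/tmp/geometry.stl",  # assumed local path
                                  key=1, overwrite=True)  # assumed key value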
def charge_user_fix_price(user_id, job_id, description):
    """ Charge a user the fixed cost of a job's operation """
    log.info("Charging user %s. Reason: %s" % (str(user_id), description))
    g_db = core.api_util.DatabaseContext.get_conn()
    with g_db.cursor() as cur:
        cur.execute("SELECT operation_id FROM jobs WHERE id = %s", [job_id])
        job = cur.fetchone()
        operation_id = job["operation_id"]
    with g_db.cursor() as cur:
        cur.execute("SELECT * FROM operations_history WHERE id = %s", [operation_id])
        fix_price = cur.fetchone()
    price_snapshot = {"fix_price": pg_util.row_to_dict(fix_price, datetime_to_int=True)}
    with g_db.cursor() as cur:
        cur.execute(
            """INSERT INTO user_accounts (user_id, amount, description, job_id, price_snapshot)
                    VALUES (%s, %s, %s, %s, %s)""",
            [user_id, int(fix_price["fixed_cost"]) * -1, description, job_id, price_snapshot])
    g_db.commit()
def get_job(job_id, include_log=False):
    """
    Get the job information, optionally including its logs

    :param job_id: The id of the job
    :type job_id: int
    :param include_log: Whether to include the full job logs in the result
    :type include_log: bool
    :return: The job information, or None if the job is not found
    :rtype: dict[str, any]|None
    """
    g_db = core.api_util.DatabaseContext.get_conn()
    if include_log:
        job = g_db.execute(
            """SELECT id, user_id, project_uid, status, progress, operation_id, provider_cost_id,
                      machine_price_id, nbr_machines, create_date, start_time, end_time, logs, debug,
                      TRIM(COALESCE(logs, '')) <> '' AS has_logs
                 FROM jobs
                WHERE id = %s""", [job_id]).fetchone()
    else:
        job = g_db.execute(
            """SELECT id, user_id, project_uid, status, progress, operation_id, provider_cost_id,
                      machine_price_id, nbr_machines, create_date, start_time, end_time, debug,
                      TRIM(COALESCE(logs, '')) <> '' AS has_logs
                 FROM jobs
                WHERE id = %s""", [job_id]).fetchone()
    return pg_util.row_to_dict(job)
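# Small sketch showing how has_logs lets callers defer fetching log text
# (only this helper's name is new): a cheap first call checks the flag, and
# the row is re-read with include_log=True only when logs actually exist.
def _example_fetch_job_logs(job_id):
    job = get_job(job_id)
    if job is None or not job["has_logs"]:
        return None
    return get_job(job_id, include_log=True)["logs"]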
def get_report(from_date, to_date, user_id=None, order_by=None):
    """ Build a per-user accounting report between from_date and to_date:
        balances before and within the period, plus per-project,
        per-description detail rows """
    overview_query_args = [core.api_util.PRICE_PRECISION, from_date,
                           core.api_util.PRICE_PRECISION, to_date]
    overview_query = """SELECT U.id AS user_id, U.login, U.email,
                               COALESCE(T1.previous_balance, 0) AS previous_balance,
                               COALESCE(T2.current_balance, 0) AS current_balance
                          FROM users AS U
                     LEFT JOIN (
                               SELECT UA.user_id, SUM(UA.amount) / %s AS previous_balance
                                 FROM user_accounts AS UA
                                WHERE UA.date < %s
                             GROUP BY UA.user_id
                          ) AS T1 ON T1.user_id = U.id
                     LEFT JOIN (
                               SELECT UA.user_id, SUM(UA.amount) / %s AS current_balance
                                 FROM user_accounts AS UA
                                WHERE UA.date < %s
                             GROUP BY UA.user_id
                          ) AS T2 ON T2.user_id = U.id"""
    details_query_args = [core.api_util.PRICE_PRECISION, from_date, to_date,
                          from_date, to_date, from_date, to_date]
    details_query = """SELECT UA.user_id,
                              MAX(UA.date) AS date,
                              SUM(UA.amount) / %s AS amount,
                              UA.description,
                              J.project_uid AS project,
                              SUM(J.cores_per_sec) AS cores_per_sec,
                              MAX(P.mesh_count) AS mesh_count,
                              MAX(P.calc_count) AS calc_count
                         FROM user_accounts AS UA
                    LEFT JOIN (
                              SELECT jobs.*,
                                     GREATEST(
                                         MAX(MPH.min_sec_granularity),
                                         (CEIL(EXTRACT(EPOCH FROM (MAX(jobs.end_time) - MIN(jobs.start_time)))::integer
                                               / MAX(MPH.sec_granularity)) * MAX(MPH.sec_granularity))::integer
                                     ) * MAX(jobs.nbr_machines) * MAX(MH.nbr_cores) AS cores_per_sec
                                FROM jobs
                           LEFT JOIN machine_prices_history AS MPH ON MPH.id = jobs.machine_price_id
                           LEFT JOIN machines_history AS MH ON MH.uid = MPH.machine_uid
                            GROUP BY jobs.id
                         ) AS J ON J.id = UA.job_id
                    LEFT JOIN (
                              SELECT projects.uid,
                                     COUNT(DISTINCT M.id) AS mesh_count,
                                     COUNT(DISTINCT C.id) AS calc_count
                                FROM projects
                           LEFT JOIN meshes AS M ON M.project_uid = projects.uid
                           LEFT JOIN calculations AS C ON C.project_uid = projects.uid
                               WHERE M.status != 1
                                 AND (M.delete_date IS NULL OR M.delete_date >= %s)
                                 AND M.create_date <= %s
                                 AND C.status != 1
                                 AND (C.delete_date IS NULL OR C.delete_date >= %s)
                                 AND C.create_date <= %s
                            GROUP BY projects.uid
                         ) AS P ON P.uid = J.project_uid
                        WHERE UA.date > %s
                          AND UA.date <= %s """
    if user_id is not None:
        overview_query += " WHERE U.id = %s"
        overview_query_args.append(user_id)
        details_query += " AND UA.user_id = %s "
        details_query_args.append(user_id)
    details_query += "GROUP BY J.project_uid, UA.description, UA.user_id"
    if order_by is None or order_by == "project":
        details_query += " ORDER BY user_id, project NULLS FIRST, date DESC, description"
    elif order_by == "date":
        details_query += " ORDER BY user_id, date DESC, project NULLS FIRST, description"
    elif order_by == "description":
        details_query += " ORDER BY user_id, description, date DESC, project NULLS FIRST"
    g_db = core.api_util.DatabaseContext.get_conn()
    by_user_id = {}
    with g_db.cursor() as cur:
        cur.execute(overview_query, overview_query_args)
        for row in cur:
            by_user_id[row["user_id"]] = pg_util.row_to_dict(row)
            by_user_id[row["user_id"]]['details'] = []
    with g_db.cursor() as cur:
        cur.execute(details_query, details_query_args)
        for row in cur:
            row_info = pg_util.row_to_dict(row)
            row_user_id = row_info["user_id"]
            # Only keep the aggregate columns relevant to each charge type
            if "cloud computation cost" not in row_info['description'].lower():
                del row_info['cores_per_sec']
            if "calculation storage cost" not in row_info['description'].lower():
                del row_info['calc_count']
            if "mesh storage cost" not in row_info['description'].lower():
                del row_info['mesh_count']
            del row_info["user_id"]
            by_user_id[row_user_id]['details'].append(row_info)
    return by_user_id
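# Hedged sketch of how the structure returned by get_report can be consumed
# (only this helper's name is new; everything else comes from this module):
# the report maps user ids to their overview row, each carrying a "details"
# list of per-project, per-description aggregates.
def _example_log_report_summary(from_date, to_date):
    report = get_report(from_date, to_date, order_by="date")
    for uid, entry in report.items():
        log.info("user %s: balance %s -> %s, %d detail rows" % (
            uid, entry["previous_balance"], entry["current_balance"], len(entry["details"])))
    return report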