Example 1
def set_job_specific_cost(job_id, provider_code, machine_code, cost, currency, sec_granularity, min_sec_granularity):
    """
    Save specific cost from spot instance

    :param job_id:                  The job id of the task used for
    :type job_id:                   int
    :param provider_code:           The code of the provider (ex: "aws_eu_spot")
    :type provider_code:            str
    :param machine_code:            The code of the instance to run (ex: "c4.2x")
    :type machine_code:             str
    :param cost:                    The cost of the second multiplied by the precision
    :type cost:                     int
    :param currency:                The currency of the cost
    :type currency:                 str
    """
    g_db = core.api_util.DatabaseContext.get_conn()
    with pg_util.Transaction(g_db):
        machine_uid = g_db.execute("""SELECT uid 
                                              FROM machines         
                                             WHERE machine_code = %s 
                                               AND provider_code = %s 
                                             LIMIT 1""", [machine_code, provider_code]).fetchval()
        now = pg_util.get_now(g_db)
        query_args = [machine_uid, cost, currency, sec_granularity, min_sec_granularity, now, now]
        g_db.execute("""INSERT INTO provider_costs_history (machine_uid, cost_per_sec, currency, sec_granularity, 
                                                            min_sec_granularity, start_time, end_time)
                                                    VALUES (%s, %s, %s, %s, %s, %s, %s)
                                                 RETURNING id""", query_args)
        cost_id = g_db.fetchval()  # id of the provider_costs_history row inserted above
        g_db.execute("UPDATE jobs SET provider_cost_id = %s WHERE id = %s ", [cost_id, job_id])
Example 2
def cancel_jobs(job_id_list):
    if not job_id_list:
        return
    g_db = core.api_util.DatabaseContext.get_conn()
    with pg_util.Transaction(g_db):
        g_db.execute("DELETE FROM task_queue WHERE job_id IN ("+", ".join(["%s"]*len(job_id_list))+")", job_id_list)
        g_db.execute("""UPDATE jobs 
                           SET status = %s ,
                               end_time = now(),
                               progress = 0.0
                         WHERE id IN ("""+", ".join(["%s"]*len(job_id_list))+")",
                     [JOB_STATUS_CANCELING]+job_id_list)

        g_db.executemany("""INSERT INTO task_queue
                                 VALUES (%s, %s, %s)
                            ON CONFLICT(job_id) DO UPDATE SET task = EXCLUDED.task""",
                         [[job_id, TASK_CANCEL, {}] for job_id in job_id_list])

    with core.api_util.RedisContext.using_pubsub_conn() as r:
        channel = core.api_util.RedisContext.get_channel("launcher")
        for job_id in job_id_list:
            try:
                r.publish(channel, str(TASK_CANCEL) + "_" + str(job_id))
                log.debug("redis cancellation publish")
            except StandardError as e:
                log.warning("Unable to connect to redis server: " + str(e))
Example 3
def create_job(user_id, project_codename, operation_id, provider_cost_id=None, machine_price_id=None,
               nbr_machines=1, status=JOB_STATUS_PENDING):
    """
    Create a new job

    :param user_id:                 The job owner
    :type user_id:                  int
    :param project_codename:        The project unique identifier
    :type project_codename:         str
    :param operation_id:            The identifier of a specific operation.
    :type operation_id:             int
    :param provider_cost_id:        The cost associated with the task. Optional, default None
    :type provider_cost_id:         int|None
    :param machine_price_id:        The price of the worker to launch. Optional, default None
    :type machine_price_id:         int|None
    :param nbr_machines:            The number of workers to launch. Optional, default 1
    :type nbr_machines:             int
    :param status:                  The initial status of the job. Optional, default JOB_STATUS_PENDING
    :type status:                   int
    :return:                        The created job
    :rtype:                         dict[str, any]
    """
    g_db = core.api_util.DatabaseContext.get_conn()
    with pg_util.Transaction(g_db):
        g_db.execute("""INSERT INTO jobs (user_id, project_uid, operation_id, provider_cost_id, machine_price_id, 
                                          status, nbr_machines)
                                  VALUES (%s, %s, %s, %s, %s, %s, %s)
                               RETURNING id""",
                     [user_id, project_codename, operation_id, provider_cost_id, machine_price_id, status,
                      nbr_machines])
        job_id = g_db.fetchval()
        return get_job(job_id)
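A hypothetical call, with illustrative ids, could look like this:

# Hypothetical usage sketch (ids are illustrative): queue a pending job that
# will run the given operation on two machines for the project owner.
job = create_job(user_id=7,
                 project_codename="proj-ab12",
                 operation_id=3,
                 nbr_machines=2)
# job is the dict returned by get_job(), e.g. job["id"], job["status"]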
def remove_machine(provider_name, machine_code):
    g_db = core.api_util.DatabaseContext.get_conn()
    with pg_util.Transaction(g_db):
        machine = get_machine(provider_name, machine_code)
        if not machine:
            return
        pg_util.hist_remove(g_db,
                            "machines",
                            where=[
                                ("provider_code", "=", provider_name),
                                ("machine_code", "=", machine_code),
                            ])
        pg_util.hist_remove(g_db,
                            "machine_prices",
                            where=[("machine_uid", "=", machine["uid"])])
        pg_util.hist_remove(g_db,
                            "provider_costs",
                            where=[("machine_uid", "=", machine["uid"])])
        g_db.execute(
            """DELETE FROM operation_machine 
                              WHERE machine_uid IN (
                                        SELECT uid 
                                          FROM machines
                                         WHERE provider_code = %s
                                           AND machine_code = %s)""",
            [provider_name, machine_code])
def set_operation_machines(provider_name, operation_name, allowed_machines):
    g_db = core.api_util.DatabaseContext.get_conn()
    with pg_util.Transaction(g_db):
        machines = list_machines(provider_name)
        existing_machine_codes = [m["machine_code"] for m in machines]
        for machine_code in allowed_machines:
            if machine_code not in existing_machine_codes:
                raise RuntimeError("Unknown machine " + machine_code)
        count_op = g_db.execute(
            """SELECT count(*)
                 FROM operations
                WHERE operation_name = %s""",
            [operation_name]).fetchval()
        if count_op == 0:
            raise RuntimeError("Unknown operation " + str(operation_name))

        g_db.execute(
            """DELETE FROM operation_machine 
                              WHERE operation_name = %s
                                AND machine_uid IN (
                                       SELECT uid 
                                         FROM machines
                                        WHERE provider_code = %s)""",
            [operation_name, provider_name])
        g_db.executemany(
            """INSERT INTO operation_machine (operation_name, machine_uid)
                                 SELECT %s, uid
                                   FROM machines
                                  WHERE machine_code = %s
                                    AND provider_code = %s""",
            [(operation_name, x, provider_name) for x in allowed_machines])
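A hypothetical call, with illustrative provider, operation and machine names:

# Hypothetical usage sketch (names are illustrative): restrict the "mesh"
# operation on the "aws_eu" provider to two machine types.
set_operation_machines("aws_eu", "mesh", ["c4.2x", "c4.4x"])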
Example 6
def save_report_date(report_date):
    g_db = core.api_util.DatabaseContext.get_conn()

    with pg_util.Transaction(g_db):
        with g_db.cursor() as cur:
            cur.execute("DELETE FROM last_report")
            cur.execute("INSERT INTO last_report (report_date) VALUES (%s)",
                        [report_date])
def set_operation_cluster_limit(provider_name, operation_name, cluster_limit):
    g_db = core.api_util.DatabaseContext.get_conn()
    with pg_util.Transaction(g_db):
        for rank in users.all_ranks():
            pg_util.hist_upsert(g_db,
                                "operations",
                                values={"cluster_limit": cluster_limit},
                                where=[("operation_name", "=", operation_name),
                                       ("provider_code", "=", provider_name),
                                       ("user_rank", "=", rank)])
Example 8
def append_file_to_project(user_id,
                           project_codename,
                           file_path,
                           filename=None,
                           key=None,
                           overwrite=False):
    """
    Add a file to project file list and save it into distant storage
    """
    project = get_project(user_id, project_codename)
    if not project:
        raise RuntimeError("No project " + project_codename)
    storage = core.api_util.get_storage(project['storage'])
    if filename is None:
        # os.path.splitext already returns either "" or the extension with its
        # leading dot, so it can be appended directly.
        _, file_extension = os.path.splitext(file_path)
        filename = project_codename + "-" + str(uuid.uuid4()) + file_extension
    old_file = None
    g_db = core.api_util.DatabaseContext.get_conn()
    with storage.uploading_file(file_path, filename):
        file_size = os.path.getsize(file_path)
        if key and overwrite:
            with pg_util.Transaction(g_db):
                old_file = g_db.execute(
                    """SELECT *
                         FROM project_files
                        WHERE project_uid = %s
                          AND key = %s""",
                    [project_codename, key]).fetchone()
                if old_file:
                    g_db.execute("DELETE FROM project_files WHERE id = %s",
                                 [old_file['id']])
                row_id = g_db.execute(
                    """INSERT INTO project_files (project_uid, filename, key, size)
                            VALUES (%s, %s, %s, %s)
                         RETURNING id""",
                    [project_codename, filename, key, file_size]).fetchval()
        else:
            row_id = g_db.execute(
                """INSERT INTO project_files (project_uid, filename, key, size)
                        VALUES (%s, %s, %s, %s)
                     RETURNING id""",
                [project_codename, filename, key, file_size]).fetchval()
    if old_file:
        storage = core.api_util.get_storage(project['storage'])
        storage.delete_file(old_file["filename"])
    result = g_db.execute("SELECT * FROM project_files WHERE id = %s",
                          [row_id]).fetchone()
    return pg_util.row_to_dict(result)
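A hypothetical call, assuming a local file path and a stable key so that a later upload replaces the previous file stored under that key:

# Hypothetical usage sketch (paths and names are illustrative): attach a local
# file under the "mesh" key, replacing any file previously stored under it.
new_file = append_file_to_project(user_id=7,
                                  project_codename="proj-ab12",
                                  file_path="/tmp/wind_mesh.stl",
                                  key="mesh",
                                  overwrite=True)
# new_file["filename"] is the generated name under which the file was stored.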
Example 9
def create_calc(user_id, project_codename, mesh_id, calc_name, overwrite=True):
    g_db = core.api_util.DatabaseContext.get_conn()
    with pg_util.Transaction(g_db):
        project = projects.get_project(user_id, project_codename)
        if not project:
            raise RuntimeError("Unknown project " + project_codename)
        if overwrite:
            delete_calc(user_id, project_codename, calc_name)
        calc_id = g_db.execute(
            "INSERT INTO calculations (project_uid, mesh_id, name) VALUES (%s, %s, %s) RETURNING id",
            [project_codename, mesh_id, calc_name]).fetchval()
        return g_db.execute("SELECT * FROM calculations WHERE id = %s",
                            [calc_id]).fetchone()
Example 10
def create_mesh(user_id, project_codename, mesh_name, overwrite=True):
    g_db = core.api_util.DatabaseContext.get_conn()
    with pg_util.Transaction(g_db):
        project = projects.get_project(user_id, project_codename)
        if not project:
            raise RuntimeError("Unknown project " + project_codename)
        if overwrite:
            log.warning("Deleting mesh " + mesh_name + " from overwrite")
            delete_mesh(user_id, project_codename, mesh_name)
        mesh_id = g_db.execute(
            "INSERT INTO meshes (project_uid, name) VALUES (%s, %s) RETURNING id",
            [project_codename, mesh_name]).fetchval()
        return g_db.execute("SELECT * FROM meshes WHERE id = %s",
                            [mesh_id]).fetchone()
Example 11
def charge_user_computing(user_id, job_id, description):
    log.info("Charging user %s. Reason: %s" % (str(user_id), description))
    g_db = core.api_util.DatabaseContext.get_conn()
    with pg_util.Transaction(g_db):
        with g_db.cursor() as cur:
            cur.execute(
                "SELECT machine_price_id, start_time, nbr_machines FROM jobs WHERE id = %s",
                [job_id])
            job = pg_util.row_to_dict(cur.fetchone())
        with g_db.cursor() as cur:
            cur.execute("SELECT * FROM machine_prices_history WHERE id = %s",
                        [job["machine_price_id"]])
            price = pg_util.row_to_dict(cur.fetchone())
        now = pg_util.get_now(g_db)
        with g_db.cursor() as cur:
            cur.execute(
                "SELECT MAX(computing_end) FROM user_accounts WHERE job_id = %s",
                [job_id])
            last_comp = cur.fetchone()
            if last_comp is not None:
                last_comp = last_comp[0]

        if last_comp is None:
            start_time = job["start_time"] if job["start_time"] else now
            end_time = start_time + datetime.timedelta(
                seconds=price["min_sec_granularity"])
        else:
            start_time = last_comp
            end_time = start_time + datetime.timedelta(
                seconds=price["sec_granularity"])

        while end_time < now:
            end_time += datetime.timedelta(seconds=price["sec_granularity"])

        charged_price = (end_time - start_time).total_seconds() * int(
            price["sec_price"]) * int(job["nbr_machines"])
        price_snapshot = {
            "price": pg_util.row_to_dict(price),
            "start_time": start_time,
            "end_time": end_time
        }
        with g_db.cursor() as cur:
            cur.execute(
                """INSERT INTO user_accounts (user_id, amount, description, job_id, price_snapshot, 
                                                       computing_start, computing_end)
                                  VALUES (%s, %s, %s, %s, %s, %s, %s)""", [
                    user_id, charged_price * -1, description, job_id,
                    price_snapshot, start_time, end_time
                ])
        return end_time
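As a rough illustration of the billing arithmetic above (all numbers are made up): the first charge covers min_sec_granularity seconds from the job start, and later charges extend the billed window in sec_granularity steps until it passes the current time.

import datetime

# Worked example of the charge computed above, with made-up numbers.
start_time = datetime.datetime(2020, 1, 1, 12, 0, 0)
min_sec_granularity = 300   # first billed interval, in seconds
sec_granularity = 60        # later increments, in seconds
sec_price = 5               # internal currency units per second
nbr_machines = 2

# First charge for a job: no previous computing_end, so bill the minimum window.
end_time = start_time + datetime.timedelta(seconds=min_sec_granularity)
charged_price = (end_time - start_time).total_seconds() * sec_price * nbr_machines
# charged_price == 3000.0, inserted into user_accounts as -3000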
def set_machine_and_prices(provider_name, machine_code, nbr_cores, ram_size,
                           nbr_available, provider_cost_per_sec,
                           provider_currency, provider_sec_granularity,
                           provider_min_sec_granularity, price_sec_granularity,
                           price_min_sec_granularity, prices, auto_update):
    g_db = core.api_util.DatabaseContext.get_conn()
    with pg_util.Transaction(g_db):
        set_machine(provider_name, machine_code, nbr_cores, ram_size,
                    nbr_available, provider_cost_per_sec, provider_currency,
                    provider_sec_granularity, provider_min_sec_granularity)
        for rank, price in prices.items():
            set_machine_price(provider_name, machine_code, rank, price,
                              price_sec_granularity, price_min_sec_granularity,
                              auto_update)
def set_machine_price(provider_name,
                      machine_code,
                      user_rank,
                      price,
                      sec_granularity,
                      min_sec_granularity=None,
                      auto_update=True):
    if min_sec_granularity is None:
        min_sec_granularity = sec_granularity
    machine = get_machine(provider_name, machine_code)
    if machine is None:
        raise RuntimeError("Unknown machine " + str(machine_code))
    if util.float_equals(price, 0) or price < 0:
        raise RuntimeError("A price should never be zero or less")
    g_db = core.api_util.DatabaseContext.get_conn()
    with pg_util.Transaction(g_db):
        pg_util.hist_upsert(g_db,
                            "machine_prices",
                            values={
                                "sec_price": price,
                                "sec_granularity": sec_granularity,
                                "min_sec_granularity": min_sec_granularity,
                                "auto_update": auto_update
                            },
                            where=[("machine_uid", "=", machine["uid"]),
                                   ("user_rank", "=", user_rank)])
        g_db.execute("""DELETE FROM machine_price_auto_update 
                         WHERE machine_price_id NOT IN (
                            SELECT id FROM machine_prices
                            WHERE auto_update = TRUE
                         )""")
        g_db.execute(
            """INSERT INTO machine_price_auto_update(machine_price_id, cost_id, to_zcoins) 
                        SELECT mp.id AS machine_price_id, 
                               pc.id AS cost_id, 
                               cer.to_zcoins AS to_zcoins
                          FROM machine_prices AS mp
                          LEFT JOIN provider_costs AS pc
                                 ON mp.machine_uid = pc.machine_uid
                          JOIN currency_exchange_rates AS cer
                                 ON pc.currency = cer.currency
                          WHERE mp.auto_update = TRUE
                            AND mp.id NOT IN (
                                SELECT machine_price_id 
                                  FROM machine_price_auto_update )""")
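A hypothetical call, with illustrative names and values:

# Hypothetical usage sketch (names and values are illustrative): price the
# "c4.2x" machine for one user rank at 12 units per second, billed per minute
# with a 5-minute minimum.
set_machine_price("aws_eu", "c4.2x", user_rank=2, price=12,
                  sec_granularity=60, min_sec_granularity=300)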
def update_machine_prices():
    g_db = core.api_util.DatabaseContext.get_conn()
    with pg_util.Transaction(g_db):
        result = g_db.execute("""SELECT ref_mph.machine_uid AS machine_uid,
                                       ref_mph.user_rank AS user_rank,
                                       CAST(ROUND(CAST (ref_mph.sec_price AS FLOAT) / ref_pch.cost_per_sec / mpau.to_zcoins * new_pch.cost_per_sec * cer.to_zcoins) AS INT) AS new_sec_price
                                  FROM machine_price_auto_update AS mpau
                                  LEFT JOIN machine_prices_history AS ref_mph
                                         ON mpau.machine_price_id = ref_mph.id
                                  LEFT JOIN provider_costs_history AS ref_pch
                                         ON mpau.cost_id = ref_pch.id
                                  LEFT JOIN provider_costs_history AS new_pch
                                         ON ref_mph.machine_uid = new_pch.machine_uid
                                  LEFT JOIN currency_exchange_rates AS cer
                                         ON new_pch.currency = cer.currency
                                  LEFT JOIN machine_prices_history AS curr_mph
                                         ON ref_mph.machine_uid = curr_mph.machine_uid AND ref_mph.user_rank = curr_mph.user_rank
                                  WHERE new_pch.end_time IS NULL
                                    AND curr_mph.end_time IS NULL
                                    AND ref_pch.cost_per_sec != 0
                                    AND  mpau.to_zcoins != 0
                                    AND CAST(ROUND(CAST (ref_mph.sec_price AS FLOAT) / ref_pch.cost_per_sec / mpau.to_zcoins * new_pch.cost_per_sec * cer.to_zcoins) AS INT) != curr_mph.sec_price
                                  ORDER BY ref_mph.machine_uid, ref_mph.user_rank"""
                              ).fetchall()

        # Useful column meanings:
        # ref_pch.currency AS currency,
        # ref_pch.cost_per_sec AS ref_cost_per_sec,
        # mpau.to_zcoins AS ref_to_zcoins,
        # ref_mph.sec_price AS ref_sec_price,
        # new_pch.cost_per_sec AS new_cost_per_sec,
        # cer.to_zcoins AS new_to_zcoins,
        # curr_mph.sec_price AS current_sec_price,

        for row in result:
            pg_util.hist_upsert(g_db,
                                "machine_prices",
                                values={
                                    "sec_price": int(row["new_sec_price"]),
                                    "auto_priced": True
                                },
                                where=[("machine_uid", "=",
                                        row["machine_uid"]),
                                       ("user_rank", "=", row["user_rank"])])
Example 15
def cancel_job(job_id):
    g_db = core.api_util.DatabaseContext.get_conn()
    with pg_util.Transaction(g_db):
        log.debug("adding cancellation in task queue")
        g_db.execute("""INSERT INTO task_queue 
                               VALUES (%s, %s, %s)
                        ON CONFLICT(job_id) DO UPDATE SET task = EXCLUDED.task""", [job_id, TASK_CANCEL, {}])
        g_db.execute("""UPDATE jobs 
                           SET status = %s,
                               end_time = now(),
                               progress = 0.0
                         WHERE id = %s """, [JOB_STATUS_CANCELING, job_id])
    with core.api_util.RedisContext.using_pubsub_conn() as r:
        try:
            channel = core.api_util.RedisContext.get_channel("launcher")
            r.publish(channel, str(TASK_CANCEL) + "_" + str(job_id))
            log.debug("redis cancellation publish")
        except StandardError as e:
            log.warning("Unable to connect to redis server: " + str(e))
def update_machine(provider_name, machine_code, params):
    log.error("SAM: update_params: " + repr(params))
    g_db = core.api_util.DatabaseContext.get_conn()
    with pg_util.Transaction(g_db):
        update_params = {}
        for param in ["nbr_cores", "ram_size", "nbr_available"]:
            if param in params.keys():
                update_params[param] = params[param]
        if update_params:
            results = pg_util.hist_update(g_db,
                                          "machines",
                                          values=update_params,
                                          where=[("provider_code", "=",
                                                  provider_name),
                                                 ("machine_code", "=",
                                                  machine_code)])
            machine_uid = results[0]["uid"]
        else:
            result = get_machine(provider_name, machine_code)
            machine_uid = result["uid"]

        update_params = {}
        for param in [
                "provider_cost_per_sec", "provider_currency",
                "provider_sec_granularity", "provider_min_sec_granularity"
        ]:
            if param in params:
                update_params[param[len('provider_'):]] = params[param]
        if update_params:
            pg_util.hist_upsert(g_db,
                                "provider_costs",
                                update_params,
                                where=[("machine_uid", "=", machine_uid)])
        update_params = {}
        for param in ["price_sec_granularity", "price_min_sec_granularity"]:
            if param in params:
                update_params[param[len('price_'):]] = params[param]

        for rank in users.all_ranks():
            rank_update_params = copy.deepcopy(update_params)
            if "prices" in params and rank in params['prices']:
                rank_update_params['sec_price'] = params['prices'][rank]
            if "auto_update" in params:
                rank_update_params['auto_update'] = params['auto_update']
            log.error("SAM: rank_update_params: " + repr(rank_update_params))
            if rank_update_params:
                pg_util.hist_upsert(g_db,
                                    "machine_prices",
                                    values=rank_update_params,
                                    where=[("machine_uid", "=", machine_uid),
                                           ("user_rank", "=", rank)])
        if "auto_update" in params.keys():
            with pg_util.Transaction(g_db):
                g_db.execute("""DELETE FROM machine_price_auto_update 
                                         WHERE machine_price_id NOT IN (
                                            SELECT id FROM machine_prices
                                            WHERE auto_update = TRUE
                                         )""")
                g_db.execute(
                    """INSERT INTO machine_price_auto_update(machine_price_id, cost_id, to_zcoins) 
                                        SELECT mp.id AS machine_price_id, 
                                               pc.id AS cost_id, 
                                               cer.to_zcoins AS to_zcoins
                                          FROM machine_prices AS mp
                                          LEFT JOIN provider_costs AS pc
                                                 ON mp.machine_uid = pc.machine_uid
                                          JOIN currency_exchange_rates AS cer
                                                 ON pc.currency = cer.currency
                                          WHERE mp.auto_update = TRUE
                                            AND mp.id NOT IN (
                                                SELECT machine_price_id 
                                                  FROM machine_price_auto_update )"""
                )
            if params["auto_update"] and "prices" not in params.keys():
                update_machine_prices()