def get_public_api_data_daily(environment, sport, date):
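    """Fetch the public API daily event list for a sport/date and annotate its stages."""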
    freeze_list = get_freeze_list(environment)
    config = get_url_config()
    public_api_daily_pattern = config[environment]['public-api-base-url'] + config[environment]['public-api-event-list']
    public_api_daily_link = public_api_daily_pattern.format(sport=sport, ls_date=date)

    Log.debug(public_api_daily_link)

    try:
        daily = requests.get(public_api_daily_link).json()
    except Exception:
        return "Internal Server Error", 500
    mapping_template = []
    if environment in ["dev", "test"]:
        mapping_template = Kafka.consume(environment, "export-mapping-template")
        mapping_template = json.loads(mapping_template[0])
        mapping_template = mapping_template["featureProviders"]

    full_daily = {"Stages": []}
    for stage in daily["Stages"]:
        stage["_LC"] = 0
        for event in stage["Events"]:
            if event["Epr"] == 1:
                stage["_LC"] += 1
                if any(a in freeze_list for a in event["Pids"].values()):
                    event["_FR"] = "1"
        if environment in ["dev", "test"]:
            stage["_FP"] = {}
            stage["_FP"].update(find_priority_rules(mapping_template, get_sport_id_by_name(sport), int(stage["Sid"])))
            stage["_FP_unique"] = []
            stage["_FP_unique"] = list(set(stage["_FP"].values()))
        full_daily["Stages"].append(stage)
    return full_daily
def separate_file(folder, filename):
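    """Run the separator.ps1 PowerShell script against a repository file and return its output."""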
    Log.info("Separating file {}".format(filename))
    ps_path = 'powershell'
    separator_path = os.path.join(MODULES_DIR, 'separator.ps1')
    file = os.path.join(MAIN_REPOSITORY_PATH, folder, filename)
    # Pass the arguments as a list so paths containing spaces survive quoting.
    command = [ps_path, '-Command', separator_path, file]
    result = subprocess.check_output(command).decode('cp866').rstrip()
    return result, 200
def remove_old_result_files():
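    """Delete result files older than one week; returns the names of the deleted files."""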
    Log.info("Checking RESULTS_DIR for old files")
    week = 604800  # seconds in 1 week
    res = list()
    for file in __listdirattr(RESULTS_DIR):
        if file['created'] < get_timestamp() - week:
            Log.info("File {} is older than 1 week. Deleting...".format(
                file['name']))
            delete_file(RESULTS_DIR, file['name'])
            res.append(file['name'])
    return res, 200
def connect(dns):
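    """Open an SSH connection, auto-accepting unknown host keys; returns the
    paramiko client, or an (error, 410) tuple on timeout."""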
    Log.info("Connecting to {}".format(dns))
    try:
        ssh = paramiko.SSHClient()
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        ssh.connect(dns, username='******', key_filename='C:\\Users\\IDidyk\\.ssh\\id_rsa.ppk')
    except TimeoutError as error:
        Log.error(error)
        return str(error), 410
    else:
        return ssh
def delete_file(folder, filename):
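    """Remove a file from the repository, verifying that it is actually gone."""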
    file = os.path.join(MAIN_REPOSITORY_PATH, folder, filename)
    if os.path.exists(file):
        os.remove(file)
        if not os.path.exists(file):
            Log.info("File {} is deleted".format(filename))
            return "File {} removed".format(filename), 200
        else:
            return "Something went wrong", 510
    else:
        return "Not found", 404
def consume(environment, topic):
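    """Read the earliest available message from a Kafka topic; returns
    (message, 200), or an error tuple when nothing arrives."""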
    bootstrap_servers = [
        "{}-kafka-0.ls.seo:9092".format(environment),
        "{}-kafka-1.ls.seo:9092".format(environment)
    ]
    consumer = KafkaConsumer(topic,
                             bootstrap_servers=bootstrap_servers,
                             auto_offset_reset="earliest",
                             # stop iterating after 10s (an assumed value) so the
                             # no-message fallback below is reachable
                             consumer_timeout_ms=10000)
    for message in consumer:
        message_str = message.value.decode('utf-8')
        return message_str, 200
    Log.warning("No message gotten from '{}' topic ({})".format(
        topic, environment))
    return "Something went wrong", 500
def move_file(old_folder, new_folder, filename):
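    """Move a file to another repository folder, rejecting moves to the same location."""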
    Log.info("Try to move {} from {} to {}".format(filename, old_folder,
                                                   new_folder))
    old_path = os.path.join(MAIN_REPOSITORY_PATH, old_folder, filename)
    new_path = os.path.join(MAIN_REPOSITORY_PATH, new_folder, filename)
    if os.path.exists(old_path):
        if old_path == new_path:
            return "File is already in this folder", 400
        os.rename(old_path, new_path)
        if os.path.exists(new_path):
            return "File moved to /" + new_folder, 200
        else:
            return "Something went wrong", 510
    else:
        return "Not found", 404
def get_freeze_list(environment):
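    """Collect frozen event IDs from the ENET and Sportradar crawler simulators
    (dev/test only; other environments have no freeze list)."""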
    if environment in ["dev", "test"]:
        ids = []
        enet_freeze_list_data = []
        sr_freeze_list_data = []
        try:
            enet_freeze_list_data = requests.get("http://" + environment + "-crawler-enetapi-0-lsm.ls-g.net:8070/simulator/listfrozen").json()
        except Exception:
            Log.debug("No data about freeze list (ENET)")
        try:
            sr_freeze_list_data = requests.get("http://" + environment + "-crawler-sportradar-0-lsm.ls-g.net:8070/simulator/listfrozen").json()
        except Exception:
            Log.debug("No data about freeze list (SR)")
        freeze_list_data = enet_freeze_list_data + sr_freeze_list_data
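        # Entries presumably embed ".../Details/<id>"; matching on "etails"
        # covers both capitalisations, and group(1) captures the numeric ID.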
        for el in freeze_list_data:
            id_search = re.search(r"etails\D+(\d+)", el)
            if id_search:
                ids.append(id_search.group(1))
        return ids
    else:
        return []
def collect(data):
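    """Execute a remote cat command over SSH, download the resulting file via
    SFTP and delete the remote copy."""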
    # Preparation
    Log.info("Command preparation...")
    config = get_config()
    component = config[data['component']]
    file_name = get_result_file_name(data)
    ssh = connect(component['dns'])
    cat_command = get_command(ssh, data, config)

    # CAT operation
    Log.info("Command executing...")
    channel = ssh.get_transport().open_session()
    channel.exec_command(cat_command)
    channel.recv_exit_status()  # blocks until the remote command finishes

    # Copying from remote to local
    Log.info("Try to copy remote file...")
    sftp = ssh.open_sftp()
    remote_path = "/home/ivan.didyk/{}".format(file_name)
    local_path = os.path.join(RESULTS_DIR, file_name)
    sftp.get(remote_path, local_path)

    # Remove remote file
    Log.info("Try to remove remote file...")
    sftp.remove(remote_path)
    sftp.close()
    Log.info("SFTP connection closed...")
    ssh.close()
    Log.info("SSH connection closed...")

    return get_result_file_name(data), 200
def get_health_pages():
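    """Poll the health-status endpoints of every monitored environment and the
    SR keys page, cache the combined data to disk and return it."""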

    devtest_url = "http://ls-tools-ls-g.dev-i.net:8091/health-status/"
    preprod_url = "https://preprod-component-monitoring.livescore.com/health-status/"
    prod_url = "https://component-monitoring.livescore.com/health-status/"
    loadtest_url = "http://35.246.114.214:8091/health-status/"
    loadtest_iron_url = "http://35.246.114.214:8092/health-status/"
    sr_keys_url = "http://ls-tools-ls-g.dev-i.net:8060/"

    health_data = []

    # The devtest/preprod/prod/loadtest endpoints share one response shape,
    # so they can be polled in a single loop.
    for url in (devtest_url, preprod_url, prod_url, loadtest_url):
        try:
            data = requests.get(url).json()
            for env in data:
                health_data.append({env: data[env]})
        except Exception as e:
            Log.warning("Can't get data from {}, {}".format(url, e))

    try:
        loadtest_iron = {
            "loadtest-iron": requests.get(loadtest_iron_url).json()['loadtest']
        }
        for env in loadtest_iron:
            health_data.append({env: loadtest_iron[env]})
    except Exception as e:
        Log.warning("Can't get data from {}, {}".format(loadtest_iron_url, e))

    try:
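        # Rebuild the SR keys HTML table: cells arrive row by row, so cell i
        # belongs to row i // len(headers) under header i % len(headers).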
        tree = html.fromstring(requests.get(sr_keys_url).content)
        headers = list()
        lines = list()
        sr_keys = {"sr-keys": []}
        for header in tree.xpath("//th/text()"):
            headers.append(header)
        for line in tree.xpath("//td/*/text()"):
            lines.append(line)
        for i in range(0, len(lines)):
            if len(sr_keys["sr-keys"]) < i // len(headers) + 1:
                sr_keys["sr-keys"].append({})
            sr_keys["sr-keys"][i // len(headers)].update(
                {headers[i % len(headers)]: lines[i]})
        health_data.append(sr_keys)
    except Exception as e:
        Log.warning("Can't get data from {}, {}".format(sr_keys_url, e))

    # Cache the collected data; create the file on first run, then overwrite it.
    cache_path = os.path.join(MODULES_DIR, "health_page_cache.txt")
    if not os.path.exists(cache_path):
        with open(cache_path, 'w'):
            pass
    FileManager.write_file(MODULES_DIR, "health_page_cache.txt",
                           json.dumps(health_data))

    return health_data, 200