Example #1
def main():
    for connection in CONNECTIONS:
        if connection['model'] == 'EVA':
            system = connection['name']
            args = [connection[key] for key in ['address', 'username', 'password']]
            connect_line = 'SELECT MANAGER %s USERNAME=%s PASSWORD=%s' %tuple(args)
    
            script = tempfile.NamedTemporaryFile()
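            # the temp script holds the CLI commands; the EVA CLI reads it via 'file <name>'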
            cli_commands = [
                connect_line,
                'select system %s' %system, 'exit']
            script.write(bytes('\n'.join(cli_commands), 'utf-8'))
            script.seek(0)
    
            p = subprocess.Popen([EVA_CLI, 'file %s' %script.name],
                stdout=subprocess.PIPE)
            out, _ = p.communicate()
            out = out.decode("utf-8")
    
            err = ''.join([line for line in out.split('\n') if 'Error' in line])
            if err:
                logging.warning('%s test failed - %s' %(system, err))
            else:
                logging.info('%s test success' %system)

    for dirname in ['logs', 'cache']:
        dirpath = os.path.join(os.getcwd(), dirname)
        if os.path.isdir(dirpath):
            shutil.rmtree(dirpath)

    return
Example #2
def main():

    os.environ['LD_LIBRARY_PATH'] = os.path.join(HDS_CLI, 'lib')
    os.environ['STONAVM_HOME'] = HDS_CLI

    for connection in CONNECTIONS:
        if connection['model'] == 'HDS':
            systemname = connection['name']
            address = connection['address']
            username = connection['username']
            password = connection['password']

            clicommand = '%s/auunitadd -unit %s -ctl0 %s' %(HDS_CLI, systemname, address)
            p = subprocess.Popen(clicommand.split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            out, _ = p.communicate()
            out = out.decode("utf-8")

            authcommand = "printf %s\\n%s" %(username, password)
            clicommand = "%s/%s -unit %s" %(HDS_CLI, 'auunitinfo', systemname)
            # printf emits the username and password on separate lines; they are piped to the CLI's stdin
            auth = subprocess.Popen(authcommand.split(), stdout=subprocess.PIPE)
            p = subprocess.Popen(
                clicommand.split(),
                stdin=auth.stdout,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE)
            auth.stdout.close()
            out, err = p.communicate()
            out = out.decode("utf-8")
            err = err.decode("utf-8").strip()
    
            if not out and err:
                logging.warning('%s test failed - %s' %(systemname, err.split('\n')[-1]))
            else:
                logging.info('%s test success' %systemname)

    return
Example #3
def main():
    for connection in CONNECTIONS:
        try:
            data = get_data(connection['address'])
            name = data['RIMP']['INFRA2']['ENCL']
            sn = data['RIMP']['INFRA2']['ENCL_SN']
            location = data['RIMP']['INFRA2']['RACK']
            logging.info('{} test success ({} {} {})'.format(connection['name'], name, sn, location))
        except Exception:
            logging.warning('%s test failed' %connection['name'])
    return
Example #4
def main():
    for connection in CONNECTIONS:
        if connection['model'] == '3PAR':
            args = [connection[key] for key in ['name', 'address', 'username', 'password']]
            args.append([])
            systemname, outs, errs, exception = ssh_run(args)
            if exception:
                logging.warning('%s test failed - %s' %(systemname, exception))
            else:
                logging.info('%s test success' %systemname)
    return
Example #5
def main():
    for connection in CONNECTIONS:
        try:
            content = get_content(connection['address'])
            filename = '%s.xmldata' %(connection['name'])
            filepath = os.path.join(TEXTDIR, filename)
            with open(filepath, 'w') as f:
                f.write(content)
                logging.info('%s | %s lines' %(
                    filename, len(content.strip().split('\n'))))
        except Exception:
            logging.warning('%s data collection failed' %connection['name'])
    return
Example #6
def _create_keyboard_header(self):
    logging.debug(
        f'{type(self).__name__} | Creating keyboard header started...')
    buttons = []
    logging.debug('debug_mode=%r (%s)', debug_mode, type(debug_mode))
    if debug_mode:
        user_session_button = types.InlineKeyboardButton(
            text="user session",
            callback_data=encode_data(GetUserInfo.__name__))
        buttons.append(user_session_button)
    self._keyboard.add(*buttons)
    logging.debug(
        f'{type(self).__name__} | Creating keyboard header completed')
Example #7
def snmpwalk(connection, counters=counters):
    """
    perform snmpwalk command and return counters values;
    """
    values = {}
    address = connection['address']
    name = connection['name']
    cmdGen = cmdgen.CommandGenerator()
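    # walk all counter OIDs in one pass; each varBindTable row holds (OID, value) pairs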
    errorIndication, errorStatus, errorIndex, varBindTable = cmdGen.nextCmd(
        cmdgen.CommunityData('public'),
        cmdgen.UdpTransportTarget((address, 161), timeout=5, retries=5),
        *list(counters.keys())
    )
    if errorIndication:
        logging.warning(name)
        logging.warning(errorIndication)
    else:
        if errorStatus:
            logging.warning(name)
            logging.warning('%s at %s' % (
                errorStatus.prettyPrint(),
                errorIndex and varBindTable[-1][int(errorIndex)-1] or '?'
                )
            )
        else:
            for varBindTableRow in varBindTable:
                for number, value in varBindTableRow:
                    counter = counter_from_number(number)
                    port = int(number.asTuple()[-1]) -1
                    value = str(value)
                    values['{} {} {}'.format(name, port, counter)] = value
    return values
Example #8
def main():
    for connection in CONNECTIONS:
        if connection['model'] == '3PAR':
            args = [connection[key] for key in ['name', 'address', 'username', 'password']]
            args = args + [COMMANDS]
            systemname, outs, errs, exception = ssh_run(args)
    
            if exception:
                logging.warning('%s - %s' %(systemname, exception))
            for commandname, out in outs.items():
                filename = '%s.%s' %(systemname, commandname)
                filepath = os.path.join(TEXTDIR, filename)
                with open(filepath, 'w') as f:
                    f.write(out)
                    logging.info('%s | %s lines' %(
                        filename, len(out.strip().split('\n'))))
    return
def process(input_path, output_dir=None, trim=True):

    input_path = Path(input_path).resolve()
    output_dir = Path(
        output_dir).resolve() if output_dir else input_path.parent

    # create output dir
    if not os.path.isdir(output_dir):
        os.makedirs(output_dir)

    output_file = input_path.stem + "_sample" + input_path.suffix
    output_path = output_dir / output_file
    json_file = input_path.stem + "_sample.json"
    json_path = output_dir / json_file
    txt_file = input_path.stem + "_sample.txt"
    txt_path = output_dir / txt_file

    # convert to strings
    input_path = str(input_path)
    output_path = str(output_path)
    json_path = str(json_path)
    txt_path = str(txt_path)

    ##############
    # TRIM VIDEO #
    ##############

    if trim:
        logging.info("Trimming video: %s" % input_path)

        # trim video
        ffmpeg_extract_subclip(input_path,
                               TRIM_START,
                               TRIM_END,
                               targetname=output_path)
        logging.info("Saved to: %s" % output_path)
    else:
        logging.info("Coppied video: %s" % output_path)

        # copy video
        shutil.copy(input_path, output_path)

    ####################
    # UPLOAD TO AWS S3 #
    ####################

    s3 = boto3.client(
        "s3",
        aws_access_key_id=AWS_ACCESS_KEY_ID,
        aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
        region_name=AWS_REGION_NAME,
    )
    logging.info("Uploading to S3")

    # create bucket
    try:
        s3.create_bucket(
            Bucket=AWS_BUCKET_NAME,
            CreateBucketConfiguration={"LocationConstraint": AWS_REGION_NAME},
        )
    except ClientError as e:
        logging.warning(e)

    # upload file
    s3.upload_file(output_path, AWS_BUCKET_NAME, output_path)
    logging.info("Uploaded file: %s" % output_path)

    ######################
    # TRANSCRIBE S3 FILE #
    ######################

    # the file's MD5 hash doubles as the transcription job name
    with open(output_path, "rb") as f:
        output_hash = hashlib.md5(f.read()).hexdigest()
    logging.info("File hash: %s" % output_hash)

    transcribe = boto3.client(
        "transcribe",
        aws_access_key_id=AWS_ACCESS_KEY_ID,
        aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
        region_name=AWS_REGION_NAME,
    )

    try:
        transcribe.start_transcription_job(
            TranscriptionJobName=output_hash,
            Media={
                "MediaFileUri": "s3://%s/%s" % (AWS_BUCKET_NAME, output_path)
            },
            MediaFormat="mp4",
            LanguageCode="en-IN",
        )
    except ClientError as e:
        logging.warning(e)

    while True:
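        # poll until the transcription job reaches a terminal state (COMPLETED or FAILED)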
        response = transcribe.get_transcription_job(
            TranscriptionJobName=output_hash)

        if response["TranscriptionJob"]["TranscriptionJobStatus"] in [
                "COMPLETED",
                "FAILED",
        ]:
            # save transcribe json
            uri = response["TranscriptionJob"]["Transcript"][
                "TranscriptFileUri"]
            urllib.request.urlretrieve(uri, json_path)

            logging.info("Transcribing (COMPLETE): %s" % json_path)
            break

        logging.info("Processing ...")
        time.sleep(5)

    # create transcribe txt
    transcript = ""

    with open(json_path) as f:
        for t in json.load(f)["results"]["transcripts"]:
            transcript += t["transcript"]

    with open(txt_path, "w") as f:
        f.write(transcript)

    logging.info("Transcript (READY): %s" % txt_path)

    return {
        "sample": output_path,
        "json": json_path,
        "txt": txt_path,
    }
    def get_health(self):
        dictionary['checks'].clear()
        dictionary_spoor['checks'].clear()
        dictionary_etl['checks'].clear()
        dictionary_hui['checks'].clear()
        for k, v in health.items():
            try:
                app_dict = {
                    'ok': '',
                    'checkOutput': '',
                    'panicGuide': '',
                    'severity': '3',
                    'businessImpact': '',
                    'technicalSummary': 'For individual checks: {}'.format(v),
                    'name': '',
                    'lastUpdated': ''
                }

                response = requests.get(v, timeout=20)
                data = response.json()
                temp_status = []
                temp_updated = []
                # collect the status and lastUpdated of every individual check;
                # app_dict ends up holding the metadata of the last check seen
                for check in data['checks']:
                    temp_updated.append(check['lastUpdated'])
                    temp_status.append(check['ok'])

                    app_dict['name'] = k
                    app_dict['panicGuide'] = check['panicGuide']
                    app_dict['businessImpact'] = check['businessImpact']
                    app_dict['checkOutput'] = response.status_code
                    app_dict['severity'] = check['severity']

                # print('--------------------')
                # print(temp_status)
                severity = 3
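                # a single failing check marks the whole app as not ok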
                for index, status in enumerate(temp_status):
                    if status is True:
                        app_dict['ok'] = True
                        app_dict['severity'] = severity
                    if status is False:
                        #print('For app {} index {}'.format(k, index))
                        app_dict['ok'] = False
                        if app_dict['severity'] < severity:
                            app_dict['severity'] = severity
                        break

                # the aggregate lastUpdated is the oldest timestamp across the checks
                app_dict['lastUpdated'] = min(temp_updated)

                #print('Dictionary for {} is {}'.format(k, app_dict))
                logging.debug('Dictionary for {} is {}'.format(k, app_dict))
                dictionary['checks'].append(app_dict)
                if 'hui' in k.lower():
                    dictionary_hui['checks'].append(app_dict)
                elif 'spoor' in k.lower():
                    dictionary_spoor['checks'].append(app_dict)
                elif any(s in k.lower() for s in ('ingester', 'validator', 'transformer', 'dq')):
                    dictionary_etl['checks'].append(app_dict)

            except (requests.ConnectionError, requests.Timeout) as err:
                logging.error(err)
                err_dict = {
                    'ok': '',
                    'checkOutput': '',
                    'panicGuide': '',
                    'severity': '3',
                    'businessImpact': '',
                    'technicalSummary': '',
                    'name': '',
                    'lastUpdated': ''
                }
                if err.errno is None:
                    custom_err = "Link not found. Please check the link"
                else:
                    custom_err = str(err.errno)

                err_dict['ok'] = False
                err_dict['severity'] = "1"
                err_dict['checkOutput'] = custom_err
                err_dict['name'] = k
                dictionary['checks'].append(err_dict)

        for cluster in redshift_clusters:
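            # CloudWatch reports the HealthStatus metric as 1 while the cluster is healthy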
            try:
                cluster_status = {
                    'ok': '',
                    'checkOutput': '',
                    'panicGuide': '',
                    'severity': '1',
                    'businessImpact': '',
                    'technicalSummary': 'Please check Redshift cluster',
                    'name': '',
                    'lastUpdated': ''
                }
                response = cloudwatch.get_metric_statistics(
                    Namespace='AWS/Redshift',
                    MetricName='HealthStatus',
                    Dimensions=[
                        {
                            'Name': 'ClusterIdentifier',
                            'Value': cluster
                        },
                    ],
                    StartTime=datetime.utcnow() - timedelta(minutes=2),
                    EndTime=datetime.utcnow() - timedelta(minutes=1),
                    Period=60,
                    Statistics=['Minimum'])

                for resp in response['Datapoints']:
                    if (int(resp['Minimum'])) == 1:
                        cluster_status['ok'] = True
                        cluster_status['checkOutput'] = "Cluster is healthy"
                    else:
                        cluster_status['ok'] = False
                        cluster_status[
                            'checkOutput'] = "!! Cluster is UNHEALTHY !!"
                cluster_status['name'] = "Redshift: {} cluster ".format(
                    cluster.upper())
                cluster_status['lastUpdated'] = datetime.now().strftime(
                    "%Y-%m-%d %H:%M")
                cluster_status['businessImpact'] = "N/A"
                cluster_status[
                    'panicGuide'] = "If cluster is down for long period of time raise ticket with AWS"
                dictionary_etl['checks'].append(cluster_status)
                dictionary['checks'].append(cluster_status)

                # print("{} for cluster {} the result is {}".format(datetime.utcnow(), cluster, cluster_status['ok']))
                logging.info(cluster_status)

            except ClientError as error:
                logging.warning('Boto API error: %s', error)

        # print('ETL is {}'.format(dictionary_etl))
        # print('HUI is {}'.format(dictionary_hui))
        # print('Spoor is {}'.format(dictionary_spoor))

        dictionary_etl['checks'] = sorted(dictionary_etl['checks'],
                                          key=lambda k: k['name'])
        dictionary['checks'] = sorted(dictionary['checks'],
                                      key=lambda k: k['name'])
        dictionary_hui['checks'] = sorted(dictionary_hui['checks'],
                                          key=lambda k: k['name'])
        dictionary_spoor['checks'] = sorted(dictionary_spoor['checks'],
                                            key=lambda k: k['name'])