def data_generator(timezone: str, enable_timezone_range: bool, sleep: float, repeat: int) -> dict:
    """
    Generate rows for each non-synchrophasor table declared in DATA.
    :args:
        timezone:str - timezone for generated timestamp(s)
        enable_timezone_range:bool - whether timestamps may fall within a timezone range
        sleep:float - wait time between each row
        repeat:int - number of times to repeat
    :params:
        payloads:dict - table name mapped to its list of generated rows
    :return:
        payloads
    """
    payloads = {}
    for _ in range(repeat):
        # one shared timestamp per pass over the tables
        timestamp = generate_timestamp(timezone=timezone,
                                       enable_timezone_range=enable_timezone_range)
        for table in DATA:
            rows = payloads.setdefault(table, [])
            location = random.choice(LOCATIONS)
            # drop the trailing component when the location has three parts
            if len(location.split(',')) == 3:
                location = location.rsplit(',', 1)[0]
            rows.append({
                'timestamp': timestamp,
                'location': location,
                'value': __calculate_value(DATA[table])
            })
        time.sleep(sleep)
    return payloads
def trig_value(timezone: str, enable_timezone_range: bool, sleep: float, repeat: int) -> dict:
    """
    Calculate the sin/cos values between -π to π and π to -π.
    :args:
        timezone:str - timezone for generated timestamp(s)
        enable_timezone_range:bool - whether timestamps may fall within a timezone range
        sleep:float - wait time between each iteration
        repeat:int - number of iterations
    :params:
        payloads:dict - sin/cos data to store
    :return:
        payloads
    """
    payloads = {'sin': [], 'cos': []}
    for _ in range(repeat):
        for value in VALUE_ARRAY:
            # each data point gets its own timestamp (sleep below spaces them out)
            timestamp = generate_timestamp(timezone=timezone,
                                           enable_timezone_range=enable_timezone_range)
            payloads['sin'].append({
                'timestamp': timestamp,
                'value': math.sin(value)
            })
            # BUG FIX: the 'cos' series previously stored math.sin(value),
            # duplicating the sin series instead of computing the cosine
            payloads['cos'].append({
                'timestamp': timestamp,
                'value': math.cos(value)
            })
            time.sleep(sleep)
    return payloads
def data_generator(timezone: str, enable_timezone_range: bool, sleep: float, repeat: int) -> list:
    """
    Generate synchrophasor data rows.
    :args:
        timezone:str - timezone for generated timestamp(s)
        enable_timezone_range:bool - whether timestamps may fall within a timezone range
        sleep:float - wait time between each iteration
        repeat:int - number of iterations
    :params:
        payloads:list - generated synchrophasor records
    :return:
        payloads
    """
    payloads = []
    for _ in range(repeat):
        site = random.choice(LOCATIONS)
        # keep only the first two components of a three-part location
        if len(site.split(',')) == 3:
            site = site.rsplit(',', 1)[0]
        record = {
            'timestamp': generate_timestamp(timezone=timezone,
                                            enable_timezone_range=enable_timezone_range),
            'location': site,
            'source': __calculate_value(DATA['synchrophasor']['source']),
            'sequence': random.choice(range(1, 4)),
        }
        # merge in the phasor measurements
        record.update(__synchrophasor_data())
        payloads.append(record)
        time.sleep(sleep)
    return payloads
def get_ping_data(timezone: str, enable_timezone_range: bool, sleep: float, repeat: int) -> list:
    """
    Generate the ping_value per device - based on data originally from Lit San Leandro.
    :args:
        timezone:str - timezone for generated timestamp(s)
        enable_timezone_range:bool - whether timestamps may fall within a timezone range
        sleep:float - wait time between each row
        repeat:int - number of times to repeat process
    :param:
        data_sets:list - list of data sets
    :sample:
        {
            "device_name": "Catalyst 3500XL",
            "parentelement": "68ae8bef-92e1-11e9-b465-d4856454f4ba",
            "timestamp": "2020-12-08 02:20:11.024002",
            "value": 1,
            "webid": "F1AbEfLbwwL8F6EiShvDV-QH70A74uuaOGS6RG0ZdSFZFT0ug4FckGTrxdFojNpadLPwI4gWE9NUEFTUy1MSVRTTFxMSVRTQU5MRUFORFJPXDc3NyBEQVZJU1xQT1AgUk9PTVxDQVRBTFlTVCAzNTAwWEx8UElORw"
        }
    :return:
        data dict based on information generated by PERCENTAGECPU_DATA
    """
    data_sets = []
    for _ in range(repeat):
        device_name = random.choice(list(PING_DATA.keys()))
        min_value = PING_DATA[device_name]['min_value']
        max_value = PING_DATA[device_name]['max_value']
        sub_value = random.choice(range(min_value, max_value))
        rand_val = random.random()
        # perturb sub_value by a fraction, keeping the result inside [min, max]
        if min_value <= sub_value + rand_val <= max_value:
            value = round(sub_value + rand_val, 2)
        elif min_value <= sub_value - rand_val <= max_value:
            value = round(sub_value - rand_val, 2)
        elif min_value <= rand_val - sub_value <= max_value:
            # BUG FIX: this branch previously returned an unrounded float,
            # inconsistent with the other branches and the docstring sample
            value = round(rand_val - sub_value, 2)
        else:
            value = sub_value
        data_sets.append({
            'timestamp': generate_timestamp(timezone=timezone,
                                            enable_timezone_range=enable_timezone_range),
            'device_name': device_name,
            'parentelement': PING_DATA[device_name]['parentelement'],
            'webid': PING_DATA[device_name]['webid'],
            'value': value
        })
        time.sleep(sleep)
    return data_sets
def get_percentagecpu_data(timezone: str, enable_timezone_range: bool, sleep: float, repeat: int) -> list:
    """
    Generate the percentage of CPU used per device.
    :args:
        timezone:str - timezone for generated timestamp(s)
        enable_timezone_range:bool - whether timestamps may fall within a timezone range
        sleep:float - wait time between each row
        repeat:int - number of times to repeat process
    :param:
        data_sets:list - list of data sets
    :sample:
        {
            "device_name": "Catalyst 3500XL",
            "parentelement": "68ae8bef-92e1-11e9-b465-d4856454f4ba",
            "timestamp": "2020-12-08 02:20:11.024002",
            "value": 15.2,
            "webid": "F1AbEfLbwwL8F6EiShvDV-QH70A74uuaOGS6RG0ZdSFZFT0ug4FckGTrxdFojNpadLPwI4gWE9NUEFTUy1MSVRTTFxMSVRTQU5MRUFORFJPXDc3NyBEQVZJU1xQT1AgUk9PTVxDQVRBTFlTVCAzNTAwWEx8UElORw"
        }
    :return:
        data dict based on information generated by PERCENTAGECPU_DATA
    """
    data_sets = []
    for _ in range(repeat):
        device = random.choice(list(PERCENTAGECPU_DATA.keys()))
        info = PERCENTAGECPU_DATA[device]
        row = {
            'timestamp': generate_timestamp(timezone=timezone,
                                            enable_timezone_range=enable_timezone_range),
            'device_name': device,
            'parentelement': info['parentelement'],
            'webid': info['webid'],
            # uniform CPU percentage in [0, 100), two decimal places
            'value': round(random.random() * 100, 2)
        }
        data_sets.append(row)
        time.sleep(sleep)
    return data_sets
def get_aiops_data(timezone: str, sleep: float, repeat: int) -> dict:
    """
    Generate values based on data from Ai-Ops.
    :args:
        timezone:str - timezone for generated timestamp(s)
        sleep:float - wait time between each row
        repeat:int - number of times to repeat process
    :param:
        data_sets:dict - dict of data generated
    :return:
        data_sets
    """
    data_sets = {}
    for _ in range(repeat):
        for table in DATA_SETS:
            rows = data_sets.setdefault(table, [])
            bounds = DATA_SETS[table]
            # integer base in [min, max) plus a fractional offset in [0, 1)
            rows.append({
                'timestamp': generate_timestamp(timezone=timezone),
                'value': random.random() + random.choice(range(bounds['min'], bounds['max']))
            })
        time.sleep(sleep)
    return data_sets
def get_linode_data(token: str, tag: str = None, initial_configs: bool = False, timezone: str = 'utc',
                    enable_timezone_range: bool = True, exception: bool = True) -> dict:
    """
    Extract data from linode
    :args:
        token:str - token for accessing linode data
        tag:str - group of nodes to extract data from. if not set extract all
        initial_configs:bool - whether this is the first time the configs are being deployed
        timezone:str - timezone for generated timestamp(s)
        enable_timezone_range:bool - whether timestamps may fall within a timezone range
        exception:bool - whether or not to print exceptions
    :params:
        timestamp:str - current (UTC) timestamp
        payloads:dict - dictionary of all the tables / data generated
    :return:
        payloads (empty dict when the instance listing came back empty)
    """
    # one timestamp shared by every record produced in this call
    timestamp = generate_timestamp(timezone=timezone, enable_timezone_range=enable_timezone_range)
    payloads = {
        'node_config': [],
        'node_summary': [],
        'cpu_insight': [],
        'io_insight': [],
        'netv4_public_insight': [],
        'netv6_public_insight': []
    }
    # list all linode instances for this account
    data = get_data(url='https://api.linode.com/v4/linode/instances', token=token, exception=exception)
    if len(data) == 0:
        return {}
    # machine(s) summary
    # NOTE(review): node_machine_info feeds 'node_config' and node_config_info
    # feeds 'node_summary' — the helper names suggest these may be swapped; confirm
    # against the helpers' definitions before changing
    payloads['node_config'], machines = node_machine_info(data=data['data'], tag=tag, timestamp=timestamp)
    payloads['node_summary'] = node_config_info(data=data['data'], tag=tag, timestamp=timestamp)
    # config/summary tables are only published on the initial deployment
    if initial_configs is False:
        del payloads['node_config']
        del payloads['node_summary']
    # per-machine stats: cpu, io, and public ipv4/ipv6 network insight
    # NOTE(review): unlike the call above, this get_data call passes token
    # positionally and omits the exception flag — verify this is intentional
    for machine in machines:
        data = get_data(
            'https://api.linode.com/v4/linode/instances/%s/stats' % machine, token)
        payloads['cpu_insight'].append(
            extract_insight(data=data['data']['cpu'], member_id=machine, timestamp=timestamp))
        payloads['io_insight'].append(
            extract_insight(data=data['data']['io']['io'], member_id=machine, timestamp=timestamp))
        payloads['netv4_public_insight'].append(
            extract_network_insight(data=data['data']['netv4'], member_id=machine, timestamp=timestamp))
        payloads['netv6_public_insight'].append(
            extract_network_insight(data=data['data']['netv6'], member_id=machine, timestamp=timestamp))
    return payloads