diff --git a/src/graphInfluxDB.py b/src/graphInfluxDB.py index 327497b6..4e04abcf 100644 --- a/src/graphInfluxDB.py +++ b/src/graphInfluxDB.py @@ -1,621 +1,618 @@ -print("influxDB Support is Deperecated. Use the Long-Term Stats system instead.") +import subprocess +import json +import subprocess +from datetime import datetime +from pathlib import Path +import statistics +import time +import psutil -# import subprocess -# import json -# import subprocess -# from datetime import datetime -# from pathlib import Path -# import statistics -# import time -# import psutil +from influxdb_client import InfluxDBClient, Point +from influxdb_client.client.write_api import SYNCHRONOUS -# from influxdb_client import InfluxDBClient, Point -# from influxdb_client.client.write_api import SYNCHRONOUS +from liblqos_python import interface_a, interface_b, influx_db_enabled, influx_db_bucket, influx_db_org, influx_db_token, influx_db_url, sqm -# from ispConfig import interfaceA, interfaceB, influxDBEnabled, influxDBBucket, influxDBOrg, influxDBtoken, influxDBurl, sqm +def getInterfaceStats(interface): + command = 'tc -j -s qdisc show dev ' + interface + jsonAr = json.loads(subprocess.run(command.split(' '), stdout=subprocess.PIPE).stdout.decode('utf-8')) + jsonDict = {} + for element in filter(lambda e: 'parent' in e, jsonAr): + flowID = ':'.join(map(lambda p: f'0x{p}', element['parent'].split(':')[0:2])) + jsonDict[flowID] = element + del jsonAr + return jsonDict -# def getInterfaceStats(interface): -# command = 'tc -j -s qdisc show dev ' + interface -# jsonAr = json.loads(subprocess.run(command.split(' '), stdout=subprocess.PIPE).stdout.decode('utf-8')) -# jsonDict = {} -# for element in filter(lambda e: 'parent' in e, jsonAr): -# flowID = ':'.join(map(lambda p: f'0x{p}', element['parent'].split(':')[0:2])) -# jsonDict[flowID] = element -# del jsonAr -# return jsonDict +def chunk_list(l, n): + for i in range(0, len(l), n): + yield l[i:i + n] - -# def chunk_list(l, n): -# for i in 
range(0, len(l), n): -# yield l[i:i + n] - -# def getCircuitBandwidthStats(subscriberCircuits, tinsStats): -# interfaces = [interfaceA, interfaceB] -# ifaceStats = list(map(getInterfaceStats, interfaces)) +def getCircuitBandwidthStats(subscriberCircuits, tinsStats): + interfaces = [interface_a(), interface_b()] + ifaceStats = list(map(getInterfaceStats, interfaces)) -# for circuit in subscriberCircuits: -# if 'stats' not in circuit: -# circuit['stats'] = {} -# if 'currentQuery' in circuit['stats']: -# circuit['stats']['priorQuery'] = circuit['stats']['currentQuery'] -# circuit['stats']['currentQuery'] = {} -# circuit['stats']['sinceLastQuery'] = {} -# else: -# #circuit['stats']['priorQuery'] = {} -# #circuit['stats']['priorQuery']['time'] = datetime.now().isoformat() -# circuit['stats']['currentQuery'] = {} -# circuit['stats']['sinceLastQuery'] = {} + for circuit in subscriberCircuits: + if 'stats' not in circuit: + circuit['stats'] = {} + if 'currentQuery' in circuit['stats']: + circuit['stats']['priorQuery'] = circuit['stats']['currentQuery'] + circuit['stats']['currentQuery'] = {} + circuit['stats']['sinceLastQuery'] = {} + else: + #circuit['stats']['priorQuery'] = {} + #circuit['stats']['priorQuery']['time'] = datetime.now().isoformat() + circuit['stats']['currentQuery'] = {} + circuit['stats']['sinceLastQuery'] = {} -# #for entry in tinsStats: -# if 'currentQuery' in tinsStats: -# tinsStats['priorQuery'] = tinsStats['currentQuery'] -# tinsStats['currentQuery'] = {} -# tinsStats['sinceLastQuery'] = {} -# else: -# tinsStats['currentQuery'] = {} -# tinsStats['sinceLastQuery'] = {} + #for entry in tinsStats: + if 'currentQuery' in tinsStats: + tinsStats['priorQuery'] = tinsStats['currentQuery'] + tinsStats['currentQuery'] = {} + tinsStats['sinceLastQuery'] = {} + else: + tinsStats['currentQuery'] = {} + tinsStats['sinceLastQuery'] = {} -# tinsStats['currentQuery'] = { 'Bulk': {'Download': {'sent_packets': 0.0, 'drops': 0.0}, 'Upload': {'sent_packets': 0.0, 
'drops': 0.0}}, -# 'BestEffort': {'Download': {'sent_packets': 0.0, 'drops': 0.0}, 'Upload': {'sent_packets': 0.0, 'drops': 0.0}}, -# 'Video': {'Download': {'sent_packets': 0.0, 'drops': 0.0}, 'Upload': {'sent_packets': 0.0, 'drops': 0.0}}, -# 'Voice': {'Download': {'sent_packets': 0.0, 'drops': 0.0}, 'Upload': {'sent_packets': 0.0, 'drops': 0.0}}, -# } -# tinsStats['sinceLastQuery'] = { 'Bulk': {'Download': {'sent_packets': 0.0, 'drops': 0.0}, 'Upload': {'sent_packets': 0.0, 'drops': 0.0}}, -# 'BestEffort': {'Download': {'sent_packets': 0.0, 'drops': 0.0}, 'Upload': {'sent_packets': 0.0, 'drops': 0.0}}, -# 'Video': {'Download': {'sent_packets': 0.0, 'drops': 0.0}, 'Upload': {'sent_packets': 0.0, 'drops': 0.0}}, -# 'Voice': {'Download': {'sent_packets': 0.0, 'drops': 0.0}, 'Upload': {'sent_packets': 0.0, 'drops': 0.0}}, -# } + tinsStats['currentQuery'] = { 'Bulk': {'Download': {'sent_packets': 0.0, 'drops': 0.0}, 'Upload': {'sent_packets': 0.0, 'drops': 0.0}}, + 'BestEffort': {'Download': {'sent_packets': 0.0, 'drops': 0.0}, 'Upload': {'sent_packets': 0.0, 'drops': 0.0}}, + 'Video': {'Download': {'sent_packets': 0.0, 'drops': 0.0}, 'Upload': {'sent_packets': 0.0, 'drops': 0.0}}, + 'Voice': {'Download': {'sent_packets': 0.0, 'drops': 0.0}, 'Upload': {'sent_packets': 0.0, 'drops': 0.0}}, + } + tinsStats['sinceLastQuery'] = { 'Bulk': {'Download': {'sent_packets': 0.0, 'drops': 0.0}, 'Upload': {'sent_packets': 0.0, 'drops': 0.0}}, + 'BestEffort': {'Download': {'sent_packets': 0.0, 'drops': 0.0}, 'Upload': {'sent_packets': 0.0, 'drops': 0.0}}, + 'Video': {'Download': {'sent_packets': 0.0, 'drops': 0.0}, 'Upload': {'sent_packets': 0.0, 'drops': 0.0}}, + 'Voice': {'Download': {'sent_packets': 0.0, 'drops': 0.0}, 'Upload': {'sent_packets': 0.0, 'drops': 0.0}}, + } -# for circuit in subscriberCircuits: -# for (interface, stats, dirSuffix) in zip(interfaces, ifaceStats, ['Download', 'Upload']): + for circuit in subscriberCircuits: + for (interface, stats, dirSuffix) in 
zip(interfaces, ifaceStats, ['Download', 'Upload']): -# element = stats[circuit['classid']] if circuit['classid'] in stats else False + element = stats[circuit['classid']] if circuit['classid'] in stats else False -# if element: -# bytesSent = float(element['bytes']) -# drops = float(element['drops']) -# packets = float(element['packets']) -# if (element['drops'] > 0) and (element['packets'] > 0): -# overloadFactor = float(round(element['drops']/element['packets'],3)) -# else: -# overloadFactor = 0.0 + if element: + bytesSent = float(element['bytes']) + drops = float(element['drops']) + packets = float(element['packets']) + if (element['drops'] > 0) and (element['packets'] > 0): + overloadFactor = float(round(element['drops']/element['packets'],3)) + else: + overloadFactor = 0.0 -# if 'cake diffserv4' in sqm: -# tinCounter = 1 -# for tin in element['tins']: -# sent_packets = float(tin['sent_packets']) -# ack_drops = float(tin['ack_drops']) -# ecn_mark = float(tin['ecn_mark']) -# tinDrops = float(tin['drops']) -# trueDrops = ecn_mark + tinDrops - ack_drops -# if tinCounter == 1: -# tinsStats['currentQuery']['Bulk'][dirSuffix]['sent_packets'] += sent_packets -# tinsStats['currentQuery']['Bulk'][dirSuffix]['drops'] += trueDrops -# elif tinCounter == 2: -# tinsStats['currentQuery']['BestEffort'][dirSuffix]['sent_packets'] += sent_packets -# tinsStats['currentQuery']['BestEffort'][dirSuffix]['drops'] += trueDrops -# elif tinCounter == 3: -# tinsStats['currentQuery']['Video'][dirSuffix]['sent_packets'] += sent_packets -# tinsStats['currentQuery']['Video'][dirSuffix]['drops'] += trueDrops -# elif tinCounter == 4: -# tinsStats['currentQuery']['Voice'][dirSuffix]['sent_packets'] += sent_packets -# tinsStats['currentQuery']['Voice'][dirSuffix]['drops'] += trueDrops -# tinCounter += 1 + if 'cake diffserv4' in sqm(): + tinCounter = 1 + for tin in element['tins']: + sent_packets = float(tin['sent_packets']) + ack_drops = float(tin['ack_drops']) + ecn_mark = 
float(tin['ecn_mark']) + tinDrops = float(tin['drops']) + trueDrops = ecn_mark + tinDrops - ack_drops + if tinCounter == 1: + tinsStats['currentQuery']['Bulk'][dirSuffix]['sent_packets'] += sent_packets + tinsStats['currentQuery']['Bulk'][dirSuffix]['drops'] += trueDrops + elif tinCounter == 2: + tinsStats['currentQuery']['BestEffort'][dirSuffix]['sent_packets'] += sent_packets + tinsStats['currentQuery']['BestEffort'][dirSuffix]['drops'] += trueDrops + elif tinCounter == 3: + tinsStats['currentQuery']['Video'][dirSuffix]['sent_packets'] += sent_packets + tinsStats['currentQuery']['Video'][dirSuffix]['drops'] += trueDrops + elif tinCounter == 4: + tinsStats['currentQuery']['Voice'][dirSuffix]['sent_packets'] += sent_packets + tinsStats['currentQuery']['Voice'][dirSuffix]['drops'] += trueDrops + tinCounter += 1 -# circuit['stats']['currentQuery']['bytesSent' + dirSuffix] = bytesSent -# circuit['stats']['currentQuery']['packetDrops' + dirSuffix] = drops -# circuit['stats']['currentQuery']['packetsSent' + dirSuffix] = packets -# circuit['stats']['currentQuery']['overloadFactor' + dirSuffix] = overloadFactor + circuit['stats']['currentQuery']['bytesSent' + dirSuffix] = bytesSent + circuit['stats']['currentQuery']['packetDrops' + dirSuffix] = drops + circuit['stats']['currentQuery']['packetsSent' + dirSuffix] = packets + circuit['stats']['currentQuery']['overloadFactor' + dirSuffix] = overloadFactor -# #if 'cake diffserv4' in sqm: -# # circuit['stats']['currentQuery']['tins'] = theseTins + #if 'cake diffserv4' in sqm(): + # circuit['stats']['currentQuery']['tins'] = theseTins -# circuit['stats']['currentQuery']['time'] = datetime.now().isoformat() + circuit['stats']['currentQuery']['time'] = datetime.now().isoformat() -# allPacketsDownload = 0.0 -# allPacketsUpload = 0.0 -# for circuit in subscriberCircuits: -# circuit['stats']['sinceLastQuery']['bitsDownload'] = circuit['stats']['sinceLastQuery']['bitsUpload'] = None -# 
circuit['stats']['sinceLastQuery']['bytesSentDownload'] = circuit['stats']['sinceLastQuery']['bytesSentUpload'] = None -# circuit['stats']['sinceLastQuery']['packetDropsDownload'] = circuit['stats']['sinceLastQuery']['packetDropsUpload'] = None -# circuit['stats']['sinceLastQuery']['packetsSentDownload'] = circuit['stats']['sinceLastQuery']['packetsSentUpload'] = None + allPacketsDownload = 0.0 + allPacketsUpload = 0.0 + for circuit in subscriberCircuits: + circuit['stats']['sinceLastQuery']['bitsDownload'] = circuit['stats']['sinceLastQuery']['bitsUpload'] = None + circuit['stats']['sinceLastQuery']['bytesSentDownload'] = circuit['stats']['sinceLastQuery']['bytesSentUpload'] = None + circuit['stats']['sinceLastQuery']['packetDropsDownload'] = circuit['stats']['sinceLastQuery']['packetDropsUpload'] = None + circuit['stats']['sinceLastQuery']['packetsSentDownload'] = circuit['stats']['sinceLastQuery']['packetsSentUpload'] = None -# try: -# if (circuit['stats']['currentQuery']['bytesSentDownload'] - circuit['stats']['priorQuery']['bytesSentDownload']) >= 0.0: -# circuit['stats']['sinceLastQuery']['bytesSentDownload'] = circuit['stats']['currentQuery']['bytesSentDownload'] - circuit['stats']['priorQuery']['bytesSentDownload'] -# else: -# circuit['stats']['sinceLastQuery']['bytesSentDownload'] = None -# if (circuit['stats']['currentQuery']['bytesSentUpload'] - circuit['stats']['priorQuery']['bytesSentUpload']) >= 0.0: -# circuit['stats']['sinceLastQuery']['bytesSentUpload'] = circuit['stats']['currentQuery']['bytesSentUpload'] - circuit['stats']['priorQuery']['bytesSentUpload'] -# else: -# circuit['stats']['sinceLastQuery']['bytesSentUpload'] = None -# except: -# circuit['stats']['sinceLastQuery']['bytesSentDownload'] = None -# circuit['stats']['sinceLastQuery']['bytesSentUpload'] = None -# try: -# if (circuit['stats']['currentQuery']['packetDropsDownload'] - circuit['stats']['priorQuery']['packetDropsDownload']) >= 0.0: -# 
circuit['stats']['sinceLastQuery']['packetDropsDownload'] = circuit['stats']['currentQuery']['packetDropsDownload'] - circuit['stats']['priorQuery']['packetDropsDownload'] -# else: -# circuit['stats']['sinceLastQuery']['packetDropsDownload'] = None -# if (circuit['stats']['currentQuery']['packetDropsUpload'] - circuit['stats']['priorQuery']['packetDropsUpload']) >= 0.0: -# circuit['stats']['sinceLastQuery']['packetDropsUpload'] = circuit['stats']['currentQuery']['packetDropsUpload'] - circuit['stats']['priorQuery']['packetDropsUpload'] -# else: -# circuit['stats']['sinceLastQuery']['packetDropsUpload'] = None -# except: -# circuit['stats']['sinceLastQuery']['packetDropsDownload'] = None -# circuit['stats']['sinceLastQuery']['packetDropsUpload'] = None -# try: -# if (circuit['stats']['currentQuery']['packetsSentDownload'] - circuit['stats']['priorQuery']['packetsSentDownload']) >= 0.0: -# circuit['stats']['sinceLastQuery']['packetsSentDownload'] = circuit['stats']['currentQuery']['packetsSentDownload'] - circuit['stats']['priorQuery']['packetsSentDownload'] -# else: -# circuit['stats']['sinceLastQuery']['packetsSentDownload'] = None -# if (circuit['stats']['currentQuery']['packetsSentUpload'] - circuit['stats']['priorQuery']['packetsSentUpload']) >= 0.0: -# circuit['stats']['sinceLastQuery']['packetsSentUpload'] = circuit['stats']['currentQuery']['packetsSentUpload'] - circuit['stats']['priorQuery']['packetsSentUpload'] -# else: -# circuit['stats']['sinceLastQuery']['packetsSentUpload'] = None -# except: -# circuit['stats']['sinceLastQuery']['packetsSentDownload'] = None -# circuit['stats']['sinceLastQuery']['packetsSentUpload'] = None + try: + if (circuit['stats']['currentQuery']['bytesSentDownload'] - circuit['stats']['priorQuery']['bytesSentDownload']) >= 0.0: + circuit['stats']['sinceLastQuery']['bytesSentDownload'] = circuit['stats']['currentQuery']['bytesSentDownload'] - circuit['stats']['priorQuery']['bytesSentDownload'] + else: + 
circuit['stats']['sinceLastQuery']['bytesSentDownload'] = None + if (circuit['stats']['currentQuery']['bytesSentUpload'] - circuit['stats']['priorQuery']['bytesSentUpload']) >= 0.0: + circuit['stats']['sinceLastQuery']['bytesSentUpload'] = circuit['stats']['currentQuery']['bytesSentUpload'] - circuit['stats']['priorQuery']['bytesSentUpload'] + else: + circuit['stats']['sinceLastQuery']['bytesSentUpload'] = None + except: + circuit['stats']['sinceLastQuery']['bytesSentDownload'] = None + circuit['stats']['sinceLastQuery']['bytesSentUpload'] = None + try: + if (circuit['stats']['currentQuery']['packetDropsDownload'] - circuit['stats']['priorQuery']['packetDropsDownload']) >= 0.0: + circuit['stats']['sinceLastQuery']['packetDropsDownload'] = circuit['stats']['currentQuery']['packetDropsDownload'] - circuit['stats']['priorQuery']['packetDropsDownload'] + else: + circuit['stats']['sinceLastQuery']['packetDropsDownload'] = None + if (circuit['stats']['currentQuery']['packetDropsUpload'] - circuit['stats']['priorQuery']['packetDropsUpload']) >= 0.0: + circuit['stats']['sinceLastQuery']['packetDropsUpload'] = circuit['stats']['currentQuery']['packetDropsUpload'] - circuit['stats']['priorQuery']['packetDropsUpload'] + else: + circuit['stats']['sinceLastQuery']['packetDropsUpload'] = None + except: + circuit['stats']['sinceLastQuery']['packetDropsDownload'] = None + circuit['stats']['sinceLastQuery']['packetDropsUpload'] = None + try: + if (circuit['stats']['currentQuery']['packetsSentDownload'] - circuit['stats']['priorQuery']['packetsSentDownload']) >= 0.0: + circuit['stats']['sinceLastQuery']['packetsSentDownload'] = circuit['stats']['currentQuery']['packetsSentDownload'] - circuit['stats']['priorQuery']['packetsSentDownload'] + else: + circuit['stats']['sinceLastQuery']['packetsSentDownload'] = None + if (circuit['stats']['currentQuery']['packetsSentUpload'] - circuit['stats']['priorQuery']['packetsSentUpload']) >= 0.0: + 
circuit['stats']['sinceLastQuery']['packetsSentUpload'] = circuit['stats']['currentQuery']['packetsSentUpload'] - circuit['stats']['priorQuery']['packetsSentUpload'] + else: + circuit['stats']['sinceLastQuery']['packetsSentUpload'] = None + except: + circuit['stats']['sinceLastQuery']['packetsSentDownload'] = None + circuit['stats']['sinceLastQuery']['packetsSentUpload'] = None -# if(circuit['stats']['sinceLastQuery']['packetsSentDownload']): -# allPacketsDownload += circuit['stats']['sinceLastQuery']['packetsSentDownload'] -# if(circuit['stats']['sinceLastQuery']['packetsSentUpload']): -# allPacketsUpload += circuit['stats']['sinceLastQuery']['packetsSentUpload'] + if(circuit['stats']['sinceLastQuery']['packetsSentDownload']): + allPacketsDownload += circuit['stats']['sinceLastQuery']['packetsSentDownload'] + if(circuit['stats']['sinceLastQuery']['packetsSentUpload']): + allPacketsUpload += circuit['stats']['sinceLastQuery']['packetsSentUpload'] -# if 'priorQuery' in circuit['stats']: -# if 'time' in circuit['stats']['priorQuery']: -# currentQueryTime = datetime.fromisoformat(circuit['stats']['currentQuery']['time']) -# priorQueryTime = datetime.fromisoformat(circuit['stats']['priorQuery']['time']) -# deltaSeconds = (currentQueryTime - priorQueryTime).total_seconds() -# if (circuit['stats']['sinceLastQuery']['bytesSentDownload']): -# circuit['stats']['sinceLastQuery']['bitsDownload'] = round((circuit['stats']['sinceLastQuery']['bytesSentDownload'] * 8) / deltaSeconds) if deltaSeconds > 0 else 0 -# else: -# circuit['stats']['sinceLastQuery']['bitsDownload'] = None -# if (circuit['stats']['sinceLastQuery']['bytesSentUpload']): -# circuit['stats']['sinceLastQuery']['bitsUpload'] = round((circuit['stats']['sinceLastQuery']['bytesSentUpload'] * 8) / deltaSeconds) if deltaSeconds > 0 else 0 -# else: -# circuit['stats']['sinceLastQuery']['bitsUpload'] = None + if 'priorQuery' in circuit['stats']: + if 'time' in circuit['stats']['priorQuery']: + currentQueryTime = 
datetime.fromisoformat(circuit['stats']['currentQuery']['time']) + priorQueryTime = datetime.fromisoformat(circuit['stats']['priorQuery']['time']) + deltaSeconds = (currentQueryTime - priorQueryTime).total_seconds() + if (circuit['stats']['sinceLastQuery']['bytesSentDownload']): + circuit['stats']['sinceLastQuery']['bitsDownload'] = round((circuit['stats']['sinceLastQuery']['bytesSentDownload'] * 8) / deltaSeconds) if deltaSeconds > 0 else 0 + else: + circuit['stats']['sinceLastQuery']['bitsDownload'] = None + if (circuit['stats']['sinceLastQuery']['bytesSentUpload']): + circuit['stats']['sinceLastQuery']['bitsUpload'] = round((circuit['stats']['sinceLastQuery']['bytesSentUpload'] * 8) / deltaSeconds) if deltaSeconds > 0 else 0 + else: + circuit['stats']['sinceLastQuery']['bitsUpload'] = None -# else: -# circuit['stats']['sinceLastQuery']['bitsDownload'] = None -# if(circuit['stats']['sinceLastQuery']['bytesSentDownload']): -# circuit['stats']['sinceLastQuery']['bitsDownload'] = (circuit['stats']['sinceLastQuery']['bytesSentDownload'] * 8) -# circuit['stats']['sinceLastQuery']['bitsUpload'] = None -# if(circuit['stats']['sinceLastQuery']['bytesSentUpload']): -# circuit['stats']['sinceLastQuery']['bitsUpload'] = (circuit['stats']['sinceLastQuery']['bytesSentUpload'] * 8) + else: + circuit['stats']['sinceLastQuery']['bitsDownload'] = None + if(circuit['stats']['sinceLastQuery']['bytesSentDownload']): + circuit['stats']['sinceLastQuery']['bitsDownload'] = (circuit['stats']['sinceLastQuery']['bytesSentDownload'] * 8) + circuit['stats']['sinceLastQuery']['bitsUpload'] = None + if(circuit['stats']['sinceLastQuery']['bytesSentUpload']): + circuit['stats']['sinceLastQuery']['bitsUpload'] = (circuit['stats']['sinceLastQuery']['bytesSentUpload'] * 8) -# tinsStats['sinceLastQuery']['Bulk']['Download']['dropPercentage'] = tinsStats['sinceLastQuery']['Bulk']['Upload']['dropPercentage'] = 0.0 -# tinsStats['sinceLastQuery']['BestEffort']['Download']['dropPercentage'] = 
tinsStats['sinceLastQuery']['BestEffort']['Upload']['dropPercentage'] = 0.0 -# tinsStats['sinceLastQuery']['Video']['Download']['dropPercentage'] = tinsStats['sinceLastQuery']['Video']['Upload']['dropPercentage'] = 0.0 -# tinsStats['sinceLastQuery']['Voice']['Download']['dropPercentage'] = tinsStats['sinceLastQuery']['Voice']['Upload']['dropPercentage'] = 0.0 + tinsStats['sinceLastQuery']['Bulk']['Download']['dropPercentage'] = tinsStats['sinceLastQuery']['Bulk']['Upload']['dropPercentage'] = 0.0 + tinsStats['sinceLastQuery']['BestEffort']['Download']['dropPercentage'] = tinsStats['sinceLastQuery']['BestEffort']['Upload']['dropPercentage'] = 0.0 + tinsStats['sinceLastQuery']['Video']['Download']['dropPercentage'] = tinsStats['sinceLastQuery']['Video']['Upload']['dropPercentage'] = 0.0 + tinsStats['sinceLastQuery']['Voice']['Download']['dropPercentage'] = tinsStats['sinceLastQuery']['Voice']['Upload']['dropPercentage'] = 0.0 -# tinsStats['sinceLastQuery']['Bulk']['Download']['percentage'] = tinsStats['sinceLastQuery']['Bulk']['Upload']['percentage'] = 0.0 -# tinsStats['sinceLastQuery']['BestEffort']['Download']['percentage'] = tinsStats['sinceLastQuery']['BestEffort']['Upload']['percentage'] = 0.0 -# tinsStats['sinceLastQuery']['Video']['Download']['percentage'] = tinsStats['sinceLastQuery']['Video']['Upload']['percentage'] = 0.0 -# tinsStats['sinceLastQuery']['Voice']['Download']['percentage'] = tinsStats['sinceLastQuery']['Voice']['Upload']['percentage'] = 0.0 + tinsStats['sinceLastQuery']['Bulk']['Download']['percentage'] = tinsStats['sinceLastQuery']['Bulk']['Upload']['percentage'] = 0.0 + tinsStats['sinceLastQuery']['BestEffort']['Download']['percentage'] = tinsStats['sinceLastQuery']['BestEffort']['Upload']['percentage'] = 0.0 + tinsStats['sinceLastQuery']['Video']['Download']['percentage'] = tinsStats['sinceLastQuery']['Video']['Upload']['percentage'] = 0.0 + tinsStats['sinceLastQuery']['Voice']['Download']['percentage'] = 
tinsStats['sinceLastQuery']['Voice']['Upload']['percentage'] = 0.0 -# try: -# tinsStats['sinceLastQuery']['Bulk']['Download']['sent_packets'] = tinsStats['currentQuery']['Bulk']['Download']['sent_packets'] - tinsStats['priorQuery']['Bulk']['Download']['sent_packets'] -# tinsStats['sinceLastQuery']['BestEffort']['Download']['sent_packets'] = tinsStats['currentQuery']['BestEffort']['Download']['sent_packets'] - tinsStats['priorQuery']['BestEffort']['Download']['sent_packets'] -# tinsStats['sinceLastQuery']['Video']['Download']['sent_packets'] = tinsStats['currentQuery']['Video']['Download']['sent_packets'] - tinsStats['priorQuery']['Video']['Download']['sent_packets'] -# tinsStats['sinceLastQuery']['Voice']['Download']['sent_packets'] = tinsStats['currentQuery']['Voice']['Download']['sent_packets'] - tinsStats['priorQuery']['Voice']['Download']['sent_packets'] -# tinsStats['sinceLastQuery']['Bulk']['Upload']['sent_packets'] = tinsStats['currentQuery']['Bulk']['Upload']['sent_packets'] - tinsStats['priorQuery']['Bulk']['Upload']['sent_packets'] -# tinsStats['sinceLastQuery']['BestEffort']['Upload']['sent_packets'] = tinsStats['currentQuery']['BestEffort']['Upload']['sent_packets'] - tinsStats['priorQuery']['BestEffort']['Upload']['sent_packets'] -# tinsStats['sinceLastQuery']['Video']['Upload']['sent_packets'] = tinsStats['currentQuery']['Video']['Upload']['sent_packets'] - tinsStats['priorQuery']['Video']['Upload']['sent_packets'] -# tinsStats['sinceLastQuery']['Voice']['Upload']['sent_packets'] = tinsStats['currentQuery']['Voice']['Upload']['sent_packets'] - tinsStats['priorQuery']['Voice']['Upload']['sent_packets'] -# except: -# tinsStats['sinceLastQuery']['Bulk']['Download']['sent_packets'] = tinsStats['sinceLastQuery']['BestEffort']['Download']['sent_packets'] = 0.0 -# tinsStats['sinceLastQuery']['Video']['Download']['sent_packets'] = tinsStats['sinceLastQuery']['Voice']['Download']['sent_packets'] = 0.0 -# 
tinsStats['sinceLastQuery']['Bulk']['Upload']['sent_packets'] = tinsStats['sinceLastQuery']['BestEffort']['Upload']['sent_packets'] = 0.0 -# tinsStats['sinceLastQuery']['Video']['Upload']['sent_packets'] = tinsStats['sinceLastQuery']['Voice']['Upload']['sent_packets'] = 0.0 + try: + tinsStats['sinceLastQuery']['Bulk']['Download']['sent_packets'] = tinsStats['currentQuery']['Bulk']['Download']['sent_packets'] - tinsStats['priorQuery']['Bulk']['Download']['sent_packets'] + tinsStats['sinceLastQuery']['BestEffort']['Download']['sent_packets'] = tinsStats['currentQuery']['BestEffort']['Download']['sent_packets'] - tinsStats['priorQuery']['BestEffort']['Download']['sent_packets'] + tinsStats['sinceLastQuery']['Video']['Download']['sent_packets'] = tinsStats['currentQuery']['Video']['Download']['sent_packets'] - tinsStats['priorQuery']['Video']['Download']['sent_packets'] + tinsStats['sinceLastQuery']['Voice']['Download']['sent_packets'] = tinsStats['currentQuery']['Voice']['Download']['sent_packets'] - tinsStats['priorQuery']['Voice']['Download']['sent_packets'] + tinsStats['sinceLastQuery']['Bulk']['Upload']['sent_packets'] = tinsStats['currentQuery']['Bulk']['Upload']['sent_packets'] - tinsStats['priorQuery']['Bulk']['Upload']['sent_packets'] + tinsStats['sinceLastQuery']['BestEffort']['Upload']['sent_packets'] = tinsStats['currentQuery']['BestEffort']['Upload']['sent_packets'] - tinsStats['priorQuery']['BestEffort']['Upload']['sent_packets'] + tinsStats['sinceLastQuery']['Video']['Upload']['sent_packets'] = tinsStats['currentQuery']['Video']['Upload']['sent_packets'] - tinsStats['priorQuery']['Video']['Upload']['sent_packets'] + tinsStats['sinceLastQuery']['Voice']['Upload']['sent_packets'] = tinsStats['currentQuery']['Voice']['Upload']['sent_packets'] - tinsStats['priorQuery']['Voice']['Upload']['sent_packets'] + except: + tinsStats['sinceLastQuery']['Bulk']['Download']['sent_packets'] = tinsStats['sinceLastQuery']['BestEffort']['Download']['sent_packets'] = 0.0 + 
tinsStats['sinceLastQuery']['Video']['Download']['sent_packets'] = tinsStats['sinceLastQuery']['Voice']['Download']['sent_packets'] = 0.0 + tinsStats['sinceLastQuery']['Bulk']['Upload']['sent_packets'] = tinsStats['sinceLastQuery']['BestEffort']['Upload']['sent_packets'] = 0.0 + tinsStats['sinceLastQuery']['Video']['Upload']['sent_packets'] = tinsStats['sinceLastQuery']['Voice']['Upload']['sent_packets'] = 0.0 -# try: -# tinsStats['sinceLastQuery']['Bulk']['Download']['drops'] = tinsStats['currentQuery']['Bulk']['Download']['drops'] - tinsStats['priorQuery']['Bulk']['Download']['drops'] -# tinsStats['sinceLastQuery']['BestEffort']['Download']['drops'] = tinsStats['currentQuery']['BestEffort']['Download']['drops'] - tinsStats['priorQuery']['BestEffort']['Download']['drops'] -# tinsStats['sinceLastQuery']['Video']['Download']['drops'] = tinsStats['currentQuery']['Video']['Download']['drops'] - tinsStats['priorQuery']['Video']['Download']['drops'] -# tinsStats['sinceLastQuery']['Voice']['Download']['drops'] = tinsStats['currentQuery']['Voice']['Download']['drops'] - tinsStats['priorQuery']['Voice']['Download']['drops'] -# tinsStats['sinceLastQuery']['Bulk']['Upload']['drops'] = tinsStats['currentQuery']['Bulk']['Upload']['drops'] - tinsStats['priorQuery']['Bulk']['Upload']['drops'] -# tinsStats['sinceLastQuery']['BestEffort']['Upload']['drops'] = tinsStats['currentQuery']['BestEffort']['Upload']['drops'] - tinsStats['priorQuery']['BestEffort']['Upload']['drops'] -# tinsStats['sinceLastQuery']['Video']['Upload']['drops'] = tinsStats['currentQuery']['Video']['Upload']['drops'] - tinsStats['priorQuery']['Video']['Upload']['drops'] -# tinsStats['sinceLastQuery']['Voice']['Upload']['drops'] = tinsStats['currentQuery']['Voice']['Upload']['drops'] - tinsStats['priorQuery']['Voice']['Upload']['drops'] -# except: -# tinsStats['sinceLastQuery']['Bulk']['Download']['drops'] = tinsStats['sinceLastQuery']['BestEffort']['Download']['drops'] = 0.0 -# 
tinsStats['sinceLastQuery']['Video']['Download']['drops'] = tinsStats['sinceLastQuery']['Voice']['Download']['drops'] = 0.0 -# tinsStats['sinceLastQuery']['Bulk']['Upload']['drops'] = tinsStats['sinceLastQuery']['BestEffort']['Upload']['drops'] = 0.0 -# tinsStats['sinceLastQuery']['Video']['Upload']['drops'] = tinsStats['sinceLastQuery']['Voice']['Upload']['drops'] = 0.0 + try: + tinsStats['sinceLastQuery']['Bulk']['Download']['drops'] = tinsStats['currentQuery']['Bulk']['Download']['drops'] - tinsStats['priorQuery']['Bulk']['Download']['drops'] + tinsStats['sinceLastQuery']['BestEffort']['Download']['drops'] = tinsStats['currentQuery']['BestEffort']['Download']['drops'] - tinsStats['priorQuery']['BestEffort']['Download']['drops'] + tinsStats['sinceLastQuery']['Video']['Download']['drops'] = tinsStats['currentQuery']['Video']['Download']['drops'] - tinsStats['priorQuery']['Video']['Download']['drops'] + tinsStats['sinceLastQuery']['Voice']['Download']['drops'] = tinsStats['currentQuery']['Voice']['Download']['drops'] - tinsStats['priorQuery']['Voice']['Download']['drops'] + tinsStats['sinceLastQuery']['Bulk']['Upload']['drops'] = tinsStats['currentQuery']['Bulk']['Upload']['drops'] - tinsStats['priorQuery']['Bulk']['Upload']['drops'] + tinsStats['sinceLastQuery']['BestEffort']['Upload']['drops'] = tinsStats['currentQuery']['BestEffort']['Upload']['drops'] - tinsStats['priorQuery']['BestEffort']['Upload']['drops'] + tinsStats['sinceLastQuery']['Video']['Upload']['drops'] = tinsStats['currentQuery']['Video']['Upload']['drops'] - tinsStats['priorQuery']['Video']['Upload']['drops'] + tinsStats['sinceLastQuery']['Voice']['Upload']['drops'] = tinsStats['currentQuery']['Voice']['Upload']['drops'] - tinsStats['priorQuery']['Voice']['Upload']['drops'] + except: + tinsStats['sinceLastQuery']['Bulk']['Download']['drops'] = tinsStats['sinceLastQuery']['BestEffort']['Download']['drops'] = 0.0 + tinsStats['sinceLastQuery']['Video']['Download']['drops'] = 
tinsStats['sinceLastQuery']['Voice']['Download']['drops'] = 0.0 + tinsStats['sinceLastQuery']['Bulk']['Upload']['drops'] = tinsStats['sinceLastQuery']['BestEffort']['Upload']['drops'] = 0.0 + tinsStats['sinceLastQuery']['Video']['Upload']['drops'] = tinsStats['sinceLastQuery']['Voice']['Upload']['drops'] = 0.0 -# try: -# dlPerc = tinsStats['sinceLastQuery']['Bulk']['Download']['drops'] / tinsStats['sinceLastQuery']['Bulk']['Download']['sent_packets'] -# ulPerc = tinsStats['sinceLastQuery']['Bulk']['Upload']['drops'] / tinsStats['sinceLastQuery']['Bulk']['Upload']['sent_packets'] -# tinsStats['sinceLastQuery']['Bulk']['Download']['dropPercentage'] = max(round(dlPerc * 100.0, 3),0.0) -# tinsStats['sinceLastQuery']['Bulk']['Upload']['dropPercentage'] = max(round(ulPerc * 100.0, 3),0.0) + try: + dlPerc = tinsStats['sinceLastQuery']['Bulk']['Download']['drops'] / tinsStats['sinceLastQuery']['Bulk']['Download']['sent_packets'] + ulPerc = tinsStats['sinceLastQuery']['Bulk']['Upload']['drops'] / tinsStats['sinceLastQuery']['Bulk']['Upload']['sent_packets'] + tinsStats['sinceLastQuery']['Bulk']['Download']['dropPercentage'] = max(round(dlPerc * 100.0, 3),0.0) + tinsStats['sinceLastQuery']['Bulk']['Upload']['dropPercentage'] = max(round(ulPerc * 100.0, 3),0.0) -# dlPerc = tinsStats['sinceLastQuery']['BestEffort']['Download']['drops'] / tinsStats['sinceLastQuery']['BestEffort']['Download']['sent_packets'] -# ulPerc = tinsStats['sinceLastQuery']['BestEffort']['Upload']['drops'] / tinsStats['sinceLastQuery']['BestEffort']['Upload']['sent_packets'] -# tinsStats['sinceLastQuery']['BestEffort']['Download']['dropPercentage'] = max(round(dlPerc * 100.0, 3),0.0) -# tinsStats['sinceLastQuery']['BestEffort']['Upload']['dropPercentage'] = max(round(ulPerc * 100.0, 3),0.0) + dlPerc = tinsStats['sinceLastQuery']['BestEffort']['Download']['drops'] / tinsStats['sinceLastQuery']['BestEffort']['Download']['sent_packets'] + ulPerc = tinsStats['sinceLastQuery']['BestEffort']['Upload']['drops'] 
/ tinsStats['sinceLastQuery']['BestEffort']['Upload']['sent_packets'] + tinsStats['sinceLastQuery']['BestEffort']['Download']['dropPercentage'] = max(round(dlPerc * 100.0, 3),0.0) + tinsStats['sinceLastQuery']['BestEffort']['Upload']['dropPercentage'] = max(round(ulPerc * 100.0, 3),0.0) -# dlPerc = tinsStats['sinceLastQuery']['Video']['Download']['drops'] / tinsStats['sinceLastQuery']['Video']['Download']['sent_packets'] -# ulPerc = tinsStats['sinceLastQuery']['Video']['Upload']['drops'] / tinsStats['sinceLastQuery']['Video']['Upload']['sent_packets'] -# tinsStats['sinceLastQuery']['Video']['Download']['dropPercentage'] = max(round(dlPerc * 100.0, 3),0.0) -# tinsStats['sinceLastQuery']['Video']['Upload']['dropPercentage'] = max(round(ulPerc * 100.0, 3),0.0) + dlPerc = tinsStats['sinceLastQuery']['Video']['Download']['drops'] / tinsStats['sinceLastQuery']['Video']['Download']['sent_packets'] + ulPerc = tinsStats['sinceLastQuery']['Video']['Upload']['drops'] / tinsStats['sinceLastQuery']['Video']['Upload']['sent_packets'] + tinsStats['sinceLastQuery']['Video']['Download']['dropPercentage'] = max(round(dlPerc * 100.0, 3),0.0) + tinsStats['sinceLastQuery']['Video']['Upload']['dropPercentage'] = max(round(ulPerc * 100.0, 3),0.0) -# dlPerc = tinsStats['sinceLastQuery']['Voice']['Download']['drops'] / tinsStats['sinceLastQuery']['Voice']['Download']['sent_packets'] -# ulPerc = tinsStats['sinceLastQuery']['Voice']['Upload']['drops'] / tinsStats['sinceLastQuery']['Voice']['Upload']['sent_packets'] -# tinsStats['sinceLastQuery']['Voice']['Download']['dropPercentage'] = max(round(dlPerc * 100.0, 3),0.0) -# tinsStats['sinceLastQuery']['Voice']['Upload']['dropPercentage'] = max(round(ulPerc * 100.0, 3),0.0) -# except: -# tinsStats['sinceLastQuery']['Bulk']['Download']['dropPercentage'] = 0.0 -# tinsStats['sinceLastQuery']['Bulk']['Upload']['dropPercentage'] = 0.0 -# tinsStats['sinceLastQuery']['BestEffort']['Download']['dropPercentage'] = 0.0 -# 
tinsStats['sinceLastQuery']['BestEffort']['Upload']['dropPercentage'] = 0.0 -# tinsStats['sinceLastQuery']['Video']['Download']['dropPercentage'] = 0.0 -# tinsStats['sinceLastQuery']['Video']['Upload']['dropPercentage'] = 0.0 -# tinsStats['sinceLastQuery']['Voice']['Download']['dropPercentage'] = 0.0 -# tinsStats['sinceLastQuery']['Voice']['Upload']['dropPercentage'] = 0.0 + dlPerc = tinsStats['sinceLastQuery']['Voice']['Download']['drops'] / tinsStats['sinceLastQuery']['Voice']['Download']['sent_packets'] + ulPerc = tinsStats['sinceLastQuery']['Voice']['Upload']['drops'] / tinsStats['sinceLastQuery']['Voice']['Upload']['sent_packets'] + tinsStats['sinceLastQuery']['Voice']['Download']['dropPercentage'] = max(round(dlPerc * 100.0, 3),0.0) + tinsStats['sinceLastQuery']['Voice']['Upload']['dropPercentage'] = max(round(ulPerc * 100.0, 3),0.0) + except: + tinsStats['sinceLastQuery']['Bulk']['Download']['dropPercentage'] = 0.0 + tinsStats['sinceLastQuery']['Bulk']['Upload']['dropPercentage'] = 0.0 + tinsStats['sinceLastQuery']['BestEffort']['Download']['dropPercentage'] = 0.0 + tinsStats['sinceLastQuery']['BestEffort']['Upload']['dropPercentage'] = 0.0 + tinsStats['sinceLastQuery']['Video']['Download']['dropPercentage'] = 0.0 + tinsStats['sinceLastQuery']['Video']['Upload']['dropPercentage'] = 0.0 + tinsStats['sinceLastQuery']['Voice']['Download']['dropPercentage'] = 0.0 + tinsStats['sinceLastQuery']['Voice']['Upload']['dropPercentage'] = 0.0 -# try: -# tinsStats['sinceLastQuery']['Bulk']['Download']['percentage'] = min(round((tinsStats['sinceLastQuery']['Bulk']['Download']['sent_packets']/allPacketsUpload)*100.0, 3),100.0) -# tinsStats['sinceLastQuery']['Bulk']['Upload']['percentage'] = min(round((tinsStats['sinceLastQuery']['Bulk']['Upload']['sent_packets']/allPacketsUpload)*100.0, 3),100.0) -# tinsStats['sinceLastQuery']['BestEffort']['Download']['percentage'] = 
min(round((tinsStats['sinceLastQuery']['BestEffort']['Download']['sent_packets']/allPacketsDownload)*100.0, 3),100.0) -# tinsStats['sinceLastQuery']['BestEffort']['Upload']['percentage'] = min(round((tinsStats['sinceLastQuery']['BestEffort']['Upload']['sent_packets']/allPacketsUpload)*100.0, 3),100.0) -# tinsStats['sinceLastQuery']['Video']['Download']['percentage'] = min(round((tinsStats['sinceLastQuery']['Video']['Download']['sent_packets']/allPacketsDownload)*100.0, 3),100.0) -# tinsStats['sinceLastQuery']['Video']['Upload']['percentage'] = min(round((tinsStats['sinceLastQuery']['Video']['Upload']['sent_packets']/allPacketsUpload)*100.0, 3),100.0) -# tinsStats['sinceLastQuery']['Voice']['Download']['percentage'] = min(round((tinsStats['sinceLastQuery']['Voice']['Download']['sent_packets']/allPacketsDownload)*100.0, 3),100.0) -# tinsStats['sinceLastQuery']['Voice']['Upload']['percentage'] = min(round((tinsStats['sinceLastQuery']['Voice']['Upload']['sent_packets']/allPacketsUpload)*100.0, 3),100.0) -# except: -# # To avoid graphing 0.0 for all categories, which would show unusual graph results upon each queue reload, we just set these to None if the above calculations fail. 
-# tinsStats['sinceLastQuery']['Bulk']['Download']['percentage'] = tinsStats['sinceLastQuery']['Bulk']['Upload']['percentage'] = None -# tinsStats['sinceLastQuery']['BestEffort']['Download']['percentage'] = tinsStats['sinceLastQuery']['BestEffort']['Upload']['percentage'] = None -# tinsStats['sinceLastQuery']['Video']['Download']['percentage'] = tinsStats['sinceLastQuery']['Video']['Upload']['percentage'] = None -# tinsStats['sinceLastQuery']['Voice']['Download']['percentage'] = tinsStats['sinceLastQuery']['Voice']['Upload']['percentage'] = None + try: + tinsStats['sinceLastQuery']['Bulk']['Download']['percentage'] = min(round((tinsStats['sinceLastQuery']['Bulk']['Download']['sent_packets']/allPacketsUpload)*100.0, 3),100.0) + tinsStats['sinceLastQuery']['Bulk']['Upload']['percentage'] = min(round((tinsStats['sinceLastQuery']['Bulk']['Upload']['sent_packets']/allPacketsUpload)*100.0, 3),100.0) + tinsStats['sinceLastQuery']['BestEffort']['Download']['percentage'] = min(round((tinsStats['sinceLastQuery']['BestEffort']['Download']['sent_packets']/allPacketsDownload)*100.0, 3),100.0) + tinsStats['sinceLastQuery']['BestEffort']['Upload']['percentage'] = min(round((tinsStats['sinceLastQuery']['BestEffort']['Upload']['sent_packets']/allPacketsUpload)*100.0, 3),100.0) + tinsStats['sinceLastQuery']['Video']['Download']['percentage'] = min(round((tinsStats['sinceLastQuery']['Video']['Download']['sent_packets']/allPacketsDownload)*100.0, 3),100.0) + tinsStats['sinceLastQuery']['Video']['Upload']['percentage'] = min(round((tinsStats['sinceLastQuery']['Video']['Upload']['sent_packets']/allPacketsUpload)*100.0, 3),100.0) + tinsStats['sinceLastQuery']['Voice']['Download']['percentage'] = min(round((tinsStats['sinceLastQuery']['Voice']['Download']['sent_packets']/allPacketsDownload)*100.0, 3),100.0) + tinsStats['sinceLastQuery']['Voice']['Upload']['percentage'] = min(round((tinsStats['sinceLastQuery']['Voice']['Upload']['sent_packets']/allPacketsUpload)*100.0, 3),100.0) + except: 
+ # To avoid graphing 0.0 for all categories, which would show unusual graph results upon each queue reload, we just set these to None if the above calculations fail. + tinsStats['sinceLastQuery']['Bulk']['Download']['percentage'] = tinsStats['sinceLastQuery']['Bulk']['Upload']['percentage'] = None + tinsStats['sinceLastQuery']['BestEffort']['Download']['percentage'] = tinsStats['sinceLastQuery']['BestEffort']['Upload']['percentage'] = None + tinsStats['sinceLastQuery']['Video']['Download']['percentage'] = tinsStats['sinceLastQuery']['Video']['Upload']['percentage'] = None + tinsStats['sinceLastQuery']['Voice']['Download']['percentage'] = tinsStats['sinceLastQuery']['Voice']['Upload']['percentage'] = None -# return subscriberCircuits, tinsStats + return subscriberCircuits, tinsStats -# def getParentNodeBandwidthStats(parentNodes, subscriberCircuits): -# for parentNode in parentNodes: -# thisNodeDropsDownload = 0 -# thisNodeDropsUpload = 0 -# thisNodeDropsTotal = 0 -# thisNodeBitsDownload = 0 -# thisNodeBitsUpload = 0 -# packetsSentDownloadAggregate = 0.0 -# packetsSentUploadAggregate = 0.0 -# packetsSentTotalAggregate = 0.0 -# circuitsMatched = 0 -# thisParentNodeStats = {'sinceLastQuery': {}} -# for circuit in subscriberCircuits: -# if circuit['ParentNode'] == parentNode['parentNodeName']: -# if circuit['stats']['sinceLastQuery']['bitsDownload']: -# thisNodeBitsDownload += circuit['stats']['sinceLastQuery']['bitsDownload'] -# if circuit['stats']['sinceLastQuery']['bitsUpload']: -# thisNodeBitsUpload += circuit['stats']['sinceLastQuery']['bitsUpload'] -# #thisNodeDropsDownload += circuit['packetDropsDownloadSinceLastQuery'] -# #thisNodeDropsUpload += circuit['packetDropsUploadSinceLastQuery'] -# if circuit['stats']['sinceLastQuery']['packetDropsDownload'] and circuit['stats']['sinceLastQuery']['packetDropsUpload']: -# thisNodeDropsTotal += (circuit['stats']['sinceLastQuery']['packetDropsDownload'] + circuit['stats']['sinceLastQuery']['packetDropsUpload']) -# if 
def getParentNodeBandwidthStats(parentNodes, subscriberCircuits):
    """Aggregate per-circuit bandwidth and drop counters up to each parent node.

    For every parent node, sums the 'sinceLastQuery' bits and packet counters
    of the circuits whose ParentNode matches it, and derives an overall
    overload factor (dropped packets as a percentage of packets sent, rounded
    to one decimal). The result replaces parentNode['stats'].

    Returns the mutated parentNodes list.
    """
    for parentNode in parentNodes:
        thisNodeDropsTotal = 0
        thisNodeBitsDownload = 0
        thisNodeBitsUpload = 0
        packetsSentDownloadAggregate = 0.0
        packetsSentUploadAggregate = 0.0
        packetsSentTotalAggregate = 0.0
        thisParentNodeStats = {'sinceLastQuery': {}}
        for circuit in subscriberCircuits:
            if circuit['ParentNode'] == parentNode['parentNodeName']:
                # Truthiness checks deliberately skip None *and* zero-valued
                # counters, preserving the original accumulation behavior.
                if circuit['stats']['sinceLastQuery']['bitsDownload']:
                    thisNodeBitsDownload += circuit['stats']['sinceLastQuery']['bitsDownload']
                if circuit['stats']['sinceLastQuery']['bitsUpload']:
                    thisNodeBitsUpload += circuit['stats']['sinceLastQuery']['bitsUpload']
                if circuit['stats']['sinceLastQuery']['packetDropsDownload'] and circuit['stats']['sinceLastQuery']['packetDropsUpload']:
                    thisNodeDropsTotal += (circuit['stats']['sinceLastQuery']['packetDropsDownload'] + circuit['stats']['sinceLastQuery']['packetDropsUpload'])
                if circuit['stats']['sinceLastQuery']['packetsSentDownload']:
                    packetsSentDownloadAggregate += circuit['stats']['sinceLastQuery']['packetsSentDownload']
                if circuit['stats']['sinceLastQuery']['packetsSentUpload']:
                    packetsSentUploadAggregate += circuit['stats']['sinceLastQuery']['packetsSentUpload']
                if circuit['stats']['sinceLastQuery']['packetsSentDownload'] and circuit['stats']['sinceLastQuery']['packetsSentUpload']:
                    packetsSentTotalAggregate += (circuit['stats']['sinceLastQuery']['packetsSentDownload'] + circuit['stats']['sinceLastQuery']['packetsSentUpload'])
        # Guard against division by zero when no circuit reported traffic.
        if (packetsSentDownloadAggregate > 0) and (packetsSentUploadAggregate > 0) and (packetsSentTotalAggregate > 0):
            overloadFactorTotalSinceLastQuery = float(round((thisNodeDropsTotal / packetsSentTotalAggregate) * 100.0, 1))
        else:
            overloadFactorTotalSinceLastQuery = 0.0

        thisParentNodeStats['sinceLastQuery']['bitsDownload'] = thisNodeBitsDownload
        thisParentNodeStats['sinceLastQuery']['bitsUpload'] = thisNodeBitsUpload
        thisParentNodeStats['sinceLastQuery']['packetDropsTotal'] = thisNodeDropsTotal
        thisParentNodeStats['sinceLastQuery']['overloadFactorTotal'] = overloadFactorTotalSinceLastQuery
        parentNode['stats'] = thisParentNodeStats

    return parentNodes
thisParentNodeStats['sinceLastQuery']['bitsDownload'] = thisNodeBitsDownload + thisParentNodeStats['sinceLastQuery']['bitsUpload'] = thisNodeBitsUpload + thisParentNodeStats['sinceLastQuery']['packetDropsTotal'] = thisNodeDropsTotal + thisParentNodeStats['sinceLastQuery']['overloadFactorTotal'] = overloadFactorTotalSinceLastQuery + parentNode['stats'] = thisParentNodeStats -# return parentNodes + return parentNodes -# def getParentNodeLatencyStats(parentNodes, subscriberCircuits): -# for parentNode in parentNodes: -# if 'stats' not in parentNode: -# parentNode['stats'] = {} -# parentNode['stats']['sinceLastQuery'] = {} +def getParentNodeLatencyStats(parentNodes, subscriberCircuits): + for parentNode in parentNodes: + if 'stats' not in parentNode: + parentNode['stats'] = {} + parentNode['stats']['sinceLastQuery'] = {} -# for parentNode in parentNodes: -# thisParentNodeStats = {'sinceLastQuery': {}} -# circuitsMatchedLatencies = [] -# for circuit in subscriberCircuits: -# if circuit['ParentNode'] == parentNode['parentNodeName']: -# if circuit['stats']['sinceLastQuery']['tcpLatency'] != None: -# circuitsMatchedLatencies.append(circuit['stats']['sinceLastQuery']['tcpLatency']) -# if len(circuitsMatchedLatencies) > 0: -# thisParentNodeStats['sinceLastQuery']['tcpLatency'] = statistics.median(circuitsMatchedLatencies) -# else: -# thisParentNodeStats['sinceLastQuery']['tcpLatency'] = None -# parentNode['stats'] = thisParentNodeStats -# return parentNodes + for parentNode in parentNodes: + thisParentNodeStats = {'sinceLastQuery': {}} + circuitsMatchedLatencies = [] + for circuit in subscriberCircuits: + if circuit['ParentNode'] == parentNode['parentNodeName']: + if circuit['stats']['sinceLastQuery']['tcpLatency'] != None: + circuitsMatchedLatencies.append(circuit['stats']['sinceLastQuery']['tcpLatency']) + if len(circuitsMatchedLatencies) > 0: + thisParentNodeStats['sinceLastQuery']['tcpLatency'] = statistics.median(circuitsMatchedLatencies) + else: + 
thisParentNodeStats['sinceLastQuery']['tcpLatency'] = None + parentNode['stats'] = thisParentNodeStats + return parentNodes -# def getCircuitLatencyStats(subscriberCircuits): -# command = './bin/xdp_pping' -# consoleOutput = subprocess.run(command.split(' '), stdout=subprocess.PIPE).stdout.decode('utf-8') -# consoleOutput = consoleOutput.replace('\n','').replace('}{', '}, {') -# listOfEntries = json.loads(consoleOutput) +def getCircuitLatencyStats(subscriberCircuits): + command = './bin/xdp_pping' + consoleOutput = subprocess.run(command.split(' '), stdout=subprocess.PIPE).stdout.decode('utf-8') + consoleOutput = consoleOutput.replace('\n','').replace('}{', '}, {') + listOfEntries = json.loads(consoleOutput) -# tcpLatencyForClassID = {} -# for entry in listOfEntries: -# if 'tc' in entry: -# handle = '0x' + entry['tc'].split(':')[0] + ':' + '0x' + entry['tc'].split(':')[1] -# # To avoid outliers messing up avg for each circuit - cap at ceiling of 200ms -# ceiling = 200.0 -# tcpLatencyForClassID[handle] = min(entry['median'], ceiling) -# for circuit in subscriberCircuits: -# if 'stats' not in circuit: -# circuit['stats'] = {} -# circuit['stats']['sinceLastQuery'] = {} + tcpLatencyForClassID = {} + for entry in listOfEntries: + if 'tc' in entry: + handle = '0x' + entry['tc'].split(':')[0] + ':' + '0x' + entry['tc'].split(':')[1] + # To avoid outliers messing up avg for each circuit - cap at ceiling of 200ms + ceiling = 200.0 + tcpLatencyForClassID[handle] = min(entry['median'], ceiling) + for circuit in subscriberCircuits: + if 'stats' not in circuit: + circuit['stats'] = {} + circuit['stats']['sinceLastQuery'] = {} -# for circuit in subscriberCircuits: -# classID = circuit['classid'] -# if classID in tcpLatencyForClassID: -# circuit['stats']['sinceLastQuery']['tcpLatency'] = tcpLatencyForClassID[classID] -# else: -# # If we can't identify RTT this time around, use most recently recorded RTT -# # None by default, change if found in priorQuery -# 
circuit['stats']['sinceLastQuery']['tcpLatency'] = None -# if 'priorQuery' in circuit['stats']: -# if circuit['stats']['priorQuery'] != None: -# if 'priorQuery' in circuit['stats']: -# if 'tcpLatency' in circuit['stats']['priorQuery']: -# circuit['stats']['sinceLastQuery']['tcpLatency'] = circuit['stats']['priorQuery']['tcpLatency'] + for circuit in subscriberCircuits: + classID = circuit['classid'] + if classID in tcpLatencyForClassID: + circuit['stats']['sinceLastQuery']['tcpLatency'] = tcpLatencyForClassID[classID] + else: + # If we can't identify RTT this time around, use most recently recorded RTT + # None by default, change if found in priorQuery + circuit['stats']['sinceLastQuery']['tcpLatency'] = None + if 'priorQuery' in circuit['stats']: + if circuit['stats']['priorQuery'] != None: + if 'priorQuery' in circuit['stats']: + if 'tcpLatency' in circuit['stats']['priorQuery']: + circuit['stats']['sinceLastQuery']['tcpLatency'] = circuit['stats']['priorQuery']['tcpLatency'] -# return subscriberCircuits + return subscriberCircuits -# def getParentNodeDict(data, depth, parentNodeNameDict): -# if parentNodeNameDict == None: -# parentNodeNameDict = {} +def getParentNodeDict(data, depth, parentNodeNameDict): + if parentNodeNameDict == None: + parentNodeNameDict = {} -# for elem in data: -# if 'children' in data[elem]: -# for child in data[elem]['children']: -# parentNodeNameDict[child] = elem -# tempDict = getParentNodeDict(data[elem]['children'], depth + 1, parentNodeNameDict) -# parentNodeNameDict = dict(parentNodeNameDict, **tempDict) -# return parentNodeNameDict + for elem in data: + if 'children' in data[elem]: + for child in data[elem]['children']: + parentNodeNameDict[child] = elem + tempDict = getParentNodeDict(data[elem]['children'], depth + 1, parentNodeNameDict) + parentNodeNameDict = dict(parentNodeNameDict, **tempDict) + return parentNodeNameDict -# def parentNodeNameDictPull(): -# # Load network hierarchy -# with open('network.json', 'r') as j: -# 
def parentNodeNameDictPull():
    """Load network.json and return a child-name -> parent-name mapping."""
    with open('network.json', 'r') as j:
        network = json.loads(j.read())
    parentNodeNameDict = getParentNodeDict(network, 0, None)
    return parentNodeNameDict

def refreshBandwidthGraphs():
    """Collect bandwidth/drop statistics and push them to InfluxDB.

    Reads the JSON state files written by the shaper (statsByParentNode.json,
    statsByCircuit.json, tinsStats.json, longTermStats.json), refreshes the
    circuit and parent-node counters, writes Bandwidth / Utilization /
    Overload / Tin / CPU points to InfluxDB, then persists the updated state
    files back to disk.
    """
    startTime = datetime.now()
    with open('statsByParentNode.json', 'r') as j:
        parentNodes = json.loads(j.read())

    with open('statsByCircuit.json', 'r') as j:
        subscriberCircuits = json.loads(j.read())

    fileLoc = Path("tinsStats.json")
    if fileLoc.is_file():
        with open(fileLoc, 'r') as j:
            tinsStats = json.loads(j.read())
    else:
        tinsStats = {}

    fileLoc = Path("longTermStats.json")
    if fileLoc.is_file():
        with open(fileLoc, 'r') as j:
            longTermStats = json.loads(j.read())
        droppedPacketsAllTime = longTermStats['droppedPacketsTotal']
    else:
        longTermStats = {}
        longTermStats['droppedPacketsTotal'] = 0.0
        droppedPacketsAllTime = 0.0

    # NOTE(review): the result is unused in this function; the call is kept
    # because it also verifies network.json is present and parseable.
    parentNodeNameDict = parentNodeNameDictPull()

    print("Retrieving circuit statistics")
    subscriberCircuits, tinsStats = getCircuitBandwidthStats(subscriberCircuits, tinsStats)
    print("Computing parent node statistics")
    parentNodes = getParentNodeBandwidthStats(parentNodes, subscriberCircuits)
    print("Writing data to InfluxDB")
    client = InfluxDBClient(
        url=influx_db_url(),
        token=influx_db_token(),
        org=influx_db_org()
    )

    # Record current timestamp, use for all points added
    timestamp = time.time_ns()
    write_api = client.write_api(write_options=SYNCHRONOUS)

    chunkedsubscriberCircuits = list(chunk_list(subscriberCircuits, 200))

    queriesToSendCount = 0
    for chunk in chunkedsubscriberCircuits:
        seenSomethingBesides0s = False
        queriesToSend = []
        for circuit in chunk:
            # (The old version also computed min/max plan rates and byte
            # counters here; they were never used and have been removed.)
            bitsDownload = float(circuit['stats']['sinceLastQuery']['bitsDownload']) if circuit['stats']['sinceLastQuery']['bitsDownload'] else None
            bitsUpload = float(circuit['stats']['sinceLastQuery']['bitsUpload']) if circuit['stats']['sinceLastQuery']['bitsUpload'] else None
            percentUtilizationDownload = round((bitsDownload / round(circuit['maxDownload'] * 1000000)) * 100.0, 1) if bitsDownload and circuit['maxDownload'] else None
            percentUtilizationUpload = round((bitsUpload / round(circuit['maxUpload'] * 1000000)) * 100.0, 1) if bitsUpload and circuit['maxUpload'] else None
            if bitsDownload and bitsUpload:
                if (bitsDownload > 0.0) or (bitsUpload > 0.0):
                    seenSomethingBesides0s = True
                    p = Point('Bandwidth').tag("Circuit", circuit['circuitName']).tag("ParentNode", circuit['ParentNode']).tag("Type", "Circuit").field("Download", bitsDownload).field("Upload", bitsUpload).time(timestamp)
                    queriesToSend.append(p)
                    p = Point('Utilization').tag("Circuit", circuit['circuitName']).tag("ParentNode", circuit['ParentNode']).tag("Type", "Circuit").field("Download", percentUtilizationDownload).field("Upload", percentUtilizationUpload).time(timestamp)
                    queriesToSend.append(p)

        # Skip the write entirely when every circuit in the chunk was idle.
        if seenSomethingBesides0s:
            write_api.write(bucket=influx_db_bucket(), record=queriesToSend)
            queriesToSendCount += len(queriesToSend)

    queriesToSend = []
    seenSomethingBesides0s = False
    for parentNode in parentNodes:
        bitsDownload = float(parentNode['stats']['sinceLastQuery']['bitsDownload'])
        bitsUpload = float(parentNode['stats']['sinceLastQuery']['bitsUpload'])
        dropsTotal = float(parentNode['stats']['sinceLastQuery']['packetDropsTotal'])
        overloadFactor = float(parentNode['stats']['sinceLastQuery']['overloadFactorTotal'])
        droppedPacketsAllTime += dropsTotal
        percentUtilizationDownload = round((bitsDownload / round(parentNode['maxDownload'] * 1000000)) * 100.0, 1)
        percentUtilizationUpload = round((bitsUpload / round(parentNode['maxUpload'] * 1000000)) * 100.0, 1)
        if bitsDownload and bitsUpload:
            if (bitsDownload > 0.0) or (bitsUpload > 0.0):
                seenSomethingBesides0s = True
                p = Point('Bandwidth').tag("Device", parentNode['parentNodeName']).tag("ParentNode", parentNode['parentNodeName']).tag("Type", "Parent Node").field("Download", bitsDownload).field("Upload", bitsUpload).time(timestamp)
                queriesToSend.append(p)
                p = Point('Utilization').tag("Device", parentNode['parentNodeName']).tag("ParentNode", parentNode['parentNodeName']).tag("Type", "Parent Node").field("Download", percentUtilizationDownload).field("Upload", percentUtilizationUpload).time(timestamp)
                queriesToSend.append(p)
                p = Point('Overload').tag("Device", parentNode['parentNodeName']).tag("ParentNode", parentNode['parentNodeName']).tag("Type", "Parent Node").field("Overload", overloadFactor).time(timestamp)
                queriesToSend.append(p)

    if seenSomethingBesides0s:
        write_api.write(bucket=influx_db_bucket(), record=queriesToSend)
        queriesToSendCount += len(queriesToSend)

    if 'cake diffserv4' in sqm():
        seenSomethingBesides0s = False
        queriesToSend = []
        listOfTins = ['Bulk', 'BestEffort', 'Video', 'Voice']
        for tin in listOfTins:
            p = Point('Tin Drop Percentage').tag("Type", "Tin").tag("Tin", tin).field("Download", tinsStats['sinceLastQuery'][tin]['Download']['dropPercentage']).field("Upload", tinsStats['sinceLastQuery'][tin]['Upload']['dropPercentage']).time(timestamp)
            queriesToSend.append(p)
            # Check to ensure tin percentage has value (!= None) before graphing.
            # During partial or full reload these will have a value of None.
            if (tinsStats['sinceLastQuery'][tin]['Download']['percentage'] is not None) and (tinsStats['sinceLastQuery'][tin]['Upload']['percentage'] is not None):
                if (tinsStats['sinceLastQuery'][tin]['Download']['percentage'] > 0.0) or (tinsStats['sinceLastQuery'][tin]['Upload']['percentage'] > 0.0):
                    seenSomethingBesides0s = True
                    p = Point('Tins Assigned').tag("Type", "Tin").tag("Tin", tin).field("Download", tinsStats['sinceLastQuery'][tin]['Download']['percentage']).field("Upload", tinsStats['sinceLastQuery'][tin]['Upload']['percentage']).time(timestamp)
                    queriesToSend.append(p)

        if seenSomethingBesides0s:
            write_api.write(bucket=influx_db_bucket(), record=queriesToSend)
            queriesToSendCount += len(queriesToSend)

    # Graph CPU use
    cpuVals = psutil.cpu_percent(percpu=True)
    queriesToSend = []
    for index, item in enumerate(cpuVals):
        p = Point('CPU').field('CPU_' + str(index), item)
        queriesToSend.append(p)
    write_api.write(bucket=influx_db_bucket(), record=queriesToSend)
    queriesToSendCount += len(queriesToSend)

    print("Added " + str(queriesToSendCount) + " points to InfluxDB.")

    client.close()

    # Persist refreshed state so the next run can diff against it.
    with open('statsByParentNode.json', 'w') as f:
        f.write(json.dumps(parentNodes, indent=4))

    with open('statsByCircuit.json', 'w') as f:
        f.write(json.dumps(subscriberCircuits, indent=4))

    longTermStats['droppedPacketsTotal'] = droppedPacketsAllTime
    with open('longTermStats.json', 'w') as f:
        f.write(json.dumps(longTermStats, indent=4))

    with open('tinsStats.json', 'w') as f:
        f.write(json.dumps(tinsStats, indent=4))

    endTime = datetime.now()
    durationSeconds = round((endTime - startTime).total_seconds(), 2)
    print("Graphs updated within " + str(durationSeconds) + " seconds.")
subscriberCircuits = getCircuitLatencyStats(subscriberCircuits) + print("Computing parent node statistics") + parentNodes = getParentNodeLatencyStats(parentNodes, subscriberCircuits) + print("Writing data to InfluxDB") + client = InfluxDBClient( + url=influx_db_url(), + token=influx_db_token(), + org=influx_db_org() + ) -# # Record current timestamp, use for all points added -# timestamp = time.time_ns() + # Record current timestamp, use for all points added + timestamp = time.time_ns() -# write_api = client.write_api(write_options=SYNCHRONOUS) + write_api = client.write_api(write_options=SYNCHRONOUS) -# chunkedsubscriberCircuits = list(chunk_list(subscriberCircuits, 200)) + chunkedsubscriberCircuits = list(chunk_list(subscriberCircuits, 200)) -# queriesToSendCount = 0 -# for chunk in chunkedsubscriberCircuits: -# queriesToSend = [] -# for circuit in chunk: -# if circuit['stats']['sinceLastQuery']['tcpLatency'] != None: -# tcpLatency = float(circuit['stats']['sinceLastQuery']['tcpLatency']) -# p = Point('TCP Latency').tag("Circuit", circuit['circuitName']).tag("ParentNode", circuit['ParentNode']).tag("Type", "Circuit").field("TCP Latency", tcpLatency).time(timestamp) -# queriesToSend.append(p) -# write_api.write(bucket=influxDBBucket, record=queriesToSend) -# queriesToSendCount += len(queriesToSend) + queriesToSendCount = 0 + for chunk in chunkedsubscriberCircuits: + queriesToSend = [] + for circuit in chunk: + if circuit['stats']['sinceLastQuery']['tcpLatency'] != None: + tcpLatency = float(circuit['stats']['sinceLastQuery']['tcpLatency']) + p = Point('TCP Latency').tag("Circuit", circuit['circuitName']).tag("ParentNode", circuit['ParentNode']).tag("Type", "Circuit").field("TCP Latency", tcpLatency).time(timestamp) + queriesToSend.append(p) + write_api.write(bucket=influx_db_bucket(), record=queriesToSend) + queriesToSendCount += len(queriesToSend) -# queriesToSend = [] -# for parentNode in parentNodes: -# if parentNode['stats']['sinceLastQuery']['tcpLatency'] != 
None: -# tcpLatency = float(parentNode['stats']['sinceLastQuery']['tcpLatency']) -# p = Point('TCP Latency').tag("Device", parentNode['parentNodeName']).tag("ParentNode", parentNode['parentNodeName']).tag("Type", "Parent Node").field("TCP Latency", tcpLatency).time(timestamp) -# queriesToSend.append(p) + queriesToSend = [] + for parentNode in parentNodes: + if parentNode['stats']['sinceLastQuery']['tcpLatency'] != None: + tcpLatency = float(parentNode['stats']['sinceLastQuery']['tcpLatency']) + p = Point('TCP Latency').tag("Device", parentNode['parentNodeName']).tag("ParentNode", parentNode['parentNodeName']).tag("Type", "Parent Node").field("TCP Latency", tcpLatency).time(timestamp) + queriesToSend.append(p) -# write_api.write(bucket=influxDBBucket, record=queriesToSend) -# queriesToSendCount += len(queriesToSend) + write_api.write(bucket=influx_db_bucket(), record=queriesToSend) + queriesToSendCount += len(queriesToSend) -# listOfAllLatencies = [] -# for circuit in subscriberCircuits: -# if circuit['stats']['sinceLastQuery']['tcpLatency'] != None: -# listOfAllLatencies.append(circuit['stats']['sinceLastQuery']['tcpLatency']) -# if len(listOfAllLatencies) > 0: -# currentNetworkLatency = statistics.median(listOfAllLatencies) -# p = Point('TCP Latency').tag("Type", "Network").field("TCP Latency", currentNetworkLatency).time(timestamp) -# write_api.write(bucket=influxDBBucket, record=p) -# queriesToSendCount += 1 + listOfAllLatencies = [] + for circuit in subscriberCircuits: + if circuit['stats']['sinceLastQuery']['tcpLatency'] != None: + listOfAllLatencies.append(circuit['stats']['sinceLastQuery']['tcpLatency']) + if len(listOfAllLatencies) > 0: + currentNetworkLatency = statistics.median(listOfAllLatencies) + p = Point('TCP Latency').tag("Type", "Network").field("TCP Latency", currentNetworkLatency).time(timestamp) + write_api.write(bucket=influx_db_bucket(), record=p) + queriesToSendCount += 1 -# print("Added " + str(queriesToSendCount) + " points to InfluxDB.") + 
print("Added " + str(queriesToSendCount) + " points to InfluxDB.") -# client.close() + client.close() -# with open('statsByParentNode.json', 'w') as f: -# f.write(json.dumps(parentNodes, indent=4)) + with open('statsByParentNode.json', 'w') as f: + f.write(json.dumps(parentNodes, indent=4)) -# with open('statsByCircuit.json', 'w') as f: -# f.write(json.dumps(subscriberCircuits, indent=4)) + with open('statsByCircuit.json', 'w') as f: + f.write(json.dumps(subscriberCircuits, indent=4)) -# endTime = datetime.now() -# durationSeconds = round((endTime - startTime).total_seconds(), 2) -# print("Graphs updated within " + str(durationSeconds) + " seconds.") + endTime = datetime.now() + durationSeconds = round((endTime - startTime).total_seconds(), 2) + print("Graphs updated within " + str(durationSeconds) + " seconds.") -# if __name__ == '__main__': -# refreshBandwidthGraphs() -# refreshLatencyGraphs() +if __name__ == '__main__': + refreshBandwidthGraphs() + refreshLatencyGraphs() diff --git a/src/integrationCommon.py b/src/integrationCommon.py index 805e1500..a020e7bf 100644 --- a/src/integrationCommon.py +++ b/src/integrationCommon.py @@ -146,7 +146,7 @@ class NetworkGraph: def addRawNode(self, node: NetworkNode) -> None: # Adds a NetworkNode to the graph, unchanged. 
- # If a site is excluded (via excludedSites in ispConfig) + # If a site is excluded (via excludedSites in lqos.conf) # it won't be added if not node.displayName in self.excludeSites: # TODO: Fixup exceptionCPE handling diff --git a/src/integrationRestHttp.py b/src/integrationRestHttp.py index a31b1549..9465a271 100644 --- a/src/integrationRestHttp.py +++ b/src/integrationRestHttp.py @@ -1,79 +1,81 @@ -import csv -import os -import shutil -from datetime import datetime +print("Deprecated for now.") -from requests import get +# import csv +# import os +# import shutil +# from datetime import datetime -from ispConfig import automaticImportRestHttp as restconf -from pydash import objects +# from requests import get -requestsBaseConfig = { - 'verify': True, - 'headers': { - 'accept': 'application/json' - } -} +# from ispConfig import automaticImportRestHttp as restconf +# from pydash import objects + +# requestsBaseConfig = { +# 'verify': True, +# 'headers': { +# 'accept': 'application/json' +# } +# } -def createShaper(): +# def createShaper(): - # shutil.copy('Shaper.csv', 'Shaper.csv.bak') - ts = datetime.now().strftime('%Y-%m-%d.%H-%M-%S') +# # shutil.copy('Shaper.csv', 'Shaper.csv.bak') +# ts = datetime.now().strftime('%Y-%m-%d.%H-%M-%S') - devicesURL = restconf.get('baseURL') + '/' + restconf.get('devicesURI').strip('/') +# devicesURL = restconf.get('baseURL') + '/' + restconf.get('devicesURI').strip('/') - requestConfig = objects.defaults_deep({'params': {}}, restconf.get('requestsConfig'), requestsBaseConfig) +# requestConfig = objects.defaults_deep({'params': {}}, restconf.get('requestsConfig'), requestsBaseConfig) - raw = get(devicesURL, **requestConfig, timeout=10) +# raw = get(devicesURL, **requestConfig, timeout=10) - if raw.status_code != 200: - print('Failed to request ' + devicesURL + ', got ' + str(raw.status_code)) - return False +# if raw.status_code != 200: +# print('Failed to request ' + devicesURL + ', got ' + str(raw.status_code)) +# return False 
- devicesCsvFP = os.path.dirname(os.path.realpath(__file__)) + '/ShapedDevices.csv' +# devicesCsvFP = os.path.dirname(os.path.realpath(__file__)) + '/ShapedDevices.csv' - with open(devicesCsvFP, 'w') as csvfile: - wr = csv.writer(csvfile, quoting=csv.QUOTE_ALL) - wr.writerow( - ['Circuit ID', 'Circuit Name', 'Device ID', 'Device Name', 'Parent Node', 'MAC', 'IPv4', 'IPv6', - 'Download Min Mbps', 'Upload Min Mbps', 'Download Max Mbps', 'Upload Max Mbps', 'Comment']) - for row in raw.json(): - wr.writerow(row.values()) +# with open(devicesCsvFP, 'w') as csvfile: +# wr = csv.writer(csvfile, quoting=csv.QUOTE_ALL) +# wr.writerow( +# ['Circuit ID', 'Circuit Name', 'Device ID', 'Device Name', 'Parent Node', 'MAC', 'IPv4', 'IPv6', +# 'Download Min Mbps', 'Upload Min Mbps', 'Download Max Mbps', 'Upload Max Mbps', 'Comment']) +# for row in raw.json(): +# wr.writerow(row.values()) - if restconf['logChanges']: - devicesBakFilePath = restconf['logChanges'].rstrip('/') + '/ShapedDevices.' + ts + '.csv' - try: - shutil.copy(devicesCsvFP, devicesBakFilePath) - except: - os.makedirs(restconf['logChanges'], exist_ok=True) - shutil.copy(devicesCsvFP, devicesBakFilePath) +# if restconf['logChanges']: +# devicesBakFilePath = restconf['logChanges'].rstrip('/') + '/ShapedDevices.' 
+ ts + '.csv' +# try: +# shutil.copy(devicesCsvFP, devicesBakFilePath) +# except: +# os.makedirs(restconf['logChanges'], exist_ok=True) +# shutil.copy(devicesCsvFP, devicesBakFilePath) - networkURL = restconf['baseURL'] + '/' + restconf['networkURI'].strip('/') +# networkURL = restconf['baseURL'] + '/' + restconf['networkURI'].strip('/') - raw = get(networkURL, **requestConfig, timeout=10) +# raw = get(networkURL, **requestConfig, timeout=10) - if raw.status_code != 200: - print('Failed to request ' + networkURL + ', got ' + str(raw.status_code)) - return False +# if raw.status_code != 200: +# print('Failed to request ' + networkURL + ', got ' + str(raw.status_code)) +# return False - networkJsonFP = os.path.dirname(os.path.realpath(__file__)) + '/network.json' +# networkJsonFP = os.path.dirname(os.path.realpath(__file__)) + '/network.json' - with open(networkJsonFP, 'w') as handler: - handler.write(raw.text) +# with open(networkJsonFP, 'w') as handler: +# handler.write(raw.text) - if restconf['logChanges']: - networkBakFilePath = restconf['logChanges'].rstrip('/') + '/network.' + ts + '.json' - try: - shutil.copy(networkJsonFP, networkBakFilePath) - except: - os.makedirs(restconf['logChanges'], exist_ok=True) - shutil.copy(networkJsonFP, networkBakFilePath) +# if restconf['logChanges']: +# networkBakFilePath = restconf['logChanges'].rstrip('/') + '/network.' 
+ ts + '.json' +# try: +# shutil.copy(networkJsonFP, networkBakFilePath) +# except: +# os.makedirs(restconf['logChanges'], exist_ok=True) +# shutil.copy(networkJsonFP, networkBakFilePath) -def importFromRestHttp(): - createShaper() +# def importFromRestHttp(): +# createShaper() -if __name__ == '__main__': - importFromRestHttp() +# if __name__ == '__main__': +# importFromRestHttp() diff --git a/src/integrationSonar.py b/src/integrationSonar.py index 9044d221..3cbffca4 100644 --- a/src/integrationSonar.py +++ b/src/integrationSonar.py @@ -4,7 +4,6 @@ import requests import subprocess from liblqos_python import sonar_api_key, sonar_api_url, snmp_community, sonar_airmax_ap_model_ids, \ sonar_ltu_ap_model_ids, sonar_active_status_ids -#from ispConfig import sonar_airmax_ap_model_ids,sonar_active_status_ids,sonar_ltu_ap_model_ids all_models = sonar_airmax_ap_model_ids() + sonar_ltu_ap_model_ids() from integrationCommon import NetworkGraph, NetworkNode, NodeType from multiprocessing.pool import ThreadPool @@ -120,7 +119,7 @@ def getSitesAndAps(): } sites_and_aps = sonarRequest(query,variables) - # This should only return sites that have equipment on them that is in the list sonar_ubiquiti_ap_model_ids in ispConfig.py + # This should only return sites that have equipment on them that is in the list sonar_ubiquiti_ap_model_ids in lqos.conf sites = [] aps = [] for site in sites_and_aps: diff --git a/src/lqos.example b/src/lqos.example index 184ae217..deb197de 100644 --- a/src/lqos.example +++ b/src/lqos.example @@ -93,3 +93,10 @@ snmp_community = "public" airmax_model_ids = [ "" ] ltu_model_ids = [ "" ] active_status_ids = [ "" ] + +[influxdb] +enable_influxdb = false +url = "http://localhost:8086" +org = "Your ISP Name Here" +bucket = "libreqos" +token = "" \ No newline at end of file diff --git a/src/rust/lqos_config/src/etc/etclqos_migration.rs index 78ccab96..17ed2190 100644 ---
a/src/rust/lqos_config/src/etc/etclqos_migration.rs +++ b/src/rust/lqos_config/src/etc/etclqos_migration.rs @@ -309,7 +309,7 @@ mod test { fn round_trip_toml() { let doc = EXAMPLE_LQOS_CONF.parse::<toml_edit::Document>().unwrap(); let reserialized = doc.to_string(); - assert_eq!(EXAMPLE_LQOS_CONF, reserialized); + assert_eq!(EXAMPLE_LQOS_CONF.trim(), reserialized.trim()); } #[test] diff --git a/src/rust/lqos_config/src/etc/migration.rs b/src/rust/lqos_config/src/etc/migration.rs index 3d09ac03..98dcf53d 100644 --- a/src/rust/lqos_config/src/etc/migration.rs +++ b/src/rust/lqos_config/src/etc/migration.rs @@ -89,6 +89,7 @@ fn do_migration_14_to_15( migrate_powercode(python_config, &mut new_config)?; migrate_sonar(python_config, &mut new_config)?; migrate_queues( python_config, &mut new_config)?; + migrate_influx(python_config, &mut new_config)?; new_config.validate().unwrap(); // Left as an unwrap because this should *never* happen Ok(new_config) @@ -272,6 +273,18 @@ fn migrate_uisp( Ok(()) } +fn migrate_influx( + python_config: &PythonMigration, + new_config: &mut Config, +) -> Result<(), MigrationError> { + new_config.influxdb.enable_influxdb = python_config.influx_db_enabled; + new_config.influxdb.url = python_config.influx_db_url.clone(); + new_config.influxdb.bucket = python_config.infux_db_bucket.clone(); + new_config.influxdb.org = python_config.influx_db_org.clone(); + new_config.influxdb.token = python_config.influx_db_token.clone(); + Ok(()) +} + #[cfg(test)] mod test { use super::*; diff --git a/src/rust/lqos_config/src/etc/python_migration.rs b/src/rust/lqos_config/src/etc/python_migration.rs index 92454f3f..6f5d6b6f 100644 --- a/src/rust/lqos_config/src/etc/python_migration.rs +++ b/src/rust/lqos_config/src/etc/python_migration.rs @@ -182,6 +182,13 @@ impl PythonMigration { cfg.sonar_airmax_ap_model_ids = from_python(&py, "sonar_airmax_ap_model_ids").unwrap_or(vec![]); cfg.sonar_ltu_ap_model_ids = from_python(&py, "sonar_ltu_ap_model_ids").unwrap_or(vec![]); + // InfluxDB + 
cfg.influx_db_enabled = from_python(&py, "influxDBEnabled").unwrap_or(false); + cfg.influx_db_url = from_python(&py, "influxDBurl").unwrap_or("http://localhost:8086".to_string()); + cfg.infux_db_bucket = from_python(&py, "influxDBBucket").unwrap_or("libreqos".to_string()); + cfg.influx_db_org = from_python(&py, "influxDBOrg").unwrap_or("Your ISP Name Here".to_string()); + cfg.influx_db_token = from_python(&py, "influxDBtoken").unwrap_or("".to_string()); + Ok(()) } diff --git a/src/rust/lqos_config/src/etc/v15/example.toml b/src/rust/lqos_config/src/etc/v15/example.toml index 184ae217..deb197de 100644 --- a/src/rust/lqos_config/src/etc/v15/example.toml +++ b/src/rust/lqos_config/src/etc/v15/example.toml @@ -93,3 +93,10 @@ snmp_community = "public" airmax_model_ids = [ "" ] ltu_model_ids = [ "" ] active_status_ids = [ "" ] + +[influxdb] +enable_influxdb = false +url = "http://localhost:8086" +org = "Your ISP Name Here" +bucket = "libreqos" +token = "" \ No newline at end of file diff --git a/src/rust/lqos_config/src/etc/v15/influxdb.rs new file mode 100644 index 00000000..eb7788e3 --- /dev/null +++ b/src/rust/lqos_config/src/etc/v15/influxdb.rs @@ -0,0 +1,22 @@ +use serde::{Serialize, Deserialize}; + +#[derive(Clone, Serialize, Deserialize, Debug)] +pub struct InfluxDbConfig { + pub enable_influxdb: bool, + pub url: String, + pub bucket: String, + pub org: String, + pub token: String, +} + +impl Default for InfluxDbConfig { + fn default() -> Self { + Self { + enable_influxdb: false, + url: "http://localhost:8086".to_string(), + bucket: "libreqos".to_string(), + org: "Your ISP Name".to_string(), + token: "".to_string() + } + } +} \ No newline at end of file diff --git a/src/rust/lqos_config/src/etc/v15/mod.rs b/src/rust/lqos_config/src/etc/v15/mod.rs index fc95839a..44567b47 100644 --- a/src/rust/lqos_config/src/etc/v15/mod.rs +++ b/src/rust/lqos_config/src/etc/v15/mod.rs @@ -13,6 +13,7 @@ mod spylnx_integration; mod 
uisp_integration; mod powercode_integration; mod sonar_integration; +mod influxdb; pub use bridge::*; pub use long_term_stats::LongTermStats; pub use tuning::Tunables; \ No newline at end of file diff --git a/src/rust/lqos_config/src/etc/v15/top_config.rs b/src/rust/lqos_config/src/etc/v15/top_config.rs index f322d7da..5d7175d1 100644 --- a/src/rust/lqos_config/src/etc/v15/top_config.rs +++ b/src/rust/lqos_config/src/etc/v15/top_config.rs @@ -65,6 +65,9 @@ pub struct Config { /// Sonar Integration pub sonar_integration: super::sonar_integration::SonarIntegration, + + /// InfluxDB Configuration + pub influxdb: super::influxdb::InfluxDbConfig, } impl Config { @@ -127,6 +130,7 @@ impl Default for Config { uisp_integration: super::uisp_integration::UispIntegration::default(), powercode_integration: super::powercode_integration::PowercodeIntegration::default(), sonar_integration: super::sonar_integration::SonarIntegration::default(), + influxdb: super::influxdb::InfluxDbConfig::default(), packet_capture_time: 10, queue_check_period_ms: 1000, } diff --git a/src/rust/lqos_python/src/lib.rs b/src/rust/lqos_python/src/lib.rs index a7afa1a6..32adfd29 100644 --- a/src/rust/lqos_python/src/lib.rs +++ b/src/rust/lqos_python/src/lib.rs @@ -81,6 +81,11 @@ fn liblqos_python(_py: Python, m: &PyModule) -> PyResult<()> { m.add_wrapped(wrap_pyfunction!(sonar_airmax_ap_model_ids))?; m.add_wrapped(wrap_pyfunction!(sonar_ltu_ap_model_ids))?; m.add_wrapped(wrap_pyfunction!(sonar_active_status_ids))?; + m.add_wrapped(wrap_pyfunction!(influx_db_enabled))?; + m.add_wrapped(wrap_pyfunction!(influx_db_bucket))?; + m.add_wrapped(wrap_pyfunction!(influx_db_org))?; + m.add_wrapped(wrap_pyfunction!(influx_db_token))?; + m.add_wrapped(wrap_pyfunction!(influx_db_url))?; Ok(()) } @@ -602,4 +607,34 @@ fn sonar_ltu_ap_model_ids() -> PyResult> { fn sonar_active_status_ids() -> PyResult> { let config = lqos_config::load_config().unwrap(); Ok(config.sonar_integration.active_status_ids) +} + +#[pyfunction] 
+fn influx_db_enabled() -> PyResult<bool> { + let config = lqos_config::load_config().unwrap(); + Ok(config.influxdb.enable_influxdb) +} + +#[pyfunction] +fn influx_db_bucket() -> PyResult<String> { + let config = lqos_config::load_config().unwrap(); + Ok(config.influxdb.bucket) +} + +#[pyfunction] +fn influx_db_org() -> PyResult<String> { + let config = lqos_config::load_config().unwrap(); + Ok(config.influxdb.org) +} + +#[pyfunction] +fn influx_db_token() -> PyResult<String> { + let config = lqos_config::load_config().unwrap(); + Ok(config.influxdb.token) +} + +#[pyfunction] +fn influx_db_url() -> PyResult<String> { + let config = lqos_config::load_config().unwrap(); + Ok(config.influxdb.url) } \ No newline at end of file