Add files via upload

Robert Chacón 2022-02-25 21:35:41 -07:00 committed by GitHub
parent abdebeb16d
commit 50b9d0820c
4 changed files with 295 additions and 5 deletions

v1.1/LibreQoS.py

@@ -173,11 +173,11 @@ def refreshShapers():
 print(tabs, end='')
 shell('tc class add dev ' + interfaceA + ' parent ' + parentClassID + ' classid ' + str(minor) + ' htb rate '+ str(round(elemDownloadMin)) + 'mbit ceil '+ str(round(elemDownloadMax)) + 'mbit prio 3')
 print(tabs, end='')
-shell('tc qdisc add dev ' + interfaceA + ' parent ' + str(major) + ':' + str(minor) + ' ' + fqOrCAKE)
+#shell('tc qdisc add dev ' + interfaceA + ' parent ' + str(major) + ':' + str(minor) + ' ' + fqOrCAKE)
 print(tabs, end='')
 shell('tc class add dev ' + interfaceB + ' parent ' + parentClassID + ' classid ' + str(minor) + ' htb rate '+ str(round(elemUploadMin)) + 'mbit ceil '+ str(round(elemUploadMax)) + 'mbit prio 3')
 print(tabs, end='')
-shell('tc qdisc add dev ' + interfaceB + ' parent ' + str(major) + ':' + str(minor) + ' ' + fqOrCAKE)
+#shell('tc qdisc add dev ' + interfaceB + ' parent ' + str(major) + ':' + str(minor) + ' ' + fqOrCAKE)
 print()
 thisParentNode = {
 	"parentNodeName": elem,

v1.1/graphBandwidth.py (new file, 151 lines)

@@ -0,0 +1,151 @@
import os
import subprocess
from subprocess import PIPE
import io
import decimal
import json
from ispConfig import fqOrCAKE, interfaceA, interfaceB, influxDBBucket, influxDBOrg, influxDBtoken, influxDBurl
from datetime import date, datetime, timedelta
from influxdb_client import InfluxDBClient, Point, Dialect
from influxdb_client.client.write_api import SYNCHRONOUS
import dateutil.parser

def getDeviceStats(devices):
	interfaces = [interfaceA, interfaceB]
	# Pull per-qdisc statistics for both shaping interfaces as JSON
	for interface in interfaces:
		command = 'tc -j -s qdisc show dev ' + interface
		commands = command.split(' ')
		tcShowResults = subprocess.run(commands, stdout=subprocess.PIPE).stdout.decode('utf-8')
		if interface == interfaceA:
			interfaceAjson = json.loads(tcShowResults)
		else:
			interfaceBjson = json.loads(tcShowResults)
	for device in devices:
		if 'timeQueried' in device:
			device['priorQueryTime'] = device['timeQueried']
		for interface in interfaces:
			if interface == interfaceA:
				jsonVersion = interfaceAjson
			else:
				jsonVersion = interfaceBjson
			for element in jsonVersion:
				if "parent" in element:
					if element['parent'] == device['qdisc']:
						drops = int(element['drops'])
						packets = int(element['packets'])
						bytesSent = int(element['bytes'])
						if interface == interfaceA:
							if 'bytesSentDownload' in device:
								device['priorQueryBytesDownload'] = device['bytesSentDownload']
							device['bytesSentDownload'] = bytesSent
						else:
							if 'bytesSentUpload' in device:
								device['priorQueryBytesUpload'] = device['bytesSentUpload']
							device['bytesSentUpload'] = bytesSent
		device['timeQueried'] = datetime.now().isoformat()
	for device in devices:
		if 'priorQueryTime' in device:
			bytesDLSinceLastQuery = device['bytesSentDownload'] - device['priorQueryBytesDownload']
			bytesULSinceLastQuery = device['bytesSentUpload'] - device['priorQueryBytesUpload']
			currentQueryTime = datetime.fromisoformat(device['timeQueried'])
			priorQueryTime = datetime.fromisoformat(device['priorQueryTime'])
			delta = currentQueryTime - priorQueryTime
			deltaSeconds = delta.total_seconds()
			# Convert the byte deltas to average bits per second over the sampling interval
			if deltaSeconds > 0:
				bitsDownload = round((bytesDLSinceLastQuery * 8) / deltaSeconds)
				bitsUpload = round((bytesULSinceLastQuery * 8) / deltaSeconds)
			else:
				bitsDownload = 0
				bitsUpload = 0
			device['bitsDownloadSinceLastQuery'] = bitsDownload
			device['bitsUploadSinceLastQuery'] = bitsUpload
		else:
			# First run: no prior sample to diff against yet
			device['bitsDownloadSinceLastQuery'] = 0
			device['bitsUploadSinceLastQuery'] = 0
	return devices
def getParentNodeStats(parentNodes, devices):
	for parentNode in parentNodes:
		thisNodeBitsDownload = 0
		thisNodeBitsUpload = 0
		for device in devices:
			if device['ParentNode'] == parentNode['parentNodeName']:
				thisNodeBitsDownload += device['bitsDownloadSinceLastQuery']
				thisNodeBitsUpload += device['bitsUploadSinceLastQuery']
		parentNode['bitsDownloadSinceLastQuery'] = thisNodeBitsDownload
		parentNode['bitsUploadSinceLastQuery'] = thisNodeBitsUpload
	return parentNodes
def refreshBandwidthGraphs():
	startTime = datetime.now()
	with open('statsByParentNode.json', 'r') as j:
		parentNodes = json.loads(j.read())
	with open('statsByDevice.json', 'r') as j:
		devices = json.loads(j.read())
	print("Retrieving device statistics")
	devices = getDeviceStats(devices)
	print("Computing parent node statistics")
	parentNodes = getParentNodeStats(parentNodes, devices)
	print("Writing data to InfluxDB")
	bucket = influxDBBucket
	org = influxDBOrg
	token = influxDBtoken
	url = influxDBurl
	client = InfluxDBClient(
		url=url,
		token=token,
		org=org
	)
	write_api = client.write_api(write_options=SYNCHRONOUS)
	queriesToSend = []
	for device in devices:
		bitsDownload = int(device['bitsDownloadSinceLastQuery'])
		bitsUpload = int(device['bitsUploadSinceLastQuery'])
		if (bitsDownload > 0) and (bitsUpload > 0):
			percentUtilizationDownload = round((bitsDownload / round(device['downloadMax'] * 1000000)), 4)
			percentUtilizationUpload = round((bitsUpload / round(device['uploadMax'] * 1000000)), 4)
			p = Point('Bandwidth').tag("Device", device['hostname']).tag("ParentNode", device['ParentNode']).tag("Type", "Device").field("Download", bitsDownload)
			queriesToSend.append(p)
			p = Point('Bandwidth').tag("Device", device['hostname']).tag("ParentNode", device['ParentNode']).tag("Type", "Device").field("Upload", bitsUpload)
			queriesToSend.append(p)
			p = Point('Utilization').tag("Device", device['hostname']).tag("ParentNode", device['ParentNode']).tag("Type", "Device").field("Download", percentUtilizationDownload)
			queriesToSend.append(p)
			p = Point('Utilization').tag("Device", device['hostname']).tag("ParentNode", device['ParentNode']).tag("Type", "Device").field("Upload", percentUtilizationUpload)
			queriesToSend.append(p)
	for parentNode in parentNodes:
		bitsDownload = int(parentNode['bitsDownloadSinceLastQuery'])
		bitsUpload = int(parentNode['bitsUploadSinceLastQuery'])
		if (bitsDownload > 0) and (bitsUpload > 0):
			percentUtilizationDownload = round((bitsDownload / round(parentNode['downloadMax'] * 1000000)), 4)
			percentUtilizationUpload = round((bitsUpload / round(parentNode['uploadMax'] * 1000000)), 4)
			p = Point('Bandwidth').tag("Device", parentNode['parentNodeName']).tag("ParentNode", parentNode['parentNodeName']).tag("Type", "Parent Node").field("Download", bitsDownload)
			queriesToSend.append(p)
			p = Point('Bandwidth').tag("Device", parentNode['parentNodeName']).tag("ParentNode", parentNode['parentNodeName']).tag("Type", "Parent Node").field("Upload", bitsUpload)
			queriesToSend.append(p)
			p = Point('Utilization').tag("Device", parentNode['parentNodeName']).tag("ParentNode", parentNode['parentNodeName']).tag("Type", "Parent Node").field("Download", percentUtilizationDownload)
			queriesToSend.append(p)
			p = Point('Utilization').tag("Device", parentNode['parentNodeName']).tag("ParentNode", parentNode['parentNodeName']).tag("Type", "Parent Node").field("Upload", percentUtilizationUpload)
			queriesToSend.append(p)
	write_api.write(bucket=bucket, record=queriesToSend)
	print("Added " + str(len(queriesToSend)) + " points to InfluxDB.")
	client.close()
	with open('statsByParentNode.json', 'w') as infile:
		json.dump(parentNodes, infile)
	with open('statsByDevice.json', 'w') as infile:
		json.dump(devices, infile)
	endTime = datetime.now()
	durationSeconds = round((endTime - startTime).total_seconds(), 2)
	print("Graphs updated within " + str(durationSeconds) + " seconds.")

if __name__ == '__main__':
	refreshBandwidthGraphs()
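For context on the parsing above: getDeviceStats() matches each device's 'qdisc' handle against the 'parent' field of the objects emitted by tc -j -s qdisc show dev <interface>. A minimal sketch of one such object follows; the field names match iproute2's JSON output, but the handle and counter values are hypothetical, and only the keys read by getDeviceStats() are shown (iproute2 emits several more).

# Hypothetical qdisc entry as returned by `tc -j -s qdisc show dev <interface>`
exampleElement = {
	"kind": "fq_codel",    # or "cake", per fqOrCAKE in ispConfig
	"handle": "5:",
	"parent": "1:5",       # compared against device['qdisc']
	"bytes": 123456789,    # cumulative bytes through this leaf qdisc
	"packets": 98765,
	"drops": 12
}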

v1.1/graphLatency.py (new file, 136 lines)

@@ -0,0 +1,136 @@
import os
import subprocess
from subprocess import PIPE
import io
import decimal
import json
from ispConfig import fqOrCAKE, interfaceA, interfaceB, influxDBBucket, influxDBOrg, influxDBtoken, influxDBurl, ppingLocation
from datetime import date, datetime, timedelta
from influxdb_client import InfluxDBClient, Point, Dialect
from influxdb_client.client.write_api import SYNCHRONOUS
import dateutil.parser

def getLatencies(devices, secondsToRun):
	interfaces = [interfaceA, interfaceB]
	tcpLatency = 0
	listOfAllDiffs = []
	maxLatencyRecordable = 200
	matchableIPs = []
	for device in devices:
		matchableIPs.append(device['ipv4'])
	rttDict = {}
	jitterDict = {}
	#for interface in interfaces:
	command = "./pping -i " + interfaceA + " -s " + str(secondsToRun) + " -m"
	commands = command.split(' ')
	wd = ppingLocation
	tcShowResults = subprocess.run(command, shell=True, cwd=wd, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL).stdout.decode('utf-8').splitlines()
	for line in tcShowResults:
		if len(line) > 59:
			# Convert pping RTT samples to milliseconds
			rtt1 = float(line[18:27]) * 1000
			rtt2 = float(line[27:36]) * 1000
			toAndFrom = line[38:].split(' ')[3]
			fromIP = toAndFrom.split('+')[0].split(':')[0]
			toIP = toAndFrom.split('+')[1].split(':')[0]
			matchedIP = ''
			if fromIP in matchableIPs:
				matchedIP = fromIP
			elif toIP in matchableIPs:
				matchedIP = toIP
			jitter = rtt1 - rtt2
			# Cap at the recordable ceiling
			if rtt1 >= maxLatencyRecordable:
				rtt1 = maxLatencyRecordable
			# Keep the lowest observed RTT (and its jitter) per matched IP
			if matchedIP in rttDict:
				if rtt1 < rttDict[matchedIP]:
					rttDict[matchedIP] = rtt1
					jitterDict[matchedIP] = jitter
			else:
				rttDict[matchedIP] = rtt1
				jitterDict[matchedIP] = jitter
	for device in devices:
		diffsForThisDevice = []
		if device['ipv4'] in rttDict:
			device['tcpLatency'] = rttDict[device['ipv4']]
		else:
			device['tcpLatency'] = None
		if device['ipv4'] in jitterDict:
			device['tcpJitter'] = jitterDict[device['ipv4']]
		else:
			device['tcpJitter'] = None
	return devices
def getParentNodeStats(parentNodes, devices):
	for parentNode in parentNodes:
		acceptableLatencies = []
		for device in devices:
			if device['ParentNode'] == parentNode['parentNodeName']:
				if device['tcpLatency'] is not None:
					acceptableLatencies.append(device['tcpLatency'])
		if len(acceptableLatencies) > 0:
			# Parent node latency is the mean of its devices' latencies
			parentNode['tcpLatency'] = sum(acceptableLatencies) / len(acceptableLatencies)
		else:
			parentNode['tcpLatency'] = None
	return parentNodes
def refreshLatencyGraphs(secondsToRun):
	startTime = datetime.now()
	with open('statsByParentNode.json', 'r') as j:
		parentNodes = json.loads(j.read())
	with open('statsByDevice.json', 'r') as j:
		devices = json.loads(j.read())
	print("Retrieving device statistics")
	devices = getLatencies(devices, secondsToRun)
	print("Computing parent node statistics")
	parentNodes = getParentNodeStats(parentNodes, devices)
	print("Writing data to InfluxDB")
	bucket = influxDBBucket
	org = influxDBOrg
	token = influxDBtoken
	url = influxDBurl
	client = InfluxDBClient(
		url=url,
		token=token,
		org=org
	)
	write_api = client.write_api(write_options=SYNCHRONOUS)
	queriesToSend = []
	for device in devices:
		if device['tcpLatency'] is not None:
			p = Point('Latency').tag("Device", device['hostname']).tag("ParentNode", device['ParentNode']).tag("Type", "Device").field("TCP Latency", device['tcpLatency'])
			queriesToSend.append(p)
	for parentNode in parentNodes:
		if parentNode['tcpLatency'] is not None:
			p = Point('Latency').tag("Device", parentNode['parentNodeName']).tag("ParentNode", parentNode['parentNodeName']).tag("Type", "Parent Node").field("TCP Latency", parentNode['tcpLatency'])
			queriesToSend.append(p)
	write_api.write(bucket=bucket, record=queriesToSend)
	print("Added " + str(len(queriesToSend)) + " points to InfluxDB.")
	client.close()
	#with open('statsByParentNode.json', 'w') as infile:
	#	json.dump(parentNodes, infile)
	#with open('statsByDevice.json', 'w') as infile:
	#	json.dump(devices, infile)
	endTime = datetime.now()
	durationSeconds = round((endTime - startTime).total_seconds())
	print("Graphs updated within " + str(durationSeconds) + " seconds.")

if __name__ == '__main__':
	refreshLatencyGraphs(10)
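As a quick way to check that the points written by graphBandwidth.py and graphLatency.py are landing in InfluxDB, the following is a minimal sketch, assuming the same ispConfig credentials and an InfluxDB 2.x instance; the Flux range and filter values are illustrative, not part of this commit.

from influxdb_client import InfluxDBClient
from ispConfig import influxDBBucket, influxDBOrg, influxDBtoken, influxDBurl

client = InfluxDBClient(url=influxDBurl, token=influxDBtoken, org=influxDBOrg)
query_api = client.query_api()
# Pull the last five minutes of per-device download bandwidth points
flux = f'''
from(bucket: "{influxDBBucket}")
  |> range(start: -5m)
  |> filter(fn: (r) => r._measurement == "Bandwidth" and r.Type == "Device" and r._field == "Download")
'''
for table in query_api.query(flux):
	for record in table.records:
		print(record.values.get("Device"), record.get_value())
client.close()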

v1.1/scheduled.py

@@ -2,7 +2,8 @@ import time
 import schedule
 from datetime import date
 from LibreQoS import refreshShapers
-from graph import refreshGraphs
+from graphBandwidth import refreshBandwidthGraphs
+from graphLatency import refreshLatencyGraphs
 from ispConfig import graphingEnabled
 
 if __name__ == '__main__':
@@ -12,7 +13,9 @@ if __name__ == '__main__':
 schedule.run_pending()
 if graphingEnabled:
 	try:
-		refreshGraphs()
+		refreshBandwidthGraphs()
+		refreshLatencyGraphs(10)
 	except:
 		print("Failed to update graphs")
 	time.sleep(15) # wait one minute
 else:
 	time.sleep(60) # wait x seconds
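Pieced together from the two hunks above, the scheduler's polling loop now looks roughly like the sketch below. The surrounding while loop and the recurring refreshShapers job are assumed from the unchanged parts of the file; the schedule time shown is a placeholder, not something this diff specifies.

import time
import schedule
from LibreQoS import refreshShapers
from graphBandwidth import refreshBandwidthGraphs
from graphLatency import refreshLatencyGraphs
from ispConfig import graphingEnabled

if __name__ == '__main__':
	# Assumed: refreshShapers re-runs on a fixed schedule (exact time not shown in this diff)
	schedule.every().day.at("04:00").do(refreshShapers)
	while True:
		schedule.run_pending()
		if graphingEnabled:
			try:
				refreshBandwidthGraphs()
				# Sample latency with pping for 10 seconds per pass
				refreshLatencyGraphs(10)
			except:
				print("Failed to update graphs")
			time.sleep(15)
		else:
			time.sleep(60)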