Mirror of https://github.com/LibreQoE/LibreQoS.git
Merge pull request #84 from khandieyea/dev
minor tidy, but chunk `devices` and send to influx in groups. 20k Poi…
commit f24267fac6
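
In short, the diff below splits the `devices` list into fixed-size chunks and writes each chunk to InfluxDB separately, instead of accumulating every point into one request. A minimal sketch of that chunking step, reusing the chunk_list generator and the 200-device chunk size that appear in the diff (the device list itself is hypothetical):

# Sketch of the chunking pattern this commit introduces; only chunk_list
# is taken from the diff, the devices list is made up for illustration.
def chunk_list(l, n):
    for i in range(0, len(l), n):
        yield l[i:i + n]

devices = [{'hostname': 'device-' + str(i)} for i in range(20000)]
chunkedDevices = list(chunk_list(devices, 200))
print(len(chunkedDevices))     # 100 chunks
print(len(chunkedDevices[0]))  # 200 devices per chunk
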
@@ -20,64 +20,61 @@ def getInterfaceStats(interface):
     return jsonDict


+def chunk_list(l, n):
+    for i in range(0, len(l), n):
+        yield l[i:i + n]
+
+
 def getDeviceStats(devices):
     interfaces = [interfaceA, interfaceB]
-    for interface in interfaces:
-        tcShowResults = getInterfaceStats(interface)
-        if interface == interfaceA:
-            interfaceAjson = tcShowResults
-        else:
-            interfaceBjson = tcShowResults
+    ifaceStats = list(map(getInterfaceStats, interfaces))
+
     for device in devices:
         if 'timeQueried' in device:
             device['priorQueryTime'] = device['timeQueried']
-        for interface in interfaces:
-            if interface == interfaceA:
-                jsonVersion = interfaceAjson
-            else:
-                jsonVersion = interfaceBjson
-            element = jsonVersion[device['qdisc']] if device['qdisc'] in jsonVersion else False
+        for (interface, stats, dirSuffix) in zip(interfaces, ifaceStats, ['Download', 'Upload']):
+            element = stats[device['qdisc']] if device['qdisc'] in stats else False
             if element:
+                bytesSent = int(element['bytes'])
                 drops = int(element['drops'])
                 packets = int(element['packets'])
-                bytesSent = int(element['bytes'])
-                if interface == interfaceA:
-                    if 'bytesSentDownload' in device:
-                        device['priorQueryBytesDownload'] = device['bytesSentDownload']
-                    device['bytesSentDownload'] = bytesSent
-                else:
-                    if 'bytesSentUpload' in device:
-                        device['priorQueryBytesUpload'] = device['bytesSentUpload']
-                    device['bytesSentUpload'] = bytesSent
+                if 'bytesSent' + dirSuffix in device:
+                    device['priorQueryBytes' + dirSuffix] = device['bytesSent' + dirSuffix]
+                device['bytesSent' + dirSuffix] = bytesSent
+                if 'dropsSent' + dirSuffix in device:
+                    device['priorDropsSent' + dirSuffix] = device['dropsSent' + dirSuffix]
+                device['dropsSent' + dirSuffix] = drops
+                if 'packetsSent' + dirSuffix in device:
+                    device['priorPacketsSent' + dirSuffix] = device['packetsSent' + dirSuffix]
+                device['packetsSent' + dirSuffix] = packets

         device['timeQueried'] = datetime.now().isoformat()
     for device in devices:
+        device['bitsDownloadSinceLastQuery'] = device['bitsUploadSinceLastQuery'] = 0
         if 'priorQueryTime' in device:
             try:
                 bytesDLSinceLastQuery = device['bytesSentDownload'] - device['priorQueryBytesDownload']
                 bytesULSinceLastQuery = device['bytesSentUpload'] - device['priorQueryBytesUpload']
             except:
-                bytesDLSinceLastQuery = 0
-                bytesULSinceLastQuery = 0
+                bytesDLSinceLastQuery = bytesULSinceLastQuery = 0
             currentQueryTime = datetime.fromisoformat(device['timeQueried'])
             priorQueryTime = datetime.fromisoformat(device['priorQueryTime'])
-            delta = currentQueryTime - priorQueryTime
-            deltaSeconds = delta.total_seconds()
-            if deltaSeconds > 0:
-                bitsDownload = round((((bytesDLSinceLastQuery*8))/deltaSeconds))
-                bitsUpload = round((((bytesULSinceLastQuery*8))/deltaSeconds))
-            else:
-                bitsDownload = 0
-                bitsUpload = 0
-            device['bitsDownloadSinceLastQuery'] = bitsDownload
-            device['bitsUploadSinceLastQuery'] = bitsUpload
-        else:
-            device['bitsDownloadSinceLastQuery'] = 0
-            device['bitsUploadSinceLastQuery'] = 0
-    return (devices)
+            deltaSeconds = (currentQueryTime - priorQueryTime).total_seconds()
+            device['bitsDownloadSinceLastQuery'] = round(
+                ((bytesDLSinceLastQuery * 8) / deltaSeconds)) if deltaSeconds > 0 else 0
+            device['bitsUploadSinceLastQuery'] = round(
+                ((bytesULSinceLastQuery * 8) / deltaSeconds)) if deltaSeconds > 0 else 0
+
+    return devices


 def getParentNodeStats(parentNodes, devices):
     for parentNode in parentNodes:
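
For reference, a self-contained sketch of the refactored getDeviceStats() flow above: per-interface stats are gathered once, zipped with a 'Download'/'Upload' key suffix, and the byte-counter delta is converted to bits per second. The interface names, qdisc handle, stats values, and timestamps below are made up; in the real script they come from getInterfaceStats() and the configuration.

from datetime import datetime, timedelta

interfaces = ['eth0', 'eth1']  # stand-ins for interfaceA / interfaceB
ifaceStats = [                 # hypothetical parsed qdisc stats per interface
    {'1:5': {'bytes': 1000000, 'drops': 2, 'packets': 900}},  # download side
    {'1:5': {'bytes': 250000, 'drops': 0, 'packets': 400}},   # upload side
]
device = {'qdisc': '1:5',
          'bytesSentDownload': 400000, 'priorQueryBytesDownload': 0,
          'timeQueried': (datetime.now() - timedelta(seconds=10)).isoformat()}

# Same shape as the new loop: one pass per interface, suffixing keys with
# 'Download' or 'Upload' instead of branching on interfaceA / interfaceB.
device['priorQueryTime'] = device['timeQueried']
for (interface, stats, dirSuffix) in zip(interfaces, ifaceStats, ['Download', 'Upload']):
    element = stats[device['qdisc']] if device['qdisc'] in stats else False
    if element:
        if 'bytesSent' + dirSuffix in device:
            device['priorQueryBytes' + dirSuffix] = device['bytesSent' + dirSuffix]
        device['bytesSent' + dirSuffix] = int(element['bytes'])
device['timeQueried'] = datetime.now().isoformat()

# Counter delta -> bits per second, as in the tidied rate calculation.
deltaSeconds = (datetime.fromisoformat(device['timeQueried'])
                - datetime.fromisoformat(device['priorQueryTime'])).total_seconds()
bytesDLSinceLastQuery = device['bytesSentDownload'] - device['priorQueryBytesDownload']
device['bitsDownloadSinceLastQuery'] = round(
    (bytesDLSinceLastQuery * 8) / deltaSeconds) if deltaSeconds > 0 else 0
print(device['bitsDownloadSinceLastQuery'])  # ~480000 (bits/s over the ~10 s window)
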
@@ -92,6 +89,7 @@ def getParentNodeStats(parentNodes, devices):
         parentNode['bitsUploadSinceLastQuery'] = thisNodeBitsUpload
     return parentNodes

+
 def getParentNodeDict(data, depth, parentNodeNameDict):
     if parentNodeNameDict == None:
         parentNodeNameDict = {}
@@ -100,12 +98,13 @@ def getParentNodeDict(data, depth, parentNodeNameDict):
         if 'children' in data[elem]:
             for child in data[elem]['children']:
                 parentNodeNameDict[child] = elem
-            tempDict = getParentNodeDict(data[elem]['children'], depth+1, parentNodeNameDict)
+            tempDict = getParentNodeDict(data[elem]['children'], depth + 1, parentNodeNameDict)
             parentNodeNameDict = dict(parentNodeNameDict, **tempDict)
     return parentNodeNameDict

+
 def parentNodeNameDictPull():
-    #Load network heirarchy
+    # Load network heirarchy
     with open('network.json', 'r') as j:
         network = json.loads(j.read())
     parentNodeNameDict = getParentNodeDict(network, 0, None)
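
As context for the hunk above, a minimal sketch of how getParentNodeDict() flattens the network.json hierarchy into a child-to-parent lookup. Only part of the function appears in this diff, so the enclosing 'for elem in data' loop and the example hierarchy are assumptions:

def getParentNodeDict(data, depth, parentNodeNameDict):
    if parentNodeNameDict == None:
        parentNodeNameDict = {}
    for elem in data:  # assumed; this line sits just above the hunk shown
        if 'children' in data[elem]:
            for child in data[elem]['children']:
                parentNodeNameDict[child] = elem
            tempDict = getParentNodeDict(data[elem]['children'], depth + 1, parentNodeNameDict)
            parentNodeNameDict = dict(parentNodeNameDict, **tempDict)
    return parentNodeNameDict

network = {  # hypothetical network.json hierarchy
    'Site_A': {'children': {
        'AP_1': {'children': {'AP_1_Sector_1': {}}},
    }},
}
print(getParentNodeDict(network, 0, None))
# {'AP_1': 'Site_A', 'AP_1_Sector_1': 'AP_1'}
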
@@ -126,61 +125,83 @@ def refreshBandwidthGraphs():
     print("Computing parent node statistics")
     parentNodes = getParentNodeStats(parentNodes, devices)
     print("Writing data to InfluxDB")
-    bucket = influxDBBucket
-    org = influxDBOrg
-    token = influxDBtoken
-    url=influxDBurl
     client = InfluxDBClient(
-        url=url,
-        token=token,
-        org=org
+        url=influxDBurl,
+        token=influxDBtoken,
+        org=influxDBOrg
     )
     write_api = client.write_api(write_options=SYNCHRONOUS)

+    chunkedDevices = list(chunk_list(devices, 200))
+
+    queriesToSendCount = 0
+    for chunk in chunkedDevices:
+        queriesToSend = []
+        for device in chunk:
+            bitsDownload = int(device['bitsDownloadSinceLastQuery'])
+            bitsUpload = int(device['bitsUploadSinceLastQuery'])
+            if (bitsDownload > 0) and (bitsUpload > 0):
+                percentUtilizationDownload = round((bitsDownload / round(device['downloadMax'] * 1000000)), 4)
+                percentUtilizationUpload = round((bitsUpload / round(device['uploadMax'] * 1000000)), 4)
+
+                p = Point('Bandwidth').tag("Device", device['hostname']).tag("ParentNode", device['ParentNode']).tag(
+                    "Type", "Device").field("Download", bitsDownload)
+                queriesToSend.append(p)
+                p = Point('Bandwidth').tag("Device", device['hostname']).tag("ParentNode", device['ParentNode']).tag(
+                    "Type", "Device").field("Upload", bitsUpload)
+                queriesToSend.append(p)
+                p = Point('Utilization').tag("Device", device['hostname']).tag("ParentNode", device['ParentNode']).tag(
+                    "Type", "Device").field("Download", percentUtilizationDownload)
+                queriesToSend.append(p)
+                p = Point('Utilization').tag("Device", device['hostname']).tag("ParentNode", device['ParentNode']).tag(
+                    "Type", "Device").field("Upload", percentUtilizationUpload)
+                queriesToSend.append(p)
+
+        write_api.write(bucket=influxDBBucket, record=queriesToSend)
+        # print("Added " + str(len(queriesToSend)) + " points to InfluxDB.")
+        queriesToSendCount += len(queriesToSend)
+
     queriesToSend = []
-    for device in devices:
-        bitsDownload = int(device['bitsDownloadSinceLastQuery'])
-        bitsUpload = int(device['bitsUploadSinceLastQuery'])
-        if (bitsDownload > 0) and (bitsUpload > 0):
-            percentUtilizationDownload = round((bitsDownload / round(device['downloadMax']*1000000)),4)
-            percentUtilizationUpload = round((bitsUpload / round(device['uploadMax']*1000000)),4)
-
-            p = Point('Bandwidth').tag("Device", device['hostname']).tag("ParentNode", device['ParentNode']).tag("Type", "Device").field("Download", bitsDownload)
-            queriesToSend.append(p)
-            p = Point('Bandwidth').tag("Device", device['hostname']).tag("ParentNode", device['ParentNode']).tag("Type", "Device").field("Upload", bitsUpload)
-            queriesToSend.append(p)
-            p = Point('Utilization').tag("Device", device['hostname']).tag("ParentNode", device['ParentNode']).tag("Type", "Device").field("Download", percentUtilizationDownload)
-            queriesToSend.append(p)
-            p = Point('Utilization').tag("Device", device['hostname']).tag("ParentNode", device['ParentNode']).tag("Type", "Device").field("Upload", percentUtilizationUpload)
-            queriesToSend.append(p)
-
     for parentNode in parentNodes:
         bitsDownload = int(parentNode['bitsDownloadSinceLastQuery'])
         bitsUpload = int(parentNode['bitsUploadSinceLastQuery'])
         if (bitsDownload > 0) and (bitsUpload > 0):
-            percentUtilizationDownload = round((bitsDownload / round(parentNode['downloadMax']*1000000)),4)
-            percentUtilizationUpload = round((bitsUpload / round(parentNode['uploadMax']*1000000)),4)
+            percentUtilizationDownload = round((bitsDownload / round(parentNode['downloadMax'] * 1000000)), 4)
+            percentUtilizationUpload = round((bitsUpload / round(parentNode['uploadMax'] * 1000000)), 4)

-            p = Point('Bandwidth').tag("Device", parentNode['parentNodeName']).tag("ParentNode", parentNode['parentNodeName']).tag("Type", "Parent Node").field("Download", bitsDownload)
+            p = Point('Bandwidth').tag("Device", parentNode['parentNodeName']).tag("ParentNode",
+                                                                                   parentNode['parentNodeName']).tag(
+                "Type", "Parent Node").field("Download", bitsDownload)
             queriesToSend.append(p)
-            p = Point('Bandwidth').tag("Device", parentNode['parentNodeName']).tag("ParentNode", parentNode['parentNodeName']).tag("Type", "Parent Node").field("Upload", bitsUpload)
+            p = Point('Bandwidth').tag("Device", parentNode['parentNodeName']).tag("ParentNode",
+                                                                                   parentNode['parentNodeName']).tag(
+                "Type", "Parent Node").field("Upload", bitsUpload)
             queriesToSend.append(p)
-            p = Point('Utilization').tag("Device", parentNode['parentNodeName']).tag("ParentNode", parentNode['parentNodeName']).tag("Type", "Parent Node").field("Download", percentUtilizationDownload)
+            p = Point('Utilization').tag("Device", parentNode['parentNodeName']).tag("ParentNode",
+                                                                                     parentNode['parentNodeName']).tag(
+                "Type", "Parent Node").field("Download", percentUtilizationDownload)
             queriesToSend.append(p)
-            p = Point('Utilization').tag("Device", parentNode['parentNodeName']).tag("ParentNode", parentNode['parentNodeName']).tag("Type", "Parent Node").field("Upload", percentUtilizationUpload)
+            p = Point('Utilization').tag("Device", parentNode['parentNodeName']).tag("ParentNode",
+                                                                                     parentNode['parentNodeName']).tag(
+                "Type", "Parent Node").field("Upload", percentUtilizationUpload)
             queriesToSend.append(p)

-    write_api.write(bucket=bucket, record=queriesToSend)
-    print("Added " + str(len(queriesToSend)) + " points to InfluxDB.")
+    write_api.write(bucket=influxDBBucket, record=queriesToSend)
+    # print("Added " + str(len(queriesToSend)) + " points to InfluxDB.")
+    queriesToSendCount += len(queriesToSend)
+    print("Added " + str(queriesToSendCount) + " points to InfluxDB.")

     client.close()

     with open('statsByParentNode.json', 'w') as infile:
         json.dump(parentNodes, infile)

     with open('statsByDevice.json', 'w') as infile:
         json.dump(devices, infile)

     endTime = datetime.now()
-    durationSeconds = round((endTime - startTime).total_seconds(),2)
+    durationSeconds = round((endTime - startTime).total_seconds(), 2)
     print("Graphs updated within " + str(durationSeconds) + " seconds.")

 if __name__ == '__main__':
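
Putting the pieces of the last hunk together, a condensed sketch of the chunked write loop, using the same InfluxDBClient, Point, and SYNCHRONOUS calls that appear in the diff. The connection settings, bucket name, and device values are placeholders; the real script takes influxDBurl, influxDBtoken, influxDBOrg, and influxDBBucket from its configuration, and running this requires a reachable InfluxDB instance:

from influxdb_client import InfluxDBClient, Point
from influxdb_client.client.write_api import SYNCHRONOUS

def chunk_list(l, n):
    for i in range(0, len(l), n):
        yield l[i:i + n]

# Placeholder connection details and data.
client = InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org")
write_api = client.write_api(write_options=SYNCHRONOUS)
devices = [{'hostname': 'device-' + str(i), 'ParentNode': 'Site_A',
            'bitsDownloadSinceLastQuery': 5000000,
            'bitsUploadSinceLastQuery': 1000000} for i in range(20000)]

queriesToSendCount = 0
for chunk in chunk_list(devices, 200):  # 200 devices per write, as in the diff
    queriesToSend = []
    for device in chunk:
        p = Point('Bandwidth').tag("Device", device['hostname']).tag(
            "ParentNode", device['ParentNode']).tag("Type", "Device").field(
            "Download", device['bitsDownloadSinceLastQuery'])
        queriesToSend.append(p)
        p = Point('Bandwidth').tag("Device", device['hostname']).tag(
            "ParentNode", device['ParentNode']).tag("Type", "Device").field(
            "Upload", device['bitsUploadSinceLastQuery'])
        queriesToSend.append(p)
    # One write per 200-device chunk keeps each request small instead of
    # sending tens of thousands of points in a single call.
    write_api.write(bucket="libreqos", record=queriesToSend)
    queriesToSendCount += len(queriesToSend)

print("Added " + str(queriesToSendCount) + " points to InfluxDB.")
client.close()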