Merge pull request #200 from LibreQoE/v1.4-pre-alpha-rust-integration

reforked schedulerAdvanced, plus a few other bits
Authored by Robert Chacón on 2023-01-16 01:02:11 -07:00; committed via GitHub.
4 changed files with 178 additions and 10 deletions


@@ -29,7 +29,7 @@ git checkout v1.4-pre-alpha-rust-integration
You need to have a few packages from `apt` installed:
```
-apt-get install -y python3-pip clang gcc gcc-multilib llvm libelf-dev git nano graphviz curl screen llvm pkg-config linux-tools-common linux-tools-`uname r` libbpf-dev
+apt-get install -y python3-pip clang gcc gcc-multilib llvm libelf-dev git nano graphviz curl screen llvm pkg-config linux-tools-common linux-tools-`uname -r` libbpf-dev
```
Then you need to install some Python dependencies:

src/integrationRestHttp.py Normal file

@@ -0,0 +1,79 @@
import csv
import os
import shutil
from datetime import datetime

from requests import get
from ispConfig import automaticImportRestHttp as restconf
from pydash import objects

# Defaults merged underneath the user-supplied requestsConfig from ispConfig.
requestsBaseConfig = {
    'verify': True,
    'headers': {
        'accept': 'application/json'
    }
}


def createShaper():
    # shutil.copy('Shaper.csv', 'Shaper.csv.bak')
    ts = datetime.now().strftime('%Y-%m-%d.%H-%M-%S')

    # Fetch the device list from the REST endpoint.
    devicesURL = restconf.get('baseURL') + '/' + restconf.get('devicesURI').strip('/')
    requestConfig = objects.defaults_deep({'params': {}}, restconf.get('requestsConfig'), requestsBaseConfig)
    raw = get(devicesURL, **requestConfig)
    if raw.status_code != 200:
        print('Failed to request ' + devicesURL + ', got ' + str(raw.status_code))
        return False

    # Write the devices out as ShapedDevices.csv next to this script.
    # Each JSON row's values must already be in the same order as the header below.
    devicesCsvFP = os.path.dirname(os.path.realpath(__file__)) + '/ShapedDevices.csv'
    with open(devicesCsvFP, 'w') as csvfile:
        wr = csv.writer(csvfile, quoting=csv.QUOTE_ALL)
        wr.writerow(
            ['Circuit ID', 'Circuit Name', 'Device ID', 'Device Name', 'Parent Node', 'MAC', 'IPv4', 'IPv6',
             'Download Min Mbps', 'Upload Min Mbps', 'Download Max Mbps', 'Upload Max Mbps', 'Comment'])
        for row in raw.json():
            wr.writerow(row.values())

    if restconf['logChanges']:
        # Keep a timestamped backup, creating the log directory on first use.
        devicesBakFilePath = restconf['logChanges'].rstrip('/') + '/ShapedDevices.' + ts + '.csv'
        os.makedirs(restconf['logChanges'], exist_ok=True)
        shutil.copy(devicesCsvFP, devicesBakFilePath)

    # Fetch the network topology and write it out verbatim as network.json.
    networkURL = restconf['baseURL'] + '/' + restconf['networkURI'].strip('/')
    raw = get(networkURL, **requestConfig)
    if raw.status_code != 200:
        print('Failed to request ' + networkURL + ', got ' + str(raw.status_code))
        return False
    networkJsonFP = os.path.dirname(os.path.realpath(__file__)) + '/network.json'
    with open(networkJsonFP, 'w') as handler:
        handler.write(raw.text)

    if restconf['logChanges']:
        networkBakFilePath = restconf['logChanges'].rstrip('/') + '/network.' + ts + '.json'
        os.makedirs(restconf['logChanges'], exist_ok=True)
        shutil.copy(networkJsonFP, networkBakFilePath)


def importFromRestHttp():
    createShaper()


if __name__ == '__main__':
    importFromRestHttp()
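One wrinkle worth noting: this importer reads its settings from `automaticImportRestHttp` in ispConfig, while the config block added to ispConfig below is named `httpRestIntegrationConfig` (which is what the scheduler checks). The sketch below is a hypothetical ispConfig entry under the name the importer expects, with keys inferred from the script's own `restconf` accesses; the URLs and paths are placeholders, not shipped defaults.

```
# Hypothetical ispConfig entry, inferred from integrationRestHttp.py's accesses.
automaticImportRestHttp = {
    'baseURL': 'https://crm.example.net',  # assumed CRM REST root
    'devicesURI': '/api/devices',          # must return JSON rows whose values match the CSV header order
    'networkURI': '/api/network',          # must return the network.json document as-is
    'requestsConfig': {
        'verify': True,
        'params': {},
    },
    'logChanges': '',  # set to a directory path to keep timestamped backups; falsy disables them
}
```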

src/ispConfig.py

@@ -23,8 +23,8 @@ interfaceA = 'eth1'
# Interface connected to edge router
interfaceB = 'eth2'
-## WORK IN PROGRESS. Note that interfaceA determines the "stick" interface
-## I could only get scanning to work if I issued ethtool -K enp1s0f1 rxvlan off
+# WORK IN PROGRESS. Note that interfaceA determines the "stick" interface
+# I could only get scanning to work if I issued ethtool -K enp1s0f1 rxvlan off
OnAStick = False
# VLAN facing the core router
StickVlanA = 0
@@ -38,7 +38,8 @@ enableActualShellCommands = True
# Add 'sudo' before execution of any shell commands. May be required depending on distribution and environment.
runShellCommandsAsSudo = False
-# Allows overriding queues / CPU cores used. When set to 0, the max possible queues / CPU cores are utilized. Please leave as 0.
+# Allows overriding queues / CPU cores used. When set to 0, the max possible queues / CPU cores are utilized. Please
+# leave as 0.
queuesAvailableOverride = 0
# Some networks are flat - where there are no Parent Nodes defined in ShapedDevices.csv
@@ -83,19 +84,41 @@ uispSite = ''
uispStrategy = "full"
# List any sites that should not be included, with each site name surrounded by '' and separated by commas
excludeSites = []
-# If you use IPv6, this can be used to find associated IPv6 prefixes for your clients' IPv4 addresses, and match them to those devices
+# If you use IPv6, this can be used to find associated IPv6 prefixes for your clients' IPv4 addresses, and match them
+# to those devices
findIPv6usingMikrotik = False
-# If you want to provide a safe cushion for speed test results to prevent customer complains, you can set this to 1.15 (15% above plan rate).
-# If not, you can leave as 1.0
+# If you want to provide a safe cushion for speed test results to prevent customer complaints, you can set this to
+# 1.15 (15% above plan rate). If not, you can leave as 1.0
bandwidthOverheadFactor = 1.0
# For edge cases, set the respective ParentNode for these CPEs
exceptionCPEs = {}
-# 'CPE-SomeLocation1': 'AP-SomeLocation1',
-# 'CPE-SomeLocation2': 'AP-SomeLocation2',
-#}
+# exceptionCPEs = {
+#     'CPE-SomeLocation1': 'AP-SomeLocation1',
+#     'CPE-SomeLocation2': 'AP-SomeLocation2',
+# }
+# API Auth
+apiUsername = "testUser"
+apiPassword = "changeme8343486806"
+apiHostIP = "127.0.0.1"
+apiHostPost = 5000
+httpRestIntegrationConfig = {
+    'enabled': False,
+    'baseURL': 'https://domain',
+    'networkURI': '/some/path',
+    'shaperURI': '/some/path/etc',
+    'requestsConfig': {
+        'verify': True,  # Set False in dev if your environment doesn't have a valid cert
+        'params': {  # params for the query string, i.e. uri?some-arg=some-value
+            'search': 'hold-my-beer'
+        },
+        # 'headers': {
+        #     'Origin': 'SomeHeaderValue',
+        # },
+    },
+    # If you want to store a timestamped copy/backup of both network.json and ShapedDevices.csv each time they are
+    # updated, provide a path
+    # 'logChanges': '/var/log/libreqos'
+}
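The importer merges this `requestsConfig` over its built-in `requestsBaseConfig` using pydash's `objects.defaults_deep`, where earlier arguments take precedence. A minimal sketch of that merge behavior (values here are illustrative only):

```
from pydash import objects

base = {'verify': True, 'headers': {'accept': 'application/json'}}
user = {'verify': False, 'params': {'search': 'hold-my-beer'}}

# Earlier sources win: user-supplied keys override the defaults,
# and anything the user omits is filled in from base.
merged = objects.defaults_deep({}, user, base)
print(merged)
# {'verify': False, 'params': {'search': 'hold-my-beer'},
#  'headers': {'accept': 'application/json'}}
```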

src/schedulerAdvanced.py Normal file, 66 additions

@@ -0,0 +1,66 @@
import time

from LibreQoS import refreshShapers, refreshShapersUpdateOnly
from graphInfluxDB import refreshBandwidthGraphs, refreshLatencyGraphs
from ispConfig import influxDBEnabled, automaticImportUISP, automaticImportSplynx, httpRestIntegrationConfig

# Only import the integrations that are enabled in ispConfig.
if automaticImportUISP:
    from integrationUISP import importFromUISP
if automaticImportSplynx:
    from integrationSplynx import importFromSplynx
if httpRestIntegrationConfig['enabled']:
    from integrationRestHttp import importFromRestHttp

from apscheduler.schedulers.blocking import BlockingScheduler

ads = BlockingScheduler()


def importFromCRM():
    # Pull subscriber data from whichever CRM integration is enabled.
    if automaticImportUISP:
        try:
            importFromUISP()
        except Exception as e:
            print("Failed to import from UISP: " + str(e))
    elif automaticImportSplynx:
        try:
            importFromSplynx()
        except Exception as e:
            print("Failed to import from Splynx: " + str(e))
    elif httpRestIntegrationConfig['enabled']:
        try:
            importFromRestHttp()
        except Exception as e:
            print("Failed to import from RestHttp: " + str(e))


def importAndShapeFullReload():
    importFromCRM()
    refreshShapers()


def importAndShapePartialReload():
    importFromCRM()
    refreshShapersUpdateOnly()


if __name__ == '__main__':
    importAndShapeFullReload()

    # schedule.every().day.at("04:00").do(importAndShapeFullReload)
    ads.add_job(importAndShapeFullReload, 'cron', hour=4)
    # schedule.every(30).minutes.do(importAndShapePartialReload)
    ads.add_job(importAndShapePartialReload, 'interval', minutes=30)
    if influxDBEnabled:
        # schedule.every(10).seconds.do(refreshBandwidthGraphs)
        ads.add_job(refreshBandwidthGraphs, 'interval', seconds=10)
        # schedule.every(30).seconds.do(refreshLatencyGraphs)
        ads.add_job(refreshLatencyGraphs, 'interval', seconds=30)
    # while True:
    #     schedule.run_pending()
    #     time.sleep(1)
    ads.start()
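`BlockingScheduler.start()` takes over the main thread and only returns when the scheduler stops, so the script runs until interrupted. A common APScheduler pattern, shown here as a sketch rather than as part of this commit, is to catch the interrupt and shut down cleanly:

```
try:
    ads.start()  # blocks the main thread until the scheduler is stopped
except (KeyboardInterrupt, SystemExit):
    ads.shutdown()  # wait for running jobs to finish before exiting
```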