Re-add InfluxDB Support to the unified configuration.

This commit is contained in:
Herbert Wolverson
2024-02-04 21:29:02 -06:00
parent 1ee3543eb1
commit 3ab165a591
13 changed files with 688 additions and 594 deletions

File diff suppressed because it is too large Load Diff

View File

@@ -146,7 +146,7 @@ class NetworkGraph:
def addRawNode(self, node: NetworkNode) -> None:
# Adds a NetworkNode to the graph, unchanged.
# If a site is excluded (via excludedSites in ispConfig)
# If a site is excluded (via excludedSites in lqos.conf)
# it won't be added
if not node.displayName in self.excludeSites:
# TODO: Fixup exceptionCPE handling

View File

@@ -1,79 +1,81 @@
import csv
import os
import shutil
from datetime import datetime
print("Deprecated for now.")
from requests import get
# import csv
# import os
# import shutil
# from datetime import datetime
from ispConfig import automaticImportRestHttp as restconf
from pydash import objects
# from requests import get
requestsBaseConfig = {
'verify': True,
'headers': {
'accept': 'application/json'
}
}
# from ispConfig import automaticImportRestHttp as restconf
# from pydash import objects
# requestsBaseConfig = {
# 'verify': True,
# 'headers': {
# 'accept': 'application/json'
# }
# }
def createShaper():
# def createShaper():
# shutil.copy('Shaper.csv', 'Shaper.csv.bak')
ts = datetime.now().strftime('%Y-%m-%d.%H-%M-%S')
# # shutil.copy('Shaper.csv', 'Shaper.csv.bak')
# ts = datetime.now().strftime('%Y-%m-%d.%H-%M-%S')
devicesURL = restconf.get('baseURL') + '/' + restconf.get('devicesURI').strip('/')
# devicesURL = restconf.get('baseURL') + '/' + restconf.get('devicesURI').strip('/')
requestConfig = objects.defaults_deep({'params': {}}, restconf.get('requestsConfig'), requestsBaseConfig)
# requestConfig = objects.defaults_deep({'params': {}}, restconf.get('requestsConfig'), requestsBaseConfig)
raw = get(devicesURL, **requestConfig, timeout=10)
# raw = get(devicesURL, **requestConfig, timeout=10)
if raw.status_code != 200:
print('Failed to request ' + devicesURL + ', got ' + str(raw.status_code))
return False
# if raw.status_code != 200:
# print('Failed to request ' + devicesURL + ', got ' + str(raw.status_code))
# return False
devicesCsvFP = os.path.dirname(os.path.realpath(__file__)) + '/ShapedDevices.csv'
# devicesCsvFP = os.path.dirname(os.path.realpath(__file__)) + '/ShapedDevices.csv'
with open(devicesCsvFP, 'w') as csvfile:
wr = csv.writer(csvfile, quoting=csv.QUOTE_ALL)
wr.writerow(
['Circuit ID', 'Circuit Name', 'Device ID', 'Device Name', 'Parent Node', 'MAC', 'IPv4', 'IPv6',
'Download Min Mbps', 'Upload Min Mbps', 'Download Max Mbps', 'Upload Max Mbps', 'Comment'])
for row in raw.json():
wr.writerow(row.values())
# with open(devicesCsvFP, 'w') as csvfile:
# wr = csv.writer(csvfile, quoting=csv.QUOTE_ALL)
# wr.writerow(
# ['Circuit ID', 'Circuit Name', 'Device ID', 'Device Name', 'Parent Node', 'MAC', 'IPv4', 'IPv6',
# 'Download Min Mbps', 'Upload Min Mbps', 'Download Max Mbps', 'Upload Max Mbps', 'Comment'])
# for row in raw.json():
# wr.writerow(row.values())
if restconf['logChanges']:
devicesBakFilePath = restconf['logChanges'].rstrip('/') + '/ShapedDevices.' + ts + '.csv'
try:
shutil.copy(devicesCsvFP, devicesBakFilePath)
except:
os.makedirs(restconf['logChanges'], exist_ok=True)
shutil.copy(devicesCsvFP, devicesBakFilePath)
# if restconf['logChanges']:
# devicesBakFilePath = restconf['logChanges'].rstrip('/') + '/ShapedDevices.' + ts + '.csv'
# try:
# shutil.copy(devicesCsvFP, devicesBakFilePath)
# except:
# os.makedirs(restconf['logChanges'], exist_ok=True)
# shutil.copy(devicesCsvFP, devicesBakFilePath)
networkURL = restconf['baseURL'] + '/' + restconf['networkURI'].strip('/')
# networkURL = restconf['baseURL'] + '/' + restconf['networkURI'].strip('/')
raw = get(networkURL, **requestConfig, timeout=10)
# raw = get(networkURL, **requestConfig, timeout=10)
if raw.status_code != 200:
print('Failed to request ' + networkURL + ', got ' + str(raw.status_code))
return False
# if raw.status_code != 200:
# print('Failed to request ' + networkURL + ', got ' + str(raw.status_code))
# return False
networkJsonFP = os.path.dirname(os.path.realpath(__file__)) + '/network.json'
# networkJsonFP = os.path.dirname(os.path.realpath(__file__)) + '/network.json'
with open(networkJsonFP, 'w') as handler:
handler.write(raw.text)
# with open(networkJsonFP, 'w') as handler:
# handler.write(raw.text)
if restconf['logChanges']:
networkBakFilePath = restconf['logChanges'].rstrip('/') + '/network.' + ts + '.json'
try:
shutil.copy(networkJsonFP, networkBakFilePath)
except:
os.makedirs(restconf['logChanges'], exist_ok=True)
shutil.copy(networkJsonFP, networkBakFilePath)
# if restconf['logChanges']:
# networkBakFilePath = restconf['logChanges'].rstrip('/') + '/network.' + ts + '.json'
# try:
# shutil.copy(networkJsonFP, networkBakFilePath)
# except:
# os.makedirs(restconf['logChanges'], exist_ok=True)
# shutil.copy(networkJsonFP, networkBakFilePath)
def importFromRestHttp():
createShaper()
# def importFromRestHttp():
# createShaper()
if __name__ == '__main__':
importFromRestHttp()
# if __name__ == '__main__':
# importFromRestHttp()

View File

@@ -4,7 +4,6 @@ import requests
import subprocess
from liblqos_python import sonar_api_key, sonar_api_url, snmp_community, sonar_airmax_ap_model_ids, \
sonar_ltu_ap_model_ids, sonar_active_status_ids
#from ispConfig import sonar_airmax_ap_model_ids,sonar_active_status_ids,sonar_ltu_ap_model_ids
all_models = sonar_airmax_ap_model_ids() + sonar_ltu_ap_model_ids()
from integrationCommon import NetworkGraph, NetworkNode, NodeType
from multiprocessing.pool import ThreadPool
@@ -120,7 +119,7 @@ def getSitesAndAps():
}
sites_and_aps = sonarRequest(query,variables)
# This should only return sites that have equipment on them that is in the list sonar_ubiquiti_ap_model_ids in ispConfig.py
# This should only return sites that have equipment on them that is in the list sonar_ubiquiti_ap_model_ids in lqos.conf
sites = []
aps = []
for site in sites_and_aps:

View File

@@ -93,3 +93,10 @@ snmp_community = "public"
airmax_model_ids = [ "" ]
ltu_model_ids = [ "" ]
active_status_ids = [ "" ]
[influxdb]
enable_influxdb = false
url = "http://localhost:8086"
org = "Your ISP Name Here"
bucket = "libreqos"
token = ""

View File

@@ -309,7 +309,7 @@ mod test {
fn round_trip_toml() {
    // Parse the shipped example configuration and re-serialize it.
    // Compare with trim(): toml_edit preserves the document's content and
    // formatting, but trailing whitespace/newlines may legitimately differ
    // between the source text and the re-serialized output.
    let doc = EXAMPLE_LQOS_CONF.parse::<toml_edit::Document>().unwrap();
    let reserialized = doc.to_string();
    // The old exact-equality assertion was removed: it contradicts the
    // trim-based comparison and fails on trailing-whitespace differences.
    assert_eq!(EXAMPLE_LQOS_CONF.trim(), reserialized.trim());
}
#[test]

View File

@@ -89,6 +89,7 @@ fn do_migration_14_to_15(
migrate_powercode(python_config, &mut new_config)?;
migrate_sonar(python_config, &mut new_config)?;
migrate_queues( python_config, &mut new_config)?;
migrate_influx(python_config, &mut new_config)?;
new_config.validate().unwrap(); // Left as an unwrap because this should *never* happen
Ok(new_config)
@@ -272,6 +273,18 @@ fn migrate_uisp(
Ok(())
}
/// Copies the InfluxDB settings captured from the legacy `ispConfig.py`
/// into the unified `lqos.conf` configuration's `[influxdb]` section.
fn migrate_influx(
    python_config: &PythonMigration,
    new_config: &mut Config,
) -> Result<(), MigrationError> {
    new_config.influxdb.enable_influxdb = python_config.influx_db_enabled;
    new_config.influxdb.url = python_config.influx_db_url.clone();
    // NOTE(review): `infux_db_bucket` (missing the 'l') matches the field
    // name as declared on PythonMigration — rename there first if fixing.
    new_config.influxdb.bucket = python_config.infux_db_bucket.clone();
    new_config.influxdb.org = python_config.influx_db_org.clone();
    new_config.influxdb.token = python_config.influx_db_token.clone();
    Ok(())
}
#[cfg(test)]
mod test {
use super::*;

View File

@@ -182,6 +182,13 @@ impl PythonMigration {
cfg.sonar_airmax_ap_model_ids = from_python(&py, "sonar_airmax_ap_model_ids").unwrap_or(vec![]);
cfg.sonar_ltu_ap_model_ids = from_python(&py, "sonar_ltu_ap_model_ids").unwrap_or(vec![]);
// InfluxDB
cfg.influx_db_enabled = from_python(&py, "influxDBEnabled").unwrap_or(false);
cfg.influx_db_url = from_python(&py, "influxDBurl").unwrap_or("http://localhost:8086".to_string());
cfg.infux_db_bucket = from_python(&py, "influxDBBucket").unwrap_or("libreqos".to_string());
cfg.influx_db_org = from_python(&py, "influxDBOrg").unwrap_or("Your ISP Name Here".to_string());
cfg.influx_db_token = from_python(&py, "influxDBtoken").unwrap_or("".to_string());
Ok(())
}

View File

@@ -93,3 +93,10 @@ snmp_community = "public"
airmax_model_ids = [ "" ]
ltu_model_ids = [ "" ]
active_status_ids = [ "" ]
[influxdb]
enable_influxdb = false
url = "http://localhost:8086"
org = "Your ISP Name Here"
bucket = "libreqos"
token = ""

View File

@@ -0,0 +1,22 @@
use serde::{Serialize, Deserialize};
/// Configuration for exporting statistics to an InfluxDB server
/// (the `[influxdb]` section of `lqos.conf`).
#[derive(Clone, Serialize, Deserialize, Debug)]
pub struct InfluxDbConfig {
    /// If true, statistics are submitted to InfluxDB.
    pub enable_influxdb: bool,
    /// Base URL of the InfluxDB server.
    pub url: String,
    /// Bucket to write datapoints into.
    pub bucket: String,
    /// InfluxDB organization name.
    pub org: String,
    /// API token used to authenticate with InfluxDB.
    pub token: String,
}

impl Default for InfluxDbConfig {
    fn default() -> Self {
        Self {
            enable_influxdb: false,
            url: "http://localhost:8086".to_string(),
            bucket: "libreqos".to_string(),
            // "Your ISP Name Here" matches the ispConfig.py migration
            // default for influxDBOrg (was inconsistently "Your ISP Name").
            org: "Your ISP Name Here".to_string(),
            token: String::new(),
        }
    }
}

View File

@@ -13,6 +13,7 @@ mod spylnx_integration;
mod uisp_integration;
mod powercode_integration;
mod sonar_integration;
mod influxdb;
pub use bridge::*;
pub use long_term_stats::LongTermStats;
pub use tuning::Tunables;

View File

@@ -65,6 +65,9 @@ pub struct Config {
/// Sonar Integration
pub sonar_integration: super::sonar_integration::SonarIntegration,
/// InfluxDB Configuration
pub influxdb: super::influxdb::InfluxDbConfig,
}
impl Config {
@@ -127,6 +130,7 @@ impl Default for Config {
uisp_integration: super::uisp_integration::UispIntegration::default(),
powercode_integration: super::powercode_integration::PowercodeIntegration::default(),
sonar_integration: super::sonar_integration::SonarIntegration::default(),
influxdb: super::influxdb::InfluxDbConfig::default(),
packet_capture_time: 10,
queue_check_period_ms: 1000,
}

View File

@@ -81,6 +81,11 @@ fn liblqos_python(_py: Python, m: &PyModule) -> PyResult<()> {
m.add_wrapped(wrap_pyfunction!(sonar_airmax_ap_model_ids))?;
m.add_wrapped(wrap_pyfunction!(sonar_ltu_ap_model_ids))?;
m.add_wrapped(wrap_pyfunction!(sonar_active_status_ids))?;
m.add_wrapped(wrap_pyfunction!(influx_db_enabled))?;
m.add_wrapped(wrap_pyfunction!(influx_db_bucket))?;
m.add_wrapped(wrap_pyfunction!(influx_db_org))?;
m.add_wrapped(wrap_pyfunction!(influx_db_token))?;
m.add_wrapped(wrap_pyfunction!(influx_db_url))?;
Ok(())
}
@@ -603,3 +608,33 @@ fn sonar_active_status_ids() -> PyResult<Vec<String>> {
let config = lqos_config::load_config().unwrap();
Ok(config.sonar_integration.active_status_ids)
}
/// Python binding: whether InfluxDB export is enabled
/// (`[influxdb].enable_influxdb` in `lqos.conf`).
#[pyfunction]
fn influx_db_enabled() -> PyResult<bool> {
    Ok(lqos_config::load_config().unwrap().influxdb.enable_influxdb)
}
/// Python binding: the InfluxDB bucket name
/// (`[influxdb].bucket` in `lqos.conf`).
#[pyfunction]
fn influx_db_bucket() -> PyResult<String> {
    Ok(lqos_config::load_config().unwrap().influxdb.bucket)
}
/// Python binding: the InfluxDB organization name
/// (`[influxdb].org` in `lqos.conf`).
#[pyfunction]
fn influx_db_org() -> PyResult<String> {
    Ok(lqos_config::load_config().unwrap().influxdb.org)
}
/// Python binding: the InfluxDB API token
/// (`[influxdb].token` in `lqos.conf`).
#[pyfunction]
fn influx_db_token() -> PyResult<String> {
    Ok(lqos_config::load_config().unwrap().influxdb.token)
}
/// Python binding: the InfluxDB server URL
/// (`[influxdb].url` in `lqos.conf`).
#[pyfunction]
fn influx_db_url() -> PyResult<String> {
    Ok(lqos_config::load_config().unwrap().influxdb.url)
}