From f387e069eeb061020149c1f144bd24bd6676fcb7 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Robert=20Chac=C3=B3n?=
Date: Wed, 3 Jan 2024 16:53:54 -0700
Subject: [PATCH 1/7] Update README.md

---
 README.md | 13 ++++++++++++-
 1 file changed, 12 insertions(+), 1 deletion(-)

diff --git a/README.md b/README.md
index a7a36204..f82b8ba7 100644
--- a/README.md
+++ b/README.md
@@ -6,6 +6,8 @@ Servers running LibreQoS can shape traffic for many thousands of customers.
 
 Learn more at [LibreQoS.io](https://libreqos.io/)!
 
+LibreQoS
+
 ## Sponsors
 
 LibreQoS' development is made possible by our sponsors, the NLnet Foundation and Equinix.
@@ -27,4 +29,13 @@ Please support the continued development of LibreQoS by sponsoring us via [GitHu
 
 Our Matrix chat channel is available at [https://matrix.to/#/#libreqos:matrix.org](https://matrix.to/#/#libreqos:matrix.org).
 
-LibreQoS
+## Long-Term Stats (LTS)
+
+Long-Term Stats (LTS) is an analytics service built for LibreQoS that revolutionizes the way you track and analyze your network.
+With flexible time window views ranging from 5 minutes to 1 month, LTS gives you comprehensive insights into your network's performance.
+Built from the ground-up for performance and efficiency, LTS greatly outperforms our original InfluxDB plugin, and gives you rapidly rendered data to help you maximize your network perfromance.
+Enjoy a free month trial to see how LTS can deliver for your ISP.
+
+To get started, [update to v1.4](https://libreqos.readthedocs.io/en/latest/docs/Updates/update.html) and click "Start Stats Free Trial" in the top right corner of the LibreQoS WebUI.
+LibreQoS Long Term Stats
+

From 54440ceba5d0fcbe66f7dea7547c7bf334824200 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Robert=20Chac=C3=B3n?=
Date: Wed, 3 Jan 2024 21:01:40 -0700
Subject: [PATCH 2/7] Update README.md

---
 README.md | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/README.md b/README.md
index f82b8ba7..81fbf119 100644
--- a/README.md
+++ b/README.md
@@ -34,8 +34,9 @@ Our Matrix chat channel is available at [https://matrix.to/#/#libreqos:matrix.or
 
 Long-Term Stats (LTS) is an analytics service built for LibreQoS that revolutionizes the way you track and analyze your network.
 With flexible time window views ranging from 5 minutes to 1 month, LTS gives you comprehensive insights into your network's performance.
 Built from the ground-up for performance and efficiency, LTS greatly outperforms our original InfluxDB plugin, and gives you rapidly rendered data to help you maximize your network perfromance.
-Enjoy a free month trial to see how LTS can deliver for your ISP.
 
-To get started, [update to v1.4](https://libreqos.readthedocs.io/en/latest/docs/Updates/update.html) and click "Start Stats Free Trial" in the top right corner of the LibreQoS WebUI.
+We provide a free 30-day trial of LTS, after which the rate is $0.30 USD per shaped subscriber.
+You can enroll in the 30-day free trial by [upgrading to the latest version of LibreQoS v1.4](https://libreqos.readthedocs.io/en/latest/docs/Updates/update.html) and selecting "Start Stats Free Trial" in the top-right corner of the local LibreQoS WebUI.
+
 LibreQoS Long Term Stats
From 623a9acd0d07a070c783b6d9d92c93382ca08289 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Robert=20Chac=C3=B3n?=
Date: Wed, 3 Jan 2024 21:08:58 -0700
Subject: [PATCH 3/7] Update README.md

---
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.md b/README.md
index 81fbf119..619b3acb 100644
--- a/README.md
+++ b/README.md
@@ -33,7 +33,7 @@ Our Matrix chat channel is available at [https://matrix.to/#/#libreqos:matrix.or
 
 Long-Term Stats (LTS) is an analytics service built for LibreQoS that revolutionizes the way you track and analyze your network.
 With flexible time window views ranging from 5 minutes to 1 month, LTS gives you comprehensive insights into your network's performance.
-Built from the ground-up for performance and efficiency, LTS greatly outperforms our original InfluxDB plugin, and gives you rapidly rendered data to help you maximize your network perfromance.
+Built from the ground-up for performance and efficiency, LTS greatly outperforms our original InfluxDB plugin, and gives you rapidly rendered data to help you maximize your network performance.
 
 We provide a free 30-day trial of LTS, after which the rate is $0.30 USD per shaped subscriber.
 You can enroll in the 30-day free trial by [upgrading to the latest version of LibreQoS v1.4](https://libreqos.readthedocs.io/en/latest/docs/Updates/update.html) and selecting "Start Stats Free Trial" in the top-right corner of the local LibreQoS WebUI.

From 0099bf9f38af2bf5b1422c7ace0765c8ad1998c4 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Robert=20Chac=C3=B3n?=
Date: Fri, 5 Jan 2024 17:11:10 -0700
Subject: [PATCH 4/7] Update ispConfig.example.py

---
 src/ispConfig.example.py | 11 +++++++++++
 1 file changed, 11 insertions(+)

diff --git a/src/ispConfig.example.py b/src/ispConfig.example.py
index 3c2367b0..7e76ccc7 100644
--- a/src/ispConfig.example.py
+++ b/src/ispConfig.example.py
@@ -83,6 +83,17 @@ powercode_api_key = ''
 # Everything before :444/api/ in your Powercode instance URL
 powercode_api_url = ''
 
+# Sonar Integration
+automaticImportSonar = False
+sonar_api_key = ''
+sonar_api_url = '' # ex 'https://company.sonar.software/api/graphql'
+# If there are radios in these lists, we will try to get the clients using snmp. This requires snmpwalk to be installed on the server. You can use "sudo apt-get install snmp" for that. You will also need to fill in the snmp_community.
+sonar_airmax_ap_model_ids = [] # ex ['29','43']
+sonar_ltu_ap_model_ids = [] # ex ['4']
+snmp_community = ''
+# This is for all account statuses where we should be applying QoS. If you leave it blank, we'll use any account status marked with "Activates Account" in Sonar.
+sonar_active_status_ids = []
+
 # Splynx Integration
 automaticImportSplynx = False
 splynx_api_key = ''
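A note on the Sonar settings introduced above: `sonar_api_url` points at Sonar's GraphQL endpoint, so it can be worth confirming that the URL and API key work before enabling `automaticImportSonar`. The sketch below is illustrative only and is not part of the patch; the Bearer-token `Authorization` header, the example query fields, and the use of the `requests` package are assumptions to adapt to your Sonar instance.

```python
# Hypothetical connectivity check for the Sonar settings above (not part of this patch).
# Assumes: the requests package is installed, Sonar's GraphQL API accepts a Bearer token
# in the Authorization header, and an "accounts" query exists in your Sonar schema.
import requests

from ispConfig import sonar_api_key, sonar_api_url

query = "{ accounts { entities { id name } } }"  # illustrative query; adjust to your schema

response = requests.post(
    sonar_api_url,
    json={"query": query},
    headers={"Authorization": "Bearer " + sonar_api_key},
    timeout=10,
)
response.raise_for_status()
print(response.json())
```

If this prints account data rather than an authorization error, the same key and URL should be usable by the integration.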
From 1cc6a5464e1f8b7fd790a2a297eea0057d4d474f Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Robert=20Chac=C3=B3n?=
Date: Fri, 5 Jan 2024 17:15:32 -0700
Subject: [PATCH 5/7] Update integrations.md

---
 docs/TechnicalDocs/integrations.md | 35 ++++++++++++++++++++++++++++--
 1 file changed, 33 insertions(+), 2 deletions(-)

diff --git a/docs/TechnicalDocs/integrations.md b/docs/TechnicalDocs/integrations.md
index fcbc17d7..9d46eae6 100644
--- a/docs/TechnicalDocs/integrations.md
+++ b/docs/TechnicalDocs/integrations.md
@@ -14,7 +14,38 @@ On the first successful run, it will create a network.json and ShapedDevices.csv
 If a network.json file exists, it will not be overwritten.
 You can modify the network.json file to more accurately reflect bandwidth limits.
 ShapedDevices.csv will be overwritten every time the UISP integration is run.
-You have the option to run integrationUISP.py automatically on boot and every 30 minutes, which is recommended. This can be enabled by setting ```automaticImportUISP = True``` in ispConfig.py
+You have the option to run integrationUISP.py automatically on boot and every 10 minutes, which is recommended. This can be enabled by setting ```automaticImportUISP = True``` in ispConfig.py
+
+## Powercode Integration
+
+First, set the relevant parameters for Powercode (powercode_api_key, powercode_api_url, etc.) in ispConfig.py.
+
+To test the Powercode Integration, use
+
+```shell
+python3 integrationPowercode.py
+```
+
+On the first successful run, it will create a ShapedDevices.csv file.
+You can modify the network.json file manually to reflect Site/AP bandwidth limits.
+ShapedDevices.csv will be overwritten every time the Powercode integration is run.
+You have the option to run integrationPowercode.py automatically on boot and every 10 minutes, which is recommended. This can be enabled by setting ```automaticImportPowercode = True``` in ispConfig.py
+
+## Sonar Integration
+
+First, set the relevant parameters for Sonar (sonar_api_key, sonar_api_url, etc.) in ispConfig.py.
+
+To test the Sonar Integration, use
+
+```shell
+python3 integrationSonar.py
+```
+
+On the first successful run, it will create a ShapedDevices.csv file.
+If a network.json file exists, it will not be overwritten.
+You can modify the network.json file to more accurately reflect bandwidth limits.
+ShapedDevices.csv will be overwritten every time the Sonar integration is run.
+You have the option to run integrationSonar.py automatically on boot and every 10 minutes, which is recommended. This can be enabled by setting ```automaticImportSonar = True``` in ispConfig.py
 
 ## Splynx Integration
 
@@ -31,4 +62,4 @@ python3 integrationSplynx.py
 On the first successful run, it will create a ShapedDevices.csv file.
 You can manually create your network.json file to more accurately reflect bandwidth limits.
 ShapedDevices.csv will be overwritten every time the Splynx integration is run.
-You have the option to run integrationSplynx.py automatically on boot and every 30 minutes, which is recommended. This can be enabled by setting ```automaticImportSplynx = True``` in ispConfig.py
+You have the option to run integrationSplynx.py automatically on boot and every 10 minutes, which is recommended. This can be enabled by setting ```automaticImportSplynx = True``` in ispConfig.py
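The integration docs above repeatedly suggest editing network.json by hand to reflect Site/AP bandwidth limits. As a rough, hand-written illustration only (the key names are assumptions taken from the LibreQoS documentation's examples, not something this patch defines), a minimal one-site, one-AP hierarchy could be generated like this:

```python
# Minimal, hand-written network.json sketch: one site containing one AP.
# Key names (downloadBandwidthMbps, uploadBandwidthMbps, children) are assumed from
# the LibreQoS docs; compare against a network.json produced by your integration.
import json

network = {
    "Site_1": {
        "downloadBandwidthMbps": 1000,
        "uploadBandwidthMbps": 1000,
        "children": {
            "AP_A": {
                "downloadBandwidthMbps": 500,
                "uploadBandwidthMbps": 500,
                "children": {}
            }
        }
    }
}

with open("network.json", "w") as f:
    json.dump(network, f, indent=2)
```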
From 43fe3c2edf55047838910196000224b3b477b623 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Robert=20Chac=C3=B3n?=
Date: Sun, 7 Jan 2024 23:48:49 -0700
Subject: [PATCH 6/7] Update mikrotikFindIPv6.py

---
 src/mikrotikFindIPv6.py | 13 +++++++++----
 1 file changed, 9 insertions(+), 4 deletions(-)

diff --git a/src/mikrotikFindIPv6.py b/src/mikrotikFindIPv6.py
index 987d7661..79cda9f6 100644
--- a/src/mikrotikFindIPv6.py
+++ b/src/mikrotikFindIPv6.py
@@ -35,10 +35,15 @@ def pullMikrotikIPv6():
     list_binding6 = api.get_resource('/ipv6/dhcp-server/binding')
     entries = list_binding6.get()
     for entry in entries:
-        try:
-            clientAddressToIPv6[entry['client-address']] = entry['address']
-        except:
-            pass
+        if len(entry['duid']) == 14:
+            mac = entry['duid'][2:14].upper()
+            macNew = mac[0:2] + ':' + mac[2:4] + ':' + mac[4:6] + ':' + mac[6:8] + ':' + mac[8:10] + ':' + mac[10:12]
+            macToIPv6[macNew] = entry['address']
+        else:
+            try:
+                clientAddressToIPv6[entry['client-address']] = entry['address']
+            except:
+                pass
     list_neighbor6 = api.get_resource('/ipv6/neighbor')
     entries = list_neighbor6.get()
     for entry in entries:

From 8d663aa0094c90e050e5ffbc5db5114b969fe7cf Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Robert=20Chac=C3=B3n?=
Date: Mon, 8 Jan 2024 00:00:24 -0700
Subject: [PATCH 7/7] Update mikrotikFindIPv6.py

---
 src/mikrotikFindIPv6.py | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/src/mikrotikFindIPv6.py b/src/mikrotikFindIPv6.py
index 79cda9f6..557496eb 100644
--- a/src/mikrotikFindIPv6.py
+++ b/src/mikrotikFindIPv6.py
@@ -27,6 +27,13 @@ def pullMikrotikIPv6():
     #    pass
     list_arp4 = api.get_resource('/ip/arp')
     entries = list_arp4.get()
+    for entry in entries:
+        try:
+            macToIPv4[entry['mac-address']] = entry['address']
+        except:
+            pass
+    list_dhcp4 = api.get_resource('/ip/dhcp-server/lease')
+    entries = list_dhcp4.get()
     for entry in entries:
         try:
             macToIPv4[entry['mac-address']] = entry['address']
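For readers skimming the MikroTik changes above: patch 6 treats a 14-character DUID from `/ipv6/dhcp-server/binding` as a 2-character prefix followed by 12 hex digits of the client MAC, and re-inserts colons before storing the binding's IPv6 address under that MAC. Below is a standalone restatement of that same conversion; the helper name and the sample DUID value are hypothetical and for illustration only.

```python
# Restates the DUID-to-MAC conversion used inside pullMikrotikIPv6() in patch 6.
# duid_to_mac() is a hypothetical helper name; the patch inlines this logic.
def duid_to_mac(duid):
    """Return a colon-separated MAC for the 14-character DUID form, else None."""
    if len(duid) != 14:
        return None
    mac = duid[2:14].upper()  # drop the 2-character prefix, keep 12 hex digits
    return ":".join(mac[i:i + 2] for i in range(0, 12, 2))

print(duid_to_mac("0x2cc81b123456"))  # example value -> '2C:C8:1B:12:34:56'
```

Patch 7 then extends the IPv4 side in the same spirit, populating the MAC-to-IPv4 map from both the ARP table and the DHCP server leases so the two lookups can be joined later.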