Mirror of https://github.com/LibreQoE/LibreQoS.git (synced 2025-02-25 18:55:32 -06:00)
Fix the order of cake marks/drops in the tree. Add dashlet item to display total marks/drops.
This commit is contained in:
parent: 53b257f25b
commit: 8ca5b24d70
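The rename below (mark to marks) touches a shared counter type used throughout this commit. As a reading aid, here is a minimal sketch of what AtomicDownUp presumably looks like, inferred only from the calls visible in this diff (zeroed, set_to_zero, set_down/set_up, get_down/get_up); the real LibreQoS type may differ.

use std::sync::atomic::{AtomicU64, Ordering};

// Sketch only: an assumed shape for AtomicDownUp, not the actual LibreQoS definition.
pub struct AtomicDownUp {
    down: AtomicU64,
    up: AtomicU64,
}

impl AtomicDownUp {
    pub const fn zeroed() -> Self {
        Self { down: AtomicU64::new(0), up: AtomicU64::new(0) }
    }

    pub fn set_to_zero(&self) {
        self.down.store(0, Ordering::Relaxed);
        self.up.store(0, Ordering::Relaxed);
    }

    pub fn set_down(&self, value: u64) { self.down.store(value, Ordering::Relaxed); }
    pub fn set_up(&self, value: u64) { self.up.store(value, Ordering::Relaxed); }
    pub fn get_down(&self) -> u64 { self.down.load(Ordering::Relaxed) }
    pub fn get_up(&self) -> u64 { self.up.load(Ordering::Relaxed) }
}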
@@ -9,14 +9,14 @@ pub static TOTAL_QUEUE_STATS: TotalQueueStats = TotalQueueStats::new();
 
 pub struct TotalQueueStats {
     pub drops: AtomicDownUp,
-    pub mark: AtomicDownUp,
+    pub marks: AtomicDownUp,
 }
 
 impl TotalQueueStats {
     pub const fn new() -> Self {
         Self {
             drops: AtomicDownUp::zeroed(),
-            mark: AtomicDownUp::zeroed(),
+            marks: AtomicDownUp::zeroed(),
         }
     }
 }
@@ -31,7 +31,7 @@ pub struct QueueData {
 
 fn zero_total_queue_stats() {
     TOTAL_QUEUE_STATS.drops.set_to_zero();
-    TOTAL_QUEUE_STATS.mark.set_to_zero();
+    TOTAL_QUEUE_STATS.marks.set_to_zero();
 }
 
 #[derive(Debug)]
@@ -133,7 +133,7 @@ impl AllQueueData {
 
         TOTAL_QUEUE_STATS.drops.set_down(drops.down);
         TOTAL_QUEUE_STATS.drops.set_up(drops.up);
-        TOTAL_QUEUE_STATS.mark.set_down(marks.down);
-        TOTAL_QUEUE_STATS.mark.set_up(marks.up);
+        TOTAL_QUEUE_STATS.marks.set_down(marks.down);
+        TOTAL_QUEUE_STATS.marks.set_up(marks.up);
     }
 }
@@ -171,7 +171,7 @@ fn all_queue_reader() {
             (download, upload)
         };
 
-        println!("{}", download.len() + upload.len());
+        //println!("{}", download.len() + upload.len());
         ALL_QUEUE_SUMMARY.ingest_batch(download, upload);
     } else {
         log::warn!("(TC monitor) Unable to read configuration");
@@ -180,7 +180,7 @@ fn all_queue_reader() {
         log::warn!("(TC monitor) Not reading queues due to structure not yet ready");
     }
     let elapsed = start.elapsed();
-    log::warn!("(TC monitor) Completed in {:.5} seconds", elapsed.as_secs_f32());
+    log::debug!("(TC monitor) Completed in {:.5} seconds", elapsed.as_secs_f32());
 }
 
 /// Spawns a thread that periodically reads the queue statistics from
@@ -17,6 +17,7 @@ import {RamDash} from "./ram_dash";
 import {TopTreeSummary} from "./top_tree_summary";
 import {CombinedTopDashlet} from "./combined_top_dash";
 import {RttHisto3dDash} from "./rtt_histo3d_dash";
+import {QueueStatsTotalDash} from "./queue_stats_total";
 
 export const DashletMenu = [
     { name: "Throughput Bits/Second", tag: "throughputBps", size: 3 },
@@ -38,6 +39,7 @@ export const DashletMenu = [
     { name: "RAM Utilization", tag: "ram", size: 3 },
     { name: "Network Tree Summary", tag: "treeSummary", size: 6 },
     { name: "Combined Top 10 Box", tag: "combinedTop10", size: 3 },
+    { name: "Total Cake Stats", tag: "totalCakeStats", size: 3 },
 ];
 
 export function widgetFactory(widgetName, count) {
@@ -62,6 +64,7 @@ export function widgetFactory(widgetName, count) {
         case "ram" : widget = new RamDash(count); break;
         case "treeSummary" : widget = new TopTreeSummary(count); break;
         case "combinedTop10" : widget = new CombinedTopDashlet(count); break;
+        case "totalCakeStats" : widget = new QueueStatsTotalDash(count); break;
         default: {
             console.log("I don't know how to construct a widget of type [" + widgetName + "]");
             return null;
@@ -0,0 +1,35 @@
+import {BaseDashlet} from "./base_dashlet";
+import {clearDashDiv, simpleRow, theading} from "../helpers/builders";
+import {scaleNumber, scaleNanos} from "../helpers/scaling";
+import {QueueStatsTotalGraph} from "../graphs/queue_stats_total_graph";
+
+export class QueueStatsTotalDash extends BaseDashlet {
+    constructor(slot) {
+        super(slot);
+    }
+
+    title() {
+        return "Cake Stats (Total)";
+    }
+
+    subscribeTo() {
+        return [ "QueueStatsTotal" ];
+    }
+
+    buildContainer() {
+        let base = super.buildContainer();
+        base.appendChild(this.graphDiv());
+        return base;
+    }
+
+    setup() {
+        super.setup();
+        this.graph = new QueueStatsTotalGraph(this.graphDivId())
+    }
+
+    onMessage(msg) {
+        if (msg.event === "QueueStatsTotal") {
+            this.graph.update(msg.marks, msg.drops);
+        }
+    }
+}
@@ -0,0 +1,136 @@
+import {DashboardGraph} from "./dashboard_graph";
+import {scaleNumber} from "../helpers/scaling";
+
+const RING_SIZE = 60 * 5; // 5 Minutes
+
+export class QueueStatsTotalGraph extends DashboardGraph {
+    constructor(id) {
+        super(id);
+        this.ringbuffer = new RingBuffer(RING_SIZE);
+
+        let xaxis = [];
+        for (let i=0; i<RING_SIZE; i++) {
+            xaxis.push(i);
+        }
+
+        this.option = {
+            legend: {
+                orient: "horizontal",
+                right: 10,
+                top: "bottom",
+                selectMode: false,
+                data: [
+                    {
+                        name: "ECN Marks",
+                        icon: 'circle',
+                        itemStyle: {
+                            color: "green"
+                        }
+                    }, {
+                        name: "Cake Drops",
+                        icon: 'circle',
+                        itemStyle: {
+                            color: "orange"
+                        }
+                    }
+                ],
+                textStyle: {
+                    color: '#aaa'
+                },
+            },
+            xAxis: {
+                type: 'category',
+                data: xaxis,
+            },
+            yAxis: {
+                type: 'value',
+                axisLabel: {
+                    formatter: (val) => {
+                        return scaleNumber(Math.abs(val), 0);
+                    },
+                }
+            },
+            series: [
+                {
+                    name: 'ECN Marks',
+                    data: [],
+                    type: 'line',
+                    lineStyle: { color: "green" },
+                },
+                {
+                    name: 'ECN Marks Up',
+                    data: [],
+                    type: 'line',
+                    lineStyle: { color: "green" },
+                },
+                {
+                    name: 'Cake Drops',
+                    data: [],
+                    type: 'line',
+                    lineStyle: { color: "orange" },
+                },
+                {
+                    name: 'Cake Drops Up',
+                    data: [],
+                    type: 'line',
+                    lineStyle: { color: "orange" },
+                },
+            ],
+            tooltip: {
+                trigger: 'item',
+            },
+            animation: false,
+        }
+        this.option && this.chart.setOption(this.option);
+    }
+
+    update(marks, drops) {
+        this.chart.hideLoading();
+        this.ringbuffer.push(marks, drops);
+
+        let series = this.ringbuffer.series();
+        for (let i=0; i<this.option.series.length; i++) {
+            this.option.series[i].data = series[i];
+        }
+
+        this.chart.setOption(this.option);
+    }
+}
+
+class RingBuffer {
+    constructor(size) {
+        this.size = size;
+        let data = [];
+        for (let i=0; i<size; i++) {
+            data.push({ marks: { down: 0, up: 0 }, drops: { down: 0, up: 0 } });
+        }
+        this.head = 0;
+        this.data = data;
+    }
+
+    push(marks, drops) {
+        this.data[this.head] = {
+            marks: marks,
+            drops: drops,
+        };
+        this.head += 1;
+        this.head %= this.size;
+    }
+
+    series() {
+        let result = [[], [], [], []];
+        for (let i=this.head; i<this.size; i++) {
+            result[0].push(this.data[i].marks.down);
+            result[1].push(0 - this.data[i].marks.up);
+            result[2].push(this.data[i].drops.down);
+            result[3].push(0 - this.data[i].drops.up);
+        }
+        for (let i=0; i<this.head; i++) {
+            result[0].push(this.data[i].marks.down);
+            result[1].push(0 - this.data[i].marks.up);
+            result[2].push(this.data[i].drops.down);
+            result[3].push(0 - this.data[i].drops.up);
+        }
+        return result;
+    }
+}
@@ -18,4 +18,5 @@ pub enum PublishedChannels {
     Cpu,
     Ram,
     TreeSummary,
+    QueueStatsTotal,
 }
@@ -8,6 +8,7 @@ mod top_flows;
 mod flow_endpoints;
 pub mod system_info;
 mod tree_summary;
+mod queue_stats_total;
 
 use std::sync::Arc;
 use crate::node_manager::ws::publish_subscribe::PubSub;
@@ -34,6 +35,7 @@ pub(super) async fn channel_ticker(channels: Arc<PubSub>) {
         system_info::cpu_info(channels.clone()),
         system_info::ram_info(channels.clone()),
         tree_summary::tree_summary(channels.clone()),
+        queue_stats_total::queue_stats_totals(channels.clone()),
         );
     }
 }
@@ -0,0 +1,26 @@
+use lqos_queue_tracker::TOTAL_QUEUE_STATS;
+use std::sync::Arc;
+use serde_json::json;
+use crate::node_manager::ws::publish_subscribe::PubSub;
+use crate::node_manager::ws::published_channels::PublishedChannels;
+
+pub async fn queue_stats_totals(channels: Arc<PubSub>) {
+    if !channels.is_channel_alive(PublishedChannels::QueueStatsTotal).await {
+        return;
+    }
+
+    let message = json!(
+        {
+            "event": PublishedChannels::QueueStatsTotal.to_string(),
+            "marks": {
+                "down" : TOTAL_QUEUE_STATS.marks.get_down(),
+                "up" : TOTAL_QUEUE_STATS.marks.get_up(),
+            },
+            "drops" : {
+                "down" : TOTAL_QUEUE_STATS.drops.get_down(),
+                "up" : TOTAL_QUEUE_STATS.drops.get_up(),
+            },
+        }
+    ).to_string();
+    channels.send(PublishedChannels::QueueStatsTotal, message).await;
+}
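For reference, the message built above is what the new dashlet receives and unpacks as msg.marks and msg.drops. The snippet below only illustrates that payload shape with placeholder numbers; it is not output captured from a running system.

use serde_json::json;

fn main() {
    // Placeholder counters; the publisher above reads the real values from TOTAL_QUEUE_STATS.
    let message = json!({
        "event": "QueueStatsTotal",
        "marks": { "down": 1234_u64, "up": 56_u64 },
        "drops": { "down": 789_u64, "up": 12_u64 },
    })
    .to_string();
    println!("{message}");
}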
@@ -179,7 +179,7 @@ impl ThroughputTracker {
             if let Some(parents) = &entry.network_json_parents {
                 let net_json = NETWORK_JSON.read().unwrap();
                 // Send it upstream
-                net_json.add_queue_cycle(parents, drops, marks);
+                net_json.add_queue_cycle(parents, marks, drops);
             }
         }
     });
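This final hunk is the ordering fix named in the commit title: the call site previously passed drops before marks. The sketch below uses hypothetical stand-in types and a stand-in signature (not the real add_queue_cycle declaration) purely to illustrate the argument-order change.

// Hypothetical stand-ins for illustration only; not LibreQoS's real types or signature.
struct Counts { down: u64, up: u64 }

fn add_queue_cycle(_parents: &[usize], _marks: &Counts, _drops: &Counts) {
    // Upstream accumulation elided.
}

fn record(parents: &[usize], marks: &Counts, drops: &Counts) {
    // Before this commit the two counters were swapped:
    //     add_queue_cycle(parents, drops, marks);
    add_queue_cycle(parents, marks, drops);
}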