First try at a sparkline-based ASN explorer!

This commit is contained in:
Herbert Wolverson 2024-07-26 14:09:50 -05:00
parent 48a6ca5704
commit b250196d07
11 changed files with 360 additions and 4 deletions

View File

@ -1,6 +1,6 @@
#!/bin/bash
set -e
scripts=( index.js template.js login.js first-run.js shaped-devices.js tree.js help.js unknown-ips.js configuration.js circuit.js flow_map.js all_tree_sankey.js )
scripts=( index.js template.js login.js first-run.js shaped-devices.js tree.js help.js unknown-ips.js configuration.js circuit.js flow_map.js all_tree_sankey.js asn_explorer.js )
for script in "${scripts[@]}"
do
echo "Building {$script}"

View File

@ -0,0 +1,226 @@
import {clearDiv} from "./helpers/builders";
import {scaleNanos, scaleNumber} from "./helpers/scaling";
let asnList = [];
let asnData = [];
let graphMinTime = Number.MAX_SAFE_INTEGER;
let graphMaxTime = Number.MIN_SAFE_INTEGER;
// Convert a UNIX timestamp in whole seconds into a JavaScript Date
// (which counts milliseconds since the epoch).
function unixTimeToDate(unixTime) {
    const millis = unixTime * 1000;
    return new Date(millis);
}
// Fetch the list of ASNs with recent flow activity and render it as a
// Bootstrap dropdown inside the #asnList element. Clicking an entry calls
// selectAsn(); on load the busiest ASN (highest flow count) is auto-selected.
function asnDropdown() {
    $.get("local-api/asnList", (data) => {
        asnList = data;

        // Busiest ASNs first
        data.sort((a, b) => b.count - a.count);

        const wrapper = document.createElement("div");
        wrapper.classList.add("dropdown");

        const toggle = document.createElement("button");
        toggle.classList.add("btn", "btn-secondary", "dropdown-toggle");
        toggle.type = "button";
        toggle.innerHTML = "Select ASN";
        toggle.setAttribute("data-bs-toggle", "dropdown");
        toggle.setAttribute("aria-expanded", "false");
        wrapper.appendChild(toggle);

        const menu = document.createElement("ul");
        menu.classList.add("dropdown-menu");
        data.forEach((row) => {
            const item = document.createElement("li");
            item.innerHTML = row.name + " (" + row.count + ")";
            item.classList.add("dropdown-item");
            item.onclick = () => {
                selectAsn(row.asn);
            };
            menu.appendChild(item);
        });
        wrapper.appendChild(menu);

        document.getElementById("asnList").appendChild(wrapper);

        // Auto-select the ASN with the most flows so the page isn't empty
        if (data.length > 0) {
            selectAsn(data[0].asn);
        }
    });
}
// Show details for one ASN: a heading plus one row per flow, each row
// holding a small byte/RTT summary column and an empty canvas that
// drawTimeline() decorates after the rows have been rendered.
function selectAsn(asn) {
    let targetAsn = asnList.find((row) => row.asn === asn);
    if (targetAsn === undefined || targetAsn === null) {
        console.error("Could not find ASN: " + asn);
        return;
    }
    let target = document.getElementById("asnDetails");

    // Build the heading
    let heading = document.createElement("h2");
    heading.innerText = "ASN #" + asn.toFixed(0) + " (" + targetAsn.name + ")";

    // Get the flow data
    $.get("local-api/flowTimeline/" + asn, (data) => {
        asnData = data;
        // Sort data by row.start, ascending (chronological order)
        data.sort((a, b) => {
            return a.start - b.start;
        });

        // Build the flows display, tracking the overall time range so
        // every canvas shares the same X axis.
        let flowsDiv = document.createElement("div");
        let count = 0;
        let minTime = Number.MAX_SAFE_INTEGER;
        let maxTime = Number.MIN_SAFE_INTEGER;
        data.forEach((row) => {
            // Update min/max time
            if (row.start < minTime) {
                minTime = row.start;
            }
            if (row.end > maxTime) {
                maxTime = row.end;
            }

            let div = document.createElement("div");
            div.classList.add("row");

            // Summary column: total bytes (down / up) and RTT (down / up).
            // Fix: pass the explicit 0-decimals argument to the up-bytes call
            // too, matching the down-bytes call.
            let headingCol = document.createElement("div");
            headingCol.classList.add("col-1");
            let ht = "<p class='text-secondary small'>" + scaleNumber(row.total_bytes.down, 0) + " / " + scaleNumber(row.total_bytes.up, 0);
            if (row.rtt[0] !== undefined) {
                ht += "<br /> RTT: " + scaleNanos(row.rtt[0].nanoseconds, 0);
            } else {
                ht += "<br /> RTT: -";
            }
            if (row.rtt[1] !== undefined) {
                ht += " / " + scaleNanos(row.rtt[1].nanoseconds, 0);
            }
            ht += "</p>";
            headingCol.innerHTML = ht;
            div.appendChild(headingCol);

            // Build a canvas div, we'll decorate this later. The id must
            // match the "flowCanvas" + index scheme drawTimeline() expects.
            let canvasCol = document.createElement("div");
            canvasCol.classList.add("col-11");
            let canvas = document.createElement("canvas");
            canvas.id = "flowCanvas" + count;
            canvas.style.width = "100%";
            canvas.style.height = "50px";
            canvasCol.appendChild(canvas);
            div.appendChild(canvasCol);
            flowsDiv.appendChild(div);
            count++;
        });

        // Store the global time range used by timeToX()
        graphMinTime = minTime;
        graphMaxTime = maxTime;

        // Apply the data to the page
        clearDiv(target);
        target.appendChild(heading);
        target.appendChild(flowsDiv);

        // Wait for the page to render before drawing the graphs, so the
        // canvases have real layout sizes.
        requestAnimationFrame(() => {
            setTimeout(() => {
                drawTimeline();
            });
        });
    });
}
// Map a UNIX timestamp onto a horizontal pixel offset in [0, width],
// relative to the shared graph range [graphMinTime, graphMaxTime].
function timeToX(time, width) {
    let range = graphMaxTime - graphMinTime;
    if (range <= 0) {
        // Degenerate range (e.g. a single flow whose start and end truncate
        // to the same second) — avoid division by zero / NaN.
        return 0;
    }
    let offset = time - graphMinTime;
    return (offset / range) * width;
}
// Paint every flow's canvas: a shaded region covering the flow's lifetime,
// red ticks for TCP retransmits (download in the lower half, upload in the
// upper half), and one vertical bar per throughput sample spanning from the
// download value (below the midline) to the upload value (above it).
function drawTimeline() {
    let style = getComputedStyle(document.body);
    let regionBg = style.getPropertyValue('--bs-tertiary-bg');
    let lineColor = style.getPropertyValue('--bs-primary');

    for (let i = 0; i < asnData.length; i++) {
        let row = asnData[i];
        let canvasId = "flowCanvas" + i;

        // Get the canvas context; match the backing store to the CSS size
        // so drawing isn't stretched.
        let canvas = document.getElementById(canvasId);
        const { width, height } = canvas.getBoundingClientRect();
        canvas.width = width;
        canvas.height = height;
        let ctx = canvas.getContext("2d");

        // Draw the background for the time period. fillRect's 3rd argument
        // is a *width*, not an end coordinate, so convert end-x to a span.
        let startX = timeToX(row.start, width);
        let endX = timeToX(row.end, width);
        ctx.fillStyle = regionBg;
        ctx.fillRect(startX, 0, endX - startX, height);

        // Draw red lines for TCP retransmits
        ctx.strokeStyle = "red";
        row.retransmit_times_down.forEach((time) => {
            // Download retransmit: tick from y/2 down to the bottom
            let x = timeToX(time, width);
            ctx.beginPath();
            ctx.moveTo(x, height / 2);
            ctx.lineTo(x, height);
            ctx.stroke();
        });
        row.retransmit_times_up.forEach((time) => {
            // Upload retransmit: tick from the top down to y/2
            let x = timeToX(time, width);
            ctx.beginPath();
            ctx.moveTo(x, 0);
            ctx.lineTo(x, height / 2);
            ctx.stroke();
        });

        // Find the per-direction maxima so each half scales independently
        let maxThroughputDown = 0;
        let maxThroughputUp = 0;
        row.throughput.forEach((value) => {
            if (value.down > maxThroughputDown) {
                maxThroughputDown = value.down;
            }
            if (value.up > maxThroughputUp) {
                maxThroughputUp = value.up;
            }
        });
        // Guard against an all-zero direction (0/0 would yield NaN); with a
        // max of 0 every sample is 0, so dividing by 1 still gives 0.
        let downScale = maxThroughputDown || 1;
        let upScale = maxThroughputUp || 1;

        // One vertical bar per sample, evenly spaced across the flow region.
        ctx.strokeStyle = lineColor;
        ctx.beginPath();
        let sampleWidth = (endX - startX) / row.throughput.length;
        let x = startX;
        row.throughput.forEach((value) => {
            let downHeight = (value.down / downScale) * (height / 2);
            let upHeight = (value.up / upScale) * (height / 2);
            ctx.moveTo(x, height - downHeight);
            ctx.lineTo(x, upHeight);
            x += sampleWidth;
        });
        ctx.stroke();
    }
}
// Entry point: build the dropdown on page load (this also auto-selects
// the busiest ASN and renders its flow timeline).
asnDropdown();

View File

@ -13,6 +13,7 @@ mod circuit;
mod packet_analysis;
mod flow_map;
mod warnings;
mod flow_explorer;
use axum::Router;
use axum::routing::{get, post};
@ -48,5 +49,7 @@ pub fn local_api() -> Router {
.route("/pcapDump/:id", get(packet_analysis::pcap_dump))
.route("/flowMap", get(flow_map::flow_lat_lon))
.route("/globalWarnings", get(warnings::get_global_warnings))
.route("/asnList", get(flow_explorer::asn_list))
.route("/flowTimeline/:asn_id", get(flow_explorer::flow_timeline))
.route_layer(axum::middleware::from_fn(auth_layer))
}

View File

@ -0,0 +1,58 @@
use std::time::Duration;
use axum::extract::Path;
use axum::Json;
use serde::Serialize;
use lqos_utils::units::DownUpOrder;
use lqos_utils::unix_time::{time_since_boot, unix_now};
use crate::throughput_tracker::flow_data::{AsnListEntry, RECENT_FLOWS, RttData};
/// Axum handler for `GET /local-api/asnList`.
///
/// Returns every ASN present in the recent-flows buffer together with its
/// flow count (see `TimeBuffer::asn_list`), serialized as JSON.
pub async fn asn_list() -> Json<Vec<AsnListEntry>> {
    Json(RECENT_FLOWS.asn_list())
}
/// Serialized timeline entry for a single flow, consumed by the ASN
/// explorer front-end (`asn_explorer.js`).
#[derive(Serialize)]
pub struct FlowTimeline {
    // Flow start, UNIX seconds (wall clock).
    start: u64,
    // Flow end (last packet seen), UNIX seconds.
    end: u64,
    // Flow duration in nanoseconds (last_seen - start_time).
    duration_nanos: u64,
    // Per-sample down/up throughput readings from the flow's buffer.
    throughput: Vec<DownUpOrder<u64>>,
    // Total TCP retransmit counts, down/up.
    tcp_retransmits: DownUpOrder<u16>,
    // RTT estimates, one per direction — presumably [down, up]; TODO confirm.
    rtt: [RttData; 2],
    // Wall-clock timestamps of download-direction retransmits.
    retransmit_times_down: Vec<u64>,
    // Wall-clock timestamps of upload-direction retransmits.
    retransmit_times_up: Vec<u64>,
    // Total bytes transferred, down/up.
    total_bytes: DownUpOrder<u64>,
}
/// Axum handler for `GET /local-api/flowTimeline/:asn_id`.
///
/// Collects every recent flow attributed to `asn_id` and rebases its
/// boot-relative timestamps onto the UNIX epoch for the front-end.
pub async fn flow_timeline(Path(asn_id): Path<u32>) -> Json<Vec<FlowTimeline>> {
    // Wall-clock second at which the system booted: now minus uptime.
    let time_since_boot = time_since_boot().unwrap();
    let since_boot = Duration::from(time_since_boot);
    let boot_time = unix_now().unwrap() - since_boot.as_secs();

    let all_flows_for_asn = RECENT_FLOWS.all_flows_for_asn(asn_id);

    let flows = all_flows_for_asn
        .iter()
        // flow.1 is the FlowbeeLocalData half of the (key, data, analysis) tuple.
        .map(|flow| {
            FlowTimeline {
                // start_time / last_seen are nanoseconds since boot; convert
                // to seconds before adding the boot epoch.
                start: boot_time + Duration::from_nanos(flow.1.start_time).as_secs(),
                end: boot_time + Duration::from_nanos(flow.1.last_seen).as_secs(),
                duration_nanos: flow.1.last_seen - flow.1.start_time,
                tcp_retransmits: flow.1.tcp_retransmits.clone(),
                throughput: flow.1.throughput_buffer.clone(),
                rtt: flow.1.rtt.clone(),
                // NOTE(review): retry times are added to boot_time with no
                // nanos->seconds conversion, unlike start/end above — confirm
                // retry_times_* are already stored in seconds since boot.
                retransmit_times_down: flow.1.retry_times_down
                    .iter()
                    .map(|t| boot_time + *t)
                    .collect(),
                retransmit_times_up: flow.1.retry_times_up
                    .iter()
                    .map(|t| boot_time + *t)
                    .collect(),
                total_bytes: flow.1.bytes_sent.clone(),
            }
        })
        .collect::<Vec<_>>();

    Json(flows)
}

View File

@ -0,0 +1,11 @@
<!-- ASN Explorer page body: #asnList receives the ASN dropdown and
     #asnDetails the per-flow timeline, both populated by asn_explorer.js -->
<div class="row">
    <div class="col-12">
        <span id="asnList"></span>
    </div>
</div>
<div class="row">
    <div class="col-12" id="asnDetails">
    </div>
</div>
<script src="asn_explorer.js"></script>

View File

@ -77,6 +77,12 @@
<i class="fa fa-fw fa-centerline fa-server nav-icon"></i> Tree Overview
</a>
</li>
<!-- ASN Explorer -->
<li class="nav-item">
<a class="nav-link" href="asn_explorer.html">
<i class="fa fa-fw fa-centerline fa-globe nav-icon"></i> ASN Explorer
</a>
</li>
<!-- Statistics -->
<li class="nav-item">
<a class="nav-link" id="lnkStats">

View File

@ -32,6 +32,7 @@ pub(super) fn static_routes() -> Result<Router> {
"index.html", "shaped_devices.html", "tree.html",
"help.html", "unknown_ips.html", "configuration.html",
"circuit.html", "flow_map.html", "all_tree_sankey.html",
"asn_explorer.html",
];
// Iterate through pages and construct the router

View File

@ -168,6 +168,15 @@ impl GeoTable {
(0.0, 0.0)
}
/// Look up an ASN's organization name by ASN number; returns "Unknown"
/// when no trie entry carries that ASN.
///
/// NOTE(review): this is a full linear scan of the trie on every call, and
/// `TimeBuffer::asn_list` calls it once per distinct ASN — consider a
/// reverse index (asn -> name) if the table is large.
pub fn find_name_by_id(&self, id: u32) -> String {
    for (_, entry) in self.asn_trie.iter() {
        if entry.asn == id {
            return entry.organization.clone();
        }
    }
    "Unknown".to_string()
}
}
#[derive(Default)]

View File

@ -1,4 +1,4 @@
use super::{get_asn_lat_lon, get_asn_name_and_country, FlowAnalysis};
use super::{get_asn_lat_lon, get_asn_name_and_country, FlowAnalysis, get_asn_name_by_id};
use crate::throughput_tracker::flow_data::{FlowbeeLocalData, FlowbeeRecipient};
use fxhash::FxHashMap;
use lqos_bus::BusResponse;
@ -14,6 +14,7 @@ pub struct TimeBuffer {
buffer: Mutex<Vec<TimeEntry>>,
}
#[derive(Clone, Debug)]
struct TimeEntry {
time: u64,
data: (FlowbeeKey, FlowbeeLocalData, FlowAnalysis),
@ -25,6 +26,13 @@ pub struct FlowDurationSummary {
duration: u64,
}
/// One row of the ASN selection dropdown in the explorer UI.
#[derive(Debug, Serialize)]
pub struct AsnListEntry {
    // Number of recent flows attributed to this ASN.
    count: usize,
    // The autonomous-system number.
    asn: u32,
    // Organization name resolved via get_asn_name_by_id.
    name: String,
}
impl TimeBuffer {
fn new() -> Self {
Self {
@ -261,6 +269,31 @@ impl TimeBuffer {
.map(|(count, duration)| FlowDurationSummary { count, duration })
.collect()
}
/// Returns a clone of every buffered flow whose analysis attributed it to
/// ASN `id`, as (key, data, analysis) tuples.
///
/// Clones each matching entry while holding the buffer mutex, so callers
/// never see the lock.
pub fn all_flows_for_asn(&self, id: u32) -> Vec<(FlowbeeKey, FlowbeeLocalData, FlowAnalysis)> {
    let buffer = self.buffer.lock().unwrap();
    buffer
        .iter()
        .filter(|flow| flow.data.2.asn_id.0 == id )
        .map(|flow| flow.data.clone())
        .collect()
}
/// Builds a list of all ASNs with recent data, and how many flows they have.
///
/// ASN ids are sorted so that itertools' `dedup_with_count` can collapse
/// consecutive duplicates into (count, asn) pairs; each ASN's name is then
/// resolved for display.
pub fn asn_list(&self) -> Vec<AsnListEntry> {
    let buffer = self.buffer.lock().unwrap();
    buffer
        .iter()
        .map(|flow| flow.data.2.asn_id.0)
        .sorted()
        .dedup_with_count()
        .map(|(count, asn)| AsnListEntry {
            count,
            asn,
            // NOTE(review): linear table scan per distinct ASN — see
            // GeoTable::find_name_by_id.
            name: get_asn_name_by_id(asn),
        })
        .collect()
}
}
/// Global rolling buffer of recently completed flows, shared by the
/// throughput tracker and the local API handlers.
pub static RECENT_FLOWS: Lazy<TimeBuffer> = Lazy::new(|| TimeBuffer::new());

View File

@ -14,6 +14,7 @@ mod kernel_ringbuffer;
pub use kernel_ringbuffer::*;
mod rtt_types;
pub use rtt_types::RttData;
pub use finished_flows::AsnListEntry;
use crate::throughput_tracker::flow_data::flow_analysis::asn::AsnNameCountryFlag;
static ANALYSIS: Lazy<FlowAnalysisSystem> = Lazy::new(|| FlowAnalysisSystem::new());
@ -97,4 +98,13 @@ pub fn get_asn_lat_lon(ip: IpAddr) -> (f64, f64) {
}
}
(0.0, 0.0)
}
/// Resolves an ASN number to its organization name via the global analysis
/// table. Returns "Unknown" when the table lock is poisoned, the table has
/// not been loaded yet, or the ASN is not present.
pub fn get_asn_name_by_id(id: u32) -> String {
    if let Ok(table_lock) = ANALYSIS.asn_table.lock() {
        if let Some(table) = table_lock.as_ref() {
            return table.find_name_by_id(id);
        }
    }
    "Unknown".to_string()
}

View File

@ -15,10 +15,9 @@ use std::sync::{
};
pub(crate) use flow_analysis::{setup_flow_analysis, get_asn_name_and_country,
FlowAnalysis, RECENT_FLOWS, flowbee_handle_events, get_flowbee_event_count_and_reset,
expire_rtt_flows, flowbee_rtt_map, RttData, get_rtt_events_per_second,
expire_rtt_flows, flowbee_rtt_map, RttData, get_rtt_events_per_second, AsnListEntry
};
/// Sink for completed flow records: implementors receive each flow's key,
/// locally tracked data, and analysis results for further processing.
trait FlowbeeRecipient {
    fn enqueue(&self, key: FlowbeeKey, data: FlowbeeLocalData, analysis: FlowAnalysis);
}