diff --git a/backend/routes/asnLookup.js b/backend/routes/asnLookup.js
index 3a671d2..17f3dfe 100644
--- a/backend/routes/asnLookup.js
+++ b/backend/routes/asnLookup.js
@@ -1,28 +1,52 @@
// backend/routes/asnLookup.js
const express = require('express');
const https = require('https');
+const fs = require('fs');
+const path = require('path');
const pino = require('pino');
const Sentry = require('@sentry/node');
const logger = pino({ level: process.env.LOG_LEVEL || 'info' });
const router = express.Router();
-// ─── In-Memory Cache (24h TTL) ───────────────────────────────────────────────
+// ─── Filesystem Cache (24h TTL) ───────────────────────────────────────────────
const CACHE_TTL_MS = 24 * 60 * 60 * 1000; // 24 hours
-const cache = new Map(); // key → { data, expiresAt }
+const CACHE_DIR = process.env.ASN_CACHE_DIR || path.join(__dirname, '..', 'data', 'asn-cache');
+
+// Ensure cache directory exists
+try {
+ fs.mkdirSync(CACHE_DIR, { recursive: true });
+} catch (e) {
+ logger.warn({ error: e.message }, 'Could not create ASN cache directory');
+}
+
+function cacheFilePath(key) {
+ // Sanitize key to safe filename
+ return path.join(CACHE_DIR, key.replace(/[^a-zA-Z0-9_-]/g, '_') + '.json');
+}
function getCached(key) {
- const entry = cache.get(key);
- if (!entry) return null;
- if (Date.now() > entry.expiresAt) {
- cache.delete(key);
- return null;
+ try {
+ const file = cacheFilePath(key);
+ const raw = fs.readFileSync(file, 'utf8');
+ const entry = JSON.parse(raw);
+ if (Date.now() > entry.expiresAt) {
+ fs.unlinkSync(file);
+ return null;
+ }
+ return entry.data;
+ } catch {
+ return null; // File doesn't exist or parse failed
}
- return entry.data;
}
function setCache(key, data) {
- cache.set(key, { data, expiresAt: Date.now() + CACHE_TTL_MS });
+ try {
+ const entry = { data, expiresAt: Date.now() + CACHE_TTL_MS };
+ fs.writeFileSync(cacheFilePath(key), JSON.stringify(entry), 'utf8');
+ } catch (e) {
+ logger.warn({ key, error: e.message }, 'ASN cache write failed');
+ }
}
// ─── HTTP Helper ──────────────────────────────────────────────────────────────
@@ -41,22 +65,18 @@ function fetchJson(url) {
if (res.statusCode < 200 || res.statusCode >= 300) {
return reject(new Error(`HTTP ${res.statusCode} from ${url}`));
}
- try {
- resolve(JSON.parse(raw));
- } catch (e) {
- reject(new Error(`JSON parse error from ${url}: ${e.message}`));
- }
+ try { resolve(JSON.parse(raw)); }
+ catch (e) { reject(new Error(`JSON parse error: ${e.message}`)); }
});
});
req.on('error', reject);
- req.on('timeout', () => { req.destroy(); reject(new Error(`Timeout fetching ${url}`)); });
+ req.on('timeout', () => { req.destroy(); reject(new Error(`Timeout: ${url}`)); });
});
}
// ─── ASN Validation ───────────────────────────────────────────────────────────
function parseAsn(raw) {
if (!raw || typeof raw !== 'string') return null;
- // Accept "15169", "AS15169", "as15169"
const cleaned = raw.trim().toUpperCase().replace(/^AS/, '');
const n = parseInt(cleaned, 10);
if (isNaN(n) || n < 1 || n > 4294967295 || String(n) !== cleaned) return null;
@@ -65,12 +85,11 @@ function parseAsn(raw) {
// ─── RIPE Stat Fetchers ───────────────────────────────────────────────────────
async function fetchOverview(asn) {
- const cacheKey = `overview:${asn}`;
- const cached = getCached(cacheKey);
+ const key = `overview:${asn}`;
+ const cached = getCached(key);
if (cached) return cached;
- const url = `https://stat.ripe.net/data/as-overview/data.json?resource=AS${asn}`;
- const json = await fetchJson(url);
+ const json = await fetchJson(`https://stat.ripe.net/data/as-overview/data.json?resource=AS${asn}`);
const d = json?.data;
const result = {
asn,
@@ -79,50 +98,47 @@ async function fetchOverview(asn) {
type: d?.type || null,
block: d?.block || null,
};
- setCache(cacheKey, result);
+ setCache(key, result);
return result;
}
async function fetchNeighbours(asn) {
- const cacheKey = `neighbours:${asn}`;
- const cached = getCached(cacheKey);
+ const key = `neighbours:${asn}`;
+ const cached = getCached(key);
if (cached) return cached;
- const url = `https://stat.ripe.net/data/asn-neighbours/data.json?resource=AS${asn}`;
- const json = await fetchJson(url);
+ const json = await fetchJson(`https://stat.ripe.net/data/asn-neighbours/data.json?resource=AS${asn}`);
const neighbours = (json?.data?.neighbours || []).map(n => ({
asn: n.asn,
- type: n.type, // 'left' = upstream, 'right' = downstream
+ type: n.type, // 'left' = upstream, 'right' = downstream
power: n.power || 0,
v4_peers: n.v4_peers || 0,
v6_peers: n.v6_peers || 0,
}));
- setCache(cacheKey, neighbours);
+ setCache(key, neighbours);
return neighbours;
}
async function fetchPrefixes(asn) {
- const cacheKey = `prefixes:${asn}`;
- const cached = getCached(cacheKey);
+ const key = `prefixes:${asn}`;
+ const cached = getCached(key);
if (cached) return cached;
- const url = `https://stat.ripe.net/data/announced-prefixes/data.json?resource=AS${asn}`;
- const json = await fetchJson(url);
+ const json = await fetchJson(`https://stat.ripe.net/data/announced-prefixes/data.json?resource=AS${asn}`);
const prefixes = (json?.data?.prefixes || []).map(p => p.prefix);
- setCache(cacheKey, prefixes);
+ setCache(key, prefixes);
return prefixes;
}
async function fetchPeeringDb(asn) {
- const cacheKey = `peeringdb:${asn}`;
- const cached = getCached(cacheKey);
- if (cached) return cached;
+ const key = `peeringdb:${asn}`;
+ const cached = getCached(key);
+ if (cached !== null) return cached.notFound ? null : cached;
try {
- const url = `https://www.peeringdb.com/api/net?asn=${asn}&depth=2`;
- const json = await fetchJson(url);
+ const json = await fetchJson(`https://www.peeringdb.com/api/net?asn=${asn}&depth=2`);
const net = json?.data?.[0];
- if (!net) { setCache(cacheKey, null); return null; }
+ if (!net) { setCache(key, { notFound: true }); return null; }
const result = {
peeringPolicy: net.policy_general || null,
@@ -133,9 +149,9 @@ async function fetchPeeringDb(asn) {
speed: ix.speed,
ipv4: ix.ipaddr4 || null,
ipv6: ix.ipaddr6 || null,
- })).slice(0, 20), // max 20 IXPs
+ })).slice(0, 20),
};
- setCache(cacheKey, result);
+ setCache(key, result);
return result;
} catch (e) {
logger.warn({ asn, error: e.message }, 'PeeringDB fetch failed');
@@ -143,6 +159,16 @@ async function fetchPeeringDb(asn) {
}
}
+// ─── Resolve names for a list of ASNs ────────────────────────────────────────
+async function resolveNames(asnList) {
+ const results = await Promise.allSettled(asnList.map(a => fetchOverview(a)));
+ const map = {};
+ results.forEach((r, i) => {
+ map[asnList[i]] = r.status === 'fulfilled' ? (r.value.name || null) : null;
+ });
+ return map;
+}
+
// ─── Route ────────────────────────────────────────────────────────────────────
router.get('/', async (req, res, next) => {
const rawAsn = req.query.asn;
@@ -150,13 +176,16 @@ router.get('/', async (req, res, next) => {
const asn = parseAsn(String(rawAsn || ''));
if (!asn) {
- return res.status(400).json({ success: false, error: 'Invalid ASN. Please provide a number between 1 and 4294967295, e.g. ?asn=15169' });
+ return res.status(400).json({
+ success: false,
+ error: 'Invalid ASN. Please provide a number between 1 and 4294967295, e.g. ?asn=15169'
+ });
}
logger.info({ requestIp, asn }, 'ASN lookup request');
try {
- // Level 1 + Level 2: overview + direct neighbours + prefixes + PeeringDB (parallel)
+ // Level 1 + Level 2: fetch all base data in parallel
const [overview, neighbours, prefixes, peeringdb] = await Promise.all([
fetchOverview(asn),
fetchNeighbours(asn),
@@ -164,67 +193,49 @@ router.get('/', async (req, res, next) => {
fetchPeeringDb(asn),
]);
- // Split neighbours into upstream (left) and downstream (right)
- const upstreams = neighbours
- .filter(n => n.type === 'left')
- .sort((a, b) => b.power - a.power)
- .slice(0, 10); // Top 10 upstreams for Level 2
+ // Split neighbours
+ const upstreams = neighbours.filter(n => n.type === 'left').sort((a, b) => b.power - a.power).slice(0, 10);
+ const downstreams = neighbours.filter(n => n.type === 'right').sort((a, b) => b.power - a.power).slice(0, 10);
- const downstreams = neighbours
- .filter(n => n.type === 'right')
- .sort((a, b) => b.power - a.power)
- .slice(0, 10); // Top 10 downstreams for Level 2
+ // Resolve names for ALL Level 2 nodes (both upstreams and downstreams)
+ const level2Asns = [...new Set([...upstreams, ...downstreams].map(n => n.asn))];
+ const level2Names = await resolveNames(level2Asns);
- // Level 3: fetch upstreams of upstreams (top 5 of Level 2 upstreams only)
+ // Level 3: fetch upstreams-of-upstreams for top 5 Level 2 upstreams
const level3Raw = await Promise.allSettled(
upstreams.slice(0, 5).map(async (upstreamNode) => {
const theirNeighbours = await fetchNeighbours(upstreamNode.asn);
- const overviewResult = await fetchOverview(upstreamNode.asn);
- // Their upstreams (left) = Level 3
const theirUpstreams = theirNeighbours
.filter(n => n.type === 'left')
.sort((a, b) => b.power - a.power)
- .slice(0, 3); // Top 3 per Level-2 upstream
- return {
- parentAsn: upstreamNode.asn,
- parentName: overviewResult.name,
- theirUpstreams,
- };
+ .slice(0, 3);
+ return { parentAsn: upstreamNode.asn, theirUpstreams };
})
);
- // Collect Level 3 nodes, resolve names for them
const level3Data = level3Raw
.filter(r => r.status === 'fulfilled')
.map(r => r.value);
- // Flatten all unique Level 3 ASNs and fetch their names
- const level3Asns = [...new Set(
- level3Data.flatMap(d => d.theirUpstreams.map(n => n.asn))
- )];
- const level3Names = await Promise.allSettled(
- level3Asns.map(a => fetchOverview(a))
- );
- const asnNameMap = {};
- level3Names.forEach((r, i) => {
- if (r.status === 'fulfilled') asnNameMap[level3Asns[i]] = r.value.name;
- });
- // Also include Level 2 names
- [...upstreams, ...downstreams].forEach(n => {
- if (!asnNameMap[n.asn]) asnNameMap[n.asn] = null;
- });
+ // Resolve names for Level 3 nodes
+ const level3Asns = [...new Set(level3Data.flatMap(d => d.theirUpstreams.map(n => n.asn)))];
+ const level3Names = await resolveNames(level3Asns);
- // Build graph structure for frontend
+ // ── Build graph ───────────────────────────────────────────────────────
const graph = {
center: { asn, name: overview.name },
level2: {
- upstreams: upstreams.map(n => ({ asn: n.asn, name: asnNameMap[n.asn] || null, power: n.power, v4: n.v4_peers, v6: n.v6_peers })),
- downstreams: downstreams.map(n => ({ asn: n.asn, name: asnNameMap[n.asn] || null, power: n.power, v4: n.v4_peers, v6: n.v6_peers })),
+ upstreams: upstreams.map(n => ({ asn: n.asn, name: level2Names[n.asn] || null, power: n.power, v4: n.v4_peers, v6: n.v6_peers })),
+ downstreams: downstreams.map(n => ({ asn: n.asn, name: level2Names[n.asn] || null, power: n.power, v4: n.v4_peers, v6: n.v6_peers })),
},
level3: level3Data.map(d => ({
parentAsn: d.parentAsn,
- parentName: d.parentName,
- upstreams: d.theirUpstreams.map(n => ({ asn: n.asn, name: asnNameMap[n.asn] || null, power: n.power })),
+ parentName: level2Names[d.parentAsn] || null,
+ upstreams: d.theirUpstreams.map(n => ({
+ asn: n.asn,
+ name: level3Names[n.asn] || null,
+ power: n.power,
+ })),
})),
};
@@ -234,7 +245,7 @@ router.get('/', async (req, res, next) => {
name: overview.name,
announced: overview.announced,
type: overview.type,
- prefixes: prefixes.slice(0, 100), // max 100 prefixes
+ prefixes: prefixes.slice(0, 100),
peeringdb,
graph,
});
diff --git a/compose.yml b/compose.yml
index 10b9f4a..28224d8 100644
--- a/compose.yml
+++ b/compose.yml
@@ -1,46 +1,45 @@
services:
# Backend Service (Node.js App)
backend:
- # Verwendet ein bereits gebautes Image
image: mrunknownde/utools-backend
container_name: utools_backend
restart: unless-stopped
environment:
- # Setze Umgebungsvariablen für das Backend
- NODE_ENV: production # Wichtig für Performance und Logging
- PORT: 3000 # Port innerhalb des Containers
- LOG_LEVEL: info # Oder 'warn' für weniger Logs in Produktion
+ NODE_ENV: production
+ PORT: 3000
+ LOG_LEVEL: info
PING_COUNT: 4
- # Die DB-Pfade werden aus dem Backend-Dockerfile ENV genommen,
- # könnten hier aber überschrieben werden, falls nötig.
- # GEOIP_CITY_DB: ./data/GeoLite2-City.mmdb
- # GEOIP_ASN_DB: ./data/GeoLite2-ASN.mmdb
- # Sentry DSN aus der Umgebung/ .env Datei übernehmen
- SENTRY_DSN: "https://7ea70caba68f548fb96482a573006a7b@o447623.ingest.us.sentry.io/4509062020333568" # Wichtig für die Laufzeit
+ SENTRY_DSN: "${SENTRY_DSN:-https://7ea70caba68f548fb96482a573006a7b@o447623.ingest.us.sentry.io/4509062020333568}"
+ # ASN Cache directory (filesystem persistence across restarts)
+ ASN_CACHE_DIR: /app/asn-cache
+ volumes:
+ # Persistent ASN lookup cache — survives container restarts
+ - asn_cache:/app/asn-cache
dns:
- - 1.1.1.1 # Cloudflare DNS
- - 1.0.0.1 # Cloudflare DNS
- - 8.8.8.8 # Google DNS
- - 8.8.4.4 # Google DNS
+ - 1.1.1.1
+ - 1.0.0.1
+ - 8.8.8.8
+ - 8.8.4.4
networks:
- - utools_network # Verbinde mit unserem benutzerdefinierten Netzwerk
+ - utools_network
# Frontend Service (Nginx)
frontend:
- # Verwendet ein bereits gebautes Image
image: mrunknownde/utools-frontend
container_name: utools_frontend
restart: unless-stopped
ports:
- # Mappe Port 8080 vom Host auf Port 80 im Container (wo Nginx lauscht)
- # Zugriff von außen (Browser) erfolgt über localhost:8080
- "8080:80"
depends_on:
- - backend # Stellt sicher, dass Backend gestartet wird (aber nicht unbedingt bereit ist)
+ - backend
networks:
- - utools_network # Verbinde mit unserem benutzerdefinierten Netzwerk
+ - utools_network
-# Definiere ein benutzerdefiniertes Netzwerk (gute Praxis)
networks:
utools_network:
- driver: bridge
\ No newline at end of file
+ driver: bridge
+
+# Named volume — ASN cache persists across container restarts
+volumes:
+ asn_cache:
+ driver: local
diff --git a/frontend/app/dns-lookup.html b/frontend/app/dns-lookup.html
index c3e3bfe..04901a7 100644
--- a/frontend/app/dns-lookup.html
+++ b/frontend/app/dns-lookup.html
@@ -181,11 +181,12 @@
Suite
diff --git a/frontend/app/mac-lookup.html b/frontend/app/mac-lookup.html
index d73d6ca..91154d3 100644
--- a/frontend/app/mac-lookup.html
+++ b/frontend/app/mac-lookup.html
@@ -182,11 +182,12 @@
Suite
diff --git a/frontend/app/script.js b/frontend/app/script.js
index c3430b8..133e037 100644
--- a/frontend/app/script.js
+++ b/frontend/app/script.js
@@ -287,13 +287,16 @@ document.addEventListener('DOMContentLoaded', () => {
updateField(coordsEl, data.geo?.latitude ? `${data.geo.latitude}, ${data.geo.longitude}` : null);
updateField(timezoneEl, data.geo?.timezone, geoLoader); // Hide loader on last geo field
- updateField(asnNumberEl, data.asn?.number
- ? `AS${data.asn.number}`
- : null, null, asnErrorEl);
- // Make ASN a clickable link to ASN Lookup
- if (data.asn?.number && asnNumberEl) {
+ // ASN — render as clickable link if has a number (not an error object)
+ const asnNum = (data.asn && !data.asn.error) ? data.asn.number : null;
+ if (asnNum && asnNumberEl) {
+ // Unhide the data container manually — updateField() is bypassed on this link-rendering success path
+ const asnContainer = asnNumberEl.closest('div:not(.loader)');
+ if (asnContainer) asnContainer.classList.remove('hidden');
asnNumberEl.innerHTML =
- `AS${data.asn.number}`;
+ `AS${asnNum}`;
+ } else {
+ updateField(asnNumberEl, null, null, asnErrorEl, data.asn?.error || '-');
}
updateField(asnOrgEl, data.asn?.organization, asnLoader);
diff --git a/frontend/app/subnet-calculator.html b/frontend/app/subnet-calculator.html
index 71d595a..00f1b55 100644
--- a/frontend/app/subnet-calculator.html
+++ b/frontend/app/subnet-calculator.html
@@ -165,11 +165,12 @@
Suite
diff --git a/frontend/app/whois-lookup.html b/frontend/app/whois-lookup.html
index b38ecb8..17f8175 100644
--- a/frontend/app/whois-lookup.html
+++ b/frontend/app/whois-lookup.html
@@ -181,11 +181,12 @@
Suite