mirror of
https://github.com/MrUnknownDE/cloudflare-prometheus-exporter.git
synced 2026-04-27 18:13:44 +02:00
Cloudflare Prometheus Exporter
This commit is contained in:
2460
src/cloudflare/client.ts
Normal file
2460
src/cloudflare/client.ts
Normal file
File diff suppressed because it is too large
Load Diff
21
src/cloudflare/gql/client.ts
Normal file
21
src/cloudflare/gql/client.ts
Normal file
@@ -0,0 +1,21 @@
|
||||
import { initGraphQLTada } from "gql.tada";
|
||||
import type { introspection } from "./graphql-env";
|
||||
|
||||
export const graphql = initGraphQLTada<{
|
||||
introspection: introspection;
|
||||
scalars: {
|
||||
Date: string;
|
||||
DateTime: string;
|
||||
Time: string;
|
||||
bytes: string;
|
||||
float32: number;
|
||||
float64: number;
|
||||
string: string;
|
||||
uint8: number;
|
||||
uint16: number;
|
||||
uint32: number;
|
||||
uint64: number;
|
||||
};
|
||||
}>();
|
||||
|
||||
export type { FragmentOf, ResultOf, VariablesOf } from "gql.tada";
|
||||
1460
src/cloudflare/gql/graphql-env.d.ts
vendored
Normal file
1460
src/cloudflare/gql/graphql-env.d.ts
vendored
Normal file
File diff suppressed because one or more lines are too long
639
src/cloudflare/gql/queries.ts
Normal file
639
src/cloudflare/gql/queries.ts
Normal file
@@ -0,0 +1,639 @@
|
||||
import { graphql } from "./client";
|
||||
|
||||
export const HTTPMetricsQuery = graphql(`
|
||||
query HTTPMetrics(
|
||||
$zoneIDs: [string!]
|
||||
$mintime: Time!
|
||||
$maxtime: Time!
|
||||
$limit: uint64!
|
||||
) {
|
||||
viewer {
|
||||
zones(filter: { zoneTag_in: $zoneIDs }) {
|
||||
zoneTag
|
||||
httpRequests1mGroups(
|
||||
limit: $limit
|
||||
filter: { datetime_geq: $mintime, datetime_lt: $maxtime }
|
||||
) {
|
||||
uniq {
|
||||
uniques
|
||||
}
|
||||
sum {
|
||||
browserMap {
|
||||
pageViews
|
||||
uaBrowserFamily
|
||||
}
|
||||
bytes
|
||||
cachedBytes
|
||||
cachedRequests
|
||||
contentTypeMap {
|
||||
bytes
|
||||
requests
|
||||
edgeResponseContentTypeName
|
||||
}
|
||||
countryMap {
|
||||
bytes
|
||||
clientCountryName
|
||||
requests
|
||||
threats
|
||||
}
|
||||
encryptedBytes
|
||||
encryptedRequests
|
||||
pageViews
|
||||
requests
|
||||
responseStatusMap {
|
||||
edgeResponseStatus
|
||||
requests
|
||||
}
|
||||
threatPathingMap {
|
||||
requests
|
||||
threatPathingName
|
||||
}
|
||||
threats
|
||||
clientHTTPVersionMap {
|
||||
clientHTTPProtocol
|
||||
requests
|
||||
}
|
||||
clientSSLMap {
|
||||
clientSSLProtocol
|
||||
requests
|
||||
}
|
||||
ipClassMap {
|
||||
ipType
|
||||
requests
|
||||
}
|
||||
}
|
||||
dimensions {
|
||||
datetime
|
||||
}
|
||||
}
|
||||
firewallEventsAdaptiveGroups(
|
||||
limit: $limit
|
||||
filter: { datetime_geq: $mintime, datetime_lt: $maxtime }
|
||||
) {
|
||||
count
|
||||
dimensions {
|
||||
action
|
||||
source
|
||||
ruleId
|
||||
clientRequestHTTPHost
|
||||
clientCountryName
|
||||
botScore
|
||||
botScoreSrcName
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`);
|
||||
|
||||
export const HTTPMetricsQueryNoBots = graphql(`
|
||||
query HTTPMetricsNoBots(
|
||||
$zoneIDs: [string!]
|
||||
$mintime: Time!
|
||||
$maxtime: Time!
|
||||
$limit: uint64!
|
||||
) {
|
||||
viewer {
|
||||
zones(filter: { zoneTag_in: $zoneIDs }) {
|
||||
zoneTag
|
||||
httpRequests1mGroups(
|
||||
limit: $limit
|
||||
filter: { datetime_geq: $mintime, datetime_lt: $maxtime }
|
||||
) {
|
||||
uniq {
|
||||
uniques
|
||||
}
|
||||
sum {
|
||||
browserMap {
|
||||
pageViews
|
||||
uaBrowserFamily
|
||||
}
|
||||
bytes
|
||||
cachedBytes
|
||||
cachedRequests
|
||||
contentTypeMap {
|
||||
bytes
|
||||
requests
|
||||
edgeResponseContentTypeName
|
||||
}
|
||||
countryMap {
|
||||
bytes
|
||||
clientCountryName
|
||||
requests
|
||||
threats
|
||||
}
|
||||
encryptedBytes
|
||||
encryptedRequests
|
||||
pageViews
|
||||
requests
|
||||
responseStatusMap {
|
||||
edgeResponseStatus
|
||||
requests
|
||||
}
|
||||
threatPathingMap {
|
||||
requests
|
||||
threatPathingName
|
||||
}
|
||||
threats
|
||||
clientHTTPVersionMap {
|
||||
clientHTTPProtocol
|
||||
requests
|
||||
}
|
||||
clientSSLMap {
|
||||
clientSSLProtocol
|
||||
requests
|
||||
}
|
||||
ipClassMap {
|
||||
ipType
|
||||
requests
|
||||
}
|
||||
}
|
||||
dimensions {
|
||||
datetime
|
||||
}
|
||||
}
|
||||
firewallEventsAdaptiveGroups(
|
||||
limit: $limit
|
||||
filter: { datetime_geq: $mintime, datetime_lt: $maxtime }
|
||||
) {
|
||||
count
|
||||
dimensions {
|
||||
action
|
||||
source
|
||||
ruleId
|
||||
clientRequestHTTPHost
|
||||
clientCountryName
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`);
|
||||
|
||||
export const FirewallMetricsQuery = graphql(`
|
||||
query FirewallMetrics(
|
||||
$zoneIDs: [string!]
|
||||
$mintime: Time!
|
||||
$maxtime: Time!
|
||||
$limit: uint64!
|
||||
) {
|
||||
viewer {
|
||||
zones(filter: { zoneTag_in: $zoneIDs }) {
|
||||
zoneTag
|
||||
firewallEventsAdaptiveGroups(
|
||||
limit: $limit
|
||||
filter: { datetime_geq: $mintime, datetime_lt: $maxtime }
|
||||
) {
|
||||
count
|
||||
dimensions {
|
||||
action
|
||||
source
|
||||
ruleId
|
||||
clientRequestHTTPHost
|
||||
clientCountryName
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`);
|
||||
|
||||
export const HealthCheckMetricsQuery = graphql(`
|
||||
query HealthCheckMetrics(
|
||||
$zoneIDs: [string!]
|
||||
$mintime: Time!
|
||||
$maxtime: Time!
|
||||
$limit: uint64!
|
||||
) {
|
||||
viewer {
|
||||
zones(filter: { zoneTag_in: $zoneIDs }) {
|
||||
zoneTag
|
||||
healthCheckEventsAdaptiveGroups(
|
||||
limit: $limit
|
||||
filter: { datetime_geq: $mintime, datetime_lt: $maxtime }
|
||||
) {
|
||||
count
|
||||
avg {
|
||||
rttMs
|
||||
timeToFirstByteMs
|
||||
tcpConnMs
|
||||
tlsHandshakeMs
|
||||
}
|
||||
dimensions {
|
||||
healthStatus
|
||||
originIP
|
||||
region
|
||||
fqdn
|
||||
failureReason
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`);
|
||||
|
||||
export const AdaptiveMetricsQuery = graphql(`
|
||||
query AdaptiveMetrics(
|
||||
$zoneIDs: [string!]
|
||||
$mintime: Time!
|
||||
$maxtime: Time!
|
||||
$limit: uint64!
|
||||
) {
|
||||
viewer {
|
||||
zones(filter: { zoneTag_in: $zoneIDs }) {
|
||||
zoneTag
|
||||
httpRequestsAdaptiveGroups(
|
||||
limit: $limit
|
||||
filter: {
|
||||
datetime_geq: $mintime
|
||||
datetime_lt: $maxtime
|
||||
cacheStatus_notin: ["hit"]
|
||||
originResponseStatus_in: [
|
||||
400
|
||||
404
|
||||
500
|
||||
502
|
||||
503
|
||||
504
|
||||
522
|
||||
523
|
||||
524
|
||||
]
|
||||
}
|
||||
) {
|
||||
count
|
||||
dimensions {
|
||||
originResponseStatus
|
||||
clientCountryName
|
||||
clientRequestHTTPHost
|
||||
}
|
||||
avg {
|
||||
originResponseDurationMs
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`);
|
||||
|
||||
export const EdgeCountryMetricsQuery = graphql(`
|
||||
query EdgeCountryMetrics(
|
||||
$zoneIDs: [string!]
|
||||
$mintime: Time!
|
||||
$maxtime: Time!
|
||||
$limit: uint64!
|
||||
) {
|
||||
viewer {
|
||||
zones(filter: { zoneTag_in: $zoneIDs }) {
|
||||
zoneTag
|
||||
httpRequestsEdgeCountryHost: httpRequestsAdaptiveGroups(
|
||||
limit: $limit
|
||||
filter: { datetime_geq: $mintime, datetime_lt: $maxtime }
|
||||
) {
|
||||
count
|
||||
dimensions {
|
||||
edgeResponseStatus
|
||||
clientCountryName
|
||||
clientRequestHTTPHost
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`);
|
||||
|
||||
export const ColoMetricsQuery = graphql(`
|
||||
query ColoMetrics(
|
||||
$zoneIDs: [string!]
|
||||
$mintime: Time!
|
||||
$maxtime: Time!
|
||||
$limit: uint64!
|
||||
) {
|
||||
viewer {
|
||||
zones(filter: { zoneTag_in: $zoneIDs }) {
|
||||
zoneTag
|
||||
httpRequestsAdaptiveGroups(
|
||||
limit: $limit
|
||||
filter: { datetime_geq: $mintime, datetime_lt: $maxtime }
|
||||
) {
|
||||
count
|
||||
avg {
|
||||
sampleInterval
|
||||
}
|
||||
dimensions {
|
||||
clientRequestHTTPHost
|
||||
coloCode
|
||||
datetime
|
||||
originResponseStatus
|
||||
}
|
||||
sum {
|
||||
edgeResponseBytes
|
||||
visits
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`);
|
||||
|
||||
export const ColoErrorMetricsQuery = graphql(`
|
||||
query ColoErrorMetrics(
|
||||
$zoneIDs: [string!]
|
||||
$mintime: Time!
|
||||
$maxtime: Time!
|
||||
$limit: uint64!
|
||||
) {
|
||||
viewer {
|
||||
zones(filter: { zoneTag_in: $zoneIDs }) {
|
||||
zoneTag
|
||||
httpRequestsAdaptiveGroups(
|
||||
limit: $limit
|
||||
filter: {
|
||||
datetime_geq: $mintime
|
||||
datetime_lt: $maxtime
|
||||
edgeResponseStatus_geq: 400
|
||||
}
|
||||
) {
|
||||
count
|
||||
dimensions {
|
||||
clientRequestHTTPHost
|
||||
coloCode
|
||||
edgeResponseStatus
|
||||
}
|
||||
sum {
|
||||
edgeResponseBytes
|
||||
visits
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`);
|
||||
|
||||
export const WorkerTotalsQuery = graphql(`
|
||||
query WorkerTotals(
|
||||
$accountID: string!
|
||||
$mintime: Time!
|
||||
$maxtime: Time!
|
||||
$limit: uint64!
|
||||
) {
|
||||
viewer {
|
||||
accounts(filter: { accountTag: $accountID }) {
|
||||
workersInvocationsAdaptive(
|
||||
limit: $limit
|
||||
filter: { datetime_geq: $mintime, datetime_lt: $maxtime }
|
||||
) {
|
||||
dimensions {
|
||||
scriptName
|
||||
status
|
||||
}
|
||||
sum {
|
||||
requests
|
||||
errors
|
||||
duration
|
||||
}
|
||||
quantiles {
|
||||
cpuTimeP50
|
||||
cpuTimeP75
|
||||
cpuTimeP99
|
||||
cpuTimeP999
|
||||
durationP50
|
||||
durationP75
|
||||
durationP99
|
||||
durationP999
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`);
|
||||
|
||||
// Note: Cloudflare's accounts filter only supports single accountTag, not accountTag_in
|
||||
// Use WorkerTotalsQuery for individual account queries
|
||||
|
||||
export const LoadBalancerMetricsQuery = graphql(`
|
||||
query LoadBalancerMetrics(
|
||||
$zoneIDs: [string!]
|
||||
$mintime: Time!
|
||||
$maxtime: Time!
|
||||
$limit: uint64!
|
||||
) {
|
||||
viewer {
|
||||
zones(filter: { zoneTag_in: $zoneIDs }) {
|
||||
zoneTag
|
||||
loadBalancingRequestsAdaptiveGroups(
|
||||
filter: { datetime_geq: $mintime, datetime_lt: $maxtime }
|
||||
limit: $limit
|
||||
) {
|
||||
count
|
||||
dimensions {
|
||||
lbName
|
||||
selectedPoolName
|
||||
selectedOriginName
|
||||
region
|
||||
proxied
|
||||
selectedPoolAvgRttMs
|
||||
selectedPoolHealthy
|
||||
steeringPolicy
|
||||
numberOriginsSelected
|
||||
}
|
||||
}
|
||||
loadBalancingRequestsAdaptive(
|
||||
filter: { datetime_geq: $mintime, datetime_lt: $maxtime }
|
||||
limit: $limit
|
||||
) {
|
||||
lbName
|
||||
pools {
|
||||
id
|
||||
poolName
|
||||
healthy
|
||||
healthCheckEnabled
|
||||
avgRttMs
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`);
|
||||
|
||||
export const LogpushAccountMetricsQuery = graphql(`
|
||||
query LogpushAccountMetrics(
|
||||
$accountID: string!
|
||||
$limit: uint64!
|
||||
$mintime: Time!
|
||||
$maxtime: Time!
|
||||
) {
|
||||
viewer {
|
||||
accounts(filter: { accountTag: $accountID }) {
|
||||
logpushHealthAdaptiveGroups(
|
||||
filter: {
|
||||
datetime_geq: $mintime
|
||||
datetime_lt: $maxtime
|
||||
status_neq: 200
|
||||
}
|
||||
limit: $limit
|
||||
) {
|
||||
count
|
||||
dimensions {
|
||||
jobId
|
||||
status
|
||||
destinationType
|
||||
datetime
|
||||
final
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`);
|
||||
|
||||
// Note: Cloudflare's accounts filter only supports single accountTag, not accountTag_in
|
||||
// Use LogpushAccountMetricsQuery for individual account queries
|
||||
|
||||
export const LogpushZoneMetricsQuery = graphql(`
|
||||
query LogpushZoneMetrics(
|
||||
$zoneIDs: [string!]
|
||||
$limit: uint64!
|
||||
$mintime: Time!
|
||||
$maxtime: Time!
|
||||
) {
|
||||
viewer {
|
||||
zones(filter: { zoneTag_in: $zoneIDs }) {
|
||||
zoneTag
|
||||
logpushHealthAdaptiveGroups(
|
||||
filter: {
|
||||
datetime_geq: $mintime
|
||||
datetime_lt: $maxtime
|
||||
status_neq: 200
|
||||
}
|
||||
limit: $limit
|
||||
) {
|
||||
count
|
||||
dimensions {
|
||||
jobId
|
||||
status
|
||||
destinationType
|
||||
datetime
|
||||
final
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`);
|
||||
|
||||
export const MagicTransitMetricsQuery = graphql(`
|
||||
query MagicTransitMetrics(
|
||||
$accountID: string!
|
||||
$limit: uint64!
|
||||
$mintime: Time!
|
||||
$maxtime: Time!
|
||||
) {
|
||||
viewer {
|
||||
accounts(filter: { accountTag: $accountID }) {
|
||||
magicTransitTunnelHealthChecksAdaptiveGroups(
|
||||
limit: $limit
|
||||
filter: { datetime_geq: $mintime, datetime_lt: $maxtime }
|
||||
) {
|
||||
count
|
||||
dimensions {
|
||||
active
|
||||
datetime
|
||||
edgeColoCity
|
||||
edgeColoCountry
|
||||
edgePopName
|
||||
remoteTunnelIPv4
|
||||
resultStatus
|
||||
siteName
|
||||
tunnelName
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`);
|
||||
|
||||
// Note: Cloudflare's accounts filter only supports single accountTag, not accountTag_in
|
||||
// Use MagicTransitMetricsQuery for individual account queries
|
||||
|
||||
export const RequestMethodMetricsQuery = graphql(`
|
||||
query RequestMethodMetrics(
|
||||
$zoneIDs: [string!]
|
||||
$mintime: Time!
|
||||
$maxtime: Time!
|
||||
$limit: uint64!
|
||||
) {
|
||||
viewer {
|
||||
zones(filter: { zoneTag_in: $zoneIDs }) {
|
||||
zoneTag
|
||||
httpRequestsAdaptiveGroups(
|
||||
limit: $limit
|
||||
filter: { datetime_geq: $mintime, datetime_lt: $maxtime }
|
||||
) {
|
||||
count
|
||||
dimensions {
|
||||
clientRequestHTTPMethodName
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`);
|
||||
|
||||
export const OriginStatusMetricsQuery = graphql(`
|
||||
query OriginStatusMetrics(
|
||||
$zoneIDs: [string!]
|
||||
$mintime: Time!
|
||||
$maxtime: Time!
|
||||
$limit: uint64!
|
||||
) {
|
||||
viewer {
|
||||
zones(filter: { zoneTag_in: $zoneIDs }) {
|
||||
zoneTag
|
||||
httpRequestsAdaptiveGroups(
|
||||
limit: $limit
|
||||
filter: { datetime_geq: $mintime, datetime_lt: $maxtime }
|
||||
) {
|
||||
count
|
||||
dimensions {
|
||||
originResponseStatus
|
||||
clientCountryName
|
||||
clientRequestHTTPHost
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`);
|
||||
|
||||
export const CacheMissMetricsQuery = graphql(`
|
||||
query CacheMissMetrics(
|
||||
$zoneIDs: [string!]
|
||||
$mintime: Time!
|
||||
$maxtime: Time!
|
||||
$limit: uint64!
|
||||
) {
|
||||
viewer {
|
||||
zones(filter: { zoneTag_in: $zoneIDs }) {
|
||||
zoneTag
|
||||
httpRequestsAdaptiveGroups(
|
||||
filter: {
|
||||
datetime_geq: $mintime
|
||||
datetime_lt: $maxtime
|
||||
cacheStatus: "miss"
|
||||
}
|
||||
limit: $limit
|
||||
) {
|
||||
count
|
||||
avg {
|
||||
originResponseDurationMs
|
||||
}
|
||||
dimensions {
|
||||
clientCountryName
|
||||
clientRequestHTTPHost
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`);
|
||||
169198
src/cloudflare/gql/schema.gql
Normal file
169198
src/cloudflare/gql/schema.gql
Normal file
File diff suppressed because it is too large
Load Diff
104
src/cloudflare/queries.ts
Normal file
104
src/cloudflare/queries.ts
Normal file
@@ -0,0 +1,104 @@
|
||||
import z from "zod";
|
||||
|
||||
/**
|
||||
* Zod schema for all supported metric query names.
|
||||
* Includes both account-level and zone-level queries.
|
||||
*/
|
||||
export const MetricQueryNameSchema = z.enum([
|
||||
// Account-level
|
||||
"worker-totals",
|
||||
"logpush-account",
|
||||
"magic-transit",
|
||||
// Zone-level
|
||||
"http-metrics",
|
||||
"adaptive-metrics",
|
||||
"edge-country-metrics",
|
||||
"colo-metrics",
|
||||
"colo-error-metrics",
|
||||
"request-method-metrics",
|
||||
"health-check-metrics",
|
||||
"load-balancer-metrics",
|
||||
"logpush-zone",
|
||||
"origin-status-metrics",
|
||||
"cache-miss-metrics",
|
||||
// REST API
|
||||
"ssl-certificates",
|
||||
"lb-weight-metrics",
|
||||
]);
|
||||
|
||||
/**
|
||||
* Union of all metric query names (account and zone level).
|
||||
*/
|
||||
export type MetricQueryName = z.infer<typeof MetricQueryNameSchema>;
|
||||
|
||||
/**
|
||||
* Account-scoped metric queries (require single accountTag).
|
||||
*/
|
||||
export const ACCOUNT_LEVEL_QUERIES = [
|
||||
"worker-totals",
|
||||
"logpush-account",
|
||||
"magic-transit",
|
||||
] as const;
|
||||
|
||||
/**
|
||||
* Union of account-level query names.
|
||||
*/
|
||||
export type AccountLevelQuery = (typeof ACCOUNT_LEVEL_QUERIES)[number];
|
||||
|
||||
/**
|
||||
* Zone-scoped metric queries (support multiple zoneIDs).
|
||||
*/
|
||||
export const ZONE_LEVEL_QUERIES = [
|
||||
"http-metrics",
|
||||
"adaptive-metrics",
|
||||
"edge-country-metrics",
|
||||
"colo-metrics",
|
||||
"colo-error-metrics",
|
||||
"request-method-metrics",
|
||||
"health-check-metrics",
|
||||
"load-balancer-metrics",
|
||||
"logpush-zone",
|
||||
"origin-status-metrics",
|
||||
"cache-miss-metrics",
|
||||
"ssl-certificates",
|
||||
"lb-weight-metrics",
|
||||
] as const;
|
||||
|
||||
/**
|
||||
* Union of zone-level query names.
|
||||
*/
|
||||
export type ZoneLevelQuery = (typeof ZONE_LEVEL_QUERIES)[number];
|
||||
|
||||
/**
|
||||
* Type guard for account-level queries.
|
||||
*
|
||||
* @param query Query name to check.
|
||||
* @returns True if query is account-level.
|
||||
*/
|
||||
export function isAccountLevelQuery(query: string): query is AccountLevelQuery {
|
||||
return (ACCOUNT_LEVEL_QUERIES as readonly string[]).includes(query);
|
||||
}
|
||||
|
||||
/**
|
||||
* Type guard for zone-level queries.
|
||||
*
|
||||
* @param query Query name to check.
|
||||
* @returns True if query is zone-level.
|
||||
*/
|
||||
export function isZoneLevelQuery(query: string): query is ZoneLevelQuery {
|
||||
return (ZONE_LEVEL_QUERIES as readonly string[]).includes(query);
|
||||
}
|
||||
|
||||
/**
|
||||
* Query types available on free tier accounts.
|
||||
*/
|
||||
export const FREE_TIER_QUERIES = [
|
||||
"worker-totals",
|
||||
"logpush-account",
|
||||
"magic-transit",
|
||||
] as const;
|
||||
|
||||
/**
|
||||
* Type for free tier query names.
|
||||
*/
|
||||
export type FreeTierQuery = (typeof FREE_TIER_QUERIES)[number];
|
||||
1191
src/components/LandingPage.tsx
Normal file
1191
src/components/LandingPage.tsx
Normal file
File diff suppressed because it is too large
Load Diff
349
src/components/LandingPageScript.tsx
Normal file
349
src/components/LandingPageScript.tsx
Normal file
@@ -0,0 +1,349 @@
|
||||
import { html } from "hono/html";
|
||||
import type { FC } from "hono/jsx";
|
||||
|
||||
type Props = { metricsPath: string };
|
||||
|
||||
export const LandingPageScript: FC<Props> = ({ metricsPath }) => {
|
||||
return html`
|
||||
<script>
|
||||
// Config state management
|
||||
let serverConfig = {};
|
||||
let localConfig = {};
|
||||
let defaultConfig = {};
|
||||
let dirtyFields = new Set();
|
||||
|
||||
// Config field definitions
|
||||
const configFields = [
|
||||
'queryLimit', 'scrapeDelaySeconds', 'timeWindowSeconds', 'metricRefreshIntervalSeconds',
|
||||
'accountListCacheTtlSeconds', 'zoneListCacheTtlSeconds', 'sslCertsCacheTtlSeconds',
|
||||
'logLevel', 'logFormat', 'cfAccounts', 'cfZones', 'cfFreeTierAccounts', 'metricsDenylist',
|
||||
'excludeHost', 'httpStatusGroup'
|
||||
];
|
||||
|
||||
// Load config on page load
|
||||
async function loadConfig() {
|
||||
try {
|
||||
const [configRes, defaultsRes] = await Promise.all([
|
||||
fetch('/config'),
|
||||
fetch('/config/defaults')
|
||||
]);
|
||||
if (!configRes.ok) throw new Error('Failed to load config');
|
||||
if (!defaultsRes.ok) throw new Error('Failed to load defaults');
|
||||
serverConfig = await configRes.json();
|
||||
defaultConfig = await defaultsRes.json();
|
||||
localConfig = { ...serverConfig };
|
||||
dirtyFields.clear();
|
||||
populateForm();
|
||||
updateSaveButton();
|
||||
} catch (e) {
|
||||
console.error('Failed to load config:', e);
|
||||
document.getElementById('config-status').textContent = 'Failed to load configuration';
|
||||
}
|
||||
}
|
||||
|
||||
// Populate form fields from config
|
||||
function populateForm() {
|
||||
// Number fields
|
||||
['queryLimit', 'scrapeDelaySeconds', 'timeWindowSeconds', 'metricRefreshIntervalSeconds',
|
||||
'accountListCacheTtlSeconds', 'zoneListCacheTtlSeconds', 'sslCertsCacheTtlSeconds'].forEach(key => {
|
||||
const el = document.getElementById('cfg-' + key);
|
||||
if (el) el.value = localConfig[key] ?? '';
|
||||
});
|
||||
|
||||
// Select fields
|
||||
['logLevel', 'logFormat'].forEach(key => {
|
||||
const el = document.getElementById('cfg-' + key);
|
||||
if (el) el.value = localConfig[key] ?? '';
|
||||
});
|
||||
|
||||
// Text fields (nullable)
|
||||
['cfAccounts', 'cfZones'].forEach(key => {
|
||||
const el = document.getElementById('cfg-' + key);
|
||||
const allCheckbox = document.getElementById('cfg-' + key + '-all');
|
||||
if (el && allCheckbox) {
|
||||
const isAll = localConfig[key] === null;
|
||||
allCheckbox.checked = isAll;
|
||||
el.value = isAll ? '' : (localConfig[key] ?? '');
|
||||
el.disabled = isAll;
|
||||
}
|
||||
});
|
||||
|
||||
// Text fields (non-nullable)
|
||||
['cfFreeTierAccounts', 'metricsDenylist'].forEach(key => {
|
||||
const el = document.getElementById('cfg-' + key);
|
||||
if (el) el.value = localConfig[key] ?? '';
|
||||
});
|
||||
|
||||
// Toggle switches
|
||||
['excludeHost', 'httpStatusGroup'].forEach(key => {
|
||||
const el = document.getElementById('cfg-' + key);
|
||||
if (el) {
|
||||
const isActive = localConfig[key] === true;
|
||||
el.classList.toggle('active', isActive);
|
||||
el.setAttribute('aria-checked', isActive.toString());
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Track field changes
|
||||
function onFieldChange(key, value) {
|
||||
localConfig[key] = value;
|
||||
if (JSON.stringify(value) !== JSON.stringify(serverConfig[key])) {
|
||||
dirtyFields.add(key);
|
||||
} else {
|
||||
dirtyFields.delete(key);
|
||||
}
|
||||
updateSaveButton();
|
||||
}
|
||||
|
||||
// Toggle for "All accounts/zones" checkboxes
|
||||
function toggleAllFilter(key, isAll) {
|
||||
const el = document.getElementById('cfg-' + key);
|
||||
if (el) {
|
||||
el.disabled = isAll;
|
||||
if (isAll) {
|
||||
el.value = '';
|
||||
onFieldChange(key, null);
|
||||
} else {
|
||||
onFieldChange(key, el.value || null);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Toggle switch handler
|
||||
function toggleSwitch(key) {
|
||||
const el = document.getElementById('cfg-' + key);
|
||||
if (el) {
|
||||
const newValue = !el.classList.contains('active');
|
||||
el.classList.toggle('active', newValue);
|
||||
el.setAttribute('aria-checked', newValue.toString());
|
||||
onFieldChange(key, newValue);
|
||||
}
|
||||
}
|
||||
|
||||
// Save all dirty fields
|
||||
async function saveConfig() {
|
||||
const btn = document.getElementById('save-btn');
|
||||
btn.disabled = true;
|
||||
btn.textContent = 'Saving...';
|
||||
|
||||
const errors = [];
|
||||
for (const key of dirtyFields) {
|
||||
try {
|
||||
const res = await fetch('/config/' + key, {
|
||||
method: 'PUT',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ value: localConfig[key] })
|
||||
});
|
||||
if (!res.ok) {
|
||||
const data = await res.json();
|
||||
errors.push({ key, error: data.error || 'Unknown error' });
|
||||
}
|
||||
} catch (e) {
|
||||
errors.push({ key, error: e.message });
|
||||
}
|
||||
}
|
||||
|
||||
if (errors.length === 0) {
|
||||
serverConfig = { ...localConfig };
|
||||
dirtyFields.clear();
|
||||
showToast('Configuration saved', 'success');
|
||||
} else {
|
||||
showToast('Failed to save: ' + errors.map(e => e.key).join(', '), 'error');
|
||||
}
|
||||
|
||||
btn.textContent = 'Save Changes';
|
||||
updateSaveButton();
|
||||
}
|
||||
|
||||
// Reset single field to default (updates UI only, requires Save to persist)
|
||||
function resetField(key) {
|
||||
const defaultValue = defaultConfig[key];
|
||||
localConfig[key] = defaultValue;
|
||||
if (JSON.stringify(defaultValue) !== JSON.stringify(serverConfig[key])) {
|
||||
dirtyFields.add(key);
|
||||
} else {
|
||||
dirtyFields.delete(key);
|
||||
}
|
||||
updateFieldUI(key);
|
||||
updateSaveButton();
|
||||
}
|
||||
|
||||
// Reset all config to defaults (updates UI only, requires Save to persist)
|
||||
function resetAllConfig() {
|
||||
if (!confirm('Reset all configuration to defaults?')) return;
|
||||
localConfig = { ...defaultConfig };
|
||||
dirtyFields.clear();
|
||||
for (const key of configFields) {
|
||||
if (JSON.stringify(defaultConfig[key]) !== JSON.stringify(serverConfig[key])) {
|
||||
dirtyFields.add(key);
|
||||
}
|
||||
}
|
||||
populateForm();
|
||||
updateSaveButton();
|
||||
}
|
||||
|
||||
// Update single field UI
|
||||
function updateFieldUI(key) {
|
||||
const el = document.getElementById('cfg-' + key);
|
||||
if (!el) return;
|
||||
|
||||
if (['excludeHost', 'httpStatusGroup'].includes(key)) {
|
||||
const isActive = localConfig[key] === true;
|
||||
el.classList.toggle('active', isActive);
|
||||
el.setAttribute('aria-checked', isActive.toString());
|
||||
} else if (['cfAccounts', 'cfZones'].includes(key)) {
|
||||
const allCheckbox = document.getElementById('cfg-' + key + '-all');
|
||||
const isAll = localConfig[key] === null;
|
||||
if (allCheckbox) allCheckbox.checked = isAll;
|
||||
el.disabled = isAll;
|
||||
el.value = isAll ? '' : (localConfig[key] ?? '');
|
||||
} else {
|
||||
el.value = localConfig[key] ?? '';
|
||||
}
|
||||
}
|
||||
|
||||
// Update save button state
|
||||
function updateSaveButton() {
|
||||
const btn = document.getElementById('save-btn');
|
||||
const status = document.getElementById('config-status');
|
||||
btn.disabled = dirtyFields.size === 0;
|
||||
if (dirtyFields.size > 0) {
|
||||
status.textContent = dirtyFields.size + ' unsaved change' + (dirtyFields.size > 1 ? 's' : '');
|
||||
} else {
|
||||
status.textContent = 'All changes saved';
|
||||
}
|
||||
}
|
||||
|
||||
// Tab switching
|
||||
function switchTab(tabId) {
|
||||
document.querySelectorAll('.tab-btn').forEach(btn => btn.classList.remove('active'));
|
||||
document.querySelectorAll('.tab-panel').forEach(panel => panel.classList.add('hidden'));
|
||||
document.querySelector('[data-tab="' + tabId + '"]').classList.add('active');
|
||||
document.getElementById('tab-' + tabId).classList.remove('hidden');
|
||||
}
|
||||
|
||||
// Toast notification
|
||||
function showToast(message, type) {
|
||||
const toast = document.getElementById('toast');
|
||||
toast.textContent = message;
|
||||
toast.className = 'toast ' + type + ' show';
|
||||
setTimeout(() => {
|
||||
toast.classList.remove('show');
|
||||
}, 3000);
|
||||
}
|
||||
|
||||
async function checkHealth() {
|
||||
const indicator = document.getElementById('health-indicator');
|
||||
const status = document.getElementById('health-status');
|
||||
const badge = document.getElementById('health-badge');
|
||||
const cfApiIndicator = document.getElementById('cf-api-indicator');
|
||||
const cfApiLatency = document.getElementById('cf-api-latency');
|
||||
const cfApiError = document.getElementById('cf-api-error');
|
||||
const gqlApiIndicator = document.getElementById('gql-api-indicator');
|
||||
const gqlApiLatency = document.getElementById('gql-api-latency');
|
||||
const gqlApiError = document.getElementById('gql-api-error');
|
||||
const healthTimestamp = document.getElementById('health-timestamp');
|
||||
|
||||
const setCheckStatus = (indicatorEl, latencyEl, errorEl, check) => {
|
||||
if (check.status === 'healthy') {
|
||||
indicatorEl.className = 'w-2 h-2 rounded-full bg-green-500';
|
||||
latencyEl.className = 'text-xs font-mono text-green-600';
|
||||
latencyEl.textContent = check.latency_ms + 'ms';
|
||||
errorEl.classList.add('hidden');
|
||||
errorEl.textContent = '';
|
||||
} else {
|
||||
indicatorEl.className = 'w-2 h-2 rounded-full bg-red-500';
|
||||
latencyEl.className = 'text-xs font-mono text-red-600';
|
||||
latencyEl.textContent = check.latency_ms + 'ms';
|
||||
if (check.error) {
|
||||
errorEl.textContent = check.error;
|
||||
errorEl.classList.remove('hidden');
|
||||
} else {
|
||||
errorEl.classList.add('hidden');
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
try {
|
||||
const res = await fetch('/health');
|
||||
const data = await res.json();
|
||||
|
||||
setCheckStatus(cfApiIndicator, cfApiLatency, cfApiError, data.checks.cloudflare_api);
|
||||
setCheckStatus(gqlApiIndicator, gqlApiLatency, gqlApiError, data.checks.graphql_api);
|
||||
|
||||
const ts = new Date(data.timestamp);
|
||||
healthTimestamp.textContent = 'Last checked ' + ts.toLocaleTimeString();
|
||||
|
||||
if (data.status === 'healthy') {
|
||||
indicator.className = 'w-3 h-3 rounded-full bg-green-500 pulse-dot';
|
||||
status.textContent = 'All systems operational';
|
||||
badge.className = 'px-4 py-2 rounded-full text-sm font-medium bg-green-500/10 text-green-600 border border-green-500/20';
|
||||
badge.textContent = 'Healthy';
|
||||
} else {
|
||||
const unhealthyChecks = [];
|
||||
if (data.checks.cloudflare_api.status !== 'healthy') unhealthyChecks.push('REST API');
|
||||
if (data.checks.graphql_api.status !== 'healthy') unhealthyChecks.push('GraphQL');
|
||||
indicator.className = 'w-3 h-3 rounded-full bg-red-500';
|
||||
status.textContent = 'Degraded: ' + unhealthyChecks.join(', ');
|
||||
badge.className = 'px-4 py-2 rounded-full text-sm font-medium bg-red-500/10 text-red-600 border border-red-500/20';
|
||||
badge.textContent = 'Unhealthy';
|
||||
}
|
||||
} catch {
|
||||
indicator.className = 'w-3 h-3 rounded-full bg-red-500';
|
||||
status.textContent = 'Unable to reach health endpoint';
|
||||
badge.className = 'px-4 py-2 rounded-full text-sm font-medium bg-red-500/10 text-red-600 border border-red-500/20';
|
||||
badge.textContent = 'Error';
|
||||
cfApiIndicator.className = 'w-2 h-2 rounded-full bg-gray-300';
|
||||
cfApiLatency.textContent = '—';
|
||||
cfApiError.classList.add('hidden');
|
||||
gqlApiIndicator.className = 'w-2 h-2 rounded-full bg-gray-300';
|
||||
gqlApiLatency.textContent = '—';
|
||||
gqlApiError.classList.add('hidden');
|
||||
healthTimestamp.textContent = 'Check failed';
|
||||
}
|
||||
}
|
||||
|
||||
async function fetchMetrics() {
|
||||
const output = document.getElementById('metrics-output');
|
||||
const container = document.getElementById('metrics-container');
|
||||
const count = document.getElementById('metrics-count');
|
||||
const timestamp = document.getElementById('metrics-timestamp');
|
||||
const indicator = document.getElementById('metrics-indicator');
|
||||
const refreshIcon = document.getElementById('refresh-icon');
|
||||
indicator.className = 'w-2 h-2 rounded-full bg-gray-300';
|
||||
refreshIcon.classList.add('spin-ccw');
|
||||
const scrollTop = container.scrollTop;
|
||||
const minSpin = new Promise(r => setTimeout(r, 500));
|
||||
try {
|
||||
const [res] = await Promise.all([fetch('${metricsPath}'), minSpin]);
|
||||
if (!res.ok) {
|
||||
throw new Error('HTTP ' + res.status);
|
||||
}
|
||||
const text = await res.text();
|
||||
output.textContent = text || '# No metrics available';
|
||||
container.scrollTop = scrollTop;
|
||||
const lines = text.split('\\n').filter(l => l && !l.startsWith('#'));
|
||||
count.textContent = lines.length + ' metrics';
|
||||
timestamp.textContent = 'Updated ' + new Date().toLocaleTimeString();
|
||||
indicator.className = 'w-2 h-2 rounded-full bg-green-500 pulse-dot';
|
||||
} catch (e) {
|
||||
await minSpin;
|
||||
output.textContent = '# Error fetching metrics: ' + e.message;
|
||||
container.scrollTop = scrollTop;
|
||||
count.textContent = '-';
|
||||
timestamp.textContent = 'Failed';
|
||||
indicator.className = 'w-2 h-2 rounded-full bg-red-500';
|
||||
}
|
||||
refreshIcon.classList.remove('spin-ccw');
|
||||
}
|
||||
|
||||
// Initialize on page load
|
||||
loadConfig();
|
||||
checkHealth();
|
||||
fetchMetrics();
|
||||
setInterval(checkHealth, 10000);
|
||||
setInterval(fetchMetrics, 10000);
|
||||
</script>
|
||||
`;
|
||||
};
|
||||
400
src/durable-objects/AccountMetricCoordinator.ts
Normal file
400
src/durable-objects/AccountMetricCoordinator.ts
Normal file
@@ -0,0 +1,400 @@
|
||||
import { DurableObject } from "cloudflare:workers";
|
||||
import {
|
||||
ACCOUNT_LEVEL_QUERIES,
|
||||
getCloudflareMetricsClient,
|
||||
ZONE_LEVEL_QUERIES,
|
||||
} from "../cloudflare/client";
|
||||
import { FREE_TIER_QUERIES } from "../cloudflare/queries";
|
||||
import { filterZonesByIds, parseCommaSeparated } from "../lib/filters";
|
||||
import { createLogger, type Logger } from "../lib/logger";
|
||||
import type { MetricDefinition } from "../lib/metrics";
|
||||
import { getConfig, type ResolvedConfig } from "../lib/runtime-config";
|
||||
import { getTimeRange } from "../lib/time";
|
||||
import type { Zone } from "../lib/types";
|
||||
import { MetricExporter } from "./MetricExporter";
|
||||
|
||||
// Durable Object storage key under which coordinator state is persisted.
const STATE_KEY = "state";

// Account-scoped queries: all account-level + zone-batched queries
// (excludes the zone-scoped REST queries that get one DO per zone below).
const ACCOUNT_SCOPED_QUERIES = [
  ...ACCOUNT_LEVEL_QUERIES,
  ...ZONE_LEVEL_QUERIES.filter(
    (q) => q !== "ssl-certificates" && q !== "lb-weight-metrics",
  ),
] as const;

// Zone-scoped REST queries (one DO per zone for parallelization and fault isolation)
const ZONE_SCOPED_QUERIES = ["ssl-certificates", "lb-weight-metrics"] as const;

// Persisted state for one AccountMetricCoordinator Durable Object.
type AccountMetricCoordinatorState = {
  accountId: string; // Cloudflare account ID this coordinator owns
  accountName: string; // account display name, used for logger tagging
  zones: Zone[]; // cached zone list (after whitelist filtering)
  totalZoneCount: number; // zone count before whitelist filtering
  firewallRules: Record<string, string>; // firewall rule ID -> name, merged across all zones
  lastZoneFetch: number; // epoch ms of last zone-list fetch (0 = never)
  lastRefresh: number; // epoch ms of last successful refresh cycle (0 = never)
};
|
||||
|
||||
/**
|
||||
* Coordinates metric collection for a Cloudflare account and manages zone list caching and distributes work to MetricExporter DOs.
|
||||
*/
|
||||
export class AccountMetricCoordinator extends DurableObject<Env> {
|
||||
private state: AccountMetricCoordinatorState | undefined;
|
||||
|
||||
constructor(ctx: DurableObjectState, env: Env) {
|
||||
super(ctx, env);
|
||||
ctx.blockConcurrencyWhile(async () => {
|
||||
this.state =
|
||||
await ctx.storage.get<AccountMetricCoordinatorState>(STATE_KEY);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates logger instance with account-specific tag.
|
||||
*
|
||||
* @param config Resolved runtime configuration.
|
||||
* @returns Logger instance.
|
||||
*/
|
||||
private createLogger(config: ResolvedConfig): Logger {
|
||||
const state = this.getState();
|
||||
const tag = state.accountName.toLowerCase().replace(/[ -]/g, "_");
|
||||
return createLogger("account_coordinator", {
|
||||
format: config.logFormat,
|
||||
level: config.logLevel,
|
||||
}).child(tag);
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets current coordinator state.
|
||||
*
|
||||
* @returns Current state.
|
||||
* @throws {Error} When state not initialized.
|
||||
*/
|
||||
private getState(): AccountMetricCoordinatorState {
|
||||
if (this.state === undefined) {
|
||||
console.error(
|
||||
"[account_coordinator] State not initialized - initialize() must be called first",
|
||||
);
|
||||
throw new Error("State not initialized");
|
||||
}
|
||||
return this.state;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets or creates coordinator stub for account and ensures coordinator is initialized before returning.
|
||||
*
|
||||
* @param accountId Cloudflare account ID.
|
||||
* @param accountName Account display name for logging.
|
||||
* @param env Worker environment bindings.
|
||||
* @returns Initialized coordinator stub.
|
||||
*/
|
||||
static async get(accountId: string, accountName: string, env: Env) {
|
||||
const stub = env.AccountMetricCoordinator.getByName(`account:${accountId}`);
|
||||
await stub.initialize(accountId, accountName);
|
||||
return stub;
|
||||
}
|
||||
|
||||
/**
|
||||
* Initializes coordinator state and starts alarm cycle. Idempotent safe to call multiple times.
|
||||
*
|
||||
* @param accountId Cloudflare account ID.
|
||||
* @param accountName Account display name for logging.
|
||||
*/
|
||||
async initialize(accountId: string, accountName: string): Promise<void> {
|
||||
if (this.state !== undefined) {
|
||||
return;
|
||||
}
|
||||
|
||||
const config = await getConfig(this.env);
|
||||
|
||||
this.state = {
|
||||
accountId,
|
||||
accountName,
|
||||
zones: [],
|
||||
totalZoneCount: 0,
|
||||
firewallRules: {},
|
||||
lastZoneFetch: 0,
|
||||
lastRefresh: 0,
|
||||
};
|
||||
|
||||
await this.ctx.storage.put(STATE_KEY, this.state);
|
||||
await this.ctx.storage.setAlarm(
|
||||
Date.now() + config.metricRefreshIntervalSeconds * 1000,
|
||||
);
|
||||
}
|
||||
|
||||
override async alarm(): Promise<void> {
|
||||
const config = await getConfig(this.env);
|
||||
const logger = this.createLogger(config);
|
||||
logger.info("Alarm fired, refreshing zones");
|
||||
await this.refresh(config, logger);
|
||||
}
|
||||
|
||||
/**
|
||||
* Refreshes zone list and pushes context to exporters. Exporters handle their own metric fetching via alarms.
|
||||
*
|
||||
* @param config Resolved runtime configuration.
|
||||
* @param logger Logger instance.
|
||||
*/
|
||||
private async refresh(config: ResolvedConfig, logger: Logger): Promise<void> {
|
||||
logger.info("Starting refresh");
|
||||
|
||||
try {
|
||||
await this.refreshZonesAndPushContext(config, logger);
|
||||
|
||||
this.state = { ...this.getState(), lastRefresh: Date.now() };
|
||||
await this.ctx.storage.put(STATE_KEY, this.state);
|
||||
} catch (error) {
|
||||
const msg = error instanceof Error ? error.message : String(error);
|
||||
logger.error("Refresh failed", { error: msg });
|
||||
}
|
||||
|
||||
await this.ctx.storage.setAlarm(
|
||||
Date.now() + config.metricRefreshIntervalSeconds * 1000,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Refreshes zone list if stale then pushes context to all exporters.
|
||||
*
|
||||
* @param config Resolved runtime configuration.
|
||||
* @param logger Logger instance.
|
||||
*/
|
||||
private async refreshZonesAndPushContext(
|
||||
config: ResolvedConfig,
|
||||
logger: Logger,
|
||||
): Promise<void> {
|
||||
const state = this.getState();
|
||||
const ttlMs = config.zoneListCacheTtlSeconds * 1000;
|
||||
const isStale = Date.now() - state.lastZoneFetch >= ttlMs;
|
||||
|
||||
// Calculate shared time range once for all exporters in this refresh cycle
|
||||
const timeRange = getTimeRange(
|
||||
config.scrapeDelaySeconds,
|
||||
config.timeWindowSeconds,
|
||||
);
|
||||
|
||||
let zones = state.zones;
|
||||
let firewallRules = state.firewallRules;
|
||||
|
||||
if (isStale || zones.length === 0) {
|
||||
const client = getCloudflareMetricsClient(this.env);
|
||||
logger.info("Refreshing zones");
|
||||
|
||||
const allZones = await client.getZones(state.accountId);
|
||||
|
||||
// Apply zone whitelist if set
|
||||
const cfZonesSet =
|
||||
config.cfZones !== null ? parseCommaSeparated(config.cfZones) : null;
|
||||
zones =
|
||||
cfZonesSet !== null ? filterZonesByIds(allZones, cfZonesSet) : allZones;
|
||||
|
||||
// Build firewall rules map
|
||||
firewallRules = {};
|
||||
const rulesResults = await Promise.all(
|
||||
zones.map((zone) =>
|
||||
client.getFirewallRules(zone.id).catch((error) => {
|
||||
const msg = error instanceof Error ? error.message : String(error);
|
||||
logger.warn("Failed to fetch firewall rules", {
|
||||
zone: zone.name,
|
||||
error: msg,
|
||||
});
|
||||
return new Map<string, string>();
|
||||
}),
|
||||
),
|
||||
);
|
||||
for (const rules of rulesResults) {
|
||||
for (const [id, name] of rules) {
|
||||
firewallRules[id] = name;
|
||||
}
|
||||
}
|
||||
|
||||
this.state = {
|
||||
...state,
|
||||
zones,
|
||||
totalZoneCount: allZones.length,
|
||||
firewallRules,
|
||||
lastZoneFetch: Date.now(),
|
||||
};
|
||||
await this.ctx.storage.put(STATE_KEY, this.state);
|
||||
|
||||
logger.info("Zones cached", {
|
||||
total: allZones.length,
|
||||
filtered: zones.length,
|
||||
});
|
||||
}
|
||||
|
||||
// Check if this account is marked as free tier
|
||||
const cfFreeTierSet = parseCommaSeparated(config.cfFreeTierAccounts);
|
||||
const isFreeTierAccount = cfFreeTierSet.has(state.accountId);
|
||||
|
||||
// Filter queries based on account tier
|
||||
const accountQueries = isFreeTierAccount
|
||||
? ACCOUNT_SCOPED_QUERIES.filter((q) =>
|
||||
FREE_TIER_QUERIES.includes(q as (typeof FREE_TIER_QUERIES)[number]),
|
||||
)
|
||||
: ACCOUNT_SCOPED_QUERIES;
|
||||
|
||||
// Push zone context to account-scoped exporters AND initialize zone-scoped exporters concurrently
|
||||
await Promise.all([
|
||||
// Account-scoped exporters
|
||||
...accountQueries.map(async (query) => {
|
||||
try {
|
||||
const exporter = await MetricExporter.get(
|
||||
`account:${state.accountId}:${query}`,
|
||||
this.env,
|
||||
);
|
||||
await exporter.updateZoneContext(
|
||||
state.accountId,
|
||||
state.accountName,
|
||||
zones,
|
||||
firewallRules,
|
||||
timeRange,
|
||||
);
|
||||
} catch (error) {
|
||||
const msg = error instanceof Error ? error.message : String(error);
|
||||
logger.error("Failed to update zone context", {
|
||||
query,
|
||||
error: msg,
|
||||
});
|
||||
}
|
||||
}),
|
||||
// Zone-scoped exporters (skip for free tier accounts)
|
||||
...(isFreeTierAccount
|
||||
? []
|
||||
: zones.flatMap((zone) =>
|
||||
ZONE_SCOPED_QUERIES.map(async (query) => {
|
||||
try {
|
||||
const exporter = await MetricExporter.get(
|
||||
`zone:${zone.id}:${query}`,
|
||||
this.env,
|
||||
);
|
||||
await exporter.initializeZone(
|
||||
zone,
|
||||
state.accountId,
|
||||
state.accountName,
|
||||
timeRange,
|
||||
);
|
||||
} catch (error) {
|
||||
const msg =
|
||||
error instanceof Error ? error.message : String(error);
|
||||
logger.error("Failed to initialize zone exporter", {
|
||||
zone: zone.name,
|
||||
query,
|
||||
error: msg,
|
||||
});
|
||||
}
|
||||
}),
|
||||
)),
|
||||
]);
|
||||
|
||||
logger.info("Context pushed to exporters", {
|
||||
account_scoped: accountQueries.length,
|
||||
zone_scoped: isFreeTierAccount
|
||||
? 0
|
||||
: zones.length * ZONE_SCOPED_QUERIES.length,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Collects and aggregates metrics from all MetricExporter DOs.
|
||||
*
|
||||
* @returns Metrics and zone counts.
|
||||
*/
|
||||
async export(): Promise<{
|
||||
metrics: MetricDefinition[];
|
||||
zoneCounts: { total: number; filtered: number; processed: number };
|
||||
}> {
|
||||
const config = await getConfig(this.env);
|
||||
const logger = this.createLogger(config);
|
||||
|
||||
logger.info("Exporting metrics");
|
||||
|
||||
// Ensure exporters have been initialized
|
||||
const staleThreshold = config.metricRefreshIntervalSeconds * 2 * 1000;
|
||||
const initialState = this.getState();
|
||||
if (
|
||||
initialState.lastRefresh === 0 ||
|
||||
Date.now() - initialState.lastRefresh > staleThreshold
|
||||
) {
|
||||
await this.refresh(config, logger);
|
||||
}
|
||||
|
||||
// Re-get state after potential refresh (this.state may have been updated)
|
||||
const state = this.getState();
|
||||
|
||||
// Check if this account is marked as free tier
|
||||
const cfFreeTierSet = parseCommaSeparated(config.cfFreeTierAccounts);
|
||||
const isFreeTierAccount = cfFreeTierSet.has(state.accountId);
|
||||
|
||||
// Filter queries based on account tier
|
||||
const accountQueries = isFreeTierAccount
|
||||
? ACCOUNT_SCOPED_QUERIES.filter((q) =>
|
||||
FREE_TIER_QUERIES.includes(q as (typeof FREE_TIER_QUERIES)[number]),
|
||||
)
|
||||
: ACCOUNT_SCOPED_QUERIES;
|
||||
|
||||
// Collect from account-scoped exporters
|
||||
const accountMetricsResults = await Promise.all(
|
||||
accountQueries.map(async (query) => {
|
||||
try {
|
||||
const exporter = await MetricExporter.get(
|
||||
`account:${state.accountId}:${query}`,
|
||||
this.env,
|
||||
);
|
||||
return await exporter.export();
|
||||
} catch (error) {
|
||||
const msg = error instanceof Error ? error.message : String(error);
|
||||
logger.error("Failed to export account metrics", {
|
||||
query,
|
||||
error: msg,
|
||||
});
|
||||
return [];
|
||||
}
|
||||
}),
|
||||
);
|
||||
|
||||
// Collect from zone-scoped exporters (skip for free tier accounts)
|
||||
const zoneMetricsResults = isFreeTierAccount
|
||||
? []
|
||||
: await Promise.all(
|
||||
state.zones.flatMap((zone) =>
|
||||
ZONE_SCOPED_QUERIES.map(async (query) => {
|
||||
try {
|
||||
const exporter = await MetricExporter.get(
|
||||
`zone:${zone.id}:${query}`,
|
||||
this.env,
|
||||
);
|
||||
return await exporter.export();
|
||||
} catch (error) {
|
||||
const msg =
|
||||
error instanceof Error ? error.message : String(error);
|
||||
logger.error("Failed to export zone metrics", {
|
||||
zone: zone.name,
|
||||
query,
|
||||
error: msg,
|
||||
});
|
||||
return [];
|
||||
}
|
||||
}),
|
||||
),
|
||||
);
|
||||
|
||||
const allMetrics = [...accountMetricsResults, ...zoneMetricsResults].flat();
|
||||
|
||||
// Count processed zones (zones with at least one metric result)
|
||||
const processedZones = zoneMetricsResults.filter(
|
||||
(r) => r.length > 0,
|
||||
).length;
|
||||
|
||||
return {
|
||||
metrics: allMetrics,
|
||||
zoneCounts: {
|
||||
total: state.totalZoneCount,
|
||||
filtered: state.zones.length,
|
||||
processed: processedZones,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
291
src/durable-objects/MetricCoordinator.ts
Normal file
291
src/durable-objects/MetricCoordinator.ts
Normal file
@@ -0,0 +1,291 @@
|
||||
import { DurableObject } from "cloudflare:workers";
|
||||
import { getCloudflareMetricsClient } from "../cloudflare/client";
|
||||
import { extractErrorInfo } from "../lib/errors";
|
||||
import { filterAccountsByIds, parseCommaSeparated } from "../lib/filters";
|
||||
import { createLogger, type Logger } from "../lib/logger";
|
||||
import type { MetricDefinition } from "../lib/metrics";
|
||||
import { serializeToPrometheus } from "../lib/prometheus";
|
||||
import { getConfig, type ResolvedConfig } from "../lib/runtime-config";
|
||||
import type { Account } from "../lib/types";
|
||||
import { AccountMetricCoordinator } from "./AccountMetricCoordinator";
|
||||
|
||||
// Durable Object storage key under which coordinator state is persisted.
const STATE_KEY = "state";

// Persisted state for the singleton MetricCoordinator Durable Object.
type MetricCoordinatorState = {
  identifier: string; // instance identifier (always "metric-coordinator" via get())
  accounts: Account[]; // cached account list (after whitelist filtering)
  lastAccountFetch: number; // epoch ms of last account-list fetch (0 = never)
};
|
||||
|
||||
/**
 * Coordinates metrics collection across all Cloudflare accounts and maintains
 * a cached account list. Singleton entry point for the /metrics endpoint:
 * fans out to one AccountMetricCoordinator per account and serializes the
 * aggregate to Prometheus exposition format.
 */
export class MetricCoordinator extends DurableObject<Env> {
  private state: MetricCoordinatorState | undefined;

  /**
   * Gets or creates the singleton MetricCoordinator instance.
   *
   * @param env Worker environment bindings.
   * @returns Initialized MetricCoordinator stub.
   */
  static async get(env: Env) {
    const stub = env.MetricCoordinator.getByName("metric-coordinator");
    await stub.setIdentifier("metric-coordinator");
    return stub;
  }

  /**
   * Constructs MetricCoordinator and restores state from storage before any
   * RPC is handled.
   *
   * @param ctx Durable Object state.
   * @param env Worker environment bindings.
   */
  constructor(ctx: DurableObjectState, env: Env) {
    super(ctx, env);
    ctx.blockConcurrencyWhile(async () => {
      this.state = await ctx.storage.get<MetricCoordinatorState>(STATE_KEY);
    });
  }

  /**
   * Creates a logger instance with resolved configuration.
   *
   * @param config Resolved runtime configuration.
   * @returns Logger instance.
   */
  private createLogger(config: ResolvedConfig): Logger {
    return createLogger("metric_coordinator", {
      format: config.logFormat,
      level: config.logLevel,
    });
  }

  /**
   * Initializes coordinator state if not already set. Idempotent.
   *
   * @param id Unique identifier for this coordinator instance.
   */
  async setIdentifier(id: string): Promise<void> {
    if (this.state !== undefined) {
      return;
    }
    this.state = { identifier: id, accounts: [], lastAccountFetch: 0 };
    await this.ctx.storage.put(STATE_KEY, this.state);
  }

  /**
   * Gets coordinator state.
   *
   * @returns Current coordinator state.
   * @throws {Error} When state not initialized (setIdentifier not yet called).
   */
  private getState(): MetricCoordinatorState {
    if (this.state === undefined) {
      throw new Error("State not initialized");
    }
    return this.state;
  }

  /**
   * Refreshes accounts from Cloudflare API if the cached list has expired.
   *
   * @param config Resolved runtime configuration.
   * @param logger Logger instance.
   * @returns Cached or refreshed account list (after whitelist filtering).
   */
  private async refreshAccountsIfStale(
    config: ResolvedConfig,
    logger: Logger,
  ): Promise<Account[]> {
    const state = this.getState();
    const ttlMs = config.accountListCacheTtlSeconds * 1000;

    // Serve from cache while non-empty and inside the TTL window.
    if (
      state.accounts.length > 0 &&
      Date.now() - state.lastAccountFetch < ttlMs
    ) {
      return state.accounts;
    }

    const client = getCloudflareMetricsClient(this.env);
    logger.info("Refreshing account list");
    const allAccounts = await client.getAccounts();

    // Filter accounts if whitelist is set
    const cfAccountsSet =
      config.cfAccounts !== null
        ? parseCommaSeparated(config.cfAccounts)
        : null;
    const accounts =
      cfAccountsSet !== null
        ? filterAccountsByIds(allAccounts, cfAccountsSet)
        : allAccounts;

    this.state = {
      ...state,
      accounts,
      lastAccountFetch: Date.now(),
    };
    await this.ctx.storage.put(STATE_KEY, this.state);

    logger.info("Accounts cached", {
      total: allAccounts.length,
      filtered: accounts.length,
    });
    return accounts;
  }

  /**
   * Collects metrics from all accounts and serializes to Prometheus format.
   * Per-account failures are logged and surfaced as error metrics rather than
   * failing the whole scrape.
   *
   * @returns Prometheus-formatted metrics string (empty when no accounts).
   */
  async export(): Promise<string> {
    const config = await getConfig(this.env);
    const logger = this.createLogger(config);

    logger.info("Collecting metrics");
    const accounts = await this.refreshAccountsIfStale(config, logger);

    if (accounts.length === 0) {
      logger.warn("No accounts found");
      return "";
    }

    logger.info("Exporting metrics", { account_count: accounts.length });

    // Track errors by account and error code (scoped to this scrape only).
    const errorsByAccount: Map<string, { code: string; count: number }[]> =
      new Map();

    const results = await Promise.all(
      accounts.map(async (account) => {
        try {
          const coordinator = await AccountMetricCoordinator.get(
            account.id,
            account.name,
            this.env,
          );
          return await coordinator.export();
        } catch (error) {
          const info = extractErrorInfo(error);
          logger.error("Failed to export account", {
            account_id: account.id,
            error_code: info.code,
            error: info.message,
            ...(info.stack && { stack: info.stack }),
          });

          // Track error for metrics
          const accountErrors = errorsByAccount.get(account.id) ?? [];
          const existing = accountErrors.find((e) => e.code === info.code);
          if (existing) {
            existing.count++;
          } else {
            accountErrors.push({ code: info.code, count: 1 });
          }
          errorsByAccount.set(account.id, accountErrors);

          // Failed accounts contribute empty metrics / zero counts.
          return {
            metrics: [],
            zoneCounts: { total: 0, filtered: 0, processed: 0 },
          };
        }
      }),
    );

    // Aggregate stats across all accounts.
    const zoneCounts = { total: 0, filtered: 0, processed: 0 };
    const allMetrics: MetricDefinition[] = [];
    for (const result of results) {
      allMetrics.push(...result.metrics);
      zoneCounts.total += result.zoneCounts.total;
      zoneCounts.filtered += result.zoneCounts.filtered;
      zoneCounts.processed += result.zoneCounts.processed;
    }

    // Add exporter info metrics
    const exporterMetrics = this.buildExporterInfoMetrics(
      accounts.length,
      zoneCounts,
      errorsByAccount,
    );

    const metricsDenylist = parseCommaSeparated(config.metricsDenylist);
    return serializeToPrometheus([...exporterMetrics, ...allMetrics], {
      denylist: metricsDenylist,
      excludeLabels: config.excludeHost ? new Set(["host"]) : undefined,
    });
  }

  /**
   * Builds exporter health and discovery metrics.
   *
   * @param accountCount Number of accounts discovered.
   * @param zoneCounts Zone counts (total, filtered, processed).
   * @param errorsByAccount Errors by account and error code.
   * @returns Exporter info metrics.
   */
  private buildExporterInfoMetrics(
    accountCount: number,
    zoneCounts: { total: number; filtered: number; processed: number },
    errorsByAccount: Map<string, { code: string; count: number }[]>,
  ): MetricDefinition[] {
    const metrics: MetricDefinition[] = [
      {
        name: "cloudflare_exporter_up",
        help: "Exporter health",
        type: "gauge",
        values: [{ labels: {}, value: 1 }],
      },
      {
        name: "cloudflare_accounts_total",
        help: "Total accounts discovered",
        type: "gauge",
        values: [{ labels: {}, value: accountCount }],
      },
      {
        name: "cloudflare_zones_total",
        help: "Total zones before filtering",
        type: "gauge",
        values: [{ labels: {}, value: zoneCounts.total }],
      },
      {
        name: "cloudflare_zones_filtered",
        help: "Zones after whitelist filter",
        type: "gauge",
        values: [{ labels: {}, value: zoneCounts.filtered }],
      },
      {
        name: "cloudflare_zones_processed",
        help: "Zones successfully processed",
        type: "gauge",
        values: [{ labels: {}, value: zoneCounts.processed }],
      },
    ];

    // Add error metrics if any errors occurred.
    // NOTE(review): this series is rebuilt from the current scrape's errors
    // only, so although typed "counter" it can decrease between scrapes;
    // confirm whether cumulative counts (or a gauge type) were intended.
    if (errorsByAccount.size > 0) {
      const errorsMetric: MetricDefinition = {
        name: "cloudflare_exporter_errors_total",
        help: "Total errors during metric collection by account and error code",
        type: "counter",
        values: [],
      };

      for (const [accountId, errors] of errorsByAccount) {
        for (const { code, count } of errors) {
          errorsMetric.values.push({
            labels: { account_id: accountId, error_code: code },
            value: count,
          });
        }
      }

      metrics.push(errorsMetric);
    }

    return metrics;
  }
}
|
||||
495
src/durable-objects/MetricExporter.ts
Normal file
495
src/durable-objects/MetricExporter.ts
Normal file
@@ -0,0 +1,495 @@
|
||||
import { DurableObject } from "cloudflare:workers";
|
||||
import {
|
||||
getCloudflareMetricsClient,
|
||||
isAccountLevelQuery,
|
||||
isZoneLevelQuery,
|
||||
} from "../cloudflare/client";
|
||||
import { createLogger, type Logger } from "../lib/logger";
|
||||
import type { MetricDefinition, MetricValue } from "../lib/metrics";
|
||||
import { getConfig, type ResolvedConfig } from "../lib/runtime-config";
|
||||
import { getTimeRange, metricKey } from "../lib/time";
|
||||
import {
|
||||
type CounterState,
|
||||
MetricExporterIdSchema,
|
||||
type MetricExporterIdString,
|
||||
type TimeRange,
|
||||
type Zone,
|
||||
} from "../lib/types";
|
||||
|
||||
// Durable Object storage key under which exporter state is persisted.
const STATE_KEY = "state";

// Persisted state for one MetricExporter Durable Object.
type MetricExporterState = {
  // Core identity (parsed from the composite DO name "scopeType:scopeId:queryName")
  scopeType: "account" | "zone";
  scopeId: string; // account ID or zone ID, depending on scopeType
  queryName: string; // which query this exporter runs

  // Metric storage
  counters: Record<string, CounterState>; // accumulated counter values keyed by metric key
  metrics: MetricDefinition[]; // last processed metric set served by export()
  lastIngest: number; // epoch ms of last metric ingest (0 = never)

  // Context for fetching (account-scoped)
  accountId: string;
  accountName: string;
  zones: Zone[]; // zones pushed by the coordinator
  firewallRules: Record<string, string>; // firewall rule ID -> name

  // Context for fetching (zone-scoped)
  zoneMetadata: Zone | null; // null until initializeZone() is called

  // Refresh state
  refreshInterval: number; // seconds between refreshes (snapshot of config at init)
  lastRefresh: number; // epoch ms of last successful refresh (0 = never)
  lastError: string | null; // message of last failed refresh, null when healthy

  // SSL cert cache (zone-scoped only)
  lastSslFetch: number; // epoch ms of last SSL cert fetch (0 = never)
};
|
||||
|
||||
/**
|
||||
* Durable Object that fetches and exports Prometheus metrics for a specific query scope.
|
||||
* Handles counter accumulation, alarm-based refresh scheduling, and metric caching.
|
||||
*/
|
||||
export class MetricExporter extends DurableObject<Env> {
|
||||
private state: MetricExporterState | undefined;
|
||||
|
||||
  /**
   * Constructs the exporter and restores persisted state from storage before
   * any RPC or alarm is handled.
   *
   * @param ctx Durable Object state.
   * @param env Worker environment bindings.
   */
  constructor(ctx: DurableObjectState, env: Env) {
    super(ctx, env);
    ctx.blockConcurrencyWhile(async () => {
      this.state = await ctx.storage.get<MetricExporterState>(STATE_KEY);
    });
  }
|
||||
|
||||
/**
|
||||
* Create a logger instance with context from the exporter's state.
|
||||
*
|
||||
* @param config Resolved runtime configuration.
|
||||
* @returns Logger instance with scope type, scope ID, and query name context.
|
||||
*/
|
||||
private createLogger(config: ResolvedConfig): Logger {
|
||||
const state = this.getState();
|
||||
return createLogger("metric_exporter", {
|
||||
format: config.logFormat,
|
||||
level: config.logLevel,
|
||||
})
|
||||
.child(state.scopeType)
|
||||
.child(state.scopeId)
|
||||
.child(state.queryName);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the current state or throw if not initialized.
|
||||
*
|
||||
* @returns Current state.
|
||||
* @throws {Error} When state is undefined.
|
||||
*/
|
||||
private getState(): MetricExporterState {
|
||||
if (this.state === undefined) {
|
||||
console.error(
|
||||
"State not initialized - initialize() must be called first",
|
||||
);
|
||||
throw new Error("State not initialized");
|
||||
}
|
||||
return this.state;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get or create a MetricExporter instance by ID, ensuring it's initialized.
|
||||
*
|
||||
* @param id Composite ID in format "scopeType:scopeId:queryName".
|
||||
* @param env Worker environment bindings.
|
||||
* @returns Initialized MetricExporter stub.
|
||||
*/
|
||||
static async get(id: MetricExporterIdString, env: Env) {
|
||||
const stub = env.MetricExporter.getByName(id);
|
||||
await stub.initialize(id);
|
||||
return stub;
|
||||
}
|
||||
|
||||
  /**
   * Initialize the exporter state from a composite ID.
   * Idempotent - skips if already initialized (in-memory or restored from
   * storage by the constructor).
   *
   * @param id Composite ID string to parse into scope type, scope ID, and query name.
   * @throws {ZodError} When ID format is invalid.
   */
  async initialize(id: string): Promise<void> {
    if (this.state !== undefined) {
      return;
    }

    const config = await getConfig(this.env);
    // Parse "scopeType:scopeId:queryName"; throws on a malformed ID.
    const parsed = MetricExporterIdSchema.parse(id);

    // Fresh state: fetch context (zones / zone metadata) is empty until the
    // coordinator pushes it via updateZoneContext() or initializeZone().
    this.state = {
      scopeType: parsed.scopeType,
      scopeId: parsed.scopeId,
      queryName: parsed.queryName,
      counters: {},
      metrics: [],
      lastIngest: 0,
      accountId: "",
      accountName: "",
      zones: [],
      firewallRules: {},
      zoneMetadata: null,
      refreshInterval: config.metricRefreshIntervalSeconds,
      lastRefresh: 0,
      lastError: null,
      lastSslFetch: 0,
    };

    await this.ctx.storage.put(STATE_KEY, this.state);
  }
|
||||
|
||||
  /**
   * Update zone context for account-scoped exporters.
   * Called by AccountMetricCoordinator after zone list refresh.
   * Triggers immediate fetch on first context push.
   *
   * @param accountId Cloudflare account ID.
   * @param accountName Account display name.
   * @param zones List of zones in the account.
   * @param firewallRules Map of firewall rule IDs to descriptions.
   * @param timeRange Shared time range for metrics queries.
   */
  async updateZoneContext(
    accountId: string,
    accountName: string,
    zones: Zone[],
    firewallRules: Record<string, string>,
    timeRange: TimeRange,
  ): Promise<void> {
    const config = await getConfig(this.env);
    const logger = this.createLogger(config);
    const state = this.getState();

    // Zone context only makes sense for account-scoped exporters.
    if (state.scopeType !== "account") {
      logger.warn("updateZoneContext called on non-account exporter");
      return;
    }

    // First meaningful context: previously had no zones, now has some, and
    // has never refreshed. Captured BEFORE the state update below.
    const isFirstContext =
      state.zones.length === 0 && zones.length > 0 && state.lastRefresh === 0;

    this.state = {
      ...state,
      accountId,
      accountName,
      zones,
      firewallRules,
    };
    await this.ctx.storage.put(STATE_KEY, this.state);

    logger.info("Zone context updated", { zone_count: zones.length });

    // On first context push, fetch immediately then schedule recurring alarm
    if (isFirstContext) {
      await this.refreshWithTimeRange(timeRange, config, logger);
    }
  }
|
||||
|
||||
  /**
   * Initialize zone-scoped exporter with zone metadata.
   * Called by AccountMetricCoordinator when ensuring zone exporters exist.
   * Triggers immediate fetch on first initialization.
   *
   * @param zone Zone metadata including ID, name, and plan.
   * @param accountId Cloudflare account ID that owns the zone.
   * @param accountName Account display name.
   * @param timeRange Shared time range for metrics queries.
   */
  async initializeZone(
    zone: Zone,
    accountId: string,
    accountName: string,
    timeRange: TimeRange,
  ): Promise<void> {
    const config = await getConfig(this.env);
    const logger = this.createLogger(config);
    const state = this.getState();

    // Zone metadata only makes sense for zone-scoped exporters.
    if (state.scopeType !== "zone") {
      logger.warn("initializeZone called on non-zone exporter");
      return;
    }

    // First initialization: no metadata yet and never refreshed.
    // Captured BEFORE the state update below.
    const isFirstInit = state.zoneMetadata === null && state.lastRefresh === 0;

    this.state = {
      ...state,
      accountId,
      accountName,
      zoneMetadata: zone,
    };
    await this.ctx.storage.put(STATE_KEY, this.state);

    logger.info("Zone metadata set", { zone: zone.name });

    // On first init, fetch immediately then schedule recurring alarm
    if (isFirstInit) {
      await this.refreshWithTimeRange(timeRange, config, logger);
    }
  }
|
||||
|
||||
/**
|
||||
* Durable Object alarm handler.
|
||||
* Triggers metric refresh and reschedules next alarm with jitter.
|
||||
*/
|
||||
override async alarm(): Promise<void> {
|
||||
const config = await getConfig(this.env);
|
||||
const logger = this.createLogger(config);
|
||||
logger.info("Alarm fired, refreshing");
|
||||
const timeRange = getTimeRange(
|
||||
config.scrapeDelaySeconds,
|
||||
config.timeWindowSeconds,
|
||||
);
|
||||
await this.refreshWithTimeRange(timeRange, config, logger);
|
||||
}
|
||||
|
||||
/**
|
||||
* Public method for coordinator to trigger refresh with shared time range.
|
||||
* Called by AccountMetricCoordinator to ensure all exporters use the same time window.
|
||||
*
|
||||
* @param timeRange Shared time range calculated by coordinator.
|
||||
*/
|
||||
async triggerRefresh(timeRange: TimeRange): Promise<void> {
|
||||
const config = await getConfig(this.env);
|
||||
const logger = this.createLogger(config);
|
||||
await this.refreshWithTimeRange(timeRange, config, logger);
|
||||
}
|
||||
|
||||
/**
 * Refresh metrics from Cloudflare API using the provided time range.
 * Handles account-scoped and zone-scoped queries, processes counters, and schedules next alarm.
 *
 * Control flow notes:
 * - The state snapshot is read once at entry; every state write below is
 *   derived from that snapshot.
 * - The next alarm is ALWAYS scheduled, on success, failure, and every
 *   early-return guard, so the refresh loop never stalls.
 * - On failure only lastError is updated; previously accumulated metrics
 *   and counters are kept as-is.
 *
 * @param timeRange Time range for metrics queries.
 * @param config Resolved runtime configuration.
 * @param logger Logger instance for logging.
 */
private async refreshWithTimeRange(
  timeRange: TimeRange,
  config: ResolvedConfig,
  logger: Logger,
): Promise<void> {
  const state = this.getState();

  // Skip if zone context not yet pushed (account-scoped needs zones)
  if (state.scopeType === "account" && state.zones.length === 0) {
    logger.info("Skipping refresh - no zone context yet");
    await this.scheduleNextAlarm(config);
    return;
  }

  // Skip if zone metadata not set (zone-scoped)
  if (state.scopeType === "zone" && state.zoneMetadata === null) {
    logger.info("Skipping refresh - no zone metadata yet");
    await this.scheduleNextAlarm(config);
    return;
  }

  // For zone-scoped (SSL certs), check cache TTL.
  // lastSslFetch === 0 means "never fetched", so the cache is never
  // considered fresh before the first successful fetch.
  if (state.scopeType === "zone") {
    const cacheAgeMs = Date.now() - state.lastSslFetch;
    const cacheTtlMs = config.sslCertsCacheTtlSeconds * 1000;
    if (state.lastSslFetch > 0 && cacheAgeMs < cacheTtlMs) {
      logger.debug("SSL cert cache fresh, skipping fetch", {
        age_seconds: Math.floor(cacheAgeMs / 1000),
        ttl_seconds: config.sslCertsCacheTtlSeconds,
      });
      await this.scheduleNextAlarm(config);
      return;
    }
  }

  const client = getCloudflareMetricsClient(this.env);

  try {
    let metrics: MetricDefinition[];

    // Dispatch on scope: account-scoped queries need the shared timeRange,
    // zone-scoped queries (SSL certs / LB weights) do not.
    if (state.scopeType === "account") {
      metrics = await this.fetchAccountScopedMetrics(
        client,
        state,
        timeRange,
      );
    } else {
      metrics = await this.fetchZoneScopedMetrics(client, state);
    }

    // Fold raw counter samples into the monotonically accumulated values.
    const processed = this.processCounters(metrics, state.counters);

    this.state = {
      ...state,
      metrics: processed.metrics,
      counters: processed.counters,
      lastRefresh: Date.now(),
      // Only zone-scoped refreshes advance the SSL cache timestamp.
      lastSslFetch:
        state.scopeType === "zone" ? Date.now() : state.lastSslFetch,
      lastError: null,
    };
    await this.ctx.storage.put(STATE_KEY, this.state);

    logger.info("Refresh complete", {
      metric_count: metrics.length,
    });
  } catch (error) {
    const msg = error instanceof Error ? error.message : String(error);
    logger.error("Refresh failed", { error: msg });
    // Record the failure but keep the last good metrics/counters.
    this.state = { ...state, lastError: msg };
    await this.ctx.storage.put(STATE_KEY, this.state);
  }

  await this.scheduleNextAlarm(config);
}
|
||||
|
||||
/**
|
||||
* Schedule the next alarm with jitter for time range alignment.
|
||||
*
|
||||
* @param config Resolved runtime configuration.
|
||||
*/
|
||||
private async scheduleNextAlarm(config: ResolvedConfig): Promise<void> {
|
||||
const intervalMs = config.metricRefreshIntervalSeconds * 1000;
|
||||
// Jitter: 1-5s fixed (tighter clustering for time range alignment)
|
||||
const jitter = 1000 + Math.random() * 4000;
|
||||
const nextAlarm = Date.now() + intervalMs + jitter;
|
||||
|
||||
await this.ctx.storage.setAlarm(nextAlarm);
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch account-scoped metrics from Cloudflare API.
|
||||
* Handles both account-level and zone-batched queries.
|
||||
*
|
||||
* @param client Cloudflare metrics client.
|
||||
* @param state Current exporter state.
|
||||
* @param timeRange Time range for metrics queries.
|
||||
* @returns Array of metric definitions.
|
||||
*/
|
||||
private async fetchAccountScopedMetrics(
|
||||
client: ReturnType<typeof getCloudflareMetricsClient>,
|
||||
state: MetricExporterState,
|
||||
timeRange: TimeRange,
|
||||
): Promise<MetricDefinition[]> {
|
||||
const { queryName, accountId, accountName, zones, firewallRules } = state;
|
||||
|
||||
// Account-level queries (worker-totals, logpush-account, magic-transit)
|
||||
if (isAccountLevelQuery(queryName)) {
|
||||
return client.getAccountMetrics(
|
||||
queryName,
|
||||
accountId,
|
||||
accountName,
|
||||
timeRange,
|
||||
);
|
||||
}
|
||||
|
||||
// Zone-batched queries - fetch all zones in one GraphQL call
|
||||
if (isZoneLevelQuery(queryName)) {
|
||||
const zoneIds = zones.map((z) => z.id);
|
||||
return client.getZoneMetrics(
|
||||
queryName,
|
||||
zoneIds,
|
||||
zones,
|
||||
firewallRules,
|
||||
timeRange,
|
||||
);
|
||||
}
|
||||
|
||||
// Unknown query - should not happen if IDs are constructed correctly
|
||||
console.error("Unknown query type", { queryName });
|
||||
return [];
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch zone-scoped metrics from Cloudflare API.
|
||||
* Handles SSL certificates and load balancer weight metrics.
|
||||
*
|
||||
* @param client Cloudflare metrics client.
|
||||
* @param state Current exporter state.
|
||||
* @returns Array of metric definitions.
|
||||
*/
|
||||
private async fetchZoneScopedMetrics(
|
||||
client: ReturnType<typeof getCloudflareMetricsClient>,
|
||||
state: MetricExporterState,
|
||||
): Promise<MetricDefinition[]> {
|
||||
const { queryName, zoneMetadata } = state;
|
||||
|
||||
if (zoneMetadata === null) {
|
||||
return [];
|
||||
}
|
||||
|
||||
switch (queryName) {
|
||||
case "ssl-certificates":
|
||||
return client.getSSLCertificateMetricsForZone(zoneMetadata);
|
||||
case "lb-weight-metrics":
|
||||
return client.getLbWeightMetricsForZone(zoneMetadata);
|
||||
default:
|
||||
console.error("Unknown zone-scoped query", { queryName });
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Return cached accumulated metrics.
|
||||
*
|
||||
* @returns Current snapshot of metrics with accumulated counter values.
|
||||
*/
|
||||
async export(): Promise<MetricDefinition[]> {
|
||||
const state = this.getState();
|
||||
return state.metrics;
|
||||
}
|
||||
|
||||
/**
|
||||
* Process raw metrics and accumulate counter values.
|
||||
*
|
||||
* @param rawMetrics Raw metrics from Cloudflare API.
|
||||
* @param existingCounters Existing counter state.
|
||||
* @returns Processed metrics with accumulated counter values and updated counter state.
|
||||
*/
|
||||
private processCounters(
|
||||
rawMetrics: MetricDefinition[],
|
||||
existingCounters: Record<string, CounterState>,
|
||||
): { metrics: MetricDefinition[]; counters: Record<string, CounterState> } {
|
||||
const newCounters: Record<string, CounterState> = { ...existingCounters };
|
||||
|
||||
const metrics = rawMetrics.map((metric) => {
|
||||
if (metric.type !== "counter") {
|
||||
return metric;
|
||||
}
|
||||
|
||||
const processedValues: MetricValue[] = metric.values.map((value) => {
|
||||
const key = metricKey(metric.name, value.labels);
|
||||
newCounters[key] = this.updateCounter(newCounters[key], value.value);
|
||||
return { labels: value.labels, value: newCounters[key].accumulated };
|
||||
});
|
||||
|
||||
return { ...metric, values: processedValues };
|
||||
});
|
||||
|
||||
return { metrics, counters: newCounters };
|
||||
}
|
||||
|
||||
/**
|
||||
* Update counter state with a new raw value.
|
||||
* Handles counter resets by detecting decreases.
|
||||
*
|
||||
* @param existing Existing counter state or undefined for new counter.
|
||||
* @param rawValue New raw counter value from API.
|
||||
* @returns Updated counter state with accumulated value.
|
||||
*/
|
||||
private updateCounter(
|
||||
existing: CounterState | undefined,
|
||||
rawValue: number,
|
||||
): CounterState {
|
||||
if (!existing) {
|
||||
return { prev: rawValue, accumulated: rawValue };
|
||||
}
|
||||
const delta =
|
||||
rawValue < existing.prev ? rawValue : rawValue - existing.prev;
|
||||
return {
|
||||
prev: rawValue,
|
||||
accumulated: existing.accumulated + delta,
|
||||
};
|
||||
}
|
||||
}
|
||||
5
src/env.d.ts
vendored
Normal file
5
src/env.d.ts
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
// Ambient augmentation of the Worker environment type.
declare namespace Cloudflare {
  interface Env {
    // API token used to authenticate against the Cloudflare REST and
    // GraphQL APIs (supplied as a secret, not a plain var).
    CLOUDFLARE_API_TOKEN: string;
  }
}
|
||||
64
src/index.ts
64
src/index.ts
@@ -1,64 +0,0 @@
|
||||
import { DurableObject } from "cloudflare:workers";
|
||||
|
||||
/**
|
||||
* Welcome to Cloudflare Workers! This is your first Durable Objects application.
|
||||
*
|
||||
* - Run `npm run dev` in your terminal to start a development server
|
||||
* - Open a browser tab at http://localhost:8787/ to see your Durable Object in action
|
||||
* - Run `npm run deploy` to publish your application
|
||||
*
|
||||
* Bind resources to your worker in `wrangler.jsonc`. After adding bindings, a type definition for the
|
||||
* `Env` object can be regenerated with `npm run cf-typegen`.
|
||||
*
|
||||
* Learn more at https://developers.cloudflare.com/durable-objects
|
||||
*/
|
||||
|
||||
/** A Durable Object's behavior is defined in an exported Javascript class */
|
||||
export class MyDurableObject extends DurableObject<Env> {
|
||||
/**
|
||||
* The constructor is invoked once upon creation of the Durable Object, i.e. the first call to
|
||||
* `DurableObjectStub::get` for a given identifier (no-op constructors can be omitted)
|
||||
*
|
||||
* @param ctx - The interface for interacting with Durable Object state
|
||||
* @param env - The interface to reference bindings declared in wrangler.jsonc
|
||||
*/
|
||||
constructor(ctx: DurableObjectState, env: Env) {
|
||||
super(ctx, env);
|
||||
}
|
||||
|
||||
/**
|
||||
* The Durable Object exposes an RPC method sayHello which will be invoked when a Durable
|
||||
* Object instance receives a request from a Worker via the same method invocation on the stub
|
||||
*
|
||||
* @param name - The name provided to a Durable Object instance from a Worker
|
||||
* @returns The greeting to be sent back to the Worker
|
||||
*/
|
||||
async sayHello(name: string): Promise<string> {
|
||||
return `Hello, ${name}!`;
|
||||
}
|
||||
}
|
||||
|
||||
export default {
|
||||
/**
|
||||
* This is the standard fetch handler for a Cloudflare Worker
|
||||
*
|
||||
* @param request - The request submitted to the Worker from the client
|
||||
* @param env - The interface to reference bindings declared in wrangler.jsonc
|
||||
* @param ctx - The execution context of the Worker
|
||||
* @returns The response to be sent back to the client
|
||||
*/
|
||||
async fetch(request, env, ctx): Promise<Response> {
|
||||
// Create a stub to open a communication channel with the Durable Object
|
||||
// instance named "foo".
|
||||
//
|
||||
// Requests from all Workers to the Durable Object instance named "foo"
|
||||
// will go to a single remote Durable Object instance.
|
||||
const stub = env.MY_DURABLE_OBJECT.getByName("foo");
|
||||
|
||||
// Call the `sayHello()` RPC method on the stub to invoke the method on
|
||||
// the remote Durable Object instance.
|
||||
const greeting = await stub.sayHello("world");
|
||||
|
||||
return new Response(greeting);
|
||||
},
|
||||
} satisfies ExportedHandler<Env>;
|
||||
93
src/lib/config.ts
Normal file
93
src/lib/config.ts
Normal file
@@ -0,0 +1,93 @@
|
||||
import z from "zod";
|
||||
|
||||
/**
 * Application configuration parsed from environment variables.
 *
 * Produced by parseConfig below; boolean flags fall back to false and
 * metricsPath falls back to "/metrics" when the variable is unset or invalid.
 */
export type AppConfig = {
  readonly excludeHost: boolean;
  readonly httpStatusGroup: boolean;
  // Parsed from METRICS_DENYLIST (comma-separated); empty set when unset.
  readonly metricsDenylist: ReadonlySet<string>;
  // Parsed from CF_ACCOUNTS; null when unset/empty (presumably meaning
  // "no account filtering" — confirm against consumers).
  readonly cfAccounts: ReadonlySet<string> | null;
  // Parsed from CF_ZONES; null when unset/empty.
  readonly cfZones: ReadonlySet<string> | null;
  // Parsed from CF_FREE_TIER_ACCOUNTS; empty set when unset.
  readonly cfFreeTierAccounts: ReadonlySet<string>;
  // HTTP path where the metrics endpoint is served.
  readonly metricsPath: string;
  readonly disableUi: boolean;
  readonly disableConfigApi: boolean;
};
|
||||
|
||||
/**
|
||||
* Parses comma-separated string into Set, trimming whitespace.
|
||||
* Returns empty Set for empty/undefined input.
|
||||
*
|
||||
* @param value Comma-separated string or undefined.
|
||||
* @returns Set of trimmed non-empty strings.
|
||||
*/
|
||||
function parseCommaSeparated(value: string | undefined): Set<string> {
|
||||
if (!value || value.trim() === "") {
|
||||
return new Set();
|
||||
}
|
||||
return new Set(
|
||||
value
|
||||
.split(",")
|
||||
.map((s) => s.trim())
|
||||
.filter((s) => s.length > 0),
|
||||
);
|
||||
}
|
||||
|
||||
/**
 * Optional environment variables not defined in wrangler.jsonc vars.
 *
 * These are absent from the generated Env type, so parseConfig reads them
 * through a widening cast (Env & OptionalEnvVars).
 */
type OptionalEnvVars = {
  METRICS_DENYLIST?: string;
  CF_ACCOUNTS?: string;
  CF_ZONES?: string;
  CF_FREE_TIER_ACCOUNTS?: string;
};
|
||||
|
||||
/**
|
||||
* Parses application configuration from environment variables.
|
||||
* Uses Zod for type coercion with sensible defaults.
|
||||
*
|
||||
* @param env Worker environment bindings.
|
||||
* @returns Parsed application configuration.
|
||||
*/
|
||||
export function parseConfig(env: Env): AppConfig {
|
||||
const optionalEnv = env as Env & OptionalEnvVars;
|
||||
|
||||
const excludeHost = z.coerce.boolean().catch(false).parse(env.EXCLUDE_HOST);
|
||||
const httpStatusGroup = z.coerce
|
||||
.boolean()
|
||||
.catch(false)
|
||||
.parse(env.CF_HTTP_STATUS_GROUP);
|
||||
const metricsPath = z
|
||||
.string()
|
||||
.min(1)
|
||||
.catch("/metrics")
|
||||
.parse(env.METRICS_PATH);
|
||||
const disableUi = z.coerce.boolean().catch(false).parse(env.DISABLE_UI);
|
||||
const disableConfigApi = z.coerce
|
||||
.boolean()
|
||||
.catch(false)
|
||||
.parse(env.DISABLE_CONFIG_API);
|
||||
|
||||
const metricsDenylist = parseCommaSeparated(optionalEnv.METRICS_DENYLIST);
|
||||
const cfAccountsRaw = parseCommaSeparated(optionalEnv.CF_ACCOUNTS);
|
||||
const cfAccounts = cfAccountsRaw.size > 0 ? cfAccountsRaw : null;
|
||||
const cfZonesRaw = parseCommaSeparated(optionalEnv.CF_ZONES);
|
||||
const cfZones = cfZonesRaw.size > 0 ? cfZonesRaw : null;
|
||||
const cfFreeTierAccounts = parseCommaSeparated(
|
||||
optionalEnv.CF_FREE_TIER_ACCOUNTS,
|
||||
);
|
||||
|
||||
return {
|
||||
excludeHost,
|
||||
httpStatusGroup,
|
||||
metricsDenylist,
|
||||
cfAccounts,
|
||||
cfZones,
|
||||
cfFreeTierAccounts,
|
||||
metricsPath,
|
||||
disableUi,
|
||||
disableConfigApi,
|
||||
};
|
||||
}
|
||||
459
src/lib/errors.ts
Normal file
459
src/lib/errors.ts
Normal file
@@ -0,0 +1,459 @@
|
||||
/**
 * Error codes for categorization and alerting.
 *
 * Declared as a const object plus a derived union type (idiomatic
 * alternative to an enum): the object provides the runtime values, the
 * type below is the exact union of those values.
 */
export const ErrorCode = {
  // API/Network
  API_RATE_LIMITED: "API_RATE_LIMITED",
  API_TIMEOUT: "API_TIMEOUT",
  API_UNAVAILABLE: "API_UNAVAILABLE",
  API_AUTH_FAILED: "API_AUTH_FAILED",

  // GraphQL
  GRAPHQL_ERROR: "GRAPHQL_ERROR",
  GRAPHQL_FIELD_ACCESS: "GRAPHQL_FIELD_ACCESS",

  // Config
  CONFIG_INVALID: "CONFIG_INVALID",
  CONFIG_PARSE_ERROR: "CONFIG_PARSE_ERROR",

  // State
  STATE_NOT_INITIALIZED: "STATE_NOT_INITIALIZED",

  // Validation
  VALIDATION_ERROR: "VALIDATION_ERROR",

  // Timeout
  TIMEOUT: "TIMEOUT",

  // Unknown
  UNKNOWN: "UNKNOWN",
} as const;

/**
 * Error code type: the union of all code string values (intentionally
 * shares the name of the const object above).
 */
export type ErrorCode = (typeof ErrorCode)[keyof typeof ErrorCode];
|
||||
|
||||
/**
 * Arbitrary structured key/value context attached to an error for logging.
 */
type ErrorContext = Record<string, unknown>;

/**
 * CloudflarePrometheusError options; extends the standard ErrorOptions
 * (which carries `cause`).
 */
type CloudflarePrometheusErrorOptions = ErrorOptions & {
  // Structured context merged into the error's log output.
  context?: ErrorContext;
  // Whether the failed operation is safe to retry (defaults to false).
  retryable?: boolean;
};
|
||||
|
||||
/**
|
||||
* Base error class with cause chaining, error codes, and structured logging support.
|
||||
*/
|
||||
export class CloudflarePrometheusError extends Error {
|
||||
readonly code: ErrorCode;
|
||||
readonly context: ErrorContext;
|
||||
readonly timestamp: string;
|
||||
readonly retryable: boolean;
|
||||
|
||||
/**
|
||||
* Create a CloudflarePrometheusError.
|
||||
*
|
||||
* @param message Error message.
|
||||
* @param code Error code.
|
||||
* @param options Error options.
|
||||
*/
|
||||
constructor(
|
||||
message: string,
|
||||
code: ErrorCode,
|
||||
options?: CloudflarePrometheusErrorOptions,
|
||||
) {
|
||||
super(message, options);
|
||||
|
||||
// Fix prototype chain for instanceof checks
|
||||
Object.setPrototypeOf(this, new.target.prototype);
|
||||
|
||||
this.name = this.constructor.name;
|
||||
this.code = code;
|
||||
this.context = options?.context ?? {};
|
||||
this.timestamp = new Date().toISOString();
|
||||
this.retryable = options?.retryable ?? false;
|
||||
|
||||
// Append cause stack if available
|
||||
if (options?.cause instanceof Error) {
|
||||
this.stack = `${this.stack}\nCaused by: ${options.cause.stack}`;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert to structured data for logging.
|
||||
*
|
||||
* @returns Structured error context.
|
||||
*/
|
||||
toStructuredData(): ErrorContext {
|
||||
return {
|
||||
error_code: this.code,
|
||||
error_message: this.message,
|
||||
error_name: this.name,
|
||||
error_retryable: this.retryable,
|
||||
...this.context,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* API errors (rate limits, unavailable, auth failures).
|
||||
*/
|
||||
export class ApiError extends CloudflarePrometheusError {
|
||||
readonly statusCode?: number;
|
||||
|
||||
/**
|
||||
* Create an ApiError.
|
||||
*
|
||||
* @param message Error message.
|
||||
* @param options Error options.
|
||||
*/
|
||||
constructor(
|
||||
message: string,
|
||||
options?: CloudflarePrometheusErrorOptions & { statusCode?: number },
|
||||
) {
|
||||
const statusCode = options?.statusCode;
|
||||
let code: ErrorCode;
|
||||
let retryable = false;
|
||||
|
||||
if (statusCode === 429) {
|
||||
code = ErrorCode.API_RATE_LIMITED;
|
||||
retryable = true;
|
||||
} else if (statusCode === 401 || statusCode === 403) {
|
||||
code = ErrorCode.API_AUTH_FAILED;
|
||||
} else if (statusCode !== undefined && statusCode >= 500) {
|
||||
code = ErrorCode.API_UNAVAILABLE;
|
||||
retryable = true;
|
||||
} else {
|
||||
code = ErrorCode.API_UNAVAILABLE;
|
||||
}
|
||||
|
||||
super(message, code, { ...options, retryable });
|
||||
this.statusCode = statusCode;
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert to structured data for logging.
|
||||
*
|
||||
* @returns Structured error context.
|
||||
*/
|
||||
override toStructuredData(): ErrorContext {
|
||||
return {
|
||||
...super.toStructuredData(),
|
||||
...(this.statusCode !== undefined && { status_code: this.statusCode }),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Shape of a single entry in a GraphQL response's `errors` array.
 */
type GraphQLErrorDetail = {
  // Human-readable message from the GraphQL server.
  message: string;
  // Path to the field that failed, if reported.
  path?: ReadonlyArray<string | number>;
  // Vendor-specific extension data (e.g. an error `code` string).
  extensions?: Record<string, unknown>;
};
|
||||
|
||||
/**
|
||||
* GraphQL query errors with access to underlying error details.
|
||||
*/
|
||||
export class GraphQLError extends CloudflarePrometheusError {
|
||||
readonly graphqlErrors: GraphQLErrorDetail[];
|
||||
|
||||
/**
|
||||
* Create a GraphQLError.
|
||||
*
|
||||
* @param message Error message.
|
||||
* @param graphqlErrors GraphQL error details.
|
||||
* @param options Error options.
|
||||
*/
|
||||
constructor(
|
||||
message: string,
|
||||
graphqlErrors: GraphQLErrorDetail[] = [],
|
||||
options?: CloudflarePrometheusErrorOptions,
|
||||
) {
|
||||
const hasFieldAccessError = graphqlErrors.some(
|
||||
(e) =>
|
||||
e.message.includes("does not have access") ||
|
||||
e.extensions?.code === "FORBIDDEN",
|
||||
);
|
||||
const code = hasFieldAccessError
|
||||
? ErrorCode.GRAPHQL_FIELD_ACCESS
|
||||
: ErrorCode.GRAPHQL_ERROR;
|
||||
|
||||
super(message, code, options);
|
||||
this.graphqlErrors = graphqlErrors;
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert to structured data for logging.
|
||||
*
|
||||
* @returns Structured error context.
|
||||
*/
|
||||
override toStructuredData(): ErrorContext {
|
||||
return {
|
||||
...super.toStructuredData(),
|
||||
graphql_error_count: this.graphqlErrors.length,
|
||||
graphql_paths: this.graphqlErrors
|
||||
.map((e) => e.path?.join("."))
|
||||
.filter(Boolean),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Configuration parsing/validation errors.
|
||||
*/
|
||||
export class ConfigError extends CloudflarePrometheusError {
|
||||
readonly issues?: Array<{ path: string; message: string }>;
|
||||
|
||||
/**
|
||||
* Create a ConfigError.
|
||||
*
|
||||
* @param message Error message.
|
||||
* @param options Error options.
|
||||
*/
|
||||
constructor(
|
||||
message: string,
|
||||
options?: CloudflarePrometheusErrorOptions & {
|
||||
issues?: Array<{ path: string; message: string }>;
|
||||
},
|
||||
) {
|
||||
const code = message.includes("parse")
|
||||
? ErrorCode.CONFIG_PARSE_ERROR
|
||||
: ErrorCode.CONFIG_INVALID;
|
||||
super(message, code, options);
|
||||
this.issues = options?.issues;
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert to structured data for logging.
|
||||
*
|
||||
* @returns Structured error context.
|
||||
*/
|
||||
override toStructuredData(): ErrorContext {
|
||||
return {
|
||||
...super.toStructuredData(),
|
||||
...(this.issues && { validation_issues: this.issues }),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* State not initialized (DO not ready).
|
||||
*/
|
||||
export class StateNotInitializedError extends CloudflarePrometheusError {
|
||||
/**
|
||||
* Create a StateNotInitializedError.
|
||||
*
|
||||
* @param component Component name.
|
||||
* @param options Error options.
|
||||
*/
|
||||
constructor(
|
||||
component: string,
|
||||
options?: Omit<CloudflarePrometheusErrorOptions, "context">,
|
||||
) {
|
||||
super(
|
||||
`State not initialized - initialize() must be called first`,
|
||||
ErrorCode.STATE_NOT_INITIALIZED,
|
||||
{
|
||||
...options,
|
||||
context: { component },
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Operation timeout.
|
||||
*/
|
||||
export class TimeoutError extends CloudflarePrometheusError {
|
||||
readonly timeoutMs: number;
|
||||
readonly operation: string;
|
||||
|
||||
/**
|
||||
* Create a TimeoutError.
|
||||
*
|
||||
* @param operation Operation name.
|
||||
* @param timeoutMs Timeout in milliseconds.
|
||||
* @param options Error options.
|
||||
*/
|
||||
constructor(
|
||||
operation: string,
|
||||
timeoutMs: number,
|
||||
options?: CloudflarePrometheusErrorOptions,
|
||||
) {
|
||||
super(`${operation} timed out after ${timeoutMs}ms`, ErrorCode.TIMEOUT, {
|
||||
...options,
|
||||
retryable: true,
|
||||
context: { ...options?.context, operation, timeout_ms: timeoutMs },
|
||||
});
|
||||
this.timeoutMs = timeoutMs;
|
||||
this.operation = operation;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Race promise against timeout with proper cleanup.
|
||||
*
|
||||
* @param promise Promise to race.
|
||||
* @param ms Timeout in milliseconds.
|
||||
* @param operation Operation name.
|
||||
* @returns Discriminated union for type-safe handling.
|
||||
*/
|
||||
export async function withTimeout<T>(
|
||||
promise: Promise<T>,
|
||||
ms: number,
|
||||
operation = "Operation",
|
||||
): Promise<{ ok: true; value: T } | { ok: false; error: TimeoutError }> {
|
||||
let timeoutId: ReturnType<typeof setTimeout> | undefined;
|
||||
|
||||
const timeoutPromise = new Promise<never>((_, reject) => {
|
||||
timeoutId = setTimeout(() => reject(new TimeoutError(operation, ms)), ms);
|
||||
});
|
||||
|
||||
try {
|
||||
const value = await Promise.race([promise, timeoutPromise]);
|
||||
return { ok: true, value };
|
||||
} catch (err) {
|
||||
if (err instanceof TimeoutError) {
|
||||
return { ok: false, error: err };
|
||||
}
|
||||
throw err;
|
||||
} finally {
|
||||
if (timeoutId !== undefined) {
|
||||
clearTimeout(timeoutId);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validation error (Zod or other).
|
||||
*/
|
||||
export class ValidationError extends CloudflarePrometheusError {
|
||||
readonly field?: string;
|
||||
|
||||
/**
|
||||
* Create a ValidationError.
|
||||
*
|
||||
* @param message Error message.
|
||||
* @param options Error options.
|
||||
*/
|
||||
constructor(
|
||||
message: string,
|
||||
options?: CloudflarePrometheusErrorOptions & { field?: string },
|
||||
) {
|
||||
super(message, ErrorCode.VALIDATION_ERROR, options);
|
||||
this.field = options?.field;
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert to structured data for logging.
|
||||
*
|
||||
* @returns Structured error context.
|
||||
*/
|
||||
override toStructuredData(): ErrorContext {
|
||||
return {
|
||||
...super.toStructuredData(),
|
||||
...(this.field && { field: this.field }),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract structured error info from any error type.
|
||||
*
|
||||
* @param error Error to extract info from.
|
||||
* @returns Structured error info.
|
||||
*/
|
||||
export function extractErrorInfo(error: unknown): {
|
||||
message: string;
|
||||
stack?: string;
|
||||
code: ErrorCode;
|
||||
context: ErrorContext;
|
||||
retryable: boolean;
|
||||
} {
|
||||
if (error instanceof CloudflarePrometheusError) {
|
||||
return {
|
||||
message: error.message,
|
||||
stack: error.stack,
|
||||
code: error.code,
|
||||
context: error.context,
|
||||
retryable: error.retryable,
|
||||
};
|
||||
}
|
||||
|
||||
if (error instanceof Error) {
|
||||
return {
|
||||
message: error.message,
|
||||
stack: error.stack,
|
||||
code: ErrorCode.UNKNOWN,
|
||||
context: {},
|
||||
retryable: false,
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
message: String(error),
|
||||
code: ErrorCode.UNKNOWN,
|
||||
context: {},
|
||||
retryable: false,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if an error is retryable.
|
||||
*
|
||||
* @param error Error to check.
|
||||
* @returns True if retryable.
|
||||
*/
|
||||
export function isRetryable(error: unknown): boolean {
|
||||
if (error instanceof CloudflarePrometheusError) {
|
||||
return error.retryable;
|
||||
}
|
||||
|
||||
// Network errors are generally retryable
|
||||
if (error instanceof TypeError && error.message.includes("fetch")) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Wrap an unknown error as a CloudflarePrometheusError.
|
||||
*
|
||||
* @param error Error to wrap.
|
||||
* @param message Error message.
|
||||
* @param code Error code.
|
||||
* @param context Error context.
|
||||
* @returns Wrapped error.
|
||||
*/
|
||||
export function wrapError(
|
||||
error: unknown,
|
||||
message: string,
|
||||
code: ErrorCode = ErrorCode.UNKNOWN,
|
||||
context?: ErrorContext,
|
||||
): CloudflarePrometheusError {
|
||||
if (error instanceof CloudflarePrometheusError) {
|
||||
// Already our error type, just add context if needed
|
||||
if (context) {
|
||||
return new CloudflarePrometheusError(message, error.code, {
|
||||
cause: error,
|
||||
context: { ...error.context, ...context },
|
||||
retryable: error.retryable,
|
||||
});
|
||||
}
|
||||
return error;
|
||||
}
|
||||
|
||||
return new CloudflarePrometheusError(message, code, {
|
||||
cause: error instanceof Error ? error : undefined,
|
||||
context,
|
||||
});
|
||||
}
|
||||
58
src/lib/filters.ts
Normal file
58
src/lib/filters.ts
Normal file
@@ -0,0 +1,58 @@
|
||||
import type { Account, Zone } from "./types";
|
||||
|
||||
/**
|
||||
* Parses comma-separated string into Set, trimming whitespace.
|
||||
*
|
||||
* @param value Comma-separated string to parse.
|
||||
* @returns Set of trimmed non-empty strings, or empty Set for empty/undefined input.
|
||||
*/
|
||||
export function parseCommaSeparated(value: string | undefined): Set<string> {
|
||||
if (!value || value.trim() === "") {
|
||||
return new Set();
|
||||
}
|
||||
return new Set(
|
||||
value
|
||||
.split(",")
|
||||
.map((s) => s.trim())
|
||||
.filter((s) => s.length > 0),
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Filters accounts to only include those with IDs in the set.
|
||||
*
|
||||
* @param accounts Array of accounts to filter.
|
||||
* @param includeIds Set of account IDs to include.
|
||||
* @returns Filtered array of accounts.
|
||||
*/
|
||||
export function filterAccountsByIds(
|
||||
accounts: Account[],
|
||||
includeIds: ReadonlySet<string>,
|
||||
): Account[] {
|
||||
return accounts.filter((a) => includeIds.has(a.id));
|
||||
}
|
||||
|
||||
/**
|
||||
* Filters zones to only include those with IDs in the set.
|
||||
*
|
||||
* @param zones Array of zones to filter.
|
||||
* @param includeIds Set of zone IDs to include.
|
||||
* @returns Filtered array of zones.
|
||||
*/
|
||||
export function filterZonesByIds(
|
||||
zones: Zone[],
|
||||
includeIds: ReadonlySet<string>,
|
||||
): Zone[] {
|
||||
return zones.filter((z) => includeIds.has(z.id));
|
||||
}
|
||||
|
||||
/**
|
||||
* Looks up zone name by ID, falling back to ID if not found.
|
||||
*
|
||||
* @param zoneId Zone ID to look up.
|
||||
* @param zones Array of zones to search.
|
||||
* @returns Zone name if found, otherwise the zone ID.
|
||||
*/
|
||||
export function findZoneName(zoneId: string, zones: Zone[]): string {
|
||||
return zones.find((z) => z.id === zoneId)?.name ?? zoneId;
|
||||
}
|
||||
185
src/lib/health.ts
Normal file
185
src/lib/health.ts
Normal file
@@ -0,0 +1,185 @@
|
||||
import {
|
||||
CLOUDFLARE_GQL_URL,
|
||||
getCloudflareMetricsClient,
|
||||
} from "../cloudflare/client";
|
||||
import { extractErrorInfo, withTimeout } from "./errors";
|
||||
import { getConfig } from "./runtime-config";
|
||||
|
||||
// Per-probe time budget; each connectivity check is raced against this.
const CHECK_TIMEOUT_MS = 5_000;

// Binary verdict used both per check and for the overall status.
type CheckStatus = "healthy" | "unhealthy";

// Result of a single connectivity probe.
type HealthCheck = {
  status: CheckStatus;
  // Wall-clock duration of the probe, rounded to whole milliseconds.
  latency_ms: number;
  // Present only when the check is unhealthy.
  error?: string;
  error_code?: string;
};

// JSON body served by the health endpoint.
type HealthResponse = {
  // "healthy" only when every individual check is healthy.
  status: CheckStatus;
  timestamp: string;
  checks: {
    cloudflare_api: HealthCheck;
    graphql_api: HealthCheck;
  };
};

// A cached health result together with its absolute expiry (epoch ms).
type CachedHealth = {
  response: HealthResponse;
  expires: number;
};

// Module-level cache. NOTE(review): in Workers this presumably lives per
// isolate, so separate isolates each run their own probes — confirm that
// duplication is acceptable.
let healthCache: CachedHealth | null = null;
|
||||
|
||||
/**
|
||||
* Check Cloudflare REST API connectivity by fetching accounts.
|
||||
*
|
||||
* @param env Environment variables.
|
||||
* @returns Health check result.
|
||||
*/
|
||||
async function checkCloudflareApi(env: Env): Promise<HealthCheck> {
|
||||
const start = performance.now();
|
||||
|
||||
try {
|
||||
const client = getCloudflareMetricsClient(env);
|
||||
const result = await withTimeout(
|
||||
client.getAccounts(),
|
||||
CHECK_TIMEOUT_MS,
|
||||
"Cloudflare API health check",
|
||||
);
|
||||
const latency_ms = Math.round(performance.now() - start);
|
||||
|
||||
if (result.ok) {
|
||||
return { status: "healthy", latency_ms };
|
||||
}
|
||||
return {
|
||||
status: "unhealthy",
|
||||
latency_ms,
|
||||
error: result.error.message,
|
||||
error_code: result.error.code,
|
||||
};
|
||||
} catch (err) {
|
||||
const latency_ms = Math.round(performance.now() - start);
|
||||
const info = extractErrorInfo(err);
|
||||
return {
|
||||
status: "unhealthy",
|
||||
latency_ms,
|
||||
error: info.message,
|
||||
error_code: info.code,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check Cloudflare GraphQL API connectivity via introspection.
|
||||
*
|
||||
* @param env Environment variables.
|
||||
* @returns Health check result.
|
||||
*/
|
||||
async function checkGraphqlApi(env: Env): Promise<HealthCheck> {
|
||||
const start = performance.now();
|
||||
|
||||
try {
|
||||
const result = await withTimeout(
|
||||
fetch(CLOUDFLARE_GQL_URL, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
Authorization: `Bearer ${env.CLOUDFLARE_API_TOKEN}`,
|
||||
},
|
||||
body: JSON.stringify({
|
||||
query: "{ __typename }",
|
||||
}),
|
||||
}),
|
||||
CHECK_TIMEOUT_MS,
|
||||
"GraphQL API health check",
|
||||
);
|
||||
|
||||
const latency_ms = Math.round(performance.now() - start);
|
||||
|
||||
if (!result.ok) {
|
||||
return {
|
||||
status: "unhealthy",
|
||||
latency_ms,
|
||||
error: result.error.message,
|
||||
error_code: result.error.code,
|
||||
};
|
||||
}
|
||||
|
||||
const response = result.value;
|
||||
if (!response.ok) {
|
||||
return {
|
||||
status: "unhealthy",
|
||||
latency_ms,
|
||||
error: `HTTP ${response.status}`,
|
||||
error_code: "API_UNAVAILABLE",
|
||||
};
|
||||
}
|
||||
|
||||
return { status: "healthy", latency_ms };
|
||||
} catch (err) {
|
||||
const latency_ms = Math.round(performance.now() - start);
|
||||
const info = extractErrorInfo(err);
|
||||
return {
|
||||
status: "unhealthy",
|
||||
latency_ms,
|
||||
error: info.message,
|
||||
error_code: info.code,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform health check with configurable caching.
|
||||
*
|
||||
* @param env Environment variables.
|
||||
* @returns Health check response.
|
||||
*/
|
||||
export async function checkHealth(env: Env): Promise<HealthResponse> {
|
||||
const now = Date.now();
|
||||
const config = await getConfig(env);
|
||||
const cacheTtlMs = config.healthCheckCacheTtlSeconds * 1000;
|
||||
|
||||
if (healthCache && healthCache.expires > now) {
|
||||
return healthCache.response;
|
||||
}
|
||||
|
||||
const [cloudflareApi, graphqlApi] = await Promise.all([
|
||||
checkCloudflareApi(env),
|
||||
checkGraphqlApi(env),
|
||||
]);
|
||||
|
||||
const allHealthy =
|
||||
cloudflareApi.status === "healthy" && graphqlApi.status === "healthy";
|
||||
|
||||
const response: HealthResponse = {
|
||||
status: allHealthy ? "healthy" : "unhealthy",
|
||||
timestamp: new Date().toISOString(),
|
||||
checks: {
|
||||
cloudflare_api: cloudflareApi,
|
||||
graphql_api: graphqlApi,
|
||||
},
|
||||
};
|
||||
|
||||
healthCache = {
|
||||
response,
|
||||
expires: now + cacheTtlMs,
|
||||
};
|
||||
|
||||
return response;
|
||||
}
|
||||
|
||||
/**
|
||||
* Build HTTP response from health check result.
|
||||
*
|
||||
* @param health Health check response.
|
||||
* @returns HTTP response with JSON body.
|
||||
*/
|
||||
export function healthResponse(health: HealthResponse): Response {
|
||||
const status = health.status === "healthy" ? 200 : 503;
|
||||
return new Response(JSON.stringify(health), {
|
||||
status,
|
||||
headers: { "Content-Type": "application/json" },
|
||||
});
|
||||
}
|
||||
323
src/lib/logger.ts
Normal file
323
src/lib/logger.ts
Normal file
@@ -0,0 +1,323 @@
|
||||
import { createConsola, type LogObject } from "consola";
|
||||
|
||||
// Raw ANSI escape codes - bypass consola's color detection which doesn't work in wrangler dev
|
||||
const ansi = {
|
||||
reset: "\x1b[0m",
|
||||
dim: "\x1b[2m",
|
||||
bold: "\x1b[1m",
|
||||
red: "\x1b[31m",
|
||||
green: "\x1b[32m",
|
||||
yellow: "\x1b[33m",
|
||||
blue: "\x1b[34m",
|
||||
magenta: "\x1b[35m",
|
||||
cyan: "\x1b[36m",
|
||||
white: "\x1b[37m",
|
||||
gray: "\x1b[90m",
|
||||
};
|
||||
|
||||
const c = {
|
||||
dim: (s: string) => `${ansi.dim}${s}${ansi.reset}`,
|
||||
red: (s: string) => `${ansi.red}${s}${ansi.reset}`,
|
||||
green: (s: string) => `${ansi.green}${s}${ansi.reset}`,
|
||||
yellow: (s: string) => `${ansi.yellow}${s}${ansi.reset}`,
|
||||
cyan: (s: string) => `${ansi.cyan}${s}${ansi.reset}`,
|
||||
white: (s: string) => `${ansi.white}${s}${ansi.reset}`,
|
||||
gray: (s: string) => `${ansi.gray}${s}${ansi.reset}`,
|
||||
magenta: (s: string) => `${ansi.magenta}${s}${ansi.reset}`,
|
||||
};
|
||||
|
||||
/**
|
||||
* Log severity levels.
|
||||
*/
|
||||
export type LogLevel = "debug" | "info" | "warn" | "error";
|
||||
|
||||
/**
|
||||
* Output format: json for structured logs, pretty for human-readable.
|
||||
*/
|
||||
export type LogFormat = "json" | "pretty";
|
||||
|
||||
/**
|
||||
* Key-value pairs attached to log entries.
|
||||
*/
|
||||
export type StructuredData = Record<string, unknown>;
|
||||
|
||||
/**
|
||||
* Structured logger with level methods, namespacing, and context.
|
||||
*/
|
||||
export interface Logger {
|
||||
/**
|
||||
* Log debug message.
|
||||
*
|
||||
* @param msg Message text.
|
||||
* @param data Optional structured data.
|
||||
*/
|
||||
debug(msg: string, data?: StructuredData): void;
|
||||
|
||||
/**
|
||||
* Log info message.
|
||||
*
|
||||
* @param msg Message text.
|
||||
* @param data Optional structured data.
|
||||
*/
|
||||
info(msg: string, data?: StructuredData): void;
|
||||
|
||||
/**
|
||||
* Log warning message.
|
||||
*
|
||||
* @param msg Message text.
|
||||
* @param data Optional structured data.
|
||||
*/
|
||||
warn(msg: string, data?: StructuredData): void;
|
||||
|
||||
/**
|
||||
* Log error message.
|
||||
*
|
||||
* @param msg Message text.
|
||||
* @param data Optional structured data.
|
||||
*/
|
||||
error(msg: string, data?: StructuredData): void;
|
||||
|
||||
/**
|
||||
* Create child logger with namespaced tag.
|
||||
*
|
||||
* @param namespace Namespace appended to parent tag with colon separator.
|
||||
* @returns New logger instance.
|
||||
*/
|
||||
child(namespace: string): Logger;
|
||||
|
||||
/**
|
||||
* Create logger with merged context data.
|
||||
*
|
||||
* @param ctx Context data merged into all log entries.
|
||||
* @returns New logger instance.
|
||||
*/
|
||||
withContext(ctx: StructuredData): Logger;
|
||||
}
|
||||
|
||||
/**
|
||||
* Logger configuration.
|
||||
*/
|
||||
export interface LoggerConfig {
|
||||
/** Output format, defaults to pretty. */
|
||||
format?: LogFormat;
|
||||
|
||||
/** Minimum log level, defaults to info. */
|
||||
level?: LogLevel;
|
||||
}
|
||||
|
||||
const LEVELS: Record<LogLevel, number> = {
|
||||
debug: 0,
|
||||
info: 1,
|
||||
warn: 2,
|
||||
error: 3,
|
||||
};
|
||||
|
||||
const LEVEL_COLORS: Record<LogLevel, (s: string) => string> = {
|
||||
debug: c.gray,
|
||||
info: c.cyan,
|
||||
warn: c.yellow,
|
||||
error: c.red,
|
||||
};
|
||||
|
||||
const LEVEL_ICONS: Record<LogLevel, string> = {
|
||||
debug: "●",
|
||||
info: "◆",
|
||||
warn: "▲",
|
||||
error: "✖",
|
||||
};
|
||||
|
||||
/**
|
||||
* Format current time as HH:MM:SS.
|
||||
*
|
||||
* @returns Formatted time string.
|
||||
*/
|
||||
function formatTime(): string {
|
||||
const now = new Date();
|
||||
return `${String(now.getHours()).padStart(2, "0")}:${String(now.getMinutes()).padStart(2, "0")}:${String(now.getSeconds()).padStart(2, "0")}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get current UTC timestamp in ISO format.
|
||||
*
|
||||
* @returns ISO 8601 timestamp string.
|
||||
*/
|
||||
function utcTimestamp(): string {
|
||||
return new Date().toISOString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Format value for display in logs.
|
||||
*
|
||||
* @param v Value to format.
|
||||
* @returns Formatted string representation.
|
||||
*/
|
||||
function formatValue(v: unknown): string {
|
||||
if (typeof v === "string") return v;
|
||||
if (typeof v === "number" || typeof v === "boolean") return String(v);
|
||||
return JSON.stringify(v);
|
||||
}
|
||||
|
||||
/**
|
||||
* Format structured data as key=value pairs.
|
||||
*
|
||||
* @param data Structured data object.
|
||||
* @returns Formatted string with colored key-value pairs.
|
||||
*/
|
||||
function formatData(data: StructuredData): string {
|
||||
return Object.entries(data)
|
||||
.map(([k, v]) => `${c.dim(k)}=${c.white(formatValue(v))}`)
|
||||
.join(" ");
|
||||
}
|
||||
|
||||
/**
|
||||
* Shorten tag for display: truncate zone/account IDs to 8 chars.
|
||||
*
|
||||
* @param tag Tag string to shorten.
|
||||
* @returns Shortened tag string.
|
||||
*/
|
||||
function shortenTag(tag: string): string {
|
||||
// Pattern: something:scope:longid:query -> something:scope:shortid:query
|
||||
return tag.replace(/([a-f0-9]{32})/g, (match) => match.slice(0, 8));
|
||||
}
|
||||
|
||||
/**
 * Create pretty console reporter for human-readable logs.
 *
 * @param minLevel Minimum log level to output.
 * @returns Reporter object with log method.
 */
function createPrettyReporter(minLevel: LogLevel) {
  const minLevelNum = LEVELS[minLevel];

  return {
    log(logObj: LogObject) {
      // Drop entries whose consola type is not one of our levels,
      // or whose level is below the configured threshold.
      const level = logObj.type as LogLevel;
      if (LEVELS[level] === undefined || LEVELS[level] < minLevelNum) return;

      const tag = logObj.tag || "app";
      const colorFn = LEVEL_COLORS[level] || c.white;
      const icon = LEVEL_ICONS[level] || "●";
      // Convention: args[0] is the message, args[1] optional structured data.
      const args = logObj.args as [string, StructuredData?];
      const msg = args[0];
      const data = args[1];

      const time = c.dim(formatTime());
      // padEnd(5) aligns level badges ("DEBUG" is the longest at 5 chars).
      const levelBadge = colorFn(`${icon} ${level.toUpperCase().padEnd(5)}`);
      const shortTag = c.dim(shortenTag(tag));
      const suffix = data ? ` ${formatData(data)}` : "";

      console.log(`${time} ${levelBadge} ${shortTag} ${msg}${suffix}`);
    },
  };
}
|
||||
|
||||
/**
 * Create JSON reporter for structured logs (one JSON object per line).
 *
 * @param minLevel Minimum log level to output.
 * @returns Reporter object with log method.
 */
function createJsonReporter(minLevel: LogLevel) {
  const minLevelNum = LEVELS[minLevel];

  return {
    log(logObj: LogObject) {
      // Drop entries whose consola type is not a known level or is below threshold.
      const level = logObj.type as LogLevel;
      if (LEVELS[level] === undefined || LEVELS[level] < minLevelNum) return;

      // Tag format is "logger:ns1:ns2": the first segment is the logger name,
      // any remaining segments are re-joined into the namespace field.
      const tagParts = (logObj.tag || "app").split(":");
      const [logger, ...namespaceParts] = tagParts;
      const namespace =
        namespaceParts.length > 0 ? namespaceParts.join(":") : undefined;

      // Convention: args[0] is the message, args[1] optional structured data.
      const args = logObj.args as [string, StructuredData?];
      const msg = args[0];
      const data = args[1];

      // NOTE: data is spread last, so data keys named ts/logger/level/msg
      // will shadow the built-in fields — callers should avoid those keys.
      console.log(
        JSON.stringify({
          ts: utcTimestamp(),
          logger,
          ...(namespace && { namespace }),
          level,
          msg,
          ...data,
        }),
      );
    },
  };
}
|
||||
|
||||
// Consola log levels: 0=silent, 1=error, 2=warn, 3=info, 4=debug, 5=trace
|
||||
const CONSOLA_LEVELS: Record<LogLevel, number> = {
|
||||
error: 1,
|
||||
warn: 2,
|
||||
info: 3,
|
||||
debug: 4,
|
||||
};
|
||||
|
||||
/**
 * Create logger instance with specified name and config.
 *
 * @param name Logger name, normalized to lowercase with underscores.
 * @param config Logger configuration.
 * @returns Configured logger instance.
 */
export function createLogger(name: string, config: LoggerConfig = {}): Logger {
  const format = config.format ?? "pretty";
  const level = config.level ?? "info";

  const reporter =
    format === "json" ? createJsonReporter(level) : createPrettyReporter(level);

  // One consola instance is shared by this logger and all of its children;
  // child()/withContext() only vary the tag and the merged context.
  const consola = createConsola({
    level: CONSOLA_LEVELS[level],
    reporters: [reporter],
  });

  function makeLogger(tag: string, baseContext: StructuredData = {}): Logger {
    const instance = consola.withTag(tag);

    // Merge per-call data over the logger's base context. Returns undefined
    // when both are empty so reporters can omit the data suffix entirely.
    const mergeData = (data?: StructuredData): StructuredData | undefined => {
      if (!data && Object.keys(baseContext).length === 0) return undefined;
      if (!data) return baseContext;
      return { ...baseContext, ...data };
    };

    return {
      debug: (msg, data) => instance.debug(msg, mergeData(data)),
      info: (msg, data) => instance.info(msg, mergeData(data)),
      warn: (msg, data) => instance.warn(msg, mergeData(data)),
      error: (msg, data) => instance.error(msg, mergeData(data)),
      // Children append ":ns" to the tag and inherit the parent context as-is.
      child: (ns) => makeLogger(`${tag}:${ns}`, baseContext),
      withContext: (ctx) => makeLogger(tag, { ...baseContext, ...ctx }),
    };
  }

  const normalizedName = name.toLowerCase().replace(/[ -]/g, "_");
  return makeLogger(normalizedName);
}
|
||||
|
||||
/**
|
||||
* Create logger config from Cloudflare Worker env.
|
||||
*
|
||||
* @param env Environment object with LOG_FORMAT and LOG_LEVEL.
|
||||
* @returns Logger configuration.
|
||||
*/
|
||||
export function configFromEnv(env: {
|
||||
LOG_FORMAT?: string;
|
||||
LOG_LEVEL?: string;
|
||||
}): LoggerConfig {
|
||||
const format = env.LOG_FORMAT;
|
||||
const level = env.LOG_LEVEL;
|
||||
return {
|
||||
format: format === "json" || format === "pretty" ? format : "pretty",
|
||||
level:
|
||||
level === "debug" ||
|
||||
level === "info" ||
|
||||
level === "warn" ||
|
||||
level === "error"
|
||||
? level
|
||||
: "info",
|
||||
};
|
||||
}
|
||||
42
src/lib/metrics.ts
Normal file
42
src/lib/metrics.ts
Normal file
@@ -0,0 +1,42 @@
|
||||
import z from "zod";
|
||||
|
||||
/**
|
||||
* Prometheus metric type discriminator.
|
||||
*/
|
||||
export type MetricType = z.infer<typeof MetricTypeSchema>;
|
||||
|
||||
/**
|
||||
* Zod schema validating Prometheus metric types (counter or gauge).
|
||||
*/
|
||||
export const MetricTypeSchema = z.union([
|
||||
z.literal("counter"),
|
||||
z.literal("gauge"),
|
||||
]);
|
||||
|
||||
/**
|
||||
* Single metric observation with labels and numeric value.
|
||||
*/
|
||||
export type MetricValue = z.infer<typeof MetricValueSchema>;
|
||||
|
||||
/**
|
||||
* Zod schema validating metric observations with label key-value pairs and numeric values.
|
||||
*/
|
||||
export const MetricValueSchema = z.object({
|
||||
labels: z.record(z.string(), z.string()),
|
||||
value: z.number(),
|
||||
});
|
||||
|
||||
/**
|
||||
* Complete metric definition with metadata and observations for Prometheus export.
|
||||
*/
|
||||
export type MetricDefinition = z.infer<typeof MetricDefinitionSchema>;
|
||||
|
||||
/**
|
||||
* Zod schema validating complete metric definitions including name, help text, type, and observations.
|
||||
*/
|
||||
export const MetricDefinitionSchema = z.object({
|
||||
name: z.string(),
|
||||
help: z.string(),
|
||||
type: MetricTypeSchema,
|
||||
values: z.array(MetricValueSchema),
|
||||
});
|
||||
149
src/lib/prometheus.ts
Normal file
149
src/lib/prometheus.ts
Normal file
@@ -0,0 +1,149 @@
|
||||
import type { MetricDefinition } from "./metrics";
|
||||
|
||||
/**
|
||||
* Options for Prometheus serialization.
|
||||
*/
|
||||
export type SerializeOptions = {
|
||||
/** Set of metric names to exclude from output. */
|
||||
denylist?: ReadonlySet<string>;
|
||||
/** Set of label keys to exclude from all metrics. */
|
||||
excludeLabels?: ReadonlySet<string>;
|
||||
};
|
||||
|
||||
/**
 * Serializes MetricDefinition array to Prometheus text exposition format.
 * Groups metrics by name, outputs HELP/TYPE headers, then values.
 *
 * When two definitions share a name their values are concatenated in input
 * order, and the FIRST definition's help text and type win.
 *
 * @param metrics Array of metric definitions to serialize.
 * @param options Serialization options for filtering.
 * @returns Prometheus-formatted metrics string.
 */
export function serializeToPrometheus(
  metrics: readonly MetricDefinition[],
  options?: SerializeOptions,
): string {
  const denylist = options?.denylist ?? new Set<string>();
  const excludeLabels = options?.excludeLabels ?? new Set<string>();

  // Group metrics by name to consolidate HELP/TYPE headers
  const grouped = new Map<string, MetricDefinition>();

  for (const metric of metrics) {
    // Skip denied metrics
    if (denylist.has(metric.name)) {
      continue;
    }

    // Filter excluded labels from all values
    const filteredValues =
      excludeLabels.size > 0
        ? metric.values.map((v) => ({
            ...v,
            labels: filterLabels(v.labels, excludeLabels),
          }))
        : metric.values;

    const existing = grouped.get(metric.name);
    if (existing) {
      // Merge values (header metadata of the first occurrence is kept)
      grouped.set(metric.name, {
        ...existing,
        values: [...existing.values, ...filteredValues],
      });
    } else {
      grouped.set(metric.name, { ...metric, values: [...filteredValues] });
    }
  }

  const lines: string[] = [];

  // Map preserves insertion order, so output order matches input order.
  for (const [name, metric] of grouped) {
    // HELP line
    lines.push(`# HELP ${name} ${escapeHelp(metric.help)}`);
    // TYPE line
    lines.push(`# TYPE ${name} ${metric.type}`);

    // Value lines
    for (const { labels, value } of metric.values) {
      const labelStr = formatLabels(labels);
      lines.push(`${name}${labelStr} ${formatValue(value)}`);
    }

    // Blank line between metrics for readability
    lines.push("");
  }

  return lines.join("\n");
}
|
||||
|
||||
/**
|
||||
* Filters out excluded label keys from a labels object.
|
||||
*
|
||||
* @param labels Original label key-value pairs.
|
||||
* @param exclude Set of label keys to exclude.
|
||||
* @returns Filtered labels object.
|
||||
*/
|
||||
function filterLabels(
|
||||
labels: Record<string, string>,
|
||||
exclude: ReadonlySet<string>,
|
||||
): Record<string, string> {
|
||||
const filtered: Record<string, string> = {};
|
||||
for (const [key, value] of Object.entries(labels)) {
|
||||
if (!exclude.has(key)) {
|
||||
filtered[key] = value;
|
||||
}
|
||||
}
|
||||
return filtered;
|
||||
}
|
||||
|
||||
/**
|
||||
* Formats labels object into Prometheus label string.
|
||||
*
|
||||
* @param labels Label key-value pairs.
|
||||
* @returns Formatted label string like `{key="value"}` or empty string.
|
||||
*/
|
||||
function formatLabels(labels: Record<string, string>): string {
|
||||
const entries = Object.entries(labels);
|
||||
if (entries.length === 0) return "";
|
||||
|
||||
const formatted = entries
|
||||
.map(([key, value]) => `${key}="${escapeLabel(value)}"`)
|
||||
.join(",");
|
||||
|
||||
return `{${formatted}}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Formats numeric value for Prometheus output.
|
||||
*
|
||||
* @param value Numeric value to format.
|
||||
* @returns String representation handling NaN and Infinity.
|
||||
*/
|
||||
function formatValue(value: number): string {
|
||||
if (Number.isNaN(value)) return "NaN";
|
||||
if (!Number.isFinite(value)) return value > 0 ? "+Inf" : "-Inf";
|
||||
return String(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Escapes special characters in HELP text.
|
||||
*
|
||||
* @param help Raw help text.
|
||||
* @returns Escaped help text.
|
||||
*/
|
||||
function escapeHelp(help: string): string {
|
||||
return help.replace(/\\/g, "\\\\").replace(/\n/g, "\\n");
|
||||
}
|
||||
|
||||
/**
|
||||
* Escapes special characters in label values.
|
||||
*
|
||||
* @param value Raw label value.
|
||||
* @returns Escaped label value.
|
||||
*/
|
||||
function escapeLabel(value: string): string {
|
||||
return value
|
||||
.replace(/\\/g, "\\\\")
|
||||
.replace(/"/g, '\\"')
|
||||
.replace(/\n/g, "\\n");
|
||||
}
|
||||
375
src/lib/runtime-config.ts
Normal file
375
src/lib/runtime-config.ts
Normal file
@@ -0,0 +1,375 @@
|
||||
import { z } from "zod";
|
||||
|
||||
/** KV storage key for configuration overrides. */
|
||||
const KV_KEY = "overrides";
|
||||
|
||||
/**
|
||||
* Zod schema for valid configuration key names.
|
||||
*/
|
||||
export const ConfigKeySchema = z.enum([
|
||||
// Timing/limits
|
||||
"queryLimit",
|
||||
"scrapeDelaySeconds",
|
||||
"timeWindowSeconds",
|
||||
"metricRefreshIntervalSeconds",
|
||||
// Cache TTLs
|
||||
"accountListCacheTtlSeconds",
|
||||
"zoneListCacheTtlSeconds",
|
||||
"sslCertsCacheTtlSeconds",
|
||||
"healthCheckCacheTtlSeconds",
|
||||
// Logging
|
||||
"logFormat",
|
||||
"logLevel",
|
||||
// Filters/whitelists
|
||||
"cfAccounts",
|
||||
"cfZones",
|
||||
"cfFreeTierAccounts",
|
||||
"metricsDenylist",
|
||||
// Output options
|
||||
"excludeHost",
|
||||
"httpStatusGroup",
|
||||
]);
|
||||
|
||||
/**
|
||||
* Union type of all valid configuration key names.
|
||||
*/
|
||||
export type ConfigKey = z.infer<typeof ConfigKeySchema>;
|
||||
|
||||
/**
|
||||
* Zod schemas for individual configuration values by key.
|
||||
*/
|
||||
const ConfigValueSchemas = {
|
||||
queryLimit: z.number().int().positive(),
|
||||
scrapeDelaySeconds: z.number().int().nonnegative(),
|
||||
timeWindowSeconds: z.number().int().positive(),
|
||||
metricRefreshIntervalSeconds: z.number().int().positive(),
|
||||
accountListCacheTtlSeconds: z.number().int().nonnegative(),
|
||||
zoneListCacheTtlSeconds: z.number().int().nonnegative(),
|
||||
sslCertsCacheTtlSeconds: z.number().int().nonnegative(),
|
||||
healthCheckCacheTtlSeconds: z.number().int().nonnegative(),
|
||||
logFormat: z.enum(["json", "pretty"]),
|
||||
logLevel: z.enum(["debug", "info", "warn", "error"]),
|
||||
cfAccounts: z.string().nullable(),
|
||||
cfZones: z.string().nullable(),
|
||||
cfFreeTierAccounts: z.string(),
|
||||
metricsDenylist: z.string(),
|
||||
excludeHost: z.boolean(),
|
||||
httpStatusGroup: z.boolean(),
|
||||
} as const;
|
||||
|
||||
/**
|
||||
* Zod schema for partial configuration overrides (all fields optional).
|
||||
*/
|
||||
export const ConfigOverridesSchema = z
|
||||
.object({
|
||||
queryLimit: ConfigValueSchemas.queryLimit.optional(),
|
||||
scrapeDelaySeconds: ConfigValueSchemas.scrapeDelaySeconds.optional(),
|
||||
timeWindowSeconds: ConfigValueSchemas.timeWindowSeconds.optional(),
|
||||
metricRefreshIntervalSeconds:
|
||||
ConfigValueSchemas.metricRefreshIntervalSeconds.optional(),
|
||||
accountListCacheTtlSeconds:
|
||||
ConfigValueSchemas.accountListCacheTtlSeconds.optional(),
|
||||
zoneListCacheTtlSeconds:
|
||||
ConfigValueSchemas.zoneListCacheTtlSeconds.optional(),
|
||||
sslCertsCacheTtlSeconds:
|
||||
ConfigValueSchemas.sslCertsCacheTtlSeconds.optional(),
|
||||
healthCheckCacheTtlSeconds:
|
||||
ConfigValueSchemas.healthCheckCacheTtlSeconds.optional(),
|
||||
logFormat: ConfigValueSchemas.logFormat.optional(),
|
||||
logLevel: ConfigValueSchemas.logLevel.optional(),
|
||||
cfAccounts: ConfigValueSchemas.cfAccounts.optional(),
|
||||
cfZones: ConfigValueSchemas.cfZones.optional(),
|
||||
cfFreeTierAccounts: ConfigValueSchemas.cfFreeTierAccounts.optional(),
|
||||
metricsDenylist: ConfigValueSchemas.metricsDenylist.optional(),
|
||||
excludeHost: ConfigValueSchemas.excludeHost.optional(),
|
||||
httpStatusGroup: ConfigValueSchemas.httpStatusGroup.optional(),
|
||||
})
|
||||
.readonly();
|
||||
|
||||
/**
|
||||
* Partial configuration overrides stored in KV.
|
||||
*/
|
||||
export type ConfigOverrides = z.infer<typeof ConfigOverridesSchema>;
|
||||
|
||||
/**
|
||||
* Zod schema for fully resolved configuration (all fields required).
|
||||
*/
|
||||
export const ResolvedConfigSchema = z
|
||||
.object({
|
||||
queryLimit: ConfigValueSchemas.queryLimit,
|
||||
scrapeDelaySeconds: ConfigValueSchemas.scrapeDelaySeconds,
|
||||
timeWindowSeconds: ConfigValueSchemas.timeWindowSeconds,
|
||||
metricRefreshIntervalSeconds:
|
||||
ConfigValueSchemas.metricRefreshIntervalSeconds,
|
||||
accountListCacheTtlSeconds: ConfigValueSchemas.accountListCacheTtlSeconds,
|
||||
zoneListCacheTtlSeconds: ConfigValueSchemas.zoneListCacheTtlSeconds,
|
||||
sslCertsCacheTtlSeconds: ConfigValueSchemas.sslCertsCacheTtlSeconds,
|
||||
healthCheckCacheTtlSeconds: ConfigValueSchemas.healthCheckCacheTtlSeconds,
|
||||
logFormat: ConfigValueSchemas.logFormat,
|
||||
logLevel: ConfigValueSchemas.logLevel,
|
||||
cfAccounts: ConfigValueSchemas.cfAccounts,
|
||||
cfZones: ConfigValueSchemas.cfZones,
|
||||
cfFreeTierAccounts: ConfigValueSchemas.cfFreeTierAccounts,
|
||||
metricsDenylist: ConfigValueSchemas.metricsDenylist,
|
||||
excludeHost: ConfigValueSchemas.excludeHost,
|
||||
httpStatusGroup: ConfigValueSchemas.httpStatusGroup,
|
||||
})
|
||||
.readonly();
|
||||
|
||||
/**
|
||||
* Fully resolved configuration with all fields populated.
|
||||
*/
|
||||
export type ResolvedConfig = z.infer<typeof ResolvedConfigSchema>;
|
||||
|
||||
/**
|
||||
* Optional environment variables not defined in wrangler.jsonc.
|
||||
*/
|
||||
type OptionalEnvVars = {
|
||||
METRICS_DENYLIST?: string;
|
||||
CF_ACCOUNTS?: string;
|
||||
CF_ZONES?: string;
|
||||
CF_FREE_TIER_ACCOUNTS?: string;
|
||||
HEALTH_CHECK_CACHE_TTL_SECONDS?: string;
|
||||
};
|
||||
|
||||
/**
|
||||
* Gets default configuration values from environment variables.
|
||||
*
|
||||
* @param env Worker environment bindings.
|
||||
* @returns Resolved configuration with defaults applied.
|
||||
*/
|
||||
export function getEnvDefaults(env: Env): ResolvedConfig {
|
||||
const optionalEnv = env as Env & OptionalEnvVars;
|
||||
return {
|
||||
queryLimit: z.coerce.number().catch(10000).parse(env.QUERY_LIMIT),
|
||||
scrapeDelaySeconds: z.coerce
|
||||
.number()
|
||||
.catch(300)
|
||||
.parse(env.SCRAPE_DELAY_SECONDS),
|
||||
timeWindowSeconds: z.coerce
|
||||
.number()
|
||||
.catch(60)
|
||||
.parse(env.TIME_WINDOW_SECONDS),
|
||||
metricRefreshIntervalSeconds: z.coerce
|
||||
.number()
|
||||
.catch(60)
|
||||
.parse(env.METRIC_REFRESH_INTERVAL_SECONDS),
|
||||
accountListCacheTtlSeconds: z.coerce
|
||||
.number()
|
||||
.catch(600)
|
||||
.parse(env.ACCOUNT_LIST_CACHE_TTL_SECONDS),
|
||||
zoneListCacheTtlSeconds: z.coerce
|
||||
.number()
|
||||
.catch(1800)
|
||||
.parse(env.ZONE_LIST_CACHE_TTL_SECONDS),
|
||||
sslCertsCacheTtlSeconds: z.coerce
|
||||
.number()
|
||||
.catch(1800)
|
||||
.parse(env.SSL_CERTS_CACHE_TTL_SECONDS),
|
||||
healthCheckCacheTtlSeconds: z.coerce
|
||||
.number()
|
||||
.catch(10)
|
||||
.parse(optionalEnv.HEALTH_CHECK_CACHE_TTL_SECONDS),
|
||||
logFormat: z.enum(["json", "pretty"]).catch("pretty").parse(env.LOG_FORMAT),
|
||||
logLevel: z
|
||||
.enum(["debug", "info", "warn", "error"])
|
||||
.catch("info")
|
||||
.parse(env.LOG_LEVEL),
|
||||
cfAccounts: optionalEnv.CF_ACCOUNTS?.trim() || null,
|
||||
cfZones: optionalEnv.CF_ZONES?.trim() || null,
|
||||
cfFreeTierAccounts: optionalEnv.CF_FREE_TIER_ACCOUNTS?.trim() ?? "",
|
||||
metricsDenylist: optionalEnv.METRICS_DENYLIST?.trim() ?? "",
|
||||
excludeHost: z.coerce.boolean().catch(false).parse(env.EXCLUDE_HOST),
|
||||
httpStatusGroup: z.coerce
|
||||
.boolean()
|
||||
.catch(false)
|
||||
.parse(env.CF_HTTP_STATUS_GROUP),
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Reads configuration overrides from KV storage.
 * Returns empty object on parse errors or missing data — fail-open so a
 * corrupt KV payload can never take the exporter down.
 *
 * @param env Worker environment bindings.
 * @returns Configuration overrides or empty object.
 */
async function readOverrides(env: Env): Promise<ConfigOverrides> {
  const raw = await env.CONFIG_KV.get(KV_KEY);
  if (!raw) return {};
  try {
    const parsed: unknown = JSON.parse(raw);
    const result = ConfigOverridesSchema.safeParse(parsed);
    if (!result.success) {
      // Valid JSON but wrong shape: log it and fall back to defaults.
      console.error("Invalid config overrides in KV, using defaults", {
        error: result.error.message,
      });
      return {};
    }
    return result.data;
  } catch {
    // Stored value is not JSON at all.
    console.error("Failed to parse config overrides from KV, using defaults");
    return {};
  }
}
|
||||
|
||||
/**
|
||||
* Writes configuration overrides to KV storage.
|
||||
*
|
||||
* @param env Worker environment bindings.
|
||||
* @param overrides Configuration overrides to persist.
|
||||
*/
|
||||
async function writeOverrides(
|
||||
env: Env,
|
||||
overrides: ConfigOverrides,
|
||||
): Promise<void> {
|
||||
await env.CONFIG_KV.put(KV_KEY, JSON.stringify(overrides));
|
||||
}
|
||||
|
||||
/**
 * Merges configuration overrides with environment defaults.
 *
 * @param defaults Default configuration from environment.
 * @param overrides Partial overrides from KV storage.
 * @returns Fully resolved configuration.
 */
function mergeConfig(
  defaults: ResolvedConfig,
  overrides: ConfigOverrides,
): ResolvedConfig {
  return {
    queryLimit: overrides.queryLimit ?? defaults.queryLimit,
    scrapeDelaySeconds:
      overrides.scrapeDelaySeconds ?? defaults.scrapeDelaySeconds,
    timeWindowSeconds:
      overrides.timeWindowSeconds ?? defaults.timeWindowSeconds,
    metricRefreshIntervalSeconds:
      overrides.metricRefreshIntervalSeconds ??
      defaults.metricRefreshIntervalSeconds,
    accountListCacheTtlSeconds:
      overrides.accountListCacheTtlSeconds ??
      defaults.accountListCacheTtlSeconds,
    zoneListCacheTtlSeconds:
      overrides.zoneListCacheTtlSeconds ?? defaults.zoneListCacheTtlSeconds,
    sslCertsCacheTtlSeconds:
      overrides.sslCertsCacheTtlSeconds ?? defaults.sslCertsCacheTtlSeconds,
    healthCheckCacheTtlSeconds:
      overrides.healthCheckCacheTtlSeconds ??
      defaults.healthCheckCacheTtlSeconds,
    logFormat: overrides.logFormat ?? defaults.logFormat,
    logLevel: overrides.logLevel ?? defaults.logLevel,
    // cfAccounts/cfZones are nullable: an explicit `null` override must win,
    // so these compare against undefined instead of using `??` (which would
    // treat null as "unset" and fall back to the default).
    cfAccounts:
      overrides.cfAccounts !== undefined
        ? overrides.cfAccounts
        : defaults.cfAccounts,
    cfZones:
      overrides.cfZones !== undefined ? overrides.cfZones : defaults.cfZones,
    cfFreeTierAccounts:
      overrides.cfFreeTierAccounts ?? defaults.cfFreeTierAccounts,
    metricsDenylist: overrides.metricsDenylist ?? defaults.metricsDenylist,
    excludeHost: overrides.excludeHost ?? defaults.excludeHost,
    httpStatusGroup: overrides.httpStatusGroup ?? defaults.httpStatusGroup,
  };
}
|
||||
|
||||
/**
|
||||
* Gets resolved configuration by merging KV overrides with environment defaults.
|
||||
*
|
||||
* @param env Worker environment bindings.
|
||||
* @returns Fully resolved configuration.
|
||||
*/
|
||||
export async function getConfig(env: Env): Promise<ResolvedConfig> {
|
||||
const defaults = getEnvDefaults(env);
|
||||
const overrides = await readOverrides(env);
|
||||
return mergeConfig(defaults, overrides);
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets a single configuration key value.
|
||||
*
|
||||
* @param env Worker environment bindings.
|
||||
* @param key Configuration key to retrieve.
|
||||
* @returns Value for the specified configuration key.
|
||||
*/
|
||||
export async function getConfigKey<K extends ConfigKey>(
|
||||
env: Env,
|
||||
key: K,
|
||||
): Promise<ResolvedConfig[K]> {
|
||||
const config = await getConfig(env);
|
||||
return config[key];
|
||||
}
|
||||
|
||||
/**
|
||||
* Validates a value for a specific configuration key.
|
||||
*
|
||||
* @param key Configuration key to validate against.
|
||||
* @param value Value to validate.
|
||||
* @returns Validation result with parsed data or Zod error.
|
||||
*/
|
||||
export function validateConfigValue(
|
||||
key: ConfigKey,
|
||||
value: unknown,
|
||||
): { success: true; data: unknown } | { success: false; error: z.ZodError } {
|
||||
return ConfigValueSchemas[key].safeParse(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Result type for setConfigKey operation.
|
||||
*/
|
||||
type SetConfigKeyResult =
|
||||
| { success: true; config: ResolvedConfig }
|
||||
| { success: false; error: z.ZodError };
|
||||
|
||||
/**
|
||||
* Sets a single configuration key override with validation.
|
||||
*
|
||||
* @param env Worker environment bindings.
|
||||
* @param key Configuration key to set.
|
||||
* @param value Value to set for the key.
|
||||
* @returns Result with updated config or validation error.
|
||||
*/
|
||||
export async function setConfigKey(
|
||||
env: Env,
|
||||
key: ConfigKey,
|
||||
value: unknown,
|
||||
): Promise<SetConfigKeyResult> {
|
||||
const result = ConfigValueSchemas[key].safeParse(value);
|
||||
if (!result.success) {
|
||||
return { success: false, error: result.error };
|
||||
}
|
||||
const overrides = await readOverrides(env);
|
||||
const updated = { ...overrides, [key]: result.data };
|
||||
await writeOverrides(env, updated);
|
||||
return {
|
||||
success: true,
|
||||
config: mergeConfig(getEnvDefaults(env), updated),
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Resets a single configuration key to its environment default.
|
||||
*
|
||||
* @param env Worker environment bindings.
|
||||
* @param key Configuration key to reset.
|
||||
* @returns Resolved configuration after reset.
|
||||
*/
|
||||
export async function resetConfigKey(
|
||||
env: Env,
|
||||
key: ConfigKey,
|
||||
): Promise<ResolvedConfig> {
|
||||
const overrides = await readOverrides(env);
|
||||
const { [key]: _, ...remaining } = overrides;
|
||||
await writeOverrides(env, remaining);
|
||||
return mergeConfig(getEnvDefaults(env), remaining);
|
||||
}
|
||||
|
||||
/**
|
||||
* Resets all configuration overrides to environment defaults.
|
||||
*
|
||||
* @param env Worker environment bindings.
|
||||
* @returns Resolved configuration with only environment defaults.
|
||||
*/
|
||||
export async function resetAllConfig(env: Env): Promise<ResolvedConfig> {
|
||||
await env.CONFIG_KV.delete(KV_KEY);
|
||||
return getEnvDefaults(env);
|
||||
}
|
||||
41
src/lib/time.ts
Normal file
41
src/lib/time.ts
Normal file
@@ -0,0 +1,41 @@
|
||||
import type { TimeRange } from "./types";
|
||||
|
||||
/**
|
||||
* Computes time range for GraphQL queries with delay and window.
|
||||
* Rounds to nearest minute and applies delay to account for ingestion lag.
|
||||
*
|
||||
* @param scrapeDelaySeconds Delay in seconds to account for ingestion lag.
|
||||
* @param timeWindowSeconds Window size in seconds for the time range.
|
||||
* @returns Time range with mintime and maxtime ISO strings.
|
||||
*/
|
||||
export function getTimeRange(
|
||||
scrapeDelaySeconds: number = 300,
|
||||
timeWindowSeconds: number = 60,
|
||||
): TimeRange {
|
||||
const now = new Date();
|
||||
now.setSeconds(0, 0);
|
||||
now.setTime(now.getTime() - scrapeDelaySeconds * 1000);
|
||||
const maxtime = now.toISOString();
|
||||
now.setTime(now.getTime() - timeWindowSeconds * 1000);
|
||||
const mintime = now.toISOString();
|
||||
return { mintime, maxtime };
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates deterministic metric key from name and labels.
|
||||
* Labels are sorted alphabetically for consistency.
|
||||
*
|
||||
* @param name Metric name.
|
||||
* @param labels Label key value pairs.
|
||||
* @returns Formatted metric key string.
|
||||
*/
|
||||
export function metricKey(
|
||||
name: string,
|
||||
labels: Record<string, string>,
|
||||
): string {
|
||||
const sortedLabels = Object.entries(labels)
|
||||
.sort(([a], [b]) => a.localeCompare(b))
|
||||
.map(([k, v]) => `${k}=${v}`)
|
||||
.join(",");
|
||||
return `${name}{${sortedLabels}}`;
|
||||
}
|
||||
195
src/lib/types.ts
Normal file
195
src/lib/types.ts
Normal file
@@ -0,0 +1,195 @@
|
||||
import z from "zod";
|
||||
import { MetricDefinitionSchema } from "./metrics";
|
||||
|
||||
// Re-export metric types from metrics.ts
|
||||
export type { MetricDefinition, MetricType, MetricValue } from "./metrics";
|
||||
export { MetricDefinitionSchema } from "./metrics";
|
||||
|
||||
/**
 * Zod schema for a MetricExporter scope discriminator: either
 * account-level ("account") or zone-level ("zone").
 */
export const ScopeTypeSchema = z.enum(["account", "zone"]);

/**
 * Scope discriminator for MetricExporter DOs, inferred from
 * {@link ScopeTypeSchema} ("account" | "zone").
 */
export type ScopeType = z.infer<typeof ScopeTypeSchema>;

/**
 * Compile-time template for MetricExporter DO ID strings of the form
 * "scope:id:queryName". The scope literals are duplicated from
 * ScopeTypeSchema's values; runtime validation of such strings happens in
 * MetricExporterIdSchema.
 */
export type MetricExporterIdString =
  `${"account" | "zone"}:${string}:${string}`;
|
||||
|
||||
/**
|
||||
* Zod schema that parses and validates MetricExporter DO ID strings.
|
||||
* Transforms "scope:id:query" into structured object.
|
||||
*/
|
||||
export const MetricExporterIdSchema = z
|
||||
.string()
|
||||
.regex(/^(account|zone):[^:]+:[^:]+$/)
|
||||
.transform((s) => {
|
||||
const parts = s.split(":");
|
||||
// Regex guarantees exactly 3 parts with account|zone prefix
|
||||
const scopeType = ScopeTypeSchema.parse(parts[0]);
|
||||
const scopeId = z.string().min(1).parse(parts[1]);
|
||||
const queryName = z.string().min(1).parse(parts[2]);
|
||||
return { scopeType, scopeId, queryName };
|
||||
});
|
||||
|
||||
/**
|
||||
* Parsed MetricExporter DO identifier with scope, ID, and query name.
|
||||
*/
|
||||
export type MetricExporterId = z.infer<typeof MetricExporterIdSchema>;
|
||||
|
||||
/**
 * Zod schema for per-counter state: the raw value observed on the previous
 * scrape (`prev`) and the running accumulated total (`accumulated`).
 */
export const CounterStateSchema = z
  .object({
    prev: z.number(),
    accumulated: z.number(),
  })
  .readonly();

/**
 * Counter state used to provide Prometheus monotonic counter semantics on
 * top of windowed query results.
 */
export type CounterState = z.infer<typeof CounterStateSchema>;

/**
 * Zod schema for the persistent state a MetricExporter DO keeps in storage.
 */
export const MetricStateSchema = z
  .object({
    // Optional account identity; presumably populated for account-scoped
    // exporters — confirm against MetricExporter.
    accountId: z.string().optional(),
    accountName: z.string().optional(),
    // Counter state per series; presumably keyed by metricKey() strings —
    // verify against the exporter's write path.
    counters: z.record(z.string(), CounterStateSchema),
    metrics: z.array(MetricDefinitionSchema).readonly(),
    // NOTE(review): looks like an epoch timestamp of the last fetch —
    // confirm whether seconds or milliseconds.
    lastFetch: z.number(),
    // Message of the most recent fetch error, if any.
    lastError: z.string().optional(),
  })
  .readonly();

/**
 * Persistent metric state stored by a MetricExporter Durable Object.
 */
export type MetricState = z.infer<typeof MetricStateSchema>;
|
||||
|
||||
/**
 * Zod schema for a Cloudflare account as returned by the REST API.
 */
export const AccountSchema = z
  .object({
    id: z.string(),
    name: z.string(),
  })
  .readonly();

/**
 * Cloudflare account with ID and display name.
 */
export type Account = z.infer<typeof AccountSchema>;

/**
 * Zod schema for a Cloudflare zone as returned by the REST API, including
 * its billing plan and owning account.
 */
export const ZoneSchema = z
  .object({
    id: z.string(),
    name: z.string(),
    status: z.string(),
    // Billing plan the zone is on.
    plan: z.object({
      id: z.string(),
      name: z.string(),
    }),
    // Owning account. NOTE(review): duplicates AccountSchema's shape;
    // consider reusing it — check readonly-wrapping implications first.
    account: z.object({
      id: z.string(),
      name: z.string(),
    }),
  })
  .readonly();

/**
 * Cloudflare zone with plan and account associations.
 */
export type Zone = z.infer<typeof ZoneSchema>;

/**
 * Zod schema for a Cloudflare SSL certificate API response.
 */
export const SSLCertificateSchema = z
  .object({
    id: z.string(),
    type: z.string(),
    status: z.string(),
    issuer: z.string(),
    // Expiry timestamp as a string; its format is not validated here.
    expiresOn: z.string(),
    // Hostnames the certificate covers.
    hosts: z.array(z.string()),
  })
  .readonly();

/**
 * SSL certificate with expiration and covered hostnames.
 */
export type SSLCertificate = z.infer<typeof SSLCertificateSchema>;
|
||||
|
||||
/**
 * Zod schema for a GraphQL query time range. Both bounds are ISO 8601
 * timestamp strings; the GraphQL queries consume them as
 * `datetime_geq: $mintime` / `datetime_lt: $maxtime`, i.e. mintime is
 * inclusive and maxtime is exclusive.
 */
export const TimeRangeSchema = z
  .object({
    mintime: z.string(),
    maxtime: z.string(),
  })
  .readonly();

/**
 * Time range for GraphQL queries with start (mintime, inclusive) and end
 * (maxtime, exclusive) timestamps.
 */
export type TimeRange = z.infer<typeof TimeRangeSchema>;
|
||||
|
||||
/**
 * Zod schema for a load balancer origin. `.passthrough()` preserves any
 * extra API fields instead of stripping them.
 */
export const LoadBalancerOriginSchema = z
  .object({
    name: z.string(),
    address: z.string(),
    enabled: z.boolean(),
    weight: z.number(),
  })
  .passthrough()
  .readonly();

/**
 * Load balancer origin with its traffic weight.
 */
export type LoadBalancerOrigin = z.infer<typeof LoadBalancerOriginSchema>;

/**
 * Zod schema for a load balancer pool; extra API fields pass through
 * unvalidated.
 */
export const LoadBalancerPoolSchema = z
  .object({
    id: z.string(),
    name: z.string(),
    enabled: z.boolean(),
    origins: z.array(LoadBalancerOriginSchema),
  })
  .passthrough()
  .readonly();

/**
 * Load balancer pool with its member origins.
 */
export type LoadBalancerPool = z.infer<typeof LoadBalancerPoolSchema>;

/**
 * Load balancer joined with its resolved pools — presumably assembled by
 * resolving the pool references returned by the API; confirm in the client.
 */
export type LoadBalancerWithPools = {
  readonly id: string;
  readonly name: string;
  readonly pools: readonly LoadBalancerPool[];
};
|
||||
128
src/worker.tsx
Normal file
128
src/worker.tsx
Normal file
@@ -0,0 +1,128 @@
|
||||
import { env } from "cloudflare:workers";
|
||||
import { Hono } from "hono";
|
||||
import { LandingPage } from "./components/LandingPage";
|
||||
import { AccountMetricCoordinator } from "./durable-objects/AccountMetricCoordinator";
|
||||
import { MetricCoordinator } from "./durable-objects/MetricCoordinator";
|
||||
import { MetricExporter } from "./durable-objects/MetricExporter";
|
||||
import { type AppConfig, parseConfig } from "./lib/config";
|
||||
import { checkHealth, healthResponse } from "./lib/health";
|
||||
import { configFromEnv, createLogger } from "./lib/logger";
|
||||
import {
|
||||
ConfigKeySchema,
|
||||
getConfig,
|
||||
getConfigKey,
|
||||
getEnvDefaults,
|
||||
resetAllConfig,
|
||||
resetConfigKey,
|
||||
setConfigKey,
|
||||
} from "./lib/runtime-config";
|
||||
|
||||
export { MetricCoordinator, AccountMetricCoordinator, MetricExporter };
|
||||
|
||||
// Per-request Hono context variables, readable via c.var in handlers.
type Variables = { config: AppConfig };

const app = new Hono<{ Bindings: Env; Variables: Variables }>();

// Config middleware: parse the environment-derived config once per request
// and stash it on the context. Must be registered before the disable
// guards, which read c.var.config.
app.use("*", async (c, next) => {
  c.set("config", parseConfig(c.env));
  await next();
});
|
||||
|
||||
// Disable guards
|
||||
app.use("*", async (c, next) => {
|
||||
const path = c.req.path;
|
||||
if (c.var.config.disableUi && path === "/") {
|
||||
return c.text("Not Found", 404);
|
||||
}
|
||||
if (c.var.config.disableConfigApi && path.startsWith("/config")) {
|
||||
return c.text("Not Found", 404);
|
||||
}
|
||||
await next();
|
||||
});
|
||||
|
||||
// Dynamic metrics path middleware (runs before routing)
|
||||
app.get(env.METRICS_PATH, async (c) => {
|
||||
const logger = createLogger("worker", configFromEnv(c.env)).withContext({
|
||||
request_id: crypto.randomUUID(),
|
||||
});
|
||||
logger.info("Metrics request received");
|
||||
|
||||
try {
|
||||
const coordinator = await MetricCoordinator.get(c.env);
|
||||
const output = await coordinator.export();
|
||||
logger.info("Metrics exported successfully");
|
||||
return c.text(output, 200, {
|
||||
"Content-Type": "text/plain; charset=utf-8",
|
||||
});
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
logger.error("Failed to collect metrics", { error: message });
|
||||
return c.text(`Error collecting metrics: ${message}`, 500);
|
||||
}
|
||||
});
|
||||
|
||||
// Routes
// UI landing page (already answered with 404 by the guard middleware when
// disableUi is set).
app.get("/", (c) => c.html(<LandingPage config={c.var.config} />));

// Health probe endpoint.
app.get("/health", async (c) => {
  const health = await checkHealth(c.env);
  return healthResponse(health);
});

// Config API routes
// Full resolved configuration.
app.get("/config", async (c) => {
  const config = await getConfig(c.env);
  return c.json(config);
});

// Environment defaults only. Registered before the parameterized
// "/config/:key" route so the literal segment "defaults" is not captured
// as a key.
app.get("/config/defaults", (c) => {
  const defaults = getEnvDefaults(c.env);
  return c.json(defaults);
});

// Single configuration value; 400 for keys outside ConfigKeySchema.
app.get("/config/:key", async (c) => {
  const keyResult = ConfigKeySchema.safeParse(c.req.param("key"));
  if (!keyResult.success) {
    return c.json({ error: "Invalid config key" }, 400);
  }
  const value = await getConfigKey(c.env, keyResult.data);
  return c.json({ key: keyResult.data, value });
});
|
||||
|
||||
app.put("/config/:key", async (c) => {
|
||||
const keyResult = ConfigKeySchema.safeParse(c.req.param("key"));
|
||||
if (!keyResult.success) {
|
||||
return c.json({ error: "Invalid config key" }, 400);
|
||||
}
|
||||
const body = await c.req.json<{ value: unknown }>().catch(() => null);
|
||||
if (!body || !("value" in body)) {
|
||||
return c.json({ error: "Request body must contain 'value'" }, 400);
|
||||
}
|
||||
const result = await setConfigKey(c.env, keyResult.data, body.value);
|
||||
if (!result.success) {
|
||||
return c.json(
|
||||
{ error: "Invalid value", details: result.error.issues },
|
||||
400,
|
||||
);
|
||||
}
|
||||
return c.json(result.config);
|
||||
});
|
||||
|
||||
// DELETE /config/:key — removes one override, reverting that key to its
// environment default; 400 for unknown keys.
app.delete("/config/:key", async (c) => {
  const keyResult = ConfigKeySchema.safeParse(c.req.param("key"));
  if (!keyResult.success) {
    return c.json({ error: "Invalid config key" }, 400);
  }
  const config = await resetConfigKey(c.env, keyResult.data);
  return c.json(config);
});

// DELETE /config — clears all overrides, reverting every key to its
// environment default.
app.delete("/config", async (c) => {
  const config = await resetAllConfig(c.env);
  return c.json(config);
});

// Fallback for any unmatched path.
app.notFound((c) => c.text("Not Found", 404));

export default app;
|
||||
Reference in New Issue
Block a user