mirror of
https://github.com/OneUptime/oneuptime.git
synced 2026-04-06 00:32:12 +02:00
feat(LogDocumentation): add detailed markdown documentation for Log Pipelines, Log Processors, and Log Scrub Rules
This commit is contained in:
@@ -23,6 +23,75 @@ import React, {
|
||||
useState,
|
||||
} from "react";
|
||||
|
||||
/**
 * Markdown help content rendered in the "How Log Processors Work" help panel.
 * Describes the three processor types (Severity Remapper, Attribute Remapper,
 * Category Processor) and how they execute in order within a pipeline.
 * Backticks inside the template literal are escaped (\`) so the fenced
 * mermaid/code blocks survive as literal markdown.
 */
const processorsDocMarkdown: string = `
### How Processors Work

Processors are transformation steps that modify logs matched by this pipeline. They run **in order** — drag rows to reorder them.

\`\`\`mermaid
flowchart LR
  A[Matched Log] --> B[Processor 1]
  B --> C[Processor 2]
  C --> D[Processor 3]
  D --> E[Transformed Log]
\`\`\`

---

### Processor Types

#### Severity Remapper
Maps a log field to a standard OpenTelemetry severity level.

**When to use:** Your application logs severity as custom text (e.g., "warn", "fatal", "verbose") instead of standard levels like WARNING or FATAL.

**How it works:**
1. Reads the value from the **Source Key** (e.g., \`level\`)
2. Looks up the value in your **Mappings** table
3. If found, sets \`severityText\` and \`severityNumber\` on the log

| Match Value | Severity Text | Severity Number |
|-------------|--------------|-----------------|
| trace | TRACE | 1 |
| debug | DEBUG | 5 |
| info | INFO | 9 |
| warn | WARNING | 13 |
| error | ERROR | 17 |
| fatal | FATAL | 21 |

---

#### Attribute Remapper
Renames or copies a log attribute from one key to another.

**When to use:** Different services use different attribute names for the same concept (e.g., \`client_ip\` vs \`source_address\`).

**How it works:**
1. Reads the value from the **Source Key**
2. Writes it to the **Target Key**
3. Optionally removes the source key (**Preserve Source** = off)
4. Optionally overwrites if target already exists (**Override on Conflict** = on)

---

#### Category Processor
Tags logs with a category label based on filter conditions.

**When to use:** You want to add business-level labels to logs (e.g., "Payment Error", "Auth Failure", "Rate Limit").

**How it works:**
1. Evaluates each **Category** rule in order
2. The first matching rule sets the **Target Key** to that category name
3. Uses the same filter syntax as pipeline filters

---

### Tips
- **Order matters** — processors run sequentially, so a severity remapper should run before a category processor that filters by severity
- **Disable without deleting** — toggle a processor off to temporarily skip it
- **Test incrementally** — add one processor at a time and verify in the Logs view
`;
|
||||
|
||||
const LogPipelineView: FunctionComponent<PageComponentProps> = (
|
||||
_props: PageComponentProps,
|
||||
): ReactElement => {
|
||||
@@ -146,6 +215,12 @@ const LogPipelineView: FunctionComponent<PageComponentProps> = (
|
||||
},
|
||||
],
|
||||
}}
|
||||
helpContent={{
|
||||
title: "How Log Processors Work",
|
||||
description:
|
||||
"Understanding Severity Remapper, Attribute Remapper, and Category Processor",
|
||||
markdown: processorsDocMarkdown,
|
||||
}}
|
||||
noItemsMessage={
|
||||
"No processors configured. Click 'Add Processor' above to add your first processor."
|
||||
}
|
||||
|
||||
@@ -8,6 +8,92 @@ import LogPipeline from "Common/Models/DatabaseModels/LogPipeline";
|
||||
import ProjectUtil from "Common/UI/Utils/Project";
|
||||
import React, { Fragment, FunctionComponent, ReactElement } from "react";
|
||||
|
||||
/**
 * Markdown help content rendered in the "How Log Pipelines Work" help panel
 * on the Log Pipelines list page. Covers key concepts, filter query syntax,
 * processor types, and worked examples. Backticks inside the template literal
 * are escaped (\`) so fenced blocks and inline code survive as markdown.
 */
const documentationMarkdown: string = `
### How Log Pipelines Work

Log pipelines let you transform and enrich logs **at ingest time** — before they are stored. Each pipeline matches logs using a filter query, then runs a series of processors to modify them.

\`\`\`mermaid
flowchart TD
  A[Log Arrives] --> B{Match Against Pipelines}
  B -->|Filter Matches| C[Run Processors In Order]
  B -->|No Match| D[Store Log As-Is]
  C --> E[Processor 1: Remap Severity]
  E --> F[Processor 2: Remap Attributes]
  F --> G[Processor 3: Categorize]
  G --> H[Store Transformed Log]
\`\`\`

---

### Key Concepts

| Concept | Description |
|---------|-------------|
| **Pipeline** | A named rule that matches logs and applies processors to them |
| **Filter Query** | Determines which logs this pipeline applies to |
| **Processor** | A transformation step that modifies the log (e.g., remap severity, rename attributes) |
| **Sort Order** | Pipelines run in order — drag rows to reorder. Lower = runs first |

---

### Filter Query Syntax

Filter queries let you target specific logs. If left empty, the pipeline matches **all logs**.

| Operator | Example | Description |
|----------|---------|-------------|
| \`=\` | \`severityText = 'ERROR'\` | Exact match |
| \`!=\` | \`severityText != 'DEBUG'\` | Not equal |
| \`LIKE\` | \`body LIKE '%timeout%'\` | Pattern match (\`%\` = wildcard) |
| \`IN\` | \`severityText IN ('ERROR', 'WARN')\` | Match any value in list |
| \`AND\` | \`severityText = 'ERROR' AND attributes.service = 'api'\` | Both conditions must match |
| \`OR\` | \`severityText = 'ERROR' OR severityText = 'WARN'\` | Either condition matches |

**Available fields:** \`severityText\`, \`body\`, \`serviceId\`, \`attributes.<key>\`

---

### Processor Types

#### Severity Remapper
Converts a log field value into a standard severity level. Useful when your application logs severity as text like "warn" or "fatal" instead of standard OpenTelemetry severity.

**Example:** Map \`level = "warn"\` → severity WARNING (13)

#### Attribute Remapper
Renames or copies a log attribute from one key to another. Useful for normalizing attribute names across different services.

**Example:** Rename \`attributes.src_ip\` → \`attributes.source_address\`

#### Category Processor
Adds a category label to logs based on filter conditions. Useful for tagging logs with business-level categories.

**Example:** Tag logs matching \`severityText = 'ERROR'\` with category "Error"

---

### Examples

#### Example 1: Normalize severity from application logs
1. Create a pipeline with filter: \`attributes.source = 'legacy-app'\`
2. Add a **Severity Remapper** processor:
   - Source Key: \`level\`
   - Mappings: \`warn\` → WARNING, \`fatal\` → FATAL, \`info\` → INFO

#### Example 2: Rename attributes for consistency
1. Create a pipeline with no filter (matches all logs)
2. Add an **Attribute Remapper** processor:
   - Source: \`attributes.client_ip\`
   - Target: \`attributes.source_address\`

#### Example 3: Categorize error logs
1. Create a pipeline with filter: \`severityText IN ('ERROR', 'FATAL')\`
2. Add a **Category Processor**:
   - Target Key: \`error_category\`
   - Categories: "Database Error" for \`body LIKE '%connection%'\`, "Timeout" for \`body LIKE '%timeout%'\`
`;
|
||||
|
||||
const LogPipelines: FunctionComponent<
|
||||
PageComponentProps
|
||||
> = (): ReactElement => {
|
||||
@@ -32,7 +118,13 @@ const LogPipelines: FunctionComponent<
|
||||
cardProps={{
|
||||
title: "Log Pipelines",
|
||||
description:
|
||||
"Configure server-side log processing pipelines that transform logs at ingest time. Pipelines run in sort order and apply processors to matching logs.",
|
||||
"Transform and enrich logs at ingest time. Each pipeline matches logs using a filter, then runs processors in order to modify them. Click a pipeline to configure its filter and processors.",
|
||||
}}
|
||||
helpContent={{
|
||||
title: "How Log Pipelines Work",
|
||||
description:
|
||||
"Understanding filters, processors, and how logs are transformed at ingest time",
|
||||
markdown: documentationMarkdown,
|
||||
}}
|
||||
noItemsMessage={"No log pipelines found."}
|
||||
viewPageRoute={Navigation.getCurrentRoute()}
|
||||
|
||||
@@ -8,6 +8,67 @@ import LogScrubRule from "Common/Models/DatabaseModels/LogScrubRule";
|
||||
import ProjectUtil from "Common/UI/Utils/Project";
|
||||
import React, { Fragment, FunctionComponent, ReactElement } from "react";
|
||||
|
||||
/**
 * Markdown help content rendered in the "How Log Scrub Rules Work" help panel
 * on the Log Scrub Rules list page. Covers PII pattern types, scrub actions
 * (redact/mask/hash), scrubbable fields, and rule ordering. Backticks inside
 * the template literal are escaped (\`) so fenced blocks and inline code
 * survive as markdown.
 */
const documentationMarkdown: string = `
### How Log Scrub Rules Work

Log scrub rules automatically detect and remove sensitive data (PII) from your logs **at ingest time** — before they are stored. This ensures sensitive information never reaches your log storage.

\`\`\`mermaid
flowchart TD
  A[Log Received] --> B{Match Against Scrub Rules}
  B -->|Pattern Matches| C[Apply Scrub Action]
  B -->|No Match| D[Store Log As-Is]
  C -->|Redact| E["Replace with [REDACTED]"]
  C -->|Mask| F["Partially hide e.g. j***@***.com"]
  C -->|Hash| G[Replace with deterministic hash]
  E --> H[Store Scrubbed Log]
  F --> H
  G --> H
\`\`\`

---

### Pattern Types

| Pattern | What It Detects | Example Match |
|---------|----------------|---------------|
| **Email Address** | Email addresses | user@example.com |
| **Credit Card** | Credit card numbers | 4111-1111-1111-1111 |
| **SSN** | US Social Security Numbers | 123-45-6789 |
| **Phone Number** | Phone numbers | +1 (555) 123-4567 |
| **IP Address** | IPv4 addresses | 192.168.1.1 |
| **Custom Regex** | Your own pattern | Any regex you define |

---

### Scrub Actions Explained

| Action | Behavior | Example |
|--------|----------|---------|
| **Redact** | Replaces the entire match with \`[REDACTED]\` | \`user@example.com\` → \`[REDACTED]\` |
| **Mask** | Partially hides the value, preserving structure | \`user@example.com\` → \`u***@***.com\` |
| **Hash** | Replaces with a deterministic SHA-256 hash | \`user@example.com\` → \`a1b2c3d4...\` |

> **Tip:** Use **Hash** when you need to correlate occurrences of the same value across logs without exposing the actual data. The same input always produces the same hash.

---

### Fields to Scrub

Each log entry has two parts that can contain sensitive data:

- **Body**: The main log message text
- **Attributes**: Key-value metadata attached to the log (e.g. \`user.email\`, \`client.ip\`)

You can choose to scrub the body only, attributes only, or both.

---

### Rule Ordering

Rules are evaluated in the order shown in the table. Drag and drop to reorder. Earlier rules are applied first, so place more specific rules before broader ones.
`;
|
||||
|
||||
const LogScrubRules: FunctionComponent<
|
||||
PageComponentProps
|
||||
> = (): ReactElement => {
|
||||
@@ -24,6 +85,9 @@ const LogScrubRules: FunctionComponent<
|
||||
isDeleteable={true}
|
||||
isEditable={true}
|
||||
isCreateable={true}
|
||||
createInitialValues={{
|
||||
sortOrder: 0,
|
||||
}}
|
||||
sortBy="sortOrder"
|
||||
sortOrder={SortOrder.Ascending}
|
||||
enableDragAndDrop={true}
|
||||
@@ -33,6 +97,12 @@ const LogScrubRules: FunctionComponent<
|
||||
description:
|
||||
"Automatically detect and scrub sensitive data (PII) from logs at ingest time. Matching patterns are masked, hashed, or redacted before storage. Drag to reorder.",
|
||||
}}
|
||||
helpContent={{
|
||||
title: "How Log Scrub Rules Work",
|
||||
description:
|
||||
"Understanding pattern types, scrub actions, and how sensitive data is removed from logs at ingest time",
|
||||
markdown: documentationMarkdown,
|
||||
}}
|
||||
noItemsMessage={"No scrub rules found."}
|
||||
formSteps={[
|
||||
{
|
||||
|
||||
Reference in New Issue
Block a user