Skip to content

Commit 0ab33f0

Browse files
committed
feat: Extract and expose timestamps from query results.
1 parent 146474a commit 0ab33f0

2 files changed

Lines changed: 65 additions & 19 deletions

File tree

README.md

Lines changed: 14 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -28,26 +28,31 @@ npm run server
2828

2929
### Usage
3030

31-
1. Open Grafana at http://localhost:3000.
31+
1. By default, Grafana listens on `0.0.0.0:3000`. Open `http://<GRAFANA_IP>:3000` in the browser.
3232

3333
2. Connect the datasource to a CLP API server:
34-
- Navigate to **Connections > Data sources > CLP**.
34+
- Navigate to **Connections > Data sources > CLP** on the left side panel.
3535
- Enter the API server URL (e.g., `http://<CLP_API_SERVER_HOST>:<PORT>`).
3636
- Click **Save & test** to verify connectivity.
3737

38-
3. Build a dashboard:
39-
- Add a visualization and choose **CLP** as the datasource.
38+
3. Explore log data:
39+
- Click **Explore** on the left side panel.
4040
- Configure your query in the query editor:
4141
- **Dataset**: the dataset to search (defaults to `default`).
4242
- **Query Text**: the search query string.
4343
- **Ignore Case**: whether to perform a case-insensitive search.
4444
- **Max Results**: the maximum number of results to return.
45+
- Set the desired time range and click **Run query** in the top right.
46+
- The matching log events will be displayed.
47+
48+
4. Build a dashboard:
49+
- Add a visualization and choose **CLP** as the datasource.
50+
- Configure your query in the query editor.
4551
- Set the desired time range and click **Refresh** to run the query.
46-
- To view results in the Logs panel:
47-
1. Select the **Logs** visualization in the top right.
48-
2. Add an **Extract fields** transformation and choose **JSON** as the format.
49-
3. Add a **Convert field type** transformation to convert your timestamp field to the
50-
**Time** type.
52+
- To view results in the Logs panel, select the **Logs** visualization in the top right.
53+
- To enable Grafana log level detection:
54+
1. Add an **Extract fields** transformation and choose **JSON** as the format.
55+
2. Extract the field at the log level's JSON field path into a field named `severity`.
5156

5257
## Test the plugin in an existing Grafana deployment
5358

src/datasource.ts

Lines changed: 51 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@ import {
1212
} from '@grafana/data';
1313

1414
import { SearchQuery, ClpDataSourceOptions, DEFAULT_QUERY } from './types';
15-
import { Observable, forkJoin, lastValueFrom } from 'rxjs';
15+
import { Observable, forkJoin, zip, lastValueFrom } from 'rxjs';
1616
import { map, switchMap, reduce } from 'rxjs/operators';
1717
import { createParser, type EventSourceMessage, type ParseError } from 'eventsource-parser';
1818

@@ -99,8 +99,26 @@ export class DataSource extends DataSourceApi<SearchQuery, ClpDataSourceOptions>
9999
);
100100
}
101101

102+
#fetchTimestampColumnNames(dataset: string): Observable<string[]> {
103+
return getBackendSrv()
104+
.fetch<string[]>({
105+
url: `${this.baseUrl}/column_metadata/${dataset}/timestamp`,
106+
method: 'GET',
107+
})
108+
.pipe(map((response) => response.data));
109+
}
110+
111+
#extractField(message: unknown, columnName: string): unknown {
112+
const fieldPath = columnName.split(/(?<!\\)\./).map((s) => s.replace(/\\\./g, '.'));
113+
let current = message;
114+
for (const segment of fieldPath) {
115+
current = (current as Record<string, unknown>)[segment];
116+
}
117+
return current;
118+
}
119+
102120
query(options: DataQueryRequest<SearchQuery>): Observable<DataQueryResponse> {
103-
const observables = options.targets.map((target) =>
121+
const queryResultsObservables = options.targets.map((target) =>
104122
this.#submitQuery(target, options.range).pipe(
105123
switchMap((uri) => {
106124
const searchJobId = uri.split('/').pop()!;
@@ -126,25 +144,48 @@ export class DataSource extends DataSourceApi<SearchQuery, ClpDataSourceOptions>
126144
}
127145
};
128146
});
129-
}),
130-
map((dataBuffer) => ({ target, dataBuffer }))
147+
})
131148
)
132149
);
133150

134-
return forkJoin(observables).pipe(
135-
map((results) => ({
136-
data: results.map(({ target, dataBuffer }) => {
151+
const timestampColumnNamesObservables = options.targets.map((target) =>
152+
this.#fetchTimestampColumnNames(target.dataset ?? 'default')
153+
);
154+
155+
const dataframeObservables = options.targets.map((target, i) =>
156+
zip(timestampColumnNamesObservables[i], queryResultsObservables[i]).pipe(
157+
map(([timestampColumnNames, dataBuffer]) => {
158+
const fields = [];
159+
137160
const values = target.maxNumResults ? dataBuffer.slice(0, target.maxNumResults) : dataBuffer;
161+
fields.push({ name: 'body', values, type: FieldType.string });
162+
163+
const [timestampColumnName] = timestampColumnNames;
164+
if ('undefined' !== typeof timestampColumnName) {
165+
const parsedValues = values.map((line) => JSON.parse(line));
166+
const timestamps = parsedValues.map((parsedValue) => {
167+
try {
168+
return this.#extractField(parsedValue, timestampColumnName);
169+
} catch (err: unknown) {
170+
return null;
171+
}
172+
});
173+
fields.push({ name: 'timestamp', values: timestamps, type: FieldType.time });
174+
}
175+
138176
return createDataFrame({
139177
refId: target.refId,
140-
fields: [{ name: target.refId, values, type: FieldType.string }],
178+
fields: fields,
141179
meta: {
142180
type: DataFrameType.LogLines,
181+
preferredVisualisationType: 'logs',
143182
},
144183
});
145-
}),
146-
}))
184+
})
185+
)
147186
);
187+
188+
return forkJoin(dataframeObservables).pipe(map((data) => ({ data })));
148189
}
149190

150191
async testDatasource() {

0 commit comments

Comments
 (0)