Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
20 commits
Select commit Hold shift + click to select a range
962d6dc
feat: add OpenClaw observability plugin shell
mnajafian-nv May 6, 2026
5fa14d7
feat: add OpenClaw hook replay backend
mnajafian-nv May 7, 2026
7f6c457
feat: align OpenClaw hook backend lifecycle
mnajafian-nv May 7, 2026
80de1ff
feat: add OpenClaw telemetry outputs
mnajafian-nv May 7, 2026
115ea38
feat: add OpenClaw LLM and tool replay
mnajafian-nv May 7, 2026
c48ea23
Update integrations/openclaw/package.json
mnajafian-nv May 7, 2026
97d9c18
fix: address OpenClaw observability plugin review feedback
mnajafian-nv May 7, 2026
65a4fee
test: harden OpenClaw replay edge cases
mnajafian-nv May 7, 2026
bd66d54
refactor: narrow OpenClaw plugin public surface
mnajafian-nv May 7, 2026
afb27ae
Merge remote-tracking branch 'upstream/main' into feat/openclaw-hooks…
mnajafian-nv May 7, 2026
b9962f5
fix: harden OpenClaw JSON normalization
mnajafian-nv May 7, 2026
e4f0b18
fix: align OpenClaw plugin metadata
mnajafian-nv May 7, 2026
fbd33d5
fix(openclaw): align package payload and docs
mnajafian-nv May 7, 2026
f701652
fix(openclaw): add README license header
mnajafian-nv May 7, 2026
1b557e1
refactor(openclaw): use typed plugin API handlers
mnajafian-nv May 7, 2026
4df12b3
fix: harden OpenClaw runtime lifecycle
mnajafian-nv May 8, 2026
7a812fb
fix: preserve LLM usage in OpenInference export
mnajafian-nv May 8, 2026
66645b8
build: manage Node packages with npm workspaces
mnajafian-nv May 8, 2026
dc46c6c
ci(openclaw): fix workspace validation paths
mnajafian-nv May 8, 2026
a3593d5
ci(openclaw): make pack check portable on Windows
mnajafian-nv May 8, 2026
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 3 additions & 1 deletion .github/ci-path-filters.yml
Original file line number Diff line number Diff line change
Expand Up @@ -44,14 +44,16 @@ go:
- 'go/**/!(*.md)'

node:
- 'package.json'
- 'package-lock.json'
- 'crates/node/Cargo.toml'
- 'crates/node/build.rs'
- 'crates/node/package.json'
- 'crates/node/package-lock.json'
- 'crates/node/*.d.ts'
- 'crates/node/*.js'
- 'crates/node/src/**'
- 'crates/node/tests/**/*.mjs'
- 'integrations/openclaw/**'
- 'scripts/test-support/**'

python:
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/ci_docs.yml
Original file line number Diff line number Diff line change
Expand Up @@ -86,7 +86,7 @@ jobs:
run: uv sync --no-default-groups --group docs --no-install-project

- name: Install Node.js Documentation Dependencies
working-directory: ${{ env.NEMO_FLOW_CI_WORKSPACE }}/crates/node
working-directory: ${{ env.NEMO_FLOW_CI_WORKSPACE }}
run: npm ci --ignore-scripts

- name: Materialize Main Branch For Versioned Docs
Expand Down
4 changes: 4 additions & 0 deletions .github/workflows/ci_node.yml
Original file line number Diff line number Diff line change
Expand Up @@ -85,6 +85,10 @@ jobs:
working-directory: ${{ env.NEMO_FLOW_CI_WORKSPACE }}
run: just --set ci true --set output_dir "${{ github.workspace }}" test-node

- name: Run OpenClaw integration checks
working-directory: ${{ env.NEMO_FLOW_CI_WORKSPACE }}
run: just --set ci true test-openclaw

- name: Upload Node coverage to Codecov
uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v6
if: ${{ !startsWith(matrix.platform, 'windows') }}
Expand Down
2 changes: 2 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,8 @@ crates/node/index.js
crates/node/index.d.ts
crates/node/coverage/
crates/node/junit.xml
integrations/openclaw/dist/
integrations/openclaw/.test-dist/

# WebAssembly
crates/wasm/pkg/
Expand Down
8 changes: 4 additions & 4 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -93,9 +93,9 @@ repos:

- id: node-lockfile-check
name: package-lock.json is up to date
entry: bash -c 'cd crates/node && npm install --package-lock-only --ignore-scripts --audit=false --fund=false'
entry: bash -c 'npm install --package-lock-only --ignore-scripts --audit=false --fund=false'
language: system
files: '^crates/node/package(?:-lock)?\.json$'
files: '^(package(?:-lock)?\.json|crates/node/package\.json|integrations/openclaw/package\.json)$'
pass_filenames: false

# Rust — fmt + clippy + cargo-deny
Expand Down Expand Up @@ -151,10 +151,10 @@ repos:
pass_filenames: false

- id: attributions-node
name: ATTRIBUTIONS-Node.md (crates/node/package-lock.json)
name: ATTRIBUTIONS-Node.md (package-lock.json)
entry: bash scripts/generate_attributions.sh node
language: system
files: '^(crates/node/package-lock\.json|ATTRIBUTIONS-Node\.md)$'
files: '^(package-lock\.json|ATTRIBUTIONS-Node\.md)$'
pass_filenames: false

# Go — fmt + vet
Expand Down
1 change: 0 additions & 1 deletion AGENTS.md
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,6 @@ cargo install wasm-pack --version 0.14.0 --locked
uv sync
uv run pre-commit install

cd crates/node
npm install --ignore-scripts
```

Expand Down
39,101 changes: 37,642 additions & 1,459 deletions ATTRIBUTIONS-Node.md

Large diffs are not rendered by default.

11 changes: 6 additions & 5 deletions RELEASING.md
Original file line number Diff line number Diff line change
Expand Up @@ -48,8 +48,9 @@ NeMo Flow versions are anchored on the workspace SemVer in the repository root
- The root `Cargo.toml` `workspace.dependencies` entries for
`nemo-flow`, `nemo-flow-adaptive`, and `nemo-flow-ffi` must stay aligned with
that same version.
- `crates/node/package.json` and `crates/node/package-lock.json` carry the base
npm version for the Node.js package and must be bumped explicitly.
- `crates/node/package.json` carries the base npm version for the Node.js
package. The repository-root `package-lock.json` carries the npm workspace
lock entries and must be updated with it.
- The Python package version is derived at packaging time. `pyproject.toml`
stays `dynamic = ["version"]` in the repository, and the packaging recipe
writes a concrete version into `pyproject.toml` and `crates/python/Cargo.toml`
Expand Down Expand Up @@ -101,9 +102,9 @@ Update the versioned source files in the release PR or release-prep commit:
1. Update the root [`Cargo.toml`](Cargo.toml) workspace version.
2. Update the root [`Cargo.toml`](Cargo.toml) `workspace.dependencies` versions
for `nemo-flow`, `nemo-flow-adaptive`, and `nemo-flow-ffi`.
3. Update [`crates/node/package.json`](crates/node/package.json) and
[`crates/node/package-lock.json`](crates/node/package-lock.json) to the same
release version.
3. Update [`crates/node/package.json`](crates/node/package.json) and the
`crates/node` entry in the root [`package-lock.json`](package-lock.json) to
the same release version.
4. Review docs and snippets that mention explicit versions, including:
- [`README.md`](README.md)
- [`CONTRIBUTING.md`](CONTRIBUTING.md)
Expand Down
103 changes: 102 additions & 1 deletion crates/core/src/observability/openinference.rs
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,9 @@ use crate::api::event::{Event, ScopeCategory};
use crate::api::runtime::EventSubscriberFn;
use crate::api::scope::ScopeType;
use crate::api::subscriber::{deregister_subscriber, register_subscriber};
use crate::codec::response::Usage;
use crate::error::FlowError;
use crate::json::Json;
use chrono::{DateTime, Utc};
use openinference_semantic_conventions::SpanKind as OpenInferenceSpanKind;
use openinference_semantic_conventions::attributes as oi;
Expand Down Expand Up @@ -682,10 +684,22 @@ fn end_attributes(event: &Event) -> Vec<KeyValue> {
attributes.push(KeyValue::new(oi::output::VALUE, output));
attributes.push(KeyValue::new(oi::output::MIME_TYPE, "application/json"));
}
let fallback_usage = if event
.category()
.is_some_and(|category| category.as_str() == "llm")
{
usage_from_manual_llm_output(event.output())
} else {
None
};
let usage = event
.annotated_response()
.and_then(|response| response.usage.as_ref())
.or(fallback_usage.as_ref());
if event
.category()
.is_some_and(|category| category.as_str() == "llm")
&& let Some(usage) = event.annotated_response().and_then(|r| r.usage.as_ref())
&& let Some(usage) = usage
{
if let Some(v) = usage.prompt_tokens {
attributes.push(KeyValue::new(oi::llm::token_count::PROMPT, v as i64));
Expand All @@ -712,6 +726,93 @@ fn end_attributes(event: &Event) -> Vec<KeyValue> {
attributes
}

/// Best-effort extraction of token usage from a manually supplied LLM output
/// payload.
///
/// Looks for a `usage` and/or `token_usage` object inside the output JSON and
/// probes a set of commonly seen key spellings (snake_case, camelCase, and
/// short forms) for each counter. Returns `None` when neither map is present
/// or when no recognized counter is found in either of them.
fn usage_from_manual_llm_output(output: Option<&Json>) -> Option<Usage> {
    let payload = output?.as_object()?;
    let usage_map = payload.get("usage").and_then(Json::as_object);
    let token_usage_map = payload.get("token_usage").and_then(Json::as_object);
    // Bail out early when the payload carries neither usage container.
    if usage_map.is_none() && token_usage_map.is_none() {
        return None;
    }

    // Probe both maps (usage first, token_usage as fallback) for a counter.
    let lookup = |keys: &[&str]| first_u64_from_manual_usage(usage_map, token_usage_map, keys);

    let prompt_tokens = lookup(&["prompt_tokens", "input_tokens", "inputTokens", "input"]);
    let completion_tokens = lookup(&[
        "completion_tokens",
        "output_tokens",
        "completionTokens",
        "outputTokens",
        "output",
    ]);
    let total_tokens = lookup(&["total_tokens", "totalTokens", "total"]);
    let cache_read_tokens = lookup(&[
        "cache_read_tokens",
        "cached_tokens",
        "cache_read_input_tokens",
        "cacheReadTokens",
        "cachedTokens",
        "cacheReadInputTokens",
        "cacheRead",
    ]);
    let cache_write_tokens = lookup(&[
        "cache_write_tokens",
        "cache_creation_input_tokens",
        "cacheWriteTokens",
        "cacheCreationInputTokens",
        "cacheWrite",
    ]);

    // A usage object with no recognized counters is treated as absent.
    let found_any = [
        prompt_tokens,
        completion_tokens,
        total_tokens,
        cache_read_tokens,
        cache_write_tokens,
    ]
    .iter()
    .any(Option::is_some);
    if !found_any {
        return None;
    }

    Some(Usage {
        prompt_tokens,
        completion_tokens,
        total_tokens,
        cache_read_tokens,
        cache_write_tokens,
    })
}

/// Searches `usage` first and then `token_usage` for the first key in `keys`
/// that holds an unsigned integer value, returning it if found.
fn first_u64_from_manual_usage(
    usage: Option<&serde_json::Map<String, Json>>,
    token_usage: Option<&serde_json::Map<String, Json>>,
    keys: &[&str],
) -> Option<u64> {
    // `usage` takes precedence over `token_usage` when both define a counter.
    [usage, token_usage]
        .into_iter()
        .flatten()
        .find_map(|map| first_u64(map, keys))
}

/// Returns the value of the first key in `keys` that is present in `usage`
/// and representable as a `u64`; keys are tried in the given order.
fn first_u64(usage: &serde_json::Map<String, Json>, keys: &[&str]) -> Option<u64> {
    for key in keys {
        if let Some(value) = usage.get(*key).and_then(Json::as_u64) {
            return Some(value);
        }
    }
    None
}

fn mark_attributes(event: &Event) -> Vec<KeyValue> {
let handle_attributes = event.attributes();
let mut attributes = vec![
Expand Down
56 changes: 56 additions & 0 deletions crates/core/tests/unit/observability/openinference_tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1091,6 +1091,62 @@ fn llm_end_with_usage_emits_token_count_attributes() {
);
}

#[test]
fn llm_end_with_manual_usage_payload_emits_token_count_attributes() {
    let (provider, exporter) = make_provider();
    let mut processor =
        OpenInferenceEventProcessor::new(provider.clone(), "test-scope".to_string());
    let uuid = Uuid::now_v7();

    // Start an LLM scope, then end it with a manual output payload that
    // spreads token counters across both `usage` and `token_usage` maps.
    processor.process(&make_start_event(uuid, None, "chat", ScopeType::Llm, None));
    processor.process(&make_scope_event_with_profile(
        ScopeCategory::End,
        uuid,
        None,
        "chat",
        ScopeType::Llm,
        Some(json!({
            "content": "hello",
            "usage": { "prompt_tokens": 100 },
            "token_usage": {
                "completion_tokens": 50,
                "total_tokens": 150,
                "cached_tokens": 25,
                "cache_write_tokens": 10
            }
        })),
        Some(CategoryProfile::builder().model_name("gpt-4").build()),
    ));

    processor.force_flush().unwrap();

    let spans = exporter.get_finished_spans().unwrap();
    assert_eq!(spans.len(), 1);
    let attributes = attr_map(&spans[0].attributes);

    // Counters from both maps must be merged onto the span attributes.
    let expected = [
        ("llm.token_count.prompt", "100"),
        ("llm.token_count.completion", "50"),
        ("llm.token_count.total", "150"),
        ("llm.token_count.prompt_details.cache_read", "25"),
        ("llm.token_count.prompt_details.cache_write", "10"),
    ];
    for (key, value) in expected {
        assert_eq!(attributes.get(key), Some(&value.to_string()));
    }
}

#[test]
fn llm_end_without_usage_omits_token_count_attributes() {
let (provider, exporter) = make_provider();
Expand Down
Loading
Loading