Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
16 commits
Select commit Hold shift + click to select a range
efaf6d2
fix: resolve 6 data integrity issues across triple/DAG pipeline
ApiliumDevTeam Mar 16, 2026
5882872
feat: persistent ProofStore with pluggable Sled backend
ApiliumDevTeam Mar 16, 2026
9efa202
feat: include proofs in Raft cluster snapshots
ApiliumDevTeam Mar 16, 2026
7b412a1
feat: periodic auto-flush for Ineru and proof data
ApiliumDevTeam Mar 16, 2026
15610de
fix: audit log now fsyncs writes and reports errors
ApiliumDevTeam Mar 16, 2026
78610eb
fix: atomic batch insert via sled::Batch prevents partial writes
ApiliumDevTeam Mar 16, 2026
5cc7c00
feat: P2P DAG action synchronization via tip exchange
ApiliumDevTeam Mar 16, 2026
6a46be3
test: add 8 cross-subsystem data integrity tests
ApiliumDevTeam Mar 16, 2026
fcfad0e
fix: add fsync to all critical file writes preventing data loss on crash
ApiliumDevTeam Mar 16, 2026
7a0200b
fix: WAL writer — replace lock().unwrap() with proper error propagation
ApiliumDevTeam Mar 16, 2026
fe57821
fix: rule engine — recover from poisoned locks instead of panicking
ApiliumDevTeam Mar 16, 2026
81cdce3
fix: kaneru agent learn() — guard against missing state instead of panic
ApiliumDevTeam Mar 16, 2026
8418d6a
fix: P2P REST endpoints — replace unwrap with proper HTTP error response
ApiliumDevTeam Mar 16, 2026
7271dab
release: bump all crates to v0.6.3 — total bug purge across data pipeline
ApiliumDevTeam Mar 19, 2026
c8a677b
feat: POST /api/v1/triples/batch — atomic bulk triple insert endpoint
ApiliumDevTeam Mar 19, 2026
53cca2c
fix: proof verify endpoint returns 200+valid:false instead of 422
ApiliumDevTeam Mar 19, 2026
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
25 changes: 13 additions & 12 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion crates/aingle_ai/Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[package]
name = "aingle_ai"
version = "0.6.2"
version = "0.6.3"
description = "AI integration layer for AIngle - Ineru, Nested Learning, Kaneru"
license = "Apache-2.0 OR LicenseRef-Commercial"
repository = "https://github.com/ApiliumCode/aingle"
Expand Down
2 changes: 1 addition & 1 deletion crates/aingle_contracts/Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[package]
name = "aingle_contracts"
version = "0.6.2"
version = "0.6.3"
description = "Smart Contracts DSL and WASM Runtime for AIngle"
license = "Apache-2.0 OR LicenseRef-Commercial"
repository = "https://github.com/ApiliumCode/aingle"
Expand Down
3 changes: 2 additions & 1 deletion crates/aingle_cortex/Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[package]
name = "aingle_cortex"
version = "0.6.2"
version = "0.6.3"
description = "Córtex API - REST/GraphQL/SPARQL interface for AIngle semantic graphs"
license = "Apache-2.0 OR LicenseRef-Commercial"
repository = "https://github.com/ApiliumCode/aingle"
Expand Down Expand Up @@ -102,6 +102,7 @@ openraft = { version = "0.10.0-alpha.17", features = ["serde", "type-alias"], op
tokio-rustls = { version = "0.26", default-features = false, features = ["ring"], optional = true }
rustls-pemfile = { version = "2", optional = true }

sled = "0.34"
dirs = "6"
mdns-sd = { version = "0.18", optional = true }
if-addrs = { version = "0.13", optional = true }
Expand Down
27 changes: 23 additions & 4 deletions crates/aingle_cortex/src/cluster_init.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,23 @@ use crate::error::Error;
#[cfg(feature = "cluster")]
use crate::server::CortexServer;

#[cfg(feature = "cluster")]
use aingle_raft::state_machine::{ProofSnapshot, ProofSnapshotProvider};

#[cfg(feature = "cluster")]
use crate::proofs::ProofStore;

#[cfg(feature = "cluster")]
impl ProofSnapshotProvider for ProofStore {
fn export_proofs(&self) -> Vec<ProofSnapshot> {
self.export_proofs_sync()
}

fn import_proofs(&self, proofs: &[ProofSnapshot]) {
self.import_proofs_sync(proofs);
}
}

/// Configuration for cluster mode.
#[cfg(feature = "cluster")]
#[derive(Debug, Clone)]
Expand Down Expand Up @@ -362,12 +379,14 @@ pub async fn init_cluster(
server.state_mut().wal = Some(log_store.wal().clone());
server.state_mut().cluster_secret = config.secret.clone();

let state_machine = std::sync::Arc::new(
aingle_raft::state_machine::CortexStateMachine::new(
let state_machine = {
let mut sm = aingle_raft::state_machine::CortexStateMachine::new(
server.state().graph.clone(),
server.state().memory.clone(),
),
);
);
sm.set_proof_provider(server.state().proof_store.clone());
std::sync::Arc::new(sm)
};

let resolver = std::sync::Arc::new(aingle_raft::network::NodeResolver::new());
let node_id = config.node_id;
Expand Down
78 changes: 75 additions & 3 deletions crates/aingle_cortex/src/graphql/resolvers.rs
Original file line number Diff line number Diff line change
Expand Up @@ -153,7 +153,7 @@ pub struct MutationRoot;

#[Object]
impl MutationRoot {
/// Create a new triple
/// Create a new triple (routed through the same path as REST for DAG/Raft consistency)
async fn create_triple(&self, ctx: &Context<'_>, input: TripleInput) -> Result<Triple> {
let state = ctx.data::<AppState>()?;

Expand All @@ -165,9 +165,45 @@ impl MutationRoot {
object,
);

// Insert triple + record DAG action (same path as REST API)
{
let graph = state.graph.read().await;
graph.insert(triple.clone())?;

#[cfg(feature = "dag")]
if let Some(dag_store) = graph.dag_store() {
let dag_author = state
.dag_author
.clone()
.unwrap_or_else(|| aingle_graph::NodeId::named("node:local"));
let dag_seq = state
.dag_seq_counter
.fetch_add(1, std::sync::atomic::Ordering::SeqCst);
let parents = dag_store.tips().unwrap_or_default();

let mut action = aingle_graph::dag::DagAction {
parents,
author: dag_author,
seq: dag_seq,
timestamp: chrono::Utc::now(),
payload: aingle_graph::dag::DagPayload::TripleInsert {
triples: vec![aingle_graph::dag::TripleInsertPayload {
subject: input.subject.clone(),
predicate: input.predicate.clone(),
object: serde_json::json!({}),
}],
},
signature: None,
};

if let Some(ref key) = state.dag_signing_key {
key.sign(&mut action);
}

dag_store.put(&action).map_err(|e| {
Error::new(format!("DAG action failed: {e}"))
})?;
}
}

// Broadcast event
Expand All @@ -183,7 +219,7 @@ impl MutationRoot {
Ok(triple.into())
}

/// Delete a triple by ID
/// Delete a triple by ID (routed through the same path as REST for DAG/Raft consistency)
async fn delete_triple(&self, ctx: &Context<'_>, id: ID) -> Result<bool> {
let state = ctx.data::<AppState>()?;

Expand All @@ -192,7 +228,43 @@ impl MutationRoot {

let deleted = {
let graph = state.graph.read().await;
graph.delete(&triple_id)?
let result = graph.delete(&triple_id)?;

#[cfg(feature = "dag")]
if result {
if let Some(dag_store) = graph.dag_store() {
let dag_author = state
.dag_author
.clone()
.unwrap_or_else(|| aingle_graph::NodeId::named("node:local"));
let dag_seq = state
.dag_seq_counter
.fetch_add(1, std::sync::atomic::Ordering::SeqCst);
let parents = dag_store.tips().unwrap_or_default();

let mut action = aingle_graph::dag::DagAction {
parents,
author: dag_author,
seq: dag_seq,
timestamp: chrono::Utc::now(),
payload: aingle_graph::dag::DagPayload::TripleDelete {
triple_ids: vec![*triple_id.as_bytes()],
subjects: vec![],
},
signature: None,
};

if let Some(ref key) = state.dag_signing_key {
key.sign(&mut action);
}

dag_store.put(&action).map_err(|e| {
Error::new(format!("DAG action failed: {e}"))
})?;
}
}

result
};

if deleted {
Expand Down
Loading
Loading