Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
50 commits
Select commit Hold shift + click to select a range
0ec257c
feat(workflows): batch entity processing — entityList-only for automa…
yan-3005 Mar 20, 2026
e635e2f
Update generated TypeScript types
github-actions[bot] Mar 25, 2026
fb2e2b4
fix(workflows): migrate batch nodes from relatedEntity to entityList
yan-3005 Mar 27, 2026
90b6325
Update generated TypeScript types
github-actions[bot] Mar 27, 2026
5cc175a
fix(workflows): align false_entityList naming and remove dead rollbac…
yan-3005 Mar 27, 2026
ea9a384
fix(workflows): remove maxItems constraint from trigger output schemas
yan-3005 Mar 27, 2026
39880fe
Merge branch 'main' into ram/workflow-improvements
yan-3005 Mar 27, 2026
676740c
fix(workflows): update unit tests and v1105 migration for entityList …
yan-3005 Mar 27, 2026
0280942
refactor(migrations): move workflow inputNamespaceMap migration from …
yan-3005 Mar 27, 2026
3b191e8
Merge branch 'main' into ram/workflow-improvements
yan-3005 Mar 27, 2026
88c5f0e
test: add coverage for workflow automated task builders and impl classes
yan-3005 Mar 27, 2026
883ebd8
fix(workflows): set global entityList in FilterEntityImpl for event-b…
yan-3005 Mar 27, 2026
fb283ec
fix(migration): force Flowable redeploy in v1140 migration
yan-3005 Mar 27, 2026
685bdb4
fix(migration): graph-aware inputNamespaceMap migration with generic …
yan-3005 Mar 27, 2026
96bc193
fix(migration): preserve non-entityList keys and fix test signature m…
yan-3005 Mar 27, 2026
996928a
Merge origin/main into ram/workflow-improvements
yan-3005 Mar 27, 2026
7a75510
fix(workflow): address PR #26715 review items — retry, fallback, cons…
yan-3005 Mar 27, 2026
7e2af14
fix(workflows): address Copilot review comments and remaining bugs
yan-3005 Mar 27, 2026
5c2479c
Update generated TypeScript types
github-actions[bot] Mar 27, 2026
6394c3f
fix(test): update MigrationUtilTest for List<String[]> signature change
yan-3005 Mar 27, 2026
1f96932
chore: remove internal review notes file
yan-3005 Mar 27, 2026
93133fc
Merge branch 'main' into ram/workflow-improvements
yan-3005 Mar 27, 2026
5bdc592
refactor(workflows): remove backward-compat entityList from check nodes
yan-3005 Mar 30, 2026
12441aa
refactor(tests): use TRUE_ENTITY_LIST_VARIABLE constant in check impl…
yan-3005 Mar 30, 2026
4930d3c
fix(migration): preserve existing entityList when relatedEntity also …
yan-3005 Mar 30, 2026
7ced433
Merge branch 'main' into ram/workflow-improvements
yan-3005 Mar 30, 2026
889ca1a
Batch entity fetch + deduplicate retry config in workflow task impls
yan-3005 Mar 31, 2026
0820069
Fix Copilot review: retry batch fetch in SinkTaskDelegate and remove …
yan-3005 Mar 31, 2026
a0d4457
Address code review: comments, selective migration redeploy, Retry pl…
yan-3005 Mar 31, 2026
b92bb1c
Merge branch 'main' into ram/workflow-improvements
yan-3005 Mar 31, 2026
024d8ba
Change Event Based Governance Workflow (#26758)
yan-3005 Apr 6, 2026
abf6442
Merge branch 'main' into ram/workflow-improvements
yan-3005 Apr 6, 2026
03a321a
fix(workflows): use entity FQN instead of entity link string in SinkE…
yan-3005 Apr 7, 2026
de4ae6c
Merge branch 'main' into ram/workflow-improvements
yan-3005 Apr 7, 2026
26cb774
Merge branch 'main' into ram/workflow-improvements
yan-3005 Apr 12, 2026
7fadb00
feat(workflows): replace relatedEntity with entityList; bulk writes w…
yan-3005 Apr 14, 2026
2077ecd
Update generated TypeScript types
github-actions[bot] Apr 14, 2026
a631c29
Merge remote-tracking branch 'origin/main' into ram/workflow-improvem…
yan-3005 Apr 14, 2026
63f3940
feat(workflows): bypass bot guard in bulk entity updates via skipBotG…
yan-3005 Apr 15, 2026
c1fa551
Merge branch 'main' into ram/workflow-improvements
yan-3005 Apr 15, 2026
8eede11
fix(test): update SetEntityAttributeImplTest for bulkUpdateEntities path
yan-3005 Apr 15, 2026
1497060
fix(workflows): replace relatedEntity with entityList in RecognizerFe…
yan-3005 Apr 15, 2026
216b6a0
fix(workflows): replace relatedEntity with entityList in GlossaryAppr…
yan-3005 Apr 15, 2026
c8ab3ae
Merge branch 'main' into ram/workflow-improvements
yan-3005 Apr 15, 2026
793dcd2
fix(workflows): support conditional entityList keys in SetEntityAttri…
yan-3005 Apr 15, 2026
41f728d
fix(test): update SetEntityAttributeImplTest to expect governance-bot…
yan-3005 Apr 15, 2026
339d32f
fix(workflows): address copilot review comments — null FQN fallback, …
yan-3005 Apr 15, 2026
0f9cfcf
Merge branch 'main' into ram/workflow-improvements
yan-3005 Apr 15, 2026
ccbd1df
Merge origin/main into ram/workflow-improvements
yan-3005 Apr 23, 2026
06b2be6
Merge branch 'main' into ram/workflow-improvements
yan-3005 Apr 23, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -130,6 +130,15 @@ FROM user_entity ue, role_entity re
WHERE ue.name = 'mcpapplicationbot'
AND re.name = 'ApplicationBotImpersonationRole';

-- Add composite index on change_event(entityType, offset) for efficient incremental
-- change-event-driven workflow processing (filters by entityType + offset range).
-- `offset` is a MySQL reserved word, hence the backtick quoting.
-- NOTE(review): MySQL's CREATE INDEX has no IF NOT EXISTS clause, so re-running this
-- statement fails with a duplicate-key-name error — confirm the migration framework
-- guarantees each migration script executes at most once.
CREATE INDEX idx_change_event_entity_type_offset ON change_event (entityType, `offset`);

-- Widen change_event_consumers.id from VARCHAR(36) to VARCHAR(500) to support workflow consumer IDs
-- which follow the pattern {workflowFQN}Trigger-{entityType} and can exceed 36 characters.
-- VARCHAR(500) keeps the composite UNIQUE(id, extension) key within MySQL's 3072-byte limit
-- (500 * 4 + 256 * 4 = 3024 bytes with utf8mb4).
-- NOTE(review): assumes the extension column is VARCHAR(256) — verify against the current
-- schema before relying on the 3024-byte arithmetic above.
ALTER TABLE change_event_consumers MODIFY COLUMN id VARCHAR(500) NOT NULL;

UPDATE entity_extension
SET json = JSON_SET(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -151,6 +151,13 @@ WHERE ue.name = 'mcpapplicationbot'
AND re.name = 'ApplicationBotImpersonationRole'
ON CONFLICT DO NOTHING;

-- Add composite index on change_event(entityType, offset) for efficient incremental
-- change-event-driven workflow processing (filters by entityType + offset range).
-- PostgreSQL folds unquoted identifiers to lowercase (entitytype), and "offset" is a
-- reserved word, hence the double-quoting. IF NOT EXISTS makes this re-runnable,
-- unlike the MySQL counterpart.
CREATE INDEX IF NOT EXISTS idx_change_event_entity_type_offset ON change_event (entitytype, "offset");

-- Widen change_event_consumers.id from VARCHAR(36) to VARCHAR(500) to support workflow consumer IDs
-- which follow the pattern {workflowFQN}Trigger-{entityType} and can exceed 36 characters.
-- NOTE(review): ALTER COLUMN ... TYPE preserves the existing NOT NULL constraint; no
-- byte-limit concern here since PostgreSQL btree index entries allow far larger keys
-- than MySQL's 3072-byte limit.
ALTER TABLE change_event_consumers ALTER COLUMN id TYPE VARCHAR(500);
-- Migrate profiler sampling config: move flat profileSample/profileSampleType/samplingMethodType
-- into the new profileSampleConfig structure. Default to STATIC since DYNAMIC is new.

Expand Down

Large diffs are not rendered by default.

Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@
import java.util.EnumMap;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
Expand Down Expand Up @@ -652,6 +653,37 @@ public static <T> T getEntity(EntityLink link, String fields, Include include) {
return getEntityByName(link.getEntityType(), link.getEntityFQN(), fields, include);
}

public static Map<String, EntityInterface> getEntitiesByLinks(
List<String> entityLinkStrs, String fields, Include include) {
if (entityLinkStrs == null || entityLinkStrs.isEmpty()) {
return Map.of();
}
// All entity links in a workflow's entityList are always the same entity type —
// event-based triggers fire for a single entity, and periodic batch triggers are configured
// with one entity type. The type is taken from the first link.
Map<String, String> linkToFqn = new LinkedHashMap<>();
String entityType = null;
for (String linkStr : entityLinkStrs) {
EntityLink link = EntityLink.parse(linkStr);
if (entityType == null) entityType = link.getEntityType();
linkToFqn.put(linkStr, link.getEntityFQN());
}
Comment on lines +661 to +670
Copy link

Copilot AI Apr 15, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Entity.getEntitiesByLinks(...) parses all link strings without any per-item error handling and assumes all links share the same entity type (taken from the first link). A single malformed link string (or a mixed-type list) will throw during parse and fail the entire batch, which undermines the PR's goal of per-entity isolation. Consider: (1) catching parse exceptions per link and skipping/recording failures, and (2) validating that all links share the same entityType (or grouping by type) with a clear error message.

Suggested change
// All entity links in a workflow's entityList are always the same entity type —
// event-based triggers fire for a single entity, and periodic batch triggers are configured
// with one entity type. The type is taken from the first link.
Map<String, String> linkToFqn = new LinkedHashMap<>();
String entityType = null;
for (String linkStr : entityLinkStrs) {
EntityLink link = EntityLink.parse(linkStr);
if (entityType == null) entityType = link.getEntityType();
linkToFqn.put(linkStr, link.getEntityFQN());
}
// All entity links in a workflow's entityList are expected to be the same entity type.
// Malformed links are skipped so one bad item does not fail the entire batch, while
// mixed entity types are rejected explicitly because getEntityByNames works on one type.
Map<String, String> linkToFqn = new LinkedHashMap<>();
String entityType = null;
for (String linkStr : entityLinkStrs) {
try {
EntityLink link = EntityLink.parse(linkStr);
if (entityType == null) {
entityType = link.getEntityType();
} else if (!entityType.equals(link.getEntityType())) {
throw new BadRequestException(
String.format(
"All entity links must have the same entity type. Expected '%s' but found '%s' for link '%s'.",
entityType, link.getEntityType(), linkStr));
}
linkToFqn.put(linkStr, link.getEntityFQN());
} catch (RuntimeException e) {
LOG.warn("Skipping malformed entity link [{}]", linkStr, e);
}
}
if (linkToFqn.isEmpty()) {
return Map.of();
}

Copilot uses AI. Check for mistakes.
List<EntityInterface> entities =
getEntityByNames(entityType, new ArrayList<>(linkToFqn.values()), fields, include);
Map<String, EntityInterface> fqnToEntity = new HashMap<>();
for (EntityInterface entity : entities) {
fqnToEntity.put(entity.getFullyQualifiedName(), entity);
}
Map<String, EntityInterface> result = new LinkedHashMap<>();
for (Map.Entry<String, String> entry : linkToFqn.entrySet()) {
EntityInterface entity = fqnToEntity.get(entry.getValue());
if (entity != null) {
result.put(entry.getKey(), entity);
Comment on lines +661 to +681
Copy link

Copilot AI Mar 31, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Entity.getEntitiesByLinks() assumes all link strings are the same entity type (taken from the first link) and will throw if any link string cannot be parsed. Since callers now use this for batch processing, one malformed link (or a mixed-type list) can abort processing for the whole node. Consider validating that all links share the same type (or grouping by type) and handling per-link parse failures by skipping/recording them instead of throwing.

Suggested change
// All entity links in a workflow's entityList are always the same entity type —
// event-based triggers fire for a single entity, and periodic batch triggers are configured
// with one entity type. The type is taken from the first link.
Map<String, String> linkToFqn = new LinkedHashMap<>();
String entityType = null;
for (String linkStr : entityLinkStrs) {
EntityLink link = EntityLink.parse(linkStr);
if (entityType == null) entityType = link.getEntityType();
linkToFqn.put(linkStr, link.getEntityFQN());
}
List<EntityInterface> entities =
getEntityByNames(entityType, new ArrayList<>(linkToFqn.values()), fields, include);
Map<String, EntityInterface> fqnToEntity = new HashMap<>();
for (EntityInterface entity : entities) {
fqnToEntity.put(entity.getFullyQualifiedName(), entity);
}
Map<String, EntityInterface> result = new LinkedHashMap<>();
for (Map.Entry<String, String> entry : linkToFqn.entrySet()) {
EntityInterface entity = fqnToEntity.get(entry.getValue());
if (entity != null) {
result.put(entry.getKey(), entity);
// Parse all links defensively and allow mixed entity types. A single malformed
// link should not abort processing of the whole list.
Map<String, EntityLink> linkStrToLink = new LinkedHashMap<>();
for (String linkStr : entityLinkStrs) {
if (linkStr == null) {
LOG.warn("Skipping null entity link string");
continue;
}
try {
EntityLink link = EntityLink.parse(linkStr);
linkStrToLink.put(linkStr, link);
} catch (IllegalArgumentException | BadRequestException ex) {
LOG.warn("Skipping invalid entity link '{}': {}", linkStr, ex.getMessage());
}
}
if (linkStrToLink.isEmpty()) {
return Map.of();
}
// Group FQNs by entity type so we can fetch them in batches per type.
Map<String, List<String>> entityTypeToFqns = new LinkedHashMap<>();
for (EntityLink link : linkStrToLink.values()) {
entityTypeToFqns
.computeIfAbsent(link.getEntityType(), k -> new ArrayList<>())
.add(link.getEntityFQN());
}
Map<String, EntityInterface> fqnToEntity = new HashMap<>();
for (Map.Entry<String, List<String>> entry : entityTypeToFqns.entrySet()) {
String entityType = entry.getKey();
List<String> fqns = entry.getValue();
if (fqns.isEmpty()) {
continue;
}
List<EntityInterface> entitiesForType =
getEntityByNames(entityType, fqns, fields, include);
for (EntityInterface entity : entitiesForType) {
if (entity != null && entity.getFullyQualifiedName() != null) {
fqnToEntity.put(entity.getFullyQualifiedName(), entity);
}
}
}
Map<String, EntityInterface> result = new LinkedHashMap<>();
for (Map.Entry<String, EntityLink> entry : linkStrToLink.entrySet()) {
String linkStr = entry.getKey();
EntityLink link = entry.getValue();
EntityInterface entity = fqnToEntity.get(link.getEntityFQN());
if (entity != null) {
result.put(linkStr, entity);

Copilot uses AI. Check for mistakes.
}
}
Comment on lines +664 to +683
Copy link

Copilot AI Mar 31, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

getEntitiesByLinks (1) assumes all entity links share the same entity type (uses the first link’s type for the entire fetch), and (2) will throw if any link string is malformed (EntityLink.parse). In batch workflows, a single bad link can cause the whole node execution to fail instead of being isolated to that entity. Consider validating that all links are the same type (or grouping by type), and handling parse failures per-link (skip/collect errors) so callers can continue processing remaining entities.

Suggested change
Map<String, String> linkToFqn = new LinkedHashMap<>();
String entityType = null;
for (String linkStr : entityLinkStrs) {
EntityLink link = EntityLink.parse(linkStr);
if (entityType == null) entityType = link.getEntityType();
linkToFqn.put(linkStr, link.getEntityFQN());
}
List<EntityInterface> entities =
getEntityByNames(entityType, new ArrayList<>(linkToFqn.values()), fields, include);
Map<String, EntityInterface> fqnToEntity = new HashMap<>();
for (EntityInterface entity : entities) {
fqnToEntity.put(entity.getFullyQualifiedName(), entity);
}
Map<String, EntityInterface> result = new LinkedHashMap<>();
for (Map.Entry<String, String> entry : linkToFqn.entrySet()) {
EntityInterface entity = fqnToEntity.get(entry.getValue());
if (entity != null) {
result.put(entry.getKey(), entity);
}
}
// Group links by entity type and handle parse failures per-link.
Map<String, Map<String, String>> typeToLinkToFqn = new LinkedHashMap<>();
for (String linkStr : entityLinkStrs) {
if (linkStr == null) {
continue;
}
try {
EntityLink link = EntityLink.parse(linkStr);
String entityType = link.getEntityType();
String entityFqn = link.getEntityFQN();
typeToLinkToFqn
.computeIfAbsent(entityType, k -> new LinkedHashMap<>())
.put(linkStr, entityFqn);
} catch (RuntimeException e) {
// Skip malformed links but continue processing other links.
log.warn("Failed to parse entity link '{}'", linkStr, e);
}
}
if (typeToLinkToFqn.isEmpty()) {
// No valid links parsed.
return Map.of();
}
Map<String, EntityInterface> fqnToEntity = new HashMap<>();
for (Map.Entry<String, Map<String, String>> entry : typeToLinkToFqn.entrySet()) {
String entityType = entry.getKey();
Map<String, String> linkToFqn = entry.getValue();
if (linkToFqn.isEmpty()) {
continue;
}
List<EntityInterface> entities =
getEntityByNames(entityType, new ArrayList<>(linkToFqn.values()), fields, include);
for (EntityInterface entity : entities) {
if (entity != null && entity.getFullyQualifiedName() != null) {
fqnToEntity.put(entity.getFullyQualifiedName(), entity);
}
}
}
Map<String, EntityInterface> result = new LinkedHashMap<>();
for (Map.Entry<String, Map<String, String>> typeEntry : typeToLinkToFqn.entrySet()) {
Map<String, String> linkToFqn = typeEntry.getValue();
for (Map.Entry<String, String> linkEntry : linkToFqn.entrySet()) {
EntityInterface entity = fqnToEntity.get(linkEntry.getValue());
if (entity != null) {
result.put(linkEntry.getKey(), entity);
}
}
}

Copilot uses AI. Check for mistakes.
return result;
}

/** Retrieve the entity using id from given entity reference and fields */
public static <T> T getEntity(
String entityType, UUID id, String fields, Include include, boolean fromCache) {
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
/*
* Copyright 2021 Collate
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.openmetadata.service.config;

import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Collections;
import java.util.Map;
import org.openmetadata.service.config.web.HeaderFactory;

/**
 * Header factory that emits the {@code Cross-Origin-Embedder-Policy} response header with the
 * configured {@code option} value (e.g. {@code require-corp}, {@code unsafe-none}).
 *
 * <p>If {@code option} is not configured, no header is emitted: sending a header with a null
 * value would produce an invalid response header.
 */
public class CrossOriginEmbedderPolicyHeaderFactory extends HeaderFactory {
  public static final String CROSS_ORIGIN_EMBEDDER_POLICY_HEADER = "Cross-Origin-Embedder-Policy";

  // Policy value bound from configuration, e.g. "require-corp".
  @JsonProperty("option")
  private String option;

  public CrossOriginEmbedderPolicyHeaderFactory() {}

  public String getOption() {
    return this.option;
  }

  public void setOption(String option) {
    this.option = option;
  }

  /**
   * Builds the header map for this policy.
   *
   * @return a single-entry map with the policy header, or an empty map when {@code option} is
   *     unset/blank so that no invalid (null-valued) header is sent
   */
  @Override
  protected Map<String, String> buildHeaders() {
    if (this.option == null || this.option.isBlank()) {
      return Collections.emptyMap();
    }
    return Collections.singletonMap(CROSS_ORIGIN_EMBEDDER_POLICY_HEADER, this.option);
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
/*
* Copyright 2021 Collate
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.openmetadata.service.config;

import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Collections;
import java.util.Map;
import org.openmetadata.service.config.web.HeaderFactory;

/**
 * Header factory that emits the {@code Cross-Origin-Opener-Policy} response header with the
 * configured {@code option} value (e.g. {@code same-origin}, {@code same-origin-allow-popups}).
 *
 * <p>If {@code option} is not configured, no header is emitted: sending a header with a null
 * value would produce an invalid response header.
 */
public class CrossOriginOpenerPolicyHeaderFactory extends HeaderFactory {
  public static final String CROSS_ORIGIN_OPENER_POLICY_HEADER = "Cross-Origin-Opener-Policy";

  // Policy value bound from configuration, e.g. "same-origin".
  @JsonProperty("option")
  private String option;

  public CrossOriginOpenerPolicyHeaderFactory() {}

  public String getOption() {
    return this.option;
  }

  public void setOption(String option) {
    this.option = option;
  }

  /**
   * Builds the header map for this policy.
   *
   * @return a single-entry map with the policy header, or an empty map when {@code option} is
   *     unset/blank so that no invalid (null-valued) header is sent
   */
  @Override
  protected Map<String, String> buildHeaders() {
    if (this.option == null || this.option.isBlank()) {
      return Collections.emptyMap();
    }
    return Collections.singletonMap(CROSS_ORIGIN_OPENER_POLICY_HEADER, this.option);
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
/*
* Copyright 2021 Collate
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.openmetadata.service.config;

import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Collections;
import java.util.Map;
import org.openmetadata.service.config.web.HeaderFactory;

public class CrossOriginResourcePolicyHeaderFactory extends HeaderFactory {
public static final String CROSS_ORIGIN_RESOURCE_POLICY_HEADER = "Cross-Origin-Resource-Policy";

@JsonProperty("option")
private String option;

public CrossOriginResourcePolicyHeaderFactory() {}

public String getOption() {
return this.option;
}

public void setOption(String option) {
this.option = option;
}

@Override
protected Map<String, String> buildHeaders() {
return Collections.singletonMap(CROSS_ORIGIN_RESOURCE_POLICY_HEADER, this.option);
}
Comment on lines +21 to +40
Copy link

Copilot AI Apr 23, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

These Cross-Origin Policy header factories appear unrelated to the workflows/batch-processing scope described in the PR and may be better split into a dedicated PR to keep review and rollout isolated. Also, buildHeaders() will emit a null header value when enabled=true but option is unset; consider validating option (or returning an empty map) to avoid sending an invalid header.

Copilot uses AI. Check for mistakes.
}
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
package org.openmetadata.service.governance.workflows;

import io.github.resilience4j.retry.RetryConfig;
import java.time.Duration;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import org.openmetadata.schema.governance.workflows.WorkflowDefinition;
Expand All @@ -12,6 +14,8 @@ public class Workflow {
public static final String INGESTION_PIPELINE_ID_VARIABLE = "ingestionPipelineId";
public static final String RELATED_ENTITY_VARIABLE = "relatedEntity";
public static final String ENTITY_LIST_VARIABLE = "entityList";
public static final String TRUE_ENTITY_LIST_VARIABLE = "true_entityList";
public static final String FALSE_ENTITY_LIST_VARIABLE = "false_entityList";
Comment thread
yan-3005 marked this conversation as resolved.
Comment thread
yan-3005 marked this conversation as resolved.
Comment thread
gitar-bot[bot] marked this conversation as resolved.
public static final String BATCH_SINK_PROCESSED_VARIABLE = "batchSinkProcessed";
public static final String TRIGGERING_OBJECT_ID_VARIABLE = "triggeringObjectId";
public static final String RECOGNIZER_FEEDBACK = "recognizerFeedback";
Expand All @@ -26,6 +30,12 @@ public class Workflow {
public static final String GLOBAL_NAMESPACE = "global";
public static final String SUCCESSFUL_RESULT = "success";
public static final String FAILURE_RESULT = "failure";
public static final RetryConfig TASK_RETRY_CONFIG =
RetryConfig.custom()
.maxAttempts(3)
.waitDuration(Duration.ofMillis(500))
.retryExceptions(Exception.class)
.build();
private final TriggerWorkflow triggerWorkflow;
private final MainWorkflow mainWorkflow;
private final WorkflowDefinition workflowDefinition;
Expand Down
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
package org.openmetadata.service.governance.workflows;

import static org.openmetadata.schema.entity.events.SubscriptionDestination.SubscriptionType.GOVERNANCE_WORKFLOW_CHANGE_EVENT;
import static org.openmetadata.service.governance.workflows.Workflow.ENTITY_LIST_VARIABLE;
import static org.openmetadata.service.governance.workflows.Workflow.GLOBAL_NAMESPACE;
import static org.openmetadata.service.governance.workflows.Workflow.RECOGNIZER_FEEDBACK;
import static org.openmetadata.service.governance.workflows.Workflow.RELATED_ENTITY_VARIABLE;
import static org.openmetadata.service.governance.workflows.Workflow.TRIGGERING_OBJECT_ID_VARIABLE;
import static org.openmetadata.service.governance.workflows.Workflow.UPDATED_BY_VARIABLE;
import static org.openmetadata.service.governance.workflows.WorkflowVariableHandler.getNamespacedVariableName;
Expand Down Expand Up @@ -229,9 +229,10 @@ public static Map<String, Object> defaultHandler(ChangeEvent event) {
MessageParser.EntityLink entityLink =
new MessageParser.EntityLink(entityType, entityReference.getFullyQualifiedName());

String entityLinkString = entityLink.getLinkString();
variables.put(
getNamespacedVariableName(GLOBAL_NAMESPACE, RELATED_ENTITY_VARIABLE),
entityLink.getLinkString());
getNamespacedVariableName(GLOBAL_NAMESPACE, ENTITY_LIST_VARIABLE),
List.of(entityLinkString));

Comment on lines +232 to 236
Copy link

Copilot AI Apr 14, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

defaultHandler() now seeds only global_entityList. There are still workflow definitions / nodes (including the bundled GlossaryApprovalWorkflow user tasks) that expect global_relatedEntity. Consider also setting global_relatedEntity to the first entity link (while keeping global_entityList) until all shipped workflows and migrations fully remove relatedEntity usage.

Copilot uses AI. Check for mistakes.
// Set the updatedBy variable from the change event userName
if (event.getUserName() != null) {
Expand All @@ -257,9 +258,10 @@ private static Map<String, Object> handleTagRecognizerFeedback(ChangeEvent event
MessageParser.EntityLink entityLink =
new MessageParser.EntityLink(Entity.TAG, entityReference.getFullyQualifiedName());

String entityLinkString = entityLink.getLinkString();
variables.put(
getNamespacedVariableName(GLOBAL_NAMESPACE, RELATED_ENTITY_VARIABLE),
entityLink.getLinkString());
getNamespacedVariableName(GLOBAL_NAMESPACE, ENTITY_LIST_VARIABLE),
List.of(entityLinkString));

variables.put(
getNamespacedVariableName(GLOBAL_NAMESPACE, TRIGGERING_OBJECT_ID_VARIABLE),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,6 @@
import java.util.Set;
import java.util.UUID;
import javax.sql.DataSource;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import org.flowable.bpmn.converter.BpmnXMLConverter;
import org.flowable.common.engine.api.FlowableObjectNotFoundException;
Expand Down Expand Up @@ -73,7 +72,7 @@ public class WorkflowHandler {
private ProcessEngine processEngine;
private final Map<Object, Object> expressionMap = new HashMap<>();
private static WorkflowHandler instance;
@Getter private static volatile boolean initialized = false;
private static volatile boolean initialized = false;
private final boolean isMigrationContext;

private WorkflowHandler(OpenMetadataApplicationConfig config, boolean isMigrationContext) {
Expand Down Expand Up @@ -265,6 +264,10 @@ public static WorkflowHandler getInstance() {
throw new UnhandledServerException("WorkflowHandler is not initialized.");
}

/**
 * Returns whether the singleton {@code WorkflowHandler} has completed initialization.
 *
 * <p>Reads the {@code static volatile} flag, so the value is safely visible across threads.
 * Replaces the former Lombok {@code @Getter} on the static field, which generated a
 * non-idiomatic accessor for static state.
 */
public static boolean isInitialized() {
  return initialized;
}

public ProcessEngineConfiguration getProcessEngineConfiguration() {
if (processEngine != null) {
return processEngine.getProcessEngineConfiguration();
Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,5 @@
package org.openmetadata.service.governance.workflows;

import static org.openmetadata.service.governance.workflows.Workflow.GLOBAL_NAMESPACE;
import static org.openmetadata.service.governance.workflows.Workflow.RELATED_ENTITY_VARIABLE;
import static org.openmetadata.service.governance.workflows.Workflow.WORKFLOW_INSTANCE_EXECUTION_ID_VARIABLE;
import static org.openmetadata.service.governance.workflows.WorkflowHandler.getProcessDefinitionKeyFromId;

Expand All @@ -17,47 +15,12 @@ public void execute(DelegateExecution execution) {
String workflowName = getProcessDefinitionKeyFromId(execution.getProcessDefinitionId());
String processInstanceId = execution.getProcessInstanceId();

// CRITICAL: Always set the execution ID first - this is mandatory for stage tracking
UUID workflowInstanceExecutionId = UUID.randomUUID();
execution.setVariable(WORKFLOW_INSTANCE_EXECUTION_ID_VARIABLE, workflowInstanceExecutionId);
LOG.debug(
"[WORKFLOW_EXEC_ID_SET] Workflow: {}, ProcessInstance: {}, ExecutionId: {} - Execution ID initialized",
workflowName,
processInstanceId,
workflowInstanceExecutionId);

WorkflowVariableHandler varHandler = new WorkflowVariableHandler(execution);
try {
String relatedEntity =
(String) varHandler.getNamespacedVariable(GLOBAL_NAMESPACE, RELATED_ENTITY_VARIABLE);

if (relatedEntity == null || relatedEntity.isEmpty()) {
LOG.error(
"[WORKFLOW_MISSING_ENTITY] Workflow: {}, ProcessInstance: {}, ExecutionId: {} - RELATED_ENTITY variable is null/empty. Workflow will likely fail.",
workflowName,
processInstanceId,
workflowInstanceExecutionId);
execution.setVariable(Workflow.FAILURE_VARIABLE, true);
execution.setVariable("startupError", "Missing required variable: relatedEntity");
} else {
LOG.debug(
"[WORKFLOW_EXEC_STARTED] Workflow: {}, ProcessInstance: {}, ExecutionId: {}, RelatedEntity: {} - Workflow execution initialized successfully",
workflowName,
processInstanceId,
workflowInstanceExecutionId,
relatedEntity);
}
} catch (Exception exc) {
LOG.error(
"[WORKFLOW_INIT_ERROR] Workflow: {}, ProcessInstance: {}, ExecutionId: {} - Failed to retrieve relatedEntity variable. Error: {}",
workflowName,
processInstanceId,
workflowInstanceExecutionId,
exc.getMessage(),
exc);
// Set failure indicator but don't prevent workflow from starting
execution.setVariable(Workflow.FAILURE_VARIABLE, true);
execution.setVariable("startupError", "Failed to get relatedEntity: " + exc.getMessage());
}
}
}
Loading
Loading