Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Empty file.
115 changes: 115 additions & 0 deletions google/cloud/dataproc_spark_connect/proto/sparkmonitor.proto
Original file line number Diff line number Diff line change
@@ -0,0 +1,115 @@
syntax = "proto3";

package spark.connect;

option java_multiple_files = true;
option java_package = "org.apache.spark.connect.proto";

// SparkMonitor progress data delivered via the upstream extension slot on ExecutePlanResponse
// (google.protobuf.Any extension = 999).
// type_url: "type.googleapis.com/spark.connect.SparkMonitorProgress"
// One incremental batch of SparkMonitor progress data. Each message carries
// zero or more listener-derived events (job/stage/task/executor) collected
// since the previous batch, plus optional application lifecycle info.
message SparkMonitorProgress {
// Application lifecycle snapshot (see ApplicationInfo for start/end semantics).
optional ApplicationInfo application_info = 1;
// Job start/end events in this batch.
repeated JobEvent job_events = 2;
// Stage submitted/active/completed events in this batch.
repeated DetailedStageEvent stage_events = 3;
// Task start/end events in this batch.
repeated TaskEvent task_events = 4;
// Executor added/removed events in this batch.
repeated ExecutorEvent executor_events = 5;
// Set when no further progress messages will follow for this execution.
optional bool stream_complete = 6;

// Application lifecycle info (start_time present = start event, end_time present = end event)
message ApplicationInfo {
// NOTE(review): start_time/end_time look like epoch milliseconds as reported
// by the Spark listener bus — confirm against the producer; consider
// documenting the unit in the field name (e.g. *_time_ms) in a future rev.
optional int64 start_time = 1;
optional int64 end_time = 2;
// Spark application ID (e.g. "app-..." / "application_..."); format depends
// on the cluster manager — not validated here.
optional string app_id = 3;
optional string app_attempt_id = 4;
optional string app_name = 5;
optional string spark_user = 6;
}

// Job events (JOB_START=0, JOB_END=1)
message JobEvent {
// NOTE(review): zero value carries business meaning (JOB_START = 0), so an
// unset/absent event_type decodes as JOB_START. Proto3 best practice is a
// *_UNSPECIFIED = 0 value; renumbering now would break the wire contract
// with the producer — flagging for a future v2 of this schema.
enum JobEventType {
JOB_START = 0;
JOB_END = 1;
}
JobEventType event_type = 1;
int64 job_id = 2;
// Free-form status string (presumably e.g. "RUNNING"/"SUCCEEDED"/"FAILED"
// from the Spark listener — verify against producer before switching to enum).
string status = 3;
// NOTE(review): assumed epoch milliseconds — TODO confirm units.
optional int64 submission_time = 4;
optional int64 completion_time = 5;
optional string job_group = 6;
optional string name = 7;
// Stage IDs belonging to this job.
// NOTE(review): int32 here vs int64 stage_id in DetailedStageEvent/TaskEvent —
// width inconsistency; harmless on the wire (varint) but worth unifying.
repeated int32 stage_ids = 8;
// Per-stage info keyed by stage ID.
// NOTE(review): keys are presumably stage IDs rendered as strings (proto map
// keys cannot be derived from another field's type) — verify against producer.
map<string, JobStageInfo> stage_infos = 9;
optional int32 num_tasks = 10;
optional int32 total_cores = 11;
optional string app_id = 12;
optional int32 num_executors = 13;
}

// Per-stage summary embedded in JobEvent.stage_infos.
// Implicit-presence fields: 0 means "unset or genuinely zero" — callers
// cannot distinguish (e.g. completion_time == 0 for an incomplete stage).
message JobStageInfo {
int32 attempt_id = 1;
string name = 2;
int32 num_tasks = 3;
// NOTE(review): assumed epoch milliseconds; note completion_time (4) is
// numbered before submission_time (5) — wire-irrelevant, slightly surprising.
int64 completion_time = 4;
int64 submission_time = 5;
}

// Detailed stage events (STAGE_SUBMITTED=0, STAGE_ACTIVE=1, STAGE_COMPLETED=2)
message DetailedStageEvent {
// NOTE(review): same zero-value concern as JobEventType — an absent
// event_type decodes as STAGE_SUBMITTED.
enum StageEventType {
STAGE_SUBMITTED = 0;
STAGE_ACTIVE = 1;
STAGE_COMPLETED = 2;
}
StageEventType event_type = 1;
int64 stage_id = 2;
int32 stage_attempt_id = 3;
string name = 4;
int32 num_tasks = 5;
// IDs of parent stages in the DAG.
repeated int32 parent_ids = 6;
// NOTE(review): assumed epoch milliseconds — TODO confirm units.
optional int64 submission_time = 7;
optional int64 completion_time = 8;
// Jobs this stage belongs to (int64 here vs int64 job_id in JobEvent — consistent).
repeated int64 job_ids = 9;
optional int32 num_active_tasks = 10;
optional int32 num_failed_tasks = 11;
optional int32 num_completed_tasks = 12;
optional string status = 13;
}

// Task events (TASK_START=0, TASK_END=1)
message TaskEvent {
// NOTE(review): same zero-value concern — an absent event_type decodes
// as TASK_START.
enum TaskEventType {
TASK_START = 0;
TASK_END = 1;
}
TaskEventType event_type = 1;
int64 task_id = 2;
int64 stage_id = 3;
int32 stage_attempt_id = 4;
// Task index within the stage.
int32 index = 5;
// Attempt number for this task (0 for the first attempt).
int32 attempt_number = 6;
string executor_id = 7;
string host = 8;
// Free-form task status string from the listener — not validated here.
string status = 9;
bool speculative = 10;
// NOTE(review): assumed epoch milliseconds — TODO confirm units.
optional int64 launch_time = 11;
optional int64 finish_time = 12;
optional string task_type = 13;
// Populated only on failure (presumably from the task end reason — verify).
optional string error_message = 14;
}

// Executor events (EXECUTOR_ADDED=0, EXECUTOR_REMOVED=1)
message ExecutorEvent {
// NOTE(review): same zero-value concern — an absent event_type decodes
// as EXECUTOR_ADDED.
enum ExecutorEventType {
EXECUTOR_ADDED = 0;
EXECUTOR_REMOVED = 1;
}
ExecutorEventType event_type = 1;
string executor_id = 2;
// Event timestamp. NOTE(review): assumed epoch milliseconds; unlike other
// messages this field is named bare "time" — consider *_time for consistency.
int64 time = 3;
optional string host = 4;
// NOTE(review): relationship between num_cores and total_cores is not
// evident from this schema — confirm semantics with the producer.
optional int32 num_cores = 5;
optional int32 total_cores = 6;
}
}
64 changes: 64 additions & 0 deletions google/cloud/dataproc_spark_connect/proto/sparkmonitor_pb2.py

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Loading
Loading