Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
.idea/
target
/demo/data
/demo/clickhouse-udfs.xml
Expand Down
18 changes: 18 additions & 0 deletions ice/src/main/java/com/altinity/ice/cli/Main.java
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
import com.altinity.ice.cli.internal.cmd.DescribeParquet;
import com.altinity.ice.cli.internal.cmd.Insert;
import com.altinity.ice.cli.internal.cmd.InsertWatch;
import com.altinity.ice.cli.internal.cmd.ListPartitions;
import com.altinity.ice.cli.internal.cmd.Scan;
import com.altinity.ice.cli.internal.config.Config;
import com.altinity.ice.cli.internal.iceberg.rest.RESTCatalogFactory;
Expand Down Expand Up @@ -594,6 +595,23 @@ void scanTable(
}
}

@CommandLine.Command(name = "list-partitions", description = "List partitions in a table.")
void listPartitions(
    @CommandLine.Parameters(
            arity = "1",
            paramLabel = "<name>",
            description = "Table name (e.g. ns1.table1)")
        String name,
    @CommandLine.Option(
            names = {"--json"},
            description = "Output JSON instead of YAML")
        boolean json)
    throws IOException {
  // Open the catalog for the duration of the command only; try-with-resources
  // guarantees it is closed even if the scan fails.
  try (RESTCatalog restCatalog = loadCatalog()) {
    TableIdentifier tableId = TableIdentifier.parse(name);
    ListPartitions.run(restCatalog, tableId, json);
  }
}

@CommandLine.Command(name = "delete-table", description = "Delete table.")
void deleteTable(
@CommandLine.Parameters(
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,74 @@
/*
* Copyright (c) 2025 Altinity Inc and/or its affiliates. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*/
package com.altinity.ice.cli.internal.cmd;

import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
import com.fasterxml.jackson.dataformat.yaml.YAMLGenerator;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.TreeSet;
import org.apache.iceberg.FileScanTask;
import org.apache.iceberg.PartitionField;
import org.apache.iceberg.PartitionSpec;
import org.apache.iceberg.catalog.TableIdentifier;
import org.apache.iceberg.io.CloseableIterable;
import org.apache.iceberg.rest.RESTCatalog;

/**
 * Implements the {@code list-partitions} CLI command: prints a table's partition spec and the
 * set of distinct partition paths currently referenced by its data files.
 */
public final class ListPartitions {

  private ListPartitions() {}

  /**
   * Loads {@code tableId} from {@code catalog} and writes its partition information to stdout.
   *
   * <p>For an unpartitioned table an empty spec/partition list is emitted rather than an error.
   *
   * @param catalog catalog used to load the table
   * @param tableId identifier of the table (e.g. ns1.table1)
   * @param json when true output is JSON, otherwise block-style YAML
   * @throws IOException if scan planning or serialization fails
   */
  public static void run(RESTCatalog catalog, TableIdentifier tableId, boolean json)
      throws IOException {
    org.apache.iceberg.Table table = catalog.loadTable(tableId);
    PartitionSpec spec = table.spec();

    if (!spec.isPartitioned()) {
      output(new Result(tableId.toString(), List.of(), List.of()), json);
      return;
    }

    List<PartitionFieldInfo> partitionSpec = new ArrayList<>();
    for (PartitionField field : spec.fields()) {
      // The source column may have been dropped from the schema after a partition spec
      // change; fall back to the source id instead of NPE-ing on findField(...).name().
      var sourceField = table.schema().findField(field.sourceId());
      String sourceColumn =
          sourceField != null ? sourceField.name() : "<dropped:" + field.sourceId() + ">";
      partitionSpec.add(
          new PartitionFieldInfo(sourceColumn, field.name(), field.transform().toString()));
    }

    // TreeSet both dedupes paths across files and yields deterministic, sorted output.
    TreeSet<String> partitionPaths = new TreeSet<>();
    try (CloseableIterable<FileScanTask> tasks = table.newScan().planFiles()) {
      for (FileScanTask task : tasks) {
        // Use the spec the file was written under (task.spec()), not the table's current
        // spec: with partition spec evolution, older files carry differently-shaped
        // partition tuples and rendering them with the current spec is incorrect.
        partitionPaths.add(task.spec().partitionToPath(task.file().partition()));
      }
    }

    output(new Result(tableId.toString(), partitionSpec, new ArrayList<>(partitionPaths)), json);
  }

  /** Serializes {@code result} to stdout as JSON or YAML (minimal quoting). */
  private static void output(Result result, boolean json) throws IOException {
    ObjectMapper mapper =
        json
            ? new ObjectMapper()
            : new ObjectMapper(new YAMLFactory().enable(YAMLGenerator.Feature.MINIMIZE_QUOTES));
    mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
    System.out.println(mapper.writeValueAsString(result));
  }

  /** Output document: table name, its partition spec fields, and distinct partition paths. */
  @JsonInclude(JsonInclude.Include.NON_NULL)
  record Result(String table, List<PartitionFieldInfo> partitionSpec, List<String> partitions) {}

  /** One partition field: source schema column, partition field name, and transform name. */
  record PartitionFieldInfo(String sourceColumn, String name, String transform) {}
}