Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,8 @@ public class CleanupRequest {
private final String location;
private final List<Path> obsoleteDirs;
private final boolean purge;
private final boolean softDelete;
private final boolean sourceOfReplication;
private final String runAs;
private final String dbName;
private final String fullPartitionName;
Expand All @@ -38,6 +40,8 @@ public CleanupRequest(CleanupRequestBuilder builder) {
this.location = builder.location;
this.obsoleteDirs = builder.obsoleteDirs;
this.purge = builder.purge;
this.softDelete = builder.softDelete;
this.sourceOfReplication = builder.sourceOfReplication;
this.runAs = builder.runAs;
this.dbName = builder.dbName;
this.fullPartitionName = builder.fullPartitionName;
Expand All @@ -55,6 +59,14 @@ public boolean isPurge() {
return purge;
}

/**
 * @return true if this cleanup request originated from a soft-delete operation
 *         (set by the builder's {@code setSoftDelete}).
 */
public boolean isSoftDelete() {
return softDelete;
}

/**
 * @return true if the data being cleaned belongs to a database that is a source of
 *         replication; callers use this to decide whether data must be recycled
 *         into the change manager instead of being deleted outright.
 */
public boolean isSourceOfReplication() {
return sourceOfReplication;
}

/**
 * @return the user name this cleanup should be executed as.
 */
public String runAs() {
return runAs;
}
Expand All @@ -74,6 +86,8 @@ public static class CleanupRequestBuilder {
private String location;
private List<Path> obsoleteDirs;
private boolean purge;
private boolean softDelete;
private boolean sourceOfReplication;
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

isSourceOfReplication ?

Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Have changed isSoftDelete to softDelete now to keep the naming convention consistent with existing purge field

private String runAs;
private String dbName;
private String fullPartitionName;
Expand All @@ -93,6 +107,16 @@ public CleanupRequestBuilder setPurge(boolean purge) {
return this;
}

/**
 * Marks whether this cleanup request was produced by a soft-delete operation.
 *
 * @param softDelete true if the originating drop was a soft delete
 * @return this builder, for chaining
 */
public CleanupRequestBuilder setSoftDelete(boolean softDelete) {
this.softDelete = softDelete;
return this;
}

/**
 * Marks whether the database the cleaned data belongs to is a source of replication.
 *
 * @param sourceOfReplication true if the database is a replication source
 * @return this builder, for chaining
 */
public CleanupRequestBuilder setSourceOfReplication(boolean sourceOfReplication) {
this.sourceOfReplication = sourceOfReplication;
return this;
}

public CleanupRequestBuilder setDbName(String dbName) {
this.dbName = dbName;
return this;
Expand Down
21 changes: 18 additions & 3 deletions ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/FSRemover.java
Original file line number Diff line number Diff line change
Expand Up @@ -99,17 +99,18 @@ private List<Path> removeFiles(CleanupRequest cr)
LOG.info("About to remove {} obsolete directories from {}. {}", cr.getObsoleteDirs().size(),
cr.getLocation(), CompactorUtil.getDebugInfo(cr.getObsoleteDirs()));
boolean needCmRecycle;
Database db = null;
try {
Database db = metadataCache.computeIfAbsent(cr.getDbName(),
db = metadataCache.computeIfAbsent(cr.getDbName(),
() -> CompactorUtil.resolveDatabase(conf, cr.getDbName()));
needCmRecycle = ReplChangeManager.isSourceOfReplication(db);
} catch (NoSuchObjectException ex) {
// can not drop a database which is a source of replication
needCmRecycle = false;
needCmRecycle = cr.isSourceOfReplication();
} catch (RuntimeException ex) {
if (ex.getCause() instanceof NoSuchObjectException) {
// can not drop a database which is a source of replication
needCmRecycle = false;
needCmRecycle = cr.isSourceOfReplication();
} else {
throw ex;
}
Expand All @@ -126,6 +127,20 @@ private List<Path> removeFiles(CleanupRequest cr)
deleted.add(dead);
}
}
removeDatabaseDirIfNecessary(db, cr, fs);
return deleted;
}

/**
 * Removes the parent (database) directory of the cleaned location after a soft-delete
 * drop, once it has become empty.
 * <p>
 * Applies only when {@code db} is null — in the caller this means the database could not
 * be resolved in the metastore (it has been dropped) — and the request is flagged as a
 * soft delete. If the request indicates a replication source, the directory is first
 * recycled into the change manager before being deleted.
 *
 * @param db resolved database, or null if it no longer exists in the metastore
 * @param cr the cleanup request that was just processed
 * @param fs filesystem holding the cleaned location
 * @throws IOException on filesystem or change-manager failure
 */
private void removeDatabaseDirIfNecessary(Database db, CleanupRequest cr, FileSystem fs) throws IOException {
  if (db == null && cr.isSoftDelete()) {
    Path dbPath = new Path(cr.getLocation()).getParent();
    // Only drop the database directory once the last table directory under it is gone.
    if (FileUtils.isDirEmpty(fs, dbPath)) {
      if (cr.isSourceOfReplication()) {
        // Preserve the data for replication by moving it into the change manager first.
        replChangeManager.recycle(dbPath, ReplChangeManager.RecycleType.MOVE, cr.isPurge());
      }
      FileUtils.deleteDir(fs, dbPath, cr.isPurge(), conf);
    }
  }
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -286,6 +286,8 @@ private CleanupRequest getCleaningRequestBasedOnLocation(CompactionInfo ci, Stri
.setFullPartitionName(ci.getFullPartitionName())
.setRunAs(ci.runAs)
.setPurge(ifPurge)
.setSoftDelete(ci.isSoftDelete())
.setSourceOfReplication(ci.isSourceOfReplication())
.setObsoleteDirs(Collections.singletonList(obsoletePath))
.build();
}
Expand Down
9 changes: 4 additions & 5 deletions ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands.java
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
package org.apache.hadoop.hive.ql;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
Expand Down Expand Up @@ -1827,11 +1828,9 @@ private void dropDatabaseCascadeNonBlocking() throws Exception {

runCleaner(hiveConf);

stat = fs.listStatus(new Path(getWarehouseDir(), database + ".db"),
t -> t.getName().matches("(mv_)?" + tableName + "2" + SOFT_DELETE_TABLE_PATTERN));
if (stat.length != 0) {
Assert.fail("Table data was not removed from FS");
}
Assert.assertThrows(database + ".db directory should not exist", FileNotFoundException.class, () -> {
fs.listStatus(new Path(getWarehouseDir(), database + ".db"));
});
}

@Test
Expand Down

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Original file line number Diff line number Diff line change
Expand Up @@ -1344,4 +1344,9 @@ public static String getHttpPath(String httpPath) {
/**
 * Checks whether the given database is a remote (federated) database.
 *
 * @param db database to inspect; may be null
 * @return true only when {@code db} is non-null and its type is {@code REMOTE}
 */
public static boolean isDatabaseRemote(Database db) {
  if (db == null) {
    return false;
  }
  return DatabaseType.REMOTE == db.getType();
}

/**
 * The kind of drop operation a soft delete originated from.
 */
public enum SoftDeleteOperation {
  /** A {@code DROP DATABASE} performed as a soft delete. */
  DROP_DATABASE,
  /** A {@code DROP TABLE} performed as a soft delete. */
  DROP_TABLE
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
package org.apache.hadoop.hive.metastore;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.common.repl.ReplConst;
import org.apache.hadoop.hive.metastore.api.CompactionRequest;
import org.apache.hadoop.hive.metastore.api.CompactionType;
import org.apache.hadoop.hive.metastore.api.Database;
Expand All @@ -39,6 +40,7 @@
import org.apache.hadoop.hive.metastore.events.DropTableEvent;
import org.apache.hadoop.hive.metastore.txn.TxnStore;
import org.apache.hadoop.hive.metastore.txn.TxnUtils;
import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;

import java.io.IOException;
import java.util.Iterator;
Expand Down Expand Up @@ -91,6 +93,7 @@ public void onDropTable(DropTableEvent tableEvent) throws MetaException {
rqst.setRunas(TxnUtils.findUserToRunAs(table.getSd().getLocation(), table, conf));
rqst.putToProperties("location", table.getSd().getLocation());
rqst.putToProperties("ifPurge", Boolean.toString(isMustPurge(tableEvent.getEnvironmentContext(), table)));
addSoftDeletePropertiesToRequest(tableEvent.getEnvironmentContext(), rqst);
txnHandler.submitForCleanup(rqst, table.getWriteId(), currentTxn);
} catch (InterruptedException | IOException e) {
throwMetaException(e);
Expand Down Expand Up @@ -244,4 +247,13 @@ private long getTxnId(EnvironmentContext context) {
.map(Long::parseLong)
.orElse(0L);
}

/**
 * Propagates soft-delete markers from the drop event's environment context onto the
 * cleanup request so the cleaner can act on them later.
 * <p>
 * The soft-delete operation type defaults to {@code DROP_TABLE} when the context does
 * not carry one; the replication-source flag is copied only when it is set to true.
 *
 * @param context environment context of the drop event
 * @param request compaction request being submitted for cleanup
 */
private void addSoftDeletePropertiesToRequest(EnvironmentContext context, CompactionRequest request) {
  String operation = context.getProperties().get(hive_metastoreConstants.SOFT_DELETE_OPERATION);
  if (operation == null) {
    operation = MetaStoreUtils.SoftDeleteOperation.DROP_TABLE.name();
  }
  request.putToProperties(hive_metastoreConstants.SOFT_DELETE_OPERATION, operation);

  String replSource = context.getProperties().get(ReplConst.SOURCE_OF_REPLICATION);
  if (Boolean.parseBoolean(replSource)) {
    request.putToProperties(ReplConst.SOURCE_OF_REPLICATION, Boolean.TRUE.toString());
  }
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@
import java.util.concurrent.atomic.AtomicReference;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.repl.ReplConst;
import org.apache.hadoop.hive.metastore.Batchable;
import org.apache.hadoop.hive.metastore.HMSHandler;
import org.apache.hadoop.hive.metastore.IHMSHandler;
Expand Down Expand Up @@ -136,6 +137,11 @@ public DropDatabaseResult execute() throws TException, IOException {
if (isSoftDelete) {
context = new EnvironmentContext();
context.putToProperties(hive_metastoreConstants.TXN_ID, String.valueOf(request.getTxnId()));
context.putToProperties(hive_metastoreConstants.SOFT_DELETE_OPERATION,
MetaStoreUtils.SoftDeleteOperation.DROP_DATABASE.name());
if (ReplChangeManager.isSourceOfReplication(db)) {
context.putToProperties(ReplConst.SOURCE_OF_REPLICATION, Boolean.TRUE.toString());
}
request.setDeleteManagedDir(false);
}
DropTableRequest dropRequest = new DropTableRequest(name, table.getTableName());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,10 @@

import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.hadoop.hive.common.ValidCompactorWriteIdList;
import org.apache.hadoop.hive.common.repl.ReplConst;
import org.apache.hadoop.hive.metastore.api.CompactionInfoStruct;
import org.apache.hadoop.hive.metastore.api.CompactionType;
import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.OptionalCompactionInfoStruct;
import org.apache.hadoop.hive.metastore.api.TableValidWriteIds;
Expand Down Expand Up @@ -368,4 +370,12 @@ public void setWriteIds(boolean hasUncompactedAborts, Set<Long> writeIds) {
public boolean isAbortedTxnCleanup() {
return type == CompactionType.ABORT_TXN_CLEANUP;
}

/**
 * @return true if this compaction entry carries a soft-delete operation marker in
 *         its properties (set when the originating drop was a soft delete).
 */
public boolean isSoftDelete() {
  String operation = getProperty(hive_metastoreConstants.SOFT_DELETE_OPERATION);
  return operation != null;
}

/**
 * @return true if this entry's properties flag the database as a source of replication.
 */
public boolean isSourceOfReplication() {
  String replSourceFlag = getProperty(ReplConst.SOURCE_OF_REPLICATION);
  return Boolean.parseBoolean(replSourceFlag);
}
}
Loading