Skip to content
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.regex.PatternSyntaxException;

public abstract class BaseClient {
private static final Logger LOG = LoggerFactory.getLogger(BaseClient.class);
Expand Down Expand Up @@ -184,6 +185,168 @@ private void init() {
}
}

/**
 * Validates a SQL identifier (database/table/column name or pattern) before it is
 * used in a metadata lookup. A blank identifier means "no filter" and is accepted.
 *
 * @param identifier     the identifier to validate; blank values are ignored
 * @param identifierType human-readable label for the identifier, used in error messages
 * @throws HadoopException if the identifier contains path-traversal sequences or
 *                         characters outside the allowed set
 */
protected void validateSqlIdentifier(String identifier, String identifierType) throws HadoopException {
    if (StringUtils.isBlank(identifier)) {
        return;
    }

    String msgDesc = null;

    // Reject traversal-style sequences outright before applying the whitelist.
    if (identifier.contains("..") || identifier.contains("//") || identifier.contains("\\")) {
        msgDesc = "Invalid " + identifierType + ": [" + identifier + "]. Path traversal patterns are not allowed.";
    } else if (!identifier.matches("^[a-zA-Z0-9*?\\[\\]\\-\\$%\\{\\}\\=\\/\\._]+$")) {
        msgDesc = "Invalid " + identifierType + ": [" + identifier + "]. Only alphanumeric characters along with ( ., _, -, *, ?, [], {}, %, $, = / ) are allowed.";
    }

    if (msgDesc != null) {
        HadoopException hdpException = new HadoopException(msgDesc);
        hdpException.generateResponseDataMap(false, msgDesc, msgDesc + DEFAULT_ERROR_MESSAGE, null, null);
        LOG.error(msgDesc);
        throw hdpException;
    }
}

/**
 * Translates the UI wildcard syntax into a SQL LIKE pattern:
 * '*' becomes '%' (any run of characters) and '?' becomes '_' (any single character).
 * A null or empty input means "match everything" and yields "%".
 *
 * NOTE(review): literal '%' or '_' characters already present in the input are
 * passed through unescaped and will act as LIKE wildcards — confirm this is intended.
 *
 * @param pattern the wildcard pattern entered by the user; may be null or empty
 * @return the equivalent SQL LIKE pattern, never null
 */
protected String convertToSqlPattern(String pattern) throws HadoopException {
    if (pattern == null || pattern.isEmpty()) {
        return "%";
    }
    return pattern.replace('?', '_').replace('*', '%');
}

/**
 * Checks whether a value matches a SQL LIKE-style pattern ('%' = any run of
 * characters, '_' = any single character) by translating the pattern to a regex.
 *
 * @param value   the candidate value; null never matches a concrete pattern
 * @param pattern the SQL LIKE pattern; null or "%" matches everything
 * @return true if the value matches the pattern
 * @throws HadoopException if the translated pattern is not a valid regex
 */
protected boolean matchesSqlPattern(String value, String pattern) throws HadoopException {
    if (pattern == null || pattern.equals("%")) {
        return true;
    }

    if (value == null) {
        // Fix: previously this fell through to value.matches(...) and threw a
        // NullPointerException; a missing value cannot match a concrete pattern.
        return false;
    }

    String regex = convertSqlPatternToRegex(pattern);
    try {
        return value.matches(regex);
    } catch (PatternSyntaxException pe) {
        String msgDesc = "Invalid value: [" + value + "]. Only alphanumeric characters along with ( ., _, -, *, ?, [], {}, %, $, = / ) are allowed.";
        HadoopException hdpException = new HadoopException(msgDesc);
        hdpException.generateResponseDataMap(false, msgDesc, msgDesc + DEFAULT_ERROR_MESSAGE, null, null);
        LOG.error(msgDesc);
        throw hdpException;
    }
}

/**
 * Validates a URL-style resource name before it is used in a lookup. Null is
 * treated as "no filter"; note that an empty string fails the whitelist check
 * and is rejected.
 *
 * @param resourceName the resource name to validate; null values are ignored
 * @param resourceType human-readable label for the resource, used in error messages
 * @throws HadoopException if the name contains path-traversal sequences or
 *                         characters outside the allowed set
 */
protected void validateUrlResourceName(String resourceName, String resourceType) throws HadoopException {
    if (resourceName == null) {
        return;
    }

    String msgDesc = null;

    // Reject traversal-style sequences outright before applying the whitelist.
    if (resourceName.contains("..") || resourceName.contains("//") || resourceName.contains("\\")) {
        msgDesc = "Invalid " + resourceType + ": [" + resourceName + "]. Path traversal patterns are not allowed.";
    } else if (!resourceName.matches("^[a-zA-Z0-9_.*\\-]+$")) {
        msgDesc = "Invalid " + resourceType + ": [" + resourceName + "]. Only alphanumeric characters with ( ., _, *, -) are allowed.";
    }

    if (msgDesc != null) {
        HadoopException hdpException = new HadoopException(msgDesc);
        hdpException.generateResponseDataMap(false, msgDesc, msgDesc + DEFAULT_ERROR_MESSAGE, null, null);
        LOG.error(msgDesc);
        throw hdpException;
    }
}

/**
 * Validates a user-supplied wildcard pattern ('*' / '?') before it is converted
 * to a regex. A null or empty pattern means "no filter" and is accepted.
 *
 * @param pattern     the wildcard pattern to validate; null/empty values are ignored
 * @param patternType human-readable label for the pattern, used in error messages
 * @throws HadoopException if the pattern contains path-traversal sequences or
 *                         characters outside the allowed set
 */
protected void validateWildcardPattern(String pattern, String patternType) throws HadoopException {
    if (pattern == null || pattern.isEmpty()) {
        return;
    }

    String msgDesc = null;

    // Reject traversal-style sequences outright before applying the whitelist.
    if (pattern.contains("..") || pattern.contains("//") || pattern.contains("\\")) {
        msgDesc = "Invalid " + patternType + ": [" + pattern + "]. Path traversal patterns are not allowed.";
    } else if (!pattern.matches("^[a-zA-Z0-9_.*?\\[\\]\\-\\$%\\{\\}\\=\\/]+$")) {
        msgDesc = "Invalid " + patternType + ": [" + pattern + "]. Only alphanumeric characters along with ( ., _, -, *, ?, [], {}, %, $, = / ) are allowed.";
    }

    if (msgDesc != null) {
        HadoopException hdpException = new HadoopException(msgDesc);
        hdpException.generateResponseDataMap(false, msgDesc, msgDesc + DEFAULT_ERROR_MESSAGE, null, null);
        LOG.error(msgDesc);
        throw hdpException;
    }
}

/**
 * Translates a SQL LIKE pattern into an anchored Java regex: '%' maps to ".*",
 * '_' maps to ".", and every regex metacharacter is escaped so it matches
 * literally.
 *
 * @param pattern the SQL LIKE pattern; must not be null
 * @return a regex anchored with '^' and '$' equivalent to the LIKE pattern
 */
protected String convertSqlPatternToRegex(String pattern) {
    StringBuilder regexBuilder = new StringBuilder("^");

    for (int i = 0; i < pattern.length(); i++) {
        char c = pattern.charAt(i);
        switch (c) {
            case '%':
                // SQL LIKE wildcard: zero or more characters
                regexBuilder.append(".*");
                break;
            case '_':
                // SQL LIKE wildcard: exactly one character
                regexBuilder.append('.');
                break;
            case '*': // fix: '*' was previously unescaped, so a literal '*' acted as a
                      // regex quantifier and "**" threw PatternSyntaxException
            case '.':
            case '^':
            case '$':
            case '+':
            case '?':
            case '{':
            case '}':
            case '[':
            case ']':
            case '(':
            case ')':
            case '|':
            case '\\':
                // Escape regex metacharacters so they are treated literally
                regexBuilder.append('\\').append(c);
                break;
            default:
                regexBuilder.append(c);
                break;
        }
    }

    // Anchor the end as well, so the regex is a full match even if it is ever
    // used with Matcher.find(); String.matches() already implies both anchors.
    regexBuilder.append('$');

    return regexBuilder.toString();
}

/**
 * Translates a user wildcard pattern into an anchored Java regex: '*' maps to
 * ".*", '?' maps to ".", and regex metacharacters are escaped so they match
 * literally. A null or empty pattern matches everything.
 *
 * @param wildcard the wildcard pattern; may be null or empty
 * @return an anchored regex equivalent to the wildcard pattern
 */
protected String convertWildcardToRegex(String wildcard) {
    if (wildcard == null || wildcard.isEmpty()) {
        return ".*";
    }
    StringBuilder regex = new StringBuilder("^");
    for (int i = 0; i < wildcard.length(); i++) {
        char c = wildcard.charAt(i);
        switch (c) {
            case '*':
                // wildcard: zero or more characters
                regex.append(".*");
                break;
            case '?':
                // wildcard: exactly one character
                regex.append(".");
                break;
            case '.':
            case '\\':
            case '^':
            case '$':
            case '|':
            case '{':
            case '}':
            case '[':
            case ']':
            case '(': // fix: previously unescaped — created a regex group
            case ')': // fix: previously unescaped — unbalanced ')' threw PatternSyntaxException
            case '+': // fix: previously unescaped — acted as a regex quantifier
                // Escape regex metacharacters so they are treated literally
                // (same escape set as convertSqlPatternToRegex).
                regex.append('\\').append(c);
                break;
            default:
                regex.append(c);
        }
    }
    regex.append('$');
    return regex.toString();
}

// Convenience overload: wraps the given exception in a HadoopException whose
// message identifies the service ([serviceName]) that failed to log in.
private HadoopException createException(Exception exp) {
    return createException("Unable to login to Hadoop environment [" + serviceName + "]", exp);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -191,6 +191,8 @@ public List<String> getTableList(final String tableNameMatching, final List<Stri
ret = Subject.doAs(subj, new PrivilegedAction<List<String>>() {
@Override
public List<String> run() {
validateWildcardPattern(tableNameMatching, "table pattern");
String safeTablePattern = convertWildcardToRegex(tableNameMatching);
List<String> tableList = new ArrayList<>();
Admin admin = null;

Expand All @@ -205,8 +207,7 @@ public List<String> run() {
LOG.info("getTableList: no exception: HbaseAvailability true");

admin = conn.getAdmin();

List<TableDescriptor> htds = admin.listTableDescriptors(Pattern.compile(tableNameMatching));
List<TableDescriptor> htds = admin.listTableDescriptors(Pattern.compile(safeTablePattern));

if (htds != null) {
for (TableDescriptor htd : htds) {
Expand Down Expand Up @@ -240,6 +241,8 @@ public List<String> run() {
LOG.error(msgDesc + mnre);

throw hdpException;
} catch (HadoopException he) {
throw he;
} catch (IOException io) {
String msgDesc = "getTableList: Unable to get HBase table List for [repository:" + getConfigHolder().getDatasourceName() + ",table-match:" + tableNameMatching + "].";
HadoopException hdpException = new HadoopException(msgDesc, io);
Expand Down Expand Up @@ -291,14 +294,14 @@ public List<String> getColumnFamilyList(final String columnFamilyMatching, final

@Override
public List<String> run() {
validateWildcardPattern(columnFamilyMatching, "column family pattern");
String safeColumnPattern = convertWildcardToRegex(columnFamilyMatching);
List<String> colfList = new ArrayList<>();
Admin admin = null;

try {
LOG.info("getColumnFamilyList: setting config values from client");

setClientConfigValues(conf);

LOG.info("getColumnFamilyList: checking HbaseAvailability with the new config");

try (Connection conn = ConnectionFactory.createConnection(conf)) {
Expand All @@ -314,8 +317,7 @@ public List<String> run() {
if (htd != null) {
for (ColumnFamilyDescriptor hcd : htd.getColumnFamilies()) {
String colf = hcd.getNameAsString();

if (colf.matches(columnFamilyMatching)) {
if (colf.matches(safeColumnPattern)) {
if (existingColumnFamilies != null && existingColumnFamilies.contains(colf)) {
continue;
} else {
Expand Down Expand Up @@ -345,6 +347,8 @@ public List<String> run() {
LOG.error(msgDesc + mnre);

throw hdpException;
} catch (HadoopException he) {
throw he;
} catch (IOException io) {
String msgDesc = "getColumnFamilyList: Unable to get HBase ColumnFamilyList for [repository:" + getConfigHolder().getDatasourceName() + ",table:" + tblName + ", table-match:" + columnFamilyMatching + "] ";
HadoopException hdpException = new HadoopException(msgDesc, io);
Expand Down
Loading
Loading