Skip to content

Commit fe18de9

Browse files
committed
adding metrics support
1 parent 90a81e2 commit fe18de9

File tree

3 files changed

+17
-10
lines changed

3 files changed

+17
-10
lines changed

pom.xml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -72,7 +72,7 @@
7272
<avro.version>1.8.2</avro.version>
7373
<bigquery.connector.hadoop2.version>hadoop2-1.0.0</bigquery.connector.hadoop2.version>
7474
<commons.codec.version>1.4</commons.codec.version>
75-
<cdap.version>6.7.0</cdap.version>
75+
<cdap.version>6.8.0-SNAPSHOT</cdap.version>
7676
<cdap.plugin.version>2.10.0-SNAPSHOT</cdap.plugin.version>
7777
<dropwizard.metrics-core.version>3.2.6</dropwizard.metrics-core.version>
7878
<flogger.system.backend.version>0.3.1</flogger.system.backend.version>

src/main/java/io/cdap/plugin/gcp/bigquery/sqlengine/BigQueryReadDataset.java

Lines changed: 11 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -47,6 +47,7 @@
4747
import com.google.common.reflect.TypeToken;
4848
import com.google.gson.Gson;
4949
import io.cdap.cdap.api.data.schema.Schema;
50+
import io.cdap.cdap.api.metrics.Metrics;
5051
import io.cdap.cdap.etl.api.engine.sql.dataset.SQLDataset;
5152
import io.cdap.cdap.etl.api.engine.sql.request.SQLReadRequest;
5253
import io.cdap.cdap.etl.api.engine.sql.request.SQLReadResult;
@@ -85,26 +86,30 @@ public class BigQueryReadDataset implements SQLDataset, BigQuerySQLDataset {
8586
private final String jobId;
8687
private Schema schema;
8788
private Long numRows;
89+
private Metrics metrics;
8890

8991
private BigQueryReadDataset(String datasetName,
9092
BigQuerySQLEngineConfig sqlEngineConfig,
9193
BigQuery bigQuery,
9294
SQLReadRequest readRequest,
9395
TableId destinationTableId,
94-
String jobId) {
96+
String jobId,
97+
Metrics metrics) {
9598
this.datasetName = datasetName;
9699
this.sqlEngineConfig = sqlEngineConfig;
97100
this.bigQuery = bigQuery;
98101
this.readRequest = readRequest;
99102
this.destinationTableId = destinationTableId;
100103
this.jobId = jobId;
104+
this.metrics = metrics;
101105
}
102106

103107
public static BigQueryReadDataset getInstance(String datasetName,
104108
BigQuerySQLEngineConfig sqlEngineConfig,
105109
BigQuery bigQuery,
106110
SQLReadRequest readRequest,
107-
TableId destinationTableId) {
111+
TableId destinationTableId,
112+
Metrics metrics) {
108113
// Get new Job ID for this push operation
109114
String jobId = BigQuerySQLEngineUtils.newIdentifier();
110115

@@ -113,7 +118,8 @@ public static BigQueryReadDataset getInstance(String datasetName,
113118
bigQuery,
114119
readRequest,
115120
destinationTableId,
116-
jobId);
121+
jobId,
122+
metrics);
117123
}
118124

119125
public SQLReadResult read() {
@@ -216,7 +222,7 @@ private SQLReadResult readInternal(SQLReadRequest readRequest,
216222

217223
// Check for errors
218224
if (queryJob.getStatus().getError() != null) {
219-
BigQuerySQLEngineUtils.logJobMetrics(queryJob);
225+
BigQuerySQLEngineUtils.logJobMetrics(queryJob, metrics);
220226
LOG.error("Error executing BigQuery Job: '{}' in Project '{}', Dataset '{}': {}",
221227
jobId, sqlEngineConfig.getProject(), sqlEngineConfig.getDatasetProject(),
222228
queryJob.getStatus().getError().toString());
@@ -230,7 +236,7 @@ private SQLReadResult readInternal(SQLReadRequest readRequest,
230236
LOG.info("Executed read operation for {} records from {}.{}.{} into {}.{}.{}", numRows,
231237
sourceTableId.getProject(), sourceTableId.getDataset(), sourceTableId.getTable(),
232238
sourceTableId.getProject(), sourceTableId.getDataset(), sourceTableId.getTable());
233-
BigQuerySQLEngineUtils.logJobMetrics(queryJob);
239+
BigQuerySQLEngineUtils.logJobMetrics(queryJob, metrics);
234240

235241
return SQLReadResult.success(datasetName, this);
236242
}

src/main/java/io/cdap/plugin/gcp/bigquery/sqlengine/BigQuerySQLEngine.java

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -350,9 +350,9 @@ public SQLReadResult read(SQLReadRequest readRequest) throws SQLEngineException
350350

351351
// TODO: implement direct source read toggle
352352
// Check if direct sink write is enabled. If not, skip.
353-
if (!sqlEngineConfig.shouldUseDirectSinkWrite()) {
354-
return SQLReadResult.unsupported(datasetName);
355-
}
353+
// if (!sqlEngineConfig.shouldUseDirectSinkWrite()) {
354+
// return SQLReadResult.unsupported(datasetName);
355+
// }
356356

357357
// Check if this output matches the expected engine. If it doesn't, skip execution for this write operation.
358358
if (!BigQuerySQLEngine.class.getName().equals(readRequest.getInput().getSqlEngineClassName())) {
@@ -372,7 +372,8 @@ public SQLReadResult read(SQLReadRequest readRequest) throws SQLEngineException
372372
sqlEngineConfig,
373373
bigQuery,
374374
readRequest,
375-
destinationTableId);
375+
destinationTableId,
376+
metrics);
376377
SQLReadResult result = readDataset.read();
377378

378379
if (result.isSuccessful()) {

0 commit comments

Comments (0)