Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 5 additions & 3 deletions .evergreen/.evg.yml
Original file line number Diff line number Diff line change
Expand Up @@ -1939,16 +1939,18 @@ task_groups:
setup_group:
- func: "fetch-source"
- func: "prepare-resources"
- func: "assume-aws-test-secrets-role"
- command: subprocess.exec
type: "setup"
params:
working_dir: "src"
binary: bash
add_expansions_to_env: true
include_expansions_in_env: [ "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN" ]
env:
CLUSTER_PREFIX: "dbx-java"
MONGODB_VERSION: "8.0"
args:
- ${DRIVERS_TOOLS}/.evergreen/atlas/setup-atlas-cluster.sh
- ${DRIVERS_TOOLS}/.evergreen/atlas/setup.sh
- command: expansions.update
params:
file: src/atlas-expansion.yml
Expand All @@ -1960,7 +1962,7 @@ task_groups:
binary: bash
add_expansions_to_env: true
args:
- ${DRIVERS_TOOLS}/.evergreen/atlas/teardown-atlas-cluster.sh
- ${DRIVERS_TOOLS}/.evergreen/atlas/teardown.sh
tasks:
- "atlas-search-index-management-task"
- "aws-lambda-deployed-task"
Expand Down
3 changes: 3 additions & 0 deletions bson/src/main/org/bson/BinaryVector.java
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,9 @@
* @since 5.3
*/
public abstract class BinaryVector {
/**
* The BinaryVector logger
*/
protected static final Logger LOGGER = Loggers.getLogger("BinaryVector");
private final DataType dataType;

Expand Down
16 changes: 14 additions & 2 deletions bson/src/main/org/bson/BsonBinary.java
Original file line number Diff line number Diff line change
Expand Up @@ -127,9 +127,14 @@ public BsonBinary(final UUID uuid, final UuidRepresentation uuidRepresentation)
}

/**
* Returns the binary as a UUID. The binary type must be 4.
* Returns the binary as a UUID.
*
* <p><strong>Note:</strong> The BsonBinary subtype must be {@link BsonBinarySubType#UUID_STANDARD}.</p>
*
* @return the uuid
* @throws BsonInvalidOperationException if BsonBinary subtype is not {@link BsonBinarySubType#UUID_STANDARD}
* @see #asUuid(UuidRepresentation)
* @see BsonBinarySubType
* @since 3.9
*/
public UUID asUuid() {
Expand Down Expand Up @@ -162,8 +167,15 @@ public BinaryVector asVector() {
/**
* Returns the binary as a UUID.
*
* @param uuidRepresentation the UUID representation
* <p><strong>Note:</strong> The BsonBinary subtype must be either {@link BsonBinarySubType#UUID_STANDARD} or
* {@link BsonBinarySubType#UUID_LEGACY}.</p>
*
* @param uuidRepresentation the UUID representation, must be {@link UuidRepresentation#STANDARD} or
* {@link UuidRepresentation#JAVA_LEGACY}
* @return the uuid
* @throws BsonInvalidOperationException if the BsonBinary subtype is incompatible with the given {@code uuidRepresentation}, or if
* the {@code uuidRepresentation} is not {@link UuidRepresentation#STANDARD} or
* {@link UuidRepresentation#JAVA_LEGACY}.
* @since 3.9
*/
public UUID asUuid(final UuidRepresentation uuidRepresentation) {
Expand Down
2 changes: 1 addition & 1 deletion bson/src/main/org/bson/BsonBinarySubType.java
Original file line number Diff line number Diff line change
Expand Up @@ -93,7 +93,7 @@ public enum BsonBinarySubType {
* Returns true if the given value is a UUID subtype.
*
* @param value the subtype value as a byte.
* @return true if value is a UUID subtype.
* @return true if value has a {@link #UUID_STANDARD} or {@link #UUID_LEGACY} subtype.
* @since 3.4
*/
public static boolean isUuid(final byte value) {
Expand Down
6 changes: 6 additions & 0 deletions bson/src/main/org/bson/codecs/BsonDocumentCodec.java
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
import org.bson.BsonType;
import org.bson.BsonValue;
import org.bson.BsonWriter;
import org.bson.RawBsonDocument;
import org.bson.codecs.configuration.CodecRegistry;
import org.bson.types.ObjectId;

Expand All @@ -40,6 +41,7 @@ public class BsonDocumentCodec implements CollectibleCodec<BsonDocument> {
private static final String ID_FIELD_NAME = "_id";
private static final CodecRegistry DEFAULT_REGISTRY = fromProviders(new BsonValueCodecProvider());
private static final BsonTypeCodecMap DEFAULT_BSON_TYPE_CODEC_MAP = new BsonTypeCodecMap(getBsonTypeClassMap(), DEFAULT_REGISTRY);
private static final RawBsonDocumentCodec RAW_BSON_DOCUMENT_CODEC = new RawBsonDocumentCodec();

private final CodecRegistry codecRegistry;
private final BsonTypeCodecMap bsonTypeCodecMap;
Expand Down Expand Up @@ -101,6 +103,10 @@ protected BsonValue readValue(final BsonReader reader, final DecoderContext deco

@Override
public void encode(final BsonWriter writer, final BsonDocument value, final EncoderContext encoderContext) {
if (value instanceof RawBsonDocument) {
RAW_BSON_DOCUMENT_CODEC.encode(writer, (RawBsonDocument) value, encoderContext);
return;
}
writer.writeStartDocument();

beforeFields(writer, encoderContext, value);
Expand Down
2 changes: 1 addition & 1 deletion bson/src/test/unit/util/ThreadTestHelpers.java
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ public static void executeAll(final Runnable... runnables) {
CountDownLatch latch = new CountDownLatch(runnables.length);
List<Throwable> failures = Collections.synchronizedList(new ArrayList<>());
for (final Runnable runnable : runnables) {
service.submit(() -> {
service.execute(() -> {
try {
runnable.run();
} catch (Throwable e) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@ public int getBytesPerRun() {
return fileLength * NUM_INTERNAL_ITERATIONS;
}

private byte[] getDocumentAsBuffer(final T document) throws IOException {
protected byte[] getDocumentAsBuffer(final T document) throws IOException {
BasicOutputBuffer buffer = new BasicOutputBuffer();
codec.encode(new BsonBinaryWriter(buffer), document, EncoderContext.builder().build());

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -71,6 +71,9 @@ private static void runBenchmarks()
runBenchmark(new BsonDecodingBenchmark<>("Deep", "extended_bson/deep_bson.json", DOCUMENT_CODEC));
runBenchmark(new BsonDecodingBenchmark<>("Full", "extended_bson/full_bson.json", DOCUMENT_CODEC));

runBenchmark(new RawBsonNestedEncodingBenchmark("Full RawBsonDocument in BsonDocument BSON Encoding", "extended_bson/full_bson.json"));
runBenchmark(new RawBsonArrayEncodingBenchmark("Full RawBsonDocument Array in BsonDocument BSON Encoding", "extended_bson/full_bson.json", 10));

runBenchmark(new RunCommandBenchmark<>(DOCUMENT_CODEC));
runBenchmark(new FindOneBenchmark<Document>("single_and_multi_document/tweet.json", BenchmarkSuite.DOCUMENT_CLASS));

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,7 @@ public void run() throws Exception {
CountDownLatch latch = new CountDownLatch(50);

for (int i = 0; i < 50; i++) {
gridFSService.submit(exportFile(latch, i));
gridFSService.execute(exportFile(latch, i));
}

latch.await(1, TimeUnit.MINUTES);
Expand All @@ -107,7 +107,7 @@ private Runnable exportFile(final CountDownLatch latch, final int fileId) {
return () -> {
UnsafeByteArrayOutputStream outputStream = new UnsafeByteArrayOutputStream(5242880);
bucket.downloadToStream(GridFSMultiFileDownloadBenchmark.this.getFileName(fileId), outputStream);
fileService.submit(() -> {
fileService.execute(() -> {
try {
FileOutputStream fos = new FileOutputStream(new File(tempDirectory, String.format("%02d", fileId) + ".txt"));
fos.write(outputStream.getByteArray());
Expand All @@ -124,7 +124,7 @@ private void importFiles() throws Exception {
CountDownLatch latch = new CountDownLatch(50);

for (int i = 0; i < 50; i++) {
fileService.submit(importFile(latch, i));
fileService.execute(importFile(latch, i));
}

latch.await(1, TimeUnit.MINUTES);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,7 @@ public void run() throws Exception {
CountDownLatch latch = new CountDownLatch(50);

for (int i = 0; i < 50; i++) {
fileService.submit(importFile(latch, i));
fileService.execute(importFile(latch, i));
}

latch.await(1, TimeUnit.MINUTES);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -109,7 +109,7 @@ public void run() throws Exception {
CountDownLatch latch = new CountDownLatch(100);

for (int i = 0; i < 100; i++) {
documentReadingService.submit(exportJsonFile(i, latch));
documentReadingService.execute(exportJsonFile(i, latch));
}

latch.await(1, TimeUnit.MINUTES);
Expand All @@ -125,7 +125,7 @@ private Runnable exportJsonFile(final int fileId, final CountDownLatch latch) {
List<RawBsonDocument> documents = collection.find(new BsonDocument("fileId", new BsonInt32(fileId)))
.batchSize(5000)
.into(new ArrayList<>(5000));
fileWritingService.submit(writeJsonFile(fileId, documents, latch));
fileWritingService.execute(writeJsonFile(fileId, documents, latch));
};
}

Expand Down Expand Up @@ -154,7 +154,7 @@ private void importJsonFiles() throws InterruptedException {

for (int i = 0; i < 100; i++) {
int fileId = i;
importService.submit(() -> {
importService.execute(() -> {
String resourcePath = "parallel/ldjson_multi/ldjson" + String.format("%03d", fileId) + ".txt";
try (BufferedReader reader = new BufferedReader(readFromRelativePath(resourcePath), 1024 * 64)) {
String json;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -86,7 +86,7 @@ public void run() throws InterruptedException {
CountDownLatch latch = new CountDownLatch(500);

for (int i = 0; i < 100; i++) {
fileReadingService.submit(importJsonFile(latch, i));
fileReadingService.execute(importJsonFile(latch, i));
}

latch.await(1, TimeUnit.MINUTES);
Expand All @@ -104,7 +104,7 @@ private Runnable importJsonFile(final CountDownLatch latch, final int fileId) {
documents.add(document);
if (documents.size() == 1000) {
List<RawBsonDocument> documentsToInsert = documents;
documentWritingService.submit(() -> {
documentWritingService.execute(() -> {
collection.insertMany(documentsToInsert, new InsertManyOptions().ordered(false));
latch.countDown();
});
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
/*
* Copyright 2016-present MongoDB, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/

package com.mongodb.benchmark.benchmarks;

import org.bson.BsonArray;
import org.bson.BsonDocument;
import org.bson.RawBsonDocument;
import org.bson.codecs.BsonDocumentCodec;

import java.io.IOException;

/**
 * Benchmark measuring BSON encoding throughput for a {@link BsonDocument} whose single
 * field holds an array of {@link RawBsonDocument} values.
 */
public class RawBsonArrayEncodingBenchmark extends BsonEncodingBenchmark<BsonDocument> {

    private final int arraySize;

    /**
     * @param name         the benchmark name
     * @param resourcePath the classpath location of the JSON document to encode
     * @param arraySize    the number of raw document copies placed in the array
     */
    public RawBsonArrayEncodingBenchmark(final String name, final String resourcePath, final int arraySize) {
        super(name, resourcePath, new BsonDocumentCodec());
        this.arraySize = arraySize;
    }

    @Override
    public void setUp() throws IOException {
        super.setUp();

        // Wrap the loaded document in its raw (pre-encoded) form and repeat it in an array.
        RawBsonDocument nestedRawDocument = new RawBsonDocument(document, codec);
        BsonArray resultsArray = new BsonArray();
        int remaining = arraySize;
        while (remaining-- > 0) {
            resultsArray.add(nestedRawDocument);
        }
        document = new BsonDocument("results", resultsArray);

        // Recalculate documentBytes so throughput is reported against the wrapped document.
        documentBytes = getDocumentAsBuffer(document);
    }

    @Override
    public int getBytesPerRun() {
        return NUM_INTERNAL_ITERATIONS * documentBytes.length;
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
/*
* Copyright 2016-present MongoDB, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/

package com.mongodb.benchmark.benchmarks;

import org.bson.BsonDocument;
import org.bson.RawBsonDocument;
import org.bson.codecs.BsonDocumentCodec;

import java.io.IOException;

/**
 * Benchmark measuring BSON encoding throughput for a {@link BsonDocument} that nests a
 * single {@link RawBsonDocument} under one field.
 */
public class RawBsonNestedEncodingBenchmark extends BsonEncodingBenchmark<BsonDocument> {

    /**
     * @param name         the benchmark name
     * @param resourcePath the classpath location of the JSON document to encode
     */
    public RawBsonNestedEncodingBenchmark(final String name, final String resourcePath) {
        super(name, resourcePath, new BsonDocumentCodec());
    }

    @Override
    public void setUp() throws IOException {
        super.setUp();

        // Wrap the loaded document in its raw (pre-encoded) form before nesting it.
        document = new BsonDocument("nested", new RawBsonDocument(document, codec));

        // Recalculate documentBytes so throughput is reported against the wrapped document.
        documentBytes = getDocumentAsBuffer(document);
    }

    @Override
    public int getBytesPerRun() {
        return NUM_INTERNAL_ITERATIONS * documentBytes.length;
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -177,7 +177,7 @@ public List<MongocryptBecnhmarkResult> run() throws InterruptedException {
for (int i = 0; i < threadCount; i++) {
DecryptTask decryptTask = new DecryptTask(mongoCrypt, encrypted, NUM_SECS, doneSignal);
decryptTasks.add(decryptTask);
executorService.submit(decryptTask);
executorService.execute(decryptTask);
}

// Await completion of all tasks. Tasks are expected to complete shortly after NUM_SECS. Time out `await` if time exceeds 2 * NUM_SECS.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1321,7 +1321,7 @@ private boolean initUnlessClosed() {
boolean result = true;
if (state == State.NEW) {
worker = Executors.newSingleThreadExecutor(new DaemonThreadFactory("AsyncGetter"));
worker.submit(() -> runAndLogUncaught(this::workerRun));
worker.execute(() -> runAndLogUncaught(this::workerRun));
state = State.INITIALIZED;
} else if (state == State.CLOSED) {
result = false;
Expand Down
Loading