Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/actions/setup-env/action.yml
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ runs:
uses: actions/setup-python@v5
id: setup-python
with:
python-version: "3.11"
python-version: "3.13"
cache: "pip"
cache-dependency-path: poetry.lock

Expand Down
2 changes: 1 addition & 1 deletion MODULE.bazel.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

24 changes: 24 additions & 0 deletions buildscripts/resmokeconfig/suites/tde_deferred_cleanup_cbc.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
# Resmoke suite for testing deferred encryption key cleanup with AES256-CBC cipher mode.
# These tests stress test the encryptionKeyCleanupDeferred feature with aggressive
# cleanup intervals (1 second) while performing many dropDatabase operations.

config_variables:
  # Path to the encryption key file the tests hand to their own mongod.
  - &keyFileGood jstests/percona/tde/ekf
  # Cipher mode under test; the sibling suite covers AES256-GCM.
  - &cipherMode AES256-CBC

test_kind: js_test

selector:
  roots:
    - jstests/percona/tde/deferred_key_cleanup_drop_database.js
    - jstests/percona/tde/deferred_key_cleanup_stress.js

# Tests start their own mongod with encryption and deferred cleanup enabled,
# so the shell itself connects to no database (nodb).
executor:
  config:
    shell_options:
      global_vars:
        TestData:
          keyFileGood: *keyFileGood
          cipherMode: *cipherMode
      nodb: ""
24 changes: 24 additions & 0 deletions buildscripts/resmokeconfig/suites/tde_deferred_cleanup_gcm.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
# Resmoke suite for testing deferred encryption key cleanup with AES256-GCM cipher mode.
# These tests stress test the encryptionKeyCleanupDeferred feature with aggressive
# cleanup intervals (1 second) while performing many dropDatabase operations.

config_variables:
  # Path to the encryption key file the tests hand to their own mongod.
  - &keyFileGood jstests/percona/tde/ekf
  # Cipher mode under test; the sibling suite covers AES256-CBC.
  - &cipherMode AES256-GCM

test_kind: js_test

selector:
  roots:
    - jstests/percona/tde/deferred_key_cleanup_drop_database.js
    - jstests/percona/tde/deferred_key_cleanup_stress.js

# Tests start their own mongod with encryption and deferred cleanup enabled,
# so the shell itself connects to no database (nodb).
executor:
  config:
    shell_options:
      global_vars:
        TestData:
          keyFileGood: *keyFileGood
          cipherMode: *cipherMode
      nodb: ""
182 changes: 182 additions & 0 deletions jstests/percona/tde/deferred_key_cleanup_drop_database.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,182 @@
/**
* Tests that deferred encryption key cleanup works correctly with dropDatabase.
*
* This test verifies that when encryptionKeyCleanupDeferred is enabled:
* 1. Databases can be created and dropped without errors
* 2. Encryption keys are eventually cleaned up by the background process
* 3. No data corruption occurs
*
* @tags: [
* requires_wiredtiger,
* ]
*/
(function () {
"use strict";

const keyFile = TestData.keyFileGood || "jstests/percona/tde/ekf";
const cipherMode = TestData.cipherMode || "AES256-GCM";
const cleanupIntervalSecs = 1;
const numDatabases = 10;
const numCollections = 3;
const numDocsPerCollection = 100;

jsTestLog("Starting deferred key cleanup drop database test with cipher mode: " + cipherMode);

// Start mongod with encryption and deferred key cleanup enabled
const conn = MongoRunner.runMongod({
enableEncryption: "",
encryptionKeyFile: keyFile,
encryptionCipherMode: cipherMode,
setParameter: {
encryptionKeyCleanupDeferred: true,
encryptionKeyCleanupIntervalSeconds: cleanupIntervalSecs,
},
});
assert.neq(null, conn, "mongod failed to start with encryption enabled");

const testDbPrefix = "deferred_cleanup_test_";

// Populates database `dbName` with `numCollections` collections of
// `numDocsPerCollection` documents each, asserting that every insert is
// visible. Returns the database handle.
function createDatabaseWithData(dbName) {
    const targetDb = conn.getDB(dbName);
    for (let collIdx = 0; collIdx < numCollections; collIdx++) {
        const collName = `coll_${collIdx}`;
        const coll = targetDb[collName];

        // Build the whole batch up front and insert it in one call.
        const docs = Array.from({length: numDocsPerCollection}, (_, docIdx) => ({
            _id: docIdx,
            data: `test_data_${docIdx}`,
            dbName: dbName,
            collName: collName,
            timestamp: new Date(),
        }));
        assert.commandWorked(coll.insertMany(docs));

        // Confirm every document landed.
        assert.eq(
            numDocsPerCollection,
            coll.countDocuments({}),
            "Document count mismatch in " + dbName + "." + collName,
        );
    }
    return targetDb;
}

// Asserts that every collection in `dbName` still has the expected document
// count, then spot-checks the first few documents for field-level integrity.
function verifyDatabaseIntegrity(dbName) {
    const targetDb = conn.getDB(dbName);
    for (let collIdx = 0; collIdx < numCollections; collIdx++) {
        const collName = `coll_${collIdx}`;
        const coll = targetDb[collName];

        // The full count must be unchanged.
        assert.eq(numDocsPerCollection,
                  coll.countDocuments({}),
                  "Document count mismatch in " + dbName + "." + collName);

        // Spot-check up to ten documents' contents.
        const docsToCheck = Math.min(10, numDocsPerCollection);
        for (let docIdx = 0; docIdx < docsToCheck; docIdx++) {
            const doc = coll.findOne({_id: docIdx});
            assert.neq(null, doc, "Missing document _id=" + docIdx + " in " + dbName + "." + collName);
            assert.eq("test_data_" + docIdx, doc.data, "Data mismatch for document _id=" + docIdx);
            assert.eq(dbName, doc.dbName, "dbName mismatch for document _id=" + docIdx);
        }
    }
}

jsTestLog("Phase 1: Create databases and verify data integrity");

// Create and populate every test database, remembering the names for later phases.
let createdDbs = [];
for (let i = 0; i < numDatabases; i++) {
    const dbName = testDbPrefix + i;
    jsTestLog("Creating database: " + dbName);
    createDatabaseWithData(dbName);
    createdDbs.push(dbName);
}

// Every database must pass an integrity check before any drops happen.
createdDbs.forEach((dbName) => verifyDatabaseIntegrity(dbName));
jsTestLog("All " + numDatabases + " databases created and verified");

jsTestLog("Phase 2: Drop half of the databases");

// Drop the even-indexed databases; the odd-indexed ones stay behind so later
// phases can check that cleanup of dropped keys does not disturb live data.
let droppedDbs = [];
let remainingDbs = [];
for (let i = 0; i < numDatabases; i++) {
    const dbName = testDbPrefix + i;
    if (i % 2 !== 0) {
        remainingDbs.push(dbName);
        continue;
    }
    jsTestLog("Dropping database: " + dbName);
    assert.commandWorked(conn.getDB(dbName).dropDatabase());
    droppedDbs.push(dbName);
}

jsTestLog("Phase 3: Verify remaining databases are intact");

// The surviving databases must be untouched by the drops above.
remainingDbs.forEach((dbName) => verifyDatabaseIntegrity(dbName));
jsTestLog("All remaining databases verified after partial drop");

jsTestLog("Phase 4: Wait for deferred cleanup to run");

// Give the background cleanup time to fire at least twice
// (three intervals, converted to milliseconds).
sleep(cleanupIntervalSecs * 3 * 1000);

jsTestLog("Phase 5: Verify remaining databases are still intact after cleanup");

// Cleanup of the dropped databases' keys must not have affected live data.
remainingDbs.forEach((dbName) => verifyDatabaseIntegrity(dbName));

jsTestLog("Phase 6: Drop remaining databases");

// Drop everything left over from phase 2.
for (const dbName of remainingDbs) {
    jsTestLog("Dropping database: " + dbName);
    assert.commandWorked(conn.getDB(dbName).dropDatabase());
}

jsTestLog("Phase 7: Wait for final cleanup");

// Allow deferred cleanup to process the last batch of dropped databases.
sleep(cleanupIntervalSecs * 3 * 1000);

jsTestLog("Phase 8: Verify all test databases are gone");

// listDatabases must report no database carrying the test prefix.
const finalDbs = conn.getDB("admin").adminCommand({listDatabases: 1}).databases;
for (const dbInfo of finalDbs) {
    assert(!dbInfo.name.startsWith(testDbPrefix), "Test database should have been dropped: " + dbInfo.name);
}

jsTestLog("Phase 9: Create new databases to verify system is still functional");

// Create new databases to verify system is still functional
for (let i = 0; i < 3; i++) {
const dbName = testDbPrefix + "final_" + i;
createDatabaseWithData(dbName);
verifyDatabaseIntegrity(dbName);

// Clean up
const db = conn.getDB(dbName);
assert.commandWorked(db.dropDatabase());
}

jsTestLog("Deferred key cleanup drop database test completed successfully");

MongoRunner.stopMongod(conn);
})();
Loading
Loading