diff --git a/example/src/tests/db.ts b/example/src/tests/db.ts index c02a67c..1c34226 100644 --- a/example/src/tests/db.ts +++ b/example/src/tests/db.ts @@ -36,6 +36,19 @@ export function resetTestDb() { } } +export function createArrayBufferTestDb(name: string) { + // Use a dedicated database so ArrayBuffer tests do not interfere + // with the default test database used in other specs. + const db = open({ name }) + + db.execute('DROP TABLE IF EXISTS BlobData;') + db.execute( + 'CREATE TABLE BlobData (id INTEGER PRIMARY KEY, data BLOB NOT NULL) STRICT;', + ) + + return db +} + const LARGE_DB_NAME = 'large' // Copyright 2024 Oscar Franco diff --git a/example/src/tests/unit/specs/operations/execute.spec.ts b/example/src/tests/unit/specs/operations/execute.spec.ts index 78fd5b0..d8d113b 100644 --- a/example/src/tests/unit/specs/operations/execute.spec.ts +++ b/example/src/tests/unit/specs/operations/execute.spec.ts @@ -1,6 +1,6 @@ import { chance, expect, isNitroSQLiteError } from '../../common' import { describe, it } from '../../../MochaRNAdapter' -import { testDb } from '../../../db' +import { createArrayBufferTestDb, testDb } from '../../../db' export default function registerExecuteUnitTests() { describe('execute', () => { @@ -136,5 +136,122 @@ export default function registerExecuteUnitTests() { ]) }) }) + + describe('ArrayBuffer support', () => { + describe('execute', () => { + it('stores and reads ArrayBuffer values from BLOB columns', () => { + const dbName = 'array_buffer_read' + const db = createArrayBufferTestDb(dbName) + + const originalBytes = new Uint8Array([10, 20, 30, 40]) + const originalBuffer = originalBytes.buffer + + try { + db.execute('INSERT INTO BlobData (id, data) VALUES (?, ?)', [ + 1, + originalBuffer, + ]) + + const result = db.execute( + 'SELECT data FROM BlobData WHERE id = ?', + [1], + ) + + expect(result.rowsAffected).to.equal(1) + expect(result.rows?.length).to.equal(1) + + const row = result.results[0] + // const row = 
result.rows?.item(0)
+            expect(row).to.not.equal(undefined)
+
+            const value = row?.data
+            expect(value).to.be.instanceOf(ArrayBuffer)
+
+            const returnedBytes = new Uint8Array(value as ArrayBuffer)
+            expect(Array.from(returnedBytes)).to.eql(Array.from(originalBytes))
+          } finally {
+            db.close()
+            db.delete()
+          }
+        })
+      })
+
+      describe('executeAsync', () => {
+        it('stores and reads ArrayBuffer values from BLOB columns', async () => {
+          const dbName = 'array_buffer_read_async'
+          const db = createArrayBufferTestDb(dbName)
+
+          const originalBytes = new Uint8Array([10, 20, 30, 40])
+          const originalBuffer = originalBytes.buffer
+
+          try {
+            await db.executeAsync(
+              'INSERT INTO BlobData (id, data) VALUES (?, ?)',
+              [1, originalBuffer],
+            )
+
+            const result = await db.executeAsync(
+              'SELECT data FROM BlobData WHERE id = ?',
+              [1],
+            )
+
+            expect(result.rowsAffected).to.equal(1)
+            expect(result.rows?.length).to.equal(1)
+
+            const row = result.results[0]
+            // const row = result.rows?.item(0)
+            expect(row).to.not.equal(undefined)
+
+            const value = row?.data
+            expect(value).to.be.instanceOf(ArrayBuffer)
+
+            const returnedBytes = new Uint8Array(value as ArrayBuffer)
+            expect(Array.from(returnedBytes)).to.eql(Array.from(originalBytes))
+          } finally {
+            db.close()
+            db.delete()
+          }
+        })
+      })
+
+      describe('executeBatchAsync', () => {
+        it('stores ArrayBuffer values in BLOB columns', async () => {
+          const dbName = 'array_buffer_batch_async'
+          const db = createArrayBufferTestDb(dbName)
+
+          const originalBytes = new Uint8Array([1, 2, 3, 4, 5])
+          const originalBuffer = originalBytes.buffer
+
+          try {
+            await db.executeBatchAsync([
+              {
+                query: 'INSERT INTO BlobData (id, data) VALUES (?, ?)',
+                params: [1, originalBuffer],
+              },
+            ])
+
+            const result = db.execute(
+              'SELECT data FROM BlobData WHERE id = ?',
+              [1],
+            )
+
+            expect(result.rowsAffected).to.equal(1)
+            expect(result.rows?.length).to.equal(1)
+
+            const row = result.results[0]
+            expect(row).to.not.equal(undefined)
+
+            const value = 
row?.data
+            expect(value).to.be.instanceOf(ArrayBuffer)
+
+            const returnedBytes = new Uint8Array(value as ArrayBuffer)
+            expect(Array.from(returnedBytes)).to.eql(Array.from(originalBytes))
+          } finally {
+            db.close()
+            db.delete()
+          }
+        })
+      })
+    })
   })
 }
diff --git a/package/cpp/operations.cpp b/package/cpp/operations.cpp
index af361dd..913280d 100644
--- a/package/cpp/operations.cpp
+++ b/package/cpp/operations.cpp
@@ -184,9 +184,15 @@ std::shared_ptr<QueryResult> sqliteExecute(const std::string& d
       case SQLITE_BLOB: {
         int blob_size = sqlite3_column_bytes(statement, i);
         const void* blob = sqlite3_column_blob(statement, i);
-        uint8_t* data = new uint8_t[blob_size];
-        memcpy(data, blob, blob_size);
-        row[column_name] = ArrayBuffer::wrap(data, blob_size, [&data]() -> void { delete[] data; });
+        // Copy the SQLite BLOB into a new native ArrayBuffer.
+        // This avoids manual memory management and unsafe pointer handling.
+        if (blob_size > 0) {
+          const auto* blob_data = reinterpret_cast<const uint8_t*>(blob);
+          row[column_name] = ArrayBuffer::copy(blob_data, static_cast<size_t>(blob_size));
+        } else {
+          // Represent empty BLOBs as an empty, but valid, ArrayBuffer.
+          row[column_name] = ArrayBuffer::allocate(0);
+        }
         break;
       }
       case SQLITE_NULL:
diff --git a/package/cpp/specs/HybridNitroSQLite.cpp b/package/cpp/specs/HybridNitroSQLite.cpp
index ceb0e55..60a1d63 100644
--- a/package/cpp/specs/HybridNitroSQLite.cpp
+++ b/package/cpp/specs/HybridNitroSQLite.cpp
@@ -8,11 +8,54 @@
 #include "sqliteExecuteBatch.hpp"
 #include <memory>
 #include <mutex>
+#include <optional>
 #include <string>
+#include <variant>
 #include <vector>
 
 namespace margelo::nitro::rnnitrosqlite {
 
+// Copy any JS-backed ArrayBuffers on the JS thread so they can be safely
+// accessed from the background thread used by Promise::async.
+static std::optional<SQLiteQueryParams> copyArrayBufferParamsForBackground(const std::optional<SQLiteQueryParams>& params) {
+  if (!params) {
+    return std::nullopt;
+  }
+
+  SQLiteQueryParams copiedParams;
+  copiedParams.reserve(params->size());
+
+  for (const auto& value : *params) {
+    if (std::holds_alternative<std::shared_ptr<ArrayBuffer>>(value)) {
+      const auto& buffer = std::get<std::shared_ptr<ArrayBuffer>>(value);
+      const auto copiedBuffer = ArrayBuffer::copy(buffer);
+      copiedParams.push_back(copiedBuffer);
+    } else {
+      copiedParams.push_back(value);
+    }
+  }
+
+  return copiedParams;
+}
+
+// Overload for batch execution: copy ArrayBuffer params inside each BatchQuery.
+static std::vector<BatchQuery> copyArrayBufferParamsForBackground(const std::vector<BatchQuery>& commands) {
+  std::vector<BatchQuery> copiedCommands;
+  copiedCommands.reserve(commands.size());
+
+  for (const auto& command : commands) {
+    BatchQuery copiedCommand = command;
+
+    if (command.params) {
+      copiedCommand.params = copyArrayBufferParamsForBackground(command.params);
+    }
+
+    copiedCommands.push_back(std::move(copiedCommand));
+  }
+
+  return copiedCommands;
+}
+
 const std::string getDocPath(const std::optional<std::string>& location) {
   std::string tempDocPath = std::string(HybridNitroSQLite::docPath);
   if (location) {
@@ -57,9 +100,11 @@ std::shared_ptr<QueryResult> HybridNitroSQLite::execute(con
 std::shared_ptr<Promise<std::shared_ptr<QueryResult>>> HybridNitroSQLite::executeAsync(const std::string& dbName, const std::string& query, const std::optional<SQLiteQueryParams>& params) {
+  const auto copiedParams = copyArrayBufferParamsForBackground(params);
+
   return Promise<std::shared_ptr<QueryResult>>::async(
       [=, this]() -> std::shared_ptr<QueryResult> {
-        auto result = execute(dbName, query, params);
+        auto result = sqliteExecute(dbName, query, copiedParams);
         return result;
       });
 };
@@ -73,9 +118,14 @@ BatchQueryResult HybridNitroSQLite::executeBatch(const std::string& dbName, cons
 std::shared_ptr<Promise<BatchQueryResult>> HybridNitroSQLite::executeBatchAsync(const std::string& dbName, const std::vector<BatchQueryCommand>& batchParams) {
+  // Convert BatchQueryCommand objects on the JS thread and copy any JS-backed
+  // ArrayBuffers into native buffers before going off-thread.
+  const auto commands = batchParamsToCommands(batchParams);
+  const auto copiedCommands = copyArrayBufferParamsForBackground(commands);
+
   return Promise<BatchQueryResult>::async([=, this]() -> BatchQueryResult {
-    auto result = executeBatch(dbName, batchParams);
-    return result;
+    auto result = sqliteExecuteBatch(dbName, copiedCommands);
+    return BatchQueryResult(result.rowsAffected);
   });
 };