Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 13 additions & 0 deletions example/src/tests/db.ts
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,19 @@ export function resetTestDb() {
}
}

export function createArrayBufferTestDb(name: string) {
// Use a dedicated database so ArrayBuffer tests do not interfere
// with the default test database used in other specs.
const db = open({ name })

db.execute('DROP TABLE IF EXISTS BlobData;')
db.execute(
'CREATE TABLE BlobData (id INTEGER PRIMARY KEY, data BLOB NOT NULL) STRICT;',
)

return db
}

const LARGE_DB_NAME = 'large'

// Copyright 2024 Oscar Franco
Expand Down
119 changes: 118 additions & 1 deletion example/src/tests/unit/specs/operations/execute.spec.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import { chance, expect, isNitroSQLiteError } from '../../common'
import { describe, it } from '../../../MochaRNAdapter'
import { testDb } from '../../../db'
import { createArrayBufferTestDb, testDb } from '../../../db'

export default function registerExecuteUnitTests() {
describe('execute', () => {
Expand Down Expand Up @@ -136,5 +136,122 @@ export default function registerExecuteUnitTests() {
])
})
})

describe('ArrayBuffer support', () => {
describe('execute', () => {
it('stores and reads ArrayBuffer values from BLOB columns', () => {
const dbName = 'array_buffer_read'
const db = createArrayBufferTestDb(dbName)

const originalBytes = new Uint8Array([10, 20, 30, 40])
const originalBuffer = originalBytes.buffer

try {
db.execute('INSERT INTO BlobData (id, data) VALUES (?, ?)', [
1,
originalBuffer,
])

const result = db.execute(
'SELECT data FROM BlobData WHERE id = ?',
[1],
)

expect(result.rowsAffected).to.equal(1)
expect(result.rows?.length).to.equal(1)

const row = result.results[0]
// const row = result.rows?.item(0)
expect(row).to.not.equal(undefined)

const value = row?.data
expect(value).to.be.instanceOf(ArrayBuffer)

const returnedBytes = new Uint8Array(value as ArrayBuffer)
expect(Array.from(returnedBytes)).to.eql(Array.from(originalBytes))
} finally {
db.close()
db.delete()
}
})
})

describe('executeAsync', () => {
it('stores and reads ArrayBuffer values from BLOB columns', async () => {
const dbName = 'array_buffer_read'
const db = createArrayBufferTestDb(dbName)

const originalBytes = new Uint8Array([10, 20, 30, 40])
const originalBuffer = originalBytes.buffer

try {
await db.executeAsync(
'INSERT INTO BlobData (id, data) VALUES (?, ?)',
[1, originalBuffer],
)

const result = await db.executeAsync(
'SELECT data FROM BlobData WHERE id = ?',
[1],
)

expect(result.rowsAffected).to.equal(1)
expect(result.rows?.length).to.equal(1)

const row = result.results[0]
// const row = result.rows?.item(0)
expect(row).to.not.equal(undefined)

const value = row?.data
expect(value).to.be.instanceOf(ArrayBuffer)

const returnedBytes = new Uint8Array(value as ArrayBuffer)
expect(Array.from(returnedBytes)).to.eql(Array.from(originalBytes))
} finally {
db.close()
db.delete()
}
})
})

describe('executeBatchAsync', () => {
it('stores ArrayBuffer values in BLOB columns', async () => {
const dbName = 'array_buffer_batch_async'
const db = createArrayBufferTestDb(dbName)

const originalBytes = new Uint8Array([1, 2, 3, 4, 5])
const originalBuffer = originalBytes.buffer

try {
await db.executeBatchAsync([
{
query: 'INSERT INTO BlobData (id, data) VALUES (?, ?)',
params: [1, originalBuffer],
},
])

const result = db.execute(
'SELECT data FROM BlobData WHERE id = ?',
[1],
)

expect(result.rowsAffected).to.equal(1)
expect(result.rows?.length).to.equal(1)

const row = result.results[0]
expect(row).to.not.equal(undefined)

const value = row?.data
expect(value).to.be.instanceOf(ArrayBuffer)

const returnedBytes = new Uint8Array(value as ArrayBuffer)
expect(Array.from(returnedBytes)).to.eql(Array.from(originalBytes))
} finally {
db.close()
db.delete()
}
})
})
})
})
}
12 changes: 9 additions & 3 deletions package/cpp/operations.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -184,9 +184,15 @@ std::shared_ptr<HybridNitroSQLiteQueryResult> sqliteExecute(const std::string& d
case SQLITE_BLOB: {
int blob_size = sqlite3_column_bytes(statement, i);
const void* blob = sqlite3_column_blob(statement, i);
uint8_t* data = new uint8_t[blob_size];
memcpy(data, blob, blob_size);
row[column_name] = ArrayBuffer::wrap(data, blob_size, [&data]() -> void { delete[] data; });
// Copy the SQLite BLOB into a new native ArrayBuffer.
// This avoids manual memory management and unsafe pointer handling.
if (blob_size > 0) {
const auto* blob_data = reinterpret_cast<const uint8_t*>(blob);
row[column_name] = ArrayBuffer::copy(blob_data, static_cast<size_t>(blob_size));
} else {
// Represent empty BLOBs as an empty, but valid, ArrayBuffer.
row[column_name] = ArrayBuffer::allocate(0);
}
break;
}
case SQLITE_NULL:
Expand Down
56 changes: 53 additions & 3 deletions package/cpp/specs/HybridNitroSQLite.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -8,11 +8,54 @@
#include "sqliteExecuteBatch.hpp"
#include <iostream>
#include <map>
#include <optional>
#include <string>
#include <variant>
#include <vector>

namespace margelo::nitro::rnnitrosqlite {

// Copy any JS-backed ArrayBuffers on the JS thread so they can be safely
// accessed from the background thread used by Promise::async.
static std::optional<SQLiteQueryParams> copyArrayBufferParamsForBackground(const std::optional<SQLiteQueryParams>& params) {
if (!params) {
return std::nullopt;
}

SQLiteQueryParams copiedParams;
copiedParams.reserve(params->size());

for (const auto& value : *params) {
if (std::holds_alternative<std::shared_ptr<ArrayBuffer>>(value)) {
const auto& buffer = std::get<std::shared_ptr<ArrayBuffer>>(value);
const auto copiedBuffer = ArrayBuffer::copy(buffer);
copiedParams.push_back(copiedBuffer);
} else {
copiedParams.push_back(value);
}
}

return copiedParams;
}

// Overload for batch execution: copy ArrayBuffer params inside each BatchQuery.
static std::vector<BatchQuery> copyArrayBufferParamsForBackground(const std::vector<BatchQuery>& commands) {
std::vector<BatchQuery> copiedCommands;
copiedCommands.reserve(commands.size());

for (const auto& command : commands) {
BatchQuery copiedCommand = command;

if (command.params) {
copiedCommand.params = copyArrayBufferParamsForBackground(command.params);
}

copiedCommands.push_back(std::move(copiedCommand));
}

return copiedCommands;
}

const std::string getDocPath(const std::optional<std::string>& location) {
std::string tempDocPath = std::string(HybridNitroSQLite::docPath);
if (location) {
Expand Down Expand Up @@ -57,9 +100,11 @@ std::shared_ptr<HybridNitroSQLiteQueryResultSpec> HybridNitroSQLite::execute(con

std::shared_ptr<Promise<std::shared_ptr<HybridNitroSQLiteQueryResultSpec>>>
HybridNitroSQLite::executeAsync(const std::string& dbName, const std::string& query, const std::optional<SQLiteQueryParams>& params) {
const auto copiedParams = copyArrayBufferParamsForBackground(params);

return Promise<std::shared_ptr<HybridNitroSQLiteQueryResultSpec>>::async(
[=, this]() -> std::shared_ptr<HybridNitroSQLiteQueryResultSpec> {
auto result = execute(dbName, query, params);
auto result = sqliteExecute(dbName, query, copiedParams);
return result;
});
};
Expand All @@ -73,9 +118,14 @@ BatchQueryResult HybridNitroSQLite::executeBatch(const std::string& dbName, cons

std::shared_ptr<Promise<BatchQueryResult>> HybridNitroSQLite::executeBatchAsync(const std::string& dbName,
const std::vector<BatchQueryCommand>& batchParams) {
// Convert BatchQueryCommand objects on the JS thread and copy any JS-backed
// ArrayBuffers into native buffers before going off-thread.
const auto commands = batchParamsToCommands(batchParams);
const auto copiedCommands = copyArrayBufferParamsForBackground(commands);

return Promise<BatchQueryResult>::async([=, this]() -> BatchQueryResult {
auto result = executeBatch(dbName, batchParams);
return result;
auto result = sqliteExecuteBatch(dbName, copiedCommands);
return BatchQueryResult(result.rowsAffected);
});
};

Expand Down