Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
82 changes: 54 additions & 28 deletions src/export/csv.ts
Original file line number Diff line number Diff line change
@@ -1,51 +1,77 @@
import { getTableData, createExportResponse } from './index'
import {
tableExists,
forEachPage,
createStreamingExportResponse,
} from '.'
import { createResponse } from '../utils'
import { DataSource } from '../types'
import { StarbaseDBConfiguration } from '../handler'

/**
 * Escape a single value for inclusion in a CSV row.
 *
 * Null/undefined become the empty string. Values containing a comma,
 * double quote, or newline are wrapped in double quotes with embedded
 * quotes doubled, per RFC 4180. All other values are stringified as-is.
 */
function escapeCsvValue(value: unknown): string {
    if (value === null || value === undefined) return ''
    const text = String(value)
    const needsQuoting = /[",\n]/.test(text)
    return needsQuoting ? `"${text.replace(/"/g, '""')}"` : text
}

export async function exportTableToCsvRoute(
tableName: string,
dataSource: DataSource,
config: StarbaseDBConfiguration
): Promise<Response> {
try {
const data = await getTableData(tableName, dataSource, config)
const exists = await tableExists(tableName, dataSource, config)

if (data === null) {
if (!exists) {
return createResponse(
undefined,
`Table '${tableName}' does not exist.`,
404
)
}

// Convert the result to CSV
let csvContent = ''
if (data.length > 0) {
// Add headers
csvContent += Object.keys(data[0]).join(',') + '\n'

// Add data rows
data.forEach((row: any) => {
csvContent +=
Object.values(row)
.map((value) => {
if (
typeof value === 'string' &&
(value.includes(',') ||
value.includes('"') ||
value.includes('\n'))
) {
return `"${value.replace(/"/g, '""')}"`
const encoder = new TextEncoder()
let headerWritten = false

const stream = new ReadableStream({
async start(controller) {
try {
await forEachPage(
tableName,
dataSource,
config,
async (rows, isFirstPage) => {
let chunk = ''

// Write CSV header from the first page
if (isFirstPage && !headerWritten && rows.length > 0) {
chunk += Object.keys(rows[0])
.map(escapeCsvValue)
.join(',') + '\n'
headerWritten = true
}
return value
})
.join(',') + '\n'
})
}

return createExportResponse(
csvContent,
for (const row of rows) {
chunk += Object.values(row)
.map(escapeCsvValue)
.join(',') + '\n'
}

controller.enqueue(encoder.encode(chunk))
}
)

controller.close()
} catch (err) {
controller.error(err)
}
},
})

return createStreamingExportResponse(
stream,
`${tableName}_export.csv`,
'text/csv'
)
Expand Down
115 changes: 72 additions & 43 deletions src/export/dump.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,8 @@
import { executeOperation } from '.'
import {
executeOperation,
forEachPage,
createStreamingExportResponse,
} from '.'
import { StarbaseDBConfiguration } from '../handler'
import { DataSource } from '../types'
import { createResponse } from '../utils'
Expand All @@ -8,62 +12,87 @@ export async function dumpDatabaseRoute(
config: StarbaseDBConfiguration
): Promise<Response> {
try {
// Get all table names
// Get all table names up front (small result set).
const tablesResult = await executeOperation(
[{ sql: "SELECT name FROM sqlite_master WHERE type='table';" }],
dataSource,
config
)

const tables = tablesResult.map((row: any) => row.name)
let dumpContent = 'SQLite format 3\0' // SQLite file header
const tables: string[] = tablesResult.map((row: any) => row.name)
const encoder = new TextEncoder()

// Iterate through all tables
for (const table of tables) {
// Get table schema
const schemaResult = await executeOperation(
[
{
sql: `SELECT sql FROM sqlite_master WHERE type='table' AND name='${table}';`,
},
],
dataSource,
config
)
const stream = new ReadableStream({
async start(controller) {
try {
controller.enqueue(
encoder.encode('-- StarbaseDB SQL Dump\n')
)
controller.enqueue(
encoder.encode(
`-- Generated at: ${new Date().toISOString()}\n\n`
)
)

if (schemaResult.length) {
const schema = schemaResult[0].sql
dumpContent += `\n-- Table: ${table}\n${schema};\n\n`
}
for (const table of tables) {
// Schema (tiny query – no pagination needed)
const schemaResult = await executeOperation(
[
{
sql: `SELECT sql FROM sqlite_master WHERE type='table' AND name=?;`,
params: [table],
},
],
dataSource,
config
)

// Get table data
const dataResult = await executeOperation(
[{ sql: `SELECT * FROM ${table};` }],
dataSource,
config
)
if (schemaResult.length) {
const schema = schemaResult[0].sql
controller.enqueue(
encoder.encode(
`-- Table: ${table}\n${schema};\n\n`
)
)
}

for (const row of dataResult) {
const values = Object.values(row).map((value) =>
typeof value === 'string'
? `'${value.replace(/'/g, "''")}'`
: value
)
dumpContent += `INSERT INTO ${table} VALUES (${values.join(', ')});\n`
}
// Stream row data page-by-page
await forEachPage(
table,
dataSource,
config,
async (rows) => {
let chunk = ''
for (const row of rows) {
const values = Object.values(row).map(
(value) =>
value === null
? 'NULL'
: typeof value === 'string'
? `'${value.replace(/'/g, "''")}'`
: value
)
chunk += `INSERT INTO ${table} VALUES (${values.join(', ')});\n`
}
controller.enqueue(encoder.encode(chunk))
}
)

dumpContent += '\n'
}
controller.enqueue(encoder.encode('\n'))
}

// Create a Blob from the dump content
const blob = new Blob([dumpContent], { type: 'application/x-sqlite3' })

const headers = new Headers({
'Content-Type': 'application/x-sqlite3',
'Content-Disposition': 'attachment; filename="database_dump.sql"',
controller.close()
} catch (err) {
controller.error(err)
}
},
})

return new Response(blob, { headers })
return createStreamingExportResponse(
stream,
'database_dump.sql',
'application/sql'
)
} catch (error: any) {
console.error('Database Dump Error:', error)
return createResponse(undefined, 'Failed to create database dump', 500)
Expand Down
82 changes: 82 additions & 0 deletions src/export/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -68,3 +68,85 @@ export function createExportResponse(

return new Response(blob, { headers })
}

/**
* The default page size used for paginated streaming queries.
* Balances memory usage against the number of round-trips to the database.
*/
const DEFAULT_PAGE_SIZE = 1000

/**
 * Determine whether a table with the given name exists in the database.
 *
 * Looks the name up in sqlite_master using a bound parameter, so the
 * table name is never interpolated into the SQL text.
 *
 * @param tableName - name of the table to look for
 * @param dataSource - database connection wrapper
 * @param config - StarbaseDB configuration forwarded to executeOperation
 * @returns true when at least one matching table row is found
 */
export async function tableExists(
    tableName: string,
    dataSource: DataSource,
    config: StarbaseDBConfiguration
): Promise<boolean> {
    const lookup = {
        sql: `SELECT name FROM sqlite_master WHERE type='table' AND name=?;`,
        params: [tableName],
    }
    const rows = await executeOperation([lookup], dataSource, config)
    return !!rows && rows.length > 0
}

/**
 * Build a streaming download Response for an export.
 *
 * Marks the body as a file attachment and advertises chunked transfer
 * encoding so the client can begin receiving data before the full
 * export has been generated.
 *
 * @param stream - the export body, produced incrementally
 * @param fileName - suggested download filename for Content-Disposition
 * @param contentType - MIME type of the exported data
 */
export function createStreamingExportResponse(
    stream: ReadableStream,
    fileName: string,
    contentType: string
): Response {
    const headers = new Headers()
    headers.set('Content-Type', contentType)
    headers.set('Content-Disposition', `attachment; filename="${fileName}"`)
    headers.set('Transfer-Encoding', 'chunked')
    return new Response(stream, { headers })
}

/**
 * Fetch rows from a table in pages using LIMIT/OFFSET and invoke a callback
 * for each page. This avoids loading the entire table into memory at once.
 *
 * The table name is quoted as a SQLite identifier (embedded double quotes
 * doubled) before being spliced into the query — identifiers cannot be
 * bound as `?` parameters, and interpolating the raw name would allow SQL
 * injection through a hostile table name. Callers should still verify the
 * table exists (see tableExists) to return a friendly 404 rather than a
 * query error.
 *
 * NOTE(review): there is no ORDER BY, so LIMIT/OFFSET paging assumes the
 * database returns rows in a stable order across the successive queries —
 * confirm the table is not modified concurrently during an export.
 *
 * @param tableName - name of the table to read
 * @param dataSource - database connection wrapper
 * @param config - StarbaseDB configuration forwarded to executeOperation
 * @param callback - invoked once per non-empty page; the second argument is
 *                   true only for the first page (used e.g. for CSV headers)
 * @param pageSize - rows per page (defaults to DEFAULT_PAGE_SIZE)
 */
export async function forEachPage(
    tableName: string,
    dataSource: DataSource,
    config: StarbaseDBConfiguration,
    callback: (rows: any[], isFirstPage: boolean) => Promise<void>,
    pageSize: number = DEFAULT_PAGE_SIZE
): Promise<void> {
    // Quote the identifier: wrap in double quotes and double any embedded
    // double quotes, per SQLite's identifier-quoting rules.
    const quotedTable = `"${tableName.replace(/"/g, '""')}"`

    let offset = 0
    let isFirstPage = true

    while (true) {
        const rows = await executeOperation(
            [
                {
                    sql: `SELECT * FROM ${quotedTable} LIMIT ? OFFSET ?;`,
                    params: [pageSize, offset],
                },
            ],
            dataSource,
            config
        )

        if (!rows || rows.length === 0) break

        await callback(rows, isFirstPage)

        isFirstPage = false
        offset += rows.length

        // A short page means we've consumed the final rows.
        if (rows.length < pageSize) break
    }
}
48 changes: 41 additions & 7 deletions src/export/json.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,8 @@
import { getTableData, createExportResponse } from './index'
import {
tableExists,
forEachPage,
createStreamingExportResponse,
} from '.'
import { createResponse } from '../utils'
import { DataSource } from '../types'
import { StarbaseDBConfiguration } from '../handler'
Expand All @@ -9,21 +13,51 @@ export async function exportTableToJsonRoute(
config: StarbaseDBConfiguration
): Promise<Response> {
try {
const data = await getTableData(tableName, dataSource, config)
const exists = await tableExists(tableName, dataSource, config)

if (data === null) {
if (!exists) {
return createResponse(
undefined,
`Table '${tableName}' does not exist.`,
404
)
}

// Convert the result to JSON
const jsonData = JSON.stringify(data, null, 4)
const encoder = new TextEncoder()
let isFirstRow = true

return createExportResponse(
jsonData,
const stream = new ReadableStream({
async start(controller) {
try {
controller.enqueue(encoder.encode('[\n'))

await forEachPage(
tableName,
dataSource,
config,
async (rows) => {
let chunk = ''
for (const row of rows) {
if (!isFirstRow) {
chunk += ',\n'
}
chunk += JSON.stringify(row, null, 4)
isFirstRow = false
}
controller.enqueue(encoder.encode(chunk))
}
)

controller.enqueue(encoder.encode('\n]\n'))
controller.close()
} catch (err) {
controller.error(err)
}
},
})

return createStreamingExportResponse(
stream,
`${tableName}_export.json`,
'application/json'
)
Expand Down