@@ -14,7 +14,11 @@ import { Dialog, DialogContent, DialogHeader, DialogTitle } from '~/components/u
 import { Progress } from '~/components/ui/progress'
 import '~/polyfills/readable-stream'
 
+import { useQueryClient } from '@tanstack/react-query'
 import { Semaphore } from 'async-mutex'
+import Link from 'next/link'
+import { useRouter } from 'next/navigation'
+import { getDatabasesQueryKey } from '~/data/databases/databases-query'
 import { DbManager } from '~/lib/db'
 import { hasFile, saveFile } from '~/lib/files'
 import { tarStreamEntryToFile, waitForChunk } from '~/lib/streams'
@@ -26,12 +30,12 @@ import {
   requestFileUpload,
   stripSuffix,
 } from '~/lib/util'
-import Link from 'next/link'
 
 export default function Page() {
   const { dbManager } = useApp()
+  const router = useRouter()
+  const queryClient = useQueryClient()
   const [progress, setProgress] = useState<number>()
-  const [isImportComplete, setIsImportComplete] = useState(false)
 
   return (
     <>
@@ -108,158 +112,149 @@ export default function Page() {
             <li>
               Click <strong>Import</strong> and select the previously exported tarball.
               <br />
-              {!isImportComplete ? (
-                progress === undefined ? (
-                  <Button
-                    className="my-2"
-                    onClick={async () => {
-                      if (!dbManager) {
-                        throw new Error('dbManager is not available')
+              {progress === undefined ? (
+                <Button
+                  className="my-2"
+                  onClick={async () => {
+                    if (!dbManager) {
+                      throw new Error('dbManager is not available')
+                    }
+
+                    const file = await requestFileUpload()
+
+                    setProgress(0)
+
+                    const metaDb = await dbManager.getMetaDb()
+
+                    const fileStream = file
+                      .stream()
+                      .pipeThrough(new DecompressionStream('gzip'))
+                      .pipeThrough(new UntarStream())
+
+                    // Ensure that we load the meta DB first
+                    const [metaDumpEntry, restEntryStream] = await waitForChunk(
+                      fileStream,
+                      (entry) => entry.path === 'meta.tar.gz'
+                    )
+
+                    if (!metaDumpEntry) {
+                      throw new Error('Tarball is missing meta database dump')
+                    }
+
+                    const metaDump = await tarStreamEntryToFile(metaDumpEntry)
+
+                    // Load the external meta DB temporarily in memory
+                    const externalMetaDb = await DbManager.createPGlite({
+                      loadDataDir: metaDump,
+                    })
+
+                    // Create a temporary DbManager from it
+                    // (so that migrations and other checks run)
+                    const externalDbManager = new DbManager(externalMetaDb)
+
+                    const databases = await externalDbManager.exportDatabases()
+                    const messages = await externalDbManager.exportMessages()
+
+                    try {
+                      await metaDb.sql`begin`
+                      await dbManager.importDatabases(databases)
+                      await dbManager.importMessages(messages)
+                      await metaDb.sql`commit`
+                    } catch (err) {
+                      await metaDb.sql`rollback`
+                      throw err
+                    }
+
+                    const existingIDBDatabases = await indexedDB.databases()
+                    const dbLoadSemaphore = new Semaphore(5)
+                    const dbLoadPromises: Promise<void>[] = []
+
+                    for await (const entry of restEntryStream) {
+                      // Only handle file entries (vs. directory, etc)
+                      if (entry.header.typeflag !== '0') {
+                        continue
                       }
 
-                      const file = await requestFileUpload()
-
-                      setProgress(0)
-
-                      const metaDb = await dbManager.getMetaDb()
+                      const pathSegments = entry.path.split('/').filter((v) => !!v)
+                      const [rootDir] = pathSegments
 
-                      const fileStream = file
-                        .stream()
-                        .pipeThrough(new DecompressionStream('gzip'))
-                        .pipeThrough(new UntarStream())
-
-                      // Ensure that we load the meta DB first
-                      const [metaDumpEntry, restEntryStream] = await waitForChunk(
-                        fileStream,
-                        (entry) => entry.path === 'meta.tar.gz'
-                      )
-
-                      if (!metaDumpEntry) {
-                        throw new Error('Tarball is missing meta database dump')
-                      }
+                      switch (rootDir) {
+                        case 'dbs': {
+                          const dump = await tarStreamEntryToFile(entry)
+                          const databaseId = stripSuffix(dump.name, '.tar.gz')
 
-                      const metaDump = await tarStreamEntryToFile(metaDumpEntry)
+                          if (!databaseId) {
+                            throw new Error(
+                              `Failed to parse database ID from file '${entry.path}'`
+                            )
+                          }
 
-                      // Load the external meta DB temporarily in memory
-                      const externalMetaDb = await DbManager.createPGlite({
-                        loadDataDir: metaDump,
-                      })
+                          const databaseExists = existingIDBDatabases.some(
+                            (db) => db.name === `/pglite/${dbManager.prefix}-${databaseId}`
+                          )
 
-                      // Create a temporary DbManager from it
-                      // (so that migrations and other checks run)
-                      const externalDbManager = new DbManager(externalMetaDb)
+                          if (databaseExists) {
+                            console.warn(
+                              `Database with ID '${databaseId}' already exists, skipping`
+                            )
+                            setProgress((progress) => (progress ?? 0) + 100 / databases.length)
+                            continue
+                          }
 
-                      const databases = await externalDbManager.exportDatabases()
-                      const messages = await externalDbManager.exportMessages()
+                          // Limit the number of concurrent loads to avoid excessive RAM use
+                          const dbLoadPromise = dbLoadSemaphore.runExclusive(async () => {
+                            try {
+                              // Load dump into PGlite instance (persists in IndexedDB)
+                              await dbManager.getDbInstance(databaseId, dump)
+                            } catch (err) {
+                              console.warn(`Failed to load database with ID '${databaseId}'`, err)
+                            }
 
-                      try {
-                        await metaDb.sql`begin`
-                        await dbManager.importDatabases(databases)
-                        await dbManager.importMessages(messages)
-                        await metaDb.sql`commit`
-                      } catch (err) {
-                        await metaDb.sql`rollback`
-                        throw err
-                      }
+                            await dbManager.closeDbInstance(databaseId)
+                            setProgress((progress) => (progress ?? 0) + 100 / databases.length)
+                          })
 
-                      const existingIDBDatabases = await indexedDB.databases()
-                      const dbLoadSemaphore = new Semaphore(5)
-                      const dbLoadPromises: Promise<void>[] = []
+                          dbLoadPromises.push(dbLoadPromise)
 
-                      for await (const entry of restEntryStream) {
-                        // Only handle file entries (vs. directory, etc)
-                        if (entry.header.typeflag !== '0') {
-                          continue
+                          break
                         }
+                        case 'files': {
+                          const file = await tarStreamEntryToFile(entry)
 
-                        const pathSegments = entry.path.split('/').filter((v) => !!v)
-                        const [rootDir] = pathSegments
+                          // File ID is captured as the name of the last sub-directory
+                          const fileId = pathSegments.at(-2)
 
-                        switch (rootDir) {
-                          case 'dbs': {
-                            const dump = await tarStreamEntryToFile(entry)
-                            const databaseId = stripSuffix(dump.name, '.tar.gz')
-
-                            if (!databaseId) {
-                              throw new Error(
-                                `Failed to parse database ID from file '${entry.path}'`
-                              )
-                            }
-
-                            const databaseExists = existingIDBDatabases.some(
-                              (db) => db.name === `/pglite/${dbManager.prefix}-${databaseId}`
+                          if (!fileId) {
+                            throw new Error(
+                              `Failed to parse file ID from file path '${entry.path}'`
                             )
-
-                            if (databaseExists) {
-                              console.warn(
-                                `Database with ID '${databaseId}' already exists, skipping`
-                              )
-                              setProgress((progress) => (progress ?? 0) + 100 / databases.length)
-                              continue
-                            }
-
-                            // Limit the number of concurrent loads to avoid excessive RAM use
-                            const dbLoadPromise = dbLoadSemaphore.runExclusive(async () => {
-                              try {
-                                // Load dump into PGlite instance (persists in IndexedDB)
-                                await dbManager.getDbInstance(databaseId, dump)
-                              } catch (err) {
-                                console.warn(
-                                  `Failed to load database with ID '${databaseId}'`,
-                                  err
-                                )
-                              }
-
-                              await dbManager.closeDbInstance(databaseId)
-                              setProgress((progress) => (progress ?? 0) + 100 / databases.length)
-                            })
-
-                            dbLoadPromises.push(dbLoadPromise)
-
-                            break
                           }
-                          case 'files': {
-                            const file = await tarStreamEntryToFile(entry)
 
-                            // File ID is captured as the name of the last sub-directory
-                            const fileId = pathSegments.at(-2)
+                          const fileExists = await hasFile(fileId)
 
-                            if (!fileId) {
-                              throw new Error(
-                                `Failed to parse file ID from file path '${entry.path}'`
-                              )
-                            }
-
-                            const fileExists = await hasFile(fileId)
-
-                            if (fileExists) {
-                              console.warn(`File with ID '${fileId}' already exists, skipping`)
-                              continue
-                            }
-
-                            await saveFile(fileId, file)
-                            break
+                          if (fileExists) {
+                            console.warn(`File with ID '${fileId}' already exists, skipping`)
+                            continue
                           }
+
+                          await saveFile(fileId, file)
+                          break
                         }
                       }
+                    }
+
+                    await Promise.all(dbLoadPromises)
+                    await queryClient.invalidateQueries({ queryKey: getDatabasesQueryKey() })
 
-                      await Promise.all(dbLoadPromises)
-
-                      setIsImportComplete(true)
-                    }}
-                  >
-                    Import
-                  </Button>
-                ) : (
-                  <div className="flex gap-2 text-xs items-center">
-                    <Progress className="my-2 w-[60%]" value={Math.round(progress)} />
-                    {Math.round(progress)}%
-                  </div>
-                )
+                    router.push('/')
+                  }}
+                >
+                  Import
+                </Button>
               ) : (
-                <div>
-                  Import was successful. Head over to{' '}
-                  <Link href={currentDomainUrl}>{currentDomainHostname}</Link>.
+                <div className="flex gap-2 text-xs items-center">
+                  <Progress className="my-2 w-[60%]" value={Math.round(progress)} />
+                  {Math.round(progress)}%
                 </div>
               )}
             </li>
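
Note on the stream split: the import relies on `waitForChunk` from `~/lib/streams` to pull the `meta.tar.gz` entry out of the decompressed tarball before iterating the remaining entries, so the meta database can be imported (inside a transaction) first. That helper's implementation is not part of this diff; below is a minimal sketch of how such a helper could behave over a web `ReadableStream`. The name and signature are assumed from the call site, not taken from the project.

```ts
// Hypothetical sketch of a waitForChunk-style helper. It buffers chunks until
// one matches the predicate, then returns that chunk together with a stream
// that replays the buffered chunks before forwarding the remainder.
async function waitForChunkSketch<T>(
  stream: ReadableStream<T>,
  predicate: (chunk: T) => boolean
): Promise<[T | undefined, ReadableStream<T>]> {
  const reader = stream.getReader()
  const buffered: T[] = []
  let match: T | undefined

  // Read until the predicate matches (or the stream ends)
  while (true) {
    const { done, value } = await reader.read()
    if (done) break
    if (predicate(value)) {
      match = value
      break
    }
    buffered.push(value)
  }

  // Re-emit buffered chunks first, then pass through the rest
  const rest = new ReadableStream<T>({
    async pull(controller) {
      if (buffered.length > 0) {
        controller.enqueue(buffered.shift() as T)
        return
      }
      const { done, value } = await reader.read()
      if (done) {
        controller.close()
      } else {
        controller.enqueue(value)
      }
    },
  })

  return [match, rest]
}
```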
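Note on concurrency and cache refresh: each database dump is loaded through a `Semaphore(5)` from `async-mutex`, so at most five PGlite instances are materialized at once, and the `runExclusive` promises are collected and awaited together before the databases query is invalidated and the user is redirected home. A standalone sketch of the same bounded-concurrency pattern; the `loadDump` parameter is illustrative, not from this codebase.

```ts
import { Semaphore } from 'async-mutex'

// Bound concurrency: at most 5 loads run at a time, but every load is
// still awaited before caches are refreshed or the page navigates away.
const semaphore = new Semaphore(5)

async function loadAllDumps(
  dumps: File[],
  loadDump: (dump: File) => Promise<void> // illustrative loader
): Promise<void> {
  const tasks = dumps.map((dump) =>
    semaphore.runExclusive(async () => {
      await loadDump(dump)
    })
  )
  await Promise.all(tasks)
}
```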