diff --git a/pgloader.asd b/pgloader.asd
index ed256ae..2664232 100644
--- a/pgloader.asd
+++ b/pgloader.asd
@@ -154,7 +154,7 @@
                              :depends-on ("common")
                              :components ((:file "csv-guess")
-                                          (:file "csv-database")
+                                          ;; (:file "csv-database")
                                           (:file "csv")))
                    (:file "fixed"
diff --git a/src/package.lisp b/src/package.lisp
index 800069b..3cd6c6d 100644
--- a/src/package.lisp
+++ b/src/package.lisp
@@ -101,6 +101,7 @@
            #:column-extra

            #:index-name
+           #:index-oid
            #:index-schema
            #:index-table
            #:index-primary
@@ -110,8 +111,10 @@
            #:index-conname
            #:index-condef
            #:index-filter
+           #:index-fk-deps

            #:fkey-name
+           #:fkey-oid
            #:fkey-foreign-table
            #:fkey-foreign-columns
            #:fkey-table
@@ -357,13 +360,11 @@
            #:process-index-definitions

            ;; postgresql introspection queries
-           #:list-databases
-           #:list-tables
-           #:list-columns-query
-           #:list-columns
-           #:list-indexes
-           #:list-tables-cols
-           #:list-tables-and-fkeys
+           #:list-all-columns
+           #:list-all-indexes
+           #:list-all-fkeys
+           #:list-missing-fk-deps
+           #:list-schemas
            #:list-table-oids

            ;; postgresql identifiers
@@ -452,7 +453,6 @@
   (:use #:cl #:pgloader.params #:pgloader.utils #:pgloader.connection)
   (:import-from #:alexandria #:read-file-into-string)
   (:import-from #:pgloader.pgsql
-                #:list-columns
                 #:with-pgsql-transaction
                 #:pgsql-execute)
   (:export #:read-ini-file
@@ -557,7 +557,6 @@
                 #:with-pgsql-transaction
                 #:pgsql-execute
                 #:pgsql-execute-with-timing
-                #:list-tables-and-fkeys
                 #:list-table-oids
                 #:create-tables
                 #:create-views
@@ -578,7 +577,6 @@
            #:copy-from
            #:copy-database
            #:list-databases
-           #:list-tables
            #:export-database
            #:export-import-database))
@@ -602,8 +600,7 @@
            #:map-rows
            #:copy-to
            #:copy-from
-           #:copy-database
-           #:list-tables))
+           #:copy-database))

 (defpackage #:pgloader.mssql
   (:use #:cl
@@ -616,7 +613,6 @@
                 #:pgsql-execute
                 #:pgsql-execute-with-timing
                 #:pgsql-connect-and-execute-with-timing
-                #:list-tables-and-fkeys
                 #:list-table-oids
                 #:create-tables
                 #:create-views
@@ -631,8 +627,7 @@
            #:map-rows
            #:copy-to
            #:copy-from
-           #:copy-database
-           #:list-tables))
+           #:copy-database))

 (defpackage #:pgloader.mssql.index-filter
   (:use #:cl #:esrap #:pgloader.utils #:pgloader.mssql)
@@ -739,10 +734,7 @@
   (:import-from #:pgloader.pgsql
                 #:pgconn-table-name
                 #:pgsql-connection
-                #:copy-from-file
-                #:list-databases
-                #:list-tables
-                #:list-columns-query)
+                #:copy-from-file)
   (:import-from #:pgloader.pgsql
                 #:with-pgsql-connection
                 #:with-schema
@@ -764,9 +756,7 @@
            #:parse-commands
            #:with-database-uri
            #:slurp-file-into-string
-           #:copy-from-file
-           #:list-databases
-           #:list-tables))
+           #:copy-from-file))

 (in-package #:pgloader)
diff --git a/src/pgsql/merge-catalogs.lisp b/src/pgsql/merge-catalogs.lisp
index 59a4f64..54baa5d 100644
--- a/src/pgsql/merge-catalogs.lisp
+++ b/src/pgsql/merge-catalogs.lisp
@@ -20,8 +20,6 @@
    Also, we want to recheck the cast situation and the selected
    transformation functions of each column."
- (log-message :log "MERGE CATALOGS!") - (let (skip-list) (loop :for source-schema :in (catalog-schema-list source-catalog) :do (let* ((schema-name diff --git a/src/pgsql/pgsql-create-schema.lisp b/src/pgsql/pgsql-create-schema.lisp index a2b511c..cf2c1ec 100644 --- a/src/pgsql/pgsql-create-schema.lisp +++ b/src/pgsql/pgsql-create-schema.lisp @@ -25,6 +25,7 @@ ;; now create the types (loop :for sqltype :in sqltype-list :when include-drop + :count t :do (pgsql-execute (format-drop-sql sqltype :cascade t) :client-min-messages client-min-messages) :do (pgsql-execute @@ -111,7 +112,8 @@ :collect (format-create-sql (trigger-procedure trigger)) :collect (format-create-sql trigger))))) (loop :for sql :in sql-list - :do (pgsql-execute sql :client-min-messages client-min-messages)))) + :do (pgsql-execute sql :client-min-messages client-min-messages) + :count t))) ;;; @@ -121,14 +123,16 @@ (defun truncate-tables (catalog-or-table) "Truncate given TABLE-NAME in database DBNAME. A PostgreSQL connection must already be active when calling that function." - (let ((sql - (format nil "TRUNCATE ~{~a~^,~};" - (mapcar #'format-table-name - (etypecase catalog-or-table - (catalog (table-list catalog-or-table)) - (schema (table-list catalog-or-table)) - (table (list catalog-or-table))))))) - (pgsql-execute sql))) + (let* ((target-list (mapcar #'format-table-name + (etypecase catalog-or-table + (catalog (table-list catalog-or-table)) + (schema (table-list catalog-or-table)) + (table (list catalog-or-table))))) + (sql + (format nil "TRUNCATE ~{~a~^,~};" target-list))) + (pgsql-execute sql) + ;; return how many tables we just TRUNCATEd + (length target-list))) (defun disable-triggers (table-name) "Disable triggers on TABLE-NAME. Needs to be called with a PostgreSQL @@ -161,26 +165,36 @@ ;;; ;;; API for Foreign Keys ;;; -(defun drop-pgsql-fkeys (catalog) +(defun drop-pgsql-fkeys (catalog &key (cascade t)) "Drop all Foreign Key Definitions given, to prepare for a clean run." (loop :for table :in (table-list catalog) - :do - (loop :for fkey :in (table-fkey-list table) - :for sql := (format-drop-sql fkey :cascade t) - :when sql - :do (pgsql-execute sql)))) + :sum (loop :for fkey :in (table-fkey-list table) + :for sql := (format-drop-sql fkey :cascade cascade) + :do (pgsql-execute sql) + :count t) + ;; also DROP the foreign keys that depend on the indexes we want to DROP + :sum (loop :for index :in (table-index-list table) + :sum (loop :for fkey :in (index-fk-deps index) + :for sql := (format-drop-sql fkey :cascade t) + :do (progn + (log-message :debug "EXTRA FK DEPS!") + (pgsql-execute sql)) + :count t)))) -(defun create-pgsql-fkeys (catalog - &key - (section :post) - (label "Foreign Keys")) +(defun create-pgsql-fkeys (catalog) "Actually create the Foreign Key References that where declared in the MySQL database" (loop :for table :in (table-list catalog) :sum (loop :for fkey :in (table-fkey-list table) :for sql := (format-create-sql fkey) - :do (pgsql-execute-with-timing section label sql) - :count t))) + :do (pgsql-execute sql) + :count t) + :sum (loop :for index :in (table-index-list table) + :sum (loop :for fkey :in (index-fk-deps index) + :for sql := (format-create-sql fkey) + :do (log-message :debug "EXTRA FK DEPS!") + :do (pgsql-execute sql) + :count t)))) @@ -227,19 +241,26 @@ :for table-name := (format-table-name table) :for table-oid := (gethash table-name oid-map) :unless table-oid :do (error "OID not found for ~s." 
                                      table-name)
+         :count t
          :do (setf (table-oid table) table-oid))))

 ;;;
 ;;; Drop indexes before loading
 ;;;
-(defun drop-indexes (section table)
+(defun drop-indexes (table-or-catalog &key cascade)
   "Drop indexes in PGSQL-INDEX-LIST. A PostgreSQL connection must already be
    active when calling that function."
   (let ((sql-index-list
-         (loop :for index :in (table-index-list table)
-            :collect (format-drop-sql index :cascade t))))
-    (pgsql-execute-with-timing section "drop indexes" sql-index-list)))
+         (loop :for index
+            :in (typecase table-or-catalog
+                  (table   (table-index-list table-or-catalog))
+                  (catalog (loop :for table :in (table-list table-or-catalog)
+                              :append (table-index-list table))))
+            :collect (format-drop-sql index :cascade cascade))))
+    (pgsql-execute sql-index-list)
+    ;; return how many indexes we just DROPed
+    (length sql-index-list)))

 ;;;
 ;;; Higher level API to care about indexes
@@ -265,9 +286,7 @@
                     "Consider the option 'drop indexes'."))

            (indexes
-            ;; drop the indexes now
-            (with-stats-collection ("drop indexes" :section section)
-              (drop-indexes section table)))))))
+            (drop-indexes table))))))

 (defun create-indexes-again (target catalog
                              &key
@@ -380,22 +399,19 @@
 $$;
 " tables)))
        (map 'string #'code-char
            (loop :repeat 5 :collect (+ (random 26) (char-code #\A)))))))
-  (with-stats-collection ("Install comments"
-                          :use-result-as-rows t
-                          :section :post)
-    (loop :for table :in (table-list catalog)
-       :for sql := (when (table-comment table)
-                     (format nil "comment on table ~a is $~a$~a$~a$"
-                             (table-name table)
-                             quote (table-comment table) quote))
-       :count (when sql
-                (pgsql-execute-with-timing :post "Comments" sql))
+  (loop :for table :in (table-list catalog)
+     :for sql := (when (table-comment table)
+                   (format nil "comment on table ~a is $~a$~a$~a$"
+                           (table-name table)
+                           quote (table-comment table) quote))
+     :count (when sql
+              (pgsql-execute-with-timing :post "Comments" sql))

-       :sum (loop :for column :in (table-column-list table)
-               :for sql := (when (column-comment column)
-                             (format nil "comment on column ~a.~a is $~a$~a$~a$"
-                                     (table-name table)
-                                     (column-name column)
-                                     quote (column-comment column) quote))
-               :count (when sql
-                        (pgsql-execute-with-timing :post "Comments" sql)))))))
+     :sum (loop :for column :in (table-column-list table)
+             :for sql := (when (column-comment column)
+                           (format nil "comment on column ~a.~a is $~a$~a$~a$"
+                                   (table-name table)
+                                   (column-name column)
+                                   quote (column-comment column) quote))
+             :count (when sql
+                      (pgsql-execute-with-timing :post "Comments" sql))))))
diff --git a/src/pgsql/pgsql-schema.lisp b/src/pgsql/pgsql-schema.lisp
index 5e90522..f3ce3c6 100644
--- a/src/pgsql/pgsql-schema.lisp
+++ b/src/pgsql/pgsql-schema.lisp
@@ -12,7 +12,7 @@
          (including (cond ((and table (not including))
                            (make-including-expr-from-table table))

-                          ((and catalog (not including))
+                          ((and source-catalog (not including))
                            (make-including-expr-from-catalog source-catalog))

                           (t
@@ -31,10 +31,18 @@
                       :including including
                       :excluding excluding)

-    (log-message :debug "fetch-pgsql-catalog: ~d tables, ~d indexes, ~d fkeys"
+    ;; also fetch the fkeys that depend on UNIQUE indexes yet have been
+    ;; excluded from the target list: we still need to take care of them
+    ;; to be able to DROP then CREATE those indexes again
+    (list-missing-fk-deps catalog)
+
+    (log-message :debug "fetch-pgsql-catalog: ~d tables, ~d indexes, ~d+~d fkeys"
                  (count-tables catalog)
                  (count-indexes catalog)
-                 (count-fkeys catalog))
+                 (count-fkeys catalog)
+                 (loop :for table :in (table-list catalog)
+                    :sum (loop :for index :in
+                               (table-index-list table)
+                            :sum (length (index-fk-deps index)))))

     (when (and table (/= 1 (count-tables catalog)))
       (error "pgloader found ~d target tables for name ~s|:~{~% ~a~}"
@@ -185,10 +193,13 @@
 (defun list-all-indexes (catalog &key including excluding)
   "Get the list of PostgreSQL index definitions per table."
   (loop
-     :for (schema-name name table-schema table-name primary unique sql conname condef)
+     :for (schema-name name oid
+           table-schema table-name
+           primary unique sql conname condef)
      :in (pomo:query (format nil "
   select n.nspname,
          i.relname,
+         i.oid,
          rn.nspname,
          r.relname,
          indisprimary,
@@ -224,6 +235,7 @@ order by n.nspname, r.relname"
                     (table (find-table tschema table-name))
                     (pg-index
                      (make-index :name name
+                                 :oid oid
                                  :schema schema
                                  :table table
                                  :primary primary
@@ -238,12 +250,15 @@ order by n.nspname, r.relname"
 (defun list-all-fkeys (catalog &key including excluding)
   "Get the list of PostgreSQL foreign key definitions per table."
   (loop
-     :for (schema-name table-name fschema-name ftable-name conname cols fcols
-           updrule delrule mrule deferrable deferred condef)
+     :for (schema-name table-name fschema-name ftable-name
+           conoid conname condef
+           cols fcols
+           updrule delrule mrule deferrable deferred)
      :in (pomo:query (format nil "
   select n.nspname, c.relname, nf.nspname, cf.relname as frelname,
-         conname,
+         r.oid, conname,
+         pg_catalog.pg_get_constraintdef(r.oid, true) as condef,
          (select string_agg(attname, ',')
             from pg_attribute
            where attrelid = r.conrelid
              and array[attnum] <@ conkey
          ) as conkey,
          (select string_agg(attname, ',')
            from pg_attribute
           where attrelid = r.confrelid
             and array[attnum] <@ confkey
          ) as confkey,
          confupdtype, confdeltype, confmatchtype,
-         condeferrable, condeferred,
-         pg_catalog.pg_get_constraintdef(r.oid, true) as condef
+         condeferrable, condeferred
     from pg_catalog.pg_constraint r
          JOIN pg_class c on r.conrelid = c.oid
          JOIN pg_namespace n on c.relnamespace = n.oid
@@ -306,6 +320,7 @@ order by n.nspname, r.relname"
                     (ftable (find-table fschema ftable-name))
                     (fk
                      (make-fkey :name conname
+                                :oid conoid
                                 :condef condef
                                 :table table
                                 :columns (split-sequence:split-sequence #\, cols)
@@ -322,6 +337,66 @@ order by n.nspname, r.relname"
                          conname))))
     :finally (return catalog)))

+(defun list-missing-fk-deps (catalog)
+  "Add to the CATALOG the foreign keys we would not otherwise deal with
+   directly, but that depend on the primary keys (unique indexes) we are
+   going to DROP then CREATE again: we need to take care of those first."
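+  ;;
+  ;; First walk our catalog to collect the OIDs of the indexes we are going
+  ;; to DROP (kept in a hash table so that each dependency can be attached
+  ;; back to its index) and the OIDs of the fkeys we already know about,
+  ;; then ask pg_depend about any other constraint sitting on top of one of
+  ;; those indexes.
+  ;;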
+  (destructuring-bind (pkey-oid-hash-table pkey-oid-list fkey-oid-list)
+      (loop :with pk-hash := (make-hash-table)
+         :for table :in (table-list catalog)
+         :append (mapcar #'index-oid (table-index-list table)) :into pk
+         :append (mapcar #'fkey-oid (table-fkey-list table)) :into fk
+         :do (loop :for index :in (table-index-list table)
+                :do (setf (gethash (index-oid index) pk-hash) index))
+         :finally (return (list pk-hash pk fk)))
+
+    (when pkey-oid-list
+      (loop :for (schema-name table-name fschema-name ftable-name
+                  conoid conname condef index-oid)
+         :in (pomo:query (format nil "
+with pkeys(oid) as (
+    values~{(~d)~^,~}
+),
+     knownfkeys(oid) as (
+    values~{(~d)~^,~}
+),
+     pkdeps as (
+   select pkeys.oid, pg_depend.objid
+     from pg_depend
+          join pkeys on pg_depend.refobjid = pkeys.oid
+    where classid = 'pg_catalog.pg_constraint'::regclass
+      and refclassid = 'pg_catalog.pg_class'::regclass
+)
+  select n.nspname, c.relname, nf.nspname, cf.relname as frelname,
+         r.oid as conoid, conname,
+         pg_catalog.pg_get_constraintdef(r.oid, true) as condef,
+         pkdeps.oid as index_oid
+    from pg_catalog.pg_constraint r
+         JOIN pkdeps on r.oid = pkdeps.objid
+         JOIN pg_class c on r.conrelid = c.oid
+         JOIN pg_namespace n on c.relnamespace = n.oid
+         JOIN pg_class cf on r.confrelid = cf.oid
+         JOIN pg_namespace nf on cf.relnamespace = nf.oid
+   where NOT EXISTS (select 1 from knownfkeys where oid = r.oid)"
+                                 pkey-oid-list
+                                 (or fkey-oid-list (list -1))))
+         ;;
+         ;; We don't need to reference the main catalog entries for the
+         ;; tables here, as the only goal is to be sure to DROP then CREATE
+         ;; again the existing constraints that depend on the UNIQUE indexes
+         ;; we have to DROP then CREATE again.
+         ;;
+         :do (let* ((schema  (make-schema :name schema-name))
+                    (table   (make-table :name table-name :schema schema))
+                    (fschema (make-schema :name fschema-name))
+                    (ftable  (make-table :name ftable-name :schema fschema))
+                    (index   (gethash index-oid pkey-oid-hash-table)))
+               (push-to-end (make-fkey :name conname
+                                       :oid conoid
+                                       :condef condef
+                                       :table table
+                                       :foreign-table ftable)
+                            (index-fk-deps index)))))))

 ;;;
diff --git a/src/sources/common/db-methods.lisp b/src/sources/common/db-methods.lisp
index 8854aab..7ed9718 100644
--- a/src/sources/common/db-methods.lisp
+++ b/src/sources/common/db-methods.lisp
@@ -25,61 +25,80 @@
    That function mutates index definitions in ALL-INDEXES."
   (log-message :notice "~:[~;DROP then ~]CREATE TABLES" include-drop)
-  (with-stats-collection ("create, drop" :use-result-as-rows t :section :pre)
-    (with-pgsql-transaction (:pgconn (target-db copy))
-      (when create-schemas
-        (log-message :notice "Create schemas")
-        (create-schemas catalog :include-drop include-drop))
+  (with-pgsql-transaction (:pgconn (target-db copy))
+    (when create-schemas
+      (with-stats-collection ("Create Schemas" :section :pre
+                                               :use-result-as-read t
+                                               :use-result-as-rows t)
+        (create-schemas catalog :include-drop include-drop)))

-      (if create-tables
-          (progn
-            ;; create new SQL types (ENUMs, SETs) if needed and before we
-            ;; get to the table definitions that will use them
-            (log-message :notice "Create SQL types (enums, sets)")
-            (create-sqltypes catalog
-                             :include-drop include-drop
-                             :client-min-messages :error)
-
-            ;; now the tables
-            (log-message :notice "Create tables")
-            (create-tables catalog
+    (if create-tables
+        (progn
+          ;; create new SQL types (ENUMs, SETs) if needed and before we
+          ;; get to the table definitions that will use them
+          (with-stats-collection ("Create SQL Types" :section :pre
+                                                     :use-result-as-read t
+                                                     :use-result-as-rows t)
+            (create-sqltypes catalog
                              :include-drop include-drop
                              :client-min-messages :error))

-          (progn
-            ;; if we're not going to create the tables, now is the time to
-            ;; remove the constraints: indexes, primary keys, foreign keys
-            ;;
-            ;; to be able to do that properly, get the constraints from
-            ;; the pre-existing target database catalog
-            (let ((pgsql-catalog
-                   (fetch-pgsql-catalog (db-name (target-db copy))
-                                        :source-catalog catalog)))
-              (merge-catalogs catalog pgsql-catalog))
+          ;; now the tables
+          (with-stats-collection ("Create tables" :section :pre
+                                                  :use-result-as-read t
+                                                  :use-result-as-rows t)
+            (create-tables catalog
+                           :include-drop include-drop
+                           :client-min-messages :error)))

-            ;; now the foreign keys and only then the indexes, because a
-            ;; drop constraint on a primary key cascades to the drop of
-            ;; any foreign key that targets the primary key
-            (when foreign-keys
-              (drop-pgsql-fkeys catalog))
+        (progn
+          ;; if we're not going to create the tables, now is the time to
+          ;; remove the constraints: indexes, primary keys, foreign keys
+          ;;
+          ;; to be able to do that properly, get the constraints from
+          ;; the pre-existing target database catalog
+          (let ((pgsql-catalog
+                 (fetch-pgsql-catalog (db-name (target-db copy))
+                                      :source-catalog catalog)))
+            (merge-catalogs catalog pgsql-catalog))

-            (loop :for table :in (table-list catalog)
-               :do (drop-indexes :pre table))
+          ;; now the foreign keys and only then the indexes, because a
+          ;; drop constraint on a primary key cascades to the drop of
+          ;; any foreign key that targets the primary key
+          (when foreign-keys
+            (with-stats-collection ("Drop Foreign Keys" :section :pre
+                                                        :use-result-as-read t
+                                                        :use-result-as-rows t)
+              (drop-pgsql-fkeys catalog)))

-            (when truncate
-              (truncate-tables catalog)))
+          (with-stats-collection ("Drop Indexes" :section :pre
+                                                 :use-result-as-read t
+                                                 :use-result-as-rows t)
+            ;; we want to error out early in case we can't DROP the
+            ;; index, don't CASCADE
+            (drop-indexes catalog :cascade nil))

-      ;; Some database sources allow the same index name being used
-      ;; against several tables, so we add the PostgreSQL table OID in the
-      ;; index name, to differenciate. Set the table oids now.
-      (when (and create-tables set-table-oids)
-        (log-message :notice "Set table OIDs")
-        (set-table-oids catalog)
+          (when truncate
+            (with-stats-collection ("Truncate" :section :pre
+                                               :use-result-as-read t
+                                               :use-result-as-rows t)
+              (truncate-tables catalog)))))

-      ;; We might have to MATERIALIZE VIEWS
-      (when materialize-views
-        (log-message :notice "Create tables for matview support")
-        (create-views catalog :include-drop include-drop)))))
+    ;; Some database sources allow the same index name being used
+    ;; against several tables, so we add the PostgreSQL table OID in the
+    ;; index name, to differentiate. Set the table oids now.
+    (when (and create-tables set-table-oids)
+      (with-stats-collection ("Set Table OIDs" :section :pre
+                                               :use-result-as-read t
+                                               :use-result-as-rows t)
+        (set-table-oids catalog)))
+
+    ;; We might have to MATERIALIZE VIEWS
+    (when materialize-views
+      (with-stats-collection ("Create MatViews Tables" :section :pre
+                                                       :use-result-as-read t
+                                                       :use-result-as-rows t)
+        (create-views catalog :include-drop include-drop)))))

 (defmethod cleanup ((copy db-copy) (catalog catalog) &key materialize-views)
   "In case anything wrong happens at `prepare-pgsql-database' step, this
@@ -121,20 +140,28 @@
     ;; and indexes are imported before doing that.
     ;;
     (when foreign-keys
-      (create-pgsql-fkeys catalog))
+      (with-stats-collection ("Create Foreign Keys" :section :post
+                                                    :use-result-as-read t
+                                                    :use-result-as-rows t)
+        (create-pgsql-fkeys catalog)))

     ;;
     ;; Triggers and stored procedures -- includes special default values
     ;;
     (when create-triggers
-      (with-pgsql-transaction (:pgconn (target-db copy))
-        (create-triggers catalog))))
+      (with-stats-collection ("Create Triggers" :section :post
+                                                :use-result-as-read t
+                                                :use-result-as-rows t)
+        (with-pgsql-transaction (:pgconn (target-db copy))
+          (create-triggers catalog)))))

     ;;
     ;; And now, comments on tables and columns.
     ;;
-    (log-message :notice "Comments")
-    (comment-on-tables-and-columns catalog)))
+    (with-stats-collection ("Install Comments" :section :post
+                                               :use-result-as-read t
+                                               :use-result-as-rows t)
+      (comment-on-tables-and-columns catalog))))

 (defmethod instanciate-table-copy-object ((copy db-copy) (table table))
   "Create a new instance for copying TABLE data."
diff --git a/src/sources/mysql/mysql-schema.lisp b/src/sources/mysql/mysql-schema.lisp
index ab20959..3a2967d 100644
--- a/src/sources/mysql/mysql-schema.lisp
+++ b/src/sources/mysql/mysql-schema.lisp
@@ -310,7 +310,10 @@ FROM
                                     :delete-rule delete-rule)))
                (if (and name table ftable)
                    (add-fkey table fk)
-                   (log-message :error
+                   ;; chances are this comes from the EXCLUDING clause; we
+                   ;; make up for it when fetching the missing dependencies
+                   ;; of (unique) indexes
+                   (log-message :info
                                 "Incomplete Foreign Key definition: constraint ~s on table ~s referencing table ~s"
                                 name
                                 (when table (format-table-name table))
diff --git a/src/utils/catalog.lisp b/src/utils/catalog.lisp
index a6c1fe4..dc3f329 100644
--- a/src/utils/catalog.lisp
+++ b/src/utils/catalog.lisp
@@ -67,11 +67,21 @@
 ;;; Index and Foreign Keys
 ;;;
 (defstruct fkey
-  name table columns foreign-table foreign-columns condef
+  name oid table columns foreign-table foreign-columns condef
   update-rule delete-rule match-rule deferrable initially-deferred)

+;;;
+;;; An index, which might be the one underlying e.g. a UNIQUE constraint
+;;; conname, in which case we need to use condef to build the index again
+;;; from its definition, and drop the conname to drop the index.
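+;;; (A direct DROP INDEX is refused then: PostgreSQL raises "cannot drop
+;;; index ... because constraint ... requires it".)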
+;;;
+;;; Also, primary keys might be dependencies of foreign keys, including ones
+;;; that are out of scope for our load specifications and hence missing from
+;;; our catalog. We keep track of them in fk-deps so that we know to drop
+;;; them and then install them again at the proper times.
+;;;
 (defstruct index
-  name schema table primary unique columns sql conname condef filter)
+  name oid schema table primary unique columns sql conname condef filter fk-deps)

 ;;;
 ;;; Triggers and trigger procedures, no args support (yet?)
diff --git a/test/sakila-data.load b/test/sakila-data.load
new file mode 100644
index 0000000..5c8aafc
--- /dev/null
+++ b/test/sakila-data.load
@@ -0,0 +1,44 @@
+load database
+     from mysql://root@localhost/sakila
+     into postgresql:///sakila
+
+ -- WITH include drop, create tables, no truncate,
+ --      create indexes, reset sequences, foreign keys
+
+ -- WITH batch rows = 10000
+
+ WITH concurrency = 1, workers = 6,
+      max parallel create index = 4
+
+ -- the following options test loading into an already existing schema;
+ -- make sure the schema actually is ready by having done a first
+ -- migration without them:
+ , create no tables, include drop, truncate
+
+ SET maintenance_work_mem to '128MB',
+     work_mem to '12MB',
+     search_path to 'sakila, public, "$user"'
+
+ CAST type date drop not null drop default using zero-dates-to-null,
+      type datetime to timestamp drop default drop not null using zero-dates-to-null
+
+ -- type tinyint to boolean using tinyint-to-boolean,
+ -- type year to integer drop typemod -- now a default
+
+ -- MATERIALIZE VIEWS film_list, staff_list
+ MATERIALIZE ALL VIEWS
+
+ ALTER TABLE NAMES MATCHING ~/_list$/, 'sales_by_store', ~/sales_by/
+  SET SCHEMA 'mv'
+
+ ALTER TABLE NAMES MATCHING 'sales_by_store' RENAME TO 'sales_by_store_list'
+ ALTER TABLE NAMES MATCHING 'film' RENAME TO 'films'
+
+ -- INCLUDING ONLY TABLE NAMES MATCHING ~/film/, 'actor'
+ EXCLUDING TABLE NAMES MATCHING ~
+
+ BEFORE LOAD DO
+ $$ create schema if not exists sakila; $$,
+ $$ create schema if not exists mv; $$,
+ $$ alter database sakila set search_path to sakila, mv, public; $$;
+
diff --git a/test/sakila.load b/test/sakila.load
index ca11cbb..6f0f8e6 100644
--- a/test/sakila.load
+++ b/test/sakila.load
@@ -10,11 +10,6 @@ load database
 WITH concurrency = 1, workers = 6,
      max parallel create index = 4

-   -- uncomment the following line to test loading into an already
-   -- existing schema, and make sure the schema actually is ready by
-   -- having done a first migration without those options:
-   --, create no tables, include drop, truncate
-
 SET maintenance_work_mem to '128MB',
     work_mem to '12MB',
     search_path to 'sakila, public, "$user"'
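
The heart of the new list-missing-fk-deps machinery is a single pg_depend
lookup. Below is a minimal interactive sketch of that lookup, assuming an
established Postmodern connection and a known index OID; check-index-fk-deps
is a hypothetical helper named here for illustration only, but the SQL
mirrors the query added in src/pgsql/pgsql-schema.lisp above.

    (defun check-index-fk-deps (index-oid)
      "List oid, name and definition of every constraint that pg_depend
       records as depending on the index with oid INDEX-OID."
      (pomo:query
       (format nil "
    select r.oid, r.conname,
           pg_catalog.pg_get_constraintdef(r.oid, true) as condef
      from pg_catalog.pg_depend d
           join pg_catalog.pg_constraint r on r.oid = d.objid
     where d.classid    = 'pg_catalog.pg_constraint'::regclass
       and d.refclassid = 'pg_catalog.pg_class'::regclass
       and d.refobjid   = ~d" index-oid)))

Dropping each constraint listed there before dropping the index, and
installing it again afterwards, is what the drop-pgsql-fkeys and
create-pgsql-fkeys changes automate through the new fk-deps slot.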