Generic Function API for Materialized Views support. (#970)

Implement a generic-function API to discover the source database schema and
populate pgloader's internal version of the catalogs. Cut three copies of
roughly the same code path down to a single shared one, thanks to applying
some amount of OOP to the code.
This commit is contained in:
Dimitri Fontaine 2019-05-20 19:28:38 +02:00 committed by GitHub
parent ee75bc4765
commit b8da7dd2e9
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
32 changed files with 784 additions and 858 deletions

View File

@ -119,6 +119,7 @@
((:file "api")
(:file "methods")
(:file "md-methods")
(:file "matviews")
(:file "casting-rules")
(:file "files-and-pathnames")
(:file "project-fields")))
@ -141,16 +142,21 @@
:depends-on ("common" "csv"))
(:module "db3"
:serial t
:depends-on ("common" "csv")
:components
((:file "db3-schema")
(:file "db3-cast-rules")
(:file "db3" :depends-on ("db3-schema"))))
((:file "db3-cast-rules")
(:file "db3-connection")
(:file "db3-schema")
(:file "db3")))
(:module "ixf"
:serial t
:depends-on ("common")
:components
((:file "ixf-schema")
((:file "ixf-cast-rules")
(:file "ixf-connection")
(:file "ixf-schema")
(:file "ixf" :depends-on ("ixf-schema"))))
;(:file "syslog") ; experimental...
@ -160,6 +166,7 @@
:depends-on ("common")
:components
((:file "sqlite-cast-rules")
(:file "sqlite-connection")
(:file "sqlite-schema")
(:file "sqlite")))
@ -168,6 +175,7 @@
:depends-on ("common")
:components
((:file "mssql-cast-rules")
(:file "mssql-connection")
(:file "mssql-schema")
(:file "mssql")
(:file "mssql-index-filters")))
@ -185,7 +193,6 @@
:serial t
:depends-on ("common")
:components ((:file "pgsql-cast-rules")
(:file "pgsql-schema")
(:file "pgsql")))))
;; package pgloader.copy

View File

@ -281,7 +281,6 @@
(reindex nil)
(after-schema nil)
distribute
only-tables
including
excluding
set-table-oids
@ -334,7 +333,6 @@
:materialize-views materialize-views
:create-indexes create-indexes
:foreign-keys foreign-keys
:only-tables only-tables
:including including
:excluding excluding)
(mssql::mssql-error (e)

View File

@ -52,6 +52,7 @@
#:extension
#:sqltype
#:table
#:matview
#:column
#:index
#:fkey
@ -66,7 +67,7 @@
#:make-schema
#:make-table
#:create-table
#:make-view
#:make-matview
#:make-sqltype
#:make-column
#:make-index
@ -102,6 +103,11 @@
#:table-trigger-list
#:table-citus-rule
#:matview-name
#:matview-source-name
#:matview-schema
#:matview-definition
#:extension-name
#:extension-schema
@ -560,6 +566,17 @@
#:instanciate-table-copy-object
#:concurrency-support
#:filter-list-to-where-clause
#:fetch-columns
#:fetch-indexes
#:fetch-foreign-keys
#:fetch-comments
#:get-column-sql-expression
#:get-column-list
#:format-matview-name
#:create-matviews
#:drop-matviews
;; database cast machinery
#:*default-cast-rules*
#:*cast-rules*

View File

@ -9,14 +9,23 @@
(defrule view-name (or qualified-table-name maybe-quoted-namestring)
  (:lambda (vn)
    (etypecase vn
      ;; Qualified name: VN is a (SCHEMA-NAME . VIEW-NAME) cons cell.  The
      ;; schema part is the CAR and the view name proper is the CDR -- the
      ;; previous code had those two swapped, naming the matview after its
      ;; schema.  This also matches format-matview-name, which reads the
      ;; view name from (cdr (matview-source-name mv)).
      (cons   (let* ((schema-name (apply-identifier-case (car vn)))
                     (schema      (make-schema :source-name (car vn)
                                               :name schema-name)))
                (make-matview :source-name vn
                              :name (apply-identifier-case (cdr vn))
                              :schema schema)))
      ;; Bare name: no schema; normalize source-name to the same
      ;; (SCHEMA . NAME) cons shape with a nil schema part.
      (string (make-matview :source-name (cons nil vn)
                            :schema nil
                            :name (apply-identifier-case vn))))))
(defrule view-sql (and kw-as dollar-quoted)
(:destructure (as sql) (declare (ignore as)) sql))
(defrule view-definition (and view-name (? view-sql))
(:destructure (name sql) (cons name sql)))
(:destructure (matview sql)
(setf (matview-definition matview) sql)
matview))
(defrule another-view-definition (and comma-separator view-definition)
(:lambda (source)

View File

@ -114,9 +114,8 @@
"Get PostgreSQL schema name where to locate TABLE-NAME by following the
current search_path rules. A PostgreSQL connection must be opened."
(make-schema :name
(pomo:query (format nil
(sql "/pgsql/query-table-schema.sql")
(table-name table))
(pomo:query (sql "/pgsql/query-table-schema.sql"
(table-name table))
:single)))
(defun make-including-expr-from-view-names (view-names)
@ -188,21 +187,18 @@
"Get the list of PostgreSQL column names per table."
(loop :for (schema-name table-name table-oid
name type typmod notnull default extra)
:in
(query nil
(format nil
(sql "/pgsql/list-all-columns.sql")
table-type-name
including ; do we print the clause?
(filter-list-to-where-clause including
nil
"n.nspname"
"c.relname")
excluding ; do we print the clause?
(filter-list-to-where-clause excluding
nil
"n.nspname"
"c.relname")))
:in (query nil (sql "/pgsql/list-all-columns.sql"
table-type-name
including ; do we print the clause?
(filter-list-to-where-clause including
nil
"n.nspname"
"c.relname")
excluding ; do we print the clause?
(filter-list-to-where-clause excluding
nil
"n.nspname"
"c.relname")))
:do
(let* ((schema (maybe-add-schema catalog schema-name))
(table (maybe-add-table schema table-name :oid table-oid))
@ -223,21 +219,19 @@
:for (schema-name name oid
table-schema table-name
primary unique cols sql conname condef)
:in (query nil
(format nil
(sql (sql-url-for-variant "pgsql"
"list-all-indexes.sql"
pgversion))
including ; do we print the clause?
(filter-list-to-where-clause including
nil
"rn.nspname"
"r.relname")
excluding ; do we print the clause?
(filter-list-to-where-clause excluding
nil
"rn.nspname"
"r.relname")))
:in (query nil (sql (sql-url-for-variant "pgsql"
"list-all-indexes.sql"
pgversion)
including ; do we print the clause?
(filter-list-to-where-clause including
nil
"rn.nspname"
"r.relname")
excluding ; do we print the clause?
(filter-list-to-where-clause excluding
nil
"rn.nspname"
"r.relname")))
:do (let* ((schema (find-schema catalog schema-name))
(tschema (find-schema catalog table-schema))
(table (find-table tschema table-name))
@ -265,29 +259,27 @@
conoid pkeyoid conname condef
cols fcols
updrule delrule mrule deferrable deferred)
:in (query nil
(format nil
(sql "/pgsql/list-all-fkeys.sql")
including ; do we print the clause (table)?
(filter-list-to-where-clause including
nil
"n.nspname"
"c.relname")
excluding ; do we print the clause (table)?
(filter-list-to-where-clause excluding
nil
"n.nspname"
"c.relname")
including ; do we print the clause (ftable)?
(filter-list-to-where-clause including
nil
"nf.nspname"
"cf.relname")
excluding ; do we print the clause (ftable)?
(filter-list-to-where-clause excluding
nil
"nf.nspname"
"cf.relname")))
:in (query nil (sql "/pgsql/list-all-fkeys.sql"
including ; do we print the clause (table)?
(filter-list-to-where-clause including
nil
"n.nspname"
"c.relname")
excluding ; do we print the clause (table)?
(filter-list-to-where-clause excluding
nil
"n.nspname"
"c.relname")
including ; do we print the clause (ftable)?
(filter-list-to-where-clause including
nil
"nf.nspname"
"cf.relname")
excluding ; do we print the clause (ftable)?
(filter-list-to-where-clause excluding
nil
"nf.nspname"
"cf.relname")))
:do (flet ((pg-fk-rule-to-action (rule)
(case rule
(#\a "NO ACTION")
@ -350,11 +342,9 @@
(when pkey-oid-list
(loop :for (schema-name table-name fschema-name ftable-name
conoid conname condef index-oid)
:in (query nil
(format nil
(sql "/pgsql/list-missing-fk-deps.sql")
pkey-oid-list
(or fkey-oid-list (list -1))))
:in (query nil (sql "/pgsql/list-missing-fk-deps.sql"
pkey-oid-list
(or fkey-oid-list (list -1))))
;;
;; We don't need to reference the main catalog entries for the tables
;; here, as the only goal is to be sure to DROP then CREATE again the
@ -403,9 +393,8 @@
:in (ecase variant
(:pgdg
;; use the SELECT ... FROM (VALUES ...) variant
(query nil (format nil
(sql "/pgsql/list-table-oids.sql")
(mapcar #'format-table-name table-list))))
(query nil (sql "/pgsql/list-table-oids.sql"
(mapcar #'format-table-name table-list))))
(:redshift
;; use the TEMP TABLE variant in Redshift, which doesn't
;; have proper support for VALUES (landed in PostgreSQL 8.2)
@ -433,19 +422,17 @@
(defun list-all-sqltypes (catalog &key including excluding)
"Set the catalog's schema extension list and sqltype list"
(loop :for (schema-name extension-name type-name enum-values)
:in (query nil
(format nil
(sql "/pgsql/list-all-sqltypes.sql")
including ; do we print the clause?
(filter-list-to-where-clause including
nil
"n.nspname"
"c.relname")
excluding ; do we print the clause?
(filter-list-to-where-clause excluding
nil
"n.nspname"
"c.relname")))
:in (query nil (sql "/pgsql/list-all-sqltypes.sql"
including ; do we print the clause?
(filter-list-to-where-clause including
nil
"n.nspname"
"c.relname")
excluding ; do we print the clause?
(filter-list-to-where-clause excluding
nil
"n.nspname"
"c.relname")))
:do
(let* ((schema (maybe-add-schema catalog schema-name))
(sqltype

View File

@ -116,7 +116,6 @@
(defgeneric fetch-metadata (db-copy catalog
&key
materialize-views
only-tables
create-indexes
foreign-keys
including
@ -127,3 +126,60 @@
(defgeneric instanciate-table-copy-object (db-copy table)
(:documentation "Create a new instance for copying TABLE data."))
;;;
;;; Database source schema introspection API
;;;
;;; The methods for those function query the source database catalogs and
;;; populate pgloader's internal representation of its catalog.
;;;
;;; On some source systems (such as MySQL) a single schema can be addressed
;;; at a time, and the catalog object might be a schema directly.
;;;
(defgeneric filter-list-to-where-clause (db-copy filter-list
                                         &key
                                           not
                                           schema-col
                                           table-col)
  (:documentation "Transform FILTER-LIST, an INCLUDING or EXCLUDING list of
   schema and table names, into a WHERE clause fragment written in DB-COPY's
   source SQL dialect.  NOT negates the match; SCHEMA-COL and TABLE-COL name
   the catalog columns to compare against."))
(defgeneric fetch-columns (catalog db-copy &key table-type including excluding)
  (:documentation
   "Get the list of schemas, tables and columns from the source database and
    add them to CATALOG, honoring the INCLUDING and EXCLUDING filters."))

(defgeneric fetch-indexes (catalog db-copy &key including excluding)
  (:documentation "Get the list of indexes from the source database and add
   them to CATALOG."))

(defgeneric fetch-foreign-keys (catalog db-copy &key including excluding)
  (:documentation "Get the list of foreign keys from the source database and
   add them to CATALOG."))

(defgeneric fetch-comments (catalog db-copy &key including excluding)
  (:documentation "Get the list of comments from the source database and add
   them to CATALOG."))
;;;
;;; We're going to generate SELECT * FROM table; queries to fetch the data
;;; and COPY it to the PostgreSQL target database. In reality we don't use
;;; SELECT *, and in many interesting cases we have to generate some SQL
;;; expression to fetch the source values in a format we can then either
;;; process in pgloader or just send-over as-is to Postgres.
;;;
(defgeneric get-column-sql-expression (db-copy name type)
(:documentation
"Generate SQL expression for the SELECT clause for given column."))
(defgeneric get-column-list (copy-db)
(:documentation
"Generate the SQL projection column list for the SELECT clause."))
;;;
;;; Materialized Views support
;;;
(defgeneric format-matview-name (matview copy)
  (:documentation "Format the materialized view name in COPY's source SQL
   dialect, schema-qualified when the view has a schema."))

(defgeneric create-matviews (matview-list db-copy)
  (:documentation "Create the Materialized Views from MATVIEW-LIST that come
   with a SQL definition in the load command."))

(defgeneric drop-matviews (matview-list db-copy)
  (:documentation "Drop the Materialized Views previously created for the
   migration."))

View File

@ -0,0 +1,61 @@
;;;
;;; Materialized Views support is quite similar from one DB engine to another.
;;;
;;; It happens that the view definition is given by the user, so pgloader is
;;; not concerned with that part of the SQL compatibility. The common
;;; implementation uses the following two SQL commands:
;;;
;;; CREATE VIEW <schema>.<name> AS <sql>
;;; DROP VIEW <schema>.<name>, <schema>.<name>, ...;
;;;
(in-package :pgloader.sources)
(defmethod format-matview-name (matview (copy db-copy))
  "Format the materialized view name, prefixed with its schema's source
   name when the view is schema-qualified."
  (declare (ignore copy))
  ;; BUG FIX: the schema's name was read through an unbound variable SCHEMA;
  ;; bind it explicitly from the matview before using it.
  (let* ((schema      (matview-schema matview))
         (schema-name (when schema
                        (schema-source-name schema)))
         ;; source-name is a (SCHEMA . VIEW-NAME) cons cell
         (view-name   (cdr (matview-source-name matview))))
    ;; ~@[~s.~] prints the "schema." prefix only when SCHEMA-NAME is non-nil
    (format nil "~@[~s.~]~a" schema-name view-name)))
(defmethod create-matviews (matview-list copy)
  "Create Materialized Views as per the pgloader command.  Only the views
   that come with a SQL definition in the command are created; views without
   one are assumed to already exist in the source database."
  ;; :all means the user asked for every existing view: nothing to create.
  (unless (eq :all matview-list)
    (let ((views (remove-if #'null matview-list :key #'matview-definition)))
      (when views
        (loop :for mv :in views
           :for sql := (format nil
                               "CREATE VIEW ~a AS ~a"
                               (format-matview-name mv copy)
                               (matview-definition mv))
           :do (progn
                 (log-message :info "SOURCE: ~a;" sql)
                 ;; in the pre-built image there is no interactive debugger,
                 ;; so just run the query without offering restarts
                 #+pgloader-image
                 (query (source-db copy) sql)
                 ;; interactive builds offer recovery restarts when the
                 ;; CREATE VIEW fails (typically: the view already exists)
                 #-pgloader-image
                 (restart-case
                     (query (source-db copy) sql)
                   (use-existing-view ()
                     :report "Use the already existing view and continue"
                     nil)
                   (replace-view ()
                     :report
                     "Replace the view with the one from pgloader's command"
                     (let ((drop-sql (format nil "DROP VIEW ~a"
                                             (format-matview-name mv copy))))
                       (log-message :info "SOURCE: ~a;" drop-sql)
                       ;; drop the materialized view, then create it again
                       (query (source-db copy) drop-sql)
                       (query (source-db copy) sql))))))))))
(defmethod drop-matviews (matview-list copy)
  "Drop the Materialized Views that pgloader itself created for this
   migration; when MATVIEW-LIST is :all the views pre-existed and are
   left alone."
  (unless (eq :all matview-list)
    ;; only the views that carried a SQL definition were created by us
    (let ((created-views (remove-if #'null matview-list
                                    :key #'matview-definition)))
      (when created-views
        (let* ((view-names (loop :for mv :in created-views
                              :collect (format-matview-name mv copy)))
               (drop-sql   (format nil "DROP VIEW ~{~a~^, ~}" view-names)))
          (log-message :info "SOURCE: ~a;" drop-sql)
          (query (source-db copy) drop-sql))))))

View File

@ -0,0 +1,35 @@
;;;
;;; Tools to handle the DBF file format
;;;
(in-package :pgloader.source.db3)
;;; A dbf-connection is a file-based connection (fd-connection) that also
;;; keeps the parsed db3 object built from the file header.
(defclass dbf-connection (fd-connection)
  ;; NOTE(review): the initarg is the plain symbol DB3, not the keyword
  ;; :DB3 -- looks like a typo, but confirm no caller relies on it before
  ;; changing.
  ((db3 :initarg db3 :accessor fd-db3))
  (:documentation "pgloader connection parameters for DBF files."))

(defmethod initialize-instance :after ((dbfconn dbf-connection) &key)
  "Assign the type slot to dbf."
  (setf (slot-value dbfconn 'type) "dbf"))
(defmethod open-connection ((dbfconn dbf-connection) &key)
  "Open the DBF file as a binary input stream kept in the conn-handle slot,
   then parse its header through the db3 library and keep the resulting db3
   object in the fd-db3 slot.  Returns the connection."
  (setf (conn-handle dbfconn)
        (open (fd-path dbfconn)
              :direction :input
              :element-type '(unsigned-byte 8)))
  ;; parse the DBF header right away so the schema is available as soon as
  ;; the connection is opened
  (let ((db3 (make-instance 'db3:db3 :filename (fd-path dbfconn))))
    (db3:load-header db3 (conn-handle dbfconn))
    (setf (fd-db3 dbfconn) db3))
  dbfconn)
(defmethod close-connection ((dbfconn dbf-connection))
  "Close the db3 memo file and the main binary stream, then reset the
   connection slots.  Returns the connection."
  (db3:close-memo (fd-db3 dbfconn))
  (close (conn-handle dbfconn))
  (setf (conn-handle dbfconn) nil
        (fd-db3 dbfconn) nil)
  dbfconn)
(defmethod clone-connection ((c dbf-connection))
  "Copy connection C, carrying the db3 object over to the copy."
  (let ((new-connection (change-class (call-next-method c) 'dbf-connection)))
    (setf (fd-db3 new-connection) (fd-db3 c))
    new-connection))

View File

@ -4,40 +4,24 @@
(in-package :pgloader.source.db3)
(defclass dbf-connection (fd-connection)
((db3 :initarg db3 :accessor fd-db3))
(:documentation "pgloader connection parameters for DBF files."))
(defclass copy-db3 (db-copy)
((encoding :accessor encoding ; file encoding
:initarg :encoding))
(:documentation "pgloader DBF Data Source"))
(defmethod initialize-instance :after ((dbfconn dbf-connection) &key)
"Assign the type slot to dbf."
(setf (slot-value dbfconn 'type) "dbf"))
(defmethod initialize-instance :after ((db3 copy-db3) &key)
"Add a default value for transforms in case it's not been provided."
(setf (slot-value db3 'source)
(let ((table-name (pathname-name (fd-path (source-db db3)))))
(make-table :source-name table-name
:name (apply-identifier-case table-name)))))
(defmethod open-connection ((dbfconn dbf-connection) &key)
(setf (conn-handle dbfconn)
(open (fd-path dbfconn)
:direction :input
:element-type '(unsigned-byte 8)))
(let ((db3 (make-instance 'db3:db3 :filename (fd-path dbfconn))))
(db3:load-header db3 (conn-handle dbfconn))
(setf (fd-db3 dbfconn) db3))
dbfconn)
(defmethod close-connection ((dbfconn dbf-connection))
(db3:close-memo (fd-db3 dbfconn))
(close (conn-handle dbfconn))
(setf (conn-handle dbfconn) nil
(fd-db3 dbfconn) nil)
dbfconn)
(defmethod clone-connection ((c dbf-connection))
(let ((clone (change-class (call-next-method c) 'dbf-connection)))
(setf (fd-db3 clone) (fd-db3 c))
clone))
(defun list-all-columns (db3 table)
(defmethod fetch-columns ((table table) (db3 copy-db3)
&key &allow-other-keys
&aux (dbfconn (fd-db3 (source-db db3))))
"Return the list of columns for the given DB3-FILE-NAME."
(loop
:for field :in (db3::fields db3)
:for field :in (db3::fields dbfconn)
:do (add-field table (make-db3-coldef (db3::field-name field)
(string (db3::field-type field))
(db3::field-length field)))))

View File

@ -4,21 +4,6 @@
(in-package :pgloader.source.db3)
;;;
;;; Integration with pgloader
;;;
(defclass copy-db3 (db-copy)
((encoding :accessor encoding ; file encoding
:initarg :encoding))
(:documentation "pgloader DBF Data Source"))
(defmethod initialize-instance :after ((db3 copy-db3) &key)
"Add a default value for transforms in case it's not been provided."
(setf (slot-value db3 'source)
(let ((table-name (pathname-name (fd-path (source-db db3)))))
(make-table :source-name table-name
:name (apply-identifier-case table-name)))))
(defmethod map-rows ((copy-db3 copy-db3) &key process-row-fn)
"Extract DB3 data and call PROCESS-ROW-FN function with a single
argument (a list of column values) for each row."
@ -67,7 +52,7 @@
(push-to-end table (schema-table-list schema))
(with-connection (conn (source-db db3))
(list-all-columns (fd-db3 conn) table))
(fetch-columns table db3))
catalog))

View File

@ -0,0 +1,72 @@
;;;
;;; Tools to handle IBM PC version of IXF file format
;;;
(in-package :pgloader.source.ixf)
;;; Map IXF column type codes (numeric constants exported by the ixf
;;; library, spliced in at read time with #.) to PostgreSQL type names.
(defvar *ixf-pgsql-type-mapping*
  '((#. ixf:+smallint+  . "smallint")
    (#. ixf:+integer+   . "integer")
    (#. ixf:+bigint+    . "bigint")
    (#. ixf:+decimal+   . "numeric")
    (#. ixf:+float+     . "double precision")
    (#. ixf:+timestamp+ . "timestamptz")
    (#. ixf:+date+      . "date")
    (#. ixf:+time+      . "time")
    (#. ixf:+char+      . "text")
    (#. ixf:+varchar+   . "text")
    (#. ixf:+blob-location-spec+   . "bytea")
    (#. ixf:+dbblob-location-spec+ . "bytea")
    (#. ixf:+dbclob-location-spec+ . "text")))
(defun cast-ixf-type (ixf-type)
  "Return the PostgreSQL type name for a given IXF type code, signaling an
   error when the code has no known mapping."
  (let ((mapping (assoc ixf-type *ixf-pgsql-type-mapping*)))
    (unless mapping
      (error "IXF Type mapping unknown for: ~d" ixf-type))
    (cdr mapping)))
(defun transform-function (field)
  "Return the transformation function needed to cast FIELD's data, an
   ixf-column, to the text representation sent to PostgreSQL, or nil when
   the source value can be used as-is."
  (let ((coltype (cast-ixf-type (ixf:ixf-column-type field))))
    ;;
    ;; The IXF driver we use maps the data type and gets
    ;; back proper CL typed objects, where we only want to
    ;; deal with text.
    ;;
    (cond ((or (string-equal "float" coltype)
               (string-equal "real" coltype)
               (string-equal "double precision" coltype)
               ;; "numeric" possibly with a (p,s) suffix: compare the
               ;; first 7 characters only
               (and (<= 7 (length coltype))
                    (string-equal "numeric" coltype :end2 7)))
           ;; float printing: avoid the CL exponent-marker syntax
           #'pgloader.transforms::float-to-string)

          ((string-equal "text" coltype)
           ;; already text: no transformation needed
           nil)

          ((string-equal "bytea" coltype)
           #'pgloader.transforms::byte-vector-to-bytea)

          (t
           ;; default: print any other lisp value to a string, keeping
           ;; nil (SQL NULL) as-is
           (lambda (c)
             (when c
               (princ-to-string c)))))))
(defmethod cast ((col ixf:ixf-column) &key &allow-other-keys)
  "Return the PostgreSQL column definition from given IXF column definition."
  (make-column :name (apply-identifier-case (ixf:ixf-column-name col))
               :type-name (cast-ixf-type (ixf:ixf-column-type col))
               :nullable (ixf:ixf-column-nullable col)
               :default (when (ixf:ixf-column-has-default col)
                          (format-default-value
                           (ixf:ixf-column-default col)))
               :transform (transform-function col)
               ;; keep the column description as a comment, but only when
               ;; it is non-empty
               :comment (let ((comment (ixf:ixf-column-desc col)))
                          (unless (or (null comment)
                                      (string= comment ""))
                            comment))))

View File

@ -0,0 +1,28 @@
;;;
;;; Tools to handle IBM PC version of IXF file format
;;;
(in-package :pgloader.source.ixf)
(defclass ixf-connection (fd-connection) ()
(:documentation "pgloader connection parameters for IXF files."))
(defmethod initialize-instance :after ((ixfconn ixf-connection) &key)
  "Assign the type slot to ixf."        ; docstring said "dbf": copy/paste leftover
  (setf (slot-value ixfconn 'type) "ixf"))
(defmethod open-connection ((ixfconn ixf-connection) &key)
  "Open the IXF file as a binary input stream and return the connection."
  (setf (conn-handle ixfconn)
        (open (fd-path ixfconn)
              :direction :input
              :element-type '(unsigned-byte 8)))
  ixfconn)
(defmethod close-connection ((ixfconn ixf-connection))
  "Close the underlying stream, reset the handle, return the connection."
  (close (conn-handle ixfconn))
  (setf (conn-handle ixfconn) nil)
  ixfconn)

(defmethod clone-connection ((c ixf-connection))
  "Copy connection C; ixf connections carry no extra state of their own."
  (change-class (call-next-method c) 'ixf-connection))

View File

@ -5,96 +5,26 @@
(in-package :pgloader.source.ixf)
(defclass ixf-connection (fd-connection) ()
(:documentation "pgloader connection parameters for IXF files."))
(defclass copy-ixf (db-copy)
((timezone :accessor timezone ; timezone
:initarg :timezone
:initform local-time:+utc-zone+))
(:documentation "pgloader IXF Data Source"))
(defmethod initialize-instance :after ((ixfconn ixf-connection) &key)
"Assign the type slot to dbf."
(setf (slot-value ixfconn 'type) "ixf"))
(defmethod initialize-instance :after ((source copy-ixf) &key)
"Add a default value for transforms in case it's not been provided."
(setf (slot-value source 'source)
(let ((table-name (pathname-name (fd-path (source-db source)))))
(make-table :source-name table-name
:name (apply-identifier-case table-name))))
(defmethod open-connection ((ixfconn ixf-connection) &key)
(setf (conn-handle ixfconn)
(open (fd-path ixfconn)
:direction :input
:element-type '(unsigned-byte 8)))
ixfconn)
;; force default timezone when nil
(when (null (timezone source))
(setf (timezone source) local-time:+utc-zone+)))
(defmethod close-connection ((ixfconn ixf-connection))
(close (conn-handle ixfconn))
(setf (conn-handle ixfconn) nil)
ixfconn)
(defmethod clone-connection ((c ixf-connection))
(change-class (call-next-method c) 'ixf-connection))
(defvar *ixf-pgsql-type-mapping*
'((#. ixf:+smallint+ . "smallint")
(#. ixf:+integer+ . "integer")
(#. ixf:+bigint+ . "bigint")
(#. ixf:+decimal+ . "numeric")
(#. ixf:+float+ . "double precision")
(#. ixf:+timestamp+ . "timestamptz")
(#. ixf:+date+ . "date")
(#. ixf:+time+ . "time")
(#. ixf:+char+ . "text")
(#. ixf:+varchar+ . "text")
(#. ixf:+blob-location-spec+ . "bytea")
(#. ixf:+dbblob-location-spec+ . "bytea")
(#. ixf:+dbclob-location-spec+ . "text")))
(defun cast-ixf-type (ixf-type)
"Return the PostgreSQL type name for a given IXF type name."
(let ((pgtype
(cdr (assoc ixf-type *ixf-pgsql-type-mapping*))))
(unless pgtype
(error "IXF Type mapping unknown for: ~d" ixf-type))
pgtype))
(defun transform-function (field)
"Return the transformation functions needed to cast from ixf-column data."
(let ((coltype (cast-ixf-type (ixf:ixf-column-type field))))
;;
;; The IXF driver we use maps the data type and gets
;; back proper CL typed objects, where we only want to
;; deal with text.
;;
(cond ((or (string-equal "float" coltype)
(string-equal "real" coltype)
(string-equal "double precision" coltype)
(and (<= 7 (length coltype))
(string-equal "numeric" coltype :end2 7)))
#'pgloader.transforms::float-to-string)
((string-equal "text" coltype)
nil)
((string-equal "bytea" coltype)
#'pgloader.transforms::byte-vector-to-bytea)
(t
(lambda (c)
(when c
(princ-to-string c)))))))
(defmethod cast ((col ixf:ixf-column) &key &allow-other-keys)
"Return the PostgreSQL type definition from given IXF column definition."
(make-column :name (apply-identifier-case (ixf:ixf-column-name col))
:type-name (cast-ixf-type (ixf:ixf-column-type col))
:nullable (ixf:ixf-column-nullable col)
:default (when (ixf:ixf-column-has-default col)
(format-default-value
(ixf:ixf-column-default col)))
:transform (transform-function col)
:comment (let ((comment (ixf:ixf-column-desc col)))
(unless (or (null comment)
(string= comment ""))
comment))))
(defun list-all-columns (ixf-stream table)
(defmethod fetch-columns ((table table) (ixf copy-ixf)
&key &allow-other-keys
&aux (ixf-stream (conn-handle (source-db ixf))))
"Return the list of columns for the given IXF-FILE-NAME."
(ixf:with-ixf-stream (ixf ixf-stream)
(loop :for field :across (ixf:ixf-table-columns (ixf:ixf-file-table ixf))

View File

@ -5,26 +5,6 @@
(in-package :pgloader.source.ixf)
;;;
;;; Integration with pgloader
;;;
(defclass copy-ixf (db-copy)
((timezone :accessor timezone ; timezone
:initarg :timezone
:initform local-time:+utc-zone+))
(:documentation "pgloader IXF Data Source"))
(defmethod initialize-instance :after ((source copy-ixf) &key)
"Add a default value for transforms in case it's not been provided."
(setf (slot-value source 'source)
(let ((table-name (pathname-name (fd-path (source-db source)))))
(make-table :source-name table-name
:name (apply-identifier-case table-name))))
;; force default timezone when nil
(when (null (timezone source))
(setf (timezone source) local-time:+utc-zone+)))
(defmethod map-rows ((copy-ixf copy-ixf) &key process-row-fn)
"Extract IXF data and call PROCESS-ROW-FN function with a single
argument (a list of column values) for each row."
@ -58,7 +38,7 @@
(push-to-end table (schema-table-list schema))
(with-connection (conn (source-db ixf))
(list-all-columns (conn-handle conn) table))
(fetch-columns table ixf))
catalog))

View File

@ -0,0 +1,48 @@
;;;
;;; Tools to query the MS SQL Schema to reproduce in PostgreSQL
;;;
(in-package :pgloader.source.mssql)
(defvar *mssql-db* nil
"The MS SQL database connection handler.")
;;;
;;; General utility to manage MS SQL connection
;;;
;;; An mssql-connection is a server-based connection (db-connection) with
;;; no extra slots of its own.
(defclass mssql-connection (db-connection) ())

(defmethod initialize-instance :after ((msconn mssql-connection) &key)
  "Assign the type slot to mssql."
  (setf (slot-value msconn 'type) "mssql"))
(defmethod open-connection ((msconn mssql-connection) &key)
  "Connect to the MS SQL server, then apply each *mssql-settings* entry as
   a SET statement on the fresh session.  Returns the connection."
  (setf (conn-handle msconn) (mssql:connect (db-name msconn)
                                            (db-user msconn)
                                            (db-pass msconn)
                                            (db-host msconn)))
  ;; apply mssql-settings, if any
  (loop :for (name . value) :in *mssql-settings*
     :for sql := (format nil "set ~a ~a;" name value)
     :do (query msconn sql))
  ;; return the connection object
  msconn)
(defmethod close-connection ((msconn mssql-connection))
  "Disconnect from the MS SQL server, reset the handle, return the connection."
  (mssql:disconnect (conn-handle msconn))
  (setf (conn-handle msconn) nil)
  msconn)

(defmethod clone-connection ((c mssql-connection))
  "Copy connection C; mssql connections carry no extra state of their own."
  (change-class (call-next-method c) 'mssql-connection))
(defmethod query ((msconn mssql-connection) sql &key)
  "Send SQL query to MSCONN connection, logging it at the :sql level.
   Returns whatever mssql:query returns."
  (log-message :sql "MSSQL: sending query: ~a" sql)
  (mssql:query sql :connection (conn-handle msconn)))
(defun mssql-query (query)
  "Execute given QUERY within the current *mssql-db* connection, and set
   proper defaults for pgloader."
  ;; docstring used to say *connection*; the code reads *mssql-db*
  (query *mssql-db* query))

View File

@ -4,50 +4,12 @@
(in-package :pgloader.source.mssql)
(defvar *mssql-db* nil
"The MS SQL database connection handler.")
(defclass copy-mssql (db-copy)
((encoding :accessor encoding ; allows forcing encoding
:initarg :encoding
:initform nil))
(:documentation "pgloader MS SQL Data Source"))
;;;
;;; General utility to manage MySQL connection
;;;
(defclass mssql-connection (db-connection) ())
(defmethod initialize-instance :after ((msconn mssql-connection) &key)
"Assign the type slot to mssql."
(setf (slot-value msconn 'type) "mssql"))
(defmethod open-connection ((msconn mssql-connection) &key)
(setf (conn-handle msconn) (mssql:connect (db-name msconn)
(db-user msconn)
(db-pass msconn)
(db-host msconn)))
;; apply mysql-settings, if any
(loop :for (name . value) :in *mssql-settings*
:for sql := (format nil "set ~a ~a;" name value)
:do (query msconn sql))
;; return the connection object
msconn)
(defmethod close-connection ((msconn mssql-connection))
(mssql:disconnect (conn-handle msconn))
(setf (conn-handle msconn) nil)
msconn)
(defmethod clone-connection ((c mssql-connection))
(change-class (call-next-method c) 'mssql-connection))
(defmethod query ((msconn mssql-connection) sql &key)
"Send SQL query to MSCONN connection."
(log-message :sql "MSSQL: sending query: ~a" sql)
(mssql:query sql :connection (conn-handle msconn)))
(defun mssql-query (query)
"Execute given QUERY within the current *connection*, and set proper
defaults for pgloader."
(query *mssql-db* query))
;;;
;;; Those functions are to be called from withing an already established
;;; MS SQL Connection.
@ -61,11 +23,12 @@
"Associate internal table type symbol with what's found in MS SQL
information_schema.tables.table_type column.")
(defun filter-list-to-where-clause (filter-list
&optional
not
(schema-col "table_schema")
(table-col "table_name"))
(defmethod filter-list-to-where-clause ((mssql copy-mssql)
filter-list
&key
not
(schema-col "table_schema")
(table-col "table_name"))
"Given an INCLUDING or EXCLUDING clause, turn it into a MS SQL WHERE clause."
(loop :for (schema . table-name-list) :in filter-list
:append (mapcar (lambda (table-name)
@ -73,130 +36,133 @@
schema-col schema table-col not table-name))
table-name-list)))
(defun list-all-columns (catalog
&key
(table-type :table)
including
excluding
&aux
(table-type-name (cdr (assoc table-type *table-type*))))
(defmethod fetch-columns ((catalog catalog)
(mssql copy-mssql)
&key
(table-type :table)
including
excluding
&aux
(table-type-name
(cdr (assoc table-type *table-type*))))
(loop
:with incl-where := (filter-list-to-where-clause
mssql including :not nil
:schema-col "c.table_schema"
:table-col "c.table_name")
:with excl-where := (filter-list-to-where-clause
mssql excluding :not t
:schema-col "c.table_schema"
:table-col "c.table_name")
:for (schema-name table-name name type default nullable identity
character-maximum-length
numeric-precision numeric-precision-radix numeric-scale
datetime-precision
character-set-name collation-name)
:in
(mssql-query (format nil
(sql "/mssql/list-all-columns.sql")
(db-name *mssql-db*)
table-type-name
including ; do we print the clause?
(filter-list-to-where-clause including
nil
"c.table_schema"
"c.table_name")
excluding ; do we print the clause?
(filter-list-to-where-clause excluding
t
"c.table_schema"
"c.table_name")))
:do
(let* ((schema (maybe-add-schema catalog schema-name))
(table (maybe-add-table schema table-name))
(field
(make-mssql-column
schema-name table-name name type default nullable
(eq 1 identity)
character-maximum-length
numeric-precision numeric-precision-radix numeric-scale
datetime-precision
character-set-name collation-name)))
(add-field table field))
:in (mssql-query (sql "/mssql/list-all-columns.sql"
(db-name *mssql-db*)
table-type-name
incl-where ; do we print the clause?
incl-where
excl-where))
:do (let* ((schema (maybe-add-schema catalog schema-name))
(table (maybe-add-table schema table-name))
(field
(make-mssql-column
schema-name table-name name type default nullable
(eq 1 identity)
character-maximum-length
numeric-precision numeric-precision-radix numeric-scale
datetime-precision
character-set-name collation-name)))
(add-field table field))
:finally (return catalog)))
(defun list-all-indexes (catalog &key including excluding)
(defmethod fetch-indexes ((catalog catalog)
(mssql copy-mssql)
&key including excluding)
"Get the list of MSSQL index definitions per table."
(loop
:with incl-where := (filter-list-to-where-clause
mssql including :not nil
:schema-col "schema_name(schema_id)"
:table-col "o.name")
:with excl-where := (filter-list-to-where-clause
mssql excluding :not t
:schema-col "schema_name(schema_id)"
:table-col "o.name")
:for (schema-name table-name index-name colname unique pkey filter)
:in (mssql-query (format nil
(sql "/mssql/list-all-indexes.sql")
including ; do we print the clause?
(filter-list-to-where-clause including
nil
"schema_name(schema_id)"
"o.name"
)
excluding ; do we print the clause?
(filter-list-to-where-clause excluding
t
"schema_name(schema_id)"
"o.name"
)))
:do
(let* ((schema (find-schema catalog schema-name))
(table (find-table schema table-name))
(pg-index (make-index :name index-name
:schema schema
:table table
:primary (= pkey 1)
:unique (= unique 1)
:columns nil
:filter filter))
(index
(when table
(maybe-add-index table index-name pg-index :key #'index-name))))
(unless table
(log-message :warning
"Failed to find table ~s in schema ~s for index ~s, skipping the index"
table-name schema-name index-name))
(when index
(add-column index colname)))
:in (mssql-query (sql "/mssql/list-all-indexes.sql"
incl-where ; do we print the clause?
incl-where
excl-where ; do we print the clause?
excl-where))
:do (let* ((schema (find-schema catalog schema-name))
(table (find-table schema table-name))
(pg-index (make-index :name index-name
:schema schema
:table table
:primary (= pkey 1)
:unique (= unique 1)
:columns nil
:filter filter))
(index
(when table
(maybe-add-index table index-name pg-index :key #'index-name))))
(unless table
(log-message :warning
"Failed to find table ~s in schema ~s for index ~s, skipping the index"
table-name schema-name index-name))
(when index
(add-column index colname)))
:finally (return catalog)))
(defun list-all-fkeys (catalog &key including excluding)
(defmethod fetch-foreign-keys ((catalog catalog) (mssql copy-mssql)
&key including excluding)
"Get the list of MSSQL index definitions per table."
(loop
:with incl-where := (filter-list-to-where-clause
mssql including :not nil
:schema-col "kcu1.table_schema"
:table-col "kcu1.table_name")
:with excl-where := (filter-list-to-where-clause
mssql excluding :not t
:schema-col "kcu1.table_schema"
:table-col "kcu1.table_name")
:for (fkey-name schema-name table-name col
fschema-name ftable-name fcol
fk-update-rule fk-delete-rule)
:in (mssql-query (format nil
(sql "/mssql/list-all-fkeys.sql")
(db-name *mssql-db*) (db-name *mssql-db*)
including ; do we print the clause?
(filter-list-to-where-clause including
nil
"kcu1.table_schema"
"kcu1.table_name")
excluding ; do we print the clause?
(filter-list-to-where-clause excluding
t
"kcu1.table_schema"
"kcu1.table_name")))
:do
(let* ((schema (find-schema catalog schema-name))
(table (find-table schema table-name))
(fschema (find-schema catalog fschema-name))
(ftable (find-table fschema ftable-name))
(pg-fkey
(make-fkey :name fkey-name
:table table
:columns nil
:foreign-table ftable
:foreign-columns nil
:update-rule fk-update-rule
:delete-rule fk-delete-rule))
(fkey
(maybe-add-fkey table fkey-name pg-fkey :key #'fkey-name)))
(push-to-end (apply-identifier-case col) (fkey-columns fkey))
(push-to-end (apply-identifier-case fcol) (fkey-foreign-columns fkey)))
:in (mssql-query (sql "/mssql/list-all-fkeys.sql"
(db-name *mssql-db*) (db-name *mssql-db*)
incl-where ; do we print the clause?
incl-where
excl-where ; do we print the clause?
excl-where))
:do (let* ((schema (find-schema catalog schema-name))
(table (find-table schema table-name))
(fschema (find-schema catalog fschema-name))
(ftable (find-table fschema ftable-name))
(col-name (apply-identifier-case col))
(fcol-name (apply-identifier-case fcol))
(pg-fkey
(make-fkey :name fkey-name
:table table
:columns nil
:foreign-table ftable
:foreign-columns nil
:update-rule fk-update-rule
:delete-rule fk-delete-rule))
(fkey
(maybe-add-fkey table fkey-name pg-fkey :key #'fkey-name)))
(push-to-end col-name (fkey-columns fkey))
(push-to-end fcol-name (fkey-foreign-columns fkey)))
:finally (return catalog)))
;;;
;;; Tools to handle row queries.
;;;
(defun get-column-sql-expression (name type)
(defmethod get-column-sql-expression ((mssql copy-mssql) name type)
"Return per-TYPE SQL expression to use given a column NAME.
Mostly we just use the name, and make try to avoid parsing dates."
@ -208,44 +174,8 @@
(:bigint (format nil "cast([~a] as numeric)" name))
(t (format nil "[~a]" name))))
(defun get-column-list (columns)
(defmethod get-column-list ((mssql copy-mssql))
"Tweak how we fetch the column values to avoid parsing when possible."
(loop :for col :in columns
:collect (with-slots (name type) col
(get-column-sql-expression name type))))
;;;
;;; Materialize Views support
;;;
(defun create-ms-views (views-alist)
"VIEWS-ALIST associates view names with their SQL definition, which might
be empty for already existing views. Create only the views for which we
have an SQL definition."
(unless (eq :all views-alist)
(let ((views (remove-if #'null views-alist :key #'cdr)))
(when views
(loop :for (name . def) :in views
:for sql := (destructuring-bind (schema . v-name) name
(format nil
"CREATE VIEW ~@[~s~].~s AS ~a"
schema v-name def))
:do (progn
(log-message :info "MS SQL: ~a" sql)
(mssql-query sql)))))))
(defun drop-ms-views (views-alist)
"See `create-ms-views' for VIEWS-ALIST description. This time we DROP the
views to clean out after our work."
(unless (eq :all views-alist)
(let ((views (remove-if #'null views-alist :key #'cdr)))
(when views
(let ((sql
(format nil "DROP VIEW ~{~a~^, ~};"
(mapcar (lambda (qname)
(format nil "~@[~s.~]~a"
(car qname) (cdr qname)))
(mapcar #'car views)))))
(log-message :info "MSSQL: ~a" sql)
(mssql-query sql))))))
(loop :for field :in (fields mssql)
:collect (with-slots (name type) field
(get-column-sql-expression mssql name type))))

View File

@ -4,33 +4,12 @@
(in-package :pgloader.source.mssql)
(defclass copy-mssql (db-copy)
((encoding :accessor encoding ; allows forcing encoding
:initarg :encoding
:initform nil))
(:documentation "pgloader MS SQL Data Source"))
(defmethod initialize-instance :after ((source copy-mssql) &key)
"Add a default value for transforms in case it's not been provided."
(let* ((transforms (when (slot-boundp source 'transforms)
(slot-value source 'transforms))))
(when (and (slot-boundp source 'fields) (slot-value source 'fields))
;; cast typically happens in copy-database in the schema structure,
;; and the result is then copied into the copy-mysql instance.
(unless (and (slot-boundp source 'columns) (slot-value source 'columns))
(setf (slot-value source 'columns)
(mapcar #'cast (slot-value source 'fields))))
(unless transforms
(setf (slot-value source 'transforms)
(mapcar #'column-transform (slot-value source 'columns)))))))
(defmethod map-rows ((mssql copy-mssql) &key process-row-fn)
"Extract Mssql data and call PROCESS-ROW-FN function with a single
argument (a list of column values) for each row."
(with-connection (*mssql-db* (source-db mssql))
(let* ((sql (format nil "SELECT ~{~a~^, ~} FROM [~a].[~a];"
(get-column-list (fields mssql))
(get-column-list mssql)
(schema-source-name (table-schema (source mssql)))
(table-source-name (source mssql)))))
(log-message :debug "~a" sql)
@ -66,13 +45,11 @@
(catalog catalog)
&key
materialize-views
only-tables
create-indexes
foreign-keys
including
excluding)
"MS SQL introspection to prepare the migration."
(declare (ignore only-tables))
(with-stats-collection ("fetch meta data"
:use-result-as-rows t
:use-result-as-read t
@ -81,15 +58,15 @@
;; If asked to MATERIALIZE VIEWS, now is the time to create them in MS
;; SQL, when given definitions rather than existing view names.
(when (and materialize-views (not (eq :all materialize-views)))
(create-ms-views materialize-views))
(create-matviews materialize-views mssql))
(list-all-columns catalog
:including including
:excluding excluding)
(fetch-columns catalog mssql
:including including
:excluding excluding)
;; fetch view (and their columns) metadata, covering comments too
(let* ((view-names (unless (eq :all materialize-views)
(mapcar #'car materialize-views)))
(mapcar #'matview-source-name materialize-views)))
(including
(loop :for (schema-name . view-name) :in view-names
:do (let* ((schema-name (or schema-name "dbo"))
@ -100,27 +77,30 @@
:test #'string=)))))
(push-to-end view-name (cdr schema-entry))))))
(cond (view-names
(list-all-columns catalog
:including including
:excluding excluding
:table-type :view))
(fetch-columns catalog mssql
:including including
:excluding excluding
:table-type :view))
((eq :all materialize-views)
(list-all-columns catalog :table-type :view))))
(fetch-columns catalog mssql :table-type :view))))
(when create-indexes
(list-all-indexes catalog
:including including
:excluding excluding))
(fetch-indexes catalog mssql
:including including
:excluding excluding))
(when foreign-keys
(list-all-fkeys catalog
:including including
:excluding excluding))
(fetch-foreign-keys catalog mssql
:including including
:excluding excluding))
;; return how many objects we're going to deal with in total
;; for stats collection
(+ (count-tables catalog) (count-indexes catalog))))
(+ (count-tables catalog)
(count-views catalog)
(count-indexes catalog)
(count-fkeys catalog))))
;; be sure to return the catalog itself
catalog)
@ -132,4 +112,4 @@
migration purpose."
(when materialize-views
(with-connection (*mssql-db* (source-db mssql))
(drop-ms-views materialize-views))))
(drop-matviews materialize-views mssql))))

View File

@ -4,59 +4,15 @@
(in-package :pgloader.source.mysql)
;;;
;;; Those functions are to be called from withing an already established
;;; MySQL Connection.
;;;
;;; Handle MATERIALIZE VIEWS sections, where we need to create the views in
;;; the MySQL database before being able to process them.
;;;
(defun create-my-views (views-alist)
"VIEWS-ALIST associates view names with their SQL definition, which might
be empty for already existing views. Create only the views for which we
have an SQL definition."
(unless (eq :all views-alist)
(let ((views (remove-if #'null views-alist :key #'cdr)))
(when views
(loop :for (name . def) :in views
:for sql := (destructuring-bind (schema . v-name) name
(format nil
"CREATE VIEW ~@[~s.~]~a AS ~a"
schema v-name def))
:do
(log-message :info "MySQL: ~a" sql)
#+pgloader-image
(mysql-query sql)
#-pgloader-image
(restart-case
(mysql-query sql)
(use-existing-view ()
:report "Use the already existing view and continue"
nil)
(replace-view ()
:report "Replace the view with the one from pgloader's command"
(let* ((v-name (cdr name))
(drop-sql (format nil "DROP VIEW ~a;" v-name)))
(log-message :info "MySQL: ~a" drop-sql)
(mysql-query drop-sql)
(mysql-query sql)))))))))
(defclass copy-mysql (db-copy)
((encoding :accessor encoding ; allows forcing encoding
:initarg :encoding
:initform nil)
(range-list :accessor range-list
:initarg :range-list
:initform nil))
(:documentation "pgloader MySQL Data Source"))
(defun drop-my-views (views-alist)
"See `create-my-views' for VIEWS-ALIST description. This time we DROP the
views to clean out after our work."
(unless (eq :all views-alist)
(let ((views (remove-if #'null views-alist :key #'cdr)))
(when views
(let ((sql
(format nil "DROP VIEW ~{~a~^, ~};"
(mapcar (lambda (qname)
(format nil "~@[~s.~]~a"
(car qname) (cdr qname)))
(mapcar #'car views)))))
(log-message :info "MySQL: ~a" sql)
(mysql-query sql))))))
;;;
;;; Those functions are to be called from withing an already established
;;; MySQL Connection.
@ -69,8 +25,10 @@
"Associate internal table type symbol with what's found in MySQL
information_schema.tables.table_type column.")
(defun filter-list-to-where-clause (filter-list &optional not)
(defmethod filter-list-to-where-clause ((mysql copy-mysql) filter-list
&key not &allow-other-keys)
"Given an INCLUDING or EXCLUDING clause, turn it into a MySQL WHERE clause."
(declare (ignore mysql))
(mapcar (lambda (filter)
(typecase filter
(string-match-rule
@ -93,28 +51,25 @@
(t (ensure-unquoted default #\'))))
(defun list-all-columns (schema
&key
(table-type :table)
only-tables
including
excluding
&aux
(table-type-name (cdr (assoc table-type *table-type*))))
(defmethod fetch-columns ((schema schema)
(mysql copy-mysql)
&key
(table-type :table)
including
excluding
&aux
(table-type-name
(cdr (assoc table-type *table-type*))))
"Get the list of MySQL column names per table."
(loop
:for (tname tcomment cname ccomment dtype ctype default nullable extra)
:in
(mysql-query (format nil
(sql "/mysql/list-all-columns.sql")
(db-name *connection*)
table-type-name
only-tables ; do we print the clause?
only-tables
including ; do we print the clause?
(filter-list-to-where-clause including)
excluding ; do we print the clause?
(filter-list-to-where-clause excluding t)))
:in (mysql-query (sql "/mysql/list-all-columns.sql"
(db-name *connection*)
table-type-name
including ; do we print the clause?
including
excluding ; do we print the clause?
excluding))
:do
(let* ((table
(case table-type
@ -131,23 +86,17 @@
:finally
(return schema)))
(defun list-all-indexes (schema
&key
only-tables
including
excluding)
(defmethod fetch-indexes ((schema schema) (mysql copy-mysql)
&key including excluding)
"Get the list of MySQL index definitions per table."
(loop
:for (table-name name index-type non-unique cols)
:in (mysql-query (format nil
(sql "/mysql/list-all-indexes.sql")
(db-name *connection*)
only-tables ; do we print the clause?
only-tables
including ; do we print the clause?
(filter-list-to-where-clause including)
excluding ; do we print the clause?
(filter-list-to-where-clause excluding t)))
:in (mysql-query (sql "/mysql/list-all-indexes.sql"
(db-name *connection*)
including ; do we print the clause?
including
excluding ; do we print the clause?
excluding))
:do (let* ((table (find-table schema table-name))
(index
(make-index :name name ; further processing is needed
@ -166,23 +115,20 @@
;;;
;;; MySQL Foreign Keys
;;;
(defun list-all-fkeys (schema
&key
only-tables
including
excluding)
(defmethod fetch-foreign-keys ((schema schema)
(mysql copy-mysql)
&key
including
excluding)
"Get the list of MySQL Foreign Keys definitions per table."
(loop
:for (table-name name ftable-name cols fcols update-rule delete-rule)
:in (mysql-query (format nil
(sql "/mysql/list-all-fkeys.sql")
(db-name *connection*) (db-name *connection*)
only-tables ; do we print the clause?
only-tables
including ; do we print the clause?
(filter-list-to-where-clause including)
excluding ; do we print the clause?
(filter-list-to-where-clause excluding t)))
:in (mysql-query (sql "/mysql/list-all-fkeys.sql"
(db-name *connection*) (db-name *connection*)
including ; do we print the clause?
including
excluding ; do we print the clause?
excluding))
:do (let* ((table (find-table schema table-name))
(ftable (find-table schema ftable-name))
(fk
@ -216,41 +162,29 @@
;;; As it takes a separate PostgreSQL Query per comment it's useless to
;;; fetch them right into the the more general table and columns lists.
;;;
(defun list-table-comments (&key
only-tables
including
excluding)
(defun list-table-comments (&key including excluding)
"Return comments on MySQL tables."
(loop
:for (table-name comment)
:in (mysql-query (format nil
(sql "/mysql/list-table-comments.sql")
(db-name *connection*)
only-tables ; do we print the clause?
only-tables
including ; do we print the clause?
(filter-list-to-where-clause including)
excluding ; do we print the clause?
(filter-list-to-where-clause excluding t)))
:in (mysql-query (sql "/mysql/list-table-comments.sql"
(db-name *connection*)
including ; do we print the clause?
including
excluding ; do we print the clause?
excluding))
:when (and comment (not (string= comment "")))
:collect (list table-name comment)))
(defun list-columns-comments (&key
only-tables
including
excluding)
(defun list-columns-comments (&key including excluding)
"Return comments on MySQL tables."
(loop
:for (table-name column-name comment)
:in (mysql-query (format nil
(sql "/mysql/list-columns-comments.sql")
(db-name *connection*)
only-tables ; do we print the clause?
only-tables
including ; do we print the clause?
(filter-list-to-where-clause including)
excluding ; do we print the clause?
(filter-list-to-where-clause excluding t)))
:in (mysql-query (sql "/mysql/list-columns-comments.sql"
(db-name *connection*)
including ; do we print the clause?
including
excluding ; do we print the clause?
excluding))
:when (and comment (not (string= comment "")))
:collect (list table-name column-name comment)))
@ -259,42 +193,22 @@
;;; Tools to handle row queries, issuing separate is null statements and
;;; handling of geometric data types.
;;;
(defun get-column-sql-expression (name type)
(defmethod get-column-sql-expression ((mysql copy-mysql) name type)
"Return per-TYPE SQL expression to use given a column NAME.
Mostly we just use the name, but in case of POINT we need to use
astext(name)."
(declare (ignore mysql))
(case (intern (string-upcase type) "KEYWORD")
(:geometry (format nil "astext(`~a`) as `~a`" name name))
(:point (format nil "astext(`~a`) as `~a`" name name))
(:linestring (format nil "astext(`~a`) as `~a`" name name))
(t (format nil "`~a`" name))))
(defun get-column-list (copy)
(defmethod get-column-list ((mysql copy-mysql))
"Some MySQL datatypes have a meaningless default output representation, we
need to process them on the SQL side (geometric data types)."
(loop :for field :in (fields copy)
:collect (get-column-sql-expression (mysql-column-name field)
(mysql-column-dtype field))))
(declaim (inline fix-nulls))
(defun fix-nulls (row nulls)
"Given a cl-mysql row result and a nulls list as from
get-column-list-with-is-nulls, replace NIL with empty strings with when
we know from the added 'foo is null' that the actual value IS NOT NULL.
See http://bugs.mysql.com/bug.php?id=19564 for context."
(loop
for (current-col next-col) on row
for (current-null next-null) on nulls
;; next-null tells us if next column is an "is-null" col
;; when next-null is true, next-col is true if current-col is actually null
for is-null = (and next-null (string= next-col "1"))
for is-empty = (and next-null (string= next-col "0") (null current-col))
;; don't collect columns we added, e.g. "column_name is not null"
when (not current-null)
collect (cond (is-null :null)
(is-empty "")
(t current-col))))
(loop :for field :in (fields mysql)
:collect (let ((name (mysql-column-name field))
(type (mysql-column-dtype field)))
(get-column-sql-expression mysql name type))))

View File

@ -4,31 +4,6 @@
(in-package :pgloader.source.mysql)
(defclass copy-mysql (db-copy)
((encoding :accessor encoding ; allows forcing encoding
:initarg :encoding
:initform nil)
(range-list :accessor range-list
:initarg :range-list
:initform nil))
(:documentation "pgloader MySQL Data Source"))
(defmethod initialize-instance :after ((source copy-mysql) &key)
"Add a default value for transforms in case it's not been provided."
(let ((transforms (and (slot-boundp source 'transforms)
(slot-value source 'transforms))))
(when (and (slot-boundp source 'fields) (slot-value source 'fields))
;; cast typically happens in copy-database in the schema structure,
;; and the result is then copied into the copy-mysql instance.
(unless (and (slot-boundp source 'columns) (slot-value source 'columns))
(setf (slot-value source 'columns)
(mapcar #'cast (slot-value source 'fields))))
(unless transforms
(setf (slot-value source 'transforms)
(mapcar #'column-transform (slot-value source 'columns)))))))
;;;
;;; Implement the specific methods
;;;
@ -166,7 +141,6 @@ Illegal ~a character starting at position ~a~@[: ~a~].~%"
(catalog catalog)
&key
materialize-views
only-tables
(create-indexes t)
(foreign-keys t)
including
@ -174,8 +148,8 @@ Illegal ~a character starting at position ~a~@[: ~a~].~%"
"MySQL introspection to prepare the migration."
(let ((schema (add-schema catalog (catalog-name catalog)
:in-search-path t))
(view-names (unless (eq :all materialize-views)
(mapcar #'car materialize-views))))
(including (filter-list-to-where-clause mysql including))
(excluding (filter-list-to-where-clause mysql excluding :not t)))
(with-stats-collection ("fetch meta data"
:use-result-as-rows t
:use-result-as-read t
@ -184,41 +158,38 @@ Illegal ~a character starting at position ~a~@[: ~a~].~%"
;; If asked to MATERIALIZE VIEWS, now is the time to create them in
;; MySQL, when given definitions rather than existing view names.
(when (and materialize-views (not (eq :all materialize-views)))
(create-my-views materialize-views))
(create-matviews materialize-views mysql))
;; fetch table and columns metadata, covering table and column comments
(list-all-columns schema
:only-tables only-tables
:including including
:excluding excluding)
(fetch-columns schema mysql
:including including
:excluding excluding)
;; fetch view (and their columns) metadata, covering comments too
(let* ((view-names (unless (eq :all materialize-views)
(mapcar #'car materialize-views)))
(mapcar #'matview-source-name materialize-views)))
(including
(loop :for (schema-name . view-name) :in view-names
:collect (make-string-match-rule :target view-name))))
:collect (make-string-match-rule :target view-name)))
(including-clause (filter-list-to-where-clause mysql including)))
(cond (view-names
(list-all-columns schema
:only-tables only-tables
:including including
:excluding excluding
:table-type :view))
(fetch-columns schema mysql
:including including-clause
:excluding excluding
:table-type :view))
((eq :all materialize-views)
(list-all-columns schema :table-type :view))))
(fetch-columns schema mysql :table-type :view))))
(when foreign-keys
(list-all-fkeys schema
:only-tables only-tables
:including including
:excluding excluding))
(fetch-foreign-keys schema mysql
:including including
:excluding excluding))
(when create-indexes
(list-all-indexes schema
:only-tables only-tables
:including including
:excluding excluding))
(fetch-indexes schema mysql
:including including
:excluding excluding))
;; return how many objects we're going to deal with in total
;; for stats collection
@ -235,7 +206,7 @@ Illegal ~a character starting at position ~a~@[: ~a~].~%"
migration purpose."
(when materialize-views
(with-connection (*connection* (source-db mysql))
(drop-my-views materialize-views))))
(drop-matviews materialize-views mysql))))
(defvar *decoding-as* nil
"Special per-table encoding/decoding overloading rules for MySQL.")

View File

@ -13,7 +13,6 @@
from information_schema.columns c
join information_schema.tables t using(table_schema, table_name)
where c.table_schema = '~a' and t.table_type = '~a'
~:[~*~;and table_name in (~{'~a'~^,~})~]
~:[~*~;and (~{table_name ~a~^ or ~})~]
~:[~*~;and (~{table_name ~a~^ and ~})~]
order by table_name, ordinal_position;

View File

@ -30,7 +30,6 @@ FROM
WHERE tc.table_schema = '~a'
AND k.referenced_table_schema = '~a'
AND tc.constraint_type = 'FOREIGN KEY'
~:[~*~;and tc.table_name in (~{'~a'~^,~})~]
~:[~*~;and (~{tc.table_name ~a~^ or ~})~]
~:[~*~;and (~{tc.table_name ~a~^ and ~})~]

View File

@ -11,7 +11,6 @@
cast(GROUP_CONCAT(column_name order by seq_in_index) as char)
FROM information_schema.statistics
WHERE table_schema = '~a'
~:[~*~;and table_name in (~{'~a'~^,~})~]
~:[~*~;and (~{table_name ~a~^ or ~})~]
~:[~*~;and (~{table_name ~a~^ and ~})~]
GROUP BY table_name, index_name, index_type;

View File

@ -11,7 +11,6 @@
join information_schema.tables t using(table_schema, table_name)
where c.table_schema = '~a'
and t.table_type = 'BASE TABLE'
~:[~*~;and table_name in (~{'~a'~^,~})~]
~:[~*~;and (~{table_name ~a~^ or ~})~]
~:[~*~;and (~{table_name ~a~^ and ~})~]
order by table_name, ordinal_position;

View File

@ -10,6 +10,5 @@
FROM information_schema.tables
WHERE table_schema = '~a'
and table_type = 'BASE TABLE'
~:[~*~;and table_name in (~{'~a'~^,~})~]
~:[~*~;and (~{table_name ~a~^ or ~})~]
~:[~*~;and (~{table_name ~a~^ and ~})~];

View File

@ -1,50 +0,0 @@
(in-package :pgloader.source.pgsql)
(defun create-pg-views (views-alist)
"VIEWS-ALIST associates view names with their SQL definition, which might
be empty for already existing views. Create only the views for which we
have an SQL definition."
(unless (eq :all views-alist)
(let ((views (remove-if #'null views-alist :key #'cdr)))
(when views
(loop :for (name . def) :in views
:for sql := (destructuring-bind (schema . v-name) name
(format nil
"CREATE VIEW ~@[~s.~]~s AS ~a"
schema v-name def))
:do (progn
(log-message :info "PostgreSQL Source: ~a" sql)
#+pgloader-image
(pgsql-execute sql)
#-pgloader-image
(restart-case
(pgsql-execute sql)
(use-existing-view ()
:report "Use the already existing view and continue"
nil)
(replace-view ()
:report
"Replace the view with the one from pgloader's command"
(let ((drop-sql (format nil "DROP VIEW ~a;" (car name))))
(log-message :info "PostgreSQL Source: ~a" drop-sql)
(pgsql-execute drop-sql)
(pgsql-execute sql))))))))))
(defun drop-pg-views (views-alist)
"See `create-pg-views' for VIEWS-ALIST description. This time we DROP the
views to clean out after our work."
(unless (eq :all views-alist)
(let ((views (remove-if #'null views-alist :key #'cdr)))
(when views
(let ((sql
(with-output-to-string (sql)
(format sql "DROP VIEW ")
(loop :for view-definition :in views
:for i :from 0
:do (destructuring-bind (name . def) view-definition
(declare (ignore def))
(format sql
"~@[, ~]~@[~s.~]~s"
(not (zerop i)) (car name) (cdr name)))))))
(log-message :info "PostgreSQL Source: ~a" sql)
(pgsql-execute sql))))))

View File

@ -7,21 +7,6 @@
(defclass copy-pgsql (db-copy) ()
(:documentation "pgloader PostgreSQL Data Source"))
(defmethod initialize-instance :after ((source copy-pgsql) &key)
"Add a default value for transforms in case it's not been provided."
(let* ((transforms (when (slot-boundp source 'transforms)
(slot-value source 'transforms))))
(when (and (slot-boundp source 'fields) (slot-value source 'fields))
;; cast typically happens in copy-database in the schema structure,
;; and the result is then copied into the copy-mysql instance.
(unless (and (slot-boundp source 'columns) (slot-value source 'columns))
(setf (slot-value source 'columns)
(mapcar #'cast (slot-value source 'fields))))
(unless transforms
(setf (slot-value source 'transforms)
(mapcar #'column-transform (slot-value source 'columns)))))))
(defmethod map-rows ((pgsql copy-pgsql) &key process-row-fn)
"Extract PostgreSQL data and call PROCESS-ROW-FN function with a single
argument (a list of column values) for each row"
@ -90,7 +75,7 @@
;; the target database.
;;
(when (and materialize-views (not (eq :all materialize-views)))
(create-pg-views materialize-views))
(create-matviews materialize-views pgsql))
(when (eq :pgdg variant)
(list-all-sqltypes catalog
@ -102,7 +87,7 @@
:excluding excluding)
(let* ((view-names (unless (eq :all materialize-views)
(mapcar #'car materialize-views)))
(mapcar #'matview-source-name materialize-views)))
(including (make-including-expr-from-view-names view-names)))
(cond (view-names
(list-all-columns catalog
@ -140,4 +125,4 @@
the migration purpose."
(when materialize-views
(with-pgsql-transaction (:pgconn (source-db pgsql))
(drop-pg-views materialize-views))))
(drop-matviews materialize-views pgsql))))

View File

@ -0,0 +1,41 @@
;;;
;;; SQLite tools connecting to a database
;;;
(in-package :pgloader.source.sqlite)
(defvar *sqlite-db* nil
"The SQLite database connection handler.")
;;;
;;; Integration with the pgloader Source API
;;;
(defclass sqlite-connection (fd-connection)
((has-sequences :initform nil :accessor has-sequences)))
(defmethod initialize-instance :after ((slconn sqlite-connection) &key)
"Assign the type slot to sqlite."
(setf (slot-value slconn 'type) "sqlite"))
(defmethod open-connection ((slconn sqlite-connection) &key check-has-sequences)
(setf (conn-handle slconn)
(sqlite:connect (fd-path slconn)))
(log-message :debug "CONNECTED TO ~a" (fd-path slconn))
(when check-has-sequences
(let ((sql (format nil (sql "/sqlite/sqlite-sequence.sql"))))
(log-message :sql "SQLite: ~a" sql)
(when (sqlite:execute-single (conn-handle slconn) sql)
(setf (has-sequences slconn) t))))
slconn)
(defmethod close-connection ((slconn sqlite-connection))
(sqlite:disconnect (conn-handle slconn))
(setf (conn-handle slconn) nil)
slconn)
(defmethod clone-connection ((slconn sqlite-connection))
(change-class (call-next-method slconn) 'sqlite-connection))
(defmethod query ((slconn sqlite-connection) sql &key)
(log-message :sql "SQLite: sending query: ~a" sql)
(sqlite:execute-to-list (conn-handle slconn) sql))

View File

@ -3,66 +3,44 @@
;;;
(in-package :pgloader.source.sqlite)
(defvar *sqlite-db* nil
"The SQLite database connection handler.")
(defclass copy-sqlite (db-copy)
((db :accessor db :initarg :db))
(:documentation "pgloader SQLite Data Source"))
;;;
;;; Integration with the pgloader Source API
;;;
(defclass sqlite-connection (fd-connection)
((has-sequences :initform nil :accessor has-sequences)))
(defmethod initialize-instance :after ((slconn sqlite-connection) &key)
"Assign the type slot to sqlite."
(setf (slot-value slconn 'type) "sqlite"))
(defmethod open-connection ((slconn sqlite-connection) &key check-has-sequences)
(setf (conn-handle slconn)
(sqlite:connect (fd-path slconn)))
(log-message :debug "CONNECTED TO ~a" (fd-path slconn))
(when check-has-sequences
(let ((sql (format nil (sql "/sqlite/sqlite-sequence.sql"))))
(log-message :sql "SQLite: ~a" sql)
(when (sqlite:execute-single (conn-handle slconn) sql)
(setf (has-sequences slconn) t))))
slconn)
(defmethod close-connection ((slconn sqlite-connection))
(sqlite:disconnect (conn-handle slconn))
(setf (conn-handle slconn) nil)
slconn)
(defmethod clone-connection ((slconn sqlite-connection))
(change-class (call-next-method slconn) 'sqlite-connection))
(defmethod query ((slconn sqlite-connection) sql &key)
(log-message :sql "SQLite: sending query: ~a" sql)
(sqlite:execute-to-list (conn-handle slconn) sql))
;;;
;;; SQLite schema introspection facilities
;;;
(defun filter-list-to-where-clause (filter-list
&optional
not
(table-col "tbl_name"))
(defun sqlite-encoding (db)
"Return a BABEL suitable encoding for the SQLite db handle."
(let ((encoding-string (sqlite:execute-single db "pragma encoding;")))
(cond ((string-equal encoding-string "UTF-8") :utf-8)
((string-equal encoding-string "UTF-16") :utf-16)
((string-equal encoding-string "UTF-16le") :utf-16le)
((string-equal encoding-string "UTF-16be") :utf-16be))))
(defmethod filter-list-to-where-clause ((sqlite copy-sqlite)
filter-list
&key
not
(table-col "tbl_name")
&allow-other-keys)
"Given an INCLUDING or EXCLUDING clause, turn it into a SQLite WHERE clause."
(mapcar (lambda (table-name)
(format nil "(~a ~:[~;NOT ~]LIKE '~a')"
table-col not table-name))
filter-list))
(defun list-tables (&key
(defun list-tables (sqlite
&key
(db *sqlite-db*)
including
excluding)
"Return the list of tables found in SQLITE-DB."
(let ((sql (format nil (sql "/sqlite/list-tables.sql")
including ; do we print the clause?
(filter-list-to-where-clause including nil)
excluding ; do we print the clause?
(filter-list-to-where-clause excluding t))))
(let ((sql (sql "/sqlite/list-tables.sql"
including ; do we print the clause?
(filter-list-to-where-clause sqlite including :not nil)
excluding ; do we print the clause?
(filter-list-to-where-clause sqlite excluding :not t))))
(log-message :sql "~a" sql)
(loop for (name) in (sqlite:execute-to-list db sql)
collect name)))
@ -70,7 +48,7 @@
(defun find-sequence (db table-name column-name)
"Find if table-name.column-name is attached to a sequence in
sqlite_sequence catalog."
(let* ((sql (format nil (sql "/sqlite/find-sequence.sql") table-name))
(let* ((sql (sql "/sqlite/find-sequence.sql" table-name))
(seq (sqlite:execute-single db sql)))
(when (and seq (not (zerop seq)))
;; magic marker for `apply-casting-rules'
@ -83,7 +61,7 @@
added to the table. So we might fail to FIND-SEQUENCE, and still need to
consider the column has an autoincrement. Parse the SQL definition of the
table to find out."
(let* ((sql (format nil (sql "/sqlite/get-create-table.sql") table-name))
(let* ((sql (sql "/sqlite/get-create-table.sql" table-name))
(create-table (sqlite:execute-single db sql))
(open-paren (+ 1 (position #\( create-table)))
(close-paren (position #\) create-table :from-end t))
@ -111,7 +89,7 @@
(defun list-columns (table &key db-has-sequences (db *sqlite-db*) )
"Return the list of columns found in TABLE-NAME."
(let* ((table-name (table-source-name table))
(sql (format nil (sql "/sqlite/list-columns.sql") table-name)))
(sql (sql "/sqlite/list-columns.sql" table-name)))
(loop :for (ctid name type nullable default pk-id)
:in (sqlite:execute-to-list db sql)
:do (let* ((ctype (normalize type))
@ -136,14 +114,18 @@
(setf (coldef-extra field) :auto-increment))
(add-field table field)))))
(defun list-all-columns (schema
&key
db-has-sequences
(db *sqlite-db*)
including
excluding)
(defmethod fetch-columns ((schema schema)
(sqlite copy-sqlite)
&key
db-has-sequences
table-type
including
excluding
&aux (db (conn-handle (source-db sqlite))))
"Get the list of SQLite column definitions per table."
(loop :for table-name :in (list-tables :db db
(declare (ignore table-type))
(loop :for table-name :in (list-tables sqlite
:db db
:including including
:excluding excluding)
:do (let ((table (add-table schema table-name)))
@ -186,15 +168,14 @@
(defun list-index-cols (index-name &optional (db *sqlite-db*))
"Return the list of columns in INDEX-NAME."
(let ((sql (format nil (sql "/sqlite/list-index-cols.sql") index-name)))
(let ((sql (sql "/sqlite/list-index-cols.sql" index-name)))
(loop :for (index-pos table-pos col-name) :in (sqlite:execute-to-list db sql)
:collect (apply-identifier-case col-name))))
(defun list-indexes (table &optional (db *sqlite-db*))
"Return the list of indexes attached to TABLE."
(let* ((table-name (table-source-name table))
(sql
(format nil (sql "/sqlite/list-table-indexes.sql") table-name)))
(sql (sql "/sqlite/list-table-indexes.sql" table-name)))
(loop
:for (seq index-name unique origin partial)
:in (sqlite:execute-to-list db sql)
@ -213,7 +194,9 @@
;; might create double primary key indexes here
(add-unlisted-primary-key-index table))
(defun list-all-indexes (schema &key (db *sqlite-db*))
(defmethod fetch-indexes ((schema schema) (sqlite copy-sqlite)
&key &allow-other-keys
&aux (db (conn-handle (source-db sqlite))))
"Get the list of SQLite index definitions per table."
(loop :for table :in (schema-table-list schema)
:do (list-indexes table db)))
@ -225,8 +208,7 @@
(defun list-fkeys (table &optional (db *sqlite-db*))
"Return the list of indexes attached to TABLE."
(let* ((table-name (table-source-name table))
(sql
(format nil (sql "/sqlite/list-fkeys.sql") table-name)))
(sql (sql "/sqlite/list-fkeys.sql" table-name)))
(loop
:with fkey-table := (make-hash-table)
:for (id seq ftable-name from to on-update on-delete match)
@ -262,7 +244,9 @@
(when ftable (format-table-name ftable))
to))))))
(defun list-all-fkeys (schema &key (db *sqlite-db*))
(defmethod fetch-foreign-keys ((schema schema) (sqlite copy-sqlite)
&key &allow-other-keys
&aux (db (conn-handle (source-db sqlite))))
"Get the list of SQLite foreign keys definitions per table."
(loop :for table :in (schema-table-list schema)
:do (list-fkeys table db)))

View File

@ -4,35 +4,8 @@
(in-package :pgloader.source.sqlite)
(defclass copy-sqlite (db-copy)
((db :accessor db :initarg :db))
(:documentation "pgloader SQLite Data Source"))
(defmethod initialize-instance :after ((source copy-sqlite) &key)
"Add a default value for transforms in case it's not been provided."
(let* ((transforms (when (slot-boundp source 'transforms)
(slot-value source 'transforms))))
(when (and (slot-boundp source 'fields) (slot-value source 'fields))
;; cast typically happens in copy-database in the schema structure,
;; and the result is then copied into the copy-mysql instance.
(unless (and (slot-boundp source 'columns) (slot-value source 'columns))
(setf (slot-value source 'columns)
(mapcar #'cast (slot-value source 'fields))))
(unless transforms
(setf (slot-value source 'transforms)
(mapcar #'column-transform (slot-value source 'columns)))))))
;;; Map a function to each row extracted from SQLite
;;;
(defun sqlite-encoding (db)
"Return a BABEL suitable encoding for the SQLite db handle."
(let ((encoding-string (sqlite:execute-single db "pragma encoding;")))
(cond ((string-equal encoding-string "UTF-8") :utf-8)
((string-equal encoding-string "UTF-16") :utf-16)
((string-equal encoding-string "UTF-16le") :utf-16le)
((string-equal encoding-string "UTF-16be") :utf-16be))))
(declaim (inline parse-value))
(defun parse-value (value sqlite-type pgsql-type &key (encoding :utf-8))
@ -111,23 +84,25 @@
:use-result-as-rows t
:use-result-as-read t
:section :pre)
(with-connection (conn (source-db sqlite) :check-has-sequences t)
(let ((*sqlite-db* (conn-handle conn)))
(list-all-columns schema
:db *sqlite-db*
:including including
:excluding excluding
:db-has-sequences (has-sequences conn))
(with-connection (conn (source-db sqlite) :check-has-sequences t)
(let ((*sqlite-db* (conn-handle conn)))
(fetch-columns schema
sqlite
:including including
:excluding excluding
:db-has-sequences (has-sequences conn))
(when create-indexes
(list-all-indexes schema :db *sqlite-db*))
(when create-indexes
(fetch-indexes schema sqlite))
(when foreign-keys
(list-all-fkeys schema :db *sqlite-db*)))
(when foreign-keys
(fetch-foreign-keys schema sqlite)))
;; return how many objects we're going to deal with in total
;; for stats collection
(+ (count-tables catalog) (count-indexes catalog))))
;; return how many objects we're going to deal with in total
;; for stats collection
(+ (count-tables catalog)
(count-indexes catalog)
(count-fkeys catalog))))
catalog))

View File

@ -45,7 +45,7 @@
(defstruct catalog name schema-list types-without-btree distribution-rules)
(defstruct schema source-name name catalog in-search-path
table-list view-list extension-list sqltype-list)
table-list view-list matview-list extension-list sqltype-list)
(defstruct table source-name name schema oid comment
storage-parameter-list tablespace
@ -54,6 +54,8 @@
;; citus is an extra slot for citus support
field-list column-list index-list fkey-list trigger-list citus-rule)
(defstruct matview source-name name schema definition)
;;;
;;; When migrating from PostgreSQL to PostgreSQL we might have to install
;;; extensions to have data type coverage.

View File

@ -58,11 +58,13 @@
(walk-sources-and-build-fs)
"File system as an hash-table in memory.")
(defun sql (url)
(defun sql (url &rest args)
"Abstract the hash-table based implementation of our SQL file system."
(restart-case
(or (gethash url *fs*)
(error "URL ~s not found!" url))
(apply #'format nil
(or (gethash url *fs*)
(error "URL ~s not found!" url))
args)
(recompute-fs-and-retry ()
(setf *fs* (walk-sources-and-build-fs))
(sql url))))

View File

@ -11,7 +11,7 @@ load database
ALTER SCHEMA 'pgloader' RENAME TO 'mysql'
ALTER TABLE NAMES MATCHING ~/./ SET TABLESPACE 'pg_default'
INCLUDING ONLY TABLE NAMES MATCHING 'encryption_key_canary'
-- INCLUDING ONLY TABLE NAMES MATCHING 'encryption_key_canary'
CAST column utilisateurs__Yvelines2013-06-28.sexe
to text drop not null using empty-string-to-null,