Fix regression testing.

The previous patch exposed regression failures that had been hidden by strange
bugs with CCL.

One such regression was introduced in commit
ab7e77c2d00decce64ab739d0eb3d2ca5bdb6a7e where we played with the complex
code generation for field projection, where the following two cases weren't
cleanly processed anymore:

  column text using "constant"
  column text using "field-name"

In the first case we want to load a user-defined constant in the column, in
the second case we want to load the value of the field "field-name" in the
column --- we just have different source and target names.

Another regression was introduced in the recent commit
01e5c2376390749c2b7041b17b9a974ee8efb6b2 where the create-table function was
called too early, before we had fetched *pgsql-reserved-keywords*. As a
consequence table names weren't always properly quoted as shown in the
test/csv-header.load file which targets a table named "group".

Finally, skip the test/dbf.load regression test when using CCL as this
environment doesn't have the necessary CP850 code page / encoding.
This commit is contained in:
Dimitri Fontaine 2017-09-08 23:56:55 +02:00
parent ebf9f7a6a9
commit 38712d98e0
12 changed files with 68 additions and 51 deletions

View File

@ -176,7 +176,8 @@ Parameters here are meant to be already parsed, see parse-cli-optargs."
;;; Main API to use from outside of pgloader.
;;;
(defun load-data (&key ((:from source)) ((:into target))
encoding fields target-table options gucs casts before after
encoding fields target-table-name
options gucs casts before after
(start-logger t) (flush-summary t))
"Load data from SOURCE into TARGET."
(declare (type connection source)
@ -185,7 +186,7 @@ Parameters here are meant to be already parsed, see parse-cli-optargs."
(when (and (typep source (or 'csv-connection
'copy-connection
'fixed-connection))
(null target-table)
(null target-table-name)
(null (pgconn-table-name target)))
(error 'source-definition-error
:mesg (format nil
@ -201,18 +202,13 @@ Parameters here are meant to be already parsed, see parse-cli-optargs."
;; now generates the code for the command
(log-message :debug "LOAD DATA FROM ~s" source)
(let* ((target-table (or target-table
(let ((table (pgconn-table-name target)))
(etypecase (pgconn-table-name target)
(string (create-table table))
(cons (create-table table))
(table table)
(null nil)))))
(let* ((target-table-name (or target-table-name
(pgconn-table-name target)))
(code (lisp-code-for-loading :from source
:into target
:encoding encoding
:fields fields
:target-table target-table
:target-table-name target-table-name
:options options
:gucs gucs
:casts casts
@ -235,7 +231,7 @@ Parameters here are meant to be already parsed, see parse-cli-optargs."
(defun lisp-code-for-loading (&key
((:from source)) ((:into target))
encoding fields target-table
encoding fields target-table-name
options gucs casts before after)
(let ((func (cdr (assoc (type-of source) *get-code-for-source*))))
;; not all functions support the same set of &key parameters,
@ -245,7 +241,7 @@ Parameters here are meant to be already parsed, see parse-cli-optargs."
(funcall func
source
target
:target-table target-table
:target-table-name target-table-name
:fields fields
:encoding (or encoding :default)
:gucs gucs

View File

@ -97,7 +97,7 @@
encoding
fields
pguri
(create-table (or table-name (pgconn-table-name pguri)))
(or table-name (pgconn-table-name pguri))
columns
clauses))))
@ -105,7 +105,7 @@
&key
(encoding :utf-8)
fields
target-table
target-table-name
columns
gucs before after options
&aux
@ -130,7 +130,7 @@
(make-instance 'pgloader.copy:copy-copy
:target-db ,pg-db-conn
:source source-db
:target ,target-table
:target (create-table ',target-table-name)
:encoding ,encoding
:fields ',fields
:columns ',columns
@ -157,7 +157,7 @@
(defrule load-copy-file load-copy-file-command
(:lambda (command)
(bind (((source encoding fields pg-db-uri table columns
(bind (((source encoding fields pg-db-uri table-name columns
&key options gucs before after) command))
(cond (*dry-run*
(lisp-code-for-csv-dry-run pg-db-uri))
@ -165,7 +165,7 @@
(lisp-code-for-loading-from-copy source pg-db-uri
:encoding encoding
:fields fields
:target-table table
:target-table-name table-name
:columns columns
:gucs gucs
:before before

View File

@ -390,7 +390,7 @@
encoding
fields
pguri
(create-table (or table-name (pgconn-table-name pguri)))
(or table-name (pgconn-table-name pguri))
columns
clauses))))
@ -406,7 +406,7 @@
&key
(encoding :utf-8)
fields
target-table
target-table-name
columns
gucs before after options
&allow-other-keys
@ -432,7 +432,7 @@
(make-instance 'pgloader.csv:copy-csv
:target-db ,pg-db-conn
:source source-db
:target ,target-table
:target (create-table ',target-table-name)
:encoding ,encoding
:fields ',fields
:columns ',columns
@ -459,7 +459,7 @@
(defrule load-csv-file load-csv-file-command
(:lambda (command)
(bind (((source encoding fields pg-db-uri table columns
(bind (((source encoding fields pg-db-uri table-name columns
&key options gucs before after) command))
(cond (*dry-run*
(lisp-code-for-csv-dry-run pg-db-uri))
@ -467,7 +467,7 @@
(lisp-code-for-loading-from-csv source pg-db-uri
:encoding encoding
:fields fields
:target-table table
:target-table-name table-name
:columns columns
:gucs gucs
:before before

View File

@ -79,7 +79,7 @@
(list* source
encoding
pguri
(create-table (or table-name (pgconn-table-name pguri)))
(or table-name (pgconn-table-name pguri))
clauses))))
(defun lisp-code-for-dbf-dry-run (dbf-db-conn pg-db-conn)
@ -90,7 +90,7 @@
(defun lisp-code-for-loading-from-dbf (dbf-db-conn pg-db-conn
&key
target-table
target-table-name
(encoding :ascii)
gucs before after options
&allow-other-keys)
@ -105,7 +105,7 @@
:target-db ,pg-db-conn
:encoding ,encoding
:source-db source-db
:target ,target-table)))
:target (create-table ',target-table-name))))
,(sql-code-block pg-db-conn :pre before "before load")
@ -119,13 +119,13 @@
(defrule load-dbf-file load-dbf-command
(:lambda (command)
(bind (((source encoding pg-db-uri table
(bind (((source encoding pg-db-uri table-name
&key options gucs before after) command))
(cond (*dry-run*
(lisp-code-for-dbf-dry-run source pg-db-uri))
(t
(lisp-code-for-loading-from-dbf source pg-db-uri
:target-table table
:target-table-name table-name
:encoding encoding
:gucs gucs
:before before

View File

@ -105,7 +105,7 @@
encoding
fields
pguri
(create-table (or table-name (pgconn-table-name pguri)))
(or table-name (pgconn-table-name pguri))
columns
clauses))))
@ -113,7 +113,7 @@
&key
(encoding :utf-8)
fields
target-table
target-table-name
columns
gucs before after options
&allow-other-keys
@ -139,7 +139,7 @@
(make-instance 'pgloader.fixed:copy-fixed
:target-db ,pg-db-conn
:source source-db
:target ,target-table
:target (create-table ',target-table-name)
:encoding ,encoding
:fields ',fields
:columns ',columns
@ -160,7 +160,7 @@
(defrule load-fixed-cols-file load-fixed-cols-file-command
(:lambda (command)
(bind (((source encoding fields pg-db-uri table columns
(bind (((source encoding fields pg-db-uri table-name columns
&key options gucs before after) command))
(cond (*dry-run*
(lisp-code-for-csv-dry-run pg-db-uri))
@ -168,7 +168,7 @@
(lisp-code-for-loading-from-fixed source pg-db-uri
:encoding encoding
:fields fields
:target-table table
:target-table-name table-name
:columns columns
:gucs gucs
:before before

View File

@ -70,12 +70,13 @@
(destructuring-bind (source pguri table-name clauses) command
(list* source
pguri
(create-table (or table-name (pgconn-table-name pguri)))
(or table-name (pgconn-table-name pguri))
clauses))))
(defun lisp-code-for-loading-from-ixf (ixf-db-conn pg-db-conn
&key
target-table gucs before after options
target-table-name
gucs before after options
&allow-other-keys)
`(lambda ()
(let* (,@(pgsql-connection-bindings pg-db-conn gucs)
@ -88,7 +89,7 @@
(make-instance 'pgloader.ixf:copy-ixf
:target-db ,pg-db-conn
:source-db source-db
:target ,target-table
:target (create-table ',target-table-name)
:timezone timezone)))
,(sql-code-block pg-db-conn :pre before "before load")
@ -104,13 +105,13 @@
(defrule load-ixf-file load-ixf-command
(:lambda (command)
(bind (((source pg-db-uri table
(bind (((source pg-db-uri table-name
&key options gucs before after) command))
(cond (*dry-run*
(lisp-code-for-csv-dry-run pg-db-uri))
(t
(lisp-code-for-loading-from-ixf source pg-db-uri
:target-table table
:target-table-name table-name
:gucs gucs
:before before
:after after

View File

@ -294,11 +294,11 @@
load-copy-file-command
load-fixed-cols-file-command)
(:lambda (command)
(destructuring-bind (source encoding fields pg-db-uri table columns
(destructuring-bind (source encoding fields pg-db-uri table-name columns
&key gucs &allow-other-keys)
command
(declare (ignore source encoding fields columns))
(list pg-db-uri table gucs))))
(list pg-db-uri table-name gucs))))
(defrule pg-db-uri-from-source-target (or load-sqlite-command
load-mysql-command
@ -311,18 +311,18 @@
(defrule pg-db-uri-from-source-table-target (or load-ixf-command)
(:lambda (command)
(destructuring-bind (source pg-db-uri table &key gucs &allow-other-keys)
(destructuring-bind (source pg-db-uri table-name &key gucs &allow-other-keys)
command
(declare (ignore source))
(list pg-db-uri table gucs))))
(list pg-db-uri table-name gucs))))
(defrule pg-db-uri-from-source-and-encoding (or load-dbf-command)
(:lambda (command)
(destructuring-bind (source encoding pg-db-uri table
(destructuring-bind (source encoding pg-db-uri table-name
&key gucs &allow-other-keys)
command
(declare (ignore source encoding))
(list pg-db-uri table gucs))))
(list pg-db-uri table-name gucs))))
(defun parse-target-pg-db-uri (command-file)
"Partially parse COMMAND-FILE and return its target connection string."

View File

@ -34,7 +34,8 @@
(expected-data-file (make-pathname :defaults load-file
:type "out"
:directory expected-subdir))
((target-conn target-table gucs) (parse-target-pg-db-uri load-file))
((target-conn target-table-name gucs) (parse-target-pg-db-uri load-file))
(target-table (create-table target-table-name))
(*pg-settings* (pgloader.pgsql:sanitize-user-gucs gucs))
(*pgsql-reserved-keywords* (list-reserved-keywords target-conn))
@ -75,7 +76,7 @@
;; load expected data
(load-data :from expected-data-source
:into expected-data-target
:target-table expected-target-table
:target-table-name expected-target-table
:options '(:truncate t)
:start-logger nil
:flush-summary t)

View File

@ -102,12 +102,19 @@
`(funcall ,(process-field field-name)
,field-name))))
(newrow
(loop for (name type fn) in columns
(loop for (name type sexp) in columns
collect
;; we expect the name of a COLUMN to be the same
;; as the name of its derived FIELD when we
;; don't have any transformation function
(or fn (field-name-as-symbol name)))))
(typecase sexp
(null (field-name-as-symbol name))
(string (if (assoc sexp fields :test #'string=)
;; col text using "Field-Name"
(field-name-as-symbol sexp)
;; col text using "Constant String"
sexp))
(t sexp)))))
`(lambda (row)
(declare (optimize speed) (type list row))
(destructuring-bind (&optional ,@args &rest extra) row

View File

@ -185,7 +185,7 @@
(defun create-table (maybe-qualified-name)
"Create a table instance from the db-uri component, either a string or a
cons of two strings: (schema . table)."
(typecase maybe-qualified-name
(etypecase maybe-qualified-name
(string (make-table :source-name maybe-qualified-name
:name (apply-identifier-case maybe-qualified-name)))
@ -196,7 +196,11 @@
(let ((sname (car maybe-qualified-name)))
(make-schema :catalog nil
:source-name sname
:name (apply-identifier-case sname)))))))
:name (apply-identifier-case sname)))))
;; some code path using pgloader as an API might end-up here with an
;; already cooked table structure, try it and see...
(table maybe-qualified-name)))
(defmethod add-schema ((catalog catalog) schema-name &key)
"Add SCHEMA-NAME to CATALOG and return the new schema instance."

View File

@ -94,6 +94,15 @@ sakila.out: sakila sakila.load
csv-districts-stdin.out: csv-districts-stdin.load
cat data/2013_Gaz_113CDs_national.txt | $(PGLOADER) $^
ifneq (,$(findstring ccl,$(CL)))
regress/out/dbf.out: dbf.load
@echo "Skipping $@, CCL doesn't have CP850 encoding"
touch $@
else
$(PGLOADER) $(EXTRA_OPTS) --regress $<
touch $@
endif
# General case where we do NOT expect any error
%.out: %.load
$(PGLOADER) $<
@ -101,6 +110,5 @@ csv-districts-stdin.out: csv-districts-stdin.load
# Regression tests
regress/out/%.out: %.load
#./regress.sh $(PGLOADER) $<
$(PGLOADER) $(EXTRA_OPTS) --regress $<
touch $@

View File

@ -20,6 +20,6 @@ LOAD CSV
$$;
somefields,reklpcode,repl$grpid,repl$id,another,fields
somefields,rekplcode,repl$grpid,repl$id,another,fields
a,b,c,d,e,f
foo,bar,baz,quux,foobar,fizzbuzz