pgloader/test/Makefile
Dimitri Fontaine 38712d98e0 Fix regression testing.
The previous patch exposed regression failures that had been hidden by
strange bugs with CCL.

One such regression was introduced in commit
ab7e77c2d00decce64ab739d0eb3d2ca5bdb6a7e, where we played with the complex
code generation for field projection. After that change, the following two
cases were no longer cleanly processed:

  column text using "constant"
  column text using "field-name"

In the first case we want to load a user-defined constant into the column;
in the second case we want to load the value of the field "field-name" into
the column, where the source and target simply have different names.
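
For illustration, here is a minimal load-file sketch of the two projections
(the file path, table name, and column names below are invented and do not
come from an actual test file):

  LOAD CSV
       FROM 'data/example.csv' (field-name)
       INTO postgresql:///pgloader?udc
            (
               constant_col text using "constant",   -- load the constant string
               copied_col   text using "field-name"  -- load the value of field "field-name"
            )
       WITH fields terminated by ',';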

Another regression was introduced in the recent commit
01e5c2376390749c2b7041b17b9a974ee8efb6b2, where the create-table function was
called too early, before *pgsql-reserved-keywords* had been fetched. As a
consequence, table names weren't always properly quoted, as shown by the
test/csv-header.load file, which targets a table named "group".
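
To illustrate the quoting problem, here is a reduced sketch targeting a table
named "group" (the field names and file path are made up; this is not the
actual content of test/csv-header.load):

  LOAD CSV
       FROM 'data/example.csv' (a, b)
       INTO postgresql:///pgloader?group (a, b)
       WITH fields terminated by ',';

  -- "group" is a reserved keyword in PostgreSQL: when pgloader creates the
  -- target table itself, the generated DDL must read CREATE TABLE "group"
  -- (...), the unquoted form being a syntax error.  Quoting the name
  -- correctly requires *pgsql-reserved-keywords* to have been fetched first.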

Finally, skip the test/dbf.load regression test when using CCL, as this
environment doesn't provide the necessary CP850 code page / encoding.
2017-09-09 00:51:07 +02:00


TMPDIR ?= /tmp
TESTS = $(wildcard *.load)
OUT = $(TESTS:.load=.out)
REMOTE = archive.load bossa-all.load bossa.load census-places.load dbf-zip.load
LOCAL = $(filter-out $(REMOTE:.load=.out),$(OUT))
REGRESS = allcols.load \
          csv-before-after.load \
          csv-districts.load \
          csv-parse-date.load \
          csv-error.load \
          csv-escape-mode.load \
          csv-filename-pattern.load \
          csv-guess.load \
          csv-header.load \
          csv-json.load \
          csv-keep-extra-blanks.load \
          csv-missing-col.load \
          csv-non-printable.load \
          csv-nulls.load \
          csv-temp.load \
          csv-trim-extra-blanks.load \
          csv.load \
          copy.load \
          copy-hex.load \
          dbf.load \
          errors.load \
          fixed.load \
          fields-with-periods.load \
          ixf.load \
          overflow.load \
          partial.load \
          serial.load \
          udc.load \
          xzero.load
PGLOADER ?= ../build/bin/pgloader
EXTRA_OPTS =
ifneq (,$(findstring ccl,$(CL)))
EXTRA_OPTS = --batch --heap-reserve 150g
endif

regress: clean-out $(addprefix regress/out/, $(REGRESS:.load=.out)) ;

clean-out:
	rm -f regress/out/*

local: prepare $(LOCAL)

remote: prepare $(REMOTE:.load=.out)

all: prepare $(OUT)

prepare: bossa.sql sakila
	-dropdb -U postgres pgloader
	-dropdb -U postgres stocks
	-dropdb -U postgres ip4r
	-createdb -U postgres -O `whoami` pgloader
	-createdb -U postgres -O `whoami` stocks
	-createdb -U postgres -O `whoami` ip4r
	-psql -d pgloader -c 'create schema expected'
	-psql -U postgres -d pgloader -c 'create extension ip4r'
	-psql -U postgres -d ip4r -c 'create extension ip4r'
	-psql -d stocks -f bossa.sql

errors.out: errors.load
	-$(PGLOADER) $<
	@echo

nofile.out: nofile.load
	-$(PGLOADER) $<
	@echo

csv-hstore.out: csv-hstore.load
	@echo skipping $@

# sakila needs preparing a MySQL database too
$(TMPDIR)/sakila-db/sakila-schema.sql: data/sakila-db.zip
	rm -rf $(TMPDIR)/sakila-db
	unzip $< -d $(TMPDIR)

sakila: $(TMPDIR)/sakila-db/sakila-schema.sql
	-dropdb -U postgres sakila
	-createdb -U postgres -O `whoami` sakila
	-echo "DROP DATABASE sakila" | mysql -u root
	echo "SOURCE $(TMPDIR)/sakila-db/sakila-schema.sql" | mysql -u root
	echo "SOURCE $(TMPDIR)/sakila-db/sakila-data.sql" | mysql -u root

sakila.out: sakila sakila.load
	-$(PGLOADER) sakila.load
	@echo

csv-districts-stdin.out: csv-districts-stdin.load
	cat data/2013_Gaz_113CDs_national.txt | $(PGLOADER) $^

ifneq (,$(findstring ccl,$(CL)))
regress/out/dbf.out: dbf.load
	@echo "Skipping $@, CCL doesn't have CP850 encoding"
	touch $@
else
regress/out/dbf.out: dbf.load
	$(PGLOADER) $(EXTRA_OPTS) --regress $<
	touch $@
endif

# General case where we do NOT expect any error
%.out: %.load
	$(PGLOADER) $<
	@echo

# Regression tests
regress/out/%.out: %.load
	$(PGLOADER) $(EXTRA_OPTS) --regress $<
	touch $@