Merge tag 'v3.6.2' into debian

This commit is contained in:
Christoph Berg 2020-06-10 14:26:33 +02:00
commit f8ef9c2dc3
97 changed files with 1943 additions and 1056 deletions

View File

@ -15,8 +15,8 @@ lisp_install() {
;;
sbcl)
sbcl_checksum='eb44d9efb4389f71c05af0327bab7cd18f8bb221fb13a6e458477a9194853958'
sbcl_version='1.3.18'
sbcl_checksum='22ccd9409b2ea16d4be69235c5ad5fde833452955cb24483815312d3b1d7401c'
sbcl_version='1.5.2'
remote_file "/tmp/sbcl-${sbcl_version}.tgz" "http://prdownloads.sourceforge.net/sbcl/sbcl-${sbcl_version}-x86-64-linux-binary.tar.bz2" "$sbcl_checksum"
tar --file "/tmp/sbcl-${sbcl_version}.tgz" --extract --directory '/tmp'

View File

@ -21,10 +21,10 @@ before_script:
- PGUSER=pgloader psql -d pgloader -c "create schema expected;"
- PGUSER=pgloader psql -d pgloader -c "create schema err;"
- make --version
- make "CL=$LISP"
- make "CL=$LISP" clones save
script:
- PGUSER=pgloader make "CL=$LISP" check
- PGUSER=pgloader make "CL=$LISP" check-saved
notifications:
email:

View File

@ -26,7 +26,7 @@ FROM debian:stable-slim as builder
RUN mkdir -p /opt/src/pgloader/build/bin \
&& cd /opt/src/pgloader \
&& make
&& make clones save
FROM debian:stable-slim

View File

@ -14,7 +14,6 @@ FROM debian:stable-slim as builder
make \
openssl \
patch \
sbcl \
time \
unzip \
wget \
@ -30,7 +29,7 @@ FROM debian:stable-slim as builder
RUN mkdir -p /opt/src/pgloader/build/bin \
&& cd /opt/src/pgloader \
&& make CL=ccl DYNSIZE=256
&& make CL=ccl DYNSIZE=256 clones save
FROM debian:stable-slim
@ -40,6 +39,7 @@ FROM debian:stable-slim
freetds-dev \
gawk \
libsqlite3-dev \
libssl1.1 \
libzip-dev \
make \
sbcl \

View File

@ -27,14 +27,45 @@ about how to backport a recent enough SBCL here (1.2.5 or newer).
### Redhat / CentOS
You will need to install the Steel Bank Common Lisp package (sbcl) from EPEL, as
well as the freetds-devel package for some shared libraries. With RHEL/CentOS 6,
if the packaged version isn't >=1.3.6, you'll need to build it from source. With
v7, after installing freetds, you also need to create a softlink from the versioned
shared library `libsybdb.so.5` to `libsybdb.so`.
To build and install pgloader the Steel Bank Common Lisp package (sbcl) from EPEL,
and the freetds packages are required.
The above steps are prepared for you with `bootstrap-centos.sh` and `bootstrap-centos7.sh` respectively.
Please report to us if your standard RHEL/CentOS installation required additional steps.
With RHEL/CentOS 6, if the packaged version of sbcl isn't >=1.3.6, you'll need
to build it from source.
It is recommended to build the RPM yourself, see below, to ensure that all installed
files are properly tracked and that you can safely update to newer versions of
pgloader as they're released.
To do an ad hoc build and install run `bootstrap-centos.sh` for CentOS 6 or
`bootstrap-centos7.sh` for CentOS 7 to install the required dependencies.
[Build pgloader](INSTALL.md#building-pgloader).
#### rpmbuild
The spec file in the root of the pgloader repository can be used to build your
own RPM. For production deployments it is recommended that you build this RPM on
a dedicated build box and then copy the RPM to your production environment for
use; it is considered bad practice to have compilers and build tools present in
production environments.
1. Install the [EPEL repo](https://fedoraproject.org/wiki/EPEL#Quickstart).
1. Install rpmbuild dependencies:
sudo yum -y install yum-utils rpmdevtools @"Development Tools"
1. Install pgloader build dependencies:
sudo yum-builddep pgloader.spec
1. Download pgloader source:
spectool -g -R pgloader.spec
1. Build the source and binary RPMs (see `rpmbuild --help` for other build options):
rpmbuild -ba pgloader.spec
### Mac OS X

View File

@ -1,6 +1,6 @@
# pgloader build tool
APP_NAME = pgloader
VERSION = 3.6.1
VERSION = 3.6.2
# use either sbcl or ccl
CL = sbcl
@ -24,7 +24,7 @@ QLDIR = $(BUILDDIR)/quicklisp
MANIFEST = $(BUILDDIR)/manifest.ql
LATEST = $(BUILDDIR)/pgloader-latest.tgz
BUNDLEDIST = 2019-01-07
BUNDLEDIST = 2020-02-18
BUNDLENAME = pgloader-bundle-$(VERSION)
BUNDLEDIR = $(BUILDDIR)/bundle/$(BUNDLENAME)
BUNDLE = $(BUILDDIR)/$(BUNDLENAME).tgz
@ -103,7 +103,6 @@ $(LIBS): $(QLDIR)/setup.lisp
$(CL) $(CL_OPTS) --load $(QLDIR)/setup.lisp \
--eval '(push :pgloader-image *features*)' \
--eval '(setf *print-circle* t *print-pretty* t)' \
--eval '(ql:quickload "pgloader")' \
--eval '(push "$(PWD)/" ql:*local-project-directories*)' \
--eval '(ql:quickload "pgloader")' \
--eval '(quit)'
@ -173,7 +172,7 @@ test: $(PGLOADER)
save: ./src/save.lisp $(LISP_SRC)
$(CL) $(CL_OPTS) --load ./src/save.lisp
check-saved: save
check-saved:
$(MAKE) PGLOADER=$(realpath $(PGLOADER)) CL=$(CL) -C test regress
clean-bundle:

View File

@ -1,8 +1,9 @@
# PGLoader
[![Build Status](https://travis-ci.org/dimitri/pgloader.svg?branch=master)](https://travis-ci.org/dimitri/pgloader)
[![Join the chat at https://gitter.im/dimitri/pgloader](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/dimitri/pgloader?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
[![Docker Build Status](https://img.shields.io/docker/cloud/build/dimitri/pgloader.svg)](https://cloud.docker.com/repository/docker/dimitri/pgloader)
[![Read The Docs Status](https://readthedocs.org/projects/pgloader/badge/?version=latest&style=plastic)](http://pgloader.readthedocs.io/en/latest/)
pgloader is a data loading tool for PostgreSQL, using the `COPY` command.
@ -89,8 +90,12 @@ it builds against current set of dependencies versions.
$ apt-get install sbcl unzip libsqlite3-dev make curl gawk freetds-dev libzip-dev
$ cd /path/to/pgloader
$ make pgloader
$ ./build/bin/pgloader --help
$ make pgloader
$ ./build/bin/pgloader --help
### Building from sources on RedHat/CentOS
See "Redhat / CentOS" in [INSTALL.md](INSTALL.md#redhat--centos)
### Building from sources on macOS
@ -178,9 +183,3 @@ indexes, foreign keys, comments) and parallel loading of the corrected data:
$ createdb pagila
$ pgloader mysql://user@localhost/sakila postgresql:///pagila
See the documentation file `pgloader.1.md` for details. You can compile that
file into a manual page or an HTML page thanks to the `ronn` application:
$ apt-get install ruby-ronn
$ make docs

View File

@ -147,7 +147,7 @@ Schema only, Data only
Repeatable (DROP+CREATE)
By default, pgloader issues DROP statements in the target PostgreSQL
database before issing any CREATE statement, so that you can repeat the
database before issuing any CREATE statement, so that you can repeat the
migration as many times as necessary until migration specifications and
rules are bug free.

View File

@ -1,13 +1,19 @@
Loading DBF data
=================
This command instructs pgloader to load data from a `DBF` file. Here's an
example::
This command instructs pgloader to load data from a `DBF` file. A default
set of casting rules are provided and might be overloaded and appended to by
the command.
Here's an example with a remote HTTP source and some user defined casting
rules::
LOAD DBF
FROM http://www.insee.fr/fr/methodes/nomenclatures/cog/telechargement/2013/dbf/reg2013.dbf
INTO postgresql://user@localhost/dbname
WITH truncate, create table;
WITH truncate, create table
CAST column reg2013.region to integer,
column reg2013.tncc to smallint;
The `dbf` format command accepts the following clauses and options.
@ -51,3 +57,16 @@ When loading from a `DBF` file, the following options are supported:
This options expects as its value the possibly qualified name of the
table to create.
Default DB3 Casting Rules
-------------------------
When migrating from DB3 the following Casting Rules are provided::
type C to text using db3-trim-string
type M to text using db3-trim-string
type N to numeric using db3-numeric-to-pgsql-integer
type I to numeric using db3-numeric-to-pgsql-numeric
type L to boolean using logical-to-boolean
type D to date using db3-date-to-pgsql-date

View File

@ -103,7 +103,7 @@ MySQL Database Migration Options: WITH
--------------------------------------
When loading from a `MySQL` database, the following options are supported,
and the default *WITH* clause is: *no truncate*, *create schema*, *create
and the default *WITH* clause is: *no truncate*, *create
tables*, *include drop*, *create indexes*, *reset sequences*, *foreign
keys*, *downcase identifiers*, *uniquify index names*.
@ -567,6 +567,12 @@ Numbers::
type tinyint to boolean when (= 1 precision) using tinyint-to-boolean
type bit when (= 1 precision) to boolean drop typemod using bits-to-boolean
type bit to bit drop typemod using bits-to-hex-bitstring
type bigint when signed to bigint drop typemod
type bigint when (< 19 precision) to numeric drop typemod
type tinyint when unsigned to smallint drop typemod
type smallint when unsigned to integer drop typemod
type mediumint when unsigned to integer drop typemod
@ -595,12 +601,12 @@ Texts::
Binary::
type binary to bytea
type varbinary to bytea
type tinyblob to bytea
type blob to bytea
type mediumblob to bytea
type longblob to bytea
type binary to bytea using byte-vector-to-bytea
type varbinary to bytea using byte-vector-to-bytea
type tinyblob to bytea using byte-vector-to-bytea
type blob to bytea using byte-vector-to-bytea
type mediumblob to bytea using byte-vector-to-bytea
type longblob to bytea using byte-vector-to-bytea
Date::
@ -638,7 +644,9 @@ Date::
Geometric::
type point to point using pgloader.transforms::convert-mysql-point
type geometry to point using convert-mysql-point
type point to point using convert-mysql-point
type linestring to path using convert-mysql-linestring
Enum types are declared inline in MySQL and separately with a `CREATE TYPE`
command in PostgreSQL, so each column of Enum Type is converted to a type

View File

@ -8,7 +8,7 @@ Redshift database by parsing the output of the `SELECT version()` SQL query.
Redshift as a data source
^^^^^^^^^^^^^^^^^^^^^^^^
Redshit is a variant of PostgreSQL version 8.0.2, which allows pgloader to
Redshift is a variant of PostgreSQL version 8.0.2, which allows pgloader to
work with only a very small amount of adaptation in the catalog queries
used. In other words, migrating from Redshift to PostgreSQL works just the
same as when migrating from a PostgreSQL data source, including the

View File

@ -119,6 +119,7 @@
((:file "api")
(:file "methods")
(:file "md-methods")
(:file "matviews")
(:file "casting-rules")
(:file "files-and-pathnames")
(:file "project-fields")))
@ -130,22 +131,32 @@
;; (:file "csv-database")
(:file "csv")))
(:file "fixed"
:depends-on ("common" "csv"))
(:module "fixed"
:depends-on ("common")
:serial t
:components
((:file "fixed-guess")
(:file "fixed")))
(:file "copy"
:depends-on ("common" "csv"))
(:module "db3"
:serial t
:depends-on ("common" "csv")
:components
((:file "db3-schema")
(:file "db3" :depends-on ("db3-schema"))))
((:file "db3-cast-rules")
(:file "db3-connection")
(:file "db3-schema")
(:file "db3")))
(:module "ixf"
:serial t
:depends-on ("common")
:components
((:file "ixf-schema")
((:file "ixf-cast-rules")
(:file "ixf-connection")
(:file "ixf-schema")
(:file "ixf" :depends-on ("ixf-schema"))))
;(:file "syslog") ; experimental...
@ -155,6 +166,7 @@
:depends-on ("common")
:components
((:file "sqlite-cast-rules")
(:file "sqlite-connection")
(:file "sqlite-schema")
(:file "sqlite")))
@ -163,6 +175,7 @@
:depends-on ("common")
:components
((:file "mssql-cast-rules")
(:file "mssql-connection")
(:file "mssql-schema")
(:file "mssql")
(:file "mssql-index-filters")))
@ -180,7 +193,6 @@
:serial t
:depends-on ("common")
:components ((:file "pgsql-cast-rules")
(:file "pgsql-schema")
(:file "pgsql")))))
;; package pgloader.copy

View File

@ -1,11 +1,20 @@
Summary: extract, transform and load data into PostgreSQL
Name: pgloader
Version: 3.3.2
Version: 3.6.1
Release: 22%{?dist}
License: The PostgreSQL Licence
Group: System Environment/Base
Source: %{name}-%{version}.tar.gz
URL: https://github.com/dimitri/pgloader
Source0: %{url}/archive/v%{version}.tar.gz
BuildRequires: sbcl
BuildRequires: freetds-devel
BuildRequires: sqlite-devel
BuildRequires: zlib-devel
Requires: freetds
Requires: sbcl
Requires: zlib
Requires: sqlite
%description
pgloader imports data from different kind of sources and COPY it into
@ -22,7 +31,7 @@ PostgreSQL. In the MySQL case it's possible to edit CASTing rules from the
pgloader command directly.
%prep
%setup -q -n %{name}
%setup -q -n %{name}-%{version}
%build
%define debug_package %{nil}
@ -35,11 +44,17 @@ mkdir -p $RPM_BUILD_ROOT/etc/prelink.conf.d
echo '-b /usr/bin/pgloader' > $RPM_BUILD_ROOT/etc/prelink.conf.d/%{name}.conf
%files
%doc README.md pgloader.1.md
%doc README.md
%{_bindir}/*
/etc/prelink.conf.d/%{name}.conf
%changelog
* Tue Sep 24 2019 Phil Ingram <pingram.au@gmail.com> - 3.6.1
- Release 3.6.1
- Use Requires and BuildRequires
- Variablise Source0
- Fix Files
* Thu Jan 22 2015 Dimitri Fontaine <dimitri@2ndQuadrant.fr> - 3.2.1.preview-22
- Release 3.2.1.preview

View File

@ -281,7 +281,6 @@
(reindex nil)
(after-schema nil)
distribute
only-tables
including
excluding
set-table-oids
@ -334,7 +333,6 @@
:materialize-views materialize-views
:create-indexes create-indexes
:foreign-keys foreign-keys
:only-tables only-tables
:including including
:excluding excluding)
(mssql::mssql-error (e)

View File

@ -225,6 +225,11 @@
(let ((*self-upgraded-already* t))
(main argv))))
;; --list-encodings, -E
(when list-encodings
(show-encodings)
(uiop:quit +os-code-success+))
;; First care about the root directory where pgloader is supposed to
;; output its data logs and reject files
(let ((root-dir-truename (or (probe-file root-dir)
@ -268,10 +273,6 @@
(usage argv)
(uiop:quit +os-code-error-usage+))
(when list-encodings
(show-encodings)
(uiop:quit +os-code-success+))
(when upgrade-config
(loop for filename in arguments
do

View File

@ -95,7 +95,11 @@
(:syb-int8 (mem-ref data :int8))
(:syb-real (mem-ref data :float))
(:syb-flt8 (mem-ref data :double))
((:syb-datetime :syb-datetime4 :syb-msdate :syb-mstime)
((:syb-datetime
:syb-datetime4
:syb-msdate
:syb-mstime
:syb-msdatetime2)
(with-foreign-pointer (%buf +numeric-buf-sz+)
(let ((count
(%dbconvert %dbproc

View File

@ -52,6 +52,7 @@
#:extension
#:sqltype
#:table
#:matview
#:column
#:index
#:fkey
@ -66,7 +67,7 @@
#:make-schema
#:make-table
#:create-table
#:make-view
#:make-matview
#:make-sqltype
#:make-column
#:make-index
@ -102,6 +103,11 @@
#:table-trigger-list
#:table-citus-rule
#:matview-name
#:matview-source-name
#:matview-schema
#:matview-definition
#:extension-name
#:extension-schema
@ -560,6 +566,17 @@
#:instanciate-table-copy-object
#:concurrency-support
#:filter-list-to-where-clause
#:fetch-columns
#:fetch-indexes
#:fetch-foreign-keys
#:fetch-comments
#:get-column-sql-expression
#:get-column-list
#:format-matview-name
#:create-matviews
#:drop-matviews
;; database cast machinery
#:*default-cast-rules*
#:*cast-rules*
@ -704,6 +721,7 @@
#:create-tables
#:format-vector-row)
(:export #:dbf-connection
#:*db3-default-cast-rules*
#:copy-db3
#:map-rows
#:copy-to
@ -850,7 +868,8 @@
#:*sqlite-default-cast-rules*)
(:import-from #:pgloader.source.db3
#:copy-db3
#:dbf-connection)
#:dbf-connection
#:*db3-default-cast-rules*)
(:import-from #:pgloader.source.ixf
#:copy-ixf
#:ixf-connection)

View File

@ -44,7 +44,7 @@
"non-nil when this build is a release build.")
(defparameter *major-version* "3.6")
(defparameter *minor-version* "1")
(defparameter *minor-version* "2")
(defun git-hash ()
"Return the current abbreviated git hash of the development tree."

View File

@ -13,6 +13,9 @@
(defrule cast-unsigned-guard (and kw-when kw-unsigned)
(:constant (cons :unsigned t)))
(defrule cast-signed-guard (and kw-when kw-signed)
(:constant (cons :signed t)))
;; at the moment we only know about extra auto_increment
(defrule cast-source-extra (and kw-with kw-extra
(or kw-auto-increment
@ -20,10 +23,19 @@
(:lambda (extra)
(cons (third extra) t)))
(defrule cast-source-type (and kw-type trimmed-name)
;; type names may be "double quoted"
(defrule cast-type-name (or double-quoted-namestring
(and (alpha-char-p character)
(* (or (alpha-char-p character)
(digit-char-p character)))))
(:text t))
(defrule cast-source-type (and kw-type cast-type-name)
(:destructure (kw name) (declare (ignore kw)) (list :type name)))
(defrule table-column-name (and namestring "." namestring)
(defrule table-column-name (and maybe-quoted-namestring
"."
maybe-quoted-namestring)
(:destructure (table-name dot column-name)
(declare (ignore dot))
(list :column (cons (text table-name) (text column-name)))))
@ -33,6 +45,7 @@
(:destructure (kw name) (declare (ignore kw)) name))
(defrule cast-source-extra-or-guard (* (or cast-unsigned-guard
cast-signed-guard
cast-default-guard
cast-typemod-guard
cast-source-extra))
@ -44,6 +57,7 @@
(bind (((name-and-type extra-and-guards) source)
((&key (default nil d-s-p)
(typemod nil t-s-p)
(signed nil s-s-p)
(unsigned nil u-s-p)
(auto-increment nil ai-s-p)
(on-update-current-timestamp nil ouct-s-p)
@ -52,17 +66,12 @@
`(,@name-and-type
,@(when t-s-p (list :typemod typemod))
,@(when d-s-p (list :default default))
,@(when s-s-p (list :signed signed))
,@(when u-s-p (list :unsigned unsigned))
,@(when ai-s-p (list :auto-increment auto-increment))
,@(when ouct-s-p (list :on-update-current-timestamp
on-update-current-timestamp))))))
(defrule cast-type-name (or double-quoted-namestring
(and (alpha-char-p character)
(* (or (alpha-char-p character)
(digit-char-p character)))))
(:text t))
(defrule cast-to-type (and kw-to cast-type-name ignore-whitespace)
(:lambda (source)
(bind (((_ type-name _) source))
@ -139,7 +148,12 @@
(string (intern (string-upcase fname) :pgloader.transforms))
(symbol fname))))
(defrule cast-function (and kw-using maybe-qualified-function-name)
(defrule transform-expression sexp
(:lambda (sexp)
(eval sexp)))
(defrule cast-function (and kw-using (or maybe-qualified-function-name
transform-expression))
(:destructure (using symbol) (declare (ignore using)) symbol))
(defun fix-target-type (source target)

View File

@ -55,6 +55,10 @@
(:lambda (ipv4)
(list :ipv4 (text ipv4))))
(defrule ipv6 (and #\[ (+ (or (digit-char-p character) ":")) #\])
(:lambda (ipv6)
(list :ipv6 (text ipv6))))
;;; socket directory is unix only, so we can forbid ":" on the parsing
(defun socket-directory-character-p (char)
(or (member char #.(quote (coerce "/.-_" 'list)))
@ -66,12 +70,41 @@
(declare (ignore unix))
(list :unix (when socket-directory (text socket-directory)))))
(defrule network-name (and namestring (* (and "." namestring)))
;;;
;;; See https://en.wikipedia.org/wiki/Hostname#Restrictions_on_valid_hostnames
;;;
;;; The characters allowed in labels are a subset of the ASCII character
;;; set, consisting of characters a through z, A through Z, digits 0 through
;;; 9, and hyphen.
;;;
;;; This rule is known as the LDH rule (letters, digits, hyphen).
;;;
;;; - Domain names are interpreted in case-independent manner.
;;; - Labels may not start or end with a hyphen.
;;; - An additional rule requires that top-level domain names should not be
;;; all-numeric.
;;;
(defrule network-label-letters-digit (or (alpha-char-p character)
(digit-char-p character)))
(defrule network-label-with-hyphen
(and network-label-letters-digit
(+ (or (and #\- network-label-letters-digit)
network-label-letters-digit)))
(:text t))
(defrule network-label-no-hyphen (+ network-label-letters-digit)
(:text t))
(defrule network-label (or network-label-with-hyphen network-label-no-hyphen)
(:identity t))
(defrule network-hostname (and network-label (* (and "." network-label)))
(:lambda (name)
(let ((host (text name)))
(list :host (unless (string= "" host) host)))))
(defrule hostname (or ipv4 socket-directory network-name)
(defrule hostname (or ipv4 ipv6 socket-directory network-hostname)
(:identity t))
(defun process-hostname (hostname)
@ -79,6 +112,7 @@
(ecase type
(:unix (if name (cons :unix name) :unix))
(:ipv4 name)
(:ipv6 name)
(:host name))))
(defrule dsn-hostname (and (? hostname) (? dsn-port))
@ -87,9 +121,10 @@
(append (list :host (when host (process-hostname host)))
port))))
(defrule dsn-dbname (and "/" (? (* (or (alpha-char-p character)
(digit-char-p character)
punct))))
(defrule dsn-dbname (and "/" (? (or single-quoted-string
(* (or (alpha-char-p character)
(digit-char-p character)
punct)))))
(:lambda (dbn)
(list :dbname (text (second dbn)))))

View File

@ -57,7 +57,9 @@
(defrule load-dbf-optional-clauses (* (or dbf-options
gucs
casts
before-load
after-schema
after-load))
(:lambda (clauses-list)
(alexandria:alist-plist clauses-list)))
@ -65,9 +67,8 @@
;;; dbf defaults to ascii rather than utf-8
(defrule dbf-file-encoding (? (and kw-with kw-encoding encoding))
(:lambda (enc)
(if enc
(bind (((_ _ encoding) enc)) encoding)
:ascii)))
(when enc
(bind (((_ _ encoding) enc)) encoding))))
(defrule load-dbf-command (and dbf-source
(? dbf-file-encoding)
@ -92,11 +93,14 @@
(defun lisp-code-for-loading-from-dbf (dbf-db-conn pg-db-conn
&key
target-table-name
(encoding :ascii)
gucs before after options
encoding
gucs casts options
before after-schema after
&allow-other-keys)
`(lambda ()
(let* (,@(pgsql-connection-bindings pg-db-conn gucs)
(let* ((*default-cast-rules* ',*db3-default-cast-rules*)
(*cast-rules* ',casts)
,@(pgsql-connection-bindings pg-db-conn gucs)
,@(batch-control-bindings options)
,@(identifier-case-binding options)
(on-error-stop (getf ',options :on-error-stop))
@ -114,6 +118,7 @@
(copy-database source
,@(remove-batch-control-option options)
:after-schema ',after-schema
:on-error-stop on-error-stop
:create-indexes nil
:foreign-keys nil
@ -124,7 +129,8 @@
(defrule load-dbf-file load-dbf-command
(:lambda (command)
(bind (((source encoding pg-db-uri table-name
&key options gucs before after) command))
&key options gucs casts before after-schema after)
command))
(cond (*dry-run*
(lisp-code-for-dbf-dry-run source pg-db-uri))
(t
@ -132,6 +138,8 @@
:target-table-name table-name
:encoding encoding
:gucs gucs
:casts casts
:before before
:after-schema after-schema
:after after
:options options))))))

View File

@ -7,6 +7,9 @@
(in-package #:pgloader.parser)
(defrule option-fixed-header (and kw-fixed kw-header)
(:constant (cons :header t)))
(defrule hex-number (and "0x" (+ (hexdigit-char-p character)))
(:lambda (hex)
(bind (((_ digits) hex))
@ -55,7 +58,8 @@
option-drop-indexes
option-disable-triggers
option-identifiers-case
option-skip-header))
option-skip-header
option-fixed-header))
(defrule fixed-options (and kw-with
(and fixed-option (* (and comma fixed-option))))
@ -94,7 +98,7 @@
(alexandria:alist-plist clauses-list)))
(defrule load-fixed-cols-file-command (and fixed-source (? file-encoding)
fixed-source-field-list
(? fixed-source-field-list)
target
(? csv-target-table)
(? csv-target-column-list)
@ -144,7 +148,8 @@
:encoding ,encoding
:fields ',fields
:columns ',columns
:skip-lines ,(or (getf options :skip-line) 0))))
:skip-lines ,(or (getf options :skip-lines) 0)
:header ,(getf options :header))))
(copy-database source
,@ (when worker-count

View File

@ -59,6 +59,7 @@
(defrule load-ixf-optional-clauses (* (or ixf-options
gucs
before-load
after-schema
after-load))
(:lambda (clauses-list)
(alexandria:alist-plist clauses-list)))
@ -76,8 +77,8 @@
(defun lisp-code-for-loading-from-ixf (ixf-db-conn pg-db-conn
&key
target-table-name
gucs before after options
target-table-name gucs options
before after-schema after
&allow-other-keys)
`(lambda ()
(let* (,@(pgsql-connection-bindings pg-db-conn gucs)
@ -101,6 +102,7 @@
options
:extras '(:timezone))
:on-error-stop on-error-stop
:after-schema ',after-schema
:foreign-keys nil
:reset-sequences nil)
@ -109,7 +111,8 @@
(defrule load-ixf-file load-ixf-command
(:lambda (command)
(bind (((source pg-db-uri table-name
&key options gucs before after) command))
&key options gucs before after-schema after)
command))
(cond (*dry-run*
(lisp-code-for-csv-dry-run pg-db-uri))
(t
@ -117,5 +120,6 @@
:target-table-name table-name
:gucs gucs
:before before
:after-schema after-schema
:after after
:options options))))))

View File

@ -143,6 +143,7 @@
(def-keyword-rule "per")
(def-keyword-rule "thread")
(def-keyword-rule "range")
(def-keyword-rule "signed")
(def-keyword-rule "unsigned")
;; option for loading from an archive
(def-keyword-rule "archive")

View File

@ -9,14 +9,23 @@
(defrule view-name (or qualified-table-name maybe-quoted-namestring)
(:lambda (vn)
(etypecase vn
(cons vn)
(string (cons nil vn)))))
(cons (let* ((schema-name (apply-identifier-case (cdr vn)))
(schema (make-schema :source-name (cdr vn)
:name schema-name)))
(make-matview :source-name vn
:name (apply-identifier-case (car vn))
:schema schema)))
(string (make-matview :source-name (cons nil vn)
:schema nil
:name (apply-identifier-case vn))))))
(defrule view-sql (and kw-as dollar-quoted)
(:destructure (as sql) (declare (ignore as)) sql))
(defrule view-definition (and view-name (? view-sql))
(:destructure (name sql) (cons name sql)))
(:destructure (matview sql)
(setf (matview-definition matview) sql)
matview))
(defrule another-view-definition (and comma-separator view-definition)
(:lambda (source)

View File

@ -31,6 +31,7 @@
option-create-tables
option-create-schemas
option-create-indexes
option-index-names
option-reset-sequences
option-foreign-keys
option-encoding
@ -86,6 +87,7 @@
materialize-views
distribute-commands
before-load
after-schema
after-load
including-like-in-schema
excluding-like-in-schema))
@ -141,7 +143,7 @@
(defun lisp-code-for-loading-from-mssql (ms-db-conn pg-db-conn
&key
gucs mssql-gucs
casts before after
casts before after after-schema
options distribute views
alter-schema alter-table
including excluding
@ -170,6 +172,7 @@
:excluding ',excluding
:alter-schema ',alter-schema
:alter-table ',alter-table
:after-schema ',after-schema
:materialize-views ',views
:distribute ',distribute
:set-table-oids t
@ -182,7 +185,7 @@
(:lambda (source)
(bind (((ms-db-uri pg-db-uri
&key
gucs mssql-gucs casts views before after
gucs mssql-gucs casts views before after-schema after
alter-schema alter-table distribute
including excluding options)
source))
@ -195,6 +198,7 @@
:casts casts
:views views
:before before
:after-schema after-schema
:after after
:alter-schema alter-schema
:alter-table alter-table

View File

@ -89,6 +89,7 @@
excluding-matching
decoding-tables-as
before-load
after-schema
after-load
distribute-commands))
(:lambda (clauses-list)
@ -164,8 +165,10 @@
(defun lisp-code-for-loading-from-mysql (my-db-conn pg-db-conn
&key
gucs mysql-gucs
casts views before after options
alter-table alter-schema distribute
casts options views
before after after-schema
alter-table alter-schema
distribute
((:including incl))
((:excluding excl))
((:decoding decoding-as))
@ -192,6 +195,7 @@
:materialize-views ',views
:alter-table ',alter-table
:alter-schema ',alter-schema
:after-schema ',after-schema
:distribute ',distribute
:set-table-oids t
:on-error-stop on-error-stop
@ -204,8 +208,8 @@
(destructuring-bind (my-db-uri
pg-db-uri
&key
gucs mysql-gucs casts views before after options
alter-table alter-schema distribute
gucs mysql-gucs casts views before after after-schema
options alter-table alter-schema distribute
including excluding decoding)
source
(cond (*dry-run*
@ -218,6 +222,7 @@
:views views
:before before
:after after
:after-schema after-schema
:options options
:alter-table alter-table
:alter-schema alter-schema

View File

@ -17,11 +17,23 @@
(defrule sexp-symbol (and (symbol-first-character-p character)
(* (symbol-character-p character)))
(:lambda (schars)
(pgloader.transforms:intern-symbol
(text schars)
'(("nil" . cl:nil)
("precision" . pgloader.transforms::precision)
("scale" . pgloader.transforms::scale)))))
(if (char= #\: (car schars))
(read-from-string (text schars))
(pgloader.transforms:intern-symbol
(text schars)
'(("nil" . cl:nil)
("cl:t" . cl:t)
("precision" . pgloader.transforms::precision)
("scale" . pgloader.transforms::scale)
("split-sequence" . split-sequence:split-sequence))))))
(defrule sexp-char (and #\# #\\
(alpha-char-p character)
(+ (or (alpha-char-p character)
(digit-char-p character)
#\_)))
(:lambda (char-name)
(read-from-string (text char-name))))
(defrule sexp-string-char (or (not-doublequote character) (and #\\ #\")))
@ -40,7 +52,7 @@
(cons car cdr)))
(defrule sexp-atom (and ignore-whitespace
(or sexp-string sexp-integer sexp-symbol))
(or sexp-char sexp-string sexp-integer sexp-symbol))
(:lambda (atom)
(bind (((_ a) atom)) a)))

View File

@ -92,7 +92,8 @@ load database
(defun lisp-code-for-loading-from-sqlite (sqlite-db-conn pg-db-conn
&key
gucs casts before after options
gucs casts options
before after-schema after
alter-table alter-schema
((:including incl))
((:excluding excl))
@ -116,6 +117,7 @@ load database
(copy-database source
:alter-table ',alter-table
:alter-schema ',alter-schema
:after-schema ',after-schema
:set-table-oids t
:including ',incl
:excluding ',excl
@ -129,7 +131,7 @@ load database
(destructuring-bind (sqlite-uri
pg-db-uri
&key
gucs casts before after options
gucs casts before after after-schema options
alter-table alter-schema
including excluding)
source
@ -140,6 +142,7 @@ load database
:gucs gucs
:casts casts
:before before
:after-schema after-schema
:after after
:options options
:alter-table alter-table

View File

@ -31,7 +31,7 @@
(defrule sqlite-typemod (or sqlite-double-typemod sqlite-single-typemod))
(defrule sqlite-type-name (and (* extra-qualifiers)
(+ (alpha-char-p character))
(+ (or (alpha-char-p character) #\_))
(* extra-qualifiers)
ignore-whitespace
(? sqlite-typemod)

View File

@ -62,6 +62,34 @@
(log-message :info "Entering error recovery.")
;; Not all COPY errors produce a COPY error message. Foreign key violation
;; produce a detailed message containing the data that we can't insert. In
;; that case we're going to insert every single row of the batch, one at a
;; time, and handle the error(s) individually.
;;
(unless (parse-copy-error-context (database-error-context condition))
(let ((table-name (format-table-name table))
(first-error t))
(loop :repeat (batch-count batch)
:for pos :from 0
:do (handler-case
(incf pos
(copy-partial-batch table-name columns batch 1 pos))
(postgresql-retryable (condition)
(pomo:execute "ROLLBACK")
(process-bad-row table condition (aref (batch-data batch) pos))
(if first-error
;; the first error has been logged about already
(setf first-error nil)
(log-message :error "PostgreSQL [~s] ~a"
table-name condition))
(incf nb-errors)))))
;; that's all folks, we're done.
(return-from retry-batch nb-errors))
;; now deal with the COPY error case where we have a line number and have
;; the opportunity to be smart about it.
(loop
:with table-name := (format-table-name table)
:with next-error := (parse-copy-error-context
@ -70,7 +98,7 @@
:while (< current-batch-pos (batch-count batch))
:do
(progn ; indenting helper
(progn ; indenting helper
(log-message :debug "pos: ~s ; err: ~a" current-batch-pos next-error)
(when (= current-batch-pos next-error)
(log-message :info "error recovery at ~d/~d, processing bad row"

View File

@ -67,9 +67,13 @@
(log-message :error "PostgreSQL [~s] ~a" table-name condition)
;; clean the current transaction before retrying new ones
(let ((errors
(retry-batch table columns batch condition)))
(handler-case
(retry-batch table columns batch condition)
(condition (e)
(log-message :error "BUG: failed to retry-batch: ~a" e)
(batch-count batch)))))
(log-message :debug "retry-batch found ~d errors" errors)
(update-stats :data table :rows (- errors))))
(update-stats :data table :errs errors :rows (- errors))))
(postgresql-unavailable (condition)

View File

@ -411,10 +411,12 @@
;;; PostgreSQL 8.0.2 on i686-pc-linux-gnu, compiled by GCC gcc (GCC) 3.4.2 20041017 (Red Hat 3.4.2-6.fc3), Redshift 1.0.2058
;;; PostgreSQL 10.1 on x86_64-apple-darwin14.5.0, compiled by Apple LLVM version 7.0.0 (clang-700.1.76), 64-bit
;;; PostgreSQL 10.6 (Ubuntu 10.6-1.pgdg14.04+1) on x86_64-pc-linux-gnu, compiled by gcc (Ubuntu 4.8.4-2ubuntu1~14.04.4) 4.8.4, 64-bit
;;; PostgreSQL 10.6, compiled by Visual C++ build 1800, 64-bit
(defun parse-postgresql-version-string (version-string)
"Parse PostgreSQL select version() output."
(cl-ppcre:register-groups-bind (full-version maybe-variant)
("PostgreSQL ([0-9.]+) [^,]+, [^,]+, (.*)" version-string)
(cl-ppcre:register-groups-bind (full-version maybe-os maybe-variant)
("PostgreSQL ([0-9.]+)( [^,]+)?, [^,]+, (.*)" version-string)
(declare (ignore maybe-os))
(let* ((version-dots (split-sequence:split-sequence #\. full-version))
(major-version (if (= 3 (length version-dots))
(format nil "~a.~a"

View File

@ -148,7 +148,7 @@
similar enough that we may continue with the migration."
(let ((source-type-name (get-type-name source-column))
(target-type-name (column-type-name target-column)))
(or (string= source-type-name target-type-name)
(or (string-equal source-type-name target-type-name)
(member target-type-name (cdr (assoc source-type-name *type-name-mapping*
:test #'string=))
:test #'string=))))
:test #'string-equal))))

View File

@ -95,7 +95,7 @@
:collect schema-name)))
(when missing-schemas
(let ((sql (format nil
"ALTER DATABASE ~a SET search_path TO ~{~a~^, ~};"
"ALTER DATABASE ~s SET search_path TO ~{~a~^, ~};"
dbname
(append search-path missing-schemas))))
(pgsql-execute-with-timing section
@ -233,8 +233,15 @@
(let ((fk-sql-list
(loop :for table :in (table-list catalog)
:append (loop :for fkey :in (table-fkey-list table)
:for sql := (format-create-sql fkey)
:collect sql)
;; we might have loaded fkeys referencing tables that
;; have not been included in (or have been excluded
;; from) the load
:unless (and (fkey-table fkey)
(fkey-foreign-table fkey))
:do (log-message :debug "Skipping foreign key ~a" fkey)
:when (and (fkey-table fkey)
(fkey-foreign-table fkey))
:collect (format-create-sql fkey))
:append (loop :for index :in (table-index-list table)
:do (loop :for fkey :in (index-fk-deps index)
:for sql := (format-create-sql fkey)

View File

@ -17,4 +17,4 @@
(rule-col-name (column-name (citus-distributed-rule-using rule))))
(format stream "SELECT create_distributed_table('~a', '~a');"
(format-table-name rule-table)
(apply-identifier-case rule-col-name))))
(ensure-unquoted rule-col-name))))

View File

@ -114,9 +114,8 @@
"Get PostgreSQL schema name where to locate TABLE-NAME by following the
current search_path rules. A PostgreSQL connection must be opened."
(make-schema :name
(pomo:query (format nil
(sql "/pgsql/query-table-schema.sql")
(table-name table))
(pomo:query (sql "/pgsql/query-table-schema.sql"
(table-name table))
:single)))
(defun make-including-expr-from-view-names (view-names)
@ -188,21 +187,18 @@
"Get the list of PostgreSQL column names per table."
(loop :for (schema-name table-name table-oid
name type typmod notnull default extra)
:in
(query nil
(format nil
(sql "/pgsql/list-all-columns.sql")
table-type-name
including ; do we print the clause?
(filter-list-to-where-clause including
nil
"n.nspname"
"c.relname")
excluding ; do we print the clause?
(filter-list-to-where-clause excluding
nil
"n.nspname"
"c.relname")))
:in (query nil (sql "/pgsql/list-all-columns.sql"
table-type-name
including ; do we print the clause?
(filter-list-to-where-clause including
nil
"n.nspname"
"c.relname")
excluding ; do we print the clause?
(filter-list-to-where-clause excluding
nil
"n.nspname"
"c.relname")))
:do
(let* ((schema (maybe-add-schema catalog schema-name))
(table (maybe-add-table schema table-name :oid table-oid))
@ -223,21 +219,19 @@
:for (schema-name name oid
table-schema table-name
primary unique cols sql conname condef)
:in (query nil
(format nil
(sql (sql-url-for-variant "pgsql"
"list-all-indexes.sql"
pgversion))
including ; do we print the clause?
(filter-list-to-where-clause including
nil
"rn.nspname"
"r.relname")
excluding ; do we print the clause?
(filter-list-to-where-clause excluding
nil
"rn.nspname"
"r.relname")))
:in (query nil (sql (sql-url-for-variant "pgsql"
"list-all-indexes.sql"
pgversion)
including ; do we print the clause?
(filter-list-to-where-clause including
nil
"rn.nspname"
"r.relname")
excluding ; do we print the clause?
(filter-list-to-where-clause excluding
nil
"rn.nspname"
"r.relname")))
:do (let* ((schema (find-schema catalog schema-name))
(tschema (find-schema catalog table-schema))
(table (find-table tschema table-name))
@ -265,29 +259,27 @@
conoid pkeyoid conname condef
cols fcols
updrule delrule mrule deferrable deferred)
:in (query nil
(format nil
(sql "/pgsql/list-all-fkeys.sql")
including ; do we print the clause (table)?
(filter-list-to-where-clause including
nil
"n.nspname"
"c.relname")
excluding ; do we print the clause (table)?
(filter-list-to-where-clause excluding
nil
"n.nspname"
"c.relname")
including ; do we print the clause (ftable)?
(filter-list-to-where-clause including
nil
"nf.nspname"
"cf.relname")
excluding ; do we print the clause (ftable)?
(filter-list-to-where-clause excluding
nil
"nf.nspname"
"cf.relname")))
:in (query nil (sql "/pgsql/list-all-fkeys.sql"
including ; do we print the clause (table)?
(filter-list-to-where-clause including
nil
"n.nspname"
"c.relname")
excluding ; do we print the clause (table)?
(filter-list-to-where-clause excluding
nil
"n.nspname"
"c.relname")
including ; do we print the clause (ftable)?
(filter-list-to-where-clause including
nil
"nf.nspname"
"cf.relname")
excluding ; do we print the clause (ftable)?
(filter-list-to-where-clause excluding
nil
"nf.nspname"
"cf.relname")))
:do (flet ((pg-fk-rule-to-action (rule)
(case rule
(#\a "NO ACTION")
@ -350,11 +342,9 @@
(when pkey-oid-list
(loop :for (schema-name table-name fschema-name ftable-name
conoid conname condef index-oid)
:in (query nil
(format nil
(sql "/pgsql/list-missing-fk-deps.sql")
pkey-oid-list
(or fkey-oid-list (list -1))))
:in (query nil (sql "/pgsql/list-missing-fk-deps.sql"
pkey-oid-list
(or fkey-oid-list (list -1))))
;;
;; We don't need to reference the main catalog entries for the tables
;; here, as the only goal is to be sure to DROP then CREATE again the
@ -403,9 +393,8 @@
:in (ecase variant
(:pgdg
;; use the SELECT ... FROM (VALUES ...) variant
(query nil (format nil
(sql "/pgsql/list-table-oids.sql")
(mapcar #'format-table-name table-list))))
(query nil (sql "/pgsql/list-table-oids.sql"
(mapcar #'format-table-name table-list))))
(:redshift
;; use the TEMP TABLE variant in Redshift, which doesn't
;; have proper support for VALUES (landed in PostgreSQL 8.2)
@ -433,19 +422,17 @@
(defun list-all-sqltypes (catalog &key including excluding)
"Set the catalog's schema extension list and sqltype list"
(loop :for (schema-name extension-name type-name enum-values)
:in (query nil
(format nil
(sql "/pgsql/list-all-sqltypes.sql")
including ; do we print the clause?
(filter-list-to-where-clause including
nil
"n.nspname"
"c.relname")
excluding ; do we print the clause?
(filter-list-to-where-clause excluding
nil
"n.nspname"
"c.relname")))
:in (query nil (sql "/pgsql/list-all-sqltypes.sql"
including ; do we print the clause?
(filter-list-to-where-clause including
nil
"n.nspname"
"c.relname")
excluding ; do we print the clause?
(filter-list-to-where-clause excluding
nil
"n.nspname"
"c.relname")))
:do
(let* ((schema (maybe-add-schema catalog schema-name))
(sqltype

View File

@ -7,8 +7,8 @@ with seqattr as
(
select adrelid,
adnum,
adsrc,
case when adsrc ~~ 'nextval'
pg_get_expr(d.adbin, d.adrelid) as adsrc,
case when pg_get_expr(d.adbin, d.adrelid) ~~ 'nextval'
then substring(pg_get_expr(d.adbin, d.adrelid)
from '''([^'']+)'''
)
@ -23,7 +23,9 @@ with seqattr as
else null
end as typmod,
attnotnull,
case when atthasdef then def.adsrc end as default,
case when atthasdef
then pg_get_expr(def.adbin, def.adrelid)
end as default ,
case when s.seqname is not null then 'auto_increment' end as extra
from pg_class c
join pg_namespace n on n.oid = c.relnamespace

View File

@ -81,4 +81,4 @@
:toplevel #'cl-user::pgloader-image-main
:executable t
:save-runtime-options t
:compression t))
:compression (uiop:featurep :sb-core-compression)))

View File

@ -116,7 +116,6 @@
(defgeneric fetch-metadata (db-copy catalog
&key
materialize-views
only-tables
create-indexes
foreign-keys
including
@ -127,3 +126,60 @@
(defgeneric instanciate-table-copy-object (db-copy table)
(:documentation "Create a new instance for copying TABLE data."))
;;;
;;; Database source schema introspection API
;;;
;;; The methods for those function query the source database catalogs and
;;; populate pgloader's internal representation of its catalog.
;;;
;;; On some source systems (such as MySQL) a single schema can be addressed
;;; at a time, and the catalog object might be a schema directly.
;;;
;; NOTE(review): judging from the PostgreSQL-side callers visible in this
;; commit, SCHEMA-COL and TABLE-COL name the catalog columns the generated
;; clause compares against, and NOT inverts the filter — confirm against
;; each source's method.
(defgeneric filter-list-to-where-clause (db-copy filter-list
                                         &key
                                           not
                                           schema-col
                                           table-col)
  (:documentation "Transform a filter-list into SQL expression for DB-COPY."))
;; Source-catalog introspection entry points.  Each method populates the
;; given CATALOG object from the source database behind DB-COPY.
;; INCLUDING/EXCLUDING restrict which schemas/tables are inspected —
;; presumably the same filter lists filter-list-to-where-clause consumes;
;; confirm per source method.
(defgeneric fetch-columns (catalog db-copy &key table-type including excluding)
  (:documentation
   "Get the list of schema, tables and columns from the source database."))

(defgeneric fetch-indexes (catalog db-copy &key including excluding)
  (:documentation "Get the list of indexes from the source database."))

(defgeneric fetch-foreign-keys (catalog db-copy &key including excluding)
  (:documentation "Get the list of foreign keys from the source database."))

(defgeneric fetch-comments (catalog db-copy &key including excluding)
  (:documentation "Get the list of comments from the source database."))
;;;
;;; We're going to generate SELECT * FROM table; queries to fetch the data
;;; and COPY it to the PostgreSQL target database. In reality we don't use
;;; SELECT *, and in many interesting cases we have to generate some SQL
;;; expression to fetch the source values in a format we can then either
;;; process in pgloader or just send-over as-is to Postgres.
;;;
(defgeneric get-column-sql-expression (db-copy name type)
  (:documentation
   "Generate SQL expression for the SELECT clause for given column."))

;; NOTE(review): the parameter is spelled COPY-DB here while the sibling
;; generics use DB-COPY — harmless in a defgeneric lambda list, but worth
;; unifying for readability.
(defgeneric get-column-list (copy-db)
  (:documentation
   "Generate the SQL projection column list for the SELECT clause."))
;;;
;;; Materialized Views support
;;;
;; Materialized Views support: views created in the source database for the
;; duration of the migration, then dropped.
(defgeneric format-matview-name (matview copy)
  (:documentation "Format the materialized view name."))

(defgeneric create-matviews (matview-list db-copy)
  (:documentation "Create Materialized Views."))

(defgeneric drop-matviews (matview-list db-copy)
  (:documentation "Drop Materialized Views."))

View File

@ -28,6 +28,7 @@
((:column rule-source-column) nil c-s-p)
((:typemod typemod-expr) nil tm-s-p)
((:default rule-source-default) nil d-s-p)
((:signed rule-signed) nil s-s-p)
((:unsigned rule-unsigned) nil u-s-p)
((:not-null rule-source-not-null) nil n-s-p)
((:auto-increment rule-source-auto-increment))
@ -57,10 +58,11 @@
;; otherwide, we do the full dance
(and
(or (and t-s-p (string= type rule-source-type)))
(or (and t-s-p (string-equal type rule-source-type)))
(or (null tm-s-p) (when typemod
(typemod-expr-matches-p typemod-expr typemod)))
(or (null d-s-p) (string= default rule-source-default))
(or (null d-s-p) (string-equal default rule-source-default))
(or (null s-s-p) (eq unsigned (not rule-signed)))
(or (null u-s-p) (eq unsigned rule-unsigned))
(or (null n-s-p) (eq not-null rule-source-not-null))

View File

@ -0,0 +1,61 @@
;;;
;;; Materialized Views support is quite similar from one DB engine to another.
;;;
;; It happens that the view definition is given by the user, so pgloader is
;; not concerned with that part of the SQL compatibility. The common
;; implementation uses the following two SQL commands:
;;;
;;; CREATE VIEW <schema>.<name> AS <sql>
;;; DROP VIEW <schema>.<name>, <schema>.<name>, ...;
;;;
(in-package :pgloader.sources)
(defmethod format-matview-name (matview (copy db-copy))
  "Return MATVIEW's name as a string, qualified with its quoted schema name
   when the materialized view carries one."
  (declare (ignore copy))
  ;; BUG FIX: the schema name was derived from an unbound variable SCHEMA;
  ;; use the matview's own schema object instead.
  (let ((schema-name (when (matview-schema matview)
                       (schema-source-name (matview-schema matview))))
        ;; matview-source-name's cdr is kept — presumably the name part of
        ;; a (schema . name) cons; confirm against the matview struct.
        (view-name (cdr (matview-source-name matview))))
    ;; ~@[~s.~] prints the "schema". prefix only when schema-name is non-nil
    (format nil "~@[~s.~]~a" schema-name view-name)))
(defmethod create-matviews (matview-list copy)
  "Create Materialized Views as per the pgloader command."
  ;; :all means "take every existing view", with no user-supplied
  ;; definitions: nothing to create in that case
  (unless (eq :all matview-list)
    ;; only consider the entries that come with an SQL definition
    (let ((views (remove-if #'null matview-list :key #'matview-definition)))
      (when views
        (loop :for mv :in views
           :for sql := (format nil
                               "CREATE VIEW ~a AS ~a"
                               (format-matview-name mv copy)
                               (matview-definition mv))
           :do (progn
                 (log-message :info "SOURCE: ~a;" sql)
                 ;; in the delivered binary image, just run the query
                 #+pgloader-image
                 (query (source-db copy) sql)
                 ;; when run from sources (interactively), offer restarts
                 ;; so that an already-existing view does not abort the
                 ;; whole migration
                 #-pgloader-image
                 (restart-case
                     (query (source-db copy) sql)
                   (use-existing-view ()
                     :report "Use the already existing view and continue"
                     nil)
                   (replace-view ()
                     :report
                     "Replace the view with the one from pgloader's command"
                     (let ((drop-sql (format nil "DROP VIEW ~a"
                                             (format-matview-name mv copy))))
                       (log-message :info "SOURCE: ~a;" drop-sql)
                       ;; drop the materialized view, then create it again
                       (query (source-db copy) drop-sql)
                       (query (source-db copy) sql))))))))))
(defmethod drop-matviews (matview-list copy)
  "Drop the views pgloader itself created for this migration: the entries
   of MATVIEW-LIST that carry an SQL definition."
  (unless (eq :all matview-list)
    (let* ((created-views (remove-if #'null matview-list
                                     :key #'matview-definition))
           (view-names (loop :for mv :in created-views
                          :collect (format-matview-name mv copy))))
      (when created-views
        ;; drop them all in a single statement
        (let ((drop-sql (format nil "DROP VIEW ~{~a~^, ~}" view-names)))
          (log-message :info "SOURCE: ~a;" drop-sql)
          (query (source-db copy) drop-sql))))))

View File

@ -0,0 +1,60 @@
;;;
;;; Tools to handle MySQL data type casting rules
;;;
(in-package :pgloader.source.db3)
;;;
;;; The default DB3 Type Casting Rules
;;;
(defparameter *db3-default-cast-rules*
  `(;; "C" fields: text, right-padding trimmed by the transform
    (:source (:type "C")
     :target (:type "text")
     :using pgloader.transforms::db3-trim-string)

    ;; "N" fields: numeric covers both integers and floats
    (:source (:type "N")
     :target (:type "numeric")
     :using pgloader.transforms::db3-numeric-to-pgsql-numeric)

    (:source (:type "I")
     :target (:type "integer")
     :using pgloader.transforms::db3-numeric-to-pgsql-integer)

    (:source (:type "Y")
     :target (:type "bigint")
     :using pgloader.transforms::db3-numeric-to-pgsql-integer)

    ;; "+" fields become a PostgreSQL serial
    (:source (:type "+")
     :target (:type "serial")
     :using pgloader.transforms::db3-numeric-to-pgsql-integer)

    (:source (:type "F")
     :target (:type "double precision")
     :using pgloader.transforms::float-to-string)

    ;; "L" logical fields map to a proper boolean
    (:source (:type "L")
     :target (:type "boolean")
     :using pgloader.transforms::logical-to-boolean)

    ;; "D" fields: DB3 dates carry no timezone
    (:source (:type "D")
     :target (:type "date")
     :using pgloader.transforms::db3-date-to-pgsql-date)

    ;; "M" (memo) fields are loaded as plain text
    (:source (:type "M")
     :target (:type "text")
     :using pgloader.transforms::db3-trim-string)

    ;; "0" fields: the transform renders the bits as a hex bitstring
    (:source (:type "0")
     :target (:type "bit(8)")
     :using pgloader.transforms::bits-to-hex-bitstring))
  "Data Type Casting rules to migrate from DB3 to PostgreSQL")
;; Column definition as read from a DB3 file header.  Only NAME, TYPE (a
;; one-character type code such as "C" or "N", see *db3-default-cast-rules*)
;; and LENGTH come from the file; DEFAULT, NULLABLE and EXTRA exist so that
;; CAST below can feed apply-casting-rules its full argument list.
(defstruct (db3-coldef
             (:constructor make-db3-coldef (name type length)))
  name type length default (nullable t) extra)
(defmethod cast ((field db3-coldef) &key table)
  "Run the casting rules over FIELD and return its PostgreSQL column
   definition, in the context of TABLE."
  (with-slots (name type length default nullable extra) field
    ;; TYPE is passed twice — presumably apply-casting-rules wants both a
    ;; data type and a column type, which coincide for DB3; confirm.
    (apply-casting-rules (table-name table)
                         name type type default nullable extra)))

View File

@ -0,0 +1,35 @@
;;;
;;; Tools to handle the DBF file format
;;;
(in-package :pgloader.source.db3)
;; A DBF source is a plain file on disk: reuse fd-connection and add one
;; slot holding the parsed db3 header object (filled by open-connection).
;; NOTE(review): the initarg is the bare symbol DB3, not the keyword :db3 —
;; confirm that is intended.
(defclass dbf-connection (fd-connection)
  ((db3 :initarg db3 :accessor fd-db3))
  (:documentation "pgloader connection parameters for DBF files."))
(defmethod initialize-instance :after ((dbfconn dbf-connection) &key)
  "Assign the type slot to dbf."
  ;; the inherited TYPE slot identifies the connection kind to pgloader
  (setf (slot-value dbfconn 'type) "dbf"))
(defmethod open-connection ((dbfconn dbf-connection) &key)
  "Open the DBF file as a binary input stream and parse its db3 header."
  (setf (conn-handle dbfconn)
        (open (fd-path dbfconn)
              :direction :input
              :element-type '(unsigned-byte 8)))
  ;; parse the header right away and keep the db3 object around: it carries
  ;; the field definitions later used by fetch-columns and map-rows
  (let ((db3 (make-instance 'db3:db3 :filename (fd-path dbfconn))))
    (db3:load-header db3 (conn-handle dbfconn))
    (setf (fd-db3 dbfconn) db3))
  ;; return the connection object, as the other open-connection methods do
  dbfconn)
(defmethod close-connection ((dbfconn dbf-connection))
  "Close the db3 memo side-car file and the main stream, then clear both
   handles so the connection may be reopened cleanly."
  (prog1 dbfconn
    (db3:close-memo (fd-db3 dbfconn))
    (close (conn-handle dbfconn))
    (setf (conn-handle dbfconn) nil)
    (setf (fd-db3 dbfconn) nil)))
(defmethod clone-connection ((c dbf-connection))
  "Clone connection C, sharing its already-parsed db3 header object."
  (let ((twin (change-class (call-next-method c) 'dbf-connection)))
    (setf (fd-db3 twin) (fd-db3 c))
    twin))

View File

@ -4,92 +4,24 @@
(in-package :pgloader.source.db3)
(defclass dbf-connection (fd-connection)
((db3 :initarg db3 :accessor fd-db3))
(:documentation "pgloader connection parameters for DBF files."))
(defclass copy-db3 (db-copy)
((encoding :accessor encoding ; file encoding
:initarg :encoding))
(:documentation "pgloader DBF Data Source"))
(defmethod initialize-instance :after ((dbfconn dbf-connection) &key)
"Assign the type slot to dbf."
(setf (slot-value dbfconn 'type) "dbf"))
(defmethod initialize-instance :after ((db3 copy-db3) &key)
"Add a default value for transforms in case it's not been provided."
(setf (slot-value db3 'source)
(let ((table-name (pathname-name (fd-path (source-db db3)))))
(make-table :source-name table-name
:name (apply-identifier-case table-name)))))
(defmethod open-connection ((dbfconn dbf-connection) &key)
(setf (conn-handle dbfconn)
(open (fd-path dbfconn)
:direction :input
:element-type '(unsigned-byte 8)))
(let ((db3 (make-instance 'db3:db3)))
(db3:load-header db3 (conn-handle dbfconn))
(setf (fd-db3 dbfconn) db3))
dbfconn)
(defmethod close-connection ((dbfconn dbf-connection))
(close (conn-handle dbfconn))
(setf (conn-handle dbfconn) nil
(fd-db3 dbfconn) nil)
dbfconn)
(defmethod clone-connection ((c dbf-connection))
(let ((clone (change-class (call-next-method c) 'dbf-connection)))
(setf (fd-db3 clone) (fd-db3 c))
clone))
(defvar *db3-pgsql-type-mapping*
'(("C" . "text") ; ignore field-length
("N" . "numeric") ; handle both integers and floats
("L" . "boolean") ; PostgreSQL compatible representation
("D" . "date") ; no TimeZone in DB3 files
("M" . "text"))) ; not handled yet
(defstruct (db3-field
(:constructor make-db3-field (name type length)))
name type length)
(defun list-all-columns (db3 table)
(defmethod fetch-columns ((table table) (db3 copy-db3)
&key &allow-other-keys
&aux (dbfconn (fd-db3 (source-db db3))))
"Return the list of columns for the given DB3-FILE-NAME."
(loop
:for field :in (db3::fields db3)
:do (add-field table (make-db3-field (db3::field-name field)
(db3::field-type field)
(db3::field-length field)))))
(defmethod cast ((field db3-field) &key &allow-other-keys)
"Return the PostgreSQL type definition given the DB3 one."
(let* ((type (db3-field-type field))
(transform
(cond ((string= type "C") #'db3-trim-string)
((string= type "N") #'db3-numeric-to-pgsql-numeric)
((string= type "L") #'logical-to-boolean)
((string= type "D") #'db3-date-to-pgsql-date)
(t nil))))
(make-column :name (apply-identifier-case (db3-field-name field))
:type-name (cdr (assoc type *db3-pgsql-type-mapping*
:test #'string=))
:transform transform)))
(declaim (inline logical-to-boolean
db3-trim-string
db3-date-to-pgsql-date))
(defun logical-to-boolean (value)
"Convert a DB3 logical value to a PostgreSQL boolean."
(if (string= value "?") nil value))
(defun db3-trim-string (value)
"DB3 Strings a right padded with spaces, fix that."
(string-right-trim '(#\Space) value))
(defun db3-numeric-to-pgsql-numeric (value)
"DB3 numerics should be good to go, but might contain spaces."
(let ((trimmed-string (string-right-trim '(#\Space) value)))
(unless (string= "" trimmed-string)
trimmed-string)))
(defun db3-date-to-pgsql-date (value)
"Convert a DB3 date to a PostgreSQL date."
(when (and value (string/= "" value) (= 8 (length value)))
(let ((year (parse-integer (subseq value 0 4) :junk-allowed t))
(month (parse-integer (subseq value 4 6) :junk-allowed t))
(day (parse-integer (subseq value 6 8) :junk-allowed t)))
(when (and year month day)
(format nil "~4,'0d-~2,'0d-~2,'0d" year month day)))))
:for field :in (db3::fields dbfconn)
:do (add-field table (make-db3-coldef (db3::field-name field)
(string (db3::field-type field))
(db3::field-length field)))))

View File

@ -4,32 +4,28 @@
(in-package :pgloader.source.db3)
;;;
;;; Integration with pgloader
;;;
(defclass copy-db3 (db-copy)
((encoding :accessor encoding ; file encoding
:initarg :encoding))
(:documentation "pgloader DBF Data Source"))
(defmethod initialize-instance :after ((db3 copy-db3) &key)
"Add a default value for transforms in case it's not been provided."
(setf (slot-value db3 'source)
(let ((table-name (pathname-name (fd-path (source-db db3)))))
(make-table :source-name table-name
:name (apply-identifier-case table-name)))))
(defmethod map-rows ((copy-db3 copy-db3) &key process-row-fn)
"Extract DB3 data and call PROCESS-ROW-FN function with a single
argument (a list of column values) for each row."
(with-connection (conn (source-db copy-db3))
(let ((stream (conn-handle (source-db copy-db3)))
(db3 (fd-db3 (source-db copy-db3)))
(db3:*external-format* (encoding copy-db3)))
(db3 (fd-db3 (source-db copy-db3))))
;; when the pgloader command has an ENCODING clause, it takes
;; precedence to the encoding embedded in the db3 file, if any.
(when (and (encoding copy-db3)
(db3::encoding db3)
(not (eq (encoding copy-db3) (db3::encoding db3))))
(log-message :warning "Forcing encoding to ~a, db3 file has ~a"
(encoding copy-db3) (db3::encoding db3))
(setf (db3::encoding db3) (encoding copy-db3)))
(loop
:with count := (db3:record-count db3)
:repeat count
:for row-array := (db3:load-record db3 stream)
:for (row-array deleted) := (multiple-value-list
(db3:load-record db3 stream))
:unless deleted
:do (funcall process-row-fn row-array)
:finally (return count)))))
@ -58,7 +54,7 @@
(push-to-end table (schema-table-list schema))
(with-connection (conn (source-db db3))
(list-all-columns (fd-db3 conn) table))
(fetch-columns table db3))
catalog))

View File

@ -0,0 +1,74 @@
;;;
;;; Given a list of columns in PostgreSQL, try to guess the fixed format
;;; specification from a sample of the file.
;;;
(in-package :pgloader.source.fixed)
(defgeneric get-first-lines (filename-or-stream &optional n)
  (:documentation
   "Return a list of the first N lines (default 1) of FILENAME-OR-STREAM.
    Fewer lines are returned when the input is shorter than N, and NIL when
    the named file does not exist.")
  (:method ((stream stream) &optional (n 1))
    ;; read from the beginning of the stream, then restore the current
    ;; position so the caller can resume reading where it left off
    (let ((pos (file-position stream)))
      (file-position stream 0)
      (prog1
          (loop :repeat n
             :for line := (read-line stream nil nil)
             :while line
             :collect line)
        (file-position stream pos))))
  (:method ((filename string) &optional (n 1))
    (with-open-file (stream filename
                            :direction :input
                            :external-format :utf-8
                            :if-does-not-exist nil)
      ;; BUG FIX: with :if-does-not-exist nil, STREAM is NIL for a missing
      ;; file and (read-line nil ...) would then read *standard-input*;
      ;; guard so that a missing file simply yields NIL.
      (when stream
        (loop :repeat n
           :for line := (read-line stream nil nil)
           :while line
           :collect line)))))
(defun guess-fixed-specs-from-header (header)
  "Try to guess fixed specs from whitespace in the first line of the file.
   Returns a list of (NAME :start S :length L :null-as :blanks :trim-right t)
   entries, one per column name found in HEADER."
  (let* ((size (length header))
         ;; characters of the field name being accumulated, reversed
         current-field-name (current-field-start 0)
         ;; finished specs, accumulated in reverse order
         specs)
    (loop :for pos :from 0
       :for previous-char := #\Space :then current-char
       :for current-char :across header
       :do (cond ((or (= (+ 1 pos) size) ; last char
                      (and (< 0 pos)     ; new field: space→non-space edge
                           (char= #\Space previous-char)
                           (char/= #\Space current-char)))
                  ;; the very last character still belongs to the field we
                  ;; are about to close
                  (when (= (+ 1 pos) size)
                    (push current-char current-field-name))
                  ;; close the current field: turn the reversed character
                  ;; list back into a string and record the boundaries.
                  ;; NOTE(review): :length is (- pos start), which for the
                  ;; final field excludes its last character — confirm that
                  ;; is intended (trailing data may still parse thanks to
                  ;; :trim-right).
                  (push (list (map 'string #'identity
                                   (reverse current-field-name))
                              :start current-field-start
                              :length (- pos current-field-start)
                              :null-as :blanks
                              :trim-right t)
                        specs)
                  ;; and start accumulating the next field at POS
                  (setf current-field-name (list current-char))
                  (setf current-field-start pos))
                 ;; inside a field: accumulate non-space characters
                 ((char/= #\Space current-char)
                  (push current-char current-field-name))))
    ;; specs were pushed in reverse document order
    (reverse specs)))
(defun guess-fixed-specs (filename-or-stream &optional (sample-size 1000))
  "Use the first line as an header to guess the specification of the fixed
   file from, and then match that against a sample of data from the file to
   see if that matches what data we have there.  Returns the guessed field
   specs, or NIL when some sampled line fails to parse."
  (let* ((sample (get-first-lines filename-or-stream sample-size))
         (header (first sample))
         (data (rest sample))
         ;; (name :start ... :length ...) entries guessed from the header
         (fields (guess-fixed-specs-from-header header))
         ;; parse-row only needs the position plists, not the names
         (specs (mapcar #'cdr fields)))
    ;; validation pass: the collected values are discarded — the loop only
    ;; checks that every sampled line parses with the guessed specs; any
    ;; failure is logged and aborts the whole guess with NIL
    (loop :for line :in data
       :collect (handler-case
                    (parse-row specs line)
                  (condition (e)
                    (log-message :error
                                 "Fixed: failed to use header as ~
                                  specification for columns: ~a" e)
                    (return-from guess-fixed-specs nil))))
    fields))

View File

@ -10,12 +10,7 @@
"Assign the type slot to sqlite."
(setf (slot-value fixed 'type) "fixed"))
(defclass copy-fixed (md-copy)
((encoding :accessor encoding ; file encoding
:initarg :encoding) ;
(skip-lines :accessor skip-lines ; CSV headers
:initarg :skip-lines ;
:initform 0))
(defclass copy-fixed (md-copy) ()
(:documentation "pgloader Fixed Columns Data Source"))
(defmethod clone-copy-for ((fixed copy-fixed) path-spec)
@ -30,6 +25,29 @@
;; return the new instance!
fixed-clone))
(defmethod parse-header ((fixed copy-fixed))
"Parse the header line given a FIXED setup."
(with-connection (cnx (source fixed)
:direction :input
:external-format (encoding fixed)
:if-does-not-exist nil)
(let ((input (md-strm cnx)))
(loop :repeat (skip-lines fixed) :do (read-line input nil nil))
(let* ((field-spec-list (guess-fixed-specs input))
(specifications
(loop :for specs :in field-spec-list
:collect (destructuring-bind (name &key start length
&allow-other-keys)
specs
(format nil
"~a from ~d for ~d ~a"
name start length
"[null if blanks, trim right whitespace]")))))
(setf (fields fixed) field-spec-list)
(log-message :log
"Parsed ~d columns specs from header:~%(~%~{ ~a~^,~%~}~%)"
(length (fields fixed)) specifications)))))
(declaim (inline parse-row))
(defun parse-row (fixed-cols-specs line)

View File

@ -0,0 +1,72 @@
;;;
;;; Tools to handle IBM PC version of IXF file format
;;;
(in-package :pgloader.source.ixf)
;; Map IXF data type codes — read-time constant values from the ixf
;; library, hence the #. reader macro — to PostgreSQL type names.
(defvar *ixf-pgsql-type-mapping*
  '((#. ixf:+smallint+ . "smallint")
    (#. ixf:+integer+  . "integer")
    (#. ixf:+bigint+   . "bigint")
    (#. ixf:+decimal+  . "numeric")
    (#. ixf:+float+    . "double precision")
    (#. ixf:+timestamp+ . "timestamptz")
    (#. ixf:+date+     . "date")
    (#. ixf:+time+     . "time")
    ;; both fixed and variable length character data become text
    (#. ixf:+char+     . "text")
    (#. ixf:+varchar+  . "text")
    ;; large objects: binary ones as bytea, character ones as text
    (#. ixf:+blob-location-spec+   . "bytea")
    (#. ixf:+dbblob-location-spec+ . "bytea")
    (#. ixf:+dbclob-location-spec+ . "text")))
(defun cast-ixf-type (ixf-type)
  "Return the PostgreSQL type name matching IXF-TYPE in
   *ixf-pgsql-type-mapping*, signaling an error for an unknown type."
  (or (cdr (assoc ixf-type *ixf-pgsql-type-mapping*))
      (error "IXF Type mapping unknown for: ~d" ixf-type)))
(defun transform-function (field)
  "Return the transformation function needed to cast FIELD's ixf-column
   data into the text representation sent to PostgreSQL, or NIL when the
   source data may be used as-is."
  (let ((coltype (cast-ixf-type (ixf:ixf-column-type field))))
    ;;
    ;; The IXF driver we use maps the data type and gets
    ;; back proper CL typed objects, where we only want to
    ;; deal with text.
    ;;
    (cond ((or (string-equal "float" coltype)
               (string-equal "real" coltype)
               (string-equal "double precision" coltype)
               ;; also match "numeric" possibly followed by (precision,scale)
               (and (<= 7 (length coltype))
                    (string-equal "numeric" coltype :end2 7)))
           #'pgloader.transforms::float-to-string)
          ((string-equal "text" coltype)
           ;; text needs no conversion at all
           nil)
          ((string-equal "bytea" coltype)
           #'pgloader.transforms::byte-vector-to-bytea)
          (t
           ;; fallback: print any other non-nil object to a string
           (lambda (c)
             (when c
               (princ-to-string c)))))))
(defmethod cast ((col ixf:ixf-column) &key &allow-other-keys)
  "Return the PostgreSQL type definition from given IXF column definition."
  (make-column :name (apply-identifier-case (ixf:ixf-column-name col))
               ;; map the IXF type code to a PostgreSQL type name
               :type-name (cast-ixf-type (ixf:ixf-column-type col))
               :nullable (ixf:ixf-column-nullable col)
               ;; only emit a default when the source column declares one
               :default (when (ixf:ixf-column-has-default col)
                          (format-default-value
                           (ixf:ixf-column-default col)))
               ;; may be NIL when the source data is usable as-is
               :transform (transform-function col)
               ;; keep a non-empty column description as a comment
               :comment (let ((comment (ixf:ixf-column-desc col)))
                          (unless (or (null comment)
                                      (string= comment ""))
                            comment))))

View File

@ -0,0 +1,28 @@
;;;
;;; Tools to handle IBM PC version of IXF file format
;;;
(in-package :pgloader.source.ixf)
;; An IXF source is a plain file on disk; fd-connection provides the path
;; handling and no extra state is needed here.
(defclass ixf-connection (fd-connection) ()
  (:documentation "pgloader connection parameters for IXF files."))
(defmethod initialize-instance :after ((ixfconn ixf-connection) &key)
  "Assign the type slot to ixf."
  ;; the inherited TYPE slot identifies the connection kind to pgloader
  (setf (slot-value ixfconn 'type) "ixf"))
(defmethod open-connection ((ixfconn ixf-connection) &key)
  "Open the IXF file as a binary input stream."
  (let ((stream (open (fd-path ixfconn)
                      :direction :input
                      :element-type '(unsigned-byte 8))))
    (setf (conn-handle ixfconn) stream))
  ixfconn)
(defmethod close-connection ((ixfconn ixf-connection))
  "Close the underlying file stream and reset the handle."
  (prog1 ixfconn
    (close (conn-handle ixfconn))
    (setf (conn-handle ixfconn) nil)))
(defmethod clone-connection ((c ixf-connection))
  ;; the superclass clone is enough; only fix the class of the copy
  (change-class (call-next-method c) 'ixf-connection))

View File

@ -5,96 +5,26 @@
(in-package :pgloader.source.ixf)
(defclass ixf-connection (fd-connection) ()
(:documentation "pgloader connection parameters for IXF files."))
(defclass copy-ixf (db-copy)
((timezone :accessor timezone ; timezone
:initarg :timezone
:initform local-time:+utc-zone+))
(:documentation "pgloader IXF Data Source"))
(defmethod initialize-instance :after ((ixfconn ixf-connection) &key)
"Assign the type slot to dbf."
(setf (slot-value ixfconn 'type) "ixf"))
(defmethod initialize-instance :after ((source copy-ixf) &key)
"Add a default value for transforms in case it's not been provided."
(setf (slot-value source 'source)
(let ((table-name (pathname-name (fd-path (source-db source)))))
(make-table :source-name table-name
:name (apply-identifier-case table-name))))
(defmethod open-connection ((ixfconn ixf-connection) &key)
(setf (conn-handle ixfconn)
(open (fd-path ixfconn)
:direction :input
:element-type '(unsigned-byte 8)))
ixfconn)
;; force default timezone when nil
(when (null (timezone source))
(setf (timezone source) local-time:+utc-zone+)))
(defmethod close-connection ((ixfconn ixf-connection))
(close (conn-handle ixfconn))
(setf (conn-handle ixfconn) nil)
ixfconn)
(defmethod clone-connection ((c ixf-connection))
(change-class (call-next-method c) 'ixf-connection))
(defvar *ixf-pgsql-type-mapping*
'((#. ixf:+smallint+ . "smallint")
(#. ixf:+integer+ . "integer")
(#. ixf:+bigint+ . "bigint")
(#. ixf:+decimal+ . "numeric")
(#. ixf:+float+ . "double precision")
(#. ixf:+timestamp+ . "timestamptz")
(#. ixf:+date+ . "date")
(#. ixf:+time+ . "time")
(#. ixf:+char+ . "text")
(#. ixf:+varchar+ . "text")
(#. ixf:+blob-location-spec+ . "bytea")
(#. ixf:+dbblob-location-spec+ . "bytea")
(#. ixf:+dbclob-location-spec+ . "text")))
(defun cast-ixf-type (ixf-type)
"Return the PostgreSQL type name for a given IXF type name."
(let ((pgtype
(cdr (assoc ixf-type *ixf-pgsql-type-mapping*))))
(unless pgtype
(error "IXF Type mapping unknown for: ~d" ixf-type))
pgtype))
(defun transform-function (field)
"Return the transformation functions needed to cast from ixf-column data."
(let ((coltype (cast-ixf-type (ixf:ixf-column-type field))))
;;
;; The IXF driver we use maps the data type and gets
;; back proper CL typed objects, where we only want to
;; deal with text.
;;
(cond ((or (string-equal "float" coltype)
(string-equal "real" coltype)
(string-equal "double precision" coltype)
(and (<= 7 (length coltype))
(string-equal "numeric" coltype :end2 7)))
#'pgloader.transforms::float-to-string)
((string-equal "text" coltype)
nil)
((string-equal "bytea" coltype)
#'pgloader.transforms::byte-vector-to-bytea)
(t
(lambda (c)
(when c
(princ-to-string c)))))))
(defmethod cast ((col ixf:ixf-column) &key &allow-other-keys)
"Return the PostgreSQL type definition from given IXF column definition."
(make-column :name (apply-identifier-case (ixf:ixf-column-name col))
:type-name (cast-ixf-type (ixf:ixf-column-type col))
:nullable (ixf:ixf-column-nullable col)
:default (when (ixf:ixf-column-has-default col)
(format-default-value
(ixf:ixf-column-default col)))
:transform (transform-function col)
:comment (let ((comment (ixf:ixf-column-desc col)))
(unless (or (null comment)
(string= comment ""))
comment))))
(defun list-all-columns (ixf-stream table)
(defmethod fetch-columns ((table table) (ixf copy-ixf)
&key &allow-other-keys
&aux (ixf-stream (conn-handle (source-db ixf))))
"Return the list of columns for the given IXF-FILE-NAME."
(ixf:with-ixf-stream (ixf ixf-stream)
(loop :for field :across (ixf:ixf-table-columns (ixf:ixf-file-table ixf))

View File

@ -5,26 +5,6 @@
(in-package :pgloader.source.ixf)
;;;
;;; Integration with pgloader
;;;
;; IXF data source: a db-copy specialization carrying a timezone slot
;; (NOTE(review): presumably used when reading timestamp values from the
;; IXF file — confirm against map-rows).
(defclass copy-ixf (db-copy)
  ((timezone :accessor timezone        ; timezone, defaults to UTC
             :initarg :timezone
             :initform local-time:+utc-zone+))
  (:documentation "pgloader IXF Data Source"))
(defmethod initialize-instance :after ((source copy-ixf) &key)
  "Derive the source table name from the IXF file path, and make sure the
   timezone slot is never left NIL."
  ;; the IXF file name (without extension) becomes the source table name
  (setf (slot-value source 'source)
        (let ((table-name (pathname-name (fd-path (source-db source)))))
          (make-table :source-name table-name
                      :name (apply-identifier-case table-name))))
  ;; force default timezone when nil
  (when (null (timezone source))
    (setf (timezone source) local-time:+utc-zone+)))
(defmethod map-rows ((copy-ixf copy-ixf) &key process-row-fn)
"Extract IXF data and call PROCESS-ROW-FN function with a single
argument (a list of column values) for each row."
@ -58,7 +38,7 @@
(push-to-end table (schema-table-list schema))
(with-connection (conn (source-db ixf))
(list-all-columns (conn-handle conn) table))
(fetch-columns table ixf))
catalog))

View File

@ -0,0 +1,48 @@
;;;
;;; Tools to query the MS SQL Schema to reproduce in PostgreSQL
;;;
(in-package :pgloader.source.mssql)
(defvar *mssql-db* nil
"The MS SQL database connection handler.")
;;;
;;; General utility to manage MS SQL connection
;;;
;; Connection handle wrapper for MS SQL, built on the generic db-connection.
(defclass mssql-connection (db-connection) ())

(defmethod initialize-instance :after ((msconn mssql-connection) &key)
  "Assign the type slot to mssql."
  (setf (slot-value msconn 'type) "mssql"))
(defmethod open-connection ((msconn mssql-connection) &key)
  "Connect to the MS SQL server described by MSCONN, apply any
   *mssql-settings* as SET statements, then return MSCONN with its
   connection handle populated."
  (setf (conn-handle msconn)
        (mssql:connect (db-name msconn)
                       (db-user msconn)
                       (db-pass msconn)
                       (db-host msconn)))
  ;; apply mssql-settings, if any; dolist's result form returns the
  ;; connection object itself
  (dolist (setting *mssql-settings* msconn)
    (destructuring-bind (name . value) setting
      (query msconn (format nil "set ~a ~a;" name value)))))
(defmethod close-connection ((msconn mssql-connection))
  "Disconnect from MS SQL and reset the connection handle, returning MSCONN."
  (mssql:disconnect (conn-handle msconn))
  (setf (conn-handle msconn) nil)
  msconn)
(defmethod clone-connection ((c mssql-connection))
  ;; the generic clone returns a base db-connection copy; narrow it back
  ;; down to an mssql-connection
  (change-class (call-next-method c) 'mssql-connection))
(defmethod query ((msconn mssql-connection) sql &key)
  "Send SQL query to MSCONN connection, logging it at the :sql level."
  (log-message :sql "MSSQL: sending query: ~a" sql)
  (mssql:query sql :connection (conn-handle msconn)))
(defun mssql-query (query)
  "Execute given QUERY within the current *mssql-db* connection, and set
   proper defaults for pgloader."
  (query *mssql-db* query))

View File

@ -4,50 +4,12 @@
(in-package :pgloader.source.mssql)
(defvar *mssql-db* nil
"The MS SQL database connection handler.")
(defclass copy-mssql (db-copy)
((encoding :accessor encoding ; allows forcing encoding
:initarg :encoding
:initform nil))
(:documentation "pgloader MS SQL Data Source"))
;;;
;;; General utility to manage MySQL connection
;;;
(defclass mssql-connection (db-connection) ())
(defmethod initialize-instance :after ((msconn mssql-connection) &key)
"Assign the type slot to mssql."
(setf (slot-value msconn 'type) "mssql"))
(defmethod open-connection ((msconn mssql-connection) &key)
  "Connect to the MS SQL server described by MSCONN and apply the settings
   found in *mssql-settings*, returning MSCONN."
  (setf (conn-handle msconn) (mssql:connect (db-name msconn)
                                            (db-user msconn)
                                            (db-pass msconn)
                                            (db-host msconn)))
  ;; apply mssql-settings, if any: each (name . value) pair becomes a
  ;; SET statement run on the fresh connection
  (loop :for (name . value) :in *mssql-settings*
        :for sql := (format nil "set ~a ~a;" name value)
        :do (query msconn sql))
  ;; return the connection object
  msconn)
(defmethod close-connection ((msconn mssql-connection))
(mssql:disconnect (conn-handle msconn))
(setf (conn-handle msconn) nil)
msconn)
(defmethod clone-connection ((c mssql-connection))
(change-class (call-next-method c) 'mssql-connection))
(defmethod query ((msconn mssql-connection) sql &key)
"Send SQL query to MSCONN connection."
(log-message :sql "MSSQL: sending query: ~a" sql)
(mssql:query sql :connection (conn-handle msconn)))
(defun mssql-query (query)
"Execute given QUERY within the current *connection*, and set proper
defaults for pgloader."
(query *mssql-db* query))
;;;
;;; Those functions are to be called from within an already established
;;; MS SQL Connection.
@ -61,11 +23,12 @@
"Associate internal table type symbol with what's found in MS SQL
information_schema.tables.table_type column.")
(defun filter-list-to-where-clause (filter-list
&optional
not
(schema-col "table_schema")
(table-col "table_name"))
(defmethod filter-list-to-where-clause ((mssql copy-mssql)
filter-list
&key
not
(schema-col "table_schema")
(table-col "table_name"))
"Given an INCLUDING or EXCLUDING clause, turn it into a MS SQL WHERE clause."
(loop :for (schema . table-name-list) :in filter-list
:append (mapcar (lambda (table-name)
@ -73,183 +36,148 @@
schema-col schema table-col not table-name))
table-name-list)))
(defun list-all-columns (catalog
&key
(table-type :table)
including
excluding
&aux
(table-type-name (cdr (assoc table-type *table-type*))))
(defmethod fetch-columns ((catalog catalog)
(mssql copy-mssql)
&key
(table-type :table)
including
excluding
&aux
(table-type-name
(cdr (assoc table-type *table-type*))))
(loop
:with incl-where := (filter-list-to-where-clause
mssql including :not nil
:schema-col "c.table_schema"
:table-col "c.table_name")
:with excl-where := (filter-list-to-where-clause
mssql excluding :not t
:schema-col "c.table_schema"
:table-col "c.table_name")
:for (schema-name table-name name type default nullable identity
character-maximum-length
numeric-precision numeric-precision-radix numeric-scale
datetime-precision
character-set-name collation-name)
:in
(mssql-query (format nil
(sql "/mssql/list-all-columns.sql")
(db-name *mssql-db*)
table-type-name
including ; do we print the clause?
(filter-list-to-where-clause including
nil
"c.table_schema"
"c.table_name")
excluding ; do we print the clause?
(filter-list-to-where-clause excluding
t
"c.table_schema"
"c.table_name")))
:do
(let* ((schema (maybe-add-schema catalog schema-name))
(table (maybe-add-table schema table-name))
(field
(make-mssql-column
schema-name table-name name type default nullable
(eq 1 identity)
character-maximum-length
numeric-precision numeric-precision-radix numeric-scale
datetime-precision
character-set-name collation-name)))
(add-field table field))
:in (mssql-query (sql "/mssql/list-all-columns.sql"
(db-name *mssql-db*)
table-type-name
incl-where ; do we print the clause?
incl-where
excl-where ; do we print the clause?
excl-where))
:do (let* ((schema (maybe-add-schema catalog schema-name))
(table (maybe-add-table schema table-name))
(field
(make-mssql-column
schema-name table-name name type default nullable
(eq 1 identity)
character-maximum-length
numeric-precision numeric-precision-radix numeric-scale
datetime-precision
character-set-name collation-name)))
(add-field table field))
:finally (return catalog)))
(defun list-all-indexes (catalog &key including excluding)
(defmethod fetch-indexes ((catalog catalog)
(mssql copy-mssql)
&key including excluding)
"Get the list of MSSQL index definitions per table."
(loop
:with incl-where := (filter-list-to-where-clause
mssql including :not nil
:schema-col "schema_name(schema_id)"
:table-col "o.name")
:with excl-where := (filter-list-to-where-clause
mssql excluding :not t
:schema-col "schema_name(schema_id)"
:table-col "o.name")
:for (schema-name table-name index-name colname unique pkey filter)
:in (mssql-query (format nil
(sql "/mssql/list-all-indexes.sql")
including ; do we print the clause?
(filter-list-to-where-clause including
nil
"schema_name(schema_id)"
"o.name"
)
excluding ; do we print the clause?
(filter-list-to-where-clause excluding
t
"schema_name(schema_id)"
"o.name"
)))
:do
(let* ((schema (find-schema catalog schema-name))
(table (find-table schema table-name))
(pg-index (make-index :name index-name
:schema schema
:table table
:primary (= pkey 1)
:unique (= unique 1)
:columns nil
:filter filter))
(index
(when table
(maybe-add-index table index-name pg-index :key #'index-name))))
(unless table
(log-message :warning
"Failed to find table ~s in schema ~s for index ~s, skipping the index"
table-name schema-name index-name))
(when index
(add-column index colname)))
:in (mssql-query (sql "/mssql/list-all-indexes.sql"
incl-where ; do we print the clause?
incl-where
excl-where ; do we print the clause?
excl-where))
:do (let* ((schema (find-schema catalog schema-name))
(table (find-table schema table-name))
(pg-index (make-index :name index-name
:schema schema
:table table
:primary (= pkey 1)
:unique (= unique 1)
:columns nil
:filter filter))
(index
(when table
(maybe-add-index table index-name pg-index :key #'index-name))))
(unless table
(log-message :warning
"Failed to find table ~s in schema ~s for index ~s, skipping the index"
table-name schema-name index-name))
(when index
(add-column index colname)))
:finally (return catalog)))
(defun list-all-fkeys (catalog &key including excluding)
(defmethod fetch-foreign-keys ((catalog catalog) (mssql copy-mssql)
&key including excluding)
"Get the list of MSSQL index definitions per table."
(loop
:with incl-where := (filter-list-to-where-clause
mssql including :not nil
:schema-col "kcu1.table_schema"
:table-col "kcu1.table_name")
:with excl-where := (filter-list-to-where-clause
mssql excluding :not t
:schema-col "kcu1.table_schema"
:table-col "kcu1.table_name")
:for (fkey-name schema-name table-name col
fschema-name ftable-name fcol
fk-update-rule fk-delete-rule)
:in (mssql-query (format nil
(sql "/mssql/list-all-fkeys.sql")
(db-name *mssql-db*) (db-name *mssql-db*)
including ; do we print the clause?
(filter-list-to-where-clause including
nil
"kcu1.table_schema"
"kcu1.table_name")
excluding ; do we print the clause?
(filter-list-to-where-clause excluding
t
"kcu1.table_schema"
"kcu1.table_name")))
:do
(let* ((schema (find-schema catalog schema-name))
(table (find-table schema table-name))
(fschema (find-schema catalog fschema-name))
(ftable (find-table fschema ftable-name))
(pg-fkey
(make-fkey :name fkey-name
:table table
:columns nil
:foreign-table ftable
:foreign-columns nil
:update-rule fk-update-rule
:delete-rule fk-delete-rule))
(fkey
(maybe-add-fkey table fkey-name pg-fkey :key #'fkey-name)))
(push-to-end col (fkey-columns fkey))
(push-to-end fcol (fkey-foreign-columns fkey)))
:in (mssql-query (sql "/mssql/list-all-fkeys.sql"
(db-name *mssql-db*) (db-name *mssql-db*)
incl-where ; do we print the clause?
incl-where
excl-where ; do we print the clause?
excl-where))
:do (let* ((schema (find-schema catalog schema-name))
(table (find-table schema table-name))
(fschema (find-schema catalog fschema-name))
(ftable (find-table fschema ftable-name))
(col-name (apply-identifier-case col))
(fcol-name (apply-identifier-case fcol))
(pg-fkey
(make-fkey :name fkey-name
:table table
:columns nil
:foreign-table ftable
:foreign-columns nil
:update-rule fk-update-rule
:delete-rule fk-delete-rule))
(fkey
(maybe-add-fkey table fkey-name pg-fkey :key #'fkey-name)))
(push-to-end col-name (fkey-columns fkey))
(push-to-end fcol-name (fkey-foreign-columns fkey)))
:finally (return catalog)))
;;;
;;; Tools to handle row queries.
;;;
(defun get-column-sql-expression (name type)
(defmethod get-column-sql-expression ((mssql copy-mssql) name type)
"Return per-TYPE SQL expression to use given a column NAME.
Mostly we just use the name, and make try to avoid parsing dates."
(case (intern (string-upcase type) "KEYWORD")
(:time (format nil "convert(varchar, [~a], 114)" name))
(:datetime (format nil "convert(varchar, [~a], 126)" name))
(:datetime2 (format nil "convert(varchar, [~a], 126)" name))
(:smalldatetime (format nil "convert(varchar, [~a], 126)" name))
(:date (format nil "convert(varchar, [~a], 126)" name))
(:bigint (format nil "cast([~a] as numeric)" name))
(t (format nil "[~a]" name))))
(defun get-column-list (columns)
(defmethod get-column-list ((mssql copy-mssql))
"Tweak how we fetch the column values to avoid parsing when possible."
(loop :for col :in columns
:collect (with-slots (name type) col
(get-column-sql-expression name type))))
;;;
;;; Materialize Views support
;;;
(defun create-ms-views (views-alist)
"VIEWS-ALIST associates view names with their SQL definition, which might
be empty for already existing views. Create only the views for which we
have an SQL definition."
(unless (eq :all views-alist)
(let ((views (remove-if #'null views-alist :key #'cdr)))
(when views
(loop :for (name . def) :in views
:for sql := (destructuring-bind (schema . v-name) name
(format nil
"CREATE VIEW ~@[~s~].~s AS ~a"
schema v-name def))
:do (progn
(log-message :info "MS SQL: ~a" sql)
(mssql-query sql)))))))
(defun drop-ms-views (views-alist)
"See `create-ms-views' for VIEWS-ALIST description. This time we DROP the
views to clean out after our work."
(unless (eq :all views-alist)
(let ((views (remove-if #'null views-alist :key #'cdr)))
(when views
(let ((sql
(with-output-to-string (sql)
(format sql "DROP VIEW ")
(loop :for view-definition :in views
:for i :from 0
:do (destructuring-bind (name . def) view-definition
(declare (ignore def))
(format sql
"~@[, ~]~@[~s.~]~s"
(not (zerop i)) (car name) (cdr name)))))))
(log-message :info "PostgreSQL Source: ~a" sql)
(mssql-query sql))))))
(loop :for field :in (fields mssql)
:collect (with-slots (name type) field
(get-column-sql-expression mssql name type))))

View File

@ -4,33 +4,12 @@
(in-package :pgloader.source.mssql)
(defclass copy-mssql (db-copy)
((encoding :accessor encoding ; allows forcing encoding
:initarg :encoding
:initform nil))
(:documentation "pgloader MS SQL Data Source"))
(defmethod initialize-instance :after ((source copy-mssql) &key)
"Add a default value for transforms in case it's not been provided."
(let* ((transforms (when (slot-boundp source 'transforms)
(slot-value source 'transforms))))
(when (and (slot-boundp source 'fields) (slot-value source 'fields))
;; cast typically happens in copy-database in the schema structure,
;; and the result is then copied into the copy-mysql instance.
(unless (and (slot-boundp source 'columns) (slot-value source 'columns))
(setf (slot-value source 'columns)
(mapcar #'cast (slot-value source 'fields))))
(unless transforms
(setf (slot-value source 'transforms)
(mapcar #'column-transform (slot-value source 'columns)))))))
(defmethod map-rows ((mssql copy-mssql) &key process-row-fn)
"Extract Mssql data and call PROCESS-ROW-FN function with a single
argument (a list of column values) for each row."
(with-connection (*mssql-db* (source-db mssql))
(let* ((sql (format nil "SELECT ~{~a~^, ~} FROM [~a].[~a];"
(get-column-list (fields mssql))
(get-column-list mssql)
(schema-source-name (table-schema (source mssql)))
(table-source-name (source mssql)))))
(log-message :debug "~a" sql)
@ -66,13 +45,11 @@
(catalog catalog)
&key
materialize-views
only-tables
create-indexes
foreign-keys
including
excluding)
"MS SQL introspection to prepare the migration."
(declare (ignore only-tables))
(with-stats-collection ("fetch meta data"
:use-result-as-rows t
:use-result-as-read t
@ -81,15 +58,15 @@
;; If asked to MATERIALIZE VIEWS, now is the time to create them in MS
;; SQL, when given definitions rather than existing view names.
(when (and materialize-views (not (eq :all materialize-views)))
(create-ms-views materialize-views))
(create-matviews materialize-views mssql))
(list-all-columns catalog
:including including
:excluding excluding)
(fetch-columns catalog mssql
:including including
:excluding excluding)
;; fetch view (and their columns) metadata, covering comments too
(let* ((view-names (unless (eq :all materialize-views)
(mapcar #'car materialize-views)))
(mapcar #'matview-source-name materialize-views)))
(including
(loop :for (schema-name . view-name) :in view-names
:do (let* ((schema-name (or schema-name "dbo"))
@ -100,26 +77,30 @@
:test #'string=)))))
(push-to-end view-name (cdr schema-entry))))))
(cond (view-names
(list-all-columns catalog
:including including
:table-type :view))
(fetch-columns catalog mssql
:including including
:excluding excluding
:table-type :view))
((eq :all materialize-views)
(list-all-columns catalog :table-type :view))))
(fetch-columns catalog mssql :table-type :view))))
(when create-indexes
(list-all-indexes catalog
:including including
:excluding excluding))
(fetch-indexes catalog mssql
:including including
:excluding excluding))
(when foreign-keys
(list-all-fkeys catalog
:including including
:excluding excluding))
(fetch-foreign-keys catalog mssql
:including including
:excluding excluding))
;; return how many objects we're going to deal with in total
;; for stats collection
(+ (count-tables catalog) (count-indexes catalog))))
(+ (count-tables catalog)
(count-views catalog)
(count-indexes catalog)
(count-fkeys catalog))))
;; be sure to return the catalog itself
catalog)
@ -131,4 +112,4 @@
migration purpose."
(when materialize-views
(with-connection (*mssql-db* (source-db mssql))
(drop-ms-views materialize-views))))
(drop-matviews materialize-views mssql))))

View File

@ -36,14 +36,24 @@
:target (:type "boolean" :drop-typemod t)
:using pgloader.transforms::tinyint-to-boolean)
;; bit(1) is most often used as a boolean too
(:source (:type "bit" :typemod (= 1 precision))
:target (:type "boolean" :drop-typemod t)
:using pgloader.transforms::bits-to-boolean)
;; bigint(20) unsigned (or not, actually) does not fit into PostgreSQL
;; bigint (-9223372036854775808 to +9223372036854775807):
;; bit(X) might be flags or another use case for bitstrings
(:source (:type "bit")
:target (:type "bit" :drop-typemod nil)
:using pgloader.transforms::bits-to-hex-bitstring)
;; bigint(20) signed do fit into PostgreSQL bigint
;; (-9223372036854775808 to +9223372036854775807):
(:source (:type "bigint" :signed t)
:target (:type "bigint" :drop-typemod t))
;; bigint(20) unsigned does not fit into PostgreSQL bigint
(:source (:type "bigint" :typemod (< 19 precision))
:target (:type "numeric" :drop-typemod t))
:target (:type "numeric" :drop-typemod t))
;; now unsigned types
(:source (:type "tinyint" :unsigned t)

View File

@ -4,51 +4,15 @@
(in-package :pgloader.source.mysql)
;;;
;;; Those functions are to be called from within an already established
;;; MySQL Connection.
;;;
;;; Handle MATERIALIZE VIEWS sections, where we need to create the views in
;;; the MySQL database before being able to process them.
;;;
(defun create-my-views (views-alist)
  "VIEWS-ALIST associates view names with their SQL definition, which might
   be empty for already existing views. Create only the views for which we
   have an SQL definition."
  ;; :all means materialize every existing view — nothing to create here
  (unless (eq :all views-alist)
    ;; entries with a NIL definition refer to pre-existing views
    (let ((views (remove-if #'null views-alist :key #'cdr)))
      (when views
        (loop for (name . def) in views
              for sql = (format nil "CREATE VIEW ~a AS ~a" name def)
              do
              (log-message :info "MySQL: ~a" sql)
              ;; in the self-contained pgloader image there is no
              ;; interactive debugger, so run the query directly; in a
              ;; development session offer restarts when the view exists
              #+pgloader-image
              (mysql-query sql)
              #-pgloader-image
              (restart-case
                  (mysql-query sql)
                (use-existing-view ()
                  :report "Use the already existing view and continue"
                  nil)
                (replace-view ()
                  :report "Replace the view with the one from pgloader's command"
                  ;; drop then re-create with the definition from the command
                  (let ((drop-sql (format nil "DROP VIEW ~a;" name)))
                    (log-message :info "MySQL: ~a" drop-sql)
                    (mysql-query drop-sql)
                    (mysql-query sql)))))))))
;; MySQL data source: a db-copy specialization with MySQL-specific slots.
(defclass copy-mysql (db-copy)
  ((encoding :accessor encoding        ; allows forcing encoding
             :initarg :encoding
             :initform nil)
   (range-list :accessor range-list    ; NOTE(review): presumably row ranges
               :initarg :range-list    ; for splitting reads — confirm usage
               :initform nil))
  (:documentation "pgloader MySQL Data Source"))
(defun drop-my-views (views-alist)
  "See `create-my-views' for VIEWS-ALIST description. This time we DROP the
   views to clean out after our work."
  ;; :all means the views pre-existed — never drop them
  (unless (eq :all views-alist)
    ;; only drop views we created, i.e. those that came with a definition
    (let ((views (remove-if #'null views-alist :key #'cdr)))
      (when views
        ;; a single DROP VIEW statement naming every created view
        (let ((sql
               (format nil "DROP VIEW ~{~a~^, ~};" (mapcar #'car views))))
          (log-message :info "MySQL: ~a" sql)
          (mysql-query sql))))))
;;;
;;; Those functions are to be called from within an already established
;;; MySQL Connection.
@ -61,8 +25,10 @@
"Associate internal table type symbol with what's found in MySQL
information_schema.tables.table_type column.")
(defun filter-list-to-where-clause (filter-list &optional not)
(defmethod filter-list-to-where-clause ((mysql copy-mysql) filter-list
&key not &allow-other-keys)
"Given an INCLUDING or EXCLUDING clause, turn it into a MySQL WHERE clause."
(declare (ignore mysql))
(mapcar (lambda (filter)
(typecase filter
(string-match-rule
@ -85,28 +51,25 @@
(t (ensure-unquoted default #\'))))
(defun list-all-columns (schema
&key
(table-type :table)
only-tables
including
excluding
&aux
(table-type-name (cdr (assoc table-type *table-type*))))
(defmethod fetch-columns ((schema schema)
(mysql copy-mysql)
&key
(table-type :table)
including
excluding
&aux
(table-type-name
(cdr (assoc table-type *table-type*))))
"Get the list of MySQL column names per table."
(loop
:for (tname tcomment cname ccomment dtype ctype default nullable extra)
:in
(mysql-query (format nil
(sql "/mysql/list-all-columns.sql")
(db-name *connection*)
table-type-name
only-tables ; do we print the clause?
only-tables
including ; do we print the clause?
(filter-list-to-where-clause including)
excluding ; do we print the clause?
(filter-list-to-where-clause excluding t)))
:in (mysql-query (sql "/mysql/list-all-columns.sql"
(db-name *connection*)
table-type-name
including ; do we print the clause?
including
excluding ; do we print the clause?
excluding))
:do
(let* ((table
(case table-type
@ -123,23 +86,17 @@
:finally
(return schema)))
(defun list-all-indexes (schema
&key
only-tables
including
excluding)
(defmethod fetch-indexes ((schema schema) (mysql copy-mysql)
&key including excluding)
"Get the list of MySQL index definitions per table."
(loop
:for (table-name name index-type non-unique cols)
:in (mysql-query (format nil
(sql "/mysql/list-all-indexes.sql")
(db-name *connection*)
only-tables ; do we print the clause?
only-tables
including ; do we print the clause?
(filter-list-to-where-clause including)
excluding ; do we print the clause?
(filter-list-to-where-clause excluding t)))
:in (mysql-query (sql "/mysql/list-all-indexes.sql"
(db-name *connection*)
including ; do we print the clause?
including
excluding ; do we print the clause?
excluding))
:do (let* ((table (find-table schema table-name))
(index
(make-index :name name ; further processing is needed
@ -158,23 +115,20 @@
;;;
;;; MySQL Foreign Keys
;;;
(defun list-all-fkeys (schema
&key
only-tables
including
excluding)
(defmethod fetch-foreign-keys ((schema schema)
(mysql copy-mysql)
&key
including
excluding)
"Get the list of MySQL Foreign Keys definitions per table."
(loop
:for (table-name name ftable-name cols fcols update-rule delete-rule)
:in (mysql-query (format nil
(sql "/mysql/list-all-fkeys.sql")
(db-name *connection*) (db-name *connection*)
only-tables ; do we print the clause?
only-tables
including ; do we print the clause?
(filter-list-to-where-clause including)
excluding ; do we print the clause?
(filter-list-to-where-clause excluding t)))
:in (mysql-query (sql "/mysql/list-all-fkeys.sql"
(db-name *connection*) (db-name *connection*)
including ; do we print the clause?
including
excluding ; do we print the clause?
excluding))
:do (let* ((table (find-table schema table-name))
(ftable (find-table schema ftable-name))
(fk
@ -208,41 +162,29 @@
;;; As it takes a separate PostgreSQL Query per comment it's useless to
;;; fetch them right into the the more general table and columns lists.
;;;
(defun list-table-comments (&key
only-tables
including
excluding)
(defun list-table-comments (&key including excluding)
"Return comments on MySQL tables."
(loop
:for (table-name comment)
:in (mysql-query (format nil
(sql "/mysql/list-table-comments.sql")
(db-name *connection*)
only-tables ; do we print the clause?
only-tables
including ; do we print the clause?
(filter-list-to-where-clause including)
excluding ; do we print the clause?
(filter-list-to-where-clause excluding t)))
:in (mysql-query (sql "/mysql/list-table-comments.sql"
(db-name *connection*)
including ; do we print the clause?
including
excluding ; do we print the clause?
excluding))
:when (and comment (not (string= comment "")))
:collect (list table-name comment)))
(defun list-columns-comments (&key
only-tables
including
excluding)
(defun list-columns-comments (&key including excluding)
"Return comments on MySQL tables."
(loop
:for (table-name column-name comment)
:in (mysql-query (format nil
(sql "/mysql/list-columns-comments.sql")
(db-name *connection*)
only-tables ; do we print the clause?
only-tables
including ; do we print the clause?
(filter-list-to-where-clause including)
excluding ; do we print the clause?
(filter-list-to-where-clause excluding t)))
:in (mysql-query (sql "/mysql/list-columns-comments.sql"
(db-name *connection*)
including ; do we print the clause?
including
excluding ; do we print the clause?
excluding))
:when (and comment (not (string= comment "")))
:collect (list table-name column-name comment)))
@ -251,42 +193,22 @@
;;; Tools to handle row queries, issuing separate is null statements and
;;; handling of geometric data types.
;;;
(defun get-column-sql-expression (name type)
(defmethod get-column-sql-expression ((mysql copy-mysql) name type)
"Return per-TYPE SQL expression to use given a column NAME.
Mostly we just use the name, but in case of POINT we need to use
astext(name)."
st_astext(name)."
(declare (ignore mysql))
(case (intern (string-upcase type) "KEYWORD")
(:geometry (format nil "astext(`~a`) as `~a`" name name))
(:point (format nil "astext(`~a`) as `~a`" name name))
(:linestring (format nil "astext(`~a`) as `~a`" name name))
(:geometry (format nil "st_astext(`~a`) as `~a`" name name))
(:point (format nil "st_astext(`~a`) as `~a`" name name))
(:linestring (format nil "st_astext(`~a`) as `~a`" name name))
(t (format nil "`~a`" name))))
(defun get-column-list (copy)
(defmethod get-column-list ((mysql copy-mysql))
"Some MySQL datatypes have a meaningless default output representation, we
need to process them on the SQL side (geometric data types)."
(loop :for field :in (fields copy)
:collect (get-column-sql-expression (mysql-column-name field)
(mysql-column-dtype field))))
(declaim (inline fix-nulls))
(defun fix-nulls (row nulls)
"Given a cl-mysql row result and a nulls list as from
get-column-list-with-is-nulls, replace NIL with empty strings with when
we know from the added 'foo is null' that the actual value IS NOT NULL.
See http://bugs.mysql.com/bug.php?id=19564 for context."
(loop
for (current-col next-col) on row
for (current-null next-null) on nulls
;; next-null tells us if next column is an "is-null" col
;; when next-null is true, next-col is true if current-col is actually null
for is-null = (and next-null (string= next-col "1"))
for is-empty = (and next-null (string= next-col "0") (null current-col))
;; don't collect columns we added, e.g. "column_name is not null"
when (not current-null)
collect (cond (is-null :null)
(is-empty "")
(t current-col))))
(loop :for field :in (fields mysql)
:collect (let ((name (mysql-column-name field))
(type (mysql-column-dtype field)))
(get-column-sql-expression mysql name type))))

View File

@ -4,31 +4,6 @@
(in-package :pgloader.source.mysql)
(defclass copy-mysql (db-copy)
((encoding :accessor encoding ; allows forcing encoding
:initarg :encoding
:initform nil)
(range-list :accessor range-list
:initarg :range-list
:initform nil))
(:documentation "pgloader MySQL Data Source"))
(defmethod initialize-instance :after ((source copy-mysql) &key)
"Add a default value for transforms in case it's not been provided."
(let ((transforms (and (slot-boundp source 'transforms)
(slot-value source 'transforms))))
(when (and (slot-boundp source 'fields) (slot-value source 'fields))
;; cast typically happens in copy-database in the schema structure,
;; and the result is then copied into the copy-mysql instance.
(unless (and (slot-boundp source 'columns) (slot-value source 'columns))
(setf (slot-value source 'columns)
(mapcar #'cast (slot-value source 'fields))))
(unless transforms
(setf (slot-value source 'transforms)
(mapcar #'column-transform (slot-value source 'columns)))))))
;;;
;;; Implement the specific methods
;;;
@ -166,7 +141,6 @@ Illegal ~a character starting at position ~a~@[: ~a~].~%"
(catalog catalog)
&key
materialize-views
only-tables
(create-indexes t)
(foreign-keys t)
including
@ -174,50 +148,55 @@ Illegal ~a character starting at position ~a~@[: ~a~].~%"
"MySQL introspection to prepare the migration."
(let ((schema (add-schema catalog (catalog-name catalog)
:in-search-path t))
(view-names (unless (eq :all materialize-views)
(mapcar #'car materialize-views))))
(including (filter-list-to-where-clause mysql including))
(excluding (filter-list-to-where-clause mysql excluding :not t)))
(with-stats-collection ("fetch meta data"
:use-result-as-rows t
:use-result-as-read t
:section :pre)
(with-connection (*connection* (source-db mysql))
;; If asked to MATERIALIZE VIEWS, now is the time to create them in
;; MySQL, when given definitions rather than existing view names.
(when (and materialize-views (not (eq :all materialize-views)))
(create-my-views materialize-views))
(with-connection (*connection* (source-db mysql))
;; If asked to MATERIALIZE VIEWS, now is the time to create them in
;; MySQL, when given definitions rather than existing view names.
(when (and materialize-views (not (eq :all materialize-views)))
(create-matviews materialize-views mysql))
;; fetch table and columns metadata, covering table and column comments
(list-all-columns schema
:only-tables only-tables
:including including
:excluding excluding)
;; fetch table and columns metadata, covering table and column comments
(fetch-columns schema mysql
:including including
:excluding excluding)
;; fetch view (and their columns) metadata, covering comments too
(cond (view-names (list-all-columns schema
:only-tables view-names
:table-type :view))
;; fetch view (and their columns) metadata, covering comments too
(let* ((view-names (unless (eq :all materialize-views)
(mapcar #'matview-source-name materialize-views)))
(including
(loop :for (schema-name . view-name) :in view-names
:collect (make-string-match-rule :target view-name)))
(including-clause (filter-list-to-where-clause mysql including)))
(cond (view-names
(fetch-columns schema mysql
:including including-clause
:excluding excluding
:table-type :view))
((eq :all materialize-views)
(list-all-columns schema :table-type :view)))
(fetch-columns schema mysql :table-type :view))))
(when foreign-keys
(list-all-fkeys schema
:only-tables only-tables
:including including
:excluding excluding))
(when create-indexes
(list-all-indexes schema
:only-tables only-tables
(when foreign-keys
(fetch-foreign-keys schema mysql
:including including
:excluding excluding))
;; return how many objects we're going to deal with in total
;; for stats collection
(+ (count-tables catalog)
(count-views catalog)
(count-indexes catalog)
(count-fkeys catalog))))
(when create-indexes
(fetch-indexes schema mysql
:including including
:excluding excluding))
;; return how many objects we're going to deal with in total
;; for stats collection
(+ (count-tables catalog)
(count-views catalog)
(count-indexes catalog)
(count-fkeys catalog))))
catalog))
@ -227,7 +206,7 @@ Illegal ~a character starting at position ~a~@[: ~a~].~%"
migration purpose."
(when materialize-views
(with-connection (*connection* (source-db mysql))
(drop-my-views materialize-views))))
(drop-matviews materialize-views mysql))))
(defvar *decoding-as* nil
"Special per-table encoding/decoding overloading rules for MySQL.")

View File

@ -13,7 +13,6 @@
from information_schema.columns c
join information_schema.tables t using(table_schema, table_name)
where c.table_schema = '~a' and t.table_type = '~a'
~:[~*~;and table_name in (~{'~a'~^,~})~]
~:[~*~;and (~{table_name ~a~^ or ~})~]
~:[~*~;and (~{table_name ~a~^ and ~})~]
order by table_name, ordinal_position;

View File

@ -30,7 +30,6 @@ FROM
WHERE tc.table_schema = '~a'
AND k.referenced_table_schema = '~a'
AND tc.constraint_type = 'FOREIGN KEY'
~:[~*~;and tc.table_name in (~{'~a'~^,~})~]
~:[~*~;and (~{tc.table_name ~a~^ or ~})~]
~:[~*~;and (~{tc.table_name ~a~^ and ~})~]

View File

@ -11,7 +11,6 @@
cast(GROUP_CONCAT(column_name order by seq_in_index) as char)
FROM information_schema.statistics
WHERE table_schema = '~a'
~:[~*~;and table_name in (~{'~a'~^,~})~]
~:[~*~;and (~{table_name ~a~^ or ~})~]
~:[~*~;and (~{table_name ~a~^ and ~})~]
GROUP BY table_name, index_name, index_type;

View File

@ -11,7 +11,6 @@
join information_schema.tables t using(table_schema, table_name)
where c.table_schema = '~a'
and t.table_type = 'BASE TABLE'
~:[~*~;and table_name in (~{'~a'~^,~})~]
~:[~*~;and (~{table_name ~a~^ or ~})~]
~:[~*~;and (~{table_name ~a~^ and ~})~]
order by table_name, ordinal_position;

View File

@ -10,6 +10,5 @@
FROM information_schema.tables
WHERE table_schema = '~a'
and table_type = 'BASE TABLE'
~:[~*~;and table_name in (~{'~a'~^,~})~]
~:[~*~;and (~{table_name ~a~^ or ~})~]
~:[~*~;and (~{table_name ~a~^ and ~})~];

View File

@ -1,50 +0,0 @@
(in-package :pgloader.source.pgsql)
(defun create-pg-views (views-alist)
"VIEWS-ALIST associates view names with their SQL definition, which might
be empty for already existing views. Create only the views for which we
have an SQL definition."
(unless (eq :all views-alist)
(let ((views (remove-if #'null views-alist :key #'cdr)))
(when views
(loop :for (name . def) :in views
:for sql := (destructuring-bind (schema . v-name) name
(format nil
"CREATE VIEW ~@[~s.~]~s AS ~a"
schema v-name def))
:do (progn
(log-message :info "PostgreSQL Source: ~a" sql)
#+pgloader-image
(pgsql-execute sql)
#-pgloader-image
(restart-case
(pgsql-execute sql)
(use-existing-view ()
:report "Use the already existing view and continue"
nil)
(replace-view ()
:report
"Replace the view with the one from pgloader's command"
(let ((drop-sql (format nil "DROP VIEW ~a;" (car name))))
(log-message :info "PostgreSQL Source: ~a" drop-sql)
(pgsql-execute drop-sql)
(pgsql-execute sql))))))))))
(defun drop-pg-views (views-alist)
"See `create-pg-views' for VIEWS-ALIST description. This time we DROP the
views to clean out after our work."
(unless (eq :all views-alist)
(let ((views (remove-if #'null views-alist :key #'cdr)))
(when views
(let ((sql
(with-output-to-string (sql)
(format sql "DROP VIEW ")
(loop :for view-definition :in views
:for i :from 0
:do (destructuring-bind (name . def) view-definition
(declare (ignore def))
(format sql
"~@[, ~]~@[~s.~]~s"
(not (zerop i)) (car name) (cdr name)))))))
(log-message :info "PostgreSQL Source: ~a" sql)
(pgsql-execute sql))))))

View File

@ -7,21 +7,6 @@
(defclass copy-pgsql (db-copy) ()
(:documentation "pgloader PostgreSQL Data Source"))
(defmethod initialize-instance :after ((source copy-pgsql) &key)
"Add a default value for transforms in case it's not been provided."
(let* ((transforms (when (slot-boundp source 'transforms)
(slot-value source 'transforms))))
(when (and (slot-boundp source 'fields) (slot-value source 'fields))
;; cast typically happens in copy-database in the schema structure,
;; and the result is then copied into the copy-mysql instance.
(unless (and (slot-boundp source 'columns) (slot-value source 'columns))
(setf (slot-value source 'columns)
(mapcar #'cast (slot-value source 'fields))))
(unless transforms
(setf (slot-value source 'transforms)
(mapcar #'column-transform (slot-value source 'columns)))))))
(defmethod map-rows ((pgsql copy-pgsql) &key process-row-fn)
"Extract PostgreSQL data and call PROCESS-ROW-FN function with a single
argument (a list of column values) for each row"
@ -63,8 +48,9 @@
(cl-postgres:exec-query pomo:*database* sql map-reader))))))
(defmethod copy-column-list ((pgsql copy-pgsql))
"We are sending the data in the MySQL columns ordering here."
(mapcar #'column-name (fields pgsql)))
"We are sending the data in the source columns ordering here."
(mapcar (lambda (field) (ensure-quoted (column-name field)))
(fields pgsql)))
(defmethod fetch-metadata ((pgsql copy-pgsql)
(catalog catalog)
@ -89,7 +75,7 @@
;; the target database.
;;
(when (and materialize-views (not (eq :all materialize-views)))
(create-pg-views materialize-views))
(create-matviews materialize-views pgsql))
(when (eq :pgdg variant)
(list-all-sqltypes catalog
@ -101,7 +87,7 @@
:excluding excluding)
(let* ((view-names (unless (eq :all materialize-views)
(mapcar #'car materialize-views)))
(mapcar #'matview-source-name materialize-views)))
(including (make-including-expr-from-view-names view-names)))
(cond (view-names
(list-all-columns catalog
@ -139,4 +125,4 @@
the migration purpose."
(when materialize-views
(with-pgsql-transaction (:pgconn (source-db pgsql))
(drop-pg-views materialize-views))))
(drop-matviews materialize-views pgsql))))

View File

@ -0,0 +1,41 @@
;;;
;;; SQLite tools connecting to a database
;;;
(in-package :pgloader.source.sqlite)
(defvar *sqlite-db* nil
"The SQLite database connection handler.")
;;;
;;; Integration with the pgloader Source API
;;;
(defclass sqlite-connection (fd-connection)
((has-sequences :initform nil :accessor has-sequences)))
(defmethod initialize-instance :after ((slconn sqlite-connection) &key)
"Assign the type slot to sqlite."
(setf (slot-value slconn 'type) "sqlite"))
(defmethod open-connection ((slconn sqlite-connection) &key check-has-sequences)
(setf (conn-handle slconn)
(sqlite:connect (fd-path slconn)))
(log-message :debug "CONNECTED TO ~a" (fd-path slconn))
(when check-has-sequences
(let ((sql (format nil (sql "/sqlite/sqlite-sequence.sql"))))
(log-message :sql "SQLite: ~a" sql)
(when (sqlite:execute-single (conn-handle slconn) sql)
(setf (has-sequences slconn) t))))
slconn)
(defmethod close-connection ((slconn sqlite-connection))
(sqlite:disconnect (conn-handle slconn))
(setf (conn-handle slconn) nil)
slconn)
(defmethod clone-connection ((slconn sqlite-connection))
(change-class (call-next-method slconn) 'sqlite-connection))
(defmethod query ((slconn sqlite-connection) sql &key)
(log-message :sql "SQLite: sending query: ~a" sql)
(sqlite:execute-to-list (conn-handle slconn) sql))

View File

@ -3,66 +3,44 @@
;;;
(in-package :pgloader.source.sqlite)
(defvar *sqlite-db* nil
"The SQLite database connection handler.")
(defclass copy-sqlite (db-copy)
((db :accessor db :initarg :db))
(:documentation "pgloader SQLite Data Source"))
;;;
;;; Integration with the pgloader Source API
;;;
(defclass sqlite-connection (fd-connection)
((has-sequences :initform nil :accessor has-sequences)))
(defmethod initialize-instance :after ((slconn sqlite-connection) &key)
"Assign the type slot to sqlite."
(setf (slot-value slconn 'type) "sqlite"))
(defmethod open-connection ((slconn sqlite-connection) &key check-has-sequences)
(setf (conn-handle slconn)
(sqlite:connect (fd-path slconn)))
(log-message :debug "CONNECTED TO ~a" (fd-path slconn))
(when check-has-sequences
(let ((sql (format nil (sql "/sqlite/sqlite-sequence.sql"))))
(log-message :sql "SQLite: ~a" sql)
(when (sqlite:execute-single (conn-handle slconn) sql)
(setf (has-sequences slconn) t))))
slconn)
(defmethod close-connection ((slconn sqlite-connection))
(sqlite:disconnect (conn-handle slconn))
(setf (conn-handle slconn) nil)
slconn)
(defmethod clone-connection ((slconn sqlite-connection))
(change-class (call-next-method slconn) 'sqlite-connection))
(defmethod query ((slconn sqlite-connection) sql &key)
(log-message :sql "SQLite: sending query: ~a" sql)
(sqlite:execute-to-list (conn-handle slconn) sql))
;;;
;;; SQLite schema introspection facilities
;;;
(defun filter-list-to-where-clause (filter-list
&optional
not
(table-col "tbl_name"))
(defun sqlite-encoding (db)
"Return a BABEL suitable encoding for the SQLite db handle."
(let ((encoding-string (sqlite:execute-single db "pragma encoding;")))
(cond ((string-equal encoding-string "UTF-8") :utf-8)
((string-equal encoding-string "UTF-16") :utf-16)
((string-equal encoding-string "UTF-16le") :utf-16le)
((string-equal encoding-string "UTF-16be") :utf-16be))))
(defmethod filter-list-to-where-clause ((sqlite copy-sqlite)
filter-list
&key
not
(table-col "tbl_name")
&allow-other-keys)
"Given an INCLUDING or EXCLUDING clause, turn it into a SQLite WHERE clause."
(mapcar (lambda (table-name)
(format nil "(~a ~:[~;NOT ~]LIKE '~a')"
table-col not table-name))
filter-list))
(defun list-tables (&key
(defun list-tables (sqlite
&key
(db *sqlite-db*)
including
excluding)
"Return the list of tables found in SQLITE-DB."
(let ((sql (format nil (sql "/sqlite/list-tables.sql")
including ; do we print the clause?
(filter-list-to-where-clause including nil)
excluding ; do we print the clause?
(filter-list-to-where-clause excluding t))))
(let ((sql (sql "/sqlite/list-tables.sql"
including ; do we print the clause?
(filter-list-to-where-clause sqlite including :not nil)
excluding ; do we print the clause?
(filter-list-to-where-clause sqlite excluding :not t))))
(log-message :sql "~a" sql)
(loop for (name) in (sqlite:execute-to-list db sql)
collect name)))
@ -70,7 +48,7 @@
(defun find-sequence (db table-name column-name)
"Find if table-name.column-name is attached to a sequence in
sqlite_sequence catalog."
(let* ((sql (format nil (sql "/sqlite/find-sequence.sql") table-name))
(let* ((sql (sql "/sqlite/find-sequence.sql" table-name))
(seq (sqlite:execute-single db sql)))
(when (and seq (not (zerop seq)))
;; magic marker for `apply-casting-rules'
@ -83,7 +61,7 @@
added to the table. So we might fail to FIND-SEQUENCE, and still need to
consider the column has an autoincrement. Parse the SQL definition of the
table to find out."
(let* ((sql (format nil (sql "/sqlite/get-create-table.sql") table-name))
(let* ((sql (sql "/sqlite/get-create-table.sql" table-name))
(create-table (sqlite:execute-single db sql))
(open-paren (+ 1 (position #\( create-table)))
(close-paren (position #\) create-table :from-end t))
@ -111,7 +89,7 @@
(defun list-columns (table &key db-has-sequences (db *sqlite-db*) )
"Return the list of columns found in TABLE-NAME."
(let* ((table-name (table-source-name table))
(sql (format nil (sql "/sqlite/list-columns.sql") table-name)))
(sql (sql "/sqlite/list-columns.sql" table-name)))
(loop :for (ctid name type nullable default pk-id)
:in (sqlite:execute-to-list db sql)
:do (let* ((ctype (normalize type))
@ -136,14 +114,18 @@
(setf (coldef-extra field) :auto-increment))
(add-field table field)))))
(defun list-all-columns (schema
&key
db-has-sequences
(db *sqlite-db*)
including
excluding)
(defmethod fetch-columns ((schema schema)
(sqlite copy-sqlite)
&key
db-has-sequences
table-type
including
excluding
&aux (db (conn-handle (source-db sqlite))))
"Get the list of SQLite column definitions per table."
(loop :for table-name :in (list-tables :db db
(declare (ignore table-type))
(loop :for table-name :in (list-tables sqlite
:db db
:including including
:excluding excluding)
:do (let ((table (add-table schema table-name)))
@ -186,15 +168,14 @@
(defun list-index-cols (index-name &optional (db *sqlite-db*))
"Return the list of columns in INDEX-NAME."
(let ((sql (format nil (sql "/sqlite/list-index-cols.sql") index-name)))
(let ((sql (sql "/sqlite/list-index-cols.sql" index-name)))
(loop :for (index-pos table-pos col-name) :in (sqlite:execute-to-list db sql)
:collect (apply-identifier-case col-name))))
(defun list-indexes (table &optional (db *sqlite-db*))
"Return the list of indexes attached to TABLE."
(let* ((table-name (table-source-name table))
(sql
(format nil (sql "/sqlite/list-table-indexes.sql") table-name)))
(sql (sql "/sqlite/list-table-indexes.sql" table-name)))
(loop
:for (seq index-name unique origin partial)
:in (sqlite:execute-to-list db sql)
@ -213,7 +194,9 @@
;; might create double primary key indexes here
(add-unlisted-primary-key-index table))
(defun list-all-indexes (schema &key (db *sqlite-db*))
(defmethod fetch-indexes ((schema schema) (sqlite copy-sqlite)
&key &allow-other-keys
&aux (db (conn-handle (source-db sqlite))))
"Get the list of SQLite index definitions per table."
(loop :for table :in (schema-table-list schema)
:do (list-indexes table db)))
@ -225,8 +208,7 @@
(defun list-fkeys (table &optional (db *sqlite-db*))
"Return the list of indexes attached to TABLE."
(let* ((table-name (table-source-name table))
(sql
(format nil (sql "/sqlite/list-fkeys.sql") table-name)))
(sql (sql "/sqlite/list-fkeys.sql" table-name)))
(loop
:with fkey-table := (make-hash-table)
:for (id seq ftable-name from to on-update on-delete match)
@ -262,7 +244,9 @@
(when ftable (format-table-name ftable))
to))))))
(defun list-all-fkeys (schema &key (db *sqlite-db*))
(defmethod fetch-foreign-keys ((schema schema) (sqlite copy-sqlite)
&key &allow-other-keys
&aux (db (conn-handle (source-db sqlite))))
"Get the list of SQLite foreign keys definitions per table."
(loop :for table :in (schema-table-list schema)
:do (list-fkeys table db)))

View File

@ -4,35 +4,8 @@
(in-package :pgloader.source.sqlite)
(defclass copy-sqlite (db-copy)
((db :accessor db :initarg :db))
(:documentation "pgloader SQLite Data Source"))
(defmethod initialize-instance :after ((source copy-sqlite) &key)
"Add a default value for transforms in case it's not been provided."
(let* ((transforms (when (slot-boundp source 'transforms)
(slot-value source 'transforms))))
(when (and (slot-boundp source 'fields) (slot-value source 'fields))
;; cast typically happens in copy-database in the schema structure,
;; and the result is then copied into the copy-mysql instance.
(unless (and (slot-boundp source 'columns) (slot-value source 'columns))
(setf (slot-value source 'columns)
(mapcar #'cast (slot-value source 'fields))))
(unless transforms
(setf (slot-value source 'transforms)
(mapcar #'column-transform (slot-value source 'columns)))))))
;;; Map a function to each row extracted from SQLite
;;;
(defun sqlite-encoding (db)
"Return a BABEL suitable encoding for the SQLite db handle."
(let ((encoding-string (sqlite:execute-single db "pragma encoding;")))
(cond ((string-equal encoding-string "UTF-8") :utf-8)
((string-equal encoding-string "UTF-16") :utf-16)
((string-equal encoding-string "UTF-16le") :utf-16le)
((string-equal encoding-string "UTF-16be") :utf-16be))))
(declaim (inline parse-value))
(defun parse-value (value sqlite-type pgsql-type &key (encoding :utf-8))
@ -111,23 +84,25 @@
:use-result-as-rows t
:use-result-as-read t
:section :pre)
(with-connection (conn (source-db sqlite) :check-has-sequences t)
(let ((*sqlite-db* (conn-handle conn)))
(list-all-columns schema
:db *sqlite-db*
:including including
:excluding excluding
:db-has-sequences (has-sequences conn))
(with-connection (conn (source-db sqlite) :check-has-sequences t)
(let ((*sqlite-db* (conn-handle conn)))
(fetch-columns schema
sqlite
:including including
:excluding excluding
:db-has-sequences (has-sequences conn))
(when create-indexes
(list-all-indexes schema :db *sqlite-db*))
(when create-indexes
(fetch-indexes schema sqlite))
(when foreign-keys
(list-all-fkeys schema :db *sqlite-db*)))
(when foreign-keys
(fetch-foreign-keys schema sqlite)))
;; return how many objects we're going to deal with in total
;; for stats collection
(+ (count-tables catalog) (count-indexes catalog))))
;; return how many objects we're going to deal with in total
;; for stats collection
(+ (count-tables catalog)
(count-indexes catalog)
(count-fkeys catalog))))
catalog))

View File

@ -65,7 +65,10 @@
"Alter the schema of TABLE, set SCHEMA-NAME instead."
(let* ((catalog (schema-catalog (table-schema table)))
(schema (maybe-add-schema catalog schema-name)))
(setf (table-schema table) schema)))
(setf (table-schema table) schema)
;; this voids any index definition extracted from the source database...
(reset-sql-definitions table)))
(defun alter-table-rename (table new-name)
"Alter the name of TABLE to NEW-NAME."
@ -130,4 +133,26 @@
(defun alter-schema-rename (schema new-name)
"Alter the name fo the given schema to new-name."
(setf (schema-name schema) new-name))
(setf (schema-name schema) new-name)
;; this voids any index definition extracted from the source database...
(loop :for table :in (schema-table-list schema)
:do (reset-sql-definitions table)))
;;;
;;; When a table targets a new schema, we need to refrain from using its
;;; index SQL definition when we got it from the source system, such as with
;;; pg_get_indexdef() on PostgreSQL.
;;;
(defun reset-sql-definitions (table)
"Reset source database given wholesale SQL definition for table's indexes
and foreign keys."
(loop :for index :in (table-index-list table)
:do (when (index-sql index)
(setf (index-sql index) nil)))
(loop :for fkey :in (table-fkey-list table)
:do (when (fkey-condef fkey)
(setf (fkey-condef fkey) nil))))

View File

@ -45,7 +45,7 @@
(defstruct catalog name schema-list types-without-btree distribution-rules)
(defstruct schema source-name name catalog in-search-path
table-list view-list extension-list sqltype-list)
table-list view-list matview-list extension-list sqltype-list)
(defstruct table source-name name schema oid comment
storage-parameter-list tablespace
@ -54,6 +54,8 @@
;; citus is an extra slot for citus support
field-list column-list index-list fkey-list trigger-list citus-rule)
(defstruct matview source-name name schema definition)
;;;
;;; When migrating from PostgreSQL to PostgreSQL we might have to install
;;; extensions to have data type coverage.

View File

@ -70,7 +70,10 @@
table-name schema-name))))))
(defun citus-find-table (catalog table)
(let* ((table-name (cdr (table-source-name table)))
(let* ((source-name (table-source-name table))
(table-name (etypecase source-name
(string source-name)
(cons (cdr source-name))))
(schema-name (schema-name (table-schema table))))
(or (find-table (find-schema catalog schema-name) table-name)
(error (make-condition 'citus-rule-table-not-found

View File

@ -58,11 +58,13 @@
(walk-sources-and-build-fs)
"File system as an hash-table in memory.")
(defun sql (url)
(defun sql (url &rest args)
"Abstract the hash-table based implementation of our SQL file system."
(restart-case
(or (gethash url *fs*)
(error "URL ~s not found!" url))
(apply #'format nil
(or (gethash url *fs*)
(error "URL ~s not found!" url))
args)
(recompute-fs-and-retry ()
(setf *fs* (walk-sources-and-build-fs))
(sql url))))

View File

@ -82,6 +82,7 @@
time-with-no-separator
tinyint-to-boolean
bits-to-boolean
bits-to-hex-bitstring
int-to-ip
ip-range
convert-mysql-point
@ -96,7 +97,15 @@
sql-server-bit-to-boolean
varbinary-to-string
base64-decode
hex-to-dec))
hex-to-dec
byte-vector-to-hexstring
;; db3 specifics
logical-to-boolean
db3-trim-string
db3-numeric-to-pgsql-numeric
db3-numeric-to-pgsql-integer
db3-date-to-pgsql-date))
;;;
@ -112,7 +121,7 @@
;; month is 00
((string= date-string "0000-00-00" :start1 5 :end1 7 :start2 5 :end2 7) nil)
;; year is 0000
((string= date-string "0000-00-00" :start1 0 :end1 3 :start2 0 :end2 3) nil)
((string= date-string "0000-00-00" :start1 0 :end1 4 :start2 0 :end2 4) nil)
(t date-string)))
(defun date-with-no-separator
@ -179,6 +188,29 @@
(fixnum (if (= 0 bit) "f" "t"))
(character (if (= 0 (char-code bit)) "f" "t"))))))
(defun bits-to-hex-bitstring (bit-vector-or-string)
"Transform bit(XX) from MySQL to bit(XX) in PostgreSQL."
(etypecase bit-vector-or-string
(null nil)
;; default value as string looks like "b'0'", skip b' and then closing '
(string (let ((default bit-vector-or-string)
(size (length bit-vector-or-string)))
(subseq default 2 (+ -1 size))))
(array (let* ((bytes bit-vector-or-string)
(size (length bit-vector-or-string))
(digits "0123456789abcdef")
(hexstr
(make-array (+ 1 (* size 2)) :element-type 'character)))
;; use Postgres hex bitstring support: x0ff
(setf (aref hexstr 0) #\X)
(loop :for pos :from 1 :by 2
:for byte :across bytes
:do (let ((high (ldb (byte 4 4) byte))
(low (ldb (byte 4 0) byte)))
(setf (aref hexstr pos) (aref digits high))
(setf (aref hexstr (+ pos 1)) (aref digits low))))
hexstr))))
(defun int-to-ip (int)
"Transform an IP as integer into its dotted notation, optimised code from
stassats."
@ -350,10 +382,40 @@
(t
date-string-or-integer)))))))
;;;
;;; MS SQL Server GUID binary representation is a mix of endianness, as
;;; documented at
;;; https://dba.stackexchange.com/questions/121869/sql-server-uniqueidentifier-guid-internal-representation
;;; and
;;; https://en.wikipedia.org/wiki/Globally_unique_identifier#Binary_encoding.
;;;
;;; "Other systems, notably Microsoft's marshalling of UUIDs in their
;;; COM/OLE libraries, use a mixed-endian format, whereby the first three
;;; components of the UUID are little-endian, and the last two are
;;; big-endian."
;;;
;;; So here we steal some code from the UUID lib and make it compatible with
;;; this strange mix of endianness for SQL Server.
;;;
(defmacro arr-to-bytes-rev (from to array)
"Helper macro used in byte-array-to-uuid."
`(loop for i from ,to downto ,from
with res = 0
do (setf (ldb (byte 8 (* 8 (- i ,from))) res) (aref ,array i))
finally (return res)))
(defun sql-server-uniqueidentifier-to-uuid (id)
(declare (type (or null (array (unsigned-byte 8) (16))) id))
(when id
(format nil "~a" (uuid:byte-array-to-uuid id))))
(let ((uuid
(make-instance 'uuid:uuid
:time-low (arr-to-bytes-rev 0 3 id)
:time-mid (arr-to-bytes-rev 4 5 id)
:time-high (arr-to-bytes-rev 6 7 id)
:clock-seq-var (aref id 8)
:clock-seq-low (aref id 9)
:node (uuid::arr-to-bytes 10 15 id))))
(princ-to-string uuid))))
(defun unix-timestamp-to-timestamptz (unixtime-string)
"Takes a unix timestamp (seconds since beginning of 1970) and converts it
@ -383,6 +445,32 @@
((string= "((1))" bit-string-or-integer) "t")
(t nil)))))
(defun byte-vector-to-hexstring (vector)
"Transform binary input received as a vector of bytes into a string of
hexadecimal digits, as per the following example:
Input: #(136 194 152 47 66 138 70 183 183 27 33 6 24 174 22 88)
Output: 88C2982F428A46B7B71B210618AE1658"
(declare (type (or null string (simple-array (unsigned-byte 8) (*))) vector))
(etypecase vector
(null nil)
(string (if (string= "" vector)
nil
(error "byte-vector-to-bytea called on a string: ~s" vector)))
(simple-array
(let ((hex-digits "0123456789abcdef")
(bytea (make-array (* 2 (length vector))
:initial-element #\0
:element-type 'standard-char)))
(loop for pos from 0 by 2
for byte across vector
do (let ((high (ldb (byte 4 4) byte))
(low (ldb (byte 4 0) byte)))
(setf (aref bytea pos) (aref hex-digits high))
(setf (aref bytea (+ pos 1)) (aref hex-digits low)))
finally (return bytea))))))
(defun varbinary-to-string (string)
(let ((babel::*default-character-encoding*
(or qmynd::*mysql-encoding*
@ -402,3 +490,40 @@
(null nil)
(integer hex-string)
(string (write-to-string (parse-integer hex-string :radix 16)))))
;;;
;;; DBF/DB3 transformation functions
;;;
(defun logical-to-boolean (value)
"Convert a DB3 logical value to a PostgreSQL boolean."
(if (member value '("?" " ") :test #'string=) nil value))
(defun db3-trim-string (value)
"DB3 Strings a right padded with spaces, fix that."
(string-right-trim '(#\Space) value))
(defun db3-numeric-to-pgsql-numeric (value)
"DB3 numerics should be good to go, but might contain spaces."
(let ((trimmed-string (string-trim '(#\Space) value)))
(unless (string= "" trimmed-string)
trimmed-string)))
(defun db3-numeric-to-pgsql-integer (value)
"DB3 numerics should be good to go, but might contain spaces."
(etypecase value
(null nil)
(integer (write-to-string value))
(string (let ((integer-or-nil (parse-integer value :junk-allowed t)))
(when integer-or-nil
(write-to-string integer-or-nil))))))
(defun db3-date-to-pgsql-date (value)
"Convert a DB3 date to a PostgreSQL date."
(when (and value (string/= "" value) (= 8 (length value)))
(let ((year (parse-integer (subseq value 0 4) :junk-allowed t))
(month (parse-integer (subseq value 4 6) :junk-allowed t))
(day (parse-integer (subseq value 6 8) :junk-allowed t)))
(when (and year month day)
(format nil "~4,'0d-~2,'0d-~2,'0d" year month day)))))

View File

@ -20,6 +20,7 @@ REGRESS= allcols.load \
csv-nulls.load \
csv-temp.load \
csv-trim-extra-blanks.load \
csv-using-sexp.load \
csv.load \
copy.load \
copy-hex.load \
@ -34,6 +35,13 @@ REGRESS= allcols.load \
udc.load \
xzero.load
# Those are not included in the tests because CCL doesn't have the cp866
# encoding, and then PostgreSQL 9.6 lacks "create schema if not exists".
#
# dbf-memo.load
# dbf-31.load
# dbf-8b.load
PGLOADER ?= ../build/bin/pgloader
PGSUPERUSER ?= postgres

29
test/csv-using-sexp.load Normal file
View File

@ -0,0 +1,29 @@
/*
* See https://github.com/dimitri/pgloader/issues/965
*/
LOAD CSV
FROM INLINE(id,f1)
INTO postgresql:///pgloader
TARGET TABLE sexp
(
id,
f1 text using (format nil "~{~a~^ ~}"
(split-sequence #\Space f1 :remove-empty-subseqs cl:t))
)
WITH truncate,
fields terminated by ','
BEFORE LOAD DO
$$ drop table if exists sexp; $$,
$$ CREATE TABLE sexp
(
id int,
f1 text
)
$$;
1,Hello World
2,Hello World
2, foobar foobaz

BIN
test/data/DNORDOC.DBF Normal file

Binary file not shown.

BIN
test/data/DNORDOC.DBT Normal file

Binary file not shown.

14
test/data/README.md Normal file
View File

@ -0,0 +1,14 @@
# Test data files.
Most of the files have been contributed by pgloader users in the context of
an issue where it was helpful to have a test case to reproduce and fix a
bug.
The following DBF test files come from the Open Source repository at
https://github.com/infused/dbf/tree/master/spec/fixtures
- dbase_31.dbf
- dbase_31_summary.txt
- dbase_8b.dbf
- dbase_8b.dbt
- dbase_8b_summary.txt

BIN
test/data/dbase_31.dbf Normal file

Binary file not shown.

View File

@ -0,0 +1,20 @@
Database: dbase_31.dbf
Type: (31) Visual FoxPro with AutoIncrement field
Memo File: false
Records: 77
Fields:
Name Type Length Decimal
------------------------------------------------------------------------------
PRODUCTID I 4 0
PRODUCTNAM C 40 0
SUPPLIERID I 4 0
CATEGORYID I 4 0
QUANTITYPE C 20 0
UNITPRICE Y 8 4
UNITSINSTO I 4 0
UNITSONORD I 4 0
REORDERLEV I 4 0
DISCONTINU L 1 0
_NullFlags 0 1 0

BIN
test/data/dbase_8b.dbf Normal file

Binary file not shown.

BIN
test/data/dbase_8b.dbt Normal file

Binary file not shown.

View File

@ -0,0 +1,15 @@
Database: dbase_8b.dbf
Type: (8b) dBase IV with memo file
Memo File: true
Records: 10
Fields:
Name Type Length Decimal
------------------------------------------------------------------------------
CHARACTER C 100 0
NUMERICAL N 20 2
DATE D 8 0
LOGICAL L 1 0
FLOAT F 20 18
MEMO M 10 0

6
test/dbf-31.load Normal file
View File

@ -0,0 +1,6 @@
LOAD DBF
FROM data/dbase_31.dbf
INTO postgresql:///pgloader
TARGET TABLE dbf.dbase_31
WITH truncate, create table, disable triggers
BEFORE LOAD DO $$ create schema if not exists dbf; $$;

6
test/dbf-8b.load Normal file
View File

@ -0,0 +1,6 @@
LOAD DBF
FROM data/dbase_8b.dbf
INTO postgresql:///pgloader
TARGET TABLE dbf.dbase_8b
WITH truncate, create table, disable triggers
BEFORE LOAD DO $$ create schema if not exists dbf; $$;

13
test/dbf-memo.load Normal file
View File

@ -0,0 +1,13 @@
LOAD DBF
FROM data/DNORDOC.DBF with encoding cp866
INTO postgresql:///pgloader
TARGET TABLE dbf.dnordoc
WITH truncate, create table, disable triggers
BEFORE LOAD DO $$ create schema if not exists dbf; $$
cast
column dnordoc.normdocid
to uuid
using (lambda (normdocid)
(empty-string-to-null (right-trim normdocid))),
column dnordoc.doctype to integer using db3-numeric-to-pgsql-integer;

View File

@ -1,5 +1,8 @@
LOAD DBF
FROM http://www.insee.fr/fr/methodes/nomenclatures/cog/telechargement/2013/dbf/historiq2013.zip
-- FROM http://www.insee.fr/fr/methodes/nomenclatures/cog/telechargement/2013/dbf/historiq2013.zip
FROM https://www.insee.fr/fr/statistiques/fichier/2114819/france2016-dbf.zip
with encoding cp850
INTO postgresql:///pgloader
TARGET TABLE dbf.france2016
WITH truncate, create table
SET client_encoding TO 'latin1';
BEFORE LOAD DO $$ create schema if not exists dbf; $$;

View File

@ -6,4 +6,7 @@
LOAD DBF
FROM data/reg2013.dbf with encoding cp850
INTO postgresql:///pgloader?public.reg2013
WITH truncate, create table, disable triggers;
WITH truncate, create table, disable triggers
CAST column "reg2013"."region" to integer,
column "reg2013".tncc to smallint;

7
test/fixed-guess.dat Normal file
View File

@ -0,0 +1,7 @@
QECDPO QECSEM QELSEM QERSEM QENCOM QESCRE QEACRE QEMCRE QEJCRE QEHCRE QECUCR QESMAJ QEAMAJ QEMMAJ QEJMAJ QEHMAJ QECUMJ QETOPD
ACT INV Inventaire par niveau Par niveau 0 20 7 10 29 180457 HLJFD 20 7 10 29 180457 HLJFD 0
ACT IPI Inventaire pickings / colonnes Picking / Colo. 0 20 8 4 28 164330 HLJFD 20 8 4 28 164330 HLJFD 0
ACT STB SEQUENCE STANDARD BACKUP STANDARD 0 20 12 11 13 154308 HLJFL 20 12 11 13 154308 HLJFL 0
ACT STC SEQUENCE backup 250913 STANDARD 0 20 13 9 25 161133 HLJFL 20 13 9 25 161133 HLJFL 0
ACT STD SEQUENCE STANDARD STANDARD 0 20 12 11 13 154813 HLJFL 20 12 11 13 154813 HLJFL 0
ACT TTT test test 0 20 12 11 13 102211 HLJFL 20 12 11 13 102211 HLJFL 0

36
test/fixed-guess.load Normal file
View File

@ -0,0 +1,36 @@
LOAD FIXED
FROM fixed-guess.dat
INTO postgresql:///pgloader
TARGET TABLE fixed.guess
WITH fixed header
SET work_mem to '14MB',
standard_conforming_strings to 'on'
before load do
$$ create schema if not exists fixed; $$,
$$ drop table if exists fixed.guess; $$,
$$
create table fixed.guess
(
QECDPO character varying(3),
QECSEM character varying(3),
QELSEM character varying(30),
QERSEM character varying(15),
QENCOM integer,
QESCRE smallint,
QEACRE smallint,
QEMCRE smallint,
QEJCRE smallint,
QEHCRE integer,
QECUCR character varying(10),
QESMAJ smallint,
QEAMAJ smallint,
QEMMAJ smallint,
QEJMAJ smallint,
QEHMAJ integer,
QECUMJ character varying(10),
QETOPD character varying(1)
);
$$ ;

25
test/mysql/hex.sql vendored Normal file
View File

@ -0,0 +1,25 @@
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `encryption_key_canary` (
`encrypted_value` blob,
`nonce` tinyblob,
`uuid` binary(16) NOT NULL,
`salt` tinyblob,
PRIMARY KEY (`uuid`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Dumping data for table `encryption_key_canary`
--
LOCK TABLES `encryption_key_canary` WRITE;
/*!40000 ALTER TABLE `encryption_key_canary` DISABLE KEYS */;
INSERT INTO `encryption_key_canary` VALUES (
0x1F36F183D7EE47C71453850B756945C16D9D711B2F0594E5D5E54D1EC94E081716AB8642AA60F84B50F69454D098122B7136A0DEB3AF200C2C5C7500BDFA0BD9689CCBF10A76972374882B304F7F15A227E815989FC87EEB72612396F569C662E72A2A7555E654605A3B83C1C753297832E52C5961E81EBC60DC43D929ABAB8CB14601DEFED121604CEB26210AB6D724,
0x044AA707DF17021E55E9A1E4,
0x88C2982F428A46B7B71B210618AE1658,
0xAE7F18028E7984FB5630F7D23FB77999C6CA7CF5355EF0194F3F16521EA7EC503F566229ED8DC5EFBBE9C12BA491BDDC939FE60FA31FB9AF123B2B4D5B7A61FE
);
/*!40000 ALTER TABLE `encryption_key_canary` ENABLE KEYS */;
UNLOCK TABLES;

View File

@ -11,12 +11,17 @@ load database
ALTER SCHEMA 'pgloader' RENAME TO 'mysql'
ALTER TABLE NAMES MATCHING ~/./ SET TABLESPACE 'pg_default'
-- INCLUDING ONLY TABLE NAMES MATCHING 'encryption_key_canary'
CAST column utilisateurs__Yvelines2013-06-28.sexe
to text drop not null using empty-string-to-null,
column base64.id to uuid drop typemod,
column base64.data to jsonb using base64-decode,
-- This is now a default casting rule for MySQL
-- type bigint when signed to bigint drop typemod,
type decimal
when (and (= 18 precision) (= 6 scale))
to "double precision" drop typemod,
@ -31,6 +36,10 @@ load database
using zero-dates-to-null,
type timestamp with extra on update current timestamp
to "timestamp with time zone" drop extra
to "timestamp with time zone" drop extra,
BEFORE LOAD DO $$ create schema if not exists mysql; $$;
column encryption_key_canary.uuid
to uuid drop typemod using byte-vector-to-hexstring
BEFORE LOAD DO
$$ create schema if not exists mysql; $$;

60
test/mysql/my.sql vendored
View File

@ -118,6 +118,36 @@ CREATE TABLE `domain_filter` (
KEY `domain_filter` (`type`)
) ENGINE=InnoDB DEFAULT CHARSET=ascii;
/*
* https://github.com/dimitri/pgloader/issues/904
*/
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `encryption_key_canary` (
`encrypted_value` blob,
`nonce` tinyblob,
`uuid` binary(16) NOT NULL,
`salt` tinyblob,
PRIMARY KEY (`uuid`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Dumping data for table `encryption_key_canary`
--
LOCK TABLES `encryption_key_canary` WRITE;
/*!40000 ALTER TABLE `encryption_key_canary` DISABLE KEYS */;
INSERT INTO `encryption_key_canary` VALUES (
0x1F36F183D7EE47C71453850B756945C16D9D711B2F0594E5D5E54D1EC94E081716AB8642AA60F84B50F69454D098122B7136A0DEB3AF200C2C5C7500BDFA0BD9689CCBF10A76972374882B304F7F15A227E815989FC87EEB72612396F569C662E72A2A7555E654605A3B83C1C753297832E52C5961E81EBC60DC43D929ABAB8CB14601DEFED121604CEB26210AB6D724,
0x044AA707DF17021E55E9A1E4,
0x88C2982F428A46B7B71B210618AE1658,
0xAE7F18028E7984FB5630F7D23FB77999C6CA7CF5355EF0194F3F16521EA7EC503F566229ED8DC5EFBBE9C12BA491BDDC939FE60FA31FB9AF123B2B4D5B7A61FE
);
/*!40000 ALTER TABLE `encryption_key_canary` ENABLE KEYS */;
UNLOCK TABLES;
/*
* https://github.com/dimitri/pgloader/issues/703
*/
@ -125,6 +155,36 @@ create table `CamelCase` (
`validSizes` varchar(12)
);
/*
* https://github.com/dimitri/pgloader/issues/943
*/
CREATE TABLE `countdata_template`
(
`id` int(11) NOT NULL AUTO_INCREMENT,
`data` int(11) DEFAULT NULL,
`date_time` datetime DEFAULT NULL,
`gmt_offset` smallint(6) NOT NULL DEFAULT '0' COMMENT 'Offset GMT en minute',
`measurement_id` int(11) NOT NULL,
`flags` bit(16) NOT NULL DEFAULT b'0' COMMENT 'mot binaire : b1000=validé, b10000000=supprimé',
PRIMARY KEY (`id`),
UNIQUE KEY `ak_countdata_idx` (`measurement_id`,`date_time`,`gmt_offset`)
)
ENGINE=InnoDB DEFAULT CHARSET=utf8 COMMENT='données de comptage';
INSERT INTO `countdata_template`(`date_time`, `measurement_id`, `flags`)
VALUES (now(), 1, b'1000'),
(now(), 2, b'10000000');
/*
* https://github.com/dimitri/pgloader/issues/1102
*/
CREATE TABLE `uw_defined_meaning` (
`defined_meaning_id` int(8) unsigned NOT NULL,
`expression_id` int(10) NOT NULL DEFAULT '0'
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
CREATE TABLE `fcm_batches` (
`id` int(10) unsigned NOT NULL AUTO_INCREMENT,
`raw_payload` mediumtext COLLATE utf8_unicode_ci,

View File

@ -0,0 +1,3 @@
1 Hello World
2 Hello World
2 foobar foobaz

View File

@ -0,0 +1,77 @@
1 Chai 1 1 10 boxes x 20 bags 180000 39 0 10 f 00000000
2 Chang 1 1 24 - 12 oz bottles 190000 17 40 25 f 00000000
3 Aniseed Syrup 1 2 12 - 550 ml bottles 100000 13 70 25 f 00000000
4 Chef Anton's Cajun Seasoning 2 2 48 - 6 oz jars 220000 53 0 0 f 00000000
5 Chef Anton's Gumbo Mix 2 2 36 boxes 213500 0 0 0 t 00000000
6 Grandma's Boysenberry Spread 3 2 12 - 8 oz jars 250000 120 0 25 f 00000000
7 Uncle Bob's Organic Dried Pears 3 7 12 - 1 lb pkgs. 300000 15 0 10 f 00000000
8 Northwoods Cranberry Sauce 3 2 12 - 12 oz jars 400000 6 0 0 f 00000000
9 Mishi Kobe Niku 4 6 18 - 500 g pkgs. 970000 29 0 0 t 00000000
10 Ikura 4 8 12 - 200 ml jars 310000 31 0 0 f 00000000
11 Queso Cabrales 5 4 1 kg pkg. 210000 22 30 30 f 00000000
12 Queso Manchego La Pastora 5 4 10 - 500 g pkgs. 380000 86 0 0 f 00000000
13 Konbu 6 8 2 kg box 60000 24 0 5 f 00000000
14 Tofu 6 7 40 - 100 g pkgs. 232500 35 0 0 f 00000000
15 Genen Shouyu 6 2 24 - 250 ml bottles 155000 39 0 5 f 00000000
16 Pavlova 7 3 32 - 500 g boxes 174500 29 0 10 f 00000000
17 Alice Mutton 7 6 20 - 1 kg tins 390000 0 0 0 t 00000000
18 Carnarvon Tigers 7 8 16 kg pkg. 625000 42 0 0 f 00000000
19 Teatime Chocolate Biscuits 8 3 10 boxes x 12 pieces 92000 25 0 5 f 00000000
20 Sir Rodney's Marmalade 8 3 30 gift boxes 810000 40 0 0 f 00000000
21 Sir Rodney's Scones 8 3 24 pkgs. x 4 pieces 100000 3 40 5 f 00000000
22 Gustaf's Knäckebröd 9 5 24 - 500 g pkgs. 210000 104 0 25 f 00000000
23 Tunnbröd 9 5 12 - 250 g pkgs. 90000 61 0 25 f 00000000
24 Guaran  Fant stica 10 1 12 - 355 ml cans 45000 20 0 0 t 00000000
25 NuNuCa Nuá-Nougat-Creme 11 3 20 - 450 g glasses 140000 76 0 30 f 00000000
26 Gumbär Gummibärchen 11 3 100 - 250 g bags 312300 15 0 0 f 00000000
27 Schoggi Schokolade 11 3 100 - 100 g pieces 439000 49 0 30 f 00000000
28 Rössle Sauerkraut 12 7 25 - 825 g cans 456000 26 0 0 t 00000000
29 Thüringer Rostbratwurst 12 6 50 bags x 30 sausgs. 1237900 0 0 0 t 00000000
30 Nord-Ost Matjeshering 13 8 10 - 200 g glasses 258900 10 0 15 f 00000000
31 Gorgonzola Telino 14 4 12 - 100 g pkgs 125000 0 70 20 f 00000000
32 Mascarpone Fabioli 14 4 24 - 200 g pkgs. 320000 9 40 25 f 00000000
33 Geitost 15 4 500 g 25000 112 0 20 f 00000000
34 Sasquatch Ale 16 1 24 - 12 oz bottles 140000 111 0 15 f 00000000
35 Steeleye Stout 16 1 24 - 12 oz bottles 180000 20 0 15 f 00000000
36 Inlagd Sill 17 8 24 - 250 g jars 190000 112 0 20 f 00000000
37 Gravad lax 17 8 12 - 500 g pkgs. 260000 11 50 25 f 00000000
38 Côte de Blaye 18 1 12 - 75 cl bottles 2635000 17 0 15 f 00000000
39 Chartreuse verte 18 1 750 cc per bottle 180000 69 0 5 f 00000000
40 Boston Crab Meat 19 8 24 - 4 oz tins 184000 123 0 30 f 00000000
41 Jack's New England Clam Chowder 19 8 12 - 12 oz cans 96500 85 0 10 f 00000000
42 Singaporean Hokkien Fried Mee 20 5 32 - 1 kg pkgs. 140000 26 0 0 t 00000000
43 Ipoh Coffee 20 1 16 - 500 g tins 460000 17 10 25 f 00000000
44 Gula Malacca 20 2 20 - 2 kg bags 194500 27 0 15 f 00000000
45 Rogede sild 21 8 1k pkg. 95000 5 70 15 f 00000000
46 Spegesild 21 8 4 - 450 g glasses 120000 95 0 0 f 00000000
47 Zaanse koeken 22 3 10 - 4 oz boxes 95000 36 0 0 f 00000000
48 Chocolade 22 3 10 pkgs. 127500 15 70 25 f 00000000
49 Maxilaku 23 3 24 - 50 g pkgs. 200000 10 60 15 f 00000000
50 Valkoinen suklaa 23 3 12 - 100 g bars 162500 65 0 30 f 00000000
51 Manjimup Dried Apples 24 7 50 - 300 g pkgs. 530000 20 0 10 f 00000000
52 Filo Mix 24 5 16 - 2 kg boxes 70000 38 0 25 f 00000000
53 Perth Pasties 24 6 48 pieces 328000 0 0 0 t 00000000
54 Tourtière 25 6 16 pies 74500 21 0 10 f 00000000
55 Pâté chinois 25 6 24 boxes x 2 pies 240000 115 0 20 f 00000000
56 Gnocchi di nonna Alice 26 5 24 - 250 g pkgs. 380000 21 10 30 f 00000000
57 Ravioli Angelo 26 5 24 - 250 g pkgs. 195000 36 0 20 f 00000000
58 Escargots de Bourgogne 27 8 24 pieces 132500 62 0 20 f 00000000
59 Raclette Courdavault 28 4 5 kg pkg. 550000 79 0 0 f 00000000
60 Camembert Pierrot 28 4 15 - 300 g rounds 340000 19 0 0 f 00000000
61 Sirop d'érable 29 2 24 - 500 ml bottles 285000 113 0 25 f 00000000
62 Tarte au sucre 29 3 48 pies 493000 17 0 0 f 00000000
63 Vegie-spread 7 2 15 - 625 g jars 439000 24 0 5 f 00000000
64 Wimmers gute Semmelknödel 12 5 20 bags x 4 pieces 332500 22 80 30 f 00000000
65 Louisiana Fiery Hot Pepper Sauce 2 2 32 - 8 oz bottles 210500 76 0 0 f 00000000
66 Louisiana Hot Spiced Okra 2 2 24 - 8 oz jars 170000 4 100 20 f 00000000
67 Laughing Lumberjack Lager 16 1 24 - 12 oz bottles 140000 52 0 10 f 00000000
68 Scottish Longbreads 8 3 10 boxes x 8 pieces 125000 6 10 15 f 00000000
69 Gudbrandsdalsost 15 4 10 kg pkg. 360000 26 0 15 f 00000000
70 Outback Lager 7 1 24 - 355 ml bottles 150000 15 10 30 f 00000000
71 Flotemysost 15 4 10 - 500 g pkgs. 215000 26 0 0 f 00000000
72 Mozzarella di Giovanni 14 4 24 - 200 g pkgs. 348000 14 0 0 f 00000000
73 Röd Kaviar 17 8 24 - 150 g jars 150000 101 0 5 f 00000000
74 Longlife Tofu 4 7 5 kg pkg. 100000 4 20 5 f 00000000
75 Rhönbräu Klosterbier 12 1 24 - 0.5 l bottles 77500 125 0 25 f 00000000
76 Lakkalikööri 23 1 500 ml 180000 57 0 20 f 00000000
77 Original Frankfurter grüne Soáe 12 2 12 boxes 130000 32 0 15 f 00000000

View File

@ -0,0 +1,10 @@
One 1.00 1970-01-01 t 1.23456789012346 First memo\r\n
Two 2.00 1970-12-31 t 2 Second memo\n
Three 3.00 1980-01-01 \N 3 Thierd memo\n
Four 4.00 1900-01-01 \N 4 Fourth memo\n
Five 5.00 1900-12-31 \N 5 Fifth memoo\n
Six 6.00 1901-01-01 \N 6 Sixth memoo\n
Seven 7.00 1999-12-31 \N 7 Seventh memo
Eight 8.00 1919-12-31 \N 8 Eigth memomo
Nine 9.00 \N \N \N Nineth memoo
Ten records stored in this database 10.00 \N \N 0.1 NIL

View File

@ -0,0 +1,108 @@
8755876c-3ba8-4454-a00b-f780fb646773 Переведен в разряд ориентиров \N 0
454de0f6-dd9b-425f-997f-5b7f5f480335 Введен ориентир \N 0
10332651-bc57-4c40-8c5f-75ecd53fa4be Распоряжение № 3-р от 10.01.2006 \N 0
82f3d158-b256-4a60-a339-cfea607ba9ca На оснований сведений предоставленных Муниципальным образованием \N 0
ece08605-28cf-4783-bc47-ca786abcbbc6 На основании сведений предоставленных Муниципальным образованием \N 0
12507136-242a-4c2b-abf5-11ce11e5686c Распоряжение № 6-р от 10.01.2006 \N 0
f2cc78d6-ce1e-42dc-9e6f-95cdb31c83e3 первого заместителя Главы Администрации города Климовска 2011-12-05 1229 11
41153bb0-f274-4b73-8866-a58d5044703e Постановление Главы администрации Зарайского района Московской области от 25.12.2001г. № 563/15 \N 0
d5c11fd3-9215-4fbf-a12a-31f6f6cdb34e Постановление №517 от 26.11.2009 \N 0
01b65e52-ae9f-4e4b-8182-b224ffc54bb3 постановление от 11.01.1999. № 6 Аксеновская сельская администрация. \N 0
0f0a5064-3920-4611-aa6d-fa0e476344ad Постановление № 156 от 05.09.2011 \N 0
cf266913-a688-49f5-929e-540ce7318e15 Договор дарения целого домовладения 1990-07-11 46 900
f979d2c9-2cb1-41d6-b7f9-889f819a2371 Свидетельство о государственной регистрации права 2009-12-03 50-НДN424140 900
f20a44e0-63ab-4d98-a9c6-72b1e06f201f Свидетельство о государственной регистрации права 2010-03-04 50-НДN858931 900
f19cbb61-fcff-4a68-8178-96776a2df798 Свидетельство о государственной регистрации права 2006-11-24 50НАN1475922 900
a47a6d3f-fe98-40d7-b0e3-8c88ba955ce7 Свидетельство о государственной регистрации права 2011-11-21 50-АБN907253 900
ac6d55ad-c0a7-4adc-9c40-d768378ccd13 Свидетельство о государственной регистрации права 2007-10-17 50НБN714944 900
9fadd827-c15f-4e92-9c3a-a5e13ebf2d97 Свидетельство о государственной регистрации права 1999-05-13 АБ 0374899 900
f55c89a7-9336-4480-bdff-6b234db44b87 Свидетельство о государственной регистрации права 2005-08-09 НАN146918 900
557b67d2-d7db-4595-aa15-e53341aedaf8 Свидетельство о государственной регистрации права 2004-03-29 50АЖ № 001490 900
d23d98ef-ace0-4a49-b7ac-d6938a8a2e26 Свидетельство о государственной регистрации права 2004-12-16 50АЖ № 593812 900
f357b341-9b7e-43c9-8653-39b8aca4fd0f Свидетельство о праве на наследство по закону 1990-03-30 1348 900
e474ab43-5c97-463d-b49b-db580bfa056e Свидетельство о праве на наследство по закону 1997-07-04 2220 900
3bda6fb8-c23e-4d09-aa92-40b886ec320b Свидетельство о праве на наследство по завещанию 1985-12-07 1-8905 900
26731327-8ffe-4c59-a1df-9748c304e282 Свидетельство о государственной регистрации права 2011-07-15 50-АГN794863 900
86e31d66-cd25-4087-aabe-1b36769eabdf Свидетельство о государственной регистрации права 2008-05-14 50-НВN042191 900
27b4fc61-07f1-4387-a354-53dd3fd36760 Свидетельство о государственной регистрации права 2004-08-05 50АЖ № 380550 900
1c6c712d-9416-49ca-ac60-2a2b128fb7af Свидетельство о праве на наследство по закону 1953-03-02 1563 900
ce8ade69-13e8-4c22-a342-c08172294eef Свидетельство о государственной регистрации права 2011-07-07 50-АБN901345 900
05cf9c29-089d-4174-bd2c-10399f24d254 Свидетельство о праве на наследство по завещанию 1999-01-22 188 900
1588ff21-bc5a-4c5b-b806-f0e54813aab7 Свидетельство о праве на наследство по завещанию 1986-11-29 1-8356 900
90efbfdb-8e7a-4ac1-9912-0796d5aeb5ee Свидетельство о государственной регистрации права 2010-06-03 50-ААN047931 900
ebdec9ef-91bc-4be8-9aa5-c08a1b56a148 Свидетельство о государственной регистрации права 2002-10-17 50АД №000587 900
4a65951f-d6bb-46ef-8e4b-cb8e0c33c119 Договор купли-продажи дома 1978-03-31 7 900
12c95a57-13fc-48ba-be10-9d3b890842ca Свидетельство о праве на наследство по закону 1980-03-17 2562 900
b8921abf-f480-4ec7-83c2-2c0ebce56ab1 Свидетельство о праве на наследство по завещанию 1985-05-24 1-3681 900
1111bf0b-72dc-4363-880d-4feb71d5f18f Постановление отсутствует \N 0
991b1c77-d355-45ae-bdad-b1bbff5ae80d О присвоении территориям статуса микрорайонов, расположенным в границах р.п. Маркова Иркутского района Иркутской области 2012-10-17 278 7
2295abe5-0dcd-4b6a-895e-5ee419593ff0 Св-во ЕГРЮЛ 47 №000759130 \N 0
114a986a-0299-48d0-8564-8c4f80f46e2c О присвоении адреса пятиквартирному блокированному жилому дому и земельному участку 2013-06-24 1722 6
e1d5998f-ca4f-4a20-927e-ea57623870fc О присвоении адреса пятиквартирному блокированному жилому дому и земельному участку 2013-08-28 2478 6
379196a7-6504-4209-8322-6715c5702c3e О преобразовании МО Семеновского р-на 2010-12-22 211-3 1
e5efb18c-37b2-4e24-9337-8cad8f2ab61d Решение Совета депутатов Пушкинского района Московской области от 23.08.2001г. № 258/26 \N 0
4aacd181-edd2-4e1f-96d9-1eef2cb6b4dc Распоряжение Главы Пушкинского муниципального района Московской области от 22.09.2004г. № 416-р \N 0
28b30309-34a7-48c7-9dda-948355da2e48 NIL 2011-05-13 41-ПГ 0
6bd9f0fa-8b88-4768-9b70-205f30b89f66 NIL 2011-05-13 41-ПГ 6
57a6e03e-ca69-4839-be79-e111b18b48fb о предоставлении информации 2014-04-22 22-17/86546 11
27440870-1632-4dc0-9351-35322c8388cd Получено по результатам сверки, соглашение №18 \N 0
5fe6051f-e0fb-4f48-89d5-90095c7a80f4 Об утверждении наименований улиц и домов 2001-12-20 № 87 9
1fdd110a-79ee-454d-974e-a6e4b1c214a1 Справка об адресной регистрации объекта недвижимости 2012-07-23 63 1361 14
1fcd2e15-569d-4f94-b119-4b0fe95c0af5 Справка об адресной регистрации объекта недвижимости 2014-11-10 АРС 63 3168 14
1eb4103e-fa24-440b-b02c-a2ebdd1e85c3 Администрация Б_Исаковского МО (84015130236-Вера Итальева). Нет объектов по этой улице \N 0
6dd49c0a-e438-4d38-a018-0b2daef97bc0 справочник ОКТМО 2014-03-01 б/н 0
da97a3d9-80fd-4e54-93ce-7d5d43dc08b7 Об утверждении адреса объекта недвижимости 2015-08-11 1119-РП 15 1e1128e6-9745-411c-9f40-c6c0dabc5179
ecd27d88-e0b1-4707-8ca1-300fac707219 Об утверждении адреса объекта недвижимости\r\n 2014-07-30 610-РП 15 00d8403a-07db-4246-932e-b39e83cb84fb
bf487456-39ff-40b8-8f1f-ad42219983bb Об утверждении адреса объекта недвижимости 2014-07-30 621-РП 15 dbf5bbb6-37ef-44ef-966a-0b1297283c08
9359b964-8b7e-4ba8-b730-b81351990781 Об утверждении адреса объекта недвижимости\r\n 2014-07-30 627-РП 15 af07b22f-2564-4a3a-932f-6c01d416d0ae
cc9d10c9-be58-4002-8949-786b1c421695 Об утверждении адреса объекта недвижимости\r\n 2014-07-30 624-РП 15 0ed85e7d-4a63-44f3-a6be-33e68edb428f
5b279d51-b2b9-4022-9738-2f8288b6c50b Об утверждении адреса объекта недвижимости\r\n 2014-07-30 644-РП 15 da4e3cf4-96da-45c6-a01a-7fe46baaf520
792fff5b-4c46-4bb0-8a7f-85772c66bd00 Об утверждении адреса объекта недвижимости 2014-07-30 620-РП 15 c97a7c94-b633-48e1-ad85-dd34ae3a2962
bd9ff5a4-56b7-4c8d-8dd9-9b271d507363 Об утверждении адреса объекта недвижимости\r\n 2014-07-30 625-РП 15 baf7eb49-3913-4698-8561-338143a15a7a
9090e528-db38-42b3-922d-d624023b02a7 Об утверждении адреса объекта недвижимости 2014-07-02 648-РП 15 d03ca1a0-a6fe-4887-8b7d-c60c78000de7
bd07d6c8-5acd-45b8-a2ac-8b0bb76ed8f1 Об утверждении адреса объекта недвижимости 2016-10-27 754-РП 15 99cce508-dbc6-4b5a-bbdc-1ee431d59776
29a3937b-4144-4553-bae2-381bd1c9b3e1 Об утверждении адреса объекта недвижимости 2014-07-30 640-РП 15 fe388e7b-25ef-4d48-9e1d-e1172a8dc1d7
72117b35-47ba-45e8-9549-ec1138d5c110 Об утверждении адреса объекта недвижимости 2014-07-30 628-РП 15 c2a6912c-0daa-4dc4-8174-652124f67b8f
f1e0ba8e-41e1-49bd-827f-f51384f69a63 Об утверждении адреса объекта недвижимости 2015-08-11 1118-РП 15 13661d19-f7d2-4a81-a4f3-4c68d064e092
7b4ee1ec-ac52-4051-878a-b760ebd11984 Об утверждении адреса объекта недвижимости 2014-07-30 612-РП 15 10a18a62-8a1c-4485-9864-04c5ce2b9697
58676266-ce1c-43dd-a9ba-2d0f07e08f9b Об утверждении адреса объекта недвижимости 2015-08-11 1117-РП 15 59a223b5-d9b4-417b-80b0-5359888a3697
202059db-5850-4c47-9a64-4c393283e37b Об утверждении адреса объекта недвижимости 2014-07-30 630-РП 15 fd9a6a09-8883-4ba2-8101-4ce45b0ed842
44d608c4-66b3-443b-af0f-88cfc02693d2 Об утверждении адреса объекта недвижимости 2014-06-30 616-РП 15 d04eb2ef-7064-49ec-a5b5-cd187bd31aff
7cfd36b2-3815-4c89-a988-3fd4202d6848 Об утверждении адреса объекта недвижимости\r\n 2014-07-02 657-РП 15 c9d056cb-725b-4450-931e-2bfd334aeeb1
78aaf39c-a3bd-4f64-9f66-913dc14e99f6 Об утверждении адреса объекта недвижимости\r\n 2014-07-30 641-РП 15 246461e5-243e-431f-9f24-690135c3b629
1f493b45-55f7-4ca5-898e-a950b39aa74e Об утверждении адреса объекта недвижимости\r\n 2014-07-30 617-РП 15 ca1d0095-d16c-44e6-b339-ea20184cba00
7ec1bb44-8156-4477-bc38-46a89e5eb32e Об утверждении адреса объекта недвижимости\r\n 2014-07-30 615-РП 15 88397fca-3ef7-4a1c-8191-b3ec455e5a4f
fd933a45-132d-4edf-8b1a-783ef6632237 Об утверждении адреса объекта недвижимости 2014-07-30 632-РП 15 abb2cced-e0e4-488d-8320-6e27b21dcb41
8bd3faba-0a22-4f55-b0dc-3e7bea10aa7d Об утверждении адреса объекта недвижимости\r\n 2014-07-30 636-РП 15 d703647e-6ded-4696-a7cb-aa28331bd587
8f0c5a11-fc4a-405b-bd99-9ef3ff69882a Об утверждении адреса объекта недвижимости 2014-05-30 626-РП 15 a96e41f2-a013-4bbd-b785-0555483cbe20
20031eb3-426e-469d-baca-e1229e7e6401 Об утверждении адреса объекта недвижимости 2014-07-21 756-РП 15 b6bd034a-d3d6-4d8f-98ad-51c53a447a20
4e8b79c1-d0c7-4a0b-8893-40a9633f21ba Об утверждении адреса объекта недвижимости\r\n 2014-12-22 1520-РП 15 74a7ca58-8f15-4a99-ad7c-497770d317d9
c32f5dc2-294c-4629-949c-e09a0f4c933d Об утверждении адреса объекта недвижимости\r\n 2014-07-30 635-РП 15 e67214b3-ffff-4505-afae-5b0fe0e21202
5b2ba662-a9e4-45bc-ac6c-e179533d27e6 Об утверждении адреса объекта недвижимости\r\n 2014-07-30 629-РП 15 29ddb992-901f-4e74-8c49-b041c801eb96
fd270594-7084-4556-87de-b51d7fe5104e Об утверждении адреса объекта недвижимости 2014-07-02 649-РП 15 f1075a71-5dcd-4200-9266-24d1e8e24019
e3b1e44e-5443-4072-ab85-2372c960bbb1 Об утверждении адреса объекта недвижимости 2014-07-30 633-РП 15 def76252-d24b-4fdf-8ff9-933f6b3510df
cc5c27ca-110e-4a02-84e3-341461d229c7 Об утверждении адреса объекта недвижимости\r\n 2014-07-30 623-РП 15 4530591f-a238-4e22-b3ed-84b1fa9e8dc8
bfeec173-c831-4388-ab0e-df13a934a2b7 Об утверждении адреса объекта недвижимости 2014-07-30 614-РП 15 e7f8201a-0963-4fe4-a312-c1c956ebfe74
26d06f2f-e45c-48a0-8038-c35061c5dc62 Об утверждении адреса объекта недвижимости 2015-08-11 1120-РП 15 971ab4be-9ac7-4caa-b168-ce4b6ab4311b
e5158880-1a87-4753-8c78-9bcb964a1f98 Об утверждении адреса объекта недвижимости 2014-07-30 622-РП 15 4b9930c9-210b-43ca-8f80-d97d8ead0562
6cbc4342-6c7d-4acd-9d7d-b4f0fe33a8a7 Об утверждении адреса объекта недвижимости 2014-07-30 634-РП 15 071a699a-ecda-4a88-8589-acf56d1d464f
1ba0606b-0cd1-45f8-9291-aee4f3cb27bf Об утверждении адреса объекта недвижимости 2015-08-11 1125-РП 15 01710dea-f49c-40b9-8e9a-3f70b87301b6
c5ebb66b-8511-4e9f-854c-e124ad376eae Об утверждении адреса объекта недвижимости 2014-07-30 637-РП 15 61db357b-1091-414d-b2e3-6dd9398b374d
ebf0837a-210d-4876-a5b1-b89d75e6f773 Об утверждении адреса объекта недвижимости\r\n 2014-07-30 613-РП 15 61e5c44e-4d44-42ba-8fa0-06aa6d9cc001
028a34b0-8a72-4e59-8d03-ea63294be820 Об утверждении адреса объекта недвижимости\r\n 2014-07-30 642-РП 15 0732cd48-4c49-4917-ba23-96ae8eb8c246
0b7b38c4-3132-470e-a532-62d23d75abb3 Об утверждении адреса объекта недвижимости 2014-07-02 658-РП 15 e3c7da30-60a3-414f-a547-ce9cc1cb7e3a
74429ace-0bca-4090-bb4b-90c2d0dd443b Об утверждении адреса объекта недвижимости 2014-07-30 631-РП 15 21be5ae7-6fbc-4d04-99c5-242412298be5
93886316-51c7-409d-a2a4-110006a99ef6 Об утверждении адреса объекта недвижимости 2015-08-11 1121-РП 15 0c3486f6-368a-4d94-ba71-f03e46f9e59a
40789141-9eac-4329-8ed2-afbbecf260f7 Об утверждении адреса объекта недвижимости 2015-08-11 1122-РП 15 812db3e7-9505-4ccf-8348-d7553b9459c2
13655622-c02a-4225-916d-5e5f64bd4f5d Об утверждении адреса объекта недвижимости 2014-07-30 638-РП 15 0a924b39-51bb-4600-ad02-658ed92341e4
e0e3f194-b1a7-4bc8-a4f7-106b9691720c Об утверждении адреса объекта недвижимости 2014-06-20 640-РП 15 1f06db79-7bf9-4236-872f-97f13cd5135f
08b98fbe-9223-45cf-9c3f-8a65d0131c90 Об утверждении адреса объекта недвижимости\r\n 2014-07-30 643-РП 15 ed2a5f60-5b6d-418a-b5a3-e223313391fb
055c5487-f33d-4e58-8b73-259e16819497 Об утверждении адреса объекта недвижимости 2014-07-30 611-РП 15 ce08eb2c-eee0-477d-bf99-978f5ca2f5de
0d9ff7f0-0a9b-49e4-9b05-fd598933b79a Об утверждении адреса объекта недвижимости 2014-07-30 618-РП 15 59671ff1-17f8-466d-84e2-24fd60733066
71f7fcb7-befa-4695-8e02-31295bb9c96b Об утверждении адреса объекта недвижимости 2015-08-11 1124-РП 15 df79a580-5857-432a-8c04-4b3f78056a2a
107e9b21-deb2-48ca-bd7a-47fa25ab6339 Об утверждении адреса объекта недвижимости\r\n 2014-07-02 656-РП 15 c93fac38-d568-4d74-a06e-d28dc3f3d5e9
1313de53-5b75-446a-80e5-89a01d618284 Об утверждении адреса объекта недвижимости 2015-08-11 1123-РП 15 63c9e3e2-ee59-452f-b4b1-18aeee7f961a
94dbc5dc-fa6e-45bd-9c4c-abd2899ead24 Об утверждении адреса объекта недвижимости 2014-07-30 639-РП 15 f4c96b2a-ec89-4eb1-b476-f56d4dd41099
67427af0-14d7-467a-8416-12210f828d6e Постановление Главы с.п. Дороховское 2012-04-27 209 7 778ace47-3a56-4af4-9829-6d8798bbce3c
a817e5ab-f2e5-412f-a9a5-b3ade5d95d90 < Об образовании нового муниципального образования Лобановское сельское поселение> 2013-05-06 194-ПК 3
f1fcf1db-d065-4f42-92d0-25d6c8f17588 NIL 2015-10-15 4992/10 8