pax_global_header 0000666 0000000 0000000 00000000064 15005352415 0014512 g ustar 00root root 0000000 0000000 52 comment=4f2d0cf8e7a0d3b1ffbfe84989f38eabea5589b4
pg-el-0.54/ 0000775 0000000 0000000 00000000000 15005352415 0012446 5 ustar 00root root 0000000 0000000 pg-el-0.54/.github/ 0000775 0000000 0000000 00000000000 15005352415 0014006 5 ustar 00root root 0000000 0000000 pg-el-0.54/.github/workflows/ 0000775 0000000 0000000 00000000000 15005352415 0016043 5 ustar 00root root 0000000 0000000 pg-el-0.54/.github/workflows/elisp-check.yml 0000664 0000000 0000000 00000001360 15005352415 0020755 0 ustar 00root root 0000000 0000000 name: elisp-check
on: [push]
jobs:
check:
runs-on: ubuntu-latest
timeout-minutes: 15
strategy:
fail-fast: false
matrix:
# Emacs versions 26.3 and 28.2 are raising spurious "wrong number of arguments" errors, so
# don't test with them.
emacs_version:
- 27.2
- 29.1
- snapshot
ignore_warnings:
- true
include:
- emacs_version: snapshot
ignore_warnings: false
steps:
- uses: actions/checkout@v4
- uses: purcell/setup-emacs@master
with:
version: ${{ matrix.emacs_version }}
- uses: leotaku/elisp-check@master
with:
file: pg.el
ignore_warnings: ${{ matrix.ignore_warnings }}
pg-el-0.54/.github/workflows/mdbook.yml 0000664 0000000 0000000 00000002474 15005352415 0020050 0 ustar 00root root 0000000 0000000 # Build our user documentation using mdbook and publish to GitHub Pages
#
# The default build of highlight.js bundled with mdbook does not support Emacs Lisp syntax
# highlighting, so we use a custom build downloaded from https://highlightjs.org/download with the
# "lisp" language enabled. This is placed at theme/highlight.js to override the bundled version.
name: Build documentation
on: workflow_dispatch
jobs:
build-mdbook:
runs-on: ubuntu-latest
permissions:
contents: write
pages: write
id-token: write # To update the deployment status
name: Build documentation with mdbook and deploy to GitHub pages
steps:
- uses: dtolnay/rust-toolchain@nightly
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Install latest mdbook
run: |
cargo install mdbook
cargo install mdbook-admonish
- name: Build book
run: |
(cd doc && mdbook-admonish install .)
(cd doc && mdbook build )
- name: Setup GitHub Pages
uses: actions/configure-pages@v4
- name: Upload artifact
uses: actions/upload-pages-artifact@v3
with:
path: 'doc/book'
- name: Deploy to GitHub Pages
id: deployment
uses: actions/deploy-pages@v4
pg-el-0.54/.github/workflows/test-macos.yml 0000664 0000000 0000000 00000001671 15005352415 0020652 0 ustar 00root root 0000000 0000000 name: test-macos
on: push
jobs:
runner-job:
# using the PostgreSQL version pre-installed on GitHub's MacOS virtual images
runs-on: macos-latest
timeout-minutes: 20
steps:
- name: Set runner timezone
uses: szenius/set-timezone@v2.0
with:
timezoneLinux: "UTC-01:00"
timezoneMacos: "UTC-01:00"
timezoneWindows: "UTC-01:00"
- name: Set up pre-installed PostgreSQL
uses: ikalnytskyi/action-setup-postgres@v6
with:
username: pgeltestuser
password: pgeltest
database: pgeltestdb
id: postgres
env:
TZ: UTC-01:00
- name: Install Emacs
uses: purcell/setup-emacs@master
with:
version: 29.4
- name: Check out repository code
uses: actions/checkout@v4
- name: Run tests over network
run: make -C test test
env:
TZ: UTC-01:00
pg-el-0.54/.github/workflows/test-pgv12.yml 0000664 0000000 0000000 00000002064 15005352415 0020504 0 ustar 00root root 0000000 0000000 name: test-pgv12
on: push
jobs:
runner-job:
runs-on: ubuntu-24.04
timeout-minutes: 15
services:
postgres:
# Docker Hub image
image: postgres:12-alpine
env:
TZ: UTC-01:00
POSTGRES_DB: pgeltestdb
POSTGRES_USER: pgeltestuser
POSTGRES_PASSWORD: pgeltest
# Set health checks to wait until postgres has started
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
# Maps tcp port 5432 on service container to the host
- 5432:5432
volumes:
- /var/run/postgresql:/var/run/postgresql
steps:
- name: Install Emacs
uses: purcell/setup-emacs@master
with:
version: 29.4
- name: Check out repository code
uses: actions/checkout@v4
- name: Run connection tests over TCP
run: make -C test
- name: Run connection tests over local Unix socket
run: make -C test test-local
pg-el-0.54/.github/workflows/test-pgv15.yml 0000664 0000000 0000000 00000002430 15005352415 0020504 0 ustar 00root root 0000000 0000000 name: test-pgv15
on: push
jobs:
runner-job:
runs-on: ubuntu-latest
timeout-minutes: 15
services:
# Label used to access the service container
postgres:
# Docker Hub image
image: postgres:15
env:
TZ: UTC-01:00
POSTGRES_DB: pgeltestdb
POSTGRES_USER: pgeltestuser
POSTGRES_PASSWORD: pgeltest
# Set health checks to wait until postgres has started
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
# Maps tcp port 5432 on service container to the host
- 5432:5432
volumes:
- /var/run/postgresql:/var/run/postgresql
steps:
# The version of Emacs available in the ancient Ubuntu distribution used by GitHub
# actions is too old.
- name: Install Emacs
uses: purcell/setup-emacs@master
with:
version: 29.4
- name: Check out repository code
uses: actions/checkout@v4
- name: Run tests over TCP socket
run: make -C test test
env:
TZ: UTC-01:00
- name: Run tests over local Unix socket
run: make -C test test-local
env:
TZ: UTC-01:00
pg-el-0.54/.github/workflows/test-pgv15b-docker.yml 0000664 0000000 0000000 00000002035 15005352415 0022114 0 ustar 00root root 0000000 0000000 name: test-pgv15b-docker
on: push
jobs:
runner-job:
runs-on: ubuntu-latest
timeout-minutes: 15
steps:
- name: Install Emacs
run: |
sudo apt-get update
sudo apt-get install emacs podman
- name: Start Docker/Podman image of PostgreSQL
run: sudo docker run --detach \
--name pgsql \
--publish 5432:5432 \
-e POSTGRES_DB=pgeltestdb \
-e POSTGRES_USER=pgeltestuser \
-e POSTGRES_PASSWORD=pgeltest \
docker.io/library/postgres:15
# -c ssl=on \
# -c ssl_cert_file=/etc/ssl/certs/ssl-cert-snakeoil.pem \
# -c ssl_key_file=/etc/ssl/private/ssl-cert-snakeoil.key
# --health-cmd pg_isready \
# --health-interval 10s \
# --health-timeout 5s \
# --health-retries 5 \
- name: Check out repository code
uses: actions/checkout@v4
- name: Run connection tests from Emacs
run: make -C test test test-tls
pg-el-0.54/.github/workflows/test-pgv16.yml 0000664 0000000 0000000 00000002126 15005352415 0020507 0 ustar 00root root 0000000 0000000 name: test-pgv16
on: push
jobs:
runner-job:
runs-on: ubuntu-24.04
timeout-minutes: 15
services:
# Label used to access the service container
postgres:
# Docker Hub image
image: postgres:16
env:
TZ: UTC-01:00
POSTGRES_DB: pgeltestdb
POSTGRES_USER: pgeltestuser
POSTGRES_PASSWORD: pgeltest
# Set health checks to wait until postgres has started
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
# Maps tcp port 5432 on service container to the host
- 5432:5432
volumes:
- /var/run/postgresql:/var/run/postgresql
steps:
- name: Install Emacs
uses: purcell/setup-emacs@master
with:
version: 29.4
- name: Check out repository code
uses: actions/checkout@v4
- name: Run tests over TCP socket
run: make -C test test
- name: Run tests over local Unix socket
run: make -C test test-local
pg-el-0.54/.github/workflows/test-windows.yml 0000664 0000000 0000000 00000001432 15005352415 0021235 0 ustar 00root root 0000000 0000000 name: test-windows
on: push
jobs:
runner-job:
runs-on: windows-latest
timeout-minutes: 20
steps:
- name: Set runner timezone
uses: szenius/set-timezone@v2.0
with:
timezoneLinux: "UTC-01:00"
timezoneMacos: "UTC-01:00"
timezoneWindows: "UTC-01:00"
- name: Set up pre-installed PostgreSQL
uses: ikalnytskyi/action-setup-postgres@v6
with:
username: pgeltestuser
password: pgeltest
database: pgeltestdb
id: postgres
env:
TZ: UTC-01:00
- name: Install Emacs
run: choco install --no-progress -y emacs
- name: Check out repository code
uses: actions/checkout@v4
- name: Run tests
run: make -C test test
pg-el-0.54/.justfile 0000664 0000000 0000000 00000001134 15005352415 0014273 0 ustar 00root root 0000000 0000000 # For use with the just command runner, https://just.systems/
default:
@just --list
export INSTALL_EL := '''
(unless (package-installed-p 'pg)
(package-vc-install "https://github.com/emarsden/pg-el" nil nil 'pg))
(require 'pg)
'''
tmpdir := `mktemp -d`
init-el := tmpdir / "init.el"
# Check whether our package-vc-install instructions work on a pristine install.
installability:
printf '%s' "$INSTALL_EL" > {{ init-el }}
ls -l {{ init-el }}
cat {{ init-el }}
podman run --rm -ti -v {{ tmpdir }}:/tmp docker.io/silex/emacs:29.4-ci \
${EMACS:-emacs} -l /tmp/init.el
pg-el-0.54/CHANGELOG.md 0000775 0000000 0000000 00000057131 15005352415 0014271 0 ustar 00root root 0000000 0000000 # Changelog
## [0.54] - Unreleased
- Handle `ParameterStatus` and `NotificationResponse` messages in `pg-fetch`.
- New function `pg-set-client-encoding` to set the client-side encoding for data sent to the
backend. Calling this function sends an SQL request to the backend telling it of the new client
encoding, and sets the per-connection client encoding (accessible via `pgcon-client-encoding`). It
also checks that the requested client encoding is one supported by PostgreSQL. Note that most of
the PostgreSQL variants only support UTF8 as a client-encoding.
- Implement workaround for `pg-column-default` for Google Spanner and QuestDB.
- Fix for parsing empty arrays.
- New subclass of `pg-error` `pg-transaction-missing` triggered by an attempt to rollback with no
transaction in progress.
- Add preliminary support for the ReadySet PostgreSQL proxy as a PostgreSQL variant.
- Add preliminary support for the YottaDB Octo database as a PostgreSQL variant.
## [0.53] - 2025-04-19
- In `pg-sync`, try to read the `ReadyForQuery` message sent by the backend.
- Add test code for the PgDog and PgCat sharding connection poolers.
- Implement workarounds for the RisingWave variant in `pg-table-comment` and `pg-column-comment` and
their companion setf functions.
- Populate our oid<->typname mappings with predefined data for variants that lack a properly populated
`pg_type` table (this is the case for GreptimeDB, which contains invalud information such as `UInt8`
<-> 7). Although strictly speaking there is no guarantee that this internal information will
remain unchanged in future PostgreSQL releases, it is unlikely to change.
## [0.52] - 2025-04-06
- In `pg-fetch-prepared`, close the portal after fetching the tuple data.
- Provide a basic stub implementation for `pg-table-owner` for CrateDB.
- Add code to detect the Greenplum PostgreSQL variant.
## [0.51] - 2025-03-29
- In `pg-connect/uri`, call `url-unhex-string` on user/password only if non-nil. This restores the
ability to fall back to `PGUSER` and `PGPASSWORD` environment variables. Patch from @akurth.
- Fix bug in `pg-table-comment` function and in the associated setf function.
- Provide an empty implementation of `pg-column-default` and `pg-table-comment` for the YDB variant.
- New error types `pg-invalid-catalog` name and `pg-timeout`.
## [0.50] - 2025-03-22
- Implement new function `pg-column-comment` with a defsetf.
- Improve `cl-print-object` for a connection object when the pid and database slots are unbound.
- Further workarounds in `pg-table-comment` for QuestDB and Spanner variants.
- Add workarounds in `pg-column-comment` for CrateDB and QuestDB.
- Add workaround in `pg-function-p` for QuestDB.
- Add a custom SQL query for `pg-column-autogenerated-p` to handle limitations in the CrateDB
variant.
- Add workaround for variant YDB in `pg-tables`.
## [0.49] - 2025-03-08
- Implement hex-decoding for the username and password in `pg-connect/uri`.
- New error classes `pg-character-not-in-repertoire` and `pg-plpgsl-error`.
- New error subclasses of `pg-error`: `pg-database-error`, `pg-operational-error`,
`pg-programming-error`, `pg-data-error`, `pg-integrity-error`. These are superclasses of some of
the leaf error subclasses: for example the errors related to integrity violations
`pg-restrict-violation`, `pg-not-null-violation` and `pg-foreign-key-violation` are all subclasses
of `pg-integrity-error`.
- Filter out system-internal tables in the list returned by `pg-tables` for the Clickhouse variant.
- New function `pg-current-schema` which returns the value of `current_schema()` (or equivalent on
PostgreSQL variants that do not implement that function).
- Implement custom logic for `pg-table-comment` for the semi-compatible PostgreSQL variant
CockroachDB.
- Provide parsing and serialization support for the types defined by the vchord_bm25 extension,
which implements the BM25 ranking algorithm that is useful for information retrieval applications.
See file `pg-bm25.el`.
## [0.48] - 2025-02-22
- The error hierarchy has been enriched with many subclasses of `pg-error`, distinguishing between
an SQL syntax error, a division by zero, a numerical overflow, and so on. See the pg-el manual for
details.
- Added logic to recognize the PostgreSQL variant Materialize.
- Error reporting: if the constraint name field is present, it is saved in the pgerror struct and
reported to the user.
## [0.47] - 2025-01-25
- New variable `pg-new-connection-hook` contains a list of functions to be run when a new PostgreSQL
connection is established. Each function will be called with the new connection as the single
argument. The default value of this variable includes the function `pg-detect-server-variant`,
which attempts to determine the type of semi-compatible PostgreSQL variant that we are connected
to.
- New generic function `pg-do-variant-specific-setup` that allows you to specify setup operations to
run for a particular semi-compatible PostgreSQL variant.
- Added logic to recognize the PostgreSQL variant AlloyDB Omni.
## [0.46] - 2025-01-12
- Fixes to the handling of timezones when parsing and serializing `time` and `timetz` data. Timezone
information was previously lost during parsing and serialization. Patch from @akurth.
- Further workarounds in `pg-table-column` and `pg-column-default` to tolerate deficiencies in
the PostgreSQL compatibility of CrateDB and CockroachDB.
- Add a workaround in `pg-schemas` for RisingWave database.
## [0.45] - 2024-12-22
- When supported by Emacs, enable `TCP_NODELAY` on the network connections to PostgreSQL to disable
Nagle's algorithm (network segments are sent as soon as possible, even when they contain little
data). This is done by libpq and results in a 12x (!) speedup on some benchmarks. This is a new
feature available in (currently unreleased) Emacs 31 (bug#74793).
- New function `pg-schemas` which returns the list of the schemas in a PostgreSQL database. Schemas
are a form of namespace, which can contain elements such as tables, sequences, indexes and views.
The default schema name is `public`.
- Add support for detecting the RisingWave database, which is compatible with the PostgreSQL wire
protocol, with certain limitations. For this database, `pgcon-server-variant` returns the symbol
`risingwave`.
- When parsing timestamp and time data, preserve the fractional part of a second (patch from @akurth).
## [0.44] - 2024-12-04
- Detect the PostgreSQL variant TimescaleDB. Implement a specific SQL query for `pg-tables` for this
variant to avoid returning TimescaleDB-internal tables alongside user tables.
- Detect the PostgreSQL variant OrioleDB (really an extension that provides an additional storage
mechanism).
- Implement a specific SQL query for `pg-tables` for the CrateDB variant, to avoid returning system
tables alongside user tables.
- The serialization function for floating point values accepts non-float numeric values.
- When parsing a PostgreSQL connection URI or connection string, additional environment variables
`PGHOST`, `PGHOSTADDR`, `PGPORT`, `PGDATABASE`, `PGUSER`, `PGPASSWORD` and `PGSSLMODE` are used as
default values when a value has not been explicitly set in the string.
## [0.43] - 2024-10-15
- Fix serialization for `UUID` types in prepared statements.
- The serialization of key/value pairs in an `HSTORE` object now respects the client encoding, so
will work correctly with non-ASCII characters.
- Improved error reporting in the pg-geometry library (signalling a subclass of pg-error instead of
triggering an assertion).
- Additional checks on connection startup to identify the PostgreSQL variant IvorySQL (currently a
very compatible variant with additional Oracle compatibility functions).
- Fix bug in startup sequence exposed by very short usernames (reported by Ákos Kiss aka `ak`).
## [0.42] - 2024-09-21
- Fix serialization and deserialization for `CHARACTER` and `BPCHAR` types for non-ASCII values.
PostgreSQL stores these as a single octet, an integer < 256. Characters that are below this limit
but not in the ASCII range (such as many accented characters if your Emacs uses a Latin-1 charset)
need to be encoded and decoded.
- Add support for parameters `connect_timeout` and (nonstandard extension) `read_timeout` when
parsing PostgreSQL connection URIs and connection strings.
- Add functionality to detect the “flavour” variant of (potentially semi-compatible) PostgreSQL
database that we are connected to. The variant is accessible via `pgcon-server-variant` on a
connection object. Detected variants are 'cratedb, 'xata, 'cockroachdb, 'yugabyte, 'questdb,
'greptimedb, 'immudb, 'ydb. This functionality is used to work around certain bugs or incomplete
emulation of PostgreSQL system tables by some of these semi-compatible database implementations.
## [0.41] - 2024-08-31
- User errors in serialization functions (arguments supplied to `pg-exec-prepared` whose type does
not correspond to the SQL-defined type) now signal an error of type `pg-type-error`, which is a
subclass of `pg-user-error`, instead of triggering an assertion failure. This means that they can
be handled using `condition-case`.
- Delete all uses of variable `pg--MAX_MESSAGE_LEN`, because PostgreSQL no longer has such low
limits on message sizes (the only limit being the 4-octet size fields in many message types).
- Support for TLS authentication using client certificates (see the documentation for function
`pg-connect`). The `test-certificates` Makefile target in the `test` directory illustrates the
creation of a working certificate authority and signed client certificates; depending on the value
of connection parameter `clientcert`, PostgreSQL is careful to check that the `CN` field of the
client certificate corresponds to the PostgreSQL user you are connecting as.
## [0.40] - 2024-08-22
- Serialization and deserialization support for [JSONPATH
expressions](https://www.postgresql.org/docs/current/functions-json.html#FUNCTIONS-SQLJSON-PATH).
These expressions are represented in Emacs Lisp as strings.
- Serialization functions now take a second argument `ENCODING` specifying the client-encoding in
use, in the same way as for deserialization functions.
- The mappings `pg--parser-by-oid`, `pg--oid-by-typname` and `pg--type-name-by-oid` have been moved
into the pgcon object, rather than being local variables. This makes it possible to connect from
the same Emacs instance to PostgreSQL-compatible databases that have different OID values for
builtin or user-defined types.
- `pgcon` objects are now defined using `defclass` from EIEIO, instead of using `cl-defstruct`. This
makes it possible to customize the way they are printed, making use in an interactive REPL more
pleasant.
## [0.39] - 2024-07-29
- New function `pg-fetch-prepared` to bind arguments to a prepared statement and fetch results.
Preparing a statement once then reusing it multiple times with different argument values allows
you to avoid the overhead of sending and parsing the SQL query and calculating the query plan.
This is a simple convenience wrapper around functions `pg-bind`, `pg-describe-portal` and
`pg-fetch`.
- New function `pg-ensure-prepared-statement` that either returns the previously prepared statement
saved in the prepared statement cache of our PostgreSQL connection, or prepares the statement and
saves it in the cache. This simplifies the use of cached prepared statements.
- New function `pg-column-autogenerated-p`, which returns non-nil for columns for which you can
insert a row without specifying a value for the column. That includes columns:
- with a specified `DEFAULT` (including `SERIAL` columns)
- specified as `BIGINT GENERATED ALWAYS AS IDENTITY`
- specified as `GENERATED ALWAYS AS expr STORED` (calculated from other columns)
- Fix serialization for the PostgreSQL `BPCHAR` type.
- Move to GPL v3 or later licence (from GPL-2.0-or-later).
## [0.38] - 2024-07-21
- The network connection timeout functionality is disabled on Microsoft Windows platforms, where it
does not function correctly. This is implemented by setting the variable `pg-connect-timeout`
to 0. This setting can also be used to disable the connection timeout on other platforms.
- Fix the deserialization of `TIMESTAMP WITH TIMEZONE` and `TIMESTAMP WITHOUT TIMEZONE` values when
the timezone is not explicitly specified.
- Preliminary serialization and deserialization support for the types used by the PostGIS extension
(types `GEOMETRY`, `GEOGRAPHY`, `SPHEROID`, `BOX2D`, `BOX3D`). Some of these types are
deserialized from the native hex encoding of EWKB returned by PostGIS to text format using the
`geosop` commandline utility, if the variable `pg-gis-use-geosop` is non-nil (which is the
default).
## [0.37] - 2024-07-08
- Fix serialization of JSONB parameters in prepared statements.
- Preliminary serialization and deserialization support for the PostgreSQL `POINT`, `LINE`, `LSEG`,
`BOX`, `PATH` and `POLYGON` geometric types.
- Parsing support for the PostgreSQL `timetz` type (simply parsed as text).
- New function `pg-serialize` converts an Emacs Lisp object to its serialized text format, playing
the inverse role to `pg-parse`.
- Source code split into multiple source files to improve maintainability.
## [0.36] - 2024-06-23
- New utility function `pg-function-p` which checks whether a function with a specified name is
defined in PostgreSQL.
- Better support for the dynamic creation of PostgreSQL types. The cache used to map between
PostgreSQL type names and the corresponding OIDs is now only used for builtin PostgreSQL types,
which should not change over time. Values of all other PostgreSQL types, in particular new types
created using `CREATE TYPE`, will be sent over the wire using the pseudo-OID of 0, telling
PostgreSQL to look up the OID on the backend. This avoids the possibility of invalid cached OID
values caused by type creation or destruction on other connections to PostgreSQL.
- Parse the PostgreSQL UUID type as text.
- Parse the PostgreSQL XML type as text.
- Fix deserialization for the PostgreSQL BPCHAR type.
- Fix serialization and deserialization for the PostgreSQL VECTOR type used by the `pgvector`
extension. Vector embeddings can be floating point numbers, not just integers.
- Implement some workarounds better to support CrateDB.
## [0.35] - 2024-06-08
- New variable `pg-read-timeout` allows you to specify a timeout expressed in seconds when reading
data from PostgreSQL over the network. A complement to the variable `pg-connect-timeout`.
- Accept a `server_version` backend parameter of the form "17beta1", in addition to the standard
format of the form "16.3". Tests pass with PostgreSQL 17 beta1.
## [0.34] - 2024-05-20
- Add deserialization support for the `tsvector` type, used by the PostgreSQL full text search
functionality. They will now be parsed into a list of `pg-ts` structures.
## [0.33] - 2024-05-14
- Add serialization support for the PostgreSQL `date` type.
- Add serialization support for the `vector` type used by the pgvector extension.
## [0.32] - 2024-04-14
- Integer datatypes are now parsed with `cl-parse-integer` instead of `string-to-number` to provide
error detection.
- Add serialization support for the PostgreSQL `timestamp`, `timestamptz` and `datetime` types.
- New feature: logging of SQL queries sent to PostgreSQL on a per-connection basis. Call function
`pg-enable-query-log` on a connection object, and SQL sent to the backend by `pg-exec` and
`pg-exec-prepared` will be logged to a buffer named ` *PostgreSQL query log*` (made unique if
multiple pg-el connections have been made). The name of the buffer is given by accessor function
`pgcon-query-log` on the connection object.
- New variable `pg-connect-timeout` to set a timeout (in seconds) for attempts to connect to
PostgreSQL over the network (does not apply to Unix socket connections).
## [0.31] - 2024-03-28
- Add serialization support for the `hstore` datatype from Emacs Lisp hashtables.
- Add support for schema-qualified names for tables and roles. These are names of the form
`public.tablename` or `username.tablename` (see the PostgreSQL documentation for `CREATE SCHEMA`).
“Ordinary” table names in the `public` schema can be specified as a simple string. Table names
with a different schema are represented by `pg-qualified-name` objects (these are cl-defstruct
objects). Functions that take a table name as an argument (such as `pg-columns` accept either a
normal string or a `pg-qualified-name` object. Functions that return table names, in particular
`pg-tables`, will return strings for tables in the normal `public` schema, and `pg-qualified-name`
objects otherwise.
- Fix bug in the parsing of `pgcon-server-version-major`.
## [0.30] - 2024-03-11
- Add for receiving data in Emacs using the COPY protocol, as a complement to the existing
functionality which uses the COPY protocol to send data from Emacs to PostgreSQL. This allows you
to dump a PostgreSQL table or query result in TSV or CSV format into an Emacs buffer. See function
`pg-copy-to-buffer`.
- Preliminary implementation of connection to PostgreSQL via a connection string of the form
`host=localhost port=5432 dbname=mydb` (see function `pg-connect/string`).
- Preliminary implementation of connection to PostgreSQL via a connection URI of the form
`postgresql://other@localhost/otherdb?connect_timeout=10&application_name=myapp&ssl=true` (see
function `pg-connect/uri`).
- Add serialization function for the `bpchar` type.
- If the environment variable `PGAPPNAME` is set, it will override the default value of variable
`pg-application-name`.
## [0.29] - 2024-03-02
- New function `pg-table-owner`.
- New functions `pg-table-comment` and `(setf pg-table-comment)`.
- New function `pg-column-default` which returns the default value for a column.
## [0.28] - 2024-02-21
- New functions `pg-escape-identifier` and `pg-escape-literal` to escape an SQL identifier (table,
column or function name) or a string in an SQL command. These functions are similar respectively
to libpq functions `PQescapeIdentifier` and `PQescapeLiteral`. These functions help to prevent SQL
injection attacks. However, you should use prepared statements (elisp function `pg-exec-prepared`)
instead of these functions whenever possible.
## [0.27] - 2024-01-10
- Improvements to the internal parsing functionality to use a hashtable instead of an alist to look
up parsing functions (performance should be improved).
- Improved support for user-defined parsing for custom PostgreSQL types (see the function
`pg-register-parser`).
- Add support for the pgvector extension for vector embeddings.
## [0.26] - 2023-12-18
- API change for `pg-fetch` when using prepared statements with a suspended portal. The second
argument is a pgresult structure, rather than the portal name. This change improves performance
by making it possible to avoid redundant DescribeRow messages.
- The extended query flow used by `pg-exec-prepared` has been modified to be more asynchronous to
improve performance: instead of waiting for a response after the Prepare and Bind phases, only
wait a single time after the Execute phase.
## [0.25] - 2023-12-14
- Add support for the extended query syntax, which helps to avoid the risk of SQL injection attacks.
See function `pg-exec-prepared`. This also makes it easier to fetch partial results from a query
that returns a large number of rows.
- Fix parsing of PostgreSQL `CHAR` type to an Emacs character instead of a string of length 1.
- Various internal functions and variables renamed with the `pg--` prefix instead of `pg-`.
## [0.24] - 2023-11-15
### New
- Add function `pg-add-notification-handler` to add a function to the list of handlers for
`NotificationResponse` messages from PostgreSQL. A handler takes two arguments, the channel and
the payload, which correspond to SQL-level `NOTIFY channel, 'payload'`.
- Add support for asynchronous processing of messages from PostgreSQL, in particular for use of
LISTEN/NOTIFY. This allows PostgreSQL and Emacs to be used in a publish-subscribe pattern which
decouples event publication from the number and the speed of event processing nodes. See the
notification-publisher.el and notification-subscriber.el tests for a basic example.
### Fixed
- Fix the implementation of `pg-tables` and `pg-columns` to use the information schema instead of
historical alternative SQL queries.
## [0.23] - 2023-08-20
### New
- Preliminary support for the COPY protocol. See function `pg-copy-from-buffer`.
## [0.22] - 2023-07-16
### Fixed
- The backend can send NoticeResponse messages during connection startup, for example indicating a
collation version mismatch between your database and the operating system.
## [0.21] - 2023-04-23
### Fixed
- Declare some autoloaded functions to avoid warning from the bytecode compiler.
## [0.20] - 2022-12-10
### Fixed
- Wait for further data from the network in `pg-read-chars` if the process buffer doesn't yet
contain the necessary data (fix from swilsons).
## [0.19] - 2022-11-19
### New
- Add support for parsing the `BIT` and `VARBIT` datatypes.
- Add support for parsing ARRAY datatypes.
- Add support for parsing RANGE datatypes (integer and numerical).
- Add support for parsing HSTORE datatypes (see function `pg-hstore-setup` to prepare the database
connection for use of the HSTORE datatype).
- Add function `pg-cancel` to request cancellation of the command currently being processed
by the backend.
### Fixed
- Fix bug in handling of DataRow messages when zero columns are returned by a query.
## [0.18] - 2022-10-16
### New
- Add support for connecting to PostgreSQL over a local Unix socket.
- Add support for parsing the `BYTEA` datatype (binary strings). We assume that the PostgreSQL
configuration variable `bytea_output` is set to `hex` (the default setting).
- Add support for parsing the `JSON` datatype, into the Emacs JSON representation.
- Add support for parsing the `JSONB` datatype, into the Emacs JSON representation.
- Add support for handling ParameterStatus messages sent by the backend (see variable
`pg-parameter-change-functions`).
- Add support for handling NOTICE messages sent by the backend (see variable
`pg-handle-notice-functions`).
- New pg-error and pg-protocol-error error types. All errors raised by the library will be a
subclass of pg-error.
### Fixed
- Fix bug in parsing of NULL column values in DataRow messages.
- Fix handling of encoding of attribute column names.
- Fix handling of PostgreSQL error messages (correctly resync with the backend).
## [0.17] - 2022-09-30
### Updated
- Support for encrypted (TLS) connections with PostgreSQL
- Native support for PBKDF2 algorithm to allow SCRAM-SHA-256 authentication without the external
nettle-pbkdf2 application
- Implement multibyte encoding and decoding for pg-exec requests
- Send application_name to PostgreSQL backend to improve observability
- Fix handling of NotificationResponse messages
- Improve test coverage
- Include continuous integration tests on Windows and MacOS (GitHub actions)
- This version distributed via MELPA
## [0.16] - 2022-09-18
### Updated
- Fix MD5 authentication
- Use client-encoding to decode PostgreSQL error messages
- Improve GitHub Actions continuous integration workflow
## [0.15] - 2022-09-06
### Updated
- Moved from cl library to cl-lib
- pg: prefix for symbol names changed to pg- (Emacs Lisp coding conventions)
- Implemented version 3.0 of the PostgreSQL wire protocol
- Implemented SCRAM-SHA-256 authentication
- Implemented MD5 authentication
- Distributed via github repository
## [0.11] - 2001
This version was distributed from http://purl.org/net/emarsden/home/downloads/
and via the EmacsWiki.
pg-el-0.54/LICENSE 0000664 0000000 0000000 00000104515 15005352415 0013461 0 ustar 00root root 0000000 0000000 GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc.
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU General Public License is a free, copyleft license for
software and other kinds of works.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
the GNU General Public License is intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
To protect your rights, we need to prevent others from denying you
these rights or asking you to surrender the rights. Therefore, you have
certain responsibilities if you distribute copies of the software, or if
you modify it: responsibilities to respect the freedom of others.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must pass on to the recipients the same
freedoms that you received. You must make sure that they, too, receive
or can get the source code. And you must show them these terms so they
know their rights.
Developers that use the GNU GPL protect your rights with two steps:
(1) assert copyright on the software, and (2) offer you this License
giving you legal permission to copy, distribute and/or modify it.
For the developers' and authors' protection, the GPL clearly explains
that there is no warranty for this free software. For both users' and
authors' sake, the GPL requires that modified versions be marked as
changed, so that their problems will not be attributed erroneously to
authors of previous versions.
Some devices are designed to deny users access to install or run
modified versions of the software inside them, although the manufacturer
can do so. This is fundamentally incompatible with the aim of
protecting users' freedom to change the software. The systematic
pattern of such abuse occurs in the area of products for individuals to
use, which is precisely where it is most unacceptable. Therefore, we
have designed this version of the GPL to prohibit the practice for those
products. If such problems arise substantially in other domains, we
stand ready to extend this provision to those domains in future versions
of the GPL, as needed to protect the freedom of users.
Finally, every program is threatened constantly by software patents.
States should not allow patents to restrict development and use of
software on general-purpose computers, but in those that do, we wish to
avoid the special danger that patents applied to a free program could
make it effectively proprietary. To prevent this, the GPL assures that
patents cannot be used to render the program non-free.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Use with the GNU Affero General Public License.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU Affero General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the special requirements of the GNU Affero General Public License,
section 13, concerning interaction through a network will apply to the
combination as such.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year>  <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If the program does terminal interaction, make it output a short
notice like this when it starts in an interactive mode:
<program>  Copyright (C) <year>  <name of author>
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, your program's commands
might be different; for a GUI interface, you would use an "about box".
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU GPL, see
<https://www.gnu.org/licenses/>.
The GNU General Public License does not permit incorporating your program
into proprietary programs. If your program is a subroutine library, you
may consider it more useful to permit linking proprietary applications with
the library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License. But first, please read
<https://www.gnu.org/licenses/why-not-lgpl.html>.
pg-el-0.54/README.md 0000664 0000000 0000000 00000033457 15005352415 0013741 0 ustar 00root root 0000000 0000000 # pg.el -- Emacs Lisp socket-level interface to the PostgreSQL RDBMS
[](https://www.gnu.org/licenses/gpl-3.0.html)
[](https://github.com/emarsden/pg-el/)
[](https://melpa.org/#/pg)
[](https://github.com/emarsden/pg-el/actions/)
[](https://github.com/emarsden/pg-el/actions/)
This library lets you access the PostgreSQL 🐘 database management system from Emacs, using its
network-level frontend/backend “wire” protocol. The module is capable of automatic type coercions from a
range of SQL types to the equivalent Emacs Lisp type.
📖 You may be interested in the [user manual](https://emarsden.github.io/pg-el/).
This is a developer-oriented library, which won’t be useful to end users. If you’re looking for a
browsing/editing interface to PostgreSQL in Emacs, you may be interested in
[PGmacs](https://github.com/emarsden/pgmacs/).
This library has support for:
- **SCRAM-SHA-256 authentication** (the default authentication method since PostgreSQL version 14),
as well as MD5 and password authentication. There is currently no support for authentication
using client-side certificates.
- Encrypted (**TLS**) connections with the PostgreSQL database, if your Emacs has been built with
GnuTLS support. This includes support for authentication using client certificates.
- **Prepared statements** using PostgreSQL’s extended query protocol, to avoid SQL injection
attacks.
- The PostgreSQL **COPY protocol** to copy preformatted data from an Emacs buffer to PostgreSQL, or
to dump a PostgreSQL table or query result to an Emacs buffer in CSV or TSV format.
- Asynchronous handling of **LISTEN/NOTIFY** notification messages from PostgreSQL, allowing the
implementation of **publish-subscribe architectures** (PostgreSQL as an “event broker” or
“message bus” and Emacs as event publisher and consumer).
- Parsing various PostgreSQL types including integers, floats, array types, numerical ranges, JSON
and JSONB objects into their native Emacs Lisp equivalents. The parsing support is
user-extensible. Support for the HSTORE, pgvector, PostGIS, BM25 extensions.
- Connections over TCP or (on Unix machines) a local Unix socket.
Tested **PostgreSQL versions**: The code has been tested with versions 17.4, 16.4, 15.4, 13.8,
11.17, and 10.22 on Linux. It is also tested via GitHub actions on MacOS and Windows. This library
also works, more or less, against other “PostgreSQL-compatible” databases. There are four main points
where this compatibility may be problematic:
- Compatibility with the PostgreSQL wire protocol. This is the most basic form of compatibility.
- Compatibility with the PostgreSQL flavour of SQL, such as row expressions, non-standard functions
such as `CHR`, data types such as `BIT`, `VARBIT`, `JSON` and `JSONB`, user-defined ENUMS and so
on, functionality such as `LISTEN`. Some databases that claim to be “Postgres compatible” don’t
even support foreign keys, views, triggers, sequences, tablespaces and temporary tables (looking
at you, Amazon Aurora DSQL).
- Implementation of the system tables that are used by certain pg-el functions, to retrieve the list
of tables in a database, the list of types, and so on.
- Establishing encrypted TLS connections to hosted services. Most PostgreSQL client libraries (in
particular the official client library libpq) use OpenSSL for TLS support, whereas Emacs uses
GnuTLS, and you may encounter incompatibilities.
The following PostgreSQL-compatible databases have been tested:
- [Neon](https://neon.tech/) “serverless PostgreSQL” works perfectly. This is a commercially hosted
service using a new storage engine for PostgreSQL, that they make available under the Apache
licence. Last tested 2025-05.
- [ParadeDB](https://www.paradedb.com/) version 0.13.1 works perfectly (it's really a PostgreSQL
extension rather than a distinct database implementation).
- [IvorySQL](https://www.ivorysql.org/) works perfectly (this Apache licensed fork of PostgreSQL
adds some features for compatibility with Oracle). Last tested 2025-04 with version 4.4.
- The [Timescale DB](https://www.timescale.com/) extension for time series data, source available
but non open source. This works perfectly (last tested 2025-05 with version 2.19.3).
- The [CitusDB](https://github.com/citusdata/citus) extension for sharding PostgreSQL over multiple
hosts (AGPLv3 licence). Works perfectly (last tested 2025-05 with Citus version 13.0).
- The [OrioleDB](https://github.com/orioledb/orioledb) extension, which adds a new storage engine
designed for better multithreading and solid state storage, works perfectly. Last tested 2025-04
with version beta10.
- The [Microsoft DocumentDB](https://github.com/microsoft/documentdb) extension for MongoDB-like
queries (MIT licensed). Works perfectly. Note that this is not the same product as Amazon
DocumentDB. Last tested 2025-04 with the FerretDB distribution 2.1.0.
- The [Hydra Columnar](https://github.com/hydradatabase/columnar) extension for column-oriented
storage and parallel queries (Apache license). Works perfectly (last tested 2025-05 with v1.1.2).
- The [PgBouncer](https://www.pgbouncer.org/) connection pooler for PostgreSQL (open source, ISC
licensed). Works fine (last tested 2025-05 with version 1.24 in the default session pooling mode).
- The [PgDog](https://github.com/pgdogdev/pgdog) sharding connection pooler for PostgreSQL (AGPLv3
licensed). Works mostly fine but disconnects the client when the client-encoding is switched to
`LATIN1` (last tested 2025-05).
- The [PgCat](https://github.com/postgresml/pgcat) sharding connection pooler for PostgreSQL (MIT
license). Works fine (last tested 2025-05 with v0.2.5).
- [Google AlloyDB Omni](https://cloud.google.com/alloydb/omni/docs/quickstart) is a proprietary fork
of PostgreSQL with Google-developed extensions, including a columnar storage extension, adaptive
autovacuum, and an index advisor. It works perfectly with pg-el as of 2025-05 (version that
reports itself as "15.7").
- [Xata](https://xata.io/) “serverless PostgreSQL” has many limitations including lack of support
for `CREATE DATABASE`, `CREATE COLLATION`, for XML processing, for temporary tables, for cursors,
for `EXPLAIN`, for `CREATE EXTENSION`, for `DROP FUNCTION`, for functions such as `pg_notify`.
- The [YugabyteDB](https://yugabyte.com/) distributed database (Apache licence). Mostly working
though the `pg_sequences` table is not implemented so certain tests fail. YugabyteDB does not have
full compatibility with PostgreSQL SQL, and for example `GENERATED ALWAYS AS` columns are not
supported, and `LISTEN` and `NOTIFY` are not supported. It does support certain extensions such as
pgvector, however. Last tested on 2025-05 against version 2.25.
- The [RisingWave](https://github.com/risingwavelabs/risingwave) event streaming database (Apache
license) is mostly working. It does not support `GENERATED ALWAYS AS IDENTITY` or `SERIAL`
columns, nor `VACUUM ANALYZE`. Last tested 2025-05 with v2.3.1.
- The [CrateDB](https://crate.io/) distributed database (Apache licence). CrateDB does not support
rows (e.g. `SELECT (1,2)`), does not support the `time`, `varbit`, `bytea`, `jsonb` and `hstore`
types, does not handle a query which only contains an SQL comment, does not handle various
PostgreSQL functions such as `factorial`, does not return a correct type OID for text columns in
rows returned from a prepared statement, doesn't support Unicode identifiers, doesn't support the
`COPY` protocol, doesn't support `TRUNCATE TABLE`. It works with these limitations with pg-el
(last tested 2025-05 with version 5.10.5).
- The [CockroachDB](https://github.com/cockroachdb/cockroach) distributed database (source-available
but non-free software licence). Note that this database does not implement the large object
functionality, and its interpretation of SQL occasionally differs from that of PostgreSQL.
Currently fails with an internal error on the SQL generated by our query for `pg-table-owner`, and
fails on the boolean vector syntax b'1001000'. Works with these limitations with pg-el (last
tested 2025-04 with CockroachDB CCL v25.1).
- The [QuestDB](https://questdb.io/) time series database (Apache licensed) has very limited
PostgreSQL support, and does not support the `integer` type for example. Last tested 2025-05 with
version 8.3.1.
- [Google Spanner](https://cloud.google.com/spanner) proprietary distributed database: tested with
the Spanner emulator (that reports itself as `PostgreSQL 14.1`) and the PGAdapter library that
enables support for the PostgreSQL wire protocol. Spanner has very limited PostgreSQL
compatibility, for example refusing to create tables that do not have a primary key. It does not
recognize basic PostgreSQL types such as `INT2`. It also does not for example support the `CHR`
and `MD5` functions, row expressions, and `WHERE` clauses without a `FROM` clause.
- The [YDB by Yandex](https://ydb.tech/docs/en/postgresql/docker-connect) distributed database
(Apache licence). Has very limited PostgreSQL compatibility. For example, an empty query string
leads to a hung connection, and the `bit` type is returned as a string with the wrong oid. Last
tested 2025-05 with version 23-4.
- The [Materialize](https://materialize.com/) operational database (a proprietary differential
dataflow database) has many limitations in its PostgreSQL compatibility: no support for primary
keys, unique constraints, check constraints, for the 'bit' type for example. It works with these
limitations with pg-el (last tested 2025-05 with Materialize v0.142).
- [YottaDB Octo](https://gitlab.com/YottaDB/DBMS/YDBOcto), which is built on the YottaDB key-value
store (which is historically based on the MUMPS programming language). GNU AGPL v3 licence. There
are many limitations in the PostgreSQL compatibility: no user metainformation, no cursors, no
server-side prepared statements, no support for various types including arrays, JSON, UUID,
vectors, tsvector, numeric ranges, geometric types. It works with these limitations with pg-el
(last tested 2025-05 with YottaDB 2.0.2).
- The [GreptimeDB](https://github.com/GrepTimeTeam/greptimedb) time series database (Apache license)
implements quite a lot of the PostgreSQL wire protocol, but the names it uses for types in the
`pg_catalog.pg_types` table are not the same as those used by PostgreSQL (e.g. `Int64` instead of
`int8`), so our parsing machinery does not work. This database also has more restrictions on the
use of identifiers than PostgreSQL (for example, `id` is not accepted as a column name, nor are
identifiers containing Unicode characters). Last tested v0.14.2 in 2025-05.
- Hosted PostgreSQL services that have been tested: as of 2024-12
[Railway.app](https://railway.app/) is running a Debian build of PostgreSQL 16.4, and works fine;
[Aiven.io](https://aiven.io/) is running a Red Hat build of PostgreSQL 16.4 on Linux/Aarch64 and
works fine.
- Untested but likely to work: Amazon RDS, Google Cloud SQL, Azure Database for PostgreSQL, Amazon
Aurora. You may however encounter difficulties with TLS connections, as noted above.
It does not work in a satisfactory manner with the ClickHouse database, whose PostgreSQL support is
too limited. As of version 25.4 in 2025-04, there is no implementation of the `pg_types` system
table, no support for basic PostgreSQL-flavoured SQL commands such as `SET`, no support for the
extended query mechanism.
Tested **Emacs versions**: mostly tested with versions 31 pre-release, 30.1 and 29.4. Emacs versions
older than 26.1 will not work against a recent PostgreSQL version (whose default configuration
requires SCRAM-SHA-256 authentication), because they don’t include the GnuTLS support which we use
to calculate HMACs. They may however work against a database set up to allow unauthenticated local
connections. Emacs versions older than 28.1 (from April 2022) will not be able to use the extended
query protocol (prepared statements), because they don’t have the necessary bindat functionality. It
should however be easy to update the installed version of bindat.el for these older versions.
> [!TIP]
> Emacs 31 (in pre-release) has support for disabling the Nagle algorithm on TCP network
> connections (`TCP_NODELAY`). This leads to far better performance for PostgreSQL connections, in
> particular on Unix platforms. This performance difference does not apply when you connect to
> PostgreSQL over a local Unix socket connection.
You may be interested in an alternative library [emacs-libpq](https://github.com/anse1/emacs-libpq)
that enables access to PostgreSQL from Emacs by binding to the libpq library.
## Installation
Install via the [MELPA package archive](https://melpa.org/partials/getting-started.html) by
including the following in your Emacs initialization file (`.emacs.el` or `init.el`):
(require 'package)
(add-to-list 'package-archives '("melpa" . "https://melpa.org/packages/") t)
then saying
M-x package-install RET pg
Alternatively, you can install the library from the latest GitHub revision using:
(unless (package-installed-p 'pg)
(package-vc-install "https://github.com/emarsden/pg-el" nil nil 'pg))
You can later update to the latest version with `M-x package-vc-upgrade RET pg RET`.
## Acknowledgements
Thanks to Eric Ludlam for discovering a bug in the date parsing routines, to Hartmut Pilch and
Yoshio Katayama for adding multibyte support, and to Doug McNaught and Pavel Janik for bug fixes.
pg-el-0.54/doc/ 0000775 0000000 0000000 00000000000 15005352415 0013213 5 ustar 00root root 0000000 0000000 pg-el-0.54/doc/.gitignore 0000664 0000000 0000000 00000000005 15005352415 0015176 0 ustar 00root root 0000000 0000000 book
pg-el-0.54/doc/book.toml 0000664 0000000 0000000 00000000554 15005352415 0015046 0 ustar 00root root 0000000 0000000 [book]
title = "Using pg-el"
authors = ["Eric Marsden <eric.marsden@risk-engineering.org>"]
description = "User manual for the pg-el Emacs Lisp socket-level interface to PostgreSQL."
language = "en"
multilingual = false
src = "src"
[output.html]
git-repository-url = "https://github.com/emarsden/pg-el"
git-repository-icon = "fa-github"
smart-punctuation = true
pg-el-0.54/doc/src/ 0000775 0000000 0000000 00000000000 15005352415 0014002 5 ustar 00root root 0000000 0000000 pg-el-0.54/doc/src/API.md 0000664 0000000 0000000 00000033417 15005352415 0014745 0 ustar 00root root 0000000 0000000 # The pg-el API
The entry points in the pg-el library are documented below.
(with-pg-connection con (dbname user [password host port]) &body body)
A macro which opens a TCP network connection to database `DBNAME`, executes the `BODY` forms then
disconnects. See function `pg-connect` for details of the connection arguments.
(with-pg-connection-local con (path dbname user [password]) &body body)
A macro which opens a connection to database `DBNAME` over a local Unix socket at `PATH`, executes
the `BODY` forms then disconnects. See function `pg-connect-local` for details of the connection
arguments.
(with-pg-transaction con &body body)
A macro which executes the `BODY` forms wrapped in an SQL transaction. `CON` is a connection to the
database. If an error occurs during the execution of the forms, a ROLLBACK instruction is executed.
(pg-connect dbname user [password host port tls-options]) -> con
Connect to the database `DBNAME` on `HOST` (defaults to localhost) at `PORT` (defaults to 5432) via
TCP/IP and authenticate as `USER` with `PASSWORD`. This library currently supports SCRAM-SHA-256
authentication (the default method from PostgreSQL version 14 onwards), MD5 authentication and
cleartext password authentication. This function also sets the output date type to `ISO` and
initializes our type parser tables.
If `tls-options` is non-NIL, attempt to establish an encrypted connection to PostgreSQL by
passing `tls-options` to Emacs function `gnutls-negotiate`. `tls-options` is a Common-Lisp style
argument list of the form
```lisp
(list :priority-string "NORMAL:-MD5" :trustfiles (list "/etc/company/RootCA.crt"))
```
To use client certificates to authenticate the TLS connection, use a value of `TLS-OPTIONS` of the
form
```lisp
`(list :keylist ((,key ,cert)))
```
where `key` is the filename of the client certificate private key and `cert` is the filename of the
client certificate. These are passed to GnuTLS.
(pg-connect-local path dbname user [password]) -> con
Initiate a connection with the PostgreSQL backend over local Unix socket `PATH`. Connect to the
database `DBNAME` with the username `USER`, providing `PASSWORD` if necessary. Returns a connection
to the database (as an opaque type). `PASSWORD` defaults to an empty string.
(pg-exec con &rest sql) -> pgresult
Concatenate the SQL strings and send to the PostgreSQL backend over connection `CON`. Retrieve the
information returned by the database and return it in an opaque record PGRESULT. The content of the
pgresult should be accessed using the `pg-result` function.
(pg-exec-prepared con query typed-arguments &key (max-rows 0)) -> pgresult
Execute SQL query `QUERY`, which may include numbered parameters such as `$1`, ` $2` and so on,
using PostgreSQL's extended query protocol, on database connection `CON`. The `TYPED-ARGUMENTS` are
a list of the form
'((42 . "int4") ("42" . "text"))
This query will return at most `MAX-ROWS` rows (a value of zero indicates no limit). It returns a
pgresult structure (see function `pg-result`). This method is useful to reduce the risk of SQL
injection attacks.
(pg-result pgresult what &rest args) -> info
Extract information from the `PGRESULT` returned by `pg-exec`. The `WHAT` keyword can be one of
* `:connection`: retrieve the database connection.
* `:status`: a string returned by the backend to indicate the status of the command; it is something
like "SELECT" for a select command, "DELETE 1" if the deletion affected a single row, etc.
* `:attributes`: a list of tuples providing metadata: the first component of each tuple is the
attribute's name as a string, the second an integer representing its PostgreSQL type, and the third
an integer representing the size of that type.
* `:tuples`: all the data retrieved from the database, as a list of lists, each list corresponding
to one row of data returned by the backend.
* `:tuple` tuple-number: return a specific tuple (numbering starts at 0).
* `:incomplete`: determine whether the set of tuples returned in this query set is incomplete, due
to a suspended portal. If true, further tuples can be obtained by calling `pg-fetch`.
* `:oid`: allows you to retrieve the OID returned by the backend if the command was an insertion.
The OID is a unique identifier for that row in the database (this is PostgreSQL-specific; please
refer to the documentation for more details).
.
(pg-fetch con result &key (max-rows 0))
Fetch pending rows from the suspended portal in `RESULT` on database connection `CON`.
This query will retrieve at most `MAX-ROWS` rows (default value of zero means no limit).
Returns a pgresult structure (see function `pg-result`). When used in multiple fetch situations
(with the `:max-rows` parameter to `pg-exec-prepared` which allows you to retrieve large result sets
incrementally), the same pgresult structure (initially returned by `pg-exec-prepared`) should be
passed to each successive call to `pg-fetch`, because it contains column metainformation that is
required to parse the incoming data. Each successive call to `pg-fetch` will return this pgresult
structure with new tuples accessible via `pg-result :tuples`. When no more tuples are available,
the `:incomplete` slot of the pgresult structure will be nil.
(pg-cancel con) -> nil
Ask the server to cancel the command currently being processed by the backend. The cancellation
request concerns the command requested over database connection `CON`.
(pg-disconnect con) -> nil
Close the database connection `CON`.
(pg-for-each con select-form callback)
Calls `CALLBACK` on each tuple returned by `SELECT-FORM`. Declares a cursor for `SELECT-FORM`, then
fetches tuples using repeated executions of `FETCH 1`, until no results are left. The cursor is then
closed. The work is performed within a transaction. When you have a large amount of data to handle,
this usage is more efficient than fetching all the tuples in one go.
If you wish to browse the results, each one in a separate buffer, you could have the callback insert
each tuple into a buffer created with `(generate-new-buffer "myprefix")`, then use ibuffer's "/ n" to
list/visit/delete all buffers whose names match myprefix.
(pg-databases con) -> list of strings
Return a list of the databases available over PostgreSQL connection `CON`. A database is a set of
tables; in a fresh PostgreSQL installation there is a single database named "template1".
(pg-schemas con) -> list of strings
Return the list of the schemas in the PostgreSQL server to which we are connected with `CON`.
Schemas are a form of namespace, which can contain elements such as tables, sequences, indexes and
views. The default schema name is `public`.
(pg-tables con) -> list of strings
Return a list of the tables present in the database to which we are currently connected over `CON`.
Only include user tables: system tables are not included in this list.
(pg-columns con table) -> list of strings
Return a list of the columns (or attributes) in `TABLE`, which must be a table in the database to
which we are connected over `CON`. We only include the column names; if you want more detailed
information (attribute types, for example), it can be obtained from `pg-result` on a SELECT
statement for that table.
(pg-table-comment con table) -> opt-string
Return the comment on `TABLE`, which must be a table in the database to which we are connected over
`CON`. Return nil if no comment is defined for `TABLE`. A setf function allows you to change the
table comment, or delete it with an argument of nil:
(setf (pg-table-comment con "table") "The comment")
(pg-column-comment con table column) -> opt-string
Return the comment on `COLUMN` in `TABLE` in a PostgreSQL database. `TABLE` can be a string or a
schema-qualified name. Uses database connection `CON`. Returns a string or nil if no comment is
defined. A setf function allows you to change the column comment, or delete it with a value of nil:
(setf (pg-column-comment con "table" "column") "The comment")
(pg-hstore-setup con)
Prepare for the use of HSTORE datatypes over database connection `CON`. This function must be called
before using the HSTORE extension. It loads the extension if necessary, and sets up the parsing
support for HSTORE datatypes.
(pg-vector-setup con)
Prepare for the use of VECTOR datatypes from the pgvector extension over database connection `CON`.
This function must be called before using the pgvector extension. It loads the extension if
necessary, and sets up the parsing support for vector datatypes.
(pg-lo-create con . args) -> oid
Create a new large object (BLOB, or binary large object in other DBMSes parlance) in the database to
which we are connected via `CON`. Returns an `OID` (which is represented as an elisp integer) which
will allow you to use the large object. Optional `ARGS` are a Unix-style mode string which determines
the permissions of the newly created large object, one of "r" for read-only permission, "w" for
write-only, "rw" for read+write. Default is "r".
Large-object functions MUST be used within a transaction (see the macro `with-pg-transaction`).
(pg-lo-open con oid . args) -> fd
Open a large object whose unique identifier is `OID` (an elisp integer) in the database to which we
are connected via `CON`. Optional `ARGS` is a Unix-style mode string as for `pg-lo-create`; which
defaults to "r" read-only permissions. Returns a file descriptor (an elisp integer) which can be
used in other large-object functions.
(pg-lo-close con fd)
Close the file descriptor `FD` which was associated with a large object. Note that this does not
delete the large object; use `pg-lo-unlink` for that.
(pg-lo-read con fd bytes) -> string
Read `BYTES` from the file descriptor `FD` which is associated with a large object. Return an elisp
string which should be `BYTES` characters long.
(pg-lo-write con fd buf)
Write the bytes contained in the elisp string `BUF` to the large object associated with the file
descriptor `FD`.
(pg-lo-lseek con fd offset whence)
Do the equivalent of a `lseek(2)` on the file descriptor `FD` which is associated with a large
object; i.e. reposition the read/write file offset for that large object to `OFFSET` (an elisp
integer). `WHENCE` has the same significance as in `lseek()`; it should be one of `SEEK_SET` (set the
offset to the absolute position), `SEEK_CUR` (set the offset relative to the current offset) or
`SEEK_END` (set the offset relative to the end of the file). `WHENCE` should be an elisp integer whose
values can be obtained from the header file `unistd.h` (probably 0, 1 and 2 respectively).
(pg-lo-tell con oid) -> integer
Do the equivalent of an `ftell(3)` on the file associated with the large object whose unique
identifier is `OID`. Returns the current position of the file offset for the object's associated file
descriptor, as an elisp integer.
(pg-lo-unlink con oid)
Remove the large object whose unique identifier is `OID` from the system. In the current
implementation of large objects in PostgreSQL, each large object is associated with an object in the
filesystem.
(pg-lo-import con filename) -> oid
Create a new large object and initialize it to the data contained in the file whose name is
`FILENAME`. Returns an `OID` (as an elisp integer). Note that this operation is only syntactic sugar
around the basic large-object operations listed above.
(pg-lo-export con oid filename)
Create a new file named `FILENAME` and fill it with the contents of the large object whose unique
identifier is `OID`. This operation is also syntactic sugar.
Variable `pg-parameter-change-functions` is a list of handlers to be called when the backend informs
us of a parameter change, for example a change to the session time zone. Each handler is called with
three arguments: the connection to the backend, the parameter name and the parameter value. It is
initially set to a function that looks out for `client_encoding` messages and updates the value
recorded in the connection.
Variable `pg-handle-notice-functions` is a list of handlers to be called when the backend sends us a
`NOTICE` message. Each handler is called with one argument, the notice, as a pgerror struct.
Boolean variable `pg-disable-type-coercion` can be set to non-nil (before initiating a connection)
to disable the library's type coercion facility. Default is `nil`.
~~~admonish warning title="Security note"
Setting up PostgreSQL to accept TCP/IP connections has security implications; please consult the
documentation for details. It is possible to use the port forwarding capabilities of ssh to
establish a connection to the backend over TCP/IP, which provides both a secure authentication
mechanism and encryption (and optionally compression) of data passing through the tunnel. Here's how
to do it (thanks to Gene Selkov, Jr. for the description):
1. Establish a tunnel to the backend machine, like this:
ssh -L 3333:backend.dom:5432 postgres@backend.dom
The first number in the -L argument, 3333, is the port number of your end of the tunnel. The
second number, 5432, is the remote end of the tunnel -- the port number your backend is using.
The name or the address in between the port numbers belongs to the server machine, as does the
last argument to ssh that also includes the optional user name. Without the user name, ssh will
try the name you are currently logged on as on the client machine. You can use any user name the
server machine will accept, not necessarily those related to postgres.
2. Now that you have a running ssh session, you can point pg.el to the local host at the port number
which you specified in step 1. For example,
(pg-connect "dbname" "user" "password" "localhost" 3333)
You can omit the port argument if you chose 5432 as the local end of the tunnel, since pg.el
defaults to this value.
~~~
pg-el-0.54/doc/src/SUMMARY.md 0000664 0000000 0000000 00000001050 15005352415 0015455 0 ustar 00root root 0000000 0000000 # Summary
- [About](./about.md)
- [Installation](./installation.md)
- [Quickstart](./quickstart.md)
- [Using pg-el](./usage.md)
- [Using prepared statements](./prepared-statements.md)
- [JSON and JSONB support](./json.md)
- [HSTORE support](./hstore.md)
- [PostGIS support](./postgis.md)
- [Collation support](./collation.md)
- [The COPY protocol](./copy-protocol.md)
- [Using schema-qualified names](./schemas.md)
- [Error handling](./error-handling.md)
- [Special pg-el features](./special-features.md)
- [API](./API.md)
- [Feedback](./feedback.md)
pg-el-0.54/doc/src/about.md 0000664 0000000 0000000 00000025273 15005352415 0015447 0 ustar 00root root 0000000 0000000 # pg-el
[](https://www.gnu.org/licenses/gpl-3.0.html)
[](https://github.com/emarsden/pg-el/)
[](https://melpa.org/#/pg)
[](https://github.com/emarsden/pg-el/workflows/test-pgv16/badge.svg)
This Emacs Lisp library lets you access the [PostgreSQL](https://www.postgresql.org/) 🐘 database
from Emacs, using its network-level frontend/backend protocol. The library is capable of automatic
type coercions from a range of SQL types to and from the equivalent Emacs Lisp type.
This library will be useful for developers, rather than end users. If you’re looking for an
Emacs-based browser/editor for PostgreSQL, you may be interested in
[PGmacs](https://github.com/emarsden/pgmacs/), which uses this library to communicate with
PostgreSQL or a compatible database.
~~~admonish note title="Supported features"
- SCRAM-SHA-256 authentication (the default method since PostgreSQL version 14) as well as MD5 and
password authentication.
- Encrypted (TLS) connections between Emacs and the PostgreSQL backend. This includes support for
client certificates.
- **Prepared statements** using PostgreSQL's extended query message flow, that allows for
parameterized queries to protect from SQL injection issues.
- The PostgreSQL **COPY protocol** to copy preformatted data to PostgreSQL from an Emacs buffer.
- Asynchronous handling of LISTEN/NOTIFY notification messages from PostgreSQL, allowing the
implementation of publish-subscribe type architectures (PostgreSQL as an “event broker” or
“message bus” and Emacs as event publisher and consumer).
- Parsing various PostgreSQL types including integers, floats, array types, numerical ranges,
JSON and JSONB objects into their native Emacs Lisp equivalents. The parsing support is
user-extensible. Support for the HSTORE, pgvector and PostGIS extensions.
- Connections over TCP or (on Unix machines) a local Unix socket.
~~~
The code has been tested with **PostgreSQL versions** 17.4, 16.3, 15.4, 13.8, 11.17, and 10.22 on
Linux. It is also tested via GitHub actions on MacOS and Microsoft Windows. This library also works,
to a variable extent, against other databases that implement the PostgreSQL wire protocol:
- [Neon](https://neon.tech/) “serverless PostgreSQL” works perfectly.
- [ParadeDB](https://www.paradedb.com/) version 0.9.1 works perfectly (it's really a PostgreSQL
extension rather than a distinct database implementation).
- [IvorySQL](https://www.ivorysql.org/) version 3.4 works perfectly (this fork of PostgreSQL adds
some features for compatibility with Oracle).
- The [CitusDB](https://github.com/citusdata/citus) extension for sharding PostgreSQL over multiple
hosts works perfectly (last tested with Citus version 12.1.5, which is based on PostgreSQL 16.6).
- The [Microsoft DocumentDB](https://github.com/microsoft/documentdb) extension for MongoDB-like
queries works perfectly (last tested 2025-02 with version 16.6). Note that this is not the same
product as Amazon DocumentDB.
- The [Hydra Columnar](https://github.com/hydradatabase/columnar) extension for column-oriented
storage and parallel queries works perfectly (last tested 2025-02).
- The [Timescale DB](https://www.timescale.com/) extension for time series data works perfectly
(tested with version 2.16.1).
- The [PgBouncer](https://www.pgbouncer.org/) connection pooler for PostgreSQL works fine (tested
with version 1.23 in the default session pooling mode).
- [Google AlloyDB Omni](https://cloud.google.com/alloydb/omni/docs/quickstart) is a proprietary fork
of PostgreSQL with Google-developed extensions, including a columnar storage extension, adaptive
autovacuum, and an index advisor. It works perfectly with pg-el as of 2025-01.
- [ParadeDB](https://www.paradedb.com/): This ElasticSearch alternative is very
PostgreSQL-compatible (more of an extension than a reimplementation). Tested with the Dockerhub
instance which is based on PostgreSQL 16.3. All tests pass.
- [Xata](https://xata.io/) “serverless PostgreSQL” has many limitations including lack of support
for `CREATE DATABASE`, `CREATE COLLATION`, for XML processing, for temporary tables, for cursors,
for `EXPLAIN`, for `CREATE EXTENSION`, for functions such as `pg_notify`.
- [YugabyteDB](https://yugabyte.com/): tested against version 2.23. This database uses a lot of
code from PostgreSQL 11 and is quite compatible, including with the HSTORE and pgvector
extensions. However, some system tables differ from PostgreSQL, such as the `pg_sequences` table.
It does not support the XML type. It does not support `LISTEN`/`NOTIFY`.
- The [RisingWave](https://github.com/risingwavelabs/risingwave) event streaming database is mostly
working. It does not support `GENERATED ALWAYS AS IDENTITY` columns. Last tested 2025-02 with v2.1.2.
- [CrateDB](https://crate.io/): last tested 2025-02 with version 5.9.9. There are limitations in
this database's emulation of the PostgreSQL system tables: for example, it's not possible to query
the owner of a table (function `pg-table-owner`). It doesn't accept SQL statements that only
include an SQL comment. It doesn't support setting comments on SQL tables. As
[documented](https://cratedb.com/docs/crate/reference/en/latest/interfaces/postgres.html), CrateDB
does not support the `TIME` type without a time zone. It doesn't support casting integers to bits.
It doesn't support the `VARBIT` type. It has no support for the COPY protocol.
- [CockroachDB](https://github.com/cockroachdb/cockroach): tested with CockroachDB CCL v24.3. Note
that this database does not implement the large object functionality, and its interpretation of
SQL occasionally differs from that of PostgreSQL. It is currently [reporting an internal
error](https://github.com/cockroachdb/cockroach/issues/104009) when we call `pg-table-comment`.
- [QuestDB](https://questdb.io/): tested against version 6.5.4. This is not very
PostgreSQL-compatible: it fails on the SQL query `SELECT 1::integer` because it doesn't recognize
integer as a type. It doesn't support `DELETE` statements.
- [Google Spanner](https://cloud.google.com/spanner): tested with the Spanner emulator (that reports
itself as `PostgreSQL 14.1`) and the PGAdapter library that enables support for the PostgreSQL
wire protocol. Spanner has very limited PostgreSQL compatibility, for example refusing to create
tables that do not have a primary key. It does not recognize basic PostgreSQL types such as `INT2`.
It also does not for example support the `CHR` and `MD5` functions, row expressions, and WHERE
clauses without a FROM clause.
- [YDB by Yandex](https://ydb.tech/docs/en/postgresql/docker-connect) last tested with version 23-4.
Has very limited PostgreSQL compatibility. For example, an empty query string leads to a hung
connection, and the `bit` type is returned as a string with the wrong oid.
- The [Materialize](https://materialize.com/) operational database (a proprietary differential
dataflow database) has many limitations in its PostgreSQL compatibility: no support for primary
keys, unique constraints, check constraints, for the `bit` type for example. It works with these
limitations with pg-el (last tested 2025-02 with Materialize v0.133).
- [ClickHouse](https://clickhouse.com/) doesn't work with pg-el. Their version 24.5 has a very basic
implementation of the PostgreSQL wire protocol. It doesn’t support the `pg_type` system table
which provides information on the OIDs associated with different PostgreSQL types. All values are
returned in textual format using the pseudo-OID of 0, which means the client must parse the value.
The database immediately closes the connection on any SQL error. It doesn't support configuration
statements such as `SET datestyle`. It doesn't specify a `server_name` in the startup sequence,
which might allow us to detect this special case and restrict functionality to the most basic
aspects.
- [GreptimeDB](https://github.com/GrepTimeTeam/greptimedb) version 0.9.5 implements quite a lot of
the PostgreSQL wire protocol, but the names it uses for types in the `pg_catalog.pg_types` table
are not the same as those used by PostgreSQL (e.g. `Int64` instead of `int8`), so our parsing
machinery does not work.
- Untested but likely to work: Amazon RDS, Google Cloud SQL, Azure Database for PostgreSQL, Amazon
Aurora. You may however encounter difficulties with TLS connections, as noted above.
The generic function `pg-do-variant-specific-setup` allows you to specify setup operations to
run for a particular semi-compatible PostgreSQL variant. You can specialize it on the symbol name of the
variant, currently one of `postgresql`, `alloydb`, `cratedb`, `cockroachdb`, `yugabyte`, `questdb`,
`greptimedb`, `risingwave`, `immudb`, `timescaledb`, `ydb`, `orioledb`, `xata`, `spanner`,
`ivorydb`. As an example, the following specializer is already defined to run for AlloyDB variants:
```lisp
;; Register the OIDs associated with these OmniDB-specific types, so that their types appear in
;; column metadata listings.
(cl-defmethod pg-do-variant-specific-setup ((con pgcon) (_variant (eql 'alloydb)))
(message "pg-el: running variant-specific setup for AlloyDB Omni")
;; These type names are in the google_ml schema
(pg-register-parser "model_family_type" #'pg-text-parser)
(pg-register-parser "model_family_info" #'pg-text-parser)
(pg-register-parser "model_provider" #'pg-text-parser)
(pg-register-parser "model_type" #'pg-text-parser)
(pg-register-parser "auth_type" #'pg-text-parser)
(pg-register-parser "auth_info" #'pg-text-parser)
(pg-register-parser "models" #'pg-text-parser)
(pg-initialize-parsers con))
```
Tested with **Emacs versions** 30-pre-release, 29.4, 28.2, 27.2 and 26.3. Emacs versions older than
26.1 will not work against a recent PostgreSQL version (whose default configuration requires
SCRAM-SHA-256 authentication), because they don’t include the GnuTLS support which we use to
calculate HMACs. They may however work against a database set up to allow unauthenticated local
connections. Emacs versions before 28.1 will not support the extended query protocol, because the
`bindat` package is required. We mostly test with Emacs on Linux, but the library also works fine on
Microsoft Windows and MacOS.
You may be interested in an alternative library [emacs-libpq](https://github.com/anse1/emacs-libpq)
that enables access to PostgreSQL from Emacs by binding to the libpq library.
## Licence
pg-el is free software distributed under the terms of the GNU GPL v3 or later.
pg-el-0.54/doc/src/collation.md 0000664 0000000 0000000 00000002122 15005352415 0016305 0 ustar 00root root 0000000 0000000 # Collation
Case support in PostgreSQL (`lower()` and `upper()` functions) depends on the current [collation
rules](https://www.postgresql.org/docs/current/collation.html). A table has a default collation
which is specified at creation (with a default). To remove the dependency on the
table's collation, you can specify the desired collation explicitly.
Note that PostgreSQL can be compiled with or without support for libicu, as a complement to the
collation support in your libc.
~~~admonish example title="Using different collation rules"
```lisp
ELISP> (let ((res (pg-exec *pg* "SELECT lower('FÔÖÉ' COLLATE \"fr_FR\")")))
(car (pg-result res :tuple 0)))
"fôöé"
ELISP> (let ((res (pg-exec *pg* "SELECT lower('FÔ🐘💥bz' COLLATE \"fr_FR\")")))
(car (pg-result res :tuple 0)))
"fô🐘💥bz"
ELISP> (pg-result (pg-exec *pg* "CREATE COLLATION IF NOT EXISTS \"french\" (provider = icu, locale = 'fr_FR')") :status)
"CREATE COLLATION"
ELISP> (let ((res (pg-exec *pg* "SELECT lower('FÔÖÉ' COLLATE \"french\")")))
(car (pg-result res :tuple 0)))
"fôöé"
```
~~~
pg-el-0.54/doc/src/copy-protocol.md 0000664 0000000 0000000 00000010671 15005352415 0017142 0 ustar 00root root 0000000 0000000 # The COPY protocol
The [COPY protocol](https://www.postgresql.org/docs/current/sql-copy.html) can be used to send and
receive large amounts of data to/from PostgreSQL. It can be used with CSV or TSV data.
## From Emacs to PostgreSQL
The pg-el library allows you to COPY from an Emacs buffer into PostgreSQL using function
`pg-copy-from-buffer`, as illustrated below.
~~~admonish example title="Inserting tab-separated data"
```lisp
ELISP> (defun ascii (n) (+ ?A (mod n 26)))
ascii
ELISP> (defun random-word ()
(apply #'string (cl-loop for count to 10 collect (+ ?a (random 26)))))
random-word
ELISP> (pg-result (pg-exec *pg* "CREATE TABLE copy_tsv(a INTEGER, b CHAR, c TEXT)") :status)
"CREATE TABLE"
ELISP> (let ((buf (get-buffer-create " *pg-copy-temp-tsv*")))
(with-current-buffer buf
(dotimes (i 42)
(insert (format "%d\t%c\t%s\n" i (ascii i) (random-word)))))
(pg-result (pg-copy-from-buffer *pg* "COPY copy_tsv(a,b,c) FROM STDIN" buf) :status))
"COPY 84"
ELISP> (pg-result (pg-exec *pg* "SELECT COUNT(*) FROM copy_tsv") :tuple 0)
(84)
ELISP> (pg-result (pg-exec *pg* "SELECT * FROM copy_tsv LIMIT 5") :tuples)
((0 "A" "ufyhdnkoyfi")
(1 "B" "jpnlxbftdpm")
(2 "C" "lqvazrhesdg")
(3 "D" "epxkjdsfdpg")
(4 "E" "yjhgdwjzbvt"))
```
~~~
~~~admonish example title="Inserting comma-separated data (CSV)"
The use of CSV formatted data is very similar; you simply need to specify `WITH (FORMAT CSV)` in the
[`COPY` statement](https://www.postgresql.org/docs/current/sql-copy.html).
```lisp
ELISP> (pg-result (pg-exec *pg* "CREATE TABLE copy_csv (a INT2, b INTEGER, c CHAR, d TEXT)") :status)
"CREATE TABLE"
ELISP> (let ((buf (get-buffer-create " *pg-copy-temp-csv*")))
(with-current-buffer buf
(dotimes (i 1000)
(insert (format "%d,%d,%c,%s\n" i (* i i) (ascii i) (random-word)))))
(pg-result (pg-copy-from-buffer *pg* "COPY copy_csv(a,b,c,d) FROM STDIN WITH (FORMAT CSV)" buf) :status))
"COPY 1000"
ELISP> (pg-result (pg-exec *pg* "SELECT * FROM copy_csv LIMIT 3") :tuples)
((0 0 "A" "ajoskqunbrx")
(1 1 "B" "pzmoyefgywu")
(2 4 "C" "blylbnhnrdb"))
```
~~~
## From PostgreSQL to Emacs
You can copy from PostgreSQL into an Emacs buffer using the function `pg-copy-to-buffer`, as
illustrated below.
~~~admonish example title="Dumping a PostgreSQL table into an Emacs buffer as CSV"
```lisp
ELISP> (let ((res (pg-copy-to-buffer *pg* "COPY copy_csv TO STDOUT WITH (FORMAT CSV, HEADER TRUE)"
(get-buffer-create "*pg-csv*"))))
(pg-result res :status))
"COPY 1000"
```
~~~
The following more verbose example illustrates fetching CSV data from an online source, importing it
into PostgreSQL, removing some unneeded columns and querying the data.
~~~admonish example title="Fetching and querying online CSV datasets"
```lisp
ELISP> (with-temp-buffer
(url-insert-file-contents "https://www.data.gouv.fr/fr/datasets/r/51606633-fb13-4820-b795-9a2a575a72f1")
(pg-exec *pg* "CREATE TABLE cities(
insee_code TEXT NOT NULL,
city_code TEXT,
zip_code NUMERIC,
label TEXT NOT NULL,
latitude FLOAT,
longitude FLOAT,
department_name TEXT,
department_number VARCHAR(3),
region_name TEXT,
region_geojson_name TEXT)")
(pg-result (pg-copy-from-buffer *pg* "COPY cities FROM STDIN WITH (FORMAT CSV, DELIMITER ',', HEADER TRUE)"
(current-buffer)) :status))
"COPY 39145"
ELISP> (pg-result (pg-exec *pg* "ALTER TABLE cities DROP COLUMN region_name") :status)
"ALTER TABLE"
ELISP> (pg-result (pg-exec *pg* "ALTER TABLE cities DROP COLUMN region_geojson_name") :status)
"ALTER TABLE"
ELISP> (pg-result (pg-exec *pg* "ALTER TABLE cities DROP COLUMN label") :status)
"ALTER TABLE"
ELISP> (pg-result (pg-exec *pg* "SELECT * FROM cities WHERE city_code LIKE 'toulouse%'") :tuples)
(("39533" "toulouse le chateau" 39230 46.821901729 5.583200112 "jura" "39")
("31555" "toulouse" 31100 43.596037953 1.432094901 "haute-garonne" "31")
("31555" "toulouse" 31300 43.596037953 1.432094901 "haute-garonne" "31")
("31555" "toulouse" 31400 43.596037953 1.432094901 "haute-garonne" "31")
("31555" "toulouse" 31500 43.596037953 1.432094901 "haute-garonne" "31")
("31555" "toulouse" 31000 43.596037953 1.432094901 "haute-garonne" "31")
("31555" "toulouse" 31200 43.596037953 1.432094901 "haute-garonne" "31"))
```
~~~
pg-el-0.54/doc/src/error-handling.md 0000664 0000000 0000000 00000012614 15005352415 0017243 0 ustar 00root root 0000000 0000000 # Error handling
Errors signaled by PostgreSQL will be converted into an Emacs Lisp error that subclasses `pg-error`.
You can handle these errors as usual in Emacs Lisp, as shown in the example below.
~~~admonish example title="Basic error handling"
```lisp
ELISP> (ignore-errors (pg-exec *pg* "SELECT ###"))
nil
ELISP> (condition-case nil
(pg-exec *pg* "SELECT ###")
(pg-error 42))
42 (#o52, #x2a, ?*)
```
~~~
Some errors are converted into specific subclasses of `pg-error`, as listed below. We can
discriminate the error category thanks to [PostgreSQL's SQLSTATE
support](https://www.postgresql.org/docs/17/errcodes-appendix.html).
| Error class | Meaning |
|-----------------------------------|---------------------------------------------------------------------------------|
| pg-connection-error | Connection failure |
| pg-invalid-password | Invalid password or authentication data |
| pg-feature-not-supported | PostgreSQL feature not supported |
| pg-syntax-error | Syntax error |
| pg-undefined-table | Undefined table |
| pg-undefined-column | Undefined column |
| pg-undefined-function | Undefined function |
| pg-copy-failed | PostgreSQL COPY failed |
| pg-connect-timeout | PostgreSQL connection attempt timed out |
| pg-type-error | When serializing, an argument was of an unexpected type |
| pg-numeric-value-out-of-range | Numeric value out of range |
| pg-division-by-zero | Division by zero |
| pg-floating-point-exception | Floating point exception |
| pg-array-subscript-error | Array subscript error |
| pg-datetime-field-overflow | Overflow in a datetime field |
| pg-invalid-text-representation | Invalid text representation |
| pg-invalid-binary-representation | Invalid binary representation |
| pg-character-not-in-repertoire | Character not in repertoire |
| pg-datatype-mismatch | Datatype mismatch |
| pg-json-error | JSON-related error |
| pg-integrity-constraint-violation | Violation of an integrity constraint |
| pg-restrict-violation | Restrict violation |
| pg-not-null-violation | Violation of a not NULL constraint |
| pg-foreign-key-violation | Violation of a FOREIGN KEY constraint |
| pg-unique-violation | Violation of a UNIQUE constraint |
| pg-check-violation | Violation of a CHECK constraint |
| pg-exclusion-violation | Violation of an exclusion constraint |
| pg-plpgsql-error | PL/pgSQL error |
| pg-transaction-timeout | Transaction timeout |
| pg-insufficient-resources | Insufficient resources on the backend server (eg. memory full) |
| pg-disk-full | Disk full on the backend server |
| pg-too-many-connections | Too many connections to the backend |
| pg-internal-error | Internal error in the backend |
You can undertake error handling for specific error categories as shown in the example below:
~~~admonish example title="Differentiated error handling"
```lisp
ELISP> (condition-case nil
(pg-exec *pg* "SELECT 2147483649::int4")
(pg-numeric-value-out-of-range (message "Numeric overflow"))
(pg-syntax-error (message "Syntax error"))
(pg-error (message "Generic error")))
"Numeric overflow"
```
~~~
Please note that some semi-compatible PostgreSQL variants do not implement fine-grained SQLSTATE
error reporting, simply returning most errors as an “internal error” (this is the case of CrateDB in
2025-02, for example).
pg-el-0.54/doc/src/feedback.md 0000664 0000000 0000000 00000000232 15005352415 0016045 0 ustar 00root root 0000000 0000000 # Your feedback
Bug reports should be filed as issues on [our GitHub project page](https://github.com/emarsden/pg-el).
Pull requests are also welcome!
pg-el-0.54/doc/src/hstore.md 0000664 0000000 0000000 00000002631 15005352415 0015632 0 ustar 00root root 0000000 0000000 # The HSTORE key-value type
There is support for the PostgreSQL [HSTORE
extension](https://www.postgresql.org/docs/current/hstore.html), which can store key/value pairs
in a single PostgreSQL column. It's necessary to call `pg-hstore-setup` before using this
functionality, to load the extension if necessary and to set up our parser support for the HSTORE
type.
~~~admonish example title="Using HSTORE values"
```lisp
ELISP> (pg-hstore-setup *pg*)
ELISP> (defvar *hs* (car (pg-result (pg-exec *pg* "SELECT 'foo=>bar'::hstore") :tuple 0)))
*hs*
ELISP> (gethash "foo" *hs*)
"bar"
ELISP> (hash-table-count *hs*)
1 (#o1, #x1, ?\C-a)
;; There is no guarantee as to the value stored for the 'a' key (duplicate)
ELISP> (setq *hs* (car (pg-result (pg-exec *pg* "SELECT 'a=>1,foobles=>2,a=>66'::hstore") :tuple 0)))
#
ELISP> (hash-table-count *hs*)
2 (#o2, #x2, ?\C-b)
ELISP> (pg-result (pg-exec *pg* "SELECT akeys('biz=>NULL,baz=>42,boz=>66'::hstore)") :tuple 0)
(["baz" "biz" "boz"])
```
~~~
~~~admonish example title="Serialization support for HSTORE values"
```lisp
ELISP> (pg-hstore-setup *pg*)
ELISP> (let ((ht (make-hash-table :test #'equal)))
(puthash "biz" "baz" ht)
(puthash "foo" "bar" ht)
(puthash "more" "than" ht)
(let* ((res (pg-exec-prepared con "SELECT $1 ? 'foo'" `((,ht . "hstore"))))
(pg-result res :tuple 0))))
(t)
```
~~~
pg-el-0.54/doc/src/installation.md 0000664 0000000 0000000 00000002053 15005352415 0017025 0 ustar 00root root 0000000 0000000 # Installation
You can install via the MELPA package archive, or with `package-vc-install`, or with `use-package`.
## Installing via MELPA
Install via the [MELPA package archive](https://melpa.org/partials/getting-started.html) by
including the following in your Emacs initialization file (`.emacs.el` or `init.el`):
```lisp
(require 'package)
(add-to-list 'package-archives '("melpa" . "https://melpa.org/packages/") t)
```
then saying
M-x package-install RET pg
## Installing with package-vc-install
With Emacs 29, you can install the library from the latest Github revision (this requires git to be
installed) using:
(unless (package-installed-p 'pg)
(package-vc-install "https://github.com/emarsden/pg-el" nil nil 'pg))
You can later update to the latest version with `M-x package-vc-upgrade RET pg RET`.
## Installing with `use-package`
If you prefer to use the `use-package` macro, which is built in to Emacs 29, you can use (requires
git to be installed):
(use-package pg :vc (:url "https://github.com/emarsden/pg-el"))
pg-el-0.54/doc/src/json.md 0000664 0000000 0000000 00000006170 15005352415 0015301 0 ustar 00root root 0000000 0000000 # JSON and JSONB values
PostgreSQL has quite a lot of [support for storing, saving and processing JSON and JSONB
data](https://www.postgresql.org/docs/current/functions-json.html). pg-el is able to deserialize
JSON and JSONB values into Emacs Lisp structures such as hashtables (for dicts), arrays, numbers,
strings and so on.
This library will parse and represent JSON/JSONB data either using the JSON support built into Emacs
with libjansson (see function `json-available-p`, from version 28.1), or using the `json.el`
library. There are some differences in the ways these methods handle dictionaries and specific
values such as NULL, false, [] and {}. Our examples below use the builtin JSON support in Emacs.
~~~admonish example title="Retrieving and manipulating JSON data"
```lisp
ELISP> (defun scalar (sql) (car (pg-result (pg-exec *pg* sql) :tuple 0)))
scalar
ELISP> (let ((json (scalar "SELECT '[5,7]'::json")))
(aref json 0))
5 (#o5, #x5, ?\C-e)
ELISP> (let ((json (scalar "SELECT '[42.0,77.7]'::jsonb")))
(aref json 1))
77.7
ELISP> (scalar "SELECT '[]'::json")
[]
ELISP> (scalar "SELECT '{}'::json")
#
ELISP> (let ((json (scalar "SELECT '{\"a\": 42, \"b\": \"foo\"}'::json")))
(gethash "b" json))
"foo"
ELISP> (let ((json (scalar "SELECT '{\"a\": [0,1,2,null]}'::json")))
(gethash "a" json))
[0 1 2 :null]
```
~~~
pg-el can also serialize Emacs Lisp structures into the PostgreSQL JSON format, for use in prepared
statements.
~~~admonish example title="Serializing objects to JSON / JSONB"
```lisp
ELISP> (let ((ht (make-hash-table)))
(puthash "biz" 45 ht)
(puthash "boz" -5.5 ht)
(puthash "comment" "good stuff" ht)
(pg-result (pg-exec-prepared *pg* "SELECT $1->'boz'" `((,ht . "json"))) :tuple 0))
(-5.5)
ELISP> (let ((ht (make-hash-table)))
(puthash "biz" 45 ht)
(puthash "boz" -5.5 ht)
(puthash "comment" "good stuff" ht)
;; the '-' jsonb operator deletes a matching key/value mapping
(let* ((res (pg-exec-prepared *pg* "SELECT $1 - 'boz'" `((,ht . "jsonb"))))
(row (pg-result res :tuple 0)))
(gethash "comment" (cl-first row) )))
"good stuff"
```
~~~
## Support for the JSON path language (jsonpath type)
pg-el serializes and deserializes [JSONPATH
expressions](https://www.postgresql.org/docs/current/functions-json.html#FUNCTIONS-SQLJSON-PATH) as
strings, as illustrated below. You can use them as arguments to prepared statements.
~~~admonish example title="Serializing and deserializing JSON path expressions"
```lisp
ELISP> (pg-result (pg-exec *pg* "SELECT 'true'::jsonpath") :tuple 0)
(list "true")
ELISP> (pg-result (pg-exec *pg* "SELECT '$[*] ? (@ < 1 || @ > 5)'::jsonpath") :tuple 0)
(list "$[*]?(@ < 1 || @ > 5)")
ELISP> (let* ((sql "SELECT jsonb_path_query($1, $2)")
(dict (make-hash-table :test #'equal))
(_ (puthash "h" 5.6 dict))
(params `((,dict . "jsonb") ("$.h.floor()" . "jsonpath")))
(res (pg-exec-prepared con sql params))
(row (pg-result res :tuple 0)))
(cl-first row))
5
```
~~~
pg-el-0.54/doc/src/postgis.md 0000664 0000000 0000000 00000003570 15005352415 0016021 0 ustar 00root root 0000000 0000000 # Data types used by the PostGIS extension
There is deserialization (and trivial serialization) support for the data types used by the
[PostGIS extension](http://www.postgis.net/). It's necessary to require the `pg-gis` library and to
call `pg-setup-postgis` before using this functionality, to load the extension if necessary and to
set up our deserialization support for the types used by PostGIS (in particular, the `geometry` and
`geography` types).
```lisp
(require 'pg-gis)
(pg-setup-postgis *pg*)
```
PostGIS sends values over the wire in HEXEWKB format ([Extended Well-Known
Binary](https://en.wikipedia.org/wiki/Well-known_text_representation_of_geometry#Well-known_binary)
encoded in hexadecimal), such as `01010000200400000000000000000000000000000000000000` which
represents the well-known text (WKT) `POINT (0 0)`.
If the variable `pg-gis-use-geosop` is non-nil, we parse this format using the `geosop` commandline
utility function from GEOS (often available in packages named `geos-bin` or similar). Otherwise, we
leave it as a string (it can be parsed using PostGIS functions such as `ST_AsText`).
```shell
sudo apt install geos-bin
```
~~~admonish example title="Using PostGIS datatypes"
```lisp
ELISP> (require 'pg-gis)
ELISP> (pg-setup-postgis *pg*)
ELISP> (pg-result (pg-exec *pg* "SELECT 'POINT(4 5)'::geometry") :tuple 0)
("POINT (4 5)")
ELISP> (pg-result (pg-exec *pg* "SELECT Box2D(ST_GeomFromText('LINESTRING(1 2, 3 4, 5 6)'))") :tuple 0)
("BOX(1 2,5 6)")
ELISP> (pg-result (pg-exec *pg* "SELECT 'MULTILINESTRING((-118.584 38.374 20,-118.583 38.5 30),(-71.05957 42.3589 75, -71.061 43 90))'::geometry") :tuple 0)
("MULTILINESTRING Z ((-118.584 38.374 20, -118.583 38.5 30), (-71.05957 42.3589 75, -71.061 43 90))")
ELISP> (pg-result (pg-exec *pg* "SELECT 'SPHEROID[\"GRS_1980\",6378137,298.2572]'::spheroid") :tuple 0)
("SPHEROID(\"GRS_1980\",6378137,298.2572)")
```
~~~
pg-el-0.54/doc/src/prepared-statements.md 0000664 0000000 0000000 00000006506 15005352415 0020322 0 ustar 00root root 0000000 0000000 # Using prepared statements
pg-el has support for PostgreSQL's **extended query protocol** (prepared statements), which you
should use to prevent SQL injection attacks.
~~~admonish example title="Prepared statements and the extended query protocol"
```lisp
ELISP> (pg-result (pg-exec *pg* "CREATE TABLE count_test(key INTEGER, val INTEGER)") :status)
"CREATE TABLE"
ELISP> (dotimes (i 100)
(pg-exec-prepared *pg* "INSERT INTO count_test VALUES($1, $2)"
`((,i . "int4") (,(* i i) . "int4"))))
nil
ELISP> (let ((res (pg-exec *pg* "SELECT count(*) FROM count_test")))
(car (pg-result res :tuple 0)))
100
ELISP> (defvar *multires* (pg-exec-prepared *pg* "SELECT key FROM count_test" nil :max-rows 10))
*multires*
ELISP> (pg-result *multires* :tuples)
((0)
(1)
(2)
(3)
(4)
(5)
(6)
(7)
(8)
(9))
ELISP> (pg-result *multires* :incomplete)
t
ELISP> (setq *multires* (pg-fetch *pg* *multires* :max-rows 5))
;; *multires*
ELISP> (pg-result *multires* :tuples)
((10)
(11)
(12)
(13)
(14))
ELISP> (pg-result *multires* :incomplete)
t
ELISP> (setq *multires* (pg-fetch *pg* *multires* :max-rows 100))
;; *multires*
ELISP> (length (pg-result *multires* :tuples))
85
ELISP> (pg-result *multires* :incomplete)
nil
```
~~~
If your application will use the same prepared statement multiple times, you can ask PostgreSQL to
parse/analyze the SQL query and bind parameters once, then use the prepared statement with different
variable values multiple times. This will improve performance by avoiding the overhead of reparsing
and reoptimizing a query plan multiple times.
~~~admonish example title="Fetching from a previously prepared statement"
The example function below (which comes from the [PGmacs](https://github.com/emarsden/pgmacs)
browsing/editing interface for PostgreSQL) illustrates the use of the utility function
`pg-ensure-prepared-statement`, which either retrieves the cached prepared statement if the function
has already been called (pg-el maintains a per-connection cache of prepared statements), or prepares
the statement given the SQL and the argument types if the function has not yet been called in this
PostgreSQL connection. The prepared statement is executed using `pg-fetch-prepared`, which functions
in a similar way to function `pg-fetch`.
```lisp
(defun pgmacs--table-primary-keys (con table)
"Return the columns active as PRIMARY KEY in TABLE.
Uses PostgreSQL connection CON."
(let* ((schema (if (pg-qualified-name-p table)
(pg-qualified-name-schema table)
"public"))
(tname (if (pg-qualified-name-p table)
(pg-qualified-name-name table)
table))
(sql "SELECT a.attname
FROM pg_catalog.pg_index idx
JOIN pg_catalog.pg_class c ON c.oid = idx.indrelid
JOIN pg_catalog.pg_attribute a ON a.attrelid = c.oid AND a.attnum = ANY(idx.indkey)
JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
WHERE relname = $1 AND nspname = $2 AND indisprimary")
(argument-types (list "text" "text"))
(params `((,tname . "text") (,schema . "text")))
(ps-name (pg-ensure-prepared-statement con "QRY-tbl-primary-keys" sql argument-types))
(res (pg-fetch-prepared con ps-name params)))
(mapcar #'cl-first (pg-result res :tuples))))
```
~~~
pg-el-0.54/doc/src/quickstart.md 0000664 0000000 0000000 00000007530 15005352415 0016523 0 ustar 00root root 0000000 0000000 # Quickstart to using pg-el
These illustrative examples assume you have a PostgreSQL user `pgeltestuser` with password `pgeltest`
who owns a database `pgeltestdb`. To set that up with a local PostgreSQL database, use commands
similar to the following:
~~~admonish example title="Create a test user and database with a local PostgreSQL"
```shell
sudo -u postgres createuser --createdb pgeltestuser
sudo -u postgres createdb --owner=pgeltestuser pgeltestdb
sudo -u postgres psql
postgres=# alter user pgeltestuser with encrypted password 'pgeltest';
```
If you want to enable and test the support for the HSTORE and pgvector extensions, you will need to
load them into the test database as PostgreSQL superuser (the normal user `pgeltestuser` we created
above is not allowed to load extensions). The pgvector extension generally needs to be installed
separately from PostgreSQL (for example by installing the `postgresql-17-pgvector` package on Debian).
```shell
sudo -u postgres psql
postgres=# CREATE EXTENSION hstore;
CREATE EXTENSION
postgres=# CREATE EXTENSION vector;
CREATE EXTENSION
```
The same applies to the PostGIS extension.
~~~
Now, from your preferred Emacs Lisp shell (here `M-x ielm`), check that you are able to connect to
and authenticate with the database from Emacs:
~~~admonish example title="Connect to PostgreSQL from Emacs"
```lisp
ELISP> (require 'pg)
pg
ELISP> (defvar *pg* (pg-connect "pgeltestdb" "pgeltestuser" "pgeltest" "localhost" 5432))
*pg*
```
~~~
If you don’t already have PostgreSQL installed locally, it may be convenient for you to use
[PostgreSQL Docker Community images](https://hub.docker.com/_/postgres/), using
[Docker](https://www.docker.com/) or [Podman](https://podman.io/). I recommend installing Podman
because it’s fully free software, whereas Docker is partly commercial. Podman is also able to run
containers “rootless”, without special privileges, which is good for security, and doesn’t require a
background daemon. Podman has a docker-compatible commandline interface.
~~~admonish example title="Start up PostgreSQL inside a Podman container"
```shell
podman run -d --name pgsql \
-v /dev/log:/dev/log \
-v /var/run/postgresql:/var/run/postgresql \
--publish 5432:5432 \
-e POSTGRES_DB=pgeltestdb \
-e POSTGRES_USER=pgeltestuser \
-e POSTGRES_PASSWORD=pgeltest \
docker.io/library/postgres:latest
```
~~~
then connect from Emacs with
ELISP> (pg-connect "pgeltestdb" "pgeltestuser" "pgeltest" "localhost" 5432)
or connect over a local Unix socket
ELISP> (pg-connect-local "/var/run/postgresql/.s.PGSQL.5432" "pgeltestdb" "pgeltestuser" "pgeltest")
Now some simple interactions with the database:
```lisp
ELISP> (pg-backend-version *pg*)
"PostgreSQL 16.1 (Debian 16.1-1) on x86_64-pc-linux-gnu, compiled by gcc (Debian 13.2.0-6) 13.2.0, 64-bit"
ELISP> (let ((res (pg-exec *pg* "SELECT 42, 1.23")))
(pg-result res :tuple 0))
(42 1.23)
```
Note that the first query has returned an Emacs Lisp string, and the second query has returned a
tuple (represented as a list) where the first value is an Emacs Lisp integer, and the second an
Emacs Lisp float. The pg-el library has ensured **automatic type coercion** from the SQL types to the
most appropriate Emacs Lisp type.
The following example shows the output from a query that returns multiple rows. It returns a list of
tuples, each tuple containing a single integer.
~~~admonish example title="A query that returns multiple rows"
```lisp
ELISP> (let ((res (pg-exec *pg* "SELECT * FROM generate_series(50,55)")))
(pg-result res :tuples))
((50)
(51)
(52)
(53)
(54)
(55))
```
~~~
An SQL query that returns no results will return the empty list.
```lisp
ELISP> (let ((res (pg-exec *pg* "SELECT 3 where 1=0")))
(pg-result res :tuples))
nil
```
For more, see the [usage information](usage.html) and the [API documentation](API.html).
pg-el-0.54/doc/src/schemas.md 0000664 0000000 0000000 00000005375 15005352415 0015761 0 ustar 00root root 0000000 0000000 # Schema-qualified names
## Introduction to PostgreSQL schemas
A [schema in PostgreSQL](https://www.postgresql.org/docs/current/ddl-schemas.html) (and in the ANSI
SQL standard) is a collection of tables, views, functions, constraints, indexes and sequences.
PostgreSQL allows you to define multiple schemas in the same database, and different schemas can
include data structures (such as tables) with the same name. You can think of them as namespaces for
tables.
Here is the hierarchy of names:
- each PostgreSQL instance can have multiple databases;
- each database can contain multiple schemas;
- each schema can contain multiple tables (and views, functions and so on).
In SQL syntax you will use **qualified names** including a schema, such as `myschema.mytable`
anywhere where you use a “normal” unqualified name `mytable`. Objects created by the user are by default
in the schema named `public` (this schema is not normally printed in query results because the
current [`search_path`](https://www.postgresql.org/docs/current/ddl-schemas.html#DDL-SCHEMAS-PATH)
includes `public`). Objects used internally by PostgreSQL are in system-defined schemas
such as `pg_catalog`, `pg_toast` and `information_schema`.
The pg-el library represents schema-qualified names using `pg-qualified-name` objects, which are
`cl-defstruct` objects. This means you can create them as follows:
```lisp
(make-pg-qualified-name :schema "myschema" :name "mytable")
```
You can then use these objects anywhere you would use a normal table name, escaping special
characters using `pg-print-qualified-name`. The `pg-tables` function will return normal string names
for tables in the `public` namespace, and `pg-qualified-name` objects for tables in other namespaces.
~~~admonish example title="Using schema-qualified names"
```lisp
ELISP> (let ((res (pg-exec *pg* "SHOW search_path")))
(pg-result res :tuple 0))
("\"$user\", public")
ELISP> (let ((res (pg-exec *pg* "CREATE SCHEMA custom")))
(pg-result res :status))
"CREATE SCHEMA"
ELISP> (let* ((qn (make-pg-qualified-name :schema "custom" :name "mytable"))
(sql (format "CREATE TABLE IF NOT EXISTS %s(id INTEGER)"
(pg-print-qualified-name qn)))
(res (pg-exec *pg* sql)))
(pg-result res :status))
"CREATE TABLE"
ELISP> (pg-tables *pg*)
("purchases" "customers" #s(pg-qualified-name :schema "custom" :name "mytable"))
;; We can use schema-qualified names as parameters for a prepared query.
ELISP> (let* ((qn (make-pg-qualified-name :schema "custom" :name "mytable"))
(pqn (pg-print-qualified-name qn))
(sql "SELECT pg_total_relation_size($1)")
(res (pg-exec-prepared *pg* sql `((,pqn . "text")))))
(pg-result res :tuple 0))
(0)
```
~~~
pg-el-0.54/doc/src/special-features.md 0000664 0000000 0000000 00000010334 15005352415 0017561 0 ustar 00root root 0000000 0000000 # Special pg-el features
## Handling parameter changes
The PostgreSQL backend informs connected clients when certain server parameters change, by sending
them a special `ParameterStatus` message. These notifications are sent for `GUC_REPORT` parameters,
which include the `client_encoding`, the `DateStyle`, `TimeZone`, `server_encoding`,
`in_hot_standby` and `is_superuser`. You can register your interest in these messages by adding a
handler function to `pg-parameter-change-functions`. Each of these handler functions will be called
when such a message is received, with three arguments: the connection to PostgreSQL, the parameter
name and the parameter value.
These messages are sent asynchronously.
~~~admonish example title="Handling changes to session timezone"
```lisp
ELISP> (defun handle-tz-change (_con name value)
(when (string= name "TimeZone")
(message "New timezone in PostgreSQL is %s" value)))
handle-tz-change
ELISP> (cl-pushnew #'handle-tz-change pg-parameter-change-functions)
(handle-tz-change pg-handle-parameter-client-encoding)
ELISP> (pg-result (pg-exec *pg* "SET SESSION TIME ZONE 'Europe/Paris'") :status)
"SET"
ELISP> (pg-result (pg-exec *pg* "SET SESSION TIME ZONE 'America/Chicago'") :status)
"SET"
```
You should see either one or two messages announcing a parameter change (the first statement won't
generate a ParameterStatus message if the time zone was already set to Europe/Paris).
~~~
## Handling asynchronous notifications
PostgreSQL has an asynchronous notification functionality based on the [LISTEN and NOTIFY
commands](https://www.postgresql.org/docs/current/libpq-notify.html). A client can register its
interest in a particular notification channel with the `LISTEN` command, and later stop listening
with the `UNLISTEN` command. All clients listening on a particular channel will be notified
asynchronously when a `NOTIFY` command with that channel name is executed by any client. A “payload”
string can be passed to communicate additional data to the listeners. In pg-el you can register
functions to be called when an asynchronous notification is received by adding them to the
`pg-handle-notice-functions`. Each handler function is called with a single argument, the notice, in
the form of a `pgerror` struct.
~~~admonish example title="Looking out for DROP TABLE commands"
PostgreSQL will signal an asynchronous notification for a `DROP TABLE IF EXISTS` command that attempts
to remove a table that doesn't exist, as a form of warning message. We can register our interest in
this message by locally binding the `pg-handle-notice-functions` variable.
```lisp
ELISP> (defun deity-p (notif)
;; the notification message will be localized, but should contain the table name
(when (cl-search "deity" (pgerror-message notif))
(message "Indeed")))
ELISP> (let ((pg-handle-notice-functions (list #'deity-p)))
(pg-result (pg-exec *pg* "DROP TABLE IF EXISTS deity") :status))
"DROP TABLE"
```
You should see the message in the minibuffer.
~~~
~~~admonish example title="Using NOTIFY / LISTEN"
This example illustrates the use of NOTIFY and LISTEN. It's obviously not very useful with a single
client; real applications would involve multiple event consumers and possibly also multiple event
producers. This functionality can be used to implement simple publish-subscribe communication
patterns, with PostgreSQL serving as an event broker.
```lisp
(cl-flet ((notification-handler (channel payload)
(message "Async notification on %s: %s" channel payload)))
(pg-add-notification-handler *pg* #'notification-handler)
(pg-exec *pg* "LISTEN yourheart")
(pg-exec *pg* "NOTIFY yourheart, 'foobles'")
(pg-exec *pg* "SELECT 'ignored'")
(pg-exec *pg* "NOTIFY yourheart, 'bazzles'")
(sleep-for 10)
(pg-exec *pg* "SELECT 'ignored'")
(pg-exec *pg* "NOTIFY yourheart")
(pg-exec *pg* "SELECT 'ignored'")
  ;; The function pg_notify is an alternative to the NOTIFY statement, and more flexible if your
;; channel name is determined by a variable.
(pg-exec *pg* "SELECT pg_notify('yourheart', 'leaving')")
(pg-exec *pg* "SELECT 'ignored'")
(pg-exec *pg* "UNLISTEN yourheart")
(pg-exec *pg* "NOTIFY yourheart, 'Et redit in nihilum quod fuit ante nihil.'")))
```
~~~
pg-el-0.54/doc/src/usage.md 0000664 0000000 0000000 00000021475 15005352415 0015441 0 ustar 00root root 0000000 0000000 # Using pg-el
The examples below illustrate various features of pg-el in conjunction with PostgreSQL. A more
complete set of examples can be found in our [test suite](https://github.com/emarsden/pg-el/tree/main/test).
The examples all assume that you are using ielm as an Emacs Lisp shell (start with `M-x ielm`) and
that you have a connection to PostgreSQL:
~~~admonish example title="Connect to PostgreSQL from Emacs"
```lisp
ELISP> (require 'cl-lib)
cl-lib
ELISP> (require 'pg)
pg
ELISP> (defvar *pg* (pg-connect "pgeltestdb" "pgeltestuser" "pgeltest" "localhost" 5432))
*pg*
```
~~~
The library should in principle convert from any obscure Emacs encoding to the UTF-8 supported by
PostgreSQL.
~~~admonish example title="Unicode support"
```lisp
ELISP> (let ((res (pg-exec *pg* "SELECT 'était ' || 'là'")))
(pg-result res :tuple 0))
("était là")
ELISP> (let ((res (pg-exec *pg* "select length('(╯°□°)╯︵ ┻━┻')")))
(pg-result res :tuple 0))
(12)
ELISP> (let ((res (pg-exec *pg* "SELECT '😎'")))
(pg-result res :tuple 0))
("😎")
```
~~~
You can create and delete tables, and query the database metainformation using `pg-tables` and
`pg-columns`.
~~~admonish example title="Working with tables and DDL"
```lisp
ELISP> (pg-result (pg-exec *pg* "CREATE TABLE messages(id BIGSERIAL PRIMARY KEY, msg TEXT)") :status)
"CREATE TABLE"
ELISP> (member "messages" (pg-tables *pg*))
("messages")
ELISP> (member "msg" (pg-columns *pg* "messages"))
("msg")
ELISP> (pg-result (pg-exec *pg* "DROP TABLE messages") :status)
"DROP TABLE"
ELISP> (member "messages" (pg-tables *pg*))
nil
```
~~~
The library has support for PostgreSQL's **extended query protocol** (prepared statements), which you
should use to prevent SQL injection attacks.
~~~admonish example title="Prepared statements and the extended query protocol"
```lisp
ELISP> (pg-result (pg-exec *pg* "CREATE TABLE count_test(key INTEGER, val INTEGER)") :status)
"CREATE TABLE"
ELISP> (dotimes (i 100)
(pg-exec-prepared *pg* "INSERT INTO count_test VALUES($1, $2)"
`((,i . "int4") (,(* i i) . "int4"))))
nil
ELISP> (let ((res (pg-exec *pg* "SELECT count(*) FROM count_test")))
(car (pg-result res :tuple 0)))
100
ELISP> (defvar *multires* (pg-exec-prepared *pg* "SELECT key FROM count_test" nil :max-rows 10))
*multires*
ELISP> (pg-result *multires* :tuples)
((0)
(1)
(2)
(3)
(4)
(5)
(6)
(7)
(8)
(9))
ELISP> (pg-result *multires* :incomplete)
t
ELISP> (setq *multires* (pg-fetch *pg* *multires* :max-rows 5))
;; *multires*
ELISP> (pg-result *multires* :tuples)
((10)
(11)
(12)
(13)
(14))
ELISP> (pg-result *multires* :incomplete)
t
ELISP> (setq *multires* (pg-fetch *pg* *multires* :max-rows 100))
;; *multires*
ELISP> (length (pg-result *multires* :tuples))
85
ELISP> (pg-result *multires* :incomplete)
nil
```
~~~
~~~admonish example title="Casting SQL values to a specific type"
```lisp
ELISP> (let ((res (pg-exec *pg* "SELECT pi()::int4")))
(car (pg-result res :tuple 0)))
3
ELISP> (let ((res (pg-exec *pg* "SELECT 42::text")))
(car (pg-result res :tuple 0)))
"42"
ELISP> (let ((res (pg-exec *pg* "SELECT '42'::smallint")))
(car (pg-result res :tuple 0)))
42 (#o52, #x2a, ?*)
ELISP> (let ((res (pg-exec *pg* "SELECT 'PT3H4M42S'::interval")))
(car (pg-result res :tuple 0)))
"03:04:42"
```
~~~
~~~admonish example title="Working with boolean vectors"
Boolean vectors are only supported in Emacs from version 27 onwards (you can check whether the
function `make-bool-vector` is fboundp).
```lisp
ELISP> (let ((res (pg-exec *pg* "SELECT '1010'::bit(4)")))
(equal (car (pg-result res :tuple 0))
(coerce (vector t nil t nil) 'bool-vector)))
t
ELISP> (let ((res (pg-exec *pg* "SELECT b'1001000'")))
(equal (car (pg-result res :tuple 0))
(coerce (vector t nil nil t nil nil nil) 'bool-vector)))
t
ELISP> (let ((res (pg-exec *pg* "SELECT '101111'::varbit(6)")))
(equal (car (pg-result res :tuple 0))
(coerce (vector t nil t t t t) 'bool-vector)))
t
```
~~~
Emacs has support for bignums from version 27.2 onwards.
~~~admonish example title="Using bignums"
```lisp
ELISP> (fboundp 'bignump)
t
ELISP> (let ((res (pg-exec *pg* "SELECT factorial(25)")))
(car (pg-result res :tuple 0)))
15511210043330985984000000 (#o6324500606375411017360000000, #xcd4a0619fb0907bc00000)
```
~~~
~~~admonish example title="Special floating point syntax"
```lisp
ELISP> (let ((res (pg-exec *pg* "SELECT 'Infinity'::float4")))
(car (pg-result res :tuple 0)))
1.0e+INF
ELISP> (let ((res (pg-exec *pg* "SELECT '-Infinity'::float8")))
(car (pg-result res :tuple 0)))
-1.0e+INF
ELISP> (let ((res (pg-exec *pg* "SELECT 'NaN'::float8")))
(car (pg-result res :tuple 0)))
0.0e+NaN
```
~~~
~~~admonish title="Numerical ranges"
```lisp
ELISP> (let ((res (pg-exec *pg* "SELECT int4range(10, 20)")))
(car (pg-result res :tuple 0)))
(:range 91 10 41 20)
;; note that 91 is the character ?\[ and 41 is the character ?\)
ELISP> (let ((res (pg-exec *pg* "SELECT int4range(10, 20)")))
(equal (car (pg-result res :tuple 0))
(list :range ?\[ 10 ?\) 20)))
t
ELISP> (let ((res (pg-exec *pg* "SELECT int4range(5,15) + int4range(10,20)")))
(equal (car (pg-result res :tuple 0))
(list :range ?\[ 5 ?\) 20)))
t
ELISP> (let ((res (pg-exec *pg* "SELECT int8range(5,15) * int8range(10,20)")))
(equal (car (pg-result res :tuple 0))
(list :range ?\[ 10 ?\) 15)))
t
ELISP> (let ((res (pg-exec *pg* "SELECT '(3,7)'::int4range")))
(equal (car (pg-result res :tuple 0))
(list :range ?\[ 4 ?\) 7)))
t
ELISP> (let ((res (pg-exec *pg* "SELECT int8range(1, 14, '(]')")))
(equal (car (pg-result res :tuple 0))
(list :range ?\[ 2 ?\) 15)))
t
ELISP> (let ((res (pg-exec *pg* "SELECT '[4,4)'::int4range")))
(equal (car (pg-result res :tuple 0))
(list :range)))
t
ELISP> (let ((res (pg-exec *pg* "SELECT numrange(33.33, 66.66)")))
(car (pg-result res :tuple 0)))
(:range 91 33.33 41 66.66)
ELISP> (let ((res (pg-exec *pg* "SELECT upper(numrange(-50.0, -40.0))")))
(car (pg-result res :tuple 0)))
-40.0
ELISP> (let ((res (pg-exec *pg* "SELECT numrange(NULL, 2.2)")))
(car (pg-result res :tuple 0)))
(:range 40 nil 41 2.2)
ELISP> (let ((res (pg-exec *pg* "SELECT numrange(NULL, NULL)")))
(car (pg-result res :tuple 0)))
(:range 40 nil 41 nil)
```
~~~
## Working with binary data
The [BYTEA type](https://www.postgresql.org/docs/current/datatype-binary.html) allows the storage of
binary strings, i.e. sequences of octets. They can contain NUL octets (the value zero).
~~~admonish example title="Using the BYTEA type"
```lisp
ELISP> (let ((res (pg-exec *pg* "SELECT '\\xDEADBEEF'::bytea")))
(equal (car (pg-result res :tuple 0))
(decode-hex-string "DEADBEEF")))
t
ELISP> (let ((res (pg-exec *pg* "SELECT '\\001\\003\\005'::bytea")))
(equal (car (pg-result res :tuple 0))
(string 1 3 5)))
t
ELISP> (let ((res (pg-exec *pg* "SELECT '\\x123456'::bytea || '\\x789a00bcde'::bytea")))
(equal (car (pg-result res :tuple 0))
(decode-hex-string "123456789a00bcde")))
t
ELISP> (let ((res (pg-exec *pg* "SELECT 'warning\\000'::bytea")))
(equal (length (car (pg-result res :tuple 0))) 8))
t
```
~~~
When sending binary data to PostgreSQL, either encode all potentially problematic octets, as we did
for NUL above, or send base64-encoded content and decode it in PostgreSQL. There are various other
useful functions for working with binary data on PostgreSQL, such as hash functions.
~~~admonish example title="Encoding and decoding binary data"
```lisp
ELISP> (pg-result (pg-exec *pg* "CREATE TABLE bt(blob BYTEA, tag int)") :status)
"CREATE TABLE"
ELISP> (let* ((size 512)
(random-octets (make-string size 0)))
(dotimes (i size)
(setf (aref random-octets i) (random 256)))
(setf (aref random-octets 0) 0)
(pg-exec-prepared *pg*
"INSERT INTO bt VALUES (decode($1, 'base64'), 42)"
`((,(base64-encode-string random-octets) . "text")))
(equal random-octets (car (pg-result (pg-exec *pg* "SELECT blob FROM bt WHERE tag=42") :tuple 0))))
t
ELISP> (let* ((res (pg-exec *pg* "SELECT sha256('foobles'::bytea)"))
(hx (encode-hex-string (car (pg-result res :tuple 0)))))
(equal hx (secure-hash 'sha256 "foobles")))
t
ELISP> (let* ((res (pg-exec *pg* "SELECT md5('foobles')"))
(r (car (pg-result res :tuple 0))))
(equal r (md5 "foobles")))
t
ELISP> (let* ((res (pg-exec *pg* "SELECT encode('foobles', 'base64')"))
(r (car (pg-result res :tuple 0))))
(equal r (base64-encode-string "foobles")))
t
```
~~~
## PostgreSQL arrays
(To be documented)
pg-el-0.54/doc/theme/ 0000775 0000000 0000000 00000000000 15005352415 0014315 5 ustar 00root root 0000000 0000000 pg-el-0.54/doc/theme/highlight.js 0000664 0000000 0000000 00000101260 15005352415 0016622 0 ustar 00root root 0000000 0000000 /*!
Highlight.js v11.9.0 (git: f47103d4f1)
(c) 2006-2023 undefined and other contributors
License: BSD-3-Clause
*/
var hljs=function(){"use strict";function e(t){
return t instanceof Map?t.clear=t.delete=t.set=()=>{
throw Error("map is read-only")}:t instanceof Set&&(t.add=t.clear=t.delete=()=>{
throw Error("set is read-only")
}),Object.freeze(t),Object.getOwnPropertyNames(t).forEach((n=>{
const i=t[n],s=typeof i;"object"!==s&&"function"!==s||Object.isFrozen(i)||e(i)
})),t}class t{constructor(e){
void 0===e.data&&(e.data={}),this.data=e.data,this.isMatchIgnored=!1}
ignoreMatch(){this.isMatchIgnored=!0}}function n(e){
return e.replace(/&/g,"&").replace(//g,">").replace(/"/g,""").replace(/'/g,"'")
}function i(e,...t){const n=Object.create(null);for(const t in e)n[t]=e[t]
;return t.forEach((e=>{for(const t in e)n[t]=e[t]})),n}const s=e=>!!e.scope
;class o{constructor(e,t){
this.buffer="",this.classPrefix=t.classPrefix,e.walk(this)}addText(e){
this.buffer+=n(e)}openNode(e){if(!s(e))return;const t=((e,{prefix:t})=>{
if(e.startsWith("language:"))return e.replace("language:","language-")
;if(e.includes(".")){const n=e.split(".")
;return[`${t}${n.shift()}`,...n.map(((e,t)=>`${e}${"_".repeat(t+1)}`))].join(" ")
}return`${t}${e}`})(e.scope,{prefix:this.classPrefix});this.span(t)}
closeNode(e){s(e)&&(this.buffer+="")}value(){return this.buffer}span(e){
this.buffer+=``}}const r=(e={})=>{const t={children:[]}
;return Object.assign(t,e),t};class a{constructor(){
this.rootNode=r(),this.stack=[this.rootNode]}get top(){
return this.stack[this.stack.length-1]}get root(){return this.rootNode}add(e){
this.top.children.push(e)}openNode(e){const t=r({scope:e})
;this.add(t),this.stack.push(t)}closeNode(){
if(this.stack.length>1)return this.stack.pop()}closeAllNodes(){
for(;this.closeNode(););}toJSON(){return JSON.stringify(this.rootNode,null,4)}
walk(e){return this.constructor._walk(e,this.rootNode)}static _walk(e,t){
return"string"==typeof t?e.addText(t):t.children&&(e.openNode(t),
t.children.forEach((t=>this._walk(e,t))),e.closeNode(t)),e}static _collapse(e){
"string"!=typeof e&&e.children&&(e.children.every((e=>"string"==typeof e))?e.children=[e.children.join("")]:e.children.forEach((e=>{
a._collapse(e)})))}}class c extends a{constructor(e){super(),this.options=e}
addText(e){""!==e&&this.add(e)}startScope(e){this.openNode(e)}endScope(){
this.closeNode()}__addSublanguage(e,t){const n=e.root
;t&&(n.scope="language:"+t),this.add(n)}toHTML(){
return new o(this,this.options).value()}finalize(){
return this.closeAllNodes(),!0}}function l(e){
return e?"string"==typeof e?e:e.source:null}function g(e){return h("(?=",e,")")}
function u(e){return h("(?:",e,")*")}function d(e){return h("(?:",e,")?")}
function h(...e){return e.map((e=>l(e))).join("")}function f(...e){const t=(e=>{
const t=e[e.length-1]
;return"object"==typeof t&&t.constructor===Object?(e.splice(e.length-1,1),t):{}
})(e);return"("+(t.capture?"":"?:")+e.map((e=>l(e))).join("|")+")"}
function p(e){return RegExp(e.toString()+"|").exec("").length-1}
const b=/\[(?:[^\\\]]|\\.)*\]|\(\??|\\([1-9][0-9]*)|\\./
;function m(e,{joinWith:t}){let n=0;return e.map((e=>{n+=1;const t=n
;let i=l(e),s="";for(;i.length>0;){const e=b.exec(i);if(!e){s+=i;break}
s+=i.substring(0,e.index),
i=i.substring(e.index+e[0].length),"\\"===e[0][0]&&e[1]?s+="\\"+(Number(e[1])+t):(s+=e[0],
"("===e[0]&&n++)}return s})).map((e=>`(${e})`)).join(t)}
const E="[a-zA-Z]\\w*",x="[a-zA-Z_]\\w*",w="\\b\\d+(\\.\\d+)?",y="(-?)(\\b0[xX][a-fA-F0-9]+|(\\b\\d+(\\.\\d*)?|\\.\\d+)([eE][-+]?\\d+)?)",_="\\b(0b[01]+)",O={
begin:"\\\\[\\s\\S]",relevance:0},v={scope:"string",begin:"'",end:"'",
illegal:"\\n",contains:[O]},k={scope:"string",begin:'"',end:'"',illegal:"\\n",
contains:[O]},N=(e,t,n={})=>{const s=i({scope:"comment",begin:e,end:t,
contains:[]},n);s.contains.push({scope:"doctag",
begin:"[ ]*(?=(TODO|FIXME|NOTE|BUG|OPTIMIZE|HACK|XXX):)",
end:/(TODO|FIXME|NOTE|BUG|OPTIMIZE|HACK|XXX):/,excludeBegin:!0,relevance:0})
;const o=f("I","a","is","so","us","to","at","if","in","it","on",/[A-Za-z]+['](d|ve|re|ll|t|s|n)/,/[A-Za-z]+[-][a-z]+/,/[A-Za-z][a-z]{2,}/)
;return s.contains.push({begin:h(/[ ]+/,"(",o,/[.]?[:]?([.][ ]|[ ])/,"){3}")}),s
},S=N("//","$"),M=N("/\\*","\\*/"),R=N("#","$");var j=Object.freeze({
__proto__:null,APOS_STRING_MODE:v,BACKSLASH_ESCAPE:O,BINARY_NUMBER_MODE:{
scope:"number",begin:_,relevance:0},BINARY_NUMBER_RE:_,COMMENT:N,
C_BLOCK_COMMENT_MODE:M,C_LINE_COMMENT_MODE:S,C_NUMBER_MODE:{scope:"number",
begin:y,relevance:0},C_NUMBER_RE:y,END_SAME_AS_BEGIN:e=>Object.assign(e,{
"on:begin":(e,t)=>{t.data._beginMatch=e[1]},"on:end":(e,t)=>{
t.data._beginMatch!==e[1]&&t.ignoreMatch()}}),HASH_COMMENT_MODE:R,IDENT_RE:E,
MATCH_NOTHING_RE:/\b\B/,METHOD_GUARD:{begin:"\\.\\s*"+x,relevance:0},
NUMBER_MODE:{scope:"number",begin:w,relevance:0},NUMBER_RE:w,
PHRASAL_WORDS_MODE:{
begin:/\b(a|an|the|are|I'm|isn't|don't|doesn't|won't|but|just|should|pretty|simply|enough|gonna|going|wtf|so|such|will|you|your|they|like|more)\b/
},QUOTE_STRING_MODE:k,REGEXP_MODE:{scope:"regexp",begin:/\/(?=[^/\n]*\/)/,
end:/\/[gimuy]*/,contains:[O,{begin:/\[/,end:/\]/,relevance:0,contains:[O]}]},
RE_STARTERS_RE:"!|!=|!==|%|%=|&|&&|&=|\\*|\\*=|\\+|\\+=|,|-|-=|/=|/|:|;|<<|<<=|<=|<|===|==|=|>>>=|>>=|>=|>>>|>>|>|\\?|\\[|\\{|\\(|\\^|\\^=|\\||\\|=|\\|\\||~",
SHEBANG:(e={})=>{const t=/^#![ ]*\//
;return e.binary&&(e.begin=h(t,/.*\b/,e.binary,/\b.*/)),i({scope:"meta",begin:t,
end:/$/,relevance:0,"on:begin":(e,t)=>{0!==e.index&&t.ignoreMatch()}},e)},
TITLE_MODE:{scope:"title",begin:E,relevance:0},UNDERSCORE_IDENT_RE:x,
UNDERSCORE_TITLE_MODE:{scope:"title",begin:x,relevance:0}});function A(e,t){
"."===e.input[e.index-1]&&t.ignoreMatch()}function I(e,t){
void 0!==e.className&&(e.scope=e.className,delete e.className)}function T(e,t){
t&&e.beginKeywords&&(e.begin="\\b("+e.beginKeywords.split(" ").join("|")+")(?!\\.)(?=\\b|\\s)",
e.__beforeBegin=A,e.keywords=e.keywords||e.beginKeywords,delete e.beginKeywords,
void 0===e.relevance&&(e.relevance=0))}function L(e,t){
Array.isArray(e.illegal)&&(e.illegal=f(...e.illegal))}function B(e,t){
if(e.match){
if(e.begin||e.end)throw Error("begin & end are not supported with match")
;e.begin=e.match,delete e.match}}function P(e,t){
void 0===e.relevance&&(e.relevance=1)}const D=(e,t)=>{if(!e.beforeMatch)return
;if(e.starts)throw Error("beforeMatch cannot be used with starts")
;const n=Object.assign({},e);Object.keys(e).forEach((t=>{delete e[t]
})),e.keywords=n.keywords,e.begin=h(n.beforeMatch,g(n.begin)),e.starts={
relevance:0,contains:[Object.assign(n,{endsParent:!0})]
},e.relevance=0,delete n.beforeMatch
},H=["of","and","for","in","not","or","if","then","parent","list","value"],C="keyword"
;function $(e,t,n=C){const i=Object.create(null)
;return"string"==typeof e?s(n,e.split(" ")):Array.isArray(e)?s(n,e):Object.keys(e).forEach((n=>{
Object.assign(i,$(e[n],t,n))})),i;function s(e,n){
t&&(n=n.map((e=>e.toLowerCase()))),n.forEach((t=>{const n=t.split("|")
;i[n[0]]=[e,U(n[0],n[1])]}))}}function U(e,t){
return t?Number(t):(e=>H.includes(e.toLowerCase()))(e)?0:1}const z={},W=e=>{
console.error(e)},X=(e,...t)=>{console.log("WARN: "+e,...t)},G=(e,t)=>{
z[`${e}/${t}`]||(console.log(`Deprecated as of ${e}. ${t}`),z[`${e}/${t}`]=!0)
},K=Error();function F(e,t,{key:n}){let i=0;const s=e[n],o={},r={}
;for(let e=1;e<=t.length;e++)r[e+i]=s[e],o[e+i]=!0,i+=p(t[e-1])
;e[n]=r,e[n]._emit=o,e[n]._multi=!0}function Z(e){(e=>{
e.scope&&"object"==typeof e.scope&&null!==e.scope&&(e.beginScope=e.scope,
delete e.scope)})(e),"string"==typeof e.beginScope&&(e.beginScope={
_wrap:e.beginScope}),"string"==typeof e.endScope&&(e.endScope={_wrap:e.endScope
}),(e=>{if(Array.isArray(e.begin)){
if(e.skip||e.excludeBegin||e.returnBegin)throw W("skip, excludeBegin, returnBegin not compatible with beginScope: {}"),
K
;if("object"!=typeof e.beginScope||null===e.beginScope)throw W("beginScope must be object"),
K;F(e,e.begin,{key:"beginScope"}),e.begin=m(e.begin,{joinWith:""})}})(e),(e=>{
if(Array.isArray(e.end)){
if(e.skip||e.excludeEnd||e.returnEnd)throw W("skip, excludeEnd, returnEnd not compatible with endScope: {}"),
K
;if("object"!=typeof e.endScope||null===e.endScope)throw W("endScope must be object"),
K;F(e,e.end,{key:"endScope"}),e.end=m(e.end,{joinWith:""})}})(e)}function V(e){
function t(t,n){
return RegExp(l(t),"m"+(e.case_insensitive?"i":"")+(e.unicodeRegex?"u":"")+(n?"g":""))
}class n{constructor(){
this.matchIndexes={},this.regexes=[],this.matchAt=1,this.position=0}
addRule(e,t){
t.position=this.position++,this.matchIndexes[this.matchAt]=t,this.regexes.push([t,e]),
this.matchAt+=p(e)+1}compile(){0===this.regexes.length&&(this.exec=()=>null)
;const e=this.regexes.map((e=>e[1]));this.matcherRe=t(m(e,{joinWith:"|"
}),!0),this.lastIndex=0}exec(e){this.matcherRe.lastIndex=this.lastIndex
;const t=this.matcherRe.exec(e);if(!t)return null
;const n=t.findIndex(((e,t)=>t>0&&void 0!==e)),i=this.matchIndexes[n]
;return t.splice(0,n),Object.assign(t,i)}}class s{constructor(){
this.rules=[],this.multiRegexes=[],
this.count=0,this.lastIndex=0,this.regexIndex=0}getMatcher(e){
if(this.multiRegexes[e])return this.multiRegexes[e];const t=new n
;return this.rules.slice(e).forEach((([e,n])=>t.addRule(e,n))),
t.compile(),this.multiRegexes[e]=t,t}resumingScanAtSamePosition(){
return 0!==this.regexIndex}considerAll(){this.regexIndex=0}addRule(e,t){
this.rules.push([e,t]),"begin"===t.type&&this.count++}exec(e){
const t=this.getMatcher(this.regexIndex);t.lastIndex=this.lastIndex
;let n=t.exec(e)
;if(this.resumingScanAtSamePosition())if(n&&n.index===this.lastIndex);else{
const t=this.getMatcher(0);t.lastIndex=this.lastIndex+1,n=t.exec(e)}
return n&&(this.regexIndex+=n.position+1,
this.regexIndex===this.count&&this.considerAll()),n}}
if(e.compilerExtensions||(e.compilerExtensions=[]),
e.contains&&e.contains.includes("self"))throw Error("ERR: contains `self` is not supported at the top-level of a language. See documentation.")
;return e.classNameAliases=i(e.classNameAliases||{}),function n(o,r){const a=o
;if(o.isCompiled)return a
;[I,B,Z,D].forEach((e=>e(o,r))),e.compilerExtensions.forEach((e=>e(o,r))),
o.__beforeBegin=null,[T,L,P].forEach((e=>e(o,r))),o.isCompiled=!0;let c=null
;return"object"==typeof o.keywords&&o.keywords.$pattern&&(o.keywords=Object.assign({},o.keywords),
c=o.keywords.$pattern,
delete o.keywords.$pattern),c=c||/\w+/,o.keywords&&(o.keywords=$(o.keywords,e.case_insensitive)),
a.keywordPatternRe=t(c,!0),
r&&(o.begin||(o.begin=/\B|\b/),a.beginRe=t(a.begin),o.end||o.endsWithParent||(o.end=/\B|\b/),
o.end&&(a.endRe=t(a.end)),
a.terminatorEnd=l(a.end)||"",o.endsWithParent&&r.terminatorEnd&&(a.terminatorEnd+=(o.end?"|":"")+r.terminatorEnd)),
o.illegal&&(a.illegalRe=t(o.illegal)),
o.contains||(o.contains=[]),o.contains=[].concat(...o.contains.map((e=>(e=>(e.variants&&!e.cachedVariants&&(e.cachedVariants=e.variants.map((t=>i(e,{
variants:null},t)))),e.cachedVariants?e.cachedVariants:q(e)?i(e,{
starts:e.starts?i(e.starts):null
}):Object.isFrozen(e)?i(e):e))("self"===e?o:e)))),o.contains.forEach((e=>{n(e,a)
})),o.starts&&n(o.starts,r),a.matcher=(e=>{const t=new s
;return e.contains.forEach((e=>t.addRule(e.begin,{rule:e,type:"begin"
}))),e.terminatorEnd&&t.addRule(e.terminatorEnd,{type:"end"
}),e.illegal&&t.addRule(e.illegal,{type:"illegal"}),t})(a),a}(e)}function q(e){
return!!e&&(e.endsWithParent||q(e.starts))}class J extends Error{
constructor(e,t){super(e),this.name="HTMLInjectionError",this.html=t}}
const Y=n,Q=i,ee=Symbol("nomatch"),te=n=>{
const i=Object.create(null),s=Object.create(null),o=[];let r=!0
;const a="Could not find the language '{}', did you forget to load/include a language module?",l={
disableAutodetect:!0,name:"Plain text",contains:[]};let p={
ignoreUnescapedHTML:!1,throwUnescapedHTML:!1,noHighlightRe:/^(no-?highlight)$/i,
languageDetectRe:/\blang(?:uage)?-([\w-]+)\b/i,classPrefix:"hljs-",
cssSelector:"pre code",languages:null,__emitter:c};function b(e){
return p.noHighlightRe.test(e)}function m(e,t,n){let i="",s=""
;"object"==typeof t?(i=e,
n=t.ignoreIllegals,s=t.language):(G("10.7.0","highlight(lang, code, ...args) has been deprecated."),
G("10.7.0","Please use highlight(code, options) instead.\nhttps://github.com/highlightjs/highlight.js/issues/2277"),
s=e,i=t),void 0===n&&(n=!0);const o={code:i,language:s};N("before:highlight",o)
;const r=o.result?o.result:E(o.language,o.code,n)
;return r.code=o.code,N("after:highlight",r),r}function E(e,n,s,o){
const c=Object.create(null);function l(){if(!N.keywords)return void M.addText(R)
;let e=0;N.keywordPatternRe.lastIndex=0;let t=N.keywordPatternRe.exec(R),n=""
;for(;t;){n+=R.substring(e,t.index)
;const s=_.case_insensitive?t[0].toLowerCase():t[0],o=(i=s,N.keywords[i]);if(o){
const[e,i]=o
;if(M.addText(n),n="",c[s]=(c[s]||0)+1,c[s]<=7&&(j+=i),e.startsWith("_"))n+=t[0];else{
const n=_.classNameAliases[e]||e;u(t[0],n)}}else n+=t[0]
;e=N.keywordPatternRe.lastIndex,t=N.keywordPatternRe.exec(R)}var i
;n+=R.substring(e),M.addText(n)}function g(){null!=N.subLanguage?(()=>{
if(""===R)return;let e=null;if("string"==typeof N.subLanguage){
if(!i[N.subLanguage])return void M.addText(R)
;e=E(N.subLanguage,R,!0,S[N.subLanguage]),S[N.subLanguage]=e._top
}else e=x(R,N.subLanguage.length?N.subLanguage:null)
;N.relevance>0&&(j+=e.relevance),M.__addSublanguage(e._emitter,e.language)
})():l(),R=""}function u(e,t){
""!==e&&(M.startScope(t),M.addText(e),M.endScope())}function d(e,t){let n=1
;const i=t.length-1;for(;n<=i;){if(!e._emit[n]){n++;continue}
const i=_.classNameAliases[e[n]]||e[n],s=t[n];i?u(s,i):(R=s,l(),R=""),n++}}
function h(e,t){
return e.scope&&"string"==typeof e.scope&&M.openNode(_.classNameAliases[e.scope]||e.scope),
e.beginScope&&(e.beginScope._wrap?(u(R,_.classNameAliases[e.beginScope._wrap]||e.beginScope._wrap),
R=""):e.beginScope._multi&&(d(e.beginScope,t),R="")),N=Object.create(e,{parent:{
value:N}}),N}function f(e,n,i){let s=((e,t)=>{const n=e&&e.exec(t)
;return n&&0===n.index})(e.endRe,i);if(s){if(e["on:end"]){const i=new t(e)
;e["on:end"](n,i),i.isMatchIgnored&&(s=!1)}if(s){
for(;e.endsParent&&e.parent;)e=e.parent;return e}}
if(e.endsWithParent)return f(e.parent,n,i)}function b(e){
return 0===N.matcher.regexIndex?(R+=e[0],1):(T=!0,0)}function m(e){
const t=e[0],i=n.substring(e.index),s=f(N,e,i);if(!s)return ee;const o=N
;N.endScope&&N.endScope._wrap?(g(),
u(t,N.endScope._wrap)):N.endScope&&N.endScope._multi?(g(),
d(N.endScope,e)):o.skip?R+=t:(o.returnEnd||o.excludeEnd||(R+=t),
g(),o.excludeEnd&&(R=t));do{
N.scope&&M.closeNode(),N.skip||N.subLanguage||(j+=N.relevance),N=N.parent
}while(N!==s.parent);return s.starts&&h(s.starts,e),o.returnEnd?0:t.length}
let w={};function y(i,o){const a=o&&o[0];if(R+=i,null==a)return g(),0
;if("begin"===w.type&&"end"===o.type&&w.index===o.index&&""===a){
if(R+=n.slice(o.index,o.index+1),!r){const t=Error(`0 width match regex (${e})`)
;throw t.languageName=e,t.badRule=w.rule,t}return 1}
if(w=o,"begin"===o.type)return(e=>{
const n=e[0],i=e.rule,s=new t(i),o=[i.__beforeBegin,i["on:begin"]]
;for(const t of o)if(t&&(t(e,s),s.isMatchIgnored))return b(n)
;return i.skip?R+=n:(i.excludeBegin&&(R+=n),
g(),i.returnBegin||i.excludeBegin||(R=n)),h(i,e),i.returnBegin?0:n.length})(o)
;if("illegal"===o.type&&!s){
const e=Error('Illegal lexeme "'+a+'" for mode "'+(N.scope||"")+'"')
;throw e.mode=N,e}if("end"===o.type){const e=m(o);if(e!==ee)return e}
if("illegal"===o.type&&""===a)return 1
;if(I>1e5&&I>3*o.index)throw Error("potential infinite loop, way more iterations than matches")
;return R+=a,a.length}const _=O(e)
;if(!_)throw W(a.replace("{}",e)),Error('Unknown language: "'+e+'"')
;const v=V(_);let k="",N=o||v;const S={},M=new p.__emitter(p);(()=>{const e=[]
;for(let t=N;t!==_;t=t.parent)t.scope&&e.unshift(t.scope)
;e.forEach((e=>M.openNode(e)))})();let R="",j=0,A=0,I=0,T=!1;try{
if(_.__emitTokens)_.__emitTokens(n,M);else{for(N.matcher.considerAll();;){
I++,T?T=!1:N.matcher.considerAll(),N.matcher.lastIndex=A
;const e=N.matcher.exec(n);if(!e)break;const t=y(n.substring(A,e.index),e)
;A=e.index+t}y(n.substring(A))}return M.finalize(),k=M.toHTML(),{language:e,
value:k,relevance:j,illegal:!1,_emitter:M,_top:N}}catch(t){
if(t.message&&t.message.includes("Illegal"))return{language:e,value:Y(n),
illegal:!0,relevance:0,_illegalBy:{message:t.message,index:A,
context:n.slice(A-100,A+100),mode:t.mode,resultSoFar:k},_emitter:M};if(r)return{
language:e,value:Y(n),illegal:!1,relevance:0,errorRaised:t,_emitter:M,_top:N}
;throw t}}function x(e,t){t=t||p.languages||Object.keys(i);const n=(e=>{
const t={value:Y(e),illegal:!1,relevance:0,_top:l,_emitter:new p.__emitter(p)}
;return t._emitter.addText(e),t})(e),s=t.filter(O).filter(k).map((t=>E(t,e,!1)))
;s.unshift(n);const o=s.sort(((e,t)=>{
if(e.relevance!==t.relevance)return t.relevance-e.relevance
;if(e.language&&t.language){if(O(e.language).supersetOf===t.language)return 1
;if(O(t.language).supersetOf===e.language)return-1}return 0})),[r,a]=o,c=r
;return c.secondBest=a,c}function w(e){let t=null;const n=(e=>{
let t=e.className+" ";t+=e.parentNode?e.parentNode.className:""
;const n=p.languageDetectRe.exec(t);if(n){const t=O(n[1])
;return t||(X(a.replace("{}",n[1])),
X("Falling back to no-highlight mode for this block.",e)),t?n[1]:"no-highlight"}
return t.split(/\s+/).find((e=>b(e)||O(e)))})(e);if(b(n))return
;if(N("before:highlightElement",{el:e,language:n
}),e.dataset.highlighted)return void console.log("Element previously highlighted. To highlight again, first unset `dataset.highlighted`.",e)
;if(e.children.length>0&&(p.ignoreUnescapedHTML||(console.warn("One of your code blocks includes unescaped HTML. This is a potentially serious security risk."),
console.warn("https://github.com/highlightjs/highlight.js/wiki/security"),
console.warn("The element with unescaped HTML:"),
console.warn(e)),p.throwUnescapedHTML))throw new J("One of your code blocks includes unescaped HTML.",e.innerHTML)
;t=e;const i=t.textContent,o=n?m(i,{language:n,ignoreIllegals:!0}):x(i)
;e.innerHTML=o.value,e.dataset.highlighted="yes",((e,t,n)=>{const i=t&&s[t]||n
;e.classList.add("hljs"),e.classList.add("language-"+i)
})(e,n,o.language),e.result={language:o.language,re:o.relevance,
relevance:o.relevance},o.secondBest&&(e.secondBest={
language:o.secondBest.language,relevance:o.secondBest.relevance
}),N("after:highlightElement",{el:e,result:o,text:i})}let y=!1;function _(){
"loading"!==document.readyState?document.querySelectorAll(p.cssSelector).forEach(w):y=!0
}function O(e){return e=(e||"").toLowerCase(),i[e]||i[s[e]]}
function v(e,{languageName:t}){"string"==typeof e&&(e=[e]),e.forEach((e=>{
s[e.toLowerCase()]=t}))}function k(e){const t=O(e)
;return t&&!t.disableAutodetect}function N(e,t){const n=e;o.forEach((e=>{
e[n]&&e[n](t)}))}
"undefined"!=typeof window&&window.addEventListener&&window.addEventListener("DOMContentLoaded",(()=>{
y&&_()}),!1),Object.assign(n,{highlight:m,highlightAuto:x,highlightAll:_,
highlightElement:w,
highlightBlock:e=>(G("10.7.0","highlightBlock will be removed entirely in v12.0"),
G("10.7.0","Please use highlightElement now."),w(e)),configure:e=>{p=Q(p,e)},
initHighlighting:()=>{
_(),G("10.6.0","initHighlighting() deprecated. Use highlightAll() now.")},
initHighlightingOnLoad:()=>{
_(),G("10.6.0","initHighlightingOnLoad() deprecated. Use highlightAll() now.")
},registerLanguage:(e,t)=>{let s=null;try{s=t(n)}catch(t){
if(W("Language definition for '{}' could not be registered.".replace("{}",e)),
!r)throw t;W(t),s=l}
s.name||(s.name=e),i[e]=s,s.rawDefinition=t.bind(null,n),s.aliases&&v(s.aliases,{
languageName:e})},unregisterLanguage:e=>{delete i[e]
;for(const t of Object.keys(s))s[t]===e&&delete s[t]},
listLanguages:()=>Object.keys(i),getLanguage:O,registerAliases:v,
autoDetection:k,inherit:Q,addPlugin:e=>{(e=>{
e["before:highlightBlock"]&&!e["before:highlightElement"]&&(e["before:highlightElement"]=t=>{
e["before:highlightBlock"](Object.assign({block:t.el},t))
}),e["after:highlightBlock"]&&!e["after:highlightElement"]&&(e["after:highlightElement"]=t=>{
e["after:highlightBlock"](Object.assign({block:t.el},t))})})(e),o.push(e)},
removePlugin:e=>{const t=o.indexOf(e);-1!==t&&o.splice(t,1)}}),n.debugMode=()=>{
r=!1},n.safeMode=()=>{r=!0},n.versionString="11.9.0",n.regex={concat:h,
lookahead:g,either:f,optional:d,anyNumberOfTimes:u}
;for(const t in j)"object"==typeof j[t]&&e(j[t]);return Object.assign(n,j),n
},ne=te({});return ne.newInstance=()=>te({}),ne}()
;"object"==typeof exports&&"undefined"!=typeof module&&(module.exports=hljs);/*! `bash` grammar compiled for Highlight.js 11.9.0 */
// --- `bash` grammar for Highlight.js 11.9.0 (minified, machine-generated) ---
// Self-contained IIFE that builds the Bash highlighting mode: ${...} variable
// expansion, $(...) command substitution, <<- heredocs, single/double-quoted
// strings, $(( )) arithmetic, shebang detection, function definitions, and the
// keyword/literal/built-in word lists. It then registers the mode via
// hljs.registerLanguage("bash", ...); "sh" is declared as an alias.
// NOTE(review): do not hand-edit; regenerate from the highlight.js build
// (custom download with the needed languages enabled — see mdbook.yml).
(()=>{var e=(()=>{"use strict";return e=>{const s=e.regex,t={},n={begin:/\$\{/,
end:/\}/,contains:["self",{begin:/:-/,contains:[t]}]};Object.assign(t,{
className:"variable",variants:[{
begin:s.concat(/\$[\w\d#@][\w\d_]*/,"(?![\\w\\d])(?![$])")},n]});const a={
className:"subst",begin:/\$\(/,end:/\)/,contains:[e.BACKSLASH_ESCAPE]},i={
begin:/<<-?\s*(?=\w+)/,starts:{contains:[e.END_SAME_AS_BEGIN({begin:/(\w+)/,
end:/(\w+)/,className:"string"})]}},c={className:"string",begin:/"/,end:/"/,
contains:[e.BACKSLASH_ESCAPE,t,a]};a.contains.push(c);const o={begin:/\$?\(\(/,
end:/\)\)/,contains:[{begin:/\d+#[0-9a-f]+/,className:"number"},e.NUMBER_MODE,t]
},r=e.SHEBANG({binary:"(fish|bash|zsh|sh|csh|ksh|tcsh|dash|scsh)",relevance:10
}),l={className:"function",begin:/\w[\w\d_]*\s*\(\s*\)\s*\{/,returnBegin:!0,
contains:[e.inherit(e.TITLE_MODE,{begin:/\w[\w\d_]*/})],relevance:0};return{
name:"Bash",aliases:["sh"],keywords:{$pattern:/\b[a-z][a-z0-9._-]+\b/,
keyword:["if","then","else","elif","fi","for","while","until","in","do","done","case","esac","function","select"],
literal:["true","false"],
built_in:["break","cd","continue","eval","exec","exit","export","getopts","hash","pwd","readonly","return","shift","test","times","trap","umask","unset","alias","bind","builtin","caller","command","declare","echo","enable","help","let","local","logout","mapfile","printf","read","readarray","source","type","typeset","ulimit","unalias","set","shopt","autoload","bg","bindkey","bye","cap","chdir","clone","comparguments","compcall","compctl","compdescribe","compfiles","compgroups","compquote","comptags","comptry","compvalues","dirs","disable","disown","echotc","echoti","emulate","fc","fg","float","functions","getcap","getln","history","integer","jobs","kill","limit","log","noglob","popd","print","pushd","pushln","rehash","sched","setcap","setopt","stat","suspend","ttyctl","unfunction","unhash","unlimit","unsetopt","vared","wait","whence","where","which","zcompile","zformat","zftp","zle","zmodload","zparseopts","zprof","zpty","zregexparse","zsocket","zstyle","ztcp","chcon","chgrp","chown","chmod","cp","dd","df","dir","dircolors","ln","ls","mkdir","mkfifo","mknod","mktemp","mv","realpath","rm","rmdir","shred","sync","touch","truncate","vdir","b2sum","base32","base64","cat","cksum","comm","csplit","cut","expand","fmt","fold","head","join","md5sum","nl","numfmt","od","paste","ptx","pr","sha1sum","sha224sum","sha256sum","sha384sum","sha512sum","shuf","sort","split","sum","tac","tail","tr","tsort","unexpand","uniq","wc","arch","basename","chroot","date","dirname","du","echo","env","expr","factor","groups","hostid","id","link","logname","nice","nohup","nproc","pathchk","pinky","printenv","printf","pwd","readlink","runcon","seq","sleep","stat","stdbuf","stty","tee","test","timeout","tty","uname","unlink","uptime","users","who","whoami","yes"]
},contains:[r,e.SHEBANG(),l,o,e.HASH_COMMENT_MODE,i,{match:/(\/[a-z._-]+)+/},c,{
match:/\\"/},{className:"string",begin:/'/,end:/'/},{match:/\\'/},t]}}})()
;hljs.registerLanguage("bash",e)})();/*! `lisp` grammar compiled for Highlight.js 11.9.0 */
// --- `lisp` grammar for Highlight.js 11.9.0 (minified, machine-generated) ---
// Self-contained IIFE that builds the Lisp highlighting mode: t/nil literals,
// numbers (decimal plus #b/#o/#x radix and #c complex forms), quoted strings,
// ";" line comments, *...* delimited names, [:&]-prefixed symbols, quoted
// lists ('(...) / `(...) / (quote ...)) and plain (...) call forms with a
// highlighted head name. It then registers the mode via
// hljs.registerLanguage("lisp", ...). This grammar is why the docs use a
// custom highlight.js build (see mdbook.yml — Emacs Lisp highlighting).
// NOTE(review): do not hand-edit; regenerate from the highlight.js build.
(()=>{var e=(()=>{"use strict";return e=>{
const n="[a-zA-Z_\\-+\\*\\/<=>][a-zA-Z0-9_\\-+*\\/<=>!]*",a="\\|[^]*?\\|",i="(-|\\+)?\\d+(\\.\\d+|\\/\\d+)?((d|e|f|l|s|D|E|F|L|S)(\\+|-)?\\d+)?",s={
className:"literal",begin:"\\b(t{1}|nil)\\b"},l={className:"number",variants:[{
begin:i,relevance:0},{begin:"#(b|B)[0-1]+(/[0-1]+)?"},{
begin:"#(o|O)[0-7]+(/[0-7]+)?"},{begin:"#(x|X)[0-9a-fA-F]+(/[0-9a-fA-F]+)?"},{
begin:"#(c|C)\\("+i+" +"+i,end:"\\)"}]},b=e.inherit(e.QUOTE_STRING_MODE,{
illegal:null}),g=e.COMMENT(";","$",{relevance:0}),r={begin:"\\*",end:"\\*"},t={
className:"symbol",begin:"[:&]"+n},c={begin:n,relevance:0},d={begin:a},o={
contains:[l,b,r,t,{begin:"\\(",end:"\\)",contains:["self",s,b,l,c]},c],
variants:[{begin:"['`]\\(",end:"\\)"},{begin:"\\(quote ",end:"\\)",keywords:{
name:"quote"}},{begin:"'"+a}]},v={variants:[{begin:"'"+n},{
begin:"#'"+n+"(::"+n+")*"}]},m={begin:"\\(\\s*",end:"\\)"},u={endsWithParent:!0,
relevance:0};return m.contains=[{className:"name",variants:[{begin:n,relevance:0
},{begin:a}]},u],u.contains=[o,v,m,s,l,b,g,r,t,d,c],{name:"Lisp",illegal:/\S/,
contains:[l,e.SHEBANG(),s,b,g,o,v,m,c]}}})();hljs.registerLanguage("lisp",e)
})();/*! `sql` grammar compiled for Highlight.js 11.9.0 */
// --- `sql` grammar for Highlight.js 11.9.0 (minified, machine-generated) ---
// Self-contained IIFE that builds the case-insensitive SQL highlighting mode:
// "--" line comments, type/literal/built-in word lists, multi-word phrase
// keywords ("create table", "primary key", ...), built-in function calls,
// @variables, '...' strings with doubled-quote escapes, "..." identifiers
// with doubled-quote escapes, C-style numbers and /* */ block comments, and
// an operator class. Short keywords (< 3 chars) get their relevance zeroed.
// It then registers the mode via hljs.registerLanguage("sql", ...).
// NOTE(review): do not hand-edit; regenerate from the highlight.js build.
(()=>{var e=(()=>{"use strict";return e=>{
const r=e.regex,t=e.COMMENT("--","$"),n=["true","false","unknown"],a=["bigint","binary","blob","boolean","char","character","clob","date","dec","decfloat","decimal","float","int","integer","interval","nchar","nclob","national","numeric","real","row","smallint","time","timestamp","varchar","varying","varbinary"],i=["abs","acos","array_agg","asin","atan","avg","cast","ceil","ceiling","coalesce","corr","cos","cosh","count","covar_pop","covar_samp","cume_dist","dense_rank","deref","element","exp","extract","first_value","floor","json_array","json_arrayagg","json_exists","json_object","json_objectagg","json_query","json_table","json_table_primitive","json_value","lag","last_value","lead","listagg","ln","log","log10","lower","max","min","mod","nth_value","ntile","nullif","percent_rank","percentile_cont","percentile_disc","position","position_regex","power","rank","regr_avgx","regr_avgy","regr_count","regr_intercept","regr_r2","regr_slope","regr_sxx","regr_sxy","regr_syy","row_number","sin","sinh","sqrt","stddev_pop","stddev_samp","substring","substring_regex","sum","tan","tanh","translate","translate_regex","treat","trim","trim_array","unnest","upper","value_of","var_pop","var_samp","width_bucket"],s=["create table","insert into","primary key","foreign key","not null","alter table","add constraint","grouping sets","on overflow","character set","respect nulls","ignore nulls","nulls first","nulls last","depth first","breadth 
first"],o=i,c=["abs","acos","all","allocate","alter","and","any","are","array","array_agg","array_max_cardinality","as","asensitive","asin","asymmetric","at","atan","atomic","authorization","avg","begin","begin_frame","begin_partition","between","bigint","binary","blob","boolean","both","by","call","called","cardinality","cascaded","case","cast","ceil","ceiling","char","char_length","character","character_length","check","classifier","clob","close","coalesce","collate","collect","column","commit","condition","connect","constraint","contains","convert","copy","corr","corresponding","cos","cosh","count","covar_pop","covar_samp","create","cross","cube","cume_dist","current","current_catalog","current_date","current_default_transform_group","current_path","current_role","current_row","current_schema","current_time","current_timestamp","current_path","current_role","current_transform_group_for_type","current_user","cursor","cycle","date","day","deallocate","dec","decimal","decfloat","declare","default","define","delete","dense_rank","deref","describe","deterministic","disconnect","distinct","double","drop","dynamic","each","element","else","empty","end","end_frame","end_partition","end-exec","equals","escape","every","except","exec","execute","exists","exp","external","extract","false","fetch","filter","first_value","float","floor","for","foreign","frame_row","free","from","full","function","fusion","get","global","grant","group","grouping","groups","having","hold","hour","identity","in","indicator","initial","inner","inout","insensitive","insert","int","integer","intersect","intersection","interval","into","is","join","json_array","json_arrayagg","json_exists","json_object","json_objectagg","json_query","json_table","json_table_primitive","json_value","lag","language","large","last_value","lateral","lead","leading","left","like","like_regex","listagg","ln","local","localtime","localtimestamp","log","log10","lower","match","match_number","match_recognize","matches","max"
,"member","merge","method","min","minute","mod","modifies","module","month","multiset","national","natural","nchar","nclob","new","no","none","normalize","not","nth_value","ntile","null","nullif","numeric","octet_length","occurrences_regex","of","offset","old","omit","on","one","only","open","or","order","out","outer","over","overlaps","overlay","parameter","partition","pattern","per","percent","percent_rank","percentile_cont","percentile_disc","period","portion","position","position_regex","power","precedes","precision","prepare","primary","procedure","ptf","range","rank","reads","real","recursive","ref","references","referencing","regr_avgx","regr_avgy","regr_count","regr_intercept","regr_r2","regr_slope","regr_sxx","regr_sxy","regr_syy","release","result","return","returns","revoke","right","rollback","rollup","row","row_number","rows","running","savepoint","scope","scroll","search","second","seek","select","sensitive","session_user","set","show","similar","sin","sinh","skip","smallint","some","specific","specifictype","sql","sqlexception","sqlstate","sqlwarning","sqrt","start","static","stddev_pop","stddev_samp","submultiset","subset","substring","substring_regex","succeeds","sum","symmetric","system","system_time","system_user","table","tablesample","tan","tanh","then","time","timestamp","timezone_hour","timezone_minute","to","trailing","translate","translate_regex","translation","treat","trigger","trim","trim_array","true","truncate","uescape","union","unique","unknown","unnest","update","upper","user","using","value","values","value_of","var_pop","var_samp","varbinary","varchar","varying","versioning","when","whenever","where","width_bucket","window","with","within","without","year","add","asc","collation","desc","final","first","last","view"].filter((e=>!i.includes(e))),l={
begin:r.concat(/\b/,r.either(...o),/\s*\(/),relevance:0,keywords:{built_in:o}}
;return{name:"SQL",case_insensitive:!0,illegal:/[{}]|<\//,keywords:{
$pattern:/\b[\w\.]+/,keyword:((e,{exceptions:r,when:t}={})=>{const n=t
;return r=r||[],e.map((e=>e.match(/\|\d+$/)||r.includes(e)?e:n(e)?e+"|0":e))
})(c,{when:e=>e.length<3}),literal:n,type:a,
built_in:["current_catalog","current_date","current_default_transform_group","current_path","current_role","current_schema","current_transform_group_for_type","current_user","session_user","system_time","system_user","current_time","localtime","current_timestamp","localtimestamp"]
},contains:[{begin:r.either(...s),relevance:0,keywords:{$pattern:/[\w\.]+/,
keyword:c.concat(s),literal:n,type:a}},{className:"type",
begin:r.either("double precision","large object","with timezone","without timezone")
},l,{className:"variable",begin:/@[a-z0-9][a-z0-9_]*/},{className:"string",
variants:[{begin:/'/,end:/'/,contains:[{begin:/''/}]}]},{begin:/"/,end:/"/,
contains:[{begin:/""/}]},e.C_NUMBER_MODE,e.C_BLOCK_COMMENT_MODE,t,{
className:"operator",begin:/[-+*/=%^~]|&&?|\|\|?|!=?|<(?:=>?|<|>)?|>[>=]?/,
relevance:0}]}}})();hljs.registerLanguage("sql",e)})();/*! `xml` grammar compiled for Highlight.js 11.9.0 */
(()=>{var e=(()=>{"use strict";return e=>{
const a=e.regex,n=a.concat(/[\p{L}_]/u,a.optional(/[\p{L}0-9_.-]*:/u),/[\p{L}0-9_.-]*/u),s={
className:"symbol",begin:/&[a-z]+;|[0-9]+;|[a-f0-9]+;/},t={begin:/\s/,
contains:[{className:"keyword",begin:/#?[a-z_][a-z1-9_-]+/,illegal:/\n/}]
},i=e.inherit(t,{begin:/\(/,end:/\)/}),c=e.inherit(e.APOS_STRING_MODE,{
className:"string"}),l=e.inherit(e.QUOTE_STRING_MODE,{className:"string"}),r={
endsWithParent:!0,illegal:/,relevance:0,contains:[{className:"attr",
begin:/[\p{L}0-9._:-]+/u,relevance:0},{begin:/=\s*/,relevance:0,contains:[{
className:"string",endsParent:!0,variants:[{begin:/"/,end:/"/,contains:[s]},{
begin:/'/,end:/'/,contains:[s]},{begin:/[^\s"'=<>`]+/}]}]}]};return{
name:"HTML, XML",
aliases:["html","xhtml","rss","atom","xjb","xsd","xsl","plist","wsf","svg"],
case_insensitive:!0,unicodeRegex:!0,contains:[{className:"meta",begin://,relevance:10,contains:[t,l,c,i,{begin:/\[/,end:/\]/,contains:[{
className:"meta",begin://,contains:[t,i,l,c]}]}]
},e.COMMENT(//,{relevance:10}),{begin://,
relevance:10},s,{className:"meta",end:/\?>/,variants:[{begin:/<\?xml/,
relevance:10,contains:[l]},{begin:/<\?[a-z][a-z0-9]+/}]},{className:"tag",
begin:/