37 Commits
jdbc ... 2.1.0

Author SHA1 Message Date
a13ca2d954 update dependencies 2021-02-26 23:00:41 +01:00
36ed678c5a Add disconnect method 2021-02-26 23:00:29 +01:00
c2c8b91dc5 Remove Mutable entities 2020-12-15 14:21:07 +01:00
9d208292a5 fix immutable entities 2020-11-26 09:29:13 +01:00
8bb3b61f2d Fix Generate UUID if null is defined as ID 2020-07-10 22:43:58 +02:00
5023f229ca Generate UUID if null is defined as ID 2020-07-10 17:24:35 +02:00
55b547db75 Fix migrations of Functions 2020-05-11 01:39:36 +02:00
377459f430 Add ShadowJar compatibility; Query file name can be defined with a comment
Add Migration and Query definition class
Add Docker DB for tests
2020-05-10 23:07:47 +02:00
df99bf4743 set JVM to 11 2020-05-10 22:05:55 +02:00
fc4fde490f config package 2020-03-25 15:26:19 +01:00
a6d2a4d34f add method function.perform 2020-03-25 00:30:54 +01:00
232158f85d Add serialise list 2020-03-15 20:18:55 +01:00
3c998edb3c add method "isLastPage" to pagination 2020-02-24 20:41:29 +01:00
92722b0fef Improve SQL log message 2020-02-19 16:46:20 +01:00
265c586198 change Idea config 2020-02-19 12:46:04 +01:00
440ecc8193 RepositoryI::requester is now readonly 2020-02-11 20:39:43 +01:00
1bae960139 Improve UuidEntityVersioning 2020-01-29 17:03:22 +01:00
1dec96551d Add Immutable Entities 2020-01-23 00:40:47 +01:00
dbf5fbcfc5 Add Serializable Parameter 2019-10-22 11:10:02 +02:00
8eeb1e9e9d update jasync 2019-10-11 22:05:15 +02:00
c4125f1429 Lint 2019-10-10 21:44:56 +02:00
21c05f72f5 remove EntityI.className && RepositoryI.entityName 2019-10-09 22:50:04 +02:00
2a738e0595 remove id in EntityI interface & remove useless EntitiesCollections 2019-10-02 11:16:28 +02:00
6641ed78e7 remove returns in function definition 2019-09-20 02:36:21 +02:00
8810c1f08d Drop function if already exists and conflict 2019-09-20 02:27:15 +02:00
88c45aaf77 fix return function definition 2019-09-16 23:12:28 +02:00
2f9f397f50 add EntityDeletedBy & EntityDeletedAt 2019-08-30 15:14:54 +02:00
215c054f27 rename interface UpdatedBy to EntityUpdatedBy 2019-08-27 22:57:49 +02:00
65d8e00902 add sendQuery(vararg) 2019-08-24 23:42:08 +02:00
2922c9309c set UuidEntity open 2019-08-24 23:41:45 +02:00
ae96bfd737 replace params begin by _ 2019-08-24 23:39:23 +02:00
e42f3b07cb clean entities 2019-08-07 22:45:40 +02:00
19e98250b7 change UuidEntityVersioning 2019-08-05 20:29:15 +02:00
f56c84c675 Fix function replaceArgsIntoSql 2019-08-04 22:24:24 +02:00
13f0f12c1b Fix migration if executed two or more times 2019-08-04 22:23:57 +02:00
01a0000b15 clean code 2019-08-04 22:23:12 +02:00
24349fc71f use sendQuery if no return is expected 2019-08-04 21:09:24 +02:00
66 changed files with 1727 additions and 586 deletions

3
.gitignore vendored
View File

@@ -2,4 +2,5 @@
/out/
/build/
.gradle
/var/log/
/var/log/
/.idea/*

3
.idea/.gitignore generated vendored
View File

@@ -6,4 +6,5 @@
/compiler.xml
/uiDesigner.xml
/dataSources.xml
/sonarlint/
/sonarlint/
/jarRepositories.xml

View File

@@ -1,7 +1,13 @@
<component name="ProjectCodeStyleConfiguration">
<code_scheme name="Project" version="173">
<JetCodeStyleSettings>
<option name="PACKAGES_TO_USE_STAR_IMPORTS">
<value>
<package name="java.util" withSubpackages="true" static="false" />
</value>
</option>
<option name="SPACE_BEFORE_EXTEND_COLON" value="false" />
<option name="NAME_COUNT_TO_USE_STAR_IMPORT" value="2147483647" />
<option name="CODE_STYLE_DEFAULTS" value="KOTLIN_OFFICIAL" />
</JetCodeStyleSettings>
<codeStyleSettings language="kotlin">

2
.idea/gradle.xml generated
View File

@@ -8,13 +8,11 @@
<option name="testRunner" value="PLATFORM" />
<option name="distributionType" value="DEFAULT_WRAPPED" />
<option name="externalProjectPath" value="$PROJECT_DIR$" />
<option name="gradleHome" value="$PROJECT_DIR$" />
<option name="modules">
<set>
<option value="$PROJECT_DIR$" />
</set>
</option>
<option name="useAutoImport" value="true" />
<option name="useQualifiedModuleNames" value="true" />
</GradleProjectSettings>
</option>

6
.idea/kotlinc.xml generated Normal file
View File

@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="Kotlin2JvmCompilerArguments">
<option name="jvmTarget" value="11" />
</component>
</project>

6
.idea/misc.xml generated
View File

@@ -1,7 +1,9 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ExternalStorageConfigurationManager" enabled="true" />
<component name="ProjectRootManager" version="2" languageLevel="JDK_11" project-jdk-name="11" project-jdk-type="JavaSDK">
<component name="FrameworkDetectionExcludesConfiguration">
<file type="web" url="file://$PROJECT_DIR$" />
</component>
<component name="ProjectRootManager" version="2" languageLevel="JDK_11" default="true" project-jdk-name="corretto-11" project-jdk-type="JavaSDK">
<output url="file://$PROJECT_DIR$/out" />
</component>
<component name="TaskProjectConfiguration">

8
.idea/modules.xml generated
View File

@@ -1,8 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/postgres-json.iml" filepath="$PROJECT_DIR$/.idea/postgres-json.iml" />
</modules>
</component>
</project>

View File

@@ -1,11 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="JAVA_MODULE" version="4">
<component name="NewModuleRootManager" inherit-compiler-output="true">
<exclude-output />
<content url="file://$MODULE_DIR$">
<excludeFolder url="file://$MODULE_DIR$/var" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
</module>

8
.idea/runConfigurations/Check.xml generated Normal file
View File

@@ -0,0 +1,8 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="Check" type="CompoundRunConfigurationType">
<toRun name="run DB" type="docker-deploy" />
<toRun name="Lint" type="GradleRunConfiguration" />
<toRun name="tests" type="JUnit" />
<method v="2" />
</configuration>
</component>

31
.idea/runConfigurations/Lint.xml generated Normal file
View File

@@ -0,0 +1,31 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="Lint" type="GradleRunConfiguration" factoryName="Gradle" singleton="false">
<ExternalSystemSettings>
<option name="executionName" />
<option name="externalProjectPath" value="$PROJECT_DIR$" />
<option name="externalSystemIdString" value="GRADLE" />
<option name="scriptParameters" value="" />
<option name="taskDescriptions">
<list />
</option>
<option name="taskNames">
<list>
<option value="ktlintCheck" />
</list>
</option>
<option name="vmOptions" value="" />
</ExternalSystemSettings>
<extension name="net.ashald.envfile">
<option name="IS_ENABLED" value="false" />
<option name="IS_SUBST" value="false" />
<option name="IS_PATH_MACRO_SUPPORTED" value="false" />
<option name="IS_IGNORE_MISSING_FILES" value="false" />
<option name="IS_ENABLE_EXPERIMENTAL_INTEGRATIONS" value="false" />
<ENTRIES>
<ENTRY IS_ENABLED="true" PARSER="runconfig" />
</ENTRIES>
</extension>
<GradleScriptDebugEnabled>true</GradleScriptDebugEnabled>
<method v="2" />
</configuration>
</component>

View File

@@ -0,0 +1,31 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="Publish To MavenLocal" type="GradleRunConfiguration" factoryName="Gradle" singleton="true">
<ExternalSystemSettings>
<option name="executionName" />
<option name="externalProjectPath" value="$PROJECT_DIR$" />
<option name="externalSystemIdString" value="GRADLE" />
<option name="scriptParameters" value="" />
<option name="taskDescriptions">
<list />
</option>
<option name="taskNames">
<list>
<option value="publishToMavenLocal" />
</list>
</option>
<option name="vmOptions" value="" />
</ExternalSystemSettings>
<extension name="net.ashald.envfile">
<option name="IS_ENABLED" value="false" />
<option name="IS_SUBST" value="false" />
<option name="IS_PATH_MACRO_SUPPORTED" value="false" />
<option name="IS_IGNORE_MISSING_FILES" value="false" />
<option name="IS_ENABLE_EXPERIMENTAL_INTEGRATIONS" value="false" />
<ENTRIES>
<ENTRY IS_ENABLED="true" PARSER="runconfig" />
</ENTRIES>
</extension>
<GradleScriptDebugEnabled>true</GradleScriptDebugEnabled>
<method v="2" />
</configuration>
</component>

View File

@@ -0,0 +1,34 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="Test and Publish To MavenLocal" type="GradleRunConfiguration" factoryName="Gradle" singleton="true">
<ExternalSystemSettings>
<option name="executionName" />
<option name="externalProjectPath" value="$PROJECT_DIR$" />
<option name="externalSystemIdString" value="GRADLE" />
<option name="scriptParameters" value="" />
<option name="taskDescriptions">
<list />
</option>
<option name="taskNames">
<list>
<option value="publishToMavenLocal" />
</list>
</option>
<option name="vmOptions" value="" />
</ExternalSystemSettings>
<extension name="net.ashald.envfile">
<option name="IS_ENABLED" value="false" />
<option name="IS_SUBST" value="false" />
<option name="IS_PATH_MACRO_SUPPORTED" value="false" />
<option name="IS_IGNORE_MISSING_FILES" value="false" />
<option name="IS_ENABLE_EXPERIMENTAL_INTEGRATIONS" value="false" />
<ENTRIES>
<ENTRY IS_ENABLED="true" PARSER="runconfig" />
</ENTRIES>
</extension>
<GradleScriptDebugEnabled>true</GradleScriptDebugEnabled>
<method v="2">
<option name="RunConfigurationTask" enabled="true" run_configuration_name="Lint" run_configuration_type="GradleRunConfiguration" />
<option name="RunConfigurationTask" enabled="true" run_configuration_name="tests" run_configuration_type="JUnit" />
</method>
</configuration>
</component>

View File

@@ -1,6 +1,7 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="tests" type="JUnit" factoryName="JUnit">
<module name="postgresjson.test" />
<configuration default="false" name="tests" type="JUnit" factoryName="JUnit" singleton="false">
<module name="postgres-json.test" />
<useClassPathOnly />
<extension name="net.ashald.envfile">
<option name="IS_ENABLED" value="false" />
<option name="IS_SUBST" value="false" />
@@ -12,7 +13,7 @@
</ENTRIES>
</extension>
<option name="ALTERNATIVE_JRE_PATH_ENABLED" value="true" />
<option name="ALTERNATIVE_JRE_PATH" value="11" />
<option name="ALTERNATIVE_JRE_PATH" value="corretto-11" />
<option name="MAIN_CLASS_NAME" value="" />
<option name="METHOD_NAME" value="" />
<option name="TEST_OBJECT" value="directory" />

View File

@@ -1,41 +1,68 @@
import org.jetbrains.kotlin.gradle.tasks.KotlinCompile
plugins {
jacoco
id("maven-publish")
id("org.jetbrains.kotlin.jvm") version "1.3.31"
kotlin("jvm") version "1.4.30"
id("org.jlleitschuh.gradle.ktlint") version "10.0.0"
id("org.owasp.dependencycheck") version "6.1.1"
id("fr.coppernic.versioning") version "3.2.1"
}
apply(plugin = "kotlin")
group = "fr.postgresjson"
version = "0.1"
group = "com.github.flecomte"
version = versioning.info.tag
repositories {
mavenCentral()
jcenter()
}
tasks.withType<KotlinCompile> {
kotlinOptions {
jvmTarget = "11"
sourceCompatibility = "11"
targetCompatibility = "11"
}
}
dependencies {
implementation("org.jetbrains.kotlin:kotlin-stdlib-jdk8")
implementation("org.jetbrains.kotlin:kotlin-reflect:1.3.31")
implementation("com.fasterxml.jackson.module:jackson-module-kotlin:2.9.9")
implementation("com.fasterxml.jackson.datatype:jackson-datatype-joda:2.9.9")
implementation("com.github.jasync-sql:jasync-postgresql:0.9.53")
implementation("org.slf4j:slf4j-api:1.7.26")
implementation("org.jetbrains.kotlin:kotlin-reflect:1.4.31")
implementation("com.fasterxml.jackson.module:jackson-module-kotlin:2.12.1")
implementation("com.fasterxml.jackson.datatype:jackson-datatype-joda:2.12.1")
implementation("com.github.jasync-sql:jasync-postgresql:1.1.7")
implementation("org.slf4j:slf4j-api:1.7.30")
testImplementation("ch.qos.logback:logback-classic:1.2.3")
testImplementation("ch.qos.logback:logback-core:1.2.3")
testImplementation("io.mockk:mockk:1.9")
testImplementation("org.junit.jupiter:junit-jupiter:5.4.2")
testImplementation("org.amshove.kluent:kluent:1.47")
testImplementation("io.mockk:mockk:1.10.6")
testImplementation("org.junit.jupiter:junit-jupiter:5.7.1")
testImplementation("org.amshove.kluent:kluent:1.65")
}
val sourcesJar by tasks.creating(Jar::class) {
archiveClassifier.set("sources")
from(sourceSets.getByName("main").allSource)
}
publishing {
publications {
create<MavenPublication>("maven") {
groupId = "fr.postgresjson"
artifactId = "postgresjson"
version = "0.1"
from(components["java"])
repositories {
maven {
name = "postgres-json"
url = uri("https://maven.pkg.github.com/flecomte/postgres-json")
credentials {
username = System.getenv("GITHUB_USERNAME")
password = System.getenv("GITHUB_TOKEN")
}
}
}
}
publications {
create<MavenPublication>("postgres-json") {
from(components["java"])
artifact(sourcesJar)
}
}
}

15
docker-compose.yml Normal file
View File

@@ -0,0 +1,15 @@
# To execute this docker-compose yml file use docker-compose -f <file_name> up
# Add the "-d" flag at the end for detached execution
version: '3.7'
services:
db:
container_name: postgres_json
build:
context: docker/postgresql
restart: always
ports:
- 5555:5432
environment:
POSTGRES_DB: json_test
POSTGRES_USER: test
POSTGRES_PASSWORD: test

View File

@@ -0,0 +1,9 @@
FROM postgres:11
COPY postgresql.conf /tmp/postgresql.conf
COPY extension.sh /docker-entrypoint-initdb.d/000-extension.sh
COPY setup.sh /docker-entrypoint-initdb.d/100-setup.sh
ENTRYPOINT ["docker-entrypoint.sh"]
EXPOSE 5432
CMD ["postgres"]

View File

@@ -0,0 +1,7 @@
#!/usr/bin/env bash
set -e
psql -v ON_ERROR_STOP=1 --username "test" --dbname "json_test" <<-EOSQL
create extension if not exists plpgsql;
create extension if not exists "uuid-ossp";
EOSQL

View File

@@ -0,0 +1,668 @@
# -----------------------------
# PostgreSQL configuration file
# -----------------------------
#
# This file consists of lines of the form:
#
# name = value
#
# (The "=" is optional.) Whitespace may be used. Comments are introduced with
# "#" anywhere on a line. The complete list of parameter names and allowed
# values can be found in the PostgreSQL documentation.
#
# The commented-out settings shown in this file represent the default values.
# Re-commenting a setting is NOT sufficient to revert it to the default value;
# you need to reload the server.
#
# This file is read on server startup and when the server receives a SIGHUP
# signal. If you edit the file on a running system, you have to SIGHUP the
# server for the changes to take effect, run "pg_ctl reload", or execute
# "SELECT pg_reload_conf()". Some parameters, which are marked below,
# require a server shutdown and restart to take effect.
#
# Any parameter can also be given as a command-line option to the server, e.g.,
# "postgres -c log_connections=on". Some parameters can be changed at run time
# with the "SET" SQL command.
#
# Memory units: kB = kilobytes Time units: ms = milliseconds
# MB = megabytes s = seconds
# GB = gigabytes min = minutes
# TB = terabytes h = hours
# d = days
#------------------------------------------------------------------------------
# FILE LOCATIONS
#------------------------------------------------------------------------------
# The default values of these variables are driven from the -D command-line
# option or PGDATA environment variable, represented here as ConfigDir.
#data_directory = 'ConfigDir' # use data in another directory
# (change requires restart)
#hba_file = 'ConfigDir/pg_hba.conf' # host-based authentication file
# (change requires restart)
#ident_file = 'ConfigDir/pg_ident.conf' # ident configuration file
# (change requires restart)
# If external_pid_file is not explicitly set, no extra PID file is written.
#external_pid_file = '' # write an extra PID file
# (change requires restart)
#------------------------------------------------------------------------------
# CONNECTIONS AND AUTHENTICATION
#------------------------------------------------------------------------------
# - Connection Settings -
listen_addresses = '*'
# comma-separated list of addresses;
# defaults to 'localhost'; use '*' for all
# (change requires restart)
#port = 5432 # (change requires restart)
#max_connections = 100 # (change requires restart)
#superuser_reserved_connections = 3 # (change requires restart)
#unix_socket_directories = '/tmp' # comma-separated list of directories
# (change requires restart)
#unix_socket_group = '' # (change requires restart)
#unix_socket_permissions = 0777 # begin with 0 to use octal notation
# (change requires restart)
#bonjour = off # advertise server via Bonjour
# (change requires restart)
#bonjour_name = '' # defaults to the computer name
# (change requires restart)
# - Security and Authentication -
#authentication_timeout = 1min # 1s-600s
#ssl = off
#ssl_ciphers = 'HIGH:MEDIUM:+3DES:!aNULL' # allowed SSL ciphers
#ssl_prefer_server_ciphers = on
#ssl_ecdh_curve = 'prime256v1'
#ssl_dh_params_file = ''
#ssl_cert_file = 'server.crt'
#ssl_key_file = 'server.key'
#ssl_ca_file = ''
#ssl_crl_file = ''
#password_encryption = md5 # md5 or scram-sha-256
#db_user_namespace = off
#row_security = on
# GSSAPI using Kerberos
#krb_server_keyfile = ''
#krb_caseins_users = off
# - TCP Keepalives -
# see "man 7 tcp" for details
#tcp_keepalives_idle = 0 # TCP_KEEPIDLE, in seconds;
# 0 selects the system default
#tcp_keepalives_interval = 0 # TCP_KEEPINTVL, in seconds;
# 0 selects the system default
#tcp_keepalives_count = 0 # TCP_KEEPCNT;
# 0 selects the system default
#------------------------------------------------------------------------------
# RESOURCE USAGE (except WAL)
#------------------------------------------------------------------------------
# - Memory -
shared_buffers = 1GB # min 128kB
# (change requires restart)
#huge_pages = try # on, off, or try
# (change requires restart)
#temp_buffers = 8MB # min 800kB
#max_prepared_transactions = 0 # zero disables the feature
# (change requires restart)
# Caution: it is not advisable to set max_prepared_transactions nonzero unless
# you actively intend to use prepared transactions.
work_mem = 256MB # min 64kB
#maintenance_work_mem = 64MB # min 1MB
#replacement_sort_tuples = 150000 # limits use of replacement selection sort
#autovacuum_work_mem = -1 # min 1MB, or -1 to use maintenance_work_mem
#max_stack_depth = 2MB # min 100kB
#dynamic_shared_memory_type = posix # the default is the first option
# supported by the operating system:
# posix
# sysv
# windows
# mmap
# use none to disable dynamic shared memory
# (change requires restart)
# - Disk -
#temp_file_limit = -1 # limits per-process temp file space
# in kB, or -1 for no limit
# - Kernel Resource Usage -
#max_files_per_process = 1000 # min 25
# (change requires restart)
shared_preload_libraries = 'pg_stat_statements'
pg_stat_statements.max = 10000
pg_stat_statements.track = all
# - Cost-Based Vacuum Delay -
#vacuum_cost_delay = 0 # 0-100 milliseconds
#vacuum_cost_page_hit = 1 # 0-10000 credits
#vacuum_cost_page_miss = 10 # 0-10000 credits
#vacuum_cost_page_dirty = 20 # 0-10000 credits
#vacuum_cost_limit = 200 # 1-10000 credits
# - Background Writer -
#bgwriter_delay = 200ms # 10-10000ms between rounds
#bgwriter_lru_maxpages = 100 # 0-1000 max buffers written/round
#bgwriter_lru_multiplier = 2.0 # 0-10.0 multiplier on buffers scanned/round
#bgwriter_flush_after = 0 # measured in pages, 0 disables
# - Asynchronous Behavior -
effective_io_concurrency = 100 # 1-1000; 0 disables prefetching
#max_worker_processes = 8 # (change requires restart)
max_parallel_workers_per_gather = 4 # taken from max_parallel_workers
#max_parallel_workers = 8 # maximum number of max_worker_processes that
# can be used in parallel queries
#old_snapshot_threshold = -1 # 1min-60d; -1 disables; 0 is immediate
# (change requires restart)
#backend_flush_after = 0 # measured in pages, 0 disables
#------------------------------------------------------------------------------
# WRITE AHEAD LOG
#------------------------------------------------------------------------------
# - Settings -
#wal_level = replica # minimal, replica, or logical
# (change requires restart)
#fsync = on # flush data to disk for crash safety
# (turning this off can cause
# unrecoverable data corruption)
#synchronous_commit = on # synchronization level;
# off, local, remote_write, remote_apply, or on
#wal_sync_method = fsync # the default is the first option
# supported by the operating system:
# open_datasync
# fdatasync (default on Linux)
# fsync
# fsync_writethrough
# open_sync
#full_page_writes = on # recover from partial page writes
#wal_compression = off # enable compression of full-page writes
#wal_log_hints = off # also do full page writes of non-critical updates
# (change requires restart)
#wal_buffers = -1 # min 32kB, -1 sets based on shared_buffers
# (change requires restart)
#wal_writer_delay = 200ms # 1-10000 milliseconds
#wal_writer_flush_after = 1MB # measured in pages, 0 disables
#commit_delay = 0 # range 0-100000, in microseconds
#commit_siblings = 5 # range 1-1000
# - Checkpoints -
#checkpoint_timeout = 5min # range 30s-1d
#max_wal_size = 1GB
#min_wal_size = 80MB
#checkpoint_completion_target = 0.5 # checkpoint target duration, 0.0 - 1.0
#checkpoint_flush_after = 0 # measured in pages, 0 disables
#checkpoint_warning = 30s # 0 disables
# - Archiving -
#archive_mode = off # enables archiving; off, on, or always
# (change requires restart)
#archive_command = '' # command to use to archive a logfile segment
# placeholders: %p = path of file to archive
# %f = file name only
# e.g. 'test ! -f /mnt/server/archivedir/%f && cp %p /mnt/server/archivedir/%f'
#archive_timeout = 0 # force a logfile segment switch after this
# number of seconds; 0 disables
#------------------------------------------------------------------------------
# REPLICATION
#------------------------------------------------------------------------------
# - Sending Server(s) -
# Set these on the master and on any standby that will send replication data.
#max_wal_senders = 10 # max number of walsender processes
# (change requires restart)
#wal_keep_segments = 0 # in logfile segments, 16MB each; 0 disables
#wal_sender_timeout = 60s # in milliseconds; 0 disables
#max_replication_slots = 10 # max number of replication slots
# (change requires restart)
#track_commit_timestamp = off # collect timestamp of transaction commit
# (change requires restart)
# - Master Server -
# These settings are ignored on a standby server.
#synchronous_standby_names = '' # standby servers that provide sync rep
# method to choose sync standbys, number of sync standbys,
# and comma-separated list of application_name
# from standby(s); '*' = all
#vacuum_defer_cleanup_age = 0 # number of xacts by which cleanup is delayed
# - Standby Servers -
# These settings are ignored on a master server.
#hot_standby = on # "off" disallows queries during recovery
# (change requires restart)
#max_standby_archive_delay = 30s # max delay before canceling queries
# when reading WAL from archive;
# -1 allows indefinite delay
#max_standby_streaming_delay = 30s # max delay before canceling queries
# when reading streaming WAL;
# -1 allows indefinite delay
#wal_receiver_status_interval = 10s # send replies at least this often
# 0 disables
#hot_standby_feedback = off # send info from standby to prevent
# query conflicts
#wal_receiver_timeout = 60s # time that receiver waits for
# communication from master
# in milliseconds; 0 disables
#wal_retrieve_retry_interval = 5s # time to wait before retrying to
# retrieve WAL after a failed attempt
# - Subscribers -
# These settings are ignored on a publisher.
#max_logical_replication_workers = 4 # taken from max_worker_processes
# (change requires restart)
#max_sync_workers_per_subscription = 2 # taken from max_logical_replication_workers
#------------------------------------------------------------------------------
# QUERY TUNING
#------------------------------------------------------------------------------
# - Planner Method Configuration -
#enable_bitmapscan = on
#enable_hashagg = on
#enable_hashjoin = on
#enable_indexscan = on
#enable_indexonlyscan = on
#enable_material = on
#enable_mergejoin = on
#enable_nestloop = on
#enable_seqscan = on
#enable_sort = on
#enable_tidscan = on
# - Planner Cost Constants -
#seq_page_cost = 1.0 # measured on an arbitrary scale
#random_page_cost = 4.0 # same scale as above
#cpu_tuple_cost = 0.01 # same scale as above
#cpu_index_tuple_cost = 0.005 # same scale as above
#cpu_operator_cost = 0.0025 # same scale as above
#parallel_tuple_cost = 0.1 # same scale as above
#parallel_setup_cost = 1000.0 # same scale as above
#min_parallel_table_scan_size = 8MB
#min_parallel_index_scan_size = 512kB
#effective_cache_size = 4GB
# - Genetic Query Optimizer -
#geqo = on
#geqo_threshold = 12
#geqo_effort = 5 # range 1-10
#geqo_pool_size = 0 # selects default based on effort
#geqo_generations = 0 # selects default based on effort
#geqo_selection_bias = 2.0 # range 1.5-2.0
#geqo_seed = 0.0 # range 0.0-1.0
# - Other Planner Options -
#default_statistics_target = 100 # range 1-10000
#constraint_exclusion = partition # on, off, or partition
#cursor_tuple_fraction = 0.1 # range 0.0-1.0
#from_collapse_limit = 8
#join_collapse_limit = 8 # 1 disables collapsing of explicit
# JOIN clauses
#force_parallel_mode = off
#------------------------------------------------------------------------------
# ERROR REPORTING AND LOGGING
#------------------------------------------------------------------------------
# - Where to Log -
#log_destination = 'stderr' # Valid values are combinations of
# stderr, csvlog, syslog, and eventlog,
# depending on platform. csvlog
# requires logging_collector to be on.
# This is used when logging to stderr:
logging_collector = on
# Enable capturing of stderr and csvlog
# into log files. Required to be on for
# csvlogs.
# (change requires restart)
# These are only used if logging_collector is on:
log_directory = '/var/log/postgresql'
# directory where log files are written,
# can be absolute or relative to PGDATA
log_filename = 'postgresql-%Y-%m-%d_%H%M%S.log'
# log file name pattern,
# can include strftime() escapes
#log_file_mode = 0600 # creation mode for log files,
# begin with 0 to use octal notation
log_file_mode = 0660
#log_truncate_on_rotation = off # If on, an existing log file with the
# same name as the new log file will be
# truncated rather than appended to.
# But such truncation only occurs on
# time-driven rotation, not on restarts
# or size-driven rotation. Default is
# off, meaning append to existing files
# in all cases.
log_rotation_age = 1d
# Automatic rotation of logfiles will
# happen after that time. 0 disables.
log_rotation_size = 100MB
# Automatic rotation of logfiles will
# happen after that much log output.
# 0 disables.
# These are relevant when logging to syslog:
#syslog_facility = 'LOCAL0'
#syslog_ident = 'postgres'
#syslog_sequence_numbers = on
#syslog_split_messages = on
# This is only relevant when logging to eventlog (win32):
# (change requires restart)
#event_source = 'PostgreSQL'
# - When to Log -
#client_min_messages = notice # values in order of decreasing detail:
# debug5
# debug4
# debug3
# debug2
# debug1
# log
# notice
# warning
# error
#log_min_messages = warning # values in order of decreasing detail:
# debug5
# debug4
# debug3
# debug2
# debug1
# info
# notice
# warning
# error
# log
# fatal
# panic
#log_min_error_statement = error # values in order of decreasing detail:
# debug5
# debug4
# debug3
# debug2
# debug1
# info
# notice
# warning
# error
# log
# fatal
# panic (effectively off)
#log_min_duration_statement = -1 # -1 is disabled, 0 logs all statements
# and their durations, > 0 logs only
# statements running at least this number
# of milliseconds
# - What to Log -
#debug_print_parse = off
#debug_print_rewritten = off
#debug_print_plan = off
#debug_pretty_print = on
#log_checkpoints = off
#log_connections = off
#log_disconnections = off
#log_duration = off
#log_error_verbosity = default # terse, default, or verbose messages
#log_hostname = off
#log_line_prefix = '%m [%p] ' # special values:
# %a = application name
# %u = user name
# %d = database name
# %r = remote host and port
# %h = remote host
# %p = process ID
# %t = timestamp without milliseconds
# %m = timestamp with milliseconds
# %n = timestamp with milliseconds (as a Unix epoch)
# %i = command tag
# %e = SQL state
# %c = session ID
# %l = session line number
# %s = session start timestamp
# %v = virtual transaction ID
# %x = transaction ID (0 if none)
# %q = stop here in non-session
# processes
# %% = '%'
# e.g. '<%u%%%d> '
#log_lock_waits = off # log lock waits >= deadlock_timeout
#log_statement = 'none' # none, ddl, mod, all
#log_replication_commands = off
#log_temp_files = -1 # log temporary files equal or larger
# than the specified size in kilobytes;
# -1 disables, 0 logs all temp files
#log_timezone = 'GMT'
# - Process Title -
#cluster_name = '' # added to process titles if nonempty
# (change requires restart)
#update_process_title = on
#------------------------------------------------------------------------------
# RUNTIME STATISTICS
#------------------------------------------------------------------------------
# - Query/Index Statistics Collector -
#track_activities = on
#track_counts = on
#track_io_timing = off
#track_functions = none # none, pl, all
#track_activity_query_size = 1024 # (change requires restart)
#stats_temp_directory = 'pg_stat_tmp'
# - Statistics Monitoring -
#log_parser_stats = off
#log_planner_stats = off
#log_executor_stats = off
#log_statement_stats = off
#------------------------------------------------------------------------------
# AUTOVACUUM PARAMETERS
#------------------------------------------------------------------------------
#autovacuum = on # Enable autovacuum subprocess? 'on'
# requires track_counts to also be on.
#log_autovacuum_min_duration = -1 # -1 disables, 0 logs all actions and
# their durations, > 0 logs only
# actions running at least this number
# of milliseconds.
#autovacuum_max_workers = 3 # max number of autovacuum subprocesses
# (change requires restart)
#autovacuum_naptime = 1min # time between autovacuum runs
#autovacuum_vacuum_threshold = 50 # min number of row updates before
# vacuum
#autovacuum_analyze_threshold = 50 # min number of row updates before
# analyze
#autovacuum_vacuum_scale_factor = 0.2 # fraction of table size before vacuum
#autovacuum_analyze_scale_factor = 0.1 # fraction of table size before analyze
#autovacuum_freeze_max_age = 200000000 # maximum XID age before forced vacuum
# (change requires restart)
#autovacuum_multixact_freeze_max_age = 400000000 # maximum multixact age
# before forced vacuum
# (change requires restart)
#autovacuum_vacuum_cost_delay = 20ms # default vacuum cost delay for
# autovacuum, in milliseconds;
# -1 means use vacuum_cost_delay
#autovacuum_vacuum_cost_limit = -1 # default vacuum cost limit for
# autovacuum, -1 means use
# vacuum_cost_limit
#------------------------------------------------------------------------------
# CLIENT CONNECTION DEFAULTS
#------------------------------------------------------------------------------
# - Statement Behavior -
#search_path = '"$user", public' # schema names
#default_tablespace = '' # a tablespace name, '' uses the default
#temp_tablespaces = '' # a list of tablespace names, '' uses
# only default tablespace
#check_function_bodies = on
#default_transaction_isolation = 'read committed'
#default_transaction_read_only = off
#default_transaction_deferrable = off
#session_replication_role = 'origin'
#statement_timeout = 0 # in milliseconds, 0 is disabled
#lock_timeout = 0 # in milliseconds, 0 is disabled
#idle_in_transaction_session_timeout = 0 # in milliseconds, 0 is disabled
#vacuum_freeze_min_age = 50000000
#vacuum_freeze_table_age = 150000000
#vacuum_multixact_freeze_min_age = 5000000
#vacuum_multixact_freeze_table_age = 150000000
#bytea_output = 'hex' # hex, escape
#xmlbinary = 'base64'
#xmloption = 'content'
#gin_fuzzy_search_limit = 0
#gin_pending_list_limit = 4MB
# - Locale and Formatting -
#datestyle = 'iso, mdy'
#intervalstyle = 'postgres'
#timezone = 'GMT'
#timezone_abbreviations = 'Default' # Select the set of available time zone
# abbreviations. Currently, there are
# Default
# Australia (historical usage)
# India
# You can create your own file in
# share/timezonesets/.
#extra_float_digits = 0 # min -15, max 3
#client_encoding = sql_ascii # actually, defaults to database
# encoding
# These settings are initialized by initdb, but they can be changed.
#lc_messages = 'C' # locale for system error message
# strings
#lc_monetary = 'C' # locale for monetary formatting
#lc_numeric = 'C' # locale for number formatting
#lc_time = 'C' # locale for time formatting
# default configuration for text search
#default_text_search_config = 'pg_catalog.simple'
# - Other Defaults -
#dynamic_library_path = '$libdir'
#local_preload_libraries = ''
#session_preload_libraries = ''
#------------------------------------------------------------------------------
# LOCK MANAGEMENT
#------------------------------------------------------------------------------
#deadlock_timeout = 1s
#max_locks_per_transaction = 64 # min 10
# (change requires restart)
#max_pred_locks_per_transaction = 64 # min 10
# (change requires restart)
#max_pred_locks_per_relation = -2 # negative values mean
# (max_pred_locks_per_transaction
# / -max_pred_locks_per_relation) - 1
#max_pred_locks_per_page = 2 # min 0
#------------------------------------------------------------------------------
# VERSION/PLATFORM COMPATIBILITY
#------------------------------------------------------------------------------
# - Previous PostgreSQL Versions -
#array_nulls = on
#backslash_quote = safe_encoding # on, off, or safe_encoding
#default_with_oids = off
#escape_string_warning = on
#lo_compat_privileges = off
#operator_precedence_warning = off
#quote_all_identifiers = off
#standard_conforming_strings = on
#synchronize_seqscans = on
# - Other Platforms and Clients -
#transform_null_equals = off
#------------------------------------------------------------------------------
# ERROR HANDLING
#------------------------------------------------------------------------------
#exit_on_error = off # terminate session on any error?
#restart_after_crash = on # reinitialize after backend crash?
#------------------------------------------------------------------------------
# CONFIG FILE INCLUDES
#------------------------------------------------------------------------------
# These options allow settings to be loaded from files other than the
# default postgresql.conf.
#include_dir = 'conf.d' # include files ending in '.conf' from
# directory 'conf.d'
#include_if_exists = 'exists.conf' # include file only if it exists
#include = 'special.conf' # include file
#------------------------------------------------------------------------------
# CUSTOMIZED OPTIONS
#------------------------------------------------------------------------------
# Add settings for extensions here
zdb.default_elasticsearch_url = 'http://elasticsearch:9200/'

View File

@@ -0,0 +1,4 @@
#!/usr/bin/env bash
set -e
cat /tmp/postgresql.conf > /var/lib/postgresql/data/postgresql.conf

View File

@@ -1,6 +1,5 @@
#Wed May 29 17:08:27 CEST 2019
distributionUrl=https\://services.gradle.org/distributions/gradle-6.8-bin.zip
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-4.10.3-all.zip
zipStoreBase=GRADLE_USER_HOME

2
jitpack.yml Normal file
View File

@@ -0,0 +1,2 @@
jdk:
- openjdk11

View File

@@ -1 +1 @@
rootProject.name = 'postgresjson'
rootProject.name = 'postgres-json'

View File

@@ -6,11 +6,13 @@ import com.github.jasync.sql.db.QueryResult
import com.github.jasync.sql.db.pool.ConnectionPool
import com.github.jasync.sql.db.postgresql.PostgreSQLConnection
import com.github.jasync.sql.db.postgresql.PostgreSQLConnectionBuilder
import com.github.jasync.sql.db.util.length
import fr.postgresjson.entity.EntityI
import fr.postgresjson.entity.Serializable
import fr.postgresjson.serializer.Serializer
import fr.postgresjson.utils.LoggerDelegate
import org.slf4j.Logger
import java.util.concurrent.CompletableFuture
import java.util.concurrent.*
typealias SelectOneCallback<T> = QueryResult.(T?) -> Unit
typealias SelectCallback<T> = QueryResult.(List<T>) -> Unit
@@ -22,30 +24,39 @@ class Connection(
private val password: String,
private val host: String = "localhost",
private val port: Int = 5432
): Executable {
private lateinit var connection: ConnectionPool<PostgreSQLConnection>
) : Executable {
private var connection: ConnectionPool<PostgreSQLConnection>? = null
private val serializer = Serializer()
private val logger: Logger? by LoggerDelegate()
internal fun connect(): ConnectionPool<PostgreSQLConnection> {
if (!::connection.isInitialized || !connection.isConnected()) {
connection = PostgreSQLConnectionBuilder.createConnectionPool(
"jdbc:postgresql://$host:$port/$database?user=$username&password=$password"
)
return connection.let { connectionPool ->
if (connectionPool == null || !connectionPool.isConnected()) {
PostgreSQLConnectionBuilder.createConnectionPool(
"jdbc:postgresql://$host:$port/$database?user=$username&password=$password"
).also {
connection = it
}
} else {
connectionPool
}
}
return connection
}
fun disconnect() {
connection?.run { disconnect() }
}
fun <A> inTransaction(f: (Connection) -> CompletableFuture<A>) = connect().inTransaction(f)
override fun <R: EntityI<*>> select(
override fun <R : EntityI> select(
sql: String,
typeReference: TypeReference<R>,
values: List<Any?>,
block: (QueryResult, R?) -> Unit
): R? {
val primaryObject = values.firstOrNull {
it is EntityI<*> && typeReference.type.typeName == it::class.java.name
it is EntityI && typeReference.type.typeName == it::class.java.name
} as R?
val result = exec(sql, compileArgs(values))
val json = result.rows[0].getString(0)
@@ -62,14 +73,14 @@ class Connection(
}
}
inline fun <reified R: EntityI<*>> selectOne(
inline fun <reified R : EntityI> selectOne(
sql: String,
values: List<Any?> = emptyList(),
noinline block: SelectOneCallback<R> = {}
): R? =
select(sql, object: TypeReference<R>() {}, values, block)
select(sql, object : TypeReference<R>() {}, values, block)
override fun <R: EntityI<*>> select(
override fun <R : EntityI> select(
sql: String,
typeReference: TypeReference<R>,
values: Map<String, Any?>,
@@ -80,14 +91,14 @@ class Connection(
}
}
inline fun <reified R: EntityI<*>> selectOne(
inline fun <reified R : EntityI> selectOne(
sql: String,
values: Map<String, Any?>,
noinline block: SelectOneCallback<R> = {}
): R? =
select(sql, object: TypeReference<R>() {}, values, block)
select(sql, object : TypeReference<R>() {}, values, block)
override fun <R: EntityI<*>> select(
override fun <R : EntityI> select(
sql: String,
typeReference: TypeReference<List<R>>,
values: List<Any?>,
@@ -96,7 +107,7 @@ class Connection(
val result = exec(sql, compileArgs(values))
val json = result.rows[0].getString(0)
return if (json === null) {
listOf<EntityI<*>>() as List<R>
listOf<EntityI>() as List<R>
} else {
serializer.deserializeList(json, typeReference)
}.also {
@@ -104,14 +115,14 @@ class Connection(
}
}
inline fun <reified R: EntityI<*>> select(
inline fun <reified R : EntityI> select(
sql: String,
values: List<Any?> = emptyList(),
noinline block: SelectCallback<R> = {}
): List<R> =
select(sql, object: TypeReference<List<R>>() {}, values, block)
select(sql, object : TypeReference<List<R>>() {}, values, block)
override fun <R: EntityI<*>> select(
override fun <R : EntityI> select(
sql: String,
page: Int,
limit: Int,
@@ -125,13 +136,13 @@ class Connection(
.plus("limit" to limit)
val line = replaceArgs(sql, newValues) {
exec(this.sql, compileArgs(this.parameters))
exec(this.sql, this.parameters)
}
return line.run {
val json = rows[0].getString(0)
val entities = if (json === null) {
listOf<EntityI<*>>() as List<R>
listOf<EntityI>() as List<R>
} else {
serializer.deserializeList(json, typeReference)
}
@@ -146,16 +157,16 @@ class Connection(
}
}
inline fun <reified R: EntityI<*>> select(
inline fun <reified R : EntityI> select(
sql: String,
page: Int,
limit: Int,
values: Map<String, Any?> = emptyMap(),
noinline block: SelectPaginatedCallback<R> = {}
): Paginated<R> =
select(sql, page, limit, object: TypeReference<List<R>>() {}, values, block)
select(sql, page, limit, object : TypeReference<List<R>>() {}, values, block)
override fun <R: EntityI<*>> select(
override fun <R : EntityI> select(
sql: String,
typeReference: TypeReference<List<R>>,
values: Map<String, Any?>,
@@ -166,16 +177,17 @@ class Connection(
}
}
inline fun <reified R: EntityI<*>> select(
inline fun <reified R : EntityI> select(
sql: String,
values: Map<String, Any?>,
noinline block: SelectCallback<R> = {}
): List<R> =
select(sql, object: TypeReference<List<R>>() {}, values, block)
select(sql, object : TypeReference<List<R>>() {}, values, block)
override fun exec(sql: String, values: List<Any?>): QueryResult {
return stopwatchQuery(sql, values) {
connect().sendPreparedStatement(sql, compileArgs(values)).join()
val compiledValues = compileArgs(values)
return stopwatchQuery(sql, compiledValues) {
connect().sendPreparedStatement(sql, compiledValues).join()
}
}
@@ -185,18 +197,25 @@ class Connection(
}
}
override fun sendQuery(sql: String): QueryResult {
return stopwatchQuery(sql) {
connect().sendQuery(sql).join()
override fun sendQuery(sql: String, values: List<Any?>): Int {
val compiledValues = compileArgs(values)
return stopwatchQuery(sql, compiledValues) {
replaceArgsIntoSql(sql, compiledValues) {
connect().sendQuery(it).join().rowsAffected.toInt()
}
}
}
override fun sendQuery(sql: String, values: Map<String, Any?>): Int {
return replaceArgs(sql, values) {
sendQuery(this.sql, this.parameters)
}
}
private fun compileArgs(values: List<Any?>): List<Any?> {
return values.map {
if (it is EntityI<*>) {
serializer.serialize(it).apply {
serializer.collection.set<Any?, EntityI<Any?>>(it as EntityI<Any?>)
}
if (it is Serializable || (it is List<*> && it.firstOrNull() is Serializable)) {
serializer.serialize(it)
} else {
it
}
@@ -207,32 +226,66 @@ class Connection(
val paramRegex = "(?<!:):([a-zA-Z0-9_-]+)".toRegex(RegexOption.IGNORE_CASE)
val newArgs = paramRegex.findAll(sql).map { match ->
val name = match.groups[1]!!.value
values[name] ?: error("Parameter $name missing")
values[name] ?: values[name.trimStart('_')] ?: error("Parameter $name missing")
}.toList()
var newSql = sql
values.forEach { (key, _) ->
val regex = ":$key".toRegex()
val regex = ":_?$key".toRegex()
newSql = newSql.replace(regex, "?")
}
return block(ParametersQuery(newSql, newArgs))
}
private fun <T> replaceArgsIntoSql(sql: String, values: List<Any?>, block: (String) -> T): T {
val paramRegex = "(?<!\\?)(\\?)(?!\\?)".toRegex(RegexOption.IGNORE_CASE)
var i = 0
if (values.isNotEmpty()) {
val newSql = paramRegex.replace(sql) {
values[i] ?: error("Parameter $i missing")
val valToReplace = values[i].toString()
++i
"'$valToReplace'"
}
return block(newSql)
}
return block(sql)
}
data class ParametersQuery(val sql: String, val parameters: List<Any?>)
private fun <T> stopwatchQuery(sql: String, values: List<Any?> = emptyList(), callback: () -> T): T {
val sqlForLog = "\n${sql.prependIndent()}"
try {
val start = System.currentTimeMillis()
val result = callback()
val duration = (System.currentTimeMillis() - start)
logger?.debug("$duration ms for query: $sqlForLog", values)
val resultText = when (result) {
null -> "with no result"
is QueryResult -> result.rows.firstOrNull()?.joinToString(", ")?.let { text ->
if (text.length > 100) "${text.take(100)}... (size: ${text.length})" else text
} ?: "with no result"
else -> "unknown"
}
val args = """
|Query ($duration ms):
|${sql.trimIndent().prependIndent()}
|Arguments (${values.length}):
|${values.joinToString("\n").ifBlank { "No arguments" }.prependIndent()}
|Result:
|${resultText.trimIndent().prependIndent()}
""".trimMargin().prependIndent(" > ")
logger?.debug("Query executed in $duration ms \n{}", args)
return result
} catch (e: Throwable) {
logger?.info("Query Error: $sqlForLog, $values", e)
logger?.info("""
Query Error:
${sql.prependIndent()},
${values.joinToString(", ").prependIndent()}
""".trimIndent(), e)
throw e
}
}
}

View File

@@ -13,33 +13,33 @@ interface EmbedExecutable {
/**
* Select One entity with list of parameters
*/
fun <R: EntityI<*>> select(
fun <R : EntityI> select(
typeReference: TypeReference<R>,
values: List<Any?> = emptyList(),
block: SelectOneCallback<R> = {}
): R?
fun <R: EntityI<*>> select(
fun <R : EntityI> select(
typeReference: TypeReference<R>,
values: Map<String, Any?>,
block: SelectOneCallback<R> = {}
): R?
/* Select Miltiples */
fun <R: EntityI<*>> select(
fun <R : EntityI> select(
typeReference: TypeReference<List<R>>,
values: List<Any?> = emptyList(),
block: SelectCallback<R> = {}
): List<R>
fun <R: EntityI<*>> select(
fun <R : EntityI> select(
typeReference: TypeReference<List<R>>,
values: Map<String, Any?>,
block: SelectCallback<R> = {}
): List<R>
/* Select Paginated */
fun <R: EntityI<*>> select(
fun <R : EntityI> select(
page: Int,
limit: Int,
typeReference: TypeReference<List<R>>,
@@ -49,4 +49,14 @@ interface EmbedExecutable {
fun exec(values: List<Any?> = emptyList()): QueryResult
fun exec(values: Map<String, Any?>): QueryResult
fun exec(vararg values: Pair<String, Any?>): QueryResult = exec(values.toMap())
fun perform(values: List<Any?>) { exec(values) }
fun perform(values: Map<String, Any?>) { exec(values) }
fun perform(vararg values: Pair<String, Any?>) = perform(values.toMap())
fun sendQuery(values: List<Any?> = emptyList()): Int
fun sendQuery(values: Map<String, Any?>): Int
fun sendQuery(vararg values: Pair<String, Any?>): Int =
sendQuery(values.toMap())
}

View File

@@ -7,14 +7,14 @@ import fr.postgresjson.entity.EntityI
interface Executable {
/* Select One */
fun <R: EntityI<*>> select(
fun <R : EntityI> select(
sql: String,
typeReference: TypeReference<R>,
values: List<Any?> = emptyList(),
block: SelectOneCallback<R> = {}
): R?
fun <R: EntityI<*>> select(
fun <R : EntityI> select(
sql: String,
typeReference: TypeReference<R>,
values: Map<String, Any?>,
@@ -23,14 +23,14 @@ interface Executable {
/* Select Miltiples */
fun <R: EntityI<*>> select(
fun <R : EntityI> select(
sql: String,
typeReference: TypeReference<List<R>>,
values: List<Any?> = emptyList(),
block: SelectCallback<R> = {}
): List<R>
fun <R: EntityI<*>> select(
fun <R : EntityI> select(
sql: String,
typeReference: TypeReference<List<R>>,
values: Map<String, Any?>,
@@ -39,7 +39,7 @@ interface Executable {
/* Select Paginated */
fun <R: EntityI<*>> select(
fun <R : EntityI> select(
sql: String,
page: Int,
limit: Int,
@@ -50,5 +50,6 @@ interface Executable {
fun exec(sql: String, values: List<Any?> = emptyList()): QueryResult
fun exec(sql: String, values: Map<String, Any?>): QueryResult
fun sendQuery(sql: String): QueryResult
fun sendQuery(sql: String, values: List<Any?> = emptyList()): Int
fun sendQuery(sql: String, values: Map<String, Any?>): Int
}

View File

@@ -5,7 +5,7 @@ import com.github.jasync.sql.db.QueryResult
import fr.postgresjson.definition.Function
import fr.postgresjson.entity.EntityI
class Function(val definition: Function, override val connection: Connection): EmbedExecutable {
class Function(val definition: Function, override val connection: Connection) : EmbedExecutable {
override fun toString(): String {
return definition.name
}
@@ -17,7 +17,7 @@ class Function(val definition: Function, override val connection: Connection): E
/**
* Select One entity with list of parameters
*/
override fun <R: EntityI<*>> select(
override fun <R : EntityI> select(
typeReference: TypeReference<R>,
values: List<Any?>,
block: (QueryResult, R?) -> Unit
@@ -28,22 +28,22 @@ class Function(val definition: Function, override val connection: Connection): E
return connection.select(sql, typeReference, values, block)
}
inline fun <reified R: EntityI<*>> selectOne(
inline fun <reified R : EntityI> selectOne(
values: List<Any?> = emptyList(),
noinline block: SelectOneCallback<R> = {}
): R? =
select(object: TypeReference<R>() {}, values, block)
select(object : TypeReference<R>() {}, values, block)
inline fun <reified R: EntityI<*>> selectOne(
inline fun <reified R : EntityI> selectOne(
value: R,
noinline block: SelectOneCallback<R> = {}
): R? =
select(object: TypeReference<R>() {}, listOf(value), block)
select(object : TypeReference<R>() {}, listOf(value), block)
/**
* Select One entity with named parameters
*/
override fun <R: EntityI<*>> select(
override fun <R : EntityI> select(
typeReference: TypeReference<R>,
values: Map<String, Any?>,
block: (QueryResult, R?) -> Unit
@@ -54,13 +54,13 @@ class Function(val definition: Function, override val connection: Connection): E
return connection.select(sql, typeReference, values, block)
}
inline fun <reified R: EntityI<*>> selectOne(
inline fun <reified R : EntityI> selectOne(
values: Map<String, Any?>,
noinline block: SelectOneCallback<R> = {}
): R? =
select(object: TypeReference<R>() {}, values, block)
select(object : TypeReference<R>() {}, values, block)
inline fun <reified R: EntityI<*>> selectOne(
inline fun <reified R : EntityI> selectOne(
vararg values: Pair<String, Any?>,
noinline block: SelectOneCallback<R> = {}
): R? =
@@ -71,7 +71,7 @@ class Function(val definition: Function, override val connection: Connection): E
/**
* Select list of entities with list of parameters
*/
override fun <R: EntityI<*>> select(
override fun <R : EntityI> select(
typeReference: TypeReference<List<R>>,
values: List<Any?>,
block: (QueryResult, List<R>) -> Unit
@@ -82,16 +82,16 @@ class Function(val definition: Function, override val connection: Connection): E
return connection.select(sql, typeReference, values, block)
}
inline fun <reified R: EntityI<*>> select(
inline fun <reified R : EntityI> select(
values: List<Any?> = emptyList(),
noinline block: SelectCallback<R> = {}
): List<R> =
select(object: TypeReference<List<R>>() {}, values, block)
select(object : TypeReference<List<R>>() {}, values, block)
/**
* Select list of entities with named parameters
*/
override fun <R: EntityI<*>> select(
override fun <R : EntityI> select(
typeReference: TypeReference<List<R>>,
values: Map<String, Any?>,
block: (QueryResult, List<R>) -> Unit
@@ -102,13 +102,13 @@ class Function(val definition: Function, override val connection: Connection): E
return connection.select(sql, typeReference, values, block)
}
inline fun <reified R: EntityI<*>> select(
inline fun <reified R : EntityI> select(
values: Map<String, Any?>,
noinline block: SelectCallback<R> = {}
): List<R> =
select(object: TypeReference<List<R>>() {}, values, block)
select(object : TypeReference<List<R>>() {}, values, block)
inline fun <reified R: EntityI<*>> select(
inline fun <reified R : EntityI> select(
vararg values: Pair<String, Any?>,
noinline block: SelectCallback<R> = {}
): List<R> =
@@ -119,7 +119,7 @@ class Function(val definition: Function, override val connection: Connection): E
/**
* Select Multiple with pagination
*/
override fun <R: EntityI<*>> select(
override fun <R : EntityI> select(
page: Int,
limit: Int,
typeReference: TypeReference<List<R>>,
@@ -137,21 +137,21 @@ class Function(val definition: Function, override val connection: Connection): E
return connection.select(sql, page, limit, typeReference, values, block)
}
inline fun <reified R: EntityI<*>> select(
inline fun <reified R : EntityI> select(
page: Int,
limit: Int,
values: Map<String, Any?> = emptyMap(),
noinline block: SelectPaginatedCallback<R> = {}
): Paginated<R> =
select(page, limit, object: TypeReference<List<R>>() {}, values, block)
select(page, limit, object : TypeReference<List<R>>() {}, values, block)
inline fun <reified R: EntityI<*>> select(
inline fun <reified R : EntityI> select(
page: Int,
limit: Int,
vararg values: Pair<String, Any?>,
noinline block: SelectPaginatedCallback<R> = {}
): Paginated<R> =
select(page, limit, object: TypeReference<List<R>>() {}, values.toMap(), block)
select(page, limit, object : TypeReference<List<R>>() {}, values.toMap(), block)
/* Execute function without traitements */
@@ -169,12 +169,22 @@ class Function(val definition: Function, override val connection: Connection): E
return connection.exec(sql, values)
}
override fun sendQuery(values: List<Any?>): Int {
exec(values)
return 0
}
override fun sendQuery(values: Map<String, Any?>): Int {
exec(values)
return 0
}
private fun compileArgs(values: List<Any?>): String {
val placeholders = values
.filterIndexed { index, any ->
definition.parameters[index].default === null || any !== null
.filterIndexed { index, value ->
definition.parameters[index].default === null || value != null
}
.mapIndexed { index, any ->
.mapIndexed { index, _ ->
"?::" + definition.parameters[index].type
}
@@ -185,11 +195,11 @@ class Function(val definition: Function, override val connection: Connection): E
val parameters = definition.getParametersIndexedByName()
val placeholders = values
.filter { entry ->
val parameter = parameters[entry.key] ?: error("Parameter ${entry.key} not exist")
val parameter = parameters[entry.key] ?: parameters["_" + entry.key] ?: error("Parameter ${entry.key} of function ${definition.name} not exist")
parameter.default === null || entry.value !== null
}
.map { entry ->
val parameter = parameters[entry.key]!!
val parameter = parameters[entry.key] ?: parameters["_" + entry.key] ?: error("Parameter ${entry.key} of function ${definition.name} not exist")
""""${parameter.name}" := :${parameter.name}::${parameter.type}"""
}

View File

@@ -1,20 +1,25 @@
package fr.postgresjson.connexion
import com.github.jasync.sql.db.util.length
import fr.postgresjson.entity.EntityI
import kotlin.math.ceil
data class Paginated<T: EntityI<*>>(
data class Paginated<T : EntityI>(
val result: List<T>,
val offset: Int,
val limit: Int,
val total: Int
) {
val currentPage: Int = (offset / limit) + 1
val count: Int = result.length
val count: Int = result.size
val totalPages: Int = (total.toDouble() / limit.toDouble()).ceil()
init {
if (offset < 0) error("offset must be greather or equal than 0")
if (limit < 1) error("limit must be greather or equal than 1")
if (total < 0) error("total must be greather or equal than 0")
}
fun isLastPage(): Boolean = currentPage >= totalPages
private fun Double.ceil(): Int = ceil(this).toInt()
}

View File

@@ -4,15 +4,14 @@ import com.fasterxml.jackson.core.type.TypeReference
import com.github.jasync.sql.db.QueryResult
import fr.postgresjson.entity.EntityI
class Query(override val name: String, private val sql: String, override val connection: Connection): EmbedExecutable {
class Query(override val name: String, private val sql: String, override val connection: Connection) : EmbedExecutable {
override fun toString(): String {
return sql
}
/* Select One */
override fun <R: EntityI<*>> select(
override fun <R : EntityI> select(
typeReference: TypeReference<R>,
values: List<Any?>,
block: (QueryResult, R?) -> Unit
@@ -20,13 +19,13 @@ class Query(override val name: String, private val sql: String, override val con
return connection.select(this.toString(), typeReference, values, block)
}
inline fun <reified R: EntityI<*>> selectOne(
inline fun <reified R : EntityI> selectOne(
values: List<Any?> = emptyList(),
noinline block: SelectOneCallback<R> = {}
): R? =
select(object: TypeReference<R>() {}, values, block)
select(object : TypeReference<R>() {}, values, block)
override fun <R: EntityI<*>> select(
override fun <R : EntityI> select(
typeReference: TypeReference<R>,
values: Map<String, Any?>,
block: (QueryResult, R?) -> Unit
@@ -34,15 +33,15 @@ class Query(override val name: String, private val sql: String, override val con
return connection.select(this.toString(), typeReference, values, block)
}
inline fun <reified R: EntityI<*>> selectOne(
inline fun <reified R : EntityI> selectOne(
values: Map<String, Any?>,
noinline block: SelectOneCallback<R> = {}
): R? =
select(object: TypeReference<R>() {}, values, block)
select(object : TypeReference<R>() {}, values, block)
/* Select Multiples */
override fun <R: EntityI<*>> select(
override fun <R : EntityI> select(
typeReference: TypeReference<List<R>>,
values: List<Any?>,
block: (QueryResult, List<R>) -> Unit
@@ -50,13 +49,13 @@ class Query(override val name: String, private val sql: String, override val con
return connection.select(this.toString(), typeReference, values, block)
}
inline fun <reified R: EntityI<*>> select(
inline fun <reified R : EntityI> select(
values: List<Any?> = emptyList(),
noinline block: SelectCallback<R> = {}
): List<R> =
select(object: TypeReference<List<R>>() {}, values, block)
select(object : TypeReference<List<R>>() {}, values, block)
override fun <R: EntityI<*>> select(
override fun <R : EntityI> select(
typeReference: TypeReference<List<R>>,
values: Map<String, Any?>,
block: (QueryResult, List<R>) -> Unit
@@ -64,13 +63,13 @@ class Query(override val name: String, private val sql: String, override val con
return connection.select(this.toString(), typeReference, values, block)
}
inline fun <reified R: EntityI<*>> select(
inline fun <reified R : EntityI> select(
values: Map<String, Any?>,
noinline block: SelectCallback<R> = {}
): List<R> =
select(object: TypeReference<List<R>>() {}, values, block)
select(object : TypeReference<List<R>>() {}, values, block)
override fun <R: EntityI<*>> select(
override fun <R : EntityI> select(
page: Int,
limit: Int,
typeReference: TypeReference<List<R>>,
@@ -82,13 +81,13 @@ class Query(override val name: String, private val sql: String, override val con
/* Select Paginated */
inline fun <reified R: EntityI<*>> select(
inline fun <reified R : EntityI> select(
page: Int,
limit: Int,
values: Map<String, Any?> = emptyMap(),
noinline block: SelectPaginatedCallback<R> = {}
): Paginated<R> =
select(page, limit, object: TypeReference<List<R>>() {}, values, block)
select(page, limit, object : TypeReference<List<R>>() {}, values, block)
/* Execute function without traitements */
@@ -99,4 +98,12 @@ class Query(override val name: String, private val sql: String, override val con
override fun exec(values: Map<String, Any?>): QueryResult {
return connection.exec(sql, values)
}
override fun sendQuery(values: List<Any?>): Int {
return connection.sendQuery(sql, values)
}
override fun sendQuery(values: Map<String, Any?>): Int {
return connection.sendQuery(sql, values)
}
}

View File

@@ -1,7 +1,10 @@
package fr.postgresjson.connexion
import java.io.File
import fr.postgresjson.utils.searchSqlFiles
import java.net.URI
import fr.postgresjson.definition.Function as DefinitionFunction
import fr.postgresjson.definition.Function as FunctionDefinition
import fr.postgresjson.definition.Query as QueryDefinition
class Requester(
private val connection: Connection,
@@ -13,17 +16,19 @@ class Requester(
return this
}
fun addQuery(query: QueryDefinition): Requester = addQuery(query.name, query.script)
fun addQuery(name: String, sql: String): Requester {
addQuery(Query(name, sql, connection))
return this
}
fun addQuery(queriesDirectory: File): Requester {
queriesDirectory.walk()
.filter { it.isFile && it.extension == "sql" }
fun addQuery(queriesDirectory: URI): Requester {
queriesDirectory.searchSqlFiles()
.forEach {
val path = it.parentFile.nameWithoutExtension
addQuery("$path/${it.nameWithoutExtension}", it.readText())
if (it is QueryDefinition) {
addQuery(it)
}
}
return this
}
@@ -44,11 +49,12 @@ class Requester(
return this
}
fun addFunction(functionsDirectory: File): Requester {
functionsDirectory.walk()
.filter { it.isFile && it.extension == "sql" }
fun addFunction(functionsDirectory: URI): Requester {
functionsDirectory.searchSqlFiles()
.forEach {
addFunction(it.readText())
if (it is FunctionDefinition) {
addFunction(it)
}
}
return this
}
@@ -69,8 +75,8 @@ class Requester(
class RequesterFactory(
private val connection: Connection,
private val queriesDirectory: File? = null,
private val functionsDirectory: File? = null
private val queriesDirectory: URI? = null,
private val functionsDirectory: URI? = null
) {
constructor(
host: String = "localhost",
@@ -78,9 +84,9 @@ class Requester(
database: String = "dc-project",
username: String = "dc-project",
password: String = "dc-project",
queriesDirectory: File? = null,
functionsDirectory: File? = null
): this(
queriesDirectory: URI? = null,
functionsDirectory: URI? = null
) : this(
Connection(host = host, port = port, database = database, username = username, password = password),
queriesDirectory,
functionsDirectory

View File

@@ -1,19 +1,19 @@
package fr.postgresjson.definition
import java.io.File
import java.nio.file.Path
open class Function(
override val script: String
): Resource, ParametersInterface {
val returns: String?
class Function(
override val script: String,
override var source: Path? = null
) : Resource, ParametersInterface {
val returns: String
override val name: String
override val parameters: List<Parameter>
override var source: File? = null
init {
val functionRegex =
"""create (or replace )?(procedure|function) *(?<name>[^(\s]+)\s*\((?<params>(\s*((IN|OUT|INOUT|VARIADIC)?\s+)?([^\s,)]+\s+)?([^\s,)]+)(\s+(?:default\s|=)\s*[^\s,)]+)?\s*(,|(?=\))))*)\) *(?<return>RETURNS *[^ ]+)?"""
"""create (or replace )?(procedure|function) *(?<name>[^(\s]+)\s*\((?<params>(\s*((IN|OUT|INOUT|VARIADIC)?\s+)?([^\s,)]+\s+)?([^\s,)]+)(\s+(?:default\s|=)\s*[^\s,)]+)?\s*(,|(?=\))))*)\) *(?<return>RETURNS *[^ \n]+)?"""
.toRegex(setOf(RegexOption.IGNORE_CASE, RegexOption.MULTILINE))
val paramsRegex =
@@ -22,9 +22,9 @@ open class Function(
val queryMatch = functionRegex.find(script)
if (queryMatch !== null) {
val functionName = queryMatch.groups.get("name")?.value?.trim()
val functionName = queryMatch.groups.get("name")?.value?.trim() ?: error("Function name not found")
val functionParameters = queryMatch.groups["params"]?.value?.trim()
this.returns = queryMatch.groups["return"]?.value?.trim()
this.returns = queryMatch.groups["return"]?.value?.trim() ?: ""
/* Create parameters definition */
val parameters = if (functionParameters !== null) {
@@ -40,23 +40,21 @@ open class Function(
} else {
listOf()
}
this.name = functionName!!
this.name = functionName
this.parameters = parameters
} else {
throw FunctionNotFound()
}
}
abstract class ParseException(message: String, cause: Throwable? = null): Exception(message, cause)
class FunctionNotFound(cause: Throwable? = null): ParseException("Function not found in script", cause)
class FunctionNotFound(cause: Throwable? = null) : Resource.ParseException("Function not found in script", cause)
fun getDefinition(): String {
return parameters
.filter { it.direction == Parameter.Direction.IN }
.joinToString(", ") { "${it.name} ${it.type}" }.let {
"$name ($it) $returns"
"$name ($it)"
}
}
fun getParametersIndexedByName(): Map<String, Parameter> {

View File

@@ -0,0 +1,35 @@
package fr.postgresjson.definition
import java.nio.file.Path
class Migration(
override val script: String,
source: Path
) : Resource {
override val name: String
val direction: Direction
override var source: Path? = null
init {
this.source = source
this.direction = source.fileName.toString()
.let {
when {
it.endsWith(".down.sql") -> Direction.DOWN
it.endsWith(".up.sql") -> Direction.UP
else -> throw MigrationNotFound()
}
}
this.name = source.fileName.toString()
.substringAfterLast("/")
.let {
when (direction) {
Direction.DOWN -> it.substringBefore(".down.sql")
Direction.UP -> it.substringBefore(".up.sql")
}
}
}
class MigrationNotFound(cause: Throwable? = null) : Resource.ParseException("Migration not found in script", cause)
enum class Direction { UP, DOWN }
}

View File

@@ -18,7 +18,7 @@ class Parameter(val name: String, val type: String, direction: Direction? = Dire
}
}
constructor(name: String, type: String, direction: String? = "IN", default: Any? = null): this(
constructor(name: String, type: String, direction: String? = "IN", default: Any? = null) : this(
name = name,
type = type,
direction = direction?.let { Direction.valueOf(direction.toUpperCase()) },

View File

@@ -0,0 +1,25 @@
package fr.postgresjson.definition
import java.nio.file.Path
/**
 * A plain SQL query file.
 *
 * The query name comes from a `-- name: xxx` comment inside the script when
 * present, otherwise from the file name (without its ".sql" extension).
 */
class Query(
    override val script: String,
    source: Path
) : Resource {
    override var source: Path? = source
    override val name: String = getNameFromComment(script) ?: getNameFromFile(source)

    /** Try to get name from comment in file */
    private fun getNameFromComment(script: String): String? {
        val pattern = Regex("""-- *name ?: ?(?<name>[^ \n]+)""", setOf(RegexOption.IGNORE_CASE, RegexOption.MULTILINE))
        val match = pattern.find(script) ?: return null
        return match.groups["name"]?.value?.trim()
    }

    /** Try to get name from the filename */
    private fun getNameFromFile(source: Path): String {
        val fileName = source.fileName.toString()
        return fileName.substringAfterLast("/").substringBeforeLast(".sql")
    }
}

View File

@@ -1,11 +1,38 @@
package fr.postgresjson.definition
import java.io.File
import java.net.URL
import java.nio.file.Path
/**
 * A parsed SQL resource (Migration, Function or Query) with its script text,
 * logical name, and the path it was loaded from.
 */
interface Resource {
    val name: String
    val script: String
    // FIX: diff residue had left both the old `var source: File?` and the new
    // `var source: Path?` declarations; Path is what implementors declare.
    var source: Path?

    open class ParseException(message: String, cause: Throwable? = null) : Exception(message, cause)

    companion object {
        fun build(file: File): Resource =
            build(file.readText(), Path.of(file.toURI()))

        fun build(url: URL): Resource =
            build(url.readText(), Path.of(url.toURI()))

        /**
         * Parse [resource] by trying each concrete type in order:
         * Migration first, then Function, then Query.
         */
        fun build(resource: String, path: Path): Resource =
            try {
                Migration(resource, path)
            } catch (e: ParseException) {
                try {
                    Function(resource, path)
                } catch (e: ParseException) {
                    try {
                        Query(resource, path)
                    } catch (e: ParseException) {
                        // Preserve the last parse failure as the cause.
                        throw ParseException("No SQL resource found", e)
                    }
                }
            }
    }
}
interface ResourceCollection {

View File

@@ -1,42 +0,0 @@
package fr.postgresjson.entity
import kotlin.reflect.KClass
/**
 * An identity cache of entities, grouped by concrete entity class.
 * Lookup is by (class, id); storing an entity with a null id is a no-op.
 */
class EntitiesCollections {
    /** One sub-collection per concrete entity class. */
    private val collections: MutableMap<KClass<*>, EntityCollection<Any, EntityI<Any?>>> = mutableMapOf()

    fun <I, R: EntityI<I?>> get(id: I, className: KClass<R>): R? =
        collections[className]?.get(id!!) as R?

    inline fun <I, reified R: EntityI<I?>> get(id: I): R? = get(id, R::class)

    fun <I, R: EntityI<out I?>> set(entity: R): EntitiesCollections {
        collections.getOrPut(entity.className) { EntityCollection() }
            .set(entity as EntityI<Any?>)
        return this
    }

    /** Id-to-entity map for a single entity class. */
    class EntityCollection<T, E: EntityI<T?>> {
        private var collection: MutableMap<T, E> = mutableMapOf()

        fun get(id: T): E? = collection[id]

        fun set(entity: E) {
            // Entities without an id cannot be indexed and are skipped.
            entity.id?.also { collection[it] = entity }
        }
    }
}

View File

@@ -0,0 +1,126 @@
package fr.postgresjson.entity
import org.joda.time.DateTime
import java.util.*
/** An [EntityI] that exposes a typed, read-only identifier. */
interface EntityRefI<T> : EntityI {
    val id: T
}

/** Entity reference whose identifier is a [UUID]. */
interface UuidEntityI : EntityRefI<UUID> {
    override val id: UUID
}

/** Base entity implementation holding the immutable identifier. */
abstract class Entity<T>(override val id: T) : EntityRefI<T>

/** UUID-keyed entity; a random UUID is generated when none is provided. */
open class UuidEntity(id: UUID? = null) : UuidEntityI, Entity<UUID>(id ?: UUID.randomUUID())

/* Version */

/** Versioning contract: a version counter plus a version identifier. */
interface EntityVersioning<ID, NUMBER> {
    val versionNumber: NUMBER
    val versionId: ID
}

/** [EntityVersioning] with an Int counter and a UUID version id (random when absent). */
class UuidEntityVersioning(
    override val versionNumber: Int,
    versionId: UUID? = null
) : EntityVersioning<UUID, Int> {
    override val versionId: UUID = versionId ?: UUID.randomUUID()
}

/* Dates */

interface EntityCreatedAt {
    val createdAt: DateTime
}

interface EntityUpdatedAt {
    val updatedAt: DateTime
}

interface EntityDeletedAt {
    val deletedAt: DateTime?

    /** True when [deletedAt] is set and already in the past (a future instant counts as not yet deleted). */
    fun isDeleted(): Boolean {
        return deletedAt?.let {
            it < DateTime.now()
        } ?: false
    }
}

/** Default [EntityCreatedAt]: stamped with "now" unless a date is given. */
class EntityCreatedAtImp(
    override val createdAt: DateTime = DateTime.now()
) : EntityCreatedAt

/** Default [EntityUpdatedAt]: stamped with "now" unless a date is given. */
class EntityUpdatedAtImp(
    override val updatedAt: DateTime = DateTime.now()
) : EntityUpdatedAt

/** Default [EntityDeletedAt]: not deleted unless a date is given. */
class EntityDeletedAtImp(
    override val deletedAt: DateTime? = null
) : EntityDeletedAt
/* Author */
interface EntityCreatedBy<T : EntityI> {
val createdBy: T
}
interface EntityUpdatedBy<T : EntityI> {
val updatedBy: T
}
interface EntityDeletedBy<T : EntityI> {
val deletedBy: T?
}
class EntityCreatedByImp<UserT : EntityI>(
override val createdBy: UserT
) : EntityCreatedBy<UserT>
class EntityUpdatedByImp<UserT : EntityI>(
override val updatedBy: UserT
) : EntityUpdatedBy<UserT>
class EntityDeletedByImp<UserT : EntityI>(
override val deletedBy: UserT?
) : EntityDeletedBy<UserT>
/* Mixed */
class EntityCreatedImp<UserT : EntityI>(
override val createdAt: DateTime = DateTime.now(),
createdBy: UserT
) : EntityCreatedBy<UserT> by EntityCreatedByImp(createdBy),
EntityCreatedAt by EntityCreatedAtImp()
class EntityUpdatedImp<UserT : EntityI>(
updatedAt: DateTime = DateTime.now(),
override val updatedBy: UserT
) : EntityUpdatedBy<UserT>,
EntityUpdatedAt by EntityUpdatedAtImp(updatedAt)
/* Published */
interface Published<UserT : EntityI> {
val publishedAt: DateTime?
val publishedBy: UserT?
}
class EntityPublishedImp<UserT : EntityI>(
override val publishedBy: UserT?
) : Published<UserT> {
override val publishedAt: DateTime? = null
}
/* Implementation */
abstract class EntityImp<T, UserT : EntityI>(
updatedBy: UserT,
updatedAt: DateTime = DateTime.now()
) : UuidEntity(),
EntityCreatedAt by EntityCreatedAtImp(updatedAt),
EntityUpdatedAt by EntityUpdatedAtImp(updatedAt),
EntityDeletedAt by EntityDeletedAtImp(),
EntityCreatedBy<UserT> by EntityCreatedByImp(updatedBy),
EntityUpdatedBy<UserT> by EntityUpdatedByImp(updatedBy),
EntityDeletedBy<UserT> by EntityDeletedByImp(updatedBy)
abstract class UuidEntityExtended<T, UserT : EntityI>(
updatedBy: UserT,
publishedBy: UserT?
) :
EntityImp<T, UserT>(updatedBy),
EntityVersioning<UUID, Int> by UuidEntityVersioning(0),
Published<UserT> by EntityPublishedImp(publishedBy)

View File

@@ -1,90 +1,5 @@
package fr.postgresjson.entity
import com.fasterxml.jackson.annotation.JsonIgnore
import org.joda.time.DateTime
import java.util.*
import kotlin.reflect.KClass
/* ID */

/** Mutable entity contract: nullable id plus its runtime class (hidden from Jackson). */
interface EntityI<T> {
    var id: T?
    val className: KClass<EntityI<T?>>
        @JsonIgnore() get() = this::class as KClass<EntityI<T?>>
}

abstract class Entity<T>(override var id: T? = null): EntityI<T?>

/** UUID-keyed entity; defaults to a freshly generated UUID. */
abstract class UuidEntity(override var id: UUID? = UUID.randomUUID()): Entity<UUID?>(id)

/** Int-keyed entity; id starts null (presumably assigned by the database — confirm). */
abstract class IdEntity(override var id: Int? = null): Entity<Int?>(id)

/* Version */

interface EntityVersioning<T> {
    var version: T
}

/** Versioning by an incrementing integer. */
interface EntityVersioningIncrement: EntityVersioning<Int?>

class EntityVersioningIncrementImp: EntityVersioningIncrement {
    override var version: Int? = null
}

/** Versioning by a timestamp. */
interface EntityVersioningDate: EntityVersioning<DateTime?>

class EntityVersioningDateImp: EntityVersioningDate {
    override var version: DateTime? = null
}

/* Dates */

interface EntityCreatedAt {
    var createdAt: DateTime?
}

interface EntityUpdatedAt {
    var updatedAt: DateTime?
}

class EntityCreatedAtImp: EntityCreatedAt {
    override var createdAt: DateTime? = null
}

class EntityUpdatedAtImp: EntityUpdatedAt {
    override var updatedAt: DateTime? = null
}

/* Author */

interface CreatedBy<T: EntityI<*>> {
    var createdBy: T?
}

interface UpdatedBy<T: EntityI<*>> {
    var updatedBy: T?
}

class EntityCreatedByImp<UserT: EntityI<*>>: CreatedBy<UserT> {
    override var createdBy: UserT? = null
}

class EntityUpdatedByImp<UserT: EntityI<*>>: UpdatedBy<UserT> {
    override var updatedBy: UserT? = null
}

/* Published */

interface Published<UserT: EntityI<*>> {
    var publishedAt: DateTime?
    var publishedBy: UserT?
}

class EntityPublishedImp<UserT: EntityI<*>>: Published<UserT> {
    override var publishedAt: DateTime? = null
    override var publishedBy: UserT? = null
}

/* Implementation */

/** Base entity aggregating timestamps and authors via delegation (all start null). */
abstract class EntityImp<T, UserT: EntityI<*>>: Entity<T>(),
    EntityCreatedAt by EntityCreatedAtImp(),
    EntityUpdatedAt by EntityUpdatedAtImp(),
    CreatedBy<UserT> by EntityCreatedByImp(),
    UpdatedBy<UserT> by EntityUpdatedByImp()

/** [EntityImp] extended with incrementing versioning and publication info. */
abstract class EntityExtended<T, UserT: EntityI<*>>:
    EntityImp<T, UserT>(),
    EntityVersioningIncrement by EntityVersioningIncrementImp(),
    Published<UserT> by EntityPublishedImp()
/** Marker for anything the Serializer may (de)serialize. */
interface Serializable

/** Marker for persisted entities. */
interface EntityI : Serializable

/** Marker for query-parameter objects serialized to JSON. */
interface Parameter : Serializable

View File

@@ -1,9 +1,11 @@
package fr.postgresjson.migration
import com.github.jasync.sql.db.postgresql.exceptions.GenericDatabaseException
import fr.postgresjson.connexion.Connection
import fr.postgresjson.migration.Migration.Action
import fr.postgresjson.migration.Migration.Status
import java.util.*
import java.util.concurrent.*
import fr.postgresjson.definition.Function as DefinitionFunction
data class Function(
@@ -11,13 +13,13 @@ data class Function(
val down: DefinitionFunction,
private val connection: Connection,
override var executedAt: Date? = null
): Migration {
) : Migration {
val name = up.name
override var doExecute: Action? = null
init {
if (up.name != down.name) {
throw Exception("UP and DOWN migration must have the same name [${up.name} !== ${down.name}]")
throw Exception("UP and DOWN migration must have the same name [${up.name} != ${down.name}]")
}
}
@@ -26,7 +28,7 @@ data class Function(
down: String,
connection: Connection,
executedAt: Date? = null
): this(
) : this(
DefinitionFunction(up),
DefinitionFunction(down),
connection,
@@ -34,7 +36,15 @@ data class Function(
)
override fun up(): Status {
connection.sendQuery(up.script)
try {
connection.sendQuery(up.script)
} catch (e: CompletionException) {
val cause = e.cause
if (cause is GenericDatabaseException && cause.errorMessage.fields['C'] == "42P13") {
connection.sendQuery("drop function ${down.getDefinition()}")
connection.sendQuery(up.script)
}
}
this::class.java.classLoader.getResource("sql/migration/insertFunction.sql")!!.readText().let {
connection.selectOne<MigrationEntity>(it, listOf(up.name, up.getDefinition(), up.script, down.script))?.let { function ->
@@ -49,7 +59,7 @@ data class Function(
connection.sendQuery(down.script)
this::class.java.classLoader.getResource("sql/migration/deleteFunction.sql")!!.readText().let {
connection.exec(it, listOf(down))
connection.sendQuery(it, listOf(down.name))
}
return Status.OK
}
@@ -65,7 +75,7 @@ data class Function(
}
override fun status(): Status {
val result = connection.inTransaction {
connection.inTransaction {
up()
down()
it.sendQuery("ROLLBACK")

View File

@@ -5,13 +5,13 @@ import fr.postgresjson.entity.Entity
import fr.postgresjson.migration.Migration.Action
import java.util.*
data class Query(
data class MigrationScript(
val name: String,
val up: String,
val down: String,
private val connection: Connection,
override var executedAt: Date? = null
): Migration, Entity<String?>(name) {
) : Migration, Entity<String?>(name) {
override var doExecute: Action? = null
override fun up(): Migration.Status {
@@ -48,7 +48,7 @@ data class Query(
}
override fun status(): Migration.Status {
val result = connection.inTransaction {
connection.inTransaction {
up()
down()
it.sendQuery("ROLLBACK")
@@ -57,7 +57,7 @@ data class Query(
return Migration.Status.OK // TODO
}
fun copy(): Query {
fun copy(): MigrationScript {
return this.copy(name = name, up = up, down = down, connection = connection, executedAt = executedAt).also {
it.doExecute = this.doExecute
}

View File

@@ -1,16 +1,16 @@
package fr.postgresjson.migration
import com.fasterxml.jackson.core.type.TypeReference
import com.github.jasync.sql.db.util.size
import fr.postgresjson.connexion.Connection
import fr.postgresjson.definition.Function.FunctionNotFound
import fr.postgresjson.definition.Migration as DefinitionMigration
import fr.postgresjson.entity.Entity
import fr.postgresjson.migration.Migration.Action
import fr.postgresjson.migration.Migration.Status
import fr.postgresjson.utils.LoggerDelegate
import fr.postgresjson.utils.searchSqlFiles
import org.slf4j.Logger
import java.io.File
import java.io.FileNotFoundException
import java.net.URI
import java.util.*
import fr.postgresjson.definition.Function as DefinitionFunction
@@ -20,7 +20,7 @@ class MigrationEntity(
val up: String,
val down: String,
val version: Int
): Entity<String?>(filename)
) : Entity<String?>(filename)
interface Migration {
var executedAt: Date?
@@ -36,17 +36,28 @@ interface Migration {
data class Migrations private constructor(
private val connection: Connection,
private val queries: MutableMap<String, Query> = mutableMapOf(),
private val migrationsScripts: MutableMap<String, MigrationScript> = mutableMapOf(),
private val functions: MutableMap<String, Function> = mutableMapOf()
) {
private var directories: List<URI> = emptyList()
private val logger: Logger? by LoggerDelegate()
constructor(directory: File, connection: Connection): this(listOf(directory), connection)
constructor(directory: URI, connection: Connection) : this(listOf(directory), connection)
constructor(connection: Connection, vararg directory: URI) : this(directory.toList(), connection)
constructor(directories: List<File>, connection: Connection): this(connection) {
constructor(directories: List<URI>, connection: Connection) : this(connection) {
initDB()
this.directories = directories
reset()
}
fun reset() {
migrationsScripts.clear()
functions.clear()
getMigrationFromDB()
getMigrationFromDirectory(directories)
queries.forEach { (_, query) ->
migrationsScripts.forEach { (_, query) ->
if (query.doExecute === null) {
query.doExecute = Action.DOWN
}
@@ -66,16 +77,16 @@ data class Migrations private constructor(
*/
private fun getMigrationFromDB() {
this::class.java.classLoader.getResource("sql/migration/findAllFunction.sql")!!.readText().let {
connection.select<MigrationEntity>(it, object: TypeReference<List<MigrationEntity>>() {})
connection.select<MigrationEntity>(it, object : TypeReference<List<MigrationEntity>>() {})
.map { function ->
functions[function.filename] = Function(function.up, function.down, connection, function.executedAt)
}
}
this::class.java.classLoader.getResource("sql/migration/findAllHistory.sql")!!.readText().let {
connection.select<MigrationEntity>(it, object: TypeReference<List<MigrationEntity>>() {})
connection.select<MigrationEntity>(it, object : TypeReference<List<MigrationEntity>>() {})
.map { query ->
queries[query.filename] = Query(query.filename, query.up, query.down, connection, query.executedAt)
migrationsScripts[query.filename] = MigrationScript(query.filename, query.up, query.down, connection, query.executedAt)
}
}
}
@@ -83,7 +94,7 @@ data class Migrations private constructor(
/**
* Get all migration from multiples Directories
*/
private fun getMigrationFromDirectory(directory: List<File>) {
private fun getMigrationFromDirectory(directory: List<URI>) {
directory.forEach {
getMigrationFromDirectory(it)
}
@@ -92,29 +103,26 @@ data class Migrations private constructor(
/**
* Get all migration from Directory
*/
private fun getMigrationFromDirectory(directory: File) {
directory.walk().filter {
it.isFile
}.forEach { file ->
if (file.name.endsWith(".up.sql")) {
file.path.substring(0, file.path.size - 7).let {
try {
val down = File("$it.down.sql").readText()
val up = file.readText()
val name = file.name.substring(0, file.name.size - 7)
addQuery(name, up, down)
} catch (e: FileNotFoundException) {
throw DownMigrationNotDefined("$it.down.sql", e)
}
private fun getMigrationFromDirectory(directory: URI) {
val downs: MutableMap<String, DefinitionMigration> = mutableMapOf()
directory.searchSqlFiles().apply {
/* Set Down Migration */
forEach { migration ->
if (migration is DefinitionMigration && migration.direction == DefinitionMigration.Direction.DOWN) {
downs += migration.name to migration
}
} else if (file.name.endsWith(".down.sql")) {
// Nothing
} else {
val fileContent = file.readText()
try {
addFunction(fileContent)
} catch(e: FunctionNotFound) {
// Nothing
}
/* Set up migrations and functions */
forEach { migration ->
if (migration is DefinitionMigration && migration.direction == DefinitionMigration.Direction.UP) {
val down = downs[migration.name] ?: throw DownMigrationNotDefined(migration.name + ".down.sql")
downs -= migration.name
addMigrationScript(migration, down)
} else if (migration is DefinitionFunction) {
addFunction(migration)
}
}
}
@@ -122,13 +130,14 @@ data class Migrations private constructor(
enum class Direction { UP, DOWN }
internal class DownMigrationNotDefined(path: String, cause: FileNotFoundException):
internal class DownMigrationNotDefined(path: String, cause: FileNotFoundException? = null) :
Throwable("The file $path whas not found", cause)
fun addFunction(newDefinition: DefinitionFunction, callback: (Function) -> Unit = {}): Migrations {
val currentFunction = functions[newDefinition.name]
if (currentFunction === null || currentFunction `is different from` newDefinition) {
functions[newDefinition.name] = Function(newDefinition, newDefinition, connection).apply {
val oldDefinition = functions[newDefinition.name]?.up ?: newDefinition
functions[newDefinition.name] = Function(newDefinition, oldDefinition, connection).apply {
doExecute = Action.UP
}
} else {
@@ -145,18 +154,21 @@ data class Migrations private constructor(
return this
}
fun addQuery(name: String, up: String, down: String, callback: (Query) -> Unit = {}): Migrations {
if (queries[name] === null) {
queries[name] = Query(name, up, down, connection).apply {
fun addMigrationScript(up: DefinitionMigration, down: DefinitionMigration, callback: (MigrationScript) -> Unit = {}): Migrations =
addMigrationScript(up.name, up.script, down.script, callback)
fun addMigrationScript(name: String, up: String, down: String, callback: (MigrationScript) -> Unit = {}): Migrations {
if (migrationsScripts[name] === null) {
migrationsScripts[name] = MigrationScript(name, up, down, connection).apply {
doExecute = Action.UP
}
} else {
queries[name]!!.apply {
migrationsScripts[name]!!.apply {
doExecute = Action.OK
}
}
callback(queries[name]!!)
callback(migrationsScripts[name]!!)
return this
}
@@ -181,7 +193,7 @@ data class Migrations private constructor(
internal fun up(): Map<String, Status> {
val list: MutableMap<String, Status> = mutableMapOf()
queries.forEach {
migrationsScripts.forEach {
it.value.let { query ->
if (query.doExecute == Action.UP) {
query.up().let { status ->
@@ -206,7 +218,7 @@ data class Migrations private constructor(
internal fun down(force: Boolean = false): Map<String, Status> {
val list: MutableMap<String, Status> = mutableMapOf()
queries.forEach {
migrationsScripts.forEach {
it.value.let { query ->
if (query.doExecute == Action.DOWN || force) {
query.down().let { status ->
@@ -244,6 +256,7 @@ data class Migrations private constructor(
sendQuery("COMMIT")
}
logger?.info("Migration done")
reset()
return list.toMap()
}
@@ -264,6 +277,7 @@ data class Migrations private constructor(
sendQuery("COMMIT")
}
logger?.info("Migration DOWN done")
reset()
return list.toMap()
}
@@ -285,7 +299,7 @@ data class Migrations private constructor(
}
fun copy(): Migrations {
val queriesCopy = queries.map {
val queriesCopy = migrationsScripts.map {
it.key to it.value.copy()
}.toMap().toMutableMap()

View File

@@ -1,13 +0,0 @@
package fr.postgresjson.repository
import fr.postgresjson.connexion.Requester
import fr.postgresjson.entity.EntityI
import kotlin.reflect.KClass
/** Repository contract: knows its entity class and the [Requester] used to run queries. */
interface RepositoryI<E: EntityI<*>> {
    val entityName: KClass<E>
    var requester: Requester

    /** Simple (unqualified) name of the entity class. */
    fun getClassName(): String = entityName.simpleName!!
}

View File

@@ -0,0 +1,12 @@
package fr.postgresjson.repository
import fr.postgresjson.connexion.Requester
/** Minimal repository contract: read-only access to the shared [Requester]. */
interface RepositoryI {
    val requester: Requester

    // NOTE(review): lowercase constants look deliberate (presumably emitted
    // verbatim into SQL ORDER BY clauses) — confirm before renaming.
    enum class Direction {
        asc,
        desc
    }
}

View File

@@ -1,47 +1,37 @@
package fr.postgresjson.serializer
import com.fasterxml.jackson.core.JsonParser
import com.fasterxml.jackson.core.JsonProcessingException
import com.fasterxml.jackson.core.type.TypeReference
import com.fasterxml.jackson.databind.*
import com.fasterxml.jackson.databind.deser.std.StdDeserializer
import com.fasterxml.jackson.databind.DeserializationFeature
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.databind.PropertyNamingStrategy
import com.fasterxml.jackson.databind.SerializationFeature
import com.fasterxml.jackson.databind.module.SimpleModule
import com.fasterxml.jackson.datatype.joda.JodaModule
import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper
import com.fasterxml.jackson.module.kotlin.readValue
import fr.postgresjson.entity.EntitiesCollections
import fr.postgresjson.entity.EntityI
import fr.postgresjson.entity.IdEntity
import fr.postgresjson.entity.UuidEntity
import java.io.IOException
import java.util.*
import fr.postgresjson.entity.Serializable
class Serializer(val mapper: ObjectMapper = jacksonObjectMapper()) {
var collection: EntitiesCollections = EntitiesCollections()
init {
val module = SimpleModule()
module.addDeserializer(UuidEntity::class.java, EntityUuidDeserializer(collection))
module.addDeserializer(IdEntity::class.java, EntityIdDeserializer(collection))
mapper.registerModule(module)
mapper.propertyNamingStrategy = PropertyNamingStrategy.SNAKE_CASE
mapper.registerModule(JodaModule())
mapper.disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS)
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
}
fun <T> serialize(source: EntityI<T>, pretty: Boolean = false): String {
fun serialize(source: Any, pretty: Boolean = false): String {
return if (pretty) mapper.writerWithDefaultPrettyPrinter().writeValueAsString(source)
else mapper.writeValueAsString(source)
}
fun <E: EntityI<*>> deserialize(json: String, valueTypeRef: TypeReference<E>): E {
fun <E> deserialize(json: String, valueTypeRef: TypeReference<E>): E {
return this.mapper.readValue(json, valueTypeRef)
}
inline fun <reified E: EntityI<*>> deserialize(json: String): E? {
inline fun <reified E> deserialize(json: String): E? {
return this.mapper.readValue(json)
}
@@ -50,53 +40,15 @@ class Serializer(val mapper: ObjectMapper = jacksonObjectMapper()) {
}
inline fun <reified E> deserializeList(json: String): E {
return deserializeList(json, object: TypeReference<E>() {})
return deserializeList(json, object : TypeReference<E>() {})
}
fun <E: EntityI<*>> deserialize(json: String, target: E): E {
fun <E> deserialize(json: String, target: E): E {
return mapper.readerForUpdating(target).readValue<E>(json)
}
}
/** Serialize this entity to JSON using a fresh [Serializer]. */
fun <T> EntityI<T?>.serialize(pretty: Boolean = false) = Serializer().serialize(this, pretty)

/** Deserialize [json] into this existing entity instance (in-place update). */
inline fun <reified E: EntityI<*>> E.deserialize(json: String) = Serializer().deserialize(json, this)

/** Deserialize this JSON string into a new [E]. */
inline fun <reified E: EntityI<*>> String.deserialize() = Serializer().deserialize<E>(this)
/**
 * Jackson deserializer for [UuidEntity] subtypes: reuses the instance from the
 * shared [EntitiesCollections] cache when one with the same id is known,
 * otherwise falls back to default deserialization.
 */
class EntityUuidDeserializer<T: UuidEntity> @JvmOverloads constructor(vc: Class<*>? = null): StdDeserializer<T>(vc) {
    var collection: EntitiesCollections = EntitiesCollections()

    constructor(collection: EntitiesCollections): this() {
        this.collection = collection
    }

    @Throws(IOException::class, JsonProcessingException::class)
    override fun deserialize(jp: JsonParser, ctxt: DeserializationContext): T {
        val node = jp.codec.readTree<JsonNode>(jp)
        val uuid = UUID.fromString(node.get("id").asText())
        val cached = collection.get<UUID, UuidEntity>(uuid)
        return (cached ?: ctxt.readValue(jp, UuidEntity::class.javaObjectType)) as T
    }
}
/**
 * Jackson deserializer for [IdEntity] subtypes (Int ids): reuses the instance
 * from the shared [EntitiesCollections] cache when known, otherwise
 * deserializes a fresh one and stores it in the cache.
 */
class EntityIdDeserializer<T: IdEntity> @JvmOverloads constructor(vc: Class<*>? = null): StdDeserializer<T>(vc) {
    var collection: EntitiesCollections = EntitiesCollections()

    constructor(collection: EntitiesCollections): this() {
        this.collection = collection
    }

    @Throws(IOException::class, JsonProcessingException::class)
    override fun deserialize(jp: JsonParser, ctxt: DeserializationContext): T {
        val node = jp.codec.readTree<JsonNode>(jp)
        val id = node.get("id").asInt()
        val entity = collection.get<Int?, IdEntity>(id)
        // FIX: the fallback read IdEntity's type, not UuidEntity — the previous
        // UuidEntity::class was a copy-paste from EntityUuidDeserializer and made
        // the cast below fail on every cache miss.
        val obj = (entity ?: ctxt.readValue(jp, IdEntity::class.javaObjectType)) as EntityI<Int?>
        collection.set(obj)
        return obj as T
    }
}
/** Serialize any [Serializable] to JSON using a fresh [Serializer]. */
fun Serializable.serialize(pretty: Boolean = false) = Serializer().serialize(this, pretty)

/** Serialize a list of [Serializable] to a JSON array. */
fun List<Serializable>.serialize(pretty: Boolean = false) = Serializer().serialize(this, pretty)

/** Deserialize [json] into this existing instance (in-place update). */
inline fun <reified E : Serializable> E.deserialize(json: String) = Serializer().deserialize(json, this)

/** Deserialize this JSON string into a new [E]. */
inline fun <reified E : Serializable> String.deserialize() = Serializer().deserialize<E>(this)

View File

@@ -5,6 +5,6 @@ import org.slf4j.LoggerFactory
import kotlin.properties.ReadOnlyProperty
import kotlin.reflect.KProperty
/** Property delegate that lazily provides a logger named after the owner's package. */
// FIX: diff residue had left both the old and new class-header lines in place
// (a duplicate declaration); only the spaced-colon revision is kept.
internal class LoggerDelegate<in R : Any> : ReadOnlyProperty<R, Logger> {
    override fun getValue(thisRef: R, property: KProperty<*>) = LoggerFactory.getLogger(thisRef.javaClass.packageName)
}

View File

@@ -0,0 +1,51 @@
package fr.postgresjson.utils
import fr.postgresjson.definition.Resource
import org.slf4j.Logger
import org.slf4j.LoggerFactory
import java.net.URI
import java.net.URL
import java.nio.file.FileSystemNotFoundException
import java.nio.file.FileSystems
import java.nio.file.FileVisitOption
import java.nio.file.Files
import java.nio.file.Path
import kotlin.streams.asSequence
fun URL.searchSqlFiles() = this.toURI().searchSqlFiles()

/**
 * Lazily walk this URI (a filesystem directory or a "jar:" URI) up to 5 levels
 * deep and emit a parsed [Resource] for every *.sql file found.
 */
fun URI.searchSqlFiles() = sequence<Resource> {
    val logger: Logger = LoggerFactory.getLogger("sqlFilesSearch")
    val uri: URI = this@searchSqlFiles
    logger.debug("""SQL files found in "${uri.toString().substringAfter('!')}" :""")
    if (uri.scheme == "jar") {
        /* A jar URI needs its zip FileSystem opened before Path.of(uri) can resolve. */
        try {
            FileSystems.getFileSystem(uri)
        } catch (e: FileSystemNotFoundException) {
            FileSystems.newFileSystem(uri, emptyMap<String, Any>())
        }
        // FIX: the Files.walk stream was never closed (its javadoc requires it);
        // wrap traversal in use {} so the directory handles are released.
        Files.walk(Path.of(uri), 5).use { stream ->
            for (path in stream) {
                if (!path.fileName.toString().endsWith(".sql")) continue
                val url = path.toUri().toURL()
                logger.debug(url.toString())
                yield(Resource.build(url))
            }
        }
    } else {
        Files.walk(Path.of(uri), 5).use { stream ->
            for (path in stream) {
                val file = path.toFile()
                if (!(file.isFile && file.extension == "sql")) continue
                logger.debug(file.toString())
                yield(Resource.build(file))
            }
        }
    }
}

View File

@@ -1,32 +1,27 @@
package fr.postgresjson
import fr.postgresjson.connexion.Connection
import fr.postgresjson.connexion.Paginated
import fr.postgresjson.entity.IdEntity
import fr.postgresjson.entity.UuidEntity
import fr.postgresjson.entity.Parameter
import org.junit.Assert.*
import org.junit.jupiter.api.Assertions
import org.junit.jupiter.api.BeforeEach
import org.junit.jupiter.api.Test
import org.junit.jupiter.api.TestInstance
import java.util.*
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
class ConnectionTest(): TestAbstract() {
private class ObjTest(var name: String): IdEntity()
private class ObjTest2(var title: String, var test: ObjTest?): IdEntity()
private class ObjTest3(var first: String, var seconde: String, var third: Int): IdEntity()
private lateinit var connection: Connection
@BeforeEach
fun before() {
connection = getConnextion()
}
class ConnectionTest() : TestAbstract() {
private class ObjTest(val name: String, id: UUID = UUID.fromString("2c0243ed-ff4d-4b9f-a52b-e38c71b0ed00")) : UuidEntity(id)
private class ObjTest2(val title: String, var test: ObjTest?) : UuidEntity()
private class ObjTest3(val first: String, var seconde: String, var third: Int) : UuidEntity()
private class ObjTestWithParameterObject(var first: ParameterObject, var seconde: ParameterObject) : UuidEntity()
private class ParameterObject(var third: String) : Parameter
@Test
fun getObject() {
val obj: ObjTest? = connection.selectOne("select to_json(a) from test a limit 1")
assertTrue(obj is ObjTest)
assertTrue(obj!!.id == 1)
assertTrue(obj!!.id == UUID.fromString("1e5f5d41-6d14-4007-897b-0ed2616bec96"))
}
@Test
@@ -44,34 +39,32 @@ class ConnectionTest(): TestAbstract() {
""".trimIndent()
)
assertNotNull(objs)
assertTrue(objs is List<ObjTest2>)
assertEquals(objs!!.size, 2)
assertEquals(objs[0].id, 1)
assertEquals(objs[0].test!!.id, 1)
assertEquals(objs.size, 2)
assertEquals(objs[0].id, UUID.fromString("1e5f5d41-6d14-4007-897b-0ed2616bec96"))
assertEquals(objs[0].test!!.id, UUID.fromString("1e5f5d41-6d14-4007-897b-0ed2616bec96"))
}
@Test
fun callRequestWithArgs() {
val result: ObjTest? = connection.selectOne("select json_build_object('id', 1, 'name', ?::text)", listOf("myName"))
val result: ObjTest? = connection.selectOne("select json_build_object('id', '2c0243ed-ff4d-4b9f-a52b-e38c71b0ed00', 'name', ?::text)", listOf("myName"))
assertNotNull(result)
assertEquals("myName", result!!.name)
}
@Test
fun callRequestWithArgsEntity() {
val o = ObjTest("myName")
o.id = 88
val obj: ObjTest? = connection.selectOne("select json_build_object('id', id, 'name', name) FROM json_to_record(?::json) as o(id int, name text);", listOf(o))
val o = ObjTest("myName", id = UUID.fromString("2c0243ed-ff4d-4b9f-a52b-e38c71b0ed00"))
val obj: ObjTest? = connection.selectOne("select json_build_object('id', id, 'name', name) FROM json_to_record(?::json) as o(id uuid, name text);", listOf(o))
assertNotNull(obj)
assertTrue(obj is ObjTest)
assertEquals(obj!!.id, 88)
assertEquals(obj!!.id, UUID.fromString("2c0243ed-ff4d-4b9f-a52b-e38c71b0ed00"))
assertEquals(obj.name, "myName")
}
@Test
fun callExec() {
val o = ObjTest("myName")
val result = connection.exec("select json_build_object('id', 1, 'name', ?::json->>'name')", listOf(o))
val result = connection.exec("select json_build_object('id', '2c0243ed-ff4d-4b9f-a52b-e38c71b0ed00', 'name', ?::json->>'name')", listOf(o))
Assertions.assertEquals(1, result.rowsAffected)
}
@@ -90,6 +83,19 @@ class ConnectionTest(): TestAbstract() {
assertEquals(result.third, 123)
}
@Test
fun `select one with named parameters object`() {
val result: ObjTestWithParameterObject? = connection.selectOne(
"SELECT json_build_object('first', :first::json, 'seconde', :seconde::json)",
mapOf(
"first" to ParameterObject("one"),
"seconde" to ParameterObject("two")
)
)
assertEquals("one", result!!.first.third)
assertEquals("two", result.seconde.third)
}
@Test
fun `select with named parameters`() {
val params: Map<String, Any?> = mapOf(
@@ -135,8 +141,8 @@ class ConnectionTest(): TestAbstract() {
val result: Paginated<ObjTest> = connection.select(
"""
SELECT json_build_array(
json_build_object('id', 3, 'name', :name::text),
json_build_object('id', 4, 'name', :name::text || '-2')
json_build_object('id', '417aaa7e-7bc6-49b7-9fe8-6c8433b3f430', 'name', :name::text),
json_build_object('id', 'abd46e7a-e749-4ce4-8361-e7b64da89da6', 'name', :name::text || '-2')
), 10 as total
LIMIT :limit OFFSET :offset
""".trimIndent(),

View File

@@ -4,22 +4,23 @@ import fr.postgresjson.entity.*
import org.junit.jupiter.api.Assertions.assertTrue
import org.junit.jupiter.api.Test
import org.junit.jupiter.api.TestInstance
import java.util.*
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
class EntityTest() {
private class User(override var id: Int?): EntityI<Int?>
private class ObjTest(var name: String): EntityExtended<Int?, User>()
private class User(id: UUID = UUID.randomUUID()) : Entity<UUID>(id)
private class ObjTest(var name: String) : UuidEntityExtended<Int?, User>(User(), User())
@Test
fun getObject() {
val obj: ObjTest? = ObjTest("plop")
assertTrue(obj is ObjTest)
assertTrue(obj is EntityExtended<Int?, User>)
assertTrue(obj is EntityI<Int?>)
assertTrue(obj is Entity<Int?>)
assertTrue(obj is UuidEntityExtended<Int?, User>)
assertTrue(obj is EntityI)
assertTrue(obj is Entity<UUID>)
assertTrue(obj is Published<User>)
assertTrue(obj is CreatedBy<User>)
assertTrue(obj is UpdatedBy<User>)
assertTrue(obj is EntityCreatedBy<User>)
assertTrue(obj is EntityUpdatedBy<User>)
assertTrue(obj is EntityCreatedAt)
assertTrue(obj is EntityUpdatedAt)
}

View File

@@ -10,14 +10,14 @@ import org.amshove.kluent.shouldThrow
import org.junit.jupiter.api.Assertions
import org.junit.jupiter.api.Test
import org.junit.jupiter.api.TestInstance
import java.io.File
import java.util.*
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
class MigrationTest(): TestAbstract() {
class MigrationTest() : TestAbstract() {
@Test
fun `run up query`() {
val resources = File(this::class.java.getResource("/sql/migrations").toURI())
val m = Migrations(resources, getConnextion())
val resources = this::class.java.getResource("/sql/migrations").toURI()
val m = Migrations(connection, resources)
m.up().apply {
this `should contain` Pair("1", Migration.Status.OK)
size `should be equal to` 1
@@ -28,16 +28,16 @@ class MigrationTest(): TestAbstract() {
@Test
fun `migration up Query should throw error if no down`() {
val resources = File(this::class.java.getResource("/sql/migration_without_down").toURI())
val resources = this::class.java.getResource("/sql/migration_without_down").toURI()
invoking {
Migrations(resources, getConnextion())
Migrations(resources, connection)
} shouldThrow Migrations.DownMigrationNotDefined::class
}
@Test
fun `run forced down query`() {
val resources = File(this::class.java.getResource("/sql/migrations").toURI())
val m = Migrations(resources, getConnextion())
val resources = this::class.java.getResource("/sql/migrations").toURI()
val m = Migrations(resources, connection)
repeat(3) {
m.down(true).apply {
this `should contain` Pair("1", Migration.Status.OK)
@@ -48,19 +48,19 @@ class MigrationTest(): TestAbstract() {
@Test
fun `run dry migrations`() {
val resources = File(this::class.java.getResource("/sql/real_migrations").toURI())
Migrations(resources, getConnextion()).apply {
val resources = this::class.java.getResource("/sql/real_migrations").toURI()
Migrations(resources, connection).apply {
runDry().size `should be equal to` 2
}
Migrations(resources, getConnextion()).apply {
Migrations(resources, connection).apply {
runDry().size `should be equal to` 2
}
}
@Test
fun `run dry migrations launch twice`() {
val resources = File(this::class.java.getResource("/sql/real_migrations").toURI())
Migrations(resources, getConnextion()).apply {
val resources = this::class.java.getResource("/sql/real_migrations").toURI()
Migrations(resources, connection).apply {
runDry().size `should be equal to` 2
runDry().size `should be equal to` 2
}
@@ -68,8 +68,8 @@ class MigrationTest(): TestAbstract() {
@Test
fun `run migrations`() {
val resources = File(this::class.java.getResource("/sql/real_migrations").toURI())
Migrations(resources, getConnextion()).apply {
val resources = this::class.java.getResource("/sql/real_migrations").toURI()
Migrations(resources, connection).apply {
run().apply {
size `should be equal to` 1
}
@@ -78,32 +78,54 @@ class MigrationTest(): TestAbstract() {
@Test
fun `run migrations force down`() {
val resources = File(this::class.java.getResource("/sql/real_migrations").toURI())
Migrations(resources, getConnextion()).apply {
val resources = this::class.java.getResource("/sql/real_migrations").toURI()
val resourcesFunctions = this::class.java.getResource("/sql/function/Test").toURI()
Migrations(listOf(resources, resourcesFunctions), connection).apply {
up().apply {
size `should be equal to` 1
size `should be equal to` 6
}
}
Migrations(resources, getConnextion()).apply {
Migrations(listOf(resources, resourcesFunctions), connection).apply {
forceAllDown().apply {
size `should be equal to` 1
size `should be equal to` 6
}
}
}
@Test
fun `run functions migrations`() {
val resources = File(this::class.java.getResource("/sql/function").toURI())
Migrations(resources, getConnextion()).apply {
run().size `should be equal to` 4
val resources = this::class.java.getResource("/sql/function/Test").toURI()
Migrations(resources, connection).apply {
run().size `should be equal to` 5
}
val objTest: RequesterTest.ObjTest? = Requester(getConnextion())
val objTest: RequesterTest.ObjTest? = Requester(connection)
.addFunction(resources)
.getFunction("test_function")
.selectOne(listOf("test", "plip"))
Assertions.assertEquals(objTest!!.id, 3)
Assertions.assertEquals(objTest!!.id, UUID.fromString("457daad5-4f1b-4eb7-80ec-6882adb8cc7d"))
Assertions.assertEquals(objTest.name, "test")
}
@Test
fun `run functions migrations and drop if exist`() {
val resources = this::class.java.getResource("/sql/function/Test1").toURI()
Migrations(resources, connection).apply {
run().size `should be equal to` 1
}
val objTest: RequesterTest.ObjTest? = Requester(connection)
.addFunction(resources)
.getFunction("test_function_duplicate")
.selectOne(listOf("test"))
Assertions.assertEquals(objTest!!.id, UUID.fromString("457daad5-4f1b-4eb7-80ec-6882adb8cc7d"))
Assertions.assertEquals(objTest.name, "test")
val resources2 = this::class.java.getResource("/sql/function/Test2").toURI()
Migrations(resources2, connection).apply {
run().size `should be equal to` 1
}
}
}

View File

@@ -2,45 +2,45 @@ package fr.postgresjson
import fr.postgresjson.connexion.Paginated
import fr.postgresjson.connexion.Requester
import fr.postgresjson.entity.IdEntity
import fr.postgresjson.entity.UuidEntity
import org.junit.Assert
import org.junit.jupiter.api.Assertions.assertEquals
import org.junit.jupiter.api.Test
import java.io.File
import java.util.*
class RequesterTest: TestAbstract() {
class ObjTest(var name:String): IdEntity(1)
class RequesterTest : TestAbstract() {
class ObjTest(var name: String, id: UUID = UUID.fromString("5623d902-3067-42f3-bfd9-095dbb12c29f")) : UuidEntity(id)
@Test
fun `get query from file`() {
val resources = File(this::class.java.getResource("/sql/query").toURI())
val objTest: ObjTest? = Requester(getConnextion())
val resources = this::class.java.getResource("/sql/query").toURI()
val objTest: ObjTest? = Requester(connection)
.addQuery(resources)
.getQuery("Test/selectOne")
.getQuery("selectOne")
.selectOne()
assertEquals(objTest!!.id, 2)
assertEquals(objTest!!.id, UUID.fromString("829b1a29-5db8-47f9-9562-961c561ac528"))
assertEquals(objTest.name, "test")
}
@Test
fun `get function from file`() {
val resources = File(this::class.java.getResource("/sql/function").toURI())
val objTest: ObjTest? = Requester(getConnextion())
val resources = this::class.java.getResource("/sql/function/Test").toURI()
val objTest: ObjTest? = Requester(connection)
.addFunction(resources)
.getFunction("test_function")
.selectOne(listOf("test", "plip"))
assertEquals(objTest!!.id, 3)
assertEquals(objTest!!.id, UUID.fromString("457daad5-4f1b-4eb7-80ec-6882adb8cc7d"))
assertEquals(objTest.name, "test")
}
@Test
fun `call exec on query`() {
val resources = File(this::class.java.getResource("/sql/query").toURI())
val result = Requester(getConnextion())
val resources = this::class.java.getResource("/sql/query").toURI()
val result = Requester(connection)
.addQuery(resources)
.getQuery("Test/selectOne")
.getQuery("selectOne")
.exec()
assertEquals(1, result.rowsAffected)
@@ -48,8 +48,8 @@ class RequesterTest: TestAbstract() {
@Test
fun `call exec on function`() {
val resources = File(this::class.java.getResource("/sql/function").toURI())
val result = Requester(getConnextion())
val resources = this::class.java.getResource("/sql/function/Test").toURI()
val result = Requester(connection)
.addFunction(resources)
.getFunction("test_function")
.exec(listOf("test", "plip"))
@@ -57,10 +57,32 @@ class RequesterTest: TestAbstract() {
assertEquals(1, result.rowsAffected)
}
@Test
fun `call sendQuery on query with name`() {
val resources = this::class.java.getResource("/sql/query").toURI()
val result = Requester(connection)
.addQuery(resources)
.getQuery("DeleteTest")
.sendQuery()
assertEquals(0, result)
}
@Test
fun `call sendQuery on function`() {
val resources = this::class.java.getResource("/sql/function/Test").toURI()
val result = Requester(connection)
.addFunction(resources)
.getFunction("function_void")
.sendQuery(listOf("test"))
assertEquals(0, result)
}
@Test
fun `call selectOne on function`() {
val resources = File(this::class.java.getResource("/sql/function").toURI())
val obj: ObjTest = Requester(getConnextion())
val resources = this::class.java.getResource("/sql/function/Test").toURI()
val obj: ObjTest = Requester(connection)
.addFunction(resources)
.getFunction("test_function")
.selectOne(mapOf("name" to "myName"))!!
@@ -70,9 +92,9 @@ class RequesterTest: TestAbstract() {
@Test
fun `call selectOne on function with object`() {
val resources = File(this::class.java.getResource("/sql/function").toURI())
val resources = this::class.java.getResource("/sql/function/Test").toURI()
val obj2 = ObjTest("original")
val obj: ObjTest = Requester(getConnextion())
val obj: ObjTest = Requester(connection)
.addFunction(resources)
.getFunction("test_function_object")
.selectOne("resource" to obj2)!!
@@ -83,10 +105,10 @@ class RequesterTest: TestAbstract() {
@Test
fun `call selectOne on query`() {
val resources = File(this::class.java.getResource("/sql/query").toURI())
val obj: ObjTest = Requester(getConnextion())
val resources = this::class.java.getResource("/sql/query").toURI()
val obj: ObjTest = Requester(connection)
.addQuery(resources)
.getQuery("Test/selectOneWithParameters")
.getQuery("selectOneWithParameters")
.selectOne(mapOf("name" to "myName"))!!
assertEquals("myName", obj.name)
@@ -94,8 +116,8 @@ class RequesterTest: TestAbstract() {
@Test
fun `call select (multiple) on function`() {
val resources = File(this::class.java.getResource("/sql/function").toURI())
val obj: List<ObjTest>? = Requester(getConnextion())
val resources = this::class.java.getResource("/sql/function/Test").toURI()
val obj: List<ObjTest>? = Requester(connection)
.addFunction(resources)
.getFunction("test_function_multiple")
.select(mapOf("name" to "myName"))
@@ -105,38 +127,38 @@ class RequesterTest: TestAbstract() {
@Test
fun `call select paginated on query`() {
val resources = File(this::class.java.getResource("/sql/query").toURI())
val result: Paginated<ObjTest> = Requester(getConnextion())
val resources = this::class.java.getResource("/sql/query").toURI()
val result: Paginated<ObjTest> = Requester(connection)
.addQuery(resources)
.getQuery("Test/selectPaginated")
.getQuery("selectPaginated")
.select(1, 2, mapOf("name" to "ff"))
Assert.assertNotNull(result)
Assert.assertEquals(result.result[0].name, "ff")
Assert.assertEquals(result.result[1].name, "ff-2")
Assert.assertEquals(result.total, 10)
Assert.assertEquals(result.offset, 0)
Assert.assertEquals("ff", result.result[0].name)
Assert.assertEquals("ff-2", result.result[1].name)
Assert.assertEquals(10, result.total)
Assert.assertEquals(0, result.offset)
}
@Test
fun `call select paginated on function`() {
val resources = File(this::class.java.getResource("/sql/function").toURI())
val result: Paginated<ObjTest> = Requester(getConnextion())
val resources = this::class.java.getResource("/sql/function").toURI()
val result: Paginated<ObjTest> = Requester(connection)
.addFunction(resources)
.getFunction("test_function_paginated")
.select(1, 2, mapOf("name" to "ff"))
Assert.assertNotNull(result)
Assert.assertEquals(result.result[0].name, "ff")
Assert.assertEquals(result.result[1].name, "ff-2")
Assert.assertEquals(result.total, 10)
Assert.assertEquals(result.offset, 0)
Assert.assertEquals("ff", result.result[0].name)
Assert.assertEquals("ff-2", result.result[1].name)
Assert.assertEquals(10, result.total)
Assert.assertEquals(0, result.offset)
}
@Test
fun `call selectOne on query with extra parameter`() {
val resources = File(this::class.java.getResource("/sql/query").toURI())
val obj: ObjTest = Requester(getConnextion())
val resources = this::class.java.getResource("/sql/query").toURI()
val obj: ObjTest = Requester(connection)
.addQuery(resources)
.getQuery("Test/selectOneWithParameters")
.getQuery("selectOneWithParameters")
.selectOne(mapOf("name" to "myName")) {
assertEquals("myName", it!!.name)
Assert.assertEquals("plop", rows[0].getString("other"))

View File

@@ -1,6 +1,6 @@
package fr.postgresjson
import fr.postgresjson.entity.IdEntity
import fr.postgresjson.entity.UuidEntity
import fr.postgresjson.serializer.Serializer
import fr.postgresjson.serializer.deserialize
import fr.postgresjson.serializer.serialize
@@ -10,23 +10,23 @@ import org.junit.jupiter.api.Assertions.assertTrue
import org.junit.jupiter.api.BeforeEach
import org.junit.jupiter.api.Test
import org.junit.jupiter.api.TestInstance
import java.util.*
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
internal class SerializerTest: TestAbstract() {
private class ObjTest(var val1: String, var val2: Int) : IdEntity(1)
private class ObjTestDate(var val1: DateTime) : IdEntity(2)
internal class SerializerTest {
private class ObjTest(var val1: String, var val2: Int, id: UUID = UUID.fromString("1e5f5d41-6d14-4007-897b-0ed2616bec96")) : UuidEntity(id)
private class ObjTestDate(var val1: DateTime, id: UUID = UUID.fromString("829b1a29-5db8-47f9-9562-961c561ac528")) : UuidEntity(id)
private val serializer = Serializer()
private val objSerialized: String = """{"val1":"plop","val2":123,"id":2}"""
private val objSerializedWithExtra: String = """{"val1":"plop","val2":123,"id":2,"toto":"tata"}"""
private val objSerialized: String = """{"val1":"plop","val2":123,"id":"829b1a29-5db8-47f9-9562-961c561ac528"}"""
private val objSerializedWithExtra: String = """{"val1":"plop","val2":123,"id":"829b1a29-5db8-47f9-9562-961c561ac528","toto":"tata"}"""
private val objSerializedUpdate = """{"val1":"update","val2":123}"""
private lateinit var obj: ObjTest
@BeforeEach
fun before() {
obj = ObjTest("plop", 123)
obj.id = 2
obj = ObjTest("plop", 123, UUID.fromString("829b1a29-5db8-47f9-9562-961c561ac528"))
}
@Test
@@ -41,6 +41,14 @@ internal class SerializerTest: TestAbstract() {
assertTrue(json.contains(""""val1":"plop","val2":123"""))
}
@Test
fun serializeList() {
val list = listOf(ObjTest("one", 1), ObjTest("two", 2))
val json = list.serialize()
assertTrue(json.contains(""""val1":"one","val2":1"""))
assertTrue(json.contains(""""val1":"two","val2":2"""))
}
@Test
fun serializeDate() {
val objDate = ObjTestDate(DateTime.parse("2019-07-30T14:08:51.420108+04:00"))

View File

@@ -9,20 +9,22 @@ import java.io.File
@TestInstance(PER_CLASS)
abstract class TestAbstract {
protected fun getConnextion(): Connection {
return Connection(database = "test", username = "test", password = "test")
}
protected val connection = Connection(database = "json_test", username = "test", password = "test", port = 5555)
@BeforeEach
fun beforeAll() {
val initSQL = File(this::class.java.getResource("/fixtures/init.sql").toURI())
val promise = getConnextion().connect().sendQuery(initSQL.readText())
promise.join()
connection
.connect()
.sendQuery(initSQL.readText())
.join()
}
@AfterEach
fun afterAll() {
val downSQL = File(this::class.java.getResource("/fixtures/down.sql").toURI())
getConnextion().connect().sendQuery(downSQL.readText()).join()
connection.connect().apply {
sendQuery(downSQL.readText()).join()
}.disconnect()
}
}

View File

@@ -8,7 +8,7 @@ create schema if not exists public;
create table if not exists test
(
id serial not null
id uuid not null
constraint test_pk
primary key,
name text
@@ -16,24 +16,24 @@ create table if not exists test
create table if not exists test2
(
id serial not null,
id uuid not null,
title text,
test_id integer
test_id uuid
constraint test2_test_id_fk
references test
);
INSERT INTO test (id, name) VALUES (1, 'plop') ON CONFLICT DO NOTHING;
INSERT INTO test2 (id, title, test_id) VALUES (1, 'plop', 1) ON CONFLICT DO NOTHING;
INSERT INTO test2 (id, title, test_id) VALUES (2, 'plip', 1) ON CONFLICT DO NOTHING;
INSERT INTO test2 (id, title, test_id) VALUES (3, 'ttt', null) ON CONFLICT DO NOTHING;
INSERT INTO test (id, name) VALUES ('1e5f5d41-6d14-4007-897b-0ed2616bec96', 'plop') ON CONFLICT DO NOTHING;
INSERT INTO test2 (id, title, test_id) VALUES ('1e5f5d41-6d14-4007-897b-0ed2616bec96', 'plop', '1e5f5d41-6d14-4007-897b-0ed2616bec96') ON CONFLICT DO NOTHING;
INSERT INTO test2 (id, title, test_id) VALUES ('829b1a29-5db8-47f9-9562-961c561ac528', 'plip', '1e5f5d41-6d14-4007-897b-0ed2616bec96') ON CONFLICT DO NOTHING;
INSERT INTO test2 (id, title, test_id) VALUES ('457daad5-4f1b-4eb7-80ec-6882adb8cc7d', 'ttt', null) ON CONFLICT DO NOTHING;
CREATE OR REPLACE FUNCTION test_function (name text default 'plop', IN hi text default 'hello', out result json)
LANGUAGE plpgsql
AS
$$
BEGIN
result = json_build_object('id', 3, 'name', name);
result = json_build_object('id', '457daad5-4f1b-4eb7-80ec-6882adb8cc7d', 'name', name);
END;
$$;
@@ -44,8 +44,8 @@ AS
$$
BEGIN
result = json_build_array(
json_build_object('id', 3, 'name', name),
json_build_object('id', 4, 'name', hi)
json_build_object('id', '457daad5-4f1b-4eb7-80ec-6882adb8cc7d', 'name', name),
json_build_object('id', '8d20abb0-7f77-4b6c-9991-44acd3c88faa', 'name', hi)
);
END;
$$;
@@ -56,8 +56,8 @@ AS
$$
BEGIN
SELECT json_build_array(
json_build_object('id', 3, 'name', name::text),
json_build_object('id', 4, 'name', name::text || '-2')
json_build_object('id', '457daad5-4f1b-4eb7-80ec-6882adb8cc7d', 'name', name::text),
json_build_object('id', '8d20abb0-7f77-4b6c-9991-44acd3c88faa', 'name', name::text || '-2')
),
10
INTO result, total
@@ -70,6 +70,15 @@ CREATE OR REPLACE FUNCTION test_function_object (inout resource json)
AS
$$
BEGIN
resource = json_build_object('id', 1, 'name', 'changedName');
resource = json_build_object('id', '1e5f5d41-6d14-4007-897b-0ed2616bec96', 'name', 'changedName');
END;
$$;
$$;
CREATE OR REPLACE FUNCTION function_void (name text default 'plop') returns void
LANGUAGE plpgsql
AS
$$
BEGIN
PERFORM 1;
END;
$$

View File

@@ -3,6 +3,6 @@ LANGUAGE plpgsql
AS
$$
BEGIN
result = json_build_object('id', 3, 'name', name);
result = json_build_object('id', '457daad5-4f1b-4eb7-80ec-6882adb8cc7d', 'name', name);
END;
$$

View File

@@ -3,6 +3,6 @@ LANGUAGE plpgsql
AS
$$
BEGIN
resource = json_build_object('id', 1, 'name', 'changedName');
resource = json_build_object('id', '1e5f5d41-6d14-4007-897b-0ed2616bec96', 'name', 'changedName');
END;
$$

View File

@@ -0,0 +1,8 @@
CREATE OR REPLACE FUNCTION function_void (name text default 'plop') returns void
LANGUAGE plpgsql
AS
$$
BEGIN
PERFORM 1;
END;
$$;

View File

@@ -4,8 +4,8 @@ AS
$$
BEGIN
result = json_build_array(
json_build_object('id', 3, 'name', name),
json_build_object('id', 4, 'name', hi)
json_build_object('id', '457daad5-4f1b-4eb7-80ec-6882adb8cc7d', 'name', name),
json_build_object('id', '8d20abb0-7f77-4b6c-9991-44acd3c88faa', 'name', hi)
);
END;
$$

View File

@@ -4,8 +4,8 @@ AS
$$
BEGIN
SELECT json_build_array(
json_build_object('id', 3, 'name', name::text),
json_build_object('id', 4, 'name', name::text || '-2')
json_build_object('id', '457daad5-4f1b-4eb7-80ec-6882adb8cc7d', 'name', name::text),
json_build_object('id', '8d20abb0-7f77-4b6c-9991-44acd3c88faa', 'name', name::text || '-2')
),
10
INTO result, total

View File

@@ -0,0 +1,8 @@
CREATE OR REPLACE FUNCTION test_function_duplicate (name text default 'plop') returns json
LANGUAGE plpgsql
AS
$$
BEGIN
return json_build_object('id', '457daad5-4f1b-4eb7-80ec-6882adb8cc7d', 'name', name);
END;
$$

View File

@@ -0,0 +1,8 @@
CREATE OR REPLACE FUNCTION test_function_duplicate (name text default 'plop', out result text)
LANGUAGE plpgsql
AS
$$
BEGIN
result = name;
END;
$$

View File

@@ -1 +1,5 @@
SELECT 1;
do $$
begin
PERFORM 1;
end;
$$

View File

@@ -1 +1,5 @@
SELECT 1;
do $$
begin
PERFORM 1;
end;
$$

View File

@@ -1 +1,5 @@
SELECT 1;
do $$
begin
PERFORM 1;
end;
$$

View File

@@ -0,0 +1,2 @@
-- name: DeleteTest
delete FROM test where 2038538 = 2;

View File

@@ -1 +1 @@
select json_build_object('id', 2, 'name', 'test');
select json_build_object('id', '829b1a29-5db8-47f9-9562-961c561ac528', 'name', 'test');

View File

@@ -1 +1 @@
select json_build_object('id', 2, 'name', :name::text), 'plop'::text as other;
select json_build_object('id', '829b1a29-5db8-47f9-9562-961c561ac528', 'name', :name::text), 'plop'::text as other;

View File

@@ -1,5 +1,5 @@
SELECT json_build_array(
json_build_object('id', 3, 'name', :name::text),
json_build_object('id', 4, 'name', :name::text || '-2')
json_build_object('id', '457daad5-4f1b-4eb7-80ec-6882adb8cc7d', 'name', :name::text),
json_build_object('id', '6085c12e-e94d-4ae1-b7ad-23acc7a82a98', 'name', :name::text || '-2')
), 10 as total
LIMIT :limit OFFSET :offset