13 Commits

Author SHA1 Message Date
8a452a6897 Add ShadowJar compatibility; Query files can now define their name with a comment
Add Migration and Query definition classes
Add Docker DB for tests
2020-05-10 21:45:50 +02:00
Fabrice Lecomte
4c73ef1e0f set JVM to 11 for jitpack 2020-03-25 16:35:18 +01:00
Fabrice Lecomte
4c03eb7c87 set JVM to 11 2020-03-25 16:25:55 +01:00
Fabrice Lecomte
07678afbe7 set JVM to 11 2020-03-25 16:18:17 +01:00
Fabrice Lecomte
d3bcfdd627 config package 2020-03-25 15:26:19 +01:00
Fabrice Lecomte
deea5153f2 add method function.perform 2020-03-25 00:30:54 +01:00
Fabrice Lecomte
05fb868574 Add serialise list 2020-03-15 20:18:55 +01:00
Fabrice Lecomte
bbeec7bb60 add method "isLastPage" to pagination 2020-02-24 20:41:29 +01:00
Fabrice Lecomte
012beb6884 Improve SQL log message 2020-02-19 16:46:20 +01:00
Fabrice Lecomte
45707f9734 change Idea config 2020-02-19 12:46:04 +01:00
Fabrice Lecomte
4e89aa072d RepositoryI::requester is now readonly 2020-02-11 20:39:43 +01:00
Fabrice Lecomte
b65e82a52f Improve UuidEntityVersioning 2020-01-29 17:03:22 +01:00
Fabrice Lecomte
898951e91a Add Immutable Entities 2020-01-23 00:40:47 +01:00
43 changed files with 1288 additions and 273 deletions

1
.gitignore vendored
View File

@@ -3,3 +3,4 @@
/build/
.gradle
/var/log/
/.idea/*

View File

@@ -1,7 +1,13 @@
<component name="ProjectCodeStyleConfiguration">
<code_scheme name="Project" version="173">
<JetCodeStyleSettings>
<option name="PACKAGES_TO_USE_STAR_IMPORTS">
<value>
<package name="java.util" withSubpackages="true" static="false" />
</value>
</option>
<option name="SPACE_BEFORE_EXTEND_COLON" value="false" />
<option name="NAME_COUNT_TO_USE_STAR_IMPORT" value="2147483647" />
<option name="CODE_STYLE_DEFAULTS" value="KOTLIN_OFFICIAL" />
</JetCodeStyleSettings>
<codeStyleSettings language="kotlin">

2
.idea/gradle.xml generated
View File

@@ -8,13 +8,11 @@
<option name="testRunner" value="PLATFORM" />
<option name="distributionType" value="DEFAULT_WRAPPED" />
<option name="externalProjectPath" value="$PROJECT_DIR$" />
<option name="gradleHome" value="$PROJECT_DIR$" />
<option name="modules">
<set>
<option value="$PROJECT_DIR$" />
</set>
</option>
<option name="useAutoImport" value="true" />
<option name="useQualifiedModuleNames" value="true" />
</GradleProjectSettings>
</option>

6
.idea/kotlinc.xml generated Normal file
View File

@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="Kotlin2JvmCompilerArguments">
<option name="jvmTarget" value="11" />
</component>
</project>

6
.idea/misc.xml generated
View File

@@ -1,7 +1,9 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ExternalStorageConfigurationManager" enabled="true" />
<component name="ProjectRootManager" version="2" languageLevel="JDK_11" project-jdk-name="11" project-jdk-type="JavaSDK">
<component name="FrameworkDetectionExcludesConfiguration">
<file type="web" url="file://$PROJECT_DIR$" />
</component>
<component name="ProjectRootManager" version="2" languageLevel="JDK_11" default="true" project-jdk-name="adopt-openjdk-11" project-jdk-type="JavaSDK">
<output url="file://$PROJECT_DIR$/out" />
</component>
<component name="TaskProjectConfiguration">

8
.idea/modules.xml generated
View File

@@ -1,8 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/postgres-json.iml" filepath="$PROJECT_DIR$/.idea/postgres-json.iml" />
</modules>
</component>
</project>

View File

@@ -1,11 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="JAVA_MODULE" version="4">
<component name="NewModuleRootManager" inherit-compiler-output="true">
<exclude-output />
<content url="file://$MODULE_DIR$">
<excludeFolder url="file://$MODULE_DIR$/var" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
</module>

View File

@@ -1,7 +1,7 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="Check" type="CompoundRunConfigurationType">
<toRun name="tests" type="JUnit" />
<toRun name="Lint" type="GradleRunConfiguration" />
<toRun name="tests" type="JUnit" />
<method v="2" />
</configuration>
</component>

View File

@@ -27,7 +27,7 @@
</extension>
<GradleScriptDebugEnabled>true</GradleScriptDebugEnabled>
<method v="2">
<option name="RunConfigurationTask" enabled="true" run_configuration_name="ktlint" run_configuration_type="GradleRunConfiguration" />
<option name="RunConfigurationTask" enabled="true" run_configuration_name="Lint" run_configuration_type="GradleRunConfiguration" />
<option name="RunConfigurationTask" enabled="true" run_configuration_name="tests" run_configuration_type="JUnit" />
</method>
</configuration>

View File

@@ -1,6 +1,7 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="tests" type="JUnit" factoryName="JUnit" singleton="false">
<module name="postgresjson.test" />
<module name="postgres-json.test" />
<useClassPathOnly />
<extension name="net.ashald.envfile">
<option name="IS_ENABLED" value="false" />
<option name="IS_SUBST" value="false" />
@@ -12,7 +13,7 @@
</ENTRIES>
</extension>
<option name="ALTERNATIVE_JRE_PATH_ENABLED" value="true" />
<option name="ALTERNATIVE_JRE_PATH" value="11" />
<option name="ALTERNATIVE_JRE_PATH" value="corretto-11" />
<option name="MAIN_CLASS_NAME" value="" />
<option name="METHOD_NAME" value="" />
<option name="TEST_OBJECT" value="directory" />

View File

@@ -1,5 +1,4 @@
group = "fr.postgresjson"
version = "0.1"
import org.jetbrains.kotlin.gradle.tasks.KotlinCompile
plugins {
jacoco
@@ -9,13 +8,25 @@ plugins {
id("org.jlleitschuh.gradle.ktlint") version "8.2.0"
id("org.owasp.dependencycheck") version "5.1.0"
id("fr.coppernic.versioning") version "3.1.2"
}
group = "flecomte"
version = versioning.info.tag
repositories {
mavenCentral()
jcenter()
}
tasks.withType<KotlinCompile> {
kotlinOptions {
jvmTarget = "11"
sourceCompatibility = "11"
targetCompatibility = "11"
}
}
dependencies {
implementation("org.jetbrains.kotlin:kotlin-stdlib-jdk8")
implementation("org.jetbrains.kotlin:kotlin-reflect:1.3.31")
@@ -31,14 +42,27 @@ dependencies {
testImplementation("org.amshove.kluent:kluent:1.47")
}
publishing {
publications {
create<MavenPublication>("maven") {
groupId = "fr.postgresjson"
artifactId = "postgresjson"
version = "0.1"
val sourcesJar by tasks.creating(Jar::class) {
archiveClassifier.set("sources")
from(sourceSets.getByName("main").allSource)
}
publishing {
repositories {
maven {
name = "postgres-json"
url = uri("https://maven.pkg.github.com/flecomte/postgres-json")
credentials {
username = System.getenv("GITHUB_USERNAME")
password = System.getenv("GITHUB_TOKEN")
}
}
}
publications {
create<MavenPublication>("postgres-json") {
from(components["java"])
artifact(sourcesJar)
}
}
}
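For context, a minimal consumer-side sketch of depending on the artifact published by the new publishing block above; the coordinates (group "flecomte", artifact "postgres-json") are inferred from this diff, and "0.2.0" is a placeholder for whatever git tag the versioning plugin resolves.
// build.gradle.kts of a consuming project (sketch only)
repositories {
    maven {
        url = uri("https://maven.pkg.github.com/flecomte/postgres-json")
        credentials {
            username = System.getenv("GITHUB_USERNAME")
            password = System.getenv("GITHUB_TOKEN")
        }
    }
}
dependencies {
    implementation("flecomte:postgres-json:0.2.0") // version placeholder, see note above
}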

25
docker-compose.yml Normal file
View File

@@ -0,0 +1,25 @@
# To execute this docker-compose yml file use docker-compose -f <file_name> up
# Add the "-d" flag at the end for detached execution
version: '3.7'
services:
  db:
    container_name: postgres_json
    build:
      context: docker/postgresql
    restart: always
    ports:
      - 5555:5432
    environment:
      POSTGRES_DB: json_test
      POSTGRES_USER: test
      POSTGRES_PASSWORD: test
  pgadmin:
    container_name: pgadmin4_json
    image: dpage/pgadmin4
    restart: always
    ports:
      - 8585:80
    environment:
      PGADMIN_DEFAULT_EMAIL: rusk23@gmail.com
      PGADMIN_DEFAULT_PASSWORD: azerty

View File

@@ -0,0 +1,9 @@
FROM postgres:11
COPY postgresql.conf /tmp/postgresql.conf
COPY extension.sh /docker-entrypoint-initdb.d/000-extension.sh
COPY setup.sh /docker-entrypoint-initdb.d/100-setup.sh
ENTRYPOINT ["docker-entrypoint.sh"]
EXPOSE 5432
CMD ["postgres"]

View File

@@ -0,0 +1,7 @@
#!/usr/bin/env bash
set -e
psql -v ON_ERROR_STOP=1 --username "test" --dbname "json_test" <<-EOSQL
create extension if not exists plpgsql;
create extension if not exists "uuid-ossp";
EOSQL

View File

@@ -0,0 +1,668 @@
# -----------------------------
# PostgreSQL configuration file
# -----------------------------
#
# This file consists of lines of the form:
#
# name = value
#
# (The "=" is optional.) Whitespace may be used. Comments are introduced with
# "#" anywhere on a line. The complete list of parameter names and allowed
# values can be found in the PostgreSQL documentation.
#
# The commented-out settings shown in this file represent the default values.
# Re-commenting a setting is NOT sufficient to revert it to the default value;
# you need to reload the server.
#
# This file is read on server startup and when the server receives a SIGHUP
# signal. If you edit the file on a running system, you have to SIGHUP the
# server for the changes to take effect, run "pg_ctl reload", or execute
# "SELECT pg_reload_conf()". Some parameters, which are marked below,
# require a server shutdown and restart to take effect.
#
# Any parameter can also be given as a command-line option to the server, e.g.,
# "postgres -c log_connections=on". Some parameters can be changed at run time
# with the "SET" SQL command.
#
# Memory units:  kB = kilobytes        Time units:  ms  = milliseconds
#                MB = megabytes                     s   = seconds
#                GB = gigabytes                     min = minutes
#                TB = terabytes                     h   = hours
#                                                   d   = days
#------------------------------------------------------------------------------
# FILE LOCATIONS
#------------------------------------------------------------------------------
# The default values of these variables are driven from the -D command-line
# option or PGDATA environment variable, represented here as ConfigDir.
#data_directory = 'ConfigDir' # use data in another directory
# (change requires restart)
#hba_file = 'ConfigDir/pg_hba.conf' # host-based authentication file
# (change requires restart)
#ident_file = 'ConfigDir/pg_ident.conf' # ident configuration file
# (change requires restart)
# If external_pid_file is not explicitly set, no extra PID file is written.
#external_pid_file = '' # write an extra PID file
# (change requires restart)
#------------------------------------------------------------------------------
# CONNECTIONS AND AUTHENTICATION
#------------------------------------------------------------------------------
# - Connection Settings -
listen_addresses = '*'
# comma-separated list of addresses;
# defaults to 'localhost'; use '*' for all
# (change requires restart)
#port = 5432 # (change requires restart)
#max_connections = 100 # (change requires restart)
#superuser_reserved_connections = 3 # (change requires restart)
#unix_socket_directories = '/tmp' # comma-separated list of directories
# (change requires restart)
#unix_socket_group = '' # (change requires restart)
#unix_socket_permissions = 0777 # begin with 0 to use octal notation
# (change requires restart)
#bonjour = off # advertise server via Bonjour
# (change requires restart)
#bonjour_name = '' # defaults to the computer name
# (change requires restart)
# - Security and Authentication -
#authentication_timeout = 1min # 1s-600s
#ssl = off
#ssl_ciphers = 'HIGH:MEDIUM:+3DES:!aNULL' # allowed SSL ciphers
#ssl_prefer_server_ciphers = on
#ssl_ecdh_curve = 'prime256v1'
#ssl_dh_params_file = ''
#ssl_cert_file = 'server.crt'
#ssl_key_file = 'server.key'
#ssl_ca_file = ''
#ssl_crl_file = ''
#password_encryption = md5 # md5 or scram-sha-256
#db_user_namespace = off
#row_security = on
# GSSAPI using Kerberos
#krb_server_keyfile = ''
#krb_caseins_users = off
# - TCP Keepalives -
# see "man 7 tcp" for details
#tcp_keepalives_idle = 0 # TCP_KEEPIDLE, in seconds;
# 0 selects the system default
#tcp_keepalives_interval = 0 # TCP_KEEPINTVL, in seconds;
# 0 selects the system default
#tcp_keepalives_count = 0 # TCP_KEEPCNT;
# 0 selects the system default
#------------------------------------------------------------------------------
# RESOURCE USAGE (except WAL)
#------------------------------------------------------------------------------
# - Memory -
shared_buffers = 1GB # min 128kB
# (change requires restart)
#huge_pages = try # on, off, or try
# (change requires restart)
#temp_buffers = 8MB # min 800kB
#max_prepared_transactions = 0 # zero disables the feature
# (change requires restart)
# Caution: it is not advisable to set max_prepared_transactions nonzero unless
# you actively intend to use prepared transactions.
work_mem = 256MB # min 64kB
#maintenance_work_mem = 64MB # min 1MB
#replacement_sort_tuples = 150000 # limits use of replacement selection sort
#autovacuum_work_mem = -1 # min 1MB, or -1 to use maintenance_work_mem
#max_stack_depth = 2MB # min 100kB
#dynamic_shared_memory_type = posix # the default is the first option
# supported by the operating system:
# posix
# sysv
# windows
# mmap
# use none to disable dynamic shared memory
# (change requires restart)
# - Disk -
#temp_file_limit = -1 # limits per-process temp file space
# in kB, or -1 for no limit
# - Kernel Resource Usage -
#max_files_per_process = 1000 # min 25
# (change requires restart)
shared_preload_libraries = 'pg_stat_statements'
pg_stat_statements.max = 10000
pg_stat_statements.track = all
# - Cost-Based Vacuum Delay -
#vacuum_cost_delay = 0 # 0-100 milliseconds
#vacuum_cost_page_hit = 1 # 0-10000 credits
#vacuum_cost_page_miss = 10 # 0-10000 credits
#vacuum_cost_page_dirty = 20 # 0-10000 credits
#vacuum_cost_limit = 200 # 1-10000 credits
# - Background Writer -
#bgwriter_delay = 200ms # 10-10000ms between rounds
#bgwriter_lru_maxpages = 100 # 0-1000 max buffers written/round
#bgwriter_lru_multiplier = 2.0 # 0-10.0 multiplier on buffers scanned/round
#bgwriter_flush_after = 0 # measured in pages, 0 disables
# - Asynchronous Behavior -
effective_io_concurrency = 100 # 1-1000; 0 disables prefetching
#max_worker_processes = 8 # (change requires restart)
max_parallel_workers_per_gather = 4 # taken from max_parallel_workers
#max_parallel_workers = 8 # maximum number of max_worker_processes that
# can be used in parallel queries
#old_snapshot_threshold = -1 # 1min-60d; -1 disables; 0 is immediate
# (change requires restart)
#backend_flush_after = 0 # measured in pages, 0 disables
#------------------------------------------------------------------------------
# WRITE AHEAD LOG
#------------------------------------------------------------------------------
# - Settings -
#wal_level = replica # minimal, replica, or logical
# (change requires restart)
#fsync = on # flush data to disk for crash safety
# (turning this off can cause
# unrecoverable data corruption)
#synchronous_commit = on # synchronization level;
# off, local, remote_write, remote_apply, or on
#wal_sync_method = fsync # the default is the first option
# supported by the operating system:
# open_datasync
# fdatasync (default on Linux)
# fsync
# fsync_writethrough
# open_sync
#full_page_writes = on # recover from partial page writes
#wal_compression = off # enable compression of full-page writes
#wal_log_hints = off # also do full page writes of non-critical updates
# (change requires restart)
#wal_buffers = -1 # min 32kB, -1 sets based on shared_buffers
# (change requires restart)
#wal_writer_delay = 200ms # 1-10000 milliseconds
#wal_writer_flush_after = 1MB # measured in pages, 0 disables
#commit_delay = 0 # range 0-100000, in microseconds
#commit_siblings = 5 # range 1-1000
# - Checkpoints -
#checkpoint_timeout = 5min # range 30s-1d
#max_wal_size = 1GB
#min_wal_size = 80MB
#checkpoint_completion_target = 0.5 # checkpoint target duration, 0.0 - 1.0
#checkpoint_flush_after = 0 # measured in pages, 0 disables
#checkpoint_warning = 30s # 0 disables
# - Archiving -
#archive_mode = off # enables archiving; off, on, or always
# (change requires restart)
#archive_command = '' # command to use to archive a logfile segment
# placeholders: %p = path of file to archive
# %f = file name only
# e.g. 'test ! -f /mnt/server/archivedir/%f && cp %p /mnt/server/archivedir/%f'
#archive_timeout = 0 # force a logfile segment switch after this
# number of seconds; 0 disables
#------------------------------------------------------------------------------
# REPLICATION
#------------------------------------------------------------------------------
# - Sending Server(s) -
# Set these on the master and on any standby that will send replication data.
#max_wal_senders = 10 # max number of walsender processes
# (change requires restart)
#wal_keep_segments = 0 # in logfile segments, 16MB each; 0 disables
#wal_sender_timeout = 60s # in milliseconds; 0 disables
#max_replication_slots = 10 # max number of replication slots
# (change requires restart)
#track_commit_timestamp = off # collect timestamp of transaction commit
# (change requires restart)
# - Master Server -
# These settings are ignored on a standby server.
#synchronous_standby_names = '' # standby servers that provide sync rep
# method to choose sync standbys, number of sync standbys,
# and comma-separated list of application_name
# from standby(s); '*' = all
#vacuum_defer_cleanup_age = 0 # number of xacts by which cleanup is delayed
# - Standby Servers -
# These settings are ignored on a master server.
#hot_standby = on # "off" disallows queries during recovery
# (change requires restart)
#max_standby_archive_delay = 30s # max delay before canceling queries
# when reading WAL from archive;
# -1 allows indefinite delay
#max_standby_streaming_delay = 30s # max delay before canceling queries
# when reading streaming WAL;
# -1 allows indefinite delay
#wal_receiver_status_interval = 10s # send replies at least this often
# 0 disables
#hot_standby_feedback = off # send info from standby to prevent
# query conflicts
#wal_receiver_timeout = 60s # time that receiver waits for
# communication from master
# in milliseconds; 0 disables
#wal_retrieve_retry_interval = 5s # time to wait before retrying to
# retrieve WAL after a failed attempt
# - Subscribers -
# These settings are ignored on a publisher.
#max_logical_replication_workers = 4 # taken from max_worker_processes
# (change requires restart)
#max_sync_workers_per_subscription = 2 # taken from max_logical_replication_workers
#------------------------------------------------------------------------------
# QUERY TUNING
#------------------------------------------------------------------------------
# - Planner Method Configuration -
#enable_bitmapscan = on
#enable_hashagg = on
#enable_hashjoin = on
#enable_indexscan = on
#enable_indexonlyscan = on
#enable_material = on
#enable_mergejoin = on
#enable_nestloop = on
#enable_seqscan = on
#enable_sort = on
#enable_tidscan = on
# - Planner Cost Constants -
#seq_page_cost = 1.0 # measured on an arbitrary scale
#random_page_cost = 4.0 # same scale as above
#cpu_tuple_cost = 0.01 # same scale as above
#cpu_index_tuple_cost = 0.005 # same scale as above
#cpu_operator_cost = 0.0025 # same scale as above
#parallel_tuple_cost = 0.1 # same scale as above
#parallel_setup_cost = 1000.0 # same scale as above
#min_parallel_table_scan_size = 8MB
#min_parallel_index_scan_size = 512kB
#effective_cache_size = 4GB
# - Genetic Query Optimizer -
#geqo = on
#geqo_threshold = 12
#geqo_effort = 5 # range 1-10
#geqo_pool_size = 0 # selects default based on effort
#geqo_generations = 0 # selects default based on effort
#geqo_selection_bias = 2.0 # range 1.5-2.0
#geqo_seed = 0.0 # range 0.0-1.0
# - Other Planner Options -
#default_statistics_target = 100 # range 1-10000
#constraint_exclusion = partition # on, off, or partition
#cursor_tuple_fraction = 0.1 # range 0.0-1.0
#from_collapse_limit = 8
#join_collapse_limit = 8 # 1 disables collapsing of explicit
# JOIN clauses
#force_parallel_mode = off
#------------------------------------------------------------------------------
# ERROR REPORTING AND LOGGING
#------------------------------------------------------------------------------
# - Where to Log -
#log_destination = 'stderr' # Valid values are combinations of
# stderr, csvlog, syslog, and eventlog,
# depending on platform. csvlog
# requires logging_collector to be on.
# This is used when logging to stderr:
logging_collector = on
# Enable capturing of stderr and csvlog
# into log files. Required to be on for
# csvlogs.
# (change requires restart)
# These are only used if logging_collector is on:
log_directory = '/var/log/postgresql'
# directory where log files are written,
# can be absolute or relative to PGDATA
log_filename = 'postgresql-%Y-%m-%d_%H%M%S.log'
# log file name pattern,
# can include strftime() escapes
#log_file_mode = 0600 # creation mode for log files,
# begin with 0 to use octal notation
log_file_mode = 0660
#log_truncate_on_rotation = off # If on, an existing log file with the
# same name as the new log file will be
# truncated rather than appended to.
# But such truncation only occurs on
# time-driven rotation, not on restarts
# or size-driven rotation. Default is
# off, meaning append to existing files
# in all cases.
log_rotation_age = 1d
# Automatic rotation of logfiles will
# happen after that time. 0 disables.
log_rotation_size = 100MB
# Automatic rotation of logfiles will
# happen after that much log output.
# 0 disables.
# These are relevant when logging to syslog:
#syslog_facility = 'LOCAL0'
#syslog_ident = 'postgres'
#syslog_sequence_numbers = on
#syslog_split_messages = on
# This is only relevant when logging to eventlog (win32):
# (change requires restart)
#event_source = 'PostgreSQL'
# - When to Log -
#client_min_messages = notice # values in order of decreasing detail:
# debug5
# debug4
# debug3
# debug2
# debug1
# log
# notice
# warning
# error
#log_min_messages = warning # values in order of decreasing detail:
# debug5
# debug4
# debug3
# debug2
# debug1
# info
# notice
# warning
# error
# log
# fatal
# panic
#log_min_error_statement = error # values in order of decreasing detail:
# debug5
# debug4
# debug3
# debug2
# debug1
# info
# notice
# warning
# error
# log
# fatal
# panic (effectively off)
#log_min_duration_statement = -1 # -1 is disabled, 0 logs all statements
# and their durations, > 0 logs only
# statements running at least this number
# of milliseconds
# - What to Log -
#debug_print_parse = off
#debug_print_rewritten = off
#debug_print_plan = off
#debug_pretty_print = on
#log_checkpoints = off
#log_connections = off
#log_disconnections = off
#log_duration = off
#log_error_verbosity = default # terse, default, or verbose messages
#log_hostname = off
#log_line_prefix = '%m [%p] ' # special values:
# %a = application name
# %u = user name
# %d = database name
# %r = remote host and port
# %h = remote host
# %p = process ID
# %t = timestamp without milliseconds
# %m = timestamp with milliseconds
# %n = timestamp with milliseconds (as a Unix epoch)
# %i = command tag
# %e = SQL state
# %c = session ID
# %l = session line number
# %s = session start timestamp
# %v = virtual transaction ID
# %x = transaction ID (0 if none)
# %q = stop here in non-session
# processes
# %% = '%'
# e.g. '<%u%%%d> '
#log_lock_waits = off # log lock waits >= deadlock_timeout
#log_statement = 'none' # none, ddl, mod, all
#log_replication_commands = off
#log_temp_files = -1 # log temporary files equal or larger
# than the specified size in kilobytes;
# -1 disables, 0 logs all temp files
#log_timezone = 'GMT'
# - Process Title -
#cluster_name = '' # added to process titles if nonempty
# (change requires restart)
#update_process_title = on
#------------------------------------------------------------------------------
# RUNTIME STATISTICS
#------------------------------------------------------------------------------
# - Query/Index Statistics Collector -
#track_activities = on
#track_counts = on
#track_io_timing = off
#track_functions = none # none, pl, all
#track_activity_query_size = 1024 # (change requires restart)
#stats_temp_directory = 'pg_stat_tmp'
# - Statistics Monitoring -
#log_parser_stats = off
#log_planner_stats = off
#log_executor_stats = off
#log_statement_stats = off
#------------------------------------------------------------------------------
# AUTOVACUUM PARAMETERS
#------------------------------------------------------------------------------
#autovacuum = on # Enable autovacuum subprocess? 'on'
# requires track_counts to also be on.
#log_autovacuum_min_duration = -1 # -1 disables, 0 logs all actions and
# their durations, > 0 logs only
# actions running at least this number
# of milliseconds.
#autovacuum_max_workers = 3 # max number of autovacuum subprocesses
# (change requires restart)
#autovacuum_naptime = 1min # time between autovacuum runs
#autovacuum_vacuum_threshold = 50 # min number of row updates before
# vacuum
#autovacuum_analyze_threshold = 50 # min number of row updates before
# analyze
#autovacuum_vacuum_scale_factor = 0.2 # fraction of table size before vacuum
#autovacuum_analyze_scale_factor = 0.1 # fraction of table size before analyze
#autovacuum_freeze_max_age = 200000000 # maximum XID age before forced vacuum
# (change requires restart)
#autovacuum_multixact_freeze_max_age = 400000000 # maximum multixact age
# before forced vacuum
# (change requires restart)
#autovacuum_vacuum_cost_delay = 20ms # default vacuum cost delay for
# autovacuum, in milliseconds;
# -1 means use vacuum_cost_delay
#autovacuum_vacuum_cost_limit = -1 # default vacuum cost limit for
# autovacuum, -1 means use
# vacuum_cost_limit
#------------------------------------------------------------------------------
# CLIENT CONNECTION DEFAULTS
#------------------------------------------------------------------------------
# - Statement Behavior -
#search_path = '"$user", public' # schema names
#default_tablespace = '' # a tablespace name, '' uses the default
#temp_tablespaces = '' # a list of tablespace names, '' uses
# only default tablespace
#check_function_bodies = on
#default_transaction_isolation = 'read committed'
#default_transaction_read_only = off
#default_transaction_deferrable = off
#session_replication_role = 'origin'
#statement_timeout = 0 # in milliseconds, 0 is disabled
#lock_timeout = 0 # in milliseconds, 0 is disabled
#idle_in_transaction_session_timeout = 0 # in milliseconds, 0 is disabled
#vacuum_freeze_min_age = 50000000
#vacuum_freeze_table_age = 150000000
#vacuum_multixact_freeze_min_age = 5000000
#vacuum_multixact_freeze_table_age = 150000000
#bytea_output = 'hex' # hex, escape
#xmlbinary = 'base64'
#xmloption = 'content'
#gin_fuzzy_search_limit = 0
#gin_pending_list_limit = 4MB
# - Locale and Formatting -
#datestyle = 'iso, mdy'
#intervalstyle = 'postgres'
#timezone = 'GMT'
#timezone_abbreviations = 'Default' # Select the set of available time zone
# abbreviations. Currently, there are
# Default
# Australia (historical usage)
# India
# You can create your own file in
# share/timezonesets/.
#extra_float_digits = 0 # min -15, max 3
#client_encoding = sql_ascii # actually, defaults to database
# encoding
# These settings are initialized by initdb, but they can be changed.
#lc_messages = 'C' # locale for system error message
# strings
#lc_monetary = 'C' # locale for monetary formatting
#lc_numeric = 'C' # locale for number formatting
#lc_time = 'C' # locale for time formatting
# default configuration for text search
#default_text_search_config = 'pg_catalog.simple'
# - Other Defaults -
#dynamic_library_path = '$libdir'
#local_preload_libraries = ''
#session_preload_libraries = ''
#------------------------------------------------------------------------------
# LOCK MANAGEMENT
#------------------------------------------------------------------------------
#deadlock_timeout = 1s
#max_locks_per_transaction = 64 # min 10
# (change requires restart)
#max_pred_locks_per_transaction = 64 # min 10
# (change requires restart)
#max_pred_locks_per_relation = -2 # negative values mean
# (max_pred_locks_per_transaction
# / -max_pred_locks_per_relation) - 1
#max_pred_locks_per_page = 2 # min 0
#------------------------------------------------------------------------------
# VERSION/PLATFORM COMPATIBILITY
#------------------------------------------------------------------------------
# - Previous PostgreSQL Versions -
#array_nulls = on
#backslash_quote = safe_encoding # on, off, or safe_encoding
#default_with_oids = off
#escape_string_warning = on
#lo_compat_privileges = off
#operator_precedence_warning = off
#quote_all_identifiers = off
#standard_conforming_strings = on
#synchronize_seqscans = on
# - Other Platforms and Clients -
#transform_null_equals = off
#------------------------------------------------------------------------------
# ERROR HANDLING
#------------------------------------------------------------------------------
#exit_on_error = off # terminate session on any error?
#restart_after_crash = on # reinitialize after backend crash?
#------------------------------------------------------------------------------
# CONFIG FILE INCLUDES
#------------------------------------------------------------------------------
# These options allow settings to be loaded from files other than the
# default postgresql.conf.
#include_dir = 'conf.d' # include files ending in '.conf' from
# directory 'conf.d'
#include_if_exists = 'exists.conf' # include file only if it exists
#include = 'special.conf' # include file
#------------------------------------------------------------------------------
# CUSTOMIZED OPTIONS
#------------------------------------------------------------------------------
# Add settings for extensions here
zdb.default_elasticsearch_url = 'http://elasticsearch:9200/'

View File

@@ -0,0 +1,4 @@
#!/usr/bin/env bash
set -e
cat /tmp/postgresql.conf > /var/lib/postgresql/data/postgresql.conf

View File

@@ -1,5 +1,5 @@
distributionUrl=https\://services.gradle.org/distributions/gradle-5.6.2-all.zip
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-5.6.2-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists

2
jitpack.yml Normal file
View File

@@ -0,0 +1,2 @@
jdk:
- openjdk11

View File

@@ -1 +1 @@
rootProject.name = 'postgresjson'
rootProject.name = 'postgres-json'

View File

@@ -6,12 +6,13 @@ import com.github.jasync.sql.db.QueryResult
import com.github.jasync.sql.db.pool.ConnectionPool
import com.github.jasync.sql.db.postgresql.PostgreSQLConnection
import com.github.jasync.sql.db.postgresql.PostgreSQLConnectionBuilder
import com.github.jasync.sql.db.util.length
import fr.postgresjson.entity.EntityI
import fr.postgresjson.entity.Serializable
import fr.postgresjson.serializer.Serializer
import fr.postgresjson.utils.LoggerDelegate
import org.slf4j.Logger
import java.util.concurrent.CompletableFuture
import java.util.concurrent.*
typealias SelectOneCallback<T> = QueryResult.(T?) -> Unit
typealias SelectCallback<T> = QueryResult.(List<T>) -> Unit
@@ -204,7 +205,7 @@ class Connection(
private fun compileArgs(values: List<Any?>): List<Any?> {
return values.map {
if (it is Serializable) {
if (it is Serializable || (it is List<*> && it.firstOrNull() is Serializable)) {
serializer.serialize(it)
} else {
it
@@ -248,15 +249,33 @@ class Connection(
data class ParametersQuery(val sql: String, val parameters: List<Any?>)
private fun <T> stopwatchQuery(sql: String, values: List<Any?> = emptyList(), callback: () -> T): T {
val sqlForLog = "\n${sql.prependIndent()}"
try {
val start = System.currentTimeMillis()
val result = callback()
val duration = (System.currentTimeMillis() - start)
logger?.debug("$duration ms for query: $sqlForLog \n {}", values.joinToString(", "))
val resultText = when (result) {
null -> "with no result"
is QueryResult -> result.rows.firstOrNull()?.joinToString(", ")?.let { text ->
if (text.length > 100) "${text.take(100)}... (size: ${text.length})" else text
} ?: "with no result"
else -> "unknown"
}
val args = """
|Query ($duration ms):
|${sql.trimIndent().prependIndent()}
|Arguments (${values.length}):
|${values.joinToString("\n").ifBlank { "No arguments" }.prependIndent()}
|Result:
|${resultText.trimIndent().prependIndent()}
""".trimMargin().prependIndent(" > ")
logger?.debug("Query executed in $duration ms \n{}", args)
return result
} catch (e: Throwable) {
logger?.info("Query Error: $sqlForLog, $values", e)
logger?.info("""
Query Error:
${sql.prependIndent()},
${values.joinToString(", ").prependIndent()}
""".trimIndent(), e)
throw e
}
}
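A standalone illustration of the argument-compilation rule introduced in compileArgs above: a value is serialized to JSON when it implements Serializable, or when it is a List whose elements are Serializable. The jsonOf stand-in below is purely illustrative and is not part of the library.
// Minimal self-contained sketch of the new compileArgs behaviour
interface Serializable
data class UserRef(val id: Int) : Serializable

fun jsonOf(value: Any?): String = """{"stub": "$value"}"""

fun compileArgs(values: List<Any?>): List<Any?> = values.map {
    // serialize single entities and lists of entities, pass everything else through untouched
    if (it is Serializable || (it is List<*> && it.firstOrNull() is Serializable)) jsonOf(it) else it
}

fun main() {
    println(compileArgs(listOf(1, "plain", UserRef(42), listOf(UserRef(1), UserRef(2)))))
}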

View File

@@ -49,6 +49,12 @@ interface EmbedExecutable {
fun exec(values: List<Any?> = emptyList()): QueryResult
fun exec(values: Map<String, Any?>): QueryResult
fun exec(vararg values: Pair<String, Any?>): QueryResult = exec(values.toMap())
fun perform(values: List<Any?>) { exec(values) }
fun perform(values: Map<String, Any?>) { exec(values) }
fun perform(vararg values: Pair<String, Any?>) = perform(values.toMap())
fun sendQuery(values: List<Any?> = emptyList()): Int
fun sendQuery(values: Map<String, Any?>): Int
fun sendQuery(vararg values: Pair<String, Any?>): Int =
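A small sketch of the difference between exec and the new perform overloads, written against the EmbedExecutable interface shown above; the insertUsers wrapper and the "name" parameter are illustrative, and library imports are assumed.
import com.github.jasync.sql.db.QueryResult

// `statement` is any Query or Function handle exposing EmbedExecutable
fun insertUsers(statement: EmbedExecutable) {
    statement.perform("name" to "Alice")                        // run for the side effect, result discarded
    val result: QueryResult = statement.exec("name" to "Bob")   // same call when the QueryResult is needed
    println(result.rowsAffected)
}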

View File

@@ -1,6 +1,7 @@
package fr.postgresjson.connexion
import fr.postgresjson.entity.EntityI
import kotlin.math.ceil
data class Paginated<T : EntityI>(
val result: List<T>,
@@ -10,10 +11,15 @@ data class Paginated<T : EntityI>(
) {
val currentPage: Int = (offset / limit) + 1
val count: Int = result.size
val totalPages: Int = (total.toDouble() / limit.toDouble()).ceil()
init {
if (offset < 0) error("offset must be greater than or equal to 0")
if (limit < 1) error("limit must be greater than or equal to 1")
if (total < 0) error("total must be greater than or equal to 0")
}
fun isLastPage(): Boolean = currentPage >= totalPages
private fun Double.ceil(): Int = ceil(this).toInt()
}
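A usage sketch of the new pagination helpers, assuming two EntityI instances are available and that offset, limit and total are constructor parameters as the init block suggests; the numbers only illustrate the arithmetic.
fun <T : EntityI> paginationExample(objA: T, objB: T) {
    // two entities on the third page of 22 results, 10 per page
    val page = Paginated(result = listOf(objA, objB), offset = 20, limit = 10, total = 22)
    println(page.currentPage)   // 3  (offset / limit + 1)
    println(page.totalPages)    // 3  (ceil(22 / 10.0))
    println(page.isLastPage())  // true
}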

View File

@@ -1,7 +1,10 @@
package fr.postgresjson.connexion
import java.io.File
import fr.postgresjson.utils.searchSqlFiles
import java.net.URI
import fr.postgresjson.definition.Function as DefinitionFunction
import fr.postgresjson.definition.Function as FunctionDefinition
import fr.postgresjson.definition.Query as QueryDefinition
class Requester(
private val connection: Connection,
@@ -13,17 +16,19 @@ class Requester(
return this
}
fun addQuery(query: QueryDefinition): Requester = addQuery(query.name, query.script)
fun addQuery(name: String, sql: String): Requester {
addQuery(Query(name, sql, connection))
return this
}
fun addQuery(queriesDirectory: File): Requester {
queriesDirectory.walk()
.filter { it.isFile && it.extension == "sql" }
fun addQuery(queriesDirectory: URI): Requester {
queriesDirectory.searchSqlFiles()
.forEach {
val path = it.parentFile.nameWithoutExtension
addQuery("$path/${it.nameWithoutExtension}", it.readText())
if (it is QueryDefinition) {
addQuery(it)
}
}
return this
}
@@ -44,11 +49,12 @@ class Requester(
return this
}
fun addFunction(functionsDirectory: File): Requester {
functionsDirectory.walk()
.filter { it.isFile && it.extension == "sql" }
fun addFunction(functionsDirectory: URI): Requester {
functionsDirectory.searchSqlFiles()
.forEach {
addFunction(it.readText())
if (it is FunctionDefinition) {
addFunction(it)
}
}
return this
}
@@ -69,8 +75,8 @@ class Requester(
class RequesterFactory(
private val connection: Connection,
private val queriesDirectory: File? = null,
private val functionsDirectory: File? = null
private val queriesDirectory: URI? = null,
private val functionsDirectory: URI? = null
) {
constructor(
host: String = "localhost",
@@ -78,8 +84,8 @@ class Requester(
database: String = "dc-project",
username: String = "dc-project",
password: String = "dc-project",
queriesDirectory: File? = null,
functionsDirectory: File? = null
queriesDirectory: URI? = null,
functionsDirectory: URI? = null
) : this(
Connection(host = host, port = port, database = database, username = username, password = password),
queriesDirectory,
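A construction sketch for the reworked RequesterFactory, which now takes URIs instead of Files so that SQL bundled in a jar can be located; the classpath folders /sql/query and /sql/function are assumptions.
// RequesterFactory appears above; qualify it as Requester.RequesterFactory if it is nested.
fun buildFactory(connection: Connection) = RequesterFactory(
    connection = connection,
    queriesDirectory = Requester::class.java.getResource("/sql/query")!!.toURI(),
    functionsDirectory = Requester::class.java.getResource("/sql/function")!!.toURI()
)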

View File

@@ -1,14 +1,15 @@
package fr.postgresjson.definition
import java.io.File
import java.nio.file.Path
open class Function(
override val script: String
class Function(
override val script: String,
override var source: Path? = null
) : Resource, ParametersInterface {
val returns: String
override val name: String
override val parameters: List<Parameter>
override var source: File? = null
init {
val functionRegex =
@@ -46,8 +47,7 @@ open class Function(
}
}
abstract class ParseException(message: String, cause: Throwable? = null) : Exception(message, cause)
class FunctionNotFound(cause: Throwable? = null) : ParseException("Function not found in script", cause)
class FunctionNotFound(cause: Throwable? = null) : Resource.ParseException("Function not found in script", cause)
fun getDefinition(): String {
return parameters

View File

@@ -0,0 +1,35 @@
package fr.postgresjson.definition
import java.nio.file.Path
class Migration(
override val script: String,
source: Path
) : Resource {
override val name: String
val direction: Direction
override var source: Path? = null
init {
this.source = source
this.direction = source.fileName.toString()
.let {
when {
it.endsWith(".down.sql") -> Direction.DOWN
it.endsWith(".up.sql") -> Direction.UP
else -> throw MigrationNotFound()
}
}
this.name = source.fileName.toString()
.substringAfterLast("/")
.let {
when (direction) {
Direction.DOWN -> it.substringBefore(".down.sql")
Direction.UP -> it.substringBefore(".up.sql")
}
}
}
class MigrationNotFound(cause: Throwable? = null) : Resource.ParseException("Migration not found in script", cause)
enum class Direction { UP, DOWN }
}
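A sketch of how Migration derives its direction and name from the file name suffix; the scripts and paths are illustrative.
import java.nio.file.Path

val up = Migration("CREATE TABLE item (id uuid PRIMARY KEY);", Path.of("sql/migrations/1_create_item.up.sql"))
// up.direction == Migration.Direction.UP,    up.name == "1_create_item"
val down = Migration("DROP TABLE item;", Path.of("sql/migrations/1_create_item.down.sql"))
// down.direction == Migration.Direction.DOWN, down.name == "1_create_item"
// Any other suffix makes the constructor throw MigrationNotFound, so Resource.build can fall through.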

View File

@@ -0,0 +1,25 @@
package fr.postgresjson.definition
import java.nio.file.Path
class Query(
override val script: String,
source: Path
) : Resource {
override var source: Path? = source
override val name: String = getNameFromComment(script) ?: getNameFromFile(source)
/** Try to get name from comment in file */
private fun getNameFromComment(script: String): String? =
"""-- *name ?: ?(?<name>[^ \n]+)"""
.toRegex(setOf(RegexOption.IGNORE_CASE, RegexOption.MULTILINE))
.find(script)?.let {
it.groups["name"]?.value?.trim()
}
/** Try to get name from the filename */
private fun getNameFromFile(source: Path): String = source
.fileName.toString()
.substringAfterLast("/")
.substringBeforeLast(".sql")
}
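A sketch of the two naming strategies this commit adds: an explicit "-- name:" comment wins, otherwise the file name (minus .sql) is used. The SQL and paths are illustrative.
import java.nio.file.Path

val named = Query("-- name: findUserById\nSELECT * FROM \"user\" WHERE id = :id;", Path.of("sql/query/user/find.sql"))
// named.name == "findUserById"   (taken from the comment)
val fromFile = Query("SELECT count(*) FROM \"user\";", Path.of("sql/query/user/countUsers.sql"))
// fromFile.name == "countUsers"  (taken from the file name)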

View File

@@ -1,11 +1,38 @@
package fr.postgresjson.definition
import java.io.File
import java.net.URL
import java.nio.file.Path
interface Resource {
val name: String
val script: String
var source: File?
var source: Path?
open class ParseException(message: String, cause: Throwable? = null) : Exception(message, cause)
companion object {
fun build(file: File): Resource =
build(file.readText(), Path.of(file.toURI()))
fun build(url: URL): Resource =
build(url.readText(), Path.of(url.toURI()))
fun build(resource: String, path: Path): Resource =
try {
Function(resource, path)
} catch (e: ParseException) {
try {
Migration(resource, path)
} catch (e: ParseException) {
try {
Query(resource, path)
} catch (e: ParseException) {
throw ParseException("No SQL resource found")
}
}
}
}
}
interface ResourceCollection {
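A sketch of the Resource.build fallback chain: Function is tried first, then Migration, then Query, each failed parse signalled by a ParseException. The script and path below are illustrative.
import java.nio.file.Path

// Neither a CREATE FUNCTION script nor *.up.sql / *.down.sql, so Function and Migration both
// throw a ParseException and build(...) falls through to a Query named "listItems" by its comment.
val resource: Resource = Resource.build("-- name: listItems\nSELECT * FROM item;", Path.of("sql/query/item/list.sql"))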

View File

@@ -1,126 +1,5 @@
package fr.postgresjson.entity
import org.joda.time.DateTime
import java.util.*
interface Serializable
interface EntityI : Serializable
interface Parameter : Serializable
abstract class Entity<T>(open var id: T? = null) : EntityI
open class UuidEntity(override var id: UUID? = UUID.randomUUID()) : Entity<UUID>(id)
open class IdEntity(override var id: Int? = null) : Entity<Int>(id)
/* Version */
interface EntityVersioning<ID, NUMBER> {
var versionId: ID
var versionNumber: NUMBER?
}
class UuidEntityVersioning : EntityVersioning<UUID, Int> {
override var versionId: UUID = UUID.randomUUID()
override var versionNumber: Int? = null
}
/* Dates */
interface EntityCreatedAt {
var createdAt: DateTime?
}
interface EntityUpdatedAt {
var updatedAt: DateTime?
}
interface EntityDeletedAt {
var deletedAt: DateTime?
fun isDeleted(): Boolean {
val deletedAt = deletedAt
return deletedAt != null && deletedAt < DateTime.now()
}
}
class EntityCreatedAtImp : EntityCreatedAt {
override var createdAt: DateTime? = null
}
class EntityUpdatedAtImp : EntityUpdatedAt {
override var updatedAt: DateTime? = null
}
class EntityDeletedAtImp : EntityDeletedAt {
override var deletedAt: DateTime? = null
}
/* Author */
interface EntityCreatedBy<T : EntityI> {
var createdBy: T?
}
interface EntityUpdatedBy<T : EntityI> {
var updatedBy: T?
}
interface EntityDeletedBy<T : EntityI> {
var deletedBy: T?
}
class EntityCreatedByImp<UserT : EntityI>(
override var createdBy: UserT?
) : EntityCreatedBy<UserT>
class EntityUpdatedByImp<UserT : EntityI>(
override var updatedBy: UserT?
) : EntityUpdatedBy<UserT>
class EntityDeletedByImp<UserT : EntityI>(
override var deletedBy: UserT?
) : EntityDeletedBy<UserT>
/* Mixed */
class EntityDeletedImp<UserT : EntityI>(
override var deletedBy: UserT? = null
) : EntityDeletedBy<UserT>,
EntityDeletedAt by EntityDeletedAtImp()
class EntityUpdatedImp<UserT : EntityI>(
override var updatedAt: DateTime? = null,
override var updatedBy: UserT? = null
) : EntityUpdatedBy<UserT>,
EntityUpdatedAt by EntityUpdatedAtImp()
class EntityCreatedImp<UserT : EntityI>(
override var createdAt: DateTime? = null,
override var createdBy: UserT? = null
) : EntityCreatedBy<UserT>,
EntityCreatedAt by EntityCreatedAtImp()
/* Published */
interface Published<UserT : EntityI> {
var publishedAt: DateTime?
var publishedBy: UserT?
}
class EntityPublishedImp<UserT : EntityI>(
override var publishedBy: UserT?
) : Published<UserT> {
override var publishedAt: DateTime? = null
}
/* Implementation */
abstract class EntityImp<T, UserT : EntityI>(
updatedBy: UserT?
) : Entity<T>(),
EntityCreatedAt by EntityCreatedAtImp(),
EntityUpdatedAt by EntityUpdatedAtImp(),
EntityDeletedAt by EntityDeletedAtImp(),
EntityCreatedBy<UserT> by EntityCreatedByImp(updatedBy),
EntityUpdatedBy<UserT> by EntityUpdatedByImp(updatedBy),
EntityDeletedBy<UserT> by EntityDeletedByImp(updatedBy)
abstract class UuidEntityExtended<T, UserT : EntityI>(
updatedBy: UserT?,
publishedBy: UserT?
) :
EntityImp<T, UserT>(updatedBy),
EntityVersioning<UUID, Int> by UuidEntityVersioning(),
Published<UserT> by EntityPublishedImp(publishedBy)

View File

@@ -0,0 +1,88 @@
package fr.postgresjson.entity.immutable
import fr.postgresjson.entity.EntityI
import fr.postgresjson.entity.mutable.EntityDeletedAt
import fr.postgresjson.entity.mutable.EntityDeletedAtImp
import fr.postgresjson.entity.mutable.EntityDeletedBy
import fr.postgresjson.entity.mutable.EntityDeletedByImp
import org.joda.time.DateTime
import java.util.*
interface EntityRefI<T> : EntityI {
val id: T
}
interface UuidEntityI : EntityRefI<UUID> {
override val id: UUID
}
abstract class Entity<T>(override val id: T) : EntityRefI<T>
open class UuidEntity(override val id: UUID = UUID.randomUUID()) : UuidEntityI, Entity<UUID>(id)
/* Version */
interface EntityVersioning<ID, NUMBER> {
val versionNumber: NUMBER
val versionId: ID
}
class UuidEntityVersioning(
override var versionNumber: Int? = null,
override val versionId: UUID = UUID.randomUUID()
) : EntityVersioning<UUID, Int?>
/* Dates */
interface EntityCreatedAt {
val createdAt: DateTime
}
interface EntityUpdatedAt {
var updatedAt: DateTime
}
class EntityCreatedAtImp(
override val createdAt: DateTime = DateTime.now()
) : EntityCreatedAt
class EntityUpdatedAtImp(
override var updatedAt: DateTime = DateTime.now()
) : EntityUpdatedAt
/* Author */
interface EntityCreatedBy<T : EntityI> {
val createdBy: T
}
interface EntityUpdatedBy<T : EntityI> {
var updatedBy: T
}
class EntityCreatedByImp<UserT : EntityI>(
override val createdBy: UserT
) : EntityCreatedBy<UserT>
class EntityUpdatedByImp<UserT : EntityI>(
override var updatedBy: UserT
) : EntityUpdatedBy<UserT>
/* Mixed */
class EntityCreatedImp<UserT : EntityI>(
override val createdAt: DateTime = DateTime.now(),
createdBy: UserT
) : EntityCreatedBy<UserT> by EntityCreatedByImp(createdBy),
EntityCreatedAt by EntityCreatedAtImp()
class EntityUpdatedImp<UserT : EntityI>(
updatedAt: DateTime = DateTime.now(),
override var updatedBy: UserT
) : EntityUpdatedBy<UserT>,
EntityUpdatedAt by EntityUpdatedAtImp(updatedAt)
/* Implementation */
abstract class EntityImp<T, UserT : EntityI>(
updatedBy: UserT,
updatedAt: DateTime = DateTime.now()
) : UuidEntity(),
EntityCreatedAt by EntityCreatedAtImp(updatedAt),
EntityUpdatedAt by EntityUpdatedAtImp(updatedAt),
EntityDeletedAt by EntityDeletedAtImp(),
EntityCreatedBy<UserT> by EntityCreatedByImp(updatedBy),
EntityUpdatedBy<UserT> by EntityUpdatedByImp(updatedBy),
EntityDeletedBy<UserT> by EntityDeletedByImp(updatedBy)
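A sketch of how the immutable variants compose by delegation; Account and Article are illustrative classes, with imports from fr.postgresjson.entity.immutable and joda-time assumed.
class Account : UuidEntity()

class Article(
    val title: String,
    author: Account
) : UuidEntity(),
    EntityCreatedAt by EntityCreatedAtImp(),            // createdAt fixed at construction
    EntityCreatedBy<Account> by EntityCreatedByImp(author)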

View File

@@ -0,0 +1,135 @@
package fr.postgresjson.entity.mutable
import fr.postgresjson.entity.EntityI
import org.joda.time.DateTime
import java.util.*
interface EntityRefI<T> : EntityI {
var id: T?
}
interface UuidEntityI : EntityRefI<UUID> {
override var id: UUID?
}
interface IdEntityI : EntityRefI<Int> {
override var id: Int?
}
abstract class Entity<T>(override var id: T? = null) : EntityRefI<T>
open class UuidEntity(override var id: UUID? = UUID.randomUUID()) : UuidEntityI, Entity<UUID>(id)
open class IdEntity(override var id: Int? = null) : IdEntityI, Entity<Int>(id)
/* Version */
interface EntityVersioning<ID, NUMBER> {
var versionId: ID
var versionNumber: NUMBER?
}
class UuidEntityVersioning(
override var versionNumber: Int? = null,
override var versionId: UUID = UUID.randomUUID()
) : EntityVersioning<UUID, Int>
/* Dates */
interface EntityCreatedAt {
var createdAt: DateTime?
}
interface EntityUpdatedAt {
var updatedAt: DateTime?
}
interface EntityDeletedAt {
var deletedAt: DateTime?
fun isDeleted(): Boolean {
val deletedAt = deletedAt
return deletedAt != null && deletedAt < DateTime.now()
}
}
class EntityCreatedAtImp : EntityCreatedAt {
override var createdAt: DateTime? = null
}
class EntityUpdatedAtImp : EntityUpdatedAt {
override var updatedAt: DateTime? = null
}
class EntityDeletedAtImp : EntityDeletedAt {
override var deletedAt: DateTime? = null
}
/* Author */
interface EntityCreatedBy<T : EntityI> {
var createdBy: T?
}
interface EntityUpdatedBy<T : EntityI> {
var updatedBy: T?
}
interface EntityDeletedBy<T : EntityI> {
var deletedBy: T?
}
class EntityCreatedByImp<UserT : EntityI>(
override var createdBy: UserT?
) : EntityCreatedBy<UserT>
class EntityUpdatedByImp<UserT : EntityI>(
override var updatedBy: UserT?
) : EntityUpdatedBy<UserT>
class EntityDeletedByImp<UserT : EntityI>(
override var deletedBy: UserT?
) : EntityDeletedBy<UserT>
/* Mixed */
class EntityDeletedImp<UserT : EntityI>(
override var deletedBy: UserT? = null
) : EntityDeletedBy<UserT>,
EntityDeletedAt by EntityDeletedAtImp()
class EntityUpdatedImp<UserT : EntityI>(
override var updatedAt: DateTime? = null,
override var updatedBy: UserT? = null
) : EntityUpdatedBy<UserT>,
EntityUpdatedAt by EntityUpdatedAtImp()
class EntityCreatedImp<UserT : EntityI>(
override var createdAt: DateTime? = null,
override var createdBy: UserT? = null
) : EntityCreatedBy<UserT>,
EntityCreatedAt by EntityCreatedAtImp()
/* Published */
interface Published<UserT : EntityI> {
var publishedAt: DateTime?
var publishedBy: UserT?
}
class EntityPublishedImp<UserT : EntityI>(
override var publishedBy: UserT?
) : Published<UserT> {
override var publishedAt: DateTime? = null
}
/* Implementation */
abstract class EntityImp<T, UserT : EntityI>(
updatedBy: UserT?
) : Entity<T>(),
EntityCreatedAt by EntityCreatedAtImp(),
EntityUpdatedAt by EntityUpdatedAtImp(),
EntityDeletedAt by EntityDeletedAtImp(),
EntityCreatedBy<UserT> by EntityCreatedByImp(updatedBy),
EntityUpdatedBy<UserT> by EntityUpdatedByImp(updatedBy),
EntityDeletedBy<UserT> by EntityDeletedByImp(updatedBy)
abstract class UuidEntityExtended<T, UserT : EntityI>(
updatedBy: UserT?,
publishedBy: UserT?
) :
EntityImp<T, UserT>(updatedBy),
EntityVersioning<UUID, Int> by UuidEntityVersioning(),
Published<UserT> by EntityPublishedImp(publishedBy)

View File

@@ -5,7 +5,7 @@ import fr.postgresjson.connexion.Connection
import fr.postgresjson.migration.Migration.Action
import fr.postgresjson.migration.Migration.Status
import java.util.*
import java.util.concurrent.CompletionException
import java.util.concurrent.*
import fr.postgresjson.definition.Function as DefinitionFunction
data class Function(
@@ -75,7 +75,7 @@ data class Function(
}
override fun status(): Status {
val result = connection.inTransaction {
connection.inTransaction {
up()
down()
it.sendQuery("ROLLBACK")

View File

@@ -2,14 +2,15 @@ package fr.postgresjson.migration
import com.fasterxml.jackson.core.type.TypeReference
import fr.postgresjson.connexion.Connection
import fr.postgresjson.definition.Function.FunctionNotFound
import fr.postgresjson.entity.Entity
import fr.postgresjson.definition.Migration as DefinitionMigration
import fr.postgresjson.entity.mutable.Entity
import fr.postgresjson.migration.Migration.Action
import fr.postgresjson.migration.Migration.Status
import fr.postgresjson.utils.LoggerDelegate
import fr.postgresjson.utils.searchSqlFiles
import org.slf4j.Logger
import java.io.File
import java.io.FileNotFoundException
import java.net.URI
import java.util.*
import fr.postgresjson.definition.Function as DefinitionFunction
@@ -35,27 +36,27 @@ interface Migration {
data class Migrations private constructor(
private val connection: Connection,
private val queries: MutableMap<String, Query> = mutableMapOf(),
private val migrationsScripts: MutableMap<String, Query> = mutableMapOf(),
private val functions: MutableMap<String, Function> = mutableMapOf()
) {
private var directories: List<File> = emptyList()
private var directories: List<URI> = emptyList()
private val logger: Logger? by LoggerDelegate()
constructor(directory: File, connection: Connection) : this(listOf(directory), connection)
constructor(directory: URI, connection: Connection) : this(listOf(directory), connection)
constructor(directories: List<File>, connection: Connection) : this(connection) {
constructor(directories: List<URI>, connection: Connection) : this(connection) {
initDB()
this.directories = directories
reset()
}
fun reset() {
queries.clear()
migrationsScripts.clear()
functions.clear()
getMigrationFromDB()
getMigrationFromDirectory(directories)
queries.forEach { (_, query) ->
migrationsScripts.forEach { (_, query) ->
if (query.doExecute === null) {
query.doExecute = Action.DOWN
}
@@ -84,7 +85,7 @@ data class Migrations private constructor(
this::class.java.classLoader.getResource("sql/migration/findAllHistory.sql")!!.readText().let {
connection.select<MigrationEntity>(it, object : TypeReference<List<MigrationEntity>>() {})
.map { query ->
queries[query.filename] = Query(query.filename, query.up, query.down, connection, query.executedAt)
migrationsScripts[query.filename] = Query(query.filename, query.up, query.down, connection, query.executedAt)
}
}
}
@@ -92,7 +93,7 @@ data class Migrations private constructor(
/**
* Get all migrations from multiple directories
*/
private fun getMigrationFromDirectory(directory: List<File>) {
private fun getMigrationFromDirectory(directory: List<URI>) {
directory.forEach {
getMigrationFromDirectory(it)
}
@@ -101,29 +102,26 @@ data class Migrations private constructor(
/**
* Get all migrations from a directory
*/
private fun getMigrationFromDirectory(directory: File) {
directory.walk().filter {
it.isFile
}.forEach { file ->
if (file.name.endsWith(".up.sql")) {
file.path.substring(0, file.path.length - 7).let {
try {
val down = File("$it.down.sql").readText()
val up = file.readText()
val name = file.name.substring(0, file.name.length - 7)
addQuery(name, up, down)
} catch (e: FileNotFoundException) {
throw DownMigrationNotDefined("$it.down.sql", e)
}
private fun getMigrationFromDirectory(directory: URI) {
val downs: MutableMap<String, DefinitionMigration> = mutableMapOf()
/* Set Down Migration */
directory.searchSqlFiles().apply {
forEach { migration ->
if (migration is DefinitionMigration && migration.direction == DefinitionMigration.Direction.DOWN) {
downs += migration.name to migration
}
} else if (file.name.endsWith(".down.sql")) {
// Nothing
} else {
val fileContent = file.readText()
try {
addFunction(fileContent)
} catch (e: FunctionNotFound) {
// Nothing
}
/* Set up migrations and functions */
forEach { migration ->
if (migration is DefinitionMigration && migration.direction == DefinitionMigration.Direction.UP) {
val down = downs[migration.name] ?: throw DownMigrationNotDefined(migration.name + ".down.sql")
downs -= migration.name
addQuery(migration, down)
} else if (migration is DefinitionFunction) {
addFunction(migration)
}
}
}
@@ -131,7 +129,7 @@ data class Migrations private constructor(
enum class Direction { UP, DOWN }
internal class DownMigrationNotDefined(path: String, cause: FileNotFoundException) :
internal class DownMigrationNotDefined(path: String, cause: FileNotFoundException? = null) :
Throwable("The file $path whas not found", cause)
fun addFunction(newDefinition: DefinitionFunction, callback: (Function) -> Unit = {}): Migrations {
@@ -155,18 +153,21 @@ data class Migrations private constructor(
return this
}
fun addQuery(up: DefinitionMigration, down: DefinitionMigration, callback: (Query) -> Unit = {}): Migrations =
addQuery(up.name, up.script, down.script, callback)
fun addQuery(name: String, up: String, down: String, callback: (Query) -> Unit = {}): Migrations {
if (queries[name] === null) {
queries[name] = Query(name, up, down, connection).apply {
if (migrationsScripts[name] === null) {
migrationsScripts[name] = Query(name, up, down, connection).apply {
doExecute = Action.UP
}
} else {
queries[name]!!.apply {
migrationsScripts[name]!!.apply {
doExecute = Action.OK
}
}
callback(queries[name]!!)
callback(migrationsScripts[name]!!)
return this
}
@@ -191,7 +192,7 @@ data class Migrations private constructor(
internal fun up(): Map<String, Status> {
val list: MutableMap<String, Status> = mutableMapOf()
queries.forEach {
migrationsScripts.forEach {
it.value.let { query ->
if (query.doExecute == Action.UP) {
query.up().let { status ->
@@ -216,7 +217,7 @@ data class Migrations private constructor(
internal fun down(force: Boolean = false): Map<String, Status> {
val list: MutableMap<String, Status> = mutableMapOf()
queries.forEach {
migrationsScripts.forEach {
it.value.let { query ->
if (query.doExecute == Action.DOWN || force) {
query.down().let { status ->
@@ -297,7 +298,7 @@ data class Migrations private constructor(
}
fun copy(): Migrations {
val queriesCopy = queries.map {
val queriesCopy = migrationsScripts.map {
it.key to it.value.copy()
}.toMap().toMutableMap()
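A usage sketch of the reworked Migrations class, mirroring the tests further down; `connection` is an existing fr.postgresjson.connexion.Connection, the classpath folders come from those tests, and the behaviour of runDry/run is inferred from them.
import java.net.URI

fun migrate(connection: Connection) {
    val dirs: List<URI> = listOf(
        Migrations::class.java.getResource("/sql/migrations")!!.toURI(),
        Migrations::class.java.getResource("/sql/function/Test")!!.toURI()
    )
    Migrations(dirs, connection).apply {
        runDry()   // reports pending migrations without applying them (see the tests below)
        run()      // applies pending *.up.sql scripts and (re)installs the SQL functions
    }
}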

View File

@@ -1,7 +1,7 @@
package fr.postgresjson.migration
import fr.postgresjson.connexion.Connection
import fr.postgresjson.entity.Entity
import fr.postgresjson.entity.mutable.Entity
import fr.postgresjson.migration.Migration.Action
import java.util.*
@@ -48,7 +48,7 @@ data class Query(
}
override fun status(): Migration.Status {
val result = connection.inTransaction {
connection.inTransaction {
up()
down()
it.sendQuery("ROLLBACK")

View File

@@ -3,7 +3,7 @@ package fr.postgresjson.repository
import fr.postgresjson.connexion.Requester
interface RepositoryI {
var requester: Requester
val requester: Requester
enum class Direction {
asc,

View File

@@ -49,5 +49,6 @@ class Serializer(val mapper: ObjectMapper = jacksonObjectMapper()) {
}
fun Serializable.serialize(pretty: Boolean = false) = Serializer().serialize(this, pretty)
fun List<Serializable>.serialize(pretty: Boolean = false) = Serializer().serialize(this, pretty)
inline fun <reified E : Serializable> E.deserialize(json: String) = Serializer().deserialize(json, this)
inline fun <reified E : Serializable> String.deserialize() = Serializer().deserialize<E>(this)
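A usage sketch of the serializer extensions, including the new List<Serializable>.serialize() added here; Item and the JSON shape are illustrative, and Serializable is the marker interface from fr.postgresjson.entity.
data class Item(val id: Int, val name: String) : Serializable

val json = Item(1, "first").serialize(pretty = true)
val jsonList = listOf(Item(1, "first"), Item(2, "second")).serialize()
val back = """{"id": 1, "name": "first"}""".deserialize<Item>()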

View File

@@ -0,0 +1,46 @@
package fr.postgresjson.utils
import fr.postgresjson.definition.Resource
import org.slf4j.Logger
import org.slf4j.LoggerFactory
import java.net.URI
import java.net.URL
import java.nio.file.FileSystems
import java.nio.file.FileVisitOption
import java.nio.file.Files
import java.nio.file.Path
import kotlin.streams.asSequence
fun URL.searchSqlFiles() = this.toURI().searchSqlFiles()
fun URI.searchSqlFiles() = sequence<Resource> {
val logger: Logger = LoggerFactory.getLogger("sqlFilesSearch")
val uri: URI = this@searchSqlFiles
if (uri.scheme == "jar") {
val relativePath = uri.toString().substringAfter('!')
FileSystems
.newFileSystem(uri, emptyMap<String, Any>())
.getPath(relativePath)
.walk(5)
.asSequence()
.filter { it.fileName.toString().endsWith(".sql") }
.map { this::class.java.getResource(it.toString()) }
.forEach {
logger.debug(it.toString())
yield(Resource.build(it))
}
} else {
uri
.walk(5)
.asSequence()
.map { it.toFile() }
.filter { it.isFile && it.extension == "sql" }
.forEach {
logger.debug(it.toString())
yield(Resource.build(it))
}
}
}
private fun Path.walk(maxDepth: Int = 2147483647, vararg options: FileVisitOption) = Files.walk(this, maxDepth, *options)
private fun URI.walk(maxDepth: Int = 2147483647, vararg options: FileVisitOption) = Files.walk(Path.of(this), maxDepth, *options)
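A usage sketch of searchSqlFiles, which walks a filesystem directory or a jar entry and yields parsed Resource objects; the /sql classpath folder is an assumption.
fun main() {
    // "/sql" is an assumed classpath folder containing *.sql files
    val sqlRoot = object {}.javaClass.getResource("/sql")!!.toURI()
    sqlRoot.searchSqlFiles().forEach { resource ->
        when (resource) {
            is fr.postgresjson.definition.Function -> println("function:  ${resource.name}")
            is fr.postgresjson.definition.Migration -> println("migration: ${resource.name} (${resource.direction})")
            is fr.postgresjson.definition.Query -> println("query:     ${resource.name}")
        }
    }
}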

View File

@@ -1,7 +1,7 @@
package fr.postgresjson
import fr.postgresjson.connexion.Paginated
import fr.postgresjson.entity.IdEntity
import fr.postgresjson.entity.mutable.IdEntity
import fr.postgresjson.entity.Parameter
import org.junit.Assert.*
import org.junit.jupiter.api.Assertions
@@ -38,8 +38,7 @@ class ConnectionTest() : TestAbstract() {
""".trimIndent()
)
assertNotNull(objs)
assertTrue(objs is List<ObjTest2>)
assertEquals(objs!!.size, 2)
assertEquals(objs.size, 2)
assertEquals(objs[0].id, 1)
assertEquals(objs[0].test!!.id, 1)
}

View File

@@ -1,6 +1,7 @@
package fr.postgresjson
import fr.postgresjson.entity.*
import fr.postgresjson.entity.EntityI
import fr.postgresjson.entity.mutable.*
import org.junit.jupiter.api.Assertions.assertTrue
import org.junit.jupiter.api.Test
import org.junit.jupiter.api.TestInstance

View File

@@ -10,13 +10,12 @@ import org.amshove.kluent.shouldThrow
import org.junit.jupiter.api.Assertions
import org.junit.jupiter.api.Test
import org.junit.jupiter.api.TestInstance
import java.io.File
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
class MigrationTest() : TestAbstract() {
@Test
fun `run up query`() {
val resources = File(this::class.java.getResource("/sql/migrations").toURI())
val resources = this::class.java.getResource("/sql/migrations").toURI()
val m = Migrations(resources, connection)
m.up().apply {
this `should contain` Pair("1", Migration.Status.OK)
@@ -28,7 +27,7 @@ class MigrationTest() : TestAbstract() {
@Test
fun `migration up Query should throw error if no down`() {
val resources = File(this::class.java.getResource("/sql/migration_without_down").toURI())
val resources = this::class.java.getResource("/sql/migration_without_down").toURI()
invoking {
Migrations(resources, connection)
} shouldThrow Migrations.DownMigrationNotDefined::class
@@ -36,7 +35,7 @@ class MigrationTest() : TestAbstract() {
@Test
fun `run forced down query`() {
val resources = File(this::class.java.getResource("/sql/migrations").toURI())
val resources = this::class.java.getResource("/sql/migrations").toURI()
val m = Migrations(resources, connection)
repeat(3) {
m.down(true).apply {
@@ -48,7 +47,7 @@ class MigrationTest() : TestAbstract() {
@Test
fun `run dry migrations`() {
val resources = File(this::class.java.getResource("/sql/real_migrations").toURI())
val resources = this::class.java.getResource("/sql/real_migrations").toURI()
Migrations(resources, connection).apply {
runDry().size `should be equal to` 2
}
@@ -59,7 +58,7 @@ class MigrationTest() : TestAbstract() {
@Test
fun `run dry migrations launch twice`() {
val resources = File(this::class.java.getResource("/sql/real_migrations").toURI())
val resources = this::class.java.getResource("/sql/real_migrations").toURI()
Migrations(resources, connection).apply {
runDry().size `should be equal to` 2
runDry().size `should be equal to` 2
@@ -68,7 +67,7 @@ class MigrationTest() : TestAbstract() {
@Test
fun `run migrations`() {
val resources = File(this::class.java.getResource("/sql/real_migrations").toURI())
val resources = this::class.java.getResource("/sql/real_migrations").toURI()
Migrations(resources, connection).apply {
run().apply {
size `should be equal to` 1
@@ -78,8 +77,8 @@ class MigrationTest() : TestAbstract() {
@Test
fun `run migrations force down`() {
val resources = File(this::class.java.getResource("/sql/real_migrations").toURI())
val resourcesFunctions = File(this::class.java.getResource("/sql/function/Test").toURI())
val resources = this::class.java.getResource("/sql/real_migrations").toURI()
val resourcesFunctions = this::class.java.getResource("/sql/function/Test").toURI()
Migrations(listOf(resources, resourcesFunctions), connection).apply {
up().apply {
size `should be equal to` 6
@@ -94,7 +93,7 @@ class MigrationTest() : TestAbstract() {
@Test
fun `run functions migrations`() {
val resources = File(this::class.java.getResource("/sql/function/Test").toURI())
val resources = this::class.java.getResource("/sql/function/Test").toURI()
Migrations(resources, connection).apply {
run().size `should be equal to` 5
}
@@ -110,7 +109,7 @@ class MigrationTest() : TestAbstract() {
@Test
fun `run functions migrations and drop if exist`() {
val resources = File(this::class.java.getResource("/sql/function/Test1").toURI())
val resources = this::class.java.getResource("/sql/function/Test1").toURI()
Migrations(resources, connection).apply {
run().size `should be equal to` 1
}
@@ -123,7 +122,7 @@ class MigrationTest() : TestAbstract() {
Assertions.assertEquals(objTest!!.id, 3)
Assertions.assertEquals(objTest.name, "test")
val resources2 = File(this::class.java.getResource("/sql/function/Test2").toURI())
val resources2 = this::class.java.getResource("/sql/function/Test2").toURI()
Migrations(resources2, connection).apply {
run().size `should be equal to` 1
}

View File

@@ -2,21 +2,20 @@ package fr.postgresjson
import fr.postgresjson.connexion.Paginated
import fr.postgresjson.connexion.Requester
import fr.postgresjson.entity.IdEntity
import fr.postgresjson.entity.mutable.IdEntity
import org.junit.Assert
import org.junit.jupiter.api.Assertions.assertEquals
import org.junit.jupiter.api.Test
import java.io.File
class RequesterTest : TestAbstract() {
class ObjTest(var name: String) : IdEntity(1)
@Test
fun `get query from file`() {
val resources = File(this::class.java.getResource("/sql/query").toURI())
val resources = this::class.java.getResource("/sql/query").toURI()
val objTest: ObjTest? = Requester(connection)
.addQuery(resources)
.getQuery("Test/selectOne")
.getQuery("selectOne")
.selectOne()
assertEquals(objTest!!.id, 2)
@@ -25,7 +24,7 @@ class RequesterTest : TestAbstract() {
@Test
fun `get function from file`() {
val resources = File(this::class.java.getResource("/sql/function/Test").toURI())
val resources = this::class.java.getResource("/sql/function/Test").toURI()
val objTest: ObjTest? = Requester(connection)
.addFunction(resources)
.getFunction("test_function")
@@ -37,10 +36,10 @@ class RequesterTest : TestAbstract() {
@Test
fun `call exec on query`() {
val resources = File(this::class.java.getResource("/sql/query").toURI())
val resources = this::class.java.getResource("/sql/query").toURI()
val result = Requester(connection)
.addQuery(resources)
.getQuery("Test/selectOne")
.getQuery("selectOne")
.exec()
assertEquals(1, result.rowsAffected)
@@ -48,7 +47,7 @@ class RequesterTest : TestAbstract() {
@Test
fun `call exec on function`() {
val resources = File(this::class.java.getResource("/sql/function/Test").toURI())
val resources = this::class.java.getResource("/sql/function/Test").toURI()
val result = Requester(connection)
.addFunction(resources)
.getFunction("test_function")
@@ -58,11 +57,11 @@ class RequesterTest : TestAbstract() {
}
@Test
fun `call sendQuery on query`() {
val resources = File(this::class.java.getResource("/sql/query").toURI())
fun `call sendQuery on query with name`() {
val resources = this::class.java.getResource("/sql/query").toURI()
val result = Requester(connection)
.addQuery(resources)
.getQuery("Test/exec")
.getQuery("DeleteTest")
.sendQuery()
assertEquals(0, result)
@@ -70,7 +69,7 @@ class RequesterTest : TestAbstract() {
@Test
fun `call sendQuery on function`() {
val resources = File(this::class.java.getResource("/sql/function/Test").toURI())
val resources = this::class.java.getResource("/sql/function/Test").toURI()
val result = Requester(connection)
.addFunction(resources)
.getFunction("function_void")
@@ -81,7 +80,7 @@ class RequesterTest : TestAbstract() {
@Test
fun `call selectOne on function`() {
val resources = File(this::class.java.getResource("/sql/function/Test").toURI())
val resources = this::class.java.getResource("/sql/function/Test").toURI()
val obj: ObjTest = Requester(connection)
.addFunction(resources)
.getFunction("test_function")
@@ -92,7 +91,7 @@ class RequesterTest : TestAbstract() {
@Test
fun `call selectOne on function with object`() {
val resources = File(this::class.java.getResource("/sql/function/Test").toURI())
val resources = this::class.java.getResource("/sql/function/Test").toURI()
val obj2 = ObjTest("original")
val obj: ObjTest = Requester(connection)
.addFunction(resources)
@@ -105,10 +104,10 @@ class RequesterTest : TestAbstract() {
@Test
fun `call selectOne on query`() {
val resources = File(this::class.java.getResource("/sql/query").toURI())
val resources = this::class.java.getResource("/sql/query").toURI()
val obj: ObjTest = Requester(connection)
.addQuery(resources)
.getQuery("Test/selectOneWithParameters")
.getQuery("selectOneWithParameters")
.selectOne(mapOf("name" to "myName"))!!
assertEquals("myName", obj.name)
@@ -116,7 +115,7 @@ class RequesterTest : TestAbstract() {
@Test
fun `call select (multiple) on function`() {
val resources = File(this::class.java.getResource("/sql/function/Test").toURI())
val resources = this::class.java.getResource("/sql/function/Test").toURI()
val obj: List<ObjTest>? = Requester(connection)
.addFunction(resources)
.getFunction("test_function_multiple")
@@ -127,10 +126,10 @@ class RequesterTest : TestAbstract() {
@Test
fun `call select paginated on query`() {
val resources = File(this::class.java.getResource("/sql/query").toURI())
val resources = this::class.java.getResource("/sql/query").toURI()
val result: Paginated<ObjTest> = Requester(connection)
.addQuery(resources)
.getQuery("Test/selectPaginated")
.getQuery("selectPaginated")
.select(1, 2, mapOf("name" to "ff"))
Assert.assertNotNull(result)
Assert.assertEquals("ff", result.result[0].name)
@@ -141,7 +140,7 @@ class RequesterTest : TestAbstract() {
@Test
fun `call select paginated on function`() {
val resources = File(this::class.java.getResource("/sql/function").toURI())
val resources = this::class.java.getResource("/sql/function").toURI()
val result: Paginated<ObjTest> = Requester(connection)
.addFunction(resources)
.getFunction("test_function_paginated")
@@ -155,10 +154,10 @@ class RequesterTest : TestAbstract() {
@Test
fun `call selectOne on query with extra parameter`() {
val resources = File(this::class.java.getResource("/sql/query").toURI())
val resources = this::class.java.getResource("/sql/query").toURI()
val obj: ObjTest = Requester(connection)
.addQuery(resources)
.getQuery("Test/selectOneWithParameters")
.getQuery("selectOneWithParameters")
.selectOne(mapOf("name" to "myName")) {
assertEquals("myName", it!!.name)
Assert.assertEquals("plop", rows[0].getString("other"))

View File

@@ -1,6 +1,6 @@
package fr.postgresjson
import fr.postgresjson.entity.IdEntity
import fr.postgresjson.entity.mutable.IdEntity
import fr.postgresjson.serializer.Serializer
import fr.postgresjson.serializer.deserialize
import fr.postgresjson.serializer.serialize
@@ -41,6 +41,14 @@ internal class SerializerTest {
assertTrue(json.contains(""""val1":"plop","val2":123"""))
}
@Test
fun serializeList() {
val list = listOf(ObjTest("one", 1), ObjTest("two", 2))
val json = list.serialize()
assertTrue(json.contains(""""val1":"one","val2":1"""))
assertTrue(json.contains(""""val1":"two","val2":2"""))
}
@Test
fun serializeDate() {
val objDate = ObjTestDate(DateTime.parse("2019-07-30T14:08:51.420108+04:00"))

View File

@@ -9,7 +9,7 @@ import java.io.File
@TestInstance(PER_CLASS)
abstract class TestAbstract {
protected val connection = Connection(database = "test_json", username = "test", password = "test")
protected val connection = Connection(database = "json_test", username = "test", password = "test", port = 5555)
@BeforeEach
fun beforeAll() {

View File

@@ -1 +1,2 @@
-- name: DeleteTest
delete FROM test where 2038538 = 2;
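
Editor's note: the `-- name: DeleteTest` comment is what lets a query file declare its own name (the feature from the commit message); the `RequesterTest` change earlier in this diff then resolves it with `getQuery("DeleteTest")` instead of a `Test/exec` path. A short sketch of that lookup, assuming it runs inside a class extending `TestAbstract` so `connection` is available:

package fr.postgresjson

import fr.postgresjson.connexion.Requester

class NamedQueryExample : TestAbstract() {
    fun deleteTestByName() =
        Requester(connection)
            .addQuery(this::class.java.getResource("/sql/query").toURI())
            .getQuery("DeleteTest")   // resolved from the "-- name: DeleteTest" comment
            .sendQuery()              // the updated test expects 0 back from this query
}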