diff --git a/.mvn/wrapper/maven-wrapper.jar b/.mvn/wrapper/maven-wrapper.jar deleted file mode 100755 index 01e67997..00000000 Binary files a/.mvn/wrapper/maven-wrapper.jar and /dev/null differ diff --git a/.mvn/wrapper/maven-wrapper.properties b/.mvn/wrapper/maven-wrapper.properties deleted file mode 100755 index 84fe0f88..00000000 --- a/.mvn/wrapper/maven-wrapper.properties +++ /dev/null @@ -1,2 +0,0 @@ -#Mon Oct 11 14:30:22 CEST 2021 -distributionUrl=https\://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.8.3/apache-maven-3.8.3-bin.zip diff --git a/CI.adoc b/CI.adoc deleted file mode 100644 index d0f0478e..00000000 --- a/CI.adoc +++ /dev/null @@ -1,29 +0,0 @@ -= Continuous Integration - -image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-r2dbc%2Fmain&subject=main["Spring Data R2DBC", link="https://jenkins.spring.io/view/SpringData/job/spring-data-r2dbc/"] - -== Running CI tasks locally - -Since this pipeline is purely Docker-based, it's easy to: - -* Debug what went wrong on your local machine. -* Test out a a tweak to your `test.sh` script before sending it out. -* Experiment against a new image before submitting your pull request. - -All of these use cases are great reasons to essentially run what the CI server does on your local machine. - -IMPORTANT: To do this you must have Docker installed on your machine. - -1. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-r2dbc-github -v /usr/bin/docker:/usr/bin/docker -v /var/run/docker.sock:/var/run/docker.sock adoptopenjdk/openjdk8:latest /bin/bash` -+ -This will launch the Docker image and mount your source code at `spring-data-r2dbc-github`. -+ -2. `cd spring-data-r2dbc-github` -+ -Next, test everything from inside the container: -+ -3. `./mvnw -Pci,all-dbs clean dependency:list test -Dsort -B` (or whatever test configuration you must use) - -Since the container is binding to your source, you can make edits from your IDE and continue to run build jobs. 
- -NOTE: Docker containers can eat up disk space fast! From time to time, run `docker system prune` to clean out old images. diff --git a/CONTRIBUTING.adoc b/CONTRIBUTING.adoc deleted file mode 100644 index 740e8bd0..00000000 --- a/CONTRIBUTING.adoc +++ /dev/null @@ -1,3 +0,0 @@ -= Spring Data contribution guidelines - -You find the contribution guidelines for Spring Data projects https://github.com/spring-projects/spring-data-build/blob/main/CONTRIBUTING.adoc[here]. diff --git a/Jenkinsfile b/Jenkinsfile deleted file mode 100644 index d3885a6f..00000000 --- a/Jenkinsfile +++ /dev/null @@ -1,103 +0,0 @@ -def p = [:] -node { - checkout scm - p = readProperties interpolate: true, file: 'ci/pipeline.properties' -} - -pipeline { - agent none - - triggers { - pollSCM 'H/10 * * * *' - upstream(upstreamProjects: "spring-data-commons/3.0.x,spring-data-jdbc/3.0.x", threshold: hudson.model.Result.SUCCESS) - } - - options { - disableConcurrentBuilds() - buildDiscarder(logRotator(numToKeepStr: '14')) - } - - stages { - stage("test: baseline (Java 17)") { - when { - beforeAgent(true) - anyOf { - branch(pattern: "main|(\\d\\.\\d\\.x)", comparator: "REGEXP") - not { triggeredBy 'UpstreamCause' } - } - } - agent { - label 'data' - } - options { timeout(time: 30, unit: 'MINUTES') } - - environment { - DOCKER_HUB = credentials('hub.docker.com-springbuildmaster') - ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c') - } - - steps { - script { - docker.withRegistry(p['docker.registry'], p['docker.credentials']) { - docker.image(p['docker.java.main.image']).inside(p['docker.java.inside.docker']) { - sh "docker login --username ${DOCKER_HUB_USR} --password ${DOCKER_HUB_PSW}" - sh 'PROFILE=ci ci/test.sh' - sh "ci/clean.sh" - } - } - } - } - } - - stage('Release to artifactory') { - when { - beforeAgent(true) - anyOf { - branch(pattern: "main|(\\d\\.\\d\\.x)", comparator: "REGEXP") - not { triggeredBy 'UpstreamCause' } - } - } - agent { - label 'data' - } - options { 
timeout(time: 20, unit: 'MINUTES') } - - environment { - ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c') - } - - steps { - script { - docker.withRegistry(p['docker.registry'], p['docker.credentials']) { - docker.image(p['docker.java.main.image']).inside(p['docker.java.inside.basic']) { - sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml -Pci,artifactory -Dmaven.repo.local=/tmp/jenkins-home/.m2/spring-data-r2dbc-non-root ' + - '-Dartifactory.server=https://repo.spring.io ' + - "-Dartifactory.username=${ARTIFACTORY_USR} " + - "-Dartifactory.password=${ARTIFACTORY_PSW} " + - "-Dartifactory.staging-repository=libs-snapshot-local " + - "-Dartifactory.build-name=spring-data-r2dbc " + - "-Dartifactory.build-number=${BUILD_NUMBER} " + - '-Dmaven.test.skip=true clean deploy -U -B' - } - } - } - } - } - } - - post { - changed { - script { - slackSend( - color: (currentBuild.currentResult == 'SUCCESS') ? 'good' : 'danger', - channel: '#spring-data-dev', - message: "${currentBuild.fullDisplayName} - `${currentBuild.currentResult}`\n${env.BUILD_URL}") - emailext( - subject: "[${currentBuild.fullDisplayName}] ${currentBuild.currentResult}", - mimeType: 'text/html', - recipientProviders: [[$class: 'CulpritsRecipientProvider'], [$class: 'RequesterRecipientProvider']], - body: "${currentBuild.fullDisplayName} is reported as ${currentBuild.currentResult}") - } - } - } -} diff --git a/README.adoc b/README.adoc index 247f4d5b..11be9ddd 100644 --- a/README.adoc +++ b/README.adoc @@ -1,157 +1,6 @@ -image:https://spring.io/badges/spring-data-r2dbc/snapshot.svg["Spring Data R2DBC", link="https://spring.io/projects/spring-data-r2dbc#learn"] += Spring Data R2DBC -= Spring Data R2DBC image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-r2dbc%2Fmain&subject=Build[link=https://jenkins.spring.io/view/SpringData/job/spring-data-r2dbc/] 
https://gitter.im/spring-projects/spring-data[image:https://badges.gitter.im/spring-projects/spring-data.svg[Gitter]] - -The primary goal of the https://projects.spring.io/spring-data[Spring Data] project is to make it easier to build Spring-powered applications that use data access technologies. *Spring Data R2DBC* offers the popular Repository abstraction based on https://r2dbc.io[R2DBC]. - -R2DBC is the abbreviation for https://github.com/r2dbc/[Reactive Relational Database Connectivity], an incubator to integrate relational databases using a reactive driver. - -== This is NOT an ORM - -Spring Data R2DBC aims at being conceptually easy. In order to achieve this it does NOT offer caching, lazy loading, write behind or many other features of ORM frameworks. This makes Spring Data R2DBC a simple, limited, opinionated object mapper. - -== Features - -* Spring configuration support using Java based `@Configuration` classes. -* Annotation based mapping metadata. -* Automatic implementation of Repository interfaces including support. -* Support for Reactive Transactions -* Schema and data initialization utilities. - -== Code of Conduct - -This project is governed by the https://github.com/spring-projects/.github/blob/e3cc2ff230d8f1dca06535aa6b5a4a23815861d4/CODE_OF_CONDUCT.md[Spring Code of Conduct]. By participating, you are expected to uphold this code of conduct. Please report unacceptable behavior to spring-code-of-conduct@pivotal.io. 
- -== Getting Started - -Here is a quick teaser of an application using Spring Data Repositories in Java: - -[source,java] ----- -public interface PersonRepository extends ReactiveCrudRepository { - - @Query("SELECT * FROM person WHERE lastname = :lastname") - Flux findByLastname(String lastname); - - @Query("SELECT * FROM person WHERE firstname LIKE :firstname") - Flux findByFirstnameLike(String firstname); -} - -@Service -public class MyService { - - private final PersonRepository repository; - - public MyService(PersonRepository repository) { - this.repository = repository; - } - - public void doWork() { - - repository.deleteAll().block(); - - Person person = new Person(); - person.setFirstname("Mark"); - person.setLastname("Paluch"); - repository.save(person).block(); - - Flux lastNameResults = repository.findByLastname("Paluch"); - Flux firstNameResults = repository.findByFirstnameLike("M%"); - } -} - -@Configuration -@EnableR2dbcRepositories -class ApplicationConfig extends AbstractR2dbcConfiguration { - - @Bean - public ConnectionFactory connectionFactory() { - return ConnectionFactories.get("r2dbc:h2:mem:///test?options=DB_CLOSE_DELAY=-1;DB_CLOSE_ON_EXIT=FALSE"); - } -} ----- - -=== Maven configuration - -Add the Maven dependency: - -[source,xml] ----- - - org.springframework.data - spring-data-r2dbc - ${version} - ----- - -If you'd rather like the latest snapshots of the upcoming major version, use our Maven snapshot repository and declare the appropriate dependency version. - -[source,xml] ----- - - org.springframework.data - spring-data-r2dbc - ${version}-SNAPSHOT - - - - spring-libs-snapshot - Spring Snapshot Repository - https://repo.spring.io/libs-snapshot - ----- - -== Getting Help - -Having trouble with Spring Data? We’d love to help! - -* Check the -https://docs.spring.io/spring-data/r2dbc/docs/1.0.x/reference/html/#reference[reference documentation], and https://docs.spring.io/spring-data/r2dbc/docs/1.0.x/api/[Javadocs]. 
-* Learn the Spring basics – Spring Data builds on Spring Framework, check the https://spring.io[spring.io] web-site for a wealth of reference documentation. -If you are just starting out with Spring, try one of the https://spring.io/guides[guides]. -* If you are upgrading, check out the https://docs.spring.io/spring-data/r2dbc/docs/current/changelog.txt[changelog] for "`new and noteworthy`" features. -* Ask a question - we monitor https://stackoverflow.com[stackoverflow.com] for questions tagged with https://stackoverflow.com/tags/spring-data-r2dbc[`spring-data-r2dbc`]. -* Report bugs with Spring Data R2DBC at https://github.com/spring-projects/spring-data-r2dbc/issues[github.com/spring-projects/spring-data-r2dbc/issues]. - -== Reporting Issues - -Spring Data uses GitHub as issue tracking system to record bugs and feature requests. If you want to raise an issue, please follow the recommendations below: - -* Before you log a bug, please search the -https://github.com/spring-projects/spring-data-r2dbc/issues[issue tracker] to see if someone has already reported the problem. -* If the issue does not already exist, https://github.com/spring-projects/spring-data-r2dbc/issues/new[create a new issue]. -* Please provide as much information as possible with the issue report, we like to know the version of Spring Data that you are using and JVM version. -* If you need to paste code, or include a stack trace use Markdown +++```+++ escapes before and after your text. -* If possible try to create a test-case or project that replicates the issue. Attach a link to your code or a compressed file containing your code. - -== Building from Source - -You don’t need to build from source to use Spring Data (binaries in https://repo.spring.io[repo.spring.io]), but if you want to try out the latest and greatest, Spring Data can be easily built with the https://github.com/takari/maven-wrapper[maven wrapper]. -You also need JDK 1.8. 
- -[source,bash] ----- - $ ./mvnw clean install ----- - -If you want to build with the regular `mvn` command, you will need https://maven.apache.org/run-maven/index.html[Maven v3.5.0 or above]. - -_Also see link:CONTRIBUTING.adoc[CONTRIBUTING.adoc] if you wish to submit pull requests, and in particular please sign the https://cla.pivotal.io/sign/spring[Contributor’s Agreement] before your first non-trivial change._ - -=== Building reference documentation - -Building the documentation builds also the project without running tests. - -[source,bash] ----- - $ ./mvnw clean install -Pdistribute ----- - -The generated documentation is available from `target/site/reference/html/index.html`. - -== Examples - -* https://github.com/spring-projects/spring-data-examples/[Spring Data Examples] contains example projects that explain specific features in more detail. +This project is merged as of version 3.0 in the https://github.com/spring-projects/spring-data-relational[Spring Data Relational] repository. 
== License diff --git a/ci/clean.sh b/ci/clean.sh deleted file mode 100755 index f45d0f5f..00000000 --- a/ci/clean.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/bash -x - -set -euo pipefail - -MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" \ - ./mvnw -s settings.xml clean -Dmaven.repo.local=/tmp/jenkins-home/.m2/spring-data-r2dbc diff --git a/ci/pipeline.properties b/ci/pipeline.properties deleted file mode 100644 index 43b4e65e..00000000 --- a/ci/pipeline.properties +++ /dev/null @@ -1,24 +0,0 @@ -# Java versions -java.main.tag=17.0.2_8-jdk - -# Docker container images - standard -docker.java.main.image=harbor-repo.vmware.com/dockerhub-proxy-cache/library/eclipse-temurin:${java.main.tag} - -# Supported versions of MongoDB -docker.mongodb.4.4.version=4.4.12 -docker.mongodb.5.0.version=5.0.6 - -# Supported versions of Redis -docker.redis.6.version=6.2.6 - -# Supported versions of Cassandra -docker.cassandra.3.version=3.11.12 - -# Docker environment settings -docker.java.inside.basic=-v $HOME:/tmp/jenkins-home -docker.java.inside.docker=-u root -v /var/run/docker.sock:/var/run/docker.sock -v /usr/bin/docker:/usr/bin/docker -v $HOME:/tmp/jenkins-home - -# Credentials -docker.registry= -docker.credentials=hub.docker.com-springbuildmaster -artifactory.credentials=02bd1690-b54f-4c9f-819d-a77cb7a9822c diff --git a/ci/test.sh b/ci/test.sh deleted file mode 100755 index 0c4b3892..00000000 --- a/ci/test.sh +++ /dev/null @@ -1,10 +0,0 @@ -#!/bin/bash -x - -set -euo pipefail - -mkdir -p /tmp/jenkins-home/.m2/spring-data-r2dbc -chown -R 1001:1001 . 
- -MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" \ - ./mvnw -s settings.xml \ - -P${PROFILE} clean dependency:list test -Dsort -U -B -Dmaven.repo.local=/tmp/jenkins-home/.m2/spring-data-r2dbc \ No newline at end of file diff --git a/docs/favicon.png b/docs/favicon.png deleted file mode 100644 index 890ef063..00000000 Binary files a/docs/favicon.png and /dev/null differ diff --git a/docs/index.html b/docs/index.html deleted file mode 100644 index 7b9c255e..00000000 --- a/docs/index.html +++ /dev/null @@ -1,11 +0,0 @@ - - - - Redirecting… - - - -

Redirecting…

- Click here if you are not redirected. - - diff --git a/lombok.config b/lombok.config deleted file mode 100644 index e50c7ea4..00000000 --- a/lombok.config +++ /dev/null @@ -1,2 +0,0 @@ -lombok.nonNull.exceptionType = IllegalArgumentException -lombok.log.fieldName = LOG diff --git a/mvnw b/mvnw deleted file mode 100755 index 5551fde8..00000000 --- a/mvnw +++ /dev/null @@ -1,286 +0,0 @@ -#!/bin/sh -# ---------------------------------------------------------------------------- -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -# ---------------------------------------------------------------------------- - -# ---------------------------------------------------------------------------- -# Maven2 Start Up Batch script -# -# Required ENV vars: -# ------------------ -# JAVA_HOME - location of a JDK home dir -# -# Optional ENV vars -# ----------------- -# M2_HOME - location of maven2's installed home dir -# MAVEN_OPTS - parameters passed to the Java VM when running Maven -# e.g. 
to debug Maven itself, use -# set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 -# MAVEN_SKIP_RC - flag to disable loading of mavenrc files -# ---------------------------------------------------------------------------- - -if [ -z "$MAVEN_SKIP_RC" ] ; then - - if [ -f /etc/mavenrc ] ; then - . /etc/mavenrc - fi - - if [ -f "$HOME/.mavenrc" ] ; then - . "$HOME/.mavenrc" - fi - -fi - -# OS specific support. $var _must_ be set to either true or false. -cygwin=false; -darwin=false; -mingw=false -case "`uname`" in - CYGWIN*) cygwin=true ;; - MINGW*) mingw=true;; - Darwin*) darwin=true - # Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home - # See https://developer.apple.com/library/mac/qa/qa1170/_index.html - if [ -z "$JAVA_HOME" ]; then - if [ -x "/usr/libexec/java_home" ]; then - export JAVA_HOME="`/usr/libexec/java_home`" - else - export JAVA_HOME="/Library/Java/Home" - fi - fi - ;; -esac - -if [ -z "$JAVA_HOME" ] ; then - if [ -r /etc/gentoo-release ] ; then - JAVA_HOME=`java-config --jre-home` - fi -fi - -if [ -z "$M2_HOME" ] ; then - ## resolve links - $0 may be a link to maven's home - PRG="$0" - - # need this for relative symlinks - while [ -h "$PRG" ] ; do - ls=`ls -ld "$PRG"` - link=`expr "$ls" : '.*-> \(.*\)$'` - if expr "$link" : '/.*' > /dev/null; then - PRG="$link" - else - PRG="`dirname "$PRG"`/$link" - fi - done - - saveddir=`pwd` - - M2_HOME=`dirname "$PRG"`/.. 
- - # make it fully qualified - M2_HOME=`cd "$M2_HOME" && pwd` - - cd "$saveddir" - # echo Using m2 at $M2_HOME -fi - -# For Cygwin, ensure paths are in UNIX format before anything is touched -if $cygwin ; then - [ -n "$M2_HOME" ] && - M2_HOME=`cygpath --unix "$M2_HOME"` - [ -n "$JAVA_HOME" ] && - JAVA_HOME=`cygpath --unix "$JAVA_HOME"` - [ -n "$CLASSPATH" ] && - CLASSPATH=`cygpath --path --unix "$CLASSPATH"` -fi - -# For Mingw, ensure paths are in UNIX format before anything is touched -if $mingw ; then - [ -n "$M2_HOME" ] && - M2_HOME="`(cd "$M2_HOME"; pwd)`" - [ -n "$JAVA_HOME" ] && - JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`" - # TODO classpath? -fi - -if [ -z "$JAVA_HOME" ]; then - javaExecutable="`which javac`" - if [ -n "$javaExecutable" ] && ! [ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then - # readlink(1) is not available as standard on Solaris 10. - readLink=`which readlink` - if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then - if $darwin ; then - javaHome="`dirname \"$javaExecutable\"`" - javaExecutable="`cd \"$javaHome\" && pwd -P`/javac" - else - javaExecutable="`readlink -f \"$javaExecutable\"`" - fi - javaHome="`dirname \"$javaExecutable\"`" - javaHome=`expr "$javaHome" : '\(.*\)/bin'` - JAVA_HOME="$javaHome" - export JAVA_HOME - fi - fi -fi - -if [ -z "$JAVACMD" ] ; then - if [ -n "$JAVA_HOME" ] ; then - if [ -x "$JAVA_HOME/jre/sh/java" ] ; then - # IBM's JDK on AIX uses strange locations for the executables - JAVACMD="$JAVA_HOME/jre/sh/java" - else - JAVACMD="$JAVA_HOME/bin/java" - fi - else - JAVACMD="`which java`" - fi -fi - -if [ ! -x "$JAVACMD" ] ; then - echo "Error: JAVA_HOME is not defined correctly." >&2 - echo " We cannot execute $JAVACMD" >&2 - exit 1 -fi - -if [ -z "$JAVA_HOME" ] ; then - echo "Warning: JAVA_HOME environment variable is not set." 
-fi - -CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher - -# traverses directory structure from process work directory to filesystem root -# first directory with .mvn subdirectory is considered project base directory -find_maven_basedir() { - - if [ -z "$1" ] - then - echo "Path not specified to find_maven_basedir" - return 1 - fi - - basedir="$1" - wdir="$1" - while [ "$wdir" != '/' ] ; do - if [ -d "$wdir"/.mvn ] ; then - basedir=$wdir - break - fi - # workaround for JBEAP-8937 (on Solaris 10/Sparc) - if [ -d "${wdir}" ]; then - wdir=`cd "$wdir/.."; pwd` - fi - # end of workaround - done - echo "${basedir}" -} - -# concatenates all lines of a file -concat_lines() { - if [ -f "$1" ]; then - echo "$(tr -s '\n' ' ' < "$1")" - fi -} - -BASE_DIR=`find_maven_basedir "$(pwd)"` -if [ -z "$BASE_DIR" ]; then - exit 1; -fi - -########################################################################################## -# Extension to allow automatically downloading the maven-wrapper.jar from Maven-central -# This allows using the maven wrapper in projects that prohibit checking in binary data. -########################################################################################## -if [ -r "$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" ]; then - if [ "$MVNW_VERBOSE" = true ]; then - echo "Found .mvn/wrapper/maven-wrapper.jar" - fi -else - if [ "$MVNW_VERBOSE" = true ]; then - echo "Couldn't find .mvn/wrapper/maven-wrapper.jar, downloading it ..." - fi - jarUrl="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.4.2/maven-wrapper-0.4.2.jar" - while IFS="=" read key value; do - case "$key" in (wrapperUrl) jarUrl="$value"; break ;; - esac - done < "$BASE_DIR/.mvn/wrapper/maven-wrapper.properties" - if [ "$MVNW_VERBOSE" = true ]; then - echo "Downloading from: $jarUrl" - fi - wrapperJarPath="$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" - - if command -v wget > /dev/null; then - if [ "$MVNW_VERBOSE" = true ]; then - echo "Found wget ... 
using wget" - fi - wget "$jarUrl" -O "$wrapperJarPath" - elif command -v curl > /dev/null; then - if [ "$MVNW_VERBOSE" = true ]; then - echo "Found curl ... using curl" - fi - curl -o "$wrapperJarPath" "$jarUrl" - else - if [ "$MVNW_VERBOSE" = true ]; then - echo "Falling back to using Java to download" - fi - javaClass="$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.java" - if [ -e "$javaClass" ]; then - if [ ! -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then - if [ "$MVNW_VERBOSE" = true ]; then - echo " - Compiling MavenWrapperDownloader.java ..." - fi - # Compiling the Java class - ("$JAVA_HOME/bin/javac" "$javaClass") - fi - if [ -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then - # Running the downloader - if [ "$MVNW_VERBOSE" = true ]; then - echo " - Running MavenWrapperDownloader.java ..." - fi - ("$JAVA_HOME/bin/java" -cp .mvn/wrapper MavenWrapperDownloader "$MAVEN_PROJECTBASEDIR") - fi - fi - fi -fi -########################################################################################## -# End of extension -########################################################################################## - -export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"} -if [ "$MVNW_VERBOSE" = true ]; then - echo $MAVEN_PROJECTBASEDIR -fi -MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS" - -# For Cygwin, switch paths to Windows format before running java -if $cygwin; then - [ -n "$M2_HOME" ] && - M2_HOME=`cygpath --path --windows "$M2_HOME"` - [ -n "$JAVA_HOME" ] && - JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"` - [ -n "$CLASSPATH" ] && - CLASSPATH=`cygpath --path --windows "$CLASSPATH"` - [ -n "$MAVEN_PROJECTBASEDIR" ] && - MAVEN_PROJECTBASEDIR=`cygpath --path --windows "$MAVEN_PROJECTBASEDIR"` -fi - -WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain - -exec "$JAVACMD" \ - $MAVEN_OPTS \ - -classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \ - "-Dmaven.home=${M2_HOME}" 
"-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \ - ${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@" diff --git a/mvnw.cmd b/mvnw.cmd deleted file mode 100755 index e5cfb0ae..00000000 --- a/mvnw.cmd +++ /dev/null @@ -1,161 +0,0 @@ -@REM ---------------------------------------------------------------------------- -@REM Licensed to the Apache Software Foundation (ASF) under one -@REM or more contributor license agreements. See the NOTICE file -@REM distributed with this work for additional information -@REM regarding copyright ownership. The ASF licenses this file -@REM to you under the Apache License, Version 2.0 (the -@REM "License"); you may not use this file except in compliance -@REM with the License. You may obtain a copy of the License at -@REM -@REM http://www.apache.org/licenses/LICENSE-2.0 -@REM -@REM Unless required by applicable law or agreed to in writing, -@REM software distributed under the License is distributed on an -@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -@REM KIND, either express or implied. See the License for the -@REM specific language governing permissions and limitations -@REM under the License. -@REM ---------------------------------------------------------------------------- - -@REM ---------------------------------------------------------------------------- -@REM Maven2 Start Up Batch script -@REM -@REM Required ENV vars: -@REM JAVA_HOME - location of a JDK home dir -@REM -@REM Optional ENV vars -@REM M2_HOME - location of maven2's installed home dir -@REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands -@REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a key stroke before ending -@REM MAVEN_OPTS - parameters passed to the Java VM when running Maven -@REM e.g. 
to debug Maven itself, use -@REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 -@REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files -@REM ---------------------------------------------------------------------------- - -@REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on' -@echo off -@REM set title of command window -title %0 -@REM enable echoing my setting MAVEN_BATCH_ECHO to 'on' -@if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO% - -@REM set %HOME% to equivalent of $HOME -if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%") - -@REM Execute a user defined script before this one -if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre -@REM check for pre script, once with legacy .bat ending and once with .cmd ending -if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat" -if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd" -:skipRcPre - -@setlocal - -set ERROR_CODE=0 - -@REM To isolate internal variables from possible post scripts, we use another setlocal -@setlocal - -@REM ==== START VALIDATION ==== -if not "%JAVA_HOME%" == "" goto OkJHome - -echo. -echo Error: JAVA_HOME not found in your environment. >&2 -echo Please set the JAVA_HOME variable in your environment to match the >&2 -echo location of your Java installation. >&2 -echo. -goto error - -:OkJHome -if exist "%JAVA_HOME%\bin\java.exe" goto init - -echo. -echo Error: JAVA_HOME is set to an invalid directory. >&2 -echo JAVA_HOME = "%JAVA_HOME%" >&2 -echo Please set the JAVA_HOME variable in your environment to match the >&2 -echo location of your Java installation. >&2 -echo. -goto error - -@REM ==== END VALIDATION ==== - -:init - -@REM Find the project base dir, i.e. the directory that contains the folder ".mvn". -@REM Fallback to current working directory if not found. 
- -set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR% -IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir - -set EXEC_DIR=%CD% -set WDIR=%EXEC_DIR% -:findBaseDir -IF EXIST "%WDIR%"\.mvn goto baseDirFound -cd .. -IF "%WDIR%"=="%CD%" goto baseDirNotFound -set WDIR=%CD% -goto findBaseDir - -:baseDirFound -set MAVEN_PROJECTBASEDIR=%WDIR% -cd "%EXEC_DIR%" -goto endDetectBaseDir - -:baseDirNotFound -set MAVEN_PROJECTBASEDIR=%EXEC_DIR% -cd "%EXEC_DIR%" - -:endDetectBaseDir - -IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig - -@setlocal EnableExtensions EnableDelayedExpansion -for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a -@endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS% - -:endReadAdditionalConfig - -SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe" -set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar" -set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain - -set DOWNLOAD_URL="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.4.2/maven-wrapper-0.4.2.jar" -FOR /F "tokens=1,2 delims==" %%A IN (%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties) DO ( - IF "%%A"=="wrapperUrl" SET DOWNLOAD_URL=%%B -) - -@REM Extension to allow automatically downloading the maven-wrapper.jar from Maven-central -@REM This allows using the maven wrapper in projects that prohibit checking in binary data. -if exist %WRAPPER_JAR% ( - echo Found %WRAPPER_JAR% -) else ( - echo Couldn't find %WRAPPER_JAR%, downloading it ... 
- echo Downloading from: %DOWNLOAD_URL% - powershell -Command "(New-Object Net.WebClient).DownloadFile('%DOWNLOAD_URL%', '%WRAPPER_JAR%')" - echo Finished downloading %WRAPPER_JAR% -) -@REM End of extension - -%MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %* -if ERRORLEVEL 1 goto error -goto end - -:error -set ERROR_CODE=1 - -:end -@endlocal & set ERROR_CODE=%ERROR_CODE% - -if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost -@REM check for post script, once with legacy .bat ending and once with .cmd ending -if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat" -if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd" -:skipRcPost - -@REM pause the script if MAVEN_BATCH_PAUSE is set to 'on' -if "%MAVEN_BATCH_PAUSE%" == "on" pause - -if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE% - -exit /B %ERROR_CODE% diff --git a/pom.xml b/pom.xml deleted file mode 100644 index 50e5344f..00000000 --- a/pom.xml +++ /dev/null @@ -1,493 +0,0 @@ - - - - 4.0.0 - - org.springframework.data - spring-data-r2dbc - 3.0.0-SNAPSHOT - - Spring Data R2DBC - Spring Data module for R2DBC - https://projects.spring.io/spring-data-r2dbc - - - org.springframework.data.build - spring-data-parent - 3.0.0-SNAPSHOT - - - - - DATAR2DBC - - 3.0.0-SNAPSHOT - 3.0.0-SNAPSHOT - ${springdata.jdbc} - spring.data.r2dbc - reuseReports - - 0.1.4 - 42.2.25 - 8.0.21 - 0.9.1.RELEASE - 7.1.2.jre8-preview - 2.5.4 - Borca-RELEASE - 1.0.3 - 4.1.73.Final - - - 2018 - - - - mpaluch - Mark Paluch - mpaluch(at)pivotal.io - Pivotal Software, Inc. - https://pivotal.io - - Project Lead - - +1 - - - ogierke - Oliver Gierke - ogierke(at)pivotal.io - Pivotal Software, Inc. 
- https://pivotal.io - - Project Lead - - +1 - - - - - - - io.r2dbc - r2dbc-bom - ${r2dbc-releasetrain.version} - pom - import - - - org.testcontainers - testcontainers-bom - ${testcontainers} - pom - import - - - io.netty - netty-bom - ${netty} - pom - import - - - - - - - - ${project.groupId} - spring-data-commons - ${springdata.commons} - - - - ${project.groupId} - spring-data-relational - ${springdata.relational} - - - - org.springframework - spring-r2dbc - - - - org.springframework - spring-tx - - - - org.springframework - spring-context - - - - org.springframework - spring-beans - - - - org.springframework - spring-jdbc - true - - - - org.springframework - spring-core - - - - io.r2dbc - r2dbc-spi - - - - io.projectreactor - reactor-core - - - - - org.jetbrains.kotlin - kotlin-stdlib - true - - - - org.jetbrains.kotlin - kotlin-reflect - true - - - - org.jetbrains.kotlinx - kotlinx-coroutines-core - true - - - - org.jetbrains.kotlinx - kotlinx-coroutines-reactor - true - - - - org.assertj - assertj-core - ${assertj} - test - - - - io.projectreactor - reactor-test - test - - - - - - org.postgresql - postgresql - ${postgresql.version} - test - - - - mysql - mysql-connector-java - ${mysql.version} - test - - - - org.mariadb.jdbc - mariadb-java-client - ${mariadb-jdbc.version} - test - - - - com.microsoft.sqlserver - mssql-jdbc - ${mssql-jdbc.version} - test - - - - com.oracle.database.jdbc - ojdbc11 - 21.4.0.0.1 - test - - - - - - org.postgresql - r2dbc-postgresql - true - - - - io.r2dbc - r2dbc-h2 - test - - - - io.r2dbc - r2dbc-mssql - test - - - - org.mariadb - r2dbc-mariadb - test - - - - io.r2dbc - r2dbc-spi-test - ${r2dbc-spi-test.version} - test - - - - - - org.testcontainers - mysql - test - - - org.slf4j - jcl-over-slf4j - - - - - - org.testcontainers - mariadb - test - - - org.slf4j - jcl-over-slf4j - - - - - - org.testcontainers - oracle-xe - test - - - - org.testcontainers - postgresql - test - - - - de.schauderhaft.degraph - degraph-check - 
${degraph-check.version} - test - - - - io.mockk - mockk - ${mockk} - test - - - - org.awaitility - awaitility - 4.0.3 - test - - - - - - - - - - - org.jacoco - jacoco-maven-plugin - ${jacoco} - - ${jacoco.destfile} - - - - jacoco-initialize - - prepare-agent - - - - - - - org.apache.maven.plugins - maven-javadoc-plugin - - - https://docs.spring.io/spring/docs/${spring}/javadoc-api/ - - - https://docs.spring.io/spring-data/commons/docs/current/api/ - - https://docs.oracle.com/javase/8/docs/api/ - https://r2dbc.io/spec/0.8.0.RELEASE/api/ - - - - - - org.apache.maven.plugins - maven-surefire-plugin - - - default-test - - - **/*Tests.java - - - - - - - - org.apache.maven.plugins - maven-assembly-plugin - - - - org.asciidoctor - asciidoctor-maven-plugin - - ${project.root}/src/main/asciidoc - index.adoc - book - - ${project.version} - ${project.name} - ${project.version} - ${aspectj} - ${querydsl} - ${spring} - ${r2dbc-releasetrain.version} - ${reactive-streams.version} - - ${releasetrain} - true - 3 - true - - - - - - org.codehaus.mojo - flatten-maven-plugin - 1.1.0 - - - flatten - process-resources - - flatten - - - true - oss - - keep - keep - expand - remove - - - - - flatten-clean - clean - - clean - - - - - - - - - - no-jacoco - - - - org.jacoco - jacoco-maven-plugin - - - jacoco-initialize - none - - - - - - - - - java11 - - - - com.oracle.database.r2dbc - oracle-r2dbc - 0.1.0 - test - - - - - - - - - spring-libs-snapshot - https://repo.spring.io/libs-snapshot - - - oss-sonatype-snapshots - https://oss.sonatype.org/content/repositories/snapshots/ - - true - - - - - - - spring-plugins-release - https://repo.spring.io/plugins-release - - - - diff --git a/settings.xml b/settings.xml deleted file mode 100644 index b3227cc1..00000000 --- a/settings.xml +++ /dev/null @@ -1,29 +0,0 @@ - - - - - spring-plugins-release - ${env.ARTIFACTORY_USR} - ${env.ARTIFACTORY_PSW} - - - spring-libs-snapshot - ${env.ARTIFACTORY_USR} - ${env.ARTIFACTORY_PSW} - - - spring-libs-milestone 
- ${env.ARTIFACTORY_USR} - ${env.ARTIFACTORY_PSW} - - - spring-libs-release - ${env.ARTIFACTORY_USR} - ${env.ARTIFACTORY_PSW} - - - - \ No newline at end of file diff --git a/src/main/asciidoc/index.adoc b/src/main/asciidoc/index.adoc deleted file mode 100644 index f48f1687..00000000 --- a/src/main/asciidoc/index.adoc +++ /dev/null @@ -1,53 +0,0 @@ -= Spring Data R2DBC - Reference Documentation - Mark Paluch, Jay Bryant, Stephen Cohen -:revnumber: {version} -:revdate: {localdate} -ifdef::backend-epub3[:front-cover-image: image:epub-cover.png[Front Cover,1050,1600]] -:spring-data-commons-docs: ../../../../spring-data-commons/src/main/asciidoc -:spring-data-r2dbc-javadoc: https://docs.spring.io/spring-data/r2dbc/docs/{version}/api -:spring-framework-ref: https://docs.spring.io/spring/docs/{springVersion}/reference/html -:reactiveStreamsJavadoc: https://www.reactive-streams.org/reactive-streams-{reactiveStreamsVersion}-javadoc -:example-root: ../../../src/test/java/org/springframework/data/r2dbc/documentation -:tabsize: 2 - -(C) 2018-2022 The original authors. - -NOTE: Copies of this document may be made for your own use and for distribution to others, provided that you do not charge any fee for such copies and further provided that each copy contains this Copyright Notice, whether distributed in print or electronically. - -toc::[] - -// The blank line before each include prevents content from running together in a bad way -// (because an included bit does not have its own blank lines). 
- -include::preface.adoc[] - -include::new-features.adoc[leveloffset=+1] - -include::{spring-data-commons-docs}/dependencies.adoc[leveloffset=+1] - -include::{spring-data-commons-docs}/repositories.adoc[leveloffset=+1] - -[[reference]] -= Reference Documentation - -include::reference/introduction.adoc[leveloffset=+1] - -include::reference/r2dbc.adoc[leveloffset=+1] - -include::reference/r2dbc-repositories.adoc[leveloffset=+1] - -include::{spring-data-commons-docs}/auditing.adoc[leveloffset=+1] - -include::reference/r2dbc-auditing.adoc[leveloffset=+1] - -include::reference/mapping.adoc[leveloffset=+1] - -include::reference/kotlin.adoc[leveloffset=+1] - -[[appendix]] -= Appendix - -:numbered!: -include::{spring-data-commons-docs}/repository-query-keywords-reference.adoc[leveloffset=+1] -include::{spring-data-commons-docs}/repository-query-return-types-reference.adoc[leveloffset=+1] -include::reference/r2dbc-upgrading.adoc[leveloffset=+1] diff --git a/src/main/asciidoc/new-features.adoc b/src/main/asciidoc/new-features.adoc deleted file mode 100644 index 5665a848..00000000 --- a/src/main/asciidoc/new-features.adoc +++ /dev/null @@ -1,45 +0,0 @@ -[[new-features]] -= New & Noteworthy - -[[new-features.1-3-0]] -== What's New in Spring Data R2DBC 1.3.0 - -* Introduce <>. - -[[new-features.1-2-0]] -== What's New in Spring Data R2DBC 1.2.0 - -* Deprecate Spring Data R2DBC `DatabaseClient` and move off deprecated API in favor of Spring R2DBC. -Consult the <> for further details. -* Support for <>. -* <> through `@EnableR2dbcAuditing`. -* Support for `@Value` in persistence constructors. -* Support for Oracle's R2DBC driver. - -[[new-features.1-1-0]] -== What's New in Spring Data R2DBC 1.1.0 - -* Introduction of `R2dbcEntityTemplate` for entity-oriented operations. -* <>. -* Support interface projections with `DatabaseClient.as(…)`. -* <>. - -[[new-features.1-0-0]] -== What's New in Spring Data R2DBC 1.0.0 - -* Upgrade to R2DBC 0.8.0.RELEASE. 
-* `@Modifying` annotation for query methods to consume affected row count. -* Repository `save(…)` with an associated ID completes with `TransientDataAccessException` if the row does not exist in the database. -* Added `SingleConnectionConnectionFactory` for testing using connection singletons. -* Support for {spring-framework-ref}/core.html#expressions[SpEL expressions] in `@Query`. -* `ConnectionFactory` routing through `AbstractRoutingConnectionFactory`. -* Utilities for schema initialization through `ResourceDatabasePopulator` and `ScriptUtils`. -* Propagation and reset of Auto-Commit and Isolation Level control through `TransactionDefinition`. -* Support for Entity-level converters. -* Kotlin extensions for reified generics and <>. -* Add pluggable mechanism to register dialects. -* Support for named parameters. -* Initial R2DBC support through `DatabaseClient`. -* Initial Transaction support through `TransactionalDatabaseClient`. -* Initial R2DBC Repository Support through `R2dbcRepository`. -* Initial Dialect support for Postgres and Microsoft SQL Server. diff --git a/src/main/asciidoc/preface.adoc b/src/main/asciidoc/preface.adoc deleted file mode 100644 index 126d4bae..00000000 --- a/src/main/asciidoc/preface.adoc +++ /dev/null @@ -1,122 +0,0 @@ -[[preface]] -= Preface - -The Spring Data R2DBC project applies core Spring concepts to the development of solutions that use the https://r2dbc.io[R2DBC] drivers for relational databases. -We provide a `DatabaseClient` as a high-level abstraction for storing and querying rows. - -This document is the reference guide for Spring Data - R2DBC Support. -It explains R2DBC module concepts and semantics. - -This section provides some basic introduction to Spring and databases. 
-[[get-started:first-steps:spring]] -== Learning Spring - -Spring Data uses Spring framework's {spring-framework-ref}/core.html[core] functionality, including: - -* {spring-framework-ref}/core.html#beans[IoC] container -* {spring-framework-ref}/core.html#validation[type conversion system] -* {spring-framework-ref}/core.html#expressions[expression language] -* {spring-framework-ref}/integration.html#jmx[JMX integration] -* {spring-framework-ref}/data-access.html#dao-exceptions[DAO exception hierarchy]. - -While you need not know the Spring APIs, understanding the concepts behind them is important. -At a minimum, the idea behind Inversion of Control (IoC) should be familiar, and you should be familiar with whatever IoC container you choose to use. - -You can use the core functionality of the R2DBC support directly, with no need to invoke the IoC services of the Spring Container. -This is much like `JdbcTemplate`, which can be used "`standalone`" without any other services of the Spring container. -To use all the features of Spring Data R2DBC, such as the repository support, you need to configure some parts of the library to use Spring. - -To learn more about Spring, refer to the comprehensive documentation that explains the Spring Framework in detail. -There are a lot of articles, blog entries, and books on the subject. -See the Spring framework https://spring.io/docs[home page] for more information. - -[[get-started:first-steps:what]] -== What is R2DBC? - -https://r2dbc.io[R2DBC] is the acronym for Reactive Relational Database Connectivity. -R2DBC is an API specification initiative that declares a reactive API to be implemented by driver vendors to access their relational databases. - -Part of the answer as to why R2DBC was created is the need for a non-blocking application stack to handle concurrency with a small number of threads and scale with fewer hardware resources. 
-This need cannot be satisfied by reusing standardized relational database access APIs -- namely JDBC –- as JDBC is a fully blocking API. -Attempts to compensate for blocking behavior with a `ThreadPool` are of limited use. - -The other part of the answer is that most applications use a relational database to store their data. -While several NoSQL database vendors provide reactive database clients for their databases, migration to NoSQL is not an option for most projects. -This was the motivation for a new common API to serve as a foundation for any non-blocking database driver. -While the open source ecosystem hosts various non-blocking relational database driver implementations, each client comes with a vendor-specific API, so a generic layer on top of these libraries is not possible. - -[[get-started:first-steps:reactive]] -== What is Reactive? - -The term, "`reactive`", refers to programming models that are built around reacting to change, availability, and processability -— network components reacting to I/O events, UI controllers reacting to mouse events, resources being made available, and others. -In that sense, non-blocking is reactive, because, instead of being blocked, we are now in the mode of reacting to notifications as operations complete or data becomes available. - -There is also another important mechanism that we on the Spring team associate with reactive, and that is non-blocking back pressure. -In synchronous, imperative code, blocking calls serve as a natural form of back pressure that forces the caller to wait. -In non-blocking code, it becomes essential to control the rate of events so that a fast producer does not overwhelm its destination. 
- -https://github.com/reactive-streams/reactive-streams-jvm/blob/v{reactiveStreamsVersion}/README.md#specification[Reactive Streams is a small spec] (also https://docs.oracle.com/javase/9/docs/api/java/util/concurrent/Flow.html[adopted in Java 9]) that defines the interaction between asynchronous components with back pressure. -For example, a data repository (acting as a {reactiveStreamsJavadoc}/org/reactivestreams/Publisher.html[`Publisher`]) can produce data that an HTTP server (acting as a {reactiveStreamsJavadoc}/org/reactivestreams/Subscriber.html`[`Subscriber`]) can then write to the response. -The main purpose of Reactive Streams is to let the subscriber control how quickly or how slowly the publisher produces data. - -[[get-started:first-steps:reactive-api]] -== Reactive API - -Reactive Streams plays an important role for interoperability. It is of interest to libraries and infrastructure components but less useful as an application API, because it is too low-level. -Applications need a higher-level and richer, functional API to compose async logic —- similar to the Java 8 Stream API but not only for tables. -This is the role that reactive libraries play. - -https://github.com/reactor/reactor[Project Reactor] is the reactive library of choice for Spring Data R2DBC. -It provides the https://projectreactor.io/docs/core/release/api/reactor/core/publisher/Mono.html[`Mono`] and https://projectreactor.io/docs/core/release/api/reactor/core/publisher/Flux.html[`Flux`] API types to work on data sequences of `0..1` (`Mono`) and `0..N` (`Flux`) through a rich set of operators aligned with the ReactiveX vocabulary of operators. -Reactor is a Reactive Streams library, and, therefore, all of its operators support non-blocking back pressure. -Reactor has a strong focus on server-side Java. It is developed in close collaboration with Spring. 
- -Spring Data R2DBC requires Project Reactor as a core dependency, but it is interoperable with other reactive libraries through the Reactive Streams specification. -As a general rule, a Spring Data R2DBC repository accepts a plain `Publisher` as input, adapts it to a Reactor type internally, uses that, and returns either a `Mono` or a `Flux` as output. -So, you can pass any `Publisher` as input and apply operations on the output, but you need to adapt the output for use with another reactive library. -Whenever feasible, Spring Data adapts transparently to the use of RxJava or another reactive library. - -[[requirements]] -== Requirements - -The Spring Data R2DBC 1.x binaries require: - -* JDK level 8.0 and above -* https://spring.io/docs[Spring Framework] {springVersion} and above -* https://r2dbc.io[R2DBC] {r2dbcVersion} and above - -[[get-started:help]] -== Additional Help Resources - -Learning a new framework is not always straightforward. -In this section, we try to provide what we think is an easy-to-follow guide for starting with the Spring Data R2DBC module. -However, if you encounter issues or you need advice, use one of the following links: - -[[get-started:help:community]] -Community Forum :: Spring Data on https://stackoverflow.com/questions/tagged/spring-data[Stack Overflow] is a tag for all Spring Data (not just R2DBC) users to share information and help each other. -Note that registration is needed only for posting. - -[[get-started:help:professional]] -Professional Support :: Professional, from-the-source support, with guaranteed response time, is available from https://pivotal.io/[Pivotal Sofware, Inc.], the company behind Spring Data and Spring. - -[[get-started:up-to-date]] -== Following Development - -* For information on the Spring Data R2DBC source code repository, nightly builds, and snapshot artifacts, see the Spring Data R2DBC https://projects.spring.io/spring-data-r2dbc/[home page]. 
- -* You can help make Spring Data best serve the needs of the Spring community by interacting with developers through the community on https://stackoverflow.com/questions/tagged/spring-data[Stack Overflow]. - -* If you encounter a bug or want to suggest an improvement, please create a ticket on the Spring Data R2DBC https://github.com/spring-projects/spring-data-r2dbc/issues[issue tracker]. - -* To stay up to date with the latest news and announcements in the Spring ecosystem, subscribe to the Spring Community https://spring.io[Portal]. - -* You can also follow the Spring https://spring.io/blog[blog] or the Spring Data project team on Twitter (https://twitter.com/SpringData[SpringData]). - -[[project-metadata]] -== Project Metadata - -* Version control: https://github.com/spring-projects/spring-data-r2dbc -* Bugtracker: https://github.com/spring-projects/spring-data-r2dbc/issues -* Release repository: https://repo.spring.io/libs-release -* Milestone repository: https://repo.spring.io/libs-milestone -* Snapshot repository: https://repo.spring.io/libs-snapshot diff --git a/src/main/asciidoc/reference/introduction.adoc b/src/main/asciidoc/reference/introduction.adoc deleted file mode 100644 index 597beb24..00000000 --- a/src/main/asciidoc/reference/introduction.adoc +++ /dev/null @@ -1,10 +0,0 @@ -[[introduction]] -= Introduction - -== Document Structure - -This part of the reference documentation explains the core functionality offered by Spring Data R2DBC. - -"`<>`" introduces the R2DBC module feature set. - -"`<>`" introduces the repository support for R2DBC. 
diff --git a/src/main/asciidoc/reference/kotlin.adoc b/src/main/asciidoc/reference/kotlin.adoc deleted file mode 100644 index c64b3b4d..00000000 --- a/src/main/asciidoc/reference/kotlin.adoc +++ /dev/null @@ -1,28 +0,0 @@ -include::../{spring-data-commons-docs}/kotlin.adoc[] - -include::../{spring-data-commons-docs}/kotlin-extensions.adoc[leveloffset=+1] - -To retrieve a list of `SWCharacter` objects in Java, you would normally write the following: - -[source,java] ----- -Flux characters = client.select().from(SWCharacter.class).fetch().all(); ----- - -With Kotlin and the Spring Data extensions, you can instead write the following: - -[source,kotlin] ----- -val characters = client.select().from().fetch().all() -// or (both are equivalent) -val characters : Flux = client.select().from().fetch().all() ----- - -As in Java, `characters` in Kotlin is strongly typed, but Kotlin's clever type inference allows for shorter syntax. - -Spring Data R2DBC provides the following extensions: - -* Reified generics support for `DatabaseClient` and `Criteria`. -* <> extensions for `DatabaseClient`. - -include::../{spring-data-commons-docs}/kotlin-coroutines.adoc[leveloffset=+1] diff --git a/src/main/asciidoc/reference/mapping.adoc b/src/main/asciidoc/reference/mapping.adoc deleted file mode 100644 index a9f17e6d..00000000 --- a/src/main/asciidoc/reference/mapping.adoc +++ /dev/null @@ -1,324 +0,0 @@ -[[mapping]] -= Mapping - -Rich mapping support is provided by the `MappingR2dbcConverter`. `MappingR2dbcConverter` has a rich metadata model that allows mapping domain objects to a data row. -The mapping metadata model is populated by using annotations on your domain objects. -However, the infrastructure is not limited to using annotations as the only source of metadata information. -The `MappingR2dbcConverter` also lets you map objects to rows without providing any additional metadata, by following a set of conventions. 
- -This section describes the features of the `MappingR2dbcConverter`, including how to use conventions for mapping objects to rows and how to override those conventions with annotation-based mapping metadata. - -include::../{spring-data-commons-docs}/object-mapping.adoc[leveloffset=+1] - -[[mapping.conventions]] -== Convention-based Mapping - -`MappingR2dbcConverter` has a few conventions for mapping objects to rows when no additional mapping metadata is provided. -The conventions are: - -* The short Java class name is mapped to the table name in the following manner. -The `com.bigbank.SavingsAccount` class maps to the `SAVINGS_ACCOUNT` table name. -The same name mapping is applied for mapping fields to column names. -For example, the `firstName` field maps to the `FIRST_NAME` column. -You can control this mapping by providing a custom `NamingStrategy`. See <> for more detail. -Table and column names that are derived from property or class names are used in SQL statements without quotes by default. -You can control this behavior by setting `R2dbcMappingContext.setForceQuote(true)`. - -* Nested objects are not supported. - -* The converter uses any Spring Converters registered with it to override the default mapping of object properties to row columns and values. - -* The fields of an object are used to convert to and from columns in the row. -Public `JavaBean` properties are not used. - -* If you have a single non-zero-argument constructor whose constructor argument names match top-level column names of the row, that constructor is used. -Otherwise, the zero-argument constructor is used. -If there is more than one non-zero-argument constructor, an exception is thrown. - -[[mapping.configuration]] -== Mapping Configuration - -By default (unless explicitly configured) an instance of `MappingR2dbcConverter` is created when you create a `DatabaseClient`. -You can create your own instance of the `MappingR2dbcConverter`. 
-By creating your own instance, you can register Spring converters to map specific classes to and from the database. - -You can configure the `MappingR2dbcConverter` as well as `DatabaseClient` and `ConnectionFactory` by using Java-based metadata. The following example uses Spring's Java-based configuration: - -If you set `setForceQuote` of the `R2dbcMappingContext to` true, table and column names derived from classes and properties are used with database specific quotes. -This means that it is OK to use reserved SQL words (such as order) in these names. -You can do so by overriding `r2dbcMappingContext(Optional)` of `AbstractR2dbcConfiguration`. -Spring Data converts the letter casing of such a name to that form which is also used by the configured database when no quoting is used. -Therefore, you can use unquoted names when creating tables, as long as you do not use keywords or special characters in your names. -For databases that adhere to the SQL standard, this means that names are converted to upper case. -The quoting character and the way names get capitalized is controlled by the used `Dialect`. -See <> for how to configure custom dialects. - -.@Configuration class to configure R2DBC mapping support -==== -[source,java] ----- -@Configuration -public class MyAppConfig extends AbstractR2dbcConfiguration { - - public ConnectionFactory connectionFactory() { - return ConnectionFactories.get("r2dbc:…"); - } - - // the following are optional - - @Override - protected List getCustomConverters() { - - List> converterList = new ArrayList>(); - converterList.add(new org.springframework.data.r2dbc.test.PersonReadConverter()); - converterList.add(new org.springframework.data.r2dbc.test.PersonWriteConverter()); - return converterList; - } -} ----- -==== - -`AbstractR2dbcConfiguration` requires you to implement a method that defines a `ConnectionFactory`. - -You can add additional converters to the converter by overriding the `r2dbcCustomConversions` method. 
- -You can configure a custom `NamingStrategy` by registering it as a bean. -The `NamingStrategy` controls how the names of classes and properties get converted to the names of tables and columns. - -NOTE: `AbstractR2dbcConfiguration` creates a `DatabaseClient` instance and registers it with the container under the name of `databaseClient`. - -[[mapping.usage]] -== Metadata-based Mapping - -To take full advantage of the object mapping functionality inside the Spring Data R2DBC support, you should annotate your mapped objects with the `@Table` annotation. -Although it is not necessary for the mapping framework to have this annotation (your POJOs are mapped correctly, even without any annotations), it lets the classpath scanner find and pre-process your domain objects to extract the necessary metadata. -If you do not use this annotation, your application takes a slight performance hit the first time you store a domain object, because the mapping framework needs to build up its internal metadata model so that it knows about the properties of your domain object and how to persist them. -The following example shows a domain object: - -.Example domain object -==== -[source,java] ----- -package com.mycompany.domain; - -@Table -public class Person { - - @Id - private Long id; - - private Integer ssn; - - private String firstName; - - private String lastName; -} ----- -==== - -IMPORTANT: The `@Id` annotation tells the mapper which property you want to use as the primary key. - -[[mapping.types]] -=== Default Type Mapping - -The following table explains how property types of an entity affect mapping: - -|=== -|Source Type | Target Type | Remarks - -|Primitive types and wrapper types -|Passthru -|Can be customized using <>. - -|JSR-310 Date/Time types -|Passthru -|Can be customized using <>. - - -|`String`, `BigInteger`, `BigDecimal`, and `UUID` -|Passthru -|Can be customized using <>. - -|`Enum` -|String -|Can be customized by registering a <>. 
- -|`Blob` and `Clob` -|Passthru -|Can be customized using <>. - -|`byte[]`, `ByteBuffer` -|Passthru -|Considered a binary payload. - -|`Collection` -|Array of `T` -|Conversion to Array type if supported by the configured <>, not supported otherwise. - -|Arrays of primitive types, wrapper types and `String` -|Array of wrapper type (e.g. `int[]` -> `Integer[]`) -|Conversion to Array type if supported by the configured <>, not supported otherwise. - -|Driver-specific types -|Passthru -|Contributed as a simple type by the used `R2dbcDialect`. - -|Complex objects -|Target type depends on registered `Converter`. -|Requires a <>, not supported otherwise. - -|=== - -NOTE: The native data type for a column depends on the R2DBC driver type mapping. -Drivers can contribute additional simple types such as Geometry types. - -[[mapping.usage.annotations]] -=== Mapping Annotation Overview - -The `MappingR2dbcConverter` can use metadata to drive the mapping of objects to rows. -The following annotations are available: - -* `@Id`: Applied at the field level to mark the primary key. -* `@Table`: Applied at the class level to indicate this class is a candidate for mapping to the database. -You can specify the name of the table where the database is stored. -* `@Transient`: By default, all fields are mapped to the row. -This annotation excludes the field where it is applied from being stored in the database. -Transient properties cannot be used within a persistence constructor as the converter cannot materialize a value for the constructor argument. -* `@PersistenceConstructor`: Marks a given constructor -- even a package protected one -- to use when instantiating the object from the database. -Constructor arguments are mapped by name to the values in the retrieved row. -* `@Value`: This annotation is part of the Spring Framework. -Within the mapping framework it can be applied to constructor arguments. 
-This lets you use a Spring Expression Language statement to transform a key’s value retrieved in the database before it is used to construct a domain object. -In order to reference a column of a given row one has to use expressions like: `@Value("#root.myProperty")` where root refers to the root of the given `Row`. -* `@Column`: Applied at the field level to describe the name of the column as it is represented in the row, letting the name be different from the field name of the class. -Names specified with a `@Column` annotation are always quoted when used in SQL statements. -For most databases, this means that these names are case-sensitive. -It also means that you can use special characters in these names. -However, this is not recommended, since it may cause problems with other tools. -* `@Version`: Applied at field level is used for optimistic locking and checked for modification on save operations. -The value is `null` (`zero` for primitive types) is considered as marker for entities to be new. -The initially stored value is `zero` (`one` for primitive types). -The version gets incremented automatically on every update. -See <> for further reference. - -The mapping metadata infrastructure is defined in the separate `spring-data-commons` project that is technology-agnostic. -Specific subclasses are used in the R2DBC support to support annotation based metadata. -Other strategies can also be put in place (if there is demand). - -[[mapping.custom.object.construction]] -=== Customized Object Construction - -The mapping subsystem allows the customization of the object construction by annotating a constructor with the `@PersistenceConstructor` annotation.The values to be used for the constructor parameters are resolved in the following way: - -* If a parameter is annotated with the `@Value` annotation, the given expression is evaluated, and the result is used as the parameter value. 
-* If the Java type has a property whose name matches the given field of the input row, then its property information is used to select the appropriate constructor parameter to which to pass the input field value. -This works only if the parameter name information is present in the Java `.class` files, which you can achieve by compiling the source with debug information or using the `-parameters` command-line switch for `javac` in Java 8. -* Otherwise, a `MappingException` is thrown to indicate that the given constructor parameter could not be bound. - -==== -[source,java] ----- -class OrderItem { - - private @Id final String id; - private final int quantity; - private final double unitPrice; - - OrderItem(String id, int quantity, double unitPrice) { - this.id = id; - this.quantity = quantity; - this.unitPrice = unitPrice; - } - - // getters/setters ommitted -} ----- -==== - -[[mapping.explicit.converters]] -=== Overriding Mapping with Explicit Converters - -When storing and querying your objects, it is often convenient to have a `R2dbcConverter` instance to handle the mapping of all Java types to `OutboundRow` instances. -However, you may sometimes want the `R2dbcConverter` instances to do most of the work but let you selectively handle the conversion for a particular type -- perhaps to optimize performance. - -To selectively handle the conversion yourself, register one or more one or more `org.springframework.core.convert.converter.Converter` instances with the `R2dbcConverter`. - -You can use the `r2dbcCustomConversions` method in `AbstractR2dbcConfiguration` to configure converters. -The examples <> show how to perform the configuration with Java. - -NOTE: Custom top-level entity conversion requires asymmetric types for conversion. -Inbound data is extracted from R2DBC's `Row`. -Outbound data (to be used with `INSERT`/`UPDATE` statements) is represented as `OutboundRow` and later assembled to a statement. 
- -The following example of a Spring Converter implementation converts from a `Row` to a `Person` POJO: - -==== -[source,java] ----- -@ReadingConverter - public class PersonReadConverter implements Converter { - - public Person convert(Row source) { - Person p = new Person(source.get("id", String.class),source.get("name", String.class)); - p.setAge(source.get("age", Integer.class)); - return p; - } -} ----- -==== - -Please note that converters get applied on singular properties. -Collection properties (e.g. `Collection`) are iterated and converted element-wise. -Collection converters (e.g. `Converter>, OutboundRow`) are not supported. - -NOTE: R2DBC uses boxed primitives (`Integer.class` instead of `int.class`) to return primitive values. - -The following example converts from a `Person` to a `OutboundRow`: - -==== -[source,java] ----- -@WritingConverter -public class PersonWriteConverter implements Converter { - - public OutboundRow convert(Person source) { - OutboundRow row = new OutboundRow(); - row.put("id", Parameter.from(source.getId())); - row.put("name", Parameter.from(source.getFirstName())); - row.put("age", Parameter.from(source.getAge())); - return row; - } -} ----- -==== - -[[mapping.explicit.enum.converters]] -==== Overriding Enum Mapping with Explicit Converters - -Some databases, such as https://github.com/pgjdbc/r2dbc-postgresql#postgres-enum-types[Postgres], can natively write enum values using their database-specific enumerated column type. -Spring Data converts `Enum` values by default to `String` values for maximum portability. -To retain the actual enum value, register a `@Writing` converter whose source and target types use the actual enum type to avoid using `Enum.name()` conversion. -Additionally, you need to configure the enum type on the driver level so that the driver is aware how to represent the enum type. 
- -The following example shows the involved components to read and write `Color` enum values natively: - -==== -[source,java] ----- -enum Color { - Grey, Blue -} - -class ColorConverter extends EnumWriteSupport { - -} - - -class Product { - @Id long id; - Color color; - - // … -} ----- -==== diff --git a/src/main/asciidoc/reference/r2dbc-auditing.adoc b/src/main/asciidoc/reference/r2dbc-auditing.adoc deleted file mode 100644 index 818ec2c4..00000000 --- a/src/main/asciidoc/reference/r2dbc-auditing.adoc +++ /dev/null @@ -1,23 +0,0 @@ -[[r2dbc.auditing]] -== General Auditing Configuration for R2DBC - -Since Spring Data R2DBC 1.2, auditing can be enabled by annotating a configuration class with the `@EnableR2dbcAuditing` annotation, as the following example shows: - -.Activating auditing using JavaConfig -==== -[source,java] ----- -@Configuration -@EnableR2dbcAuditing -class Config { - - @Bean - public ReactiveAuditorAware myAuditorProvider() { - return new AuditorAwareImpl(); - } -} ----- -==== - -If you expose a bean of type `ReactiveAuditorAware` to the `ApplicationContext`, the auditing infrastructure picks it up automatically and uses it to determine the current user to be set on domain types. -If you have multiple implementations registered in the `ApplicationContext`, you can select the one to be used by explicitly setting the `auditorAwareRef` attribute of `@EnableR2dbcAuditing`. diff --git a/src/main/asciidoc/reference/r2dbc-core.adoc b/src/main/asciidoc/reference/r2dbc-core.adoc deleted file mode 100644 index 76efe26d..00000000 --- a/src/main/asciidoc/reference/r2dbc-core.adoc +++ /dev/null @@ -1,201 +0,0 @@ -R2DBC contains a wide range of features: - -* Spring configuration support with Java-based `@Configuration` classes for an R2DBC driver instance. -* `R2dbcEntityTemplate` as central class for entity-bound operations that increases productivity when performing common R2DBC operations with integrated object mapping between rows and POJOs. 
-* Feature-rich object mapping integrated with Spring's Conversion Service. -* Annotation-based mapping metadata that is extensible to support other metadata formats. -* Automatic implementation of Repository interfaces, including support for custom query methods. - -For most tasks, you should use `R2dbcEntityTemplate` or the repository support, which both use the rich mapping functionality. -`R2dbcEntityTemplate` is the place to look for accessing functionality such as ad-hoc CRUD operations. - -[[r2dbc.getting-started]] -== Getting Started - -An easy way to set up a working environment is to create a Spring-based project through https://start.spring.io[start.spring.io]. -To do so: - -. Add the following to the pom.xml files `dependencies` element: -+ -==== -[source,xml,subs="+attributes"] ----- - - - - io.r2dbc - r2dbc-bom - ${r2dbc-releasetrain.version} - pom - import - - - - - - - - - - org.springframework.data - spring-data-r2dbc - {version} - - - - - io.r2dbc - r2dbc-h2 - {r2dbcVersion} - - - ----- -==== - -. Change the version of Spring in the pom.xml to be -+ -==== -[source,xml,subs="+attributes"] ----- -{springVersion} ----- -==== - -. Add the following location of the Spring Milestone repository for Maven to your `pom.xml` such that it is at the same level as your `` element: -+ -==== -[source,xml] ----- - - - spring-milestone - Spring Maven MILESTONE Repository - https://repo.spring.io/libs-milestone - - ----- -==== - -The repository is also https://repo.spring.io/milestone/org/springframework/data/[browseable here]. - -You may also want to set the logging level to `DEBUG` to see some additional information. 
-To do so, edit the `application.properties` file to have the following content: - -==== -[source] ----- -logging.level.org.springframework.r2dbc=DEBUG ----- -==== - -Then you can, for example, create a `Person` class to persist, as follows: - -==== -[source,java,indent=0] ----- -include::../{example-root}/Person.java[tags=class] ----- -==== - -Next, you need to create a table structure in your database, as follows: - -==== -[source,sql] ----- -CREATE TABLE person - (id VARCHAR(255) PRIMARY KEY, - name VARCHAR(255), - age INT); ----- -==== - -You also need a main application to run, as follows: - - -==== -[source,java,indent=0] ----- -include::../{example-root}/R2dbcApp.java[tag=class] ----- -==== - -When you run the main program, the preceding examples produce output similar to the following: - -==== -[source] ----- -2018-11-28 10:47:03,893 DEBUG amework.core.r2dbc.DefaultDatabaseClient: 310 - Executing SQL statement [CREATE TABLE person - (id VARCHAR(255) PRIMARY KEY, - name VARCHAR(255), - age INT)] -2018-11-28 10:47:04,074 DEBUG amework.core.r2dbc.DefaultDatabaseClient: 908 - Executing SQL statement [INSERT INTO person (id, name, age) VALUES($1, $2, $3)] -2018-11-28 10:47:04,092 DEBUG amework.core.r2dbc.DefaultDatabaseClient: 575 - Executing SQL statement [SELECT id, name, age FROM person] -2018-11-28 10:47:04,436 INFO org.spring.r2dbc.example.R2dbcApp: 43 - Person [id='joe', name='Joe', age=34] ----- -==== - -Even in this simple example, there are few things to notice: - -* You can create an instance of the central helper class in Spring Data R2DBC (`R2dbcEntityTemplate`) by using a standard `io.r2dbc.spi.ConnectionFactory` object. -* The mapper works against standard POJO objects without the need for any additional metadata (though you can, optionally, provide that information -- see <>.). -* Mapping conventions can use field access.Notice that the `Person` class has only getters. 
-* If the constructor argument names match the column names of the stored row, they are used to instantiate the object. - -[[r2dbc.examples-repo]] -== Examples Repository - -There is a https://github.com/spring-projects/spring-data-examples[GitHub repository with several examples] that you can download and play around with to get a feel for how the library works. - -[[r2dbc.connecting]] -== Connecting to a Relational Database with Spring - -One of the first tasks when using relational databases and Spring is to create a `io.r2dbc.spi.ConnectionFactory` object by using the IoC container.Make sure to use a <>. - -[[r2dbc.connectionfactory]] -=== Registering a `ConnectionFactory` Instance using Java-based Metadata - -The following example shows an example of using Java-based bean metadata to register an instance of `io.r2dbc.spi.ConnectionFactory`: - -.Registering a `io.r2dbc.spi.ConnectionFactory` object using Java-based bean metadata -==== -[source,java] ----- -@Configuration -public class ApplicationConfiguration extends AbstractR2dbcConfiguration { - - @Override - @Bean - public ConnectionFactory connectionFactory() { - return … - } -} ----- -==== - -This approach lets you use the standard `io.r2dbc.spi.ConnectionFactory` instance, with the container using Spring's `AbstractR2dbcConfiguration`.As compared to registering a `ConnectionFactory` instance directly, the configuration support has the added advantage of also providing the container with an `ExceptionTranslator` implementation that translates R2DBC exceptions to exceptions in Spring's portable `DataAccessException` hierarchy for data access classes annotated with the `@Repository` annotation.This hierarchy and the use of `@Repository` is described in {spring-framework-ref}/data-access.html[Spring's DAO support features]. - -`AbstractR2dbcConfiguration` also registers `DatabaseClient`, which is required for database interaction and for Repository implementation. 
- -[[r2dbc.drivers]] -=== R2DBC Drivers - -Spring Data R2DBC supports drivers through R2DBC's pluggable SPI mechanism. -You can use any driver that implements the R2DBC spec with Spring Data R2DBC. -Since Spring Data R2DBC reacts to specific features of each database, it requires a `Dialect` implementation otherwise your application won't start up. -Spring Data R2DBC ships with dialect implementations for the following drivers: - -* https://github.com/r2dbc/r2dbc-h2[H2] (`io.r2dbc:r2dbc-h2`) -* https://github.com/mariadb-corporation/mariadb-connector-r2dbc[MariaDB] (`org.mariadb:r2dbc-mariadb`) -* https://github.com/r2dbc/r2dbc-mssql[Microsoft SQL Server] (`io.r2dbc:r2dbc-mssql`) -* https://github.com/mirromutth/r2dbc-mysql[MySQL] (`dev.miku:r2dbc-mysql`) -* https://github.com/jasync-sql/jasync-sql[jasync-sql MySQL] (`com.github.jasync-sql:jasync-r2dbc-mysql`) -* https://github.com/r2dbc/r2dbc-postgresql[Postgres] (`io.r2dbc:r2dbc-postgresql`) -* https://github.com/oracle/oracle-r2dbc[Oracle] (`com.oracle.database.r2dbc:oracle-r2dbc`) - -Spring Data R2DBC reacts to database specifics by inspecting the `ConnectionFactory` and selects the appropriate database dialect accordingly. -You need to configure your own {spring-data-r2dbc-javadoc}/api/org/springframework/data/r2dbc/dialect/R2dbcDialect.html[`R2dbcDialect`] if the driver you use is not yet known to Spring Data R2DBC. - -TIP: Dialects are resolved by {spring-data-r2dbc-javadoc}/org/springframework/data/r2dbc/dialect/DialectResolver.html[`DialectResolver`] from a `ConnectionFactory`, typically by inspecting `ConnectionFactoryMetadata`. -+ You can let Spring auto-discover your `R2dbcDialect` by registering a class that implements `org.springframework.data.r2dbc.dialect.DialectResolver$R2dbcDialectProvider` through `META-INF/spring.factories`. -`DialectResolver` discovers dialect provider implementations from the class path using Spring's `SpringFactoriesLoader`. 
diff --git a/src/main/asciidoc/reference/r2dbc-entity-callbacks.adoc b/src/main/asciidoc/reference/r2dbc-entity-callbacks.adoc deleted file mode 100644 index 34a79cf7..00000000 --- a/src/main/asciidoc/reference/r2dbc-entity-callbacks.adoc +++ /dev/null @@ -1,43 +0,0 @@ -[[r2dbc.entity-callbacks]] -= Store specific EntityCallbacks - -Spring Data R2DBC uses the `EntityCallback` API for its auditing support and reacts on the following callbacks. - -.Supported Entity Callbacks -[%header,cols="4"] -|=== -| Callback -| Method -| Description -| Order - -| BeforeConvertCallback -| `onBeforeConvert(T entity, SqlIdentifier table)` -| Invoked before a domain object is converted to `OutboundRow`. -| `Ordered.LOWEST_PRECEDENCE` - -| AfterConvertCallback -| `onAfterConvert(T entity, SqlIdentifier table)` -| Invoked after a domain object is loaded. + -Can modify the domain object after reading it from a row. -| `Ordered.LOWEST_PRECEDENCE` - -| AuditingEntityCallback -| `onBeforeConvert(T entity, SqlIdentifier table)` -| Marks an auditable entity _created_ or _modified_ -| 100 - -| BeforeSaveCallback -| `onBeforeSave(T entity, OutboundRow row, SqlIdentifier table)` -| Invoked before a domain object is saved. + -Can modify the target, to be persisted, `OutboundRow` containing all mapped entity information. -| `Ordered.LOWEST_PRECEDENCE` - -| AfterSaveCallback -| `onAfterSave(T entity, OutboundRow row, SqlIdentifier table)` -| Invoked after a domain object is saved. + -Can modify the domain object, to be returned after save, `OutboundRow` containing all mapped entity information. 
-| `Ordered.LOWEST_PRECEDENCE` - -|=== - diff --git a/src/main/asciidoc/reference/r2dbc-repositories.adoc b/src/main/asciidoc/reference/r2dbc-repositories.adoc deleted file mode 100644 index 76c15dbd..00000000 --- a/src/main/asciidoc/reference/r2dbc-repositories.adoc +++ /dev/null @@ -1,438 +0,0 @@ -[[r2dbc.repositories]] -= R2DBC Repositories - -[[r2dbc.repositories.intro]] -This chapter points out the specialties for repository support for R2DBC. -This chapter builds on the core repository support explained in <>. -Before reading this chapter, you should have a sound understanding of the basic concepts explained there. - -[[r2dbc.repositories.usage]] -== Usage - -To access domain entities stored in a relational database, you can use our sophisticated repository support that eases implementation quite significantly. -To do so, create an interface for your repository. -Consider the following `Person` class: - -.Sample Person entity -==== -[source,java] ----- -public class Person { - - @Id - private Long id; - private String firstname; - private String lastname; - - // … getters and setters omitted -} ----- -==== - -The following example shows a repository interface for the preceding `Person` class: - -.Basic repository interface to persist Person entities -==== -[source,java] ----- -public interface PersonRepository extends ReactiveCrudRepository { - - // additional custom query methods go here -} ----- -==== - -To configure R2DBC repositories, you can use the `@EnableR2dbcRepositories` annotation. -If no base package is configured, the infrastructure scans the package of the annotated configuration class. 
-The following example shows how to use Java configuration for a repository: - -.Java configuration for repositories -==== -[source,java] ----- -@Configuration -@EnableR2dbcRepositories -class ApplicationConfig extends AbstractR2dbcConfiguration { - - @Override - public ConnectionFactory connectionFactory() { - return … - } -} ----- -==== - -Because our domain repository extends `ReactiveCrudRepository`, it provides you with reactive CRUD operations to access the entities. -On top of `ReactiveCrudRepository`, there is also `ReactiveSortingRepository`, which adds additional sorting functionality similar to that of `PagingAndSortingRepository`. -Working with the repository instance is merely a matter of dependency injecting it into a client. -Consequently, you can retrieve all `Person` objects with the following code: - -.Paging access to Person entities -==== -[source,java,indent=0] ----- -include::../{example-root}/PersonRepositoryTests.java[tags=class] ----- -==== - -The preceding example creates an application context with Spring's unit test support, which performs annotation-based dependency injection into test cases. -Inside the test method, we use the repository to query the database. -We use `StepVerifier` as a test aid to verify our expectations against the results. - -[[r2dbc.repositories.queries]] -== Query Methods - -Most of the data access operations you usually trigger on a repository result in a query being run against the databases. 
-Defining such a query is a matter of declaring a method on the repository interface, as the following example shows: - -.PersonRepository with query methods -==== -[source,java] ----- -interface ReactivePersonRepository extends ReactiveSortingRepository { - - Flux findByFirstname(String firstname); <1> - - Flux findByFirstname(Publisher firstname); <2> - - Flux findByFirstnameOrderByLastname(String firstname, Pageable pageable); <3> - - Mono findByFirstnameAndLastname(String firstname, String lastname); <4> - - Mono findFirstByLastname(String lastname); <5> - - @Query("SELECT * FROM person WHERE lastname = :lastname") - Flux findByLastname(String lastname); <6> - - @Query("SELECT firstname, lastname FROM person WHERE lastname = $1") - Mono findFirstByLastname(String lastname); <7> -} ----- -<1> The method shows a query for all people with the given `firstname`. The query is derived by parsing the method name for constraints that can be concatenated with `And` and `Or`. Thus, the method name results in a query expression of `SELECT … FROM person WHERE firstname = :firstname`. -<2> The method shows a query for all people with the given `firstname` once the `firstname` is emitted by the given `Publisher`. -<3> Use `Pageable` to pass offset and sorting parameters to the database. -<4> Find a single entity for the given criteria. It completes with `IncorrectResultSizeDataAccessException` on non-unique results. -<5> Unless <4>, the first entity is always emitted even if the query yields more result rows. -<6> The `findByLastname` method shows a query for all people with the given last name. -<7> A query for a single `Person` entity projecting only `firstname` and `lastname` columns. -The annotated query uses native bind markers, which are Postgres bind markers in this example. -==== - -Note that the columns of a select statement used in a `@Query` annotation must match the names generated by the `NamingStrategy` for the respective property. 
-If a select statement does not include a matching column, that property is not set. If that property is required by the persistence constructor, either null or (for primitive types) the default value is provided. - -The following table shows the keywords that are supported for query methods: - -[cols="1,2,3", options="header", subs="quotes"] -.Supported keywords for query methods -|=== -| Keyword -| Sample -| Logical result - -| `After` -| `findByBirthdateAfter(Date date)` -| `birthdate > date` - -| `GreaterThan` -| `findByAgeGreaterThan(int age)` -| `age > age` - -| `GreaterThanEqual` -| `findByAgeGreaterThanEqual(int age)` -| `age >= age` - -| `Before` -| `findByBirthdateBefore(Date date)` -| `birthdate < date` - -| `LessThan` -| `findByAgeLessThan(int age)` -| `age < age` - -| `LessThanEqual` -| `findByAgeLessThanEqual(int age)` -| `age \<= age` - -| `Between` -| `findByAgeBetween(int from, int to)` -| `age BETWEEN from AND to` - -| `NotBetween` -| `findByAgeNotBetween(int from, int to)` -| `age NOT BETWEEN from AND to` - -| `In` -| `findByAgeIn(Collection ages)` -| `age IN (age1, age2, ageN)` - -| `NotIn` -| `findByAgeNotIn(Collection ages)` -| `age NOT IN (age1, age2, ageN)` - -| `IsNotNull`, `NotNull` -| `findByFirstnameNotNull()` -| `firstname IS NOT NULL` - -| `IsNull`, `Null` -| `findByFirstnameNull()` -| `firstname IS NULL` - -| `Like`, `StartingWith`, `EndingWith` -| `findByFirstnameLike(String name)` -| `firstname LIKE name` - -| `NotLike`, `IsNotLike` -| `findByFirstnameNotLike(String name)` -| `firstname NOT LIKE name` - -| `Containing` on String -| `findByFirstnameContaining(String name)` -| `firstname LIKE '%' + name +'%'` - -| `NotContaining` on String -| `findByFirstnameNotContaining(String name)` -| `firstname NOT LIKE '%' + name +'%'` - -| `(No keyword)` -| `findByFirstname(String name)` -| `firstname = name` - -| `Not` -| `findByFirstnameNot(String name)` -| `firstname != name` - -| `IsTrue`, `True` -| `findByActiveIsTrue()` -| `active IS 
TRUE`
-
-| `IsFalse`, `False`
-| `findByActiveIsFalse()`
-| `active IS FALSE`
-|===
-
-[[r2dbc.repositories.modifying]]
-=== Modifying Queries
-
-The previous sections describe how to declare queries to access a given entity or collection of entities.
-Keywords from the preceding table can be used in conjunction with `delete…By` or `remove…By` to create derived queries that delete matching rows.
-
-.`Delete…By` Query
-====
-[source,java]
-----
-interface ReactivePersonRepository extends ReactiveSortingRepository {
-
-  Mono deleteByLastname(String lastname); <1>
-
-  Mono deletePersonByLastname(String lastname); <2>
-
-  Mono deletePersonByLastname(String lastname); <3>
-}
-----
-<1> Using a return type of `Mono` returns the number of affected rows.
-<2> Using `Void` just reports whether the rows were successfully deleted without emitting a result value.
-<3> Using `Boolean` reports whether at least one row was removed.
-====
-
-As this approach is feasible for comprehensive custom functionality, you can modify queries that only need parameter binding by annotating the query method with `@Modifying`, as shown in the following example:
-
-====
-[source,java,indent=0]
-----
-include::../{example-root}/PersonRepository.java[tags=atModifying]
-----
-====
-
-The result of a modifying query can be:
-
-* `Void` (or Kotlin `Unit`) to discard update count and await completion.
-* `Integer` or another numeric type emitting the affected rows count.
-* `Boolean` to emit whether at least one row was updated.
-
-The `@Modifying` annotation is only relevant in combination with the `@Query` annotation.
-Derived custom methods do not require this annotation.
-
-Alternatively, you can add custom modifying behavior by using the facilities described in <>.
-
-[[r2dbc.repositories.queries.spel]]
-=== Queries with SpEL Expressions
-
-Query string definitions can be used together with SpEL expressions to create dynamic queries at runtime.
-SpEL expressions can provide predicate values which are evaluated right before running the query. - -Expressions expose method arguments through an array that contains all the arguments. -The following query uses `[0]` -to declare the predicate value for `lastname` (which is equivalent to the `:lastname` parameter binding): - -==== -[source,java,indent=0] ----- -include::../{example-root}/PersonRepository.java[tags=spel] ----- -==== - -SpEL in query strings can be a powerful way to enhance queries. -However, they can also accept a broad range of unwanted arguments. -You should make sure to sanitize strings before passing them to the query to avoid unwanted changes to your query. - -Expression support is extensible through the Query SPI: `org.springframework.data.spel.spi.EvaluationContextExtension`. -The Query SPI can contribute properties and functions and can customize the root object. -Extensions are retrieved from the application context at the time of SpEL evaluation when the query is built. - -TIP: When using SpEL expressions in combination with plain parameters, use named parameter notation instead of native bind markers to ensure a proper binding order. - -[[r2dbc.repositories.queries.query-by-example]] -=== Query By Example - -Spring Data R2DBC also lets you use Query By Example to fashion queries. -This technique allows you to use a "probe" object. -Essentially, any field that isn't empty or `null` will be used to match. - -Here's an example: - -==== -[source,java,indent=0] ----- -include::../{example-root}/QueryByExampleTests.java[tag=example] ----- -<1> Create a domain object with the criteria (`null` fields will be ignored). -<2> Using the domain object, create an `Example`. -<3> Through the `R2dbcRepository`, execute query (use `findOne` for a `Mono`). -==== - -This illustrates how to craft a simple probe using a domain object. -In this case, it will query based on the `Employee` object's `name` field being equal to `Frodo`. 
-`null` fields are ignored.
-
-====
-[source,java,indent=0]
-----
-include::../{example-root}/QueryByExampleTests.java[tag=example-2]
-----
-<1> Create a custom `ExampleMatcher` that matches on ALL fields (use `matchingAny()` to match on *ANY* fields)
-<2> For the `name` field, use a wildcard that matches against the end of the field
-<3> Match columns against `null` (don't forget that `NULL` doesn't equal `NULL` in relational databases).
-<4> Ignore the `role` field when forming the query.
-<5> Plug the custom `ExampleMatcher` into the probe.
-====
-
-It's also possible to apply a `withTransform()` against any property, allowing you to transform a property before forming the query.
-For example, you can apply a `toUpperCase()` to a `String`
-based property before the query is created.
-
-Query By Example really shines when you don't know all the fields needed in a query in advance.
-If you were building a filter on a web page where the user can pick the fields, Query By Example is a great way to flexibly capture that into an efficient query.
-
-[[r2dbc.entity-persistence.state-detection-strategies]]
-include::../{spring-data-commons-docs}/is-new-state-detection.adoc[leveloffset=+2]
-
-[[r2dbc.entity-persistence.id-generation]]
-=== ID Generation
-
-Spring Data R2DBC uses the ID to identify entities.
-The ID of an entity must be annotated with Spring Data's https://docs.spring.io/spring-data/commons/docs/current/api/org/springframework/data/annotation/Id.html[`@Id`] annotation.
-
-When your database has an auto-increment column for the ID column, the generated value gets set in the entity after inserting it into the database.
-
-Spring Data R2DBC does not attempt to insert values of identifier columns when the entity is new and the identifier value defaults to its initial value.
-That is `0` for primitive types and `null` if the identifier property uses a numeric wrapper type such as `Long`.
- -One important constraint is that, after saving an entity, the entity must not be new anymore. -Note that whether an entity is new is part of the entity's state. -With auto-increment columns, this happens automatically, because the ID gets set by Spring Data with the value from the ID column. - -[[r2dbc.optimistic-locking]] -=== Optimistic Locking - -The `@Version` annotation provides syntax similar to that of JPA in the context of R2DBC and makes sure updates are only applied to rows with a matching version. -Therefore, the actual value of the version property is added to the update query in such a way that the update does not have any effect if another operation altered the row in the meantime. -In that case, an `OptimisticLockingFailureException` is thrown. -The following example shows these features: - -==== -[source,java] ----- -@Table -class Person { - - @Id Long id; - String firstname; - String lastname; - @Version Long version; -} - -R2dbcEntityTemplate template = …; - -Mono daenerys = template.insert(new Person("Daenerys")); <1> - -Person other = template.select(Person.class) - .matching(query(where("id").is(daenerys.getId()))) - .first().block(); <2> - -daenerys.setLastname("Targaryen"); -template.update(daenerys); <3> - -template.update(other).subscribe(); // emits OptimisticLockingFailureException <4> ----- -<1> Initially insert row. `version` is set to `0`. -<2> Load the just inserted row. `version` is still `0`. -<3> Update the row with `version = 0`.Set the `lastname` and bump `version` to `1`. -<4> Try to update the previously loaded row that still has `version = 0`.The operation fails with an `OptimisticLockingFailureException`, as the current `version` is `1`. -==== - -:projection-collection: Flux -include::../{spring-data-commons-docs}/repository-projections.adoc[leveloffset=+2] - -[[projections.resultmapping]] -==== Result Mapping - -A query method returning an Interface- or DTO projection is backed by results produced by the actual query. 
-Interface projections generally rely on mapping results onto the domain type first to consider potential `@Column` type mappings and the actual projection proxy uses a potentially partially materialized entity to expose projection data. - -Result mapping for DTO projections depends on the actual query type. -Derived queries use the domain type to map results, and Spring Data creates DTO instances solely from properties available on the domain type. -Declaring properties in your DTO that are not available on the domain type is not supported. - -String-based queries use a different approach since the actual query, specifically the field projection, and result type declaration are close together. -DTO projections used with query methods annotated with `@Query` map query results directly into the DTO type. -Field mappings on the domain type are not considered. -Using the DTO type directly, your query method can benefit from a more dynamic projection that isn't restricted to the domain model. - -include::../{spring-data-commons-docs}/entity-callbacks.adoc[leveloffset=+1] -include::./r2dbc-entity-callbacks.adoc[leveloffset=+2] - -[[r2dbc.multiple-databases]] -== Working with multiple Databases - -When working with multiple, potentially different databases, your application will require a different approach to configuration. -The provided `AbstractR2dbcConfiguration` support class assumes a single `ConnectionFactory` from which the `Dialect` gets derived. -That being said, you need to define a few beans yourself to configure Spring Data R2DBC to work with multiple databases. - -R2DBC repositories require `R2dbcEntityOperations` to implement repositories. 
-A simple configuration to scan for repositories without using `AbstractR2dbcConfiguration` looks like:
-
-[source,java]
-----
-@Configuration
-@EnableR2dbcRepositories(basePackages = "com.acme.mysql", entityOperationsRef = "mysqlR2dbcEntityOperations")
-static class MySQLConfiguration {
-
-  @Bean
-  @Qualifier("mysql")
-  public ConnectionFactory mysqlConnectionFactory() {
-    return …
-  }
-
-  @Bean
-  public R2dbcEntityOperations mysqlR2dbcEntityOperations(@Qualifier("mysql") ConnectionFactory connectionFactory) {
-
-    DatabaseClient databaseClient = DatabaseClient.create(connectionFactory);
-
-    return new R2dbcEntityTemplate(databaseClient, MySqlDialect.INSTANCE);
-  }
-}
-----
-
-Note that `@EnableR2dbcRepositories` allows configuration either through `databaseClientRef` or `entityOperationsRef`.
-Using various `DatabaseClient` beans is useful when connecting to multiple databases of the same type.
-When using different database systems that differ in their dialect, use `@EnableR2dbcRepositories(entityOperationsRef = …)` instead. diff --git a/src/main/asciidoc/reference/r2dbc-template.adoc b/src/main/asciidoc/reference/r2dbc-template.adoc deleted file mode 100644 index 970ae896..00000000 --- a/src/main/asciidoc/reference/r2dbc-template.adoc +++ /dev/null @@ -1,193 +0,0 @@ -[[r2dbc.datbaseclient.fluent-api]]
-[[r2dbc.entityoperations]]
-= R2dbcEntityOperations Data Access API
-
-`R2dbcEntityTemplate` is the central entrypoint for Spring Data R2DBC.
-It provides direct entity-oriented methods and a more narrow, fluent interface for typical ad-hoc use-cases, such as querying, inserting, updating, and deleting data.
-
-The entry points (`insert()`, `select()`, `update()`, and others) follow a natural naming schema based on the operation to be run.
-Moving on from the entry point, the API is designed to offer only context-dependent methods that lead to a terminating method that creates and runs a SQL statement.
-Spring Data R2DBC uses a `R2dbcDialect` abstraction to determine bind markers, pagination support and the data types natively supported by the underlying driver.
-
-NOTE: All terminal methods always return a `Publisher` type that represents the desired operation.
-The actual statements are sent to the database upon subscription.
-
-[[r2dbc.entityoperations.save-insert]]
-== Methods for Inserting and Updating Entities
-
-There are several convenient methods on `R2dbcEntityTemplate` for saving and inserting your objects.
-To have more fine-grained control over the conversion process, you can register Spring converters with `R2dbcCustomConversions` -- for example `Converter` and `Converter`.
-
-The simple case of using the save operation is to save a POJO. In this case, the table name is determined by the name (not fully qualified) of the class.
-You may also call the save operation with a specific collection name.
-You can use mapping metadata to override the collection in which to store the object.
-
-When inserting or saving, if the `Id` property is not set, the assumption is that its value will be auto-generated by the database.
-Consequently, for auto-generation the type of the `Id` property or field in your class must be a `Long`, or `Integer`.
-
-The following example shows how to insert a row and retrieve its contents:
-
-.Inserting and retrieving entities using the `R2dbcEntityTemplate`
-====
-[source,java,indent=0]
-----
-include::../{example-root}/R2dbcEntityTemplateSnippets.java[tag=insertAndSelect]
-----
-====
-
-The following insert and update operations are available:
-
-A similar set of insert operations is also available:
-
-* `Mono` *insert* `(T objectToSave)`: Insert the object to the default table.
-* `Mono` *update* `(T objectToSave)`: Update the object in the default table.
-
-Table names can be customized by using the fluent API.
- -[[r2dbc.entityoperations.selecting]] -== Selecting Data - -The `select(…)` and `selectOne(…)` methods on `R2dbcEntityTemplate` are used to select data from a table. -Both methods take a <> object that defines the field projection, the `WHERE` clause, the `ORDER BY` clause and limit/offset pagination. -Limit/offset functionality is transparent to the application regardless of the underlying database. -This functionality is supported by the <> to cater for differences between the individual SQL flavors. - -.Selecting entities using the `R2dbcEntityTemplate` -==== -[source,java,indent=0] ----- -include::../{example-root}/R2dbcEntityTemplateSnippets.java[tag=select] ----- -==== - -[[r2dbc.entityoperations.fluent-api]] -== Fluent API - -This section explains the fluent API usage. -Consider the following simple query: - -==== -[source,java,indent=0] ----- -include::../{example-root}/R2dbcEntityTemplateSnippets.java[tag=simpleSelect] ----- -<1> Using `Person` with the `select(…)` method maps tabular results on `Person` result objects. -<2> Fetching `all()` rows returns a `Flux` without limiting results. -==== - -The following example declares a more complex query that specifies the table name by name, a `WHERE` condition, and an `ORDER BY` clause: - -==== -[source,java,indent=0] ----- -include::../{example-root}/R2dbcEntityTemplateSnippets.java[tag=fullSelect] ----- -<1> Selecting from a table by name returns row results using the given domain type. -<2> The issued query declares a `WHERE` condition on `firstname` and `lastname` columns to filter results. -<3> Results can be ordered by individual column names, resulting in an `ORDER BY` clause. -<4> Selecting the one result fetches only a single row. -This way of consuming rows expects the query to return exactly a single result. -`Mono` emits a `IncorrectResultSizeDataAccessException` if the query yields more than a single result. 
-==== - -TIP: You can directly apply <> to results by providing the target type via `select(Class)`. - -You can switch between retrieving a single entity and retrieving multiple entities through the following terminating methods: - -* `first()`: Consume only the first row, returning a `Mono`. -The returned `Mono` completes without emitting an object if the query returns no results. -* `one()`: Consume exactly one row, returning a `Mono`. -The returned `Mono` completes without emitting an object if the query returns no results. -If the query returns more than one row, `Mono` completes exceptionally emitting `IncorrectResultSizeDataAccessException`. -* `all()`: Consume all returned rows returning a `Flux`. -* `count()`: Apply a count projection returning `Mono`. -* `exists()`: Return whether the query yields any rows by returning `Mono`. - -You can use the `select()` entry point to express your `SELECT` queries. -The resulting `SELECT` queries support the commonly used clauses (`WHERE` and `ORDER BY`) and support pagination. -The fluent API style let you chain together multiple methods while having easy-to-understand code. -To improve readability, you can use static imports that let you avoid using the 'new' keyword for creating `Criteria` instances. - -[[r2dbc.datbaseclient.fluent-api.criteria]] -=== Methods for the Criteria Class - -The `Criteria` class provides the following methods, all of which correspond to SQL operators: - -* `Criteria` *and* `(String column)`: Adds a chained `Criteria` with the specified `property` to the current `Criteria` and returns the newly created one. -* `Criteria` *or* `(String column)`: Adds a chained `Criteria` with the specified `property` to the current `Criteria` and returns the newly created one. -* `Criteria` *greaterThan* `(Object o)`: Creates a criterion by using the `>` operator. -* `Criteria` *greaterThanOrEquals* `(Object o)`: Creates a criterion by using the `>=` operator. -* `Criteria` *in* `(Object... 
o)`: Creates a criterion by using the `IN` operator for a varargs argument. -* `Criteria` *in* `(Collection collection)`: Creates a criterion by using the `IN` operator using a collection. -* `Criteria` *is* `(Object o)`: Creates a criterion by using column matching (`property = value`). -* `Criteria` *isNull* `()`: Creates a criterion by using the `IS NULL` operator. -* `Criteria` *isNotNull* `()`: Creates a criterion by using the `IS NOT NULL` operator. -* `Criteria` *lessThan* `(Object o)`: Creates a criterion by using the `<` operator. -* `Criteria` *lessThanOrEquals* `(Object o)`: Creates a criterion by using the `<=` operator. -* `Criteria` *like* `(Object o)`: Creates a criterion by using the `LIKE` operator without escape character processing. -* `Criteria` *not* `(Object o)`: Creates a criterion by using the `!=` operator. -* `Criteria` *notIn* `(Object... o)`: Creates a criterion by using the `NOT IN` operator for a varargs argument. -* `Criteria` *notIn* `(Collection collection)`: Creates a criterion by using the `NOT IN` operator using a collection. - -You can use `Criteria` with `SELECT`, `UPDATE`, and `DELETE` queries. - -[[r2dbc.entityoperations.fluent-api.insert]] -== Inserting Data - -You can use the `insert()` entry point to insert data. - -Consider the following simple typed insert operation: - -==== -[source,java,indent=0] ----- -include::../{example-root}/R2dbcEntityTemplateSnippets.java[tag=insert] ----- -<1> Using `Person` with the `into(…)` method sets the `INTO` table, based on mapping metadata. -It also prepares the insert statement to accept `Person` objects for inserting. -<2> Provide a scalar `Person` object. -Alternatively, you can supply a `Publisher` to run a stream of `INSERT` statements. -This method extracts all non-`null` values and inserts them. -==== - -[[r2dbc.entityoperations.fluent-api.update]] -== Updating Data - -You can use the `update()` entry point to update rows. 
-Updating data starts by specifying the table to update by accepting `Update` specifying assignments. -It also accepts `Query` to create a `WHERE` clause. - -Consider the following simple typed update operation: - -==== -[source,java] ----- -Person modified = … - -include::../{example-root}/R2dbcEntityTemplateSnippets.java[tag=update] ----- -<1> Update `Person` objects and apply mapping based on mapping metadata. -<2> Set a different table name by calling the `inTable(…)` method. -<3> Specify a query that translates into a `WHERE` clause. -<4> Apply the `Update` object. -Set in this case `age` to `42` and return the number of affected rows. -==== - -[[r2dbc.entityoperations.fluent-api.delete]] -== Deleting Data - -You can use the `delete()` entry point to delete rows. -Removing data starts with a specification of the table to delete from and, optionally, accepts a `Criteria` to create a `WHERE` clause. - -Consider the following simple insert operation: - -==== -[source,java] ----- -include::../{example-root}/R2dbcEntityTemplateSnippets.java[tag=delete] ----- -<1> Delete `Person` objects and apply mapping based on mapping metadata. -<2> Set a different table name by calling the `from(…)` method. -<3> Specify a query that translates into a `WHERE` clause. -<4> Apply the delete operation and return the number of affected rows. -==== diff --git a/src/main/asciidoc/reference/r2dbc-upgrading.adoc b/src/main/asciidoc/reference/r2dbc-upgrading.adoc deleted file mode 100644 index 504c66fd..00000000 --- a/src/main/asciidoc/reference/r2dbc-upgrading.adoc +++ /dev/null @@ -1,62 +0,0 @@ -[appendix] -[[migration-guide]] -= Migration Guide - -The following sections explain how to migrate to a newer version of Spring Data R2DBC. - -[[upgrading.1.1-1.2]] -== Upgrading from 1.1.x to 1.2.x - -Spring Data R2DBC was developed with the intent to evaluate how well R2DBC can integrate with Spring applications. 
-One of the main aspects was to move core support into Spring Framework once R2DBC support has proven useful. -Spring Framework 5.3 ships with a new module: Spring R2DBC (`spring-r2dbc`). - -`spring-r2dbc` ships core R2DBC functionality (a slim variant of `DatabaseClient`, Transaction Manager, Connection Factory initialization, Exception translation) that was initially provided by Spring Data R2DBC. -The 1.2.0 release aligns with what's provided in Spring R2DBC by making several changes outlined in the following sections. - -Spring R2DBC's `DatabaseClient` is a more lightweight implementation that encapsulates a pure SQL-oriented interface. -You will notice that the method to run SQL statements changed from `DatabaseClient.execute(…)` to `DatabaseClient.sql(…)`. -The fluent API for CRUD operations has moved into `R2dbcEntityTemplate`. - -If you use logging of SQL statements through the logger prefix `org.springframework.data.r2dbc`, make sure to update it to `org.springframework.r2dbc` (that is removing `.data`) to point to Spring R2DBC components. - -[[upgrading.1.1-1.2.deprecation]] -=== Deprecations - -* Deprecation of `o.s.d.r2dbc.core.DatabaseClient` and its support classes `ConnectionAccessor`, `FetchSpec`, `SqlProvider` and a few more. -Named parameter support classes such as `NamedParameterExpander` are encapsulated by Spring R2DBC's `DatabaseClient` implementation hence we're not providing replacements as this was internal API in the first place. -Use `o.s.r2dbc.core.DatabaseClient` and their Spring R2DBC replacements available from `org.springframework.r2dbc.core`. -Entity-based methods (`select`/`insert`/`update`/`delete`) methods are available through `R2dbcEntityTemplate` which was introduced with version 1.1. -* Deprecation of `o.s.d.r2dbc.connectionfactory`, `o.s.d.r2dbc.connectionfactory.init`, and `o.s.d.r2dbc.connectionfactory.lookup` packages. -Use Spring R2DBC's variant which you can find at `o.s.r2dbc.connection`. 
-* Deprecation of `o.s.d.r2dbc.convert.ColumnMapRowMapper`. -Use `o.s.r2dbc.core.ColumnMapRowMapper` instead. -* Deprecation of binding support classes `o.s.d.r2dbc.dialect.Bindings`, `BindMarker`, `BindMarkers`, `BindMarkersFactory` and related types. -Use replacements from `org.springframework.r2dbc.core.binding`. -* Deprecation of `BadSqlGrammarException`, `UncategorizedR2dbcException` and exception translation at `o.s.d.r2dbc.support`. -Spring R2DBC provides a slim exception translation variant without an SPI for now available through `o.s.r2dbc.connection.ConnectionFactoryUtils#convertR2dbcException`. - -[[upgrading.1.1-1.2.replacements]] -=== Usage of replacements provided by Spring R2DBC - -To ease migration, several deprecated types are now subtypes of their replacements provided by Spring R2DBC. -Spring Data R2DBC has changes several methods or introduced new methods accepting Spring R2DBC types. -Specifically the following classes are changed: - -* `R2dbcEntityTemplate` -* `R2dbcDialect` -* Types in `org.springframework.data.r2dbc.query` - -We recommend that you review and update your imports if you work with these types directly. - -=== Breaking Changes - -* `OutboundRow` and statement mappers switched from using `SettableValue` to `Parameter` -* Repository factory support requires `o.s.r2dbc.core.DatabaseClient` instead of `o.s.data.r2dbc.core.DatabaseClient`. 
- -[[upgrading.1.1-1.2.dependencies]] -=== Dependency Changes - -To make use of Spring R2DBC, make sure to include the following dependency: - -* `org.springframework:spring-r2dbc` diff --git a/src/main/asciidoc/reference/r2dbc.adoc b/src/main/asciidoc/reference/r2dbc.adoc deleted file mode 100644 index 6e379a9f..00000000 --- a/src/main/asciidoc/reference/r2dbc.adoc +++ /dev/null @@ -1,6 +0,0 @@ -[[r2dbc.core]] -= R2DBC support - -include::r2dbc-core.adoc[] - -include::r2dbc-template.adoc[leveloffset=+1] diff --git a/src/main/java/org/springframework/data/r2dbc/config/AbstractR2dbcConfiguration.java b/src/main/java/org/springframework/data/r2dbc/config/AbstractR2dbcConfiguration.java deleted file mode 100644 index 5d68ae48..00000000 --- a/src/main/java/org/springframework/data/r2dbc/config/AbstractR2dbcConfiguration.java +++ /dev/null @@ -1,242 +0,0 @@ -/* - * Copyright 2018-2022 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.r2dbc.config; - -import io.r2dbc.spi.ConnectionFactory; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Optional; - -import org.springframework.beans.BeansException; -import org.springframework.context.ApplicationContext; -import org.springframework.context.ApplicationContextAware; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import org.springframework.core.convert.converter.Converter; -import org.springframework.data.convert.CustomConversions; -import org.springframework.data.convert.CustomConversions.StoreConversions; -import org.springframework.data.r2dbc.convert.MappingR2dbcConverter; -import org.springframework.data.r2dbc.convert.R2dbcConverter; -import org.springframework.data.r2dbc.convert.R2dbcCustomConversions; -import org.springframework.data.r2dbc.core.DefaultReactiveDataAccessStrategy; -import org.springframework.data.r2dbc.core.R2dbcEntityTemplate; -import org.springframework.data.r2dbc.core.ReactiveDataAccessStrategy; -import org.springframework.data.r2dbc.dialect.DialectResolver; -import org.springframework.data.r2dbc.dialect.R2dbcDialect; -import org.springframework.data.r2dbc.mapping.R2dbcMappingContext; -import org.springframework.data.relational.core.conversion.BasicRelationalConverter; -import org.springframework.data.relational.core.mapping.NamingStrategy; -import org.springframework.lang.Nullable; -import org.springframework.r2dbc.core.DatabaseClient; -import org.springframework.util.Assert; - -/** - * Base class for Spring Data R2DBC configuration containing bean declarations that must be registered for Spring Data - * R2DBC to work. 
- * - * @author Mark Paluch - * @see ConnectionFactory - * @see DatabaseClient - * @see org.springframework.data.r2dbc.repository.config.EnableR2dbcRepositories - */ -@Configuration(proxyBeanMethods = false) -public abstract class AbstractR2dbcConfiguration implements ApplicationContextAware { - - private static final String CONNECTION_FACTORY_BEAN_NAME = "connectionFactory"; - - private @Nullable ApplicationContext context; - - /* - * (non-Javadoc) - * @see org.springframework.context.ApplicationContextAware#setApplicationContext(org.springframework.context.ApplicationContext) - */ - @Override - public void setApplicationContext(ApplicationContext applicationContext) throws BeansException { - this.context = applicationContext; - } - - /** - * Return a R2DBC {@link ConnectionFactory}. Annotate with {@link Bean} in case you want to expose a - * {@link ConnectionFactory} instance to the {@link org.springframework.context.ApplicationContext}. - * - * @return the configured {@link ConnectionFactory}. - */ - public abstract ConnectionFactory connectionFactory(); - - /** - * Return a {@link R2dbcDialect} for the given {@link ConnectionFactory}. This method attempts to resolve a - * {@link R2dbcDialect} from {@link io.r2dbc.spi.ConnectionFactoryMetadata}. Override this method to specify a dialect - * instead of attempting to resolve one. - * - * @param connectionFactory the configured {@link ConnectionFactory}. - * @return the resolved {@link R2dbcDialect}. - * @throws org.springframework.data.r2dbc.dialect.DialectResolver.NoDialectException if the {@link R2dbcDialect} cannot be determined. - */ - public R2dbcDialect getDialect(ConnectionFactory connectionFactory) { - return DialectResolver.getDialect(connectionFactory); - } - - /** - * Register a {@link DatabaseClient} using {@link #connectionFactory()} and {@link ReactiveDataAccessStrategy}. - * - * @return must not be {@literal null}. - * @throws IllegalArgumentException if any of the required args is {@literal null}. 
- */ - @Bean({ "r2dbcDatabaseClient", "databaseClient" }) - public DatabaseClient databaseClient() { - - ConnectionFactory connectionFactory = lookupConnectionFactory(); - - return DatabaseClient.builder() // - .connectionFactory(connectionFactory) // - .bindMarkers(getDialect(connectionFactory).getBindMarkersFactory()) // - .build(); - } - - /** - * Register {@link R2dbcEntityTemplate} using {@link #databaseClient()} and {@link #connectionFactory()}. - * - * @param databaseClient must not be {@literal null}. - * @param dataAccessStrategy must not be {@literal null}. - * @return - * @since 1.2 - */ - @Bean - public R2dbcEntityTemplate r2dbcEntityTemplate(DatabaseClient databaseClient, - ReactiveDataAccessStrategy dataAccessStrategy) { - - Assert.notNull(databaseClient, "DatabaseClient must not be null!"); - Assert.notNull(dataAccessStrategy, "ReactiveDataAccessStrategy must not be null!"); - - return new R2dbcEntityTemplate(databaseClient, dataAccessStrategy); - } - - /** - * Register a {@link R2dbcMappingContext} and apply an optional {@link NamingStrategy}. - * - * @param namingStrategy optional {@link NamingStrategy}. Use {@link NamingStrategy#INSTANCE} as fallback. - * @param r2dbcCustomConversions customized R2DBC conversions. - * @return must not be {@literal null}. - * @throws IllegalArgumentException if any of the required args is {@literal null}. - */ - @Bean - public R2dbcMappingContext r2dbcMappingContext(Optional namingStrategy, - R2dbcCustomConversions r2dbcCustomConversions) { - - Assert.notNull(namingStrategy, "NamingStrategy must not be null!"); - - R2dbcMappingContext context = new R2dbcMappingContext(namingStrategy.orElse(NamingStrategy.INSTANCE)); - context.setSimpleTypeHolder(r2dbcCustomConversions.getSimpleTypeHolder()); - - return context; - } - - /** - * Creates a {@link ReactiveDataAccessStrategy} using the configured - * {@link #r2dbcConverter(R2dbcMappingContext, R2dbcCustomConversions) R2dbcConverter}. 
- * - * @param converter the configured {@link R2dbcConverter}. - * @return must not be {@literal null}. - * @see #r2dbcConverter(R2dbcMappingContext, R2dbcCustomConversions) - * @see #getDialect(ConnectionFactory) - * @throws IllegalArgumentException if any of the {@literal mappingContext} is {@literal null}. - */ - @Bean - public ReactiveDataAccessStrategy reactiveDataAccessStrategy(R2dbcConverter converter) { - - Assert.notNull(converter, "MappingContext must not be null!"); - - return new DefaultReactiveDataAccessStrategy(getDialect(lookupConnectionFactory()), converter); - } - - /** - * Creates a {@link org.springframework.data.r2dbc.convert.R2dbcConverter} using the configured - * {@link #r2dbcMappingContext(Optional, R2dbcCustomConversions)} R2dbcMappingContext}. - * - * @param mappingContext the configured {@link R2dbcMappingContext}. - * @param r2dbcCustomConversions customized R2DBC conversions. - * @return must not be {@literal null}. - * @see #r2dbcMappingContext(Optional, R2dbcCustomConversions) - * @see #getDialect(ConnectionFactory) - * @throws IllegalArgumentException if any of the {@literal mappingContext} is {@literal null}. - * @since 1.2 - */ - @Bean - public MappingR2dbcConverter r2dbcConverter(R2dbcMappingContext mappingContext, - R2dbcCustomConversions r2dbcCustomConversions) { - - Assert.notNull(mappingContext, "MappingContext must not be null!"); - - return new MappingR2dbcConverter(mappingContext, r2dbcCustomConversions); - } - - /** - * Register custom {@link Converter}s in a {@link CustomConversions} object if required. These - * {@link CustomConversions} will be registered with the {@link BasicRelationalConverter} and - * {@link #r2dbcMappingContext(Optional, R2dbcCustomConversions)}. Returns an empty {@link R2dbcCustomConversions} - * instance by default. Override {@link #getCustomConverters()} to supply custom converters. - * - * @return must not be {@literal null}. 
- * @see #getCustomConverters() - */ - @Bean - public R2dbcCustomConversions r2dbcCustomConversions() { - return new R2dbcCustomConversions(getStoreConversions(), getCustomConverters()); - } - - /** - * Customization hook to return custom converters. - * - * @return return custom converters. - */ - protected List getCustomConverters() { - return Collections.emptyList(); - } - - /** - * Returns the {@link R2dbcDialect}-specific {@link StoreConversions}. - * - * @return the {@link R2dbcDialect}-specific {@link StoreConversions}. - */ - protected StoreConversions getStoreConversions() { - - R2dbcDialect dialect = getDialect(lookupConnectionFactory()); - - List converters = new ArrayList<>(dialect.getConverters()); - converters.addAll(R2dbcCustomConversions.STORE_CONVERTERS); - - return StoreConversions.of(dialect.getSimpleTypeHolder(), converters); - } - - ConnectionFactory lookupConnectionFactory() { - - ApplicationContext context = this.context; - Assert.notNull(context, "ApplicationContext is not yet initialized"); - - String[] beanNamesForType = context.getBeanNamesForType(ConnectionFactory.class); - - for (String beanName : beanNamesForType) { - - if (beanName.equals(CONNECTION_FACTORY_BEAN_NAME)) { - return context.getBean(CONNECTION_FACTORY_BEAN_NAME, ConnectionFactory.class); - } - } - - return connectionFactory(); - } -} diff --git a/src/main/java/org/springframework/data/r2dbc/config/EnableR2dbcAuditing.java b/src/main/java/org/springframework/data/r2dbc/config/EnableR2dbcAuditing.java deleted file mode 100644 index 3f859d88..00000000 --- a/src/main/java/org/springframework/data/r2dbc/config/EnableR2dbcAuditing.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright 2020-2022 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.r2dbc.config; - -import java.lang.annotation.Documented; -import java.lang.annotation.ElementType; -import java.lang.annotation.Inherited; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -import org.springframework.context.annotation.Import; -import org.springframework.data.auditing.DateTimeProvider; -import org.springframework.data.domain.ReactiveAuditorAware; - -/** - * Annotation to enable auditing in R2DBC via annotation configuration. - * - * @author Mark Paluch - * @since 1.2 - */ -@Inherited -@Documented -@Target(ElementType.TYPE) -@Retention(RetentionPolicy.RUNTIME) -@Import(R2dbcAuditingRegistrar.class) -public @interface EnableR2dbcAuditing { - - /** - * Configures the {@link ReactiveAuditorAware} bean to be used to lookup the current principal. - * - * @return empty {@link String} by default. - */ - String auditorAwareRef() default ""; - - /** - * Configures whether the creation and modification dates are set. Defaults to {@literal true}. - * - * @return {@literal true} by default. - */ - boolean setDates() default true; - - /** - * Configures whether the entity shall be marked as modified on creation. Defaults to {@literal true}. - * - * @return {@literal true} by default. - */ - boolean modifyOnCreate() default true; - - /** - * Configures a {@link DateTimeProvider} bean name that allows customizing the timestamp to be used for setting - * creation and modification dates. - * - * @return empty {@link String} by default. 
- */ - String dateTimeProviderRef() default ""; -} diff --git a/src/main/java/org/springframework/data/r2dbc/config/PersistentEntitiesFactoryBean.java b/src/main/java/org/springframework/data/r2dbc/config/PersistentEntitiesFactoryBean.java deleted file mode 100644 index 399e1599..00000000 --- a/src/main/java/org/springframework/data/r2dbc/config/PersistentEntitiesFactoryBean.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright 2020-2022 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.r2dbc.config; - -import org.springframework.beans.factory.FactoryBean; -import org.springframework.data.mapping.context.PersistentEntities; -import org.springframework.data.r2dbc.mapping.R2dbcMappingContext; - -/** - * Simple helper to be able to wire the {@link PersistentEntities} from a {@link R2dbcMappingContext} bean available in - * the application context. - * - * @author Mark Paluch - * @since 1.2 - */ -public class PersistentEntitiesFactoryBean implements FactoryBean { - - private final R2dbcMappingContext mappingContext; - - /** - * Creates a new {@link PersistentEntitiesFactoryBean} for the given {@link R2dbcMappingContext}. - * - * @param mappingContext must not be {@literal null}. 
- */ - public PersistentEntitiesFactoryBean(R2dbcMappingContext mappingContext) { - this.mappingContext = mappingContext; - } - - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.FactoryBean#getObject() - */ - @Override - public PersistentEntities getObject() { - return PersistentEntities.of(mappingContext); - } - - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.FactoryBean#getObjectType() - */ - @Override - public Class getObjectType() { - return PersistentEntities.class; - } -} diff --git a/src/main/java/org/springframework/data/r2dbc/config/R2dbcAuditingRegistrar.java b/src/main/java/org/springframework/data/r2dbc/config/R2dbcAuditingRegistrar.java deleted file mode 100644 index 1420ae44..00000000 --- a/src/main/java/org/springframework/data/r2dbc/config/R2dbcAuditingRegistrar.java +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Copyright 2020-2022 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.r2dbc.config; - -import java.lang.annotation.Annotation; - -import org.springframework.beans.factory.config.BeanDefinition; -import org.springframework.beans.factory.support.AbstractBeanDefinition; -import org.springframework.beans.factory.support.BeanDefinitionBuilder; -import org.springframework.beans.factory.support.BeanDefinitionRegistry; -import org.springframework.context.annotation.ImportBeanDefinitionRegistrar; -import org.springframework.data.auditing.ReactiveIsNewAwareAuditingHandler; -import org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport; -import org.springframework.data.auditing.config.AuditingConfiguration; -import org.springframework.data.config.ParsingUtils; -import org.springframework.data.r2dbc.mapping.event.ReactiveAuditingEntityCallback; -import org.springframework.util.Assert; - -/** - * {@link ImportBeanDefinitionRegistrar} to enable {@link EnableR2dbcAuditing} annotation. - * - * @author Mark Paluch - * @since 1.2 - */ -class R2dbcAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport { - - /* - * (non-Javadoc) - * @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAnnotation() - */ - @Override - protected Class getAnnotation() { - return EnableR2dbcAuditing.class; - } - - /* - * (non-Javadoc) - * @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAuditingHandlerBeanName() - */ - @Override - protected String getAuditingHandlerBeanName() { - return "r2dbcAuditingHandler"; - } - - /* - * (non-Javadoc) - * @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAuditHandlerBeanDefinitionBuilder(org.springframework.data.auditing.config.AuditingConfiguration) - */ - @Override - protected BeanDefinitionBuilder getAuditHandlerBeanDefinitionBuilder(AuditingConfiguration configuration) { - - Assert.notNull(configuration, "AuditingConfiguration must not be null!"); - - 
BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(ReactiveIsNewAwareAuditingHandler.class); - - BeanDefinitionBuilder definition = BeanDefinitionBuilder.genericBeanDefinition(PersistentEntitiesFactoryBean.class); - definition.setAutowireMode(AbstractBeanDefinition.AUTOWIRE_CONSTRUCTOR); - - builder.addConstructorArgValue(definition.getBeanDefinition()); - return configureDefaultAuditHandlerAttributes(configuration, builder); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#registerAuditListener(org.springframework.beans.factory.config.BeanDefinition, org.springframework.beans.factory.support.BeanDefinitionRegistry) - */ - @Override - protected void registerAuditListenerBeanDefinition(BeanDefinition auditingHandlerDefinition, - BeanDefinitionRegistry registry) { - - Assert.notNull(auditingHandlerDefinition, "BeanDefinition must not be null!"); - Assert.notNull(registry, "BeanDefinitionRegistry must not be null!"); - - BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(ReactiveAuditingEntityCallback.class); - - builder.addConstructorArgValue(ParsingUtils.getObjectFactoryBeanDefinition(getAuditingHandlerBeanName(), registry)); - builder.getRawBeanDefinition().setSource(auditingHandlerDefinition.getSource()); - - registerInfrastructureBeanWithId(builder.getBeanDefinition(), ReactiveAuditingEntityCallback.class.getName(), - registry); - } - -} diff --git a/src/main/java/org/springframework/data/r2dbc/config/package-info.java b/src/main/java/org/springframework/data/r2dbc/config/package-info.java deleted file mode 100644 index e4e7fb58..00000000 --- a/src/main/java/org/springframework/data/r2dbc/config/package-info.java +++ /dev/null @@ -1,6 +0,0 @@ -/** - * Configuration classes for Spring Data R2DBC. 
- */ -@org.springframework.lang.NonNullApi -@org.springframework.lang.NonNullFields -package org.springframework.data.r2dbc.config; diff --git a/src/main/java/org/springframework/data/r2dbc/convert/EntityRowMapper.java b/src/main/java/org/springframework/data/r2dbc/convert/EntityRowMapper.java deleted file mode 100644 index a1e7c7da..00000000 --- a/src/main/java/org/springframework/data/r2dbc/convert/EntityRowMapper.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright 2018-2022 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.r2dbc.convert; - -import io.r2dbc.spi.Row; -import io.r2dbc.spi.RowMetadata; - -import java.util.function.BiFunction; - -/** - * Maps a {@link io.r2dbc.spi.Row} to an entity of type {@code T}, including entities referenced. 
- * - * @author Mark Paluch - * @author Ryland Degnan - */ -public class EntityRowMapper implements BiFunction { - - private final Class typeRoRead; - private final R2dbcConverter converter; - - public EntityRowMapper(Class typeRoRead, R2dbcConverter converter) { - - this.typeRoRead = typeRoRead; - this.converter = converter; - } - - /* - * (non-Javadoc) - * @see java.util.function.BiFunction#apply(java.lang.Object, java.lang.Object) - */ - @Override - public T apply(Row row, RowMetadata metadata) { - return converter.read(typeRoRead, row, metadata); - } -} diff --git a/src/main/java/org/springframework/data/r2dbc/convert/EnumWriteSupport.java b/src/main/java/org/springframework/data/r2dbc/convert/EnumWriteSupport.java deleted file mode 100644 index 2e4c7b2c..00000000 --- a/src/main/java/org/springframework/data/r2dbc/convert/EnumWriteSupport.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright 2020-2022 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.r2dbc.convert; - -import org.springframework.core.convert.converter.Converter; -import org.springframework.data.convert.WritingConverter; - -/** - * Support class to natively write {@link Enum} values to the database. - *

- * By default, Spring Data converts enum values by to {@link Enum#name() String} for maximum portability. Registering a - * {@link WritingConverter} allows retaining the enum type so that actual enum values get passed thru to the driver. - *

- * Enum types that should be written using their actual enum value to the database should require a converter for type - * pinning. Extend this class as the {@link org.springframework.data.convert.CustomConversions} support inspects - * {@link Converter} generics to identify conversion rules. - *

- * For example: - * - *

- * enum Color {
- * 	Grey, Blue
- * }
- *
- * class ColorConverter extends EnumWriteSupport<Color> {
- *
- * }
- * 
- * - * @author Mark Paluch - * @param the enum type that should be written using the actual value. - * @since 1.2 - */ -@WritingConverter -public abstract class EnumWriteSupport> implements Converter { - - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ - @Override - public E convert(E enumInstance) { - return enumInstance; - } - -} diff --git a/src/main/java/org/springframework/data/r2dbc/convert/MappingR2dbcConverter.java b/src/main/java/org/springframework/data/r2dbc/convert/MappingR2dbcConverter.java deleted file mode 100644 index 6b1afec4..00000000 --- a/src/main/java/org/springframework/data/r2dbc/convert/MappingR2dbcConverter.java +++ /dev/null @@ -1,751 +0,0 @@ -/* - * Copyright 2018-2022 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.r2dbc.convert; - -import io.r2dbc.spi.ColumnMetadata; -import io.r2dbc.spi.Row; -import io.r2dbc.spi.RowMetadata; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.Iterator; -import java.util.List; -import java.util.Optional; -import java.util.function.BiFunction; - -import org.springframework.core.CollectionFactory; -import org.springframework.core.convert.ConversionService; -import org.springframework.dao.InvalidDataAccessApiUsageException; -import org.springframework.data.convert.CustomConversions; -import org.springframework.data.mapping.IdentifierAccessor; -import org.springframework.data.mapping.MappingException; -import org.springframework.data.mapping.PersistentProperty; -import org.springframework.data.mapping.PersistentPropertyAccessor; -import org.springframework.data.mapping.PreferredConstructor; -import org.springframework.data.mapping.context.MappingContext; -import org.springframework.data.mapping.model.ConvertingPropertyAccessor; -import org.springframework.data.mapping.model.DefaultSpELExpressionEvaluator; -import org.springframework.data.mapping.model.ParameterValueProvider; -import org.springframework.data.mapping.model.SpELContext; -import org.springframework.data.mapping.model.SpELExpressionEvaluator; -import org.springframework.data.mapping.model.SpELExpressionParameterValueProvider; -import org.springframework.data.r2dbc.mapping.OutboundRow; -import org.springframework.data.r2dbc.support.ArrayUtils; -import org.springframework.data.relational.core.conversion.BasicRelationalConverter; -import org.springframework.data.relational.core.conversion.RelationalConverter; -import org.springframework.data.relational.core.dialect.ArrayColumns; -import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; -import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; -import 
org.springframework.data.util.ClassTypeInformation; -import org.springframework.data.util.TypeInformation; -import org.springframework.lang.Nullable; -import org.springframework.r2dbc.core.Parameter; -import org.springframework.util.Assert; -import org.springframework.util.ClassUtils; -import org.springframework.util.CollectionUtils; - -/** - * Converter for R2DBC. - * - * @author Mark Paluch - * @author Oliver Drotbohm - */ -public class MappingR2dbcConverter extends BasicRelationalConverter implements R2dbcConverter { - - /** - * Creates a new {@link MappingR2dbcConverter} given {@link MappingContext}. - * - * @param context must not be {@literal null}. - */ - public MappingR2dbcConverter( - MappingContext, ? extends RelationalPersistentProperty> context) { - super(context, new R2dbcCustomConversions(R2dbcCustomConversions.STORE_CONVERSIONS, Collections.emptyList())); - } - - /** - * Creates a new {@link MappingR2dbcConverter} given {@link MappingContext} and {@link CustomConversions}. - * - * @param context must not be {@literal null}. - */ - public MappingR2dbcConverter( - MappingContext, ? 
extends RelationalPersistentProperty> context, - CustomConversions conversions) { - super(context, conversions); - } - - // ---------------------------------- - // Entity reading - // ---------------------------------- - - /* - * (non-Javadoc) - * @see org.springframework.data.convert.EntityReader#read(java.lang.Class, S) - */ - @Override - public R read(Class type, Row row) { - return read(type, row, null); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.r2dbc.convert.R2dbcConverter#read(java.lang.Class, io.r2dbc.spi.Row, io.r2dbc.spi.RowMetadata) - */ - @Override - public R read(Class type, Row row, @Nullable RowMetadata metadata) { - - TypeInformation typeInfo = ClassTypeInformation.from(type); - Class rawType = typeInfo.getType(); - - if (Row.class.isAssignableFrom(rawType)) { - return type.cast(row); - } - - if (getConversions().hasCustomReadTarget(Row.class, rawType) - && getConversionService().canConvert(Row.class, rawType)) { - return getConversionService().convert(row, rawType); - } - - return read(getRequiredPersistentEntity(type), row, metadata); - } - - private R read(RelationalPersistentEntity entity, Row row, @Nullable RowMetadata metadata) { - - R result = createInstance(row, metadata, "", entity); - - if (entity.requiresPropertyPopulation()) { - ConvertingPropertyAccessor propertyAccessor = new ConvertingPropertyAccessor<>( - entity.getPropertyAccessor(result), getConversionService()); - - for (RelationalPersistentProperty property : entity) { - - if (entity.isConstructorArgument(property)) { - continue; - } - - Object value = readFrom(row, metadata, property, ""); - - if (value != null) { - propertyAccessor.setProperty(property, value); - } - } - } - - return result; - } - - /** - * Read a single value or a complete Entity from the {@link Row} passed as an argument. - * - * @param row the {@link Row} to extract the value from. Must not be {@literal null}. - * @param metadata the {@link RowMetadata}. Can be {@literal null}. 
- * @param property the {@link RelationalPersistentProperty} for which the value is intended. Must not be - * {@literal null}. - * @param prefix to be used for all column names accessed by this method. Must not be {@literal null}. - * @return the value read from the {@link Row}. May be {@literal null}. - */ - @Nullable - private Object readFrom(Row row, @Nullable RowMetadata metadata, RelationalPersistentProperty property, - String prefix) { - - String identifier = prefix + property.getColumnName().getReference(); - - try { - - Object value = null; - if (metadata == null || RowMetadataUtils.containsColumn(metadata, identifier)) { - value = row.get(identifier); - } - - if (value == null) { - return null; - } - - if (getConversions().hasCustomReadTarget(value.getClass(), property.getType())) { - return readValue(value, property.getTypeInformation()); - } - - if (property.isEntity()) { - return readEntityFrom(row, metadata, property); - } - - return readValue(value, property.getTypeInformation()); - - } catch (Exception o_O) { - throw new MappingException(String.format("Could not read property %s from column %s!", property, identifier), - o_O); - } - } - - - public Object readValue(@Nullable Object value, TypeInformation type) { - - if (null == value) { - return null; - } - - if (getConversions().hasCustomReadTarget(value.getClass(), type.getType())) { - return getConversionService().convert(value, type.getType()); - } else if (value instanceof Collection || value.getClass().isArray()) { - return readCollectionOrArray(asCollection(value), type); - } else { - return getPotentiallyConvertedSimpleRead(value, type.getType()); - } - } - - /** - * Reads the given value into a collection of the given {@link TypeInformation}. - * - * @param source must not be {@literal null}. - * @param targetType must not be {@literal null}. - * @return the converted {@link Collection} or array, will never be {@literal null}. 
- */ - @SuppressWarnings("unchecked") - private Object readCollectionOrArray(Collection source, TypeInformation targetType) { - - Assert.notNull(targetType, "Target type must not be null!"); - - Class collectionType = targetType.isSubTypeOf(Collection.class) // - ? targetType.getType() // - : List.class; - - TypeInformation componentType = targetType.getComponentType() != null // - ? targetType.getComponentType() // - : ClassTypeInformation.OBJECT; - Class rawComponentType = componentType.getType(); - - Collection items = targetType.getType().isArray() // - ? new ArrayList<>(source.size()) // - : CollectionFactory.createCollection(collectionType, rawComponentType, source.size()); - - if (source.isEmpty()) { - return getPotentiallyConvertedSimpleRead(items, targetType.getType()); - } - - for (Object element : source) { - - if (!Object.class.equals(rawComponentType) && element instanceof Collection) { - if (!rawComponentType.isArray() && !ClassUtils.isAssignable(Iterable.class, rawComponentType)) { - throw new MappingException(String.format( - "Cannot convert %1$s of type %2$s into an instance of %3$s! Implement a custom Converter<%2$s, %3$s> and register it with the CustomConversions", - element, element.getClass(), rawComponentType)); - } - } - if (element instanceof List) { - items.add(readCollectionOrArray((Collection) element, componentType)); - } else { - items.add(getPotentiallyConvertedSimpleRead(element, rawComponentType)); - } - } - - return getPotentiallyConvertedSimpleRead(items, targetType.getType()); - } - - /** - * Checks whether we have a custom conversion for the given simple object. Converts the given value if so, applies - * {@link Enum} handling or returns the value as is. - * - * @param value - * @param target must not be {@literal null}. 
- * @return - */ - @Nullable - @SuppressWarnings({ "rawtypes", "unchecked" }) - private Object getPotentiallyConvertedSimpleRead(@Nullable Object value, @Nullable Class target) { - - if (value == null || target == null || ClassUtils.isAssignableValue(target, value)) { - return value; - } - - if (getConversions().hasCustomReadTarget(value.getClass(), target)) { - return getConversionService().convert(value, target); - } - - if (Enum.class.isAssignableFrom(target)) { - return Enum.valueOf((Class) target, value.toString()); - } - - return getConversionService().convert(value, target); - } - - @SuppressWarnings("unchecked") - private S readEntityFrom(Row row, @Nullable RowMetadata metadata, PersistentProperty property) { - - String prefix = property.getName() + "_"; - - RelationalPersistentEntity entity = getMappingContext().getRequiredPersistentEntity(property.getActualType()); - - if (entity.hasIdProperty()) { - if (readFrom(row, metadata, entity.getRequiredIdProperty(), prefix) == null) { - return null; - } - } - - Object instance = createInstance(row, metadata, prefix, entity); - - if (entity.requiresPropertyPopulation()) { - PersistentPropertyAccessor accessor = entity.getPropertyAccessor(instance); - ConvertingPropertyAccessor propertyAccessor = new ConvertingPropertyAccessor<>(accessor, - getConversionService()); - - for (RelationalPersistentProperty p : entity) { - if (!entity.isConstructorArgument(property)) { - propertyAccessor.setProperty(p, readFrom(row, metadata, p, prefix)); - } - } - } - - return (S) instance; - } - - private S createInstance(Row row, @Nullable RowMetadata rowMetadata, String prefix, - RelationalPersistentEntity entity) { - - PreferredConstructor persistenceConstructor = entity.getPersistenceConstructor(); - ParameterValueProvider provider; - - if (persistenceConstructor != null && persistenceConstructor.hasParameters()) { - - SpELContext spELContext = new SpELContext(new RowPropertyAccessor(rowMetadata)); - SpELExpressionEvaluator 
expressionEvaluator = new DefaultSpELExpressionEvaluator(row, spELContext); - provider = new SpELExpressionParameterValueProvider<>(expressionEvaluator, getConversionService(), - new RowParameterValueProvider(row, rowMetadata, entity, this, prefix)); - } else { - provider = NoOpParameterValueProvider.INSTANCE; - } - - return createInstance(entity, provider::getParameterValue); - } - - // ---------------------------------- - // Entity writing - // ---------------------------------- - - /* - * (non-Javadoc) - * @see org.springframework.data.convert.EntityWriter#write(java.lang.Object, java.lang.Object) - */ - @Override - public void write(Object source, OutboundRow sink) { - - Class userClass = ClassUtils.getUserClass(source); - - Optional> customTarget = getConversions().getCustomWriteTarget(userClass, OutboundRow.class); - if (customTarget.isPresent()) { - - OutboundRow result = getConversionService().convert(source, OutboundRow.class); - sink.putAll(result); - return; - } - - writeInternal(source, sink, userClass); - } - - private void writeInternal(Object source, OutboundRow sink, Class userClass) { - - RelationalPersistentEntity entity = getRequiredPersistentEntity(userClass); - PersistentPropertyAccessor propertyAccessor = entity.getPropertyAccessor(source); - - writeProperties(sink, entity, propertyAccessor, entity.isNew(source)); - } - - private void writeProperties(OutboundRow sink, RelationalPersistentEntity entity, - PersistentPropertyAccessor accessor, boolean isNew) { - - for (RelationalPersistentProperty property : entity) { - - if (!property.isWritable()) { - continue; - } - - Object value; - - if (property.isIdProperty()) { - IdentifierAccessor identifierAccessor = entity.getIdentifierAccessor(accessor.getBean()); - value = identifierAccessor.getIdentifier(); - } else { - value = accessor.getProperty(property); - } - - if (value == null) { - writeNullInternal(sink, property); - continue; - } - - if (getConversions().isSimpleType(value.getClass())) { - 
writeSimpleInternal(sink, value, isNew, property); - } else { - writePropertyInternal(sink, value, isNew, property); - } - } - } - - private void writeSimpleInternal(OutboundRow sink, Object value, boolean isNew, - RelationalPersistentProperty property) { - - Object result = getPotentiallyConvertedSimpleWrite(value); - - sink.put(property.getColumnName(), - Parameter.fromOrEmpty(result, getPotentiallyConvertedSimpleNullType(property.getType()))); - } - - private void writePropertyInternal(OutboundRow sink, Object value, boolean isNew, - RelationalPersistentProperty property) { - - TypeInformation valueType = ClassTypeInformation.from(value.getClass()); - - if (valueType.isCollectionLike()) { - - if (valueType.getActualType() != null && valueType.getRequiredActualType().isCollectionLike()) { - - // pass-thru nested collections - writeSimpleInternal(sink, value, isNew, property); - return; - } - - List collectionInternal = createCollection(asCollection(value), property); - sink.put(property.getColumnName(), Parameter.from(collectionInternal)); - return; - } - - throw new InvalidDataAccessApiUsageException("Nested entities are not supported"); - } - - /** - * Writes the given {@link Collection} using the given {@link RelationalPersistentProperty} information. - * - * @param collection must not be {@literal null}. - * @param property must not be {@literal null}. - * @return - */ - protected List createCollection(Collection collection, RelationalPersistentProperty property) { - return writeCollectionInternal(collection, property.getTypeInformation(), new ArrayList<>()); - } - - /** - * Populates the given {@link Collection sink} with converted values from the given {@link Collection source}. - * - * @param source the collection to create a {@link Collection} for, must not be {@literal null}. - * @param type the {@link TypeInformation} to consider or {@literal null} if unknown. - * @param sink the {@link Collection} to write to. 
- * @return - */ - @SuppressWarnings("unchecked") - private List writeCollectionInternal(Collection source, @Nullable TypeInformation type, - Collection sink) { - - TypeInformation componentType = null; - - List collection = sink instanceof List ? (List) sink : new ArrayList<>(sink); - - if (type != null) { - componentType = type.getComponentType(); - } - - for (Object element : source) { - - Class elementType = element == null ? null : element.getClass(); - - if (elementType == null || getConversions().isSimpleType(elementType)) { - collection.add(getPotentiallyConvertedSimpleWrite(element, - componentType != null ? componentType.getType() : Object.class)); - } else if (element instanceof Collection || elementType.isArray()) { - collection.add(writeCollectionInternal(asCollection(element), componentType, new ArrayList<>())); - } else { - throw new InvalidDataAccessApiUsageException("Nested entities are not supported"); - } - } - - return collection; - } - - private void writeNullInternal(OutboundRow sink, RelationalPersistentProperty property) { - - sink.put(property.getColumnName(), Parameter.empty(getPotentiallyConvertedSimpleNullType(property.getType()))); - } - - private Class getPotentiallyConvertedSimpleNullType(Class type) { - - Optional> customTarget = getConversions().getCustomWriteTarget(type); - - if (customTarget.isPresent()) { - return customTarget.get(); - - } - - if (type.isEnum()) { - return String.class; - } - - return type; - } - - /** - * Checks whether we have a custom conversion registered for the given value into an arbitrary simple type. Returns - * the converted value if so. If not, we perform special enum handling or simply return the value as is. 
- * - * @param value - * @return - */ - @Nullable - private Object getPotentiallyConvertedSimpleWrite(@Nullable Object value) { - return getPotentiallyConvertedSimpleWrite(value, Object.class); - } - - /** - * Checks whether we have a custom conversion registered for the given value into an arbitrary simple type. Returns - * the converted value if so. If not, we perform special enum handling or simply return the value as is. - * - * @param value - * @return - */ - @Nullable - private Object getPotentiallyConvertedSimpleWrite(@Nullable Object value, Class typeHint) { - - if (value == null) { - return null; - } - - if (Object.class != typeHint) { - - if (getConversionService().canConvert(value.getClass(), typeHint)) { - value = getConversionService().convert(value, typeHint); - } - } - - Optional> customTarget = getConversions().getCustomWriteTarget(value.getClass()); - - if (customTarget.isPresent()) { - return getConversionService().convert(value, customTarget.get()); - } - - return Enum.class.isAssignableFrom(value.getClass()) ? ((Enum) value).name() : value; - } - - /* - * (non-Javadoc) - * @see org.springframework.data.r2dbc.convert.R2dbcConverter#getArrayValue(org.springframework.data.r2dbc.dialect.ArrayColumns, org.springframework.data.relational.core.mapping.RelationalPersistentProperty, java.lang.Object) - */ - @Override - public Object getArrayValue(ArrayColumns arrayColumns, RelationalPersistentProperty property, Object value) { - - Class actualType = null; - if (value instanceof Collection) { - actualType = CollectionUtils.findCommonElementType((Collection) value); - } else if (value.getClass().isArray()) { - actualType = value.getClass().getComponentType(); - } - - if (actualType == null) { - actualType = property.getActualType(); - } - - Class targetType = arrayColumns.getArrayType(actualType); - - if (!property.isArray() || !targetType.isAssignableFrom(value.getClass())) { - - int depth = value.getClass().isArray() ? 
ArrayUtils.getDimensionDepth(value.getClass()) : 1; - Class targetArrayType = ArrayUtils.getArrayClass(targetType, depth); - return getConversionService().convert(value, targetArrayType); - } - - return value; - } - - /* - * (non-Javadoc) - * @see org.springframework.data.r2dbc.convert.R2dbcConverter#getTargetType(Class) - */ - @Override - public Class getTargetType(Class valueType) { - - Optional> writeTarget = getConversions().getCustomWriteTarget(valueType); - - return writeTarget.orElseGet(() -> { - return Enum.class.isAssignableFrom(valueType) ? String.class : valueType; - }); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.r2dbc.convert.R2dbcConverter#isSimpleType(Class) - */ - @Override - public boolean isSimpleType(Class type) { - return getConversions().isSimpleType(type); - } - - // ---------------------------------- - // Id handling - // ---------------------------------- - - /** - * Returns a {@link java.util.function.Function} that populates the id property of the {@code object} from a - * {@link Row}. - * - * @param object must not be {@literal null}. 
- * @return - */ - @Override - @SuppressWarnings("unchecked") - public BiFunction populateIdIfNecessary(T object) { - - Assert.notNull(object, "Entity object must not be null!"); - - Class userClass = ClassUtils.getUserClass(object); - RelationalPersistentEntity entity = getMappingContext().getRequiredPersistentEntity(userClass); - - if (!entity.hasIdProperty()) { - return (row, rowMetadata) -> object; - } - - return (row, metadata) -> { - - PersistentPropertyAccessor propertyAccessor = entity.getPropertyAccessor(object); - RelationalPersistentProperty idProperty = entity.getRequiredIdProperty(); - - boolean idPropertyUpdateNeeded = false; - - Object id = propertyAccessor.getProperty(idProperty); - if (idProperty.getType().isPrimitive()) { - idPropertyUpdateNeeded = id instanceof Number && ((Number) id).longValue() == 0; - } else { - idPropertyUpdateNeeded = id == null; - } - - if (idPropertyUpdateNeeded) { - return potentiallySetId(row, metadata, propertyAccessor, idProperty) // - ? (T) propertyAccessor.getBean() // - : object; - } - - return object; - }; - } - - private boolean potentiallySetId(Row row, RowMetadata metadata, PersistentPropertyAccessor propertyAccessor, - RelationalPersistentProperty idProperty) { - - String idColumnName = idProperty.getColumnName().getReference(); - Object generatedIdValue = extractGeneratedIdentifier(row, metadata, idColumnName); - - if (generatedIdValue == null) { - return false; - } - - ConversionService conversionService = getConversionService(); - propertyAccessor.setProperty(idProperty, conversionService.convert(generatedIdValue, idProperty.getType())); - - return true; - } - - @Nullable - private Object extractGeneratedIdentifier(Row row, RowMetadata metadata, String idColumnName) { - - if (RowMetadataUtils.containsColumn(metadata, idColumnName)) { - return row.get(idColumnName); - } - - Iterable columns = RowMetadataUtils.getColumnMetadata(metadata); - Iterator it = columns.iterator(); - - if (it.hasNext()) { - 
ColumnMetadata column = it.next(); - return row.get(column.getName()); - } - - return null; - } - - private RelationalPersistentEntity getRequiredPersistentEntity(Class type) { - return (RelationalPersistentEntity) getMappingContext().getRequiredPersistentEntity(type); - } - - /** - * Returns given object as {@link Collection}. Will return the {@link Collection} as is if the source is a - * {@link Collection} already, will convert an array into a {@link Collection} or simply create a single element - * collection for everything else. - * - * @param source - * @return - */ - private static Collection asCollection(Object source) { - - if (source instanceof Collection) { - return (Collection) source; - } - - return source.getClass().isArray() ? CollectionUtils.arrayToList(source) : Collections.singleton(source); - } - - enum NoOpParameterValueProvider implements ParameterValueProvider { - - INSTANCE; - - @Override - public T getParameterValue( - org.springframework.data.mapping.Parameter parameter) { - return null; - } - } - - private class RowParameterValueProvider implements ParameterValueProvider { - - private final Row resultSet; - private final RowMetadata metadata; - private final RelationalPersistentEntity entity; - private final RelationalConverter converter; - private final String prefix; - - public RowParameterValueProvider(Row resultSet, RowMetadata metadata, RelationalPersistentEntity entity, - RelationalConverter converter, String prefix) { - this.resultSet = resultSet; - this.metadata = metadata; - this.entity = entity; - this.converter = converter; - this.prefix = prefix; - } - - /* - * (non-Javadoc) - * @see org.springframework.data.mapping.model.ParameterValueProvider#getParameterValue(org.springframework.data.mapping.PreferredConstructor.Parameter) - */ - @Override - @Nullable - public T getParameterValue( - org.springframework.data.mapping.Parameter parameter) { - - RelationalPersistentProperty property = 
this.entity.getRequiredPersistentProperty(parameter.getName()); - Object value = readFrom(this.resultSet, this.metadata, property, this.prefix); - - if (value == null) { - return null; - } - - Class type = parameter.getType().getType(); - - if (type.isInstance(value)) { - return type.cast(value); - } - - try { - return this.converter.getConversionService().convert(value, type); - } catch (Exception o_O) { - throw new MappingException(String.format("Couldn't read parameter %s.", parameter.getName()), o_O); - } - } - } -} diff --git a/src/main/java/org/springframework/data/r2dbc/convert/R2dbcConverter.java b/src/main/java/org/springframework/data/r2dbc/convert/R2dbcConverter.java deleted file mode 100644 index 0d038119..00000000 --- a/src/main/java/org/springframework/data/r2dbc/convert/R2dbcConverter.java +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Copyright 2019-2022 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.r2dbc.convert; - -import io.r2dbc.spi.Row; -import io.r2dbc.spi.RowMetadata; - -import java.util.function.BiFunction; - -import org.springframework.core.convert.ConversionService; -import org.springframework.data.convert.EntityReader; -import org.springframework.data.convert.EntityWriter; -import org.springframework.data.mapping.context.MappingContext; -import org.springframework.data.r2dbc.mapping.OutboundRow; -import org.springframework.data.relational.core.conversion.RelationalConverter; -import org.springframework.data.relational.core.dialect.ArrayColumns; -import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; -import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; - -/** - * Central R2DBC specific converter interface. - * - * @author Mark Paluch - * @see EntityReader - */ -public interface R2dbcConverter - extends EntityReader, EntityWriter, RelationalConverter { - - /** - * Returns the underlying {@link MappingContext} used by the converter. - * - * @return never {@literal null} - */ - MappingContext, ? extends RelationalPersistentProperty> getMappingContext(); - - /** - * Returns the underlying {@link ConversionService} used by the converter. - * - * @return never {@literal null}. - */ - ConversionService getConversionService(); - - /** - * Convert a {@code value} into an array representation according to {@link ArrayColumns}. - * - * @param arrayColumns dialect-specific array handling configuration. - * @param property - * @param value - * @return - */ - Object getArrayValue(ArrayColumns arrayColumns, RelationalPersistentProperty property, Object value); - - /** - * Return the target type for a value considering registered converters. - * - * @param valueType must not be {@literal null}. - * @return - * @since 1.1 - */ - Class getTargetType(Class valueType); - - /** - * Return whether the {@code type} is a simple type. 
Simple types are database primitives or types with a custom - * mapping strategy. - * - * @param type the type to inspect, must not be {@literal null}. - * @return {@literal true} if the type is a simple one. - * @see org.springframework.data.mapping.model.SimpleTypeHolder - * @since 1.2 - */ - boolean isSimpleType(Class type); - - /** - * Returns a {@link java.util.function.Function} that populates the id property of the {@code object} from a - * {@link Row}. - * - * @param object must not be {@literal null}. - * @return - */ - BiFunction populateIdIfNecessary(T object); - - /** - * Reads the given source into the given type. - * - * @param type they type to convert the given source to. - * @param source the source to create an object of the given type from. - * @param metadata the {@link RowMetadata}. - * @return - */ - R read(Class type, Row source, RowMetadata metadata); - -} diff --git a/src/main/java/org/springframework/data/r2dbc/convert/R2dbcConverters.java b/src/main/java/org/springframework/data/r2dbc/convert/R2dbcConverters.java deleted file mode 100644 index 97d65b71..00000000 --- a/src/main/java/org/springframework/data/r2dbc/convert/R2dbcConverters.java +++ /dev/null @@ -1,303 +0,0 @@ -/* - * Copyright 2019-2022 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.springframework.data.r2dbc.convert; - -import io.r2dbc.spi.Row; - -import java.time.LocalDate; -import java.time.LocalDateTime; -import java.time.LocalTime; -import java.time.OffsetDateTime; -import java.time.ZonedDateTime; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; -import java.util.UUID; - -import org.springframework.core.convert.converter.Converter; -import org.springframework.core.convert.converter.ConverterFactory; -import org.springframework.data.convert.CustomConversions; -import org.springframework.data.convert.Jsr310Converters; -import org.springframework.data.convert.WritingConverter; -import org.springframework.data.r2dbc.convert.R2dbcConverters.RowToNumberConverterFactory.LocalDateConverterOverride; -import org.springframework.data.r2dbc.convert.R2dbcConverters.RowToNumberConverterFactory.LocalDateTimeConverterOverride; -import org.springframework.data.r2dbc.convert.R2dbcConverters.RowToNumberConverterFactory.LocalTimeConverterOverride; -import org.springframework.data.r2dbc.convert.R2dbcConverters.RowToNumberConverterFactory.RowToOffsetDateTimeConverter; -import org.springframework.data.r2dbc.convert.R2dbcConverters.RowToNumberConverterFactory.RowToStringConverter; -import org.springframework.data.r2dbc.convert.R2dbcConverters.RowToNumberConverterFactory.RowToUuidConverter; -import org.springframework.data.r2dbc.convert.R2dbcConverters.RowToNumberConverterFactory.RowToZonedDateTimeConverter; -import org.springframework.util.Assert; -import org.springframework.util.NumberUtils; - -/** - * Wrapper class to contain useful converters for the usage with R2DBC. 
- * - * @author Hebert Coelho - * @author Mark Paluch - */ -abstract class R2dbcConverters { - - private R2dbcConverters() {} - - /** - * @return A list of the registered converters - */ - public static Collection getConvertersToRegister() { - - List converters = new ArrayList<>(); - - converters.add(RowToBooleanConverter.INSTANCE); - converters.add(RowToNumberConverterFactory.INSTANCE); - converters.add(RowToLocalDateConverter.INSTANCE); - converters.add(RowToLocalDateTimeConverter.INSTANCE); - converters.add(RowToLocalTimeConverter.INSTANCE); - converters.add(RowToOffsetDateTimeConverter.INSTANCE); - converters.add(RowToStringConverter.INSTANCE); - converters.add(RowToUuidConverter.INSTANCE); - converters.add(RowToZonedDateTimeConverter.INSTANCE); - - return converters; - } - - /** - * @return A list of the registered converters to enforce JSR-310 type usage. - * @see CustomConversions#DEFAULT_CONVERTERS - * @see Jsr310Converters - */ - public static Collection getOverrideConvertersToRegister() { - - List converters = new ArrayList<>(); - - converters.add(LocalDateConverterOverride.INSTANCE); - converters.add(LocalDateTimeConverterOverride.INSTANCE); - converters.add(LocalTimeConverterOverride.INSTANCE); - - return converters; - } - - /** - * Simple singleton to convert {@link Row}s to their {@link Boolean} representation. - * - * @author Hebert Coelho - */ - public enum RowToBooleanConverter implements Converter { - - INSTANCE; - - @Override - public Boolean convert(Row row) { - return row.get(0, Boolean.class); - } - } - - /** - * Simple singleton to convert {@link Row}s to their {@link LocalDate} representation. - * - * @author Hebert Coelho - */ - public enum RowToLocalDateConverter implements Converter { - - INSTANCE; - - @Override - public LocalDate convert(Row row) { - return row.get(0, LocalDate.class); - } - } - - /** - * Simple singleton to convert {@link Row}s to their {@link LocalDateTime} representation. 
- * - * @author Hebert Coelho - */ - public enum RowToLocalDateTimeConverter implements Converter { - - INSTANCE; - - @Override - public LocalDateTime convert(Row row) { - return row.get(0, LocalDateTime.class); - } - } - - /** - * Simple singleton to convert {@link Row}s to their {@link LocalTime} representation. - * - * @author Hebert Coelho - */ - public enum RowToLocalTimeConverter implements Converter { - - INSTANCE; - - @Override - public LocalTime convert(Row row) { - return row.get(0, LocalTime.class); - } - } - - /** - * Singleton converter factory to convert the first column of a {@link Row} to a {@link Number}. - *

- * Support Number classes including Byte, Short, Integer, Float, Double, Long, BigInteger, BigDecimal. This class - * delegates to {@link NumberUtils#convertNumberToTargetClass(Number, Class)} to perform the conversion. - * - * @see Byte - * @see Short - * @see Integer - * @see Long - * @see java.math.BigInteger - * @see Float - * @see Double - * @see java.math.BigDecimal - * @author Hebert Coelho - */ - public enum RowToNumberConverterFactory implements ConverterFactory { - - INSTANCE; - - @Override - public Converter getConverter(Class targetType) { - Assert.notNull(targetType, "Target type must not be null"); - return new RowToNumber<>(targetType); - } - - static class RowToNumber implements Converter { - - private final Class targetType; - - RowToNumber(Class targetType) { - this.targetType = targetType; - } - - @Override - public T convert(Row source) { - - Object object = source.get(0); - - return (object != null ? NumberUtils.convertNumberToTargetClass((Number) object, this.targetType) : null); - } - } - - /** - * Simple singleton to convert {@link Row}s to their {@link OffsetDateTime} representation. - * - * @author Hebert Coelho - */ - public enum RowToOffsetDateTimeConverter implements Converter { - - INSTANCE; - - @Override - public OffsetDateTime convert(Row row) { - return row.get(0, OffsetDateTime.class); - } - } - - /** - * Simple singleton to convert {@link Row}s to their {@link String} representation. - * - * @author Hebert Coelho - */ - public enum RowToStringConverter implements Converter { - - INSTANCE; - - @Override - public String convert(Row row) { - return row.get(0, String.class); - } - } - - /** - * Simple singleton to convert {@link Row}s to their {@link UUID} representation. 
- * - * @author Hebert Coelho - */ - public enum RowToUuidConverter implements Converter { - - INSTANCE; - - @Override - public UUID convert(Row row) { - return row.get(0, UUID.class); - } - } - - /** - * Simple singleton to convert {@link Row}s to their {@link ZonedDateTime} representation. - * - * @author Hebert Coelho - */ - public enum RowToZonedDateTimeConverter implements Converter { - - INSTANCE; - - @Override - public ZonedDateTime convert(Row row) { - return row.get(0, ZonedDateTime.class); - } - } - - /** - * {@link Converter} override that forces {@link LocalDate} to stay on {@link LocalDate}. - * - * @author Mark Paluch - */ - @WritingConverter - public enum LocalDateConverterOverride implements Converter { - - INSTANCE; - - @Override - public LocalDate convert(LocalDate value) { - return value; - } - } - - /** - * {@link Converter} override that forces {@link LocalDateTime} to stay on {@link LocalDateTime}. - * - * @author Mark Paluch - */ - @WritingConverter - public enum LocalDateTimeConverterOverride implements Converter { - - INSTANCE; - - @Override - public LocalDateTime convert(LocalDateTime value) { - return value; - } - } - - /** - * {@link Converter} override that forces {@link LocalTime} to stay on {@link LocalTime}. 
- * - * @author Mark Paluch - */ - @WritingConverter - public enum LocalTimeConverterOverride implements Converter { - - INSTANCE; - - @Override - public LocalTime convert(LocalTime value) { - return value; - } - } - } -} diff --git a/src/main/java/org/springframework/data/r2dbc/convert/R2dbcCustomConversions.java b/src/main/java/org/springframework/data/r2dbc/convert/R2dbcCustomConversions.java deleted file mode 100644 index f1cc032d..00000000 --- a/src/main/java/org/springframework/data/r2dbc/convert/R2dbcCustomConversions.java +++ /dev/null @@ -1,109 +0,0 @@ -package org.springframework.data.r2dbc.convert; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.Date; -import java.util.List; - -import org.springframework.data.convert.CustomConversions; -import org.springframework.data.r2dbc.dialect.R2dbcDialect; -import org.springframework.data.r2dbc.mapping.R2dbcSimpleTypeHolder; - -/** - * Value object to capture custom conversion. {@link R2dbcCustomConversions} also act as factory for - * {@link org.springframework.data.mapping.model.SimpleTypeHolder} - * - * @author Mark Paluch - * @see CustomConversions - * @see org.springframework.data.mapping.model.SimpleTypeHolder - */ -public class R2dbcCustomConversions extends CustomConversions { - - public static final List STORE_CONVERTERS; - - public static final StoreConversions STORE_CONVERSIONS; - - static { - - List converters = new ArrayList<>(R2dbcConverters.getConvertersToRegister()); - - STORE_CONVERTERS = Collections.unmodifiableList(converters); - STORE_CONVERSIONS = StoreConversions.of(R2dbcSimpleTypeHolder.HOLDER, STORE_CONVERTERS); - } - - /** - * Create a new {@link R2dbcCustomConversions} instance registering the given converters. - * - * @param converters must not be {@literal null}. 
- * @deprecated since 1.3, use {@link #of(R2dbcDialect, Object...)} or - * {@link #R2dbcCustomConversions(StoreConversions, Collection)} directly to consider dialect-native - * simple types. Use {@link CustomConversions.StoreConversions#NONE} to omit store-specific converters. - */ - @Deprecated - public R2dbcCustomConversions(Collection converters) { - super(new R2dbcCustomConversionsConfiguration(STORE_CONVERSIONS, appendOverrides(converters))); - } - - /** - * Create a new {@link R2dbcCustomConversions} instance registering the given converters. - * - * @param storeConversions must not be {@literal null}. - * @param converters must not be {@literal null}. - */ - public R2dbcCustomConversions(StoreConversions storeConversions, Collection converters) { - super(new R2dbcCustomConversionsConfiguration(storeConversions, appendOverrides(converters))); - } - - /** - * Create a new {@link R2dbcCustomConversions} from the given {@link R2dbcDialect} and {@code converters}. - * - * @param dialect must not be {@literal null}. - * @param converters must not be {@literal null}. - * @return the custom conversions object. - * @since 1.2 - */ - public static R2dbcCustomConversions of(R2dbcDialect dialect, Object... converters) { - return of(dialect, Arrays.asList(converters)); - } - - /** - * Create a new {@link R2dbcCustomConversions} from the given {@link R2dbcDialect} and {@code converters}. - * - * @param dialect must not be {@literal null}. - * @param converters must not be {@literal null}. - * @return the custom conversions object. 
- * @since 1.2 - */ - public static R2dbcCustomConversions of(R2dbcDialect dialect, Collection converters) { - - List storeConverters = new ArrayList<>(dialect.getConverters()); - storeConverters.addAll(R2dbcCustomConversions.STORE_CONVERTERS); - - return new R2dbcCustomConversions(StoreConversions.of(dialect.getSimpleTypeHolder(), storeConverters), converters); - } - - private static List appendOverrides(Collection converters) { - - List objects = new ArrayList<>(converters); - objects.addAll(R2dbcConverters.getOverrideConvertersToRegister()); - - return objects; - } - - static class R2dbcCustomConversionsConfiguration extends ConverterConfiguration { - - public R2dbcCustomConversionsConfiguration(StoreConversions storeConversions, List userConverters) { - super(storeConversions, userConverters, convertiblePair -> { - - if (convertiblePair.getSourceType().getName().startsWith("java.time.") - && convertiblePair.getTargetType().equals(Date.class)) { - return false; - } - - return true; - }); - } - } -} diff --git a/src/main/java/org/springframework/data/r2dbc/convert/RowMetadataUtils.java b/src/main/java/org/springframework/data/r2dbc/convert/RowMetadataUtils.java deleted file mode 100644 index b57506b0..00000000 --- a/src/main/java/org/springframework/data/r2dbc/convert/RowMetadataUtils.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Copyright 2021-2022 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.r2dbc.convert; - -import io.r2dbc.spi.ColumnMetadata; -import io.r2dbc.spi.RowMetadata; - -import java.lang.reflect.Method; - -import org.springframework.lang.Nullable; -import org.springframework.util.ReflectionUtils; - -/** - * Utility methods for {@link io.r2dbc.spi.RowMetadata} - * - * @author Mark Paluch - * @since 1.3.7 - */ -class RowMetadataUtils { - - private static final @Nullable Method getColumnMetadatas = ReflectionUtils.findMethod(RowMetadata.class, - "getColumnMetadatas"); - - /** - * Check whether the column {@code name} is contained in {@link RowMetadata}. The check happens case-insensitive. - * - * @param metadata the metadata object to inspect. - * @param name column name. - * @return {@code true} if the metadata contains the column {@code name}. - */ - public static boolean containsColumn(RowMetadata metadata, String name) { - - Iterable columns = getColumnMetadata(metadata); - - for (ColumnMetadata columnMetadata : columns) { - if (name.equalsIgnoreCase(columnMetadata.getName())) { - return true; - } - } - - return false; - } - - /** - * Return the {@link Iterable} of {@link ColumnMetadata} from {@link RowMetadata}. - * - * @param metadata the metadata object to inspect. - * @return - * @since 1.4.1 - */ - @SuppressWarnings("unchecked") - public static Iterable getColumnMetadata(RowMetadata metadata) { - - if (getColumnMetadatas != null) { - // Return type of RowMetadata.getColumnMetadatas was updated with R2DBC 0.9. 
- return (Iterable) ReflectionUtils.invokeMethod(getColumnMetadatas, metadata); - } - - return metadata.getColumnMetadatas(); - } -} diff --git a/src/main/java/org/springframework/data/r2dbc/convert/RowPropertyAccessor.java b/src/main/java/org/springframework/data/r2dbc/convert/RowPropertyAccessor.java deleted file mode 100644 index eaf08fde..00000000 --- a/src/main/java/org/springframework/data/r2dbc/convert/RowPropertyAccessor.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright 2013-2022 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.r2dbc.convert; - -import io.r2dbc.spi.Row; -import io.r2dbc.spi.RowMetadata; - -import org.springframework.expression.EvaluationContext; -import org.springframework.expression.PropertyAccessor; -import org.springframework.expression.TypedValue; -import org.springframework.lang.Nullable; - -/** - * {@link PropertyAccessor} to read values from a {@link Row}. 
- * - * @author Mark Paluch - * @since 1.2 - */ -class RowPropertyAccessor implements PropertyAccessor { - - private final @Nullable RowMetadata rowMetadata; - - RowPropertyAccessor(@Nullable RowMetadata rowMetadata) { - this.rowMetadata = rowMetadata; - } - - @Override - public Class[] getSpecificTargetClasses() { - return new Class[] { Row.class }; - } - - @Override - public boolean canRead(EvaluationContext context, @Nullable Object target, String name) { - return rowMetadata != null && target != null && RowMetadataUtils.containsColumn(rowMetadata, name); - } - - @Override - public TypedValue read(EvaluationContext context, @Nullable Object target, String name) { - - if (target == null) { - return TypedValue.NULL; - } - - Object value = ((Row) target).get(name); - - if (value == null) { - return TypedValue.NULL; - } - - return new TypedValue(value); - } - - @Override - public boolean canWrite(EvaluationContext context, @Nullable Object target, String name) { - return false; - } - - @Override - public void write(EvaluationContext context, @Nullable Object target, String name, @Nullable Object newValue) { - throw new UnsupportedOperationException(); - } -} diff --git a/src/main/java/org/springframework/data/r2dbc/convert/package-info.java b/src/main/java/org/springframework/data/r2dbc/convert/package-info.java deleted file mode 100644 index cb313f6a..00000000 --- a/src/main/java/org/springframework/data/r2dbc/convert/package-info.java +++ /dev/null @@ -1,6 +0,0 @@ -/** - * R2DBC-specific conversion and converter implementations. 
- */ -@org.springframework.lang.NonNullApi -@org.springframework.lang.NonNullFields -package org.springframework.data.r2dbc.convert; diff --git a/src/main/java/org/springframework/data/r2dbc/core/BindParameterSource.java b/src/main/java/org/springframework/data/r2dbc/core/BindParameterSource.java deleted file mode 100644 index bf17e32b..00000000 --- a/src/main/java/org/springframework/data/r2dbc/core/BindParameterSource.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright 2019-2022 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.r2dbc.core; - -import org.springframework.data.util.Streamable; -import org.springframework.lang.Nullable; - -/** - * Interface that defines common functionality for objects that can offer parameter values for named bind parameters, - * serving as argument for {@link NamedParameterExpander} operations. - *

- * This interface allows for the specification of the type in addition to parameter values. All parameter values and - * types are identified by specifying the name of the parameter. - *

- * Intended to wrap various implementations like a {@link java.util.Map} with a consistent interface. - * - * @author Mark Paluch - * @see MapBindParameterSource - * @deprecated since 1.2, without replacement. - */ -@Deprecated -interface BindParameterSource { - - /** - * Determine whether there is a value for the specified named parameter. - * - * @param paramName the name of the parameter. - * @return {@literal true} if there is a value defined; {@literal false} otherwise. - */ - boolean hasValue(String paramName); - - /** - * Return the parameter value for the requested named parameter. - * - * @param paramName the name of the parameter. - * @return the value of the specified parameter, can be {@literal null}. - * @throws IllegalArgumentException if there is no value for the requested parameter. - */ - @Nullable - Object getValue(String paramName) throws IllegalArgumentException; - - /** - * Determine the type for the specified named parameter. - * - * @param paramName the name of the parameter. - * @return the type of the specified parameter, or {@link Object#getClass()} if not known. - */ - default Class getType(String paramName) { - return Object.class; - } - - /** - * Returns parameter names of the underlying parameter source. - * - * @return parameter names of the underlying parameter source. - */ - Streamable getParameterNames(); -} diff --git a/src/main/java/org/springframework/data/r2dbc/core/DefaultReactiveDataAccessStrategy.java b/src/main/java/org/springframework/data/r2dbc/core/DefaultReactiveDataAccessStrategy.java deleted file mode 100644 index 82559088..00000000 --- a/src/main/java/org/springframework/data/r2dbc/core/DefaultReactiveDataAccessStrategy.java +++ /dev/null @@ -1,354 +0,0 @@ -/* - * Copyright 2018-2022 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.r2dbc.core; - -import io.r2dbc.spi.Row; -import io.r2dbc.spi.RowMetadata; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.function.BiFunction; - -import org.springframework.dao.InvalidDataAccessApiUsageException; -import org.springframework.dao.InvalidDataAccessResourceUsageException; -import org.springframework.data.mapping.context.MappingContext; -import org.springframework.data.r2dbc.convert.EntityRowMapper; -import org.springframework.data.r2dbc.convert.MappingR2dbcConverter; -import org.springframework.data.r2dbc.convert.R2dbcConverter; -import org.springframework.data.r2dbc.convert.R2dbcCustomConversions; -import org.springframework.data.r2dbc.dialect.R2dbcDialect; -import org.springframework.data.r2dbc.mapping.OutboundRow; -import org.springframework.data.r2dbc.mapping.R2dbcMappingContext; -import org.springframework.data.r2dbc.query.UpdateMapper; -import org.springframework.data.r2dbc.support.ArrayUtils; -import org.springframework.data.relational.core.dialect.ArrayColumns; -import org.springframework.data.relational.core.dialect.RenderContextFactory; -import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; -import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; -import org.springframework.data.relational.core.sql.SqlIdentifier; -import org.springframework.lang.Nullable; -import 
org.springframework.r2dbc.core.Parameter; -import org.springframework.r2dbc.core.PreparedOperation; -import org.springframework.util.Assert; -import org.springframework.util.ClassUtils; -import org.springframework.util.CollectionUtils; - -/** - * Default {@link ReactiveDataAccessStrategy} implementation. - * - * @author Mark Paluch - * @author Louis Morgan - * @author Jens Schauder - */ -public class DefaultReactiveDataAccessStrategy implements ReactiveDataAccessStrategy { - - private final R2dbcDialect dialect; - private final R2dbcConverter converter; - private final UpdateMapper updateMapper; - private final MappingContext, ? extends RelationalPersistentProperty> mappingContext; - private final StatementMapper statementMapper; - private final NamedParameterExpander expander = new NamedParameterExpander(); - - /** - * Creates a new {@link DefaultReactiveDataAccessStrategy} given {@link R2dbcDialect} and optional - * {@link org.springframework.core.convert.converter.Converter}s. - * - * @param dialect the {@link R2dbcDialect} to use. - */ - public DefaultReactiveDataAccessStrategy(R2dbcDialect dialect) { - this(dialect, Collections.emptyList()); - } - - /** - * Creates a new {@link DefaultReactiveDataAccessStrategy} given {@link R2dbcDialect} and optional - * {@link org.springframework.core.convert.converter.Converter}s. - * - * @param dialect the {@link R2dbcDialect} to use. - * @param converters custom converters to register, must not be {@literal null}. - * @see R2dbcCustomConversions - * @see org.springframework.core.convert.converter.Converter - */ - public DefaultReactiveDataAccessStrategy(R2dbcDialect dialect, Collection converters) { - this(dialect, createConverter(dialect, converters)); - } - - /** - * Creates a new {@link R2dbcConverter} given {@link R2dbcDialect} and custom {@code converters}. - * - * @param dialect must not be {@literal null}. - * @param converters must not be {@literal null}. - * @return the {@link R2dbcConverter}. 
- */ - public static R2dbcConverter createConverter(R2dbcDialect dialect, Collection converters) { - - Assert.notNull(dialect, "Dialect must not be null"); - Assert.notNull(converters, "Converters must not be null"); - - R2dbcCustomConversions customConversions = R2dbcCustomConversions.of(dialect, converters); - - R2dbcMappingContext context = new R2dbcMappingContext(); - context.setSimpleTypeHolder(customConversions.getSimpleTypeHolder()); - - return new MappingR2dbcConverter(context, customConversions); - } - - /** - * Creates a new {@link DefaultReactiveDataAccessStrategy} given {@link R2dbcDialect} and {@link R2dbcConverter}. - * - * @param dialect the {@link R2dbcDialect} to use. - * @param converter must not be {@literal null}. - */ - @SuppressWarnings("unchecked") - public DefaultReactiveDataAccessStrategy(R2dbcDialect dialect, R2dbcConverter converter) { - - Assert.notNull(dialect, "Dialect must not be null"); - Assert.notNull(converter, "RelationalConverter must not be null"); - - this.converter = converter; - this.updateMapper = new UpdateMapper(dialect, converter); - this.mappingContext = (MappingContext, ? 
extends RelationalPersistentProperty>) this.converter - .getMappingContext(); - this.dialect = dialect; - - RenderContextFactory factory = new RenderContextFactory(dialect); - this.statementMapper = new DefaultStatementMapper(dialect, factory.createRenderContext(), this.updateMapper, - this.mappingContext); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.r2dbc.function.ReactiveDataAccessStrategy#getAllColumns(java.lang.Class) - */ - @Override - public List getAllColumns(Class entityType) { - - RelationalPersistentEntity persistentEntity = getPersistentEntity(entityType); - - if (persistentEntity == null) { - return Collections.singletonList(SqlIdentifier.unquoted("*")); - } - - List columnNames = new ArrayList<>(); - for (RelationalPersistentProperty property : persistentEntity) { - columnNames.add(property.getColumnName()); - } - - return columnNames; - } - - /* - * (non-Javadoc) - * @see org.springframework.data.r2dbc.function.ReactiveDataAccessStrategy#getIdentifierColumns(java.lang.Class) - */ - @Override - public List getIdentifierColumns(Class entityType) { - - RelationalPersistentEntity persistentEntity = getRequiredPersistentEntity(entityType); - - List columnNames = new ArrayList<>(); - for (RelationalPersistentProperty property : persistentEntity) { - - if (property.isIdProperty()) { - columnNames.add(property.getColumnName()); - } - } - - return columnNames; - } - - /* - * (non-Javadoc) - * @see org.springframework.data.r2dbc.function.ReactiveDataAccessStrategy#getOutboundRow(java.lang.Object) - */ - public OutboundRow getOutboundRow(Object object) { - - Assert.notNull(object, "Entity object must not be null!"); - - OutboundRow row = new OutboundRow(); - - this.converter.write(object, row); - - RelationalPersistentEntity entity = getRequiredPersistentEntity(ClassUtils.getUserClass(object)); - - for (RelationalPersistentProperty property : entity) { - - Parameter value = row.get(property.getColumnName()); - if (value != null && 
shouldConvertArrayValue(property, value)) { - - Parameter writeValue = getArrayValue(value, property); - row.put(property.getColumnName(), writeValue); - } - } - - return row; - } - - private boolean shouldConvertArrayValue(RelationalPersistentProperty property, Parameter value) { - - if (!property.isCollectionLike()) { - return false; - } - - if (value.hasValue() && (value.getValue() instanceof Collection || value.getValue().getClass().isArray())) { - return true; - } - - if (Collection.class.isAssignableFrom(value.getType()) || value.getType().isArray()) { - return true; - } - - return false; - } - - private Parameter getArrayValue(Parameter value, RelationalPersistentProperty property) { - - if (value.getType().equals(byte[].class)) { - return value; - } - - ArrayColumns arrayColumns = this.dialect.getArraySupport(); - - if (!arrayColumns.isSupported()) { - - throw new InvalidDataAccessResourceUsageException( - "Dialect " + this.dialect.getClass().getName() + " does not support array columns"); - } - - Class actualType = null; - if (value.getValue() instanceof Collection) { - actualType = CollectionUtils.findCommonElementType((Collection) value.getValue()); - } else if (!value.isEmpty() && value.getValue().getClass().isArray()) { - actualType = value.getValue().getClass().getComponentType(); - } - - if (actualType == null) { - actualType = property.getActualType(); - } - - actualType = converter.getTargetType(actualType); - - if (value.isEmpty()) { - - Class targetType = arrayColumns.getArrayType(actualType); - int depth = actualType.isArray() ? 
ArrayUtils.getDimensionDepth(actualType) : 1; - Class targetArrayType = ArrayUtils.getArrayClass(targetType, depth); - return Parameter.empty(targetArrayType); - } - - return Parameter.fromOrEmpty(this.converter.getArrayValue(arrayColumns, property, value.getValue()), - actualType); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.r2dbc.function.ReactiveDataAccessStrategy#getBindValue(Parameter) - */ - @Override - public Parameter getBindValue(Parameter value) { - return this.updateMapper.getBindValue(value); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.r2dbc.function.ReactiveDataAccessStrategy#getRowMapper(java.lang.Class) - */ - @Override - public BiFunction getRowMapper(Class typeToRead) { - return new EntityRowMapper<>(typeToRead, this.converter); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.r2dbc.core.ReactiveDataAccessStrategy#processNamedParameters(java.lang.String, org.springframework.data.r2dbc.core.ReactiveDataAccessStrategy.NamedParameterProvider) - */ - @Override - public PreparedOperation processNamedParameters(String query, NamedParameterProvider parameterProvider) { - - List parameterNames = this.expander.getParameterNames(query); - - Map namedBindings = new LinkedHashMap<>(parameterNames.size()); - for (String parameterName : parameterNames) { - - Parameter value = parameterProvider.getParameter(parameterNames.indexOf(parameterName), parameterName); - - if (value == null) { - throw new InvalidDataAccessApiUsageException( - String.format("No parameter specified for [%s] in query [%s]", parameterName, query)); - } - - namedBindings.put(parameterName, value); - } - - return this.expander.expand(query, this.dialect.getBindMarkersFactory(), new MapBindParameterSource(namedBindings)); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.r2dbc.function.ReactiveDataAccessStrategy#getTableName(java.lang.Class) - */ - @Override - public SqlIdentifier getTableName(Class type) { - return 
getRequiredPersistentEntity(type).getTableName(); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.r2dbc.function.ReactiveDataAccessStrategy#toSql(SqlIdentifier) - */ - @Override - public String toSql(SqlIdentifier identifier) { - return this.updateMapper.toSql(identifier); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.r2dbc.function.ReactiveDataAccessStrategy#getStatementMapper() - */ - @Override - public StatementMapper getStatementMapper() { - return this.statementMapper; - } - - /* - * (non-Javadoc) - * @see org.springframework.data.r2dbc.function.ReactiveDataAccessStrategy#getConverter() - */ - public R2dbcConverter getConverter() { - return this.converter; - } - - public MappingContext, ? extends RelationalPersistentProperty> getMappingContext() { - return this.mappingContext; - } - - @Override - public String renderForGeneratedValues(SqlIdentifier identifier) { - return dialect.renderForGeneratedValues(identifier); - } - - private RelationalPersistentEntity getRequiredPersistentEntity(Class typeToRead) { - return this.mappingContext.getRequiredPersistentEntity(typeToRead); - } - - @Nullable - private RelationalPersistentEntity getPersistentEntity(Class typeToRead) { - return this.mappingContext.getPersistentEntity(typeToRead); - } -} diff --git a/src/main/java/org/springframework/data/r2dbc/core/DefaultStatementMapper.java b/src/main/java/org/springframework/data/r2dbc/core/DefaultStatementMapper.java deleted file mode 100644 index 5fb20ddd..00000000 --- a/src/main/java/org/springframework/data/r2dbc/core/DefaultStatementMapper.java +++ /dev/null @@ -1,417 +0,0 @@ -/* - * Copyright 2019-2022 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.r2dbc.core; - -import java.util.ArrayList; -import java.util.List; - -import org.springframework.data.mapping.context.MappingContext; -import org.springframework.data.r2dbc.convert.R2dbcConverter; -import org.springframework.data.r2dbc.dialect.R2dbcDialect; -import org.springframework.data.r2dbc.query.BoundAssignments; -import org.springframework.data.r2dbc.query.BoundCondition; -import org.springframework.data.r2dbc.query.UpdateMapper; -import org.springframework.data.relational.core.dialect.RenderContextFactory; -import org.springframework.data.relational.core.mapping.RelationalPersistentEntity; -import org.springframework.data.relational.core.mapping.RelationalPersistentProperty; -import org.springframework.data.relational.core.query.CriteriaDefinition; -import org.springframework.data.relational.core.sql.*; -import org.springframework.data.relational.core.sql.InsertBuilder.InsertValuesWithBuild; -import org.springframework.data.relational.core.sql.render.RenderContext; -import org.springframework.data.relational.core.sql.render.SqlRenderer; -import org.springframework.lang.Nullable; -import org.springframework.r2dbc.core.PreparedOperation; -import org.springframework.r2dbc.core.binding.BindMarkers; -import org.springframework.r2dbc.core.binding.BindTarget; -import org.springframework.r2dbc.core.binding.Bindings; -import org.springframework.util.Assert; - -/** - * Default {@link StatementMapper} implementation. 
- * - * @author Mark Paluch - * @author Roman Chigvintsev - * @author Mingyuan Wu - */ -class DefaultStatementMapper implements StatementMapper { - - private final R2dbcDialect dialect; - private final RenderContext renderContext; - private final UpdateMapper updateMapper; - private final MappingContext, ? extends RelationalPersistentProperty> mappingContext; - - DefaultStatementMapper(R2dbcDialect dialect, R2dbcConverter converter) { - - RenderContextFactory factory = new RenderContextFactory(dialect); - - this.dialect = dialect; - this.renderContext = factory.createRenderContext(); - this.updateMapper = new UpdateMapper(dialect, converter); - this.mappingContext = converter.getMappingContext(); - } - - DefaultStatementMapper(R2dbcDialect dialect, RenderContext renderContext, UpdateMapper updateMapper, - MappingContext, ? extends RelationalPersistentProperty> mappingContext) { - this.dialect = dialect; - this.renderContext = renderContext; - this.updateMapper = updateMapper; - this.mappingContext = mappingContext; - } - - /* - * (non-Javadoc) - * @see org.springframework.data.r2dbc.function.StatementMapper#forType(java.lang.Class) - */ - @Override - @SuppressWarnings("unchecked") - public TypedStatementMapper forType(Class type) { - - Assert.notNull(type, "Type must not be null!"); - - return new DefaultTypedStatementMapper<>( - (RelationalPersistentEntity) this.mappingContext.getRequiredPersistentEntity(type)); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.r2dbc.function.StatementMapper#getMappedObject(org.springframework.data.r2dbc.function.StatementMapper.SelectSpec) - */ - @Override - public PreparedOperation getMappedObject(SelectSpec selectSpec) { - return getMappedObject(selectSpec, null); - } - - private PreparedOperation