, String> typeMap = new HashMap<>();
+
+ public DefaultSqlTypeMapping() {
+
+ typeMap.put(String.class, "VARCHAR(255 BYTE)");
+ typeMap.put(Boolean.class, "TINYINT");
+ typeMap.put(Double.class, "DOUBLE");
+ typeMap.put(Float.class, "FLOAT");
+ typeMap.put(Integer.class, "INT");
+ typeMap.put(Long.class, "BIGINT");
+
+ typeMap.put(BigInteger.class, "BIGINT");
+ typeMap.put(BigDecimal.class, "NUMERIC");
+
+ typeMap.put(UUID.class, "UUID");
+
+ typeMap.put(LocalDate.class, "DATE");
+ typeMap.put(LocalTime.class, "TIME");
+ typeMap.put(LocalDateTime.class, "TIMESTAMP");
+
+ typeMap.put(ZonedDateTime.class, "TIMESTAMPTZ");
+ }
+
+ @Override
+ public String getColumnType(RelationalPersistentProperty property) {
+ return typeMap.get(ClassUtils.resolvePrimitiveIfNecessary(property.getActualType()));
+ }
+}
diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/LiquibaseChangeSetWriter.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/LiquibaseChangeSetWriter.java
new file mode 100644
index 0000000000..b935127547
--- /dev/null
+++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/LiquibaseChangeSetWriter.java
@@ -0,0 +1,596 @@
+/*
+ * Copyright 2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.springframework.data.jdbc.core.mapping.schema;
+
+import liquibase.CatalogAndSchema;
+import liquibase.change.AddColumnConfig;
+import liquibase.change.ColumnConfig;
+import liquibase.change.ConstraintsConfig;
+import liquibase.change.core.AddColumnChange;
+import liquibase.change.core.CreateTableChange;
+import liquibase.change.core.DropColumnChange;
+import liquibase.change.core.DropTableChange;
+import liquibase.changelog.ChangeLogChild;
+import liquibase.changelog.ChangeLogParameters;
+import liquibase.changelog.ChangeSet;
+import liquibase.changelog.DatabaseChangeLog;
+import liquibase.database.Database;
+import liquibase.exception.ChangeLogParseException;
+import liquibase.exception.LiquibaseException;
+import liquibase.parser.ChangeLogParser;
+import liquibase.parser.core.yaml.YamlChangeLogParser;
+import liquibase.resource.DirectoryResourceAccessor;
+import liquibase.serializer.ChangeLogSerializer;
+import liquibase.serializer.core.yaml.YamlChangeLogSerializer;
+import liquibase.snapshot.DatabaseSnapshot;
+import liquibase.snapshot.SnapshotControl;
+import liquibase.snapshot.SnapshotGeneratorFactory;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.text.Collator;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Comparator;
+import java.util.List;
+import java.util.Locale;
+import java.util.Set;
+import java.util.function.BiPredicate;
+import java.util.function.Predicate;
+
+import org.springframework.core.io.Resource;
+import org.springframework.data.mapping.context.MappingContext;
+import org.springframework.data.relational.core.mapping.RelationalPersistentEntity;
+import org.springframework.data.relational.core.mapping.RelationalPersistentProperty;
+import org.springframework.data.util.Predicates;
+import org.springframework.lang.Nullable;
+import org.springframework.util.Assert;
+
+/**
+ * Use this class to write Liquibase ChangeSets.
+ *
+ * This writer uses {@link MappingContext} as input to determine mapped entities. Entities can be filtered through a
+ * {@link #setSchemaFilter(Predicate) schema filter} to include/exclude entities. By default, all entities within the
+ * mapping context are considered for computing the expected schema.
+ *
+ * This writer operates in two modes:
+ *
+ * - Initial Schema Creation
+ * - Differential Schema Change Creation
+ *
+ * The {@link #writeChangeSet(Resource) initial mode} allows creating the full schema without considering any existing
+ * tables. The {@link #writeChangeSet(Resource, Database) differential schema mode} uses a {@link Database} object to
+ * determine existing tables and columns. It creates in addition to table creations also changes to drop tables, drop
+ * columns and add columns. By default, the {@link #setDropTableFilter(Predicate) DROP TABLE} and the
+ * {@link #setDropColumnFilter(BiPredicate) DROP COLUMN} filters exclude all tables respective columns from being
+ * dropped.
+ *
+ * In differential schema mode, table and column names are compared using a case-insensitive comparator, see
+ * {@link Collator#PRIMARY}.
+ *
+ * The writer can be configured to use specific ChangeLogSerializers and ChangeLogParsers defaulting to YAML.
+ *
+ * @author Kurt Niemi
+ * @author Mark Paluch
+ * @since 3.2
+ */
+public class LiquibaseChangeSetWriter {
+
+ public static final String DEFAULT_AUTHOR = "Spring Data Relational";
+ private final MappingContext<? extends RelationalPersistentEntity<?>, ? extends RelationalPersistentProperty> mappingContext;
+
+ private SqlTypeMapping sqlTypeMapping = new DefaultSqlTypeMapping();
+
+ private ChangeLogSerializer changeLogSerializer = new YamlChangeLogSerializer();
+
+ private ChangeLogParser changeLogParser = new YamlChangeLogParser();
+
+ /**
+ * Predicate to identify Liquibase system tables.
+ */
+ private final Predicate<String> isLiquibaseTable = table -> table.toUpperCase(Locale.ROOT)
+ .startsWith("DATABASECHANGELOG");
+
+ /**
+ * Comparator to compare table and column names.
+ */
+ private final Comparator<String> nameComparator = createComparator();
+
+ private static Comparator<String> createComparator() {
+
+ Collator instance = Collator.getInstance(Locale.ROOT);
+ instance.setStrength(Collator.PRIMARY);
+
+ return instance::compare;
+ }
+
+ /**
+ * Filter predicate to determine which persistent entities should be used for schema generation.
+ */
+ private Predicate<RelationalPersistentEntity<?>> schemaFilter = Predicates.isTrue();
+
+ /**
+ * Filter predicate used to determine whether an existing table should be removed. Defaults to {@code false} to keep
+ * existing tables.
+ */
+ private Predicate<String> dropTableFilter = Predicates.isFalse();
+
+ /**
+ * Filter predicate used to determine whether an existing column should be removed. Defaults to {@code false} to keep
+ * existing columns.
+ */
+ private BiPredicate<String, String> dropColumnFilter = (table, column) -> false;
+
+ /**
+ * Use this to generate a ChangeSet that can be used on an empty database.
+ *
+ * @param mappingContext source to determine persistent entities, must not be {@literal null}.
+ */
+ public LiquibaseChangeSetWriter(
+ MappingContext<? extends RelationalPersistentEntity<?>, ? extends RelationalPersistentProperty> mappingContext) {
+
+ Assert.notNull(mappingContext, "MappingContext must not be null");
+
+ this.mappingContext = mappingContext;
+ }
+
+ /**
+ * Configure SQL type mapping. Defaults to {@link DefaultSqlTypeMapping}.
+ *
+ * @param sqlTypeMapping must not be {@literal null}.
+ */
+ public void setSqlTypeMapping(SqlTypeMapping sqlTypeMapping) {
+
+ Assert.notNull(sqlTypeMapping, "SqlTypeMapping must not be null");
+
+ this.sqlTypeMapping = sqlTypeMapping;
+ }
+
+ /**
+ * Set the {@link ChangeLogSerializer}.
+ *
+ * @param changeLogSerializer must not be {@literal null}.
+ */
+ public void setChangeLogSerializer(ChangeLogSerializer changeLogSerializer) {
+
+ Assert.notNull(changeLogSerializer, "ChangeLogSerializer must not be null");
+
+ this.changeLogSerializer = changeLogSerializer;
+ }
+
+ /**
+ * Set the {@link ChangeLogParser}.
+ *
+ * @param changeLogParser must not be {@literal null}.
+ */
+ public void setChangeLogParser(ChangeLogParser changeLogParser) {
+
+ Assert.notNull(changeLogParser, "ChangeLogParser must not be null");
+
+ this.changeLogParser = changeLogParser;
+ }
+
+ /**
+ * Set the filter predicate to identify for which entities to create schema definitions. Existing tables for excluded
+ * entities will show up in {@link #setDropTableFilter(Predicate)}. Returning {@code true} includes the entity;
+ * {@code false} excludes the entity from schema creation.
+ *
+ * @param schemaFilter must not be {@literal null}.
+ */
+ public void setSchemaFilter(Predicate<RelationalPersistentEntity<?>> schemaFilter) {
+
+ Assert.notNull(schemaFilter, "Schema filter must not be null");
+
+ this.schemaFilter = schemaFilter;
+ }
+
+ /**
+ * Set the filter predicate to identify tables to drop. The predicate accepts the table name. Returning {@code true}
+ * will delete the table; {@code false} retains the table.
+ *
+ * @param dropTableFilter must not be {@literal null}.
+ */
+ public void setDropTableFilter(Predicate<String> dropTableFilter) {
+
+ Assert.notNull(dropTableFilter, "Drop Column filter must not be null");
+
+ this.dropTableFilter = dropTableFilter;
+ }
+
+ /**
+ * Set the filter predicate to identify columns within a table to drop. The predicate accepts the table- and column
+ * name. Returning {@code true} will delete the column; {@code false} retains the column.
+ *
+ * @param dropColumnFilter must not be {@literal null}.
+ */
+ public void setDropColumnFilter(BiPredicate<String, String> dropColumnFilter) {
+
+ Assert.notNull(dropColumnFilter, "Drop Column filter must not be null");
+
+ this.dropColumnFilter = dropColumnFilter;
+ }
+
+ /**
+ * Write a Liquibase ChangeSet containing all tables as initial ChangeSet.
+ *
+ * @param changeLogResource resource that ChangeSet will be written to (or append to an existing ChangeSet file). The
+ * resource must resolve to a valid {@link Resource#getFile()}.
+ * @throws IOException in case of I/O errors.
+ */
+ public void writeChangeSet(Resource changeLogResource) throws IOException {
+ writeChangeSet(changeLogResource, ChangeSetMetadata.create());
+ }
+
+ /**
+ * Write a Liquibase ChangeSet using a {@link Database} to identify the differences between mapped entities and the
+ * existing database.
+ *
+ * @param changeLogResource resource that ChangeSet will be written to (or append to an existing ChangeSet file). The
+ * resource must resolve to a valid {@link Resource#getFile()}.
+ * @param database database to identify the differences.
+ * @throws LiquibaseException
+ * @throws IOException in case of I/O errors.
+ */
+ public void writeChangeSet(Resource changeLogResource, Database database) throws IOException, LiquibaseException {
+ writeChangeSet(changeLogResource, ChangeSetMetadata.create(), database);
+ }
+
+ /**
+ * Write a Liquibase ChangeSet containing all tables as initial ChangeSet.
+ *
+ * @param changeLogResource resource that ChangeSet will be written to (or append to an existing ChangeSet file).
+ * @param metadata the ChangeSet metadata.
+ * @throws IOException in case of I/O errors.
+ */
+ public void writeChangeSet(Resource changeLogResource, ChangeSetMetadata metadata) throws IOException {
+
+ DatabaseChangeLog databaseChangeLog = getDatabaseChangeLog(changeLogResource.getFile(), null);
+ ChangeSet changeSet = createChangeSet(metadata, databaseChangeLog);
+
+ writeChangeSet(databaseChangeLog, changeSet, changeLogResource.getFile());
+ }
+
+ /**
+ * Write a Liquibase ChangeSet using a {@link Database} to identify the differences between mapped entities and the
+ * existing database.
+ *
+ * @param changeLogResource resource that ChangeSet will be written to (or append to an existing ChangeSet file).
+ * @param metadata the ChangeSet metadata.
+ * @param database database to identify the differences.
+ * @throws LiquibaseException
+ * @throws IOException in case of I/O errors.
+ */
+ public void writeChangeSet(Resource changeLogResource, ChangeSetMetadata metadata, Database database)
+ throws LiquibaseException, IOException {
+
+ DatabaseChangeLog databaseChangeLog = getDatabaseChangeLog(changeLogResource.getFile(), database);
+ ChangeSet changeSet = createChangeSet(metadata, database, databaseChangeLog);
+
+ writeChangeSet(databaseChangeLog, changeSet, changeLogResource.getFile());
+ }
+
+ /**
+ * Creates an initial ChangeSet.
+ *
+ * @param metadata must not be {@literal null}.
+ * @param databaseChangeLog must not be {@literal null}.
+ * @return the initial ChangeSet.
+ */
+ protected ChangeSet createChangeSet(ChangeSetMetadata metadata, DatabaseChangeLog databaseChangeLog) {
+ return createChangeSet(metadata, initial(), databaseChangeLog);
+ }
+
+ /**
+ * Creates a diff ChangeSet by comparing {@link Database} with {@link MappingContext mapped entities}.
+ *
+ * @param metadata must not be {@literal null}.
+ * @param databaseChangeLog must not be {@literal null}.
+ * @return the diff ChangeSet.
+ */
+ protected ChangeSet createChangeSet(ChangeSetMetadata metadata, Database database,
+ DatabaseChangeLog databaseChangeLog) throws LiquibaseException {
+ return createChangeSet(metadata, differenceOf(database), databaseChangeLog);
+ }
+
+ private ChangeSet createChangeSet(ChangeSetMetadata metadata, SchemaDiff difference,
+ DatabaseChangeLog databaseChangeLog) {
+
+ ChangeSet changeSet = new ChangeSet(metadata.getId(), metadata.getAuthor(), false, false, "", "", "",
+ databaseChangeLog);
+
+ generateTableAdditionsDeletions(changeSet, difference);
+ generateTableModifications(changeSet, difference);
+ return changeSet;
+ }
+
+ private SchemaDiff initial() {
+
+ Tables mappedEntities = Tables.from(mappingContext.getPersistentEntities().stream().filter(schemaFilter),
+ sqlTypeMapping, null);
+ return SchemaDiff.diff(mappedEntities, Tables.empty(), nameComparator);
+ }
+
+ private SchemaDiff differenceOf(Database database) throws LiquibaseException {
+
+ Tables existingTables = getLiquibaseModel(database);
+ Tables mappedEntities = Tables.from(mappingContext.getPersistentEntities().stream().filter(schemaFilter),
+ sqlTypeMapping, database.getDefaultCatalogName());
+
+ return SchemaDiff.diff(mappedEntities, existingTables, nameComparator);
+ }
+
+ private DatabaseChangeLog getDatabaseChangeLog(File changeLogFile, @Nullable Database database) throws IOException {
+
+ ChangeLogParameters parameters = database != null ? new ChangeLogParameters(database) : new ChangeLogParameters();
+
+ if (!changeLogFile.exists()) {
+ DatabaseChangeLog databaseChangeLog = new DatabaseChangeLog(changeLogFile.getName());
+ if (database != null) {
+ databaseChangeLog.setChangeLogParameters(parameters);
+ }
+ return databaseChangeLog;
+ }
+
+ try {
+
+ File parentDirectory = changeLogFile.getParentFile();
+ if (parentDirectory == null) {
+ parentDirectory = new File("./");
+ }
+
+ DirectoryResourceAccessor resourceAccessor = new DirectoryResourceAccessor(parentDirectory);
+ return changeLogParser.parse(changeLogFile.getName(), parameters, resourceAccessor);
+ } catch (ChangeLogParseException ex) {
+ throw new IOException(ex);
+ }
+ }
+
+ private void generateTableAdditionsDeletions(ChangeSet changeSet, SchemaDiff difference) {
+
+ for (Table table : difference.tableAdditions()) {
+ CreateTableChange newTable = changeTable(table);
+ changeSet.addChange(newTable);
+ }
+
+ for (Table table : difference.tableDeletions()) {
+ // Do not delete/drop table if it is an external application table
+ if (dropTableFilter.test(table.name())) {
+ changeSet.addChange(dropTable(table));
+ }
+ }
+ }
+
+ private void generateTableModifications(ChangeSet changeSet, SchemaDiff difference) {
+
+ for (TableDiff table : difference.tableDiffs()) {
+
+ if (!table.columnsToAdd().isEmpty()) {
+ changeSet.addChange(addColumns(table));
+ }
+
+ List<Column> deletedColumns = getColumnsToDrop(table);
+
+ if (!deletedColumns.isEmpty()) {
+ changeSet.addChange(dropColumns(table, deletedColumns));
+ }
+ }
+ }
+
+ private List<Column> getColumnsToDrop(TableDiff table) {
+
+ List<Column> deletedColumns = new ArrayList<>();
+ for (Column column : table.columnsToDrop()) {
+
+ if (dropColumnFilter.test(table.table().name(), column.name())) {
+ deletedColumns.add(column);
+ }
+ }
+ return deletedColumns;
+ }
+
+ private void writeChangeSet(DatabaseChangeLog databaseChangeLog, ChangeSet changeSet, File changeLogFile)
+ throws IOException {
+
+ List<ChangeLogChild> changes = new ArrayList<>(databaseChangeLog.getChangeSets());
+ changes.add(changeSet);
+
+ try (FileOutputStream fos = new FileOutputStream(changeLogFile)) {
+ changeLogSerializer.write(changes, fos);
+ }
+ }
+
+ private Tables getLiquibaseModel(Database targetDatabase) throws LiquibaseException {
+
+ CatalogAndSchema[] schemas = new CatalogAndSchema[] { targetDatabase.getDefaultSchema() };
+ SnapshotControl snapshotControl = new SnapshotControl(targetDatabase);
+
+ DatabaseSnapshot snapshot = SnapshotGeneratorFactory.getInstance().createSnapshot(schemas, targetDatabase,
+ snapshotControl);
+ Set<liquibase.structure.core.Table> tables = snapshot.get(liquibase.structure.core.Table.class);
+ List<Table> existingTables = new ArrayList<>(tables.size());
+
+ for (liquibase.structure.core.Table table : tables) {
+
+ // Exclude internal Liquibase tables from comparison
+ if (isLiquibaseTable.test(table.getName())) {
+ continue;
+ }
+
+ Table tableModel = new Table(table.getSchema().getCatalogName(), table.getName());
+
+ List<liquibase.structure.core.Column> columns = table.getColumns();
+
+ for (liquibase.structure.core.Column column : columns) {
+
+ String type = column.getType().toString();
+ boolean nullable = column.isNullable();
+ Column columnModel = new Column(column.getName(), type, nullable, false);
+
+ tableModel.columns().add(columnModel);
+ }
+
+ existingTables.add(tableModel);
+ }
+
+ return new Tables(existingTables);
+ }
+
+ private static AddColumnChange addColumns(TableDiff table) {
+
+ AddColumnChange addColumnChange = new AddColumnChange();
+ addColumnChange.setSchemaName(table.table().schema());
+ addColumnChange.setTableName(table.table().name());
+
+ for (Column column : table.columnsToAdd()) {
+ AddColumnConfig addColumn = createAddColumnChange(column);
+ addColumnChange.addColumn(addColumn);
+ }
+ return addColumnChange;
+ }
+
+ private static AddColumnConfig createAddColumnChange(Column column) {
+
+ AddColumnConfig config = new AddColumnConfig();
+ config.setName(column.name());
+ config.setType(column.type());
+
+ if (column.identity()) {
+ config.setAutoIncrement(true);
+ }
+
+ return config;
+ }
+
+ private static DropColumnChange dropColumns(TableDiff table, Collection<Column> deletedColumns) {
+
+ DropColumnChange dropColumnChange = new DropColumnChange();
+ dropColumnChange.setSchemaName(table.table().schema());
+ dropColumnChange.setTableName(table.table().name());
+
+ List<ColumnConfig> dropColumns = new ArrayList<>();
+
+ for (Column column : deletedColumns) {
+ ColumnConfig config = new ColumnConfig();
+ config.setName(column.name());
+ dropColumns.add(config);
+ }
+
+ dropColumnChange.setColumns(dropColumns);
+ return dropColumnChange;
+ }
+
+ private static CreateTableChange changeTable(Table table) {
+
+ CreateTableChange change = new CreateTableChange();
+ change.setSchemaName(table.schema());
+ change.setTableName(table.name());
+
+ for (Column column : table.columns()) {
+
+ ColumnConfig columnConfig = new ColumnConfig();
+ columnConfig.setName(column.name());
+ columnConfig.setType(column.type());
+
+ ConstraintsConfig constraints = new ConstraintsConfig();
+ constraints.setNullable(column.nullable());
+
+ if (column.identity()) {
+
+ columnConfig.setAutoIncrement(true);
+ constraints.setPrimaryKey(true);
+ }
+
+ columnConfig.setConstraints(constraints);
+ change.addColumn(columnConfig);
+ }
+
+ return change;
+ }
+
+ private static DropTableChange dropTable(Table table) {
+
+ DropTableChange change = new DropTableChange();
+ change.setSchemaName(table.schema());
+ change.setTableName(table.name());
+ change.setCascadeConstraints(true);
+
+ return change;
+ }
+
+ /**
+ * Metadata for a ChangeSet.
+ */
+ interface ChangeSetMetadata {
+
+ /**
+ * Creates a new default {@link ChangeSetMetadata} using the {@link #DEFAULT_AUTHOR default author}.
+ *
+ * @return a new default {@link ChangeSetMetadata} using the {@link #DEFAULT_AUTHOR default author}.
+ */
+ static ChangeSetMetadata create() {
+ return ofAuthor(LiquibaseChangeSetWriter.DEFAULT_AUTHOR);
+ }
+
+ /**
+ * Creates a new default {@link ChangeSetMetadata} using a generated {@code identifier} and provided {@code author}.
+ *
+ * @return a new default {@link ChangeSetMetadata} using a generated {@code identifier} and provided {@code author}.
+ */
+ static ChangeSetMetadata ofAuthor(String author) {
+ return of(Long.toString(System.currentTimeMillis()), author);
+ }
+
+ /**
+ * Creates a new default {@link ChangeSetMetadata} using the provided {@code identifier} and {@code author}.
+ *
+ * @return a new default {@link ChangeSetMetadata} using the provided {@code identifier} and {@code author}.
+ */
+ static ChangeSetMetadata of(String identifier, String author) {
+ return new DefaultChangeSetMetadata(identifier, author);
+ }
+
+ /**
+ * @return the ChangeSet identifier.
+ */
+ String getId();
+
+ /**
+ * @return the ChangeSet author.
+ */
+ String getAuthor();
+ }
+
+ private record DefaultChangeSetMetadata(String id, String author) implements ChangeSetMetadata {
+
+ private DefaultChangeSetMetadata {
+
+ Assert.hasText(id, "ChangeSet identifier must not be empty or null");
+ Assert.hasText(author, "Author must not be empty or null");
+ }
+
+ @Override
+ public String getId() {
+ return id();
+ }
+
+ @Override
+ public String getAuthor() {
+ return author();
+ }
+ }
+}
diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/SchemaDiff.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/SchemaDiff.java
new file mode 100644
index 0000000000..079c40dde1
--- /dev/null
+++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/SchemaDiff.java
@@ -0,0 +1,147 @@
+/*
+ * Copyright 2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.jdbc.core.mapping.schema;
+
+import java.util.ArrayList;
+import java.util.Comparator;
+import java.util.List;
+import java.util.Map;
+import java.util.SortedMap;
+import java.util.TreeMap;
+import java.util.function.Function;
+import java.util.function.Predicate;
+
+/**
+ * This class is created to return the difference between a source and target {@link Tables} The difference consists of
+ * Table Additions, Deletions, and Modified Tables (i.e. table exists in both source and target - but has columns to add
+ * or delete)
+ *
+ * @author Kurt Niemi
+ * @since 3.2
+ */
+record SchemaDiff(List<Table> tableAdditions, List<Table> tableDeletions, List<TableDiff> tableDiffs) {
+
+ public static SchemaDiff diff(Tables mappedEntities, Tables existingTables, Comparator<String> nameComparator) {
+
+ Map<String, Table> existingIndex = createMapping(existingTables.tables(), SchemaDiff::getKey, nameComparator);
+ Map<String, Table> mappedIndex = createMapping(mappedEntities.tables(), SchemaDiff::getKey, nameComparator);
+
+ List<Table> toCreate = getTablesToCreate(mappedEntities, withTableKey(existingIndex::containsKey));
+ List<Table> toDrop = getTablesToDrop(existingTables, withTableKey(mappedIndex::containsKey));
+
+ List<TableDiff> tableDiffs = diffTable(mappedEntities, existingIndex, withTableKey(existingIndex::containsKey),
+ nameComparator);
+
+ return new SchemaDiff(toCreate, toDrop, tableDiffs);
+ }
+
+ private static List<Table> getTablesToCreate(Tables mappedEntities, Predicate<Table> excludeTable) {
+
+ List<Table> toCreate = new ArrayList<>(mappedEntities.tables().size());
+
+ for (Table table : mappedEntities.tables()) {
+ if (!excludeTable.test(table)) {
+ toCreate.add(table);
+ }
+ }
+
+ return toCreate;
+ }
+
+ private static List<Table> getTablesToDrop(Tables existingTables, Predicate<Table> excludeTable) {
+
+ List<Table> toDrop = new ArrayList<>(existingTables.tables().size());
+
+ for (Table table : existingTables.tables()) {
+ if (!excludeTable.test(table)) {
+ toDrop.add(table);
+ }
+ }
+
+ return toDrop;
+ }
+
+ private static List<TableDiff> diffTable(Tables mappedEntities, Map<String, Table> existingIndex,
+ Predicate<Table> includeTable, Comparator<String> nameComparator) {
+
+ List<TableDiff> tableDiffs = new ArrayList<>();
+
+ for (Table mappedEntity : mappedEntities.tables()) {
+
+ if (!includeTable.test(mappedEntity)) {
+ continue;
+ }
+
+ // TODO: How to handle changed columns (type?)
+
+ Table existingTable = existingIndex.get(getKey(mappedEntity));
+ TableDiff tableDiff = new TableDiff(mappedEntity);
+
+ Map<String, Column> mappedColumns = createMapping(mappedEntity.columns(), Column::name, nameComparator);
+ mappedEntity.keyColumns().forEach(it -> mappedColumns.put(it.name(), it));
+
+ Map<String, Column> existingColumns = createMapping(existingTable.columns(), Column::name, nameComparator);
+ existingTable.keyColumns().forEach(it -> existingColumns.put(it.name(), it));
+
+ // Identify deleted columns
+ Map<String, Column> toDelete = new TreeMap<>(nameComparator);
+ toDelete.putAll(existingColumns);
+ mappedColumns.keySet().forEach(toDelete::remove);
+
+ tableDiff.columnsToDrop().addAll(toDelete.values());
+
+ // Identify added columns
+ Map<String, Column> addedColumns = new TreeMap<>(nameComparator);
+ addedColumns.putAll(mappedColumns);
+
+ existingColumns.keySet().forEach(addedColumns::remove);
+
+ // Add columns in order. This order can interleave with existing columns.
+ for (Column column : mappedEntity.keyColumns()) {
+ if (addedColumns.containsKey(column.name())) {
+ tableDiff.columnsToAdd().add(column);
+ }
+ }
+
+ for (Column column : mappedEntity.columns()) {
+ if (addedColumns.containsKey(column.name())) {
+ tableDiff.columnsToAdd().add(column);
+ }
+ }
+
+ tableDiffs.add(tableDiff);
+ }
+
+ return tableDiffs;
+ }
+
+ private static <T> SortedMap<String, T> createMapping(List<T> items, Function<T, String> keyFunction,
+ Comparator<String> nameComparator) {
+
+ SortedMap mapping = new TreeMap<>(nameComparator);
+ items.forEach(it -> mapping.put(keyFunction.apply(it), it));
+ return mapping;
+ }
+
+ private static String getKey(Table table) {
+ return table.schema() + "." + table.name();
+ }
+
+ private static Predicate<Table> withTableKey(Predicate<String> predicate) {
+ return it -> predicate.test(getKey(it));
+ }
+
+}
diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/SqlTypeMapping.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/SqlTypeMapping.java
new file mode 100644
index 0000000000..d66f932ca4
--- /dev/null
+++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/SqlTypeMapping.java
@@ -0,0 +1,101 @@
+/*
+ * Copyright 2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.jdbc.core.mapping.schema;
+
+import org.springframework.data.relational.core.mapping.RelationalPersistentProperty;
+import org.springframework.lang.Nullable;
+import org.springframework.util.Assert;
+import org.springframework.util.ObjectUtils;
+
+/**
+ * Strategy interface for mapping a {@link RelationalPersistentProperty} to a Database type.
+ *
+ * @author Kurt Niemi
+ * @author Mark Paluch
+ * @since 3.2
+ */
+@FunctionalInterface
+public interface SqlTypeMapping {
+
+ /**
+ * Determines a column type for a persistent property.
+ *
+ * @param property the property for which the type should be determined.
+ * @return the SQL type to use, such as {@code VARCHAR} or {@code NUMERIC}. Can be {@literal null} if the strategy
+ * cannot provide a column type.
+ */
+ @Nullable
+ String getColumnType(RelationalPersistentProperty property);
+
+ /**
+ * Returns the required column type for a persistent property or throws {@link IllegalArgumentException} if the type
+ * cannot be determined.
+ *
+ * @param property the property for which the type should be determined.
+ * @return the SQL type to use, such as {@code VARCHAR} or {@code NUMERIC}. Can be {@literal null} if the strategy
+ * cannot provide a column type.
+ * @throws IllegalArgumentException if the column type cannot be determined.
+ */
+ default String getRequiredColumnType(RelationalPersistentProperty property) {
+
+ String columnType = getColumnType(property);
+
+ if (ObjectUtils.isEmpty(columnType)) {
+ throw new IllegalArgumentException(String.format("Cannot determined required column type for %s", property));
+ }
+
+ return columnType;
+ }
+
+ /**
+ * Determine whether a column is nullable.
+ *
+ * @param property the property for which nullability should be determined.
+ * @return whether the property is nullable.
+ */
+ default boolean isNullable(RelationalPersistentProperty property) {
+ return !property.getActualType().isPrimitive();
+ }
+
+ /**
+ * Returns a composed {@link SqlTypeMapping} that represents a fallback of this type mapping and another. When
+ * evaluating the composed predicate, if this mapping does not contain a column mapping (i.e.
+ * {@link #getColumnType(RelationalPersistentProperty)} returns {@literal null}), then the {@code other} mapping is
+ * evaluated.
+ *
+ * Any exceptions thrown during evaluation of either type mapping are relayed to the caller; if evaluation of this
+ * type mapping throws an exception, the {@code other} predicate will not be evaluated.
+ *
+ * @param other a type mapping that will be used as fallback, must not be {@literal null}.
+ * @return a composed type mapping
+ */
+ default SqlTypeMapping and(SqlTypeMapping other) {
+
+ Assert.notNull(other, "Other SqlTypeMapping must not be null");
+
+ return property -> {
+
+ String columnType = getColumnType(property);
+
+ if (ObjectUtils.isEmpty(columnType)) {
+ return other.getColumnType(property);
+ }
+
+ return columnType;
+ };
+ }
+
+}
diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/Table.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/Table.java
new file mode 100644
index 0000000000..43b465d9a7
--- /dev/null
+++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/Table.java
@@ -0,0 +1,65 @@
+/*
+ * Copyright 2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.jdbc.core.mapping.schema;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.springframework.lang.Nullable;
+import org.springframework.util.ObjectUtils;
+
+/**
+ * Models a Table for generating SQL for Schema generation.
+ *
+ * @author Kurt Niemi
+ * @since 3.2
+ */
+record Table(@Nullable String schema, String name, List<Column> keyColumns, List<Column> columns) {
+
+ public Table(@Nullable String schema, String name) {
+ this(schema, name, new ArrayList<>(), new ArrayList<>());
+ }
+
+ public Table(String name) {
+ this(null, name);
+ }
+
+ @Override
+ public boolean equals(Object o) {
+
+ if (this == o) {
+ return true;
+ }
+
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
+
+ Table table = (Table) o;
+ return ObjectUtils.nullSafeEquals(schema, table.schema) && ObjectUtils.nullSafeEquals(name, table.name);
+ }
+
+ @Override
+ public int hashCode() {
+
+ int result = 17;
+
+ result += ObjectUtils.nullSafeHashCode(this.schema);
+ result += ObjectUtils.nullSafeHashCode(this.name);
+
+ return result;
+ }
+}
diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/TableDiff.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/TableDiff.java
new file mode 100644
index 0000000000..5ff5e01e71
--- /dev/null
+++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/TableDiff.java
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.jdbc.core.mapping.schema;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Used to keep track of columns that should be added or deleted, when performing a difference between a source and
+ * target {@link Tables}.
+ *
+ * @author Kurt Niemi
+ * @since 3.2
+ */
+record TableDiff(Table table, List<Column> columnsToAdd, List<Column> columnsToDrop) {
+
+ public TableDiff(Table table) {
+ this(table, new ArrayList<>(), new ArrayList<>());
+ }
+
+}
diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/Tables.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/Tables.java
new file mode 100644
index 0000000000..12a35ce535
--- /dev/null
+++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/Tables.java
@@ -0,0 +1,79 @@
+/*
+ * Copyright 2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.jdbc.core.mapping.schema;
+
+import java.util.Collections;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+import org.springframework.data.annotation.Id;
+import org.springframework.data.relational.core.mapping.RelationalMappingContext;
+import org.springframework.data.relational.core.mapping.RelationalPersistentEntity;
+import org.springframework.data.relational.core.mapping.RelationalPersistentProperty;
+import org.springframework.lang.Nullable;
+
+/**
+ * Model class that contains Table/Column information that can be used to generate SQL for Schema generation.
+ *
+ * @author Kurt Niemi
+ * @since 3.2
+ */
+record Tables(List<Table> tables) {
+
+ public static Tables from(RelationalMappingContext context) {
+ return from(context.getPersistentEntities().stream(), new DefaultSqlTypeMapping(), null);
+ }
+
+ // TODO: Add support (i.e. create tickets) to support mapped collections, entities, embedded properties, and aggregate
+ // references.
+
+ public static Tables from(Stream<? extends RelationalPersistentEntity<?>> persistentEntities,
+ SqlTypeMapping sqlTypeMapping, @Nullable String defaultSchema) {
+
+ List<Table> tables = persistentEntities
+ .filter(it -> it.isAnnotationPresent(org.springframework.data.relational.core.mapping.Table.class)) //
+ .map(entity -> {
+
+ Table table = new Table(defaultSchema, entity.getTableName().getReference());
+
+ Set<RelationalPersistentProperty> identifierColumns = new LinkedHashSet<>();
+ entity.getPersistentProperties(Id.class).forEach(identifierColumns::add);
+
+ for (RelationalPersistentProperty property : entity) {
+
+ if (property.isEntity() && !property.isEmbedded()) {
+ continue;
+ }
+
+ String columnType = sqlTypeMapping.getRequiredColumnType(property);
+
+ Column column = new Column(property.getColumnName().getReference(), columnType,
+ sqlTypeMapping.isNullable(property), identifierColumns.contains(property));
+ table.columns().add(column);
+ }
+ return table;
+ }).collect(Collectors.toList());
+
+ return new Tables(tables);
+ }
+
+ public static Tables empty() {
+ return new Tables(Collections.emptyList());
+ }
+}
diff --git a/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/package-info.java b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/package-info.java
new file mode 100644
index 0000000000..2173c50d6f
--- /dev/null
+++ b/spring-data-jdbc/src/main/java/org/springframework/data/jdbc/core/mapping/schema/package-info.java
@@ -0,0 +1,7 @@
+/**
+ * Schema creation and schema update integration with Liquibase.
+ */
+@NonNullApi
+package org.springframework.data.jdbc.core.mapping.schema;
+
+import org.springframework.lang.NonNullApi;
diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/mapping/schema/LiquibaseChangeSetWriterIntegrationTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/mapping/schema/LiquibaseChangeSetWriterIntegrationTests.java
new file mode 100644
index 0000000000..d27e59a37e
--- /dev/null
+++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/mapping/schema/LiquibaseChangeSetWriterIntegrationTests.java
@@ -0,0 +1,249 @@
+/*
+ * Copyright 2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.jdbc.core.mapping.schema;
+
+import static org.assertj.core.api.Assertions.*;
+
+import liquibase.change.AddColumnConfig;
+import liquibase.change.ColumnConfig;
+import liquibase.change.core.AddColumnChange;
+import liquibase.change.core.DropColumnChange;
+import liquibase.change.core.DropTableChange;
+import liquibase.changelog.ChangeSet;
+import liquibase.changelog.DatabaseChangeLog;
+import liquibase.database.core.H2Database;
+import liquibase.database.jvm.JdbcConnection;
+
+import java.io.File;
+import java.io.InputStream;
+import java.nio.file.Files;
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.util.Set;
+
+import org.assertj.core.api.ThrowingConsumer;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.io.TempDir;
+import org.springframework.core.io.ClassRelativeResourceLoader;
+import org.springframework.core.io.FileSystemResource;
+import org.springframework.data.annotation.Id;
+import org.springframework.data.jdbc.core.mapping.schema.LiquibaseChangeSetWriter.ChangeSetMetadata;
+import org.springframework.data.relational.core.mapping.RelationalMappingContext;
+import org.springframework.data.relational.core.mapping.Table;
+import org.springframework.data.util.Predicates;
+import org.springframework.jdbc.datasource.embedded.EmbeddedDatabase;
+import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder;
+import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType;
+
+/**
+ * Integration tests for {@link LiquibaseChangeSetWriter}.
+ *
+ * @author Mark Paluch
+ */
+class LiquibaseChangeSetWriterIntegrationTests {
+
+ @Test // GH-1430
+ void shouldRemoveUnusedTable() {
+
+ withEmbeddedDatabase("unused-table.sql", c -> {
+
+ H2Database h2Database = new H2Database();
+ h2Database.setConnection(new JdbcConnection(c));
+
+ LiquibaseChangeSetWriter writer = new LiquibaseChangeSetWriter(new RelationalMappingContext());
+ writer.setDropTableFilter(Predicates.isTrue());
+
+ ChangeSet changeSet = writer.createChangeSet(ChangeSetMetadata.create(), h2Database, new DatabaseChangeLog());
+
+ assertThat(changeSet.getChanges()).hasSize(1);
+ assertThat(changeSet.getChanges().get(0)).isInstanceOf(DropTableChange.class);
+
+ DropTableChange drop = (DropTableChange) changeSet.getChanges().get(0);
+ assertThat(drop.getTableName()).isEqualToIgnoringCase("DELETE_ME");
+ });
+ }
+
+ @Test // GH-1430
+ void shouldNotDropTablesByDefault() {
+
+ withEmbeddedDatabase("unused-table.sql", c -> {
+
+ H2Database h2Database = new H2Database();
+ h2Database.setConnection(new JdbcConnection(c));
+
+ LiquibaseChangeSetWriter writer = new LiquibaseChangeSetWriter(new RelationalMappingContext());
+
+ ChangeSet changeSet = writer.createChangeSet(ChangeSetMetadata.create(), h2Database, new DatabaseChangeLog());
+
+ assertThat(changeSet.getChanges()).isEmpty();
+ });
+ }
+
+ @Test // GH-1430
+ void shouldAddColumnToTable() {
+
+ withEmbeddedDatabase("person-with-id-and-name.sql", c -> {
+
+ H2Database h2Database = new H2Database();
+ h2Database.setConnection(new JdbcConnection(c));
+
+ LiquibaseChangeSetWriter writer = new LiquibaseChangeSetWriter(contextOf(Person.class));
+
+ ChangeSet changeSet = writer.createChangeSet(ChangeSetMetadata.create(), h2Database, new DatabaseChangeLog());
+
+ assertThat(changeSet.getChanges()).hasSize(1);
+ assertThat(changeSet.getChanges().get(0)).isInstanceOf(AddColumnChange.class);
+
+ AddColumnChange addColumns = (AddColumnChange) changeSet.getChanges().get(0);
+ assertThat(addColumns.getTableName()).isEqualToIgnoringCase("PERSON");
+ assertThat(addColumns.getColumns()).hasSize(1);
+
+ AddColumnConfig addColumn = addColumns.getColumns().get(0);
+ assertThat(addColumn.getName()).isEqualTo("last_name");
+ assertThat(addColumn.getType()).isEqualTo("VARCHAR(255 BYTE)");
+ });
+ }
+
+ @Test // GH-1430
+ void shouldRemoveColumnFromTable() {
+
+ withEmbeddedDatabase("person-with-id-and-name.sql", c -> {
+
+ H2Database h2Database = new H2Database();
+ h2Database.setConnection(new JdbcConnection(c));
+
+ LiquibaseChangeSetWriter writer = new LiquibaseChangeSetWriter(contextOf(DifferentPerson.class));
+ writer.setDropColumnFilter((s, s2) -> true);
+
+ ChangeSet changeSet = writer.createChangeSet(ChangeSetMetadata.create(), h2Database, new DatabaseChangeLog());
+
+ assertThat(changeSet.getChanges()).hasSize(2);
+ assertThat(changeSet.getChanges().get(0)).isInstanceOf(AddColumnChange.class);
+
+ AddColumnChange addColumns = (AddColumnChange) changeSet.getChanges().get(0);
+ assertThat(addColumns.getTableName()).isEqualToIgnoringCase("PERSON");
+ assertThat(addColumns.getColumns()).hasSize(2);
+ assertThat(addColumns.getColumns()).extracting(AddColumnConfig::getName).containsExactly("my_id", "hello");
+
+ DropColumnChange dropColumns = (DropColumnChange) changeSet.getChanges().get(1);
+ assertThat(dropColumns.getTableName()).isEqualToIgnoringCase("PERSON");
+ assertThat(dropColumns.getColumns()).hasSize(2);
+ assertThat(dropColumns.getColumns()).extracting(ColumnConfig::getName).map(String::toUpperCase).contains("ID",
+ "FIRST_NAME");
+ });
+ }
+
+ @Test // GH-1430
+ void doesNotRemoveColumnsByDefault() {
+
+ withEmbeddedDatabase("person-with-id-and-name.sql", c -> {
+
+ H2Database h2Database = new H2Database();
+ h2Database.setConnection(new JdbcConnection(c));
+
+ LiquibaseChangeSetWriter writer = new LiquibaseChangeSetWriter(contextOf(DifferentPerson.class));
+
+ ChangeSet changeSet = writer.createChangeSet(ChangeSetMetadata.create(), h2Database, new DatabaseChangeLog());
+
+ assertThat(changeSet.getChanges()).hasSize(1);
+ assertThat(changeSet.getChanges().get(0)).isInstanceOf(AddColumnChange.class);
+ });
+ }
+
+ @Test // GH-1430
+ void shouldCreateNewChangeLog(@TempDir File tempDir) {
+
+ withEmbeddedDatabase("person-with-id-and-name.sql", c -> {
+
+ File changelogYml = new File(tempDir, "changelog.yml");
+ H2Database h2Database = new H2Database();
+ h2Database.setConnection(new JdbcConnection(c));
+
+ LiquibaseChangeSetWriter writer = new LiquibaseChangeSetWriter(contextOf(DifferentPerson.class));
+ writer.writeChangeSet(new FileSystemResource(changelogYml));
+
+ assertThat(tempDir).isDirectoryContaining(it -> it.getName().equalsIgnoreCase("changelog.yml"));
+
+ assertThat(changelogYml).content().contains("author: Spring Data Relational").contains("name: hello");
+ });
+ }
+
+ @Test // GH-1430
+ void shouldAppendToChangeLog(@TempDir File tempDir) {
+
+ withEmbeddedDatabase("person-with-id-and-name.sql", c -> {
+
+ H2Database h2Database = new H2Database();
+ h2Database.setConnection(new JdbcConnection(c));
+
+ File changelogYml = new File(tempDir, "changelog.yml");
+ try (InputStream is = getClass().getResourceAsStream("changelog.yml")) {
+ Files.copy(is, changelogYml.toPath());
+ }
+
+ LiquibaseChangeSetWriter writer = new LiquibaseChangeSetWriter(contextOf(DifferentPerson.class));
+ writer.writeChangeSet(new FileSystemResource(new File(tempDir, "changelog.yml")));
+
+ assertThat(changelogYml).content().contains("author: Someone").contains("author: Spring Data Relational")
+ .contains("name: hello");
+ });
+ }
+
+ RelationalMappingContext contextOf(Class<?>... classes) {
+
+ RelationalMappingContext context = new RelationalMappingContext();
+ context.setInitialEntitySet(Set.of(classes));
+ context.afterPropertiesSet();
+ return context;
+ }
+
+ void withEmbeddedDatabase(String script, ThrowingConsumer<Connection> c) {
+
+ EmbeddedDatabase embeddedDatabase = new EmbeddedDatabaseBuilder(new ClassRelativeResourceLoader(getClass())) //
+ .generateUniqueName(true) //
+ .setType(EmbeddedDatabaseType.H2) //
+ .setScriptEncoding("UTF-8") //
+ .ignoreFailedDrops(true) //
+ .addScript(script) //
+ .build();
+
+ try {
+
+ try (Connection connection = embeddedDatabase.getConnection()) {
+ c.accept(connection);
+ }
+
+ } catch (SQLException e) {
+ throw new RuntimeException(e);
+ } finally {
+ embeddedDatabase.shutdown();
+ }
+ }
+
+ @Table
+ static class Person {
+ @Id int id;
+ String firstName;
+ String lastName;
+ }
+
+ @Table("person")
+ static class DifferentPerson {
+ @Id int my_id;
+ String hello;
+ }
+
+}
diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/mapping/schema/LiquibaseChangeSetWriterUnitTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/mapping/schema/LiquibaseChangeSetWriterUnitTests.java
new file mode 100644
index 0000000000..314bbea8f4
--- /dev/null
+++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/mapping/schema/LiquibaseChangeSetWriterUnitTests.java
@@ -0,0 +1,91 @@
+/*
+ * Copyright 2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.jdbc.core.mapping.schema;
+
+import static org.assertj.core.api.Assertions.*;
+
+import liquibase.change.ColumnConfig;
+import liquibase.change.core.CreateTableChange;
+import liquibase.changelog.ChangeSet;
+import liquibase.changelog.DatabaseChangeLog;
+
+import org.junit.jupiter.api.Test;
+import org.springframework.data.annotation.Id;
+import org.springframework.data.jdbc.core.mapping.schema.LiquibaseChangeSetWriter.ChangeSetMetadata;
+import org.springframework.data.relational.core.mapping.RelationalMappingContext;
+
+/**
+ * Unit tests for {@link LiquibaseChangeSetWriter}.
+ *
+ * @author Mark Paluch
+ */
+class LiquibaseChangeSetWriterUnitTests {
+
+ @Test // GH-1480
+ void newTableShouldCreateChangeSet() {
+
+ RelationalMappingContext context = new RelationalMappingContext();
+ context.getRequiredPersistentEntity(VariousTypes.class);
+
+ LiquibaseChangeSetWriter writer = new LiquibaseChangeSetWriter(context);
+
+ ChangeSet changeSet = writer.createChangeSet(ChangeSetMetadata.create(), new DatabaseChangeLog());
+
+ CreateTableChange createTable = (CreateTableChange) changeSet.getChanges().get(0);
+
+ assertThat(createTable.getColumns()).extracting(ColumnConfig::getName).containsSequence("id",
+ "luke_i_am_your_father", "dark_side", "floater");
+ assertThat(createTable.getColumns()).extracting(ColumnConfig::getType).containsSequence("BIGINT",
+ "VARCHAR(255 BYTE)", "TINYINT", "FLOAT");
+
+ ColumnConfig id = createTable.getColumns().get(0);
+ assertThat(id.getConstraints().isNullable()).isFalse();
+ }
+
+ @Test // GH-1480
+ void shouldApplySchemaFilter() {
+
+ RelationalMappingContext context = new RelationalMappingContext();
+ context.getRequiredPersistentEntity(VariousTypes.class);
+ context.getRequiredPersistentEntity(OtherTable.class);
+
+ LiquibaseChangeSetWriter writer = new LiquibaseChangeSetWriter(context);
+ writer.setSchemaFilter(it -> it.getName().contains("OtherTable"));
+
+ ChangeSet changeSet = writer.createChangeSet(ChangeSetMetadata.create(), new DatabaseChangeLog());
+
+ assertThat(changeSet.getChanges()).hasSize(1);
+ CreateTableChange createTable = (CreateTableChange) changeSet.getChanges().get(0);
+
+ assertThat(createTable.getTableName()).isEqualTo("other_table");
+ }
+
+ @org.springframework.data.relational.core.mapping.Table
+ static class VariousTypes {
+ @Id long id;
+ String lukeIAmYourFather;
+ Boolean darkSide;
+ Float floater;
+ Double doubleClass;
+ Integer integerClass;
+ }
+
+ @org.springframework.data.relational.core.mapping.Table
+ static class OtherTable {
+ @Id long id;
+ }
+
+}
diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/mapping/schema/SchemaDiffUnitTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/mapping/schema/SchemaDiffUnitTests.java
new file mode 100644
index 0000000000..f44372da22
--- /dev/null
+++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/mapping/schema/SchemaDiffUnitTests.java
@@ -0,0 +1,90 @@
+/*
+ * Copyright 2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.jdbc.core.mapping.schema;
+
+import static org.assertj.core.api.Assertions.*;
+
+import java.text.Collator;
+import java.util.Locale;
+
+import org.junit.jupiter.api.Test;
+import org.springframework.data.relational.core.mapping.RelationalMappingContext;
+
+/**
+ * Unit tests for {@link SchemaDiff}.
+ *
+ * @author Kurt Niemi
+ * @author Mark Paluch
+ */
+class SchemaDiffUnitTests {
+
+ @Test
+ void testDiffSchema() {
+
+ RelationalMappingContext context = new RelationalMappingContext();
+ context.getRequiredPersistentEntity(SchemaDiffUnitTests.Table1.class);
+ context.getRequiredPersistentEntity(SchemaDiffUnitTests.Table2.class);
+
+ Tables mappedEntities = Tables.from(context);
+ Tables existingTables = Tables.from(context);
+
+ // Table table1 does not exist on the database yet.
+ existingTables.tables().remove(new Table("table1"));
+
+ // Add column to table2
+ Column newColumn = new Column("newcol", "VARCHAR(255)");
+ Table table2 = mappedEntities.tables().get(mappedEntities.tables().indexOf(new Table("table2")));
+ table2.columns().add(newColumn);
+
+ // This should be deleted
+ Table delete_me = new Table(null, "delete_me");
+ delete_me.columns().add(newColumn);
+ existingTables.tables().add(delete_me);
+
+ SchemaDiff diff = SchemaDiff.diff(mappedEntities, existingTables, Collator.getInstance(Locale.ROOT)::compare);
+
+ // Verify that newtable is an added table in the diff
+ assertThat(diff.tableAdditions()).isNotEmpty();
+ assertThat(diff.tableAdditions()).extracting(Table::name).containsOnly("table1");
+
+ assertThat(diff.tableDeletions()).isNotEmpty();
+ assertThat(diff.tableDeletions()).extracting(Table::name).containsOnly("delete_me");
+
+ assertThat(diff.tableDiffs()).hasSize(1);
+ assertThat(diff.tableDiffs()).extracting(it -> it.table().name()).containsOnly("table2");
+ assertThat(diff.tableDiffs().get(0).columnsToAdd()).contains(newColumn);
+ assertThat(diff.tableDiffs().get(0).columnsToDrop()).isEmpty();
+ }
+
+ // Test table classes for performing schema diff
+ @org.springframework.data.relational.core.mapping.Table
+ static class Table1 {
+ String force;
+ String be;
+ String with;
+ String you;
+ }
+
+ @org.springframework.data.relational.core.mapping.Table
+ static class Table2 {
+ String lukeIAmYourFather;
+ Boolean darkSide;
+ Float floater;
+ Double doubleClass;
+ Integer integerClass;
+ }
+
+}
diff --git a/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/mapping/schema/SqlTypeMappingUnitTests.java b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/mapping/schema/SqlTypeMappingUnitTests.java
new file mode 100644
index 0000000000..600fcd53dd
--- /dev/null
+++ b/spring-data-jdbc/src/test/java/org/springframework/data/jdbc/core/mapping/schema/SqlTypeMappingUnitTests.java
@@ -0,0 +1,68 @@
+/*
+ * Copyright 2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.jdbc.core.mapping.schema;
+
+import static org.assertj.core.api.Assertions.*;
+import static org.mockito.Mockito.*;
+
+import java.nio.charset.Charset;
+import java.time.Duration;
+import java.time.ZoneId;
+
+import org.junit.jupiter.api.Test;
+import org.springframework.data.relational.core.mapping.RelationalPersistentProperty;
+
+/**
+ * Unit tests for {@link SqlTypeMapping}.
+ *
+ * @author Mark Paluch
+ */
+class SqlTypeMappingUnitTests {
+
+ SqlTypeMapping typeMapping = new DefaultSqlTypeMapping() //
+ .and(property -> property.getActualType().equals(ZoneId.class) ? "ZONEID" : null)
+ .and(property -> property.getActualType().equals(Duration.class) ? "INTERVAL" : null);
+
+ @Test // GH-1480
+ void shouldComposeTypeMapping() {
+
+ RelationalPersistentProperty p = mock(RelationalPersistentProperty.class);
+ doReturn(String.class).when(p).getActualType();
+
+ assertThat(typeMapping.getColumnType(p)).isEqualTo("VARCHAR(255 BYTE)");
+ assertThat(typeMapping.getRequiredColumnType(p)).isEqualTo("VARCHAR(255 BYTE)");
+ }
+
+ @Test // GH-1480
+ void shouldDelegateToCompositeTypeMapping() {
+
+ RelationalPersistentProperty p = mock(RelationalPersistentProperty.class);
+ doReturn(Duration.class).when(p).getActualType();
+
+ assertThat(typeMapping.getColumnType(p)).isEqualTo("INTERVAL");
+ assertThat(typeMapping.getRequiredColumnType(p)).isEqualTo("INTERVAL");
+ }
+
+ @Test // GH-1480
+ void shouldPassThruNullValues() {
+
+ RelationalPersistentProperty p = mock(RelationalPersistentProperty.class);
+ doReturn(Charset.class).when(p).getActualType();
+
+ assertThat(typeMapping.getColumnType(p)).isNull();
+ assertThatIllegalArgumentException().isThrownBy(() -> typeMapping.getRequiredColumnType(p));
+ }
+}
diff --git a/spring-data-jdbc/src/test/resources/org/springframework/data/jdbc/core/mapping/schema/changelog.yml b/spring-data-jdbc/src/test/resources/org/springframework/data/jdbc/core/mapping/schema/changelog.yml
new file mode 100644
index 0000000000..0e7566de1c
--- /dev/null
+++ b/spring-data-jdbc/src/test/resources/org/springframework/data/jdbc/core/mapping/schema/changelog.yml
@@ -0,0 +1,16 @@
+databaseChangeLog:
+ - changeSet:
+ id: '123'
+ author: Someone
+ objectQuotingStrategy: LEGACY
+ changes:
+ - createTable:
+ columns:
+ - column:
+ autoIncrement: true
+ constraints:
+ nullable: false
+ primaryKey: true
+ name: id
+ type: INT
+ tableName: foo
diff --git a/spring-data-jdbc/src/test/resources/org/springframework/data/jdbc/core/mapping/schema/person-with-id-and-name.sql b/spring-data-jdbc/src/test/resources/org/springframework/data/jdbc/core/mapping/schema/person-with-id-and-name.sql
new file mode 100644
index 0000000000..226bde05eb
--- /dev/null
+++ b/spring-data-jdbc/src/test/resources/org/springframework/data/jdbc/core/mapping/schema/person-with-id-and-name.sql
@@ -0,0 +1,5 @@
+CREATE TABLE person
+(
+ id int,
+ first_name varchar(255)
+);
diff --git a/spring-data-jdbc/src/test/resources/org/springframework/data/jdbc/core/mapping/schema/unused-table.sql b/spring-data-jdbc/src/test/resources/org/springframework/data/jdbc/core/mapping/schema/unused-table.sql
new file mode 100644
index 0000000000..efbc517647
--- /dev/null
+++ b/spring-data-jdbc/src/test/resources/org/springframework/data/jdbc/core/mapping/schema/unused-table.sql
@@ -0,0 +1,4 @@
+CREATE TABLE DELETE_ME
+(
+ id int
+);
diff --git a/spring-data-r2dbc/pom.xml b/spring-data-r2dbc/pom.xml
index 98019e0295..9e4104f54f 100644
--- a/spring-data-r2dbc/pom.xml
+++ b/spring-data-r2dbc/pom.xml
@@ -6,7 +6,7 @@
4.0.0
spring-data-r2dbc
- 3.2.0-SNAPSHOT
+ 3.2.0-1480-SNAPSHOT
Spring Data R2DBC
Spring Data module for R2DBC
@@ -15,7 +15,7 @@
org.springframework.data
spring-data-relational-parent
- 3.2.0-SNAPSHOT
+ 3.2.0-1480-SNAPSHOT
@@ -39,31 +39,6 @@
2018
-
-
- mpaluch
- Mark Paluch
- mpaluch(at)pivotal.io
- Pivotal Software, Inc.
- https://pivotal.io
-
- Project Lead
-
- +1
-
-
- ogierke
- Oliver Gierke
- ogierke(at)pivotal.io
- Pivotal Software, Inc.
- https://pivotal.io
-
- Project Lead
-
- +1
-
-
-
diff --git a/spring-data-relational/pom.xml b/spring-data-relational/pom.xml
index 57b9d707a6..892a9d3503 100644
--- a/spring-data-relational/pom.xml
+++ b/spring-data-relational/pom.xml
@@ -6,7 +6,7 @@
4.0.0
spring-data-relational
- 3.2.0-SNAPSHOT
+ 3.2.0-1480-SNAPSHOT
Spring Data Relational
Spring Data Relational support
@@ -14,7 +14,7 @@
org.springframework.data
spring-data-relational-parent
- 3.2.0-SNAPSHOT
+ 3.2.0-1480-SNAPSHOT
@@ -50,6 +50,13 @@
spring-core
+
+ org.liquibase
+ liquibase-core
+ ${liquibase.version}
+ true
+
+
com.google.code.findbugs
jsr305
diff --git a/src/main/asciidoc/index.adoc b/src/main/asciidoc/index.adoc
index 0016afe037..4b815d1c65 100644
--- a/src/main/asciidoc/index.adoc
+++ b/src/main/asciidoc/index.adoc
@@ -23,6 +23,7 @@ include::{spring-data-commons-docs}/repositories.adoc[leveloffset=+1]
= Reference Documentation
include::jdbc.adoc[leveloffset=+1]
+include::schema-support.adoc[leveloffset=+1]
[[appendix]]
= Appendix
diff --git a/src/main/asciidoc/schema-support.adoc b/src/main/asciidoc/schema-support.adoc
new file mode 100644
index 0000000000..6845c05ef8
--- /dev/null
+++ b/src/main/asciidoc/schema-support.adoc
@@ -0,0 +1,90 @@
+[[jdbc.schema]]
+= Schema Creation
+
+When working with SQL databases, the schema is an essential part.
+Spring Data JDBC supports a wide range of schema options, yet when starting with a domain model it can be challenging to come up with an initial database schema.
+To assist you with a code-first approach, Spring Data JDBC ships with an integration to create database change sets using https://www.liquibase.org/[Liquibase].
+
+Consider the following domain entity:
+
+[source,java]
+----
+@Table
+class Person {
+ @Id long id;
+ String firstName;
+ String lastName;
+ LocalDate birthday;
+ boolean active;
+}
+----
+
+Rendering the initial ChangeSet through the following code:
+
+[source,java]
+----
+
+RelationalMappingContext context = … // The context contains the Person entity, ideally initialized through initialEntitySet
+LiquibaseChangeSetWriter writer = new LiquibaseChangeSetWriter(context);
+
+writer.writeChangeSet(new FileSystemResource(new File(…)));
+----
+
+yields the following change log:
+
+[source,yaml]
+----
+databaseChangeLog:
+- changeSet:
+ id: '1685969572426'
+ author: Spring Data Relational
+ objectQuotingStrategy: LEGACY
+ changes:
+ - createTable:
+ columns:
+ - column:
+ autoIncrement: true
+ constraints:
+ nullable: false
+ primaryKey: true
+ name: id
+ type: BIGINT
+ - column:
+ constraints:
+ nullable: true
+ name: first_name
+ type: VARCHAR(255 BYTE)
+ - column:
+ constraints:
+ nullable: true
+ name: last_name
+ type: VARCHAR(255 BYTE)
+ - column:
+ constraints:
+ nullable: true
+ name: birthday
+ type: DATE
+ - column:
+ constraints:
+ nullable: false
+ name: active
+ type: TINYINT
+ tableName: person
+----
+
+Column types are computed from an object implementing the `SqlTypeMapping` strategy interface.
+Nullability is inferred from the type and set to `false` if the property type is a primitive Java type.
+
+Schema support can assist you throughout the application development lifecycle.
+In differential mode, you provide an existing Liquibase `Database` to the schema writer instance; the schema writer compares existing tables to mapped entities and derives from the difference which tables and columns to create or drop.
+By default, no tables and no columns are dropped unless you configure `dropTableFilter` and `dropColumnFilter`.
+Both filter predicates provide the table name (and, for the column filter, the column name) so your code can compute which tables and columns may be dropped.
+
+[source,java]
+----
+writer.setDropTableFilter(tableName -> …);
+writer.setDropColumnFilter((tableName, columnName) -> …);
+----
+
+NOTE: Schema support can only identify additions and removals in the sense of removing tables/columns that are not mapped or adding columns that do not exist in the database.
+Columns cannot be renamed, nor can data be migrated, because entity mapping does not provide details of how the schema has evolved.