Migrations in db-compatibility-verifier-maven-plugin
Closes #46389

Signed-off-by: Ryan Emerson <remerson@ibm.com>

parent ec1ddc73d4
commit ee9f364b8d
30 changed files with 994 additions and 608 deletions
@@ -3,7 +3,7 @@
## Overview

This Maven plugin is used to verify the database compatibility of Keycloak. It ensures that all database schema changes
(ChangeSets) are explicitly marked as either supported or unsupported by the rolling upgrades feature.
are explicitly marked as either supported or unsupported by the rolling upgrades feature.

## Goals

@@ -16,21 +16,41 @@ The plugin provides the following goals:

## Usage
### `snapshot` - Creates a snapshot of the current database ChangeSets.
### `snapshot` - Creates a snapshot of the current database ChangeSets and org.keycloak.migration.migrators.Migration implementations.

This goal is used to create an initial snapshot of the database ChangeSets. It creates a supported and unsupported JSON
file, specified via the `db.verify.supportedFile` and `db.verify.unsupportedFile` property, respectively.
This goal is used to create an initial snapshot of Liquibase ChangeSets and org.keycloak.migration.migrators.Migration implementations.
It creates a supported and an unsupported JSON file, specified via the `db.verify.supportedFile` and `db.verify.unsupportedFile` properties, respectively.

```bash
mvn org.keycloak:db-compatibility-verifier-maven-plugin:999.0.0-SNAPSHOT:snapshot \
-Ddb.verify.supportedFile=<relative-path-to-create-json-file> \
-Ddb.verify.unsupportedFile=<relative-path-to-create-json-file>
-Ddb.verify.unsupportedFile=<relative-path-to-create-json-file> \
-Ddb.verify.migration.package=org.keycloak.example # Optional Java package containing org.keycloak.migration.migrators.Migration implementations
```
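
For example, to snapshot a module's ChangeSets together with its Migration implementations, an invocation might look like the following sketch (the file paths are hypothetical; `org.keycloak.migration.migrators` is simply the package of the Migration interface referenced above):

```bash
mvn org.keycloak:db-compatibility-verifier-maven-plugin:999.0.0-SNAPSHOT:snapshot \
-Ddb.verify.supportedFile=src/main/resources/db-compatibility/supported.json \
-Ddb.verify.unsupportedFile=src/main/resources/db-compatibility/unsupported.json \
-Ddb.verify.migration.package=org.keycloak.migration.migrators
```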
The `supportedFile` will be created with a record of all known ChangeSets and the `unsupportedFile` will be initialized
as an empty JSON array.
The `supportedFile` will be created with a record of all known ChangeSets and Migrations. The `unsupportedFile` will be initialized
with empty JSON arrays.

### `verify` - Verifies that all detected ChangeSets recorded in either the supported or unsupported JSON files.
Each file is created with the following JSON format:

```json
{
  "changeSets" : [
    {
      "id" : "<id>",
      "author" : "<author>",
      "filename" : "<filename>"
    }
  ],
  "migrations" : [
    {
      "class" : "<fully-qualified-class-name>"
    }
  ]
}
```
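
For illustration, a populated `supportedFile` could look like the example below; the id, author, filename and class values are hypothetical placeholders rather than real Keycloak changelog entries:

```json
{
  "changeSets" : [
    {
      "id" : "example-changeset",
      "author" : "keycloak",
      "filename" : "META-INF/jpa-changelog-example.xml"
    }
  ],
  "migrations" : [
    {
      "class" : "org.keycloak.example.ExampleMigration"
    }
  ]
}
```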

### `verify` - Verifies that all detected ChangeSets and Migrations are recorded in either the supported or unsupported JSON files.

```bash
mvn org.keycloak:db-compatibility-verifier-maven-plugin:999.0.0-SNAPSHOT:verify \

@@ -38,7 +58,7 @@ mvn org.keycloak:db-compatibility-verifier-maven-plugin:999.0.0-SNAPSHOT:verify
-Ddb.verify.unsupportedFile=<relative-path-to-json-file>
```

### `supported` - Adds one or all missing ChangeSets to the supported JSON file
### `supported` - Adds one or all missing ChangeSets, or a Migration, to the supported JSON file

This goal is used to mark a ChangeSet as supported for rolling upgrades.

@@ -62,6 +82,15 @@ mvn org.keycloak:db-compatibility-verifier-maven-plugin:999.0.0-SNAPSHOT:support
-Ddb.verify.changeset.all=true
```

To mark a Migration as supported:

```bash
mvn org.keycloak:db-compatibility-verifier-maven-plugin:999.0.0-SNAPSHOT:supported \
-Ddb.verify.supportedFile=<relative-path-to-json-file> \
-Ddb.verify.unsupportedFile=<relative-path-to-json-file> \
-Ddb.verify.migration.class=org.example.migration.MigrationExample
```
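
Running the command above appends an entry to the `migrations` array of the supported file, roughly like this (using the hypothetical class name from the example command):

```json
"migrations" : [
  {
    "class" : "org.example.migration.MigrationExample"
  }
]
```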

### `unsupported` - Adds one or all missing ChangeSets to the unsupported JSON file

This goal is used to mark a ChangeSet as unsupported for rolling upgrades.

@@ -85,3 +114,12 @@ mvn org.keycloak:db-compatibility-verifier-maven-plugin:999.0.0-SNAPSHOT:unsuppo
-Ddb.verify.unsupportedFile=<relative-path-to-json-file> \
-Ddb.verify.changeset.all=true
```

To mark a Migration as unsupported:

```bash
mvn org.keycloak:db-compatibility-verifier-maven-plugin:999.0.0-SNAPSHOT:unsupported \
-Ddb.verify.supportedFile=<relative-path-to-json-file> \
-Ddb.verify.unsupportedFile=<relative-path-to-json-file> \
-Ddb.verify.migration.class=org.example.migration.MigrationExample
```
@@ -1,83 +0,0 @@
|
|||
package org.keycloak.db.compatibility.verifier;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
import com.fasterxml.jackson.core.type.TypeReference;
|
||||
import org.apache.maven.plugin.MojoExecutionException;
|
||||
import org.apache.maven.plugins.annotations.Parameter;
|
||||
|
||||
abstract class AbstractChangeSetMojo extends AbstractMojo {
|
||||
@Parameter(property = "db.verify.changeset.all", defaultValue = "false")
|
||||
boolean addAll;
|
||||
|
||||
@Parameter(property = "db.verify.changeset.id")
|
||||
String id;
|
||||
|
||||
@Parameter(property = "db.verify.changeset.author")
|
||||
String author;
|
||||
|
||||
@Parameter(property = "db.verify.changeset.filename")
|
||||
String filename;
|
||||
|
||||
|
||||
void checkFileExist(String ref, File file) throws MojoExecutionException {
|
||||
if (!file.exists()) {
|
||||
throw new MojoExecutionException("%s file does not exist".formatted(ref));
|
||||
}
|
||||
}
|
||||
|
||||
void checkUnknownChangeSet(Set<ChangeSet> knownChangeSets, ChangeSet changeSet) throws MojoExecutionException {
|
||||
if (!knownChangeSets.contains(changeSet)) {
|
||||
throw new MojoExecutionException("Unknown ChangeSet: " + changeSet);
|
||||
}
|
||||
}
|
||||
|
||||
protected void checkValidChangeSetId(String id, String author, String filename) throws MojoExecutionException {
|
||||
if (id == null || id.isBlank()) {
|
||||
throw new MojoExecutionException("ChangeSet id not set");
|
||||
}
|
||||
if (author == null || author.isBlank()) {
|
||||
throw new MojoExecutionException("ChangeSet author not set");
|
||||
}
|
||||
if (filename == null || filename.isBlank()) {
|
||||
throw new MojoExecutionException("ChangeSet filename not set");
|
||||
}
|
||||
}
|
||||
|
||||
void addAll(ClassLoader classLoader, File dest, File exclusions) throws IOException {
|
||||
// Discover all known ChangeSets
|
||||
ChangeLogXMLParser xmlParser = new ChangeLogXMLParser(classLoader);
|
||||
Set<ChangeSet> knownChangeSets = xmlParser.discoverAllChangeSets();
|
||||
|
||||
// Load changes to exclude and remove them from the known changesets
|
||||
Set<ChangeSet> excludedChanges = objectMapper.readValue(exclusions, new TypeReference<>() {});
|
||||
knownChangeSets.removeAll(excludedChanges);
|
||||
|
||||
// Overwrite all content in the destination file
|
||||
objectMapper.writeValue(dest, knownChangeSets);
|
||||
}
|
||||
|
||||
void addIndividual(ClassLoader classLoader, ChangeSet changeSet, File dest, File alternate) throws IOException, MojoExecutionException {
|
||||
// Discover all known ChangeSets
|
||||
ChangeLogXMLParser xmlParser = new ChangeLogXMLParser(classLoader);
|
||||
Set<ChangeSet> knownChangeSets = xmlParser.discoverAllChangeSets();
|
||||
|
||||
// It should not be possible to add an unknown changeset
|
||||
checkUnknownChangeSet(knownChangeSets, changeSet);
|
||||
|
||||
Set<ChangeSet> alternateChangeSets = objectMapper.readValue(alternate, new TypeReference<>() {});
|
||||
if (alternateChangeSets.contains(changeSet)) {
|
||||
throw new MojoExecutionException("ChangeSet already defined in the %s file".formatted(alternate.getName()));
|
||||
}
|
||||
|
||||
List<ChangeSet> destChanges = objectMapper.readValue(dest, new TypeReference<>() {});
|
||||
if (!destChanges.contains(changeSet)) {
|
||||
// If the ChangeSet is not already known, append to the end of the JSON array and overwrite the existing file
|
||||
destChanges.add(changeSet);
|
||||
objectMapper.writeValue(dest, destChanges);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@@ -0,0 +1,128 @@
|
|||
package org.keycloak.db.compatibility.verifier;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.util.Collection;
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
|
||||
import com.fasterxml.jackson.core.type.TypeReference;
|
||||
import org.apache.maven.plugin.MojoExecutionException;
|
||||
import org.apache.maven.plugins.annotations.Parameter;
|
||||
|
||||
abstract class AbstractNewEntryMojo extends AbstractMojo {
|
||||
@Parameter(property = "db.verify.changeset.all", defaultValue = "false")
|
||||
boolean addAll;
|
||||
|
||||
@Parameter(property = "db.verify.changeset.id")
|
||||
String id;
|
||||
|
||||
@Parameter(property = "db.verify.changeset.author")
|
||||
String author;
|
||||
|
||||
@Parameter(property = "db.verify.changeset.filename")
|
||||
String filename;
|
||||
|
||||
@Parameter(property = "db.verify.migration.class")
|
||||
String migration;
|
||||
|
||||
protected void execute(File dest, File alternate) throws Exception {
|
||||
ClassLoader classLoader = classLoader();
|
||||
if (addAll) {
|
||||
addAllChangeSets(classLoader, dest, alternate);
|
||||
} else if (migration != null && !migration.isEmpty()) {
|
||||
addMigration(classLoader, new Migration(migration), dest, alternate);
|
||||
} else {
|
||||
checkValidChangeSetId(id, author, filename);
|
||||
ChangeSet changeSet = new ChangeSet(id, author, filename);
|
||||
addChangeSet(classLoader, changeSet, dest, alternate);
|
||||
}
|
||||
}
|
||||
|
||||
protected void checkFileExist(String ref, File file) throws MojoExecutionException {
|
||||
if (!file.exists()) {
|
||||
throw new MojoExecutionException("%s file does not exist".formatted(ref));
|
||||
}
|
||||
}
|
||||
|
||||
protected void checkValidChangeSetId(String id, String author, String filename) throws MojoExecutionException {
|
||||
if (id == null || id.isBlank()) {
|
||||
throw new MojoExecutionException("ChangeSet id not set");
|
||||
}
|
||||
if (author == null || author.isBlank()) {
|
||||
throw new MojoExecutionException("ChangeSet author not set");
|
||||
}
|
||||
if (filename == null || filename.isBlank()) {
|
||||
throw new MojoExecutionException("ChangeSet filename not set");
|
||||
}
|
||||
}
|
||||
|
||||
void addAllChangeSets(ClassLoader classLoader, File dest, File exclusions) throws IOException {
|
||||
// Discover all known ChangeSets
|
||||
ChangeLogXMLParser xmlParser = new ChangeLogXMLParser(classLoader);
|
||||
Set<ChangeSet> knownChangeSets = xmlParser.discoverAllChangeSets();
|
||||
|
||||
// Load changes to exclude and remove them from the known changesets
|
||||
JsonParent excludedParent = objectMapper.readValue(exclusions, new TypeReference<>() {});
|
||||
Collection<ChangeSet> excludedChanges = excludedParent.changeSets();
|
||||
knownChangeSets.removeAll(excludedChanges);
|
||||
|
||||
// Overwrite all ChangeSet content in the destination file
|
||||
JsonParent parent = objectMapper.readValue(dest, new TypeReference<>() {});
|
||||
objectMapper.writeValue(dest, new JsonParent(knownChangeSets, parent.migrations()));
|
||||
}
|
||||
|
||||
void addChangeSet(ClassLoader classLoader, ChangeSet changeSet, File dest, File alternate) throws IOException, MojoExecutionException {
|
||||
// Discover all known ChangeSets
|
||||
ChangeLogXMLParser xmlParser = new ChangeLogXMLParser(classLoader);
|
||||
Set<ChangeSet> knownChangeSets = xmlParser.discoverAllChangeSets();
|
||||
|
||||
// It should not be possible to add an unknown changeset
|
||||
if (!knownChangeSets.contains(changeSet)) {
|
||||
throw new MojoExecutionException("Unknown ChangeSet: " + changeSet);
|
||||
}
|
||||
|
||||
JsonParent parent = objectMapper.readValue(alternate, new TypeReference<>() {});
|
||||
Set<ChangeSet> alternateChangeSets = new HashSet<>(parent.changeSets());
|
||||
if (alternateChangeSets.contains(changeSet)) {
|
||||
throw new MojoExecutionException("ChangeSet already defined in the %s file".formatted(alternate.getName()));
|
||||
}
|
||||
|
||||
parent = objectMapper.readValue(dest, new TypeReference<>() {});
|
||||
Collection<ChangeSet> destChanges = parent.changeSets();
|
||||
if (!destChanges.contains(changeSet)) {
|
||||
// If the ChangeSet is not already known, append to the end of the JSON array and overwrite the existing file
|
||||
destChanges.add(changeSet);
|
||||
objectMapper.writeValue(dest, parent);
|
||||
}
|
||||
}
|
||||
|
||||
void addMigration(ClassLoader classLoader, Migration migration, File dest, File alternate) throws IOException, MojoExecutionException {
|
||||
// Discover all known migrations
|
||||
String clazz = migration.clazz();
|
||||
int idx = clazz.lastIndexOf(".");
|
||||
String pkg = idx == -1 ? "" : clazz.substring(0, idx);
|
||||
|
||||
KeycloakMigrationParser migrationParser = new KeycloakMigrationParser(classLoader, pkg);
|
||||
Set<Migration> knownMigrations = migrationParser.discoverAllMigrations();
|
||||
|
||||
// It should not be possible to add an unknown Migration class
|
||||
if (!knownMigrations.contains(migration)) {
|
||||
throw new MojoExecutionException("Unknown Migration: " + migration);
|
||||
}
|
||||
|
||||
JsonParent parent = objectMapper.readValue(alternate, new TypeReference<>() {});
|
||||
Set<Migration> alternateMigrations = new HashSet<>(parent.migrations());
|
||||
if (alternateMigrations.contains(migration)) {
|
||||
throw new MojoExecutionException("Migration already defined in the %s file".formatted(alternate.getName()));
|
||||
}
|
||||
|
||||
parent = objectMapper.readValue(dest, new TypeReference<>() {});
|
||||
Collection<Migration> destChanges = parent.migrations();
|
||||
if (!destChanges.contains(migration)) {
|
||||
// If the Migration is not already known, append to the end of the JSON array and overwrite the existing file
|
||||
destChanges.add(migration);
|
||||
objectMapper.writeValue(dest, parent);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@@ -0,0 +1,5 @@
package org.keycloak.db.compatibility.verifier;

import java.util.Collection;

record JsonParent(Collection<ChangeSet> changeSets, Collection<Migration> migrations) {}
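
As a rough, non-committed sketch of how `JsonParent` maps onto the JSON format documented in the README (the file name and entry values below are hypothetical):

```java
package org.keycloak.db.compatibility.verifier;

import java.io.File;
import java.util.List;

import com.fasterxml.jackson.databind.ObjectMapper;

class JsonParentRoundTripExample {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        // One hypothetical ChangeSet and one hypothetical Migration
        JsonParent parent = new JsonParent(
                List.of(new ChangeSet("example-changeset", "keycloak", "META-INF/jpa-changelog-example.xml")),
                List.of(new Migration("org.keycloak.example.ExampleMigration")));

        // Write the structure documented in the README, then read it back
        File file = new File("supported.json");
        mapper.writerWithDefaultPrettyPrinter().writeValue(file, parent);
        JsonParent readBack = mapper.readValue(file, JsonParent.class);
        System.out.println(readBack.changeSets().size() + " changeSets, " + readBack.migrations().size() + " migrations");
    }
}
```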
@@ -0,0 +1,106 @@
|
|||
package org.keycloak.db.compatibility.verifier;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.net.JarURLConnection;
|
||||
import java.net.URI;
|
||||
import java.net.URISyntaxException;
|
||||
import java.net.URL;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Enumeration;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.jar.JarEntry;
|
||||
import java.util.jar.JarFile;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
record KeycloakMigrationParser(ClassLoader classLoader, String packageName) {
|
||||
Set<Migration> discoverAllMigrations() throws IOException {
|
||||
return findAllClassNamesInPackage(classLoader, packageName)
|
||||
.filter(s -> {
|
||||
var parts = s.split("\\.");
|
||||
var clazz = parts[parts.length - 1];
|
||||
// Ignore anonymous/lambda/inner classes
|
||||
return !clazz.contains("$");
|
||||
})
|
||||
.map(Migration::new)
|
||||
.collect(Collectors.toSet());
|
||||
}
|
||||
|
||||
private Stream<String> findAllClassNamesInPackage(ClassLoader classLoader, String packageName) throws IOException {
|
||||
if (packageName == null) {
|
||||
return Stream.of();
|
||||
}
|
||||
|
||||
List<String> classNames = new ArrayList<>();
|
||||
String path = packageName.replace('.', '/');
|
||||
|
||||
Enumeration<URL> resources = classLoader.getResources(path);
|
||||
while (resources.hasMoreElements()) {
|
||||
URL resource = resources.nextElement();
|
||||
|
||||
if (resource.getProtocol().equals("file")) {
|
||||
URI uri;
|
||||
try {
|
||||
uri = resource.toURI();
|
||||
} catch (URISyntaxException e) {
|
||||
// Should never happen
|
||||
throw new IllegalStateException(e);
|
||||
}
|
||||
classNames.addAll(findNamesInDirectory(new File(uri), packageName));
|
||||
} else if (resource.getProtocol().equals("jar")) {
|
||||
classNames.addAll(findNamesInJar(resource, path));
|
||||
}
|
||||
}
|
||||
return classNames.stream();
|
||||
}
|
||||
|
||||
// Helper for file system (IDE)
|
||||
private static List<String> findNamesInDirectory(File directory, String packageName) {
|
||||
List<String> classNames = new ArrayList<>();
|
||||
if (!directory.exists()) {
|
||||
return classNames;
|
||||
}
|
||||
|
||||
File[] files = directory.listFiles();
|
||||
if (files != null) {
|
||||
for (File file : files) {
|
||||
if (file.isDirectory()) {
|
||||
// Recursive scan
|
||||
classNames.addAll(findNamesInDirectory(file, packageName + "." + file.getName()));
|
||||
} else if (file.getName().endsWith(".class")) {
|
||||
// Just strip the extension and append to package
|
||||
String className = packageName + "." + file.getName().substring(0, file.getName().length() - 6);
|
||||
classNames.add(className);
|
||||
}
|
||||
}
|
||||
}
|
||||
return classNames;
|
||||
}
|
||||
|
||||
// Helper for JAR files (Maven)
|
||||
private static List<String> findNamesInJar(URL resource, String packagePath) throws IOException {
|
||||
List<String> classNames = new ArrayList<>();
|
||||
|
||||
JarURLConnection jarConn = (JarURLConnection) resource.openConnection();
|
||||
try (JarFile jarFile = jarConn.getJarFile()) {
|
||||
Enumeration<JarEntry> entries = jarFile.entries();
|
||||
|
||||
while (entries.hasMoreElements()) {
|
||||
JarEntry entry = entries.nextElement();
|
||||
String entryName = entry.getName();
|
||||
|
||||
// Check if it matches the package path and is a class file
|
||||
// We add a "/" to the packagePath check to ensure we don't accidentally match "com/tester" when searching for "com/test"
|
||||
if (entryName.startsWith(packagePath + "/") && entryName.endsWith(".class")) {
|
||||
|
||||
// Convert path "com/example/MyClass.class" -> "com.example.MyClass"
|
||||
String className = entryName.replace('/', '.').substring(0, entryName.length() - 6);
|
||||
classNames.add(className);
|
||||
}
|
||||
}
|
||||
}
|
||||
return classNames;
|
||||
}
|
||||
}
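
A quick illustrative use of the parser (not part of the commit; the scanned package name is hypothetical):

```java
package org.keycloak.db.compatibility.verifier;

import java.util.Set;

class MigrationScanExample {
    public static void main(String[] args) throws Exception {
        // Scan a hypothetical package for candidate Migration classes (anonymous/lambda/inner classes are filtered out)
        KeycloakMigrationParser parser =
                new KeycloakMigrationParser(MigrationScanExample.class.getClassLoader(), "org.keycloak.example");
        Set<Migration> migrations = parser.discoverAllMigrations();
        migrations.forEach(m -> System.out.println(m.clazz()));
    }
}
```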
|
||||
|
|
@@ -0,0 +1,6 @@
package org.keycloak.db.compatibility.verifier;

import com.fasterxml.jackson.annotation.JsonProperty;

record Migration(@JsonProperty("class") String clazz) {
}
|
||||
|
|
@@ -4,12 +4,15 @@ import java.io.File;
|
|||
import java.io.IOException;
|
||||
import java.util.Set;
|
||||
|
||||
import com.fasterxml.jackson.databind.SerializationFeature;
|
||||
import org.apache.maven.plugin.MojoExecutionException;
|
||||
import org.apache.maven.plugins.annotations.Mojo;
|
||||
import org.apache.maven.plugins.annotations.Parameter;
|
||||
|
||||
@Mojo(name = "snapshot")
|
||||
public class CreateSnapshotMojo extends AbstractMojo {
|
||||
public class SnapshotMojo extends AbstractMojo {
|
||||
|
||||
@Parameter(property = "db.verify.migration.package")
|
||||
String migrationsPackage;
|
||||
|
||||
@Override
|
||||
public void execute() throws MojoExecutionException {
|
||||
|
|
@@ -24,20 +27,25 @@ public class CreateSnapshotMojo extends AbstractMojo {
|
|||
File uFile = new File(root, unsupportedFile);
|
||||
|
||||
ClassLoader classLoader = classLoader();
|
||||
createSnapshot(classLoader, sFile, uFile);
|
||||
createSnapshot(classLoader, sFile, uFile, migrationsPackage);
|
||||
} catch (Exception e) {
|
||||
throw new MojoExecutionException("Error creating ChangeSet snapshot", e);
|
||||
}
|
||||
}
|
||||
|
||||
void createSnapshot(ClassLoader classLoader, File sFile, File uFile) throws IOException {
|
||||
// Write all known ChangeSet defined in the jpa-changelog*.xml files to the supported file
|
||||
void createSnapshot(ClassLoader classLoader, File sFile, File uFile, String migrationsPackage) throws IOException {
|
||||
// Record all known ChangeSet defined in the jpa-changelog*.xml files
|
||||
ChangeLogXMLParser xmlParser = new ChangeLogXMLParser(classLoader);
|
||||
Set<ChangeSet> changeSets = xmlParser.discoverAllChangeSets();
|
||||
objectMapper.enable(SerializationFeature.INDENT_OUTPUT);
|
||||
objectMapper.writeValue(sFile, changeSets);
|
||||
|
||||
// Record all known org.keycloak.migration.migrators.Migration implementations
|
||||
Set<Migration> migrations = new KeycloakMigrationParser(classLoader, migrationsPackage).discoverAllMigrations();
|
||||
|
||||
// Write all to the supported file
|
||||
JsonParent jsonFile = new JsonParent(changeSets, migrations);
|
||||
objectMapper.writeValue(sFile, jsonFile);
|
||||
|
||||
// Create an empty JSON array in the unsupported file
|
||||
objectMapper.writeValue(uFile, Set.of());
|
||||
objectMapper.writeValue(uFile, new JsonParent(Set.of(), Set.of()));
|
||||
}
|
||||
}
|
||||
|
|
@@ -6,7 +6,7 @@ import org.apache.maven.plugin.MojoExecutionException;
|
|||
import org.apache.maven.plugins.annotations.Mojo;
|
||||
|
||||
@Mojo(name = "supported")
|
||||
public class ChangeSetSupportedMojo extends AbstractChangeSetMojo {
|
||||
public class SupportedMojo extends AbstractNewEntryMojo {
|
||||
|
||||
@Override
|
||||
public void execute() throws MojoExecutionException {
|
||||
|
|
@@ -22,16 +22,9 @@ public class ChangeSetSupportedMojo extends AbstractChangeSetMojo {
|
|||
checkFileExist("unsupported", uFile);
|
||||
|
||||
try {
|
||||
if (addAll) {
|
||||
addAll(classLoader(), sFile, uFile);
|
||||
} else {
|
||||
checkValidChangeSetId(id, author, filename);
|
||||
ChangeSet changeSet = new ChangeSet(id, author, filename);
|
||||
addIndividual(classLoader(), changeSet, sFile, uFile);
|
||||
}
|
||||
execute(sFile, uFile);
|
||||
} catch (Exception e) {
|
||||
throw new MojoExecutionException("Error adding ChangeSet to supported file", e);
|
||||
throw new MojoExecutionException("Error adding entry to supported file", e);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@@ -6,7 +6,7 @@ import org.apache.maven.plugin.MojoExecutionException;
|
|||
import org.apache.maven.plugins.annotations.Mojo;
|
||||
|
||||
@Mojo(name = "unsupported")
|
||||
public class ChangeSetUnsupportedMojo extends AbstractChangeSetMojo {
|
||||
public class UnsupportedMojo extends AbstractNewEntryMojo {
|
||||
|
||||
@Override
|
||||
public void execute() throws MojoExecutionException {
|
||||
|
|
@@ -22,15 +22,9 @@ public class ChangeSetUnsupportedMojo extends AbstractChangeSetMojo {
|
|||
checkFileExist("unsupported", uFile);
|
||||
|
||||
try {
|
||||
if (addAll) {
|
||||
addAll(classLoader(), uFile, sFile);
|
||||
} else {
|
||||
checkValidChangeSetId(id, author, filename);
|
||||
ChangeSet changeSet = new ChangeSet(id, author, filename);
|
||||
addIndividual(classLoader(), changeSet, uFile, sFile);
|
||||
}
|
||||
execute(uFile, sFile);
|
||||
} catch (Exception e) {
|
||||
throw new MojoExecutionException("Error adding ChangeSet to unsupported file", e);
|
||||
throw new MojoExecutionException("Error adding entry to unsupported file", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@@ -1,90 +0,0 @@
|
|||
package org.keycloak.db.compatibility.verifier;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import com.fasterxml.jackson.core.type.TypeReference;
|
||||
import org.apache.maven.plugin.MojoExecutionException;
|
||||
import org.apache.maven.plugins.annotations.Mojo;
|
||||
|
||||
@Mojo(name = "verify")
|
||||
public class VerifyCompatibilityMojo extends AbstractMojo {
|
||||
|
||||
@Override
|
||||
public void execute() throws MojoExecutionException {
|
||||
if (skip) {
|
||||
getLog().info("Skipping execution");
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
File root = project.getBasedir();
|
||||
File sFile = new File(root, supportedFile);
|
||||
File uFile = new File(root, unsupportedFile);
|
||||
verifyCompatibility(classLoader(), sFile, uFile);
|
||||
} catch (Exception e) {
|
||||
throw new MojoExecutionException("Error loading project resources", e);
|
||||
}
|
||||
}
|
||||
|
||||
void verifyCompatibility(ClassLoader classLoader, File sFile, File uFile) throws IOException, MojoExecutionException {
|
||||
if (!sFile.exists() && !uFile.exists()) {
|
||||
getLog().info("No JSON ChangeSet files exist to verify");
|
||||
return;
|
||||
}
|
||||
|
||||
// Parse JSON files to determine all committed ChangeSets
|
||||
List<ChangeSet> sChanges = objectMapper.readValue(sFile, new TypeReference<>() {});
|
||||
List<ChangeSet> uChanges = objectMapper.readValue(uFile, new TypeReference<>() {});
|
||||
Set<ChangeSet> recordedChanges = Stream.of(sChanges, uChanges)
|
||||
.flatMap(List::stream)
|
||||
.collect(Collectors.toSet());
|
||||
|
||||
if (recordedChanges.isEmpty()) {
|
||||
getLog().info("No supported or unsupported ChangeSet exist in specified files");
|
||||
return;
|
||||
}
|
||||
|
||||
checkIntersection(sChanges, uChanges);
|
||||
|
||||
// Parse all ChangeSets currently defined in the jpa-changelog files
|
||||
ChangeLogXMLParser xmlParser = new ChangeLogXMLParser(classLoader);
|
||||
Set<ChangeSet> currentChanges = xmlParser.discoverAllChangeSets();
|
||||
checkMissingChangeSet(currentChanges, recordedChanges, sFile, uFile);
|
||||
}
|
||||
|
||||
void checkIntersection(List<ChangeSet> sChanges, List<ChangeSet> uChanges) throws MojoExecutionException {
|
||||
Set<ChangeSet> intersection = new HashSet<>(sChanges);
|
||||
intersection.retainAll(uChanges);
|
||||
if (!intersection.isEmpty()) {
|
||||
getLog().error("The following ChangeSets should be defined in either the supported or unsupported file, they cannot appear in both:");
|
||||
intersection.forEach(change -> getLog().error("\t\t" + change.toString()));
|
||||
getLog().error("The offending ChangeSets should be removed from one of the files");
|
||||
throw new MojoExecutionException("One or more ChangeSet definitions exist in both the supported and unsupported file");
|
||||
}
|
||||
}
|
||||
|
||||
void checkMissingChangeSet(Set<ChangeSet> currentChanges, Set<ChangeSet> recordedChanges, File sFile, File uFile) throws MojoExecutionException {
|
||||
if (recordedChanges.equals(currentChanges)) {
|
||||
getLog().info("All ChangeSets in the module recorded as expected in the supported and unsupported files");
|
||||
} else {
|
||||
getLog().error("The recorded ChangeSet JSON files differ from the current repository state");
|
||||
getLog().error("The following ChangeSets should be defined in either the supported '%s' or unsupported '%s' file:".formatted(sFile.toString(), uFile.toString()));
|
||||
currentChanges.removeAll(recordedChanges);
|
||||
currentChanges.forEach(change -> getLog().error("\t\t" + change.toString()));
|
||||
getLog().error("You must determine whether the ChangeSet(s) is compatible with rolling upgrades or not");
|
||||
getLog().error("A ChangeSet that requires locking preventing other cluster members accessing the database or makes schema changes that breaks functionality in earlier Keycloak versions is NOT compatible with rolling upgrades");
|
||||
getLog().error("Rolling upgrade compatibility must be verified against all supported database vendors before the supported file is updated");
|
||||
getLog().error("If the change IS compatible, then it should be committed to the repository in the supported file: '%s'".formatted(sFile.toString()));
|
||||
getLog().error("If the change IS NOT compatible, then it should be committed to the repository in the unsupported file: '%s'".formatted(sFile.toString()));
|
||||
getLog().error("Adding a ChangeSet to the unsupported file ensures that a rolling upgrade is not attempted when upgrading to the first patch version containing the change");
|
||||
getLog().error("ChangeSets can be added to the supported or unsupported files using the org.keycloak:db-compatibility-verifier-maven-plugin. See the module README for usage instructions");
|
||||
throw new MojoExecutionException("One or more ChangeSet definitions are missing from the supported or unsupported files");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@@ -0,0 +1,124 @@
|
|||
package org.keycloak.db.compatibility.verifier;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.util.Collection;
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import com.fasterxml.jackson.core.type.TypeReference;
|
||||
import org.apache.maven.plugin.MojoExecutionException;
|
||||
import org.apache.maven.plugins.annotations.Mojo;
|
||||
import org.apache.maven.plugins.annotations.Parameter;
|
||||
|
||||
@Mojo(name = "verify")
|
||||
public class VerifyMojo extends AbstractMojo {
|
||||
|
||||
@Parameter(property = "db.verify.migration.package")
|
||||
String migrationsPackage;
|
||||
|
||||
@Override
|
||||
public void execute() throws MojoExecutionException {
|
||||
if (skip) {
|
||||
getLog().info("Skipping execution");
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
File root = project.getBasedir();
|
||||
File sFile = new File(root, supportedFile);
|
||||
File uFile = new File(root, unsupportedFile);
|
||||
verify(classLoader(), sFile, uFile);
|
||||
} catch (Exception e) {
|
||||
throw new MojoExecutionException("Error loading project resources", e);
|
||||
}
|
||||
}
|
||||
|
||||
void verify(ClassLoader classLoader, File sFile, File uFile) throws IOException, MojoExecutionException {
|
||||
if (!sFile.exists() && !uFile.exists()) {
|
||||
getLog().info("No JSON files exist to verify");
|
||||
return;
|
||||
}
|
||||
|
||||
verifyChangeSets(classLoader, sFile, uFile);
|
||||
verifyMigrations(classLoader, sFile, uFile);
|
||||
}
|
||||
|
||||
void verifyChangeSets(ClassLoader classLoader, File sFile, File uFile) throws IOException, MojoExecutionException {
|
||||
// Parse JSON files to determine all committed ChangeSets
|
||||
Collection<ChangeSet> sChanges = objectMapper.readValue(sFile, new TypeReference<JsonParent>() {}).changeSets();
|
||||
Collection<ChangeSet> uChanges = objectMapper.readValue(uFile, new TypeReference<JsonParent>() {}).changeSets();
|
||||
Set<ChangeSet> recordedChanges = Stream.of(sChanges, uChanges)
|
||||
.flatMap(Collection::stream)
|
||||
.collect(Collectors.toSet());
|
||||
|
||||
var description = "ChangeSet";
|
||||
if (recordedChanges.isEmpty()) {
|
||||
getLog().info("No supported or unsupported ChangeSets exist in specified files");
|
||||
return;
|
||||
}
|
||||
|
||||
verifyIntersection(description, sChanges, uChanges);
|
||||
|
||||
// Parse all ChangeSets currently defined in the jpa-changelog* files
|
||||
ChangeLogXMLParser xmlParser = new ChangeLogXMLParser(classLoader);
|
||||
Set<ChangeSet> currentChanges = xmlParser.discoverAllChangeSets();
|
||||
verifyMissing(description, currentChanges, recordedChanges, sFile, uFile);
|
||||
}
|
||||
|
||||
void verifyMigrations(ClassLoader classLoader, File sFile, File uFile) throws IOException, MojoExecutionException {
|
||||
if (migrationsPackage == null || migrationsPackage.isEmpty()) {
|
||||
getLog().info("Skipping Migrations verification as no package configured");
|
||||
return;
|
||||
}
|
||||
// Parse JSON files to determine all committed Migrations
|
||||
Collection<Migration> sChanges = objectMapper.readValue(sFile, new TypeReference<JsonParent>() {}).migrations();
|
||||
Collection<Migration> uChanges = objectMapper.readValue(uFile, new TypeReference<JsonParent>() {}).migrations();
|
||||
Set<Migration> recordedChanges = Stream.of(sChanges, uChanges)
|
||||
.flatMap(Collection::stream)
|
||||
.collect(Collectors.toSet());
|
||||
|
||||
var description = "Migration";
|
||||
if (recordedChanges.isEmpty()) {
|
||||
getLog().info("No supported or unsupported Migrations exist in specified files");
|
||||
return;
|
||||
}
|
||||
verifyIntersection(description, sChanges, uChanges);
|
||||
|
||||
// Parse all Migrations currently defined in the configured migrationsPackage
|
||||
Set<Migration> currentChanges = new KeycloakMigrationParser(classLoader, migrationsPackage).discoverAllMigrations();
|
||||
verifyMissing(description, currentChanges, recordedChanges, sFile, uFile);
|
||||
}
|
||||
|
||||
void verifyIntersection(String description, Collection<?> sChanges, Collection<?> uChanges) throws MojoExecutionException {
|
||||
Set<?> intersection = new HashSet<>(sChanges);
|
||||
intersection.retainAll(uChanges);
|
||||
if (!intersection.isEmpty()) {
|
||||
getLog().error("The following %s should be defined in either the supported or unsupported file, they cannot appear in both:".formatted(description));
|
||||
intersection.forEach(change -> getLog().error("\t\t" + change.toString()));
|
||||
getLog().error("The offending %s should be removed from one of the files".formatted(description));
|
||||
throw new MojoExecutionException("One or more %s definitions exist in both the supported and unsupported file".formatted(description));
|
||||
}
|
||||
}
|
||||
|
||||
void verifyMissing(String description, Set<?> currentChanges, Set<?> recordedChanges, File sFile, File uFile) throws MojoExecutionException {
|
||||
if (recordedChanges.equals(currentChanges)) {
|
||||
getLog().info("All %s in the module recorded as expected in the supported and unsupported files".formatted(description));
|
||||
} else {
|
||||
getLog().error("The recorded %s differ from the current repository state".formatted(description));
|
||||
getLog().error("The following %s should be defined in either the supported '%s' or unsupported '%s' file:".formatted(description, sFile.toString(), uFile.toString()));
|
||||
currentChanges.removeAll(recordedChanges);
|
||||
currentChanges.forEach(change -> getLog().error("\t\t" + change.toString()));
|
||||
getLog().error("You must determine whether the %s is compatible with rolling upgrades or not".formatted(description));
|
||||
getLog().error("A %s that requires locking preventing other cluster members accessing the database or makes schema changes that breaks functionality in earlier Keycloak versions is NOT compatible with rolling upgrades".formatted(description));
|
||||
getLog().error("Rolling upgrade compatibility must be verified against all supported database vendors before the supported file is updated");
|
||||
getLog().error("If the schema change IS compatible, then it should be committed to the repository in the supported file: '%s'".formatted(sFile.toString()));
|
||||
getLog().error("If the schema change IS NOT compatible, then it should be committed to the repository in the unsupported file: '%s'".formatted(sFile.toString()));
|
||||
getLog().error("Adding a %s to the unsupported file ensures that a rolling upgrade is not attempted when upgrading to the first patch version containing the change".formatted(description));
|
||||
getLog().error("%s can be added to the supported or unsupported files using the org.keycloak:db-compatibility-verifier-maven-plugin. See the module README for usage instructions".formatted(description));
|
||||
throw new MojoExecutionException("One or more %s definitions are missing from the supported or unsupported files".formatted(description));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@@ -18,7 +18,7 @@ abstract class AbstractMojoTest {
|
|||
|
||||
@BeforeEach
|
||||
void init() throws IOException {
|
||||
testDir = Files.createTempDirectory(ChangeSetSupportedMojoTest.class.getSimpleName());
|
||||
testDir = Files.createTempDirectory(SupportedMojoTest.class.getSimpleName());
|
||||
supportedFile = testDir.resolve("supported.json").toFile();
|
||||
unsupportedFile = testDir.resolve("unsupported.json").toFile();
|
||||
}
|
||||
|
|
|
|||
|
|
@@ -0,0 +1,201 @@
|
|||
package org.keycloak.db.compatibility.verifier;
|
||||
|
||||
import java.io.File;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import com.fasterxml.jackson.core.type.TypeReference;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import org.apache.maven.plugin.MojoExecutionException;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertThrows;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
|
||||
abstract class AbstractNewEntryMojoTest extends AbstractMojoTest {
|
||||
|
||||
protected AbstractNewEntryMojo mojo;
|
||||
protected ClassLoader classLoader;
|
||||
protected ObjectMapper mapper;
|
||||
|
||||
protected AbstractNewEntryMojoTest(AbstractNewEntryMojo mojo) {
|
||||
this.mojo = mojo;
|
||||
this.classLoader = getClass().getClassLoader();
|
||||
this.mapper = new ObjectMapper();
|
||||
}
|
||||
|
||||
protected abstract File getTargetFile();
|
||||
|
||||
protected abstract File getAlternateFile();
|
||||
|
||||
@Test
|
||||
void testAddAllChangeSets() throws Exception {
|
||||
// Create alternate file with a single ChangeSet
|
||||
List<ChangeSet> alternateChanges = new ChangeLogXMLParser(classLoader).extractChangeSets("META-INF/jpa-changelog-2.xml");
|
||||
assertEquals(1, alternateChanges.size());
|
||||
mapper.writeValue(getAlternateFile(), new JsonParent(alternateChanges, List.of()));
|
||||
|
||||
// Execute add all and expect all ChangeSets from jpa-changelog-1.xml to be present
|
||||
mapper.writeValue(getTargetFile(), new JsonParent(List.of(), List.of()));
|
||||
mojo.addAllChangeSets(classLoader, getTargetFile(), getAlternateFile());
|
||||
|
||||
JsonParent parent = mapper.readValue(getTargetFile(), new TypeReference<>() {});
|
||||
List<ChangeSet> targetChanges = new ArrayList<>(parent.changeSets());
|
||||
assertEquals(1, targetChanges.size());
|
||||
|
||||
ChangeSet sChange = targetChanges.get(0);
|
||||
assertEquals("test", sChange.id());
|
||||
assertEquals("keycloak", sChange.author());
|
||||
assertEquals("META-INF/jpa-changelog-1.xml", sChange.filename());
|
||||
}
|
||||
|
||||
@Test
|
||||
void testAddChangeSet() throws Exception {
|
||||
var changeLogParser = new ChangeLogXMLParser(classLoader);
|
||||
|
||||
assertTrue(supportedFile.createNewFile());
|
||||
assertTrue(unsupportedFile.createNewFile());
|
||||
mapper.writeValue(supportedFile, new JsonParent(List.of(), List.of()));
|
||||
mapper.writeValue(unsupportedFile, new JsonParent(List.of(), List.of()));
|
||||
|
||||
// Test ChangeSet is added to target file as expected
|
||||
ChangeSet changeSet = changeLogParser.extractChangeSets("META-INF/jpa-changelog-1.xml").get(0);
|
||||
mojo.addChangeSet(classLoader, changeSet, getTargetFile(), getAlternateFile());
|
||||
|
||||
JsonParent parent = mapper.readValue(getTargetFile(), new TypeReference<>() {});
|
||||
List<ChangeSet> targetChanges = new ArrayList<>(parent.changeSets());
|
||||
assertEquals(1, targetChanges.size());
|
||||
ChangeSet sChange = targetChanges.get(0);
|
||||
assertEquals(changeSet.id(), sChange.id());
|
||||
assertEquals(changeSet.author(), sChange.author());
|
||||
assertEquals(changeSet.filename(), sChange.filename());
|
||||
|
||||
// Test subsequent ChangeSets are added to already populated target file
|
||||
changeSet = changeLogParser.extractChangeSets("META-INF/jpa-changelog-2.xml").get(0);
|
||||
mojo.addChangeSet(classLoader, changeSet, getTargetFile(), getAlternateFile());
|
||||
|
||||
parent = mapper.readValue(getTargetFile(), new TypeReference<>() {});
|
||||
targetChanges = new ArrayList<>(parent.changeSets());
|
||||
assertEquals(2, targetChanges.size());
|
||||
|
||||
sChange = targetChanges.get(1);
|
||||
assertEquals(changeSet.id(), sChange.id());
|
||||
assertEquals(changeSet.author(), sChange.author());
|
||||
assertEquals(changeSet.filename(), sChange.filename());
|
||||
|
||||
// Test ChangeSet already exists handled gracefully
|
||||
mojo.addChangeSet(classLoader, changeSet, getTargetFile(), getAlternateFile());
|
||||
|
||||
parent = mapper.readValue(getTargetFile(), new TypeReference<>() {});
|
||||
targetChanges = new ArrayList<>(parent.changeSets());
|
||||
assertEquals(2, targetChanges.size());
|
||||
}
|
||||
|
||||
@Test
|
||||
void testChangeAlreadyExistsInAlternateFile() throws Exception {
|
||||
assertTrue(supportedFile.createNewFile());
|
||||
assertTrue(unsupportedFile.createNewFile());
|
||||
mapper.writeValue(getTargetFile(), new JsonParent(List.of(), List.of()));
|
||||
|
||||
// Create alternate file with a single ChangeSet
|
||||
List<ChangeSet> alternateChanges = new ChangeLogXMLParser(classLoader).extractChangeSets("META-INF/jpa-changelog-1.xml");
|
||||
assertEquals(1, alternateChanges.size());
|
||||
|
||||
ChangeSet changeSet = alternateChanges.get(0);
|
||||
mapper.writeValue(getAlternateFile(), new JsonParent(alternateChanges, List.of()));
|
||||
|
||||
Exception e = assertThrows(
|
||||
MojoExecutionException.class,
|
||||
() -> mojo.addChangeSet(classLoader, changeSet, getTargetFile(), getAlternateFile())
|
||||
);
|
||||
|
||||
assertEquals("ChangeSet already defined in the %s file".formatted(getAlternateFile().getName()), e.getMessage());
|
||||
}
|
||||
|
||||
@Test
|
||||
void testAddUnknownChangeSet() throws Exception {
|
||||
assertTrue(supportedFile.createNewFile());
|
||||
assertTrue(unsupportedFile.createNewFile());
|
||||
|
||||
mapper.writeValue(getAlternateFile(), new JsonParent(List.of(), List.of()));
|
||||
ChangeSet unknown = new ChangeSet("asf", "asfgasg", "afasgfas");
|
||||
|
||||
Exception e = assertThrows(
|
||||
MojoExecutionException.class,
|
||||
() -> mojo.addChangeSet(classLoader, unknown, getTargetFile(), getAlternateFile())
|
||||
);
|
||||
|
||||
assertEquals("Unknown ChangeSet: " + unknown, e.getMessage());
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
void testAddMigration() throws Exception {
|
||||
assertTrue(supportedFile.createNewFile());
|
||||
assertTrue(unsupportedFile.createNewFile());
|
||||
mapper.writeValue(supportedFile, new JsonParent(List.of(), List.of()));
|
||||
mapper.writeValue(unsupportedFile, new JsonParent(List.of(), List.of()));
|
||||
|
||||
// Test Migration is added to target file as expected
|
||||
Migration migration = new Migration(getClass().getName());
|
||||
mojo.addMigration(classLoader, migration, getTargetFile(), getAlternateFile());
|
||||
|
||||
JsonParent parent = mapper.readValue(getTargetFile(), new TypeReference<>() {});
|
||||
List<Migration> supportedMigrations = new ArrayList<>(parent.migrations());
|
||||
assertEquals(1, supportedMigrations.size());
|
||||
Migration sMigration = supportedMigrations.get(0);
|
||||
assertEquals(migration.clazz(), sMigration.clazz());
|
||||
|
||||
// Test subsequent Migration is added to already populated target file
|
||||
migration = new Migration(VerifyMojoTest.class.getName());
|
||||
mojo.addMigration(classLoader, migration, getTargetFile(), getAlternateFile());
|
||||
|
||||
parent = mapper.readValue(getTargetFile(), new TypeReference<>() {});
|
||||
supportedMigrations = new ArrayList<>(parent.migrations());
|
||||
assertEquals(2, supportedMigrations.size());
|
||||
|
||||
sMigration = supportedMigrations.get(1);
|
||||
assertEquals(migration.clazz(), sMigration.clazz());
|
||||
|
||||
// Test existing Migration handled gracefully
|
||||
mojo.addMigration(classLoader, migration, getTargetFile(), getAlternateFile());
|
||||
|
||||
parent = mapper.readValue(getTargetFile(), new TypeReference<>() {});
|
||||
supportedMigrations = new ArrayList<>(parent.migrations());
|
||||
assertEquals(2, supportedMigrations.size());
|
||||
}
|
||||
|
||||
@Test
|
||||
void testMigrationAlreadyExistsInAlternateFile() throws Exception {
|
||||
assertTrue(supportedFile.createNewFile());
|
||||
assertTrue(unsupportedFile.createNewFile());
|
||||
mapper.writeValue(getTargetFile(), new JsonParent(List.of(), List.of()));
|
||||
|
||||
// Create alternate file with a single Migration
|
||||
var migration = new Migration(getClass().getName());
|
||||
mapper.writeValue(getAlternateFile(), new JsonParent(List.of(), List.of(migration)));
|
||||
|
||||
Exception e = assertThrows(
|
||||
MojoExecutionException.class,
|
||||
() -> mojo.addMigration(classLoader, migration, getTargetFile(), getAlternateFile())
|
||||
);
|
||||
|
||||
assertEquals("Migration already defined in the %s file".formatted(getAlternateFile().getName()), e.getMessage());
|
||||
}
|
||||
|
||||
@Test
|
||||
void testAddUnknownMigration() throws Exception {
|
||||
assertTrue(supportedFile.createNewFile());
|
||||
assertTrue(unsupportedFile.createNewFile());
|
||||
|
||||
mapper.writeValue(getTargetFile(), new JsonParent(List.of(), List.of()));
|
||||
var unknown = new Migration("unknownClass");
|
||||
|
||||
Exception e = assertThrows(
|
||||
MojoExecutionException.class,
|
||||
() -> mojo.addMigration(classLoader, unknown, getTargetFile(), getAlternateFile())
|
||||
);
|
||||
assertEquals("Unknown Migration: " + unknown, e.getMessage());
|
||||
}
|
||||
}
|
||||
|
|
@@ -1,125 +0,0 @@
|
|||
package org.keycloak.db.compatibility.verifier;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import com.fasterxml.jackson.core.type.TypeReference;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import org.apache.maven.plugin.MojoExecutionException;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertThrows;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
|
||||
public class ChangeSetSupportedMojoTest extends AbstractMojoTest {
|
||||
|
||||
@Test
|
||||
void testAddAll() throws Exception {
|
||||
var classLoader = ChangeSetSupportedMojoTest.class.getClassLoader();
|
||||
var mojo = new ChangeSetSupportedMojo();
|
||||
var mapper = new ObjectMapper();
|
||||
|
||||
// Create unsupported file with a single ChangeSet
|
||||
List<ChangeSet> unsupportedChanges = new ChangeLogXMLParser(classLoader).extractChangeSets("META-INF/jpa-changelog-2.xml");
|
||||
assertEquals(1, unsupportedChanges.size());
|
||||
mapper.writeValue(unsupportedFile, unsupportedChanges);
|
||||
|
||||
// Execute add all and expect all ChangeSets from jpa-changelog-1.xml to be present
|
||||
assertTrue(supportedFile.createNewFile());
|
||||
mojo.addAll(classLoader, supportedFile, unsupportedFile);
|
||||
|
||||
List<ChangeSet> supportedChanges = mapper.readValue(supportedFile, new TypeReference<>() {});
|
||||
assertEquals(1, supportedChanges.size());
|
||||
|
||||
ChangeSet sChange = supportedChanges.get(0);
|
||||
assertEquals("test", sChange.id());
|
||||
assertEquals("keycloak", sChange.author());
|
||||
assertEquals("META-INF/jpa-changelog-1.xml", sChange.filename());
|
||||
}
|
||||
|
||||
@Test
|
||||
void testAddIndividual() throws Exception {
|
||||
var classLoader = ChangeSetSupportedMojoTest.class.getClassLoader();
|
||||
var changeLogParser = new ChangeLogXMLParser(classLoader);
|
||||
var mojo = new ChangeSetSupportedMojo();
|
||||
var mapper = new ObjectMapper();
|
||||
|
||||
assertTrue(supportedFile.createNewFile());
|
||||
assertTrue(unsupportedFile.createNewFile());
|
||||
mapper.writeValue(supportedFile, List.of());
|
||||
mapper.writeValue(unsupportedFile, List.of());
|
||||
|
||||
// Test ChangeSet is added to supported file as expected
|
||||
ChangeSet changeSet = changeLogParser.extractChangeSets("META-INF/jpa-changelog-1.xml").get(0);
|
||||
mojo.addIndividual(classLoader, changeSet, supportedFile, unsupportedFile);
|
||||
|
||||
List<ChangeSet> supportedChanges = mapper.readValue(supportedFile, new TypeReference<>() {});
|
||||
assertEquals(1, supportedChanges.size());
|
||||
ChangeSet sChange = supportedChanges.get(0);
|
||||
assertEquals(changeSet.id(), sChange.id());
|
||||
assertEquals(changeSet.author(), sChange.author());
|
||||
assertEquals(changeSet.filename(), sChange.filename());
|
||||
|
||||
// Test subsequent ChangeSets are added to already populated supported file
|
||||
changeSet = changeLogParser.extractChangeSets("META-INF/jpa-changelog-2.xml").get(0);
|
||||
mojo.addIndividual(classLoader, changeSet, supportedFile, unsupportedFile);
|
||||
|
||||
supportedChanges = mapper.readValue(supportedFile, new TypeReference<>() {});
|
||||
assertEquals(2, supportedChanges.size());
|
||||
|
||||
sChange = supportedChanges.get(1);
|
||||
assertEquals(changeSet.id(), sChange.id());
|
||||
assertEquals(changeSet.author(), sChange.author());
|
||||
assertEquals(changeSet.filename(), sChange.filename());
|
||||
|
||||
// Test ChangeSet already exists handled gracefully
|
||||
mojo.addIndividual(classLoader, changeSet, supportedFile, unsupportedFile);
|
||||
|
||||
supportedChanges = mapper.readValue(supportedFile, new TypeReference<>() {});
|
||||
assertEquals(2, supportedChanges.size());
|
||||
}
|
||||
|
||||
@Test
|
||||
void testChangeAlreadyUnsupported() throws Exception {
|
||||
var classLoader = ChangeSetSupportedMojoTest.class.getClassLoader();
|
||||
var mojo = new ChangeSetSupportedMojo();
|
||||
var mapper = new ObjectMapper();
|
||||
|
||||
assertTrue(supportedFile.createNewFile());
|
||||
assertTrue(unsupportedFile.createNewFile());
|
||||
mapper.writeValue(supportedFile, List.of());
|
||||
|
||||
// Create unsupported file with a single ChangeSet
|
||||
List<ChangeSet> unsupportedChanges = new ChangeLogXMLParser(classLoader).extractChangeSets("META-INF/jpa-changelog-1.xml");
|
||||
assertEquals(1, unsupportedChanges.size());
|
||||
|
||||
ChangeSet changeSet = unsupportedChanges.get(0);
|
||||
mapper.writeValue(unsupportedFile, unsupportedChanges);
|
||||
|
||||
Exception e = assertThrows(
|
||||
MojoExecutionException.class,
|
||||
() -> mojo.addIndividual(classLoader, changeSet, supportedFile, unsupportedFile)
|
||||
);
|
||||
|
||||
assertEquals("ChangeSet already defined in the %s file".formatted(unsupportedFile.getName()), e.getMessage());
|
||||
}
|
||||
|
||||
@Test
|
||||
void testAddUnknownChangeSet() throws Exception {
|
||||
var classLoader = ChangeSetSupportedMojoTest.class.getClassLoader();
|
||||
var mojo = new ChangeSetSupportedMojo();
|
||||
var mapper = new ObjectMapper();
|
||||
|
||||
assertTrue(supportedFile.createNewFile());
|
||||
assertTrue(unsupportedFile.createNewFile());
|
||||
mapper.writeValue(supportedFile, List.of());
|
||||
ChangeSet unknown = new ChangeSet("asf", "asfgasg", "afasgfas");
|
||||
|
||||
Exception e = assertThrows(
|
||||
MojoExecutionException.class,
|
||||
() -> mojo.addIndividual(classLoader, unknown, supportedFile, unsupportedFile)
|
||||
);
|
||||
|
||||
assertEquals("Unknown ChangeSet: " + unknown, e.getMessage());
|
||||
}
|
||||
}
|
||||
|
|
@@ -1,125 +0,0 @@
|
|||
package org.keycloak.db.compatibility.verifier;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import com.fasterxml.jackson.core.type.TypeReference;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import org.apache.maven.plugin.MojoExecutionException;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertThrows;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
|
||||
public class ChangeSetUnsupportedMojoTest extends AbstractMojoTest {
|
||||
|
||||
@Test
|
||||
void testAddAll() throws Exception {
|
||||
var classLoader = ChangeSetUnsupportedMojoTest.class.getClassLoader();
|
||||
var mojo = new ChangeSetUnsupportedMojo();
|
||||
var mapper = new ObjectMapper();
|
||||
|
||||
// Create supported file with a single ChangeSet
|
||||
List<ChangeSet> supportedChanges = new ChangeLogXMLParser(classLoader).extractChangeSets("META-INF/jpa-changelog-2.xml");
|
||||
assertEquals(1, supportedChanges.size());
|
||||
mapper.writeValue(unsupportedFile, supportedChanges);
|
||||
|
||||
// Execute add all and expect all ChangeSets from jpa-changelog-1.xml to be present
|
||||
assertTrue(supportedFile.createNewFile());
|
||||
mojo.addAll(classLoader, supportedFile, unsupportedFile);
|
||||
|
||||
List<ChangeSet> unsupportedChanges = mapper.readValue(supportedFile, new TypeReference<>() {});
|
||||
assertEquals(1, unsupportedChanges.size());
|
||||
|
||||
ChangeSet sChange = unsupportedChanges.get(0);
|
||||
assertEquals("test", sChange.id());
|
||||
assertEquals("keycloak", sChange.author());
|
||||
assertEquals("META-INF/jpa-changelog-1.xml", sChange.filename());
|
||||
}
|
||||
|
||||
@Test
|
||||
void testAddIndividual() throws Exception {
|
||||
var classLoader = ChangeSetUnsupportedMojoTest.class.getClassLoader();
|
||||
var changeLogParser = new ChangeLogXMLParser(classLoader);
|
||||
var mojo = new ChangeSetUnsupportedMojo();
|
||||
        var mapper = new ObjectMapper();

        assertTrue(supportedFile.createNewFile());
        assertTrue(unsupportedFile.createNewFile());
        mapper.writeValue(supportedFile, List.of());
        mapper.writeValue(unsupportedFile, List.of());

        // Test ChangeSet is added to unsupported file as expected
        ChangeSet changeSet = changeLogParser.extractChangeSets("META-INF/jpa-changelog-1.xml").get(0);
        mojo.addIndividual(classLoader, changeSet, unsupportedFile, supportedFile);

        List<ChangeSet> unsupportedChanges = mapper.readValue(unsupportedFile, new TypeReference<>() {});
        assertEquals(1, unsupportedChanges.size());
        ChangeSet sChange = unsupportedChanges.get(0);
        assertEquals(changeSet.id(), sChange.id());
        assertEquals(changeSet.author(), sChange.author());
        assertEquals(changeSet.filename(), sChange.filename());

        // Test subsequent ChangeSets are added to already populated supported file
        changeSet = changeLogParser.extractChangeSets("META-INF/jpa-changelog-2.xml").get(0);
        mojo.addIndividual(classLoader, changeSet, unsupportedFile, supportedFile);

        unsupportedChanges = mapper.readValue(unsupportedFile, new TypeReference<>() {});
        assertEquals(2, unsupportedChanges.size());

        sChange = unsupportedChanges.get(1);
        assertEquals(changeSet.id(), sChange.id());
        assertEquals(changeSet.author(), sChange.author());
        assertEquals(changeSet.filename(), sChange.filename());

        // Test ChangeSet already exists handled gracefully
        mojo.addIndividual(classLoader, changeSet, unsupportedFile, supportedFile);

        unsupportedChanges = mapper.readValue(unsupportedFile, new TypeReference<>() {});
        assertEquals(2, unsupportedChanges.size());
    }

    @Test
    void testChangeAlreadySupported() throws Exception {
        var classLoader = ChangeSetUnsupportedMojoTest.class.getClassLoader();
        var mojo = new ChangeSetUnsupportedMojo();
        var mapper = new ObjectMapper();

        assertTrue(supportedFile.createNewFile());
        assertTrue(unsupportedFile.createNewFile());
        mapper.writeValue(unsupportedFile, List.of());

        // Create supported file with a single ChangeSet
        List<ChangeSet> unsupportedChanges = new ChangeLogXMLParser(classLoader).extractChangeSets("META-INF/jpa-changelog-1.xml");
        assertEquals(1, unsupportedChanges.size());

        ChangeSet changeSet = unsupportedChanges.get(0);
        mapper.writeValue(supportedFile, unsupportedChanges);

        Exception e = assertThrows(
              MojoExecutionException.class,
              () -> mojo.addIndividual(classLoader, changeSet, unsupportedFile, supportedFile)
        );

        assertEquals("ChangeSet already defined in the %s file".formatted(supportedFile.getName()), e.getMessage());
    }

    @Test
    void testAddUnknownChangeSet() throws Exception {
        var classLoader = ChangeSetSupportedMojoTest.class.getClassLoader();
        var mojo = new ChangeSetSupportedMojo();
        var mapper = new ObjectMapper();

        assertTrue(supportedFile.createNewFile());
        assertTrue(unsupportedFile.createNewFile());
        mapper.writeValue(unsupportedFile, List.of());
        ChangeSet unknown = new ChangeSet("asf", "asfgasg", "afasgfas");

        Exception e = assertThrows(
              MojoExecutionException.class,
              () -> mojo.addIndividual(classLoader, unknown, unsupportedFile, supportedFile)
        );

        assertEquals("Unknown ChangeSet: " + unknown, e.getMessage());
    }
}
@@ -1,30 +0,0 @@
package org.keycloak.db.compatibility.verifier;

import java.util.List;

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;

public class CreateSnapshotMojoTest extends AbstractMojoTest {

    @Test
    void testSnapshotFilesCreated() throws Exception {
        var classLoader = CreateSnapshotMojoTest.class.getClassLoader();
        var mojo = new CreateSnapshotMojo();
        mojo.createSnapshot(classLoader, supportedFile, unsupportedFile);

        assertTrue(supportedFile.exists());
        assertTrue(unsupportedFile.exists());

        var mapper = new ObjectMapper();
        List<ChangeSet> supportedChanges = mapper.readValue(supportedFile, new TypeReference<>() {});
        assertEquals(2, supportedChanges.size());

        List<ChangeSet> unsupportedChanges = mapper.readValue(unsupportedFile, new TypeReference<>() {});
        assertEquals(0, unsupportedChanges.size());
    }
}
@@ -0,0 +1,31 @@
package org.keycloak.db.compatibility.verifier;

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;

public class SnapshotMojoTest extends AbstractMojoTest {

    @Test
    void testSnapshotFilesCreated() throws Exception {
        var classLoader = SnapshotMojoTest.class.getClassLoader();
        var mojo = new SnapshotMojo();
        mojo.createSnapshot(classLoader, supportedFile, unsupportedFile, "org.keycloak.db.compatibility.verifier.test");

        assertTrue(supportedFile.exists());
        assertTrue(unsupportedFile.exists());

        var mapper = new ObjectMapper();
        JsonParent json = mapper.readValue(supportedFile, new TypeReference<>() {});
        assertEquals(2, json.changeSets().size());
        assertEquals(1, json.migrations().size());

        json = mapper.readValue(unsupportedFile, new TypeReference<>() {});
        assertEquals(0, json.changeSets().size());
        assertEquals(0, json.migrations().size());
    }
}
@@ -0,0 +1,20 @@
package org.keycloak.db.compatibility.verifier;

import java.io.File;

public class SupportedMojoTest extends AbstractNewEntryMojoTest {

    SupportedMojoTest() {
        super(new SupportedMojo());
    }

    @Override
    protected File getTargetFile() {
        return supportedFile;
    }

    @Override
    protected File getAlternateFile() {
        return unsupportedFile;
    }
}
@@ -0,0 +1,20 @@
package org.keycloak.db.compatibility.verifier;

import java.io.File;

public class UnsupportedMojoTest extends AbstractNewEntryMojoTest {

    UnsupportedMojoTest() {
        super(new UnsupportedMojo());
    }

    @Override
    protected File getTargetFile() {
        return unsupportedFile;
    }

    @Override
    protected File getAlternateFile() {
        return supportedFile;
    }
}
@@ -1,75 +0,0 @@
package org.keycloak.db.compatibility.verifier;

import java.io.File;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import org.apache.maven.plugin.MojoExecutionException;
import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertThrows;

public class VerifyCompatibilityMojoTest {

    final ClassLoader classLoader = VerifyCompatibilityMojoTest.class.getClassLoader();

    @Test
    void testChangeSetFilesDoNotExist() {
        var mojo = new VerifyCompatibilityMojo();
        File noneExistingFile = new File("noneExistingFile");
        assertFalse(noneExistingFile.exists());

        assertDoesNotThrow(() -> mojo.verifyCompatibility(classLoader, noneExistingFile, noneExistingFile));
    }

    @Test
    void testEmptyChangeSetFiles() {
        var mojo = new VerifyCompatibilityMojo();
        File emptyJson = new File(classLoader.getResource("META-INF/empty-array.json").getFile());

        assertDoesNotThrow(() -> mojo.verifyCompatibility(classLoader, emptyJson, emptyJson));
    }

    @Test
    void testChangeSetIncludedInSupportedAndUnsupportedFiles() {
        var mojo = new VerifyCompatibilityMojo();
        var changeSet = new ChangeSet("1", "keycloak", "example.xml");

        Exception e = assertThrows(
              MojoExecutionException.class,
              () -> mojo.checkIntersection(List.of(changeSet), List.of(changeSet))
        );
        assertEquals("One or more ChangeSet definitions exist in both the supported and unsupported file", e.getMessage());
    }

    @Test
    void testAllChangeSetsRecorded() {
        var mojo = new VerifyCompatibilityMojo();
        var changeSets = Set.of(
              new ChangeSet("1", "keycloak", "example.xml"),
              new ChangeSet("2", "keycloak", "example.xml")
        );

        assertDoesNotThrow(() -> mojo.checkMissingChangeSet(changeSets, new HashSet<>(changeSets), new File(""), new File("")));
    }

    @Test
    void testMissingChangeSet() {
        var mojo = new VerifyCompatibilityMojo();
        var currentChanges = new HashSet<ChangeSet>();
        currentChanges.add(new ChangeSet("1", "keycloak", "example.xml"));
        currentChanges.add(new ChangeSet("2", "keycloak", "example.xml"));

        var recordedChanges = Set.of(currentChanges.iterator().next());

        Exception e = assertThrows(
              MojoExecutionException.class,
              () -> mojo.checkMissingChangeSet(currentChanges, recordedChanges, new File(""), new File(""))
        );
        assertEquals("One or more ChangeSet definitions are missing from the supported or unsupported files", e.getMessage());
    }
}
@@ -0,0 +1,107 @@
package org.keycloak.db.compatibility.verifier;

import java.io.File;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import org.apache.maven.plugin.MojoExecutionException;
import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertThrows;

public class VerifyMojoTest {

    final ClassLoader classLoader = VerifyMojoTest.class.getClassLoader();
    final VerifyMojo mojo = new VerifyMojo();

    @Test
    void testFilesDoNotExist() {
        File noneExistingFile = new File("noneExistingFile");
        assertFalse(noneExistingFile.exists());

        assertDoesNotThrow(() -> mojo.verify(classLoader, noneExistingFile, noneExistingFile));
    }

    @Test
    void testEmptySnapshotFiles() {
        File emptyJson = new File(classLoader.getResource("META-INF/empty-snapshot.json").getFile());

        assertDoesNotThrow(() -> mojo.verifyChangeSets(classLoader, emptyJson, emptyJson));
    }

    @Test
    void testChangeSetIncludedInSupportedAndUnsupportedFiles() {
        var changeSet = new ChangeSet("1", "keycloak", "example.xml");

        Exception e = assertThrows(
              MojoExecutionException.class,
              () -> mojo.verifyIntersection("ChangeSet", List.of(changeSet), List.of(changeSet))
        );
        assertEquals("One or more ChangeSet definitions exist in both the supported and unsupported file", e.getMessage());
    }

    @Test
    void testAllChangeSetsRecorded() {
        var changeSets = Set.of(
              new ChangeSet("1", "keycloak", "example.xml"),
              new ChangeSet("2", "keycloak", "example.xml")
        );

        assertDoesNotThrow(() -> mojo.verifyMissing("ChangeSet", changeSets, new HashSet<>(changeSets), new File(""), new File("")));
    }

    @Test
    void testMissingChangeSet() {
        var currentChanges = new HashSet<ChangeSet>();
        currentChanges.add(new ChangeSet("1", "keycloak", "example.xml"));
        currentChanges.add(new ChangeSet("2", "keycloak", "example.xml"));

        var recordedChanges = Set.of(currentChanges.iterator().next());

        Exception e = assertThrows(
              MojoExecutionException.class,
              () -> mojo.verifyMissing("ChangeSet", currentChanges, recordedChanges, new File(""), new File(""))
        );
        assertEquals("One or more ChangeSet definitions are missing from the supported or unsupported files", e.getMessage());
    }

    @Test
    void testMigrationIncludedInSupportedAndUnsupportedFiles() {
        var migration = new Migration("example.Migration");

        Exception e = assertThrows(
              MojoExecutionException.class,
              () -> mojo.verifyIntersection("Migration", List.of(migration), List.of(migration))
        );
        assertEquals("One or more Migration definitions exist in both the supported and unsupported file", e.getMessage());
    }

    @Test
    void testAllMigrationsRecorded() {
        var migrations = Set.of(
              new Migration("example.Migration1"),
              new Migration("example.Migration2")
        );

        assertDoesNotThrow(() -> mojo.verifyMissing("Migration", migrations, new HashSet<>(migrations), new File(""), new File("")));
    }

    @Test
    void testMissingMigration() {
        var currentChanges = new HashSet<Migration>();
        currentChanges.add(new Migration("example.Migration1"));
        currentChanges.add(new Migration("example.Migration2"));

        var recordedChanges = Set.of(currentChanges.iterator().next());

        Exception e = assertThrows(
              MojoExecutionException.class,
              () -> mojo.verifyMissing("Migration", currentChanges, recordedChanges, new File(""), new File(""))
        );
        assertEquals("One or more Migration definitions are missing from the supported or unsupported files", e.getMessage());
    }
}
@@ -0,0 +1,12 @@
package org.keycloak.db.compatibility.verifier.test;

public class ExampleMigration {

    final Runnable anonymous = new Runnable() {
        @Override
        public void run() {
        }
    };

    static class Inner {}
}
@@ -0,0 +1,4 @@
{
  "changeSets": [],
  "migrations": []
}
@@ -19,7 +19,7 @@ file initialized with an empty array. Both of these files must be committed to t
A snapshot can be created by executing:

```
./mvnw clean install -am -pl model/jpa -Pdb-changeset-snapshot -DskipTests
./mvnw clean install -am -pl model/jpa -Pdb-snapshot -DskipTests
```
@@ -192,7 +192,7 @@
    <profiles>
        <profile>
            <id>db-changeset-snapshot</id>
            <id>db-snapshot</id>
            <build>
                <plugins>
                    <plugin>
model/storage-private/README.md (new file, 53 lines)
@@ -0,0 +1,53 @@
# Rolling updates database compatibility

In order to track database schema changes that are compatible/incompatible with the `rolling-updates` feature, this module
makes use of the `db-compatibility-verifier-maven-plugin`. See `misc/db-compatibility-verifier/README.md` for detailed
usage instructions.

The rolling-updates:v2 feature only supports rolling updates of Keycloak patch releases; therefore, database changes
are only tracked in release branches and not in `main`.

## Tracking supported org.keycloak.migration.migrators.Migration implementations

All `org.keycloak.migration.migrators.Migration` implementations should be created in this module's `org.keycloak.migration.migrators`
package.
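
For orientation, a minimal implementation might look like the sketch below. This is an illustrative example only: the class name `MigrateToExample` and the version string are placeholders, and it assumes the `Migration` interface's `migrate(KeycloakSession)` and `getVersion()` methods.

```java
package org.keycloak.migration.migrators;

import org.keycloak.migration.ModelVersion;
import org.keycloak.models.KeycloakSession;

// Hypothetical migrator, shown only to illustrate the shape of an implementation;
// real migrators follow the MigrateTo<x_y_z> naming convention.
public class MigrateToExample implements Migration {

    // The model version this migration targets (placeholder value).
    public static final ModelVersion VERSION = new ModelVersion("999.0.0");

    @Override
    public void migrate(KeycloakSession session) {
        // Apply the model-level changes required by this release here.
    }

    @Override
    public ModelVersion getVersion() {
        return VERSION;
    }
}
```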

All `Migration` implementations in this module are considered supported by the `rolling-updates:v2` feature at branch creation time,
as this is the initial database state from the perspective of the current release stream. When creating a new release branch, a "snapshot"
of all known `Migration` implementations in this module is recorded using the `db-compatibility-verifier:snapshot`
Maven plugin. This generates two JSON files: a "supported" file with all known Migrations and an "unsupported"
file initialized with empty arrays. Both of these files must be committed to the repository.

A snapshot can be created by executing:

```
./mvnw clean install -am -pl model/storage-private -Pdb-snapshot -DskipTests
```

## Verifying all database changes are tracked

The `db-compatibility-verifier:verify` plugin is used as part of the `model/storage-private` test phase to ensure that
any `Migration` implementations added during the release branch's lifecycle are tracked in either the supported or unsupported files.
If one or more unrecorded Migrations are detected, contributors need to determine whether each Migration is compatible with a
rolling update. If the change is not compatible, then it must be recorded in the unsupported file. Conversely, if it is
compatible, it must be recorded in the supported file.

Execution of the `db-compatibility-verifier:verify` plugin can be skipped during the test phase by specifying `-Ddb.verify.skip=true`.
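
For example, an illustrative invocation that runs this module's tests with the verification step disabled:

```bash
./mvnw test -pl model/storage-private -Ddb.verify.skip=true
```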

## Adding a supported database change

To add an individual Migration to the supported file, users can execute:

```
./mvnw -pl model/storage-private org.keycloak:db-compatibility-verifier-maven-plugin:999.0.0-SNAPSHOT:supported \
-Ddb.verify.migration.class=org.keycloak.migration.migrators.MigrateTo<x_y_z>
```

## Adding an unsupported database change

To add an individual Migration to the unsupported file, users can execute:

```
./mvnw -pl model/storage-private org.keycloak:db-compatibility-verifier-maven-plugin:999.0.0-SNAPSHOT:unsupported \
-Ddb.verify.migration.class=org.keycloak.migration.migrators.MigrateTo<x_y_z>
```
@@ -11,6 +11,12 @@
    <name>Keycloak Database Support - Private classes</name>
    <description>Keycloak Database Support - Private classes</description>

    <properties>
        <db.verify.supportedFile>src/main/resources/META-INF/rolling-upgrades-supported-changes.json</db.verify.supportedFile>
        <db.verify.unsupportedFile>src/main/resources/META-INF/rolling-upgrades-unsupported-changes.json</db.verify.unsupportedFile>
        <db.verify.migration.package>org.keycloak.migration.migrators</db.verify.migration.package>
    </properties>

    <dependencies>
        <dependency>
            <groupId>org.keycloak</groupId>
@@ -61,4 +67,46 @@
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.keycloak</groupId>
                <artifactId>db-compatibility-verifier-maven-plugin</artifactId>
                <version>${project.version}</version>
                <executions>
                    <execution>
                        <id>verify</id>
                        <phase>test</phase>
                        <goals>
                            <goal>verify</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>
        </plugins>
    </build>

    <profiles>
        <profile>
            <id>db-snapshot</id>
            <build>
                <plugins>
                    <plugin>
                        <groupId>org.keycloak</groupId>
                        <artifactId>db-compatibility-verifier-maven-plugin</artifactId>
                        <version>${project.version}</version>
                        <executions>
                            <execution>
                                <id>snapshot</id>
                                <phase>compile</phase>
                                <goals>
                                    <goal>snapshot</goal>
                                </goals>
                            </execution>
                        </executions>
                    </plugin>
                </plugins>
            </build>
        </profile>
    </profiles>
</project>
@@ -2,9 +2,13 @@ package org.keycloak.quarkus.runtime.configuration.compatibility;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.Collection;
import java.util.Comparator;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
@@ -15,6 +19,7 @@ import org.keycloak.config.DatabaseOptions;
import org.keycloak.config.Option;
import org.keycloak.jose.jws.crypto.HashUtils;

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.smallrye.config.ConfigValue;
@@ -28,7 +33,7 @@ public class DatabaseCompatibilityMetadataProvider implements CompatibilityMetad
    private static final Logger log = Logger.getLogger(DatabaseCompatibilityMetadataProvider.class);

    public static final String ID = "database";
    public static final String UNSUPPORTED_CHANGE_SET_HASH_KEY = "unsupported-changeset-hash";
    public static final String UNSUPPORTED_CHANGES_HASH_KEY = "unsupported-changeset-hash";

    @Override
    public Map<String, String> metadata() {
@@ -43,25 +48,7 @@ public class DatabaseCompatibilityMetadataProvider implements CompatibilityMetad
            addOptional(DatabaseOptions.DB_URL_PORT, metadata);
            addOptional(DatabaseOptions.DB_URL_DATABASE, metadata);
        }

        ObjectMapper objectMapper = new ObjectMapper();
        try (InputStream inputStream = DatabaseCompatibilityMetadataProvider.class.getResourceAsStream("/META-INF/rolling-upgrades-unsupported-changes.json")) {
            if (inputStream != null) {
                // Load the ChangeSet JSON into memory and write to a JSON String in order to avoid whitespace changes impacting the hash
                Set<ChangeSet> changeSets = objectMapper.readValue(inputStream, new TypeReference<>() {});
                List<ChangeSet> sortedChanges = changeSets.stream().sorted(
                      Comparator.comparing(ChangeSet::id)
                            .thenComparing(ChangeSet::author)
                            .thenComparing(ChangeSet::filename)
                ).toList();

                String changeSetJson = objectMapper.writeValueAsString(sortedChanges);
                String hash = HashUtils.sha256UrlEncodedHash(changeSetJson, StandardCharsets.UTF_8);
                metadata.put(UNSUPPORTED_CHANGE_SET_HASH_KEY, hash);
            }
        } catch (IOException e) {
            log.error("Unable to close InputStream when creating database unsupported change hash", e);
        }
        addUnsupportedDatabaseChanges(metadata);
        return metadata;
    }
@@ -75,6 +62,50 @@
        return ID;
    }

    public record ChangeSet(String id, String author, String filename) {
    public record JsonParent(Collection<ChangeSet> changeSets, Collection<Migration> migrations) {
    }

    record ChangeSet(String id, String author, String filename) {
    }

    record Migration(@JsonProperty("class") String clazz) {
    }

    public static void addUnsupportedDatabaseChanges(Map<String, String> metadata) {
        try {
            // Load JSON into memory and write to a JSON String in order to avoid whitespace changes impacting the hash
            Enumeration<URL> resources = DatabaseCompatibilityMetadataProvider.class.getClassLoader().getResources("/META-INF/rolling-upgrades-unsupported-changes.json");
            Set<ChangeSet> changeSets = new HashSet<>();
            Set<Migration> migrations = new HashSet<>();

            ObjectMapper objectMapper = new ObjectMapper();
            while (resources.hasMoreElements()) {
                URL url = resources.nextElement();
                try (InputStream inputStream = url.openStream()) {
                    JsonParent parent = objectMapper.readValue(inputStream, new TypeReference<>() {});
                    changeSets.addAll(parent.changeSets);
                    migrations.addAll(parent.migrations);
                }
            }

            if (!changeSets.isEmpty()) {
                List<ChangeSet> sortedChanges = changeSets.stream().sorted(
                      Comparator.comparing(ChangeSet::id)
                            .thenComparing(ChangeSet::author)
                            .thenComparing(ChangeSet::filename)
                ).toList();

                List<Migration> sortedMigrations = migrations.stream()
                      .sorted(Comparator.comparing(Migration::clazz))
                      .toList();

                JsonParent parent = new JsonParent(sortedChanges, sortedMigrations);
                String changeSetJson = objectMapper.writeValueAsString(parent);
                String hash = HashUtils.sha256UrlEncodedHash(changeSetJson, StandardCharsets.UTF_8);
                metadata.put(UNSUPPORTED_CHANGES_HASH_KEY, hash);
            }
        } catch (IOException e) {
            log.error("Unable to close InputStream when creating database unsupported change hash", e);
        }
    }
}
@@ -18,13 +18,10 @@
package org.keycloak.it.cli.dist;

import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;

import org.keycloak.common.Profile;
import org.keycloak.common.Version;
@@ -38,7 +35,6 @@ import org.keycloak.it.junit5.extension.RawDistOnly;
import org.keycloak.it.utils.KeycloakDistribution;
import org.keycloak.it.utils.RawKeycloakDistribution;
import org.keycloak.jgroups.certificates.DefaultJGroupsCertificateProviderFactory;
import org.keycloak.jose.jws.crypto.HashUtils;
import org.keycloak.quarkus.runtime.cli.command.UpdateCompatibility;
import org.keycloak.quarkus.runtime.cli.command.UpdateCompatibilityCheck;
import org.keycloak.quarkus.runtime.cli.command.UpdateCompatibilityMetadata;
@@ -50,14 +46,12 @@ import org.keycloak.spi.infinispan.impl.embedded.DefaultCacheEmbeddedConfigProvi
import org.keycloak.spi.infinispan.impl.remote.DefaultCacheRemoteConfigProviderFactory;
import org.keycloak.util.JsonSerialization;

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.quarkus.test.junit.main.Launch;
import org.junit.jupiter.api.Test;

import static org.keycloak.infinispan.compatibility.CachingEmbeddedMetadataProvider.majorMinorOf;
import static org.keycloak.it.cli.dist.Util.createTempFile;
import static org.keycloak.quarkus.runtime.configuration.compatibility.DatabaseCompatibilityMetadataProvider.UNSUPPORTED_CHANGE_SET_HASH_KEY;
import static org.keycloak.quarkus.runtime.configuration.compatibility.DatabaseCompatibilityMetadataProvider.UNSUPPORTED_CHANGES_HASH_KEY;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
@@ -313,9 +307,9 @@ public class UpdateCommandDistTest {
        info = JsonSerialization.mapper.readValue(jsonFile, UpdateCompatibilityCheck.METADATA_TYPE_REF);
        Map<String, String> expectedDbMeta = new HashMap<>();
        expectedDbMeta.put(DatabaseOptions.DB.getKey(), DatabaseOptions.DB.getDefaultValue().get());
        String expectedHash = dbMeta.get(UNSUPPORTED_CHANGE_SET_HASH_KEY);
        String expectedHash = dbMeta.get(UNSUPPORTED_CHANGES_HASH_KEY);
        if (expectedHash != null) {
            expectedDbMeta.put(UNSUPPORTED_CHANGE_SET_HASH_KEY, expectedHash);
            expectedDbMeta.put(UNSUPPORTED_CHANGES_HASH_KEY, expectedHash);
        }
        expectedMeta.put(DatabaseCompatibilityMetadataProvider.ID, expectedDbMeta);
@@ -327,21 +321,13 @@
        result.assertExitCode(CompatibilityResult.ExitCode.ROLLING.value());
    }

    private Map<String, Map<String, String>> defaultMeta(KeycloakDistribution distribution) throws IOException {
    private Map<String, Map<String, String>> defaultMeta(KeycloakDistribution distribution) {
        Map<String, String> keycloak = new HashMap<>(1);
        keycloak.put("version", Version.VERSION);

        Map<String, String> dbMeta = new HashMap<>();
        dbMeta.put(DatabaseOptions.DB.getKey(), DatabaseOptions.DB.getDefaultValue().get());
        try (InputStream inputStream = UpdateCommandDistTest.class.getResourceAsStream("/META-INF/rolling-upgrades-unsupported-changes.json")) {
            if (inputStream != null) {
                ObjectMapper objectMapper = new ObjectMapper();
                Set<DatabaseCompatibilityMetadataProvider.ChangeSet> changeSets = objectMapper.readValue(inputStream, new TypeReference<>() {});
                String changeSetJson = objectMapper.writeValueAsString(changeSets);
                String hash = HashUtils.sha256UrlEncodedHash(changeSetJson, StandardCharsets.UTF_8);
                dbMeta.put(UNSUPPORTED_CHANGE_SET_HASH_KEY, hash);
            }
        }
        DatabaseCompatibilityMetadataProvider.addUnsupportedDatabaseChanges(dbMeta);

        Map<String, Map<String, String>> m = new HashMap<>();
        m.put(KeycloakCompatibilityMetadataProvider.ID, keycloak);