diff --git a/core/pom.xml b/core/pom.xml index 124b2ca12..6e4f02cc6 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -69,6 +69,11 @@ http-client test + + + org.jboss.intersmash + intersmash-kubernetes-client + diff --git a/core/src/main/java/org/jboss/intersmash/IntersmashConfig.java b/core/src/main/java/org/jboss/intersmash/IntersmashConfig.java index bead26a12..39dee9ccc 100644 --- a/core/src/main/java/org/jboss/intersmash/IntersmashConfig.java +++ b/core/src/main/java/org/jboss/intersmash/IntersmashConfig.java @@ -33,10 +33,15 @@ public class IntersmashConfig { private static final String DEPLOYMENTS_REPOSITORY_REF = "intersmash.deployments.repository.ref"; // Default Catalog for Operators - private static final String DEFAULT_OPERATOR_CATALOG_SOURCE_NAMESPACE = "openshift-marketplace"; + private static final String KUBERNETES_OPERATOR_CATALOG_SOURCE_NAMESPACE = "olm"; + private static final String OPENSHIFT_OPERATOR_CATALOG_SOURCE_NAMESPACE = "openshift-marketplace"; + private static final String DEFAULT_OPERATOR_CATALOG_SOURCE_NAMESPACE = KUBERNETES_OPERATOR_CATALOG_SOURCE_NAMESPACE; private static final String REDHAT_OPERATOR_CATALOG_SOURCE_NAME = "redhat-operators"; private static final String COMMUNITY_OPERATOR_CATALOG_SOURCE_NAME = "community-operators"; + private static final String OPERATORHUB_IO_OPERATOR_CATALOG_SOURCE_NAME = "operatorhubio-catalog"; private static final String DEFAULT_OPERATOR_CATALOG_SOURCE_NAME = COMMUNITY_OPERATOR_CATALOG_SOURCE_NAME; + private static final String OLM_OPERATOR_CATALOG_SOURCE_NAME = "intersmash.olm.operators.catalog_source"; + private static final String OLM_OPERATOR_CATALOG_SOURCE_NAMESPACE = "intersmash.olm.operators.namespace"; // Custom Catalogs for operators private static final String INFINISPAN_OPERATOR_CATALOG_SOURCE_NAME = "intersmash.infinispan.operators.catalog_source"; @@ -133,11 +138,11 @@ public static String[] getKnownCatalogSources() { } public static String defaultOperatorCatalogSourceName() { - return DEFAULT_OPERATOR_CATALOG_SOURCE_NAME; + return XTFConfig.get(OLM_OPERATOR_CATALOG_SOURCE_NAME, DEFAULT_OPERATOR_CATALOG_SOURCE_NAME); } public static String defaultOperatorCatalogSourceNamespace() { - return DEFAULT_OPERATOR_CATALOG_SOURCE_NAMESPACE; + return XTFConfig.get(OLM_OPERATOR_CATALOG_SOURCE_NAMESPACE, DEFAULT_OPERATOR_CATALOG_SOURCE_NAMESPACE); } public static String infinispanOperatorCatalogSource() { @@ -221,7 +226,7 @@ public static String activeMQOperatorPackageManifest() { } public static String hyperfoilOperatorCatalogSource() { - return XTFConfig.get(HYPERFOIL_OPERATOR_CATALOG_SOURCE_NAME, COMMUNITY_OPERATOR_CATALOG_SOURCE_NAME); + return XTFConfig.get(HYPERFOIL_OPERATOR_CATALOG_SOURCE_NAME, defaultOperatorCatalogSourceName()); } public static String hyperfoilOperatorIndexImage() { diff --git a/core/src/main/java/org/jboss/intersmash/application/openshift/HasConfigMaps.java b/core/src/main/java/org/jboss/intersmash/application/k8s/HasConfigMaps.java similarity index 96% rename from core/src/main/java/org/jboss/intersmash/application/openshift/HasConfigMaps.java rename to core/src/main/java/org/jboss/intersmash/application/k8s/HasConfigMaps.java index 1c24f72ee..e5f300301 100644 --- a/core/src/main/java/org/jboss/intersmash/application/openshift/HasConfigMaps.java +++ b/core/src/main/java/org/jboss/intersmash/application/k8s/HasConfigMaps.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.jboss.intersmash.application.openshift; +package org.jboss.intersmash.application.k8s; import java.util.List; diff --git a/core/src/main/java/org/jboss/intersmash/provision/openshift/HasPods.java b/core/src/main/java/org/jboss/intersmash/application/k8s/HasPods.java similarity index 83% rename from core/src/main/java/org/jboss/intersmash/provision/openshift/HasPods.java rename to core/src/main/java/org/jboss/intersmash/application/k8s/HasPods.java index ad22974cb..fc7e80184 100644 --- a/core/src/main/java/org/jboss/intersmash/provision/openshift/HasPods.java +++ b/core/src/main/java/org/jboss/intersmash/application/k8s/HasPods.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jboss.intersmash.provision.openshift; +package org.jboss.intersmash.application.k8s; import java.util.List; @@ -25,8 +25,8 @@ public interface HasPods { /** - * Return pods of the application. - * @return a list of related {@link Pod} instances + * Get the provisioned application service related Pods + * @return A list of related {@link Pod} instances */ List getPods(); } diff --git a/core/src/main/java/org/jboss/intersmash/application/openshift/HasSecrets.java b/core/src/main/java/org/jboss/intersmash/application/k8s/HasSecrets.java similarity index 96% rename from core/src/main/java/org/jboss/intersmash/application/openshift/HasSecrets.java rename to core/src/main/java/org/jboss/intersmash/application/k8s/HasSecrets.java index c55ce808f..8e40fb3ca 100644 --- a/core/src/main/java/org/jboss/intersmash/application/openshift/HasSecrets.java +++ b/core/src/main/java/org/jboss/intersmash/application/k8s/HasSecrets.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jboss.intersmash.application.openshift; +package org.jboss.intersmash.application.k8s; import java.util.List; diff --git a/core/src/main/java/org/jboss/intersmash/application/openshift/OperatorApplication.java b/core/src/main/java/org/jboss/intersmash/application/operator/OperatorApplication.java similarity index 79% rename from core/src/main/java/org/jboss/intersmash/application/openshift/OperatorApplication.java rename to core/src/main/java/org/jboss/intersmash/application/operator/OperatorApplication.java index 37501bd3c..c1fc9dd72 100644 --- a/core/src/main/java/org/jboss/intersmash/application/openshift/OperatorApplication.java +++ b/core/src/main/java/org/jboss/intersmash/application/operator/OperatorApplication.java @@ -13,11 +13,15 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jboss.intersmash.application.openshift; +package org.jboss.intersmash.application.operator; import java.util.Collections; import java.util.List; +import org.jboss.intersmash.application.Application; +import org.jboss.intersmash.application.k8s.HasConfigMaps; +import org.jboss.intersmash.application.k8s.HasSecrets; + import io.fabric8.kubernetes.api.model.ConfigMap; import io.fabric8.kubernetes.api.model.Secret; @@ -25,7 +29,7 @@ * This interface is not supposed to be implemented by user Applications. See the "Mapping of implemented provisioners" * section of Intersmash README.md file for the up-to-date list of supported end users Applications. 
*/ -public interface OperatorApplication extends OpenShiftApplication, HasSecrets, HasConfigMaps { +public interface OperatorApplication extends Application, HasSecrets, HasConfigMaps { @Override default List getSecrets() { diff --git a/core/src/main/java/org/jboss/intersmash/junit5/IntersmashExecutionCondition.java b/core/src/main/java/org/jboss/intersmash/junit5/IntersmashExecutionCondition.java index 1b50061e6..646dd5bab 100644 --- a/core/src/main/java/org/jboss/intersmash/junit5/IntersmashExecutionCondition.java +++ b/core/src/main/java/org/jboss/intersmash/junit5/IntersmashExecutionCondition.java @@ -23,7 +23,7 @@ import org.jboss.intersmash.IntersmashConfig; import org.jboss.intersmash.annotations.Intersmash; import org.jboss.intersmash.annotations.Service; -import org.jboss.intersmash.application.openshift.OperatorApplication; +import org.jboss.intersmash.application.operator.OperatorApplication; import org.junit.jupiter.api.extension.ConditionEvaluationResult; import org.junit.jupiter.api.extension.ExecutionCondition; import org.junit.jupiter.api.extension.ExtensionContext; diff --git a/core/src/main/java/org/jboss/intersmash/junit5/IntersmashExtension.java b/core/src/main/java/org/jboss/intersmash/junit5/IntersmashExtension.java index d7bafcb4e..a0db31356 100644 --- a/core/src/main/java/org/jboss/intersmash/junit5/IntersmashExtension.java +++ b/core/src/main/java/org/jboss/intersmash/junit5/IntersmashExtension.java @@ -15,6 +15,7 @@ */ package org.jboss.intersmash.junit5; +import java.io.IOException; import java.lang.reflect.Field; import java.lang.reflect.InvocationTargetException; import java.net.URL; @@ -29,9 +30,11 @@ import org.jboss.intersmash.annotations.ServiceProvisioner; import org.jboss.intersmash.annotations.ServiceUrl; import org.jboss.intersmash.application.Application; +import org.jboss.intersmash.k8s.KubernetesConfig; +import org.jboss.intersmash.k8s.client.Kuberneteses; import org.jboss.intersmash.provision.Provisioner; import org.jboss.intersmash.provision.ProvisionerManager; -import org.jboss.intersmash.provision.openshift.operator.resources.OperatorGroup; +import org.jboss.intersmash.provision.olm.OperatorGroup; import org.junit.jupiter.api.extension.AfterAllCallback; import org.junit.jupiter.api.extension.BeforeAllCallback; import org.junit.jupiter.api.extension.ExtensionContext; @@ -42,6 +45,7 @@ import org.opentest4j.AssertionFailedError; import org.opentest4j.TestAbortedException; +import cz.xtf.core.config.OpenShiftConfig; import cz.xtf.core.openshift.OpenShifts; import lombok.extern.slf4j.Slf4j; @@ -69,15 +73,20 @@ public void beforeAll(ExtensionContext extensionContext) throws Exception { // we don't want to touch anything if the deployment phase is skipped if (!IntersmashConfig.skipDeploy()) { - if (IntersmashExtensionHelper.isIntersmashTargetingOpenShift(extensionContext)) { - if (!IntersmashConfig.isOcp3x(OpenShifts.admin())) { - operatorCleanup(); - log.debug("Deploy operatorgroup [{}] to enable operators subscription into tested namespace", - OperatorGroup.SINGLE_NAMESPACE.getMetadata().getName()); - OpenShifts.adminBinary().execute("apply", "-f", - OperatorGroup.SINGLE_NAMESPACE.save().getAbsolutePath()); + if (IntersmashExtensionHelper.isIntersmashTargetingOperator(extensionContext)) { + final boolean cleanupKubernetes = IntersmashExtensionHelper + .isIntersmashTargetingKubernetes(extensionContext) && !IntersmashConfig.isOcp3x(OpenShifts.admin()), + cleanupOpenShift = IntersmashExtensionHelper.isIntersmashTargetingOpenShift(extensionContext) + && 
!IntersmashConfig.isOcp3x(OpenShifts.admin()); + operatorCleanup(cleanupKubernetes, cleanupOpenShift); + deployOperatorGroup(extensionContext); + + if (IntersmashExtensionHelper.isIntersmashTargetingOpenShift(extensionContext)) { + OpenShifts.master().clean().waitFor(); + } + if (IntersmashExtensionHelper.isIntersmashTargetingKubernetes(extensionContext)) { + Kuberneteses.master().clean().waitFor(); } - OpenShifts.master().clean().waitFor(); } } @@ -145,28 +154,26 @@ public void afterAll(ExtensionContext extensionContext) { } // operator group is not bound to a specific product // no Operator support on OCP3 clusters, OLM doesn't run there - if (IntersmashExtensionHelper.isIntersmashTargetingOpenShift(extensionContext) - && !IntersmashConfig.isOcp3x(OpenShifts.admin())) { - operatorCleanup(); + if (IntersmashExtensionHelper.isIntersmashTargetingOperator(extensionContext)) { + final boolean cleanupKubernetes = IntersmashExtensionHelper.isIntersmashTargetingKubernetes(extensionContext) + && !IntersmashConfig.isOcp3x(OpenShifts.admin()), + cleanupOpenShift = IntersmashExtensionHelper.isIntersmashTargetingOpenShift(extensionContext) + && !IntersmashConfig.isOcp3x(OpenShifts.admin()); + operatorCleanup(cleanupKubernetes, cleanupOpenShift); } // let's cleanup once we're done - safetyCleanup(); + safetyCleanup(extensionContext); } } - private static void safetyCleanup() { + private static void safetyCleanup(ExtensionContext extensionContext) { log.info("Cleaning up the remaining resources on the cluster."); - OpenShifts.master().clean().waitFor(); - } - - /** - * Clean all OLM related objects. - *

- */ - public static void operatorCleanup() { - OpenShifts.adminBinary().execute("delete", "subscription", "--all"); - OpenShifts.adminBinary().execute("delete", "csvs", "--all"); - OpenShifts.adminBinary().execute("delete", "operatorgroup", "--all"); + if (IntersmashExtensionHelper.isIntersmashTargetingOpenShift(extensionContext)) { + OpenShifts.master().clean().waitFor(); + } + if (IntersmashExtensionHelper.isIntersmashTargetingKubernetes(extensionContext)) { + Kuberneteses.master().clean().waitFor(); + } } @Override @@ -223,4 +230,37 @@ private void injectServiceProvisioner(Object o, ExtensionContext extensionContex } } } + + private static void deployOperatorGroup(ExtensionContext extensionContext) throws IOException { + if (IntersmashExtensionHelper.isIntersmashTargetingKubernetes(extensionContext)) { + log.debug("Deploy operatorgroup [{}] to enable operators subscription into tested namespace", + new OperatorGroup(KubernetesConfig.namespace()).getMetadata().getName()); + OpenShifts.adminBinary().execute("apply", "-f", + new OperatorGroup(KubernetesConfig.namespace()).save().getAbsolutePath()); + } + if (IntersmashExtensionHelper.isIntersmashTargetingOpenShift(extensionContext) + && !IntersmashConfig.isOcp3x(OpenShifts.admin())) { + log.debug("Deploy operatorgroup [{}] to enable operators subscription into tested namespace", + new OperatorGroup(OpenShiftConfig.namespace()).getMetadata().getName()); + OpenShifts.adminBinary().execute("apply", "-f", + new OperatorGroup(OpenShiftConfig.namespace()).save().getAbsolutePath()); + } + } + + /** + * Clean all OLM related objects. + *

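+ * @param cleanupKubernetes when true, delete all OLM Subscription, ClusterServiceVersion and OperatorGroup resources via the Kubernetes admin binary
+ * @param cleanupOpenShift when true, delete all OLM Subscription, ClusterServiceVersion and OperatorGroup resources via the OpenShift admin binary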
+ */ + public static void operatorCleanup(final boolean cleanupKubernetes, final boolean cleanupOpenShift) { + if (cleanupKubernetes) { + Kuberneteses.adminBinary().execute("delete", "subscription", "--all"); + Kuberneteses.adminBinary().execute("delete", "csvs", "--all"); + Kuberneteses.adminBinary().execute("delete", "operatorgroup", "--all"); + } + if (cleanupOpenShift) { + OpenShifts.adminBinary().execute("delete", "subscription", "--all"); + OpenShifts.adminBinary().execute("delete", "csvs", "--all"); + OpenShifts.adminBinary().execute("delete", "operatorgroup", "--all"); + } + } } diff --git a/core/src/main/java/org/jboss/intersmash/junit5/IntersmashExtensionHelper.java b/core/src/main/java/org/jboss/intersmash/junit5/IntersmashExtensionHelper.java index ece8683f7..50fae781a 100644 --- a/core/src/main/java/org/jboss/intersmash/junit5/IntersmashExtensionHelper.java +++ b/core/src/main/java/org/jboss/intersmash/junit5/IntersmashExtensionHelper.java @@ -22,6 +22,7 @@ import org.jboss.intersmash.annotations.Intersmash; import org.jboss.intersmash.application.k8s.KubernetesApplication; import org.jboss.intersmash.application.openshift.OpenShiftApplication; +import org.jboss.intersmash.application.operator.OperatorApplication; import org.jboss.intersmash.provision.Provisioner; import org.junit.jupiter.api.extension.ExtensionContext; @@ -50,7 +51,7 @@ public static Intersmash getIntersmash(ExtensionContext extensionContext) { return result; } else { Intersmash[] intersmashes = extensionContext.getRequiredTestClass().getAnnotationsByType(Intersmash.class); - Intersmash intersmash; + if (intersmashes.length > 0) { store.put(INTERSMASH, intersmashes[0]); return (Intersmash) store.get(INTERSMASH); @@ -59,6 +60,11 @@ public static Intersmash getIntersmash(ExtensionContext extensionContext) { } } + public static Boolean isIntersmashTargetingOperator(ExtensionContext extensionContext) { + return Arrays.stream(getIntersmash(extensionContext).value()) + .anyMatch(app -> OperatorApplication.class.isAssignableFrom(app.value())); + } + public static Boolean isIntersmashTargetingOpenShift(ExtensionContext extensionContext) { return Arrays.stream(getIntersmash(extensionContext).value()) .anyMatch(app -> OpenShiftApplication.class.isAssignableFrom(app.value())); diff --git a/core/src/main/java/org/jboss/intersmash/provision/k8s/KubernetesProvisioner.java b/core/src/main/java/org/jboss/intersmash/provision/k8s/KubernetesProvisioner.java index a67acfdb5..98572f97c 100644 --- a/core/src/main/java/org/jboss/intersmash/provision/k8s/KubernetesProvisioner.java +++ b/core/src/main/java/org/jboss/intersmash/provision/k8s/KubernetesProvisioner.java @@ -17,31 +17,34 @@ import java.net.MalformedURLException; import java.net.URL; +import java.util.List; -import org.jboss.intersmash.application.k8s.KubernetesApplication; -import org.jboss.intersmash.application.openshift.HasConfigMaps; -import org.jboss.intersmash.application.openshift.HasSecrets; +import org.jboss.intersmash.application.Application; +import org.jboss.intersmash.application.k8s.HasConfigMaps; +import org.jboss.intersmash.application.k8s.HasSecrets; +import org.jboss.intersmash.k8s.client.Kubernetes; +import org.jboss.intersmash.k8s.client.Kuberneteses; import org.jboss.intersmash.provision.Provisioner; -import org.jboss.intersmash.provision.openshift.HasPods; +import org.jboss.intersmash.application.k8s.HasPods; import org.jboss.intersmash.provision.openshift.Scalable; -import io.fabric8.kubernetes.client.Config; -import 
io.fabric8.kubernetes.client.ConfigBuilder; -import io.fabric8.kubernetes.client.DefaultKubernetesClient; -import io.fabric8.kubernetes.client.KubernetesClient; +import io.fabric8.kubernetes.api.model.Pod; +import io.fabric8.kubernetes.client.NamespacedKubernetesClient; +import io.fabric8.kubernetes.client.NamespacedKubernetesClientAdapter; /** * Provisioner that is supposed to deploy an application on Kubernetes. */ -public interface KubernetesProvisioner extends Provisioner, Scalable, HasPods { +public interface KubernetesProvisioner extends Provisioner, Scalable, HasPods { - // TODO - check for aq new class of statics like XTF OpenShifts? - KubernetesClient kubernetes = newKubernetesClient(); + Kubernetes kubernetes = Kuberneteses.master(); - static KubernetesClient newKubernetesClient() { - Config config = new ConfigBuilder() - .build(); - return new DefaultKubernetesClient(config); + default NamespacedKubernetesClientAdapter client() { + return kubernetes; + } + + default String execute(String... args) { + return Kuberneteses.adminBinary().execute(args); } @Override @@ -84,4 +87,10 @@ default URL getURL() { String.format("Failed to get an URL for the \"%s\" route", this.getClass().getSimpleName()), ex); } } + + @Override + default List getPods() { + return KubernetesProvisioner.kubernetes.pods().inNamespace(KubernetesProvisioner.kubernetes.getNamespace()).list() + .getItems(); + } } diff --git a/core/src/main/java/org/jboss/intersmash/provision/openshift/operator/resources/CatalogSource.java b/core/src/main/java/org/jboss/intersmash/provision/olm/CatalogSource.java similarity index 94% rename from core/src/main/java/org/jboss/intersmash/provision/openshift/operator/resources/CatalogSource.java rename to core/src/main/java/org/jboss/intersmash/provision/olm/CatalogSource.java index e375d4b94..6d1a466c6 100644 --- a/core/src/main/java/org/jboss/intersmash/provision/openshift/operator/resources/CatalogSource.java +++ b/core/src/main/java/org/jboss/intersmash/provision/olm/CatalogSource.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jboss.intersmash.provision.openshift.operator.resources; +package org.jboss.intersmash.provision.olm; import io.fabric8.openshift.api.model.operatorhub.v1alpha1.CatalogSourceBuilder; @@ -21,7 +21,7 @@ * Wrapper for creating and using {@link io.fabric8.openshift.api.model.operatorhub.v1alpha1.CatalogSource} */ public class CatalogSource extends io.fabric8.openshift.api.model.operatorhub.v1alpha1.CatalogSource - implements OpenShiftResource { + implements SerializationCapableResource { public CatalogSource() { super(); diff --git a/core/src/main/java/org/jboss/intersmash/provision/openshift/operator/resources/OperatorGroup.java b/core/src/main/java/org/jboss/intersmash/provision/olm/OperatorGroup.java similarity index 90% rename from core/src/main/java/org/jboss/intersmash/provision/openshift/operator/resources/OperatorGroup.java rename to core/src/main/java/org/jboss/intersmash/provision/olm/OperatorGroup.java index 0101a65cf..e737cf2db 100644 --- a/core/src/main/java/org/jboss/intersmash/provision/openshift/operator/resources/OperatorGroup.java +++ b/core/src/main/java/org/jboss/intersmash/provision/olm/OperatorGroup.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.jboss.intersmash.provision.openshift.operator.resources; +package org.jboss.intersmash.provision.olm; import java.util.ArrayList; import java.util.HashMap; @@ -21,7 +21,6 @@ import java.util.Map; import java.util.Objects; -import cz.xtf.core.config.OpenShiftConfig; import io.fabric8.kubernetes.client.CustomResource; import io.fabric8.kubernetes.model.annotation.Group; import io.fabric8.kubernetes.model.annotation.Version; @@ -34,8 +33,8 @@ */ @Group("operators.coreos.com") @Version("v1") -public class OperatorGroup extends CustomResource implements OpenShiftResource { - public static final OperatorGroup SINGLE_NAMESPACE = new OperatorGroup(OpenShiftConfig.namespace()); +public class OperatorGroup extends CustomResource implements SerializationCapableResource { + private Map> spec = new HashMap<>(); public OperatorGroup() { diff --git a/core/src/main/java/org/jboss/intersmash/provision/openshift/operator/resources/OpenShiftResource.java b/core/src/main/java/org/jboss/intersmash/provision/olm/SerializationCapableResource.java similarity index 86% rename from core/src/main/java/org/jboss/intersmash/provision/openshift/operator/resources/OpenShiftResource.java rename to core/src/main/java/org/jboss/intersmash/provision/olm/SerializationCapableResource.java index b0a671f58..939d11a6d 100644 --- a/core/src/main/java/org/jboss/intersmash/provision/openshift/operator/resources/OpenShiftResource.java +++ b/core/src/main/java/org/jboss/intersmash/provision/olm/SerializationCapableResource.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jboss.intersmash.provision.openshift.operator.resources; +package org.jboss.intersmash.provision.olm; import java.io.File; import java.io.IOException; @@ -28,7 +28,7 @@ /** * Interface of common methods for OpenShift resources. */ -public interface OpenShiftResource> { +public interface SerializationCapableResource> { ObjectMapper mapper = new ObjectMapper(new YAMLFactory()); @@ -70,7 +70,7 @@ static File save(File file, O data) throws IOException { * @param file a file with object definition in yaml format * @return OpenShiftResource setup with values loaded from external source * @throws IOException if something goes wrong when de-serializing the resource from a file. - * @see OpenShiftResource#load(OpenShiftResource) which is used to configure the current object with the values from a loaded one. + * @see SerializationCapableResource#load(SerializationCapableResource) which is used to configure the current object with the values from a loaded one. */ default T load(File file) throws IOException { return load((T) mapper.readValue(file, this.getClass())); @@ -90,7 +90,7 @@ static CR load(File file, Class clazz, CR target * @param inputStream a input stream with object definition in yaml format * @return OpenShiftResource setup with values loaded from external source * @throws IOException if something goes wrong when de-serializing the resource from a file. - * @see OpenShiftResource#load(OpenShiftResource) which is used to configure the current object with the values from a loaded one. + * @see SerializationCapableResource#load(SerializationCapableResource) which is used to configure the current object with the values from a loaded one. 
*/ default T load(InputStream inputStream) throws IOException { return load((T) mapper.readValue(inputStream, this.getClass())); @@ -102,7 +102,7 @@ default T load(InputStream inputStream) throws IOException { * @param url a URL with object definition in yaml format * @return OpenShiftResource setup with values loaded from external source * @throws IOException if something goes wrong when de-serializing the resource from a file. - * @see OpenShiftResource#load(OpenShiftResource) which is used to configure the current object with the values from a loaded one. + * @see SerializationCapableResource#load(SerializationCapableResource) which is used to configure the current object with the values from a loaded one. */ default T load(URL url) throws IOException { return load((T) mapper.readValue(url, this.getClass())); diff --git a/core/src/main/java/org/jboss/intersmash/provision/openshift/operator/resources/Subscription.java b/core/src/main/java/org/jboss/intersmash/provision/olm/Subscription.java similarity index 81% rename from core/src/main/java/org/jboss/intersmash/provision/openshift/operator/resources/Subscription.java rename to core/src/main/java/org/jboss/intersmash/provision/olm/Subscription.java index db76c3cc6..cdd097f48 100644 --- a/core/src/main/java/org/jboss/intersmash/provision/openshift/operator/resources/Subscription.java +++ b/core/src/main/java/org/jboss/intersmash/provision/olm/Subscription.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jboss.intersmash.provision.openshift.operator.resources; +package org.jboss.intersmash.provision.olm; import java.util.Map; import java.util.stream.Collectors; @@ -35,16 +35,15 @@ *

*/ public class Subscription extends io.fabric8.openshift.api.model.operatorhub.v1alpha1.Subscription - implements OpenShiftResource { + implements SerializationCapableResource { public Subscription() { super(); } private SubscriptionFluent.SpecNested getConfiguredSubscriptionBuilder( - String sourceNamespace, String targetNamespace, - String source, String name, String channel, - String installPlanApproval) { + final String sourceNamespace, final String targetNamespace, final String source, final String name, + final String channel, final String installPlanApproval) { return new SubscriptionBuilder() .withNewMetadata() .withName(name) @@ -58,9 +57,9 @@ private SubscriptionFluent.SpecNested .withInstallPlanApproval(Strings.isNullOrEmpty(installPlanApproval) ? "Automatic" : installPlanApproval); } - public Subscription(String sourceNamespace, String targetNamespace, String source, String name, String channel, - String installPlanApproval, - Map envVariables) { + public Subscription(final String sourceNamespace, final String targetNamespace, final String source, + final String name, final String channel, final String installPlanApproval, + final Map envVariables) { this(); io.fabric8.openshift.api.model.operatorhub.v1alpha1.Subscription loaded = getConfiguredSubscriptionBuilder( sourceNamespace, targetNamespace, source, name, channel, installPlanApproval) @@ -76,8 +75,8 @@ public Subscription(String sourceNamespace, String targetNamespace, String sourc this.setSpec(loaded.getSpec()); } - public Subscription(String sourceNamespace, String targetNamespace, String source, String name, String channel, - String installPlanApproval) { + public Subscription(final String sourceNamespace, final String targetNamespace, final String source, + final String name, final String channel, final String installPlanApproval) { this(); io.fabric8.openshift.api.model.operatorhub.v1alpha1.Subscription loaded = getConfiguredSubscriptionBuilder( sourceNamespace, targetNamespace, source, name, channel, installPlanApproval) diff --git a/core/src/main/java/org/jboss/intersmash/provision/openshift/OpenShiftProvisioner.java b/core/src/main/java/org/jboss/intersmash/provision/openshift/OpenShiftProvisioner.java index 870d7591b..65a992513 100644 --- a/core/src/main/java/org/jboss/intersmash/provision/openshift/OpenShiftProvisioner.java +++ b/core/src/main/java/org/jboss/intersmash/provision/openshift/OpenShiftProvisioner.java @@ -17,24 +17,42 @@ import java.net.MalformedURLException; import java.net.URL; +import java.util.List; -import org.jboss.intersmash.application.openshift.HasConfigMaps; -import org.jboss.intersmash.application.openshift.HasSecrets; -import org.jboss.intersmash.application.openshift.OpenShiftApplication; +import org.jboss.intersmash.application.Application; +import org.jboss.intersmash.application.k8s.HasConfigMaps; +import org.jboss.intersmash.application.k8s.HasPods; +import org.jboss.intersmash.application.k8s.HasSecrets; import org.jboss.intersmash.provision.Provisioner; +import cz.xtf.core.config.OpenShiftConfig; import cz.xtf.core.openshift.OpenShift; import cz.xtf.core.openshift.OpenShifts; +import io.fabric8.kubernetes.api.model.Pod; +import io.fabric8.kubernetes.api.model.apiextensions.v1.CustomResourceDefinition; +import io.fabric8.kubernetes.api.model.apiextensions.v1.CustomResourceDefinitionList; +import io.fabric8.kubernetes.client.NamespacedKubernetesClientAdapter; +import io.fabric8.kubernetes.client.dsl.NonNamespaceOperation; +import io.fabric8.kubernetes.client.dsl.Resource; +import 
io.fabric8.openshift.client.NamespacedOpenShiftClient; /** * Provisioner that is supposed to deploy an application on OpenShift. */ -public interface OpenShiftProvisioner extends Provisioner, Scalable, HasPods { +public interface OpenShiftProvisioner extends Provisioner, Scalable, HasPods { String SCRIPT_DEBUG = "SCRIPT_DEBUG"; String APP_LABEL_KEY = "intersmash.app"; OpenShift openShift = OpenShifts.master(); + default NamespacedKubernetesClientAdapter client() { + return openShift; + } + + default String execute(String... args) { + return OpenShifts.adminBinary().execute(args); + } + @Override default void preDeploy() { // create secrets @@ -59,10 +77,6 @@ default void postUndeploy() { } } - default OpenShift getOpenShift() { - return openShift; - } - default String getUrl(String routeName, boolean secure) { String protocol = secure ? "https" : "http"; return protocol + "://" + openShift.generateHostname(routeName); @@ -77,4 +91,13 @@ default URL getURL() { String.format("Failed to get an URL for the \"%s\" route", this.getClass().getSimpleName()), ex); } } + + @Override + default List getPods() { + return OpenShiftProvisioner.openShift.inNamespace(OpenShiftConfig.namespace()).pods().list().getItems(); + } + + default NonNamespaceOperation> customResourceDefinitionsClient() { + return OpenShifts.admin().apiextensions().v1().customResourceDefinitions(); + } } diff --git a/core/src/main/java/org/jboss/intersmash/provision/openshift/operator/OperatorProvisioner.java b/core/src/main/java/org/jboss/intersmash/provision/operator/OperatorProvisioner.java similarity index 72% rename from core/src/main/java/org/jboss/intersmash/provision/openshift/operator/OperatorProvisioner.java rename to core/src/main/java/org/jboss/intersmash/provision/operator/OperatorProvisioner.java index 739ffcdd6..d9d5c7f18 100644 --- a/core/src/main/java/org/jboss/intersmash/provision/openshift/operator/OperatorProvisioner.java +++ b/core/src/main/java/org/jboss/intersmash/provision/operator/OperatorProvisioner.java @@ -13,39 +13,40 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.jboss.intersmash.provision.openshift.operator; +package org.jboss.intersmash.provision.operator; import java.io.IOException; -import java.net.URL; import java.time.Duration; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.Set; +import java.util.*; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; +import java.util.function.BooleanSupplier; import java.util.stream.Collectors; import org.assertj.core.util.Strings; import org.jboss.intersmash.IntersmashConfig; -import org.jboss.intersmash.application.openshift.OperatorApplication; -import org.jboss.intersmash.provision.openshift.OpenShiftProvisioner; -import org.jboss.intersmash.provision.openshift.operator.resources.CatalogSource; -import org.jboss.intersmash.provision.openshift.operator.resources.Subscription; +import org.jboss.intersmash.application.operator.OperatorApplication; +import org.jboss.intersmash.provision.Provisioner; +import org.jboss.intersmash.provision.olm.Subscription; +import org.jboss.intersmash.provision.openshift.Scalable; import org.slf4j.event.Level; -import cz.xtf.core.config.OpenShiftConfig; -import cz.xtf.core.openshift.OpenShift; -import cz.xtf.core.openshift.OpenShiftBinary; -import cz.xtf.core.openshift.OpenShiftWaiters; -import cz.xtf.core.openshift.OpenShifts; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; + import cz.xtf.core.waiting.SimpleWaiter; import cz.xtf.core.waiting.failfast.FailFastCheck; import dev.failsafe.Failsafe; import dev.failsafe.RetryPolicy; +import io.fabric8.kubernetes.api.model.Pod; +import io.fabric8.kubernetes.client.NamespacedKubernetesClient; +import io.fabric8.kubernetes.client.NamespacedKubernetesClientAdapter; import io.fabric8.openshift.api.model.operatorhub.lifecyclemanager.v1.PackageChannel; import io.fabric8.openshift.api.model.operatorhub.lifecyclemanager.v1.PackageManifest; import io.fabric8.openshift.api.model.operatorhub.v1alpha1.CRDDescription; +import io.fabric8.openshift.api.model.operatorhub.v1alpha1.CatalogSource; +import io.fabric8.openshift.api.model.operatorhub.v1alpha1.CatalogSourceBuilder; +import io.fabric8.openshift.api.model.operatorhub.v1alpha1.CatalogSourceList; import lombok.extern.slf4j.Slf4j; /** @@ -59,17 +60,16 @@ * oc get clusterserviceversion */ @Slf4j -public abstract class OperatorProvisioner implements OpenShiftProvisioner { +public abstract class OperatorProvisioner + implements Provisioner, Scalable { // cache the current csv and list of provided custom resource definitions static String currentCSV; final String packageManifestName; private CatalogSource catalogSource; - private final T operatorApplication; + private final A operatorApplication; private PackageManifest packageManifest; private String operatorChannel; protected FailFastCheck ffCheck = () -> false; - private OpenShift adminShift; - private OpenShiftBinary adminBinary; private Set customResourceDefinitions; private static final RetryPolicy RETRY_POLICY_LOOKUP_MATCHING_PACKAGE_MANIFEST = RetryPolicy . 
builder() @@ -79,15 +79,53 @@ public abstract class OperatorProvisioner impleme .build(); public static final String INSTALLPLAN_APPROVAL_MANUAL = "Manual"; - public OperatorProvisioner(T operatorApplication, String packageManifestName) { + public OperatorProvisioner(A operatorApplication, String packageManifestName) { this.operatorApplication = operatorApplication; this.packageManifestName = packageManifestName; } + protected abstract NamespacedKubernetesClientAdapter client(); + + public List getPods() { + return this.client().pods().inNamespace(this.client().getNamespace()).list().getItems(); + } + + protected abstract String execute(String... args); + + public PackageManifest getPackageManifest(String operatorName, String operatorNamespace) { + try { + return new ObjectMapper() + .readValue(this.execute("get", "packagemanifest", operatorName, "-n", operatorNamespace, "-o", "json"), + PackageManifest.class); + } catch (JsonProcessingException e) { + throw new IllegalStateException("Couldn't deserialize package manifest data: " + operatorName, e); + } + } + + public List getCatalogSources(final String catalogSourceNamespace) { + try { + return new ObjectMapper().readValue(this.execute("get", "catsrc", "-n", catalogSourceNamespace, "-o", "json"), + CatalogSourceList.class).getItems(); + } catch (JsonProcessingException e) { + throw new IllegalStateException("Couldn't deserialize catalog source data: " + catalogSourceNamespace, e); + } + } + + public CatalogSource getCatalogSource(final String catalogSourceNamespace, final String catalogSourceName) { + io.fabric8.openshift.api.model.operatorhub.v1alpha1.CatalogSource loaded = getCatalogSources(catalogSourceNamespace) + .stream() + .filter(cs -> cs.getMetadata().getName().equalsIgnoreCase(catalogSourceName)) + .findFirst().orElseThrow( + () -> new IllegalStateException( + "Unable to retrieve CatalogSource " + catalogSourceName)); + CatalogSource catalogSource = new CatalogSource(); + catalogSource.setMetadata(loaded.getMetadata()); + catalogSource.setSpec(loaded.getSpec()); + return catalogSource; + } + @Override public void configure() { - this.adminShift = OpenShifts.admin(); - this.adminBinary = OpenShifts.adminBinary(); // custom catalog source initialization catalogSource = initCatalogSource(); @@ -123,17 +161,19 @@ public void configure() { protected abstract String getOperatorChannel(); + protected abstract String getOperatorNamespace(); + + protected String getTargetNamespace() { + return this.client().getNamespace(); + } + /** * The CatalogSource is in the "openshift-marketplace" namespace by default; * When a custom operator image must be used, then a custom CatalogSource will be created in the current namespace; * @return namespace where the custom CatalogSource is located */ - private String getCatalogSourceNamespace() { - String namespace = IntersmashConfig.defaultOperatorCatalogSourceNamespace(); // default namespace for CatalogSources - if (!Strings.isNullOrEmpty(getOperatorIndexImage())) { - namespace = OpenShiftConfig.namespace(); - } - return namespace; + protected String getCatalogSourceNamespace() { + return IntersmashConfig.defaultOperatorCatalogSourceNamespace(); // default namespace for CatalogSources } /** @@ -185,20 +225,28 @@ private CatalogSource initCatalogSource() { catalogSourceName = operatorCatalogSource; } // create CatalogSource pointing to our custom IndexImage - catalogSource = new CatalogSource( - // a composite name is needed in order to avoid conflicts in case of multiple custom CatalogSources - 
catalogSourceName, - operatorCatalogSourceNamespace, - "grpc", - operatorIndexImage, - catalogSourceName, - "jboss-tests@redhat.com"); + catalogSource = new CatalogSourceBuilder() + .withNewMetadata() + .withName(catalogSourceName) + .withNamespace(operatorCatalogSourceNamespace) + .endMetadata() + .withNewSpec() + .withSourceType("grpc") + .withImage(operatorIndexImage) + .withDisplayName(catalogSourceName) + .withPublisher("intersmash@intersmash.org") + .endSpec() + .build(); try { - adminBinary.execute("apply", "-f", catalogSource.save().getAbsolutePath()); + this.execute("apply", "-f", + new org.jboss.intersmash.provision.olm.CatalogSource() + .load(catalogSource) + .save() + .getAbsolutePath()); AtomicReference catalogSourceStatus = new AtomicReference<>(); new SimpleWaiter(() -> { // oc get CatalogSource redhat-operators -n openshift-marketplace -o template --template {{.status.connectionState.lastObservedState}} - catalogSourceStatus.set(adminBinary.execute("get", "CatalogSource", catalogSource.getMetadata().getName(), + catalogSourceStatus.set(this.execute("get", "CatalogSource", catalogSource.getMetadata().getName(), "-n", operatorCatalogSourceNamespace, "-o", "template", "--template", "{{.status.connectionState.lastObservedState}}", @@ -219,34 +267,23 @@ private CatalogSource initCatalogSource() { catalogSource.getMetadata().getName()), e); } } else { - // load CatalogSource by name from OpenShift cluster - io.fabric8.openshift.api.model.operatorhub.v1alpha1.CatalogSource existing = OpenShifts - .admin(IntersmashConfig.defaultOperatorCatalogSourceNamespace()).operatorHub() - .catalogSources().list().getItems() - .stream().filter(cs -> cs.getMetadata().getName().equalsIgnoreCase(operatorCatalogSource)) - .findFirst().orElseThrow( - () -> new IllegalStateException( - "Unable to retrieve CatalogSource " + operatorCatalogSource)); - catalogSource = new CatalogSource(); - catalogSource.load(existing); + // load CatalogSource by name from cluster + catalogSource = getCatalogSource(IntersmashConfig.defaultOperatorCatalogSourceNamespace(), operatorCatalogSource); } return catalogSource; } private PackageManifest initPackageManifest() { log.debug("Listing package manifests belonging to: " + this.catalogSource.getMetadata().getName()); - List catalogSourcePackageManifests = adminShift.operatorHub().packageManifests().list().getItems() - .stream() - .filter(pm -> this.catalogSource.getMetadata().getName().equals(pm.getStatus().getCatalogSource())) - .collect(Collectors.toList()); - catalogSourcePackageManifests.stream() - .forEach(pm -> log.debug("---> " + pm.getMetadata().getName())); - return catalogSourcePackageManifests.stream() - .filter(pm -> this.packageManifestName.equals(pm.getMetadata().getName())) - .findFirst().orElseThrow( - () -> new IllegalStateException( - "Unable to retrieve PackageManifest " + this.packageManifestName + " in CatalogSource " - + this.catalogSource.getMetadata().getName())); + PackageManifest catalogSourcePackageManifest = this.getPackageManifest(this.packageManifestName, + this.getOperatorNamespace()); + if (catalogSourcePackageManifest == null) { + throw new IllegalStateException( + "Unable to retrieve PackageManifest " + this.packageManifestName + " in CatalogSource " + + this.catalogSource.getMetadata().getName()); + } + log.debug("---> " + catalogSourcePackageManifest.getMetadata().getName()); + return catalogSourcePackageManifest; } private PackageChannel initPackageChannel(String channelName) { @@ -258,7 +295,7 @@ private PackageChannel 
initPackageChannel(String channelName) { } @Override - public T getApplication() { + public A getApplication() { return operatorApplication; } @@ -311,14 +348,14 @@ public void subscribe(String installPlanApproval, Map envVariabl log.info("Subscribing the {} operator", packageManifestName); // oc get packagemanifest wildfly -o template --template {{.status.defaultChannel}} Subscription operatorSubscription = (envVariables == null || envVariables.isEmpty()) - ? new Subscription(getCatalogSourceNamespace(), OpenShiftConfig.namespace(), getOperatorCatalogSource(), + ? new Subscription(getCatalogSourceNamespace(), getTargetNamespace(), getOperatorCatalogSource(), packageManifestName, operatorChannel, installPlanApproval) - : new Subscription(getCatalogSourceNamespace(), OpenShiftConfig.namespace(), getOperatorCatalogSource(), + : new Subscription(getCatalogSourceNamespace(), getTargetNamespace(), getOperatorCatalogSource(), packageManifestName, operatorChannel, installPlanApproval, envVariables); try { - adminBinary.execute("apply", "-f", operatorSubscription.save().getAbsolutePath()); + this.execute("apply", "-f", operatorSubscription.save().getAbsolutePath()); } catch (IOException e) { throw new RuntimeException(String.format("Failed to serialize the %s subscription object into a yaml file.", operatorSubscription.getMetadata().getName()), e); @@ -330,7 +367,7 @@ public void subscribe(String installPlanApproval, Map envVariabl // wait for installPlan to be attached to the subscription new SimpleWaiter(() -> { // oc get subscription rhsso-operator -o template --template="{{.status.installplan.name}}" - installPlan.set(adminBinary.execute("get", "subscription", operatorSubscription.getMetadata().getName(), + installPlan.set(this.execute("get", "subscription", operatorSubscription.getMetadata().getName(), "-o", "template", "--template", "{{ if .status.installPlanRef.name }}{{.status.installPlanRef.name}}{{ end }}", "--ignore-not-found")); @@ -344,7 +381,7 @@ public void subscribe(String installPlanApproval, Map envVariabl .level(Level.DEBUG) .failFast(getFailFastCheck()) .waitFor(); - String outcome = adminBinary.execute("patch", "InstallPlan", installPlan.get(), + String outcome = this.execute("patch", "InstallPlan", installPlan.get(), "--type", "merge", "--patch", "{\"spec\":{\"approved\":true}}"); if (!Strings.isNullOrEmpty(outcome) && outcome.contains("patched")) { log.info("Approved InstallPlan {} for subscription {}", @@ -358,7 +395,7 @@ public void subscribe(String installPlanApproval, Map envVariabl } // oc get clusterserviceversion wildfly-operator.v1.0.0 -o template --template {{.status.phase}} new SimpleWaiter(() -> { - String clusterServicePhase = adminBinary.execute("get", "csvs", currentCSV, "-o", "template", "--template", + String clusterServicePhase = this.execute("get", "csvs", currentCSV, "-o", "template", "--template", "{{.status.phase}}", "--ignore-not-found"); // this is the one where the operator image is pulled return clusterServicePhase != null && clusterServicePhase.equals("Succeeded"); @@ -405,7 +442,7 @@ public void subscribe(String installPlanApproval, Map envVariabl protected void waitForOperatorPod() { final String metadataNameLabelLegacyName = "name"; final String metadataNameLabelName = "app.kubernetes.io/name"; - String[] operatorSpecs = adminBinary.execute( + String[] operatorSpecs = this.execute( "get", "csvs", currentCSV, @@ -421,8 +458,14 @@ protected void waitForOperatorPod() { if (operatorSpec.length != 3) { throw new IllegalStateException("Failed to get 
operator deployment spec from csvs!"); } - OpenShiftWaiters.get(openShift, getFailFastCheck()) - .areExactlyNPodsReady(Integer.valueOf(operatorSpec[0]), operatorSpec[1], operatorSpec[2]).level(Level.DEBUG) + BooleanSupplier bs = () -> getPods().stream() + .filter(p -> !com.google.common.base.Strings.isNullOrEmpty(p.getMetadata().getLabels().get(operatorSpec[1])) + && p.getMetadata().getLabels().get(operatorSpec[1]).equals(operatorSpec[2])) + .collect(Collectors.toList()).size() == Integer.valueOf(operatorSpec[0]); + String reason = "Waiting for exactly " + Integer.valueOf(operatorSpec[0]) + " pods with label \"app\"=" + + getApplication().getName() + " to be ready."; + new SimpleWaiter(bs, TimeUnit.MINUTES, 2, reason) + .level(Level.DEBUG) .waitFor(); } } @@ -436,45 +479,36 @@ protected FailFastCheck getFailFastCheck() { * Documentation: https://docs.openshift.com/container-platform/4.4/operators/olm-deleting-operators-from-cluster.html#olm-deleting-operator-from-a-cluster-using-cli_olm-deleting-operators-from-a-cluster */ public void unsubscribe() { - adminBinary.execute("delete", "subscription", packageManifestName, "--ignore-not-found"); - adminBinary.execute("delete", "csvs", currentCSV, "--ignore-not-found"); + this.execute("delete", "subscription", packageManifestName, "--ignore-not-found"); + this.execute("delete", "csvs", currentCSV, "--ignore-not-found"); for (String customResource : getCustomResourceDefinitions()) { - final String crds = adminBinary.execute("get", "crd", customResource, "--ignore-not-found"); + final String crds = this.execute("get", "crd", customResource, "--ignore-not-found"); if (crds != null && !crds.isEmpty()) { log.info("CRD: {} is still defined on the cluster", customResource); } } } - @Override - public URL getURL() { - throw new UnsupportedOperationException("To be implemented!"); - } - /** * @return true is there is an active subscription for the current operator */ - protected boolean isSubscribed() { - return !Strings.isNullOrEmpty(adminBinary.execute("get", "subscription", packageManifestName, + public boolean isSubscribed() { + return !Strings.isNullOrEmpty(this.execute("get", "subscription", packageManifestName, "-o", "template", "--template", "{{ .status.state }}", "--ignore-not-found")); } - protected static String getCurrentCSV() { + public String getCurrentCSV() { return currentCSV; } - protected OpenShiftBinary getAdminBinary() { - return adminBinary; - } - @Override public void dismiss() { // let's remove any custom catalog source if (Arrays.stream(IntersmashConfig.getKnownCatalogSources()) .noneMatch(cs -> this.catalogSource.getMetadata().getName().equals(cs))) { - adminBinary.execute("delete", "catalogsource", catalogSource.getMetadata().getName(), "--ignore-not-found"); + this.execute("delete", "catalogsource", catalogSource.getMetadata().getName(), "--ignore-not-found"); } } } diff --git a/core/src/test/java/org/jboss/intersmash/provision/openshift/operator/resources/CatalogSourceTest.java b/core/src/test/java/org/jboss/intersmash/provision/olm/CatalogSourceTest.java similarity index 95% rename from core/src/test/java/org/jboss/intersmash/provision/openshift/operator/resources/CatalogSourceTest.java rename to core/src/test/java/org/jboss/intersmash/provision/olm/CatalogSourceTest.java index c05223c3d..e3fab7752 100644 --- a/core/src/test/java/org/jboss/intersmash/provision/openshift/operator/resources/CatalogSourceTest.java +++ b/core/src/test/java/org/jboss/intersmash/provision/olm/CatalogSourceTest.java @@ -13,11 +13,12 @@ * See the 
License for the specific language governing permissions and * limitations under the License. */ -package org.jboss.intersmash.provision.openshift.operator.resources; +package org.jboss.intersmash.provision.olm; import java.io.File; import java.io.IOException; +import org.jboss.intersmash.provision.olm.CatalogSource; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; diff --git a/core/src/test/java/org/jboss/intersmash/provision/openshift/operator/resources/SubscriptionTest.java b/core/src/test/java/org/jboss/intersmash/provision/olm/SubscriptionTest.java similarity index 92% rename from core/src/test/java/org/jboss/intersmash/provision/openshift/operator/resources/SubscriptionTest.java rename to core/src/test/java/org/jboss/intersmash/provision/olm/SubscriptionTest.java index 873f82a7e..c62f7832d 100644 --- a/core/src/test/java/org/jboss/intersmash/provision/openshift/operator/resources/SubscriptionTest.java +++ b/core/src/test/java/org/jboss/intersmash/provision/olm/SubscriptionTest.java @@ -13,13 +13,14 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jboss.intersmash.provision.openshift.operator.resources; +package org.jboss.intersmash.provision.olm; import java.io.File; import java.io.IOException; import java.util.Map; -import org.jboss.intersmash.provision.openshift.operator.OperatorProvisioner; +import org.jboss.intersmash.provision.olm.Subscription; +import org.jboss.intersmash.provision.operator.OperatorProvisioner; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; diff --git a/examples/ws-bootable-jar-example/jaxws/src/test/java/org/jboss/jaxws/SoapWildflyBootableOpenShiftJarApplication.java b/examples/ws-bootable-jar-example/jaxws/src/test/java/org/jboss/jaxws/SoapWildflyBootableOpenShiftJarApplication.java index ca6f0c8b1..a5dabbbbe 100644 --- a/examples/ws-bootable-jar-example/jaxws/src/test/java/org/jboss/jaxws/SoapWildflyBootableOpenShiftJarApplication.java +++ b/examples/ws-bootable-jar-example/jaxws/src/test/java/org/jboss/jaxws/SoapWildflyBootableOpenShiftJarApplication.java @@ -24,7 +24,7 @@ import org.apache.maven.settings.building.SettingsBuildingException; import org.eclipse.aether.resolution.ArtifactResolutionException; import org.jboss.intersmash.application.openshift.BootableJarOpenShiftApplication; -import org.jboss.intersmash.application.openshift.input.BinarySource; +import org.jboss.intersmash.application.input.BinarySource; import org.jboss.intersmash.util.maven.ArtifactProvider; import cz.xtf.builder.builders.SecretBuilder; diff --git a/examples/wstrust/test/src/test/java/org/jboss/jaxws/STSWstrustOpenShiftJarApplication.java b/examples/wstrust/test/src/test/java/org/jboss/jaxws/STSWstrustOpenShiftJarApplication.java index fa01a745d..a4141e911 100644 --- a/examples/wstrust/test/src/test/java/org/jboss/jaxws/STSWstrustOpenShiftJarApplication.java +++ b/examples/wstrust/test/src/test/java/org/jboss/jaxws/STSWstrustOpenShiftJarApplication.java @@ -24,7 +24,7 @@ import org.apache.maven.settings.building.SettingsBuildingException; import org.eclipse.aether.resolution.ArtifactResolutionException; import org.jboss.intersmash.application.openshift.BootableJarOpenShiftApplication; -import org.jboss.intersmash.application.openshift.input.BinarySource; +import org.jboss.intersmash.application.input.BinarySource; import org.jboss.intersmash.util.maven.ArtifactProvider; import cz.xtf.builder.builders.SecretBuilder; diff --git 
a/examples/wstrust/test/src/test/java/org/jboss/jaxws/ServiceWstrustOpenShiftJarApplication.java b/examples/wstrust/test/src/test/java/org/jboss/jaxws/ServiceWstrustOpenShiftJarApplication.java index 79b986c64..66b90c9e0 100644 --- a/examples/wstrust/test/src/test/java/org/jboss/jaxws/ServiceWstrustOpenShiftJarApplication.java +++ b/examples/wstrust/test/src/test/java/org/jboss/jaxws/ServiceWstrustOpenShiftJarApplication.java @@ -24,7 +24,7 @@ import org.apache.maven.settings.building.SettingsBuildingException; import org.eclipse.aether.resolution.ArtifactResolutionException; import org.jboss.intersmash.application.openshift.BootableJarOpenShiftApplication; -import org.jboss.intersmash.application.openshift.input.BinarySource; +import org.jboss.intersmash.application.input.BinarySource; import org.jboss.intersmash.util.maven.ArtifactProvider; import cz.xtf.builder.builders.SecretBuilder; diff --git a/kubernetes-client/pom.xml b/kubernetes-client/pom.xml new file mode 100644 index 000000000..bbd2acd2b --- /dev/null +++ b/kubernetes-client/pom.xml @@ -0,0 +1,81 @@ + + + 4.0.0 + + org.jboss.intersmash + intersmash-parent + 0.0.3-SNAPSHOT + ../pom.xml + + intersmash-kubernetes-client + + Intersmash Kubernetes client + + + ${project.parent.basedir}/ide-config + + + + + io.fabric8 + kubernetes-client + + + org.apache.commons + commons-lang3 + + + com.google.code.gson + gson + + + + org.slf4j + slf4j-api + + + ch.qos.logback + logback-classic + + + org.projectlombok + lombok + + + + org.slf4j + jcl-over-slf4j + + + + cz.xtf + core + + + + org.junit.platform + junit-platform-launcher + + + org.junit.jupiter + junit-jupiter-api + + + + uk.org.webcompere + system-stubs-jupiter + test + + + + org.assertj + assertj-core + test + + + + + + \ No newline at end of file diff --git a/kubernetes-client/src/main/java/org/jboss/intersmash/k8s/KubernetesConfig.java b/kubernetes-client/src/main/java/org/jboss/intersmash/k8s/KubernetesConfig.java new file mode 100644 index 000000000..0ed4c2d4d --- /dev/null +++ b/kubernetes-client/src/main/java/org/jboss/intersmash/k8s/KubernetesConfig.java @@ -0,0 +1,163 @@ +package org.jboss.intersmash.k8s; + +import java.nio.file.Paths; + +import cz.xtf.core.config.XTFConfig; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public final class KubernetesConfig { + public static final String KUBERNETES_URL = "intersmash.kubernetes.url"; + public static final String KUBERNETES_HOSTNAME = "intersmash.kubernetes.hostname"; + public static final String KUBERNETES_TOKEN = "intersmash.kubernetes.token"; + public static final String KUBERNETES_VERSION = "intersmash.kubernetes.version"; + public static final String KUBERNETES_NAMESPACE = "intersmash.kubernetes.namespace"; + public static final String KUBERNETES_BINARY_PATH = "intersmash.kubernetes.binary.path"; + public static final String KUBERNETES_BINARY_CACHE_ENABLED = "intersmash.kubernetes.binary.cache.enabled"; + public static final String KUBERNETES_BINARY_CACHE_PATH = "intersmash.kubernetes.binary.cache.path"; + public static final String KUBERNETES_BINARY_CACHE_DEFAULT_FOLDER = "kubectl-cache"; + public static final String KUBERNETES_ADMIN_USERNAME = "intersmash.kubernetes.admin.username"; + public static final String KUBERNETES_ADMIN_PASSWORD = "intersmash.kubernetes.admin.password"; + public static final String KUBERNETES_ADMIN_KUBECONFIG = "intersmash.kubernetes.admin.kubeconfig"; + public static final String KUBERNETES_ADMIN_TOKEN = "intersmash.kubernetes.admin.token"; + public static final String KUBERNETES_MASTER_USERNAME 
= "intersmash.kubernetes.master.username"; + public static final String KUBERNETES_MASTER_PASSWORD = "intersmash.kubernetes.master.password"; + public static final String KUBERNETES_MASTER_KUBECONFIG = "intersmash.kubernetes.master.kubeconfig"; + public static final String KUBERNETES_MASTER_TOKEN = "intersmash.kubernetes.master.token"; + public static final String KUBERNETES_ROUTE_DOMAIN = "intersmash.kubernetes.route_domain"; + public static final String KUBERNETES_PULL_SECRET = "intersmash.kubernetes.pullsecret"; + public static final String KUBERNETES_NAMESPACE_PER_TESTCASE = "intersmash.kubernetes.namespace.per.testcase"; + + /** + * Used only if intersmash.kubernetes.namespace.per.testcase=true - this property can configure its maximum length. This is useful + * in case + * where namespace is used in first part of URL of route which must have <64 chars length. + */ + public static final String KUBERNETES_NAMESPACE_NAME_LENGTH_LIMIT = "intersmash.kubernetes.namespace.per.testcase.length.limit"; + + /** + * Used only if intersmash.kubernetes.namespace.per.testcase=true - this property configures default maximum length of namespace + * name. + */ + private static final String DEFAULT_KUBERNETES_NAMESPACE_NAME_LENGTH_LIMIT = "25"; + + public static String url() { + return XTFConfig.get(KUBERNETES_URL); + } + + public static String getKubernetesHostname() { + return XTFConfig.get(KUBERNETES_HOSTNAME, "localhost"); + } + + private static final String CLEAN_KUBERNETES = "intersmash.junit.clean_namespace"; + + /** + * Used only if intersmash.kubernetes.namespace.per.testcase=true + * + * @return limit on namespace if it's set by -Dintersmash.kubernetes.namespace.per.testcase.length.limit property + */ + public static int getNamespaceLengthLimitForUniqueNamespacePerTest() { + return Integer.parseInt(XTFConfig.get(KUBERNETES_NAMESPACE_NAME_LENGTH_LIMIT, + DEFAULT_KUBERNETES_NAMESPACE_NAME_LENGTH_LIMIT)); + } + + public static boolean cleanKubernetes() { + return Boolean.valueOf(XTFConfig.get(CLEAN_KUBERNETES, "false")); + } + + /** + * @return if property xtf.openshift.namespace.per.testcase is empty or true then returns true otherwise false + */ + public static boolean useNamespacePerTestCase() { + return XTFConfig.get(KUBERNETES_NAMESPACE_PER_TESTCASE) != null + && (XTFConfig.get(KUBERNETES_NAMESPACE_PER_TESTCASE).equals("") + || XTFConfig.get(KUBERNETES_NAMESPACE_PER_TESTCASE).toLowerCase().equals("true")); + } + + /** + * @return returns token + * @deprecated Use masterToken {@link #masterToken()} + */ + @Deprecated + public static String token() { + String token = XTFConfig.get(KUBERNETES_TOKEN); + if (token == null) { + return XTFConfig.get(KUBERNETES_MASTER_TOKEN); + } + return token; + } + + public static String adminToken() { + return XTFConfig.get(KUBERNETES_ADMIN_TOKEN); + } + + public static String version() { + return XTFConfig.get(KUBERNETES_VERSION); + } + + /** + * Default namespace for currently running test. 
+ * + * @return Returns namespace as defined in intersmash.kubernetes.namespace property + */ + public static String namespace() { + return XTFConfig.get(KUBERNETES_NAMESPACE); + } + + public static String binaryPath() { + return XTFConfig.get(KUBERNETES_BINARY_PATH); + } + + public static boolean isBinaryCacheEnabled() { + return Boolean.parseBoolean(XTFConfig.get(KUBERNETES_BINARY_CACHE_ENABLED, "true")); + } + + public static String binaryCachePath() { + return XTFConfig.get(KUBERNETES_BINARY_CACHE_PATH, Paths.get(System.getProperty("java.io.tmpdir"), + KUBERNETES_BINARY_CACHE_DEFAULT_FOLDER).toAbsolutePath().normalize().toString()); + } + + public static String adminUsername() { + return XTFConfig.get(KUBERNETES_ADMIN_USERNAME); + } + + public static String adminPassword() { + return XTFConfig.get(KUBERNETES_ADMIN_PASSWORD); + } + + public static String adminKubeconfig() { + return XTFConfig.get(KUBERNETES_ADMIN_KUBECONFIG); + } + + public static String masterUsername() { + return XTFConfig.get(KUBERNETES_MASTER_USERNAME); + } + + public static String masterPassword() { + return XTFConfig.get(KUBERNETES_MASTER_PASSWORD); + } + + public static String masterKubeconfig() { + return XTFConfig.get(KUBERNETES_MASTER_KUBECONFIG); + } + + public static String pullSecret() { + return XTFConfig.get(KUBERNETES_PULL_SECRET); + } + + /** + * @return For backwards-compatibility reasons, also returns the value of intersmash.kubernetes.token if + * intersmash.kubernetes.master.token not specified + */ + public static String masterToken() { + String masterToken = XTFConfig.get(KUBERNETES_MASTER_TOKEN); + if (masterToken == null) { + return XTFConfig.get(KUBERNETES_TOKEN); + } + return masterToken; + } + + public static String routeDomain() { + return XTFConfig.get(KUBERNETES_ROUTE_DOMAIN); + } +} diff --git a/kubernetes-client/src/main/java/org/jboss/intersmash/k8s/client/Kubernetes.java b/kubernetes-client/src/main/java/org/jboss/intersmash/k8s/client/Kubernetes.java new file mode 100644 index 000000000..56e4125f4 --- /dev/null +++ b/kubernetes-client/src/main/java/org/jboss/intersmash/k8s/client/Kubernetes.java @@ -0,0 +1,403 @@ +package org.jboss.intersmash.k8s.client; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Base64; +import java.util.Collections; +import java.util.List; +import java.util.Optional; +import java.util.ServiceLoader; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; + +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.builder.EqualsBuilder; +import org.jboss.intersmash.k8s.KubernetesConfig; + +import com.google.common.collect.ArrayListMultimap; +import com.google.common.collect.Multimap; +import com.google.common.collect.Multimaps; + +import cz.xtf.core.config.WaitingConfig; +import cz.xtf.core.openshift.crd.CustomResourceDefinitionContextProvider; +import cz.xtf.core.waiting.SimpleWaiter; +import cz.xtf.core.waiting.Waiter; +import cz.xtf.core.waiting.failfast.FailFastCheck; +import io.fabric8.kubernetes.api.builder.Visitor; +import io.fabric8.kubernetes.api.model.*; +import io.fabric8.kubernetes.api.model.rbac.RoleBinding; +import io.fabric8.kubernetes.client.Config; +import io.fabric8.kubernetes.client.ConfigBuilder; +import io.fabric8.kubernetes.client.DefaultKubernetesClient; +import io.fabric8.kubernetes.client.KubernetesClientException; +import 
io.fabric8.kubernetes.client.dsl.base.ResourceDefinitionContext; +import io.fabric8.kubernetes.client.dsl.internal.HasMetadataOperationsImpl; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class Kubernetes extends DefaultKubernetesClient { + + private static ServiceLoader crdContextProviderLoader; + + /** + * This label is supposed to be used for any resource created by Intersmash to easily distinguish which resources have + * been created by Intersmash automation. + * NOTE: at the moment only place where this is used is for labeling namespaces. Other usages may be added in the future. + */ + public static final String INTERSMASH_MANAGED_LABEL = "intersmash/managed"; + /** + * Used to cache created Kubernetes clients for given test case. + */ + public static final Multimap namespaceToKubernetesClientMap = Multimaps + .synchronizedListMultimap(ArrayListMultimap.create()); + private static final String KEEP_LABEL = "intersmash/keep"; + + /** + * Autoconfigures the client with the default fabric8 client rules + * + * @param namespace set namespace to the Kubernetes client instance + * @return this Kubernetes client instance + */ + public static Kubernetes get(String namespace) { + Config kubeconfig = Config.autoConfigure(null); + + setupTimeouts(kubeconfig); + + if (StringUtils.isNotEmpty(namespace)) { + kubeconfig.setNamespace(namespace); + } + + return get(kubeconfig); + } + + public static Kubernetes get(Path kubeconfigPath, String namespace) { + try { + String kubeconfigContents = new String(Files.readAllBytes(kubeconfigPath), StandardCharsets.UTF_8); + Config kubeconfig = Config.fromKubeconfig(null, kubeconfigContents, kubeconfigPath.toAbsolutePath().toString()); + + setupTimeouts(kubeconfig); + + if (StringUtils.isNotEmpty(namespace)) { + kubeconfig.setNamespace(namespace); + } + + return get(kubeconfig); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + public static Kubernetes get(String masterUrl, String namespace, String username, String password) { + Config kubeconfig = new ConfigBuilder() + .withMasterUrl(masterUrl) + .withTrustCerts(true) + .withNamespace(namespace) + .withUsername(username) + .withPassword(password).build(); + + setupTimeouts(kubeconfig); + + return get(kubeconfig); + } + + public static Kubernetes get(String masterUrl, String namespace, String token) { + Config kubeconfig = Config.empty(); + kubeconfig.setMasterUrl(masterUrl); + kubeconfig.setTrustCerts(true); + kubeconfig.setNamespace(namespace); + kubeconfig.setOauthToken(token); + + setupTimeouts(kubeconfig); + + return get(kubeconfig); + } + + public Kubernetes(Config kubeconfig) { + super(kubeconfig); + } + + public void setupPullSecret(String secret) { + setupPullSecret("xtf-pull-secret", secret); + } + + /** + * Convenient method to create pull secret for authenticated image registries. + * The secret content must be provided in "dockerconfigjson" formar. 
+ * + * E.g.: {@code {"auths":{"registry.redhat.io":{"auth":""}}}} + * + * TODO - Check Linking Secret to ServiceAccount + * + * @param name of the Secret to be created + * @param secret content of Secret in json format + */ + public void setupPullSecret(String name, String secret) { + Secret pullSecret = new SecretBuilder() + .withNewMetadata() + .withName(name) + .addToLabels(Kubernetes.KEEP_LABEL, "true") + .endMetadata() + .withType("kubernetes.io/dockerconfigjson") + .withData(Collections.singletonMap(".dockerconfigjson", Base64.getEncoder().encodeToString(secret.getBytes()))) + .build(); + secrets().createOrReplace(pullSecret); + serviceAccounts().withName("default").edit(new Visitor() { + @Override + public void visit(ServiceAccountBuilder builder) { + builder.addToImagePullSecrets( + new LocalObjectReferenceBuilder().withName(pullSecret.getMetadata().getName()).build()); + } + }); + + serviceAccounts().withName("builder").edit(new Visitor() { + @Override + public void visit(ServiceAccountBuilder builder) { + builder.addToSecrets(new ObjectReferenceBuilder().withName(pullSecret.getMetadata().getName()).build()); + } + }); + } + + private static Kubernetes get(Config kubeconfig) { + Kubernetes kubernetes; + + // check whether such a client already exists + Optional optionalKubernetes = namespaceToKubernetesClientMap + .get(kubeconfig.getNamespace()).stream() + .filter(kc -> isEqualConfig(kubeconfig, kc.getConfiguration())) + .findFirst(); + + if (optionalKubernetes.isPresent()) { + return optionalKubernetes.get(); + } else { + kubernetes = new Kubernetes(kubeconfig); + namespaceToKubernetesClientMap.put(kubeconfig.getNamespace(), kubernetes); + } + return kubernetes; + } + + private static void setupTimeouts(Config config) { + //___*** (ShipWright?)config.setBuildTimeout(10 * 60 * 1000); + config.setRequestTimeout(120_000); + config.setConnectionTimeout(120_000); + } + + protected static synchronized ServiceLoader getCRDContextProviders() { + if (crdContextProviderLoader == null) { + crdContextProviderLoader = ServiceLoader.load(CustomResourceDefinitionContextProvider.class); + } + return crdContextProviderLoader; + } + + private static boolean isEqualConfig(Config newConfig, Config existingConfig) { + return new EqualsBuilder() + .append(newConfig.getMasterUrl(), existingConfig.getMasterUrl()) + .append(newConfig.getNamespace(), existingConfig.getNamespace()) + .append(newConfig.getUsername(), existingConfig.getUsername()) + .append(newConfig.getPassword(), existingConfig.getPassword()) + .append(newConfig.getOauthToken(), existingConfig.getOauthToken()) + .append(newConfig.isTrustCerts(), existingConfig.isTrustCerts()) + .isEquals(); + } + + /** + * Retrieves all configmaps but "kube-root-ca.crt" and "openshift-service-ca.crt" which are created out of the box. + * + * @return List of configmaps created by user + */ + public List getUserConfigMaps() { + return configMaps().withLabelNotIn(Kubernetes.KEEP_LABEL, "", "true").list().getItems().stream() + .filter(cm -> !cm.getMetadata().getName().equals("kube-root-ca.crt")) + //.filter(cm -> !cm.getMetadata().getName().equals("openshift-service-ca.crt")) + .collect(Collectors.toList()); + } + + /** + * Retrieves secrets that aren't considered default. Secrets that are left out contain type starting with 'kubernetes.io/'. + * + * @return List of secrets that aren't considered default. 
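For orientation, a hypothetical caller-side sketch of the pull-secret helper defined above (the namespace name and secret payload are invented; the single-argument variant stores the secret under the default name "xtf-pull-secret"):

    Kubernetes k8s = Kuberneteses.master("intersmash-tests");
    // the payload must be the dockerconfigjson document described above
    k8s.setupPullSecret("{\"auths\":{\"registry.redhat.io\":{\"auth\":\"<base64 credentials>\"}}}");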
+ */ + public List getUserSecrets() { + return secrets().withLabelNotIn(Kubernetes.KEEP_LABEL, "", "true").list().getItems().stream() + .filter(s -> !s.getType().startsWith("kubernetes.io/")) + .collect(Collectors.toList()); + } + + /** + * Retrieves service accounts that aren't considered default. + * Service accounts that are left out from list: + *
+ * <ul>
+ * <li>builder</li>
+ * <li>default</li>
+ * <li>deployer</li>
+ * </ul>
+ * + * @return List of service accounts that aren't considered default. + */ + public List getUserServiceAccounts() { + return serviceAccounts().withLabelNotIn(Kubernetes.KEEP_LABEL, "", "true").list().getItems().stream() + .filter(sa -> !sa.getMetadata().getName().matches("builder|default|deployer")) + .collect(Collectors.toList()); + } + + /** + * Retrieves role bindings that aren't considered default. + * Role bindings that are left out from list: + *
+ * <ul>
+ * <li>admin</li>
+ * <li>system:deployers</li>
+ * <li>system:image-builders</li>
+ * <li>system:image-pullers</li>
+ * </ul>
+ * + * @return List of role bindings that aren't considered default. + */ + public List getUserRoleBindings() { + return rbac().roleBindings().withLabelNotIn(Kubernetes.KEEP_LABEL, "", "true") + .withLabelNotIn("olm.owner.kind", "ClusterServiceVersion").list().getItems().stream() + .filter(rb -> !rb.getMetadata().getName() + .matches("admin|system:deployers|system:image-builders|system:image-pullers")) + .collect(Collectors.toList()); + } + + public Waiter clean() { + for (CustomResourceDefinitionContextProvider crdContextProvider : Kubernetes.getCRDContextProviders()) { + try { + // TODO - check + genericKubernetesResources(crdContextProvider.getContext()) + .inNamespace(getNamespace()).delete(); + log.debug("DELETE :: " + crdContextProvider.getContext().getName() + " instances"); + } catch (KubernetesClientException kce) { + log.debug(crdContextProvider.getContext().getName() + " might not be installed on the cluster.", kce); + } + } + + /* Only OpenShift has the following ones, which are missing from k8s + templates().withLabelNotIn(KEEP_LABEL, "", "true").delete(); + deploymentConfigs().withLabelNotIn(KEEP_LABEL, "", "true").delete(); + buildConfigs().withLabelNotIn(KEEP_LABEL, "", "true").delete(); + imageStreams().withLabelNotIn(KEEP_LABEL, "", "true").delete(); + builds().withLabelNotIn(KEEP_LABEL, "", "true").delete(); + routes().withLabelNotIn(KEEP_LABEL, "", "true").delete(); + */ + + // keep the order for deletion to prevent K8s creating resources again + apps().deployments().withLabelNotIn(Kubernetes.KEEP_LABEL, "", "true").delete(); + apps().replicaSets().withLabelNotIn(Kubernetes.KEEP_LABEL, "", "true").delete(); + apps().statefulSets().withLabelNotIn(Kubernetes.KEEP_LABEL, "", "true").delete(); + batch().jobs().withLabelNotIn(Kubernetes.KEEP_LABEL, "", "true").delete(); + replicationControllers().withLabelNotIn(Kubernetes.KEEP_LABEL, "", "true").delete(); + endpoints().withLabelNotIn(Kubernetes.KEEP_LABEL, "", "true").delete(); + services().withLabelNotIn(Kubernetes.KEEP_LABEL, "", "true").delete(); + pods().withLabelNotIn(Kubernetes.KEEP_LABEL, "", "true").withGracePeriod(0).delete(); + persistentVolumeClaims().withLabelNotIn(Kubernetes.KEEP_LABEL, "", "true").delete(); + autoscaling().v1().horizontalPodAutoscalers().withLabelNotIn(Kubernetes.KEEP_LABEL, "", "true").delete(); + + getUserConfigMaps().forEach(c -> configMaps().delete(c)); + getUserSecrets().forEach(s -> secrets().delete(s)); + getUserServiceAccounts().forEach((sa) -> { + serviceAccounts().delete(sa); + }); + getUserRoleBindings().forEach(r -> rbac().roleBindings().delete(r)); + rbac().roles().withLabelNotIn(Kubernetes.KEEP_LABEL, "", "true") + .withLabelNotIn("olm.owner.kind", "ClusterServiceVersion") + .delete(); + + for (HasMetadata hasMetadata : listRemovableResources()) { + log.warn("DELETE LEFTOVER :: " + hasMetadata.getKind() + "/" + hasMetadata.getMetadata().getName()); + resource(hasMetadata).cascading(true).withGracePeriod(0).delete(); + } + + FailFastCheck failFastCheck = () -> false; + return new SimpleWaiter( + () -> isNamespaceClean(), + TimeUnit.MILLISECONDS, WaitingConfig.timeoutCleanup(), "Cleaning project - " + getNamespace()) + .onTimeout(() -> log.info("Cleaning namespace: " + getNamespace() + " - timed out.")) + .onFailure(() -> log.info("Cleaning namespace: " + getNamespace() + " - failed.")) + .onSuccess(() -> log.info("Cleaning namespace: " + getNamespace() + " - finished.")) + .failFast(failFastCheck); + } + + List listRemovableResources() { + /* Only OpenShift has the following ones, 
which are missing from k8s + removables.addAll(templates().withLabelNotIn(Kubernetes.KEEP_LABEL, "", "true").list().getItems()); + removables.addAll(deploymentConfigs().withLabelNotIn(Kubernetes.KEEP_LABEL, "", "true").list().getItems()); + removables.addAll(buildConfigs().withLabelNotIn(Kubernetes.KEEP_LABEL, "", "true").list().getItems()); + removables.addAll(imageStreams().withLabelNotIn(Kubernetes.KEEP_LABEL, "", "true").list().getItems()); + removables.addAll(builds().withLabelNotIn(Kubernetes.KEEP_LABEL, "", "true").list().getItems()); + removables.addAll(routes().withLabelNotIn(Kubernetes.KEEP_LABEL, "", "true").list().getItems()); + */ + + // keep the order for deletion to prevent K8s creating resources again + List removables = new ArrayList<>(); + removables.addAll(apps().deployments().withLabelNotIn(Kubernetes.KEEP_LABEL, "", "true").list().getItems()); + removables.addAll(apps().replicaSets().withLabelNotIn(Kubernetes.KEEP_LABEL, "", "true").list().getItems()); + removables.addAll(batch().jobs().withLabelNotIn(Kubernetes.KEEP_LABEL, "", "true").list().getItems()); + removables.addAll(apps().statefulSets().withLabelNotIn(Kubernetes.KEEP_LABEL, "", "true").list().getItems()); + removables.addAll(replicationControllers().withLabelNotIn(Kubernetes.KEEP_LABEL, "", "true").list().getItems()); + removables.addAll(endpoints().withLabelNotIn(Kubernetes.KEEP_LABEL, "", "true").list().getItems()); + removables.addAll(services().withLabelNotIn(Kubernetes.KEEP_LABEL, "", "true").list().getItems()); + removables.addAll(pods().withLabelNotIn(Kubernetes.KEEP_LABEL, "", "true").list().getItems()); + removables.addAll(persistentVolumeClaims().withLabelNotIn(Kubernetes.KEEP_LABEL, "", "true").list().getItems()); + removables.addAll(autoscaling().v1().horizontalPodAutoscalers().withLabelNotIn(Kubernetes.KEEP_LABEL, "", "true").list() + .getItems()); + removables.addAll(getUserConfigMaps()); + removables.addAll(getUserSecrets()); + removables.addAll(getUserServiceAccounts()); + removables.addAll(getUserRoleBindings()); + removables.addAll(rbac().roles().withLabelNotIn(Kubernetes.KEEP_LABEL, "", "true") + .withLabelNotIn("olm.owner.kind", "ClusterServiceVersion").list().getItems()); + + return removables; + } + + private boolean isNamespaceClean() { + int crdInstances = 0; + List customResourceDefinitionList = null; + for (CustomResourceDefinitionContextProvider crdContextProvider : Kubernetes.getCRDContextProviders()) { + try { + customResourceDefinitionList = genericKubernetesResources(crdContextProvider.getContext()) + .inNamespace(getNamespace()) + .list().getItems(); + crdInstances += customResourceDefinitionList.size(); + } catch (KubernetesClientException kce) { + // CRD might not be installed on the cluster + } + } + + boolean isClean = false; + List listRemovableResources = listRemovableResources(); + if (crdInstances == 0 & listRemovableResources.isEmpty()) { + isClean = true; + } else { + StringBuilder strBuilderResourcesToDelete = new StringBuilder( + "Cleaning project - " + getNamespace() + + " Waiting for following resources to be deleted: \n"); + if (customResourceDefinitionList != null && !customResourceDefinitionList.isEmpty()) { + customResourceDefinitionList.stream().forEach((r) -> { + strBuilderResourcesToDelete.append(r + "\n"); + }); + } + if (!listRemovableResources.isEmpty()) { + listRemovableResources.stream().forEach((r) -> { + strBuilderResourcesToDelete.append(r + "\n"); + }); + } + log.debug(strBuilderResourcesToDelete.toString()); + } + return isClean; + } + + public 
String generateHostname() { + log.info("Kubernetes generateHostname returns: " + KubernetesConfig.getKubernetesHostname()); + return KubernetesConfig.getKubernetesHostname(); + } + + public > HasMetadataOperationsImpl newHasMetadataOperation( + ResourceDefinitionContext rdContext, Class resourceType, Class listClass) { + return new HasMetadataOperationsImpl(this, rdContext, resourceType, listClass); + } + +} diff --git a/kubernetes-client/src/main/java/org/jboss/intersmash/k8s/client/Kuberneteses.java b/kubernetes-client/src/main/java/org/jboss/intersmash/k8s/client/Kuberneteses.java new file mode 100644 index 000000000..dac5ac67f --- /dev/null +++ b/kubernetes-client/src/main/java/org/jboss/intersmash/k8s/client/Kuberneteses.java @@ -0,0 +1,223 @@ +package org.jboss.intersmash.k8s.client; + +import java.io.File; +import java.nio.file.Paths; + +import org.apache.commons.lang3.StringUtils; +import org.jboss.intersmash.k8s.KubernetesConfig; +import org.jboss.intersmash.k8s.client.binary.KubernetesClientBinary; +import org.jboss.intersmash.k8s.client.binary.KubernetesClientBinaryManagerFactory; + +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class Kuberneteses { + + public static Kubernetes admin() { + return Kuberneteses.admin(NamespaceManager.getNamespace()); + } + + public static Kubernetes admin(String namespace) { + if (StringUtils.isNotEmpty(KubernetesConfig.adminToken())) { + return Kubernetes.get(KubernetesConfig.url(), namespace, KubernetesConfig.adminToken()); + } + + if (StringUtils.isNotEmpty(KubernetesConfig.adminUsername())) { + return Kubernetes.get(KubernetesConfig.url(), namespace, KubernetesConfig.adminUsername(), + KubernetesConfig.adminPassword()); + } + + if (StringUtils.isNotEmpty(KubernetesConfig.adminKubeconfig())) { + return Kubernetes.get(Paths.get(KubernetesConfig.adminKubeconfig()), namespace); + } + + return Kubernetes.get(namespace); + } + + public static Kubernetes master() { + + return Kuberneteses.master(NamespaceManager.getNamespace()); + + } + + public static Kubernetes master(String namespace) { + if (StringUtils.isNotEmpty(KubernetesConfig.masterToken())) { + return Kubernetes.get(KubernetesConfig.url(), namespace, KubernetesConfig.masterToken()); + } + + if (StringUtils.isNotEmpty(KubernetesConfig.masterUsername())) { + return Kubernetes.get(KubernetesConfig.url(), namespace, KubernetesConfig.masterUsername(), + KubernetesConfig.masterPassword()); + } + + if (StringUtils.isNotEmpty(KubernetesConfig.masterKubeconfig())) { + return Kubernetes.get(Paths.get(KubernetesConfig.masterKubeconfig()), namespace); + } + + return Kubernetes.get(namespace); + } + + public static String getBinaryPath() { + return KubernetesClientBinaryManagerFactory.INSTANCE.getKubernetesClientBinaryManager().getBinaryPath(); + } + + public static KubernetesClientBinary masterBinary() { + return masterBinary(NamespaceManager.getNamespace()); + } + + public static KubernetesClientBinary masterBinary(String namespace) { + return KubernetesClientBinaryManagerFactory.INSTANCE.getKubernetesClientBinaryManager().masterBinary(namespace); + } + + public static KubernetesClientBinary adminBinary() { + return adminBinary(NamespaceManager.getNamespace()); + } + + public static KubernetesClientBinary adminBinary(String namespace) { + return KubernetesClientBinaryManagerFactory.INSTANCE.getKubernetesClientBinaryManager().adminBinary(namespace); + } + + private static String getHomeDir() { + String home = System.getenv("HOME"); + if (home != null && !home.isEmpty()) { + File f = new 
File(home); + if (f.exists() && f.isDirectory()) { + return home; + } + } + return System.getProperty("user.home", "."); + } + + // /** + // * Save oc binary in a folder to use as cache to avoid to download it again. + // * The folder path depends on the OCP version and the download url. + // * The file can be accessed using {@link #getOcFromCache(String, String, File)}. + // * It works only if {@link OpenShiftConfig#isBinaryCacheEnabled()}. + // * + // * @param version String, OCP cluster version. + // * @param ocUrl String, download URL. + // * @param ocTarFile String, workdir file. + // * @throws IOException + // * @deprecated this should have never been made public, can be removed in future versions. It is not used internally by XTF + // */ + // @Deprecated + // public static void saveOcOnCache(String version, String ocUrl, File ocTarFile) throws IOException { + // if (OpenShiftConfig.isBinaryCacheEnabled()) { + // File cacheRootFile = new File(OpenShiftConfig.binaryCachePath()); + // if (!cacheRootFile.exists() && !cacheRootFile.mkdirs()) { + // throw new IllegalStateException("Cannot mkdirs " + cacheRootFile); + // } + // Path cachePath = getOcCachePath(version, ocUrl); + // Files.createDirectories(cachePath); + // FileUtils.copyFile(ocTarFile, new File(cachePath.toFile(), ocTarFile.getName())); + // } + // } + // + // /** + // * Retrieve the file from the folder populated by {@link #saveOcOnCache(String, String, File)}. + // * + // * @param version String, OCP cluster version. + // * @param ocUrl String, download URL. + // * @param ocTarFile String, workdir file. + // * @return File, reference to the file, if the cache is not populated, the file is not null, but it doesn't exist. + // * @throws IOException + // * @deprecated this should have never been made public, can be removed in future versions. It is not used internally by XTF + // */ + // @Deprecated + // public static File getOcFromCache(String version, String ocUrl, File ocTarFile) throws IOException { + // return new File(getOcCachePath(version, ocUrl).toFile(), ocTarFile.getName()); + // } + // + // /** + // * * @deprecated this should have never been made public, can be removed in future versions. It is not used internally by + // * XTF + // */ + // @Deprecated + // private static Path getOcCachePath(String version, String ocUrl) { + // return Paths.get(OpenShiftConfig.binaryCachePath(), version, DigestUtils.md5Hex(ocUrl)); + // } + // + // /** + // * Returns {@link OpenShiftConfig#version()}. If not available then access OpenShift endpoint for a version. Be aware + // * that this operation requires admin role for OpenShift 4 unlike to OpenShift 3. 
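A hedged sketch of how client instances are typically obtained (the namespace name is invented): per the precedence implemented above, a configured admin/master token wins over username/password, which wins over an explicit kubeconfig, before falling back to fabric8 auto-configuration:

    Kubernetes admin = Kuberneteses.admin("intersmash-tests");   // honours intersmash.kubernetes.admin.* if set
    Kubernetes master = Kuberneteses.master("intersmash-tests"); // honours intersmash.kubernetes.master.* if set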
+ // * + // * @return Openshift cluster version if configured or detected from cluster, null otherwise + // */ + // public static String getVersion() { + // return ClusterVersionInfoFactory.INSTANCE.getClusterVersionInfo().getOpenshiftVersion(); + // } + // + // public static String getMasterToken() { + // return getToken(OpenShiftConfig.masterToken(), OpenShiftConfig.masterUsername(), OpenShiftConfig.masterPassword(), + // OpenShiftConfig.masterKubeconfig()); + // } + // + // public static String getAdminToken() { + // return getToken(OpenShiftConfig.adminToken(), OpenShiftConfig.adminUsername(), OpenShiftConfig.adminPassword(), + // OpenShiftConfig.adminKubeconfig()); + // } + // + // private static String getToken(String token, String username, String password, String kubeconfig) { + // if (StringUtils.isNotEmpty(token)) { + // return token; + // } + // + // // Attempt to get the token via HTTP basic auth: + // if (StringUtils.isNotEmpty(username)) { + // HttpsURLConnection connection = null; + // try { + // if (getVersion() != null && getVersion().startsWith("3")) { + // connection = Https.getHttpsConnection(new URL( + // OpenShiftConfig.url() + // + "/oauth/authorize?response_type=token&client_id=openshift-challenging-client")); + // } else { + // connection = Https.getHttpsConnection(new URL("https://oauth-openshift.apps." + + // StringUtils.substringBetween(OpenShiftConfig.url(), "api.", ":") + // + "/oauth/authorize?response_type=token&client_id=openshift-challenging-client")); + // } + // String encoded = Base64.getEncoder() + // .encodeToString((username + ":" + password).getBytes(StandardCharsets.UTF_8)); + // connection.setRequestProperty("Authorization", "Basic " + encoded); + // connection.setInstanceFollowRedirects(false); + // + // connection.connect(); + // Map> headers = connection.getHeaderFields(); + // connection.disconnect(); + // + // List location = headers.get("Location"); + // if (location != null) { + // Optional acces_token = location.stream().filter(s -> s.contains("access_token")).findFirst(); + // return acces_token.map(s -> StringUtils.substringBetween(s, "#access_token=", "&")).orElse(null); + // } + // } catch (IOException ex) { + // log.error("Unable to retrieve token from Location header: {} ", ex.getMessage()); + // } finally { + // if (connection != null) + // connection.disconnect(); + // } + // return null; + // } + // + // if (StringUtils.isNotEmpty(kubeconfig)) { + // try { + // Config config = Config.fromKubeconfig(null, + // new String(Files.readAllBytes(Paths.get(kubeconfig)), StandardCharsets.UTF_8), kubeconfig); + // return config.getOauthToken(); + // } catch (IOException e) { + // log.error("Unable to retrieve token from kubeconfig: {} ", kubeconfig, e); + // } + // return null; + // } + // + // File defaultKubeConfig = Paths.get(getHomeDir(), ".kube", "config").toFile(); + // try { + // Config config = Config.fromKubeconfig(null, + // new String(Files.readAllBytes(defaultKubeConfig.toPath()), StandardCharsets.UTF_8), + // defaultKubeConfig.getAbsolutePath()); + // return config.getOauthToken(); + // } catch (IOException e) { + // log.error("Unable to retrieve token from default kubeconfig: {} ", defaultKubeConfig, e); + // } + // return null; + // } +} diff --git a/kubernetes-client/src/main/java/org/jboss/intersmash/k8s/client/NamespaceManager.java b/kubernetes-client/src/main/java/org/jboss/intersmash/k8s/client/NamespaceManager.java new file mode 100644 index 000000000..60c975923 --- /dev/null +++ 
b/kubernetes-client/src/main/java/org/jboss/intersmash/k8s/client/NamespaceManager.java @@ -0,0 +1,241 @@ +package org.jboss.intersmash.k8s.client; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; +import java.util.function.BooleanSupplier; + +import org.apache.commons.codec.digest.DigestUtils; +import org.apache.commons.lang3.StringUtils; +import org.jboss.intersmash.k8s.KubernetesConfig; +import org.junit.jupiter.engine.descriptor.MethodBasedTestDescriptor; +import org.junit.platform.engine.TestDescriptor; + +import cz.xtf.core.context.TestCaseContext; +import cz.xtf.core.waiting.SimpleWaiter; +import io.fabric8.kubernetes.api.builder.Visitor; +import io.fabric8.kubernetes.api.model.Namespace; +import io.fabric8.kubernetes.api.model.NamespaceBuilder; +import io.fabric8.kubernetes.api.model.ObjectMeta; +import io.fabric8.kubernetes.api.model.StatusDetails; +import io.fabric8.kubernetes.client.KubernetesClientException; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class NamespaceManager { + /** + * By default (xtf.openshift.namespace.per.testcase=false) all entries in map point to value returned by + * + * @see KubernetesConfig#namespace(). + * If intersmash.k8s.namespace.per.testcase=true then each entry points to namespace + * assigned to each test case by {@see NamespaceManager#getNamespaceForTestClass} + * + * Maps testcase -> namespace + */ + private static final Map testcaseToNamespaceMap = new HashMap(); + + /** + * @return Map testcase -> namespace + */ + private static Map getTestCaseToNamespaceMap() { + return testcaseToNamespaceMap; + } + + /** + * @return return namespace for testcase or null if not present + */ + private static String getNamespaceForTestCase(String testcase) { + return getTestCaseToNamespaceMap().get(testcase); + } + + /** + * Creates namespace with name returned by @see #getNamespace if it does not exist + * + * @return true if successful, false otherwise + */ + public static boolean createIfDoesNotExistsProject() { + return createIfDoesNotExistsProject(getNamespace()); + } + + /** + * Creates namespace if it does not exist + * + * @return true if newly created, false otherwise (failed or namespace already present) + */ + public static boolean createIfDoesNotExistsProject(String namespace) { + Kubernetes kubernetes = Kuberneteses.master(namespace); + + // in case namespace is terminating (means is being deleted) then wait + checkAndWaitIfNamespaceIsTerminating(namespace); + + if (kubernetes.namespaces().withName(namespace).get() == null) { + log.info("Creating namespace: " + kubernetes.getNamespace()); + + Namespace projectNamespace = new Namespace(); + ObjectMeta objectMeta = new ObjectMeta(); + objectMeta.setName(KubernetesConfig.namespace()); + projectNamespace.setMetadata(objectMeta); + + kubernetes.namespaces().create(projectNamespace); + + new SimpleWaiter(() -> kubernetes.namespaces().withName(kubernetes.getNamespace()) != null, + TimeUnit.MINUTES, 2, + "Waiting for " + namespace + " project deletion").waitFor(); + + try { + // Adding a label can be only done via 'namespace'. It cannot be set via 'project' API. Thus we do this + // separately. Also, to update namespace label, it's necessary to have 'patch resource "namespaces"' + // permission for current user and updated namespace, e.g. by having 'cluster-admin' role. 
+ // Otherwise you can see: + // $ oc label namespace "label1=foo" + // Error from server (Forbidden): namespaces "" is forbidden: User "" cannot patch resource "namespaces" in API group "" in the namespace "" + Kuberneteses.admin(namespace).namespaces().withName(kubernetes.getNamespace()) + .edit(new Visitor() { + @Override + public void visit(NamespaceBuilder builder) { + builder.editMetadata() + .addToLabels(Kubernetes.INTERSMASH_MANAGED_LABEL, "true"); + } + }); + } catch (KubernetesClientException e) { + // We weren't able to assign a label to the new project. Let's just print warning since this information + // is not critical to the tests execution. Possible cause for this are insufficient permissions since + // some projects using XTF are executed on OCP instances without 'admin' accounts available. + log.warn("Couldn't assign label '" + Kubernetes.INTERSMASH_MANAGED_LABEL + "' to the new project '" + + kubernetes.getNamespace() + "'. Possible cause are insufficient permissions."); + log.debug(e.getMessage()); + } + + if (KubernetesConfig.pullSecret() != null) { + kubernetes.setupPullSecret(KubernetesConfig.pullSecret()); + } + log.info("Created namespace: " + kubernetes.getNamespace()); + return true; + } + return false; + } + + private static void checkAndWaitIfNamespaceIsTerminating(String namespace) { + Namespace n = Kuberneteses.admin(namespace).namespaces().withName(namespace).get(); + if (n != null && n.getStatus().getPhase().equals("Terminating")) { + waitForNamespaceToBeDeleted(namespace); + } + } + + /** + * Deletes namespace as returned by @see #getNamespace + * + * @param waitForDeletion whether to wait for deletion (timeout 2 min) + * + * @return true if successful, false otherwise + */ + public static boolean deleteProject(boolean waitForDeletion) { + return deleteProject(getNamespace(), waitForDeletion); + } + + /** + * Deletes namespace + * + * @param namespace namespace name to delete + * @param waitForDeletion whether to wait for deletion (timeout 2 min) + * + * @return true if successful, false otherwise + */ + public static boolean deleteProject(String namespace, boolean waitForDeletion) { + boolean deleted = false; + // problem with OpenShift.getProject() is that it might return null even if namespace still exists (is in terminating state) + // thus use Openshift.namespaces() which do not suffer by this problem + // openshift.namespaces() requires admin privileges otherwise following KubernetesClientException is thrown: + // ... User "xpaasqe" cannot get resource "namespaces" in API group "" in the namespace ... + if (Kuberneteses.admin(namespace).namespaces().withName(namespace).get() != null) { + Kubernetes kubernetes = Kuberneteses.master(namespace); + log.info("Start deleting namespace: " + kubernetes.getNamespace() + ", wait for deletion: " + waitForDeletion); + List details = kubernetes.namespaces().withName(namespace).delete(); + deleted = details.stream().allMatch(d -> d.getCauses().isEmpty()); + if (deleted) { + log.info("Namespace: " + namespace + ", successfully deleted"); + } + if (!deleted && waitForDeletion) { + waitForNamespaceToBeDeleted(namespace); + } + } + return deleted; + } + + /** + * + * Deletes namespace as returned by @see #getNamespace. + * Deletes namespace only if @see {@link KubernetesConfig#useNamespacePerTestCase()} is true. 
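A rough lifecycle sketch (illustrative only; in practice the KubernetesNamespaceCreator JUnit extension further below drives these calls):

    // before a test class runs
    NamespaceManager.createIfDoesNotExistsProject();
    // ... exercise the cluster through Kuberneteses.master() in that namespace ...
    // after the test class, when intersmash.junit.clean_namespace=true and a namespace per test case is used
    NamespaceManager.deleteProjectIfUsedNamespacePerTestCase(false);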
+ * + * @param waitForDeletion wait for deletion of namespace + * @return true if successful, false otherwise + */ + public static boolean deleteProjectIfUsedNamespacePerTestCase(boolean waitForDeletion) { + if (KubernetesConfig.useNamespacePerTestCase()) { + return deleteProject(waitForDeletion); + } + return false; + } + + private static void waitForNamespaceToBeDeleted(String namespace) { + BooleanSupplier bs = () -> Kuberneteses.admin(namespace).namespaces().withName(namespace).get() == null; + new SimpleWaiter(bs, TimeUnit.MINUTES, 2, "Waiting for " + namespace + " project deletion") + .waitFor(); + } + + private static String getNamespaceForTestClass(TestDescriptor testDescriptor) { + if (KubernetesConfig.useNamespacePerTestCase()) { + // some test case names can be really long resulting in long namespace names. This can cause issues + // with routes which have 64 chars limit for prefix of domain name. In case of route like: + // galleon-provisioning-xml-prio-mnovak-galleonprovisioningxmltest.apps.eapqe-024-dryf.eapqe.psi.redhat.com + // route prefix is: galleon-provisioning-xml-prio-mnovak-galleonprovisioningxmltest + // route suffix is: .apps.eapqe-024-dryf.eapqe.psi.redhat.com + if ((KubernetesConfig.namespace() + "-" + + testDescriptor.getParent().get().getDisplayName().toLowerCase()) + .length() > KubernetesConfig.getNamespaceLengthLimitForUniqueNamespacePerTest()) { + + return KubernetesConfig.namespace() + "-" + + StringUtils.truncate(DigestUtils.sha256Hex(testDescriptor.getParent().get().getDisplayName() + .toLowerCase()), + KubernetesConfig.getNamespaceLengthLimitForUniqueNamespacePerTest() + - KubernetesConfig.namespace().length()); + } else { + return KubernetesConfig.namespace() + "-" + + testDescriptor.getParent().get().getDisplayName().toLowerCase(); // namespace must not have upper case letters + } + } else { + return KubernetesConfig.namespace(); + } + } + + /** + * Add mapping test case name -> (automatically generated) namespace for given test case if absent + * + * @param testDescriptor test descriptor + */ + public static void addTestCaseToNamespaceEntryIfAbsent(TestDescriptor testDescriptor) { + getTestCaseToNamespaceMap().putIfAbsent(((MethodBasedTestDescriptor) testDescriptor).getTestClass().getName(), + getNamespaceForTestClass(testDescriptor)); + } + + /** + * @return Returns default namespace as defined in xtf.openshift.namespace property or namespace for currently running test + * case when: + * -Dintersmash.kubernetes.namespace.per.testcase=true. + * In case when current thread does not have associated test case (for example when initializing Kubernetes instance + * in static variable or static block) then {@link java.lang.RuntimeException} exception is thrown. + */ + public static String getNamespace() { + if (KubernetesConfig.useNamespacePerTestCase()) { + String namespace = NamespaceManager.getNamespaceForTestCase(TestCaseContext.getRunningTestCaseName()); + if (StringUtils.isEmpty(namespace)) { + throw new RuntimeException( + "There is no namespace associated with current thread or test case. This can happen in case that OpenShift instance is created in static variable. In this case avoid using static. 
Or in thread which is not associated with any test case."); + } + return namespace; + } else { + return KubernetesConfig.namespace(); + } + } +} diff --git a/kubernetes-client/src/main/java/org/jboss/intersmash/k8s/client/TestCaseContext.java b/kubernetes-client/src/main/java/org/jboss/intersmash/k8s/client/TestCaseContext.java new file mode 100644 index 000000000..05fd0cccc --- /dev/null +++ b/kubernetes-client/src/main/java/org/jboss/intersmash/k8s/client/TestCaseContext.java @@ -0,0 +1,24 @@ +package org.jboss.intersmash.k8s.client; + +public class TestCaseContext { + + /** + * + * This allows to track currently running test case for correct namespace mapping. This is used to automatically find + * namespace for running test case when + * creating {@link Kubernetes} instances. + */ + private static String runningTestCaseName; + + /** + * @return test case name associated with current thread or null if not such mapping exists, for example for com.SmokeTest + * returns SmokeTest + */ + public static String getRunningTestCaseName() { + return runningTestCaseName; + } + + public static void setRunningTestCase(String currentlyRunningTestCaseName) { + runningTestCaseName = currentlyRunningTestCaseName; + } +} diff --git a/kubernetes-client/src/main/java/org/jboss/intersmash/k8s/client/binary/ClusterVersionBasedKubernetesClientBinaryPathResolver.java b/kubernetes-client/src/main/java/org/jboss/intersmash/k8s/client/binary/ClusterVersionBasedKubernetesClientBinaryPathResolver.java new file mode 100644 index 000000000..70b1218db --- /dev/null +++ b/kubernetes-client/src/main/java/org/jboss/intersmash/k8s/client/binary/ClusterVersionBasedKubernetesClientBinaryPathResolver.java @@ -0,0 +1,130 @@ +package org.jboss.intersmash.k8s.client.binary; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.StandardCopyOption; +import java.nio.file.attribute.PosixFilePermission; +import java.util.Objects; +import java.util.Set; + +import org.apache.commons.codec.digest.DigestUtils; +import org.apache.commons.lang3.SystemUtils; +import org.jboss.intersmash.k8s.KubernetesConfig; +import org.jboss.intersmash.k8s.client.Kuberneteses; + +import com.google.common.base.Strings; + +import cz.xtf.core.http.Https; +import io.fabric8.kubernetes.client.VersionInfo; +import lombok.extern.slf4j.Slf4j; + +/** + * Class for resolving Kubernetes client binary path based on the Kubernetes cluster version, either the + * one referenced by the configuration properties or the actual cluster one. + * + * Based on the configuration properties, users can configure the resolver to cache the binary client. + */ +@Slf4j +public class ClusterVersionBasedKubernetesClientBinaryPathResolver implements KubernetesClientBinaryPathResolver { + private static final String KUBERNETES_CLIENT_BINARY_DOWNLOAD_BASE_URL = "https://dl.k8s.io/release/"; + public static final int BINARY_DOWNLOAD_CONNECTION_TIMEOUT = 20_000; + public static final int BINARY_DOWNLOAD_READ_TIMEOUT = 300_000; + + @Override + public String resolve() { + // gets the cluster version from configuration + final String configuredClusterVersion = KubernetesConfig.version(); + // priority is given to the configuration, then fallback on k8s APIs for determining the actual cluster version + final VersionInfo clusterVersionInfo = !Strings.isNullOrEmpty(configuredClusterVersion) + ? 
new VersionInfo.Builder().withGitVersion(configuredClusterVersion).build() + : Kuberneteses.admin().getKubernetesVersion(); + // is a local cache configured to be used? + final boolean cacheEnabled = KubernetesConfig.isBinaryCacheEnabled(); + // let's compute the Kubernetes client binary path + Path binaryPath; + if (cacheEnabled) { + log.debug("Trying to load Kubernetes client binary from cache"); + Path cachePath = getCachePath(clusterVersionInfo); + binaryPath = cachePath.resolve(BINARY_NAME); + if (Files.exists(binaryPath)) { + // the required binary is there already, let's skip the download + log.debug("Kubernetes client binary is already in cache: {}.", binaryPath.toAbsolutePath()); + } else { + log.debug("Kubernetes client binary not found in cache, downloading it."); + downloadKubernetesClient(clusterVersionInfo, binaryPath); + } + binaryPath = copyKubernetesClientBinaryToTemporaryRuntimeLocation(binaryPath); + } else { + binaryPath = ClusterVersionBasedKubernetesClientBinaryPathResolver.getRuntimeKubectl(); + log.debug("Cache is disabled, downloading Kubernetes client binary to {}.", binaryPath.toAbsolutePath()); + downloadKubernetesClient(clusterVersionInfo, binaryPath); + } + return binaryPath.toAbsolutePath().toString(); + } + + /** + * Get the proper URL for a Kubernetes client, based on required version and taking the OS into account as well. + * + * @param clusterVersion {@link VersionInfo} instance holding the required Kubernetes version. + * @return A string representing the required Kubernetes client URL + */ + private String getBinaryUrlBasedOnKubernetesVersion(final VersionInfo clusterVersion) { + Objects.requireNonNull(clusterVersion); + + String systemType = "linux"; + String arch = "amd64"; + if (SystemUtils.IS_OS_MAC) { + systemType = "darwin"; + } + return String.format("%s/%s/bin/%s/%s/%s", KUBERNETES_CLIENT_BINARY_DOWNLOAD_BASE_URL, clusterVersion.getGitVersion(), + systemType, arch, BINARY_NAME); + } + + private void downloadKubernetesClient(final VersionInfo clusterVersionInfo, final Path binaryPath) { + final String url = getBinaryUrlBasedOnKubernetesVersion(clusterVersionInfo); + try { + Https.copyHttpsURLToFile(url, binaryPath.toFile(), BINARY_DOWNLOAD_CONNECTION_TIMEOUT, + BINARY_DOWNLOAD_READ_TIMEOUT); + } catch (IOException ioe) { + throw new IllegalStateException("Failed to download the kubectl binary from " + url, ioe); + } + } + + private Path copyKubernetesClientBinaryToTemporaryRuntimeLocation(final Path binaryPath) { + Objects.requireNonNull(binaryPath); + + final Path runtimeKubectl = ClusterVersionBasedKubernetesClientBinaryPathResolver.getRuntimeKubectl(); + final Set permissions = Set.of( + PosixFilePermission.OWNER_READ, PosixFilePermission.OWNER_WRITE, PosixFilePermission.OWNER_EXECUTE, + PosixFilePermission.GROUP_READ, PosixFilePermission.GROUP_WRITE, PosixFilePermission.GROUP_EXECUTE, + PosixFilePermission.OTHERS_READ, PosixFilePermission.OTHERS_EXECUTE); + try { + Files.copy(binaryPath, runtimeKubectl, StandardCopyOption.REPLACE_EXISTING); + Files.setPosixFilePermissions(runtimeKubectl, permissions); + } catch (IOException e) { + throw new IllegalStateException("Error when copying the Kubernetes client binary", e); + } + return runtimeKubectl; + } + + static Path getCachePath(VersionInfo clusterVersionInfo) { + return Paths.get(KubernetesConfig.binaryCachePath(), clusterVersionInfo.getGitVersion(), + DigestUtils.md5Hex(clusterVersionInfo.getGitVersion())); + } + + static Path getRuntimeKubectl() { + return 
getProjectKubernetesDir().resolve(BINARY_NAME); + } + + static Path getProjectKubernetesDir() { + Path dir = Paths.get(LOCAL_BINARY_CLIENT_TMP_DIR); + try { + Files.createDirectories(dir); + } catch (IOException ioe) { + throw new IllegalStateException("Failed to create directory " + dir.toAbsolutePath(), ioe); + } + return dir; + } +} diff --git a/kubernetes-client/src/main/java/org/jboss/intersmash/k8s/client/binary/ConfigurationBasedKubernetesClientBinaryPathResolver.java b/kubernetes-client/src/main/java/org/jboss/intersmash/k8s/client/binary/ConfigurationBasedKubernetesClientBinaryPathResolver.java new file mode 100644 index 000000000..69641508c --- /dev/null +++ b/kubernetes-client/src/main/java/org/jboss/intersmash/k8s/client/binary/ConfigurationBasedKubernetesClientBinaryPathResolver.java @@ -0,0 +1,12 @@ +package org.jboss.intersmash.k8s.client.binary; + +import org.jboss.intersmash.k8s.KubernetesConfig; + +public class ConfigurationBasedKubernetesClientBinaryPathResolver implements KubernetesClientBinaryPathResolver { + + @Override + public String resolve() { + + return KubernetesConfig.binaryPath(); + } +} diff --git a/kubernetes-client/src/main/java/org/jboss/intersmash/k8s/client/binary/KubernetesClientBinary.java b/kubernetes-client/src/main/java/org/jboss/intersmash/k8s/client/binary/KubernetesClientBinary.java new file mode 100644 index 000000000..5cfa3d405 --- /dev/null +++ b/kubernetes-client/src/main/java/org/jboss/intersmash/k8s/client/binary/KubernetesClientBinary.java @@ -0,0 +1,128 @@ +package org.jboss.intersmash.k8s.client.binary; + +import java.lang.reflect.Type; +import java.util.ArrayList; +import java.util.List; + +import org.apache.commons.lang3.ArrayUtils; + +import com.google.common.reflect.TypeToken; +import com.google.gson.Gson; + +import cz.xtf.core.openshift.CLIUtils; +import io.fabric8.openshift.api.model.operatorhub.lifecyclemanager.v1.PackageManifest; +import io.fabric8.openshift.api.model.operatorhub.v1alpha1.CatalogSource; +import lombok.Getter; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class KubernetesClientBinary { + private final String path; + + @Getter + private String configPath; + + public KubernetesClientBinary(String path) { + this.path = path; + } + + public KubernetesClientBinary(String path, String configPath) { + this(path); + this.configPath = configPath; + } + + public void login(String url, String token) { + this.execute("login", url, "--insecure-skip-tls-verify=true", "--token=" + token); + } + + public void login(String url, String username, String password) { + this.execute("login", url, "--insecure-skip-tls-verify=true", "-u", username, "-p", password); + } + + /** + * Apply configuration file in the specified namespace. + * Delegates to `oc apply --filename='sourcepath' --namespace='namespace'` + * + * @param sourcePath path to configration file + * @param namespace namespace + */ + public void apply(String namespace, String sourcePath) { + this.execute("apply", "--namespace=" + namespace, "--filename=" + sourcePath); + } + + /** + * Apply configuration file. 
Delegates to `oc apply --filename='sourcepath` + * + * @param sourcePath path to configration file + */ + public void apply(String sourcePath) { + this.execute("apply", "--filename=" + sourcePath); + } + + /** + * Apply configuration files in the order they appear in the list + * + * @param sourcePaths list of paths to configuration files + */ + public void apply(List sourcePaths) { + for (String sourcePath : sourcePaths) { + apply(sourcePath); + } + } + + /** + * Apply configuration files in the order they appear in the list, using supplied namespace. + * + * @param namespace namespace in which the configuration files should be applied + * @param sourcePaths list of paths to configuration files + */ + public void apply(String namespace, List sourcePaths) { + for (String sourcePath : sourcePaths) { + apply(namespace, sourcePath); + } + } + + public void namespace(String projectName) { + // see https://kubernetes.io/docs/reference/kubectl/cheatsheet/#kubectl-context-and-configuration + this.execute("config", "set-context", "--current", String.format("--namespace=%s", projectName)); + } + + // TODO? check ShipWrigth + // public void startBuild(String buildConfig, String sourcePath) { + // this.execute("start-build", buildConfig, "--from-dir=" + sourcePath); + // } + + // Common method for any oc command call + public String execute(String... args) { + if (configPath == null) { + return CLIUtils.executeCommand(ArrayUtils.addAll(new String[] { path }, args)); + } else { + return CLIUtils.executeCommand(ArrayUtils.addAll(new String[] { path, "--kubeconfig=" + configPath }, args)); + } + } + + public List packageManifests(final String operatorName, final String operatorNamespace) { + Type targetClassType = new TypeToken>() { + }.getType(); + return new Gson().fromJson(this.execute("get", "packagemanifest", operatorName, "-n", operatorNamespace, "-o", "json"), + targetClassType); + } + + public List catalogSources(final String operatorNamespace) { + Type targetClassType = new TypeToken>() { + }.getType(); + return new Gson().fromJson(this.execute("get", "catsrc", "-n", operatorNamespace, "-o", "json"), targetClassType); + } + + public CatalogSource catalogSource(final String operatorNamespace, final String catalogSourceName) { + io.fabric8.openshift.api.model.operatorhub.v1alpha1.CatalogSource loaded = catalogSources(operatorNamespace).stream() + .filter(cs -> cs.getMetadata().getName().equalsIgnoreCase(catalogSourceName)) + .findFirst().orElseThrow( + () -> new IllegalStateException( + "Unable to retrieve CatalogSource " + catalogSourceName)); + CatalogSource catalogSource = new CatalogSource(); + catalogSource.setMetadata(loaded.getMetadata()); + catalogSource.setSpec(loaded.getSpec()); + return catalogSource; + } +} diff --git a/kubernetes-client/src/main/java/org/jboss/intersmash/k8s/client/binary/KubernetesClientBinaryManager.java b/kubernetes-client/src/main/java/org/jboss/intersmash/k8s/client/binary/KubernetesClientBinaryManager.java new file mode 100644 index 000000000..1a084c155 --- /dev/null +++ b/kubernetes-client/src/main/java/org/jboss/intersmash/k8s/client/binary/KubernetesClientBinaryManager.java @@ -0,0 +1,125 @@ +package org.jboss.intersmash.k8s.client.binary; + +import java.io.File; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.Arrays; +import java.util.Objects; + +import org.apache.commons.lang3.StringUtils; +import 
org.jboss.intersmash.k8s.KubernetesConfig; +import org.jboss.intersmash.k8s.client.Kuberneteses; + +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class KubernetesClientBinaryManager { + private final String kubernetesClientBinaryPath; + + KubernetesClientBinaryManager(final String kubernetesClientBinaryPath) { + this.kubernetesClientBinaryPath = kubernetesClientBinaryPath; + } + + public String getBinaryPath() { + return kubernetesClientBinaryPath; + } + + public KubernetesClientBinary masterBinary(final String namespace) { + Objects.requireNonNull(namespace); + + return getBinary(KubernetesConfig.masterToken(), KubernetesConfig.masterUsername(), KubernetesConfig.masterPassword(), + KubernetesConfig.masterKubeconfig(), namespace); + } + + public KubernetesClientBinary adminBinary(final String namespace) { + Objects.requireNonNull(namespace); + + return getBinary(KubernetesConfig.adminToken(), KubernetesConfig.adminUsername(), KubernetesConfig.adminPassword(), + KubernetesConfig.adminKubeconfig(), namespace); + } + + private KubernetesClientBinary getBinary(final String token, final String username, final String password, + final String kubeconfig, + String namespace) { + String configPath = createUniqueKubernetesConfigFolder().resolve("kube.config").toAbsolutePath().toString(); + KubernetesClientBinary kubernetesClientBinary; + + if (StringUtils.isNotEmpty(token) || StringUtils.isNotEmpty(username)) { + // If we are using a token or username/password, we start with a nonexisting kubeconfig and do a "kubectl login" + kubernetesClientBinary = new KubernetesClientBinary(Kuberneteses.getBinaryPath(), configPath); + if (StringUtils.isNotEmpty(token)) { + kubernetesClientBinary.login(KubernetesConfig.url(), token); + } else { + kubernetesClientBinary.login(KubernetesConfig.url(), username, password); + } + } else { + // If we are using an existing kubeconfig (or a default kubeconfig), we copy the original kubeconfig + final Path actualConfigPath = Paths.get(configPath); + if (StringUtils.isNotEmpty(kubeconfig)) { + // flatten kubeconfig in case it contains certs/keys + try { + Files.write(actualConfigPath, + Arrays.asList(new KubernetesClientBinary(Kuberneteses.getBinaryPath(), null) + .execute("config", "view", "--kubeconfig", kubeconfig, "--flatten")), + StandardCharsets.UTF_8); + } catch (IOException e) { + throw new IllegalStateException("Couldn't create a copy of an existing Kubernetes configuration file", e); + } + } else { + // We copy the default ~/.kube/config + File defaultKubeConfig = Paths.get(getHomeDir(), ".kube", "config").toFile(); + if (defaultKubeConfig.isFile()) { + try { + Files.write(actualConfigPath, + Arrays.asList(new KubernetesClientBinary(Kuberneteses.getBinaryPath(), null) + .execute("config", "view", "--kubeconfig", defaultKubeConfig.getAbsolutePath(), + "--flatten")), + StandardCharsets.UTF_8); + } catch (IOException e) { + throw new IllegalStateException("Couldn't create a copy of the default Kubernetes configuration file", + e); + } + } else { + throw new IllegalStateException(defaultKubeConfig.getAbsolutePath() + + " does not exist and no other Kubernetes master option specified"); + } + } + kubernetesClientBinary = new KubernetesClientBinary(Kuberneteses.getBinaryPath(), configPath); + } + + if (StringUtils.isNotEmpty(namespace)) { + kubernetesClientBinary.namespace(namespace); + } + + return kubernetesClientBinary; + } + + private Path createUniqueKubernetesConfigFolder() { + try { + return Files.createTempDirectory(getProjectKubernetesConfigDir(), 
"config"); + } catch (IOException e) { + throw new IllegalStateException("Temporary folder for kubectl config couldn't be created", e); + } + } + + // TODO: this code is duplicated from OpenShifts.getHomeDir + // it should be revised together with token management + // https://github.com/xtf-cz/xtf/issues/464 + private static String getHomeDir() { + String home = System.getenv("HOME"); + if (home != null && !home.isEmpty()) { + File f = new File(home); + if (f.exists() && f.isDirectory()) { + return home; + } + } + return System.getProperty("user.home", "."); + } + + private Path getProjectKubernetesConfigDir() { + return Paths.get(KubernetesClientBinaryPathResolver.LOCAL_BINARY_CLIENT_TMP_DIR); + } +} diff --git a/kubernetes-client/src/main/java/org/jboss/intersmash/k8s/client/binary/KubernetesClientBinaryManagerFactory.java b/kubernetes-client/src/main/java/org/jboss/intersmash/k8s/client/binary/KubernetesClientBinaryManagerFactory.java new file mode 100644 index 000000000..f59481461 --- /dev/null +++ b/kubernetes-client/src/main/java/org/jboss/intersmash/k8s/client/binary/KubernetesClientBinaryManagerFactory.java @@ -0,0 +1,36 @@ +package org.jboss.intersmash.k8s.client.binary; + +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +public enum KubernetesClientBinaryManagerFactory { + INSTANCE; + + private volatile KubernetesClientBinaryManager kubernetesClientBinaryManager; + + public KubernetesClientBinaryManager getKubernetesClientBinaryManager() { + KubernetesClientBinaryManager localRef = kubernetesClientBinaryManager; + if (localRef == null) { + synchronized (KubernetesClientBinaryManager.class) { + localRef = kubernetesClientBinaryManager; + if (localRef == null) { + for (KubernetesClientBinaryPathResolver resolver : resolverList()) { + String path = resolver.resolve(); + if (path != null) { + kubernetesClientBinaryManager = localRef = new KubernetesClientBinaryManager(path); + break; + } + } + } + } + } + return localRef; + } + + private List resolverList() { + return Stream.of( + new ConfigurationBasedKubernetesClientBinaryPathResolver(), + new ClusterVersionBasedKubernetesClientBinaryPathResolver()).collect(Collectors.toList()); + } +} diff --git a/kubernetes-client/src/main/java/org/jboss/intersmash/k8s/client/binary/KubernetesClientBinaryPathResolver.java b/kubernetes-client/src/main/java/org/jboss/intersmash/k8s/client/binary/KubernetesClientBinaryPathResolver.java new file mode 100644 index 000000000..2e2e4c1a1 --- /dev/null +++ b/kubernetes-client/src/main/java/org/jboss/intersmash/k8s/client/binary/KubernetesClientBinaryPathResolver.java @@ -0,0 +1,17 @@ +package org.jboss.intersmash.k8s.client.binary; + +/** + * Defines a contract for resolving the path of a Kubernetes client binary, which concrete implementations will + * base upon different criteria. 
+ */ +public interface KubernetesClientBinaryPathResolver { + String BINARY_NAME = "kubectl"; + String LOCAL_BINARY_CLIENT_TMP_DIR = "tmp/kubectl"; + + /** + * Resolves Kubernetes client binary path + * + * @return Kubernetes client binary path + */ + String resolve(); +} diff --git a/kubernetes-client/src/main/java/org/jboss/intersmash/k8s/junit5/KubernetesNamespaceCreator.java b/kubernetes-client/src/main/java/org/jboss/intersmash/k8s/junit5/KubernetesNamespaceCreator.java new file mode 100644 index 000000000..808f559ff --- /dev/null +++ b/kubernetes-client/src/main/java/org/jboss/intersmash/k8s/junit5/KubernetesNamespaceCreator.java @@ -0,0 +1,72 @@ +package org.jboss.intersmash.k8s.junit5; + +import java.util.Arrays; + +import org.jboss.intersmash.k8s.KubernetesConfig; +import org.jboss.intersmash.k8s.client.Kuberneteses; +import org.jboss.intersmash.k8s.client.NamespaceManager; +import org.jboss.intersmash.k8s.client.TestCaseContext; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.extension.AfterAllCallback; +import org.junit.jupiter.api.extension.BeforeAllCallback; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.engine.descriptor.MethodBasedTestDescriptor; +import org.junit.platform.engine.FilterResult; +import org.junit.platform.engine.TestDescriptor; +import org.junit.platform.launcher.PostDiscoveryFilter; +import org.junit.platform.launcher.TestExecutionListener; +import org.junit.platform.launcher.TestPlan; + +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class KubernetesNamespaceCreator + implements TestExecutionListener, BeforeAllCallback, AfterAllCallback, PostDiscoveryFilter { + + @Override + public void testPlanExecutionStarted(TestPlan testPlan) { + NamespaceManager.createIfDoesNotExistsProject(KubernetesConfig.namespace()); + } + + @Override + public void beforeAll(ExtensionContext context) { + // todo this can be removed once TestCaseContextExtension is called always before ProjectCreator extension + setTestExecutionContext(context); + + log.debug("BeforeAll - Test case: " + context.getTestClass().get().getName() + " running in thread name: " + + Thread.currentThread().getName() + + " will use namespace: " + Kuberneteses.master().getNamespace() + " - thread context is: " + + TestCaseContext.getRunningTestCaseName()); + NamespaceManager.createIfDoesNotExistsProject(); + } + + private void setTestExecutionContext(ExtensionContext context) { + TestCaseContext.setRunningTestCase(context.getTestClass().get().getName()); + } + + @Override + public void afterAll(ExtensionContext context) { + if (KubernetesConfig.cleanKubernetes()) { + NamespaceManager.deleteProjectIfUsedNamespacePerTestCase(false); + } + } + + @Override + public void testPlanExecutionFinished(TestPlan testPlan) { + if (KubernetesConfig.cleanKubernetes()) { + NamespaceManager.deleteProject(KubernetesConfig.namespace(), true); + } + } + + @Override + public FilterResult apply(TestDescriptor testDescriptor) { + if (testDescriptor instanceof MethodBasedTestDescriptor) { + boolean disabled = Arrays.stream(((MethodBasedTestDescriptor) testDescriptor).getTestClass().getAnnotations()) + .filter(annotation -> annotation instanceof Disabled).count() > 0; + if (!disabled) { + NamespaceManager.addTestCaseToNamespaceEntryIfAbsent(testDescriptor); + } + } + return FilterResult.included(testDescriptor.getDisplayName()); + } +} diff --git 
a/kubernetes-client/src/test/java/org/jboss/intersmash/k8s/client/binary/ClusterVersionBasedKubernetesClientBinaryPathResolverTest.java b/kubernetes-client/src/test/java/org/jboss/intersmash/k8s/client/binary/ClusterVersionBasedKubernetesClientBinaryPathResolverTest.java new file mode 100644 index 000000000..0ecc1e3da --- /dev/null +++ b/kubernetes-client/src/test/java/org/jboss/intersmash/k8s/client/binary/ClusterVersionBasedKubernetesClientBinaryPathResolverTest.java @@ -0,0 +1,85 @@ +package org.jboss.intersmash.k8s.client.binary; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; + +import org.assertj.core.api.SoftAssertions; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; + +import cz.xtf.core.config.XTFConfig; +import io.fabric8.kubernetes.client.VersionInfo; +import uk.org.webcompere.systemstubs.jupiter.SystemStub; +import uk.org.webcompere.systemstubs.jupiter.SystemStubsExtension; +import uk.org.webcompere.systemstubs.properties.SystemProperties; + +@ExtendWith(SystemStubsExtension.class) +public class ClusterVersionBasedKubernetesClientBinaryPathResolverTest { + + @SystemStub + private SystemProperties systemProperties; + + private ClusterVersionBasedKubernetesClientBinaryPathResolver resolver = new ClusterVersionBasedKubernetesClientBinaryPathResolver(); + + @Test + public void existingKubernetesVersionPathIsResolvedWhenCacheEnabledTest() throws IOException { + final VersionInfo clusterVersion = new VersionInfo.Builder().withGitVersion("v1.27.3").build(); + systemProperties.set("intersmash.kubernetes.version", clusterVersion.getGitVersion()); + XTFConfig.loadConfig(); + // resolve (which includes the binary client download if it is not cached already) should pass here + final String resolvedPath = resolver.resolve(); + try { + // make assertions now + assertBinaryPathIsProperlyResolved(clusterVersion, resolvedPath); + } finally { + Files.deleteIfExists(ClusterVersionBasedKubernetesClientBinaryPathResolver.getRuntimeKubectl()); + Files.deleteIfExists(ClusterVersionBasedKubernetesClientBinaryPathResolver.getProjectKubernetesDir()); + } + } + + @Test + public void existingKubernetesVersionPathIsResolvedWhenCacheDisabledTest() throws IOException { + final VersionInfo clusterVersion = new VersionInfo.Builder().withGitVersion("v1.27.3").build(); + systemProperties.set("intersmash.kubernetes.version", clusterVersion.getGitVersion()); + systemProperties.set("intersmash.kubernetes.binary.cache.enabled", "false"); + try { + XTFConfig.loadConfig(); + // resolve (which includes the binary client download in this very case) should pass here + final String resolvedPath = resolver.resolve(); + try { + assertBinaryPathIsProperlyResolved(clusterVersion, resolvedPath); + } finally { + Files.deleteIfExists(ClusterVersionBasedKubernetesClientBinaryPathResolver.getRuntimeKubectl()); + Files.deleteIfExists(ClusterVersionBasedKubernetesClientBinaryPathResolver.getProjectKubernetesDir()); + } + } finally { + systemProperties.set("intersmash.kubernetes.binary.cache.enabled", "true"); + } + } + + @Test + public void fakeKubernetesVersionPathIsNotResolvedTest() { + final VersionInfo clusterVersion = new VersionInfo.Builder().withGitVersion("vX.Y.Z").build(); + systemProperties.set("intersmash.kubernetes.version", clusterVersion.getGitVersion()); + XTFConfig.loadConfig(); + // resolve (which includes the binary client download) should fail here + 
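+			// "vX.Y.Z" does not match any published kubectl release, so the download attempt is expected to fail
+			// and to surface as an IllegalStateException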
Assertions.assertThrows(IllegalStateException.class, () -> resolver.resolve()); + } + + private static void assertBinaryPathIsProperlyResolved(VersionInfo clusterVersion, String resolvedPath) { + // make assertions now + SoftAssertions softAssertions = new SoftAssertions(); + // path is not null + softAssertions.assertThat(resolvedPath).isNotNull(); + // path is correct and binary file exists + Path tmpPath = ClusterVersionBasedKubernetesClientBinaryPathResolver.getRuntimeKubectl(); + softAssertions.assertThat(resolvedPath).isEqualTo(tmpPath.toAbsolutePath().toString()); + softAssertions.assertThat(Files.exists(tmpPath)).isTrue(); + // archive is in cache + Path cachedPath = ClusterVersionBasedKubernetesClientBinaryPathResolver.getCachePath(clusterVersion); + softAssertions.assertThat(Files.exists(cachedPath)).isTrue(); + softAssertions.assertAll(); + } +} diff --git a/kubernetes-client/src/test/java/org/jboss/intersmash/k8s/client/binary/ConfigurationBasedKubernetesClientBinaryPathResolverTest.java b/kubernetes-client/src/test/java/org/jboss/intersmash/k8s/client/binary/ConfigurationBasedKubernetesClientBinaryPathResolverTest.java new file mode 100644 index 000000000..99629af1f --- /dev/null +++ b/kubernetes-client/src/test/java/org/jboss/intersmash/k8s/client/binary/ConfigurationBasedKubernetesClientBinaryPathResolverTest.java @@ -0,0 +1,33 @@ +package org.jboss.intersmash.k8s.client.binary; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; + +import com.google.common.base.Strings; + +import uk.org.webcompere.systemstubs.jupiter.SystemStub; +import uk.org.webcompere.systemstubs.jupiter.SystemStubsExtension; +import uk.org.webcompere.systemstubs.properties.SystemProperties; + +@ExtendWith(SystemStubsExtension.class) +public class ConfigurationBasedKubernetesClientBinaryPathResolverTest { + @SystemStub + private SystemProperties systemProperties; + + private KubernetesClientBinaryPathResolver resolver = new ConfigurationBasedKubernetesClientBinaryPathResolver(); + + @Test + public void resolveTest() { + final String currentBinaryPath = System.getProperty("intersmash.kubernetes.binary.path"); + final String testedBinaryPath = "/tmp"; + systemProperties.set("intersmash.kubernetes.binary.path", testedBinaryPath); + try { + Assertions.assertEquals(testedBinaryPath, resolver.resolve()); + } finally { + if (!Strings.isNullOrEmpty(currentBinaryPath)) { + systemProperties.set("intersmash.kubernetes.binary.path", currentBinaryPath); + } + } + } +} diff --git a/kubernetes-client/src/test/java/org/jboss/intersmash/k8s/client/binary/KubernetesClientBinaryManagerFactoryTest.java b/kubernetes-client/src/test/java/org/jboss/intersmash/k8s/client/binary/KubernetesClientBinaryManagerFactoryTest.java new file mode 100644 index 000000000..7fd1e3b30 --- /dev/null +++ b/kubernetes-client/src/test/java/org/jboss/intersmash/k8s/client/binary/KubernetesClientBinaryManagerFactoryTest.java @@ -0,0 +1,23 @@ +package org.jboss.intersmash.k8s.client.binary; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; + +import uk.org.webcompere.systemstubs.jupiter.SystemStub; +import uk.org.webcompere.systemstubs.jupiter.SystemStubsExtension; +import uk.org.webcompere.systemstubs.properties.SystemProperties; + +@ExtendWith(SystemStubsExtension.class) +public class KubernetesClientBinaryManagerFactoryTest { + @SystemStub + private SystemProperties systemProperties; + + 
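+	// A minimal usage sketch (assuming a kubectl binary can be resolved for the target cluster); the factory is a
+	// lazily initialized singleton, so repeated calls return the same manager instance, e.g.:
+	//   KubernetesClientBinary kubectl = KubernetesClientBinaryManagerFactory.INSTANCE
+	//           .getKubernetesClientBinaryManager()
+	//           .adminBinary("my-namespace"); // "my-namespace" is just an illustrative namespace name
+	//   kubectl.execute("get", "pods");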
@Test + public void managerBinaryPathDefaultsToCachedBinaryPathTest() { + KubernetesClientBinaryManager binaryManager = KubernetesClientBinaryManagerFactory.INSTANCE + .getKubernetesClientBinaryManager(); + Assertions.assertNotNull(binaryManager); + Assertions.assertTrue(binaryManager.getBinaryPath().endsWith("tmp/kubectl/kubectl")); + } +} diff --git a/pom.xml b/pom.xml index 80d17e8e1..cf4be7536 100644 --- a/pom.xml +++ b/pom.xml @@ -35,6 +35,7 @@ + kubernetes-client core provisioners testsuite @@ -126,6 +127,8 @@ 3.1.0 1.1.0 3.3.9 + 1.7.0 + 3.24.2 @@ -171,6 +174,11 @@ commons-lang3 ${version.org.apache.commons.commons-lang3} + + org.jboss.intersmash + intersmash-kubernetes-client + ${project.version} + org.jboss.intersmash intersmash-core @@ -228,6 +236,11 @@ junit-jupiter-params ${version.junit.jupiter} + + org.junit.platform + junit-platform-launcher + ${version.junit5.platform} + io.rest-assured @@ -381,6 +394,21 @@ openshift-client ${version.openshift-client} + + io.fabric8 + kubernetes-model + ${version.io.fabric8} + + + io.fabric8 + kubernetes-client + ${version.io.fabric8} + + + io.fabric8 + kubernetes-client-api + ${version.io.fabric8} + io.sundr @@ -433,6 +461,11 @@ maven-settings-builder ${version.maven} + + org.assertj + assertj-core + ${version.assertj-core} + diff --git a/provisioners/pom.xml b/provisioners/pom.xml index 6a477343b..a4b9ee852 100644 --- a/provisioners/pom.xml +++ b/provisioners/pom.xml @@ -74,6 +74,10 @@ cz.xtf junit5 + + org.jboss.intersmash + intersmash-kubernetes-client + org.jboss.intersmash intersmash-core @@ -181,6 +185,20 @@ kubernetes-model-core + + io.fabric8 + kubernetes-model + + + + io.fabric8 + kubernetes-client + + + io.fabric8 + kubernetes-client-api + + com.fasterxml.jackson.dataformat jackson-dataformat-yaml diff --git a/provisioners/src/main/java/org/jboss/intersmash/application/openshift/input/BinarySource.java b/provisioners/src/main/java/org/jboss/intersmash/application/input/BinarySource.java similarity index 93% rename from provisioners/src/main/java/org/jboss/intersmash/application/openshift/input/BinarySource.java rename to provisioners/src/main/java/org/jboss/intersmash/application/input/BinarySource.java index ef8ceba4b..ffec94ef6 100644 --- a/provisioners/src/main/java/org/jboss/intersmash/application/openshift/input/BinarySource.java +++ b/provisioners/src/main/java/org/jboss/intersmash/application/input/BinarySource.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jboss.intersmash.application.openshift.input; +package org.jboss.intersmash.application.input; import java.nio.file.Path; diff --git a/provisioners/src/main/java/org/jboss/intersmash/application/openshift/input/BinarySourceBuilder.java b/provisioners/src/main/java/org/jboss/intersmash/application/input/BinarySourceBuilder.java similarity index 93% rename from provisioners/src/main/java/org/jboss/intersmash/application/openshift/input/BinarySourceBuilder.java rename to provisioners/src/main/java/org/jboss/intersmash/application/input/BinarySourceBuilder.java index 69112da64..a173e877b 100644 --- a/provisioners/src/main/java/org/jboss/intersmash/application/openshift/input/BinarySourceBuilder.java +++ b/provisioners/src/main/java/org/jboss/intersmash/application/input/BinarySourceBuilder.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.jboss.intersmash.application.openshift.input; +package org.jboss.intersmash.application.input; import java.nio.file.Path; diff --git a/provisioners/src/main/java/org/jboss/intersmash/application/openshift/input/BuildInput.java b/provisioners/src/main/java/org/jboss/intersmash/application/input/BuildInput.java similarity index 93% rename from provisioners/src/main/java/org/jboss/intersmash/application/openshift/input/BuildInput.java rename to provisioners/src/main/java/org/jboss/intersmash/application/input/BuildInput.java index cdadc4d77..c80d81ed1 100644 --- a/provisioners/src/main/java/org/jboss/intersmash/application/openshift/input/BuildInput.java +++ b/provisioners/src/main/java/org/jboss/intersmash/application/input/BuildInput.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jboss.intersmash.application.openshift.input; +package org.jboss.intersmash.application.input; /** * Use the {@link BuildInputBuilder} to get instances implementing the {@link BuildInput} interface. diff --git a/provisioners/src/main/java/org/jboss/intersmash/application/openshift/input/BuildInputBuilder.java b/provisioners/src/main/java/org/jboss/intersmash/application/input/BuildInputBuilder.java similarity index 97% rename from provisioners/src/main/java/org/jboss/intersmash/application/openshift/input/BuildInputBuilder.java rename to provisioners/src/main/java/org/jboss/intersmash/application/input/BuildInputBuilder.java index 39e359903..24854a1cd 100644 --- a/provisioners/src/main/java/org/jboss/intersmash/application/openshift/input/BuildInputBuilder.java +++ b/provisioners/src/main/java/org/jboss/intersmash/application/input/BuildInputBuilder.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jboss.intersmash.application.openshift.input; +package org.jboss.intersmash.application.input; import java.io.File; import java.io.IOException; diff --git a/provisioners/src/main/java/org/jboss/intersmash/application/openshift/input/GitSource.java b/provisioners/src/main/java/org/jboss/intersmash/application/input/GitSource.java similarity index 93% rename from provisioners/src/main/java/org/jboss/intersmash/application/openshift/input/GitSource.java rename to provisioners/src/main/java/org/jboss/intersmash/application/input/GitSource.java index bfdc35263..e617449af 100644 --- a/provisioners/src/main/java/org/jboss/intersmash/application/openshift/input/GitSource.java +++ b/provisioners/src/main/java/org/jboss/intersmash/application/input/GitSource.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.jboss.intersmash.application.openshift.input; +package org.jboss.intersmash.application.input; /** * {@link BuildInput} represented by Git URI and reference diff --git a/provisioners/src/main/java/org/jboss/intersmash/application/openshift/input/GitSourceBuilder.java b/provisioners/src/main/java/org/jboss/intersmash/application/input/GitSourceBuilder.java similarity index 93% rename from provisioners/src/main/java/org/jboss/intersmash/application/openshift/input/GitSourceBuilder.java rename to provisioners/src/main/java/org/jboss/intersmash/application/input/GitSourceBuilder.java index 5e3635537..88c34db18 100644 --- a/provisioners/src/main/java/org/jboss/intersmash/application/openshift/input/GitSourceBuilder.java +++ b/provisioners/src/main/java/org/jboss/intersmash/application/input/GitSourceBuilder.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jboss.intersmash.application.openshift.input; +package org.jboss.intersmash.application.input; public interface GitSourceBuilder { diff --git a/provisioners/src/main/java/org/jboss/intersmash/application/openshift/BootableJarOpenShiftApplication.java b/provisioners/src/main/java/org/jboss/intersmash/application/openshift/BootableJarOpenShiftApplication.java index 0e8bb1b53..2ad1d19ce 100644 --- a/provisioners/src/main/java/org/jboss/intersmash/application/openshift/BootableJarOpenShiftApplication.java +++ b/provisioners/src/main/java/org/jboss/intersmash/application/openshift/BootableJarOpenShiftApplication.java @@ -18,7 +18,8 @@ import java.util.Collections; import java.util.List; -import org.jboss.intersmash.application.openshift.input.BinarySource; +import org.jboss.intersmash.application.input.BinarySource; +import org.jboss.intersmash.application.k8s.HasSecrets; import org.jboss.intersmash.provision.openshift.WildflyBootableJarImageOpenShiftProvisioner; import io.fabric8.kubernetes.api.model.EnvVar; diff --git a/provisioners/src/main/java/org/jboss/intersmash/application/openshift/Eap7ImageOpenShiftApplication.java b/provisioners/src/main/java/org/jboss/intersmash/application/openshift/Eap7ImageOpenShiftApplication.java index 685f14cb3..fb6e4c3f1 100644 --- a/provisioners/src/main/java/org/jboss/intersmash/application/openshift/Eap7ImageOpenShiftApplication.java +++ b/provisioners/src/main/java/org/jboss/intersmash/application/openshift/Eap7ImageOpenShiftApplication.java @@ -20,10 +20,10 @@ import java.util.Map; import java.util.Set; -import org.jboss.intersmash.application.openshift.input.BinarySourceBuilder; -import org.jboss.intersmash.application.openshift.input.BuildInput; -import org.jboss.intersmash.application.openshift.input.BuildInputBuilder; -import org.jboss.intersmash.application.openshift.input.GitSourceBuilder; +import org.jboss.intersmash.application.input.BinarySourceBuilder; +import org.jboss.intersmash.application.input.BuildInput; +import org.jboss.intersmash.application.input.BuildInputBuilder; +import org.jboss.intersmash.application.input.GitSourceBuilder; import org.jboss.intersmash.provision.openshift.Eap7ImageOpenShiftProvisioner; import cz.xtf.builder.builders.pod.PersistentVolumeClaim; diff --git a/provisioners/src/main/java/org/jboss/intersmash/application/openshift/PostgreSQLImageOpenShiftApplication.java b/provisioners/src/main/java/org/jboss/intersmash/application/openshift/PostgreSQLImageOpenShiftApplication.java index 62d0822b2..6869b1337 100644 --- 
a/provisioners/src/main/java/org/jboss/intersmash/application/openshift/PostgreSQLImageOpenShiftApplication.java +++ b/provisioners/src/main/java/org/jboss/intersmash/application/openshift/PostgreSQLImageOpenShiftApplication.java @@ -18,6 +18,7 @@ import java.util.Collections; import java.util.List; +import org.jboss.intersmash.application.k8s.HasSecrets; import org.jboss.intersmash.provision.openshift.PostgreSQLImageOpenShiftProvisioner; import cz.xtf.builder.builders.SecretBuilder; diff --git a/provisioners/src/main/java/org/jboss/intersmash/application/openshift/RhSsoTemplateOpenShiftApplication.java b/provisioners/src/main/java/org/jboss/intersmash/application/openshift/RhSsoTemplateOpenShiftApplication.java index 8f145aaf7..8d24c6067 100644 --- a/provisioners/src/main/java/org/jboss/intersmash/application/openshift/RhSsoTemplateOpenShiftApplication.java +++ b/provisioners/src/main/java/org/jboss/intersmash/application/openshift/RhSsoTemplateOpenShiftApplication.java @@ -2,6 +2,7 @@ import java.nio.file.Path; +import org.jboss.intersmash.application.k8s.HasSecrets; import org.jboss.intersmash.application.openshift.template.RhSsoTemplate; /** diff --git a/provisioners/src/main/java/org/jboss/intersmash/application/openshift/WildflyImageOpenShiftApplication.java b/provisioners/src/main/java/org/jboss/intersmash/application/openshift/WildflyImageOpenShiftApplication.java index c7c914fc8..da37e70ff 100644 --- a/provisioners/src/main/java/org/jboss/intersmash/application/openshift/WildflyImageOpenShiftApplication.java +++ b/provisioners/src/main/java/org/jboss/intersmash/application/openshift/WildflyImageOpenShiftApplication.java @@ -21,10 +21,10 @@ import java.util.Map; import java.util.Set; -import org.jboss.intersmash.application.openshift.input.BinarySourceBuilder; -import org.jboss.intersmash.application.openshift.input.BuildInput; -import org.jboss.intersmash.application.openshift.input.BuildInputBuilder; -import org.jboss.intersmash.application.openshift.input.GitSourceBuilder; +import org.jboss.intersmash.application.input.BinarySourceBuilder; +import org.jboss.intersmash.application.input.BuildInput; +import org.jboss.intersmash.application.input.BuildInputBuilder; +import org.jboss.intersmash.application.input.GitSourceBuilder; import cz.xtf.builder.builders.pod.PersistentVolumeClaim; import cz.xtf.builder.builders.pod.VolumeMount; diff --git a/provisioners/src/main/java/org/jboss/intersmash/application/openshift/WildflyOpenShiftApplication.java b/provisioners/src/main/java/org/jboss/intersmash/application/openshift/WildflyOpenShiftApplication.java index 60cb4466c..0eb829ca8 100644 --- a/provisioners/src/main/java/org/jboss/intersmash/application/openshift/WildflyOpenShiftApplication.java +++ b/provisioners/src/main/java/org/jboss/intersmash/application/openshift/WildflyOpenShiftApplication.java @@ -18,6 +18,8 @@ import java.util.Collections; import java.util.List; +import org.jboss.intersmash.application.k8s.HasSecrets; + import io.fabric8.kubernetes.api.model.Secret; /** diff --git a/provisioners/src/main/java/org/jboss/intersmash/application/openshift/helm/HelmChartOpenShiftApplication.java b/provisioners/src/main/java/org/jboss/intersmash/application/openshift/helm/HelmChartOpenShiftApplication.java index cfc7079ec..7b32743fe 100644 --- a/provisioners/src/main/java/org/jboss/intersmash/application/openshift/helm/HelmChartOpenShiftApplication.java +++ b/provisioners/src/main/java/org/jboss/intersmash/application/openshift/helm/HelmChartOpenShiftApplication.java @@ -18,7 +18,7 
@@ import java.util.Collections; import java.util.Map; -import org.jboss.intersmash.application.openshift.HasSecrets; +import org.jboss.intersmash.application.k8s.HasSecrets; import org.jboss.intersmash.application.openshift.OpenShiftApplication; import org.jboss.intersmash.provision.helm.HelmChartOpenShiftProvisioner; diff --git a/provisioners/src/main/java/org/jboss/intersmash/application/openshift/ActiveMQOperatorApplication.java b/provisioners/src/main/java/org/jboss/intersmash/application/operator/ActiveMQOperatorApplication.java similarity index 83% rename from provisioners/src/main/java/org/jboss/intersmash/application/openshift/ActiveMQOperatorApplication.java rename to provisioners/src/main/java/org/jboss/intersmash/application/operator/ActiveMQOperatorApplication.java index 08e97c11f..6cd7c2d05 100644 --- a/provisioners/src/main/java/org/jboss/intersmash/application/openshift/ActiveMQOperatorApplication.java +++ b/provisioners/src/main/java/org/jboss/intersmash/application/operator/ActiveMQOperatorApplication.java @@ -13,21 +13,19 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jboss.intersmash.application.openshift; +package org.jboss.intersmash.application.operator; import java.util.List; -import org.jboss.intersmash.provision.openshift.ActiveMQOperatorProvisioner; - import io.amq.broker.v1beta1.ActiveMQArtemis; import io.amq.broker.v1beta1.ActiveMQArtemisAddress; /** - * End user Application interface which presents ActiveMQ operator application on OpenShift Container Platform. + * End user Application interface which presents ActiveMQ operator application. * * The application will be deployed by: *
 * <ul>
- *     <li>{@link ActiveMQOperatorProvisioner}</li>
+ *     <li>{@link org.jboss.intersmash.provision.operator.ActiveMQOperatorProvisioner}</li>
 * </ul>
*/ public interface ActiveMQOperatorApplication extends OperatorApplication { diff --git a/provisioners/src/main/java/org/jboss/intersmash/application/openshift/HyperfoilOperatorApplication.java b/provisioners/src/main/java/org/jboss/intersmash/application/operator/HyperfoilOperatorApplication.java similarity index 93% rename from provisioners/src/main/java/org/jboss/intersmash/application/openshift/HyperfoilOperatorApplication.java rename to provisioners/src/main/java/org/jboss/intersmash/application/operator/HyperfoilOperatorApplication.java index 4bb29d5d3..29f96ce05 100644 --- a/provisioners/src/main/java/org/jboss/intersmash/application/openshift/HyperfoilOperatorApplication.java +++ b/provisioners/src/main/java/org/jboss/intersmash/application/operator/HyperfoilOperatorApplication.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jboss.intersmash.application.openshift; +package org.jboss.intersmash.application.operator; import io.hyperfoil.v1alpha2.Hyperfoil; diff --git a/provisioners/src/main/java/org/jboss/intersmash/application/openshift/InfinispanOperatorApplication.java b/provisioners/src/main/java/org/jboss/intersmash/application/operator/InfinispanOperatorApplication.java similarity index 90% rename from provisioners/src/main/java/org/jboss/intersmash/application/openshift/InfinispanOperatorApplication.java rename to provisioners/src/main/java/org/jboss/intersmash/application/operator/InfinispanOperatorApplication.java index 48b41623e..5d1355ea5 100644 --- a/provisioners/src/main/java/org/jboss/intersmash/application/openshift/InfinispanOperatorApplication.java +++ b/provisioners/src/main/java/org/jboss/intersmash/application/operator/InfinispanOperatorApplication.java @@ -13,13 +13,13 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jboss.intersmash.application.openshift; +package org.jboss.intersmash.application.operator; import java.util.List; import org.infinispan.v1.Infinispan; import org.infinispan.v2alpha1.Cache; -import org.jboss.intersmash.provision.openshift.InfinispanOperatorProvisioner; +import org.jboss.intersmash.provision.operator.InfinispanOperatorProvisioner; /** * End user Application interface which presents Infinispan operator application on OpenShift Container Platform. diff --git a/provisioners/src/main/java/org/jboss/intersmash/application/openshift/KafkaOperatorApplication.java b/provisioners/src/main/java/org/jboss/intersmash/application/operator/KafkaOperatorApplication.java similarity index 94% rename from provisioners/src/main/java/org/jboss/intersmash/application/openshift/KafkaOperatorApplication.java rename to provisioners/src/main/java/org/jboss/intersmash/application/operator/KafkaOperatorApplication.java index b190ac86d..5939a4a5a 100644 --- a/provisioners/src/main/java/org/jboss/intersmash/application/openshift/KafkaOperatorApplication.java +++ b/provisioners/src/main/java/org/jboss/intersmash/application/operator/KafkaOperatorApplication.java @@ -13,11 +13,11 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.jboss.intersmash.application.openshift; +package org.jboss.intersmash.application.operator; import java.util.List; -import org.jboss.intersmash.provision.openshift.KafkaOperatorProvisioner; +import org.jboss.intersmash.provision.operator.KafkaOperatorProvisioner; import io.strimzi.api.kafka.model.Kafka; import io.strimzi.api.kafka.model.KafkaTopic; diff --git a/provisioners/src/main/java/org/jboss/intersmash/application/openshift/KeycloakOperatorApplication.java b/provisioners/src/main/java/org/jboss/intersmash/application/operator/KeycloakOperatorApplication.java similarity index 90% rename from provisioners/src/main/java/org/jboss/intersmash/application/openshift/KeycloakOperatorApplication.java rename to provisioners/src/main/java/org/jboss/intersmash/application/operator/KeycloakOperatorApplication.java index ffd49e8fd..5e2658279 100644 --- a/provisioners/src/main/java/org/jboss/intersmash/application/openshift/KeycloakOperatorApplication.java +++ b/provisioners/src/main/java/org/jboss/intersmash/application/operator/KeycloakOperatorApplication.java @@ -13,12 +13,12 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jboss.intersmash.application.openshift; +package org.jboss.intersmash.application.operator; import java.util.Collections; import java.util.List; -import org.jboss.intersmash.provision.openshift.KeycloakOperatorProvisioner; +import org.jboss.intersmash.provision.operator.KeycloakOperatorProvisioner; import org.keycloak.k8s.v2alpha1.Keycloak; import org.keycloak.k8s.v2alpha1.KeycloakRealmImport; diff --git a/provisioners/src/main/java/org/jboss/intersmash/application/openshift/RhSsoOperatorApplication.java b/provisioners/src/main/java/org/jboss/intersmash/application/operator/RhSsoOperatorApplication.java similarity index 89% rename from provisioners/src/main/java/org/jboss/intersmash/application/openshift/RhSsoOperatorApplication.java rename to provisioners/src/main/java/org/jboss/intersmash/application/operator/RhSsoOperatorApplication.java index ff59f08c1..b7bebd77b 100644 --- a/provisioners/src/main/java/org/jboss/intersmash/application/openshift/RhSsoOperatorApplication.java +++ b/provisioners/src/main/java/org/jboss/intersmash/application/operator/RhSsoOperatorApplication.java @@ -13,12 +13,11 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jboss.intersmash.application.openshift; +package org.jboss.intersmash.application.operator; import java.util.Collections; import java.util.List; -import org.jboss.intersmash.provision.openshift.RhSsoOperatorProvisioner; import org.keycloak.v1alpha1.Keycloak; import org.keycloak.v1alpha1.KeycloakBackup; import org.keycloak.v1alpha1.KeycloakClient; @@ -30,7 +29,7 @@ * * The application will be deployed by: *
 * <ul>
- *     <li>{@link RhSsoOperatorProvisioner}</li>
+ *     <li>{@link org.jboss.intersmash.provision.operator.RhSsoOperatorProvisioner}</li>
 * </ul>
*/ @Deprecated(since = "0.0.2") diff --git a/provisioners/src/main/java/org/jboss/intersmash/application/openshift/WildflyOperatorApplication.java b/provisioners/src/main/java/org/jboss/intersmash/application/operator/WildflyOperatorApplication.java similarity index 84% rename from provisioners/src/main/java/org/jboss/intersmash/application/openshift/WildflyOperatorApplication.java rename to provisioners/src/main/java/org/jboss/intersmash/application/operator/WildflyOperatorApplication.java index 7cacf9ebd..05a2b108a 100644 --- a/provisioners/src/main/java/org/jboss/intersmash/application/openshift/WildflyOperatorApplication.java +++ b/provisioners/src/main/java/org/jboss/intersmash/application/operator/WildflyOperatorApplication.java @@ -13,9 +13,8 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jboss.intersmash.application.openshift; +package org.jboss.intersmash.application.operator; -import org.jboss.intersmash.provision.openshift.WildflyOperatorProvisioner; import org.wildfly.v1alpha1.WildFlyServer; /** @@ -23,7 +22,7 @@ * * The application will be deployed by: *
 * <ul>
- *     <li>{@link WildflyOperatorProvisioner}</li>
+ *     <li>{@link org.jboss.intersmash.provision.operator.WildflyOperatorProvisioner}</li>
 * </ul>
*/ public interface WildflyOperatorApplication extends OperatorApplication { diff --git a/provisioners/src/main/java/org/jboss/intersmash/provision/k8s/HyperfoilKubernetesOperatorProvisioner.java b/provisioners/src/main/java/org/jboss/intersmash/provision/k8s/HyperfoilKubernetesOperatorProvisioner.java new file mode 100644 index 000000000..9cf05e201 --- /dev/null +++ b/provisioners/src/main/java/org/jboss/intersmash/provision/k8s/HyperfoilKubernetesOperatorProvisioner.java @@ -0,0 +1,153 @@ +/** + * Copyright (C) 2023 Red Hat, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.jboss.intersmash.provision.k8s; + +import java.net.MalformedURLException; +import java.net.URL; +import java.util.Collections; +import java.util.Objects; + +import org.jboss.intersmash.application.operator.HyperfoilOperatorApplication; +import org.jboss.intersmash.k8s.client.Kuberneteses; +import org.jboss.intersmash.provision.openshift.WaitersUtil; +import org.jboss.intersmash.provision.operator.HyperfoilOperatorProvisioner; +import org.slf4j.event.Level; + +import io.fabric8.kubernetes.api.model.ObjectMeta; +import io.fabric8.kubernetes.api.model.apiextensions.v1.CustomResourceDefinition; +import io.fabric8.kubernetes.api.model.apiextensions.v1.CustomResourceDefinitionList; +import io.fabric8.kubernetes.api.model.networking.v1.HTTPIngressPathBuilder; +import io.fabric8.kubernetes.api.model.networking.v1.HTTPIngressRuleValueBuilder; +import io.fabric8.kubernetes.api.model.networking.v1.Ingress; +import io.fabric8.kubernetes.api.model.networking.v1.IngressBackendBuilder; +import io.fabric8.kubernetes.api.model.networking.v1.IngressBuilder; +import io.fabric8.kubernetes.api.model.networking.v1.IngressRuleBuilder; +import io.fabric8.kubernetes.api.model.networking.v1.IngressServiceBackendBuilder; +import io.fabric8.kubernetes.api.model.networking.v1.IngressSpecBuilder; +import io.fabric8.kubernetes.api.model.networking.v1.ServiceBackendPortBuilder; +import io.fabric8.kubernetes.client.NamespacedKubernetesClient; +import io.fabric8.kubernetes.client.NamespacedKubernetesClientAdapter; +import io.fabric8.kubernetes.client.dsl.NonNamespaceOperation; +import io.fabric8.kubernetes.client.dsl.Resource; +import io.fabric8.kubernetes.client.dsl.base.CustomResourceDefinitionContext; +import io.fabric8.kubernetes.client.dsl.internal.HasMetadataOperationsImpl; +import io.hyperfoil.v1alpha2.Hyperfoil; +import io.hyperfoil.v1alpha2.HyperfoilList; +import lombok.NonNull; + +/** + *

@see io.hyperfoil.v1alpha2 package-info.java file, for details about how to create/update/delete an
+ * Hyperfoil Custom Resource
+ *
+ * @see org.jboss.intersmash.tools.provision.openshift.operator.hyperfoil.client.release021 package-info.java
+ * file, for details about how to interact with the Hyperfoil Server which is started by the Hyperfoil Operator
+ * when an Hyperfoil Custom Resource is created
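+ *
+ * Note: unlike the OpenShift variant, which relies on an OpenShift Route, this provisioner exposes the Hyperfoil
+ * controller on plain Kubernetes by creating an Ingress named after the application and backed by the "hyperfoil"
+ * service on port 8090 (see {@code deploy()} and {@code getURL()} below).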

+ */
+public class HyperfoilKubernetesOperatorProvisioner
+		// leverage Hyperfoil common Operator based provisioner behavior
+		extends HyperfoilOperatorProvisioner
+		// ... and common K8s provisioning logic, too
+		implements KubernetesProvisioner {
+
+	public HyperfoilKubernetesOperatorProvisioner(@NonNull HyperfoilOperatorApplication application) {
+		super(application);
+	}
+
+	@Override
+	public NamespacedKubernetesClientAdapter client() {
+		return KubernetesProvisioner.super.client();
+	}
+
+	@Override
+	public String execute(String... args) {
+		return KubernetesProvisioner.super.execute(args);
+	}
+
+	@Override
+	public void deploy() {
+		// call super
+		super.deploy();
+		// on k8s we need to explicitly create one Ingress for external access
+		ObjectMeta metadata = new ObjectMeta();
+		metadata.setName(getApplication().getName());
+		Ingress ingress = new IngressBuilder().withMetadata(metadata)
+				.withSpec(
+						new IngressSpecBuilder()
+								.withRules(new IngressRuleBuilder()
+										.withHost(getApplication().getName())
+										.withHttp(
+												new HTTPIngressRuleValueBuilder()
+														.withPaths(Collections.singletonList(
+																new HTTPIngressPathBuilder().withPath("/")
+																		.withPathType("Prefix")
+																		.withBackend(
+																				new IngressBackendBuilder()
+																						.withService(
+																								new IngressServiceBackendBuilder()
+																										.withName("hyperfoil")
+																										.withPort(
+																												new ServiceBackendPortBuilder()
+																														.withNumber(8090)
+																														.build())
+																										.build())
+																						.build())
+																		.build()))
+														.build())
+										.build())
+								.build())
+				.build();
+		Kuberneteses.admin().network().v1().ingresses().create(ingress);
+		WaitersUtil.routeIsUp(getURL().toExternalForm())
+				.level(Level.DEBUG)
+				.waitFor();
+	}
+
+	@Override
+	public void undeploy() {
+		// let's remove the Ingress for external access
+		Kuberneteses.admin().network().v1().ingresses().withName(getApplication().getName()).delete();
+		// call super
+		super.undeploy();
+	}
+
+	@Override
+	public URL getURL() {
+		Ingress ingress = retrieveNamedIngress(getApplication().getName());
+		if (Objects.nonNull(ingress)) {
+			String host = ingress.getSpec().getRules().get(0).getHost();
+			String url = String.format("http://%s", host);
+			try {
+				return new URL(url);
+			} catch (MalformedURLException e) {
+				throw new RuntimeException(String.format("Hyperfoil operator route \"%s\" is malformed.", url), e);
+			}
+		}
+		return null;
+	}
+
+	// =================================================================================================================
+	// Client related
+	// =================================================================================================================
+	public HasMetadataOperationsImpl hyperfoilCustomResourcesClient(
+			CustomResourceDefinitionContext crdc) {
+		return Kuberneteses.master().newHasMetadataOperation(crdc, Hyperfoil.class, HyperfoilList.class);
+	}
+
+	@Override
+	public NonNamespaceOperation> customResourceDefinitionsClient() {
+		return Kuberneteses.admin().apiextensions().v1().customResourceDefinitions();
+	}
+}
diff --git a/provisioners/src/main/java/org/jboss/intersmash/provision/k8s/HyperfoilKubernetesOperatorProvisionerFactory.java b/provisioners/src/main/java/org/jboss/intersmash/provision/k8s/HyperfoilKubernetesOperatorProvisionerFactory.java
new file mode 100644
index 000000000..85bc35d28
--- /dev/null
+++ b/provisioners/src/main/java/org/jboss/intersmash/provision/k8s/HyperfoilKubernetesOperatorProvisionerFactory.java
@@ -0,0 +1,34 @@
+/**
+ * Copyright (C) 2023 Red Hat, Inc.
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.jboss.intersmash.provision.k8s; + +import org.jboss.intersmash.application.Application; +import org.jboss.intersmash.application.operator.HyperfoilOperatorApplication; +import org.jboss.intersmash.provision.ProvisionerFactory; + +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class HyperfoilKubernetesOperatorProvisionerFactory + implements ProvisionerFactory { + + @Override + public HyperfoilKubernetesOperatorProvisioner getProvisioner(Application application) { + if (HyperfoilOperatorApplication.class.isAssignableFrom(application.getClass())) + return new HyperfoilKubernetesOperatorProvisioner((HyperfoilOperatorApplication) application); + return null; + } +} diff --git a/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/ActiveMQOpenShiftOperatorProvisioner.java b/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/ActiveMQOpenShiftOperatorProvisioner.java new file mode 100644 index 000000000..1031717ad --- /dev/null +++ b/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/ActiveMQOpenShiftOperatorProvisioner.java @@ -0,0 +1,88 @@ +/** + * Copyright (C) 2023 Red Hat, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.jboss.intersmash.provision.openshift; + +import org.jboss.intersmash.application.operator.ActiveMQOperatorApplication; +import org.jboss.intersmash.provision.openshift.operator.activemq.address.ActiveMQArtemisAddressList; +import org.jboss.intersmash.provision.openshift.operator.activemq.broker.ActiveMQArtemisList; +import org.jboss.intersmash.provision.operator.ActiveMQOperatorProvisioner; + +import cz.xtf.core.openshift.OpenShifts; +import io.amq.broker.v1beta1.ActiveMQArtemis; +import io.amq.broker.v1beta1.ActiveMQArtemisAddress; +import io.fabric8.kubernetes.api.model.apiextensions.v1.CustomResourceDefinition; +import io.fabric8.kubernetes.api.model.apiextensions.v1.CustomResourceDefinitionList; +import io.fabric8.kubernetes.client.NamespacedKubernetesClientAdapter; +import io.fabric8.kubernetes.client.dsl.NonNamespaceOperation; +import io.fabric8.kubernetes.client.dsl.Resource; +import io.fabric8.kubernetes.client.dsl.base.CustomResourceDefinitionContext; +import io.fabric8.kubernetes.client.dsl.internal.HasMetadataOperationsImpl; +import io.fabric8.openshift.client.NamespacedOpenShiftClient; +import lombok.NonNull; + +/** + * ActiveMQ Operator based provisioner for OpenShift + */ +public class ActiveMQOpenShiftOperatorProvisioner + // leverage ActiveMQ Artemis common Operator based provisioner behavior + extends ActiveMQOperatorProvisioner + // ... and common OpenShift provisioning logic, too + implements OpenShiftProvisioner { + + public ActiveMQOpenShiftOperatorProvisioner(@NonNull ActiveMQOperatorApplication application) { + super(application); + } + + @Override + public NamespacedKubernetesClientAdapter client() { + return OpenShiftProvisioner.super.client(); + } + + @Override + public String execute(String... args) { + return OpenShiftProvisioner.super.execute(args); + } + + // ================================================================================================================= + // Operator based provisioning concrete implementation, see OperatorProvisioner + // ================================================================================================================= + @Override + protected String getOperatorNamespace() { + return "openshift-marketplace"; + } + + // ================================================================================================================= + // Client related + // ================================================================================================================= + @Override + public NonNamespaceOperation> customResourceDefinitionsClient() { + return OpenShifts.admin().apiextensions().v1().customResourceDefinitions(); + } + + @Override + public HasMetadataOperationsImpl activeMQArtemisCustomResourcesClient( + CustomResourceDefinitionContext crdc) { + return OpenShifts + .master().newHasMetadataOperation(crdc, ActiveMQArtemis.class, ActiveMQArtemisList.class); + } + + @Override + public HasMetadataOperationsImpl activeMQArtemisAddressesCustomResourcesClient( + CustomResourceDefinitionContext crdc) { + return OpenShifts + .master().newHasMetadataOperation(crdc, ActiveMQArtemisAddress.class, ActiveMQArtemisAddressList.class); + } +} diff --git a/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/ActiveMQOperatorProvisionerFactory.java b/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/ActiveMQOpenShiftOperatorProvisionerFactory.java similarity index 70% rename from 
provisioners/src/main/java/org/jboss/intersmash/provision/openshift/ActiveMQOperatorProvisionerFactory.java rename to provisioners/src/main/java/org/jboss/intersmash/provision/openshift/ActiveMQOpenShiftOperatorProvisionerFactory.java index eb150f696..31ee1e687 100644 --- a/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/ActiveMQOperatorProvisionerFactory.java +++ b/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/ActiveMQOpenShiftOperatorProvisionerFactory.java @@ -16,18 +16,18 @@ package org.jboss.intersmash.provision.openshift; import org.jboss.intersmash.application.Application; -import org.jboss.intersmash.application.openshift.ActiveMQOperatorApplication; +import org.jboss.intersmash.application.operator.ActiveMQOperatorApplication; import org.jboss.intersmash.provision.ProvisionerFactory; import lombok.extern.slf4j.Slf4j; @Slf4j -public class ActiveMQOperatorProvisionerFactory implements ProvisionerFactory { +public class ActiveMQOpenShiftOperatorProvisionerFactory implements ProvisionerFactory { @Override - public ActiveMQOperatorProvisioner getProvisioner(Application application) { + public ActiveMQOpenShiftOperatorProvisioner getProvisioner(Application application) { if (ActiveMQOperatorApplication.class.isAssignableFrom(application.getClass())) - return new ActiveMQOperatorProvisioner((ActiveMQOperatorApplication) application); + return new ActiveMQOpenShiftOperatorProvisioner((ActiveMQOperatorApplication) application); return null; } } diff --git a/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/BootableJarImageOpenShiftProvisioner.java b/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/BootableJarImageOpenShiftProvisioner.java index f1b4cb9a3..ee4ab6ff7 100644 --- a/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/BootableJarImageOpenShiftProvisioner.java +++ b/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/BootableJarImageOpenShiftProvisioner.java @@ -23,9 +23,9 @@ import java.util.stream.Collectors; import org.jboss.intersmash.IntersmashConfig; +import org.jboss.intersmash.application.input.BinarySource; +import org.jboss.intersmash.application.input.BuildInput; import org.jboss.intersmash.application.openshift.BootableJarOpenShiftApplication; -import org.jboss.intersmash.application.openshift.input.BinarySource; -import org.jboss.intersmash.application.openshift.input.BuildInput; import org.slf4j.event.Level; import cz.xtf.builder.builders.ApplicationBuilder; diff --git a/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/Eap7ImageOpenShiftProvisioner.java b/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/Eap7ImageOpenShiftProvisioner.java index fa6d2c295..73262a0e4 100644 --- a/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/Eap7ImageOpenShiftProvisioner.java +++ b/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/Eap7ImageOpenShiftProvisioner.java @@ -26,10 +26,10 @@ import org.assertj.core.util.Strings; import org.jboss.intersmash.IntersmashConfig; +import org.jboss.intersmash.application.input.BinarySource; +import org.jboss.intersmash.application.input.BuildInput; +import org.jboss.intersmash.application.input.GitSource; import org.jboss.intersmash.application.openshift.Eap7ImageOpenShiftApplication; -import org.jboss.intersmash.application.openshift.input.BinarySource; -import org.jboss.intersmash.application.openshift.input.BuildInput; -import 
org.jboss.intersmash.application.openshift.input.GitSource; import org.slf4j.event.Level; import cz.xtf.builder.builders.ApplicationBuilder; diff --git a/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/HyperfoilOpenShiftOperatorProvisioner.java b/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/HyperfoilOpenShiftOperatorProvisioner.java new file mode 100644 index 000000000..cb845bf2f --- /dev/null +++ b/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/HyperfoilOpenShiftOperatorProvisioner.java @@ -0,0 +1,96 @@ +/** + * Copyright (C) 2023 Red Hat, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.jboss.intersmash.provision.openshift; + +import java.net.MalformedURLException; +import java.net.URL; +import java.util.Objects; + +import org.jboss.intersmash.application.operator.HyperfoilOperatorApplication; +import org.jboss.intersmash.provision.operator.HyperfoilOperatorProvisioner; + +import cz.xtf.core.openshift.OpenShifts; +import io.fabric8.kubernetes.api.model.apiextensions.v1.CustomResourceDefinition; +import io.fabric8.kubernetes.api.model.apiextensions.v1.CustomResourceDefinitionList; +import io.fabric8.kubernetes.client.NamespacedKubernetesClientAdapter; +import io.fabric8.kubernetes.client.dsl.NonNamespaceOperation; +import io.fabric8.kubernetes.client.dsl.Resource; +import io.fabric8.kubernetes.client.dsl.base.CustomResourceDefinitionContext; +import io.fabric8.kubernetes.client.dsl.internal.HasMetadataOperationsImpl; +import io.fabric8.openshift.api.model.Route; +import io.fabric8.openshift.client.NamespacedOpenShiftClient; +import io.hyperfoil.v1alpha2.Hyperfoil; +import io.hyperfoil.v1alpha2.HyperfoilList; +import lombok.NonNull; + +/** + *

@see io.hyperfoil.v1alpha2 package-info.java file, for details about how to create/update/delete an
+ * Hyperfoil Custom Resource
+ *
+ * @see org.jboss.intersmash.tools.provision.openshift.operator.hyperfoil.client.release021 package-info.java
+ * file, for details about how to interact with the Hyperfoil Server which is started by the Hyperfoil Operator
+ * when an Hyperfoil Custom Resource is created
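+ *
+ * Note: on OpenShift, external access to the Hyperfoil controller is resolved through the Route exposed for the
+ * application (see {@code getURL()} below), so no additional Ingress is created by this provisioner.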

+ */ +public class HyperfoilOpenShiftOperatorProvisioner + // leverage Hyperfoil common Operator based provisioner behavior + extends HyperfoilOperatorProvisioner + // ... and common K8s provisioning logic, too + implements OpenShiftProvisioner { + + public HyperfoilOpenShiftOperatorProvisioner(@NonNull HyperfoilOperatorApplication application) { + super(application); + } + + @Override + public NamespacedKubernetesClientAdapter client() { + return OpenShiftProvisioner.super.client(); + } + + @Override + public String execute(String... args) { + return OpenShiftProvisioner.super.execute(args); + } + + @Override + public URL getURL() { + Route route = OpenShiftProvisioner.openShift.getRoute(getApplication().getName()); + if (Objects.nonNull(route)) { + String host = route.getSpec().getHost() != null ? route.getSpec().getHost() + : route.getStatus().getIngress().get(0).getHost(); + String url = String.format("https://%s", host); + try { + return new URL( + url); + } catch (MalformedURLException e) { + throw new RuntimeException(String.format("Hyperfoil operator route \"%s\"is malformed.", url), e); + } + } + return null; + } + + // ================================================================================================================= + // Client related + // ================================================================================================================= + public HasMetadataOperationsImpl hyperfoilCustomResourcesClient( + CustomResourceDefinitionContext crdc) { + return OpenShifts + .master().newHasMetadataOperation(crdc, Hyperfoil.class, HyperfoilList.class); + } + + @Override + public NonNamespaceOperation> customResourceDefinitionsClient() { + return OpenShifts.admin().apiextensions().v1().customResourceDefinitions(); + } +} diff --git a/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/HyperfoilOperatorProvisionerFactory.java b/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/HyperfoilOpenShiftOperatorProvisionerFactory.java similarity index 70% rename from provisioners/src/main/java/org/jboss/intersmash/provision/openshift/HyperfoilOperatorProvisionerFactory.java rename to provisioners/src/main/java/org/jboss/intersmash/provision/openshift/HyperfoilOpenShiftOperatorProvisionerFactory.java index 914457c04..2dc8fc51a 100644 --- a/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/HyperfoilOperatorProvisionerFactory.java +++ b/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/HyperfoilOpenShiftOperatorProvisionerFactory.java @@ -16,18 +16,18 @@ package org.jboss.intersmash.provision.openshift; import org.jboss.intersmash.application.Application; -import org.jboss.intersmash.application.openshift.HyperfoilOperatorApplication; +import org.jboss.intersmash.application.operator.HyperfoilOperatorApplication; import org.jboss.intersmash.provision.ProvisionerFactory; import lombok.extern.slf4j.Slf4j; @Slf4j -public class HyperfoilOperatorProvisionerFactory implements ProvisionerFactory { +public class HyperfoilOpenShiftOperatorProvisionerFactory implements ProvisionerFactory { @Override - public HyperfoilOperatorProvisioner getProvisioner(Application application) { + public HyperfoilOpenShiftOperatorProvisioner getProvisioner(Application application) { if (HyperfoilOperatorApplication.class.isAssignableFrom(application.getClass())) - return new HyperfoilOperatorProvisioner((HyperfoilOperatorApplication) application); + return new HyperfoilOpenShiftOperatorProvisioner((HyperfoilOperatorApplication) 
application); return null; } } diff --git a/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/HyperfoilOperatorProvisioner.java b/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/HyperfoilOperatorProvisioner.java deleted file mode 100644 index 578f0a492..000000000 --- a/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/HyperfoilOperatorProvisioner.java +++ /dev/null @@ -1,234 +0,0 @@ -/** - * Copyright (C) 2023 Red Hat, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.jboss.intersmash.provision.openshift; - -import java.net.MalformedURLException; -import java.net.URL; -import java.util.ArrayList; -import java.util.List; -import java.util.Objects; - -import org.jboss.intersmash.IntersmashConfig; -import org.jboss.intersmash.application.openshift.HyperfoilOperatorApplication; -import org.jboss.intersmash.provision.openshift.operator.OperatorProvisioner; -import org.slf4j.event.Level; - -import cz.xtf.core.config.OpenShiftConfig; -import cz.xtf.core.event.helpers.EventHelper; -import cz.xtf.core.openshift.OpenShiftWaiters; -import cz.xtf.core.openshift.OpenShifts; -import cz.xtf.core.waiting.SimpleWaiter; -import io.fabric8.kubernetes.api.model.DeletionPropagation; -import io.fabric8.kubernetes.api.model.Pod; -import io.fabric8.kubernetes.api.model.apiextensions.v1.CustomResourceDefinition; -import io.fabric8.kubernetes.client.dsl.MixedOperation; -import io.fabric8.kubernetes.client.dsl.NonNamespaceOperation; -import io.fabric8.kubernetes.client.dsl.Resource; -import io.fabric8.kubernetes.client.dsl.base.CustomResourceDefinitionContext; -import io.fabric8.openshift.api.model.Route; -import io.hyperfoil.v1alpha2.Hyperfoil; -import io.hyperfoil.v1alpha2.HyperfoilList; -import lombok.NonNull; - -/** - *

@see io.hyperfoil.v1alpha2 package-info.java file, for details about how to create/update/delete an
- * Hyperfoil Custom Resource
- *
- * @see org.jboss.intersmash.provision.openshift.operator.hyperfoil.client.release021 package-info.java
- * file, for details about how to interact with the Hyperfoil Server which is started by the Hyperfoil Operator
- * when an Hyperfoil Custom Resource is created
- */ -public class HyperfoilOperatorProvisioner extends OperatorProvisioner { - // this is the name of the Hyperfoil CustomResourceDefinition - // you can get it with command: - // oc get crd hyperfoils.hyperfoil.io -o template --template='{{ .metadata.name }}' - private final static String HYPERFOIL_CUSTOM_RESOURCE_DEFINITION = "hyperfoils.hyperfoil.io"; - private static NonNamespaceOperation> HYPERFOIL_CUSTOM_RESOURCE_CLIENT; - // this is the packagemanifest for the hyperfoil operator; - // you can get it with command: - // oc get packagemanifest hyperfoil-bundle -o template --template='{{ .metadata.name }}' - private static final String OPERATOR_ID = IntersmashConfig.hyperfoilOperatorPackageManifest(); - - public HyperfoilOperatorProvisioner(@NonNull HyperfoilOperatorApplication hyperfoilOperatorApplication) { - super(hyperfoilOperatorApplication, OPERATOR_ID); - } - - public static String getOperatorId() { - return OPERATOR_ID; - } - - /** - * Get a client capable of working with {@link #HYPERFOIL_CUSTOM_RESOURCE_DEFINITION} custom resource. - * - * @return client for operations with {@link #HYPERFOIL_CUSTOM_RESOURCE_DEFINITION} custom resource - */ - NonNamespaceOperation> hyperfoilClient() { - if (HYPERFOIL_CUSTOM_RESOURCE_CLIENT == null) { - CustomResourceDefinition crd = OpenShifts.admin().apiextensions().v1().customResourceDefinitions() - .withName(HYPERFOIL_CUSTOM_RESOURCE_DEFINITION).get(); - CustomResourceDefinitionContext crdc = CustomResourceDefinitionContext.fromCrd(crd); - if (!getCustomResourceDefinitions().contains(HYPERFOIL_CUSTOM_RESOURCE_DEFINITION)) { - throw new RuntimeException(String.format("[%s] custom resource is not provided by [%s] operator.", - HYPERFOIL_CUSTOM_RESOURCE_DEFINITION, OPERATOR_ID)); - } - MixedOperation> hyperfoilCrClient = OpenShifts - .master().newHasMetadataOperation(crdc, Hyperfoil.class, HyperfoilList.class); - HYPERFOIL_CUSTOM_RESOURCE_CLIENT = hyperfoilCrClient.inNamespace(OpenShiftConfig.namespace()); - } - return HYPERFOIL_CUSTOM_RESOURCE_CLIENT; - } - - /** - * Get a reference to Hyperfoil object. Use get() to get the actual object, or null in case it does not - * exist on tested cluster. - * - * @return A concrete {@link Resource} instance representing the {@link Hyperfoil} resource definition - */ - public Resource hyperfoil() { - return hyperfoilClient().withName(getApplication().getName()); - } - - @Override - public void deploy() { - ffCheck = FailFastUtils.getFailFastCheck(EventHelper.timeOfLastEventBMOrTestNamespaceOrEpoch(), - getApplication().getName()); - if (!isSubscribed()) { - subscribe(); - } - hyperfoilClient().createOrReplace(getApplication().getHyperfoil()); - new SimpleWaiter(() -> hyperfoil().get().getStatus() != null) - .failFast(ffCheck) - .reason("Wait for status field to be initialized.") - .level(Level.DEBUG) - .waitFor(); - new SimpleWaiter(() -> getPods().size() == 1) - .failFast(ffCheck) - .reason("Wait for expected number of replicas to be active.") - .level(Level.DEBUG) - .waitFor(); - WaitersUtil.routeIsUp(getURL().toExternalForm()) - .level(Level.DEBUG) - .waitFor(); - } - - @Override - public URL getURL() { - Route route = OpenShiftProvisioner.openShift.getRoute(getApplication().getName()); - if (Objects.nonNull(route)) { - String host = route.getSpec().getHost() != null ? 
route.getSpec().getHost() - : route.getStatus().getIngress().get(0).getHost(); - String url = String.format("https://%s", host); - try { - return new URL( - url); - } catch (MalformedURLException e) { - throw new RuntimeException(String.format("Hyperfoil operator route \"%s\"is malformed.", url), e); - } - } - return null; - } - - @Override - public void undeploy() { - undeploy(true); - } - - public void undeploy(boolean unsubscribe) { - hyperfoil().withPropagationPolicy(DeletionPropagation.FOREGROUND).delete(); - OpenShiftWaiters.get(OpenShiftProvisioner.openShift, ffCheck).areExactlyNPodsReady(0, "app", getApplication().getName()) - .level(Level.DEBUG).waitFor(); - if (unsubscribe) { - unsubscribe(); - } - } - - @Override - public List getPods() { - List pods = new ArrayList<>(); - Pod hyperfoilControllerPod = OpenShiftProvisioner.openShift - .getPod(String.format("%s-controller", getApplication().getName())); - if (isContainerReady(hyperfoilControllerPod, "controller")) { - pods.add(hyperfoilControllerPod); - } - return pods; - } - - /** - * This method checks if the Operator's POD is actually running; - * It's been tailored on the community-operators Cluster Service version format which is missing label - * spec.install.spec.deployments.spec.template.metadata.labels."app.kubernetes.io/name" which is used - * in @see OperatorProvisioner#waitForOperatorPod() (see - * https://github.com/operator-framework/community-operators/tree/master/community-operators/hyperfoil-bundle) - */ - @Override - protected void waitForOperatorPod() { - String[] operatorSpecs = getAdminBinary().execute("get", "csvs", getCurrentCSV(), "-o", "template", "--template", - "{{range .spec.install.spec.deployments}}{{printf \"%d|%s\\n\" .spec.replicas .name}}{{end}}") - .split(System.lineSeparator()); - for (String spec : operatorSpecs) { - String[] operatorSpec = spec.split("\\|"); - if (operatorSpec.length != 2) { - throw new RuntimeException("Failed to get operator deployment spec from csvs!"); - } - new SimpleWaiter(() -> OpenShiftProvisioner.openShift.getPods().stream().filter( - pod -> (pod.getMetadata() - .getName() - .startsWith(operatorSpec[1]) - && pod.getStatus().getPhase().equalsIgnoreCase("Running"))) - .count() == Integer.valueOf(operatorSpec[0])) - .failFast(ffCheck) - .reason("Wait for expected number of replicas to be active.") - .level(Level.DEBUG) - .waitFor(); - } - } - - /** - * Tells if a specific container inside the pod is ready - * - * @param pod - * @param containerName: name of the container - * @return - */ - private boolean isContainerReady(Pod pod, String containerName) { - if (Objects.nonNull(pod)) { - return pod.getStatus().getContainerStatuses().stream() - .filter(containerStatus -> containerStatus.getName().equalsIgnoreCase(containerName) - && containerStatus.getReady()) - .count() > 0; - } - return false; - } - - @Override - protected String getOperatorCatalogSource() { - return IntersmashConfig.hyperfoilOperatorCatalogSource(); - } - - @Override - protected String getOperatorIndexImage() { - return IntersmashConfig.hyperfoilOperatorIndexImage(); - } - - @Override - protected String getOperatorChannel() { - return IntersmashConfig.hyperfoilOperatorChannel(); - } - - @Override - public void scale(int replicas, boolean wait) { - throw new UnsupportedOperationException("To be implemented!"); - } -} diff --git a/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/InfinispanOpenShiftOperatorProvisioner.java 
b/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/InfinispanOpenShiftOperatorProvisioner.java new file mode 100644 index 000000000..e59acd234 --- /dev/null +++ b/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/InfinispanOpenShiftOperatorProvisioner.java @@ -0,0 +1,162 @@ +/** + * Copyright (C) 2023 Red Hat, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.jboss.intersmash.provision.openshift; + +import java.net.MalformedURLException; +import java.net.URL; +import java.util.List; +import java.util.Objects; + +import org.assertj.core.util.Lists; +import org.infinispan.v1.Infinispan; +import org.infinispan.v1.infinispanspec.Expose; +import org.infinispan.v2alpha1.Cache; +import org.jboss.intersmash.application.operator.InfinispanOperatorApplication; +import org.jboss.intersmash.provision.openshift.operator.infinispan.cache.CacheList; +import org.jboss.intersmash.provision.openshift.operator.infinispan.infinispan.InfinispanList; +import org.jboss.intersmash.provision.operator.InfinispanOperatorProvisioner; + +import cz.xtf.core.openshift.OpenShifts; +import io.fabric8.kubernetes.api.model.Pod; +import io.fabric8.kubernetes.api.model.Service; +import io.fabric8.kubernetes.api.model.apiextensions.v1.CustomResourceDefinition; +import io.fabric8.kubernetes.api.model.apiextensions.v1.CustomResourceDefinitionList; +import io.fabric8.kubernetes.api.model.apps.StatefulSet; +import io.fabric8.kubernetes.client.NamespacedKubernetesClientAdapter; +import io.fabric8.kubernetes.client.dsl.NonNamespaceOperation; +import io.fabric8.kubernetes.client.dsl.Resource; +import io.fabric8.kubernetes.client.dsl.base.CustomResourceDefinitionContext; +import io.fabric8.kubernetes.client.dsl.internal.HasMetadataOperationsImpl; +import io.fabric8.openshift.api.model.Route; +import io.fabric8.openshift.client.NamespacedOpenShiftClient; +import lombok.NonNull; + +public class InfinispanOpenShiftOperatorProvisioner + // leverage Infinispan common Operator based provisioner behavior + extends InfinispanOperatorProvisioner + // ... and common OpenShift provisioning logic, too + implements OpenShiftProvisioner { + + public InfinispanOpenShiftOperatorProvisioner( + @NonNull InfinispanOperatorApplication application) { + super(application); + } + + @Override + public NamespacedKubernetesClientAdapter client() { + return OpenShiftProvisioner.super.client(); + } + + @Override + public String execute(String... args) { + return OpenShiftProvisioner.super.execute(args); + } + + // ================================================================================================================= + // Infinispan related + // ================================================================================================================= + @Override + public List getPods() { + StatefulSet statefulSet = OpenShiftProvisioner.openShift.getStatefulSet(getApplication().getName()); + return Objects.nonNull(statefulSet) + ? 
OpenShiftProvisioner.openShift.getLabeledPods("controller-revision-hash", + statefulSet.getStatus().getUpdateRevision()) + : Lists.emptyList(); + } + + public Route getRoute(final String name) { + return OpenShiftProvisioner.openShift.getRoute(name); + } + + // ================================================================================================================= + // Operator based provisioning concrete implementation, see OperatorProvisioner + // ================================================================================================================= + @Override + protected String getOperatorNamespace() { + return "openshift-marketplace"; + } + + // ================================================================================================================= + // Related to generic provisioning behavior + // ================================================================================================================= + /** + * The result is affected by the CR definition and specifically the method will return the {@code service} URL in + * case the CR {@code .spec.expose.type} is set to {@code NodePort} or {@code LoadBalancer} while it will return the + * route URL (i.e. for external access) when {@code .spec.expose.type} is set to {@code Route} + * + * @return a {@link URL} instance that represents the Infinispan service route URL + */ + @Override + public URL getURL() { + final Service defaultInternalService = getService(getApplication().getName()); + String internalUrl = "http://" + defaultInternalService.getSpec().getClusterIP() + ":11222"; + String externalUrl = null; + if (getApplication().getInfinispan().getSpec().getExpose() != null) { + final String exposedType = getApplication().getInfinispan().getSpec().getExpose().getType().getValue(); + switch (Expose.Type.valueOf(exposedType)) { + case NodePort: + // TODO - check + // see see https://github.com/infinispan/infinispan-operator/blob/2.0.x/pkg/apis/infinispan/v1/infinispan_types.go#L107 + externalUrl = "http://" + + getService(getApplication().getName() + "-external").getSpec() + .getClusterIP() + + getApplication().getInfinispan().getSpec().getExpose().getNodePort(); + break; + case LoadBalancer: + // TODO - check + // see https://github.com/infinispan/infinispan-operator/blob/2.0.x/pkg/apis/infinispan/v1/infinispan_types.go#L111 + externalUrl = "http://" + + getService(getApplication().getName() + "-external").getSpec() + .getExternalIPs().get(0) + + getApplication().getInfinispan().getSpec().getExpose().getNodePort(); + break; + case Route: + // https://github.com/infinispan/infinispan-operator/blob/2.0.x/pkg/apis/infinispan/v1/infinispan_types.go#L116 + Route route = getRoute(getApplication().getName() + "-external"); + externalUrl = "https://" + route.getSpec().getHost(); + break; + default: + throw new UnsupportedOperationException(String.format("Unsupported .spec.expose.type: %s", exposedType)); + } + } + try { + return new URL(externalUrl == null ? 
internalUrl : externalUrl); + } catch (MalformedURLException e) { + throw new RuntimeException(String.format("Infinispan operator Internal URL \"%s\" is malformed.", internalUrl), e); + } + } + + // ================================================================================================================= + // Client related + // ================================================================================================================= + @Override + public NonNamespaceOperation> customResourceDefinitionsClient() { + return OpenShifts.admin().apiextensions().v1().customResourceDefinitions(); + } + + @Override + public HasMetadataOperationsImpl infinispanCustomResourcesClient( + CustomResourceDefinitionContext crdc) { + return OpenShifts + .master().newHasMetadataOperation(crdc, Infinispan.class, InfinispanList.class); + } + + @Override + public HasMetadataOperationsImpl cacheCustomResourcesClient(CustomResourceDefinitionContext crdc) { + return OpenShifts + .master().newHasMetadataOperation(crdc, Cache.class, CacheList.class); + } +} diff --git a/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/InfinispanOperatorProvisionerFactory.java b/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/InfinispanOpenShiftOperatorProvisionerFactory.java similarity index 69% rename from provisioners/src/main/java/org/jboss/intersmash/provision/openshift/InfinispanOperatorProvisionerFactory.java rename to provisioners/src/main/java/org/jboss/intersmash/provision/openshift/InfinispanOpenShiftOperatorProvisionerFactory.java index 8fcb45cc2..9a7544967 100644 --- a/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/InfinispanOperatorProvisionerFactory.java +++ b/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/InfinispanOpenShiftOperatorProvisionerFactory.java @@ -16,18 +16,19 @@ package org.jboss.intersmash.provision.openshift; import org.jboss.intersmash.application.Application; -import org.jboss.intersmash.application.openshift.InfinispanOperatorApplication; +import org.jboss.intersmash.application.operator.InfinispanOperatorApplication; import org.jboss.intersmash.provision.ProvisionerFactory; import lombok.extern.slf4j.Slf4j; @Slf4j -public class InfinispanOperatorProvisionerFactory implements ProvisionerFactory { +public class InfinispanOpenShiftOperatorProvisionerFactory + implements ProvisionerFactory { @Override - public InfinispanOperatorProvisioner getProvisioner(Application application) { + public InfinispanOpenShiftOperatorProvisioner getProvisioner(Application application) { if (InfinispanOperatorApplication.class.isAssignableFrom(application.getClass())) - return new InfinispanOperatorProvisioner((InfinispanOperatorApplication) application); + return new InfinispanOpenShiftOperatorProvisioner((InfinispanOperatorApplication) application); return null; } } diff --git a/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/KafkaOpenShiftOperatorProvisioner.java b/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/KafkaOpenShiftOperatorProvisioner.java new file mode 100644 index 000000000..2dc7c7cc9 --- /dev/null +++ b/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/KafkaOpenShiftOperatorProvisioner.java @@ -0,0 +1,71 @@ +/** + * Copyright (C) 2023 Red Hat, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.jboss.intersmash.provision.openshift; + +import org.jboss.intersmash.application.operator.KafkaOperatorApplication; +import org.jboss.intersmash.provision.operator.KafkaOperatorProvisioner; + +import cz.xtf.core.openshift.OpenShifts; +import io.fabric8.kubernetes.api.model.apiextensions.v1.CustomResourceDefinition; +import io.fabric8.kubernetes.api.model.apiextensions.v1.CustomResourceDefinitionList; +import io.fabric8.kubernetes.client.NamespacedKubernetesClientAdapter; +import io.fabric8.kubernetes.client.dsl.NonNamespaceOperation; +import io.fabric8.kubernetes.client.dsl.Resource; +import io.fabric8.openshift.client.NamespacedOpenShiftClient; +import lombok.NonNull; +import lombok.extern.slf4j.Slf4j; + +/** + * Deploys an application that implements {@link KafkaOperatorApplication} interface and which is extended by this + * class. + */ +@Slf4j +public class KafkaOpenShiftOperatorProvisioner + // leverage Kafka common Operator based provisioner behavior + extends KafkaOperatorProvisioner + // leverage common OpenShift provisioning logic + implements OpenShiftProvisioner { + + public KafkaOpenShiftOperatorProvisioner(@NonNull KafkaOperatorApplication application) { + super(application); + } + + @Override + public NamespacedKubernetesClientAdapter client() { + return OpenShiftProvisioner.super.client(); + } + + @Override + public String execute(String... 
args) { + return OpenShiftProvisioner.super.execute(args); + } + + // ================================================================================================================= + // Operator based provisioning concrete implementation, see OperatorProvisioner + // ================================================================================================================= + @Override + protected String getOperatorNamespace() { + return "openshift-marketplace"; + } + + // ================================================================================================================= + // Client related + // ================================================================================================================= + @Override + public NonNamespaceOperation> customResourceDefinitionsClient() { + return OpenShifts.admin().apiextensions().v1().customResourceDefinitions(); + } +} diff --git a/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/KafkaOperatorProvisionerFactory.java b/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/KafkaOpenShiftOperatorProvisionerFactory.java similarity index 71% rename from provisioners/src/main/java/org/jboss/intersmash/provision/openshift/KafkaOperatorProvisionerFactory.java rename to provisioners/src/main/java/org/jboss/intersmash/provision/openshift/KafkaOpenShiftOperatorProvisionerFactory.java index 0d2b3a957..06ff70441 100644 --- a/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/KafkaOperatorProvisionerFactory.java +++ b/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/KafkaOpenShiftOperatorProvisionerFactory.java @@ -16,18 +16,18 @@ package org.jboss.intersmash.provision.openshift; import org.jboss.intersmash.application.Application; -import org.jboss.intersmash.application.openshift.KafkaOperatorApplication; +import org.jboss.intersmash.application.operator.KafkaOperatorApplication; import org.jboss.intersmash.provision.ProvisionerFactory; import lombok.extern.slf4j.Slf4j; @Slf4j -public class KafkaOperatorProvisionerFactory implements ProvisionerFactory { +public class KafkaOpenShiftOperatorProvisionerFactory implements ProvisionerFactory { @Override - public KafkaOperatorProvisioner getProvisioner(Application application) { + public KafkaOpenShiftOperatorProvisioner getProvisioner(Application application) { if (KafkaOperatorApplication.class.isAssignableFrom(application.getClass())) - return new KafkaOperatorProvisioner((KafkaOperatorApplication) application); + return new KafkaOpenShiftOperatorProvisioner((KafkaOperatorApplication) application); return null; } } diff --git a/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/KeycloakOpenShiftOperatorProvisioner.java b/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/KeycloakOpenShiftOperatorProvisioner.java new file mode 100644 index 000000000..c0e52f5de --- /dev/null +++ b/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/KeycloakOpenShiftOperatorProvisioner.java @@ -0,0 +1,136 @@ +/** + * Copyright (C) 2023 Red Hat, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.jboss.intersmash.provision.openshift; + +import java.net.MalformedURLException; +import java.net.URL; +import java.util.Map; + +import org.assertj.core.util.Strings; +import org.jboss.intersmash.IntersmashConfig; +import org.jboss.intersmash.application.operator.KeycloakOperatorApplication; +import org.jboss.intersmash.provision.operator.KeycloakOperatorProvisioner; +import org.keycloak.k8s.v2alpha1.Keycloak; +import org.keycloak.k8s.v2alpha1.KeycloakOperatorKeycloakList; +import org.keycloak.k8s.v2alpha1.KeycloakOperatorRealmImportList; +import org.keycloak.k8s.v2alpha1.KeycloakRealmImport; + +import cz.xtf.core.openshift.OpenShifts; +import io.fabric8.kubernetes.api.model.apiextensions.v1.CustomResourceDefinition; +import io.fabric8.kubernetes.api.model.apiextensions.v1.CustomResourceDefinitionList; +import io.fabric8.kubernetes.client.NamespacedKubernetesClientAdapter; +import io.fabric8.kubernetes.client.dsl.NonNamespaceOperation; +import io.fabric8.kubernetes.client.dsl.Resource; +import io.fabric8.kubernetes.client.dsl.base.CustomResourceDefinitionContext; +import io.fabric8.kubernetes.client.dsl.internal.HasMetadataOperationsImpl; +import io.fabric8.openshift.client.NamespacedOpenShiftClient; +import lombok.NonNull; + +/** + * Keycloak operator provisioner + */ +public class KeycloakOpenShiftOperatorProvisioner + // leverage Wildfly common Operator based provisioner behavior + extends KeycloakOperatorProvisioner + // ... and common OpenShift provisioning logic + implements OpenShiftProvisioner { + + public KeycloakOpenShiftOperatorProvisioner(@NonNull KeycloakOperatorApplication application) { + super(application); + } + + @Override + public NamespacedKubernetesClientAdapter client() { + return OpenShiftProvisioner.super.client(); + } + + @Override + public String execute(String... args) { + return OpenShiftProvisioner.super.execute(args); + } + + // ================================================================================================================= + // Operator based provisioning concrete implementation, see OperatorProvisioner + // ================================================================================================================= + @Override + protected String getOperatorNamespace() { + return "openshift-marketplace"; + } + + @Override + public void subscribe() { + if (Strings.isNullOrEmpty(IntersmashConfig.keycloakImageURL())) { + super.subscribe(); + } else { + subscribe( + INSTALLPLAN_APPROVAL_MANUAL, + Map.of( + // Custom Keycloak image to be used: overrides the Keycloak image at the operator level: all + // Keycloak instances will be spun out of this image + // e.g. 
OPERATOR_KEYCLOAK_IMAGE=quay.io/keycloak/keycloak:21.1.1 --> operator.keycloak.image + "OPERATOR_KEYCLOAK_IMAGE", IntersmashConfig.keycloakImageURL())); + } + } + + @Override + public URL getURL() { + String host = OpenShiftProvisioner.openShift.routes().list().getItems() + .stream().filter( + route -> route.getMetadata().getName().startsWith( + keycloak().get().getMetadata().getName()) + && + route.getMetadata().getLabels().entrySet() + .stream().filter( + label -> label.getKey().equalsIgnoreCase("app.kubernetes.io/instance") + && + label.getValue().equalsIgnoreCase( + keycloak().get().getMetadata().getLabels() + .get("app"))) + .count() == 1 + + ).findFirst() + .orElseThrow(() -> new RuntimeException( + String.format("No route for Keycloak %s!", keycloak().get().getMetadata().getName()))) + .getSpec().getHost(); + try { + return Strings.isNullOrEmpty(host) ? null : new URL(String.format("https://%s", host)); + } catch (MalformedURLException e) { + throw new RuntimeException(String.format("Keycloak operator External URL \"%s\" is malformed.", host), e); + } + } + + // ================================================================================================================= + // Client related + // ================================================================================================================= + @Override + public NonNamespaceOperation> customResourceDefinitionsClient() { + return OpenShifts.admin().apiextensions().v1().customResourceDefinitions(); + } + + @Override + public HasMetadataOperationsImpl keycloaksCustomResourcesClient( + CustomResourceDefinitionContext crdc) { + return OpenShifts + .master().newHasMetadataOperation(crdc, Keycloak.class, KeycloakOperatorKeycloakList.class); + } + + @Override + public HasMetadataOperationsImpl keycloakRealmImportsCustomResourcesClient( + CustomResourceDefinitionContext crdc) { + return OpenShifts + .master().newHasMetadataOperation(crdc, KeycloakRealmImport.class, KeycloakOperatorRealmImportList.class); + } +} diff --git a/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/KeycloakOperatorProvisionerFactory.java b/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/KeycloakOperatorProvisionerFactory.java index c894c38eb..06e46480e 100644 --- a/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/KeycloakOperatorProvisionerFactory.java +++ b/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/KeycloakOperatorProvisionerFactory.java @@ -16,19 +16,19 @@ package org.jboss.intersmash.provision.openshift; import org.jboss.intersmash.application.Application; -import org.jboss.intersmash.application.openshift.KeycloakOperatorApplication; +import org.jboss.intersmash.application.operator.KeycloakOperatorApplication; import org.jboss.intersmash.provision.ProvisionerFactory; import lombok.extern.slf4j.Slf4j; @Slf4j public class KeycloakOperatorProvisionerFactory - implements ProvisionerFactory { + implements ProvisionerFactory { @Override - public KeycloakOperatorProvisioner getProvisioner(Application application) { + public KeycloakOpenShiftOperatorProvisioner getProvisioner(Application application) { if (KeycloakOperatorApplication.class.isAssignableFrom(application.getClass())) - return new KeycloakOperatorProvisioner((KeycloakOperatorApplication) application); + return new KeycloakOpenShiftOperatorProvisioner((KeycloakOperatorApplication) application); return null; } } diff --git 
a/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/RhSsoOpenShiftOperatorProvisioner.java b/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/RhSsoOpenShiftOperatorProvisioner.java new file mode 100644 index 000000000..b97f98eb3 --- /dev/null +++ b/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/RhSsoOpenShiftOperatorProvisioner.java @@ -0,0 +1,134 @@ +/** + * Copyright (C) 2023 Red Hat, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.jboss.intersmash.provision.openshift; + +import java.util.Map; + +import org.assertj.core.util.Strings; +import org.jboss.intersmash.IntersmashConfig; +import org.jboss.intersmash.application.operator.RhSsoOperatorApplication; +import org.jboss.intersmash.provision.openshift.operator.keycloak.backup.KeycloakBackupList; +import org.jboss.intersmash.provision.openshift.operator.keycloak.client.KeycloakClientList; +import org.jboss.intersmash.provision.openshift.operator.keycloak.keycloak.KeycloakList; +import org.jboss.intersmash.provision.openshift.operator.keycloak.realm.KeycloakRealmList; +import org.jboss.intersmash.provision.openshift.operator.keycloak.user.KeycloakUserList; +import org.jboss.intersmash.provision.operator.RhSsoOperatorProvisioner; +import org.keycloak.v1alpha1.Keycloak; +import org.keycloak.v1alpha1.KeycloakBackup; +import org.keycloak.v1alpha1.KeycloakClient; +import org.keycloak.v1alpha1.KeycloakRealm; +import org.keycloak.v1alpha1.KeycloakUser; + +import cz.xtf.core.openshift.OpenShifts; +import io.fabric8.kubernetes.api.model.apiextensions.v1.CustomResourceDefinition; +import io.fabric8.kubernetes.api.model.apiextensions.v1.CustomResourceDefinitionList; +import io.fabric8.kubernetes.client.NamespacedKubernetesClientAdapter; +import io.fabric8.kubernetes.client.dsl.NonNamespaceOperation; +import io.fabric8.kubernetes.client.dsl.Resource; +import io.fabric8.kubernetes.client.dsl.base.CustomResourceDefinitionContext; +import io.fabric8.kubernetes.client.dsl.internal.HasMetadataOperationsImpl; +import io.fabric8.openshift.client.NamespacedOpenShiftClient; +import lombok.NonNull; + +/** + * Keycloak operator provisioner + */ +@Deprecated(since = "0.0.2") +public class RhSsoOpenShiftOperatorProvisioner + // leverage Red Hat SSO common Operator based provisioner behavior + extends RhSsoOperatorProvisioner + // ... and common OpenShift provisioning logic, too + implements OpenShiftProvisioner { + + public RhSsoOpenShiftOperatorProvisioner(@NonNull RhSsoOperatorApplication application) { + super(application); + } + + @Override + public NamespacedKubernetesClientAdapter client() { + return OpenShiftProvisioner.super.client(); + } + + @Override + public String execute(String... 
args) { + return OpenShiftProvisioner.super.execute(args); + } + + // ================================================================================================================= + // Operator based provisioning concrete implementation, see OperatorProvisioner + // ================================================================================================================= + @Override + protected String getOperatorNamespace() { + return "openshift-marketplace"; + } + + @Override + public void subscribe() { + if (Strings.isNullOrEmpty(IntersmashConfig.rhSsoImageURL())) { + super.subscribe(); + } else { + // RELATED_IMAGE_RHSSO_OPENJ9 and RELATED_IMAGE_RHSSO_OPENJDK, determine the final value for RELATED_IMAGE_RHSSO + subscribe( + INSTALLPLAN_APPROVAL_MANUAL, + Map.of( + "RELATED_IMAGE_RHSSO", IntersmashConfig.rhSsoImageURL(), + "PROFILE", "RHSSO")); + } + } + + // ================================================================================================================= + // Client related + // ================================================================================================================= + @Override + public NonNamespaceOperation> customResourceDefinitionsClient() { + return OpenShifts.admin().apiextensions().v1().customResourceDefinitions(); + } + + @Override + public HasMetadataOperationsImpl keycloakCustomResourcesClient( + CustomResourceDefinitionContext crdc) { + return OpenShifts + .master().newHasMetadataOperation(crdc, Keycloak.class, KeycloakList.class); + } + + @Override + public HasMetadataOperationsImpl keycloakRealmCustomResourcesClient( + CustomResourceDefinitionContext crdc) { + return OpenShifts + .master().newHasMetadataOperation(crdc, KeycloakRealm.class, KeycloakRealmList.class); + } + + @Override + public HasMetadataOperationsImpl keycloakClientCustomResourcesClient( + CustomResourceDefinitionContext crdc) { + return OpenShifts + .master().newHasMetadataOperation(crdc, KeycloakClient.class, KeycloakClientList.class); + } + + @Override + public HasMetadataOperationsImpl keycloakBackupCustomResourcesClient( + CustomResourceDefinitionContext crdc) { + return OpenShifts + .master().newHasMetadataOperation(crdc, KeycloakBackup.class, KeycloakBackupList.class); + } + + @Override + public HasMetadataOperationsImpl keycloakUserCustomResourcesClient( + CustomResourceDefinitionContext crdc) { + return OpenShifts + .master().newHasMetadataOperation(crdc, KeycloakUser.class, KeycloakUserList.class); + } +} diff --git a/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/RhSsoOperatorProvisionerFactory.java b/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/RhSsoOpenShiftOperatorProvisionerFactory.java similarity index 72% rename from provisioners/src/main/java/org/jboss/intersmash/provision/openshift/RhSsoOperatorProvisionerFactory.java rename to provisioners/src/main/java/org/jboss/intersmash/provision/openshift/RhSsoOpenShiftOperatorProvisionerFactory.java index e3b8d7b20..f28339bee 100644 --- a/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/RhSsoOperatorProvisionerFactory.java +++ b/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/RhSsoOpenShiftOperatorProvisionerFactory.java @@ -16,19 +16,19 @@ package org.jboss.intersmash.provision.openshift; import org.jboss.intersmash.application.Application; -import org.jboss.intersmash.application.openshift.RhSsoOperatorApplication; +import org.jboss.intersmash.application.operator.RhSsoOperatorApplication; import 
org.jboss.intersmash.provision.ProvisionerFactory; import lombok.extern.slf4j.Slf4j; @Slf4j @Deprecated(since = "0.0.2") -public class RhSsoOperatorProvisionerFactory implements ProvisionerFactory { +public class RhSsoOpenShiftOperatorProvisionerFactory implements ProvisionerFactory { @Override - public RhSsoOperatorProvisioner getProvisioner(Application application) { + public RhSsoOpenShiftOperatorProvisioner getProvisioner(Application application) { if (RhSsoOperatorApplication.class.isAssignableFrom(application.getClass())) - return new RhSsoOperatorProvisioner((RhSsoOperatorApplication) application); + return new RhSsoOpenShiftOperatorProvisioner((RhSsoOperatorApplication) application); return null; } } diff --git a/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/WildflyImageOpenShiftProvisioner.java b/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/WildflyImageOpenShiftProvisioner.java index 78b817b60..5f273079d 100644 --- a/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/WildflyImageOpenShiftProvisioner.java +++ b/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/WildflyImageOpenShiftProvisioner.java @@ -28,11 +28,11 @@ import org.assertj.core.util.Strings; import org.jboss.intersmash.IntersmashConfig; +import org.jboss.intersmash.application.input.BinarySource; +import org.jboss.intersmash.application.input.BuildInput; +import org.jboss.intersmash.application.input.GitSource; import org.jboss.intersmash.application.openshift.WildflyImageOpenShiftApplication; import org.jboss.intersmash.application.openshift.WildflyOpenShiftApplication; -import org.jboss.intersmash.application.openshift.input.BinarySource; -import org.jboss.intersmash.application.openshift.input.BuildInput; -import org.jboss.intersmash.application.openshift.input.GitSource; import org.slf4j.event.Level; import cz.xtf.builder.builders.ApplicationBuilder; diff --git a/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/WildflyOpenShiftOperatorProvisioner.java b/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/WildflyOpenShiftOperatorProvisioner.java new file mode 100644 index 000000000..58918b467 --- /dev/null +++ b/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/WildflyOpenShiftOperatorProvisioner.java @@ -0,0 +1,77 @@ +/** + * Copyright (C) 2023 Red Hat, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.jboss.intersmash.provision.openshift; + +import org.jboss.intersmash.application.operator.WildflyOperatorApplication; +import org.jboss.intersmash.provision.openshift.operator.wildfly.WildFlyServerList; +import org.jboss.intersmash.provision.operator.WildflyOperatorProvisioner; +import org.wildfly.v1alpha1.WildFlyServer; + +import cz.xtf.core.openshift.OpenShifts; +import io.fabric8.kubernetes.api.model.apiextensions.v1.CustomResourceDefinition; +import io.fabric8.kubernetes.api.model.apiextensions.v1.CustomResourceDefinitionList; +import io.fabric8.kubernetes.client.NamespacedKubernetesClientAdapter; +import io.fabric8.kubernetes.client.dsl.NonNamespaceOperation; +import io.fabric8.kubernetes.client.dsl.Resource; +import io.fabric8.kubernetes.client.dsl.base.CustomResourceDefinitionContext; +import io.fabric8.kubernetes.client.dsl.internal.HasMetadataOperationsImpl; +import io.fabric8.openshift.client.NamespacedOpenShiftClient; +import lombok.NonNull; + +public class WildflyOpenShiftOperatorProvisioner + // leverage WildFly common Operator based provisioner behavior + extends WildflyOperatorProvisioner + // ... and common OpenShift provisioning logic, too + implements OpenShiftProvisioner { + + public WildflyOpenShiftOperatorProvisioner(@NonNull WildflyOperatorApplication application) { + super(application); + } + + @Override + public NamespacedKubernetesClientAdapter client() { + return OpenShiftProvisioner.super.client(); + } + + @Override + public String execute(String... args) { + return OpenShifts.adminBinary().execute(args); + } + + // ================================================================================================================= + // Operator based provisioning concrete implementation, see OperatorProvisioner + // ================================================================================================================= + @Override + protected String getOperatorNamespace() { + return "openshift-marketplace"; + } + + // ================================================================================================================= + // Client related + // ================================================================================================================= + + @Override + public NonNamespaceOperation> customResourceDefinitionsClient() { + return OpenShifts.admin().apiextensions().v1().customResourceDefinitions(); + } + + @Override + public HasMetadataOperationsImpl wildflyCustomResourcesClient( + CustomResourceDefinitionContext crdc) { + return OpenShifts + .master().newHasMetadataOperation(crdc, WildFlyServer.class, WildFlyServerList.class); + } +} diff --git a/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/WildflyOperatorProvisionerFactory.java b/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/WildflyOpenShiftOperatorProvisionerFactory.java similarity index 70% rename from provisioners/src/main/java/org/jboss/intersmash/provision/openshift/WildflyOperatorProvisionerFactory.java rename to provisioners/src/main/java/org/jboss/intersmash/provision/openshift/WildflyOpenShiftOperatorProvisionerFactory.java index 41c116f33..dbf94e23e 100644 --- a/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/WildflyOperatorProvisionerFactory.java +++ b/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/WildflyOpenShiftOperatorProvisionerFactory.java @@ -16,18 +16,18 @@ package org.jboss.intersmash.provision.openshift; import org.jboss.intersmash.application.Application; 
-import org.jboss.intersmash.application.openshift.WildflyOperatorApplication; +import org.jboss.intersmash.application.operator.WildflyOperatorApplication; import org.jboss.intersmash.provision.ProvisionerFactory; import lombok.extern.slf4j.Slf4j; @Slf4j -public class WildflyOperatorProvisionerFactory implements ProvisionerFactory { +public class WildflyOpenShiftOperatorProvisionerFactory implements ProvisionerFactory { @Override - public WildflyOperatorProvisioner getProvisioner(Application application) { + public WildflyOpenShiftOperatorProvisioner getProvisioner(Application application) { if (WildflyOperatorApplication.class.isAssignableFrom(application.getClass())) - return new WildflyOperatorProvisioner((WildflyOperatorApplication) application); + return new WildflyOpenShiftOperatorProvisioner((WildflyOperatorApplication) application); return null; } } diff --git a/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/ActiveMQOperatorProvisioner.java b/provisioners/src/main/java/org/jboss/intersmash/provision/operator/ActiveMQOperatorProvisioner.java similarity index 58% rename from provisioners/src/main/java/org/jboss/intersmash/provision/openshift/ActiveMQOperatorProvisioner.java rename to provisioners/src/main/java/org/jboss/intersmash/provision/operator/ActiveMQOperatorProvisioner.java index 72a15525f..c268fb52b 100644 --- a/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/ActiveMQOperatorProvisioner.java +++ b/provisioners/src/main/java/org/jboss/intersmash/provision/operator/ActiveMQOperatorProvisioner.java @@ -13,137 +13,74 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jboss.intersmash.provision.openshift; +package org.jboss.intersmash.provision.operator; import java.util.List; import java.util.stream.Collectors; import org.jboss.intersmash.IntersmashConfig; -import org.jboss.intersmash.application.openshift.ActiveMQOperatorApplication; -import org.jboss.intersmash.provision.openshift.operator.OperatorProvisioner; +import org.jboss.intersmash.application.operator.ActiveMQOperatorApplication; +import org.jboss.intersmash.provision.Provisioner; import org.jboss.intersmash.provision.openshift.operator.activemq.address.ActiveMQArtemisAddressList; import org.jboss.intersmash.provision.openshift.operator.activemq.broker.ActiveMQArtemisList; import org.slf4j.event.Level; -import cz.xtf.core.config.OpenShiftConfig; -import cz.xtf.core.event.helpers.EventHelper; -import cz.xtf.core.openshift.OpenShiftWaiters; -import cz.xtf.core.openshift.OpenShifts; import cz.xtf.core.waiting.SimpleWaiter; +import cz.xtf.core.waiting.failfast.FailFastCheck; import io.amq.broker.v1beta1.ActiveMQArtemis; import io.amq.broker.v1beta1.ActiveMQArtemisAddress; import io.fabric8.kubernetes.api.model.DeletionPropagation; import io.fabric8.kubernetes.api.model.Pod; import io.fabric8.kubernetes.api.model.apiextensions.v1.CustomResourceDefinition; -import io.fabric8.kubernetes.client.dsl.MixedOperation; +import io.fabric8.kubernetes.api.model.apiextensions.v1.CustomResourceDefinitionList; +import io.fabric8.kubernetes.client.NamespacedKubernetesClient; import io.fabric8.kubernetes.client.dsl.NonNamespaceOperation; import io.fabric8.kubernetes.client.dsl.Resource; import io.fabric8.kubernetes.client.dsl.base.CustomResourceDefinitionContext; +import io.fabric8.kubernetes.client.dsl.internal.HasMetadataOperationsImpl; import lombok.NonNull; /** - * ActiveMQ Operator based provisioner + * Defines the contract 
and default behavior of an Operator based provisioner for the ActiveMQ Artemis Operator */ -public class ActiveMQOperatorProvisioner extends OperatorProvisioner { - private final static String ACTIVE_MQ_ARTEMIS_RESOURCE = "activemqartemises.broker.amq.io"; - private static NonNamespaceOperation> ACTIVE_MQ_ARTEMISES_CLIENT; - - private final static String ACTIVE_MQ_ARTEMIS_ADDRESS_RESOURCE = "activemqartemisaddresses.broker.amq.io"; - private static NonNamespaceOperation> ACTIVE_MQ_ARTEMIS_ADDRESSES_CLIENT; - - // private final static String ACTIVE_MQ_ARTEMIS_SCALEDOWN_RESOURCE = "activemqartemisscaledowns.broker.amq.io"; // TODO add on demand - - private static final String OPERATOR_ID = IntersmashConfig.activeMQOperatorPackageManifest(); +public abstract class ActiveMQOperatorProvisioner extends + OperatorProvisioner implements Provisioner { - public ActiveMQOperatorProvisioner(@NonNull ActiveMQOperatorApplication activeMqOperatorApplication) { - super(activeMqOperatorApplication, OPERATOR_ID); + public ActiveMQOperatorProvisioner(@NonNull ActiveMQOperatorApplication application) { + super(application, ActiveMQOperatorProvisioner.OPERATOR_ID); } - public static String getOperatorId() { - return OPERATOR_ID; - } - - /** - * Get a client capable of working with {@link #ACTIVE_MQ_ARTEMIS_ADDRESS_RESOURCE} custom resource. - * - * @return client for operations with {@link #ACTIVE_MQ_ARTEMIS_ADDRESS_RESOURCE} custom resource - */ - public NonNamespaceOperation> activeMQArtemisAddressesClient() { - if (ACTIVE_MQ_ARTEMIS_ADDRESSES_CLIENT == null) { - CustomResourceDefinition crd = OpenShifts.admin().apiextensions().v1().customResourceDefinitions() - .withName(ACTIVE_MQ_ARTEMIS_ADDRESS_RESOURCE).get(); - CustomResourceDefinitionContext crdc = CustomResourceDefinitionContext.fromCrd(crd); - if (!getCustomResourceDefinitions().contains(ACTIVE_MQ_ARTEMIS_ADDRESS_RESOURCE)) { - throw new RuntimeException(String.format("[%s] custom resource is not provided by [%s] operator.", - ACTIVE_MQ_ARTEMIS_ADDRESS_RESOURCE, OPERATOR_ID)); - } - MixedOperation> addressesClient = OpenShifts - .master().newHasMetadataOperation(crdc, ActiveMQArtemisAddress.class, ActiveMQArtemisAddressList.class); - ACTIVE_MQ_ARTEMIS_ADDRESSES_CLIENT = addressesClient.inNamespace(OpenShiftConfig.namespace()); - } - return ACTIVE_MQ_ARTEMIS_ADDRESSES_CLIENT; - } - - /** - * Get a reference to activeMQArtemisAddress object. Use get() to get the actual object, or null in case it does not - * exist on tested cluster. - * - * @param name name of the activeMQArtemisAddress custom resource - * @return A concrete {@link Resource} instance representing the {@link ActiveMQArtemisAddress} resource definition - */ - public Resource activeMQArtemisAddress(String name) { - return activeMQArtemisAddressesClient().withName(name); + // ================================================================================================================= + // ActiveMQ Artemis related + // ================================================================================================================= + protected List getLabeledPods(final String labelName, final String labelValue) { + return this.getPods().stream() + .filter(p -> p.getMetadata().getLabels().entrySet().stream() + .anyMatch(l -> l.getKey().equals(labelName) && l.getValue().equals(labelValue))) + .collect(Collectors.toList()); } - /** - * Get all activeMQArtemisAddresses maintained by the current operator instance. 
- * - * Be aware that this method return just a references to the addresses, they might not actually exist on the cluster. - * Use get() to get the actual object, or null in case it does not exist on tested cluster. - * @return A list of {@link Resource} instances representing the {@link ActiveMQArtemisAddress} resource definitions - */ - public List> activeMQArtemisAddresses() { - ActiveMQOperatorApplication activeMqOperatorApplication = getApplication(); - return activeMqOperatorApplication.getActiveMQArtemisAddresses().stream() - .map(activeMQArtemisAddress -> activeMQArtemisAddress.getMetadata().getName()) - .map(this::activeMQArtemisAddress) - .collect(Collectors.toList()); + // ================================================================================================================= + // Related to generic provisioning behavior + // ================================================================================================================= + @Override + public String getOperatorCatalogSource() { + return IntersmashConfig.activeMQOperatorCatalogSource(); } - /** - * Get a client capable of working with {@link #ACTIVE_MQ_ARTEMIS_RESOURCE} custom resource. - * - * @return client for operations with {@link #ACTIVE_MQ_ARTEMIS_RESOURCE} custom resource - */ - public NonNamespaceOperation> activeMQArtemisesClient() { - if (ACTIVE_MQ_ARTEMISES_CLIENT == null) { - CustomResourceDefinition crd = OpenShifts.admin().apiextensions().v1().customResourceDefinitions() - .withName(ACTIVE_MQ_ARTEMIS_RESOURCE).get(); - CustomResourceDefinitionContext crdc = CustomResourceDefinitionContext.fromCrd(crd); - if (!getCustomResourceDefinitions().contains(ACTIVE_MQ_ARTEMIS_RESOURCE)) { - throw new RuntimeException(String.format("[%s] custom resource is not provided by [%s] operator.", - ACTIVE_MQ_ARTEMIS_RESOURCE, OPERATOR_ID)); - } - MixedOperation> amqClient = OpenShifts - .master().newHasMetadataOperation(crdc, ActiveMQArtemis.class, ActiveMQArtemisList.class); - ACTIVE_MQ_ARTEMISES_CLIENT = amqClient.inNamespace(OpenShiftConfig.namespace()); - } - return ACTIVE_MQ_ARTEMISES_CLIENT; + @Override + public String getOperatorIndexImage() { + return IntersmashConfig.activeMQOperatorIndexImage(); } - /** - * Get a reference to activeMQArtemis object. Use get() to get the actual object, or null in case it does not - * exist on tested cluster. 
- * @return A concrete {@link Resource} instance representing the {@link ActiveMQArtemis} resource definition - */ - public Resource activeMQArtemis() { - return activeMQArtemisesClient().withName(getApplication().getActiveMQArtemis().getMetadata().getName()); + @Override + public String getOperatorChannel() { + return IntersmashConfig.activeMQOperatorChannel(); } @Override public void deploy() { - ffCheck = FailFastUtils.getFailFastCheck(EventHelper.timeOfLastEventBMOrTestNamespaceOrEpoch(), - getApplication().getName()); + FailFastCheck ffCheck = () -> false; subscribe(); int replicas = getApplication().getActiveMQArtemis().getSpec().getDeploymentPlan().getSize(); @@ -154,15 +91,20 @@ public void deploy() { .forEach(activeMQArtemisAddress -> activeMQArtemisAddressesClient().createOrReplace(activeMQArtemisAddress)); // wait for all resources to be ready activeMQArtemisAddresses() - .forEach(address -> new SimpleWaiter(() -> address.get() != null).level(Level.DEBUG).waitFor()); + .forEach(address -> new SimpleWaiter(() -> address.get() != null) + .level(Level.DEBUG) + .waitFor()); new SimpleWaiter(() -> activeMQArtemis().get() != null) .failFast(ffCheck) .level(Level.DEBUG) .waitFor(); - OpenShiftWaiters.get(OpenShiftProvisioner.openShift, ffCheck).areExactlyNPodsReady(replicas, - activeMQArtemis().get().getKind(), getApplication().getActiveMQArtemis().getMetadata().getName()) + new SimpleWaiter(() -> getLabeledPods(activeMQArtemis().get().getKind(), + getApplication().getActiveMQArtemis().getMetadata().getName()) + .size() == replicas) + .failFast(ffCheck) .level(Level.DEBUG) .waitFor(); + /* * The following wait condition has been suppressed temporarily since: * 1. doesn't allow for the ActiveMQ CR `.podStatus` to be @@ -178,6 +120,7 @@ public void deploy() { @Override public void undeploy() { + FailFastCheck ffCheck = () -> false; // delete the resources activeMQArtemisAddresses().forEach(address -> address.withPropagationPolicy(DeletionPropagation.FOREGROUND).delete()); @@ -186,7 +129,7 @@ public void undeploy() { activeMQArtemis().withPropagationPolicy(DeletionPropagation.FOREGROUND).delete(); // wait - new SimpleWaiter(() -> activeMQArtemisesClient().list().getItems().size() == 0).failFast(ffCheck).level(Level.DEBUG) + new SimpleWaiter(() -> activeMQArtemisesClient().list().getItems().isEmpty()).failFast(ffCheck).level(Level.DEBUG) .waitFor(); activeMQArtemisAddresses().forEach( address -> new SimpleWaiter(() -> address.get() == null).level(Level.DEBUG).failFast(ffCheck).waitFor()); @@ -194,14 +137,14 @@ public void undeploy() { unsubscribe(); } - @Override public void scale(int replicas, boolean wait) { ActiveMQArtemis tmpBroker = activeMQArtemis().get(); tmpBroker.getSpec().getDeploymentPlan().setSize(replicas); activeMQArtemis().replace(tmpBroker); if (wait) { - OpenShiftWaiters.get(OpenShiftProvisioner.openShift, ffCheck) - .areExactlyNPodsReady(replicas, tmpBroker.getKind(), tmpBroker.getMetadata().getName()) + new SimpleWaiter(() -> getLabeledPods(tmpBroker.getKind(), + tmpBroker.getMetadata().getName()) + .size() == replicas) .level(Level.DEBUG) .waitFor(); /* @@ -217,33 +160,109 @@ public void scale(int replicas, boolean wait) { } } + // ================================================================================================================= + // Client related + // ================================================================================================================= + // this is the packagemanifest for the operator; + // you can get it with command: + // 
oc get packagemanifest -o template --template='{{ .metadata.name }}' + public static String OPERATOR_ID = IntersmashConfig.activeMQOperatorPackageManifest(); + + // this is the name of the CustomResourceDefinition(s) + // you can get it with command: + // oc get crd > -o template --template='{{ .metadata.name }}' + protected static String ACTIVEMQ_ARTEMIS_CRD_NAME = "activemqartemises.broker.amq.io"; + + protected static String ACTIVEMQ_ARTEMIS_ADDRESS_CRD_NAME = "activemqartemisaddresses.broker.amq.io"; + /** - * Get the provisioned application service related Pods - *

- * Currently blocked by the fact that Pod Status pod names do not reflect the reality
- *
- * Once these issues are resolved, we can use the ready pod names returned by - * {@code ActiveMQArtemisStatus.getPodStatus()} to create the List with pods maintained by the provisioner. + * Generic CRD client which is used by client builders default implementation to build the CRDs client * - * @return A list of related {@link Pod} instances + * @return A {@link NonNamespaceOperation} instance that represents a */ - @Override - public List getPods() { - throw new UnsupportedOperationException("To be implemented!"); + protected abstract NonNamespaceOperation> customResourceDefinitionsClient(); + + // activemqartemises.broker.amq.io + protected abstract HasMetadataOperationsImpl activeMQArtemisCustomResourcesClient( + CustomResourceDefinitionContext crdc); + + // activemqartemisaddresses.broker.amq.io + protected abstract HasMetadataOperationsImpl activeMQArtemisAddressesCustomResourcesClient( + CustomResourceDefinitionContext crdc); + + private static NonNamespaceOperation> ACTIVE_MQ_ARTEMISES_CLIENT; + + /** + * Get a client capable of working with {@link {@link ActiveMQOperatorProvisioner#ACTIVEMQ_ARTEMIS_CRD_NAME}} custom resource. + * + * @return client for operations with {@link {@link ActiveMQOperatorProvisioner#ACTIVEMQ_ARTEMIS_CRD_NAME}} custom resource + */ + public NonNamespaceOperation> activeMQArtemisesClient() { + if (ACTIVE_MQ_ARTEMISES_CLIENT == null) { + CustomResourceDefinition crd = customResourceDefinitionsClient() + .withName(ACTIVEMQ_ARTEMIS_CRD_NAME).get(); + if (crd == null) { + throw new RuntimeException(String.format("[%s] custom resource is not provided by [%s] operator.", + ACTIVEMQ_ARTEMIS_CRD_NAME, OPERATOR_ID)); + } + ACTIVE_MQ_ARTEMISES_CLIENT = activeMQArtemisCustomResourcesClient(CustomResourceDefinitionContext.fromCrd(crd)); + } + return ACTIVE_MQ_ARTEMISES_CLIENT; } - @Override - protected String getOperatorCatalogSource() { - return IntersmashConfig.activeMQOperatorCatalogSource(); + private static NonNamespaceOperation> ACTIVE_MQ_ARTEMIS_ADDRESSES_CLIENT; + + /** + * Get a client capable of working with {@link ActiveMQOperatorProvisioner#ACTIVEMQ_ARTEMIS_ADDRESS_CRD_NAME} custom resource. + * + * @return client for operations with {@link ActiveMQOperatorProvisioner#ACTIVEMQ_ARTEMIS_ADDRESS_CRD_NAME} custom resource + */ + public NonNamespaceOperation> activeMQArtemisAddressesClient() { + if (ACTIVE_MQ_ARTEMIS_ADDRESSES_CLIENT == null) { + CustomResourceDefinition crd = customResourceDefinitionsClient() + .withName(ACTIVEMQ_ARTEMIS_ADDRESS_CRD_NAME).get(); + if (crd == null) { + throw new RuntimeException(String.format("[%s] custom resource is not provided by [%s] operator.", + ACTIVEMQ_ARTEMIS_ADDRESS_CRD_NAME, OPERATOR_ID)); + } + ACTIVE_MQ_ARTEMIS_ADDRESSES_CLIENT = activeMQArtemisAddressesCustomResourcesClient( + CustomResourceDefinitionContext.fromCrd(crd)); + } + return ACTIVE_MQ_ARTEMIS_ADDRESSES_CLIENT; } - @Override - protected String getOperatorIndexImage() { - return IntersmashConfig.activeMQOperatorIndexImage(); + /** + * Get a reference to activeMQArtemis object. Use get() to get the actual object, or null in case it does not + * exist on tested cluster. 
+ * @return A concrete {@link Resource} instance representing the {@link ActiveMQArtemis} resource definition + */ + public Resource activeMQArtemis() { + return activeMQArtemisesClient().withName(getApplication().getActiveMQArtemis().getMetadata().getName()); } - @Override - protected String getOperatorChannel() { - return IntersmashConfig.activeMQOperatorChannel(); + /** + * Get a reference to activeMQArtemisAddress object. Use get() to get the actual object, or null in case it does not + * exist on tested cluster. + * + * @param name name of the activeMQArtemisAddress custom resource + * @return A concrete {@link Resource} instance representing the {@link ActiveMQArtemisAddress} resource definition + */ + public Resource activeMQArtemisAddress(String name) { + return activeMQArtemisAddressesClient().withName(name); + } + + /** + * Get all activeMQArtemisAddresses maintained by the current operator instance. + * + * Be aware that this method return just a references to the addresses, they might not actually exist on the cluster. + * Use get() to get the actual object, or null in case it does not exist on tested cluster. + * @return A list of {@link Resource} instances representing the {@link ActiveMQArtemisAddress} resource definitions + */ + public List> activeMQArtemisAddresses() { + ActiveMQOperatorApplication activeMqOperatorApplication = getApplication(); + return activeMqOperatorApplication.getActiveMQArtemisAddresses().stream() + .map(activeMQArtemisAddress -> activeMQArtemisAddress.getMetadata().getName()) + .map(this::activeMQArtemisAddress) + .collect(Collectors.toList()); } } diff --git a/provisioners/src/main/java/org/jboss/intersmash/provision/operator/HyperfoilOperatorProvisioner.java b/provisioners/src/main/java/org/jboss/intersmash/provision/operator/HyperfoilOperatorProvisioner.java new file mode 100644 index 000000000..e137cf0bc --- /dev/null +++ b/provisioners/src/main/java/org/jboss/intersmash/provision/operator/HyperfoilOperatorProvisioner.java @@ -0,0 +1,240 @@ +package org.jboss.intersmash.provision.operator; + +import java.util.Objects; +import java.util.concurrent.TimeUnit; +import java.util.function.BooleanSupplier; + +import org.jboss.intersmash.IntersmashConfig; +import org.jboss.intersmash.application.operator.HyperfoilOperatorApplication; +import org.jboss.intersmash.k8s.KubernetesConfig; +import org.jboss.intersmash.provision.Provisioner; +import org.jboss.intersmash.application.k8s.HasPods; +import org.slf4j.event.Level; + +import com.google.common.base.Strings; + +import cz.xtf.core.http.Https; +import cz.xtf.core.waiting.SimpleWaiter; +import cz.xtf.core.waiting.failfast.FailFastCheck; +import io.fabric8.kubernetes.api.model.DeletionPropagation; +import io.fabric8.kubernetes.api.model.Pod; +import io.fabric8.kubernetes.api.model.apiextensions.v1.CustomResourceDefinition; +import io.fabric8.kubernetes.api.model.apiextensions.v1.CustomResourceDefinitionList; +import io.fabric8.kubernetes.api.model.networking.v1.Ingress; +import io.fabric8.kubernetes.client.NamespacedKubernetesClient; +import io.fabric8.kubernetes.client.dsl.NonNamespaceOperation; +import io.fabric8.kubernetes.client.dsl.Resource; +import io.fabric8.kubernetes.client.dsl.base.CustomResourceDefinitionContext; +import io.fabric8.kubernetes.client.dsl.internal.HasMetadataOperationsImpl; +import io.hyperfoil.v1alpha2.Hyperfoil; +import io.hyperfoil.v1alpha2.HyperfoilList; +import lombok.NonNull; + +public abstract class HyperfoilOperatorProvisioner extends + OperatorProvisioner implements 
Provisioner, HasPods { + + public HyperfoilOperatorProvisioner(@NonNull HyperfoilOperatorApplication application) { + super(application, HyperfoilOperatorProvisioner.OPERATOR_ID); + } + + // ================================================================================================================= + // Hyperfoil related + // ================================================================================================================= + protected Ingress retrieveNamedIngress(final String ingressName) { + return this.client().network().v1().ingresses().withName(ingressName).get(); + } + + public void undeploy(boolean unsubscribe) { + hyperfoil().withPropagationPolicy(DeletionPropagation.FOREGROUND).delete(); + BooleanSupplier bs = () -> getPods().stream() + .noneMatch(p -> !Strings.isNullOrEmpty(p.getMetadata().getLabels().get("app")) + && p.getMetadata().getLabels().get("app").equals(getApplication().getName())); + String reason = "Waiting for exactly 0 pods with label \"app\"=" + getApplication().getName() + " to be ready."; + new SimpleWaiter(bs, TimeUnit.MINUTES, 2, reason) + .level(Level.DEBUG) + .waitFor(); + if (unsubscribe) { + unsubscribe(); + } + } + + /** + * This method checks if the Operator's POD is actually running; + * It's been tailored on the community-operators Cluster Service version format which is missing label + * spec.install.spec.deployments.spec.template.metadata.labels."app.kubernetes.io/name" which is used + * in @see OperatorProvisioner#waitForOperatorPod() (see + * https://github.com/operator-framework/community-operators/tree/master/community-operators/hyperfoil-bundle) + */ + @Override + protected void waitForOperatorPod() { + String[] operatorSpecs = this.execute("get", "csvs", getCurrentCSV(), "-o", "template", "--template", + "{{range .spec.install.spec.deployments}}{{printf \"%d|%s\\n\" .spec.replicas .name}}{{end}}") + .split(System.lineSeparator()); + for (String spec : operatorSpecs) { + String[] operatorSpec = spec.split("\\|"); + if (operatorSpec.length != 2) { + throw new RuntimeException("Failed to get operator deployment spec from csvs!"); + } + new SimpleWaiter(() -> getPods().stream().filter( + pod -> (pod.getMetadata() + .getName() + .startsWith(operatorSpec[1]) + && pod.getStatus().getPhase().equalsIgnoreCase("Running"))) + .count() == Integer.parseInt(operatorSpec[0])) + .failFast(() -> false) + .reason("Wait for expected number of replicas to be active.") + .level(Level.DEBUG) + .waitFor(); + } + } + + /** + * Tells if a specific container inside the pod is ready + * + * @param pod + * @param containerName: name of the container + * @return + */ + private boolean isContainerReady(Pod pod, String containerName) { + if (Objects.nonNull(pod)) { + return pod.getStatus().getContainerStatuses().stream() + .filter(containerStatus -> containerStatus.getName().equalsIgnoreCase(containerName) + && containerStatus.getReady()) + .count() > 0; + } + return false; + } + + // ================================================================================================================= + // Related to generic provisioning behavior + // ================================================================================================================= + /** + * HyperFoil is community only, so we return the Operator namespace concretely here + * @return String representing the Operator namespace + */ + @Override + protected String getOperatorNamespace() { + return "olm"; + } + + @Override + public String getOperatorCatalogSource() { + return 
IntersmashConfig.hyperfoilOperatorCatalogSource(); + } + + @Override + public String getOperatorIndexImage() { + return IntersmashConfig.hyperfoilOperatorIndexImage(); + } + + @Override + public String getOperatorChannel() { + return IntersmashConfig.hyperfoilOperatorChannel(); + } + + @Override + protected String getCatalogSourceNamespace() { + String namespace = super.getCatalogSourceNamespace(); + if (!org.assertj.core.util.Strings.isNullOrEmpty(getOperatorIndexImage())) { + namespace = KubernetesConfig.namespace(); + } + return namespace; + } + + @Override + public void deploy() { + FailFastCheck ffCheck = () -> false; + if (!isSubscribed()) { + subscribe(); + } + hyperfoilClient().createOrReplace(getApplication().getHyperfoil()); + new SimpleWaiter(() -> hyperfoil().get().getStatus() != null) + .failFast(ffCheck) + .reason("Wait for status field to be initialized.") + .level(Level.DEBUG) + .waitFor(); + new SimpleWaiter(() -> getPods().size() == 1) + .failFast(ffCheck) + .reason("Wait for expected number of replicas to be active.") + .level(Level.DEBUG) + .waitFor(); + new SimpleWaiter( + () -> Https.getCode(getURL().toExternalForm()) != 503) + .reason("Wait until the route is ready to serve."); + } + + // TODO: check for removal + // @Override + // default List getPods() { + // List pods = new ArrayList<>(); + // Pod hyperfoilControllerPod = getPod(String.format("%s-controller", getApplication().getName())); + // if (isContainerReady(hyperfoilControllerPod, "controller")) { + // pods.add(hyperfoilControllerPod); + // } + // return pods; + // } + + @Override + public void undeploy() { + undeploy(true); + } + + @Override + public void scale(int replicas, boolean wait) { + throw new UnsupportedOperationException("Scaling is not implemented by Hyperfoil operator based provisioning"); + } + + // ================================================================================================================= + // Client related + // ================================================================================================================= + // this is the packagemanifest for the hyperfoil operator; + // you can get it with command: + // oc get packagemanifest hyperfoil-bundle -o template --template='{{ .metadata.name }}' + public static String OPERATOR_ID = IntersmashConfig.hyperfoilOperatorPackageManifest(); + + // this is the name of the Hyperfoil CustomResourceDefinition + // you can get it with command: + // oc get crd hyperfoils.hyperfoil.io -o template --template='{{ .metadata.name }}' + protected static String HYPERFOIL_CRD_NAME = "hyperfoils.hyperfoil.io"; + + /** + * Generic CRD client which is used by client builders default implementation to build the CRDs client + * + * @return A {@link NonNamespaceOperation} instance that represents a + */ + protected abstract NonNamespaceOperation> customResourceDefinitionsClient(); + + // hyperfoils.hyperfoil.io + protected abstract HasMetadataOperationsImpl hyperfoilCustomResourcesClient( + CustomResourceDefinitionContext crdc); + + protected static NonNamespaceOperation> HYPERFOIL_CUSTOM_RESOURCE_CLIENT; + + /** + * Get a client capable of working with {@link HyperfoilOperatorProvisioner#HYPERFOIL_CRD_NAME} custom resource. 
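The deploy() sequence above relies on XTF's SimpleWaiter for every readiness step: a BooleanSupplier expresses the condition and waitFor() blocks until it holds or the timeout expires. A short sketch of the same idiom, ending with the HTTP check used for the route; the two-minute timeout and the no-op fail-fast check are illustrative defaults mirroring values used elsewhere in this patch:

    // Wait until the Hyperfoil controller answers on its external URL with something
    // other than 503; failFast(() -> false) disables early abort, as done above.
    FailFastCheck ffCheck = () -> false;
    new SimpleWaiter(() -> Https.getCode(getURL().toExternalForm()) != 503, TimeUnit.MINUTES, 2,
            "Wait until the route is ready to serve.")
            .failFast(ffCheck)
            .level(Level.DEBUG)
            .waitFor();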
+ * + * @return client for operations with {@link HyperfoilOperatorProvisioner#HYPERFOIL_CRD_NAME} custom resource + */ + public NonNamespaceOperation> hyperfoilClient() { + if (HYPERFOIL_CUSTOM_RESOURCE_CLIENT == null) { + CustomResourceDefinition crd = customResourceDefinitionsClient() + .withName(HYPERFOIL_CRD_NAME).get(); + if (crd == null) { + throw new RuntimeException(String.format("[%s] custom resource is not provided by [%s] operator.", + HYPERFOIL_CRD_NAME, OPERATOR_ID)); + } + HYPERFOIL_CUSTOM_RESOURCE_CLIENT = hyperfoilCustomResourcesClient(CustomResourceDefinitionContext.fromCrd(crd)); + } + return HYPERFOIL_CUSTOM_RESOURCE_CLIENT; + } + + /** + * Get a reference to Hyperfoil object. Use get() to get the actual object, or null in case it does not + * exist on tested cluster. + * + * @return A concrete {@link Resource} instance representing the {@link Hyperfoil} resource definition + */ + public Resource hyperfoil() { + return hyperfoilClient().withName(getApplication().getName()); + } +} diff --git a/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/InfinispanOperatorProvisioner.java b/provisioners/src/main/java/org/jboss/intersmash/provision/operator/InfinispanOperatorProvisioner.java similarity index 50% rename from provisioners/src/main/java/org/jboss/intersmash/provision/openshift/InfinispanOperatorProvisioner.java rename to provisioners/src/main/java/org/jboss/intersmash/provision/operator/InfinispanOperatorProvisioner.java index e905ffcad..e5515edec 100644 --- a/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/InfinispanOperatorProvisioner.java +++ b/provisioners/src/main/java/org/jboss/intersmash/provision/operator/InfinispanOperatorProvisioner.java @@ -13,65 +13,115 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.jboss.intersmash.provision.openshift; +package org.jboss.intersmash.provision.operator; -import java.net.MalformedURLException; -import java.net.URL; import java.util.List; -import java.util.Objects; +import java.util.concurrent.TimeUnit; +import java.util.function.BooleanSupplier; import java.util.stream.Collectors; -import org.assertj.core.util.Lists; import org.infinispan.v1.Infinispan; -import org.infinispan.v1.infinispanspec.Expose; import org.infinispan.v2alpha1.Cache; import org.jboss.intersmash.IntersmashConfig; -import org.jboss.intersmash.application.openshift.InfinispanOperatorApplication; -import org.jboss.intersmash.provision.openshift.operator.OperatorProvisioner; +import org.jboss.intersmash.application.operator.InfinispanOperatorApplication; +import org.jboss.intersmash.provision.Provisioner; import org.jboss.intersmash.provision.openshift.operator.infinispan.cache.CacheList; import org.jboss.intersmash.provision.openshift.operator.infinispan.infinispan.InfinispanList; import org.jboss.intersmash.provision.openshift.operator.infinispan.infinispan.spec.InfinispanConditionBuilder; import org.slf4j.event.Level; -import cz.xtf.core.config.OpenShiftConfig; -import cz.xtf.core.event.helpers.EventHelper; -import cz.xtf.core.openshift.OpenShiftWaiters; -import cz.xtf.core.openshift.OpenShifts; import cz.xtf.core.waiting.SimpleWaiter; +import cz.xtf.core.waiting.failfast.FailFastCheck; import io.fabric8.kubernetes.api.model.DeletionPropagation; import io.fabric8.kubernetes.api.model.Pod; import io.fabric8.kubernetes.api.model.Service; import io.fabric8.kubernetes.api.model.apiextensions.v1.CustomResourceDefinition; -import io.fabric8.kubernetes.api.model.apps.StatefulSet; -import io.fabric8.kubernetes.client.dsl.MixedOperation; +import io.fabric8.kubernetes.api.model.apiextensions.v1.CustomResourceDefinitionList; +import io.fabric8.kubernetes.client.NamespacedKubernetesClient; import io.fabric8.kubernetes.client.dsl.NonNamespaceOperation; import io.fabric8.kubernetes.client.dsl.Resource; import io.fabric8.kubernetes.client.dsl.base.CustomResourceDefinitionContext; -import io.fabric8.openshift.api.model.Route; -import lombok.NonNull; +import io.fabric8.kubernetes.client.dsl.internal.HasMetadataOperationsImpl; -public class InfinispanOperatorProvisioner extends OperatorProvisioner { - private static final String INFINISPAN_RESOURCE = "infinispans.infinispan.org"; - private static NonNamespaceOperation> INFINISPAN_CLIENT; +/** + * Defines the contract and default behavior of an Operator based provisioner for the Infinispan Operator + */ +public abstract class InfinispanOperatorProvisioner extends + OperatorProvisioner implements Provisioner { - private static final String INFINISPAN_CACHE_RESOURCE = "caches.infinispan.org"; - private static NonNamespaceOperation> INFINISPAN_CACHES_CLIENT; + public InfinispanOperatorProvisioner(InfinispanOperatorApplication application) { + super(application, InfinispanOperatorProvisioner.OPERATOR_ID); + } - // oc get packagemanifest datagrid -n openshift-marketplace - private static final String OPERATOR_ID = IntersmashConfig.infinispanOperatorPackageManifest(); + // ================================================================================================================= + // Infinispan related + // ================================================================================================================= + public List getInfinispanPods() { + return getPods().stream().filter( + // the following criteria is implemented 
based on similar requirements taken from the + // infinispan-operator project, see + // https://github.com/infinispan/infinispan-operator/blob/main/test/e2e/utils/kubernetes.go#L599-L604 + p -> p.getMetadata().getLabels().entrySet().stream() + .anyMatch(tl -> "app".equals(tl.getKey()) && "infinispan-pod".equals(tl.getValue()) + && p.getMetadata().getLabels().entrySet().stream().anyMatch( + cnl -> "clusterName".equals(cnl.getKey()) + && getApplication().getName().equals(cnl.getValue())))) + .collect(Collectors.toList()); + } - public InfinispanOperatorProvisioner(@NonNull InfinispanOperatorApplication infinispanOperatorApplication) { - super(infinispanOperatorApplication, OPERATOR_ID); + protected Service getService(final String name) { + return this.client().services().withName(name).get(); } - public static String getOperatorId() { - return OPERATOR_ID; + // TODO: check for removal + // default List getStatefulSetPods() { + // StatefulSet statefulSet = getStatefulSet(getApplication().getName()); + // return Objects.nonNull(statefulSet) + // ? getPods().stream() + // .filter(p -> p.getMetadata().getLabels().get("controller-revision-hash") != null + // && p.getMetadata().getLabels().get("controller-revision-hash") + // .equals(statefulSet.getStatus().getUpdateRevision())) + // .collect(Collectors.toList()) + // : List.of(); + // } + + private void waitForResourceReadiness() { + // it must be well-formed + // see https://github.com/kubernetes/apimachinery/blob/v0.20.4/pkg/apis/meta/v1/types.go#L1289 + new SimpleWaiter( + () -> infinispan().get().getStatus().getConditions().stream() + .anyMatch( + c -> c.getType().equals(InfinispanConditionBuilder.ConditionType.ConditionWellFormed.getValue()) + && c.getStatus().equals("True"))) + .reason("Wait for infinispan resource to be ready").level(Level.DEBUG) + .waitFor(); + // and with the expected number of Cache CR(s) + if (getApplication().getCaches().size() > 0) + new SimpleWaiter(() -> cachesClient().list().getItems().size() == caches().size()) + .reason("Wait for caches to be ready.").level(Level.DEBUG).waitFor(); + } + + // ================================================================================================================= + // Related to generic provisioning behavior + // ================================================================================================================= + @Override + public String getOperatorCatalogSource() { + return IntersmashConfig.infinispanOperatorCatalogSource(); + } + + @Override + public String getOperatorIndexImage() { + return IntersmashConfig.infinispanOperatorIndexImage(); + } + + @Override + public String getOperatorChannel() { + return IntersmashConfig.infinispanOperatorChannel(); } @Override public void deploy() { - ffCheck = FailFastUtils.getFailFastCheck(EventHelper.timeOfLastEventBMOrTestNamespaceOrEpoch(), - getApplication().getName()); + FailFastCheck ffCheck = () -> false; subscribe(); // create Infinispan CR final int replicas = getApplication().getInfinispan().getSpec().getReplicas(); @@ -89,15 +139,15 @@ public void deploy() { @Override public void undeploy() { + FailFastCheck ffCheck = () -> false; // delete Cache CR(s) caches().forEach(keycloakUser -> keycloakUser.withPropagationPolicy(DeletionPropagation.FOREGROUND).delete()); // delete Infinispan CR infinispan().withPropagationPolicy(DeletionPropagation.FOREGROUND).delete(); - // wait for 0 pods, and here it waits for no pods to exist with the `clusterName=` label, - // since all CRs have been deleted - 
OpenShiftWaiters.get(OpenShiftProvisioner.openShift, ffCheck) - .areExactlyNPodsReady(0, "clusterName", getApplication().getInfinispan().getMetadata().getName()) - .level(Level.DEBUG) + // wait for 0 pods + BooleanSupplier bs = () -> getInfinispanPods().isEmpty(); + new SimpleWaiter(bs, TimeUnit.MINUTES, 2, + "Waiting for 0 pods with label \"clusterName\"=" + getApplication().getInfinispan().getMetadata().getName()) .waitFor(); unsubscribe(); } @@ -116,112 +166,65 @@ public void scale(int replicas, boolean wait) { } } - @Override - public List getPods() { - StatefulSet statefulSet = OpenShiftProvisioner.openShift.getStatefulSet(getApplication().getName()); - return Objects.nonNull(statefulSet) - ? OpenShiftProvisioner.openShift.getLabeledPods("controller-revision-hash", - statefulSet.getStatus().getUpdateRevision()) - : Lists.emptyList(); - } + // ================================================================================================================= + // Client related + // ================================================================================================================= + // this is the packagemanifest for the operator; + // you can get it with command: + // oc get packagemanifest -o template --template='{{ .metadata.name }}' + public static String OPERATOR_ID = IntersmashConfig.infinispanOperatorPackageManifest(); - public List getInfinispanPods() { - return getInfinispanPods(getApplication().getName()); - } + // this is the name of the CustomResourceDefinition(s) + // you can get it with command: + // oc get crd -o template --template='{{ .metadata.name }}' + private static String INFINISPAN_CRD_NAME = "infinispans.infinispan.org"; - public static List getInfinispanPods(final String clusterName) { - return OpenShiftProvisioner.openShift.inNamespace(OpenShiftConfig.namespace()).pods().list().getItems().stream().filter( - // the following criteria is implemented based on similar requirements taken from the - // infinispan-operator project, see - // https://github.com/infinispan/infinispan-operator/blob/main/test/e2e/utils/kubernetes.go#L599-L604 - p -> p.getMetadata().getLabels().entrySet().stream() - .anyMatch(tl -> "app".equals(tl.getKey()) && "infinispan-pod".equals(tl.getValue()) - && p.getMetadata().getLabels().entrySet().stream().anyMatch( - cnl -> "clusterName".equals(cnl.getKey()) && clusterName.equals(cnl.getValue())))) - .collect(Collectors.toList()); - } + private static String INFINISPAN_CACHE_CRD_NAME = "caches.infinispan.org"; - @Override - protected String getOperatorCatalogSource() { - return IntersmashConfig.infinispanOperatorCatalogSource(); - } + /** + * Generic CRD client which is used by client builders default implementation to build the CRDs client + * + * @return A {@link NonNamespaceOperation} instance that represents a + */ + protected abstract NonNamespaceOperation> customResourceDefinitionsClient(); - @Override - protected String getOperatorIndexImage() { - return IntersmashConfig.infinispanOperatorIndexImage(); - } + // infinispans.infinispan.org + protected abstract HasMetadataOperationsImpl infinispanCustomResourcesClient( + CustomResourceDefinitionContext crdc); - @Override - protected String getOperatorChannel() { - return IntersmashConfig.infinispanOperatorChannel(); - } + // caches.infinispan.org + protected abstract HasMetadataOperationsImpl cacheCustomResourcesClient( + CustomResourceDefinitionContext crdc); - /** - * The result is affected by the CR definition and specifically the method will return the {@code service} URL in 
- * case the CR {@code .spec.expose.type} is set to {@code NodePort} or {@code LoadBalancer} while it will return the - * route URL (i.e. for external access) when {@code .spec.expose.type} is set to {@code Route} - * @return The URL for the provisioned Infinispan service - */ - @Override - public URL getURL() { - final Service defaultInternalService = OpenShiftProvisioner.openShift.getService(getApplication().getName()); - String internalUrl = "http://" + defaultInternalService.getSpec().getClusterIP() + ":11222"; - String externalUrl = null; - if (getApplication().getInfinispan().getSpec().getExpose() != null) { - final Expose.Type exposedType = getApplication().getInfinispan().getSpec().getExpose().getType(); - switch (exposedType) { - case NodePort: - // see see https://github.com/infinispan/infinispan-operator/blob/2.0.x/pkg/apis/infinispan/v1/infinispan_types.go#L107 - externalUrl = "http://" - + OpenShiftProvisioner.openShift.getService(getApplication().getName() + "-external").getSpec() - .getClusterIP() - + getApplication().getInfinispan().getSpec().getExpose().getNodePort(); - break; - case LoadBalancer: - // see https://github.com/infinispan/infinispan-operator/blob/2.0.x/pkg/apis/infinispan/v1/infinispan_types.go#L111 - externalUrl = "http://" - + OpenShiftProvisioner.openShift.getService(getApplication().getName() + "-external").getSpec() - .getExternalIPs().get(0) - + getApplication().getInfinispan().getSpec().getExpose().getNodePort(); - break; - case Route: - // https://github.com/infinispan/infinispan-operator/blob/2.0.x/pkg/apis/infinispan/v1/infinispan_types.go#L116 - Route route = OpenShiftProvisioner.openShift.getRoute(getApplication().getName() + "-external"); - externalUrl = "https://" + route.getSpec().getHost(); - break; - default: - throw new UnsupportedOperationException(String.format("Unsupported .spec.expose.type: %s", exposedType)); - } - } - try { - return new URL(externalUrl == null ? internalUrl : externalUrl); - } catch (MalformedURLException e) { - throw new RuntimeException(String.format("Infinispan operator Internal URL \"%s\" is malformed.", internalUrl), e); - } - } + private static NonNamespaceOperation> INFINISPAN_CLIENT; + private static NonNamespaceOperation> INFINISPAN_CACHES_CLIENT; - /** - * Get a client capable of working with {@link #INFINISPAN_RESOURCE} custom resource. 
- * - * @return client for operations with {@link #INFINISPAN_RESOURCE} custom resource - */ public NonNamespaceOperation> infinispansClient() { if (INFINISPAN_CLIENT == null) { - CustomResourceDefinition crd = OpenShifts.admin().apiextensions().v1().customResourceDefinitions() - .withName(INFINISPAN_RESOURCE).get(); - CustomResourceDefinitionContext crdc = CustomResourceDefinitionContext.fromCrd(crd); - if (!getCustomResourceDefinitions().contains(INFINISPAN_RESOURCE)) { + CustomResourceDefinition crd = customResourceDefinitionsClient() + .withName(INFINISPAN_CRD_NAME).get(); + if (crd == null) { throw new RuntimeException(String.format("[%s] custom resource is not provided by [%s] operator.", - INFINISPAN_RESOURCE, OPERATOR_ID)); + INFINISPAN_CRD_NAME, OPERATOR_ID)); } - MixedOperation> infinispansClient = OpenShifts - .master() - .newHasMetadataOperation(crdc, Infinispan.class, InfinispanList.class); - INFINISPAN_CLIENT = infinispansClient.inNamespace(OpenShiftConfig.namespace()); + INFINISPAN_CLIENT = infinispanCustomResourcesClient(CustomResourceDefinitionContext.fromCrd(crd)); } return INFINISPAN_CLIENT; } + public NonNamespaceOperation> cachesClient() { + if (INFINISPAN_CACHES_CLIENT == null) { + CustomResourceDefinition crd = customResourceDefinitionsClient() + .withName(INFINISPAN_CACHE_CRD_NAME).get(); + if (crd == null) { + throw new RuntimeException(String.format("[%s] custom resource is not provided by [%s] operator.", + INFINISPAN_CACHE_CRD_NAME, OPERATOR_ID)); + } + INFINISPAN_CACHES_CLIENT = cacheCustomResourcesClient(CustomResourceDefinitionContext.fromCrd(crd)); + } + return INFINISPAN_CACHES_CLIENT; + } + /** * Get a reference to infinispan object. Use get() to get the actual object, or null in case it does not * exist on tested cluster. @@ -231,30 +234,6 @@ public Resource infinispan() { return infinispansClient().withName(getApplication().getInfinispan().getMetadata().getName()); } - // caches.infinispan.org - - /** - * Get a client capable of working with {@link #INFINISPAN_CACHE_RESOURCE} custom resource. - * - * @return client for operations with {@link #INFINISPAN_CACHE_RESOURCE} custom resource - */ - public NonNamespaceOperation> cachesClient() { - if (INFINISPAN_CACHES_CLIENT == null) { - CustomResourceDefinition crd = OpenShifts.admin().apiextensions().v1().customResourceDefinitions() - .withName(INFINISPAN_CACHE_RESOURCE).get(); - CustomResourceDefinitionContext crdc = CustomResourceDefinitionContext.fromCrd(crd); - if (!getCustomResourceDefinitions().contains(INFINISPAN_CACHE_RESOURCE)) { - throw new RuntimeException(String.format("[%s] custom resource is not provided by [%s] operator.", - INFINISPAN_CACHE_RESOURCE, OPERATOR_ID)); - } - MixedOperation> cachesClient = OpenShifts - .master() - .newHasMetadataOperation(crdc, Cache.class, CacheList.class); - INFINISPAN_CACHES_CLIENT = cachesClient.inNamespace(OpenShiftConfig.namespace()); - } - return INFINISPAN_CACHES_CLIENT; - } - /** * Get a reference to cache object. Use get() to get the actual object, or null in case it does not * exist on tested cluster. 
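getInfinispanPods() above filters the full pod list on the client side against the app=infinispan-pod and clusterName labels. The same selection can be pushed down to the API server as a label selector; a short sketch, assuming the provisioner's client() accessor as used throughout this class:

    // Equivalent server-side selection of the Infinispan pods belonging to this cluster
    List<Pod> infinispanPods = this.client().pods()
            .inNamespace(this.client().getNamespace())
            .withLabel("app", "infinispan-pod")
            .withLabel("clusterName", getApplication().getName())
            .list()
            .getItems();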
@@ -280,20 +259,4 @@ public List> caches() { .map(this::cache) .collect(Collectors.toList()); } - - private void waitForResourceReadiness() { - // it must be well-formed - // see https://github.com/kubernetes/apimachinery/blob/v0.20.4/pkg/apis/meta/v1/types.go#L1289 - new SimpleWaiter( - () -> infinispan().get().getStatus().getConditions().stream() - .anyMatch( - c -> c.getType().equals(InfinispanConditionBuilder.ConditionType.ConditionWellFormed.getValue()) - && c.getStatus().equals("True"))) - .reason("Wait for infinispan resource to be ready").level(Level.DEBUG) - .waitFor(); - // and with the expected number of Cache CR(s) - if (getApplication().getCaches().size() > 0) - new SimpleWaiter(() -> cachesClient().list().getItems().size() == caches().size()) - .reason("Wait for caches to be ready.").level(Level.DEBUG).waitFor(); // no isReady() for cache - } } diff --git a/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/KafkaOperatorProvisioner.java b/provisioners/src/main/java/org/jboss/intersmash/provision/operator/KafkaOperatorProvisioner.java similarity index 73% rename from provisioners/src/main/java/org/jboss/intersmash/provision/openshift/KafkaOperatorProvisioner.java rename to provisioners/src/main/java/org/jboss/intersmash/provision/operator/KafkaOperatorProvisioner.java index a312c68c1..10df4fe20 100644 --- a/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/KafkaOperatorProvisioner.java +++ b/provisioners/src/main/java/org/jboss/intersmash/provision/operator/KafkaOperatorProvisioner.java @@ -13,21 +13,25 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jboss.intersmash.provision.openshift; +package org.jboss.intersmash.provision.operator; import java.util.List; +import java.util.concurrent.TimeUnit; +import java.util.function.BooleanSupplier; import org.jboss.intersmash.IntersmashConfig; -import org.jboss.intersmash.application.openshift.KafkaOperatorApplication; -import org.jboss.intersmash.provision.openshift.operator.OperatorProvisioner; +import org.jboss.intersmash.application.operator.KafkaOperatorApplication; +import org.jboss.intersmash.provision.Provisioner; import org.slf4j.event.Level; -import cz.xtf.core.config.OpenShiftConfig; -import cz.xtf.core.event.helpers.EventHelper; -import cz.xtf.core.openshift.OpenShiftWaiters; import cz.xtf.core.waiting.SimpleWaiter; +import cz.xtf.core.waiting.failfast.FailFastCheck; import io.fabric8.kubernetes.api.model.DeletionPropagation; import io.fabric8.kubernetes.api.model.Pod; +import io.fabric8.kubernetes.api.model.StatusDetails; +import io.fabric8.kubernetes.api.model.apiextensions.v1.CustomResourceDefinition; +import io.fabric8.kubernetes.api.model.apiextensions.v1.CustomResourceDefinitionList; +import io.fabric8.kubernetes.client.NamespacedKubernetesClient; import io.fabric8.kubernetes.client.dsl.NonNamespaceOperation; import io.fabric8.kubernetes.client.dsl.Resource; import io.strimzi.api.kafka.Crds; @@ -45,87 +49,62 @@ * class. 
*/ @Slf4j -public class KafkaOperatorProvisioner extends OperatorProvisioner { +public abstract class KafkaOperatorProvisioner extends + OperatorProvisioner implements Provisioner { - private static final String OPERATOR_ID = IntersmashConfig.kafkaOperatorPackageManifest(); - - public KafkaOperatorProvisioner(@NonNull KafkaOperatorApplication kafkaOperatorApplication) { - super(kafkaOperatorApplication, OPERATOR_ID); + public KafkaOperatorProvisioner(@NonNull KafkaOperatorApplication application) { + super(application, KafkaOperatorProvisioner.OPERATOR_ID); } - /** - * Get a client capable of working with {@link Kafka} custom resource on our OpenShift instance. - * - * @return client for operations with {@link Kafka} custom resource on our OpenShift instance - */ - public NonNamespaceOperation> kafkasClient() { - return Crds.kafkaOperation(OpenShiftProvisioner.openShift).inNamespace(OpenShiftConfig.namespace()); + // ================================================================================================================= + // Kafka related + // ================================================================================================================= + public List getClusterOperatorPods() { + return this.client().pods().inNamespace(this.client().getNamespace()) + .withLabel("strimzi.io/kind", "cluster-operator").list().getItems(); } /** - * Kafka cluster resource on OpenShift instance. The Kafka resource returned is the one that is tied with the - * appropriate Application for which this provisioner is created for. The instance is determined based on the name - * value defined in specifications. - * - * @return returns Kafka cluster resource on OpenShift instance that is tied with our relevant Application only + * Get list of all Kafka pods on OpenShift instance with regards this Kafka cluster. + *

+ * Note: Operator actually creates also pods for Kafka, instance entity operator pods and cluster operator pod. + * But we list only Kafka related pods here. + * @return list of Kafka pods */ - public Resource kafka() { - return kafkasClient().withName(getApplication().getKafka().getMetadata().getName()); - } + public List getKafkaPods() { + List kafkaPods = this.client().pods().inNamespace(this.client().getNamespace()) + .withLabel("app.kubernetes.io/name", "kafka").list().getItems(); + // Let's filter out just those who match particular naming + for (Pod kafkaPod : kafkaPods) { + if (!kafkaPod.getMetadata().getName().contains(getApplication().getName() + "-kafka-")) { + kafkaPods.remove(kafkaPod); + } + } - /** - * Get a client capable of working with {@link KafkaUser} custom resource on our OpenShift instance. - * - * @return client for operations with {@link KafkaUser} custom resource on our OpenShift instance - */ - public NonNamespaceOperation> kafkasUserClient() { - return Crds.kafkaUserOperation(OpenShiftProvisioner.openShift).inNamespace(OpenShiftConfig.namespace()); + return kafkaPods; } /** - * Get a client capable of working with {@link KafkaTopic} custom resource on our OpenShift instance. - * - * @return client for operations with {@link KafkaTopic} custom resource on our OpenShift instance + * Get list of all Zookeeper pods on OpenShift instance with regards this Kafka cluster. + *

+ * Note: Operator actually creates also pods for Kafka, instance entity operator pods and cluster operator pod. + * But we list only Zookeeper related pods here. + * @return list of Kafka pods */ - public NonNamespaceOperation> kafkasTopicClient() { - return Crds.topicOperation(OpenShiftProvisioner.openShift).inNamespace(OpenShiftConfig.namespace()); - } - - @Override - public void deploy() { - ffCheck = FailFastUtils.getFailFastCheck(EventHelper.timeOfLastEventBMOrTestNamespaceOrEpoch(), - getApplication().getName()); - - subscribe(); - - if (getApplication().getKafka() != null) { - // Create a Kafka cluster instance - kafkasClient().createOrReplace(getApplication().getKafka()); - waitForKafkaClusterCreation(); - } - - if (getApplication().getTopics() != null) { - for (KafkaTopic topic : getApplication().getTopics()) { - // Create a Kafka topic instance - kafkasTopicClient().createOrReplace(topic); - - // Wait for it to be created and ready... - waitForKafkaTopicCreation(topic); - } - } - - if (getApplication().getUsers() != null) { - for (KafkaUser user : getApplication().getUsers()) { - // Create a Kafka user instance - kafkasUserClient().createOrReplace(user); - - // Wait for it to be created and ready... - waitForKafkaUserCreation(user); + public List getZookeeperPods() { + List kafkaPods = this.client().pods().inNamespace(this.client().getNamespace()) + .withLabel("app.kubernetes.io/name", "zookeeper").list().getItems(); + // Let's filter out just those who match particular naming + for (Pod kafkaPod : kafkaPods) { + if (!kafkaPod.getMetadata().getName().contains(getApplication().getName() + "-zookeeper-")) { + kafkaPods.remove(kafkaPod); } } + return kafkaPods; } - private void waitForKafkaClusterCreation() { + public void waitForKafkaClusterCreation() { + FailFastCheck ffCheck = () -> false; int expectedReplicas = getApplication().getKafka().getSpec().getKafka().getReplicas(); new SimpleWaiter(() -> kafka().get() != null) .failFast(ffCheck) @@ -247,101 +226,96 @@ private void waitForKafkaUserCreation(KafkaUser user) { "Waiting for user '" + userName + "' condition to be 'Ready'").level(Level.DEBUG).waitFor(); } + // ================================================================================================================= + // Related to generic provisioning behavior + // ================================================================================================================= @Override - public void undeploy() { - // delete the resources - - if (getApplication().getUsers() != null) { - if (kafkasUserClient().delete().isEmpty()) { - log.warn("Wasn't able to remove all relevant 'Kafka User' resources created for '" + getApplication().getName() - + "' instance!"); - } - - new SimpleWaiter(() -> kafkasUserClient().list().getItems().isEmpty()).level(Level.DEBUG).waitFor(); - } - - if (getApplication().getTopics() != null) { - if (kafkasTopicClient().delete().isEmpty()) { - log.warn("Wasn't able to remove all relevant 'Kafka Topic' resources created for '" + getApplication().getName() - + "' instance!"); - } - - new SimpleWaiter(() -> kafkasTopicClient().list().getItems().isEmpty()).level(Level.DEBUG).waitFor(); - } - - if (getApplication().getKafka() != null) { - if (kafka().withPropagationPolicy(DeletionPropagation.FOREGROUND).delete().isEmpty()) { - log.warn("Wasn't able to remove all relevant 'Kafka' resources created for '" + getApplication().getName() - + "' instance!"); - } - - new SimpleWaiter(() -> getKafkaPods().size() == 0).level(Level.DEBUG).waitFor(); - } - - 
unsubscribe(); - - OpenShiftWaiters.get(OpenShiftProvisioner.openShift, ffCheck) - .areExactlyNPodsReady(0, "name", getApplication().getName() + "-cluster-operator") - .level(Level.DEBUG).waitFor(); - } - - public static String getOperatorId() { - return OPERATOR_ID; + protected String getOperatorCatalogSource() { + return IntersmashConfig.kafkaOperatorCatalogSource(); } - public KafkaUserList getUsers() { - return kafkasUserClient().list(); + @Override + protected String getOperatorIndexImage() { + return IntersmashConfig.kafkaOperatorIndexImage(); } - public KafkaTopicList getTopics() { - return kafkasTopicClient().list(); + @Override + protected String getOperatorChannel() { + return IntersmashConfig.kafkaOperatorChannel(); } @Override public List getPods() { - return OpenShiftProvisioner.openShift.getLabeledPods("strimzi.io/cluster", getApplication().getName()); + return this.client().pods().inNamespace(this.client().getNamespace()) + .withLabel("strimzi.io/cluster", getApplication().getName()).list().getItems(); } - public List getClusterOperatorPods() { - return OpenShiftProvisioner.openShift.getLabeledPods("strimzi.io/kind", "cluster-operator"); - } + @Override + public void deploy() { + subscribe(); + if (getApplication().getKafka() != null) { + // Create a Kafka cluster instance + kafkasClient().createOrReplace(getApplication().getKafka()); + waitForKafkaClusterCreation(); + } + if (getApplication().getTopics() != null) { + for (KafkaTopic topic : getApplication().getTopics()) { + // Create a Kafka topic instance + kafkasTopicClient().createOrReplace(topic); - /** - * Get list of all Kafka pods on OpenShift instance with regards this Kafka cluster. - *

- * Note: Operator actually creates also pods for Kafka, instance entity operator pods and cluster operator pod. - * But we list only Kafka related pods here. - * @return list of Kafka pods - */ - public List getKafkaPods() { - List kafkaPods = OpenShiftProvisioner.openShift.getLabeledPods("app.kubernetes.io/name", "kafka"); - // Let's filter out just those who match particular naming - for (Pod kafkaPod : kafkaPods) { - if (!kafkaPod.getMetadata().getName().contains(getApplication().getName() + "-kafka-")) { - kafkaPods.remove(kafkaPod); + // Wait for it to be created and ready... + waitForKafkaTopicCreation(topic); } } + if (getApplication().getUsers() != null) { + for (KafkaUser user : getApplication().getUsers()) { + // Create a Kafka user instance + kafkasUserClient().createOrReplace(user); - return kafkaPods; + // Wait for it to be created and ready... + waitForKafkaUserCreation(user); + } + } } - /** - * Get list of all Zookeeper pods on OpenShift instance with regards this Kafka cluster. - *

- * Note: Operator actually creates also pods for Kafka, instance entity operator pods and cluster operator pod. - * But we list only Zookeeper related pods here. - * @return list of Kafka pods - */ - public List getZookeeperPods() { - List kafkaPods = OpenShiftProvisioner.openShift.getLabeledPods("app.kubernetes.io/name", "zookeeper"); - // Let's filter out just those who match particular naming - for (Pod kafkaPod : kafkaPods) { - if (!kafkaPod.getMetadata().getName().contains(getApplication().getName() + "-zookeeper-")) { - kafkaPods.remove(kafkaPod); + @Override + public void undeploy() { + // delete the resources + List deletionDetails; + boolean deleted; + if (getApplication().getUsers() != null) { + deletionDetails = kafkasUserClient().delete(); + deleted = deletionDetails.stream().allMatch(d -> d.getCauses().isEmpty()); + if (!deleted) { + log.warn("Wasn't able to remove all relevant 'Kafka User' resources created for '{}' instance!", + getApplication().getName()); } + new SimpleWaiter(() -> kafkasUserClient().list().getItems().isEmpty()).level(Level.DEBUG).waitFor(); } - - return kafkaPods; + if (getApplication().getTopics() != null) { + deletionDetails = kafkasTopicClient().delete(); + deleted = deletionDetails.stream().allMatch(d -> d.getCauses().isEmpty()); + if (!deleted) { + log.warn("Wasn't able to remove all relevant 'Kafka Topic' resources created for '{}' instance!", + getApplication().getName()); + } + new SimpleWaiter(() -> kafkasTopicClient().list().getItems().isEmpty()).level(Level.DEBUG).waitFor(); + } + if (getApplication().getKafka() != null) { + deletionDetails = kafka().withPropagationPolicy(DeletionPropagation.FOREGROUND).delete(); + deleted = deletionDetails.stream().allMatch(d -> d.getCauses().isEmpty()); + if (!deleted) { + log.warn("Wasn't able to remove all relevant 'Kafka' resources created for '{}' instance!", + getApplication().getName()); + } + new SimpleWaiter(() -> getKafkaPods().isEmpty()).level(Level.DEBUG).waitFor(); + } + unsubscribe(); + BooleanSupplier bs = () -> getPods().stream().noneMatch(p -> p.getMetadata().getLabels().get("name") != null + && p.getMetadata().getLabels().get("name").equals(getApplication().getName() + "-cluster-operator")); + new SimpleWaiter(bs, TimeUnit.MINUTES, 2, + "Waiting for 0 pods with label \"name\"=" + getApplication().getName() + "-cluster-operator") + .waitFor(); } @Override @@ -357,18 +331,64 @@ public void scale(int replicas, boolean wait) { } } - @Override - protected String getOperatorCatalogSource() { - return IntersmashConfig.kafkaOperatorCatalogSource(); + // ================================================================================================================= + // Client related + // ================================================================================================================= + // this is the packagemanifest for the hyperfoil operator; + // you can get it with command: + // oc get packagemanifest hyperfoil-bundle -o template --template='{{ .metadata.name }}' + public static String OPERATOR_ID = IntersmashConfig.kafkaOperatorPackageManifest(); + + /** + * Generic CRD client which is used by client builders default implementation to build the CRDs client + * + * @return A {@link NonNamespaceOperation} instance that represents a + */ + protected abstract NonNamespaceOperation> customResourceDefinitionsClient(); + + /** + * Get a client capable of working with {@link Kafka} custom resource on our OpenShift instance. 
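In the reworked undeploy() above, fabric8's delete() no longer yields a boolean but a List of StatusDetails: a deletion is treated as clean when every entry reports no causes, and a SimpleWaiter then blocks until the resources are actually gone. A compact sketch of that idiom for the topic client, with log and waiter values mirroring the ones used above:

    // Delete all KafkaTopic CRs, check the StatusDetails for failure causes, then wait
    // until the list is really empty before moving on.
    List<StatusDetails> deletionDetails = kafkasTopicClient().delete();
    boolean deleted = deletionDetails.stream().allMatch(d -> d.getCauses().isEmpty());
    if (!deleted) {
        log.warn("Wasn't able to remove all relevant 'Kafka Topic' resources created for '{}' instance!",
                getApplication().getName());
    }
    new SimpleWaiter(() -> kafkasTopicClient().list().getItems().isEmpty())
            .reason("Wait for all KafkaTopic resources to be removed.")
            .level(Level.DEBUG)
            .waitFor();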
+ * + * @return client for operations with {@link Kafka} custom resource on our OpenShift instance + */ + public NonNamespaceOperation> kafkasClient() { + return Crds.kafkaOperation(this.client()).inNamespace(this.client().getNamespace()); } - @Override - protected String getOperatorIndexImage() { - return IntersmashConfig.kafkaOperatorIndexImage(); + /** + * Get a client capable of working with {@link KafkaUser} custom resource on our OpenShift instance. + * + * @return client for operations with {@link KafkaUser} custom resource on our OpenShift instance + */ + public NonNamespaceOperation> kafkasUserClient() { + return Crds.kafkaUserOperation(this.client()).inNamespace(this.client().getNamespace()); } - @Override - protected String getOperatorChannel() { - return IntersmashConfig.kafkaOperatorChannel(); + /** + * Get a client capable of working with {@link KafkaTopic} custom resource on our OpenShift instance. + * + * @return client for operations with {@link KafkaTopic} custom resource on our OpenShift instance + */ + public NonNamespaceOperation> kafkasTopicClient() { + return Crds.topicOperation(this.client()).inNamespace(this.client().getNamespace()); + } + + /** + * Kafka cluster resource on OpenShift instance. The Kafka resource returned is the one that is tied with the + * appropriate Application for which this provisioner is created for. The instance is determined based on the name + * value defined in specifications. + * + * @return returns Kafka cluster resource on OpenShift instance that is tied with our relevant Application only + */ + public Resource kafka() { + return kafkasClient().withName(getApplication().getKafka().getMetadata().getName()); + } + + public KafkaUserList getUsers() { + return kafkasUserClient().list(); + } + + public KafkaTopicList getTopics() { + return kafkasTopicClient().list(); } } diff --git a/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/KeycloakOperatorProvisioner.java b/provisioners/src/main/java/org/jboss/intersmash/provision/operator/KeycloakOperatorProvisioner.java similarity index 60% rename from provisioners/src/main/java/org/jboss/intersmash/provision/openshift/KeycloakOperatorProvisioner.java rename to provisioners/src/main/java/org/jboss/intersmash/provision/operator/KeycloakOperatorProvisioner.java index 36c8d79ff..e3554671b 100644 --- a/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/KeycloakOperatorProvisioner.java +++ b/provisioners/src/main/java/org/jboss/intersmash/provision/operator/KeycloakOperatorProvisioner.java @@ -13,20 +13,21 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.jboss.intersmash.provision.openshift; +package org.jboss.intersmash.provision.operator; -import java.net.MalformedURLException; import java.net.URL; import java.text.MessageFormat; import java.util.List; import java.util.Objects; +import java.util.concurrent.TimeUnit; +import java.util.function.BooleanSupplier; import java.util.stream.Collectors; import org.assertj.core.util.Lists; import org.assertj.core.util.Strings; import org.jboss.intersmash.IntersmashConfig; -import org.jboss.intersmash.application.openshift.KeycloakOperatorApplication; -import org.jboss.intersmash.provision.openshift.operator.OperatorProvisioner; +import org.jboss.intersmash.application.operator.KeycloakOperatorApplication; +import org.jboss.intersmash.provision.Provisioner; import org.jboss.intersmash.util.tls.CertificatesUtils; import org.keycloak.k8s.v2alpha1.Keycloak; import org.keycloak.k8s.v2alpha1.KeycloakOperatorKeycloakList; @@ -35,94 +36,51 @@ import org.keycloak.k8s.v2alpha1.keycloakspec.Http; import org.slf4j.event.Level; -import cz.xtf.core.config.OpenShiftConfig; -import cz.xtf.core.event.helpers.EventHelper; -import cz.xtf.core.openshift.OpenShiftWaiters; -import cz.xtf.core.openshift.OpenShifts; +import cz.xtf.core.http.Https; import cz.xtf.core.waiting.SimpleWaiter; import cz.xtf.core.waiting.failfast.FailFastCheck; import io.fabric8.kubernetes.api.model.DeletionPropagation; import io.fabric8.kubernetes.api.model.Pod; import io.fabric8.kubernetes.api.model.apiextensions.v1.CustomResourceDefinition; +import io.fabric8.kubernetes.api.model.apiextensions.v1.CustomResourceDefinitionList; import io.fabric8.kubernetes.api.model.apps.StatefulSet; -import io.fabric8.kubernetes.client.dsl.MixedOperation; +import io.fabric8.kubernetes.client.NamespacedKubernetesClient; import io.fabric8.kubernetes.client.dsl.NonNamespaceOperation; import io.fabric8.kubernetes.client.dsl.Resource; import io.fabric8.kubernetes.client.dsl.base.CustomResourceDefinitionContext; -import lombok.NonNull; +import io.fabric8.kubernetes.client.dsl.internal.HasMetadataOperationsImpl; /** * Keycloak operator provisioner */ -public class KeycloakOperatorProvisioner extends OperatorProvisioner { - private static final String KEYCLOAK_RESOURCE = "keycloaks.k8s.keycloak.org"; - private static final String KEYCLOAK_REALM_IMPORT_RESOURCE = "keycloakrealmimports.k8s.keycloak.org"; - private static NonNamespaceOperation> KEYCLOAK_CUSTOM_RESOURCE_CLIENT; - private static NonNamespaceOperation> KEYCLOAK_REALM_IMPORT_CUSTOM_RESOURCE_CLIENT; +public abstract class KeycloakOperatorProvisioner extends + OperatorProvisioner implements Provisioner { - public NonNamespaceOperation> keycloakClient() { - if (KEYCLOAK_CUSTOM_RESOURCE_CLIENT == null) { - CustomResourceDefinition crd = OpenShifts.admin().apiextensions().v1().customResourceDefinitions() - .withName(KEYCLOAK_RESOURCE).get(); - CustomResourceDefinitionContext crdc = CustomResourceDefinitionContext.fromCrd(crd); - if (!getCustomResourceDefinitions().contains(KEYCLOAK_RESOURCE)) { - throw new RuntimeException(String.format("[%s] custom resource is not provided by [%s] operator.", - KEYCLOAK_RESOURCE, OPERATOR_ID)); - } - MixedOperation> crClient = OpenShifts - .master().newHasMetadataOperation(crdc, Keycloak.class, KeycloakOperatorKeycloakList.class); - KEYCLOAK_CUSTOM_RESOURCE_CLIENT = crClient.inNamespace(OpenShiftConfig.namespace()); - } - return KEYCLOAK_CUSTOM_RESOURCE_CLIENT; - } - - public NonNamespaceOperation> keycloakRealmImportClient() { - if 
(KEYCLOAK_REALM_IMPORT_CUSTOM_RESOURCE_CLIENT == null) { - CustomResourceDefinition crd = OpenShifts.admin().apiextensions().v1().customResourceDefinitions() - .withName(KEYCLOAK_REALM_IMPORT_RESOURCE).get(); - CustomResourceDefinitionContext crdc = CustomResourceDefinitionContext.fromCrd(crd); - if (!getCustomResourceDefinitions().contains(KEYCLOAK_REALM_IMPORT_RESOURCE)) { - throw new RuntimeException(String.format("[%s] custom resource is not provided by [%s] operator.", - KEYCLOAK_REALM_IMPORT_RESOURCE, OPERATOR_ID)); - } - MixedOperation> crClient = OpenShifts - .master() - .newHasMetadataOperation(crdc, KeycloakRealmImport.class, KeycloakOperatorRealmImportList.class); - KEYCLOAK_REALM_IMPORT_CUSTOM_RESOURCE_CLIENT = crClient.inNamespace(OpenShiftConfig.namespace()); - } - return KEYCLOAK_REALM_IMPORT_CUSTOM_RESOURCE_CLIENT; - } - - private static final String OPERATOR_ID = IntersmashConfig.keycloakOperatorPackageManifest(); - protected FailFastCheck ffCheck = () -> false; - - public KeycloakOperatorProvisioner(@NonNull KeycloakOperatorApplication application) { - super(application, OPERATOR_ID); - } - - public static String getOperatorId() { - return OPERATOR_ID; + public KeycloakOperatorProvisioner(KeycloakOperatorApplication application) { + super(application, KeycloakOperatorProvisioner.OPERATOR_ID); } + // ================================================================================================================= + // Related to generic provisioning behavior + // ================================================================================================================= @Override - protected String getOperatorCatalogSource() { + public String getOperatorCatalogSource() { return IntersmashConfig.keycloakOperatorCatalogSource(); } @Override - protected String getOperatorIndexImage() { + public String getOperatorIndexImage() { return IntersmashConfig.keycloakOperatorIndexImage(); } @Override - protected String getOperatorChannel() { + public String getOperatorChannel() { return IntersmashConfig.keycloakOperatorChannel(); } @Override public void deploy() { - ffCheck = FailFastUtils.getFailFastCheck(EventHelper.timeOfLastEventBMOrTestNamespaceOrEpoch(), - getApplication().getName()); + FailFastCheck ffCheck = () -> false; // Keycloak Operator codebase contains the name of the Keycloak image to deploy: user can override Keycloak image to // deploy using environment variables in Keycloak Operator Subscription subscribe(); @@ -148,7 +106,7 @@ public void deploy() { CertificatesUtils.CertificateAndKey certificateAndKey = CertificatesUtils .generateSelfSignedCertificateAndKey( getApplication().getKeycloak().getSpec().getHostname().getHostname().replaceFirst("[.].*$", ""), - tlsSecretName); + tlsSecretName, this.client(), this.client().getNamespace()); // add config to keycloak if (getApplication().getKeycloak().getSpec().getHttp() == null) { Http http = new Http(); @@ -163,17 +121,15 @@ public void deploy() { // 1. check externalDatabase exists if (getApplication().getKeycloak().getSpec().getDb() != null) { // 2. 
Service "spec.db.host" must be installed beforehand - new SimpleWaiter(() -> OpenShiftProvisioner.openShift - .getService(getApplication().getKeycloak().getSpec().getDb().getHost()) != null) + new SimpleWaiter(() -> this.client().services().withName(getApplication().getKeycloak().getSpec().getDb().getHost()) + .get() != null) .level(Level.DEBUG).waitFor(); } // create custom resources keycloakClient().createOrReplace(getApplication().getKeycloak()); - if (getApplication().getKeycloakRealmImports().size() > 0) { - getApplication().getKeycloakRealmImports().stream() - .forEach((i) -> keycloakRealmImportClient().resource(i).create()); - } + getApplication().getKeycloakRealmImports().stream() + .forEach(ri -> keycloakRealmImportClient().createOrReplace(ri)); // Wait for Keycloak (and PostgreSQL) to be ready waitFor(getApplication().getKeycloak()); @@ -182,9 +138,9 @@ public void deploy() { // check that route is up, only if there's a valid external URL available URL externalUrl = getURL(); if ((getApplication().getKeycloak().getSpec().getInstances() > 0) && (externalUrl != null)) { - WaitersUtil.routeIsUp(externalUrl.toExternalForm()) - .level(Level.DEBUG) - .waitFor(); + new SimpleWaiter( + () -> Https.getCode(getURL().toExternalForm()) != 503) + .reason("Wait until the route is ready to serve."); } } @@ -193,9 +149,12 @@ public void waitFor(Keycloak keycloak) { if (replicas > 0) { // wait for >= 1 pods with label controller-revision-hash=keycloak-d86bb6ddc String controllerRevisionHash = getStatefulSet().getStatus().getUpdateRevision(); - OpenShiftWaiters.get(OpenShiftProvisioner.openShift, ffCheck) - .areExactlyNPodsReady(replicas.intValue(), "controller-revision-hash", - controllerRevisionHash) + BooleanSupplier bs = () -> getPods().stream() + .filter(p -> p.getMetadata().getLabels().get("controller-revision-hash") != null + && p.getMetadata().getLabels().get("controller-revision-hash").equals(controllerRevisionHash)) + .collect(Collectors.toList()).size() == replicas.intValue(); + new SimpleWaiter(bs, TimeUnit.MINUTES, 2, + "Waiting for pods with label \"controller-revision-hash\"=" + controllerRevisionHash + " to be scaled") .waitFor(); } } @@ -237,39 +196,20 @@ private void waitForKeycloakResourceReadiness() { .reason("Wait for KeycloakRealmImports to be done.").level(Level.DEBUG).waitFor(); } - /** - * Get a reference to keycloak object. Use get() to get the actual object, or null in case it does not - * exist on tested cluster. 
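waitFor() and scale() above both key on the StatefulSet's update revision: pods whose controller-revision-hash label matches status.updateRevision belong to the current revision, and readiness is declared once their count equals the requested number of instances. A condensed sketch of that check, taking the expected count from the application spec as the code above does:

    // Readiness check for the current StatefulSet revision of the Keycloak instance
    String revision = getStatefulSet().getStatus().getUpdateRevision();
    long expected = getApplication().getKeycloak().getSpec().getInstances();
    BooleanSupplier revisionScaled = () -> getPods().stream()
            .filter(p -> revision.equals(p.getMetadata().getLabels().get("controller-revision-hash")))
            .count() == expected;
    new SimpleWaiter(revisionScaled, TimeUnit.MINUTES, 2,
            "Waiting for " + expected + " pods with controller-revision-hash=" + revision)
            .waitFor();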
- * @return A concrete {@link Resource} instance representing the {@link org.jboss.intersmash.provision.openshift.operator.keycloak.keycloak.Keycloak} resource definition - */ - public Resource keycloak() { - return keycloakClient() - .withName(getApplication().getKeycloak().getMetadata().getName()); - } - - public List keycloakRealmImports() { - return keycloakRealmImportClient().list().getItems() - .stream().filter( - realm -> getApplication().getKeycloakRealmImports().stream().map( - ri -> ri.getMetadata().getName()) - .anyMatch(riName -> riName.equalsIgnoreCase(realm.getMetadata().getName()))) - .collect(Collectors.toList()); - } - /** * @return the underlying StatefulSet which provisions the cluster */ - private StatefulSet getStatefulSet() { - final String STATEFUL_SET_NAME = getApplication().getKeycloak().getMetadata().getName(); + public StatefulSet getStatefulSet() { + final String statefulSetName = getApplication().getKeycloak().getMetadata().getName(); new SimpleWaiter( - () -> Objects.nonNull(OpenShiftProvisioner.openShift.getStatefulSet(STATEFUL_SET_NAME))) + () -> Objects.nonNull(this.client().apps().statefulSets().withName(statefulSetName).get())) .reason( MessageFormat.format( "Waiting for StatefulSet \"{0}\" to be created for Keycloak \"{1}\".", - STATEFUL_SET_NAME, + statefulSetName, getApplication().getKeycloak().getMetadata().getName())) .level(Level.DEBUG).timeout(60000L).waitFor(); - return OpenShiftProvisioner.openShift.getStatefulSet(STATEFUL_SET_NAME); + return this.client().apps().statefulSets().withName(statefulSetName).get(); } @Override @@ -288,9 +228,17 @@ public void undeploy() { .reason("Wait for Keycloak instances to be deleted.").level(Level.DEBUG).waitFor(); // wait for 0 pods - OpenShiftWaiters.get(OpenShiftProvisioner.openShift, () -> false) - .areExactlyNPodsReady(0, "app", getApplication().getKeycloak().getKind().toLowerCase()).level(Level.DEBUG) + BooleanSupplier bs = () -> this.client().pods().inNamespace(this.client().getNamespace()).list().getItems().stream() + .filter(p -> !com.google.common.base.Strings.isNullOrEmpty(p.getMetadata().getLabels().get("app")) + && p.getMetadata().getLabels().get("app") + .equals(getApplication().getKeycloak().getKind().toLowerCase())) + .collect(Collectors.toList()).isEmpty(); + String reason = "Waiting for exactly 0 pods with label \"app\"=" + + getApplication().getKeycloak().getKind().toLowerCase() + " to be ready."; + new SimpleWaiter(bs, TimeUnit.MINUTES, 2, reason) + .level(Level.DEBUG) .waitFor(); + unsubscribe(); } @@ -302,8 +250,12 @@ public void scale(int replicas, boolean wait) { tmpKeycloak.getSpec().setInstances(Integer.toUnsignedLong(replicas)); keycloak().replace(tmpKeycloak); if (wait) { - OpenShiftWaiters.get(OpenShiftProvisioner.openShift, ffCheck) - .areExactlyNPodsReady(replicas, "controller-revision-hash", controllerRevisionHash) + BooleanSupplier bs = () -> getPods().stream() + .filter(p -> p.getMetadata().getLabels().get("controller-revision-hash") != null + && p.getMetadata().getLabels().get("controller-revision-hash").equals(controllerRevisionHash)) + .collect(Collectors.toList()).size() == replicas; + new SimpleWaiter(bs, TimeUnit.MINUTES, 2, + "Waiting for pods with label \"controller-revision-hash\"=" + controllerRevisionHash + " to be scaled") .level(Level.DEBUG) .waitFor(); } @@ -314,46 +266,99 @@ public void scale(int replicas, boolean wait) { .reason("Wait for Keycloak resource to be ready").level(Level.DEBUG).waitFor(); // check that route is up if (originalReplicas == 0 && replicas 
> 0) { - WaitersUtil.routeIsUp(getURL().toExternalForm()) - .level(Level.DEBUG) - .waitFor(); + new SimpleWaiter( + () -> Https.getCode(getURL().toExternalForm()) != 503) + .reason("Wait until the route is ready to serve."); } } - @Override public List getPods() { - String STATEFUL_SET_NAME = getApplication().getKeycloak().getMetadata().getName(); - StatefulSet statefulSet = OpenShiftProvisioner.openShift.getStatefulSet(STATEFUL_SET_NAME); + final String statefulSetName = getApplication().getKeycloak().getMetadata().getName(); + StatefulSet statefulSet = this.client().apps().statefulSets().withName(statefulSetName).get(); return Objects.nonNull(statefulSet) - ? OpenShiftProvisioner.openShift.getLabeledPods("controller-revision-hash", - statefulSet.getStatus().getUpdateRevision()) + ? getPods().stream() + .filter(p -> p.getMetadata().getLabels().get("controller-revision-hash") != null + && p.getMetadata().getLabels().get("controller-revision-hash") + .equals(statefulSet.getStatus().getUpdateRevision())) + .collect(Collectors.toList()) : Lists.emptyList(); } - @Override - public URL getURL() { - String host = OpenShiftProvisioner.openShift.routes().list().getItems() - .stream().filter( - route -> route.getMetadata().getName().startsWith( - keycloak().get().getMetadata().getName()) - && - route.getMetadata().getLabels().entrySet() - .stream().filter( - label -> label.getKey().equalsIgnoreCase("app.kubernetes.io/instance") - && - label.getValue().equalsIgnoreCase( - keycloak().get().getMetadata().getLabels() - .get("app"))) - .count() == 1 - - ).findFirst() - .orElseThrow(() -> new RuntimeException( - String.format("No route for Keycloak %s!", keycloak().get().getMetadata().getName()))) - .getSpec().getHost(); - try { - return Strings.isNullOrEmpty(host) ? 
null : new URL(String.format("https://%s", host)); - } catch (MalformedURLException e) { - throw new RuntimeException(String.format("Keycloak operator External URL \"%s\" is malformed.", host), e); + // ================================================================================================================= + // Client related + // ================================================================================================================= + // this is the packagemanifest for the operator; + // you can get it with command: + // oc get packagemanifest -o template --template='{{ .metadata.name }}' + public static String OPERATOR_ID = IntersmashConfig.keycloakOperatorPackageManifest(); + // this is the name of the CustomResourceDefinition(s) + // you can get it with command: + // oc get crd <crd-name> -o template --template='{{ .metadata.name }}' + public String KEYCLOAK_CRD_NAME = "keycloaks.k8s.keycloak.org"; + + public String KEYCLOAK_REALM_IMPORT_CRD_NAME = "keycloakrealmimports.k8s.keycloak.org"; + + /** + * Generic CRD client which is used by the client builders' default implementation to build the CRD clients + * + * @return A {@link NonNamespaceOperation} instance that represents a client for {@link CustomResourceDefinition} resources + */ + public abstract NonNamespaceOperation> customResourceDefinitionsClient(); + + private static NonNamespaceOperation> KEYCLOAKS_CLIENT; + private static NonNamespaceOperation> KEYCLOAK_REALM_IMPORTS_CLIENT; + + // keycloaks.k8s.keycloak.org + protected abstract HasMetadataOperationsImpl keycloaksCustomResourcesClient( + CustomResourceDefinitionContext crdc); + + // keycloakrealmimports.k8s.keycloak.org + protected abstract HasMetadataOperationsImpl keycloakRealmImportsCustomResourcesClient( + CustomResourceDefinitionContext crdc); + + public NonNamespaceOperation> keycloakClient() { + if (KEYCLOAKS_CLIENT == null) { + CustomResourceDefinition crd = customResourceDefinitionsClient() + .withName(KEYCLOAK_CRD_NAME).get(); + if (crd == null) { + throw new RuntimeException(String.format("[%s] custom resource is not provided by [%s] operator.", + KEYCLOAK_CRD_NAME, OPERATOR_ID)); + } + KEYCLOAKS_CLIENT = keycloaksCustomResourcesClient(CustomResourceDefinitionContext.fromCrd(crd)); + } + return KEYCLOAKS_CLIENT; + } + + public NonNamespaceOperation> keycloakRealmImportClient() { + if (KEYCLOAK_REALM_IMPORTS_CLIENT == null) { + CustomResourceDefinition crd = customResourceDefinitionsClient() + .withName(KEYCLOAK_REALM_IMPORT_CRD_NAME).get(); + if (crd == null) { + throw new RuntimeException(String.format("[%s] custom resource is not provided by [%s] operator.", + KEYCLOAK_REALM_IMPORT_CRD_NAME, OPERATOR_ID)); + } + KEYCLOAK_REALM_IMPORTS_CLIENT = keycloakRealmImportsCustomResourcesClient( + CustomResourceDefinitionContext.fromCrd(crd)); } + return KEYCLOAK_REALM_IMPORTS_CLIENT; + } + + /** + * Get a reference to keycloak object. Use get() to get the actual object, or null in case it does not + * exist on tested cluster.
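+ * <p>A minimal usage sketch (assuming a concrete provisioner subclass wired against a running cluster):</p>
+ * <pre>{@code
+ * Keycloak cr = keycloak().get(); // null until the operator has created the custom resource
+ * String name = cr != null ? cr.getMetadata().getName() : null;
+ * }</pre>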
+ * @return A concrete {@link Resource} instance representing the {@link Keycloak} resource definition + */ + public Resource keycloak() { + return keycloakClient() + .withName(getApplication().getKeycloak().getMetadata().getName()); + } + + public List keycloakRealmImports() { + return keycloakRealmImportClient().list().getItems() + .stream().filter( + realm -> getApplication().getKeycloakRealmImports().stream().map( + ri -> ri.getMetadata().getName()) + .anyMatch(riName -> riName.equalsIgnoreCase(realm.getMetadata().getName()))) + .collect(Collectors.toList()); } } diff --git a/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/RhSsoOperatorProvisioner.java b/provisioners/src/main/java/org/jboss/intersmash/provision/operator/RhSsoOperatorProvisioner.java similarity index 63% rename from provisioners/src/main/java/org/jboss/intersmash/provision/openshift/RhSsoOperatorProvisioner.java rename to provisioners/src/main/java/org/jboss/intersmash/provision/operator/RhSsoOperatorProvisioner.java index 514ad82f5..fa9508896 100644 --- a/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/RhSsoOperatorProvisioner.java +++ b/provisioners/src/main/java/org/jboss/intersmash/provision/operator/RhSsoOperatorProvisioner.java @@ -13,20 +13,21 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.jboss.intersmash.provision.openshift; +package org.jboss.intersmash.provision.operator; import java.net.MalformedURLException; import java.net.URL; import java.util.List; -import java.util.Map; import java.util.Objects; +import java.util.concurrent.TimeUnit; +import java.util.function.BooleanSupplier; import java.util.stream.Collectors; import org.assertj.core.util.Lists; import org.assertj.core.util.Strings; import org.jboss.intersmash.IntersmashConfig; -import org.jboss.intersmash.application.openshift.RhSsoOperatorApplication; -import org.jboss.intersmash.provision.openshift.operator.OperatorProvisioner; +import org.jboss.intersmash.application.operator.RhSsoOperatorApplication; +import org.jboss.intersmash.provision.Provisioner; import org.jboss.intersmash.provision.openshift.operator.keycloak.backup.KeycloakBackupList; import org.jboss.intersmash.provision.openshift.operator.keycloak.client.KeycloakClientList; import org.jboss.intersmash.provision.openshift.operator.keycloak.keycloak.KeycloakList; @@ -39,54 +40,50 @@ import org.keycloak.v1alpha1.KeycloakUser; import org.slf4j.event.Level; -import cz.xtf.core.config.OpenShiftConfig; -import cz.xtf.core.event.helpers.EventHelper; -import cz.xtf.core.openshift.OpenShiftWaiters; -import cz.xtf.core.openshift.OpenShifts; +import cz.xtf.core.http.Https; import cz.xtf.core.openshift.helpers.ResourceParsers; import cz.xtf.core.waiting.SimpleWaiter; +import cz.xtf.core.waiting.failfast.FailFastCheck; import io.fabric8.kubernetes.api.model.DeletionPropagation; import io.fabric8.kubernetes.api.model.Pod; +import io.fabric8.kubernetes.api.model.Service; import io.fabric8.kubernetes.api.model.apiextensions.v1.CustomResourceDefinition; +import io.fabric8.kubernetes.api.model.apiextensions.v1.CustomResourceDefinitionList; import io.fabric8.kubernetes.api.model.apps.StatefulSet; -import io.fabric8.kubernetes.client.dsl.MixedOperation; +import io.fabric8.kubernetes.client.NamespacedKubernetesClient; import io.fabric8.kubernetes.client.dsl.NonNamespaceOperation; import io.fabric8.kubernetes.client.dsl.Resource; import 
io.fabric8.kubernetes.client.dsl.base.CustomResourceDefinitionContext; -import lombok.NonNull; +import io.fabric8.kubernetes.client.dsl.internal.HasMetadataOperationsImpl; /** - * Keycloak operator provisioner + * Red Hat SSO (7.6, based on legacy Keycloak v20) operator provisioner */ @Deprecated(since = "0.0.2") -public class RhSsoOperatorProvisioner extends OperatorProvisioner { - private static final String KEYCLOAK_RESOURCE = "keycloaks.keycloak.org"; - private static NonNamespaceOperation> KEYCLOAKS_CLIENT; - - private static final String KEYCLOAK_REALM_RESOURCE = "keycloakrealms.keycloak.org"; - private static NonNamespaceOperation> KEYCLOAK_REALMS_CLIENT; - - private static final String KEYCLOAK_BACKUP_RESOURCE = "keycloakbackups.keycloak.org"; - private static NonNamespaceOperation> KEYCLOAK_BACKUPS_CLIENT; - - private static final String KEYCLOAK_CLIENT_RESOURCE = "keycloakclients.keycloak.org"; - private static NonNamespaceOperation> KEYCLOAK_CLIENTS_CLIENT; +public abstract class RhSsoOperatorProvisioner extends + OperatorProvisioner implements Provisioner { - private static final String KEYCLOAK_USER_RESOURCE = "keycloakusers.keycloak.org"; - private static NonNamespaceOperation> KEYCLOAK_USERS_CLIENT; - - // oc get packagemanifest rhsso-operator -n openshift-marketplace - private static final String OPERATOR_ID = IntersmashConfig.rhSsoOperatorPackageManifest(); - private static final String STATEFUL_SET_NAME = "keycloak"; - - public RhSsoOperatorProvisioner(@NonNull RhSsoOperatorApplication rhSsoOperatorApplication) { - super(rhSsoOperatorApplication, OPERATOR_ID); + public RhSsoOperatorProvisioner(RhSsoOperatorApplication application) { + super(application, RhSsoOperatorProvisioner.OPERATOR_ID); } - public static String getOperatorId() { - return OPERATOR_ID; + /** + * @return the underlying StatefulSet which provisions the cluster + */ + protected StatefulSet getStatefulSet() { + final String name = "keycloak"; + StatefulSet statefulSet = this.client().apps().statefulSets().withName(name).get(); + if (Objects.isNull(statefulSet)) { + throw new IllegalStateException(String.format( + "StatefulSet with name=\"%s\" not found", + name)); + } + return statefulSet; } + // ================================================================================================================= + // Related to generic provisioning behavior + // ================================================================================================================= @Override protected String getOperatorCatalogSource() { return IntersmashConfig.rhSsoOperatorCatalogSource(); @@ -102,41 +99,26 @@ protected String getOperatorChannel() { return IntersmashConfig.rhSsoOperatorChannel(); } - @Override - public void subscribe() { - if (Strings.isNullOrEmpty(IntersmashConfig.rhSsoImageURL())) { - super.subscribe(); - } else { - // RELATED_IMAGE_RHSSO_OPENJ9 and RELATED_IMAGE_RHSSO_OPENJDK, determine the final value for RELATED_IMAGE_RHSSO - subscribe( - INSTALLPLAN_APPROVAL_MANUAL, - Map.of( - "RELATED_IMAGE_RHSSO", IntersmashConfig.rhSsoImageURL(), - "PROFILE", "RHSSO")); - } - } - @Override public void deploy() { - ffCheck = FailFastUtils.getFailFastCheck(EventHelper.timeOfLastEventBMOrTestNamespaceOrEpoch(), - getApplication().getName()); + FailFastCheck ffCheck = () -> false; // Keycloak Operator codebase contains the name of the Keycloak image to deploy: user can override Keycloak image to // deploy using environment variables in Keycloak Operator Subscription subscribe(); // create custom resources - 
keycloaksClient().createOrReplace(getApplication().getKeycloak()); + keycloakClient().createOrReplace(getApplication().getKeycloak()); if (getApplication().getKeycloakRealms().size() > 0) { - getApplication().getKeycloakRealms().stream().forEach((i) -> keycloakRealmsClient().resource(i).create()); + getApplication().getKeycloakRealms().stream().forEach((i) -> keycloakRealmClient().resource(i).create()); } if (getApplication().getKeycloakClients().size() > 0) { - getApplication().getKeycloakClients().stream().forEach((i) -> keycloakClientsClient().resource(i).create()); + getApplication().getKeycloakClients().stream().forEach((i) -> keycloakClientClient().resource(i).create()); } if (getApplication().getKeycloakUsers().size() > 0) { - getApplication().getKeycloakUsers().stream().forEach((i) -> keycloakUsersClient().resource(i).create()); + getApplication().getKeycloakUsers().stream().forEach((i) -> keycloakUserClient().resource(i).create()); } if (getApplication().getKeycloakBackups().size() > 0) { - getApplication().getKeycloakBackups().stream().forEach((i) -> keycloakBackupsClient().resource(i).create()); + getApplication().getKeycloakBackups().stream().forEach((i) -> keycloakBackupClient().resource(i).create()); } // Wait for Keycloak (and PostgreSQL) to be ready @@ -146,9 +128,9 @@ public void deploy() { // check that route is up, only if there's a valid external URL available URL externalUrl = getURL(); if ((getApplication().getKeycloak().getSpec().getInstances() > 0) && (externalUrl != null)) { - WaitersUtil.routeIsUp(externalUrl.toExternalForm()) - .level(Level.DEBUG) - .waitFor(); + new SimpleWaiter( + () -> Https.getCode(getURL().toExternalForm()) != 503) + .reason("Wait until the route is ready to serve."); } } @@ -163,25 +145,34 @@ public void waitFor(Keycloak keycloak) { // 1. check externalDatabase if (keycloak.getSpec().getExternalDatabase() == null || !keycloak.getSpec().getExternalDatabase().getEnabled()) { // 2. wait for PostgreSQL to be ready (Service "keycloak-postgresql" is guaranteed to exist by documentation) - new SimpleWaiter(() -> OpenShiftProvisioner.openShift.getPods() + final Service postgreSqlService = this.client().services().withName("keycloak-postgresql").get(); + new SimpleWaiter(() -> getPods() .stream() .filter( - pod -> OpenShiftProvisioner.openShift.getService("keycloak-postgresql") != null + pod -> postgreSqlService != null && pod.getMetadata().getLabels().entrySet().containsAll( - OpenShiftProvisioner.openShift.getService("keycloak-postgresql").getSpec() - .getSelector().entrySet()) + postgreSqlService.getSpec().getSelector().entrySet()) && ResourceParsers.isPodReady(pod)) .count() > 0).level(Level.DEBUG).waitFor(); } // 4. 
wait for >= 1 pods with label controller-revision-hash=keycloak-d86bb6ddc - String controllerRevisionHash = getStatefulSet().getStatus().getUpdateRevision(); - OpenShiftWaiters.get(OpenShiftProvisioner.openShift, ffCheck) - .areExactlyNPodsReady(replicas, "controller-revision-hash", - controllerRevisionHash) - .waitFor(); + final String controllerRevisionHash = getStatefulSet().getStatus().getUpdateRevision(); + waitForExactNumberOfLabeledPodsToBeReady("controller-revision-hash", controllerRevisionHash, replicas); } } + private void waitForExactNumberOfLabeledPodsToBeReady(final String labelName, final String labelValue, int replicas) { + BooleanSupplier bs = () -> getLabeledPods(labelName, labelValue).size() == replicas; + new SimpleWaiter(bs, TimeUnit.MINUTES, 2, + "Waiting for pods with label \"" + labelName + "\"=" + labelValue + " to be ready") + .waitFor(); + } + + private List getLabeledPods(final String labelName, final String labelValue) { + return this.client().pods().inNamespace(this.client().getNamespace()).withLabel(labelName, labelValue).list() + .getItems(); + } + private void waitForKeycloakResourceReadiness() { new SimpleWaiter(() -> keycloak().get().getStatus().getReady()) .reason("Wait for keycloak resource to be ready").level(Level.DEBUG).waitFor(); @@ -194,7 +185,7 @@ private void waitForKeycloakResourceReadiness() { .reduce(Boolean::logicalAnd).get()) .reason("Wait for keycloakclients to be ready.").level(Level.DEBUG).waitFor(); if (getApplication().getKeycloakUsers().size() > 0) - new SimpleWaiter(() -> keycloakUsersClient().list().getItems().size() == getApplication().getKeycloakUsers().size()) + new SimpleWaiter(() -> keycloakUserClient().list().getItems().size() == getApplication().getKeycloakUsers().size()) .reason("Wait for keycloakusers to be ready.").level(Level.DEBUG).waitFor(); // no isReady() for users if (getApplication().getKeycloakBackups().size() > 0) new SimpleWaiter(() -> keycloakBackups().stream().map(realm -> realm.get().getStatus().getReady()) @@ -207,58 +198,54 @@ public void undeploy() { // delete custom resources keycloakBackups() .forEach(keycloakBackup -> keycloakBackup.withPropagationPolicy(DeletionPropagation.FOREGROUND).delete()); - new SimpleWaiter(() -> keycloakBackupsClient().list().getItems().size() == 0) + new SimpleWaiter(() -> keycloakBackupClient().list().getItems().size() == 0) .reason("Wait for all keycloakbackups instances to be deleted.").level(Level.DEBUG).waitFor(); keycloakUsers().forEach(keycloakUser -> keycloakUser.withPropagationPolicy(DeletionPropagation.FOREGROUND).delete()); - new SimpleWaiter(() -> keycloakUsersClient().list().getItems().size() == 0) + new SimpleWaiter(() -> keycloakUserClient().list().getItems().size() == 0) .reason("Wait for all keycloakusers instances to be deleted.").level(Level.DEBUG).waitFor(); keycloakClients() .forEach(keycloakClient -> keycloakClient.withPropagationPolicy(DeletionPropagation.FOREGROUND).delete()); keycloakUsers().forEach(keycloakUser -> keycloakUser.withPropagationPolicy(DeletionPropagation.FOREGROUND).delete()); - new SimpleWaiter(() -> keycloakClientsClient().list().getItems().size() == 0) + new SimpleWaiter(() -> keycloakClientClient().list().getItems().size() == 0) .reason("Wait for all keycloakclients instances to be deleted.").level(Level.DEBUG).waitFor(); keycloakRealms().forEach(keycloakRealm -> keycloakRealm.withPropagationPolicy(DeletionPropagation.FOREGROUND).delete()); - new SimpleWaiter(() -> keycloakRealmsClient().list().getItems().size() == 0) + new 
SimpleWaiter(() -> keycloakRealmClient().list().getItems().size() == 0) .reason("Wait for all keycloakrealms instances to be deleted.").level(Level.DEBUG).waitFor(); keycloak().withPropagationPolicy(DeletionPropagation.FOREGROUND).delete(); - new SimpleWaiter(() -> keycloaksClient().list().getItems().size() == 0) + new SimpleWaiter(() -> keycloakClient().list().getItems().size() == 0) .reason("Wait for all keycloakrealms instances to be deleted.").level(Level.DEBUG).waitFor(); // wait for 0 pods - OpenShiftWaiters.get(OpenShiftProvisioner.openShift, () -> false) - .areExactlyNPodsReady(0, "app", getApplication().getKeycloak().getKind().toLowerCase()).level(Level.DEBUG) - .waitFor(); + waitForExactNumberOfLabeledPodsToBeReady("app", getApplication().getKeycloak().getKind().toLowerCase(), 0); unsubscribe(); } @Override public void scale(int replicas, boolean wait) { - String controllerRevisionHash = getStatefulSet().getStatus().getUpdateRevision(); + FailFastCheck ffCheck = () -> false; + final String controllerRevisionHash = getStatefulSet().getStatus().getUpdateRevision(); Keycloak tmpKeycloak = keycloak().get(); - int originalReplicas = tmpKeycloak.getSpec().getInstances().intValue(); + final int originalReplicas = tmpKeycloak.getSpec().getInstances().intValue(); tmpKeycloak.getSpec().setInstances(Long.valueOf(replicas)); keycloak().replace(tmpKeycloak); if (wait) { - OpenShiftWaiters.get(OpenShiftProvisioner.openShift, ffCheck) - .areExactlyNPodsReady(replicas, "controller-revision-hash", controllerRevisionHash) - .level(Level.DEBUG) - .waitFor(); + waitForExactNumberOfLabeledPodsToBeReady("controller-revision-hash", controllerRevisionHash, replicas); } new SimpleWaiter(() -> keycloak().get().getStatus().getReady()) .reason("Wait for keycloak resource to be ready").level(Level.DEBUG).waitFor(); // check that route is up if (originalReplicas == 0 && replicas > 0) { - WaitersUtil.routeIsUp(getURL().toExternalForm()) - .level(Level.DEBUG) - .waitFor(); + new SimpleWaiter( + () -> Https.getCode(getURL().toExternalForm()) != 503) + .reason("Wait until the route is ready to serve."); } } @Override public List getPods() { - StatefulSet statefulSet = OpenShiftProvisioner.openShift.getStatefulSet(STATEFUL_SET_NAME); + StatefulSet statefulSet = getStatefulSet(); return Objects.nonNull(statefulSet) - ? OpenShiftProvisioner.openShift.getLabeledPods("controller-revision-hash", + ? 
getLabeledPods("controller-revision-hash", statefulSet.getStatus().getUpdateRevision()) : Lists.emptyList(); } @@ -274,26 +261,57 @@ public URL getURL() { } } + // ================================================================================================================= + // Client related + // ================================================================================================================= + // this is the packagemanifest for the operator; + // you can get it with command: + // oc get packagemanifest -o template --template='{{ .metadata.name }}' + public static String OPERATOR_ID = IntersmashConfig.rhSsoOperatorPackageManifest(); + + // this is the name of the CustomResourceDefinition(s) + // you can get it with command: + // oc get crd <crd-name> -o template --template='{{ .metadata.name }}' + private static final String KEYCLOAK_CRD_NAME = "keycloaks.keycloak.org"; + private static NonNamespaceOperation> KEYCLOAKS_CLIENT; + + private static final String KEYCLOAK_REALM_CRD_NAME = "keycloakrealms.keycloak.org"; + private static NonNamespaceOperation> KEYCLOAK_REALMS_CLIENT; + + private static final String KEYCLOAK_BACKUP_CRD_NAME = "keycloakbackups.keycloak.org"; + private static NonNamespaceOperation> KEYCLOAK_BACKUPS_CLIENT; + + private static final String KEYCLOAK_CLIENT_CRD_NAME = "keycloakclients.keycloak.org"; + private static NonNamespaceOperation> KEYCLOAK_CLIENTS_CLIENT; + + private static final String KEYCLOAK_USER_CRD_NAME = "keycloakusers.keycloak.org"; + private static NonNamespaceOperation> KEYCLOAK_USERS_CLIENT; + + /** + * Generic CRD client which is used by the client builders' default implementation to build the CRD clients + * + * @return A {@link NonNamespaceOperation} instance that represents a client for {@link CustomResourceDefinition} resources + */ + public abstract NonNamespaceOperation> customResourceDefinitionsClient(); + // keycloaks.keycloak.org + protected abstract HasMetadataOperationsImpl keycloakCustomResourcesClient( + CustomResourceDefinitionContext crdc); /** - * Get a client capable of working with {@link #KEYCLOAK_RESOURCE} custom resource. + * Get a client capable of working with {@link RhSsoOperatorProvisioner#KEYCLOAK_CRD_NAME} custom resource.
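+ * <p>A usage sketch (the resource name below is illustrative):</p>
+ * <pre>{@code
+ * // list all legacy Keycloak custom resources in the test namespace
+ * List<Keycloak> deployed = keycloakClient().list().getItems();
+ * // or address a single one by name
+ * Resource<Keycloak> byName = keycloakClient().withName("example-sso");
+ * }</pre>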
* - * @return client for operations with {@link #KEYCLOAK_RESOURCE} custom resource + * @return client for operations with {@link RhSsoOperatorProvisioner#KEYCLOAK_CRD_NAME} custom resource */ - public NonNamespaceOperation> keycloaksClient() { + public NonNamespaceOperation> keycloakClient() { if (KEYCLOAKS_CLIENT == null) { - CustomResourceDefinition crd = OpenShifts.admin().apiextensions().v1().customResourceDefinitions() - .withName(KEYCLOAK_RESOURCE).get(); - CustomResourceDefinitionContext crdc = CustomResourceDefinitionContext.fromCrd(crd); - if (!getCustomResourceDefinitions().contains(KEYCLOAK_RESOURCE)) { + CustomResourceDefinition crd = customResourceDefinitionsClient() + .withName(KEYCLOAK_CRD_NAME).get(); + if (crd == null) { throw new RuntimeException(String.format("[%s] custom resource is not provided by [%s] operator.", - KEYCLOAK_RESOURCE, OPERATOR_ID)); + KEYCLOAK_CRD_NAME, OPERATOR_ID)); } - MixedOperation> keycloaksClient = OpenShifts - .master() - .newHasMetadataOperation(crdc, Keycloak.class, KeycloakList.class); - KEYCLOAKS_CLIENT = keycloaksClient.inNamespace(OpenShiftConfig.namespace()); + KEYCLOAKS_CLIENT = keycloakCustomResourcesClient(CustomResourceDefinitionContext.fromCrd(crd)); } return KEYCLOAKS_CLIENT; } @@ -304,29 +322,27 @@ public NonNamespaceOperation> keycloa * @return A concrete {@link Resource} instance representing the {@link Keycloak} resource definition */ public Resource keycloak() { - return keycloaksClient().withName(getApplication().getKeycloak().getMetadata().getName()); + return keycloakClient().withName(getApplication().getKeycloak().getMetadata().getName()); } // keycloakrealms.keycloak.org + protected abstract HasMetadataOperationsImpl keycloakRealmCustomResourcesClient( + CustomResourceDefinitionContext crdc); /** - * Get a client capable of working with {@link #KEYCLOAK_REALM_RESOURCE} custom resource. + * Get a client capable of working with {@link #KEYCLOAK_REALM_CRD_NAME} custom resource. 
* - * @return client for operations with {@link #KEYCLOAK_REALM_RESOURCE} custom resource + * @return client for operations with {@link #KEYCLOAK_REALM_CRD_NAME} custom resource */ - public NonNamespaceOperation> keycloakRealmsClient() { + public NonNamespaceOperation> keycloakRealmClient() { if (KEYCLOAK_REALMS_CLIENT == null) { - CustomResourceDefinition crd = OpenShifts.admin().apiextensions().v1().customResourceDefinitions() - .withName(KEYCLOAK_REALM_RESOURCE).get(); - CustomResourceDefinitionContext crdc = CustomResourceDefinitionContext.fromCrd(crd); - if (!getCustomResourceDefinitions().contains(KEYCLOAK_REALM_RESOURCE)) { + CustomResourceDefinition crd = customResourceDefinitionsClient() + .withName(KEYCLOAK_REALM_CRD_NAME).get(); + if (crd == null) { throw new RuntimeException(String.format("[%s] custom resource is not provided by [%s] operator.", - KEYCLOAK_REALM_RESOURCE, OPERATOR_ID)); + KEYCLOAK_REALM_CRD_NAME, OPERATOR_ID)); } - MixedOperation> keycloakRealmsClient = OpenShifts - .master() - .newHasMetadataOperation(crdc, KeycloakRealm.class, KeycloakRealmList.class); - KEYCLOAK_REALMS_CLIENT = keycloakRealmsClient.inNamespace(OpenShiftConfig.namespace()); + KEYCLOAK_REALMS_CLIENT = keycloakRealmCustomResourcesClient(CustomResourceDefinitionContext.fromCrd(crd)); } return KEYCLOAK_REALMS_CLIENT; } @@ -339,7 +355,7 @@ public NonNamespaceOperation keycloakRealm(String name) { - return keycloakRealmsClient().withName(name); + return keycloakRealmClient().withName(name); } /** @@ -358,25 +374,23 @@ public List> keycloakRealms() { } // keycloakbackups.keycloak.org + protected abstract HasMetadataOperationsImpl keycloakBackupCustomResourcesClient( + CustomResourceDefinitionContext crdc); /** - * Get a client capable of working with {@link #KEYCLOAK_BACKUP_RESOURCE} custom resource. + * Get a client capable of working with {@link #KEYCLOAK_BACKUP_CRD_NAME} custom resource. 
* - * @return client for operations with {@link #KEYCLOAK_BACKUP_RESOURCE} custom resource + * @return client for operations with {@link #KEYCLOAK_BACKUP_CRD_NAME} custom resource */ - public NonNamespaceOperation> keycloakBackupsClient() { + public NonNamespaceOperation> keycloakBackupClient() { if (KEYCLOAK_BACKUPS_CLIENT == null) { - CustomResourceDefinition crd = OpenShifts.admin().apiextensions().v1().customResourceDefinitions() - .withName(KEYCLOAK_BACKUP_RESOURCE).get(); - CustomResourceDefinitionContext crdc = CustomResourceDefinitionContext.fromCrd(crd); - if (!getCustomResourceDefinitions().contains(KEYCLOAK_BACKUP_RESOURCE)) { + CustomResourceDefinition crd = customResourceDefinitionsClient() + .withName(KEYCLOAK_BACKUP_CRD_NAME).get(); + if (crd == null) { throw new RuntimeException(String.format("[%s] custom resource is not provided by [%s] operator.", - KEYCLOAK_BACKUP_RESOURCE, OPERATOR_ID)); + KEYCLOAK_BACKUP_CRD_NAME, OPERATOR_ID)); } - MixedOperation> keycloakBackupsClient = OpenShifts - .master() - .newHasMetadataOperation(crdc, KeycloakBackup.class, KeycloakBackupList.class); - KEYCLOAK_BACKUPS_CLIENT = keycloakBackupsClient.inNamespace(OpenShiftConfig.namespace()); + KEYCLOAK_BACKUPS_CLIENT = keycloakBackupCustomResourcesClient(CustomResourceDefinitionContext.fromCrd(crd)); } return KEYCLOAK_BACKUPS_CLIENT; } @@ -389,7 +403,7 @@ public NonNamespaceOperation keycloakBackup(String name) { - return keycloakBackupsClient().withName(name); + return keycloakBackupClient().withName(name); } /** @@ -408,25 +422,23 @@ public List> keycloakBackups() { } // keycloakclients.keycloak.org + protected abstract HasMetadataOperationsImpl keycloakClientCustomResourcesClient( + CustomResourceDefinitionContext crdc); /** - * Get a client capable of working with {@link #KEYCLOAK_CLIENT_RESOURCE} custom resource. + * Get a client capable of working with {@link #KEYCLOAK_CLIENT_CRD_NAME} custom resource. 
* - * @return client for operations with {@link #KEYCLOAK_CLIENT_RESOURCE} custom resource + * @return client for operations with {@link #KEYCLOAK_CLIENT_CRD_NAME} custom resource */ - public NonNamespaceOperation> keycloakClientsClient() { + public NonNamespaceOperation> keycloakClientClient() { if (KEYCLOAK_CLIENTS_CLIENT == null) { - CustomResourceDefinition crd = OpenShifts.admin().apiextensions().v1().customResourceDefinitions() - .withName(KEYCLOAK_CLIENT_RESOURCE).get(); - CustomResourceDefinitionContext crdc = CustomResourceDefinitionContext.fromCrd(crd); - if (!getCustomResourceDefinitions().contains(KEYCLOAK_CLIENT_RESOURCE)) { + CustomResourceDefinition crd = customResourceDefinitionsClient() + .withName(KEYCLOAK_CLIENT_CRD_NAME).get(); + if (crd == null) { throw new RuntimeException(String.format("[%s] custom resource is not provided by [%s] operator.", - KEYCLOAK_CLIENT_RESOURCE, OPERATOR_ID)); + KEYCLOAK_CLIENT_CRD_NAME, OPERATOR_ID)); } - MixedOperation> keycloakClientsClient = OpenShifts - .master() - .newHasMetadataOperation(crdc, KeycloakClient.class, KeycloakClientList.class); - KEYCLOAK_CLIENTS_CLIENT = keycloakClientsClient.inNamespace(OpenShiftConfig.namespace()); + KEYCLOAK_CLIENTS_CLIENT = keycloakClientCustomResourcesClient(CustomResourceDefinitionContext.fromCrd(crd)); } return KEYCLOAK_CLIENTS_CLIENT; } @@ -439,7 +451,7 @@ public NonNamespaceOperation keycloakClient(String name) { - return keycloakClientsClient().withName(name); + return keycloakClientClient().withName(name); } /** @@ -458,25 +470,23 @@ public List> keycloakClients() { } // keycloakusers.keycloak.org + protected abstract HasMetadataOperationsImpl keycloakUserCustomResourcesClient( + CustomResourceDefinitionContext crdc); /** - * Get a client capable of working with {@link #KEYCLOAK_USER_RESOURCE} custom resource. + * Get a client capable of working with {@link #KEYCLOAK_USER_CRD_NAME} custom resource. 
* - * @return client for operations with {@link #KEYCLOAK_USER_RESOURCE} custom resource + * @return client for operations with {@link #KEYCLOAK_USER_CRD_NAME} custom resource */ - public NonNamespaceOperation> keycloakUsersClient() { + public NonNamespaceOperation> keycloakUserClient() { if (KEYCLOAK_USERS_CLIENT == null) { - CustomResourceDefinition crd = OpenShifts.admin().apiextensions().v1().customResourceDefinitions() - .withName(KEYCLOAK_USER_RESOURCE).get(); - CustomResourceDefinitionContext crdc = CustomResourceDefinitionContext.fromCrd(crd); - if (!getCustomResourceDefinitions().contains(KEYCLOAK_USER_RESOURCE)) { + CustomResourceDefinition crd = customResourceDefinitionsClient() + .withName(KEYCLOAK_USER_CRD_NAME).get(); + if (crd == null) { throw new RuntimeException(String.format("[%s] custom resource is not provided by [%s] operator.", - KEYCLOAK_USER_RESOURCE, OPERATOR_ID)); + KEYCLOAK_USER_CRD_NAME, OPERATOR_ID)); } - MixedOperation> keycloakUsersClient = OpenShifts - .master() - .newHasMetadataOperation(crdc, KeycloakUser.class, KeycloakUserList.class); - KEYCLOAK_USERS_CLIENT = keycloakUsersClient.inNamespace(OpenShiftConfig.namespace()); + KEYCLOAK_USERS_CLIENT = keycloakUserCustomResourcesClient(CustomResourceDefinitionContext.fromCrd(crd)); } return KEYCLOAK_USERS_CLIENT; } @@ -489,7 +499,7 @@ public NonNamespaceOperation keycloakUser(String name) { - return keycloakUsersClient().withName(name); + return keycloakUserClient().withName(name); } /** @@ -506,17 +516,4 @@ public List> keycloakUsers() { .map(this::keycloakUser) .collect(Collectors.toList()); } - - /** - * @return the underlying StatefulSet which provisions the cluster - */ - private StatefulSet getStatefulSet() { - StatefulSet statefulSet = OpenShiftProvisioner.openShift.getStatefulSet(STATEFUL_SET_NAME); - if (Objects.isNull(statefulSet)) { - throw new IllegalStateException(String.format( - "Impossible to find StatefulSet with name=\"%s\"!", - STATEFUL_SET_NAME)); - } - return statefulSet; - } } diff --git a/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/WildflyOperatorProvisioner.java b/provisioners/src/main/java/org/jboss/intersmash/provision/operator/WildflyOperatorProvisioner.java similarity index 52% rename from provisioners/src/main/java/org/jboss/intersmash/provision/openshift/WildflyOperatorProvisioner.java rename to provisioners/src/main/java/org/jboss/intersmash/provision/operator/WildflyOperatorProvisioner.java index 3d031dba7..2b81df331 100644 --- a/provisioners/src/main/java/org/jboss/intersmash/provision/openshift/WildflyOperatorProvisioner.java +++ b/provisioners/src/main/java/org/jboss/intersmash/provision/operator/WildflyOperatorProvisioner.java @@ -13,85 +13,111 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.jboss.intersmash.provision.openshift; +package org.jboss.intersmash.provision.operator; import java.net.MalformedURLException; import java.net.URL; import java.util.List; +import java.util.concurrent.TimeUnit; +import java.util.function.BooleanSupplier; import java.util.stream.Collectors; import org.jboss.intersmash.IntersmashConfig; -import org.jboss.intersmash.application.openshift.WildflyOperatorApplication; -import org.jboss.intersmash.provision.openshift.operator.OperatorProvisioner; +import org.jboss.intersmash.application.operator.WildflyOperatorApplication; +import org.jboss.intersmash.provision.Provisioner; import org.jboss.intersmash.provision.openshift.operator.wildfly.WildFlyServerList; import org.slf4j.event.Level; import org.wildfly.v1alpha1.WildFlyServer; import org.wildfly.v1alpha1.wildflyserverstatus.Pods; -import cz.xtf.core.config.OpenShiftConfig; -import cz.xtf.core.event.helpers.EventHelper; -import cz.xtf.core.openshift.OpenShiftWaiters; -import cz.xtf.core.openshift.OpenShifts; +import cz.xtf.core.http.Https; import cz.xtf.core.waiting.SimpleWaiter; +import cz.xtf.core.waiting.failfast.FailFastCheck; import io.fabric8.kubernetes.api.model.DeletionPropagation; import io.fabric8.kubernetes.api.model.Pod; import io.fabric8.kubernetes.api.model.apiextensions.v1.CustomResourceDefinition; -import io.fabric8.kubernetes.client.dsl.MixedOperation; +import io.fabric8.kubernetes.api.model.apiextensions.v1.CustomResourceDefinitionList; +import io.fabric8.kubernetes.client.NamespacedKubernetesClient; import io.fabric8.kubernetes.client.dsl.NonNamespaceOperation; import io.fabric8.kubernetes.client.dsl.Resource; import io.fabric8.kubernetes.client.dsl.base.CustomResourceDefinitionContext; +import io.fabric8.kubernetes.client.dsl.internal.HasMetadataOperationsImpl; import lombok.NonNull; -public class WildflyOperatorProvisioner extends OperatorProvisioner { - private final static String WILDFLY_SERVER_RESOURCE = "wildflyservers.wildfly.org"; - private static NonNamespaceOperation> WILDFLY_SERVERS_CLIENT; - // oc get packagemanifest wildfly -n openshift-marketplace - private static final String OPERATOR_ID = IntersmashConfig.wildflyOperatorPackageManifest(); - - public WildflyOperatorProvisioner(@NonNull WildflyOperatorApplication wildflyOperatorApplication) { - super(wildflyOperatorApplication, OPERATOR_ID); - } +public abstract class WildflyOperatorProvisioner extends + OperatorProvisioner implements Provisioner { - public static String getOperatorId() { - return OPERATOR_ID; + public WildflyOperatorProvisioner(@NonNull WildflyOperatorApplication application) { + super(application, OPERATOR_ID); } /** - * Get a client capable of working with {@link #WILDFLY_SERVER_RESOURCE} custom resource. + * Return the ready pods managed by the operator. 
* - * @return client for operations with {@link #WILDFLY_SERVER_RESOURCE} custom resource + * @return pods which are registered by operator as active and are in ready state */ - public NonNamespaceOperation> wildflyServersClient() { - if (WILDFLY_SERVERS_CLIENT == null) { - CustomResourceDefinition crd = OpenShifts.admin().apiextensions().v1().customResourceDefinitions() - .withName(WILDFLY_SERVER_RESOURCE).get(); - CustomResourceDefinitionContext crdc = CustomResourceDefinitionContext.fromCrd(crd); - if (!getCustomResourceDefinitions().contains(WILDFLY_SERVER_RESOURCE)) { - throw new RuntimeException(String.format("[%s] custom resource is not provided by [%s] operator.", - WILDFLY_SERVER_RESOURCE, OPERATOR_ID)); - } - MixedOperation> wildflyServersClient = OpenShifts - .master().newHasMetadataOperation(crdc, WildFlyServer.class, WildFlyServerList.class); - WILDFLY_SERVERS_CLIENT = wildflyServersClient.inNamespace(OpenShiftConfig.namespace()); - } - return WILDFLY_SERVERS_CLIENT; + @Override + public List getPods() { + List pods = this.client().pods().inNamespace(this.client().getNamespace()).list().getItems(); + List activeOperatorPodNames = wildFlyServer().get().getStatus().getPods().stream() + .filter(podStatus -> podStatus.getState().equals(Pods.State.ACTIVE)) + .map(Pods::getName) + .collect(Collectors.toList()); + return pods.stream() + .filter(pod -> activeOperatorPodNames.contains(pod.getMetadata().getName())) + .filter(pod -> pod.getStatus().getContainerStatuses().size() > 0 + && pod.getStatus().getContainerStatuses().get(0).getReady()) + .collect(Collectors.toList()); + } - /** - * Get a reference to wildFlyServer object. Use get() to get the actual object, or null in case it does not - * exist on tested cluster. - * @return A concrete {@link Resource} instance representing the {@link WildFlyServer} resource definition - */ - public Resource wildFlyServer() { - return wildflyServersClient().withName(getApplication().getName()); + protected void waitForExactNumberOfLabeledPodsToBeReady(final String labelName, final String labelValue, int replicas) { + BooleanSupplier bs = () -> getLabeledPods(labelName, labelValue).size() == replicas; + new SimpleWaiter(bs, TimeUnit.MINUTES, 2, + "Waiting for pods with label \"" + labelName + "\"=" + labelValue + " to be ready") + .waitFor(); + } + + protected List getLabeledPods(final String labelName, final String labelValue) { + return getPods().stream() + .filter(p -> p.getMetadata().getLabels().get(labelName) != null + && p.getMetadata().getLabels().get(labelName).equals(labelValue)) + .collect(Collectors.toList()); + } + + // ================================================================================================================= + // Related to generic provisioning behavior + // ================================================================================================================= + @Override + protected String getOperatorCatalogSource() { + return IntersmashConfig.wildflyOperatorCatalogSource(); + } + + @Override + protected String getOperatorIndexImage() { + return IntersmashConfig.wildflyOperatorIndexImage(); + } + + @Override + protected String getOperatorChannel() { + return IntersmashConfig.wildflyOperatorChannel(); + } + + @Override + public URL getURL() { + String url = "http://" + wildFlyServer().get().getStatus().getHosts().get(0); + try { + return new URL(url); + } catch (MalformedURLException e) { + throw new RuntimeException(String.format("WILDFLY operator route \"%s\"is malformed.", url), e); + } } @Override 
public void deploy() { - ffCheck = FailFastUtils.getFailFastCheck(EventHelper.timeOfLastEventBMOrTestNamespaceOrEpoch(), - getApplication().getName()); + FailFastCheck ffCheck = () -> false; subscribe(); - wildflyServersClient().createOrReplace(getApplication().getWildflyServer()); + wildflyServerClient().createOrReplace(getApplication().getWildflyServer()); int expected = getApplication().getWildflyServer().getSpec().getReplicas(); new SimpleWaiter(() -> wildFlyServer().get().getStatus() != null) .failFast(ffCheck) @@ -104,27 +130,18 @@ public void deploy() { .level(Level.DEBUG) .waitFor(); if (getApplication().getWildflyServer().getSpec().getReplicas() > 0) { - WaitersUtil.routeIsUp(getURL().toExternalForm()) - .level(Level.DEBUG) - .waitFor(); + new SimpleWaiter( + () -> Https.getCode(getURL().toExternalForm()) != 503) + .reason("Wait until the route is ready to serve."); } } - @Override - public void undeploy() { - wildFlyServer().withPropagationPolicy(DeletionPropagation.FOREGROUND).delete(); - OpenShiftWaiters.get(OpenShiftProvisioner.openShift, ffCheck) - .areExactlyNPodsReady(0, "app.kubernetes.io/name", getApplication().getName()) - .level(Level.DEBUG).waitFor(); - unsubscribe(); - } - @Override public void scale(int replicas, boolean wait) { scale(wildFlyServer(), replicas, wait); } - private void scale(Resource wildFlyServer, int replicas, boolean wait) { + protected void scale(Resource wildFlyServer, int replicas, boolean wait) { WildFlyServer tmpServer = wildFlyServer.get(); int originalReplicas = tmpServer.getSpec().getReplicas(); tmpServer.getSpec().setReplicas(replicas); @@ -134,53 +151,69 @@ private void scale(Resource wildFlyServer, int replicas, boolean .level(Level.DEBUG).waitFor(); } if (originalReplicas == 0 && replicas > 0) { - WaitersUtil.routeIsUp(getURL().toExternalForm()) - .level(Level.DEBUG) - .waitFor(); + new SimpleWaiter( + () -> Https.getCode(getURL().toExternalForm()) != 503) + .reason("Wait until the route is ready to serve."); } } - /** - * Return the ready pods managed by the operator. 
- * - * @return pods which are registered by operator as active and are in ready state - */ @Override - public List getPods() { - List pods = OpenShiftProvisioner.openShift.getPods(); - List activeOperatorPodNames = wildFlyServer().get().getStatus().getPods().stream() - .filter(podStatus -> podStatus.getState().equals(Pods.State.ACTIVE)) - .map(Pods::getName) - .collect(Collectors.toList()); - return pods.stream() - .filter(pod -> activeOperatorPodNames.contains(pod.getMetadata().getName())) - .filter(pod -> pod.getStatus().getContainerStatuses().size() > 0 - && pod.getStatus().getContainerStatuses().get(0).getReady()) - .collect(Collectors.toList()); + public void undeploy() { + FailFastCheck ffCheck = () -> false; + wildFlyServer().withPropagationPolicy(DeletionPropagation.FOREGROUND).delete(); + waitForExactNumberOfLabeledPodsToBeReady("app.kubernetes.io/name", getApplication().getName(), 0); + unsubscribe(); } - @Override - protected String getOperatorCatalogSource() { - return IntersmashConfig.wildflyOperatorCatalogSource(); - } + // ================================================================================================================= + // Client related + // ================================================================================================================= + // this is the packagemanifest for the WildFly operator; + // you can get it with command: + // oc get packagemanifest wildfly -o template --template='{{ .metadata.name }}' + public static String OPERATOR_ID = IntersmashConfig.wildflyOperatorPackageManifest(); + + // this is the name of the Wildfly CustomResourceDefinition + // you can get it with command: + // oc get crd wildflyservers.wildfly.org -o template --template='{{ .metadata.name }}' + protected static String WILDFLY_SERVER_CRD_NAME = "wildflyservers.wildfly.org"; + private static NonNamespaceOperation> WILDFLY_SERVERS_CLIENT; - @Override - protected String getOperatorIndexImage() { - return IntersmashConfig.wildflyOperatorIndexImage(); - } + /** + * Generic CRD client which is used by the client builders' default implementation to build the CRD clients + * + * @return A {@link NonNamespaceOperation} instance that represents a client for {@link CustomResourceDefinition} resources + */ + protected abstract NonNamespaceOperation> customResourceDefinitionsClient(); - @Override - protected String getOperatorChannel() { - return IntersmashConfig.wildflyOperatorChannel(); - } + // wildflyservers.wildfly.org + protected abstract HasMetadataOperationsImpl wildflyCustomResourcesClient( + CustomResourceDefinitionContext crdc); - @Override - public URL getURL() { - String url = "http://" + wildFlyServer().get().getStatus().getHosts().get(0); - try { - return new URL(url); - } catch (MalformedURLException e) { - throw new RuntimeException(String.format("WILDFLY operator route \"%s\"is malformed.", url), e); + /** + * Get a client capable of working with {@link #WILDFLY_SERVER_CRD_NAME} custom resource.
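+ * <p>A concrete subclass supplies {@link #customResourceDefinitionsClient()} and the typed CRD client
+ * builders; a rough OpenShift-flavoured sketch, mirroring the code this refactoring replaces (the
+ * {@code OpenShifts.master()} helper is only illustrative here):</p>
+ * <pre>{@code
+ * protected HasMetadataOperationsImpl wildflyCustomResourcesClient(CustomResourceDefinitionContext crdc) {
+ *     // build a typed client for the WildFlyServer CRD, scoped to the configured test namespace
+ *     return OpenShifts.master().newHasMetadataOperation(crdc, WildFlyServer.class, WildFlyServerList.class);
+ * }
+ * }</pre>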
+ * + * @return client for operations with {@link #WILDFLY_SERVER_CRD_NAME} custom resource + */ + public NonNamespaceOperation> wildflyServerClient() { + if (WILDFLY_SERVERS_CLIENT == null) { + CustomResourceDefinition crd = customResourceDefinitionsClient() + .withName(WILDFLY_SERVER_CRD_NAME).get(); + if (crd == null) { + throw new RuntimeException(String.format("[%s] custom resource is not provided by [%s] operator.", + WILDFLY_SERVER_CRD_NAME, OPERATOR_ID)); + } + WILDFLY_SERVERS_CLIENT = wildflyCustomResourcesClient(CustomResourceDefinitionContext.fromCrd(crd)); } + return WILDFLY_SERVERS_CLIENT; + } + + /** + * Get a reference to wildFlyServer object. Use get() to get the actual object, or null in case it does not + * exist on tested cluster. + * @return A concrete {@link Resource} instance representing the {@link WildFlyServer} resource definition + */ + public Resource wildFlyServer() { + return wildflyServerClient().withName(getApplication().getName()); } } diff --git a/provisioners/src/main/java/org/jboss/intersmash/util/tls/CertificatesUtils.java b/provisioners/src/main/java/org/jboss/intersmash/util/tls/CertificatesUtils.java index 3eb2779e6..94408a4a2 100644 --- a/provisioners/src/main/java/org/jboss/intersmash/util/tls/CertificatesUtils.java +++ b/provisioners/src/main/java/org/jboss/intersmash/util/tls/CertificatesUtils.java @@ -27,10 +27,9 @@ import java.util.Map; import java.util.Objects; -import cz.xtf.core.config.OpenShiftConfig; -import cz.xtf.core.openshift.OpenShifts; import io.fabric8.kubernetes.api.model.Secret; import io.fabric8.kubernetes.api.model.SecretBuilder; +import io.fabric8.kubernetes.client.NamespacedKubernetesClient; import lombok.Getter; import lombok.extern.slf4j.Slf4j; @@ -64,13 +63,14 @@ public static class CertificateAndKey { * @param tlsSecretName: name of the secret to be created in OpenShift containing key and certificate * @return wrapper object {@link CertificateAndKey} containing details about the newly created key, certificate and secret */ - public static CertificateAndKey generateSelfSignedCertificateAndKey(String hostname, String tlsSecretName) { + public static CertificateAndKey generateSelfSignedCertificateAndKey(String hostname, String tlsSecretName, + final NamespacedKubernetesClient client, final String namespace) { CertificateAndKey certificateAndKey = new CertificateAndKey(); String certificate = hostname + "-certificate.pem"; String key = hostname + "-key.pem"; String truststoreFormat = "jks"; - String truststorePassword = "1234PIPPOBAUDO"; + String truststorePassword = "certificateSecret-1234"; String truststore = hostname + "-truststore." 
+ truststoreFormat; certificateAndKey.key = Paths.get(caDir.toFile().getAbsolutePath(), key); @@ -83,7 +83,7 @@ public static CertificateAndKey generateSelfSignedCertificateAndKey(String hostn caDir.resolve(key).toFile().exists() && caDir.resolve(truststore).toFile().exists()) { certificateAndKey.existing = true; - Secret tlsSecret = OpenShifts.master().getSecret(tlsSecretName); + Secret tlsSecret = client.secrets().withName(tlsSecretName).get(); if (Objects.isNull(tlsSecret)) { throw new RuntimeException(MessageFormat.format("Secret {} doesn't exist!", tlsSecretName)); } @@ -101,7 +101,8 @@ public static CertificateAndKey generateSelfSignedCertificateAndKey(String hostn // create secret try { - Secret tlsSecret = createTlsSecret(tlsSecretName, certificateAndKey.key, certificateAndKey.certificate); + Secret tlsSecret = createTlsSecret(tlsSecretName, certificateAndKey.key, certificateAndKey.certificate, client, + namespace); if (Objects.isNull(tlsSecret)) { throw new RuntimeException(MessageFormat.format("Secret {} doesn't exist!", tlsSecretName)); } @@ -142,7 +143,8 @@ private static void processCall(Path cwd, String... args) { } } - public static Secret createTlsSecret(String secretName, Path key, Path certificate) throws IOException { + public static Secret createTlsSecret(final String secretName, final Path key, final Path certificate, + final NamespacedKubernetesClient client, final String namespace) throws IOException { Map data = new HashMap<>(); String keyDerData = Files.readString(key); String crtDerData = Files.readString(certificate); @@ -154,6 +156,6 @@ public static Secret createTlsSecret(String secretName, Path key, Path certifica .withImmutable(false) .addToData(data) .build(); - return OpenShifts.master().secrets().inNamespace(OpenShiftConfig.namespace()).createOrReplace(secret); + return client.secrets().inNamespace(namespace).createOrReplace(secret); } } diff --git a/provisioners/src/main/resources/META-INF/services/org.jboss.intersmash.provision.ProvisionerFactory b/provisioners/src/main/resources/META-INF/services/org.jboss.intersmash.provision.ProvisionerFactory index 95b263325..ab8f60bec 100644 --- a/provisioners/src/main/resources/META-INF/services/org.jboss.intersmash.provision.ProvisionerFactory +++ b/provisioners/src/main/resources/META-INF/services/org.jboss.intersmash.provision.ProvisionerFactory @@ -1,19 +1,21 @@ -org.jboss.intersmash.provision.openshift.ActiveMQOperatorProvisionerFactory -org.jboss.intersmash.provision.openshift.KafkaOperatorProvisionerFactory +org.jboss.intersmash.provision.openshift.ActiveMQOpenShiftOperatorProvisionerFactory +org.jboss.intersmash.provision.openshift.KafkaOpenShiftOperatorProvisionerFactory org.jboss.intersmash.provision.openshift.WildflyBootableJarImageOpenShiftProvisionerFactory org.jboss.intersmash.provision.openshift.WildflyImageOpenShiftProvisionerFactory -org.jboss.intersmash.provision.openshift.WildflyOperatorProvisionerFactory +org.jboss.intersmash.provision.openshift.WildflyOpenShiftOperatorProvisionerFactory org.jboss.intersmash.provision.helm.wildfly.WildflyHelmChartOpenShiftProvisionerFactory org.jboss.intersmash.provision.openshift.MysqlImageOpenShiftProvisionerFactory org.jboss.intersmash.provision.openshift.PostgreSQLImageOpenShiftProvisionerFactory -org.jboss.intersmash.provision.openshift.InfinispanOperatorProvisionerFactory +org.jboss.intersmash.provision.openshift.InfinispanOpenShiftOperatorProvisionerFactory org.jboss.intersmash.provision.openshift.KeycloakOperatorProvisionerFactory 
-org.jboss.intersmash.provision.openshift.RhSsoOperatorProvisionerFactory +org.jboss.intersmash.provision.openshift.RhSsoOpenShiftOperatorProvisionerFactory org.jboss.intersmash.provision.openshift.RhSsoTemplateOpenShiftProvisionerFactory org.jboss.intersmash.provision.openshift.PostgreSQLTemplateOpenShiftProvisionerFactory org.jboss.intersmash.provision.openshift.Eap7ImageOpenShiftProvisionerFactory org.jboss.intersmash.provision.openshift.Eap7LegacyS2iBuildTemplateProvisionerFactory org.jboss.intersmash.provision.openshift.Eap7TemplateOpenShiftProvisionerFactory -org.jboss.intersmash.provision.openshift.HyperfoilOperatorProvisionerFactory +org.jboss.intersmash.provision.openshift.HyperfoilOpenShiftOperatorProvisionerFactory org.jboss.intersmash.provision.openshift.auto.OpenShiftAutoProvisionerFactory +org.jboss.intersmash.provision.k8s.HyperfoilKubernetesOperatorProvisionerFactory + diff --git a/provisioners/src/test/java/org/jboss/intersmash/provision/ProvisionerManagerTestCase.java b/provisioners/src/test/java/org/jboss/intersmash/provision/ProvisionerManagerTestCase.java index 5d384f95e..e281ffb0e 100644 --- a/provisioners/src/test/java/org/jboss/intersmash/provision/ProvisionerManagerTestCase.java +++ b/provisioners/src/test/java/org/jboss/intersmash/provision/ProvisionerManagerTestCase.java @@ -22,22 +22,22 @@ import org.jboss.intersmash.application.Application; import org.jboss.intersmash.provision.helm.wildfly.WildflyHelmChartOpenShiftProvisioner; -import org.jboss.intersmash.provision.openshift.ActiveMQOperatorProvisioner; +import org.jboss.intersmash.provision.openshift.ActiveMQOpenShiftOperatorProvisioner; import org.jboss.intersmash.provision.openshift.Eap7ImageOpenShiftProvisioner; import org.jboss.intersmash.provision.openshift.Eap7LegacyS2iBuildTemplateProvisioner; import org.jboss.intersmash.provision.openshift.Eap7TemplateOpenShiftProvisioner; -import org.jboss.intersmash.provision.openshift.HyperfoilOperatorProvisioner; -import org.jboss.intersmash.provision.openshift.InfinispanOperatorProvisioner; -import org.jboss.intersmash.provision.openshift.KafkaOperatorProvisioner; -import org.jboss.intersmash.provision.openshift.KeycloakOperatorProvisioner; +import org.jboss.intersmash.provision.openshift.HyperfoilOpenShiftOperatorProvisioner; +import org.jboss.intersmash.provision.openshift.InfinispanOpenShiftOperatorProvisioner; +import org.jboss.intersmash.provision.openshift.KafkaOpenShiftOperatorProvisioner; +import org.jboss.intersmash.provision.openshift.KeycloakOpenShiftOperatorProvisioner; import org.jboss.intersmash.provision.openshift.MysqlImageOpenShiftProvisioner; import org.jboss.intersmash.provision.openshift.PostgreSQLImageOpenShiftProvisioner; import org.jboss.intersmash.provision.openshift.PostgreSQLTemplateOpenShiftProvisioner; -import org.jboss.intersmash.provision.openshift.RhSsoOperatorProvisioner; +import org.jboss.intersmash.provision.openshift.RhSsoOpenShiftOperatorProvisioner; import org.jboss.intersmash.provision.openshift.RhSsoTemplateOpenShiftProvisioner; import org.jboss.intersmash.provision.openshift.WildflyBootableJarImageOpenShiftProvisioner; import org.jboss.intersmash.provision.openshift.WildflyImageOpenShiftProvisioner; -import org.jboss.intersmash.provision.openshift.WildflyOperatorProvisioner; +import org.jboss.intersmash.provision.openshift.WildflyOpenShiftOperatorProvisioner; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; @@ -47,8 +47,8 @@ public class 
ProvisionerManagerTestCase { enum SupportedApplication { ActiveMqOperatorApplication( - getApplicationMock(org.jboss.intersmash.application.openshift.ActiveMQOperatorApplication.class), - ActiveMQOperatorProvisioner.class), + getApplicationMock(org.jboss.intersmash.application.operator.ActiveMQOperatorApplication.class), + ActiveMQOpenShiftOperatorProvisioner.class), BootableJarOpenShiftApplication( getApplicationMock(org.jboss.intersmash.application.openshift.BootableJarOpenShiftApplication.class), WildflyBootableJarImageOpenShiftProvisioner.class), @@ -62,16 +62,16 @@ enum SupportedApplication { getApplicationMock(org.jboss.intersmash.application.openshift.Eap7TemplateOpenShiftApplication.class), Eap7TemplateOpenShiftProvisioner.class), InfinispanOperatorApplication( - getApplicationMock(org.jboss.intersmash.application.openshift.InfinispanOperatorApplication.class), - InfinispanOperatorProvisioner.class), + getApplicationMock(org.jboss.intersmash.application.operator.InfinispanOperatorApplication.class), + InfinispanOpenShiftOperatorProvisioner.class), HyperfoilOperatorApplication( - getApplicationMock(org.jboss.intersmash.application.openshift.HyperfoilOperatorApplication.class), - HyperfoilOperatorProvisioner.class), - KafkaOperatorApplication(getApplicationMock(org.jboss.intersmash.application.openshift.KafkaOperatorApplication.class), - KafkaOperatorProvisioner.class), + getApplicationMock(org.jboss.intersmash.application.operator.HyperfoilOperatorApplication.class), + HyperfoilOpenShiftOperatorProvisioner.class), + KafkaOperatorApplication(getApplicationMock(org.jboss.intersmash.application.operator.KafkaOperatorApplication.class), + KafkaOpenShiftOperatorProvisioner.class), KeycloakOperatorApplication( - getApplicationMock(org.jboss.intersmash.application.openshift.KeycloakOperatorApplication.class), - KeycloakOperatorProvisioner.class), + getApplicationMock(org.jboss.intersmash.application.operator.KeycloakOperatorApplication.class), + KeycloakOpenShiftOperatorProvisioner.class), MysqlImageOpenShiftApplication( getApplicationMock(org.jboss.intersmash.application.openshift.MysqlImageOpenShiftApplication.class), MysqlImageOpenShiftProvisioner.class), @@ -86,8 +86,8 @@ enum SupportedApplication { .thenReturn( org.jboss.intersmash.application.openshift.template.PostgreSQLTemplate.POSTGRESQL_PERSISTENT)), PostgreSQLTemplateOpenShiftProvisioner.class), - RhSsoOperatorApplication(getApplicationMock(org.jboss.intersmash.application.openshift.RhSsoOperatorApplication.class), - RhSsoOperatorProvisioner.class), + RhSsoOperatorApplication(getApplicationMock(org.jboss.intersmash.application.operator.RhSsoOperatorApplication.class), + RhSsoOpenShiftOperatorProvisioner.class), RhSsoTemplateOpenShiftApplication( getApplicationMock(org.jboss.intersmash.application.openshift.RhSsoTemplateOpenShiftApplication.class, (application) -> when( @@ -99,8 +99,8 @@ enum SupportedApplication { getApplicationMock(org.jboss.intersmash.application.openshift.WildflyImageOpenShiftApplication.class), WildflyImageOpenShiftProvisioner.class), WildflyOperatorApplication( - getApplicationMock(org.jboss.intersmash.application.openshift.WildflyOperatorApplication.class), - WildflyOperatorProvisioner.class), + getApplicationMock(org.jboss.intersmash.application.operator.WildflyOperatorApplication.class), + WildflyOpenShiftOperatorProvisioner.class), WildflyHelmChartOpenShiftApplication( getApplicationMock(org.jboss.intersmash.application.openshift.helm.WildflyHelmChartOpenShiftApplication.class), 
                WildflyHelmChartOpenShiftProvisioner.class);
diff --git a/provisioners/src/test/java/org/jboss/intersmash/provision/openshift/operator/resources/OpenShiftResourceTestCase.java b/provisioners/src/test/java/org/jboss/intersmash/provision/olm/SerializationCapableResourceTestCase.java
similarity index 77%
rename from provisioners/src/test/java/org/jboss/intersmash/provision/openshift/operator/resources/OpenShiftResourceTestCase.java
rename to provisioners/src/test/java/org/jboss/intersmash/provision/olm/SerializationCapableResourceTestCase.java
index 4d2a15723..54fb51fb3 100644
--- a/provisioners/src/test/java/org/jboss/intersmash/provision/openshift/operator/resources/OpenShiftResourceTestCase.java
+++ b/provisioners/src/test/java/org/jboss/intersmash/provision/olm/SerializationCapableResourceTestCase.java
@@ -13,24 +13,24 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
-package org.jboss.intersmash.provision.openshift.operator.resources;
+package org.jboss.intersmash.provision.olm;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.util.stream.Stream;
-import org.jboss.intersmash.provision.openshift.operator.OperatorProvisioner;
+import org.jboss.intersmash.provision.operator.OperatorProvisioner;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
/**
- * Verify the functionality provided by {@link OpenShiftResource} interface.
+ * Verify the functionality provided by {@link SerializationCapableResource} interface.
 */
-public class OpenShiftResourceTestCase {
+public class SerializationCapableResourceTestCase {
-   private static Stream resourceProvider() {
+   private static Stream resourceProvider() {
        return Stream.of(
                new OperatorGroup("my-namepace"),
                new Subscription(
@@ -55,13 +55,13 @@ private static Stream resourceProvider() {
    @ParameterizedTest(name = "{displayName}#class({0})")
    @MethodSource("resourceProvider")
-   public void writeReadEqualsTest(OpenShiftResource resource) throws IOException, NoSuchMethodException,
+   public void writeReadEqualsTest(SerializationCapableResource resource) throws IOException, NoSuchMethodException,
            InvocationTargetException, InstantiationException, IllegalAccessException {
        // write test
        File yaml = resource.save();
        // read test
-       OpenShiftResource loaded = resource.getClass().getDeclaredConstructor().newInstance();
+       SerializationCapableResource loaded = resource.getClass().getDeclaredConstructor().newInstance();
        loaded.load(yaml);
        //
        Assertions.assertEquals(resource, loaded,
diff --git a/provisioners/src/test/java/org/jboss/intersmash/provision/openshift/operator/wildfly/WildFlyServersTestCase.java b/provisioners/src/test/java/org/jboss/intersmash/provision/olm/wildfly/WildFlyServersTestCase.java
similarity index 81%
rename from provisioners/src/test/java/org/jboss/intersmash/provision/openshift/operator/wildfly/WildFlyServersTestCase.java
rename to provisioners/src/test/java/org/jboss/intersmash/provision/olm/wildfly/WildFlyServersTestCase.java
index 44e0b34e8..2ebb6b4e6 100644
--- a/provisioners/src/test/java/org/jboss/intersmash/provision/openshift/operator/wildfly/WildFlyServersTestCase.java
+++ b/provisioners/src/test/java/org/jboss/intersmash/provision/olm/wildfly/WildFlyServersTestCase.java
@@ -13,13 +13,14 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
-package org.jboss.intersmash.provision.openshift.operator.wildfly;
+package org.jboss.intersmash.provision.olm.wildfly;
import java.io.File;
import java.io.IOException;
import org.jboss.intersmash.IntersmashConfig;
-import org.jboss.intersmash.provision.openshift.operator.resources.OpenShiftResource;
+import org.jboss.intersmash.provision.olm.SerializationCapableResource;
+import org.jboss.intersmash.provision.openshift.operator.wildfly.WildFlyServerBuilder;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.wildfly.v1alpha1.WildFlyServer;
@@ -40,10 +41,10 @@ public void writeReadEqualsTest() throws IOException {
                .build();
        // write test
-       File yaml = OpenShiftResource.save(wildFlyServer);
+       File yaml = SerializationCapableResource.save(wildFlyServer);
        // read test
        WildFlyServer testServer = new WildFlyServer();
-       OpenShiftResource.load(yaml, WildFlyServer.class, testServer);
+       SerializationCapableResource.load(yaml, WildFlyServer.class, testServer);
        //
        Assertions.assertEquals(wildFlyServer, testServer,
                "OpenShift resource (WildflyServer) does not equal after serialization into yaml file and deserialization back to an object.");
diff --git a/testsuite/integration-tests/pom.xml b/testsuite/integration-tests/pom.xml
index 0a3eb776a..9bd74bf0c 100644
--- a/testsuite/integration-tests/pom.xml
+++ b/testsuite/integration-tests/pom.xml
@@ -31,6 +31,10 @@
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
        </dependency>
+       <dependency>
+           <groupId>org.jboss.intersmash</groupId>
+           <artifactId>intersmash-kubernetes-client</artifactId>
+       </dependency>
        <dependency>
            <groupId>org.jboss.intersmash</groupId>
            <artifactId>intersmash-core</artifactId>
diff --git a/testsuite/integration-tests/src/main/java/org/jboss/intersmash/testsuite/junit5/categories/KubernetesTest.java b/testsuite/integration-tests/src/main/java/org/jboss/intersmash/testsuite/junit5/categories/KubernetesTest.java
new file mode 100644
index 000000000..3064fe787
--- /dev/null
+++ b/testsuite/integration-tests/src/main/java/org/jboss/intersmash/testsuite/junit5/categories/KubernetesTest.java
@@ -0,0 +1,17 @@
+package org.jboss.intersmash.testsuite.junit5.categories;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+import org.junit.jupiter.api.Tag;
+
+/**
+ * Mark test that runs against Kubernetes.
+ * Used per class.
+ */
+@Tag("ts.k8s")
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ java.lang.annotation.ElementType.TYPE })
+public @interface KubernetesTest {
+}
diff --git a/testsuite/integration-tests/src/main/java/org/jboss/intersmash/testsuite/junit5/categories/OpenShiftTest.java b/testsuite/integration-tests/src/main/java/org/jboss/intersmash/testsuite/junit5/categories/OpenShiftTest.java
new file mode 100644
index 000000000..95afd9a65
--- /dev/null
+++ b/testsuite/integration-tests/src/main/java/org/jboss/intersmash/testsuite/junit5/categories/OpenShiftTest.java
@@ -0,0 +1,17 @@
+package org.jboss.intersmash.testsuite.junit5.categories;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+import org.junit.jupiter.api.Tag;
+
+/**
+ * Mark test that runs against OpenShift.
+ * Used per class.
+ */
+@Tag("ts.openshift")
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ java.lang.annotation.ElementType.TYPE })
+public @interface OpenShiftTest {
+}
diff --git a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/k8s/NamespaceCreationCapable.java b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/k8s/NamespaceCreationCapable.java
new file mode 100644
index 000000000..15b180c4c
--- /dev/null
+++ b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/k8s/NamespaceCreationCapable.java
@@ -0,0 +1,8 @@
+package org.jboss.intersmash.testsuite.k8s;
+
+import org.jboss.intersmash.k8s.junit5.KubernetesNamespaceCreator;
+import org.junit.jupiter.api.extension.ExtendWith;
+
+@ExtendWith(KubernetesNamespaceCreator.class)
+public interface NamespaceCreationCapable {
+}
diff --git a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/k8s/client/binary/KubernetesClientBinaryTest.java b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/k8s/client/binary/KubernetesClientBinaryTest.java
new file mode 100644
index 000000000..d4e06b5c3
--- /dev/null
+++ b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/k8s/client/binary/KubernetesClientBinaryTest.java
@@ -0,0 +1,23 @@
+package org.jboss.intersmash.testsuite.k8s.client.binary;
+
+import org.jboss.intersmash.k8s.client.Kuberneteses;
+import org.jboss.intersmash.k8s.client.binary.KubernetesClientBinary;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
+
+public class KubernetesClientBinaryTest {
+
+   private static final KubernetesClientBinary ADMIN_BINARY = Kuberneteses.adminBinary();
+
+   private static final KubernetesClientBinary MASTER_BINARY = Kuberneteses.masterBinary();
+
+   @Test
+   void getClusterInfoTest() {
+       String actual = ADMIN_BINARY.execute("cluster-info");
+       Assertions.assertTrue(actual.contains("Kubernetes control plane"));
+       Assertions.assertTrue(actual.contains("is running at"));
+       actual = MASTER_BINARY.execute("cluster-info");
+       Assertions.assertTrue(actual.contains("Kubernetes control plane"));
+       Assertions.assertTrue(actual.contains("is running at"));
+   }
+}
diff --git a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/openshift/ProjectCreationCapable.java b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/openshift/ProjectCreationCapable.java
new file mode 100644
index 000000000..a92b49b27
--- /dev/null
+++ b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/openshift/ProjectCreationCapable.java
@@ -0,0 +1,9 @@
+package org.jboss.intersmash.testsuite.openshift;
+
+import org.junit.jupiter.api.extension.ExtendWith;
+
+import cz.xtf.junit5.listeners.ProjectCreator;
+
+@ExtendWith(ProjectCreator.class)
+public interface ProjectCreationCapable {
+}
diff --git a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/k8s/HyperfoilKubernetesOperatorProvisionerTest.java b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/k8s/HyperfoilKubernetesOperatorProvisionerTest.java
new file mode 100644
index 000000000..102dbd8db
--- /dev/null
+++ b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/k8s/HyperfoilKubernetesOperatorProvisionerTest.java
@@ -0,0 +1,207 @@
+/**
+ * Copyright (C) 2023 Red Hat, Inc.
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.jboss.intersmash.testsuite.provision.k8s; + +import java.io.File; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.Arrays; + +import org.jboss.intersmash.application.operator.HyperfoilOperatorApplication; +import org.jboss.intersmash.junit5.IntersmashExtension; +import org.jboss.intersmash.k8s.KubernetesConfig; +import org.jboss.intersmash.k8s.client.Kuberneteses; +import org.jboss.intersmash.provision.k8s.HyperfoilKubernetesOperatorProvisioner; +import org.jboss.intersmash.provision.olm.OperatorGroup; +import org.jboss.intersmash.provision.openshift.operator.hyperfoil.client.runschema.RunStatisticsWrapper; +import org.jboss.intersmash.provision.openshift.operator.hyperfoil.client.v05.HyperfoilApi; +import org.jboss.intersmash.provision.openshift.operator.hyperfoil.client.v05.invoker.ApiClient; +import org.jboss.intersmash.provision.openshift.operator.hyperfoil.client.v05.invoker.ApiException; +import org.jboss.intersmash.provision.openshift.operator.hyperfoil.client.v05.invoker.Configuration; +import org.jboss.intersmash.provision.openshift.operator.hyperfoil.client.v05.model.Run; +import org.jboss.intersmash.provision.operator.HyperfoilOperatorProvisioner; +import org.jboss.intersmash.testsuite.junit5.categories.KubernetesTest; +import org.jboss.intersmash.testsuite.k8s.NamespaceCreationCapable; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.MethodOrderer; +import org.junit.jupiter.api.Order; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestMethodOrder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import io.hyperfoil.v1alpha2.Hyperfoil; +import io.hyperfoil.v1alpha2.HyperfoilBuilder; +import io.hyperfoil.v1alpha2.HyperfoilSpec; +import io.hyperfoil.v1alpha2.hyperfoilspec.Route; + +@TestMethodOrder(MethodOrderer.OrderAnnotation.class) +@KubernetesTest +public class HyperfoilKubernetesOperatorProvisionerTest implements NamespaceCreationCapable { + private static final Logger logger = LoggerFactory.getLogger(HyperfoilKubernetesOperatorProvisionerTest.class); + private static final String NAME = "hyperfoil"; + private static final HyperfoilKubernetesOperatorProvisioner hyperfoilOperatorProvisioner = initializeOperatorProvisioner(); + + private static HyperfoilKubernetesOperatorProvisioner initializeOperatorProvisioner() { + HyperfoilKubernetesOperatorProvisioner operatorProvisioner = new HyperfoilKubernetesOperatorProvisioner( + new HyperfoilOperatorApplication() { + @Override + public Hyperfoil getHyperfoil() { + Hyperfoil hyperfoil = new HyperfoilBuilder( + getName(), + // see https://github.com/Hyperfoil/hyperfoil-operator/issues/18, "latest" (default) would fail. 
+ "0.24.2").build(); + HyperfoilSpec spec = new HyperfoilSpec(); + Route route = new Route(); + route.setHost(getName()); + route.setType("http"); + spec.setRoute(route); + hyperfoil.setSpec(spec); + return hyperfoil; + + } + + @Override + public String getName() { + return NAME; + } + }); + return operatorProvisioner; + } + + @BeforeAll + public static void createOperatorGroup() throws IOException { + hyperfoilOperatorProvisioner.configure(); + IntersmashExtension.operatorCleanup(true, false); + // create operator group - this should be done by InteropExtension + Kuberneteses.adminBinary().execute("apply", "-f", + new OperatorGroup(KubernetesConfig.namespace()).save().getAbsolutePath()); + // clean any leftovers + hyperfoilOperatorProvisioner.unsubscribe(); + } + + @AfterAll + public static void removeOperatorGroup() { + Kuberneteses.adminBinary().execute("delete", "operatorgroup", "--all"); + // there might be leftovers in case of failures + Kuberneteses.admin().pods().withLabel("role", "agent").delete(); + hyperfoilOperatorProvisioner.dismiss(); + } + + /** + * Test subscription of Hyperfoil operator + */ + @Test + @Order(1) + public void subscribe() { + hyperfoilOperatorProvisioner.subscribe(); + } + + /** + * Test deploy of Hyperfoil + */ + @Test + @Order(2) + public void deploy() { + hyperfoilOperatorProvisioner.deploy(); + Assertions.assertEquals(1, hyperfoilOperatorProvisioner.getPods().size(), + "Unexpected number of cluster operator pods for '" + HyperfoilOperatorProvisioner.OPERATOR_ID + + "' after deploy"); + } + + /** + * Test running a Benchmark on Hyperfoil + */ + @Test + @Order(3) + public void benchmark() throws ApiException, InterruptedException, IOException { + ApiClient defaultClient = Configuration.getDefaultApiClient(); + defaultClient.setBasePath(hyperfoilOperatorProvisioner.getURL().toString()); + defaultClient.setVerifyingSsl(false); + + HyperfoilApi apiInstance = new HyperfoilApi(defaultClient); + String storedFilesBenchmark = "k8s-hello-world"; // String | Name of previously uploaded benchmark where extra files should be loaded from during multi-part upload. Usually this is the same benchmark unless it is being renamed. 
+ File body = new File( + this.getClass().getClassLoader().getResource("k8s-hello-world.hf.yaml").getPath()); + try { + apiInstance.addBenchmark(null, storedFilesBenchmark, body); + } catch (ApiException err) { + logger.error("Hyperfoil Benchmark add failed:", err); + Assertions.fail("Hyperfoil Benchmark add failed: " + err.getMessage()); + } + Run run = null; + try { + run = apiInstance.startBenchmark(storedFilesBenchmark, "Hello World Benchmark", null, null, + Arrays.asList("HOST_URL=http://hyperfoil:8090")); + } catch (ApiException err) { + if (run != null) { + apiInstance.killRun(run.getId()); + } + logger.error("Hyperfoil Benchmark run failed:", err); + Assertions.fail("Hyperfoil Benchmark run failed: " + err.getMessage()); + } + + int wait = 18; + try { + while (!run.getCompleted() && wait > 0) { + Thread.sleep(10000); + run = apiInstance.getRun(run.getId()); + wait--; + } + } catch (ApiException err) { + try { + apiInstance.killRun(run.getId()); + } catch (Exception ignore) { + } + logger.error("Hyperfoil Benchmark wait failed:", err); + Assertions.fail("Hyperfoil Benchmark wait failed: " + err.getMessage()); + } + Assertions.assertTrue(run.getErrors().stream().filter(e -> !e.toString().contains("Jitter watchdog was not invoked") + && !e.toString().matches(".*CPU [0-9]+ was used for.*")).count() == 0); + Assertions.assertTrue(wait > 0); + + // consume run statistics + File allStats = apiInstance.getAllStats(run.getId()); + String JSON = Files.readString(Paths.get(allStats.getAbsolutePath())); + logger.debug("JSON: {}", JSON); + RunStatisticsWrapper runStatisticsWrapper = new RunStatisticsWrapper(JSON); + Assertions.assertTrue(runStatisticsWrapper.getPhaseStats().size() > 0); + } + + /** + * Test undeploy of Hyperfoil + */ + @Test + @Order(4) + public void undeploy() { + hyperfoilOperatorProvisioner.undeploy(false); + } + + /** + * Test unsubscribe of Hyperfoil operator + */ + @Test + @Order(5) + public void unsubscribe() { + hyperfoilOperatorProvisioner.unsubscribe(); + Assertions.assertEquals(0, hyperfoilOperatorProvisioner.getPods().size(), + "Unexpected number of cluster operator pods for '" + HyperfoilOperatorProvisioner.OPERATOR_ID + + "' after deploy"); + } +} diff --git a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/ActiveMQOperatorProvisionerTest.java b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/ActiveMQOpenShiftOperatorProvisionerTest.java similarity index 87% rename from testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/ActiveMQOperatorProvisionerTest.java rename to testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/ActiveMQOpenShiftOperatorProvisionerTest.java index 3e1134fc2..8a698b915 100644 --- a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/ActiveMQOperatorProvisionerTest.java +++ b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/ActiveMQOpenShiftOperatorProvisionerTest.java @@ -19,19 +19,22 @@ import java.util.Collections; import java.util.List; -import org.jboss.intersmash.application.openshift.ActiveMQOperatorApplication; +import org.jboss.intersmash.application.operator.ActiveMQOperatorApplication; import org.jboss.intersmash.junit5.IntersmashExtension; -import org.jboss.intersmash.provision.openshift.ActiveMQOperatorProvisioner; +import org.jboss.intersmash.provision.olm.OperatorGroup; +import 
org.jboss.intersmash.provision.openshift.ActiveMQOpenShiftOperatorProvisioner; import org.jboss.intersmash.provision.openshift.operator.activemq.address.ActiveMQArtemisAddressBuilder; import org.jboss.intersmash.provision.openshift.operator.activemq.broker.ActiveMQArtemisBuilder; import org.jboss.intersmash.provision.openshift.operator.activemq.broker.spec.DeploymentPlanBuilder; -import org.jboss.intersmash.provision.openshift.operator.resources.OperatorGroup; +import org.jboss.intersmash.testsuite.junit5.categories.OpenShiftTest; +import org.jboss.intersmash.testsuite.openshift.ProjectCreationCapable; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; +import cz.xtf.core.config.OpenShiftConfig; import cz.xtf.core.openshift.OpenShifts; import cz.xtf.core.waiting.SimpleWaiter; import cz.xtf.junit5.annotations.CleanBeforeAll; @@ -42,11 +45,12 @@ @Slf4j @CleanBeforeAll -public class ActiveMQOperatorProvisionerTest { - private static final ActiveMQOperatorProvisioner activeMQOperatorProvisioner = initializeOperatorProvisioner(); +@OpenShiftTest +public class ActiveMQOpenShiftOperatorProvisionerTest implements ProjectCreationCapable { + private static final ActiveMQOpenShiftOperatorProvisioner activeMQOperatorProvisioner = initializeOperatorProvisioner(); - private static ActiveMQOperatorProvisioner initializeOperatorProvisioner() { - ActiveMQOperatorProvisioner operatorProvisioner = new ActiveMQOperatorProvisioner( + private static ActiveMQOpenShiftOperatorProvisioner initializeOperatorProvisioner() { + ActiveMQOpenShiftOperatorProvisioner operatorProvisioner = new ActiveMQOpenShiftOperatorProvisioner( new ActiveMQOperatorApplication() { private static final String DEFAULT_ACTIVEMQ_APP_NAME = "example-amq-broker"; @@ -76,9 +80,10 @@ public String getName() { @BeforeAll public static void createOperatorGroup() throws IOException { activeMQOperatorProvisioner.configure(); - IntersmashExtension.operatorCleanup(); + IntersmashExtension.operatorCleanup(false, true); // create operator group - this should be done by InteropExtension - OpenShifts.adminBinary().execute("apply", "-f", OperatorGroup.SINGLE_NAMESPACE.save().getAbsolutePath()); + OpenShifts.adminBinary().execute("apply", "-f", + new OperatorGroup(OpenShiftConfig.namespace()).save().getAbsolutePath()); // clean any leftovers activeMQOperatorProvisioner.unsubscribe(); // let's configure the provisioner diff --git a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/Eap7BootableJarTestCase.java b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/Eap7BootableJarTestCase.java index c940e3e6f..a2562432a 100644 --- a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/Eap7BootableJarTestCase.java +++ b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/Eap7BootableJarTestCase.java @@ -20,7 +20,9 @@ import org.jboss.intersmash.provision.openshift.BootableJarImageOpenShiftProvisioner; import org.jboss.intersmash.provision.openshift.WildflyBootableJarImageOpenShiftProvisioner; import org.jboss.intersmash.testsuite.junit5.categories.NotForCommunityExecutionProfile; +import org.jboss.intersmash.testsuite.junit5.categories.OpenShiftTest; import org.jboss.intersmash.testsuite.junit5.categories.wildfly.RequiresBootableJarDistribution; +import 
org.jboss.intersmash.testsuite.openshift.ProjectCreationCapable; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; @@ -33,7 +35,8 @@ @CleanBeforeAll @NotForCommunityExecutionProfile @RequiresBootableJarDistribution -public class Eap7BootableJarTestCase { +@OpenShiftTest +public class Eap7BootableJarTestCase implements ProjectCreationCapable { private static final OpenShift openShift = OpenShifts.master(); private static final BootableJarOpenShiftApplication application = OpenShiftProvisionerTestBase .getEap7BootableJarOpenShiftApplication(); diff --git a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/Eap7ImageProvisionerTestCase.java b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/Eap7ImageProvisionerTestCase.java index b84c5cf5d..0908dfe8b 100644 --- a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/Eap7ImageProvisionerTestCase.java +++ b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/Eap7ImageProvisionerTestCase.java @@ -23,6 +23,8 @@ import org.jboss.intersmash.application.openshift.Eap7ImageOpenShiftApplication; import org.jboss.intersmash.provision.openshift.Eap7ImageOpenShiftProvisioner; import org.jboss.intersmash.testsuite.junit5.categories.NotForCommunityExecutionProfile; +import org.jboss.intersmash.testsuite.junit5.categories.OpenShiftTest; +import org.jboss.intersmash.testsuite.openshift.ProjectCreationCapable; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; @@ -37,7 +39,8 @@ @CleanBeforeAll @NotForCommunityExecutionProfile -public class Eap7ImageProvisionerTestCase { +@OpenShiftTest +public class Eap7ImageProvisionerTestCase implements ProjectCreationCapable { private static final OpenShift openShift = OpenShifts.master(); private static final Eap7ImageOpenShiftApplication application = OpenShiftProvisionerTestBase .getEap7OpenShiftImageApplication(); diff --git a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/Eap7TemplateProvisionerTestCase.java b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/Eap7TemplateProvisionerTestCase.java index b52b68f67..9a2b749f9 100644 --- a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/Eap7TemplateProvisionerTestCase.java +++ b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/Eap7TemplateProvisionerTestCase.java @@ -22,6 +22,8 @@ import org.jboss.intersmash.application.openshift.Eap7TemplateOpenShiftApplication; import org.jboss.intersmash.provision.openshift.Eap7TemplateOpenShiftProvisioner; import org.jboss.intersmash.testsuite.junit5.categories.NotForCommunityExecutionProfile; +import org.jboss.intersmash.testsuite.junit5.categories.OpenShiftTest; +import org.jboss.intersmash.testsuite.openshift.ProjectCreationCapable; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; @@ -36,7 +38,8 @@ @CleanBeforeAll @NotForCommunityExecutionProfile -public class Eap7TemplateProvisionerTestCase { +@OpenShiftTest +public class Eap7TemplateProvisionerTestCase implements ProjectCreationCapable { private static final OpenShift openShift = OpenShifts.master(); private static final Eap7TemplateOpenShiftApplication 
application = OpenShiftProvisionerTestBase .getEap7OpenShiftTemplateApplication(); diff --git a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/HyperfoilOperatorProvisionerTest.java b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/HyperfoilOpenShiftOperatorProvisionerTest.java similarity index 87% rename from testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/HyperfoilOperatorProvisionerTest.java rename to testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/HyperfoilOpenShiftOperatorProvisionerTest.java index c8c20c62d..1a6662612 100644 --- a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/HyperfoilOperatorProvisionerTest.java +++ b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/HyperfoilOpenShiftOperatorProvisionerTest.java @@ -21,17 +21,20 @@ import java.nio.file.Paths; import java.util.Arrays; -import org.jboss.intersmash.application.openshift.HyperfoilOperatorApplication; +import org.jboss.intersmash.application.operator.HyperfoilOperatorApplication; import org.jboss.intersmash.junit5.IntersmashExtension; -import org.jboss.intersmash.provision.openshift.HyperfoilOperatorProvisioner; +import org.jboss.intersmash.provision.olm.OperatorGroup; +import org.jboss.intersmash.provision.openshift.HyperfoilOpenShiftOperatorProvisioner; import org.jboss.intersmash.provision.openshift.operator.hyperfoil.client.runschema.RunStatisticsWrapper; import org.jboss.intersmash.provision.openshift.operator.hyperfoil.client.v05.HyperfoilApi; import org.jboss.intersmash.provision.openshift.operator.hyperfoil.client.v05.invoker.ApiClient; import org.jboss.intersmash.provision.openshift.operator.hyperfoil.client.v05.invoker.ApiException; import org.jboss.intersmash.provision.openshift.operator.hyperfoil.client.v05.invoker.Configuration; import org.jboss.intersmash.provision.openshift.operator.hyperfoil.client.v05.model.Run; -import org.jboss.intersmash.provision.openshift.operator.resources.OperatorGroup; +import org.jboss.intersmash.provision.operator.HyperfoilOperatorProvisioner; import org.jboss.intersmash.testsuite.junit5.categories.NotForProductizedExecutionProfile; +import org.jboss.intersmash.testsuite.junit5.categories.OpenShiftTest; +import org.jboss.intersmash.testsuite.openshift.ProjectCreationCapable; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeAll; @@ -42,6 +45,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import cz.xtf.core.config.OpenShiftConfig; import cz.xtf.core.openshift.OpenShifts; import cz.xtf.junit5.annotations.CleanBeforeAll; import io.hyperfoil.v1alpha2.Hyperfoil; @@ -50,13 +54,14 @@ @CleanBeforeAll @NotForProductizedExecutionProfile @TestMethodOrder(MethodOrderer.OrderAnnotation.class) -public class HyperfoilOperatorProvisionerTest { - private static final Logger logger = LoggerFactory.getLogger(HyperfoilOperatorProvisionerTest.class); +@OpenShiftTest +public class HyperfoilOpenShiftOperatorProvisionerTest implements ProjectCreationCapable { + private static final Logger logger = LoggerFactory.getLogger(HyperfoilOpenShiftOperatorProvisionerTest.class); private static final String NAME = "hyperfoil"; private static final HyperfoilOperatorProvisioner hyperfoilOperatorProvisioner = initializeOperatorProvisioner(); private static 
HyperfoilOperatorProvisioner initializeOperatorProvisioner() { - HyperfoilOperatorProvisioner operatorProvisioner = new HyperfoilOperatorProvisioner( + HyperfoilOperatorProvisioner operatorProvisioner = new HyperfoilOpenShiftOperatorProvisioner( new HyperfoilOperatorApplication() { @Override public Hyperfoil getHyperfoil() { @@ -77,9 +82,10 @@ public String getName() { @BeforeAll public static void createOperatorGroup() throws IOException { hyperfoilOperatorProvisioner.configure(); - IntersmashExtension.operatorCleanup(); + IntersmashExtension.operatorCleanup(false, true); // create operator group - this should be done by InteropExtension - OpenShifts.adminBinary().execute("apply", "-f", OperatorGroup.SINGLE_NAMESPACE.save().getAbsolutePath()); + OpenShifts.adminBinary().execute("apply", "-f", + new OperatorGroup(OpenShiftConfig.namespace()).save().getAbsolutePath()); // clean any leftovers hyperfoilOperatorProvisioner.unsubscribe(); } @@ -109,7 +115,7 @@ public void subscribe() { public void deploy() { hyperfoilOperatorProvisioner.deploy(); Assertions.assertEquals(1, hyperfoilOperatorProvisioner.getPods().size(), - "Unexpected number of cluster operator pods for '" + HyperfoilOperatorProvisioner.getOperatorId() + "Unexpected number of cluster operator pods for '" + HyperfoilOperatorProvisioner.OPERATOR_ID + "' after deploy"); } @@ -189,7 +195,7 @@ public void undeploy() { public void unsubscribe() { hyperfoilOperatorProvisioner.unsubscribe(); Assertions.assertEquals(0, hyperfoilOperatorProvisioner.getPods().size(), - "Unexpected number of cluster operator pods for '" + HyperfoilOperatorProvisioner.getOperatorId() + "Unexpected number of cluster operator pods for '" + HyperfoilOperatorProvisioner.OPERATOR_ID + "' after deploy"); } } diff --git a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/InfinispanOperatorProvisionerTest.java b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/InfinispanOpenShiftOperatorProvisionerTest.java similarity index 88% rename from testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/InfinispanOperatorProvisionerTest.java rename to testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/InfinispanOpenShiftOperatorProvisionerTest.java index 4b9776089..6e426ddba 100644 --- a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/InfinispanOperatorProvisionerTest.java +++ b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/InfinispanOpenShiftOperatorProvisionerTest.java @@ -25,13 +25,16 @@ import org.infinispan.v1.Infinispan; import org.infinispan.v1.infinispanspec.Service; import org.infinispan.v2alpha1.Cache; -import org.jboss.intersmash.application.openshift.InfinispanOperatorApplication; +import org.jboss.intersmash.application.operator.InfinispanOperatorApplication; import org.jboss.intersmash.junit5.IntersmashExtension; -import org.jboss.intersmash.provision.openshift.InfinispanOperatorProvisioner; +import org.jboss.intersmash.provision.olm.OperatorGroup; +import org.jboss.intersmash.provision.openshift.InfinispanOpenShiftOperatorProvisioner; import org.jboss.intersmash.provision.openshift.operator.infinispan.cache.CacheBuilder; import org.jboss.intersmash.provision.openshift.operator.infinispan.infinispan.InfinispanBuilder; import 
org.jboss.intersmash.provision.openshift.operator.infinispan.infinispan.spec.InfinispanServiceSpecBuilder; -import org.jboss.intersmash.provision.openshift.operator.resources.OperatorGroup; +import org.jboss.intersmash.provision.operator.InfinispanOperatorProvisioner; +import org.jboss.intersmash.testsuite.junit5.categories.OpenShiftTest; +import org.jboss.intersmash.testsuite.openshift.ProjectCreationCapable; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Assertions; @@ -41,6 +44,7 @@ import cz.xtf.builder.builders.SecretBuilder; import cz.xtf.builder.builders.secret.SecretType; +import cz.xtf.core.config.OpenShiftConfig; import cz.xtf.core.openshift.OpenShifts; import cz.xtf.core.waiting.SimpleWaiter; import cz.xtf.junit5.annotations.CleanBeforeAll; @@ -55,17 +59,18 @@ */ @Slf4j @CleanBeforeAll -public class InfinispanOperatorProvisionerTest { +@OpenShiftTest +public class InfinispanOpenShiftOperatorProvisionerTest implements ProjectCreationCapable { static final String TEST_SECRET_USERNAME = "developer"; static final String TEST_SECRET_PASSWORD = "developer"; static final String TEST_SECRET_NAME = "test-secret"; static final Secret TEST_SECRET = new SecretBuilder(TEST_SECRET_NAME) .setType(SecretType.OPAQUE).addData(TEST_SECRET_USERNAME, TEST_SECRET_PASSWORD.getBytes()).build(); // Be aware that since we're using the static mock application, not all provisioner methods will work as expected! - private static final InfinispanOperatorProvisioner INFINISPAN_OPERATOR_PROVISIONER = initializeOperatorProvisioner(); + private static final InfinispanOpenShiftOperatorProvisioner INFINISPAN_OPERATOR_PROVISIONER = initializeOperatorProvisioner(); - private static InfinispanOperatorProvisioner initializeOperatorProvisioner() { - InfinispanOperatorProvisioner operatorProvisioner = new InfinispanOperatorProvisioner( + private static InfinispanOpenShiftOperatorProvisioner initializeOperatorProvisioner() { + InfinispanOpenShiftOperatorProvisioner operatorProvisioner = new InfinispanOpenShiftOperatorProvisioner( new InfinispanOperatorApplication() { private static final String DEFAULT_INFINISPAN_APP_NAME = "example-infinispan"; @@ -98,9 +103,10 @@ public static void createOperatorGroup() throws IOException { // let's configure the provisioner INFINISPAN_OPERATOR_PROVISIONER.configure(); matchLabels.put("app", "datagrid"); - IntersmashExtension.operatorCleanup(); + IntersmashExtension.operatorCleanup(false, true); // create operator group - this should be done by InteropExtension - OpenShifts.adminBinary().execute("apply", "-f", OperatorGroup.SINGLE_NAMESPACE.save().getAbsolutePath()); + OpenShifts.adminBinary().execute("apply", "-f", + new OperatorGroup(OpenShiftConfig.namespace()).save().getAbsolutePath()); // clean any leftovers INFINISPAN_OPERATOR_PROVISIONER.unsubscribe(); } @@ -214,13 +220,13 @@ public void basicProvisioningTest() { INFINISPAN_OPERATOR_PROVISIONER.deploy(); try { Assertions.assertEquals(1, INFINISPAN_OPERATOR_PROVISIONER.getPods().size(), - "Unexpected number of cluster operator pods for '" + INFINISPAN_OPERATOR_PROVISIONER.getOperatorId() + "Unexpected number of cluster operator pods for '" + InfinispanOperatorProvisioner.OPERATOR_ID + "' after deploy"); int scaledNum = INFINISPAN_OPERATOR_PROVISIONER.getApplication().getInfinispan().getSpec().getReplicas() + 1; INFINISPAN_OPERATOR_PROVISIONER.scale(scaledNum, true); Assertions.assertEquals(scaledNum, INFINISPAN_OPERATOR_PROVISIONER.getPods().size(), - "Unexpected number 
of cluster operator pods for '" + INFINISPAN_OPERATOR_PROVISIONER.getOperatorId() + "Unexpected number of cluster operator pods for '" + InfinispanOperatorProvisioner.OPERATOR_ID + "' after scaling"); } finally { INFINISPAN_OPERATOR_PROVISIONER.undeploy(); @@ -229,7 +235,7 @@ public void basicProvisioningTest() { INFINISPAN_OPERATOR_PROVISIONER.getPods() .stream().filter(pod -> Objects.isNull(pod.getMetadata().getDeletionTimestamp())) .count(), - "Unexpected number of cluster operator pods for '" + INFINISPAN_OPERATOR_PROVISIONER.getOperatorId() + "Unexpected number of cluster operator pods for '" + InfinispanOperatorProvisioner.OPERATOR_ID + "' after undeploy"); } @@ -269,7 +275,7 @@ private void deleteAndVerifyMinimalInfinispan(String name, boolean waitForPods) if (waitForPods) { // checking for size of Infinispan CR pod set is 0 new SimpleWaiter( - () -> InfinispanOperatorProvisioner.getInfinispanPods(name).size() == 0) + () -> INFINISPAN_OPERATOR_PROVISIONER.getInfinispanPods().isEmpty()) .level(Level.DEBUG).waitFor(); } } @@ -296,7 +302,7 @@ private void verifyMinimalInfinispan(final Infinispan infinispan, final boolean if (waitForPods) { // a correct number of Infinispan CRs has been created new SimpleWaiter( - () -> InfinispanOperatorProvisioner.getInfinispanPods(name).size() == infinispan.getSpec().getReplicas()) + () -> INFINISPAN_OPERATOR_PROVISIONER.getInfinispanPods().size() == infinispan.getSpec().getReplicas()) .level(Level.DEBUG).waitFor(); log.debug(INFINISPAN_OPERATOR_PROVISIONER.infinispansClient().withName(name).get().getStatus().toString()); } @@ -316,7 +322,7 @@ private void verifyMinimalTwoReplicasInfinispan(Infinispan infinispan, boolean w if (waitForPods) { // a correct number of Infinispan CRs has been created new SimpleWaiter( - () -> InfinispanOperatorProvisioner.getInfinispanPods(name).size() == infinispan.getSpec().getReplicas()) + () -> INFINISPAN_OPERATOR_PROVISIONER.getInfinispanPods().size() == infinispan.getSpec().getReplicas()) .level(Level.DEBUG).waitFor(); log.debug(INFINISPAN_OPERATOR_PROVISIONER.infinispansClient().withName(name).get().getStatus().toString()); } diff --git a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/KafkaOperatorProvisionerTest.java b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/KafkaOpenShiftOperatorProvisionerTest.java similarity index 83% rename from testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/KafkaOperatorProvisionerTest.java rename to testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/KafkaOpenShiftOperatorProvisionerTest.java index e154c7c71..1a7dfd64f 100644 --- a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/KafkaOperatorProvisionerTest.java +++ b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/KafkaOpenShiftOperatorProvisionerTest.java @@ -17,41 +17,47 @@ import java.io.IOException; -import org.jboss.intersmash.application.openshift.KafkaOperatorApplication; +import org.jboss.intersmash.application.operator.KafkaOperatorApplication; import org.jboss.intersmash.junit5.IntersmashExtension; -import org.jboss.intersmash.provision.openshift.KafkaOperatorProvisioner; -import org.jboss.intersmash.provision.openshift.operator.resources.OperatorGroup; +import org.jboss.intersmash.provision.olm.OperatorGroup; +import 
org.jboss.intersmash.provision.openshift.KafkaOpenShiftOperatorProvisioner; +import org.jboss.intersmash.provision.operator.KafkaOperatorProvisioner; +import org.jboss.intersmash.testsuite.junit5.categories.OpenShiftTest; +import org.jboss.intersmash.testsuite.openshift.ProjectCreationCapable; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; +import cz.xtf.core.config.OpenShiftConfig; import cz.xtf.core.openshift.OpenShifts; import cz.xtf.junit5.annotations.CleanBeforeAll; import io.fabric8.kubernetes.api.model.DeletionPropagation; import lombok.extern.slf4j.Slf4j; /** - * Simple class that tests basic features of {@link KafkaOperatorProvisioner}. + * Simple class that tests basic features of {@link KafkaOpenShiftOperatorProvisioner}. */ @Slf4j @CleanBeforeAll -public class KafkaOperatorProvisionerTest { +@OpenShiftTest +public class KafkaOpenShiftOperatorProvisionerTest implements ProjectCreationCapable { private static KafkaOperatorApplication application = OpenShiftProvisionerTestBase.getKafkaApplication(); - private static final KafkaOperatorProvisioner operatorProvisioner = initializeOperatorProvisioner(); + private static final KafkaOpenShiftOperatorProvisioner operatorProvisioner = initializeOperatorProvisioner(); - private static KafkaOperatorProvisioner initializeOperatorProvisioner() { - KafkaOperatorProvisioner operatorProvisioner = new KafkaOperatorProvisioner(application); + private static KafkaOpenShiftOperatorProvisioner initializeOperatorProvisioner() { + KafkaOpenShiftOperatorProvisioner operatorProvisioner = new KafkaOpenShiftOperatorProvisioner(application); return operatorProvisioner; } @BeforeAll public static void createOperatorGroup() throws IOException { operatorProvisioner.configure(); - IntersmashExtension.operatorCleanup(); + IntersmashExtension.operatorCleanup(false, true); // create operator group - this should be done by InteropExtension - OpenShifts.adminBinary().execute("apply", "-f", OperatorGroup.SINGLE_NAMESPACE.save().getAbsolutePath()); + OpenShifts.adminBinary().execute("apply", "-f", + new OperatorGroup(OpenShiftConfig.namespace()).save().getAbsolutePath()); // clean any leftovers operatorProvisioner.unsubscribe(); // Let's skip subscribe operation here since we use regular deploy/undeploy where subscribe is called anyway. 
@@ -100,7 +106,7 @@ public void basicProvisioningTest() { private void verifyDeployed() { Assertions.assertEquals(1, operatorProvisioner.getClusterOperatorPods().size(), - "Unexpected number of cluster operator pods for '" + operatorProvisioner.getOperatorId() + "'"); + "Unexpected number of cluster operator pods for '" + KafkaOperatorProvisioner.OPERATOR_ID + "'"); int kafkaReplicas = operatorProvisioner.getApplication().getKafka().getSpec().getKafka().getReplicas(); int zookeeperReplicas = operatorProvisioner.getApplication().getKafka().getSpec().getZookeeper().getReplicas(); @@ -133,7 +139,7 @@ private void verifyDeployed() { private void verifyScaledDeployed(int scaledNum) { Assertions.assertEquals(1, operatorProvisioner.getClusterOperatorPods().size(), - "Unexpected number of cluster operator pods for '" + operatorProvisioner.getOperatorId() + "'"); + "Unexpected number of cluster operator pods for '" + KafkaOperatorProvisioner.OPERATOR_ID + "'"); int zookeeperReplicas = operatorProvisioner.getApplication().getKafka().getSpec().getZookeeper().getReplicas(); diff --git a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/KeycloakOperatorProvisionerTest.java b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/KeycloakOperatorProvisionerTest.java index 92d505ec2..cabdcc640 100644 --- a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/KeycloakOperatorProvisionerTest.java +++ b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/KeycloakOperatorProvisionerTest.java @@ -22,12 +22,14 @@ import java.util.Map; import java.util.Objects; -import org.jboss.intersmash.application.openshift.KeycloakOperatorApplication; import org.jboss.intersmash.application.openshift.PostgreSQLImageOpenShiftApplication; +import org.jboss.intersmash.application.operator.KeycloakOperatorApplication; import org.jboss.intersmash.junit5.IntersmashExtension; -import org.jboss.intersmash.provision.openshift.KeycloakOperatorProvisioner; +import org.jboss.intersmash.provision.olm.OperatorGroup; +import org.jboss.intersmash.provision.openshift.KeycloakOpenShiftOperatorProvisioner; import org.jboss.intersmash.provision.openshift.PostgreSQLImageOpenShiftProvisioner; -import org.jboss.intersmash.provision.openshift.operator.resources.OperatorGroup; +import org.jboss.intersmash.testsuite.junit5.categories.OpenShiftTest; +import org.jboss.intersmash.testsuite.openshift.ProjectCreationCapable; import org.jboss.intersmash.util.tls.CertificatesUtils; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; @@ -51,6 +53,7 @@ import org.keycloak.k8s.v2alpha1.keycloakspec.db.UsernameSecret; import org.slf4j.event.Level; +import cz.xtf.core.config.OpenShiftConfig; import cz.xtf.core.openshift.OpenShiftWaiters; import cz.xtf.core.openshift.OpenShifts; import cz.xtf.core.waiting.SimpleWaiter; @@ -71,13 +74,12 @@ */ @Slf4j @CleanBeforeAll -public class KeycloakOperatorProvisionerTest { - private static KeycloakOperatorProvisioner KEYCLOAK_OPERATOR_PROVISIONER; +@OpenShiftTest +public class KeycloakOperatorProvisionerTest implements ProjectCreationCapable { + private static KeycloakOpenShiftOperatorProvisioner KEYCLOAK_OPERATOR_PROVISIONER; private static final String POSTGRESQL_NAME = "postgresql"; private static final String POSTGRESQL_DATABASE = "keycloak"; - private static final String POSTGRESQL_PASSWORD = "pippobaudo1234"; - private static final 
String POSTGRESQL_USER = "user09M"; private static final PostgreSQLImageOpenShiftApplication pgSQLApplication = new PostgreSQLImageOpenShiftApplication() { @Override @@ -103,9 +105,9 @@ public String getDbName() { private static final PostgreSQLImageOpenShiftProvisioner POSTGRESQL_IMAGE_PROVISIONER = new PostgreSQLImageOpenShiftProvisioner( pgSQLApplication); - private static KeycloakOperatorProvisioner initializeOperatorProvisioner(final Keycloak keycloak, + private static KeycloakOpenShiftOperatorProvisioner initializeOperatorProvisioner(final Keycloak keycloak, final String appName) { - KeycloakOperatorProvisioner operatorProvisioner = new KeycloakOperatorProvisioner( + KeycloakOpenShiftOperatorProvisioner operatorProvisioner = new KeycloakOpenShiftOperatorProvisioner( new KeycloakOperatorApplication() { @Override @@ -129,9 +131,10 @@ public String getName() { @BeforeAll public static void createOperatorGroup() throws IOException { matchLabels.put("app", "sso"); - IntersmashExtension.operatorCleanup(); + IntersmashExtension.operatorCleanup(false, true); // create operator group - this should be done by InteropExtension - OpenShifts.adminBinary().execute("apply", "-f", OperatorGroup.SINGLE_NAMESPACE.save().getAbsolutePath()); + OpenShifts.adminBinary().execute("apply", "-f", + new OperatorGroup(OpenShiftConfig.namespace()).save().getAbsolutePath()); } @AfterAll @@ -167,7 +170,7 @@ public void customResourcesCleanup() { *
- https://github.com/keycloak/keycloak-operator/tree/master/deploy/examples/keycloak */ @Test - public void exampleSso() { + public void exampleKeycloakTest() { name = "example-sso"; final Keycloak keycloak = new Keycloak(); @@ -183,7 +186,8 @@ public void exampleSso() { // create key, certificate and tls secret: Keycloak expects the secret to be created beforehand String tlsSecretName = name + "-tls-secret"; CertificatesUtils.CertificateAndKey certificateAndKey = CertificatesUtils - .generateSelfSignedCertificateAndKey(hostname.getHostname().replaceFirst("[.].*$", ""), tlsSecretName); + .generateSelfSignedCertificateAndKey(hostname.getHostname().replaceFirst("[.].*$", ""), tlsSecretName, + OpenShifts.master().getClient(), OpenShifts.master().getNamespace()); // add TLS config to keycloak using the secret we just created Http http = new Http(); http.setTlsSecret(certificateAndKey.tlsSecret.getMetadata().getName()); @@ -216,7 +220,7 @@ public void exampleSso() { *
- https://github.com/keycloak/keycloak-operator/tree/master/deploy/examples/keycloak */ @Test - public void exampleSsoWithDatabase() { + public void exampleKeycloakWithDatabaseTest() { POSTGRESQL_IMAGE_PROVISIONER.configure(); try { POSTGRESQL_IMAGE_PROVISIONER.preDeploy(); @@ -239,7 +243,7 @@ public void exampleSsoWithDatabase() { String tlsSecretName = name + "-tls-secret"; CertificatesUtils.CertificateAndKey certificateAndKey = CertificatesUtils .generateSelfSignedCertificateAndKey(hostname.getHostname().replaceFirst("[.].*$", ""), - tlsSecretName); + tlsSecretName, OpenShifts.master().getClient(), OpenShifts.master().getNamespace()); // add TLS config to keycloak using the secret we just created Http http = new Http(); http.setTlsSecret(certificateAndKey.tlsSecret.getMetadata().getName()); diff --git a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/MysqlImageTestCase.java b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/MysqlImageTestCase.java index 46415d9b6..12f9e8ef4 100644 --- a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/MysqlImageTestCase.java +++ b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/MysqlImageTestCase.java @@ -19,6 +19,8 @@ import org.jboss.intersmash.application.openshift.MysqlImageOpenShiftApplication; import org.jboss.intersmash.provision.openshift.MysqlImageOpenShiftProvisioner; import org.jboss.intersmash.testsuite.junit5.categories.NotForProductizedExecutionProfile; +import org.jboss.intersmash.testsuite.junit5.categories.OpenShiftTest; +import org.jboss.intersmash.testsuite.openshift.ProjectCreationCapable; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; @@ -31,7 +33,8 @@ @CleanBeforeAll @Slf4j @NotForProductizedExecutionProfile -public class MysqlImageTestCase { +@OpenShiftTest +public class MysqlImageTestCase implements ProjectCreationCapable { private static final OpenShift openShift = OpenShifts.master(); private static final MysqlImageOpenShiftApplication application = OpenShiftProvisionerTestBase .getMysqlOpenShiftApplication(); diff --git a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/OpenShiftProvisionerTestBase.java b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/OpenShiftProvisionerTestBase.java index d8f778866..0fa3620f4 100644 --- a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/OpenShiftProvisionerTestBase.java +++ b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/OpenShiftProvisionerTestBase.java @@ -31,29 +31,30 @@ import org.infinispan.v1.Infinispan; import org.infinispan.v2alpha1.Cache; import org.jboss.intersmash.IntersmashConfig; +import org.jboss.intersmash.application.input.BinarySource; +import org.jboss.intersmash.application.input.BuildInput; +import org.jboss.intersmash.application.input.BuildInputBuilder; import org.jboss.intersmash.application.openshift.BootableJarOpenShiftApplication; import org.jboss.intersmash.application.openshift.Eap7ImageOpenShiftApplication; import org.jboss.intersmash.application.openshift.Eap7LegacyS2iBuildTemplateApplication; import org.jboss.intersmash.application.openshift.Eap7TemplateOpenShiftApplication; -import 
org.jboss.intersmash.application.openshift.InfinispanOperatorApplication; -import org.jboss.intersmash.application.openshift.KafkaOperatorApplication; -import org.jboss.intersmash.application.openshift.KeycloakOperatorApplication; import org.jboss.intersmash.application.openshift.MysqlImageOpenShiftApplication; import org.jboss.intersmash.application.openshift.PostgreSQLImageOpenShiftApplication; import org.jboss.intersmash.application.openshift.PostgreSQLTemplateOpenShiftApplication; import org.jboss.intersmash.application.openshift.RhSsoTemplateOpenShiftApplication; import org.jboss.intersmash.application.openshift.WildflyImageOpenShiftApplication; -import org.jboss.intersmash.application.openshift.input.BinarySource; -import org.jboss.intersmash.application.openshift.input.BuildInput; -import org.jboss.intersmash.application.openshift.input.BuildInputBuilder; import org.jboss.intersmash.application.openshift.template.Eap7Template; import org.jboss.intersmash.application.openshift.template.PostgreSQLTemplate; import org.jboss.intersmash.application.openshift.template.RhSsoTemplate; +import org.jboss.intersmash.application.operator.InfinispanOperatorApplication; +import org.jboss.intersmash.application.operator.KafkaOperatorApplication; +import org.jboss.intersmash.application.operator.KeycloakOperatorApplication; import org.jboss.intersmash.provision.openshift.operator.infinispan.infinispan.InfinispanBuilder; import org.jboss.intersmash.test.deployments.DeploymentsProvider; import org.jboss.intersmash.test.deployments.TestDeploymentProperties; import org.jboss.intersmash.test.deployments.WildflyDeploymentApplicationConfiguration; import org.jboss.intersmash.testsuite.IntersmashTestsuiteProperties; +import org.jboss.intersmash.testsuite.junit5.categories.OpenShiftTest; import org.jboss.intersmash.util.CommandLineBasedKeystoreGenerator; import org.jboss.intersmash.util.openshift.WildflyOpenShiftUtils; import org.jboss.intersmash.util.tls.CertificatesUtils; @@ -84,6 +85,7 @@ import lombok.extern.slf4j.Slf4j; @Slf4j +@OpenShiftTest public class OpenShiftProvisionerTestBase { static final EnvVar TEST_ENV_VAR = new EnvVarBuilder().withName("test-evn-key").withValue("test-evn-value").build(); static final String TEST_SECRET_FOO = "foo"; @@ -735,7 +737,8 @@ public org.keycloak.k8s.v2alpha1.Keycloak getKeycloak() { final String hostName = OpenShifts.master().generateHostname(DEFAULT_KEYCLOAK_APP_NAME); final String tlsSecretName = DEFAULT_KEYCLOAK_APP_NAME + "-tls-secret"; CertificatesUtils.CertificateAndKey certificateAndKey = CertificatesUtils - .generateSelfSignedCertificateAndKey(hostName.replaceFirst("[.].*$", ""), tlsSecretName); + .generateSelfSignedCertificateAndKey(hostName.replaceFirst("[.].*$", ""), tlsSecretName, + OpenShifts.master().getClient(), OpenShifts.master().getNamespace()); // build the basic Keycloak resource return new org.keycloak.k8s.v2alpha1.KeycloakBuilder() .withNewMetadata() diff --git a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/PostgreSQLImageTestCase.java b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/PostgreSQLImageTestCase.java index a66663ee7..3894f586c 100644 --- a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/PostgreSQLImageTestCase.java +++ b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/PostgreSQLImageTestCase.java @@ -19,6 +19,8 @@ import 
org.jboss.intersmash.application.openshift.PostgreSQLImageOpenShiftApplication; import org.jboss.intersmash.provision.openshift.PostgreSQLImageOpenShiftProvisioner; import org.jboss.intersmash.testsuite.junit5.categories.NotForProductizedExecutionProfile; +import org.jboss.intersmash.testsuite.junit5.categories.OpenShiftTest; +import org.jboss.intersmash.testsuite.openshift.ProjectCreationCapable; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; @@ -31,7 +33,8 @@ @CleanBeforeAll @Slf4j @NotForProductizedExecutionProfile -public class PostgreSQLImageTestCase { +@OpenShiftTest +public class PostgreSQLImageTestCase implements ProjectCreationCapable { private static final OpenShift openShift = OpenShifts.master(); private static final PostgreSQLImageOpenShiftApplication application = OpenShiftProvisionerTestBase .getPostgreSQLImageOpenShiftApplication(); diff --git a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/ProvisionerCleanupTestCase.java b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/ProvisionerCleanupTestCase.java index a9808c000..3ca1f853d 100644 --- a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/ProvisionerCleanupTestCase.java +++ b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/ProvisionerCleanupTestCase.java @@ -18,26 +18,19 @@ import java.io.IOException; import java.util.stream.Stream; -import org.jboss.intersmash.provision.openshift.Eap7ImageOpenShiftProvisioner; -import org.jboss.intersmash.provision.openshift.Eap7LegacyS2iBuildTemplateProvisioner; -import org.jboss.intersmash.provision.openshift.InfinispanOperatorProvisioner; -import org.jboss.intersmash.provision.openshift.KeycloakOperatorProvisioner; -import org.jboss.intersmash.provision.openshift.MysqlImageOpenShiftProvisioner; -import org.jboss.intersmash.provision.openshift.OpenShiftProvisioner; -import org.jboss.intersmash.provision.openshift.PostgreSQLImageOpenShiftProvisioner; -import org.jboss.intersmash.provision.openshift.PostgreSQLTemplateOpenShiftProvisioner; -import org.jboss.intersmash.provision.openshift.RhSsoTemplateOpenShiftProvisioner; -import org.jboss.intersmash.provision.openshift.WildflyBootableJarImageOpenShiftProvisioner; -import org.jboss.intersmash.provision.openshift.WildflyImageOpenShiftProvisioner; -import org.jboss.intersmash.provision.openshift.operator.OperatorProvisioner; -import org.jboss.intersmash.provision.openshift.operator.resources.OperatorGroup; +import org.jboss.intersmash.provision.olm.OperatorGroup; +import org.jboss.intersmash.provision.openshift.*; +import org.jboss.intersmash.provision.operator.OperatorProvisioner; import org.jboss.intersmash.testsuite.IntersmashTestsuiteProperties; import org.jboss.intersmash.testsuite.junit5.categories.NotForCommunityExecutionProfile; +import org.jboss.intersmash.testsuite.junit5.categories.OpenShiftTest; +import org.jboss.intersmash.testsuite.openshift.ProjectCreationCapable; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.MethodSource; +import cz.xtf.core.config.OpenShiftConfig; import cz.xtf.core.openshift.OpenShift; import cz.xtf.core.openshift.OpenShifts; import cz.xtf.junit5.annotations.CleanBeforeEach; @@ -46,21 +39,22 @@ @CleanBeforeEach @Slf4j -public class 
ProvisionerCleanupTestCase { +@OpenShiftTest +public class ProvisionerCleanupTestCase implements ProjectCreationCapable { protected static final OpenShift openShift = OpenShifts.master(); private static Stream provisionerProvider() { if (IntersmashTestsuiteProperties.isCommunityTestExecutionProfileEnabled()) { return Stream.of( // Infinispan - new InfinispanOperatorProvisioner(OpenShiftProvisionerTestBase.getInfinispanOperatorApplication()) + new InfinispanOpenShiftOperatorProvisioner(OpenShiftProvisionerTestBase.getInfinispanOperatorApplication()) // WildFly , new WildflyBootableJarImageOpenShiftProvisioner( OpenShiftProvisionerTestBase.getWildflyBootableJarOpenShiftApplication()), new WildflyBootableJarImageOpenShiftProvisioner( OpenShiftProvisionerTestBase.getEap7BootableJarOpenShiftApplication()) // Keycloak - , new KeycloakOperatorProvisioner( + , new KeycloakOpenShiftOperatorProvisioner( OpenShiftProvisionerTestBase.getKeycloakOperatorApplication()) // MySQL , new MysqlImageOpenShiftProvisioner(OpenShiftProvisionerTestBase.getMysqlOpenShiftApplication()) @@ -72,7 +66,7 @@ private static Stream provisionerProvider() { } else if (IntersmashTestsuiteProperties.isProductizedTestExecutionProfileEnabled()) { return Stream.of( // RHDG - new InfinispanOperatorProvisioner(OpenShiftProvisionerTestBase.getInfinispanOperatorApplication()) + new InfinispanOpenShiftOperatorProvisioner(OpenShiftProvisionerTestBase.getInfinispanOperatorApplication()) // EAP latest GA , new WildflyImageOpenShiftProvisioner( OpenShiftProvisionerTestBase.getWildflyOpenShiftLocalBinaryTargetServerApplication()) @@ -81,7 +75,7 @@ private static Stream provisionerProvider() { // RHSSO 7.6.x , new RhSsoTemplateOpenShiftProvisioner(OpenShiftProvisionerTestBase.getHttpsRhSso()) // RHBK - , new KeycloakOperatorProvisioner( + , new KeycloakOpenShiftOperatorProvisioner( OpenShiftProvisionerTestBase.getKeycloakOperatorApplication())); } else { throw new IllegalStateException( @@ -130,9 +124,9 @@ private static void evalOperatorSetup(OpenShiftProvisioner provisioner) throws I if (OperatorProvisioner.class.isAssignableFrom(provisioner.getClass())) { operatorCleanup(); log.debug("Deploy operatorgroup [{}] to enable operators subscription into tested namespace", - OperatorGroup.SINGLE_NAMESPACE.getMetadata().getName()); + new OperatorGroup(OpenShiftConfig.namespace()).getMetadata().getName()); OpenShifts.adminBinary().execute("apply", "-f", - OperatorGroup.SINGLE_NAMESPACE.save().getAbsolutePath()); + new OperatorGroup(OpenShiftConfig.namespace()).save().getAbsolutePath()); } } diff --git a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/RhSsoOperatorProvisionerTest.java b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/RhSsoOpenShiftOperatorProvisionerTest.java similarity index 88% rename from testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/RhSsoOperatorProvisionerTest.java rename to testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/RhSsoOpenShiftOperatorProvisionerTest.java index 3be355db5..d2917224b 100644 --- a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/RhSsoOperatorProvisionerTest.java +++ b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/RhSsoOpenShiftOperatorProvisionerTest.java @@ -23,9 +23,10 @@ import java.util.stream.Collectors; import 
java.util.stream.Stream; -import org.jboss.intersmash.application.openshift.RhSsoOperatorApplication; +import org.jboss.intersmash.application.operator.RhSsoOperatorApplication; import org.jboss.intersmash.junit5.IntersmashExtension; -import org.jboss.intersmash.provision.openshift.RhSsoOperatorProvisioner; +import org.jboss.intersmash.provision.olm.OperatorGroup; +import org.jboss.intersmash.provision.openshift.RhSsoOpenShiftOperatorProvisioner; import org.jboss.intersmash.provision.openshift.operator.keycloak.backup.KeycloakBackupBuilder; import org.jboss.intersmash.provision.openshift.operator.keycloak.backup.spec.KeycloakAWSSpecBuilder; import org.jboss.intersmash.provision.openshift.operator.keycloak.client.KeycloakClientBuilder; @@ -38,8 +39,10 @@ import org.jboss.intersmash.provision.openshift.operator.keycloak.user.KeycloakUserBuilder; import org.jboss.intersmash.provision.openshift.operator.keycloak.user.spec.KeycloakAPIUserBuilder; import org.jboss.intersmash.provision.openshift.operator.keycloak.user.spec.KeycloakCredentialBuilder; -import org.jboss.intersmash.provision.openshift.operator.resources.OperatorGroup; +import org.jboss.intersmash.provision.operator.RhSsoOperatorProvisioner; import org.jboss.intersmash.testsuite.junit5.categories.NotForCommunityExecutionProfile; +import org.jboss.intersmash.testsuite.junit5.categories.OpenShiftTest; +import org.jboss.intersmash.testsuite.openshift.ProjectCreationCapable; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Assertions; @@ -57,6 +60,7 @@ import org.keycloak.v1alpha1.keycloakuserspec.user.Credentials; import org.slf4j.event.Level; +import cz.xtf.core.config.OpenShiftConfig; import cz.xtf.core.openshift.OpenShiftWaiters; import cz.xtf.core.openshift.OpenShifts; import cz.xtf.core.waiting.SimpleWaiter; @@ -79,12 +83,13 @@ @CleanBeforeAll @NotForCommunityExecutionProfile @Deprecated(since = "0.0.2") -public class RhSsoOperatorProvisionerTest { +@OpenShiftTest +public class RhSsoOpenShiftOperatorProvisionerTest implements ProjectCreationCapable { // Be aware that since we're using the static mock application, not all provisioner methods will work as expected! 
- private static final RhSsoOperatorProvisioner KEYCLOAK_OPERATOR_PROVISIONER = initializeOperatorProvisioner(); + private static final RhSsoOpenShiftOperatorProvisioner KEYCLOAK_OPERATOR_PROVISIONER = initializeOperatorProvisioner(); - private static RhSsoOperatorProvisioner initializeOperatorProvisioner() { - RhSsoOperatorProvisioner operatorProvisioner = new RhSsoOperatorProvisioner( + private static RhSsoOpenShiftOperatorProvisioner initializeOperatorProvisioner() { + RhSsoOpenShiftOperatorProvisioner operatorProvisioner = new RhSsoOpenShiftOperatorProvisioner( new RhSsoOperatorApplication() { private static final String DEFAULT_KEYCLOAK_APP_NAME = "example-sso"; @@ -112,9 +117,10 @@ public String getName() { public static void createOperatorGroup() throws IOException { KEYCLOAK_OPERATOR_PROVISIONER.configure(); matchLabels.put("app", "sso"); - IntersmashExtension.operatorCleanup(); + IntersmashExtension.operatorCleanup(false, true); // create operator group - this should be done by InteropExtension - OpenShifts.adminBinary().execute("apply", "-f", OperatorGroup.SINGLE_NAMESPACE.save().getAbsolutePath()); + OpenShifts.adminBinary().execute("apply", "-f", + new OperatorGroup(OpenShiftConfig.namespace()).save().getAbsolutePath()); // clean any leftovers KEYCLOAK_OPERATOR_PROVISIONER.unsubscribe(); } @@ -128,25 +134,25 @@ public static void removeOperatorGroup() { @AfterEach public void customResourcesCleanup() { // delete backups - KEYCLOAK_OPERATOR_PROVISIONER.keycloakBackupsClient().list().getItems().stream() + KEYCLOAK_OPERATOR_PROVISIONER.keycloakBackupClient().list().getItems().stream() .map(resource -> resource.getMetadata().getName()).forEach(name -> KEYCLOAK_OPERATOR_PROVISIONER - .keycloakBackupsClient().withName(name).withPropagationPolicy(DeletionPropagation.FOREGROUND).delete()); + .keycloakBackupClient().withName(name).withPropagationPolicy(DeletionPropagation.FOREGROUND).delete()); // delete clients - KEYCLOAK_OPERATOR_PROVISIONER.keycloakClientsClient().list().getItems().stream() + KEYCLOAK_OPERATOR_PROVISIONER.keycloakClientClient().list().getItems().stream() .map(resource -> resource.getMetadata().getName()).forEach(name -> KEYCLOAK_OPERATOR_PROVISIONER - .keycloakClientsClient().withName(name).withPropagationPolicy(DeletionPropagation.FOREGROUND).delete()); + .keycloakClientClient().withName(name).withPropagationPolicy(DeletionPropagation.FOREGROUND).delete()); // delete keycloaks - KEYCLOAK_OPERATOR_PROVISIONER.keycloaksClient().list().getItems().stream() + KEYCLOAK_OPERATOR_PROVISIONER.keycloakClient().list().getItems().stream() .map(resource -> resource.getMetadata().getName()).forEach(name -> KEYCLOAK_OPERATOR_PROVISIONER - .keycloaksClient().withName(name).withPropagationPolicy(DeletionPropagation.FOREGROUND).delete()); + .keycloakClient().withName(name).withPropagationPolicy(DeletionPropagation.FOREGROUND).delete()); // delete realms - KEYCLOAK_OPERATOR_PROVISIONER.keycloakRealmsClient().list().getItems().stream() + KEYCLOAK_OPERATOR_PROVISIONER.keycloakRealmClient().list().getItems().stream() .map(resource -> resource.getMetadata().getName()).forEach(name -> KEYCLOAK_OPERATOR_PROVISIONER - .keycloakRealmsClient().withName(name).withPropagationPolicy(DeletionPropagation.FOREGROUND).delete()); + .keycloakRealmClient().withName(name).withPropagationPolicy(DeletionPropagation.FOREGROUND).delete()); // delete users - KEYCLOAK_OPERATOR_PROVISIONER.keycloakUsersClient().list().getItems().stream() + 
KEYCLOAK_OPERATOR_PROVISIONER.keycloakUserClient().list().getItems().stream() .map(resource -> resource.getMetadata().getName()).forEach(name -> KEYCLOAK_OPERATOR_PROVISIONER - .keycloakUsersClient().withName(name).withPropagationPolicy(DeletionPropagation.FOREGROUND).delete()); + .keycloakUserClient().withName(name).withPropagationPolicy(DeletionPropagation.FOREGROUND).delete()); } /** @@ -611,23 +617,23 @@ public void userWithCredentials() { * * Does subscribe/unsubscribe on its own, so no need to call explicitly here. * - * This test adds no further checks after {@link RhSsoOperatorProvisioner#undeploy()} based on - * {@link RhSsoOperatorProvisioner#getPods()}, since it looks for a stateful set which would be null at this point. - * There is room for evaluating whether to revisit {@link RhSsoOperatorProvisioner} with respect to such logic + * This test adds no further checks after {@link RhSsoOpenShiftOperatorProvisioner#undeploy()} based on + * {@link RhSsoOpenShiftOperatorProvisioner#getPods()}, since it looks for a stateful set which would be null at this point. + * There is room for evaluating whether to revisit {@link RhSsoOpenShiftOperatorProvisioner} with respect to such logic */ @Test public void basicProvisioningTest() { KEYCLOAK_OPERATOR_PROVISIONER.deploy(); try { Assertions.assertEquals(1, KEYCLOAK_OPERATOR_PROVISIONER.getPods().size(), - "Unexpected number of cluster operator pods for '" + KEYCLOAK_OPERATOR_PROVISIONER.getOperatorId() + "Unexpected number of cluster operator pods for '" + RhSsoOperatorProvisioner.OPERATOR_ID + "' after deploy"); int scaledNum = KEYCLOAK_OPERATOR_PROVISIONER.getApplication().getKeycloak().getSpec().getInstances().intValue() + 1; KEYCLOAK_OPERATOR_PROVISIONER.scale(scaledNum, true); Assertions.assertEquals(scaledNum, KEYCLOAK_OPERATOR_PROVISIONER.getPods().size(), - "Unexpected number of cluster operator pods for '" + KEYCLOAK_OPERATOR_PROVISIONER.getOperatorId() + "Unexpected number of cluster operator pods for '" + RhSsoOperatorProvisioner.OPERATOR_ID + "' after scaling"); } finally { KEYCLOAK_OPERATOR_PROVISIONER.undeploy(); @@ -636,54 +642,54 @@ public void basicProvisioningTest() { private void verifyUser(KeycloakUser keycloakUser) { // create and verify that object exists - KEYCLOAK_OPERATOR_PROVISIONER.keycloakUsersClient().createOrReplace(keycloakUser); - new SimpleWaiter(() -> KEYCLOAK_OPERATOR_PROVISIONER.keycloakUsersClient().list().getItems().size() == 1) + KEYCLOAK_OPERATOR_PROVISIONER.keycloakUserClient().createOrReplace(keycloakUser); + new SimpleWaiter(() -> KEYCLOAK_OPERATOR_PROVISIONER.keycloakUserClient().list().getItems().size() == 1) .level(Level.DEBUG) .waitFor(); Assertions.assertEquals(keycloakUser.getSpec(), KEYCLOAK_OPERATOR_PROVISIONER.keycloakUser(name).get().getSpec()); // delete and verify that object was removed - KEYCLOAK_OPERATOR_PROVISIONER.keycloakUsersClient().withName(name).withPropagationPolicy(DeletionPropagation.FOREGROUND) + KEYCLOAK_OPERATOR_PROVISIONER.keycloakUserClient().withName(name).withPropagationPolicy(DeletionPropagation.FOREGROUND) .delete(); - new SimpleWaiter(() -> KEYCLOAK_OPERATOR_PROVISIONER.keycloakUsersClient().list().getItems().size() == 0) + new SimpleWaiter(() -> KEYCLOAK_OPERATOR_PROVISIONER.keycloakUserClient().list().getItems().size() == 0) .level(Level.DEBUG) .waitFor(); } private void verifyRealm(KeycloakRealm keycloakRealm) { // create and verify that object exists - KEYCLOAK_OPERATOR_PROVISIONER.keycloakRealmsClient().createOrReplace(keycloakRealm); - new SimpleWaiter(() -> 
KEYCLOAK_OPERATOR_PROVISIONER.keycloakRealmsClient().list().getItems().size() == 1) + KEYCLOAK_OPERATOR_PROVISIONER.keycloakRealmClient().createOrReplace(keycloakRealm); + new SimpleWaiter(() -> KEYCLOAK_OPERATOR_PROVISIONER.keycloakRealmClient().list().getItems().size() == 1) .level(Level.DEBUG) .waitFor(); Assertions.assertEquals(keycloakRealm.getSpec(), KEYCLOAK_OPERATOR_PROVISIONER.keycloakRealm(name).get().getSpec()); // delete and verify that object was removed - KEYCLOAK_OPERATOR_PROVISIONER.keycloakRealmsClient().withName(name) + KEYCLOAK_OPERATOR_PROVISIONER.keycloakRealmClient().withName(name) .withPropagationPolicy(DeletionPropagation.FOREGROUND) .delete(); - new SimpleWaiter(() -> KEYCLOAK_OPERATOR_PROVISIONER.keycloakRealmsClient().list().getItems().size() == 0) + new SimpleWaiter(() -> KEYCLOAK_OPERATOR_PROVISIONER.keycloakRealmClient().list().getItems().size() == 0) .level(Level.DEBUG) .waitFor(); } private void verifyKeycloak(Keycloak keycloak, boolean waitForPods) { // create and verify that object exists - KEYCLOAK_OPERATOR_PROVISIONER.keycloaksClient().createOrReplace(keycloak); + KEYCLOAK_OPERATOR_PROVISIONER.keycloakClient().createOrReplace(keycloak); KEYCLOAK_OPERATOR_PROVISIONER.waitFor(keycloak); // two pods expected keycloak-0 and keycloak-postgresql-*, keycloak-0 won't start unless keycloak-postgresql-* is ready if (waitForPods) { OpenShiftWaiters.get(OpenShifts.master(), () -> false) .areExactlyNPodsReady(2, "app", keycloak.getKind().toLowerCase()).level(Level.DEBUG).waitFor(); - log.debug(KEYCLOAK_OPERATOR_PROVISIONER.keycloaksClient().withName(name).get().getStatus().toString()); + log.debug(KEYCLOAK_OPERATOR_PROVISIONER.keycloakClient().withName(name).get().getStatus().toString()); } Assertions.assertEquals(keycloak.getSpec(), - KEYCLOAK_OPERATOR_PROVISIONER.keycloaksClient().withName(name).get().getSpec()); + KEYCLOAK_OPERATOR_PROVISIONER.keycloakClient().withName(name).get().getSpec()); // delete and verify that object was removed - KEYCLOAK_OPERATOR_PROVISIONER.keycloaksClient().withName(name).withPropagationPolicy(DeletionPropagation.FOREGROUND) + KEYCLOAK_OPERATOR_PROVISIONER.keycloakClient().withName(name).withPropagationPolicy(DeletionPropagation.FOREGROUND) .delete(); - new SimpleWaiter(() -> KEYCLOAK_OPERATOR_PROVISIONER.keycloaksClient().list().getItems().size() == 0).level(Level.DEBUG) + new SimpleWaiter(() -> KEYCLOAK_OPERATOR_PROVISIONER.keycloakClient().list().getItems().size() == 0).level(Level.DEBUG) .waitFor(); if (waitForPods) { OpenShiftWaiters.get(OpenShifts.master(), () -> false) @@ -693,31 +699,31 @@ private void verifyKeycloak(Keycloak keycloak, boolean waitForPods) { private void verifyBackup(KeycloakBackup keycloakBackup) { // create and verify that object exists - KEYCLOAK_OPERATOR_PROVISIONER.keycloakBackupsClient().createOrReplace(keycloakBackup); - new SimpleWaiter(() -> KEYCLOAK_OPERATOR_PROVISIONER.keycloakBackupsClient().list().getItems().size() == 1) + KEYCLOAK_OPERATOR_PROVISIONER.keycloakBackupClient().createOrReplace(keycloakBackup); + new SimpleWaiter(() -> KEYCLOAK_OPERATOR_PROVISIONER.keycloakBackupClient().list().getItems().size() == 1) .level(Level.DEBUG).waitFor(); Assertions.assertEquals(keycloakBackup.getSpec(), KEYCLOAK_OPERATOR_PROVISIONER.keycloakBackup(name).get().getSpec()); // delete and verify that object was removed - KEYCLOAK_OPERATOR_PROVISIONER.keycloakBackupsClient().withName(name) + KEYCLOAK_OPERATOR_PROVISIONER.keycloakBackupClient().withName(name) .withPropagationPolicy(DeletionPropagation.FOREGROUND) 
.delete(); - new SimpleWaiter(() -> KEYCLOAK_OPERATOR_PROVISIONER.keycloakBackupsClient().list().getItems().size() == 0) + new SimpleWaiter(() -> KEYCLOAK_OPERATOR_PROVISIONER.keycloakBackupClient().list().getItems().size() == 0) .level(Level.DEBUG).waitFor(); } private void verifyClient(KeycloakClient keycloakClient) { // create and verify that object exists - KEYCLOAK_OPERATOR_PROVISIONER.keycloakClientsClient().createOrReplace(keycloakClient); - new SimpleWaiter(() -> KEYCLOAK_OPERATOR_PROVISIONER.keycloakClientsClient().list().getItems().size() == 1) + KEYCLOAK_OPERATOR_PROVISIONER.keycloakClientClient().createOrReplace(keycloakClient); + new SimpleWaiter(() -> KEYCLOAK_OPERATOR_PROVISIONER.keycloakClientClient().list().getItems().size() == 1) .level(Level.DEBUG).waitFor(); Assertions.assertEquals(keycloakClient.getSpec(), KEYCLOAK_OPERATOR_PROVISIONER.keycloakClient(name).get().getSpec()); // delete and verify that object was removed - KEYCLOAK_OPERATOR_PROVISIONER.keycloakClientsClient().withName(name) + KEYCLOAK_OPERATOR_PROVISIONER.keycloakClientClient().withName(name) .withPropagationPolicy(DeletionPropagation.FOREGROUND) .delete(); - new SimpleWaiter(() -> KEYCLOAK_OPERATOR_PROVISIONER.keycloakUsersClient().list().getItems().size() == 0) + new SimpleWaiter(() -> KEYCLOAK_OPERATOR_PROVISIONER.keycloakUserClient().list().getItems().size() == 0) .level(Level.DEBUG) .waitFor(); } diff --git a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/RhSsoTemplateTestCase.java b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/RhSsoTemplateTestCase.java index c578fae6c..0867501bc 100644 --- a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/RhSsoTemplateTestCase.java +++ b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/RhSsoTemplateTestCase.java @@ -4,6 +4,8 @@ import org.jboss.intersmash.application.openshift.RhSsoTemplateOpenShiftApplication; import org.jboss.intersmash.provision.openshift.RhSsoTemplateOpenShiftProvisioner; import org.jboss.intersmash.testsuite.junit5.categories.NotForCommunityExecutionProfile; +import org.jboss.intersmash.testsuite.junit5.categories.OpenShiftTest; +import org.jboss.intersmash.testsuite.openshift.ProjectCreationCapable; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; @@ -15,7 +17,8 @@ @CleanBeforeAll @NotForCommunityExecutionProfile @Deprecated(since = "0.0.2") -public class RhSsoTemplateTestCase { +@OpenShiftTest +public class RhSsoTemplateTestCase implements ProjectCreationCapable { private static final OpenShift openShift = OpenShifts.master(); private static final RhSsoTemplateOpenShiftApplication application = OpenShiftProvisionerTestBase.getHttpsRhSso(); private static final RhSsoTemplateOpenShiftProvisioner provisioner = new RhSsoTemplateOpenShiftProvisioner(application); diff --git a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/WildflyBootableJarTestCase.java b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/WildflyBootableJarTestCase.java index 070fbaa82..21be0e847 100644 --- a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/WildflyBootableJarTestCase.java +++ 
b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/WildflyBootableJarTestCase.java @@ -19,7 +19,9 @@ import org.jboss.intersmash.application.openshift.BootableJarOpenShiftApplication; import org.jboss.intersmash.provision.openshift.BootableJarImageOpenShiftProvisioner; import org.jboss.intersmash.provision.openshift.WildflyBootableJarImageOpenShiftProvisioner; +import org.jboss.intersmash.testsuite.junit5.categories.OpenShiftTest; import org.jboss.intersmash.testsuite.junit5.categories.wildfly.RequiresBootableJarDistribution; +import org.jboss.intersmash.testsuite.openshift.ProjectCreationCapable; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; @@ -31,7 +33,8 @@ @CleanBeforeAll @RequiresBootableJarDistribution -public class WildflyBootableJarTestCase { +@OpenShiftTest +public class WildflyBootableJarTestCase implements ProjectCreationCapable { private static final OpenShift openShift = OpenShifts.master(); private static final BootableJarOpenShiftApplication application = OpenShiftProvisionerTestBase .getWildflyBootableJarOpenShiftApplication(); diff --git a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/WildflyHelmChartExistingValuesProvisionerTest.java b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/WildflyHelmChartExistingValuesProvisionerTest.java index e4b55d891..963a16b54 100644 --- a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/WildflyHelmChartExistingValuesProvisionerTest.java +++ b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/WildflyHelmChartExistingValuesProvisionerTest.java @@ -18,6 +18,8 @@ import org.jboss.intersmash.IntersmashConfig; import org.jboss.intersmash.provision.helm.HelmChartOpenShiftProvisioner; import org.jboss.intersmash.provision.helm.wildfly.WildflyHelmChartOpenShiftProvisioner; +import org.jboss.intersmash.testsuite.junit5.categories.OpenShiftTest; +import org.jboss.intersmash.testsuite.openshift.ProjectCreationCapable; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; @@ -29,7 +31,8 @@ * a Helm values file AND {@code --set} overrides */ @CleanBeforeAll -public class WildflyHelmChartExistingValuesProvisionerTest { +@OpenShiftTest +public class WildflyHelmChartExistingValuesProvisionerTest implements ProjectCreationCapable { @Test public void basicProvisioningTest() { diff --git a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/WildflyHelmChartProvisionerTest.java b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/WildflyHelmChartProvisionerTest.java index 64a6d49fc..453d7d51e 100644 --- a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/WildflyHelmChartProvisionerTest.java +++ b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/WildflyHelmChartProvisionerTest.java @@ -18,6 +18,8 @@ import org.jboss.intersmash.application.openshift.helm.WildflyHelmChartOpenShiftApplication; import org.jboss.intersmash.provision.helm.HelmChartOpenShiftProvisioner; import org.jboss.intersmash.provision.helm.wildfly.WildflyHelmChartOpenShiftProvisioner; +import org.jboss.intersmash.testsuite.junit5.categories.OpenShiftTest; +import 
org.jboss.intersmash.testsuite.openshift.ProjectCreationCapable; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; @@ -29,7 +31,8 @@ * programmatically */ @CleanBeforeAll -public class WildflyHelmChartProvisionerTest { +@OpenShiftTest +public class WildflyHelmChartProvisionerTest implements ProjectCreationCapable { @Test public void basicProvisioningTest() { diff --git a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/WildflyImageProvisionerTestCase.java b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/WildflyImageProvisionerTestCase.java index 0c45af72a..5da2cb2bc 100644 --- a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/WildflyImageProvisionerTestCase.java +++ b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/WildflyImageProvisionerTestCase.java @@ -25,6 +25,8 @@ import org.assertj.core.api.SoftAssertions; import org.jboss.intersmash.application.openshift.WildflyImageOpenShiftApplication; import org.jboss.intersmash.provision.openshift.WildflyImageOpenShiftProvisioner; +import org.jboss.intersmash.testsuite.junit5.categories.OpenShiftTest; +import org.jboss.intersmash.testsuite.openshift.ProjectCreationCapable; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; @@ -45,7 +47,8 @@ * inside the builder image. */ @CleanBeforeAll -public class WildflyImageProvisionerTestCase { +@OpenShiftTest +public class WildflyImageProvisionerTestCase implements ProjectCreationCapable { private static final OpenShift openShift = OpenShifts.master(); private static final WildflyImageOpenShiftApplication application = OpenShiftProvisionerTestBase .getWildflyOpenShiftImageApplication(); diff --git a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/WildflyMavenProjectTestCase.java b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/WildflyMavenProjectTestCase.java index 23dda35ec..b6ab7b740 100644 --- a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/WildflyMavenProjectTestCase.java +++ b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/WildflyMavenProjectTestCase.java @@ -24,6 +24,8 @@ import org.assertj.core.api.Assertions; import org.assertj.core.api.SoftAssertions; import org.jboss.intersmash.provision.openshift.WildflyImageOpenShiftProvisioner; +import org.jboss.intersmash.testsuite.junit5.categories.OpenShiftTest; +import org.jboss.intersmash.testsuite.openshift.ProjectCreationCapable; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; @@ -44,7 +46,8 @@ * inside the builder image via the s2i binary build process. 
*/ @CleanBeforeAll -public class WildflyMavenProjectTestCase { +@OpenShiftTest +public class WildflyMavenProjectTestCase implements ProjectCreationCapable { private static final OpenShift openShift = OpenShifts.master(); private static final OpenShiftProvisionerTestBase.StaticWildflyImageOpenShiftDeploymentApplication application = OpenShiftProvisionerTestBase .getWildflyOpenShiftLocalBinarySourceApplication(); diff --git a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/WildflyOperatorProvisionerTest.java b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/WildflyOpenShiftOperatorProvisionerTest.java similarity index 65% rename from testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/WildflyOperatorProvisionerTest.java rename to testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/WildflyOpenShiftOperatorProvisionerTest.java index 7187d1dbf..4218ee6f9 100644 --- a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/WildflyOperatorProvisionerTest.java +++ b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/WildflyOpenShiftOperatorProvisionerTest.java @@ -17,13 +17,16 @@ import java.io.IOException; -import org.jboss.intersmash.application.openshift.WildflyOperatorApplication; +import org.jboss.intersmash.application.operator.WildflyOperatorApplication; import org.jboss.intersmash.junit5.IntersmashExtension; -import org.jboss.intersmash.provision.openshift.WildflyOperatorProvisioner; -import org.jboss.intersmash.provision.openshift.operator.OperatorProvisioner; -import org.jboss.intersmash.provision.openshift.operator.resources.OperatorGroup; +import org.jboss.intersmash.provision.olm.OperatorGroup; +import org.jboss.intersmash.provision.openshift.WildflyOpenShiftOperatorProvisioner; import org.jboss.intersmash.provision.openshift.operator.wildfly.WildFlyServerBuilder; +import org.jboss.intersmash.provision.operator.OperatorProvisioner; +import org.jboss.intersmash.provision.operator.WildflyOperatorProvisioner; import org.jboss.intersmash.testsuite.IntersmashTestsuiteProperties; +import org.jboss.intersmash.testsuite.junit5.categories.OpenShiftTest; +import org.jboss.intersmash.testsuite.openshift.ProjectCreationCapable; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Assertions; @@ -31,17 +34,19 @@ import org.junit.jupiter.api.Test; import org.wildfly.v1alpha1.WildFlyServer; +import cz.xtf.core.config.OpenShiftConfig; import cz.xtf.core.openshift.OpenShifts; import cz.xtf.junit5.annotations.CleanBeforeAll; import io.fabric8.kubernetes.api.model.DeletionPropagation; @CleanBeforeAll -public class WildflyOperatorProvisionerTest { +@OpenShiftTest +public class WildflyOpenShiftOperatorProvisionerTest implements ProjectCreationCapable { private static final String NAME = "wildfly-operator-test"; - private static final WildflyOperatorProvisioner WILDFLY_OPERATOR_PROVISIONER = initializeOperatorProvisioner(); + private static final WildflyOpenShiftOperatorProvisioner WILDFLY_OPERATOR_PROVISIONER = initializeOperatorProvisioner(); - private static WildflyOperatorProvisioner initializeOperatorProvisioner() { - WildflyOperatorProvisioner operatorProvisioner = new WildflyOperatorProvisioner( + private static WildflyOpenShiftOperatorProvisioner initializeOperatorProvisioner() { + 
WildflyOpenShiftOperatorProvisioner operatorProvisioner = new WildflyOpenShiftOperatorProvisioner( new WildflyOperatorApplication() { @Override @@ -73,9 +78,10 @@ public String getName() { @BeforeAll public static void createOperatorGroup() throws IOException { WILDFLY_OPERATOR_PROVISIONER.configure(); - IntersmashExtension.operatorCleanup(); + IntersmashExtension.operatorCleanup(false, true); // create operator group - this should be done by InteropExtension - OpenShifts.adminBinary().execute("apply", "-f", OperatorGroup.SINGLE_NAMESPACE.save().getAbsolutePath()); + OpenShifts.adminBinary().execute("apply", "-f", + new OperatorGroup(OpenShiftConfig.namespace()).save().getAbsolutePath()); // clean any leftovers WILDFLY_OPERATOR_PROVISIONER.unsubscribe(); } @@ -88,9 +94,9 @@ public static void removeOperatorGroup() { @AfterEach public void customResourcesCleanup() { - WILDFLY_OPERATOR_PROVISIONER.wildflyServersClient().list().getItems().stream() + WILDFLY_OPERATOR_PROVISIONER.wildflyServerClient().list().getItems().stream() .map(resource -> resource.getMetadata().getName()).forEach(name -> WILDFLY_OPERATOR_PROVISIONER - .wildflyServersClient().withName(name).withPropagationPolicy(DeletionPropagation.FOREGROUND).delete()); + .wildflyServerClient().withName(name).withPropagationPolicy(DeletionPropagation.FOREGROUND).delete()); } /** @@ -98,11 +104,11 @@ public void customResourcesCleanup() { * * Does subscribe/unsubscribe on its own, so no need to call explicitly here * - * This test adds no further checks here because {@link WildflyOperatorProvisioner#undeploy} does the only useful + * This test adds no further checks here because {@link org.jboss.intersmash.provision.operator.WildflyOperatorProvisioner#undeploy} does the only useful * thing, i.e. check for 0 app pods, immediately after deleting, then unsubscribe the operator. - * Checking through {@link WildflyOperatorProvisioner#getPods()} here would intermittently find a resumed app pod + * Checking through {@link org.jboss.intersmash.provision.operator.WildflyOperatorProvisioner#getPods()} here would intermittently find a resumed app pod * before unsubscribe (which doesn't wait ATM) finishes. 
- * Basically there could be room for revisiting {@link WildflyOperatorProvisioner#undeploy} and + * Basically there could be room for revisiting {@link org.jboss.intersmash.provision.operator.WildflyOperatorProvisioner#undeploy} and * {@link OperatorProvisioner#unsubscribe()} */ @Test @@ -110,13 +116,13 @@ public void basicProvisioningTest() { WILDFLY_OPERATOR_PROVISIONER.deploy(); try { Assertions.assertEquals(1, WILDFLY_OPERATOR_PROVISIONER.getPods().size(), - "Unexpected number of cluster operator pods for '" + WILDFLY_OPERATOR_PROVISIONER.getOperatorId() + "Unexpected number of cluster operator pods for '" + WildflyOperatorProvisioner.OPERATOR_ID + "' after deploy"); int scaledNum = WILDFLY_OPERATOR_PROVISIONER.getApplication().getWildflyServer().getSpec().getReplicas() + 1; WILDFLY_OPERATOR_PROVISIONER.scale(scaledNum, true); Assertions.assertEquals(scaledNum, WILDFLY_OPERATOR_PROVISIONER.getPods().size(), - "Unexpected number of cluster operator pods for '" + WILDFLY_OPERATOR_PROVISIONER.getOperatorId() + "Unexpected number of cluster operator pods for '" + WildflyOperatorProvisioner.OPERATOR_ID + "' after scaling"); } finally { WILDFLY_OPERATOR_PROVISIONER.undeploy(); diff --git a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/WildflyTargetServerTestCase.java b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/WildflyTargetServerTestCase.java index 522e62bd8..6531f664f 100644 --- a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/WildflyTargetServerTestCase.java +++ b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/WildflyTargetServerTestCase.java @@ -25,6 +25,8 @@ import org.assertj.core.api.SoftAssertions; import org.jboss.intersmash.application.openshift.WildflyImageOpenShiftApplication; import org.jboss.intersmash.provision.openshift.WildflyImageOpenShiftProvisioner; +import org.jboss.intersmash.testsuite.junit5.categories.OpenShiftTest; +import org.jboss.intersmash.testsuite.openshift.ProjectCreationCapable; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; @@ -46,7 +48,8 @@ * which are compatible with a WildFly/EAP s2i v2 binary build. 
*/ @CleanBeforeAll -public class WildflyTargetServerTestCase { +@OpenShiftTest +public class WildflyTargetServerTestCase implements ProjectCreationCapable { private static final OpenShift openShift = OpenShifts.master(); private static final WildflyImageOpenShiftApplication application = OpenShiftProvisionerTestBase .getWildflyOpenShiftLocalBinaryTargetServerApplication(); diff --git a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/operator/OperatorSubscriptionTestCase.java b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/operator/OperatorSubscriptionTestCase.java index 8cf8b8205..7495fedf2 100644 --- a/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/operator/OperatorSubscriptionTestCase.java +++ b/testsuite/integration-tests/src/test/java/org/jboss/intersmash/testsuite/provision/openshift/operator/OperatorSubscriptionTestCase.java @@ -20,23 +20,17 @@ import java.io.IOException; import java.util.stream.Stream; -import org.jboss.intersmash.application.openshift.ActiveMQOperatorApplication; -import org.jboss.intersmash.application.openshift.HyperfoilOperatorApplication; -import org.jboss.intersmash.application.openshift.InfinispanOperatorApplication; -import org.jboss.intersmash.application.openshift.KafkaOperatorApplication; -import org.jboss.intersmash.application.openshift.KeycloakOperatorApplication; -import org.jboss.intersmash.application.openshift.RhSsoOperatorApplication; -import org.jboss.intersmash.application.openshift.WildflyOperatorApplication; +import org.jboss.intersmash.application.operator.ActiveMQOperatorApplication; +import org.jboss.intersmash.application.operator.HyperfoilOperatorApplication; +import org.jboss.intersmash.application.operator.InfinispanOperatorApplication; +import org.jboss.intersmash.application.operator.KafkaOperatorApplication; +import org.jboss.intersmash.application.operator.KeycloakOperatorApplication; +import org.jboss.intersmash.application.operator.RhSsoOperatorApplication; +import org.jboss.intersmash.application.operator.WildflyOperatorApplication; import org.jboss.intersmash.junit5.IntersmashExtension; -import org.jboss.intersmash.provision.openshift.ActiveMQOperatorProvisioner; -import org.jboss.intersmash.provision.openshift.HyperfoilOperatorProvisioner; -import org.jboss.intersmash.provision.openshift.InfinispanOperatorProvisioner; -import org.jboss.intersmash.provision.openshift.KafkaOperatorProvisioner; -import org.jboss.intersmash.provision.openshift.KeycloakOperatorProvisioner; -import org.jboss.intersmash.provision.openshift.RhSsoOperatorProvisioner; -import org.jboss.intersmash.provision.openshift.WildflyOperatorProvisioner; -import org.jboss.intersmash.provision.openshift.operator.OperatorProvisioner; -import org.jboss.intersmash.provision.openshift.operator.resources.OperatorGroup; +import org.jboss.intersmash.provision.olm.OperatorGroup; +import org.jboss.intersmash.provision.openshift.*; +import org.jboss.intersmash.provision.operator.OperatorProvisioner; import org.jboss.intersmash.testsuite.IntersmashTestsuiteProperties; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.Assertions; @@ -44,6 +38,7 @@ import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.MethodSource; +import cz.xtf.core.config.OpenShiftConfig; import cz.xtf.core.openshift.OpenShifts; import cz.xtf.junit5.annotations.CleanBeforeAll; import 
io.fabric8.kubernetes.api.model.Pod; @@ -56,17 +51,17 @@ @CleanBeforeAll public class OperatorSubscriptionTestCase { private static final Stream COMMON_PROVISIONERS = Stream.of( - new ActiveMQOperatorProvisioner(mock(ActiveMQOperatorApplication.class)), - new InfinispanOperatorProvisioner(mock(InfinispanOperatorApplication.class)), - new KafkaOperatorProvisioner(mock(KafkaOperatorApplication.class)), - new WildflyOperatorProvisioner(mock(WildflyOperatorApplication.class))); + new ActiveMQOpenShiftOperatorProvisioner(mock(ActiveMQOperatorApplication.class)), + new InfinispanOpenShiftOperatorProvisioner(mock(InfinispanOperatorApplication.class)), + new KafkaOpenShiftOperatorProvisioner(mock(KafkaOperatorApplication.class)), + new WildflyOpenShiftOperatorProvisioner(mock(WildflyOperatorApplication.class))); private static final Stream COMMUNITY_ONLY_PROVISIONERS = Stream.of( - new HyperfoilOperatorProvisioner(mock(HyperfoilOperatorApplication.class)), - new KeycloakOperatorProvisioner(mock(KeycloakOperatorApplication.class))); + new HyperfoilOpenShiftOperatorProvisioner(mock(HyperfoilOperatorApplication.class)), + new KeycloakOpenShiftOperatorProvisioner(mock(KeycloakOperatorApplication.class))); private static final Stream PRODUCT_ONLY_PROVISIONERS = Stream.of( - new RhSsoOperatorProvisioner(mock(RhSsoOperatorApplication.class))); + new RhSsoOpenShiftOperatorProvisioner(mock(RhSsoOperatorApplication.class))); private static Stream provisionerProvider() { if (IntersmashTestsuiteProperties.isCommunityTestExecutionProfileEnabled()) { @@ -79,15 +74,16 @@ private static Stream provisionerProvider() { @BeforeAll public static void createOperatorGroup() throws IOException { - IntersmashExtension.operatorCleanup(); + IntersmashExtension.operatorCleanup(false, true); // create operator group - this should be done by InteropExtension - OpenShifts.adminBinary().execute("apply", "-f", OperatorGroup.SINGLE_NAMESPACE.save().getAbsolutePath()); + OpenShifts.adminBinary().execute("apply", "-f", + new OperatorGroup(OpenShiftConfig.namespace()).save().getAbsolutePath()); } @AfterAll public static void removeOperatorGroup() { // remove operator group - this should be done by InteropExtension - IntersmashExtension.operatorCleanup(); + IntersmashExtension.operatorCleanup(false, true); } @ParameterizedTest(name = "{displayName}#class({0})") diff --git a/testsuite/integration-tests/src/test/resources/META-INF/services/org.junit.platform.launcher.TestExecutionListener b/testsuite/integration-tests/src/test/resources/META-INF/services/org.junit.platform.launcher.TestExecutionListener index 63b7383d3..1b8abbbb0 100644 --- a/testsuite/integration-tests/src/test/resources/META-INF/services/org.junit.platform.launcher.TestExecutionListener +++ b/testsuite/integration-tests/src/test/resources/META-INF/services/org.junit.platform.launcher.TestExecutionListener @@ -1,2 +1 @@ cz.xtf.junit5.listeners.TestExecutionLogger -cz.xtf.junit5.listeners.ProjectCreator
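A recurring change in the hunks above replaces OperatorGroup.SINGLE_NAMESPACE with an OperatorGroup scoped to OpenShiftConfig.namespace() and switches to the two-argument IntersmashExtension.operatorCleanup(false, true) call. A minimal sketch of that setup and teardown pattern follows; the class name is hypothetical, the semantics of the two boolean flags are not documented in the patch, and only calls that appear in the hunks above are used.

import java.io.IOException;

import org.jboss.intersmash.junit5.IntersmashExtension;
import org.jboss.intersmash.provision.olm.OperatorGroup;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;

import cz.xtf.core.config.OpenShiftConfig;
import cz.xtf.core.openshift.OpenShifts;

// Hypothetical test skeleton, for illustration only.
public class OperatorGroupSetupSketch {

    @BeforeAll
    public static void createOperatorGroup() throws IOException {
        // clean any operator leftovers before (re)creating the operator group
        IntersmashExtension.operatorCleanup(false, true);
        // create an operator group scoped to the namespace under test - this should be done by the extension
        OpenShifts.adminBinary().execute("apply", "-f",
                new OperatorGroup(OpenShiftConfig.namespace()).save().getAbsolutePath());
    }

    @AfterAll
    public static void removeOperatorGroup() {
        // remove operator leftovers again once the test class is done
        IntersmashExtension.operatorCleanup(false, true);
    }
}

In the provisioner test classes above the same block is additionally preceded by a call to the provisioner's configure() and followed by unsubscribe(), as shown in the hunks.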
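The test classes touched above also gain the @OpenShiftTest category annotation and implement ProjectCreationCapable, which is assumed here to be a marker interface since the classes implement it without adding methods. A new test case adopting the same convention would look roughly like the sketch below; the class name and the test body are placeholders, not part of the patch.

import org.jboss.intersmash.testsuite.junit5.categories.OpenShiftTest;
import org.jboss.intersmash.testsuite.openshift.ProjectCreationCapable;
import org.junit.jupiter.api.Test;

import cz.xtf.core.openshift.OpenShift;
import cz.xtf.core.openshift.OpenShifts;
import cz.xtf.junit5.annotations.CleanBeforeAll;

@CleanBeforeAll
@OpenShiftTest
public class ExampleOpenShiftTestCase implements ProjectCreationCapable {

    private static final OpenShift openShift = OpenShifts.master();

    @Test
    public void basicProvisioningTest() {
        // deploy, verify and undeploy through the relevant provisioner, as in the test cases above
    }
}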
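The RH-SSO hunks rename the custom-resource client accessors to singular form: keycloakBackupClient(), keycloakClientClient(), keycloakClient(), keycloakRealmClient() and keycloakUserClient(). Their shared list-and-delete cleanup can be sketched as a small helper; the helper class and method name are made up for illustration, while the accessor and fabric8 calls are taken from the hunks above.

import org.jboss.intersmash.provision.openshift.RhSsoOpenShiftOperatorProvisioner;

import io.fabric8.kubernetes.api.model.DeletionPropagation;

// Hypothetical helper, for illustration only.
public final class RhSsoCustomResourceCleanup {

    static void deleteAllRealms(RhSsoOpenShiftOperatorProvisioner provisioner) {
        // the same pattern applies to keycloakBackupClient(), keycloakClientClient(),
        // keycloakClient() and keycloakUserClient()
        provisioner.keycloakRealmClient().list().getItems().stream()
                .map(resource -> resource.getMetadata().getName())
                .forEach(name -> provisioner.keycloakRealmClient().withName(name)
                        .withPropagationPolicy(DeletionPropagation.FOREGROUND).delete());
    }
}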