Skip to content

Commit

Permalink
Merge pull request #50 from tchiotludo/dev
Browse files Browse the repository at this point in the history
Release 0.6.0
  • Loading branch information
tchiotludo authored Apr 23, 2019
2 parents a185d0c + 852a4b0 commit 75d5fd5
Show file tree
Hide file tree
Showing 34 changed files with 1,263 additions and 111 deletions.
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@ logs/*

### Kafka HQ ###
src/**/*-dev.yml
connects-plugins/

## Docker
.env
Expand Down
16 changes: 11 additions & 5 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
# KafkaHQ
[![Build Status](https://travis-ci.org/tchiotludo/kafkahq.svg?branch=master)](https://travis-ci.org/tchiotludo/kafkahq)

> Kafka GUI for topics, topics data, consumers group, schema registry and more...
> Kafka GUI for topics, topics data, consumer groups, schema registry, connect and more...

![preview](https://user-images.githubusercontent.com/2064609/50536651-e050de80-0b56-11e9-816f-9d3aca3f1c88.gif)
Expand Down Expand Up @@ -40,10 +40,12 @@
- Update consumer group offsets to start / end / timestamp
- **Schema Registry**
- List schema
- Create a schema
- Update a schema
- Delete a schema
- Create / Update / Delete a schema
- View and delete individual schema version
- **Connect**
- List connect definitions
- Create / Update / Delete a definition
- Pause / Resume / Restart a definition or a task
- **Nodes**
- List
- Configurations view
Expand Down Expand Up @@ -94,6 +96,7 @@ file example can be found here :[application.example.yml](application.example.ym
* `key`: must be a URL-friendly string that identifies your cluster (`my-cluster-1` and `my-cluster-2` in the example above)
* `properties`: all the configurations found on [Kafka consumer documentation](https://kafka.apache.org/documentation/#consumerconfigs). Most important is `bootstrap.servers` that is a list of host:port of your Kafka brokers.
* `schema-registry`: the schema registry url *(optional)*
* `connect`: connect url *(optional)*

### Security
* `kafkahq.security.default-roles`: Roles available to every user, even unauthenticated users. Available roles are:
Expand Down Expand Up @@ -128,6 +131,7 @@ kafkahq:
- topic/data/read
- group/read
- registry/read
- connect/read
```
Expand All @@ -138,7 +142,9 @@ kafkahq:
* `roles`: Role for current users

### Server
* `kafkahq.server.base-path`: if behind a reverse proxy, path to kafkahq with trailing slash
* `kafkahq.server.base-path`: if behind a reverse proxy, path to kafkahq with trailing slash (optional). Example:
kafkahq is behind a reverse proxy with url http://my-server/kafkahq, set base-path: "/kafkahq/". Not needed if you're
behind a reverse proxy with subdomain http://kafkahq.my-server/

### Kafka admin / producer / consumer default properties
* `kafkahq.clients-defaults.{{admin|producer|consumer}}.properties`: if behind a reverse proxy, path to kafkahq with
Expand Down
12 changes: 9 additions & 3 deletions application.example.yml
Original file line number Diff line number Diff line change
Expand Up @@ -16,12 +16,13 @@ kafkahq:

# list of kafka cluster available for kafkahq
connections:
my-cluster-1: # url friendly name for the cluster
my-cluster-plain-text: # url friendly name for the cluster
properties: # standard kafka properties (optional)
bootstrap.servers: "kafka:9092"
schema-registry: "http://schema-registry:8085" # schema registry url (optional)
connect: "http://connect:8083" # connect url (optional)

my-cluster-2:
my-cluster-ssl:
properties:
bootstrap.servers: "kafka:9093"
security.protocol: SSL
Expand All @@ -31,7 +32,7 @@ kafkahq:
ssl.keystore.password: password
ssl.key.password: password

my-cluster-3:
my-cluster-sasl:
properties:
bootstrap.servers: "kafka:9094"
security.protocol: SASL_SSL
Expand Down Expand Up @@ -69,6 +70,11 @@ kafkahq:
- registry/update
- registry/delete
- registry/version/delete
- connect/read
- connect/insert
- connect/update
- connect/delete
- connect/state/update

# Basic auth configuration
basic-auth:
Expand Down
23 changes: 16 additions & 7 deletions build.gradle
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
buildscript {
ext {
micronautVersion = "1.1.0"
micronautVersion = "1.1.+"
confluentVersion = "5.1.+"
kafkaVersion = "2.2.+"
}
Expand Down Expand Up @@ -61,13 +61,14 @@ idea {
}
}

configurations {
implementation {
exclude group: 'org.slf4j', module: 'slf4j-log4j12'
exclude group: 'log4j', module: 'log4j'
}
configurations.all {
exclude group: 'org.slf4j', module: 'slf4j-log4j12'
exclude group: 'org.apache.logging.log4j', module: 'log4j-slf4j-impl'
exclude group: 'org.apache.logging.log4j', module: 'log4j-core'
exclude group: 'org.apache.logging.log4j', module: 'log4j-api'
}


/**********************************************************************************************************************\
* Micronaut
**********************************************************************************************************************/
Expand Down Expand Up @@ -125,6 +126,11 @@ dependencies {
compile group: "io.confluent", name: "kafka-schema-registry-client", version: confluentVersion
compile group: "io.confluent", name: "kafka-avro-serializer", version: confluentVersion
compile group: 'org.apache.avro', name: 'avro', version: '1.8.2'
compile group: 'org.sourcelab', name: 'kafka-connect-client', version: '2.0.+'

// log
compile group: 'org.slf4j', name: 'jul-to-slf4j', version: '1.8.+'
compile group: 'org.slf4j', name: 'log4j-over-slf4j', version: '1.7.26'

// utils
compileOnly group: "org.projectlombok", name: "lombok", version: "1.18.+"
Expand All @@ -143,9 +149,12 @@ dependencies {
testCompile "org.apache.kafka:kafka_2.12:" + kafkaVersion
testCompile "io.confluent:kafka-schema-registry:" + confluentVersion
testCompile "io.confluent:kafka-schema-registry:" + confluentVersion + ":tests"
testCompile "org.apache.kafka:connect-runtime:" + kafkaVersion
testCompile "org.apache.kafka:connect-file:" + kafkaVersion

testCompile group: 'org.apache.kafka', name: 'kafka-streams', version: kafkaVersion
testCompile group: "io.confluent", name: "kafka-streams-avro-serde", version: confluentVersion
testCompile group: 'org.slf4j', name: 'jul-to-slf4j', version: '1.8.+'
testCompile "io.confluent:kafka-connect-avro-converter:" + confluentVersion
testCompile group: 'commons-codec', name: 'commons-codec', version: '1.11'
testImplementation 'org.hamcrest:hamcrest:2.1'
testImplementation 'org.hamcrest:hamcrest-library:2.1'
Expand Down
7 changes: 5 additions & 2 deletions docker-compose-dev.yml
Original file line number Diff line number Diff line change
Expand Up @@ -95,13 +95,16 @@ services:
CONNECT_GROUP_ID: "kafka-connect"
CONNECT_KEY_CONVERTER_SCHEMAS_ENABLE: "true"
CONNECT_KEY_CONVERTER: io.confluent.connect.avro.AvroConverter
CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL: http://schema-registry:8081
CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL: http://schema-registry:8085
CONNECT_VALUE_CONVERTER_SCHEMAS_ENABLE: "true"
CONNECT_VALUE_CONVERTER: io.confluent.connect.avro.AvroConverter
CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: http://schema-registry:8081
CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: http://schema-registry:8085
CONNECT_INTERNAL_KEY_CONVERTER: org.apache.kafka.connect.json.JsonConverter
CONNECT_INTERNAL_VALUE_CONVERTER: org.apache.kafka.connect.json.JsonConverter
CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: 1
CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: 1
CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: 1
CONNECT_PLUGIN_PATH: ' /usr/share/java/'
volumes:
- ./connects-plugins:/usr/share/java/connects-plugins

41 changes: 30 additions & 11 deletions package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions src/main/java/org/kafkahq/configs/Connection.java
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
@Getter
public class Connection extends AbstractProperties {
Optional<URL> schemaRegistry = Optional.empty();
Optional<URL> connect = Optional.empty();

public Connection(@Parameter String name) {
super(name);
Expand Down
6 changes: 6 additions & 0 deletions src/main/java/org/kafkahq/configs/Role.java
Original file line number Diff line number Diff line change
Expand Up @@ -22,4 +22,10 @@ public class Role {
public static final String ROLE_REGISTRY_UPDATE = "registry/update";
public static final String ROLE_REGISTRY_DELETE = "registry/delete";
public static final String ROLE_REGISTRY_VERSION_DELETE = "registry/version/delete";

public static final String ROLE_CONNECT_READ = "connect/read";
public static final String ROLE_CONNECT_INSERT = "connect/insert";
public static final String ROLE_CONNECT_UPDATE = "connect/update";
public static final String ROLE_CONNECT_DELETE = "connect/delete";
public static final String ROLE_CONNECT_STATE_UPDATE = "connect/state/update";
}
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,7 @@ protected Map templateData(Optional<String> cluster, Object... values) {
cluster.ifPresent(s -> {
datas.put("clusterId", s);
datas.put("registryEnabled", this.kafkaModule.getRegistryRestClient(s) != null);
datas.put("connectEnabled", this.kafkaModule.getConnectRestClient(s) != null);
});

if (applicationContext.containsBean(SecurityService.class)) {
Expand Down Expand Up @@ -129,7 +130,7 @@ private static List<String> expandRoles(List<String> roles) {
}

@SuppressWarnings("unchecked")
private List<String> getRights() {
protected List<String> getRights() {
if (!applicationContext.containsBean(SecurityService.class)) {
return expandRoles(this.defaultRoles);
}
Expand Down
Loading

0 comments on commit 75d5fd5

Please sign in to comment.