1. Installation steps

1. Install Java 11
2. Download the latest jar from the release page:
	https://github.com/tchiotludo/akhq/releases
3. Create a configuration file
   See: application.yml (section 2 below)
4. Launch the application with java -Dmicronaut.config.files=/path/to/application.yml -jar akhq.jar
	# For example:
	mkdir -p /home/azureuser/akhq_service
	cd /home/azureuser/akhq_service
	# run in the foreground
	java -Dmicronaut.config.files=./application.yml -jar akhq-0.21.0-all.jar
	# or keep it running after logout (output goes to nohup.out)
	nohup java -Dmicronaut.config.files=./application.yml -jar akhq-0.21.0-all.jar 2>&1 &
5. Open the UI in a browser: http://mytest-akhq.com:8080
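
To confirm the background process came up cleanly, a quick check like the following can be used (a sketch; it assumes the nohup launch above, whose output lands in nohup.out in the working directory):

	# check that the AKHQ process is alive
	pgrep -af 'akhq-0.21.0-all.jar'
	# follow the application log written by nohup
	tail -f /home/azureuser/akhq_service/nohup.out
	# confirm the HTTP port answers locally
	curl -s -o /dev/null -w '%{http_code}\n' http://localhost:8080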

2. application.yml

micronaut:
  security:
    enabled: true
    # LDAP authentication configuration
    ldap:
      default:
        enabled: true
        context:
          server: 'ldap://192.168.132.12:389'
#          managerDn: 'cn=mds,ou=users,dc=bmw,dc=com'
#          managerPassword: '123456'
          managerDn: ''
          managerPassword: ''
        search:
          base: "OU=My Application Account,OU=My Technical Account,OU=accounts,DC=fiend,DC=cn"
        groups:
          enabled: true
          base: "CN=Test Integration,OU=Designer,OU=Groups test,OU=Groups,OU=accounts,DC=fiend,DC=cn"
akhq:
  connections:
    azure-cluster:
      properties:
        bootstrap.servers: my-broker.test.com:9093
        security.protocol: SASL_SSL
        ssl.truststore.location: kafka-broker-truststore.jks
        ssl.truststore.type: JKS
        ssl.truststore.password: kafkatruststorepass
        ssl.endpoint.identification.algorithm: https
        sasl.jaas.config: org.apache.kafka.common.security.plain.PlainLoginModule required username="fiend" password="123456";
        sasl.mechanism: PLAIN
    aws-cluster:
      properties:
        bootstrap.servers: my-aws-broker.com:9093
        security.protocol: SASL_SSL
        ssl.truststore.location: kafka-broker-truststore.jks
        ssl.truststore.type: JKS
        ssl.truststore.password: kafkatruststorepass
        ssl.endpoint.identification.algorithm: https
        sasl.jaas.config: org.apache.kafka.common.security.plain.PlainLoginModule required username="fiend" password="123456";
        sasl.mechanism: PLAIN
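
Both connections reference kafka-broker-truststore.jks relative to the working directory. If that truststore does not exist yet, it can be built from the broker CA certificate with keytool (a sketch; ca.pem is a placeholder for your CA file, and -storepass must match ssl.truststore.password above):

	keytool -importcert -noprompt \
	  -alias kafka-ca \
	  -file ca.pem \
	  -keystore kafka-broker-truststore.jks \
	  -storetype JKS \
	  -storepass kafkatruststorepass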

3. application.example.yml

micronaut:
  security:
    enabled: true
    # LDAP authentication configuration
    ldap:
      default:
        enabled: true
        context:
          server: 'ldap://ldap.forumsys.com:389'
          managerDn: 'cn=read-only-admin,dc=example,dc=com'
          managerPassword: 'password'
        search:
          base: "dc=example,dc=com"
        groups:
          enabled: true
          base: "dc=example,dc=com"
    # OIDC authentication configuration
    oauth2:
      enabled: true
      clients:
        oidc:
          client-id: "<client-id>"
          client-secret: "<client-secret>"
          openid:
            issuer: "<issuer-url>"
    token:
      jwt:
        signatures:
          secret:
            generator:
              secret: pleasechangeme
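              # pleasechangeme is only a placeholder; a random secret can be generated with,
              # for example: openssl rand -base64 32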

  server:
    context-path: "" # if behind a reverse proxy, path to akhq without trailing slash (optional). Example: akhq is
                     # behind a reverse proxy with url http://my-server/akhq, set context-path: "/akhq".
                     # Not needed if you're behind a reverse proxy with subdomain http://akhq.my-server/
akhq:
  server:
    access-log: # Access log configuration (optional)
      enabled: true # true by default
      name: org.akhq.log.access # Logger name
      format: "[Date: {}] [Duration: {} ms] [Url: {} {}] [Status: {}] [Ip: {}] [User: {}]" # Logger format

  # default kafka properties for each clients, available for admin / producer / consumer (optional)
  clients-defaults:
    consumer:
      properties:
        isolation.level: read_committed

  # list of kafka cluster available for akhq
  connections:
    my-cluster-plain-text: # url-friendly name for the cluster (letters, numbers, _ and - are allowed; dots are not)
      properties: # standard kafka properties (optional)
        bootstrap.servers: "kafka:9092"
      schema-registry:
        url: "http://schema-registry:8085" # schema registry url (optional)
        type: "confluent" # schema registry type (optional). Supported types are "confluent" (default) or "tibco"
        # Basic Auth user / pass
        basic-auth-username: basic-auth-user
        basic-auth-password: basic-auth-pass
        properties: # standard kafka properties (optional)
          ssl.protocol: TLS
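        # With type "confluent", reachability can be checked via the standard REST API, e.g.:
        #   curl -u basic-auth-user:basic-auth-pass http://schema-registry:8085/subjects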
      connect:
        - name: connect-1
          url: "http://connect:8083"
          # Basic Auth user / pass (optional)
          basic-auth-username: basic-auth-user
          basic-auth-password: basic-auth-pass
          # ssl store configuration (optional)
          ssl-trust-store: /app/truststore.jks
          ssl-trust-store-password: trust-store-password
          ssl-key-store: /app/truststore.jks
          ssl-key-store-password: key-store-password
        - name: connect-2
          url: "http://connect:8084"
          # Basic Auth user / pass (optional)
          basic-auth-username: basic-auth-user
          basic-auth-password: basic-auth-pass
          # ssl store configuration (optional)
          ssl-trust-store: /app/truststore.jks
          ssl-trust-store-password: trust-store-password
          ssl-key-store: /app/truststore.jks
          ssl-key-store-password: key-store-password
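        # Each Connect endpoint can be checked via the Kafka Connect REST API, e.g.:
        #   curl -u basic-auth-user:basic-auth-pass http://connect:8083/connectors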
      deserialization:
        protobuf:
          # (optional) if descriptor-file properties are used
          descriptors-folder: "/app/protobuf_desc"
          topics-mapping:
            - topic-regex: "album.*"
              descriptor-file-base64: "Cs4BCgthbGJ1bS5wcm90bxIXY29tLm5ldGNyYWNrZXIucHJvdG9idWYidwoFQWxidW0SFAoFdGl0bGUYASABKAlSBXRpdGxlEhYKBmFydGlzdBgCIAMoCVIGYXJ0aXN0EiEKDHJlbGVhc2VfeWVhchgDIAEoBVILcmVsZWFzZVllYXISHQoKc29uZ190aXRsZRgEIAMoCVIJc29uZ1RpdGxlQiUKF2NvbS5uZXRjcmFja2VyLnByb3RvYnVmQgpBbGJ1bVByb3RvYgZwcm90bzM="
              value-message-type: "org.akhq.utils.Album"
            - topic-regex: "film.*"
              descriptor-file-base64: "CuEBCgpmaWxtLnByb3RvEhRjb20uY29tcGFueS5wcm90b2J1ZiKRAQoERmlsbRISCgRuYW1lGAEgASgJUgRuYW1lEhoKCHByb2R1Y2VyGAIgASgJUghwcm9kdWNlchIhCgxyZWxlYXNlX3llYXIYAyABKAVSC3JlbGVhc2VZZWFyEhoKCGR1cmF0aW9uGAQgASgFUghkdXJhdGlvbhIaCghzdGFycmluZxgFIAMoCVIIc3RhcnJpbmdCIQoUY29tLmNvbXBhbnkucHJvdG9idWZCCUZpbG1Qcm90b2IGcHJvdG8z"
              value-message-type: "org.akhq.utils.Film"
            - topic-regex: "test.*"
              descriptor-file: "other.desc"
              key-message-type: "org.akhq.utils.Row"
              value-message-type: "org.akhq.utils.Envelope"
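        # Descriptor files such as other.desc can be produced with protoc, and base64-encoded
        # for descriptor-file-base64, e.g.:
        #   protoc --descriptor_set_out=album.desc album.proto
        #   base64 -w0 album.desc   # -w0 (no line wrapping) assumes GNU coreutils base64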
      # Ui Cluster Options (optional)
      ui-options:
        topic:
          default-view: ALL  # default list view (ALL, HIDE_INTERNAL, HIDE_INTERNAL_STREAM, HIDE_STREAM). Overrides default
          skip-consumer-groups: false # Skip loading consumer group information when showing topics. Overrides default
          skip-last-record: true  # Skip loading last record date information when showing topics.  Overrides default
          show-all-consumer-groups: true # Expand list of consumer groups instead of showing one. Overrides default.
        topic-data:
          sort: NEWEST # default sort order (OLDEST, NEWEST) (default: OLDEST).  Overrides default
          date-time-format: ISO # format of message timestamps (RELATIVE, ISO) (default: RELATIVE)

    my-cluster-ssl:
      properties:
        bootstrap.servers: "kafka:9093"
        security.protocol: SSL
        ssl.truststore.location: /app/truststore.jks
        ssl.truststore.password: password
        ssl.keystore.location: /app/keystore.jks
        ssl.keystore.password: password
        ssl.key.password: password

    my-cluster-sasl:
      properties:
        bootstrap.servers: "kafka:9094"
        security.protocol: SASL_SSL
        sasl.mechanism: SCRAM-SHA-256
        sasl.jaas.config: org.apache.kafka.common.security.scram.ScramLoginModule required username="admin" password="password";
        ssl.truststore.location: /app/truststore.jks
        ssl.truststore.password: password
        ssl.keystore.location: /app/keystore.jks
        ssl.keystore.password: password
        ssl.key.password: password

  pagination:
    page-size: 25 # number of elements per page (default : 25)
    threads: 16 # Number of parallel threads to resolve page

  # Configure avro-to-json serializer
  avro-serializer:
    json.serialization.inclusions: # ObjectMapper serialization inclusions used for avro-to-json conversion for display in the UI.
    # Supports Enums in JsonInclude.Include from Jackson library
      - NON_NULL

  # Topic list display options (optional)
  topic:
    retention: 172800000 # default retention period when creating topic
    partition: 3 #  default number of partition when creating topic
    replication: 3 # default number of replicas when creating topic
    internal-regexps: # list of regexp to be considered as internal (internal topic can't be deleted or updated)
      - "^_.*$"
      - "^.*_schemas$"
      - "^.*connect-config$"
      - "^.*connect-offsets$1"
      - "^.*connect-status$"
    stream-regexps: # list of regexp to be considered as internal stream topic
      - "^.*-changelog$"
      - "^.*-repartition$"
      - "^.*-rekey$"
    skip-consumer-groups: false # Skip loading consumer group information when showing topics
    skip-last-record: false # Skip loading last record date information when showing topics
    show-all-consumer-groups: false # Expand list of consumer groups instead of showing one.
    # Retry options for topic operations
    retry:
      topic-exists: # Delay between retries when checking for existence of newly created topics. This is needed as it might take the kafka broker a few seconds to create new topics.
        delay: "3s"

  # Topic display data options (optional)
  topic-data:
    size: 50 # max record per page (default: 50)
    poll-timeout: 1000 # The time, in milliseconds, spent waiting in poll if data is not available in the buffer.
    kafka-max-message-length: 1000000 # Max message length allowed to send to UI when retrieving a list of records in bytes.

  # Ui Global Options (optional)
  ui-options:
    topic:
      default-view: ALL  # default list view (ALL, HIDE_INTERNAL, HIDE_INTERNAL_STREAM, HIDE_STREAM). Overrides default
      skip-consumer-groups: false # Skip loading consumer group information when showing topics. Overrides default
      skip-last-record: true  # Skip loading last record date information when showing topics.  Overrides default
      show-all-consumer-groups: true # Expand list of consumer groups instead of showing one. Overrides default.
    topic-data:
      sort: NEWEST # default sort order (OLDEST, NEWEST) (default: OLDEST).  Overrides default

  # Auth & Roles (optional)
  security:
    default-group: admin # Default group for every user, including unauthenticated users
    # Groups definition
    groups:
      admin: # unique key
        name: admin # Group name
        roles:  # roles for the group
          - topic/read
          - topic/insert
          - topic/delete
          - topic/config/update
          - node/read
          - node/config/update
          - topic/data/read
          - topic/data/insert
          - topic/data/delete
          - group/read
          - group/delete
          - group/offsets/update
          - registry/read
          - registry/insert
          - registry/update
          - registry/delete
          - registry/version/delete
          - acls/read
          - connect/read
          - connect/insert
          - connect/update
          - connect/delete
          - connect/state/update
        attributes:
          # Regexp list to filter topic available for group
          topics-filter-regexp:
            - "test.*"
          # Regexp list to filter connect configs visible for group
          connects-filter-regexp:
            - "^test.*$"
          # Regexp list to filter consumer groups visible for group
          consumer-groups-filter-regexp:
            - "consumer.*"
      topic-reader: # unique key
        name: topic-reader # Other group
        roles:
          - topic/read
        attributes:
          topics-filter-regexp:
            - "test\\.reader.*"

    # Basic auth configuration
    basic-auth:
      - username: user # Username
        password: pass # Password as a sha256 hex digest
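        # The digest can be produced with, for example: echo -n "pass" | sha256sum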
        groups: # Groups for the user
          - admin
          - topic-reader

    # Ldap Groups configuration (when using ldap)
    ldap:
      default-group: topic-reader
      groups:
        - name: group-ldap-1
          groups: # Akhq groups list
            - topic-reader
        - name: group-ldap-2
          groups:
            - admin
      users:
        - username: riemann # ldap user id
          groups: # Akhq groups list
            - topic-reader
        - username: einstein
          groups:
            - admin

    # OIDC configuration
    oidc:
      enabled: true
      providers:
        oidc:
          label: "Login with OIDC"
          username-field: preferred_username
          groups-field: roles
          default-group: topic-reader
          groups:
            - name: oidc-admin-group
              groups:
                - admin
          users:
            - username: einstein
              groups:
                - admin

    # Header configuration (reverse proxy)
    header-auth:
      user-header: x-akhq-user # mandatory (the header name that will contain username)
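      # The reverse proxy is expected to set this header; with nginx basic auth in front,
      # for example:  proxy_set_header x-akhq-user $remote_user;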
      groups-header: x-akhq-group # optional (the header name that will contain groups separated by groups-header-separator)
      groups-header-separator: ',' # optional (separator, defaults to ',')
      ip-patterns: [127.0.0.*] # optional (Java regular expressions for matching trusted IP addresses, '0.0.0.0' matches all addresses)
      default-group: topic-reader
      groups:
        - name: header-admin-group
          groups:
            - admin
      users: # optional
        - username: header-user # username matching the `user-header` value
          groups: # list of groups / additional groups
            - topic-reader
        - username: header-admin
          groups:
            - admin
