server.servlet.context-path=/av
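# All HTTP endpoints of the service are therefore served under the /av prefix;
# for example, a controller mapped to /status (a hypothetical path, shown only to
# illustrate the prefix) would be reachable at http://<host>:<port>/av/status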
# spring kafka default bean properties begin ->
# kafka in consumer (can also be used for the default spring bean)
# bootstrap servers format: host1:port1,host2:port2
spring.kafka.consumer.bootstrap.servers=${AV_KAFKA_BOOTSTRAP_SERVERS}
spring.kafka.consumer.security.protocol=${AV_KAFKA_SECURITY_PROTOCOL:SASL_PLAINTEXT}
# SASL username and password (must be supplied via the environment)
spring.kafka.consumer.properties.sasl.jaas.config=org.apache.kafka.common.security.scram.ScramLoginModule required username="${AV_KAFKA_USERNAME}" password="${AV_KAFKA_PASSWORD}";
spring.kafka.consumer.properties.sasl.mechanism=${AV_KAFKA_SASL_MECHANISM:SCRAM-SHA-256}
#
spring.kafka.consumer.enable.auto.commit=false
spring.kafka.consumer.group.id=${AV_KAFKA_GROUP_ID:file-to-upload-consumers}
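# Illustrative values for the externalized settings above; every host, credential and
# group name below is a placeholder/assumption, not a real environment value:
#   AV_KAFKA_BOOTSTRAP_SERVERS=kafka-1:9092,kafka-2:9092
#   AV_KAFKA_SECURITY_PROTOCOL=SASL_PLAINTEXT
#   AV_KAFKA_SASL_MECHANISM=SCRAM-SHA-256
#   AV_KAFKA_USERNAME=av-service
#   AV_KAFKA_PASSWORD=<secret>
#   AV_KAFKA_GROUP_ID=file-to-upload-consumers
# With these values the resolved sasl.jaas.config would be:
#   org.apache.kafka.common.security.scram.ScramLoginModule required username="av-service" password="<secret>";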
# kafka in listeners
spring.kafka.listener.ack.mode=MANUAL_IMMEDIATE
# kafka in producer (can also be used for the default spring bean)
# bootstrap servers format: host1:port1,host2:port2
spring.kafka.in.producer.bootstrap.servers=${AV_KAFKA_BOOTSTRAP_SERVERS}
spring.kafka.in.producer.security.protocol=${AV_KAFKA_SECURITY_PROTOCOL:SASL_PLAINTEXT}
# SASL username and password (must be supplied via the environment)
spring.kafka.in.producer.properties.sasl.jaas.config=org.apache.kafka.common.security.scram.ScramLoginModule required username="${AV_KAFKA_USERNAME}" password="${AV_KAFKA_PASSWORD}";
spring.kafka.in.producer.properties.sasl.mechanism=${AV_KAFKA_SASL_MECHANISM:SCRAM-SHA-256}
#
# kafka out producer (can also be used for the default spring bean)
# bootstrap servers format: host1:port1,host2:port2
spring.kafka.producer.bootstrap.servers=${ERVU_KAFKA_BOOTSTRAP_SERVERS}
spring.kafka.producer.security.protocol=${ERVU_KAFKA_SECURITY_PROTOCOL:SASL_PLAINTEXT}
# SASL username and password (must be supplied via the environment)
spring.kafka.producer.properties.sasl.jaas.config=org.apache.kafka.common.security.scram.ScramLoginModule required username="${ERVU_KAFKA_USERNAME}" password="${ERVU_KAFKA_PASSWORD}";
spring.kafka.producer.properties.sasl.mechanism=${ERVU_KAFKA_SASL_MECHANISM:SCRAM-SHA-256}
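# Illustrative values for the ERVU-side connection used by this producer (placeholders
# only, following the same pattern as the AV_* variables above):
#   ERVU_KAFKA_BOOTSTRAP_SERVERS=ervu-kafka-1:9092,ervu-kafka-2:9092
#   ERVU_KAFKA_SECURITY_PROTOCOL=SASL_PLAINTEXT
#   ERVU_KAFKA_SASL_MECHANISM=SCRAM-SHA-256
#   ERVU_KAFKA_USERNAME=ervu-producer
#   ERVU_KAFKA_PASSWORD=<secret>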
# spring kafka default bean properties <- end
#
# kafka out consumer (not used for default bean creation by spring)
# bootstrap servers format: host1:port1,host2:port2
spring.kafka.out.consumer.bootstrap.servers=${ERVU_KAFKA_BOOTSTRAP_SERVERS}
spring.kafka.out.consumer.security.protocol=${ERVU_KAFKA_SECURITY_PROTOCOL:SASL_PLAINTEXT}
# SASL username and password (must be supplied via the environment)
spring.kafka.out.consumer.properties.sasl.jaas.config=org.apache.kafka.common.security.scram.ScramLoginModule required username="${ERVU_KAFKA_USERNAME}" password="${ERVU_KAFKA_PASSWORD}";
spring.kafka.out.consumer.properties.sasl.mechanism=${ERVU_KAFKA_SASL_MECHANISM:SCRAM-SHA-256}
#
spring.kafka.out.consumer.enable.auto.commit=false
spring.kafka.out.consumer.group.id=${ERVU_KAFKA_GROUP_ID:response-consumers}
# kafka out listeners
spring.kafka.out.listener.ack.mode=MANUAL_IMMEDIATE
#
#
kafka.in.topic.name=${AV_KAFKA_TOPIC_NAME}
kafka.in.status.topic.name=${AV_KAFKA_STATUS_TOPIC_NAME}
kafka.out.error.topic.name=${ERVU_KAFKA_ERROR_TOPIC_NAME}
kafka.out.success.topic.name=${ERVU_KAFKA_SUCCESS_TOPIC_NAME}
kafka.out.response.topic.name=${ERVU_KAFKA_RESPONSE_TOPIC_NAME}
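# Example topic layout (all names are hypothetical; the real values come from the environment):
#   AV_KAFKA_TOPIC_NAME=av.file-to-upload
#   AV_KAFKA_STATUS_TOPIC_NAME=av.file-status
#   ERVU_KAFKA_ERROR_TOPIC_NAME=ervu.upload-error
#   ERVU_KAFKA_SUCCESS_TOPIC_NAME=ervu.upload-success
#   ERVU_KAFKA_RESPONSE_TOPIC_NAME=ervu.upload-response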
#
av.check.enabled=${AV_CHECK_ENABLED:true}
av.rest.address=${AV_REST_ADDRESS}
av.first.timeout.milliseconds=${AV_FIRST_TIMEOUT_MILLISECONDS:1000}
av.retry.max.attempts.count=${AV_RETRY_MAX_ATTEMPTS_COUNT:10}
av.retry.delay.milliseconds=${AV_RETRY_DELAY_MILLISECONDS:1000}
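# Illustrative AV settings (the address is a placeholder, and the fixed-delay retry
# behaviour sketched here is an assumption about how these values are applied):
#   AV_REST_ADDRESS=http://av-scanner:8080
# With the defaults above, the worst case before a check is given up is roughly
# 1000 ms (first timeout) + 10 retries * 1000 ms delay = about 11 seconds, excluding
# the time spent on the HTTP calls themselves.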
file.saving.path=${FILE_SAVING_PATH:/transfer/}
#
s3.out.endpoint=${S3_ENDPOINT}
s3.out.access_key=${S3_ACCESS_KEY}
s3.out.secret_key=${S3_SECRET_KEY}
s3.out.bucket_name=${S3_OUT_BUCKET_NAME}
s3.out.path.style.access.enabled=${S3_OUT_PATH_STYLE_ACCESS_ENABLED}
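# Illustrative S3 settings (endpoint, bucket and keys are placeholders); path-style
# access is typically enabled for MinIO-style endpoints and disabled for
# virtual-hosted-style addressing:
#   S3_ENDPOINT=https://s3.example.local:9000
#   S3_ACCESS_KEY=<access-key>
#   S3_SECRET_KEY=<secret-key>
#   S3_OUT_BUCKET_NAME=av-out
#   S3_OUT_PATH_STYLE_ACCESS_ENABLED=true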
#
webdav.username=${WEBDAV_USERNAME}
webdav.password=${WEBDAV_PASSWORD}
# management endpoints
management.endpoints.web.exposure.include=health, info, metrics
management.endpoint.health.show-details=always
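# With the exposure above and the /av context path, the actuator endpoints are
# reachable at (host and port are placeholders):
#   GET http://<host>:<port>/av/actuator/health   (full details, per show-details=always)
#   GET http://<host>:<port>/av/actuator/info
#   GET http://<host>:<port>/av/actuator/metrics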