if [ $# -lt 1 ];
then
  # Print the usage message
  echo "USAGE: $0 [-daemon] server.properties [--override property=value]*"
  exit 1
fi
base_dir=$(dirname $0)

if [ "x$KAFKA_LOG4J_OPTS" = "x" ]; then
  export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/log4j.properties"
fi
if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then export KAFKA_HEAP_OPTS="-Xmx1G -Xms1G" fi EXTRA_ARGS=${EXTRA_ARGS-'-name kafkaServer -loggc'} COMMAND=$1 case$COMMANDin -daemon) EXTRA_ARGS="-daemon "$EXTRA_ARGS shift ;; *) ;; esac exec$base_dir/kafka-run-class.sh $EXTRA_ARGS kafka.Kafka "$@"
#!/bin/bash
if [ $# -lt 1 ];
then
  echo "USAGE: $0 [-daemon] [-name servicename] [-loggc] classname [opts]"
  exit 1
fi

# CYGWIN == 1 if Cygwin is detected, else 0.
if [[ $(uname -a) =~ "CYGWIN" ]]; then
  CYGWIN=1
else
  CYGWIN=0
fi

if [ -z "$INCLUDE_TEST_JARS" ]; then
  INCLUDE_TEST_JARS=false
fi

# Exclude jars not necessary for running commands.
regex="(-(test|test-sources|src|scaladoc|javadoc)\.jar|jar.asc)$"
should_include_file() {
  if [ "$INCLUDE_TEST_JARS" = true ]; then
    return 0
  fi
  file=$1
  if [ -z "$(echo "$file" | egrep "$regex")" ] ; then
    return 0
  else
    return 1
  fi
}
base_dir=$(dirname $0)/..
if [ -z "$SCALA_VERSION" ]; then SCALA_VERSION=2.13.3 if [[ -f "$base_dir/gradle.properties" ]]; then SCALA_VERSION=`grep "^scalaVersion=""$base_dir/gradle.properties" | cut -d= -f 2` fi fi
if [ -z "$SCALA_BINARY_VERSION" ]; then SCALA_BINARY_VERSION=$(echo$SCALA_VERSION | cut -f 1-2 -d '.') fi
# run ./gradlew copyDependantLibs to get all dependant jars in a local dir
shopt -s nullglob
if [ -z "$UPGRADE_KAFKA_STREAMS_TEST_VERSION" ]; then
  for dir in "$base_dir"/core/build/dependant-libs-${SCALA_VERSION}*; do
    CLASSPATH="$CLASSPATH:$dir/*"
  done
fi

for file in "$base_dir"/examples/build/libs/kafka-examples*.jar; do
  if should_include_file "$file"; then
    CLASSPATH="$CLASSPATH":"$file"
  fi
done

if [ -z "$UPGRADE_KAFKA_STREAMS_TEST_VERSION" ]; then
  clients_lib_dir=$(dirname $0)/../clients/build/libs
  streams_lib_dir=$(dirname $0)/../streams/build/libs
  streams_dependant_clients_lib_dir=$(dirname $0)/../streams/build/dependant-libs-${SCALA_VERSION}
else
  clients_lib_dir=/opt/kafka-$UPGRADE_KAFKA_STREAMS_TEST_VERSION/libs
  streams_lib_dir=$clients_lib_dir
  streams_dependant_clients_lib_dir=$streams_lib_dir
fi

for file in "$clients_lib_dir"/kafka-clients*.jar; do
  if should_include_file "$file"; then
    CLASSPATH="$CLASSPATH":"$file"
  fi
done

for file in "$streams_lib_dir"/kafka-streams*.jar; do
  if should_include_file "$file"; then
    CLASSPATH="$CLASSPATH":"$file"
  fi
done

if [ -z "$UPGRADE_KAFKA_STREAMS_TEST_VERSION" ]; then
  for file in "$base_dir"/streams/examples/build/libs/kafka-streams-examples*.jar; do
    if should_include_file "$file"; then
      CLASSPATH="$CLASSPATH":"$file"
    fi
  done
else
  VERSION_NO_DOTS=`echo $UPGRADE_KAFKA_STREAMS_TEST_VERSION | sed 's/\.//g'`
  SHORT_VERSION_NO_DOTS=${VERSION_NO_DOTS:0:((${#VERSION_NO_DOTS} - 1))} # remove last char, ie, bug-fix number
  for file in "$base_dir"/streams/upgrade-system-tests-$SHORT_VERSION_NO_DOTS/build/libs/kafka-streams-upgrade-system-tests*.jar; do
    if should_include_file "$file"; then
      CLASSPATH="$file":"$CLASSPATH"
    fi
  done
  if [ "$SHORT_VERSION_NO_DOTS" = "0100" ]; then
    CLASSPATH="/opt/kafka-$UPGRADE_KAFKA_STREAMS_TEST_VERSION/libs/zkclient-0.8.jar":"$CLASSPATH"
    CLASSPATH="/opt/kafka-$UPGRADE_KAFKA_STREAMS_TEST_VERSION/libs/zookeeper-3.4.6.jar":"$CLASSPATH"
  fi
  if [ "$SHORT_VERSION_NO_DOTS" = "0101" ]; then
    CLASSPATH="/opt/kafka-$UPGRADE_KAFKA_STREAMS_TEST_VERSION/libs/zkclient-0.9.jar":"$CLASSPATH"
    CLASSPATH="/opt/kafka-$UPGRADE_KAFKA_STREAMS_TEST_VERSION/libs/zookeeper-3.4.8.jar":"$CLASSPATH"
  fi
fi

for file in "$streams_dependant_clients_lib_dir"/rocksdb*.jar; do
  CLASSPATH="$CLASSPATH":"$file"
done

for file in "$streams_dependant_clients_lib_dir"/*hamcrest*.jar; do
  CLASSPATH="$CLASSPATH":"$file"
done

for file in "$base_dir"/tools/build/libs/kafka-tools*.jar; do
  if should_include_file "$file"; then
    CLASSPATH="$CLASSPATH":"$file"
  fi
done

for dir in "$base_dir"/tools/build/dependant-libs-${SCALA_VERSION}*; do
  CLASSPATH="$CLASSPATH:$dir/*"
done

for cc_pkg in "api" "transforms" "runtime" "file" "mirror" "mirror-client" "json" "tools" "basic-auth-extension"
do
  for file in "$base_dir"/connect/${cc_pkg}/build/libs/connect-${cc_pkg}*.jar; do
    if should_include_file "$file"; then
      CLASSPATH="$CLASSPATH":"$file"
    fi
  done
  if [ -d "$base_dir/connect/${cc_pkg}/build/dependant-libs" ] ; then
    CLASSPATH="$CLASSPATH:$base_dir/connect/${cc_pkg}/build/dependant-libs/*"
  fi
done

# classpath addition for release
for file in "$base_dir"/libs/*; do
  if should_include_file "$file"; then
    CLASSPATH="$CLASSPATH":"$file"
  fi
done

for file in "$base_dir"/core/build/libs/kafka_${SCALA_BINARY_VERSION}*.jar; do
  if should_include_file "$file"; then
    CLASSPATH="$CLASSPATH":"$file"
  fi
done
shopt -u nullglob
if [ -z "$CLASSPATH" ] ; then echo"Classpath is empty. Please build the project first e.g. by running './gradlew jar -PscalaVersion=$SCALA_VERSION'" exit 1 fi
# JMX settings if [ -z "$KAFKA_JMX_OPTS" ]; then KAFKA_JMX_OPTS="-Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false " fi
# JMX port to use if [ $JMX_PORT ]; then KAFKA_JMX_OPTS="$KAFKA_JMX_OPTS -Dcom.sun.management.jmxremote.port=$JMX_PORT " fi
# Log directory to use if [ "x$LOG_DIR" = "x" ]; then LOG_DIR="$base_dir/logs" fi
# Log4j settings if [ -z "$KAFKA_LOG4J_OPTS" ]; then # Log to console. This is a tool. LOG4J_DIR="$base_dir/config/tools-log4j.properties" # If Cygwin is detected, LOG4J_DIR is converted to Windows format. (( CYGWIN )) && LOG4J_DIR=$(cygpath --path --mixed "${LOG4J_DIR}") KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:${LOG4J_DIR}" else # create logs directory if [ ! -d "$LOG_DIR" ]; then mkdir -p "$LOG_DIR" fi fi
# If Cygwin is detected, LOG_DIR is converted to Windows format. (( CYGWIN )) && LOG_DIR=$(cygpath --path --mixed "${LOG_DIR}") KAFKA_LOG4J_OPTS="-Dkafka.logs.dir=$LOG_DIR$KAFKA_LOG4J_OPTS"
# Generic jvm settings you want to add if [ -z "$KAFKA_OPTS" ]; then KAFKA_OPTS="" fi
# Set Debug options if enabled
if [ "x$KAFKA_DEBUG" != "x" ]; then

  # Use default ports
  DEFAULT_JAVA_DEBUG_PORT="5005"

  if [ -z "$JAVA_DEBUG_PORT" ]; then
    JAVA_DEBUG_PORT="$DEFAULT_JAVA_DEBUG_PORT"
  fi

  # Use the defaults if JAVA_DEBUG_OPTS was not set
  DEFAULT_JAVA_DEBUG_OPTS="-agentlib:jdwp=transport=dt_socket,server=y,suspend=${DEBUG_SUSPEND_FLAG:-n},address=$JAVA_DEBUG_PORT"
  if [ -z "$JAVA_DEBUG_OPTS" ]; then
    JAVA_DEBUG_OPTS="$DEFAULT_JAVA_DEBUG_OPTS"
  fi

  echo "Enabling Java debug options: $JAVA_DEBUG_OPTS"
  KAFKA_OPTS="$JAVA_DEBUG_OPTS $KAFKA_OPTS"
fi
# Which java to use
if [ -z "$JAVA_HOME" ]; then
  JAVA="java"
else
  JAVA="$JAVA_HOME/bin/java"
fi

# Memory options
if [ -z "$KAFKA_HEAP_OPTS" ]; then
  KAFKA_HEAP_OPTS="-Xmx256M"
fi

# JVM performance options
# MaxInlineLevel=15 is the default since JDK 14 and can be removed once older JDKs are no longer supported
if [ -z "$KAFKA_JVM_PERFORMANCE_OPTS" ]; then
  KAFKA_JVM_PERFORMANCE_OPTS="-server -XX:+UseG1GC -XX:MaxGCPauseMillis=20 -XX:InitiatingHeapOccupancyPercent=35 -XX:+ExplicitGCInvokesConcurrent -XX:MaxInlineLevel=15 -Djava.awt.headless=true"
fi

while [ $# -gt 0 ]; do
  COMMAND=$1
  case $COMMAND in
    -name)
      DAEMON_NAME=$2
      CONSOLE_OUTPUT_FILE=$LOG_DIR/$DAEMON_NAME.out
      shift 2
      ;;
    -loggc)
      if [ -z "$KAFKA_GC_LOG_OPTS" ]; then
        GC_LOG_ENABLED="true"
      fi
      shift
      ;;
    -daemon)
      DAEMON_MODE="true"
      shift
      ;;
    *)
      break
      ;;
  esac
done

# GC options
GC_FILE_SUFFIX='-gc.log'
GC_LOG_FILE_NAME=''
if [ "x$GC_LOG_ENABLED" = "xtrue" ]; then
  GC_LOG_FILE_NAME=$DAEMON_NAME$GC_FILE_SUFFIX

  # The first segment of the version number, which is '1' for releases before Java 9
  # it then becomes '9', '10', ...
  # Some examples of the first line of `java --version`:
  # 8 -> java version "1.8.0_152"
  # 9.0.4 -> java version "9.0.4"
  # 10 -> java version "10" 2018-03-20
  # 10.0.1 -> java version "10.0.1" 2018-04-17
  # We need to match to the end of the line to prevent sed from printing the characters that do not match
  JAVA_MAJOR_VERSION=$("$JAVA" -version 2>&1 | sed -E -n 's/.* version "([0-9]*).*$/\1/p')
  if [[ "$JAVA_MAJOR_VERSION" -ge "9" ]] ; then
    KAFKA_GC_LOG_OPTS="-Xlog:gc*:file=$LOG_DIR/$GC_LOG_FILE_NAME:time,tags:filecount=10,filesize=100M"
  else
    KAFKA_GC_LOG_OPTS="-Xloggc:$LOG_DIR/$GC_LOG_FILE_NAME -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateStamps -XX:+PrintGCTimeStamps -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=10 -XX:GCLogFileSize=100M"
  fi
fi

# Remove a possible colon prefix from the classpath (happens at lines like `CLASSPATH="$CLASSPATH:$file"` when CLASSPATH is blank)
# Syntax used on the right side is native Bash string manipulation; for more details see
# http://tldp.org/LDP/abs/html/string-manipulation.html, specifically the section titled "Substring Removal"
CLASSPATH=${CLASSPATH#:}

# If Cygwin is detected, classpath is converted to Windows format.
(( CYGWIN )) && CLASSPATH=$(cygpath --path --mixed "${CLASSPATH}")

# Launch mode
if [ "x$DAEMON_MODE" = "xtrue" ]; then
  nohup "$JAVA" $KAFKA_HEAP_OPTS $KAFKA_JVM_PERFORMANCE_OPTS $KAFKA_GC_LOG_OPTS $KAFKA_JMX_OPTS $KAFKA_LOG4J_OPTS -cp "$CLASSPATH" $KAFKA_OPTS "$@" > "$CONSOLE_OUTPUT_FILE" 2>&1 < /dev/null &
else
  exec "$JAVA" $KAFKA_HEAP_OPTS $KAFKA_JVM_PERFORMANCE_OPTS $KAFKA_GC_LOG_OPTS $KAFKA_JMX_OPTS $KAFKA_LOG4J_OPTS -cp "$CLASSPATH" $KAFKA_OPTS "$@"
fi
The script is long, but only its final section actually launches the process; that part is shown below.
Launch modes
The last segment of the script decides how the process is launched.
# Launch mode
if [ "x$DAEMON_MODE" = "xtrue" ]; then
  nohup "$JAVA" $KAFKA_HEAP_OPTS $KAFKA_JVM_PERFORMANCE_OPTS $KAFKA_GC_LOG_OPTS $KAFKA_JMX_OPTS $KAFKA_LOG4J_OPTS -cp "$CLASSPATH" $KAFKA_OPTS "$@" > "$CONSOLE_OUTPUT_FILE" 2>&1 < /dev/null &
else
  exec "$JAVA" $KAFKA_HEAP_OPTS $KAFKA_JVM_PERFORMANCE_OPTS $KAFKA_GC_LOG_OPTS $KAFKA_JMX_OPTS $KAFKA_LOG4J_OPTS -cp "$CLASSPATH" $KAFKA_OPTS "$@"
fi
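For illustration, this is how the two modes are typically selected from the command line (paths assume a standard Kafka distribution):

# Daemon mode: -daemon is folded into EXTRA_ARGS, the JVM is detached with nohup
# and its console output lands in $LOG_DIR/kafkaServer.out.
bin/kafka-server-start.sh -daemon config/server.properties

# Foreground mode: exec replaces the shell with the JVM and logs go to the terminal.
bin/kafka-server-start.sh config/server.properties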
# JVM performance options
# MaxInlineLevel=15 is the default since JDK 14 and can be removed once older JDKs are no longer supported
if [ -z "$KAFKA_JVM_PERFORMANCE_OPTS" ]; then
  KAFKA_JVM_PERFORMANCE_OPTS="-server -XX:+UseG1GC -XX:MaxGCPauseMillis=20 -XX:InitiatingHeapOccupancyPercent=35 -XX:+ExplicitGCInvokesConcurrent -XX:MaxInlineLevel=15 -Djava.awt.headless=true"
fi
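Since this block only assigns KAFKA_JVM_PERFORMANCE_OPTS when it is empty, exporting the variable beforehand replaces the defaults wholesale. A minimal sketch (the 50ms pause target is an arbitrary example, not a recommendation):

export KAFKA_JVM_PERFORMANCE_OPTS="-server -XX:+UseG1GC -XX:MaxGCPauseMillis=50 -Djava.awt.headless=true"
bin/kafka-server-start.sh config/server.properties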
while [ $# -gt 0 ]; do
  COMMAND=$1
  case $COMMAND in
    -name)
      DAEMON_NAME=$2
      CONSOLE_OUTPUT_FILE=$LOG_DIR/$DAEMON_NAME.out
      shift 2
      ;;
    -loggc)
      if [ -z "$KAFKA_GC_LOG_OPTS" ]; then
        GC_LOG_ENABLED="true"
      fi
      shift
      ;;
    -daemon)
      DAEMON_MODE="true"
      shift
      ;;
    *)
      break
      ;;
  esac
done
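The loop peels off the script's own flags and stops at the first unrecognized token, so everything from the class name onward is left in "$@" for the final java command line. This is exactly how kafka-server-start.sh drives it:

# -daemon, -name and -loggc are consumed by the while/case loop above;
# kafka.Kafka and the properties file are passed through to the JVM untouched.
bin/kafka-run-class.sh -daemon -name kafkaServer -loggc kafka.Kafka config/server.properties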
-XX:InitiatingHeapOccupancyPercent=percent
Sets the percentage of the heap occupancy (0 to 100) at which to start a concurrent GC cycle. It is used by garbage collectors that trigger a concurrent GC cycle based on the occupancy of the entire heap, not just one of the generations (for example, the G1 garbage collector). By default, the initiating value is set to 45%. A value of 0 implies nonstop GC cycles. The following example shows how to set the initiating heap occupancy to 75%:
-XX:InitiatingHeapOccupancyPercent=75
-XX:InitiatingHeapOccupancyPercent determines the initial value as a percentage of the size of the current old generation as long as there aren't enough observations to make a good prediction of the Initiating Heap Occupancy threshold. Turn off this behavior of G1 using the option -XX:-G1UseAdaptiveIHOP. In this case, the value of -XX:InitiatingHeapOccupancyPercent always determines this threshold.
Other causes than Allocation Failure for a Full GC typically indicate that either the application or some external tool causes a full heap collection. If the cause is System.gc(), and there is no way to modify the application sources, the effect of Full GCs can be mitigated by using -XX:+ExplicitGCInvokesConcurrent or letting the VM completely ignore them by setting -XX:+DisableExplicitGC. External tools may still force Full GCs; they can be removed only by not requesting them.
KAFKA-5470: Replace -XX:+DisableExplicitGC with -XX:+ExplicitGCInvokesConcurrent in kafka-run-class by ijuma · Pull Request #3371 · apache/kafka (github.com)
In the committer's own words:
This is important because Bits.reserveMemory calls System.gc() hoping to free native memory in order to avoid throwing an OutOfMemoryException. This call is currently a no-op due to -XX:+DisableExplicitGC.
It’s worth mentioning that -XX:MaxDirectMemorySize can be used to increase the amount of native memory available for allocation of direct byte buffers.
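So if direct-buffer allocations are the bottleneck, the ceiling can be raised through KAFKA_OPTS; a sketch, with 512m as a purely illustrative value:

export KAFKA_OPTS="-XX:MaxDirectMemorySize=512m"
bin/kafka-server-start.sh -daemon config/server.properties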
// These methods should be called whenever direct memory is allocated or
// freed. They allow the user to control the amount of direct memory
// which a process may access. All sizes are specified in bytes.

// optimist!
if (tryReserveMemory(size, cap)) {
    return;
}

final JavaLangRefAccess jlra = SharedSecrets.getJavaLangRefAccess();

// retry while helping enqueue pending Reference objects
// which includes executing pending Cleaner(s) which includes
// Cleaner(s) that free direct buffer memory
while (jlra.tryHandlePendingReference()) {
    if (tryReserveMemory(size, cap)) {
        return;
    }
}

// trigger VM's Reference processing
System.gc();

// a retry loop with exponential back-off delays
// (this gives VM some time to do it's job)
boolean interrupted = false;
try {
    long sleepTime = 1;
    int sleeps = 0;
    while (true) {
        if (tryReserveMemory(size, cap)) {
            return;
        }
        if (sleeps >= MAX_SLEEPS) {
            break;
        }
        if (!jlra.tryHandlePendingReference()) {
            try {
                Thread.sleep(sleepTime);
                sleepTime <<= 1;
                sleeps++;
            } catch (InterruptedException e) {
                interrupted = true;
            }
        }
    }

    // no luck
    throw new OutOfMemoryError("Direct buffer memory");
One user online flat-out tore into this code as a pile of shit: java.nio.Bits.reserveMemory uses a lock, calls System.gc, and is generally bad code… (google.com)
1. ALL memory access requires a lock. That's evil if you're allocating small chunks.
2. The code to change the reserved memory counters is duplicated twice. This is a great way to introduce bugs. (how did this even get approved? do they not do code audits or require that commits be approved?)
3. If you are out of memory we call System.gc... EVIL. The entire way direct memory is reclaimed via GC is a horrible design.
4. After GC they sleep 100ms. What's that about? Why 100ms? Why not 1ms?
One reason is also that the inlining itself in the HotSpot JVM is implemented with recursion. Every time inlining of a method is started a new context is created on the native stack. Allowing an unlimited depth would eventually make the JIT-compiler crash when it runs out of stack.
# Log directory to use
# Resolve LOG_DIR; if it is not configured, fall back to a logs directory under $base_dir
if [ "x$LOG_DIR" = "x" ]; then
  # base_dir=$(dirname $0)/..
  LOG_DIR="$base_dir/logs"
fi
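Because the fallback only applies when LOG_DIR is empty, pointing the logs at a dedicated disk is just a matter of exporting the variable first; a sketch with a hypothetical path:

export LOG_DIR=/data/kafka/logs
bin/kafka-server-start.sh -daemon config/server.properties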
# GC options
GC_FILE_SUFFIX='-gc.log'
GC_LOG_FILE_NAME=''
if [ "x$GC_LOG_ENABLED" = "xtrue" ]; then
  GC_LOG_FILE_NAME=$DAEMON_NAME$GC_FILE_SUFFIX

  # The first segment of the version number, which is '1' for releases before Java 9
  # it then becomes '9', '10', ...
  # Some examples of the first line of `java --version`:
  # 8 -> java version "1.8.0_152"
  # 9.0.4 -> java version "9.0.4"
  # 10 -> java version "10" 2018-03-20
  # 10.0.1 -> java version "10.0.1" 2018-04-17
  # We need to match to the end of the line to prevent sed from printing the characters that do not match
  JAVA_MAJOR_VERSION=$("$JAVA" -version 2>&1 | sed -E -n 's/.* version "([0-9]*).*$/\1/p')
  if [[ "$JAVA_MAJOR_VERSION" -ge "9" ]] ; then
    KAFKA_GC_LOG_OPTS="-Xlog:gc*:file=$LOG_DIR/$GC_LOG_FILE_NAME:time,tags:filecount=10,filesize=100M"
  else
    KAFKA_GC_LOG_OPTS="-Xloggc:$LOG_DIR/$GC_LOG_FILE_NAME -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateStamps -XX:+PrintGCTimeStamps -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=10 -XX:GCLogFileSize=100M"
  fi
fi
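The sed expression can be checked in isolation against the sample version strings from the comment; for example:

echo 'java version "1.8.0_152"'     | sed -E -n 's/.* version "([0-9]*).*$/\1/p'  # 1  -> pre-JDK-9 GC flags
echo 'java version "9.0.4"'         | sed -E -n 's/.* version "([0-9]*).*$/\1/p'  # 9  -> unified -Xlog:gc*
echo 'java version "10" 2018-03-20' | sed -E -n 's/.* version "([0-9]*).*$/\1/p'  # 10 -> unified -Xlog:gc*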
# JMX settings
if [ -z "$KAFKA_JMX_OPTS" ]; then
  KAFKA_JMX_OPTS="-Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false "
fi
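With those defaults, exporting JMX_PORT is all it takes to expose the broker's MBeans; a sketch that assumes a trusted network, since authentication and SSL are disabled above:

JMX_PORT=9999 bin/kafka-server-start.sh -daemon config/server.properties
# then point jconsole (or any other JMX client) at broker-host:9999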
# Authorizer logging
log4j.appender.authorizerAppender=org.apache.log4j.DailyRollingFileAppender
log4j.appender.authorizerAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.authorizerAppender.File=${kafka.logs.dir}/kafka-authorizer.log
log4j.appender.authorizerAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.authorizerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n

# Change the line below to adjust ZK client logging
log4j.logger.org.apache.zookeeper=INFO

# Change the two lines below to adjust the general broker logging level (output to server.log and stdout)
log4j.logger.kafka=INFO
log4j.logger.org.apache.kafka=INFO

# Change to DEBUG or TRACE to enable request logging
log4j.logger.kafka.request.logger=WARN, requestAppender
log4j.additivity.kafka.request.logger=false

# Uncomment the lines below and change log4j.logger.kafka.network.RequestChannel$ to TRACE for additional output
# related to the handling of requests
#log4j.logger.kafka.network.Processor=TRACE, requestAppender
#log4j.logger.kafka.server.KafkaApis=TRACE, requestAppender
#log4j.additivity.kafka.server.KafkaApis=false
log4j.logger.kafka.network.RequestChannel$=WARN, requestAppender
log4j.additivity.kafka.network.RequestChannel$=false

log4j.logger.kafka.controller=TRACE, controllerAppender
log4j.additivity.kafka.controller=false

log4j.logger.kafka.log.LogCleaner=INFO, cleanerAppender
log4j.additivity.kafka.log.LogCleaner=false

log4j.logger.state.change.logger=INFO, stateChangeAppender
log4j.additivity.state.change.logger=false

# Access denials are logged at INFO level, change to DEBUG to also log allowed accesses
log4j.logger.kafka.authorizer.logger=INFO, authorizerAppender
log4j.additivity.kafka.authorizer.logger=false
KAFKA_OPTS
KAFKA_OPTS is where you can put any generic JVM settings of your own:
# Generic jvm settings you want to add
if [ -z "$KAFKA_OPTS" ]; then
  KAFKA_OPTS=""
fi
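Anything exported as KAFKA_OPTS is appended to the java command line verbatim. A sketch with an illustrative system property (the path is hypothetical):

export KAFKA_OPTS="-Djava.security.auth.login.config=/etc/kafka/jaas.conf"
bin/kafka-server-start.sh -daemon config/server.properties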
if [ -z "$UPGRADE_KAFKA_STREAMS_TEST_VERSION" ]; then for file in"$base_dir"/streams/examples/build/libs/kafka-streams-examples*.jar; do if should_include_file "$file"; then CLASSPATH="$CLASSPATH":"$file" fi done else VERSION_NO_DOTS=`echo$UPGRADE_KAFKA_STREAMS_TEST_VERSION | sed 's/\.//g'` SHORT_VERSION_NO_DOTS=${VERSION_NO_DOTS:0:((${#VERSION_NO_DOTS} - 1))} # remove last char, ie, bug-fix number for file in"$base_dir"/streams/upgrade-system-tests-$SHORT_VERSION_NO_DOTS/build/libs/kafka-streams-upgrade-system-tests*.jar; do if should_include_file "$file"; then CLASSPATH="$file":"$CLASSPATH" fi done if [ "$SHORT_VERSION_NO_DOTS" = "0100" ]; then CLASSPATH="/opt/kafka-$UPGRADE_KAFKA_STREAMS_TEST_VERSION/libs/zkclient-0.8.jar":"$CLASSPATH" CLASSPATH="/opt/kafka-$UPGRADE_KAFKA_STREAMS_TEST_VERSION/libs/zookeeper-3.4.6.jar":"$CLASSPATH" fi if [ "$SHORT_VERSION_NO_DOTS" = "0101" ]; then CLASSPATH="/opt/kafka-$UPGRADE_KAFKA_STREAMS_TEST_VERSION/libs/zkclient-0.9.jar":"$CLASSPATH" CLASSPATH="/opt/kafka-$UPGRADE_KAFKA_STREAMS_TEST_VERSION/libs/zookeeper-3.4.8.jar":"$CLASSPATH" fi fi
# run ./gradlew copyDependantLibs to get all dependant jars in a local dir
shopt -s nullglob
if [ -z "$UPGRADE_KAFKA_STREAMS_TEST_VERSION" ]; then
  for dir in "$base_dir"/core/build/dependant-libs-${SCALA_VERSION}*; do
    CLASSPATH="$CLASSPATH:$dir/*"
  done
fi

for file in "$base_dir"/examples/build/libs/kafka-examples*.jar; do
  if should_include_file "$file"; then
    CLASSPATH="$CLASSPATH":"$file"
  fi
done
> Configure project :
Building project 'core' with Scala version 2.13.3
Building project 'streams-scala' with Scala version 2.13.3
Deprecated Gradle features were used in this build, making it incompatible with Gradle 7.0. Use '--warning-mode all' to show the individual deprecation warnings. See https://docs.gradle.org/6.6.1/userguide/command_line_interface.html#sec:command_line_warnings
shift: shift [n]
    Shift positional parameters.

    Rename the positional parameters $N+1,$N+2 ... to $1,$2 ...  If N is
    not given, it is assumed to be 1.

    Exit Status:
    Returns success unless N is negative or greater than $#.
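A quick demonstration of shift on a fake argument list:

set -- -daemon -name kafkaServer kafka.Kafka
echo "$1"  # -daemon
shift      # drop one parameter
echo "$1"  # -name
shift 2    # drop -name and its value in one go
echo "$1"  # kafka.Kafka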
if [ -z "$SCALA_VERSION" ]; then SCALA_VERSION=2.13.3 if [[ -f "$base_dir/gradle.properties" ]]; then SCALA_VERSION=`grep "^scalaVersion=""$base_dir/gradle.properties" | cut -d= -f 2` fi fi
group=org.apache.kafka
# NOTE: When you change this version number, you should also make sure to update
# the version numbers in
#  - docs/js/templateData.js
#  - tests/kafkatest/__init__.py
#  - tests/kafkatest/version.py (variable DEV_VERSION)
#  - kafka-merge-pr.py
version=2.7.2
scalaVersion=2.13.3
task=build
org.gradle.jvmargs=-Xmx2g -Xss4m -XX:+UseParallelGC
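Run against this gradle.properties, the grep/cut pipeline from the snippet above extracts the version like so:

grep "^scalaVersion=" gradle.properties | cut -d= -f 2  # prints 2.13.3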
# Set Debug options if enabled
if [ "x$KAFKA_DEBUG" != "x" ]; then

  # Use default ports
  DEFAULT_JAVA_DEBUG_PORT="5005"

  if [ -z "$JAVA_DEBUG_PORT" ]; then
    JAVA_DEBUG_PORT="$DEFAULT_JAVA_DEBUG_PORT"
  fi

  # Use the defaults if JAVA_DEBUG_OPTS was not set
  DEFAULT_JAVA_DEBUG_OPTS="-agentlib:jdwp=transport=dt_socket,server=y,suspend=${DEBUG_SUSPEND_FLAG:-n},address=$JAVA_DEBUG_PORT"
  if [ -z "$JAVA_DEBUG_OPTS" ]; then
    JAVA_DEBUG_OPTS="$DEFAULT_JAVA_DEBUG_OPTS"
  fi

  echo "Enabling Java debug options: $JAVA_DEBUG_OPTS"
  KAFKA_OPTS="$JAVA_DEBUG_OPTS $KAFKA_OPTS"
fi
debugging - What are Java command line options to set to allow JVM to be remotely debugged? - Stack Overflow
Before Java 5.0, use -Xdebug and -Xrunjdwp arguments. These options will still work in later versions, but it will run in interpreted mode instead of JIT, which will be slower.
Versions before JDK 5 can simply be ignored here. (Knowing about them is of little use anyway.)
From Java 5.0, it is better to use the -agentlib:jdwp single option:
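In kafka-run-class.sh that translates to: any non-empty KAFKA_DEBUG enables the branch, JAVA_DEBUG_PORT overrides the default 5005, and DEBUG_SUSPEND_FLAG=y makes the JVM wait for the debugger before starting. A sketch:

KAFKA_DEBUG=true JAVA_DEBUG_PORT=8888 DEBUG_SUSPEND_FLAG=y \
  bin/kafka-run-class.sh kafka.Kafka config/server.properties
# the script echoes: Enabling Java debug options: -agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=8888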
for cc_pkg in"api""transforms""runtime""file""mirror""mirror-client""json""tools""basic-auth-extension" do for file in"$base_dir"/connect/${cc_pkg}/build/libs/connect-${cc_pkg}*.jar; do if should_include_file "$file"; then CLASSPATH="$CLASSPATH":"$file" fi done if [ -d "$base_dir/connect/${cc_pkg}/build/dependant-libs" ] ; then CLASSPATH="$CLASSPATH:$base_dir/connect/${cc_pkg}/build/dependant-libs/*" fi done
Exclude jars not necessary for running commands.
Exclude jars that are not needed to run the commands, such as test and javadoc jars.
regex="(-(test|test-sources|src|scaladoc|javadoc)\.jar|jar.asc)$" should_include_file() { if [ "$INCLUDE_TEST_JARS" = true ]; then return 0 fi file=$1 if [ -z "$(echo "$file" | egrep "$regex")" ] ; then return 0 else return 1 fi }
INCLUDE_TEST_JARS
Checks whether including test jars on the classpath has been enabled.
if [ -z "$INCLUDE_TEST_JARS" ]; then INCLUDE_TEST_JARS=false fi