Skip to content

Commit

Permalink
KAFKA-2820: systest log level
Browse files Browse the repository at this point in the history
Restores control over log level in system test service class KafkaService.

Author: Geoff Anderson <[email protected]>

Reviewers: Ismael Juma, Ewen Cheslack-Postava

Closes apache#538 from granders/KAFKA-2820-systest-log-level
  • Loading branch information
Geoff Anderson authored and Confluent committed Nov 18, 2015
1 parent 0f00ec9 commit 2e91806
Show file tree
Hide file tree
Showing 3 changed files with 40 additions and 36 deletions.
29 changes: 17 additions & 12 deletions tests/kafkatest/services/kafka/kafka.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,17 +34,22 @@
class KafkaService(JmxMixin, Service):
    """System-test service wrapper that installs, configures and runs a Kafka broker on a test node."""

    # Root directory on the test node under which all service files live.
    PERSISTENT_ROOT = "/mnt"
    # stdout and stderr of the broker process are both appended to the same capture file.
    STDOUT_CAPTURE = os.path.join(PERSISTENT_ROOT, "kafka.log")
    STDERR_CAPTURE = os.path.join(PERSISTENT_ROOT, "kafka.log")
    # Rendered log4j configuration pushed to the node before startup.
    LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "kafka-log4j.properties")
    # Logs such as controller.log, server.log, etc all go here
    OPERATIONAL_LOG_DIR = os.path.join(PERSISTENT_ROOT, "kafka-operational-logs")
    # Kafka log segments etc go here
    DATA_LOG_DIR = os.path.join(PERSISTENT_ROOT, "kafka-data-logs")
    # Rendered broker configuration (kafka.properties) pushed to the node.
    CONFIG_FILE = os.path.join(PERSISTENT_ROOT, "kafka.properties")

    # Files the test framework collects from each node after a run.
    # Data logs are large, so they are not collected by default.
    logs = {
        "kafka_log": {
            "path": STDOUT_CAPTURE,
            "collect_default": True},
        "kafka_operational_logs": {
            "path": OPERATIONAL_LOG_DIR,
            "collect_default": True},
        "kafka_data": {
            "path": DATA_LOG_DIR,
            "collect_default": False}
    }

Expand Down Expand Up @@ -107,24 +112,24 @@ def prop_file(self, node):

def start_cmd(self, node):
    """Build the shell command line that launches the Kafka broker on `node`.

    Exports JMX_PORT for the JMX mixin, points log4j at the rendered
    config, and redirects LOG_DIR so operational logs (server.log,
    controller.log, ...) land in OPERATIONAL_LOG_DIR. stdout/stderr of
    the broker process are appended to the capture files.
    """
    cmd = "export JMX_PORT=%d; " % self.jmx_port
    cmd += "export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%s\"; " % self.LOG4J_CONFIG
    # Kafka's startup script honors LOG_DIR for its operational log files.
    cmd += "export LOG_DIR=%s; " % KafkaService.OPERATIONAL_LOG_DIR
    cmd += "export KAFKA_OPTS=%s; " % self.security_config.kafka_opts
    cmd += "/opt/" + kafka_dir(node) + "/bin/kafka-server-start.sh %s 1>> %s 2>> %s &" % \
        (KafkaService.CONFIG_FILE, KafkaService.STDOUT_CAPTURE, KafkaService.STDERR_CAPTURE)
    return cmd

def start_node(self, node):
    """Push config files to `node`, start the broker, and block until startup completes.

    Raises (via monitor.wait_until) if the "Kafka Server ... started"
    log line does not appear within 30 seconds.
    """
    prop_file = self.prop_file(node)
    self.logger.info("kafka.properties:")
    self.logger.info(prop_file)
    node.account.create_file(KafkaService.CONFIG_FILE, prop_file)
    # Render the log4j template with the operational log directory so
    # server.log, controller.log, etc. end up where `logs` expects them.
    node.account.create_file(self.LOG4J_CONFIG,
                             self.render('log4j.properties', log_dir=KafkaService.OPERATIONAL_LOG_DIR))

    self.security_config.setup_node(node)

    cmd = self.start_cmd(node)
    self.logger.debug("Attempting to start KafkaService on %s with command: %s" % (str(node.account), cmd))
    # Watch the stdout capture file for the startup banner before returning.
    with node.account.monitor_log(KafkaService.STDOUT_CAPTURE) as monitor:
        node.account.ssh(cmd)
        monitor.wait_until("Kafka Server.*started", timeout_sec=30, err_msg="Kafka server didn't finish startup")

Expand Down
46 changes: 23 additions & 23 deletions tests/kafkatest/services/kafka/templates/log4j.properties
Original file line number Diff line number Diff line change
Expand Up @@ -13,75 +13,75 @@
# See the License for the specific language governing permissions and
# limitations under the License.

# Log4j template for system-test Kafka brokers.
# {{ log_level }} and {{ log_dir }} are injected by KafkaService when the
# template is rendered, restoring control over the broker log level.
log4j.rootLogger={{ log_level }}, stdout

log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c)%n

log4j.appender.kafkaAppender=org.apache.log4j.DailyRollingFileAppender
log4j.appender.kafkaAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.kafkaAppender.File={{ log_dir }}/server.log
log4j.appender.kafkaAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.kafkaAppender.layout.ConversionPattern=[%d] %p %m (%c)%n

log4j.appender.stateChangeAppender=org.apache.log4j.DailyRollingFileAppender
log4j.appender.stateChangeAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.stateChangeAppender.File={{ log_dir }}/state-change.log
log4j.appender.stateChangeAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.stateChangeAppender.layout.ConversionPattern=[%d] %p %m (%c)%n

log4j.appender.requestAppender=org.apache.log4j.DailyRollingFileAppender
log4j.appender.requestAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.requestAppender.File={{ log_dir }}/kafka-request.log
log4j.appender.requestAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.requestAppender.layout.ConversionPattern=[%d] %p %m (%c)%n

log4j.appender.cleanerAppender=org.apache.log4j.DailyRollingFileAppender
log4j.appender.cleanerAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.cleanerAppender.File={{ log_dir }}/log-cleaner.log
log4j.appender.cleanerAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.cleanerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n

log4j.appender.controllerAppender=org.apache.log4j.DailyRollingFileAppender
log4j.appender.controllerAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.controllerAppender.File={{ log_dir }}/controller.log
log4j.appender.controllerAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.controllerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n

log4j.appender.authorizerAppender=org.apache.log4j.DailyRollingFileAppender
log4j.appender.authorizerAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.authorizerAppender.File={{ log_dir }}/kafka-authorizer.log
log4j.appender.authorizerAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.authorizerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n

log4j.logger.kafka.producer.async.DefaultEventHandler={{ log_level }}, kafkaAppender
log4j.logger.kafka.client.ClientUtils={{ log_level }}, kafkaAppender
log4j.logger.kafka.perf={{ log_level }}, kafkaAppender
log4j.logger.kafka.perf.ProducerPerformance$ProducerThread={{ log_level }}, kafkaAppender
log4j.logger.org.I0Itec.zkclient.ZkClient={{ log_level }}
log4j.logger.kafka={{ log_level }}, kafkaAppender

log4j.logger.kafka.network.RequestChannel$={{ log_level }}, requestAppender
log4j.additivity.kafka.network.RequestChannel$=false

log4j.logger.kafka.network.Processor={{ log_level }}, requestAppender
log4j.logger.kafka.server.KafkaApis={{ log_level }}, requestAppender
log4j.additivity.kafka.server.KafkaApis=false
log4j.logger.kafka.request.logger={{ log_level }}, requestAppender
log4j.additivity.kafka.request.logger=false

log4j.logger.kafka.controller={{ log_level }}, controllerAppender
log4j.additivity.kafka.controller=false

log4j.logger.kafka.log.LogCleaner={{ log_level }}, cleanerAppender
log4j.additivity.kafka.log.LogCleaner=false

log4j.logger.state.change.logger={{ log_level }}, stateChangeAppender
log4j.additivity.state.change.logger=false

log4j.logger.kafka.authorizer.logger={{ log_level }}, authorizerAppender
log4j.additivity.kafka.authorizer.logger=false

1 change: 0 additions & 1 deletion tests/kafkatest/services/verifiable_producer.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,6 @@

from kafkatest.services.kafka.directory import kafka_dir, KAFKA_TRUNK
from kafkatest.services.kafka.version import TRUNK, LATEST_0_8_2
from kafkatest.services.security.security_config import SecurityConfig

import json
import os
Expand Down

0 comments on commit 2e91806

Please sign in to comment.