Commit

ST: Improve logging (strimzi#2881)
* WIP: improve logging

Signed-off-by: Jakub Stejskal <xstejs24@gmail.com>

* Add some logging

Signed-off-by: Jakub Stejskal <xstejs24@gmail.com>

* Improve another part

Signed-off-by: Jakub Stejskal <xstejs24@gmail.com>

* Improve another part

Signed-off-by: Jakub Stejskal <xstejs24@gmail.com>

* Another bunch of improvements

Signed-off-by: Jakub Stejskal <xstejs24@gmail.com>

* Last bunch of improvements

Signed-off-by: Jakub Stejskal <xstejs24@gmail.com>

* fixup! Last bunch of improvements

Signed-off-by: Jakub Stejskal <xstejs24@gmail.com>
Frawless authored Apr 23, 2020
1 parent 1c4e736 commit 16f1160
Showing 55 changed files with 277 additions and 269 deletions.
@@ -119,7 +119,7 @@ public class Environment {
     private Environment() { }

     static {
-        String debugFormat = "{}:{}";
+        String debugFormat = "{}: {}";
         LOGGER.info("Used environment variables:");
         LOGGER.info(debugFormat, STRIMZI_ORG_ENV, STRIMZI_ORG);
         LOGGER.info(debugFormat, STRIMZI_TAG_ENV, STRIMZI_TAG);
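For context, a minimal sketch of how the SLF4J placeholder format changed above behaves; this demo class is not part of the commit, and the class name and argument values are made up for illustration.

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class PlaceholderDemo {
    private static final Logger LOGGER = LoggerFactory.getLogger(PlaceholderDemo.class);

    public static void main(String[] args) {
        // Same parameterized style as the Environment static block above:
        // each {} is replaced by the matching argument at log time.
        String debugFormat = "{}: {}";
        // Prints e.g. "STRIMZI_ORG: strimzi"; the added space after ':' is
        // exactly what the one-character change in the hunk above introduces.
        LOGGER.info(debugFormat, "STRIMZI_ORG", "strimzi");
    }
}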
@@ -76,11 +76,10 @@ public int sendMessagesPlain(long timeout) {
             .withTopicName(topicName)
             .build();

-        LOGGER.info("Starting verifiableClient plain producer with following configuration {}", producer.toString());
-        LOGGER.info("Sending {} messages to {}#{}", messageCount, producer.getBootstrapServer(), topicName);
+        LOGGER.info("Starting verifiableClient plain producer with the following configuration: {}", producer.toString());
+        LOGGER.info("Producing {} messages to {}:{} from pod {}", messageCount, producer.getBootstrapServer(), topicName, podName);

-        TestUtils.waitFor("Sending messages", Constants.PRODUCER_POLL_INTERVAL, Constants.GLOBAL_CLIENTS_TIMEOUT, () -> {
-            LOGGER.info("Sending {} messages to {}", messageCount, podName);
+        TestUtils.waitFor("produce messages", Constants.PRODUCER_POLL_INTERVAL, Constants.GLOBAL_CLIENTS_TIMEOUT, () -> {
             producer.run(Constants.PRODUCER_TIMEOUT);
             int sent = getSentMessagesCount(producer.getMessages().toString(), messageCount);
             return sent == messageCount;
@@ -110,8 +109,8 @@ public int sendMessagesTls(long timeout) {
             .withTopicName(topicName)
             .build();

-        LOGGER.info("Starting verifiableClient tls producer with following configuration {}", producerTls.toString());
-        LOGGER.info("Sending {} messages to {}#{}", messageCount, producerTls.getBootstrapServer(), topicName);
+        LOGGER.info("Starting verifiableClient tls producer with the following configuration: {}", producerTls.toString());
+        LOGGER.info("Producing {} messages to {}:{} from pod {}", messageCount, producerTls.getBootstrapServer(), topicName, podName);

         boolean hasPassed = producerTls.run(timeout);
         LOGGER.info("Producer finished correctly: {}", hasPassed);
@@ -143,8 +142,8 @@ public int receiveMessagesPlain(long timeout) {
             .build();


-        LOGGER.info("Starting verifiableClient plain consumer with following configuration {}", consumer.toString());
-        LOGGER.info("Wait for receive {} messages from {}#{}", messageCount, consumer.getBootstrapServer(), topicName);
+        LOGGER.info("Starting verifiableClient plain consumer with the following configuration: {}", consumer.toString());
+        LOGGER.info("Consuming {} messages from {}#{} from pod {}", messageCount, consumer.getBootstrapServer(), topicName, podName);

         boolean hasPassed = consumer.run(timeout);
         LOGGER.info("Consumer finished correctly: {}", hasPassed);
@@ -178,8 +177,8 @@ public int receiveMessagesTls(long timeoutMs) {
             .withConsumerInstanceId("instance" + new Random().nextInt(Integer.MAX_VALUE))
             .build();

-        LOGGER.info("Starting verifiableClient tls consumer with following configuration {}", consumerTls.toString());
-        LOGGER.info("Wait for receive {} messages from {}#{}", messageCount, consumerTls.getBootstrapServer(), topicName);
+        LOGGER.info("Starting verifiableClient tls consumer with the following configuration: {}", consumerTls.toString());
+        LOGGER.info("Consuming {} messages from {}:{} from pod {}", messageCount, consumerTls.getBootstrapServer(), topicName, podName);

         boolean hasPassed = consumerTls.run(timeoutMs);
         LOGGER.info("Consumer finished correctly: {}", hasPassed);
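The producer calls above rely on TestUtils.waitFor to poll until the client reports success. A minimal, self-contained sketch of that poll-until-true pattern (not the actual TestUtils implementation; the class name, messages, and timeouts are illustrative):

import java.util.function.BooleanSupplier;

public class WaitForDemo {
    // Poll `ready` every pollIntervalMs until it returns true or timeoutMs elapses.
    static void waitFor(String description, long pollIntervalMs, long timeoutMs, BooleanSupplier ready) {
        long deadline = System.currentTimeMillis() + timeoutMs;
        while (!ready.getAsBoolean()) {
            if (System.currentTimeMillis() > deadline) {
                throw new RuntimeException("Timed out waiting for: " + description);
            }
            try {
                Thread.sleep(pollIntervalMs);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                throw new RuntimeException("Interrupted while waiting for: " + description, e);
            }
        }
    }

    public static void main(String[] args) {
        long start = System.currentTimeMillis();
        waitFor("one second to pass", 100, 5_000, () -> System.currentTimeMillis() - start > 1_000);
        System.out.println("Condition met");
    }
}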
@@ -198,15 +198,25 @@ private boolean runClient(long timeoutMs, boolean logToOutput) {
         messages.clear();
         try {
             executor = new Exec();
-            int ret = executor.execute(null, prepareCommand(), timeoutMs);
+            ArrayList<String> command = prepareCommand();
+            LOGGER.info("Client command: {}", String.join(" ", command));
+            int ret = executor.execute(null, command, timeoutMs);
             synchronized (lock) {
-                LOGGER.info("{} {} Return code - {}", this.getClass().getSimpleName(), clientType, ret);
                 if (logToOutput) {
-                    LOGGER.info("{} {} stdout : {}", this.getClass().getSimpleName(), clientType, executor.out());
                     if (ret == 0) {
                         parseToList(executor.out());
-                    } else if (!executor.err().isEmpty()) {
-                        LOGGER.error("{} {} stderr : {}", this.getClass().getSimpleName(), clientType, executor.err());
+                    } else {
+                        LOGGER.info("{} RETURN code: {}", clientType, ret);
+                        if (!executor.out().isEmpty()) {
+                            LOGGER.info("======STDOUT START=======");
+                            LOGGER.info("{}", executor.out());
+                            LOGGER.info("======STDOUT END======");
+                        }
+                        if (!executor.err().isEmpty()) {
+                            LOGGER.info("======STDERR START=======");
+                            LOGGER.info("{}", executor.err());
+                            LOGGER.info("======STDERR END======");
+                        }
                     }
                 }
             }
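The reworked runClient logging above prints the executed command up front and wraps captured stdout/stderr in START/END banners instead of inlining them into a single line. A minimal standalone sketch of the same idea using ProcessBuilder (not the Strimzi Exec class; the helper name and the echo command are illustrative):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.List;

public class BannerLogDemo {
    private static final Logger LOGGER = LoggerFactory.getLogger(BannerLogDemo.class);

    static int runAndLog(List<String> command) throws IOException, InterruptedException {
        LOGGER.info("Client command: {}", String.join(" ", command));
        Process process = new ProcessBuilder(command).start();
        // Sequential reads are fine for the tiny output of this demo; a real
        // runner would drain both streams concurrently to avoid blocking on full pipe buffers.
        String out = new String(process.getInputStream().readAllBytes(), StandardCharsets.UTF_8);
        String err = new String(process.getErrorStream().readAllBytes(), StandardCharsets.UTF_8);
        int ret = process.waitFor();
        LOGGER.info("RETURN code: {}", ret);
        if (!out.isEmpty()) {
            LOGGER.info("======STDOUT START=======");
            LOGGER.info("{}", out);
            LOGGER.info("======STDOUT END======");
        }
        if (!err.isEmpty()) {
            LOGGER.info("======STDERR START=======");
            LOGGER.info("{}", err);
            LOGGER.info("======STDERR END======");
        }
        return ret;
    }

    public static void main(String[] args) throws Exception {
        runAndLog(List.of("echo", "hello"));
    }
}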
@@ -41,7 +41,7 @@ public LogCollector(KubeClient kubeClient, File logDir) {
     }

     public void collectLogsFromPods() {
-        LOGGER.info("Collecting logs for pods in namespace {}", namespace);
+        LOGGER.info("Collecting logs for Pod(s) in namespace {}", namespace);

         try {
             kubeClient.listPods().forEach(pod -> {
@@ -61,36 +61,36 @@ public void collectLogsFromPods() {
     }

     public void collectEvents() {
-        LOGGER.info("Collecting events in namespace {}", namespace);
+        LOGGER.info("Collecting events in Namespace {}", namespace);
         String events = cmdKubeClient().getEvents();
         // Write events to file
         writeFile(eventsDir + "/" + "events-in-namespace" + kubeClient.getNamespace() + ".log", events);
     }

     public void collectConfigMaps() {
-        LOGGER.info("Collecting configmaps in namespace {}", namespace);
+        LOGGER.info("Collecting ConfigMaps in Namespace {}", namespace);
         kubeClient.listConfigMaps().forEach(configMap -> {
             writeFile(configMapDir + "/" + configMap.getMetadata().getName() + "-" + namespace + ".log", configMap.toString());
         });
     }

     public void collectDeployments() {
-        LOGGER.info("Collecting Deployments in namespaces {}", namespace);
+        LOGGER.info("Collecting Deployments in Namespace {}", namespace);
         writeFile(logDir + "/deployments.log", cmdKubeClient().list("deployment").toString());
     }

     public void collectStatefulSets() {
-        LOGGER.info("Collecting StatefulSets in namespaces {}", namespace);
+        LOGGER.info("Collecting StatefulSets in Namespace {}", namespace);
         writeFile(logDir + "/statefulsets.log", cmdKubeClient().list("statefulset").toString());
     }

     public void collectReplicaSets() {
-        LOGGER.info("Collecting ReplicaSet in namespaces {}", namespace);
+        LOGGER.info("Collecting ReplicaSet in Namespace {}", namespace);
         writeFile(logDir + "/replicasets.log", cmdKubeClient().list("replicaset").toString());
     }

     public void collectStrimzi() {
-        LOGGER.info("Collecting CR in namespaces {}", namespace);
+        LOGGER.info("Collecting Strimzi in Namespace {}", namespace);
         String crData = cmdKubeClient().exec(false, "get", "strimzi", "-o", "yaml", "-n", namespace).out();
         writeFile(logDir + "/strimzi-custom-resources.log", crData);
     }
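Each collector above formats a resource listing and hands it to a writeFile helper. A minimal sketch of such a helper (not the Strimzi implementation; the class name, path, and content are illustrative):

import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.nio.file.Path;

public class WriteFileDemo {
    // Dump a string (pod log, ConfigMap dump, events listing, ...) into a log file.
    static void writeFile(String path, String content) {
        try {
            Files.createDirectories(Path.of(path).getParent());  // make sure the log dir exists
            Files.writeString(Path.of(path), content);
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        }
    }

    public static void main(String[] args) {
        writeFile("target/logs/events-in-namespace-myproject.log", "LAST SEEN   TYPE   REASON   ...");
    }
}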
@@ -149,7 +149,6 @@ public static DoneableRoleBinding roleBinding(String yamlPath, String namespace,
     }

     private static DoneableRoleBinding roleBinding(RoleBinding roleBinding, String clientNamespace) {
-        LOGGER.info("Apply RoleBinding in namespace {}", clientNamespace);
         ResourceManager.kubeClient().namespace(clientNamespace).createOrReplaceRoleBinding(roleBinding);
         deleteLater(roleBinding);
         return new DoneableRoleBinding(roleBinding);
@@ -165,7 +164,6 @@ public static DoneableClusterRoleBinding clusterRoleBinding(String yamlPath, Str
     }

     public static DoneableClusterRoleBinding clusterRoleBinding(ClusterRoleBinding clusterRoleBinding, String clientNamespace) {
-        LOGGER.info("Apply ClusterRoleBinding in namespace {}", clientNamespace);
         ResourceManager.kubeClient().createOrReplaceClusterRoleBinding(clusterRoleBinding);
         deleteLater(clusterRoleBinding);
         return new DoneableClusterRoleBinding(clusterRoleBinding);
@@ -254,14 +252,14 @@ public static ServiceBuilder getSystemtestsServiceResource(String appName, int p

     public static DoneableService createServiceResource(String appName, int port, String clientNamespace, String transportProtocol) {
         Service service = getSystemtestsServiceResource(appName, port, clientNamespace, transportProtocol).build();
-        LOGGER.info("Creating service {} in namespace {}", service.getMetadata().getName(), clientNamespace);
+        LOGGER.info("Creating Service {} in namespace {}", service.getMetadata().getName(), clientNamespace);
         ResourceManager.kubeClient().createService(service);
         deleteLater(service);
         return new DoneableService(service);
     }

     public static DoneableService createServiceResource(Service service, String clientNamespace) {
-        LOGGER.info("Creating service {} in namespace {}", service.getMetadata().getName(), clientNamespace);
+        LOGGER.info("Creating Service {} in namespace {}", service.getMetadata().getName(), clientNamespace);
         ResourceManager.kubeClient().createService(service);
         deleteLater(service);
         return new DoneableService(service);
@@ -424,10 +422,7 @@ private static ClusterRoleBinding getClusterRoleBindingFromYaml(String yamlPath)

     private static Deployment waitFor(Deployment deployment) {
         String deploymentName = deployment.getMetadata().getName();
-
-        LOGGER.info("Waiting for deployment {}", deploymentName);
         DeploymentUtils.waitForDeploymentReady(deploymentName, deployment.getSpec().getReplicas());
-        LOGGER.info("Deployment {} is ready", deploymentName);
         return deployment;
     }

@@ -103,7 +103,7 @@ public static <T extends CustomResource, L extends CustomResourceList<T>, D exte
     }
     @SuppressWarnings("unchecked")
     public static <T extends HasMetadata> T deleteLater(MixedOperation<T, ?, ?, ?> operation, T resource) {
-        LOGGER.info("Scheduled deletion of {} {} in namespace {}",
+        LOGGER.debug("Scheduled deletion of {} {} in namespace {}",
             resource.getKind(), resource.getMetadata().getName(), resource.getMetadata().getNamespace() == null ? "(not set)" : resource.getMetadata().getNamespace());
         switch (resource.getKind()) {
             case Kafka.RESOURCE_KIND:
@@ -262,7 +262,7 @@ private static void waitForDeletion(Kafka kafka) {
     }

     private static void waitForDeletion(KafkaConnect kafkaConnect) {
-        LOGGER.info("Waiting when all the pods are terminated for Kafka Connect {}", kafkaConnect.getMetadata().getName());
+        LOGGER.info("Waiting when all the Pods are terminated for KafkaConnect {}", kafkaConnect.getMetadata().getName());

         DeploymentUtils.waitForDeploymentDeletion(KafkaMirrorMakerResources.deploymentName(kafkaConnect.getMetadata().getName()));
         ReplicaSetUtils.waitForReplicaSetDeletion(KafkaMirrorMakerResources.deploymentName(kafkaConnect.getMetadata().getName()));
@@ -273,7 +273,7 @@ private static void waitForDeletion(KafkaConnect kafkaConnect) {
     }

     private static void waitForDeletion(KafkaConnectS2I kafkaConnectS2I) {
-        LOGGER.info("Waiting when all the pods are terminated for Kafka Connect S2I {}", kafkaConnectS2I.getMetadata().getName());
+        LOGGER.info("Waiting when all the Pods are terminated for KafkaConnectS2I {}", kafkaConnectS2I.getMetadata().getName());

         DeploymentUtils.waitForDeploymentConfigDeletion(KafkaMirrorMakerResources.deploymentName(kafkaConnectS2I.getMetadata().getName()));
         ReplicaSetUtils.waitForReplicaSetDeletion(KafkaMirrorMakerResources.deploymentName(kafkaConnectS2I.getMetadata().getName()));
@@ -287,7 +287,7 @@ private static void waitForDeletion(KafkaConnectS2I kafkaConnectS2I) {
     }

     private static void waitForDeletion(KafkaMirrorMaker kafkaMirrorMaker) {
-        LOGGER.info("Waiting when all the pods are terminated for Kafka Mirror Maker {}", kafkaMirrorMaker.getMetadata().getName());
+        LOGGER.info("Waiting when all the Pods are terminated for KafkaMirrorMaker {}", kafkaMirrorMaker.getMetadata().getName());

         DeploymentUtils.waitForDeploymentDeletion(KafkaMirrorMakerResources.deploymentName(kafkaMirrorMaker.getMetadata().getName()));
         ReplicaSetUtils.waitForReplicaSetDeletion(KafkaMirrorMakerResources.deploymentName(kafkaMirrorMaker.getMetadata().getName()));
@@ -298,7 +298,7 @@ private static void waitForDeletion(KafkaMirrorMaker kafkaMirrorMaker) {
     }

     private static void waitForDeletion(KafkaMirrorMaker2 kafkaMirrorMaker2) {
-        LOGGER.info("Waiting when all the pods are terminated for Kafka MirrorMaker2 {}", kafkaMirrorMaker2.getMetadata().getName());
+        LOGGER.info("Waiting when all the Pods are terminated for KafkaMirrorMaker2 {}", kafkaMirrorMaker2.getMetadata().getName());

         DeploymentUtils.waitForDeploymentDeletion(KafkaMirrorMakerResources.deploymentName(kafkaMirrorMaker2.getMetadata().getName()));
         ReplicaSetUtils.waitForReplicaSetDeletion(KafkaMirrorMakerResources.deploymentName(kafkaMirrorMaker2.getMetadata().getName()));
@@ -309,7 +309,7 @@ private static void waitForDeletion(KafkaMirrorMaker2 kafkaMirrorMaker2) {
     }

     private static void waitForDeletion(KafkaBridge kafkaBridge) {
-        LOGGER.info("Waiting when all the pods are terminated for Kafka Bridge {}", kafkaBridge.getMetadata().getName());
+        LOGGER.info("Waiting when all the Pods are terminated for KafkaBridge {}", kafkaBridge.getMetadata().getName());

         DeploymentUtils.waitForDeploymentDeletion(KafkaMirrorMakerResources.deploymentName(kafkaBridge.getMetadata().getName()));
         ReplicaSetUtils.waitForReplicaSetDeletion(KafkaMirrorMakerResources.deploymentName(kafkaBridge.getMetadata().getName()));
@@ -330,20 +330,22 @@ private static void waitForDeletion(Deployment deployment) {
     }

     public static void deleteClassResources() {
-        LOGGER.info("Going to clear all class resources");
+        LOGGER.info("-----CLEARING CLASS RESOURCES-----");
         while (!classResources.empty()) {
             classResources.pop().run();
         }
         classResources.clear();
+        LOGGER.info("-----CLASS RESOURCES CLEARED-----");
     }

     public static void deleteMethodResources() {
-        LOGGER.info("Going to clear all method resources");
+        LOGGER.info("-----CLEARING METHOD RESOURCES-----");
         while (!methodResources.empty()) {
             methodResources.pop().run();
         }
         methodResources.clear();
         pointerResources = classResources;
+        LOGGER.info("-----METHOD RESOURCES CLEARED-----");
     }

     public static String getImageValueFromCO(String name) {
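deleteClassResources and deleteMethodResources above drain a stack of clean-up actions, so resources are deleted in the reverse order of their creation. A minimal sketch of that LIFO clean-up pattern (not the Strimzi ResourceManager; the class and resource names are illustrative):

import java.util.Stack;

public class CleanupDemo {
    private static final Stack<Runnable> classResources = new Stack<>();

    // Register a clean-up action when a resource is created.
    static void deleteLater(String resourceName) {
        classResources.push(() -> System.out.println("Deleting " + resourceName));
    }

    static void deleteClassResources() {
        System.out.println("-----CLEARING CLASS RESOURCES-----");
        while (!classResources.empty()) {
            classResources.pop().run();   // last created, first deleted
        }
        System.out.println("-----CLASS RESOURCES CLEARED-----");
    }

    public static void main(String[] args) {
        deleteLater("kafka-cluster");
        deleteLater("kafka-topic");
        deleteClassResources();           // deletes kafka-topic, then kafka-cluster
    }
}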
@@ -92,9 +92,9 @@ private static KafkaBridge getKafkaBridgeFromYaml(String yamlPath) {
     private static KafkaBridge waitFor(KafkaBridge kafkaBridge) {
         String kafkaBridgeCrName = kafkaBridge.getMetadata().getName();

-        LOGGER.info("Waiting for Kafka Bridge {}", kafkaBridgeCrName);
+        LOGGER.info("Waiting for KafkaBridge {}", kafkaBridgeCrName);
         DeploymentUtils.waitForDeploymentReady(KafkaBridgeResources.deploymentName(kafkaBridgeCrName), kafkaBridge.getSpec().getReplicas());
-        LOGGER.info("Kafka Bridge {} is ready", kafkaBridgeCrName);
+        LOGGER.info("KafkaBridge {} is ready", kafkaBridgeCrName);

         return kafkaBridge;
     }
@@ -123,9 +123,9 @@ private static KafkaConnect getKafkaConnectFromYaml(String yamlPath) {
     private static KafkaConnect waitFor(KafkaConnect kafkaConnect) {
         String kafkaConnectCrName = kafkaConnect.getMetadata().getName();

-        LOGGER.info("Waiting for Kafka Connect {}", kafkaConnectCrName);
+        LOGGER.info("Waiting for KafkaConnect {}", kafkaConnectCrName);
         DeploymentUtils.waitForDeploymentReady(KafkaConnectResources.deploymentName(kafkaConnectCrName), kafkaConnect.getSpec().getReplicas());
-        LOGGER.info("Kafka Connect {} is ready", kafkaConnectCrName);
+        LOGGER.info("KafkaConnect {} is ready", kafkaConnectCrName);

         return kafkaConnect;
     }
@@ -107,9 +107,9 @@ private static KafkaConnectS2I getKafkaConnectS2IFromYaml(String yamlPath) {
     private static KafkaConnectS2I waitFor(KafkaConnectS2I kafkaConnectS2I) {
         String kafkaConnectS2ICrName = kafkaConnectS2I.getMetadata().getName();

-        LOGGER.info("Waiting for Kafka ConnectS2I {}", kafkaConnectS2ICrName);
+        LOGGER.info("Waiting for KafkaConnectS2I {}", kafkaConnectS2ICrName);
         KafkaConnectS2IUtils.waitForConnectS2IStatus(kafkaConnectS2ICrName, "Ready");
-        LOGGER.info("Kafka ConnectS2I {} is ready", kafkaConnectS2ICrName);
+        LOGGER.info("KafkaConnectS2I {} is ready", kafkaConnectS2ICrName);

         return kafkaConnectS2I;
     }
@@ -94,9 +94,9 @@ private static KafkaConnector getKafkaConnectorFromYaml(String yamlPath) {
     private static KafkaConnector waitFor(KafkaConnector kafkaConnector) {
         String kafkaConnectorCrName = kafkaConnector.getMetadata().getName();

-        LOGGER.info("Waiting for Kafka Connector {}", kafkaConnectorCrName);
+        LOGGER.info("Waiting for KafkaConnector {}", kafkaConnectorCrName);
         KafkaConnectorUtils.waitForConnectorStatus(kafkaConnectorCrName, "Ready");
-        LOGGER.info("Kafka Connector {} is ready", kafkaConnectorCrName);
+        LOGGER.info("KafkaConnector {} is ready", kafkaConnectorCrName);

         return kafkaConnector;
     }
@@ -140,9 +140,9 @@ private static KafkaMirrorMaker2 getKafkaMirrorMaker2FromYaml(String yamlPath) {
     private static KafkaMirrorMaker2 waitFor(KafkaMirrorMaker2 kafkaMirrorMaker2) {
         String kafkaMirrorMaker2CrName = kafkaMirrorMaker2.getMetadata().getName();

-        LOGGER.info("Waiting for Kafka MirrorMaker2 {}", kafkaMirrorMaker2CrName);
+        LOGGER.info("Waiting for KafkaMirrorMaker2 {}", kafkaMirrorMaker2CrName);
         DeploymentUtils.waitForDeploymentReady(KafkaMirrorMaker2Resources.deploymentName(kafkaMirrorMaker2CrName), kafkaMirrorMaker2.getSpec().getReplicas());
-        LOGGER.info("Kafka MirrorMaker2 {} is ready", kafkaMirrorMaker2CrName);
+        LOGGER.info("KafkaMirrorMaker2 {} is ready", kafkaMirrorMaker2CrName);

         return kafkaMirrorMaker2;
     }
@@ -117,9 +117,9 @@ private static KafkaMirrorMaker getKafkaMirrorMakerFromYaml(String yamlPath) {
     private static KafkaMirrorMaker waitFor(KafkaMirrorMaker kafkaMirrorMaker) {
         String kafkaMirrorMakerCrName = kafkaMirrorMaker.getMetadata().getName();

-        LOGGER.info("Waiting for Kafka MirrorMaker {}", kafkaMirrorMakerCrName);
+        LOGGER.info("Waiting for KafkaMirrorMaker {}", kafkaMirrorMakerCrName);
         DeploymentUtils.waitForDeploymentReady(KafkaMirrorMakerResources.deploymentName(kafkaMirrorMakerCrName), kafkaMirrorMaker.getSpec().getReplicas());
-        LOGGER.info("Kafka MirrorMaker {} is ready", kafkaMirrorMakerCrName);
+        LOGGER.info("KafkaMirrorMaker {} is ready", kafkaMirrorMakerCrName);

         return kafkaMirrorMaker;
     }