Add trace logs for better diagnostics

This commit is contained in:
János Benjamin Antal 2022-02-02 14:14:42 +01:00
parent 52eba3194e
commit 02336ada39
2 changed files with 6 additions and 0 deletions

View File

@ -398,6 +398,7 @@ void Consumer::StartConsuming() {
try {
consumer_function_(batch);
spdlog::trace("Consumer function of {} has returned.", info_.consumer_name);
std::vector<RdKafka::TopicPartition *> partitions;
utils::OnScopeExit clear_partitions([&]() { RdKafka::TopicPartition::destroy(partitions); });
@ -405,14 +406,17 @@ void Consumer::StartConsuming() {
throw ConsumerCheckFailedException(
info_.consumer_name, fmt::format("Couldn't get assignment to commit offsets: {}", RdKafka::err2str(err)));
}
spdlog::trace("Got assignment for {}.", info_.consumer_name);
if (const auto err = consumer_->position(partitions); err != RdKafka::ERR_NO_ERROR) {
throw ConsumerCheckFailedException(
info_.consumer_name, fmt::format("Couldn't get offsets from librdkafka {}", RdKafka::err2str(err)));
}
spdlog::trace("Got offset positions for {}.", info_.consumer_name);
if (const auto err = consumer_->commitSync(partitions); err != RdKafka::ERR_NO_ERROR) {
spdlog::warn("Committing offset of consumer {} failed: {}", info_.consumer_name, RdKafka::err2str(err));
break;
}
spdlog::trace("Commited offsets for {}.", info_.consumer_name);
} catch (const std::exception &e) {
spdlog::warn("Error happened in consumer {} while processing a batch: {}!", info_.consumer_name, e.what());
break;

View File

@ -548,7 +548,9 @@ Streams::StreamsMap::iterator Streams::CreateConsumer(StreamsMap &map, const std
spdlog::trace("Commit transaction in stream '{}'", stream_name);
interpreter->CommitTransaction();
spdlog::trace("Transaction committed in stream {}.", stream_name);
result.rows.clear();
spdlog::trace("Result rows are cleared in stream {}.", stream_name);
break;
} catch (const query::TransactionSerializationException &e) {
interpreter->Abort();