diff --git a/tools/src/main/java/org/apache/kafka/tools/LogCompactionTester.java b/tools/src/main/java/org/apache/kafka/tools/LogCompactionTester.java
index accf5241a3538..497b8b92b272e 100644
--- a/tools/src/main/java/org/apache/kafka/tools/LogCompactionTester.java
+++ b/tools/src/main/java/org/apache/kafka/tools/LogCompactionTester.java
@@ -43,8 +43,8 @@
 import java.nio.file.Path;
 import java.time.Duration;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Iterator;
+import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
@@ -61,6 +61,8 @@
 import joptsimple.OptionSet;
 import joptsimple.OptionSpec;
 
+import static java.util.stream.Collectors.toCollection;
+
 /**
  * This is a torture test that runs against an existing broker
@@ -246,9 +248,9 @@ public static void main(String[] args) throws Exception {
         int sleepSecs = optionSet.valueOf(options.sleepSecsOpt);
 
         long testId = RANDOM.nextLong();
-        String[] topics = IntStream.range(0, topicCount)
+        Set<String> topics = IntStream.range(0, topicCount)
             .mapToObj(i -> "log-cleaner-test-" + testId + "-" + i)
-            .toArray(String[]::new);
+            .collect(toCollection(LinkedHashSet::new));
         createTopics(brokerUrl, topics);
 
         System.out.println("Producing " + messages + " messages..to topics " + String.join(",", topics));
@@ -278,7 +280,7 @@ public static void main(String[] args) throws Exception {
 
     }
 
-    private static void createTopics(String brokerUrl, String[] topics) throws Exception {
+    private static void createTopics(String brokerUrl, Set<String> topics) throws Exception {
         Properties adminConfig = new Properties();
         adminConfig.put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, brokerUrl);
@@ -286,7 +288,7 @@ private static void createTopics(String brokerUrl, String[] topics) throws Excep
             Map<String, String> topicConfigs = Map.of(
                 TopicConfig.CLEANUP_POLICY_CONFIG, TopicConfig.CLEANUP_POLICY_COMPACT
             );
-            List<NewTopic> newTopics = Arrays.stream(topics)
+            List<NewTopic> newTopics = topics.stream()
                 .map(name -> new NewTopic(name, 1, (short) 1).configs(topicConfigs)).toList();
             adminClient.createTopics(newTopics).all().get();
@@ -296,7 +298,7 @@ private static void createTopics(String brokerUrl, String[] topics) throws Excep
                 Set<String> allTopics = adminClient.listTopics().names().get();
                 pendingTopics.clear();
                 pendingTopics.addAll(
-                    Arrays.stream(topics)
+                    topics.stream()
                         .filter(topicName -> !allTopics.contains(topicName))
                         .toList()
                 );
@@ -392,7 +394,7 @@ private static void require(boolean requirement, String message) {
         }
     }
 
-    private static Path produceMessages(String brokerUrl, String[] topics, long messages,
+    private static Path produceMessages(String brokerUrl, Set<String> topics, long messages,
                                         String compressionType, int dups, int percentDeletes) throws IOException {
         Map<String, Object> producerProps = Map.of(
             ProducerConfig.MAX_BLOCK_MS_CONFIG, String.valueOf(Long.MAX_VALUE),
@@ -408,8 +410,10 @@ producerProps, new ByteArraySerializer(), new ByteArraySerializer())) {
             try (BufferedWriter producedWriter = Files.newBufferedWriter(
                 producedFilePath, StandardCharsets.UTF_8)) {
-                for (long i = 0; i < messages * topics.length; i++) {
-                    String topic = topics[(int) (i % topics.length)];
+                List<String> topicsList = List.copyOf(topics);
+                int size = topicsList.size();
+                for (long i = 0; i < messages * size; i++) {
+                    String topic = topicsList.get((int) (i % size));
                     int key = RANDOM.nextInt(keyCount);
                     boolean delete = (i % 100) < percentDeletes;
                     ProducerRecord<byte[], byte[]> record;
@@ -430,14 +434,14 @@ record = new ProducerRecord<>(topic,
         }
     }
 
-    private static Path consumeMessages(String brokerUrl, String[] topics) throws IOException {
+    private static Path consumeMessages(String brokerUrl, Set<String> topics) throws IOException {
         Path consumedFilePath = Files.createTempFile("kafka-log-cleaner-consumed-", ".txt");
         System.out.println("Logging consumed messages to " + consumedFilePath);
 
         try (Consumer<String, String> consumer = createConsumer(brokerUrl);
             BufferedWriter consumedWriter = Files.newBufferedWriter(consumedFilePath, StandardCharsets.UTF_8)) {
-            consumer.subscribe(Arrays.asList(topics));
+            consumer.subscribe(topics);
             while (true) {
                 ConsumerRecords<String, String> consumerRecords = consumer.poll(Duration.ofSeconds(20));
                 if (consumerRecords.isEmpty()) return consumedFilePath;
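
The functional effect of the change is easiest to see in isolation. Below is a minimal, self-contained sketch (the class name and the fixed testId are illustrative, not from the patch) showing why a LinkedHashSet keeps the original array's generation order while deduplicating, and how the produce loop regains positional access by snapshotting the set into a List. It also explains why the Arrays.asList wrapper could be dropped: Consumer.subscribe(Collection<String>) accepts the Set directly.

// Standalone sketch, not part of the patch; compile and run as-is.
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.IntStream;

import static java.util.stream.Collectors.toCollection;

public class TopicSetDemo {
    public static void main(String[] args) {
        long testId = 42L; // the tool uses RANDOM.nextLong(); fixed here for a stable demo

        // Same shape as the patch: LinkedHashSet deduplicates while preserving
        // insertion order, so the topics still print as 0..n-1.
        Set<String> topics = IntStream.range(0, 3)
                .mapToObj(i -> "log-cleaner-test-" + testId + "-" + i)
                .collect(toCollection(LinkedHashSet::new));
        System.out.println(String.join(",", topics));

        // Sets have no positional access, so the produce loop snapshots the
        // set into an immutable List once and round-robins over it by index.
        List<String> topicsList = List.copyOf(topics);
        int size = topicsList.size();
        for (long i = 0; i < 2L * size; i++) {
            System.out.println("record " + i + " -> " + topicsList.get((int) (i % size)));
        }
    }
}

A plain HashSet would also compile here, but its iteration order is unspecified, which would scramble the "Producing ... to topics" log line and the producer's round-robin assignment; LinkedHashSet keeps the behavior identical to the old array while gaining uniqueness.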