-
Notifications
You must be signed in to change notification settings - Fork 2
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Showing
6 changed files
with
196 additions
and
0 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
31 changes: 31 additions & 0 deletions
31
kafka-demo/src/main/java/com/tomshidi/kafka/partitioner/CustomPartitioner.java
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,31 @@ | ||
package com.tomshidi.kafka.partitioner;

import org.apache.kafka.clients.producer.Partitioner;
import org.apache.kafka.common.Cluster;

import java.util.Map;

/**
 * Partitioner that assigns a record to a partition by hashing the record key's
 * string form, spreading keys evenly across all partitions of the topic.
 *
 * <p>Requires a non-null key: records sent without a key are rejected with
 * {@link IllegalArgumentException} rather than silently mis-partitioned.
 *
 * @author TomShiDi
 * @since 2024/3/13 14:39
 */
public class CustomPartitioner implements Partitioner {

    /**
     * Computes the target partition for a record.
     *
     * @param topic      topic the record is being sent to
     * @param key        record key; must be non-null for this partitioner
     * @param keyBytes   serialized key (unused)
     * @param value      record value (unused)
     * @param valueBytes serialized value (unused)
     * @param cluster    current cluster metadata, used to look up the partition count
     * @return a partition index in {@code [0, partitionCount)}
     * @throws IllegalArgumentException if {@code key} is null
     */
    @Override
    public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) {
        // 1. Compute the partition number from the key's hash.
        if (key == null) {
            throw new IllegalArgumentException("CustomPartitioner requires a non-null key for topic: " + topic);
        }
        int keyHash = key.toString().hashCode();
        // Primitive int avoids an unboxing NPE from the boxed Integer return.
        int partitionCount = cluster.partitionCountForTopic(topic);
        // Math.floorMod keeps the result in [0, partitionCount) even when hashCode()
        // is negative; the plain % operator could yield a negative, invalid partition.
        return Math.floorMod(keyHash, partitionCount);
    }

    @Override
    public void close() {
        // No resources to release.
    }

    @Override
    public void configure(Map<String, ?> configs) {
        // No configuration needed.
    }
}
38 changes: 38 additions & 0 deletions
38
kafka-demo/src/main/java/com/tomshidi/kafka/producer/CustomCallbackProducer.java
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,38 @@ | ||
package com.tomshidi.kafka.producer;

import com.tomshidi.kafka.partitioner.CustomPartitioner;
import org.apache.kafka.clients.producer.*;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.Properties;

/**
 * Demo producer that sends keyed records asynchronously with a completion
 * callback, routing records through {@link CustomPartitioner}.
 *
 * @author TomShiDi
 * @since 2024/3/13 10:01
 */
public class CustomCallbackProducer {
    public static void main(String[] args) {
        // 1. Build the producer configuration.
        Properties properties = new Properties();
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "node1:9092,node2:9092");
        properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        // Class literal instead of a hard-coded string: a rename/move of the
        // partitioner now fails at compile time rather than at runtime.
        properties.put(ProducerConfig.PARTITIONER_CLASS_CONFIG, CustomPartitioner.class.getName());
        // 2. Create the Kafka producer; try-with-resources guarantees close()
        //    (and a flush of buffered records) even if send() throws.
        try (KafkaProducer<String, String> kafkaProducer = new KafkaProducer<>(properties)) {
            // 3. Send ten keyed records and report metadata on success.
            for (int i = 1; i < 11; i++) {
                String message = String.format("第%d号服务员", i);
                ProducerRecord<String, String> producerRecord = new ProducerRecord<>("first", i + "", message);
                kafkaProducer.send(producerRecord,
                        (recordMetadata, ex) -> {
                            if (ex == null) {
                                int partition = recordMetadata.partition();
                                long offset = recordMetadata.offset();
                                System.out.printf("数据:%s\n分区:%d\n偏移量:%d\n", message, partition, offset);
                            }
                        });
            }
        } // 4. Producer closed automatically here.
    }
}
32 changes: 32 additions & 0 deletions
32
kafka-demo/src/main/java/com/tomshidi/kafka/producer/CustomProducer.java
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,32 @@ | ||
package com.tomshidi.kafka.producer;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.Properties;

/**
 * Minimal demo producer: sends ten unkeyed records asynchronously
 * (fire-and-forget, no callback) to the {@code first} topic.
 *
 * @author TomShiDi
 * @since 2024/3/13 10:01
 */
public class CustomProducer {
    public static void main(String[] args) {
        // 1. Build the producer configuration.
        Properties properties = new Properties();
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "node1:9092,node2:9092");
        properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        // 2. Create the producer; try-with-resources guarantees close() — which
        //    also flushes buffered records — even if a send() call throws.
        try (KafkaProducer<String, String> kafkaProducer = new KafkaProducer<>(properties)) {
            // 3. Send the records (fire-and-forget).
            for (int i = 1; i < 11; i++) {
                String message = String.format("第%d号服务员", i);
                ProducerRecord<String, String> producerRecord = new ProducerRecord<>("first", message);
                kafkaProducer.send(producerRecord);
            }
        } // 4. Producer closed automatically here.
    }
}
36 changes: 36 additions & 0 deletions
36
kafka-demo/src/main/java/com/tomshidi/kafka/producer/CustomSyncProducer.java
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,36 @@ | ||
package com.tomshidi.kafka.producer;

import org.apache.kafka.clients.producer.*;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.Properties;
import java.util.concurrent.ExecutionException;

/**
 * Demo of synchronous sending: each send() is followed by get(), blocking
 * until the broker acknowledges the record before the next one is sent.
 *
 * @author TomShiDi
 * @since 2024/3/13 10:42
 */
public class CustomSyncProducer {
    public static void main(String[] args) throws ExecutionException, InterruptedException {
        // 1. Build the producer configuration.
        Properties properties = new Properties();
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "node1:9092");
        properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        // 2. Create the producer. The original never called close(), leaking the
        //    producer's network threads and buffers; try-with-resources fixes that.
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(properties)) {
            // 3. Send records one at a time; get() blocks until each is acked,
            //    making the loop effectively synchronous.
            for (int i = 1; i < 11; i++) {
                String message = String.format("第%d号服务员", i);
                ProducerRecord<String, String> record = new ProducerRecord<>("first", message);
                producer.send(record, (recordMetadata, ex) -> {
                    if (ex == null) {
                        int partition = recordMetadata.partition();
                        long offset = recordMetadata.offset();
                        System.out.printf("数据:%s\n分区:%d\n偏移量:%d\n", message, partition, offset);
                    }
                }).get();
            }
        } // Producer closed automatically here.
    }
}
51 changes: 51 additions & 0 deletions
51
kafka-demo/src/main/java/com/tomshidi/kafka/producer/CustomThroughputProducer.java
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,51 @@ | ||
package com.tomshidi.kafka.producer;

import com.tomshidi.kafka.partitioner.CustomPartitioner;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.Properties;

/**
 * Demo producer tuned for throughput: larger batches, short linger, snappy
 * compression, acks=all with retries, and idempotence enabled.
 *
 * @author TomShiDi
 * @since 2024/3/13 10:01
 */
public class CustomThroughputProducer {
    public static void main(String[] args) {
        // 1. Build the producer configuration.
        Properties properties = new Properties();
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "node1:9092,node2:9092");
        properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        // Class literal instead of a hard-coded string: a rename/move of the
        // partitioner now fails at compile time rather than at runtime.
        properties.put(ProducerConfig.PARTITIONER_CLASS_CONFIG, CustomPartitioner.class.getName());
        // Throughput tuning: 32 MiB send buffer, 16 KiB batches, wait up to
        // 10 ms to fill a batch, and compress batches with snappy.
        properties.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432);
        properties.put(ProducerConfig.BATCH_SIZE_CONFIG, 16384);
        properties.put(ProducerConfig.LINGER_MS_CONFIG, 10);
        properties.put(ProducerConfig.COMPRESSION_TYPE_CONFIG, "snappy");
        // Durability: require acks from all in-sync replicas, retry transient failures.
        properties.put(ProducerConfig.ACKS_CONFIG, "all");
        properties.put(ProducerConfig.RETRIES_CONFIG, 3);
        // Idempotence: retries cannot create duplicate records.
        properties.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, true);

        // 2. Create the producer; try-with-resources guarantees close()
        //    (flushing buffered records) even if send() throws.
        try (KafkaProducer<String, String> kafkaProducer = new KafkaProducer<>(properties)) {
            // 3. Send ten keyed records and report metadata on success.
            for (int i = 1; i < 11; i++) {
                String message = String.format("第%d号服务员", i);
                ProducerRecord<String, String> producerRecord = new ProducerRecord<>("first", i + "", message);
                kafkaProducer.send(producerRecord,
                        (recordMetadata, ex) -> {
                            if (ex == null) {
                                int partition = recordMetadata.partition();
                                long offset = recordMetadata.offset();
                                System.out.printf("数据:%s\n分区:%d\n偏移量:%d\n", message, partition, offset);
                            }
                        });
            }
        } // 4. Producer closed automatically here.
    }
}