本文整理了Java中org.apache.flink.streaming.connectors.kafka.internals.KafkaTopicsDescriptor
类的一些代码示例,展示了KafkaTopicsDescriptor
类的具体用法。这些代码示例主要来源于Github
/Stackoverflow
/Maven
等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。KafkaTopicsDescriptor
类的具体详情如下:
包路径:org.apache.flink.streaming.connectors.kafka.internals.KafkaTopicsDescriptor
类名称:KafkaTopicsDescriptor
KafkaTopicsDescriptor介绍
[英]A Kafka Topics Descriptor describes how the consumer subscribes to Kafka topics - either a fixed list of topics, or a topic pattern.
[中]卡夫卡主题描述符描述消费者订阅卡夫卡主题的方式-主题的固定列表或主题模式。
代码示例
代码示例来源:origin: apache/flink
/**
 * Builds a fetcher that only uses the partition-lookup side of the discoverer.
 *
 * <p>Fixes from the scraped original: the identifier {@code partitiOnDiscoverer}
 * (an HTML-scraping artifact) is restored to {@code partitionDiscoverer}, and the
 * raw {@code List} parameter is re-generified to {@code List<String>}.
 *
 * @param topics     fixed list of topic names to look up partitions for
 * @param properties Kafka client properties passed through to the discoverer
 */
PartitionInfoFetcher(List<String> topics, Properties properties) {
    // Only partial functionality of the partition discoverer is used here,
    // so the subtask index / parallelism arguments (0, 1) don't matter.
    this.partitionDiscoverer = new Kafka08PartitionDiscoverer(new KafkaTopicsDescriptor(topics, null), 0, 1, properties);
    this.topics = topics;
}
代码示例来源:origin: apache/flink
// NOTE(review): this snippet is truncated by the source article — the loop over
// matchedTopics, the iterator declaration for `iter`, and the closing braces are
// missing. Shown for reference only; it will not compile as-is.
// Also, `newDiscoveredPartitiOns` is a scraping artifact of `newDiscoveredPartitions`.
if (topicsDescriptor.isFixedTopics()) {
// Fixed-topic mode: fetch partitions directly for the configured topic list.
newDiscoveredPartitiOns= getAllPartitionsForTopics(topicsDescriptor.getFixedTopics());
} else {
// Pattern mode: list all topics, then remove those that don't match the pattern.
List matchedTopics = getAllTopics();
// presumably `iter` iterates matchedTopics — its setup is cut off above; verify against the full source
if (!topicsDescriptor.getTopicPattern().matcher(iter.next()).matches()) {
iter.remove();
代码示例来源:origin: apache/flink
@Override
// Mock override used in tests: returns a scripted sequence of topic lists, one
// per invocation, and asserts it is only ever called in topic-pattern mode.
// NOTE(review): generics were stripped by scraping — the real signature is
// presumably List<String>; verify against the original Flink test source.
protected List getAllTopics() {
// getAllTopics() is only meaningful when subscribing via a topic pattern.
assertTrue(topicsDescriptor.isTopicPattern());
// Advance through the pre-canned return values, one per call.
return mockGetAllTopicsReturnSequence.get(getAllTopicsInvokeCount++);
}
代码示例来源:origin: apache/flink
@Override
// Mock override used in tests: verifies that the discoverer asks for partitions
// of exactly the expected topics, then returns a scripted partition list.
// NOTE(review): generics were stripped by scraping — parameter is presumably
// List<String> and the return List<KafkaTopicPartition>; verify against the
// original Flink test source.
protected List getAllPartitionsForTopics(List topics) {
if (topicsDescriptor.isFixedTopics()) {
// Fixed-topic mode: the queried topics must be exactly the configured list.
assertEquals(topicsDescriptor.getFixedTopics(), topics);
} else {
// Pattern mode: the queried topics must be what the preceding getAllTopics()
// call returned (hence the -1 on the invoke counter).
assertEquals(mockGetAllTopicsReturnSequence.get(getAllPartitionsForTopicsInvokeCount - 1), topics);
}
// Advance through the pre-canned partition lists, one per call.
return mockGetAllPartitionsForTopicsReturnSequence.get(getAllPartitionsForTopicsInvokeCount++);
}
代码示例来源:origin: apache/flink
/**
 * Test discoverer that is configured to fail with the given exception.
 *
 * @param failureCause the exception this discoverer should raise when used
 */
public FailingPartitionDiscoverer(RuntimeException failureCause) {
    // Fixed subscription to the single topic "foo"; subtask 0 of parallelism 1.
    super(new KafkaTopicsDescriptor(Arrays.asList("foo"), null), 0, 1);
    this.failureCause = failureCause;
}
代码示例来源:origin: org.apache.flink/flink-connector-kafka-base_2.11
// NOTE(review): this snippet is truncated by the source article — the loop over
// matchedTopics, the iterator declaration for `iter`, and the closing braces are
// missing. Shown for reference only; it will not compile as-is.
// Also, `newDiscoveredPartitiOns` is a scraping artifact of `newDiscoveredPartitions`.
if (topicsDescriptor.isFixedTopics()) {
// Fixed-topic mode: fetch partitions directly for the configured topic list.
newDiscoveredPartitiOns= getAllPartitionsForTopics(topicsDescriptor.getFixedTopics());
} else {
// Pattern mode: list all topics, then remove those that don't match the pattern.
List matchedTopics = getAllTopics();
// presumably `iter` iterates matchedTopics — its setup is cut off above; verify against the full source
if (!topicsDescriptor.getTopicPattern().matcher(iter.next()).matches()) {
iter.remove();
代码示例来源:origin: apache/flink
/**
 * Parameterized-test data: one run with a fixed single-topic descriptor, one run
 * with a topic-pattern descriptor.
 *
 * <p>Fixes from the scraped original: the raw {@code Collection} return type is
 * re-generified to {@code Collection<KafkaTopicsDescriptor[]>} (the element type
 * {@code Arrays.asList} actually produces here), and a space is restored before
 * the opening brace.
 *
 * @return the parameter sets handed to the JUnit {@code Parameterized} runner
 */
@Parameterized.Parameters(name = "KafkaTopicsDescriptor = {0}")
@SuppressWarnings("unchecked")
public static Collection<KafkaTopicsDescriptor[]> timeCharacteristic() {
    return Arrays.asList(
        new KafkaTopicsDescriptor[]{new KafkaTopicsDescriptor(Collections.singletonList(TEST_TOPIC), null)},
        new KafkaTopicsDescriptor[]{new KafkaTopicsDescriptor(null, Pattern.compile(TEST_TOPIC_PATTERN))});
}
代码示例来源:origin: org.apache.flink/flink-connector-kafka-base
// NOTE(review): this snippet is truncated by the source article — the loop over
// matchedTopics, the iterator declaration for `iter`, and the closing braces are
// missing. Shown for reference only; it will not compile as-is.
// Also, `newDiscoveredPartitiOns` is a scraping artifact of `newDiscoveredPartitions`.
if (topicsDescriptor.isFixedTopics()) {
// Fixed-topic mode: fetch partitions directly for the configured topic list.
newDiscoveredPartitiOns= getAllPartitionsForTopics(topicsDescriptor.getFixedTopics());
} else {
// Pattern mode: list all topics, then remove those that don't match the pattern.
List matchedTopics = getAllTopics();
// presumably `iter` iterates matchedTopics — its setup is cut off above; verify against the full source
if (!topicsDescriptor.getTopicPattern().matcher(iter.next()).matches()) {
iter.remove();
代码示例来源:origin: apache/flink
/**
 * Base constructor.
 *
 * <p>Exactly one of {@code topics} / {@code topicPattern} is expected to be non-null;
 * both are handed to {@link KafkaTopicsDescriptor}, which encapsulates the
 * fixed-list vs. pattern subscription mode.
 *
 * @param topics fixed list of topics to subscribe to (null, if using topic pattern)
 * @param topicPattern the topic pattern to subscribe to (null, if using fixed topics)
 * @param deserializer The deserializer to turn raw byte messages into Java/Scala objects.
 * @param discoveryIntervalMillis the topic / partition discovery interval, in
 * milliseconds. NOTE(review): the original "(0 if discovery is disabled)" wording
 * looks stale — the check below accepts any value that is {@code >= 0} or the
 * {@code PARTITION_DISCOVERY_DISABLED} sentinel; confirm against the class constants.
 * @param useMetrics whether this consumer should register/expose metrics
 */
public FlinkKafkaConsumerBase(
List topics,
Pattern topicPattern,
KeyedDeserializationSchema deserializer,
long discoveryIntervalMillis,
boolean useMetrics) {
// The descriptor validates the topics-vs-pattern combination internally.
this.topicsDescriptor = new KafkaTopicsDescriptor(topics, topicPattern);
this.deserializer = checkNotNull(deserializer, "valueDeserializer");
// Either the explicit "disabled" sentinel, or a non-negative interval.
checkArgument(
discoveryIntervalMillis == PARTITION_DISCOVERY_DISABLED || discoveryIntervalMillis >= 0,
"Cannot define a negative value for the topic / partition discovery interval.");
this.discoveryIntervalMillis = discoveryIntervalMillis;
this.useMetrics = useMetrics;
}
代码示例来源:origin: apache/flink
/**
 * Test discoverer hard-wired to a single topic "foo" with one partition.
 *
 * <p>Fix from the scraped original: the identifier {@code allPartitiOns}
 * (an HTML-scraping artifact) is restored to {@code allPartitions}.
 */
private DummyPartitionDiscoverer() {
    // Fixed subscription to "foo"; subtask 0 of parallelism 1.
    super(new KafkaTopicsDescriptor(Collections.singletonList("foo"), null), 0, 1);
    this.allTopics = Collections.singletonList("foo");
    this.allPartitions = Collections.singletonList(new KafkaTopicPartition("foo", 0));
}
代码示例来源:origin: apache/flink
// NOTE(review): bare argument-list fragment — the enclosing constructor/method
// calls were cut off by the source article. Appears to build two discoverers for
// the same fixed topic "test-topic" at an initial and a restored parallelism
// (likely a rescaling test); verify against the original Flink test source.
new KafkaTopicsDescriptor(Collections.singletonList("test-topic"), null),
i,
initialParallelism,
new KafkaTopicsDescriptor(Collections.singletonList("test-topic"), null),
i,
restoredParallelism,
代码示例来源:origin: org.apache.flink/flink-connector-kafka-0.8_2.11
/**
 * Builds a fetcher that only uses the partition-lookup side of the discoverer.
 *
 * <p>Fixes from the scraped original: the identifier {@code partitiOnDiscoverer}
 * (an HTML-scraping artifact) is restored to {@code partitionDiscoverer}, and the
 * raw {@code List} parameter is re-generified to {@code List<String>}.
 *
 * @param topics     fixed list of topic names to look up partitions for
 * @param properties Kafka client properties passed through to the discoverer
 */
PartitionInfoFetcher(List<String> topics, Properties properties) {
    // Only partial functionality of the partition discoverer is used here,
    // so the subtask index / parallelism arguments (0, 1) don't matter.
    this.partitionDiscoverer = new Kafka08PartitionDiscoverer(new KafkaTopicsDescriptor(topics, null), 0, 1, properties);
    this.topics = topics;
}
代码示例来源:origin: org.apache.flink/flink-connector-kafka-0.8
/**
 * Builds a fetcher that only uses the partition-lookup side of the discoverer.
 *
 * <p>Fixes from the scraped original: the identifier {@code partitiOnDiscoverer}
 * (an HTML-scraping artifact) is restored to {@code partitionDiscoverer}, and the
 * raw {@code List} parameter is re-generified to {@code List<String>}.
 *
 * @param topics     fixed list of topic names to look up partitions for
 * @param properties Kafka client properties passed through to the discoverer
 */
PartitionInfoFetcher(List<String> topics, Properties properties) {
    // Only partial functionality of the partition discoverer is used here,
    // so the subtask index / parallelism arguments (0, 1) don't matter.
    this.partitionDiscoverer = new Kafka08PartitionDiscoverer(new KafkaTopicsDescriptor(topics, null), 0, 1, properties);
    this.topics = topics;
}
代码示例来源:origin: org.apache.flink/flink-connector-kafka-base_2.11
/**
 * Base constructor.
 *
 * <p>Exactly one of {@code topics} / {@code topicPattern} is expected to be non-null;
 * both are handed to {@link KafkaTopicsDescriptor}, which encapsulates the
 * fixed-list vs. pattern subscription mode.
 *
 * @param topics fixed list of topics to subscribe to (null, if using topic pattern)
 * @param topicPattern the topic pattern to subscribe to (null, if using fixed topics)
 * @param deserializer The deserializer to turn raw byte messages into Java/Scala objects.
 * @param discoveryIntervalMillis the topic / partition discovery interval, in
 * milliseconds. NOTE(review): the original "(0 if discovery is disabled)" wording
 * looks stale — the check below accepts any value that is {@code >= 0} or the
 * {@code PARTITION_DISCOVERY_DISABLED} sentinel; confirm against the class constants.
 * @param useMetrics whether this consumer should register/expose metrics
 */
public FlinkKafkaConsumerBase(
List topics,
Pattern topicPattern,
KeyedDeserializationSchema deserializer,
long discoveryIntervalMillis,
boolean useMetrics) {
// The descriptor validates the topics-vs-pattern combination internally.
this.topicsDescriptor = new KafkaTopicsDescriptor(topics, topicPattern);
this.deserializer = checkNotNull(deserializer, "valueDeserializer");
// Either the explicit "disabled" sentinel, or a non-negative interval.
checkArgument(
discoveryIntervalMillis == PARTITION_DISCOVERY_DISABLED || discoveryIntervalMillis >= 0,
"Cannot define a negative value for the topic / partition discovery interval.");
this.discoveryIntervalMillis = discoveryIntervalMillis;
this.useMetrics = useMetrics;
}
代码示例来源:origin: org.apache.flink/flink-connector-kafka-base
/**
 * Base constructor.
 *
 * <p>Exactly one of {@code topics} / {@code topicPattern} is expected to be non-null;
 * both are handed to {@link KafkaTopicsDescriptor}, which encapsulates the
 * fixed-list vs. pattern subscription mode.
 *
 * @param topics fixed list of topics to subscribe to (null, if using topic pattern)
 * @param topicPattern the topic pattern to subscribe to (null, if using fixed topics)
 * @param deserializer The deserializer to turn raw byte messages into Java/Scala objects.
 * @param discoveryIntervalMillis the topic / partition discovery interval, in
 * milliseconds. NOTE(review): the original "(0 if discovery is disabled)" wording
 * looks stale — the check below accepts any value that is {@code >= 0} or the
 * {@code PARTITION_DISCOVERY_DISABLED} sentinel; confirm against the class constants.
 * @param useMetrics whether this consumer should register/expose metrics
 */
public FlinkKafkaConsumerBase(
List topics,
Pattern topicPattern,
KeyedDeserializationSchema deserializer,
long discoveryIntervalMillis,
boolean useMetrics) {
// The descriptor validates the topics-vs-pattern combination internally.
this.topicsDescriptor = new KafkaTopicsDescriptor(topics, topicPattern);
this.deserializer = checkNotNull(deserializer, "valueDeserializer");
// Either the explicit "disabled" sentinel, or a non-negative interval.
checkArgument(
discoveryIntervalMillis == PARTITION_DISCOVERY_DISABLED || discoveryIntervalMillis >= 0,
"Cannot define a negative value for the topic / partition discovery interval.");
this.discoveryIntervalMillis = discoveryIntervalMillis;
this.useMetrics = useMetrics;
}