pl.touk.nussknacker.engine.kafka.generic
DelayedFlinkKafkaConsumer
Companion object DelayedFlinkKafkaConsumer
class DelayedFlinkKafkaConsumer[T] extends FlinkKafkaConsumerHandlingExceptions[T]
Warning: this consumer works correctly only when each parallel instance handles a single partition (so the job's parallelism must be at least equal to the number of topic partitions). Otherwise, a single delayed message will block reading from multiple partitions, leading to bigger delays than intended.
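In practice this means the source parallelism should match (or exceed) the topic's partition count when the consumer is wired into a Flink job. A minimal sketch, assuming the consumer instance and the partition count are already available (both are hypothetical placeholders):

```scala
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment
import pl.touk.nussknacker.engine.kafka.generic.DelayedFlinkKafkaConsumer

// Minimal wiring sketch. The consumer instance and the partition count are
// placeholders; in Nussknacker they come from the Kafka source implementation
// and the topic configuration.
object DelayedConsumerWiringSketch {

  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment

    val consumer: DelayedFlinkKafkaConsumer[String] = ??? // see Instance Constructors below
    val topicPartitionCount = 4                           // hypothetical partition count of the consumed topic

    // Keep parallelism >= number of topic partitions, so a message being
    // delayed on one partition does not stall reads from other partitions.
    env
      .addSource(consumer)
      .setParallelism(topicPartitionCount)

    env.execute("delayed-consumer-sketch")
  }
}
```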
Linear Supertypes
Ordering
- Alphabetic
- By Inheritance
Inherited
- FlinkKafkaConsumerHandlingExceptions
- LazyLogging
- FlinkKafkaConsumer
- FlinkKafkaConsumerBase
- CheckpointedFunction
- ResultTypeQueryable
- CheckpointListener
- RichParallelSourceFunction
- ParallelSourceFunction
- SourceFunction
- AbstractRichFunction
- RichFunction
- Function
- Serializable
- AnyRef
- Any
Instance Constructors
- new DelayedFlinkKafkaConsumer(topics: NonEmptyList[PreparedKafkaTopic[ForSource]], schema: KafkaDeserializationSchema[T], props: Properties, delayCalculator: DelayCalculator, extractTimestamp: ExtractTimestampForDelay[T], exceptionHandlerPreparer: (RuntimeContext) => ExceptionHandler, convertToEngineRuntimeContext: (RuntimeContext) => EngineRuntimeContext, nodeId: NodeId)
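A rough construction sketch follows; every `???` stands in for a collaborator that Nussknacker's Kafka source machinery normally supplies, and the Kafka properties are hypothetical example values:

```scala
import java.util.Properties
import pl.touk.nussknacker.engine.kafka.generic.DelayedFlinkKafkaConsumer

// Construction sketch only. The ??? placeholders mark objects provided by
// Nussknacker's Kafka source setup; broker and group values are examples.
val kafkaProps = new Properties()
kafkaProps.setProperty("bootstrap.servers", "localhost:9092")
kafkaProps.setProperty("group.id", "delayed-source-example")

val consumer = new DelayedFlinkKafkaConsumer[String](
  topics                        = ???, // NonEmptyList[PreparedKafkaTopic[ForSource]] prepared for the source
  schema                        = ???, // KafkaDeserializationSchema[String]
  props                         = kafkaProps,
  delayCalculator               = ???, // DelayCalculator deciding how long each record is held back
  extractTimestamp              = ???, // ExtractTimestampForDelay[String] used to compute the delay
  exceptionHandlerPreparer      = ???, // RuntimeContext => ExceptionHandler
  convertToEngineRuntimeContext = ???, // RuntimeContext => EngineRuntimeContext
  nodeId                        = ???  // NodeId of the Kafka source node
)
```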
Value Members
- final def !=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
- final def ##: Int
- Definition Classes
- AnyRef → Any
- final def ==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
- final def asInstanceOf[T0]: T0
- Definition Classes
- Any
- def assignTimestampsAndWatermarks(arg0: WatermarkStrategy[T]): FlinkKafkaConsumerBase[T]
- Definition Classes
- FlinkKafkaConsumerBase
- def cancel(): Unit
- Definition Classes
- FlinkKafkaConsumerBase → SourceFunction
- def clone(): AnyRef
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.CloneNotSupportedException]) @HotSpotIntrinsicCandidate() @native()
- def close(): Unit
- Definition Classes
- FlinkKafkaConsumerHandlingExceptions → FlinkKafkaConsumerBase → AbstractRichFunction → RichFunction
- def createFetcher(sourceContext: SourceContext[T], assignedPartitionsWithInitialOffsets: Map[KafkaTopicPartition, Long], watermarkStrategy: SerializedValue[WatermarkStrategy[T]], runtimeContext: StreamingRuntimeContext, offsetCommitMode: OffsetCommitMode, consumerMetricGroup: MetricGroup, useMetrics: Boolean): AbstractFetcher[T, _]
- Definition Classes
- DelayedFlinkKafkaConsumer → FlinkKafkaConsumer → FlinkKafkaConsumerBase
- Annotations
- @silent("deprecated")
- def createPartitionDiscoverer(arg0: KafkaTopicsDescriptor, arg1: Int, arg2: Int): AbstractPartitionDiscoverer
- Attributes
- protected[org.apache.flink.streaming.connectors.kafka]
- Definition Classes
- FlinkKafkaConsumer → FlinkKafkaConsumerBase
- def disableFilterRestoredPartitionsWithSubscribedTopics(): FlinkKafkaConsumerBase[T]
- Definition Classes
- FlinkKafkaConsumerBase
- final def eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
- def equals(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef → Any
- var exceptionHandler: ExceptionHandler
- Attributes
- protected
- Definition Classes
- FlinkKafkaConsumerHandlingExceptions
- var exceptionPurposeContextIdGenerator: ContextIdGenerator
- Attributes
- protected
- Definition Classes
- FlinkKafkaConsumerHandlingExceptions
- def fetchOffsetsWithTimestamp(arg0: Collection[KafkaTopicPartition], arg1: Long): Map[KafkaTopicPartition, Long]
- Attributes
- protected[org.apache.flink.streaming.connectors.kafka]
- Definition Classes
- FlinkKafkaConsumer → FlinkKafkaConsumerBase
- final def getClass(): Class[_ <: AnyRef]
- Definition Classes
- AnyRef → Any
- Annotations
- @HotSpotIntrinsicCandidate() @native()
- def getEnableCommitOnCheckpoints(): Boolean
- Definition Classes
- FlinkKafkaConsumerBase
- def getIsAutoCommitEnabled(): Boolean
- Attributes
- protected[org.apache.flink.streaming.connectors.kafka]
- Definition Classes
- FlinkKafkaConsumer → FlinkKafkaConsumerBase
- def getIterationRuntimeContext(): IterationRuntimeContext
- Definition Classes
- AbstractRichFunction → RichFunction
- def getProducedType(): TypeInformation[T]
- Definition Classes
- FlinkKafkaConsumerBase → ResultTypeQueryable
- def getRuntimeContext(): RuntimeContext
- Definition Classes
- AbstractRichFunction → RichFunction
- def hashCode(): Int
- Definition Classes
- AnyRef → Any
- Annotations
- @HotSpotIntrinsicCandidate() @native()
- final def initializeState(arg0: FunctionInitializationContext): Unit
- Definition Classes
- FlinkKafkaConsumerBase → CheckpointedFunction
- Annotations
- @throws(classOf[java.lang.Exception])
- final def isInstanceOf[T0]: Boolean
- Definition Classes
- Any
- lazy val logger: Logger
- Attributes
- protected
- Definition Classes
- LazyLogging
- Annotations
- @transient()
- final def ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
- final def notify(): Unit
- Definition Classes
- AnyRef
- Annotations
- @HotSpotIntrinsicCandidate() @native()
- final def notifyAll(): Unit
- Definition Classes
- AnyRef
- Annotations
- @HotSpotIntrinsicCandidate() @native()
- def notifyCheckpointAborted(arg0: Long): Unit
- Definition Classes
- FlinkKafkaConsumerBase → CheckpointListener
- final def notifyCheckpointComplete(arg0: Long): Unit
- Definition Classes
- FlinkKafkaConsumerBase → CheckpointListener
- Annotations
- @throws(classOf[java.lang.Exception])
- def open(parameters: Configuration): Unit
- Definition Classes
- FlinkKafkaConsumerHandlingExceptions → FlinkKafkaConsumerBase → AbstractRichFunction → RichFunction
- def open(arg0: OpenContext): Unit
- Definition Classes
- RichFunction
- Annotations
- @throws(classOf[java.lang.Exception])
- def run(arg0: SourceContext[T]): Unit
- Definition Classes
- FlinkKafkaConsumerBase → SourceFunction
- Annotations
- @throws(classOf[java.lang.Exception])
- def setCommitOffsetsOnCheckpoints(arg0: Boolean): FlinkKafkaConsumerBase[T]
- Definition Classes
- FlinkKafkaConsumerBase
- def setRuntimeContext(arg0: RuntimeContext): Unit
- Definition Classes
- AbstractRichFunction → RichFunction
- def setStartFromEarliest(): FlinkKafkaConsumerBase[T]
- Definition Classes
- FlinkKafkaConsumerBase
- def setStartFromGroupOffsets(): FlinkKafkaConsumerBase[T]
- Definition Classes
- FlinkKafkaConsumerBase
- def setStartFromLatest(): FlinkKafkaConsumerBase[T]
- Definition Classes
- FlinkKafkaConsumerBase
- def setStartFromSpecificOffsets(arg0: Map[KafkaTopicPartition, Long]): FlinkKafkaConsumerBase[T]
- Definition Classes
- FlinkKafkaConsumerBase
- def setStartFromTimestamp(arg0: Long): FlinkKafkaConsumerBase[T]
- Definition Classes
- FlinkKafkaConsumerBase
- final def snapshotState(arg0: FunctionSnapshotContext): Unit
- Definition Classes
- FlinkKafkaConsumerBase → CheckpointedFunction
- Annotations
- @throws(classOf[java.lang.Exception])
- final def synchronized[T0](arg0: => T0): T0
- Definition Classes
- AnyRef
- def toString(): String
- Definition Classes
- AnyRef → Any
- final def wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.InterruptedException])
- final def wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.InterruptedException]) @native()
- final def wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.InterruptedException])
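The inherited FlinkKafkaConsumerBase configuration methods listed above (setStartFrom*, setCommitOffsetsOnCheckpoints, assignTimestampsAndWatermarks) are typically chained on the consumer before it is added as a source. A minimal sketch, assuming a consumer built as under Instance Constructors; the concrete values are examples only:

```scala
import java.time.Duration
import org.apache.flink.api.common.eventtime.WatermarkStrategy
import pl.touk.nussknacker.engine.kafka.generic.DelayedFlinkKafkaConsumer

// Configuration sketch for the inherited FlinkKafkaConsumerBase methods.
// `consumer` is assumed to be built as shown under Instance Constructors.
val consumer: DelayedFlinkKafkaConsumer[String] = ???

val configured =
  consumer
    .setStartFromLatest()                // or setStartFromEarliest / setStartFromGroupOffsets / setStartFromTimestamp
    .setCommitOffsetsOnCheckpoints(true) // commit offsets back to Kafka on successful checkpoints
    .assignTimestampsAndWatermarks(
      WatermarkStrategy.forBoundedOutOfOrderness[String](Duration.ofSeconds(10)) // arbitrary example bound
    )
```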
Deprecated Value Members
- def assignTimestampsAndWatermarks(arg0: AssignerWithPeriodicWatermarks[T]): FlinkKafkaConsumerBase[T]
- Definition Classes
- FlinkKafkaConsumerBase
- Annotations
- @Deprecated
- Deprecated
- def assignTimestampsAndWatermarks(arg0: AssignerWithPunctuatedWatermarks[T]): FlinkKafkaConsumerBase[T]
- Definition Classes
- FlinkKafkaConsumerBase
- Annotations
- @Deprecated
- Deprecated
- def finalize(): Unit
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.Throwable]) @Deprecated
- Deprecated (Since version 9)