extend schema
  @link(
    url: "https://specs.grafbase.com/grafbase"
    import: ["InputValueSet", "UrlTemplate", "JsonTemplate"]
  )
"""
Kafka producer directive for configuring message publishing to Kafka topics.
Allows schema-level configuration of Kafka producers with connection, topic, and producer settings.
"""
directive @kafkaProducer(
  """
  The unique name identifier for this Kafka producer instance
  """
  name: String!
  """
  The Kafka provider to use
  """
  provider: String! = "default"
  """
  The Kafka topic to publish messages to
  """
  topic: String!
  """
  Additional configuration options for the Kafka producer
  """
  config: KafkaProducerConfiguration! = {}
) repeatable on SCHEMA
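# Example: declaring a producer at the schema level. A minimal sketch; the
# producer name "orders-producer" and topic "orders" are hypothetical.
extend schema
  @kafkaProducer(name: "orders-producer", topic: "orders")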
"""
Kafka publish directive for sending messages to Kafka topics through a configured producer.
Applied to field definitions to automatically publish field resolution results as messages.
"""
directive @kafkaPublish(
  """
  The name of the Kafka producer instance to use for publishing messages.
  Must reference a producer defined with the @kafkaProducer directive.
  """
  producer: String!
  """
  The key of the message to publish.
  Supports templating with GraphQL arguments: {{args.argument}}
  """
  key: UrlTemplate
  """
  The body of the message to publish
  """
  body: Body! = { selection: "*" }
) on FIELD_DEFINITION
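# Example: a hypothetical mutation that publishes through the producer declared
# above. The key is templated from the `id` argument; `body` falls back to its
# default, { selection: "*" }, so all input values are sent as the message body.
extend type Mutation {
  publishOrder(id: String!, status: String!): Boolean!
    @kafkaPublish(producer: "orders-producer", key: "order.{{args.id}}")
}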
"""
Kafka subscription directive for consuming messages from Kafka topics.
Applied to field definitions to stream incoming messages as subscription events.
"""
directive @kafkaSubscription(
  """
  The Kafka provider to use
  """
  provider: String! = "default"
  """
  The topic to subscribe to.
  Supports templating with GraphQL arguments: {{args.argument}}
  """
  topic: UrlTemplate!
  """
  Selection to apply to the subscription payload, in jq syntax
  """
  selection: JsonTemplate
  """
  A regular expression to filter messages by key
  """
  keyFilter: UrlTemplate
  """
  Kafka consumer settings
  """
  consumerConfig: KafkaConsumerConfiguration! = {}
) on FIELD_DEFINITION
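# Example: a hypothetical subscription that consumes from a topic templated with
# the `id` argument and reshapes each message with a jq selection. The Order
# type and the topic naming scheme are assumptions for illustration.
type Order {
  id: String!
  status: String!
}

extend type Subscription {
  orderUpdated(id: String!): Order
    @kafkaSubscription(
      topic: "orders.{{args.id}}"
      selection: "{ id: .id, status: .status }"
    )
}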
"""
Configuration options for Kafka consumer behavior and message consumption settings.
"""
input KafkaConsumerConfiguration {
  """
  Minimum number of messages to wait for before processing a batch
  """
  minBatchSize: Int
  """
  Maximum number of messages to process in a single batch
  """
  maxBatchSize: Int
  """
  Maximum time in milliseconds to wait for messages before returning an empty batch
  """
  maxWaitTimeMs: Int
  """
  Starting offset position for consuming messages from the topic
  """
  startOffset: KafkaConsumerStartOffset! = { preset: LATEST }
  """
  Specific partition numbers to consume from. If not specified, consumes from all partitions.
  """
  partitions: [Int!]
}
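# Example: tuning consumer batching on a subscription field. The numbers are
# illustrative (deliver once 10 messages arrive, cap batches at 100, wait at
# most 500 ms), and the JSON return type is assumed to be available.
extend type Subscription {
  highVolumeEvents: JSON
    @kafkaSubscription(
      topic: "events"
      consumerConfig: { minBatchSize: 10, maxBatchSize: 100, maxWaitTimeMs: 500 }
    )
}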
"""
Configuration for specifying the starting offset position when consuming Kafka messages.
Use either a preset position or a specific numeric offset.
"""
input KafkaConsumerStartOffset @oneOf {
  """
  Predefined offset position (e.g., LATEST, EARLIEST)
  """
  preset: KafkaConsumerStartOffsetPreset
  """
  Specific numeric offset to start consuming from
  """
  offset: Int
}
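# Example: because the input is @oneOf, exactly one of `preset` or `offset`
# may be set. Both variants are shown on hypothetical fields.
extend type Subscription {
  auditFromStart: JSON
    @kafkaSubscription(
      topic: "audit"
      consumerConfig: { startOffset: { preset: EARLIEST } }
    )
  auditFromOffset: JSON
    @kafkaSubscription(
      topic: "audit"
      consumerConfig: { startOffset: { offset: 42 } }
    )
}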
"""
Predefined starting offset positions for consuming Kafka messages.
"""
enum KafkaConsumerStartOffsetPreset {
  """
  Start consuming from the latest offset available in the topic
  """
  LATEST
  """
  Start consuming from the earliest offset available in the topic
  """
  EARLIEST
}
"""
Body configuration for NATS publish operations
"""
input Body {
"""
GraphQL selection to include in the message body
"""
selection: InputValueSet
"""
Static JSON content to include in the message body
"""
static: JSON
}
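# Example: the two Body variants on hypothetical fields. `selection` forwards
# chosen input values (the "id status" syntax is an assumption about
# InputValueSet), while `static` sends a fixed JSON payload.
extend type Mutation {
  reportStatus(id: String!, status: String!): Boolean!
    @kafkaPublish(producer: "orders-producer", body: { selection: "id status" })
  ping: Boolean!
    @kafkaPublish(producer: "orders-producer", body: { static: { type: "ping" } })
}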
"""
Configuration options for Kafka producer behavior and connection settings.
"""
input KafkaProducerConfiguration {
  """
  Compression algorithm to use for message payloads
  """
  compression: KafkaProducerCompression
  """
  Specific partition numbers to produce to. If not specified, uses default partitioning.
  """
  partitions: [Int!]
  """
  Batching configuration for optimizing message throughput and latency
  """
  batch: KafkaProducerBatchConfiguration
}
"""
Producer batching configuration for optimizing message throughput and latency
"""
input KafkaProducerBatchConfiguration {
  """
  Time in milliseconds to wait for additional messages before sending a batch
  """
  lingerMs: Int
  """
  Maximum size of a message batch in bytes before forcing a send
  """
  maxSizeBytes: Int
}
"""
Supported compression algorithms for Kafka message payloads.
"""
enum KafkaProducerCompression {
  """
  GZIP compression algorithm
  """
  GZIP
  """
  Snappy compression algorithm
  """
  SNAPPY
  """
  LZ4 compression algorithm
  """
  LZ4
  """
  Zstandard compression algorithm
  """
  ZSTD
}
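# Example: a producer combining compression with batching. The values are
# illustrative: linger up to 100 ms or flush once a batch reaches 1 MiB.
extend schema
  @kafkaProducer(
    name: "metrics-producer"
    topic: "metrics"
    config: {
      compression: ZSTD
      batch: { lingerMs: 100, maxSizeBytes: 1048576 }
    }
  )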
kafka 0.1.1
Map Kafka endpoints to GraphQL fields. Supports publishing and consuming messages.
2 Jun, 2025
Julius de Bruijn

Install

Add this to your TOML configuration file:

[extensions]
kafka = "0.1.1"