Avro consumer: print value_schema_id and key_schema_id (#311)
Magnus Edenhill
3 years ago
2 | 2 |
* Add support for multibyte delimiters to `-D` and `-K` (#140, #280)
|
3 | 3 |
* Add support for `-X partition.assignment.strategy=cooperative-sticky` incremental rebalancing.
|
4 | 4 |
* High-level consumer `-G` now supports exit-on-eof `-e` option (#86)
|
|
5 |
* Avro consumer with -J will now emit `key_schema_id` and `value_schema_id`.
|
5 | 6 |
|
6 | 7 |
|
7 | 8 |
# kafkacat v1.6.0
|
117 | 117 |
*
|
118 | 118 |
* @returns newly allocated JSON string, or NULL on error.
|
119 | 119 |
*/
|
120 | |
char *kc_avro_to_json (const void *data, size_t data_len,
|
|
120 |
char *kc_avro_to_json (const void *data, size_t data_len, int *schema_idp,
|
121 | 121 |
char *errstr, size_t errstr_size) {
|
122 | 122 |
avro_value_t avro;
|
123 | 123 |
serdes_schema_t *schema;
|
|
147 | 147 |
return NULL;
|
148 | 148 |
}
|
149 | 149 |
|
|
150 |
if (schema && schema_idp)
|
|
151 |
*schema_idp = serdes_schema_id(schema);
|
|
152 |
|
150 | 153 |
avro_value_decref(&avro);
|
151 | 154 |
|
152 | 155 |
return json;
|
33 | 33 |
const char *_s = (STR); \
|
34 | 34 |
yajl_gen_string(G, (const unsigned char *)_s, strlen(_s)); \
|
35 | 35 |
} while (0)
|
|
36 |
#define JS_INT(G, INT) yajl_gen_integer(g, INT)
|
36 | 37 |
|
37 | 38 |
void fmt_msg_output_json (FILE *fp, const rd_kafka_message_t *rkmessage) {
|
38 | 39 |
yajl_gen g;
|
|
116 | 117 |
#if ENABLE_AVRO && YAJL_HAS_GEN_VERBATIM
|
117 | 118 |
if (conf.flags & CONF_F_FMT_AVRO_KEY) {
|
118 | 119 |
char errstr[256];
|
|
120 |
int schema_id = -1;
|
119 | 121 |
char *json = kc_avro_to_json(
|
120 | 122 |
rkmessage->key,
|
121 | 123 |
rkmessage->key_len,
|
|
124 |
&schema_id,
|
122 | 125 |
errstr, sizeof(errstr));
|
123 | 126 |
|
124 | 127 |
if (!json) {
|
|
131 | 134 |
yajl_gen_null(g);
|
132 | 135 |
JS_STR(g, "key_error");
|
133 | 136 |
JS_STR(g, errstr);
|
134 | |
} else
|
|
137 |
} else {
|
135 | 138 |
yajl_gen_verbatim(g, json, strlen(json));
|
|
139 |
JS_STR(g, "key_schema_id");
|
|
140 |
JS_INT(g, schema_id);
|
|
141 |
}
|
136 | 142 |
free(json);
|
137 | 143 |
} else
|
138 | 144 |
#endif
|
|
147 | 153 |
#if ENABLE_AVRO && YAJL_HAS_GEN_VERBATIM
|
148 | 154 |
if (conf.flags & CONF_F_FMT_AVRO_VALUE) {
|
149 | 155 |
char errstr[256];
|
|
156 |
int schema_id = -1;
|
150 | 157 |
char *json = kc_avro_to_json(
|
151 | 158 |
rkmessage->payload,
|
152 | 159 |
rkmessage->len,
|
|
160 |
&schema_id,
|
153 | 161 |
errstr, sizeof(errstr));
|
154 | 162 |
|
155 | 163 |
if (!json) {
|
|
162 | 170 |
yajl_gen_null(g);
|
163 | 171 |
JS_STR(g, "payload_error");
|
164 | 172 |
JS_STR(g, errstr);
|
165 | |
} else
|
|
173 |
} else {
|
166 | 174 |
yajl_gen_verbatim(g, json, strlen(json));
|
|
175 |
JS_STR(g, "value_schema_id");
|
|
176 |
JS_INT(g, schema_id);
|
|
177 |
}
|
|
178 |
|
167 | 179 |
free(json);
|
168 | 180 |
} else
|
169 | 181 |
#endif
|
1443 | 1443 |
" \"broker\": int,\n"
|
1444 | 1444 |
" \"headers\": { \"<name>\": str, .. }, // optional\n"
|
1445 | 1445 |
" \"key\": str|json, \"payload\": str|json,\n"
|
1446 | |
" \"key_error\": str, \"payload_error\": str } //optional\n"
|
1447 | |
" (note: key_error and payload_error are only included if "
|
1448 | |
"deserialization failed)\n"
|
|
1446 |
" \"key_error\": str, \"payload_error\": str, //optional\n"
|
|
1447 |
" \"key_schema_id\": int, "
|
|
1448 |
"\"value_schema_id\": int //optional\n"
|
|
1449 |
" }\n"
|
|
1450 |
" notes:\n"
|
|
1451 |
" - key_error and payload_error are only included if "
|
|
1452 |
"deserialization fails.\n"
|
|
1453 |
" - key_schema_id and value_schema_id are included for "
|
|
1454 |
"successfully deserialized Avro messages.\n"
|
1449 | 1455 |
"\n"
|
1450 | 1456 |
#endif
|
1451 | 1457 |
"Consumer mode (writes messages to stdout):\n"
|
209 | 209 |
/*
|
210 | 210 |
* avro.c
|
211 | 211 |
*/
|
212 | |
char *kc_avro_to_json (const void *data, size_t data_len,
|
|
212 |
char *kc_avro_to_json (const void *data, size_t data_len, int *schema_idp,
|
213 | 213 |
char *errstr, size_t errstr_size);
|
214 | 214 |
|
215 | 215 |
void kc_avro_init (const char *key_schema_name,
|
|
0 |
#!/bin/bash
#

# pipefail: without it a failing kafkacat/producer inside a pipeline is
# masked by the exit status of the last command (jq), defeating set -e.
set -eo pipefail
source helpers.sh


#
# Verify Avro consumer, requires docker and a running trivup cluster with
# Kafka and Schema-registry.
#


if [[ -z $SR_URL ]]; then
    SKIP "No schema-registry available (SR_URL env not set)"
fi

# Quote the pattern so the shell cannot glob-expand it before grep sees it.
if ! $KAFKACAT -V | grep -q '^Version.*Avro.*builtin\.features'; then
    SKIP "Kafkacat not built with Avro support"
fi

topic=$(make_topic_name)

create_topic "$topic" 1

info "Producing Avro message to $topic"

echo '{"number": 63, "name": "TestName"}' |
    docker run --network=host -i \
           confluentinc/cp-schema-registry:6.0.0 \
           kafka-avro-console-producer \
           --bootstrap-server "$BROKERS" \
           --topic "$topic" \
           --property schema.registry.url="$SR_URL" \
           --property value.schema="$(< basic_schema.avsc)"

info "Reading Avro messages"
output=$($KAFKACAT -C -r "$SR_URL" -t "$topic" -o beginning -e -s value=avro | \
             jq -r '(.name + "=" + (.number | tostring))')

exp="TestName=63"

# Quote the right-hand side: unquoted, [[ != ]] performs glob matching
# rather than a literal string comparison.
if [[ $output != "$exp" ]]; then
    echo "FAIL: Expected '$exp', not '$output'"
    exit 1
fi


PASS "Expected output seen: $output"
|
0 |
{
|
|
1 |
"name": "basic",
|
|
2 |
"type": "record",
|
|
3 |
"doc": "basic schema for tests",
|
|
4 |
"namespace": "python.test",
|
|
5 |
"fields": [
|
|
6 |
{
|
|
7 |
"name": "number",
|
|
8 |
"doc": "age",
|
|
9 |
"type": "long"
|
|
10 |
},
|
|
11 |
{
|
|
12 |
"name": "name",
|
|
13 |
"doc": "a name",
|
|
14 |
"type": "string"
|
|
15 |
}
|
|
16 |
]
|
|
17 |
}
|
1 | 1 |
|
2 | 2 |
CLR_BGRED="\033[37;41m"
|
3 | 3 |
CLR_BGGREEN="\033[37;42m"
|
|
4 |
CLR_YELLOW="\033[33m"
|
4 | 5 |
CLR_INFO="\033[34m"
|
5 | 6 |
CLR="\033[0m"
|
6 | 7 |
|
|
17 | 18 |
echo "kafkacat_test_$$_${RANDOM}_${TEST_NAME}name"
|
18 | 19 |
}
|
19 | 20 |
|
|
21 |
# Create a Kafka topic with the given name and partition count;
# replication factor is fixed at 1 (single-broker test cluster).
function create_topic {
    local name="$1"
    local parts="$2"

    info "Creating topic $name with $parts partition(s)"

    $KAFKA_PATH/bin/kafka-topics.sh \
        --create \
        --bootstrap-server $BROKERS \
        --topic "$name" \
        --partitions $parts \
        --replication-factor 1
}
|
20 | 32 |
|
21 | 33 |
|
22 | 34 |
function info {
|
|
34 | 46 |
local str=$1
|
35 | 47 |
echo -e "${CLR_BGGREEN}${TEST_NAME} | TEST PASSED: $str${CLR}"
|
36 | 48 |
}
|
|
49 |
|
|
50 |
|
|
51 |
# Print a yellow "TEST SKIPPED" banner and terminate the test successfully.
SKIP() {
    local msg="$1"
    echo -e "${CLR_YELLOW}${TEST_NAME} | TEST SKIPPED: ${msg}${CLR}"
    exit 0
}
|