Commit

make systest issue
YenchangChan committed Mar 5, 2024
1 parent 38a4370 commit 09b6a3e
Showing 7 changed files with 45 additions and 45 deletions.
8 changes: 4 additions & 4 deletions docker-compose.yml
@@ -7,7 +7,7 @@ services:
restart: always
hostname: zookeeper
ports:
- "2181:2181"
- "52181:2181"
environment:
ALLOW_ANONYMOUS_LOGIN: 1
ZOO_4LW_COMMANDS_WHITELIST: "*"
@@ -37,8 +37,8 @@ services:
image: clickhouse/clickhouse-server:23.8
restart: always
ports:
- "8123:8123"
- "9000:9000"
- "58123:8123"
- "59000:9000"
ulimits:
nofile:
soft: 262144
@@ -55,4 +55,4 @@ services:
- PREFER_HOST_MODE=hostname
- MODE=standalone
ports:
- "8848:8848"
- "58848:8848"
2 changes: 1 addition & 1 deletion docker/test_auto_schema.hjson
@@ -5,7 +5,7 @@
127.0.0.1
]
]
- port: 9000
+ port: 59000
db: default
username: ""
password: ""
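
The sinker config now points at the remapped ClickHouse native-protocol port 59000 instead of 9000; the same one-line change is applied to test_dynamic_schema.hjson, test_fixed_schema.hjson and test_prom_metric.hjson below. A small sketch, not from the repo, for checking that the native port is actually reachable before running clickhouse_sinker, using bash's /dev/tcp redirection so no extra tools are needed:

# Hypothetical pre-flight check for the port the hjson configs now use.
if (exec 3<>/dev/tcp/127.0.0.1/59000) 2>/dev/null; then
  echo "ClickHouse native port 59000 is reachable"
else
  echo "ClickHouse native port 59000 is NOT reachable" >&2
  exit 1
fi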
2 changes: 1 addition & 1 deletion docker/test_dynamic_schema.hjson
@@ -6,7 +6,7 @@
127.0.0.1
]
]
- port: 9000
+ port: 59000
db: default
username: ""
password: ""
2 changes: 1 addition & 1 deletion docker/test_fixed_schema.hjson
@@ -5,7 +5,7 @@
127.0.0.1
]
]
- port: 9000
+ port: 59000
db: default
username: ""
password: ""
2 changes: 1 addition & 1 deletion docker/test_prom_metric.hjson
@@ -6,7 +6,7 @@
127.0.0.1
]
]
- port: 9000
+ port: 59000
db: gauge
username: ""
password: ""
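
Unlike the other configs, this one writes into the gauge database, which go.metrictest.sh creates with ON CLUSTER abc before starting the sinker. A hedged one-liner, not part of the repo, to confirm the database exists via the remapped HTTP port:

# Hypothetical check: prints "gauge" if the target database has been created.
curl "localhost:58123" -d "SELECT name FROM system.databases WHERE name = 'gauge'"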
28 changes: 14 additions & 14 deletions go.metrictest.sh
@@ -1,11 +1,11 @@
#!/usr/bin/env bash

echo "create database"
curl "localhost:8123" -d "CREATE DATABASE IF NOT EXISTS gauge ON CLUSTER abc"
curl "localhost:58123" -d "CREATE DATABASE IF NOT EXISTS gauge ON CLUSTER abc"

echo "create metric test tables"
curl "localhost:8123" -d "DROP TABLE IF EXISTS test_prom_metric ON CLUSTER abc SYNC"
curl "localhost:8123" -d "CREATE TABLE test_prom_metric ON CLUSTER abc
curl "localhost:58123" -d "DROP TABLE IF EXISTS test_prom_metric ON CLUSTER abc SYNC"
curl "localhost:58123" -d "CREATE TABLE test_prom_metric ON CLUSTER abc
(
__series_id__ Int64,
timestamp DateTime CODEC(DoubleDelta, LZ4),
@@ -14,11 +14,11 @@ curl "localhost:8123" -d "CREATE TABLE test_prom_metric ON CLUSTER abc
PARTITION BY toYYYYMMDD(timestamp)
ORDER BY (__series_id__, timestamp);"

curl "localhost:8123" -d "DROP TABLE IF EXISTS dist_test_prom_metric ON CLUSTER abc SYNC"
curl "localhost:8123" -d "CREATE TABLE dist_test_prom_metric ON CLUSTER abc AS test_prom_metric ENGINE = Distributed(abc, default, test_prom_metric);"
curl "localhost:58123" -d "DROP TABLE IF EXISTS dist_test_prom_metric ON CLUSTER abc SYNC"
curl "localhost:58123" -d "CREATE TABLE dist_test_prom_metric ON CLUSTER abc AS test_prom_metric ENGINE = Distributed(abc, default, test_prom_metric);"

curl "localhost:8123" -d "DROP TABLE IF EXISTS test_prom_series ON CLUSTER abc SYNC"
curl "localhost:8123" -d "CREATE TABLE test_prom_series ON CLUSTER abc
curl "localhost:58123" -d "DROP TABLE IF EXISTS test_prom_series ON CLUSTER abc SYNC"
curl "localhost:58123" -d "CREATE TABLE test_prom_series ON CLUSTER abc
(
__series_id__ Int64,
__mgmt_id__ Int64,
@@ -27,8 +27,8 @@ curl "localhost:8123" -d "CREATE TABLE test_prom_series ON CLUSTER abc
) ENGINE=ReplicatedReplacingMergeTree()
ORDER BY (__name__, __series_id__);"

curl "localhost:8123" -d "DROP TABLE IF EXISTS dist_test_prom_series ON CLUSTER abc SYNC"
curl "localhost:8123" -d "CREATE TABLE dist_test_prom_series ON CLUSTER abc AS test_prom_series ENGINE = Distributed(abc, default, test_prom_series);"
curl "localhost:58123" -d "DROP TABLE IF EXISTS dist_test_prom_series ON CLUSTER abc SYNC"
curl "localhost:58123" -d "CREATE TABLE dist_test_prom_series ON CLUSTER abc AS test_prom_series ENGINE = Distributed(abc, default, test_prom_series);"

echo "send messages to kafka"
echo "cat /tmp/test_prom_metric.data | kafka-console-producer --topic test_metric_topic --broker-list localhost:9092" > send.sh
@@ -41,20 +41,20 @@ sudo docker exec kafka sh /tmp/send.sh
echo "start clickhouse_sinker to consume"
timeout 30 ./bin/clickhouse_sinker --local-cfg-file docker/test_prom_metric.hjson

- schema=`curl "localhost:8123" -d 'DESC test_prom_metric' 2>/dev/null | sort | tr -d '\t' | tr -d ' '| tr '\n' ','`
+ schema=`curl "localhost:58123" -d 'DESC test_prom_metric' 2>/dev/null | sort | tr -d '\t' | tr -d ' '| tr '\n' ','`
echo "Got test_prom_metric schema => $schema"
[ $schema = "__series_id__Int64,timestampDateTimeDoubleDelta,LZ4,value1Nullable(Float64),value2Nullable(Int64),value3Nullable(Bool),valueFloat32ZSTD(15)," ] || exit 1


- schema=`curl "localhost:8123" -d 'DESC test_prom_series' 2>/dev/null | sort | tr -d '\t' | tr -d ' '| tr '\n' ','`
+ schema=`curl "localhost:58123" -d 'DESC test_prom_series' 2>/dev/null | sort | tr -d '\t' | tr -d ' '| tr '\n' ','`
echo "Got test_prom_series schema => $schema"
[ $schema = "key_0Nullable(String),key_1Nullable(String),key_2Nullable(String),key_4Nullable(String),key_5Nullable(String),key_6Nullable(String),key_7Nullable(String),key_8Nullable(String),key_9Nullable(String),labelsString,__mgmt_idInt64,__name__String,__series_id__Int64," ] || exit 1
[ $schema = "key_0Nullable(String),key_1Nullable(String),key_2Nullable(String),key_4Nullable(String),key_5Nullable(String),key_6Nullable(String),key_7Nullable(String),key_8Nullable(String),key_9Nullable(String),labelsString,__mgmt_id__Int64,__name__String,__series_id__Int64," ] || exit 1

echo "check result 1"
- count=`curl "localhost:8123" -d 'select count() from dist_test_prom_metric'`
+ count=`curl "localhost:58123" -d 'select count() from dist_test_prom_metric'`
echo "Got test_prom_metric count => $count"
[ $count -le 10000 ] || exit 1

- count=`curl "localhost:8123" -d 'select count() from dist_test_prom_series'`
+ count=`curl "localhost:58123" -d 'select count() from dist_test_prom_series'`
echo "Got test_prom_series count => $count"
[ $count -eq 1000 ] || exit 1
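
The script drives the whole metric path: it creates the replicated and Distributed tables on cluster abc, feeds Kafka, runs clickhouse_sinker for 30 seconds, then compares schemas and row counts over the remapped HTTP port. When one of the count assertions fails, it can help to query the Distributed tables by hand; a sketch of such a manual check (debugging aid only, not part of the script):

# Hypothetical manual inspection after a failed run.
curl "localhost:58123" -d 'SELECT count() FROM dist_test_prom_metric'
curl "localhost:58123" -d 'SELECT count() FROM dist_test_prom_series'
# Peek at a few series rows in a readable layout.
curl "localhost:58123" -d 'SELECT * FROM dist_test_prom_series LIMIT 3 FORMAT Vertical'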
46 changes: 23 additions & 23 deletions go.test.sh
@@ -1,8 +1,8 @@
#!/usr/bin/env bash

echo "create tables"
curl "localhost:8123" -d 'DROP TABLE IF EXISTS test_fixed_schema'
curl "localhost:8123" -d 'CREATE TABLE test_fixed_schema
curl "localhost:58123" -d 'DROP TABLE IF EXISTS test_fixed_schema'
curl "localhost:58123" -d 'CREATE TABLE test_fixed_schema
(
time DateTime,
name String,
@@ -13,13 +13,13 @@ ENGINE = MergeTree
PARTITION BY toYYYYMMDD(time)
ORDER BY (time, name)'

curl "localhost:8123" -d 'DROP TABLE IF EXISTS test_auto_schema'
curl "localhost:8123" -d 'CREATE TABLE test_auto_schema AS test_fixed_schema'
curl "localhost:58123" -d 'DROP TABLE IF EXISTS test_auto_schema'
curl "localhost:58123" -d 'CREATE TABLE test_auto_schema AS test_fixed_schema'

curl "localhost:8123" -d 'DROP TABLE IF EXISTS test_dynamic_schema'
curl "localhost:8123" -d 'CREATE TABLE test_dynamic_schema AS test_fixed_schema'
curl "localhost:58123" -d 'DROP TABLE IF EXISTS test_dynamic_schema'
curl "localhost:58123" -d 'CREATE TABLE test_dynamic_schema AS test_fixed_schema'

- counts=`curl "localhost:8123" -d 'SELECT count() FROM test_fixed_schema UNION ALL SELECT count() FROM test_auto_schema UNION ALL SELECT count() FROM test_dynamic_schema' 2>/dev/null | tr '\n' ','`
+ counts=`curl "localhost:58123" -d 'SELECT count() FROM test_fixed_schema UNION ALL SELECT count() FROM test_auto_schema UNION ALL SELECT count() FROM test_dynamic_schema' 2>/dev/null | tr '\n' ','`
echo "Got initial row counts => $counts"
[ $counts = "0,0,0," ] || exit 1

@@ -54,50 +54,50 @@ timeout 30 ./bin/clickhouse_sinker --local-cfg-file docker/test_auto_schema.hjso
timeout 60 ./bin/clickhouse_sinker --local-cfg-file docker/test_dynamic_schema.hjson

echo "check result 1"
- count=`curl "localhost:8123" -d 'select count() from test_fixed_schema'`
+ count=`curl "localhost:58123" -d 'select count() from test_fixed_schema'`
echo "Got test_fixed_schema count => $count"
[ $count -eq 100000 ] || exit 1

- count=`curl "localhost:8123" -d 'select count() from test_auto_schema'`
+ count=`curl "localhost:58123" -d 'select count() from test_auto_schema'`
echo "Got test_auto_schema count => $count"
[ $count -eq 100000 ] || exit 1

- schema=`curl "localhost:8123" -d 'DESC test_dynamic_schema' 2>/dev/null | grep newkey | sort | tr -d '\t' | tr '\n' ','`
+ schema=`curl "localhost:58123" -d 'DESC test_dynamic_schema' 2>/dev/null | grep newkey | sort | tr -d '\t' | tr '\n' ','`
echo "Got test_dynamic_schema schema => $schema"
[ $schema = "newkey00Nullable(Bool),newkey01Nullable(Int64),newkey02Nullable(Float64),newkey03Nullable(String),newkey04Nullable(DateTime64(3))," ] || exit 1
- count=`curl "localhost:8123" -d 'SELECT count() FROM test_dynamic_schema'`
+ count=`curl "localhost:58123" -d 'SELECT count() FROM test_dynamic_schema'`
echo "Got test_dynamic_schema count => $count"
[ $count -eq 100000 ] || exit 1

echo "truncate tables"
curl "localhost:8123" -d 'TRUNCATE TABLE test_fixed_schema'
curl "localhost:8123" -d 'TRUNCATE TABLE test_auto_schema'
curl "localhost:8123" -d 'TRUNCATE TABLE test_dynamic_schema'
curl "localhost:58123" -d 'TRUNCATE TABLE test_fixed_schema'
curl "localhost:58123" -d 'TRUNCATE TABLE test_auto_schema'
curl "localhost:58123" -d 'TRUNCATE TABLE test_dynamic_schema'

echo "publish clickhouse_sinker config"
- ./bin/nacos_publish_config --nacos-addr 127.0.0.1:8848 --nacos-username nacos --nacos-password nacos --nacos-dataid test_fixed_schema --local-cfg-file docker/test_fixed_schema.hjson
- ./bin/nacos_publish_config --nacos-addr 127.0.0.1:8848 --nacos-username nacos --nacos-password nacos --nacos-dataid test_auto_schema --local-cfg-file docker/test_auto_schema.hjson
- ./bin/nacos_publish_config --nacos-addr 127.0.0.1:8848 --nacos-username nacos --nacos-password nacos --nacos-dataid test_dynamic_schema --local-cfg-file docker/test_dynamic_schema.hjson
+ ./bin/nacos_publish_config --nacos-addr 127.0.0.1:58848 --nacos-username nacos --nacos-password nacos --nacos-dataid test_fixed_schema --local-cfg-file docker/test_fixed_schema.hjson
+ ./bin/nacos_publish_config --nacos-addr 127.0.0.1:58848 --nacos-username nacos --nacos-password nacos --nacos-dataid test_auto_schema --local-cfg-file docker/test_auto_schema.hjson
+ ./bin/nacos_publish_config --nacos-addr 127.0.0.1:58848 --nacos-username nacos --nacos-password nacos --nacos-dataid test_dynamic_schema --local-cfg-file docker/test_dynamic_schema.hjson

echo "start clickhouse_sinker to consume"
sudo docker exec kafka kafka-consumer-groups --bootstrap-server localhost:9093 --execute --reset-offsets --group test_fixed_schema --all-topics --to-earliest
- timeout 30 ./bin/clickhouse_sinker --nacos-addr 127.0.0.1:8848 --nacos-username nacos --nacos-password nacos --nacos-dataid test_fixed_schema
+ timeout 30 ./bin/clickhouse_sinker --nacos-addr 127.0.0.1:58848 --nacos-username nacos --nacos-password nacos --nacos-dataid test_fixed_schema

sudo docker exec kafka kafka-consumer-groups --bootstrap-server localhost:9093 --execute --reset-offsets --group test_auto_schema --all-topics --to-earliest
- timeout 30 ./bin/clickhouse_sinker --nacos-addr 127.0.0.1:8848 --nacos-username nacos --nacos-password nacos --nacos-dataid test_auto_schema
+ timeout 30 ./bin/clickhouse_sinker --nacos-addr 127.0.0.1:58848 --nacos-username nacos --nacos-password nacos --nacos-dataid test_auto_schema

sudo docker exec kafka kafka-consumer-groups --bootstrap-server localhost:9093 --execute --reset-offsets --group test_dynamic_schema --all-topics --to-earliest
- timeout 30 ./bin/clickhouse_sinker --nacos-addr 127.0.0.1:8848 --nacos-username nacos --nacos-password nacos --nacos-dataid test_dynamic_schema
+ timeout 30 ./bin/clickhouse_sinker --nacos-addr 127.0.0.1:58848 --nacos-username nacos --nacos-password nacos --nacos-dataid test_dynamic_schema

echo "check result 2"
- count=`curl "localhost:8123" -d 'select count() from test_fixed_schema'`
+ count=`curl "localhost:58123" -d 'select count() from test_fixed_schema'`
echo "Got test_fixed_schema count => $count"
[ $count -eq 100000 ] || exit 1

- count=`curl "localhost:8123" -d 'select count() from test_auto_schema'`
+ count=`curl "localhost:58123" -d 'select count() from test_auto_schema'`
echo "Got test_auto_schema count => $count"
[ $count -eq 100000 ] || exit 1

- count=`curl "localhost:8123" -d 'SELECT count() FROM test_dynamic_schema'`
+ count=`curl "localhost:58123" -d 'SELECT count() FROM test_dynamic_schema'`
echo "Got test_dynamic_schema count => $count"
[ $count -eq 100000 ] || exit 1
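
The second half of the script publishes the three sinker configs to Nacos on the remapped port 58848 and re-runs clickhouse_sinker with --nacos-addr instead of a local config file. A sketch, with assumptions flagged below, for reading a published config back through the Nacos config API to confirm the publish step worked:

# Hypothetical verification; DEFAULT_GROUP is an assumption about the group
# nacos_publish_config uses, and if Nacos auth is enforced an accessToken
# parameter would also be required.
curl -s "http://127.0.0.1:58848/nacos/v1/cs/configs?dataId=test_fixed_schema&group=DEFAULT_GROUP"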
