@@ -9,10 +9,120 @@ function setUp(){
 }

 function tearDown(){
-  # _debug
+  _debug
   _make undeploy_elasticsearch 1> /dev/null
 }

+function test_bulk_commit_after_one_action(){
+
+  description="[${BASHT_TEST_FILENAME##*/}] acceptance-tests (v${CLIENT_VERSION}): $BASHT_TEST_NUMBER - bulk commit after one action"
+  echo "$description"
+
+  name="${BASHT_TEST_FILENAME##*/}.${BASHT_TEST_NUMBER}"
+  message="$((RANDOM)) $description"
+
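+  # bulk-actions=1 makes the driver commit the bulk after every single action, well before the 30s flush interval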
+  basht_run docker run --rm -ti \
+    --log-driver rchicoli/docker-log-elasticsearch:development \
+    --log-opt elasticsearch-url="${ELASTICSEARCH_URL}" \
+    --log-opt elasticsearch-version="${CLIENT_VERSION}" \
+    --name "$name" \
+    --log-opt elasticsearch-bulk-workers=2 \
+    --log-opt elasticsearch-bulk-actions=1 \
+    --log-opt elasticsearch-bulk-flush-interval=30s \
+    alpine echo -n "$message"
+
+  sleep "${SLEEP_TIME}"
+
+  basht_run curl -s -G --connect-timeout 5 \
+    "${ELASTICSEARCH_URL}/${ELASTICSEARCH_INDEX}/${ELASTICSEARCH_TYPE}/_search?pretty=true&size=1" \
+    --data-urlencode "q=message:\"$message\""
+
+  basht_assert "echo '${output}' | jq -r '.hits.hits[0]._source.message'" == "$message"
+
+  docker rm -f "$name"
+
+}
+
+function test_bulk_disable_actions_and_bulk_size(){
+
+  description="[${BASHT_TEST_FILENAME##*/}] acceptance-tests (v${CLIENT_VERSION}): $BASHT_TEST_NUMBER - bulk disable actions and bulk size"
+  echo "$description"
+
+  name="${BASHT_TEST_FILENAME##*/}.${BASHT_TEST_NUMBER}"
+  message="$((RANDOM)) $description"
+
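+  # with bulk-actions and bulk-size disabled (-1), only the 10s flush interval commits the batch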
+  basht_run docker run --rm -ti \
+    --log-driver rchicoli/docker-log-elasticsearch:development \
+    --log-opt elasticsearch-url="${ELASTICSEARCH_URL}" \
+    --log-opt elasticsearch-version="${CLIENT_VERSION}" \
+    --name "$name" \
+    --log-opt elasticsearch-bulk-workers=1 \
+    --log-opt elasticsearch-bulk-actions="-1" \
+    --log-opt elasticsearch-bulk-size="-1" \
+    --log-opt elasticsearch-bulk-flush-interval="10s" \
+    alpine echo -n "$message"
+
+  # the total number of hits should be zero, because the flush interval has not been reached yet
+  basht_run curl -s -G --connect-timeout 5 \
+    "${ELASTICSEARCH_URL}/${ELASTICSEARCH_INDEX}/${ELASTICSEARCH_TYPE}/_search?pretty=true&size=1" \
+    --data-urlencode "q=message:\"$message\""
+  basht_assert "echo '${output}' | jq -r '.hits.total'" == 0
+
+  # wait for the flush interval to elapse
+  sleep 10s
+
+  basht_run curl -s -G --connect-timeout 5 \
+    "${ELASTICSEARCH_URL}/${ELASTICSEARCH_INDEX}/${ELASTICSEARCH_TYPE}/_search?pretty=true&size=1" \
+    --data-urlencode "q=message:\"$message\""
+
+  basht_assert "echo '${output}' | jq -r '.hits.hits[0]._source.message'" == "$message"
+
+  docker rm -f "$name"
+
+}
+
+function test_bulk_multiple_messages(){
+
+  description="[${BASHT_TEST_FILENAME##*/}] acceptance-tests (v${CLIENT_VERSION}): $BASHT_TEST_NUMBER - bulk multiple messages"
+  echo "$description"
+
+  name="${BASHT_TEST_FILENAME##*/}.${BASHT_TEST_NUMBER}"
+  message="bulk-multi-message"
+
+  basht_run docker run -d \
+    --log-driver rchicoli/docker-log-elasticsearch:development \
+    --log-opt elasticsearch-url="${ELASTICSEARCH_URL}" \
+    --log-opt elasticsearch-version="${CLIENT_VERSION}" \
+    --name "$name" --ip="${WEBAPPER_IP}" --network="docker_development" \
+    --log-opt elasticsearch-bulk-actions=100 \
+    --log-opt elasticsearch-bulk-flush-interval='2s' \
+    --log-opt elasticsearch-bulk-workers=2 \
+    rchicoli/webapper
+
+  bulk_size=99
+
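+  # 99 messages stay below the bulk-actions threshold of 100, so they rely on the 2s flush interval to be committed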
+  for i in $(seq 1 "$bulk_size"); do
+    basht_run curl -s -XPOST -H "Content-Type: application/json" --data "{\"message\":\"$message-$i\"}" "http://${WEBAPPER_IP}:${WEBAPPER_PORT}/log" > /dev/null
+  done
+
+  sleep "${SLEEP_TIME}"
+
+  basht_run docker stop "$name"
+  basht_run docker rm "$name"
+
+  sleep "${SLEEP_TIME}"
+
+  basht_run curl -s --connect-timeout 5 "${ELASTICSEARCH_URL}/${ELASTICSEARCH_INDEX}/${ELASTICSEARCH_TYPE}/_search?pretty=true&size=1"
+  basht_assert "echo '${output}' | jq -r '.hits.total'" == "$bulk_size"
+
+  for i in $(seq 1 "$bulk_size"); do
+    basht_run curl -G -s --connect-timeout 5 "${ELASTICSEARCH_URL}/${ELASTICSEARCH_INDEX}/${ELASTICSEARCH_TYPE}/_search?pretty=true&size=1" \
+      --data-urlencode "q=message:\"${message}-$i\"" > /dev/null
+    basht_assert "echo '${output}' | jq -r '.hits.hits[0]._source.message'" equals "$message-$i"
+  done
+
+}
+
 # May 03 18:59:34 sunlight dockerd[7729]: time="2018-05-03T18:59:34+02:00" level=error
 # msg="level=info msg=\"response error message and status code\"
 # containerID=55eeb1ed63dbb828a7bb0ad2a371e1f1f6781b854e8811bd45a4a14ed92f762e
@@ -35,12 +145,12 @@ function test_bulk_rejections(){
     --log-opt elasticsearch-url="${ELASTICSEARCH_URL}" \
     --log-opt elasticsearch-version="${CLIENT_VERSION}" \
     --name "$name" --ip="${WEBAPPER_IP}" --network="docker_development" \
-    --log-opt elasticsearch-bulk-actions=5000 \
-    --log-opt elasticsearch-bulk-flush-interval='10s' \
-    --log-opt elasticsearch-bulk-workers=50 \
+    --log-opt elasticsearch-bulk-actions=500 \
+    --log-opt elasticsearch-bulk-flush-interval='5s' \
+    --log-opt elasticsearch-bulk-workers=100 \
     rchicoli/webapper

-  bulk_size=5
+  bulk_size=20000

   seq 1 "$bulk_size" | \
     xargs -n 1 -P 4 \
@@ -56,4 +166,4 @@ function test_bulk_rejections(){
   basht_run curl -s --connect-timeout 5 "${ELASTICSEARCH_URL}/${ELASTICSEARCH_INDEX}/${ELASTICSEARCH_TYPE}/_search?pretty=true&size=1"
   basht_assert "echo '${output}' | jq -r '.hits.total'" == "$bulk_size"

-}
+}