2 files changed: +14 −2 lines changed

File 1 — example script:

@@ -39,6 +39,10 @@ def main():
         # flush_buffer_size=3,  # uncomment to see that it works slower
         # flush_interval=3.0,  # interval in seconds
         unhandled_exception_logger=logger,
+        kafka_producer_args={
+            'api_version_auto_timeout_ms': 1000000,
+            'request_timeout_ms': 1000000,
+        },
         # you can include arbitrary fields to all produced logs
         additional_fields={
             "service": "test_service"
@@ -51,7 +55,7 @@ def main():

     # test logging
     logger.debug("Test debug level logs")
-    for idx in range(6):
+    for idx in range(3):
         logger.info("Test log #%d", idx)
         time.sleep(0.5)
File 2 — handler module:

@@ -9,5 +9,6 @@
 import sys
 from threading import Lock, Thread, Timer
 import time
+import datetime

 from kafka import KafkaProducer  # pylint: disable=import-error

@@ -172,7 +173,14 @@ def prepare_record_dict(self, record):
                 # if there is no formatting in the logging call
                 value = str(value)
             rec[key] = "" if value is None else value
-
+            if key == 'created':
+                # inspired by: cmanaha/python-elasticsearch-logger
+                created_date = \
+                    datetime.datetime.utcfromtimestamp(record.created)
+                rec['timestamp'] = \
+                    "{0!s}.{1:03d}Z".format(
+                        created_date.strftime('%Y-%m-%dT%H:%M:%S'),
+                        int(created_date.microsecond / 1000))
         # apply preprocessor(s)
         for preprocessor in self.log_preprocess:
             rec = preprocessor(rec)
You can’t perform that action at this time.
0 commit comments