Commit df1c5cb

Merge pull request #17 from kusha/elk-timestamp
Elk timestamp
2 parents 559cb35 + 4be5955 commit df1c5cb

File tree

2 files changed: +14 -2 lines changed

examples/simple.py

Lines changed: 5 additions & 1 deletion

@@ -39,6 +39,10 @@ def main():
         # flush_buffer_size=3,  # uncomment to see that it works slower
         # flush_interval=3.0,  # interval in seconds
         unhandled_exception_logger=logger,
+        kafka_producer_args={
+            'api_version_auto_timeout_ms': 1000000,
+            'request_timeout_ms': 1000000,
+        },
         # you can include arbitrary fields to all produced logs
         additional_fields={
             "service": "test_service"
@@ -51,7 +55,7 @@ def main():

     # test logging
     logger.debug("Test debug level logs")
-    for idx in range(6):
+    for idx in range(3):
         logger.info("Test log #%d", idx)
         time.sleep(0.5)
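The new kafka_producer_args option in the example forwards extra settings to the underlying kafka-python KafkaProducer. Below is a minimal sketch of that idea in isolation, not the handler's actual internals: the make_producer helper and the localhost:9092 address are illustrative assumptions, while api_version_auto_timeout_ms and request_timeout_ms are standard kafka-python producer settings (in milliseconds).

    from kafka import KafkaProducer

    def make_producer(bootstrap_servers, kafka_producer_args=None):
        # Hypothetical helper: merge user-supplied producer settings into the
        # KafkaProducer call, the way a handler accepting kafka_producer_args
        # presumably does. Requires kafka-python and a reachable broker.
        kafka_producer_args = kafka_producer_args or {}
        return KafkaProducer(
            bootstrap_servers=bootstrap_servers,
            **kafka_producer_args,
        )

    producer = make_producer(
        "localhost:9092",  # assumed broker address for the sketch
        kafka_producer_args={
            # Generous timeouts, matching the example: more time for API
            # version probing and for individual broker requests.
            "api_version_auto_timeout_ms": 1000000,
            "request_timeout_ms": 1000000,
        },
    )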

kafka_logger/handlers.py

Lines changed: 9 additions & 1 deletion

@@ -9,6 +9,7 @@
 import sys
 from threading import Lock, Thread, Timer
 import time
+import datetime

 from kafka import KafkaProducer  # pylint: disable=import-error

@@ -172,7 +173,14 @@ def prepare_record_dict(self, record):
                 # if there is no formatting in the logging call
                 value = str(value)
             rec[key] = "" if value is None else value
-
+            if key == 'created':
+                # inspired by: cmanaha/python-elasticsearch-logger
+                created_date = \
+                    datetime.datetime.utcfromtimestamp(record.created)
+                rec['timestamp'] = \
+                    "{0!s}.{1:03d}Z".format(
+                        created_date.strftime('%Y-%m-%dT%H:%M:%S'),
+                        int(created_date.microsecond / 1000))
         # apply preprocessor(s)
         for preprocessor in self.log_preprocess:
             rec = preprocessor(rec)
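The added block converts the record's created epoch time into an ELK-friendly timestamp string. A standalone sketch of the same formatting, with an illustrative helper name and sample value:

    import datetime

    def iso_millis_utc(created):
        # Mirrors the formatting added in prepare_record_dict: UTC date-time
        # with millisecond precision and a literal "Z" suffix.
        created_date = datetime.datetime.utcfromtimestamp(created)
        return "{0!s}.{1:03d}Z".format(
            created_date.strftime('%Y-%m-%dT%H:%M:%S'),
            int(created_date.microsecond / 1000))

    # record.created is a Unix timestamp in seconds, e.g. 0.123 formats as
    # "1970-01-01T00:00:00.123Z".
    print(iso_millis_utc(0.123))

The trailing "Z" marks the value as UTC, which keeps the field unambiguous for downstream date parsing.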
