Commit 7f79c45

[Lambda]: Update to manage NO SSL for HTTP
1 parent 897fcfe commit 7f79c45

2 files changed: +21 -16 lines

aws/logs_monitoring/README.md

Lines changed: 1 addition & 1 deletion
```diff
@@ -87,7 +87,7 @@ Set the environment variable `DD_SITE` to `datadoghq.eu` and logs are automatica
 
 ### Send logs through a proxy
 
-For TCP, ensure that you disable SSL between the lambda and your proxy by setting `DD_NO_SSL` to `true`
+Ensure that you disable SSL between the lambda and your proxy by setting `DD_NO_SSL` to `true`
 
 Two environment variables can be used to forward logs through a proxy:
 
```
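For reference, the proxy setup this README section describes amounts to pointing the forwarder at the proxy and disabling SSL. A minimal sketch of how the relevant environment variables are resolved, assuming a hypothetical proxy host `my-proxy.internal` on port `3128` (both values are placeholders, not defaults):

```python
import os

# Hypothetical proxy settings; the host and port below are placeholders.
os.environ["DD_URL"] = "my-proxy.internal"  # send logs to the proxy instead of the Datadog intake
os.environ["DD_PORT"] = "3128"              # proxy port
os.environ["DD_NO_SSL"] = "true"            # no SSL between the Lambda and the proxy

# Equivalent to the resolution logic in lambda_function.py.
DD_NO_SSL = os.getenv("DD_NO_SSL", default="false").lower() == "true"
DD_URL = os.getenv("DD_URL", default="lambda-http-intake.logs.datadoghq.com")
DD_PORT = int(os.getenv("DD_PORT", default="443"))

print(DD_URL, DD_PORT, DD_NO_SSL)  # my-proxy.internal 3128 True
```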
aws/logs_monitoring/lambda_function.py

Lines changed: 20 additions & 15 deletions
```diff
@@ -39,18 +39,23 @@
 # For backward-compatibility
 DD_FORWARD_TRACES = False
 
+
+# Return the boolean environment variable corresponding to envvar
+def get_bool_env_var(envvar, default):
+    return os.getenv(envvar, default=default).lower() == "true"
+
+
 # Set this variable to `False` to disable log forwarding.
 # E.g., when you only want to forward metrics from logs.
-DD_FORWARD_LOG = os.getenv("DD_FORWARD_LOG", default="true").lower() == "true"
-
+DD_FORWARD_LOG = get_bool_env_var("DD_FORWARD_LOG", "false")
 
 # Change this value to change the underlying network client (HTTP or TCP),
 # by default, use the TCP client.
-DD_USE_TCP = os.getenv("DD_USE_TCP", default="false").lower() == "true"
+DD_USE_TCP = get_bool_env_var("DD_USE_TCP", "false")
 
-# Set this value to disable SSL over our TCP client.
-# Useful when you are forwarding your logs via TCP to a proxy.
-DD_NO_SSL = os.getenv("DD_NO_SSL", default="false").lower() == "true"
+# Set this value to disable SSL;
+# Useful when you are forwarding your logs to a proxy.
+DD_NO_SSL = get_bool_env_var("DD_NO_SSL", "false")
 
 # Define the destination endpoint to send logs to
 DD_SITE = os.getenv("DD_SITE", default="datadoghq.com")
```
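The new `get_bool_env_var` helper just centralizes the existing `os.getenv(...).lower() == "true"` pattern. A standalone copy with a few illustrative calls (the `DD_UNSET_FLAG` name is made up to show the default path):

```python
import os

def get_bool_env_var(envvar, default):
    # Only the string "true", in any casing, yields True; the default is a string too.
    return os.getenv(envvar, default=default).lower() == "true"

os.environ["DD_USE_TCP"] = "True"
print(get_bool_env_var("DD_USE_TCP", "false"))     # True  (case-insensitive match)

os.environ["DD_NO_SSL"] = "yes"
print(get_bool_env_var("DD_NO_SSL", "false"))      # False (anything other than "true")

print(get_bool_env_var("DD_UNSET_FLAG", "false"))  # False (unset, falls back to the default string)
```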
```diff
@@ -67,6 +72,7 @@
 DD_URL = os.getenv("DD_URL", default="lambda-http-intake.logs." + DD_SITE)
 DD_PORT = int(os.getenv("DD_PORT", default="443"))
 
+
 class ScrubbingRuleConfig(object):
     def __init__(self, name, pattern, placeholder):
         self.name = name
```
```diff
@@ -92,6 +98,7 @@ def __init__(self, name, pattern, placeholder):
     )
 ]
 
+
 # Use for include, exclude, and scrubbing rules
 def compileRegex(rule, pattern):
     if pattern is not None:
```
```diff
@@ -112,7 +119,6 @@ def compileRegex(rule, pattern):
 EXCLUDE_AT_MATCH = os.getenv("EXCLUDE_AT_MATCH", default=None)
 exclude_regex = compileRegex("EXCLUDE_AT_MATCH", EXCLUDE_AT_MATCH)
 
-
 # DD_API_KEY: Datadog API Key
 DD_API_KEY = "<your_api_key>"
 if "DD_KMS_API_KEY" in os.environ:
```
```diff
@@ -271,8 +277,9 @@ class DatadogHTTPClient(object):
     _POST = "POST"
     _HEADERS = {"Content-type": "application/json"}
 
-    def __init__(self, host, port, api_key, scrubber, timeout=10):
-        self._url = "https://{}:{}/v1/input/{}".format(host, port, api_key)
+    def __init__(self, host, port, no_ssl, api_key, scrubber, timeout=10):
+        protocol = "http" if no_ssl else "https"
+        self._url = "{}://{}:{}/v1/input/{}".format(protocol, host, port, api_key)
         self._scrubber = scrubber
         self._timeout = timeout
         self._session = None
```
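The effect of the new `no_ssl` argument is confined to the URL scheme. A small sketch of the resulting intake URL, using the same format string as `DatadogHTTPClient.__init__` (the `build_intake_url` helper and the proxy host are illustrative, not part of the forwarder):

```python
def build_intake_url(host, port, api_key, no_ssl):
    # http when SSL is disabled, https otherwise, as in DatadogHTTPClient.__init__.
    protocol = "http" if no_ssl else "https"
    return "{}://{}:{}/v1/input/{}".format(protocol, host, port, api_key)

print(build_intake_url("lambda-http-intake.logs.datadoghq.com", 443, "<your_api_key>", False))
# https://lambda-http-intake.logs.datadoghq.com:443/v1/input/<your_api_key>

print(build_intake_url("my-proxy.internal", 3128, "<your_api_key>", True))
# http://my-proxy.internal:3128/v1/input/<your_api_key>
```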
```diff
@@ -344,8 +351,8 @@ def batch(self, logs):
         for log in logs:
             log_size_bytes = self._sizeof_bytes(log)
             if size_count > 0 and (
-                    size_count >= self._max_size_count
-                    or size_bytes + log_size_bytes > self._max_size_bytes
+                size_count >= self._max_size_count
+                or size_bytes + log_size_bytes > self._max_size_bytes
             ):
                 batches.append(batch)
                 batch = []
```
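This hunk only re-indents the flush condition, but the condition is the core of the batcher: flush once the current batch holds at least one log and either the count limit is reached or the next log would push it past the byte limit. A simplified standalone sketch (sizes via `len()` and tiny limits, purely for illustration; the real `DatadogBatcher` takes additional limits not shown here):

```python
def batch_logs(logs, max_size_count, max_size_bytes):
    # Simplified version of DatadogBatcher.batch: start a new batch when the
    # current one reached the count limit, or when adding the next log would
    # exceed the byte limit.
    batches, current, size_bytes, size_count = [], [], 0, 0
    for log in logs:
        log_size_bytes = len(log)
        if size_count > 0 and (
            size_count >= max_size_count
            or size_bytes + log_size_bytes > max_size_bytes
        ):
            batches.append(current)
            current, size_bytes, size_count = [], 0, 0
        current.append(log)
        size_bytes += log_size_bytes
        size_count += 1
    if current:
        batches.append(current)
    return batches

print(batch_logs(["aa", "bb", "cc", "dd"], max_size_count=3, max_size_bytes=5))
# [['aa', 'bb'], ['cc', 'dd']]
```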
```diff
@@ -416,7 +423,7 @@ def forward_logs(logs):
         cli = DatadogTCPClient(DD_URL, DD_PORT, DD_NO_SSL, DD_API_KEY, scrubber)
     else:
         batcher = DatadogBatcher(256 * 1000, 2 * 1000 * 1000, 200)
-        cli = DatadogHTTPClient(DD_URL, DD_PORT, DD_API_KEY, scrubber)
+        cli = DatadogHTTPClient(DD_URL, DD_PORT, DD_NO_SSL, DD_API_KEY, scrubber)
 
     with DatadogClient(cli) as client:
         for batch in batcher.batch(logs):
```
```diff
@@ -678,7 +685,7 @@ def reformat_record(record):
 def awslogs_handler(event, context, metadata):
     # Get logs
     with gzip.GzipFile(
-            fileobj=BytesIO(base64.b64decode(event["awslogs"]["data"]))
+        fileobj=BytesIO(base64.b64decode(event["awslogs"]["data"]))
     ) as decompress_stream:
         # Reading line by line avoid a bug where gzip would take a very long
         # time (>5min) for file around 60MB gzipped
```
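For context, the `awslogs` payload arrives base64-encoded and gzip-compressed, and the handler streams it line by line for the reason given in the comment above. A self-contained sketch of the same decoding pattern, with a fabricated sample event (the payload fields are illustrative):

```python
import base64
import gzip
import json
from io import BytesIO

# Fabricated awslogs-style event: a gzipped, base64-encoded JSON payload.
payload = {"logGroup": "/aws/lambda/example", "logEvents": [{"message": "hello"}]}
event = {
    "awslogs": {
        "data": base64.b64encode(
            gzip.compress(json.dumps(payload).encode("utf-8"))
        ).decode("utf-8")
    }
}

# Same decoding pattern as awslogs_handler: decode, wrap in BytesIO, stream
# through GzipFile, and read line by line rather than all at once.
with gzip.GzipFile(
    fileobj=BytesIO(base64.b64decode(event["awslogs"]["data"]))
) as decompress_stream:
    data = b"".join(line for line in decompress_stream)

print(json.loads(data)["logEvents"][0]["message"])  # hello
```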
```diff
@@ -752,7 +759,6 @@ def awslogs_handler(event, context, metadata):
 
 # Handle Cloudwatch Events
 def cwevent_handler(event, metadata):
-
     data = event
 
     # Set the source on the log
```
```diff
@@ -770,7 +776,6 @@ def cwevent_handler(event, metadata):
 
 # Handle Sns events
 def sns_handler(event, metadata):
-
     data = event
     # Set the source on the log
     metadata[DD_SOURCE] = parse_event_source(event, "sns")
```
