@@ -1,14 +1,12 @@
-import re
-import os
 import logging
+import os
+import re
 
 from collections import defaultdict
 from time import time
 
-
 import boto3
 from botocore.exceptions import ClientError
-
 from datadog_lambda.metric import lambda_metric
 
 
@@ -32,7 +30,8 @@
 log = logging.getLogger()
 log.setLevel(logging.INFO)
 
-def set_log_level_to_env_var():
+
+def set_log_level_to_env_var():
     """Reads the log level env var and sets the log level according to its value
 
     Defaults to INFO level
@@ -45,6 +44,7 @@ def set_log_level_to_env_var():
     if env_var_log_level == "warn" or env_var_log_level == "warning":
         log.setLevel(logging.WARN)
 
+
 set_log_level_to_env_var()
 
 
@@ -86,7 +86,15 @@ def get_dd_tag_string_from_aws_dict(aws_key_value_tag_dict):
 def parse_get_resources_response_for_tags_by_arn(get_resources_page):
     """Parses a page of GetResources response for the mapping from ARN to tags
 
+    Args:
+        get_resources_page (dict<str, multiple types>): one page of the GetResources response. Ex:
+            [{
+                'ResourceARN': 'arn:aws:lambda:us-east-1:123497598159:function:my-test-lambda',
+                'Tags': [{'Key': 'stage', 'Value': 'dev'}, {'Key': 'team', 'Value': 'serverless'}]
+            }]
 
+    Returns:
+        tags_by_arn (dict<str, str[]>): Lambda tag lists keyed by ARN
     """
     tags_by_arn = defaultdict(list)
 
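
Roughly how the example in that docstring maps onto the documented return shape — a minimal standalone sketch, assuming the page is already the list of ResourceARN/Tags mappings and using a plain "Key:Value" join in place of the forwarder's get_dd_tag_string_from_aws_dict:

from collections import defaultdict

sample_page = [
    {
        "ResourceARN": "arn:aws:lambda:us-east-1:123497598159:function:my-test-lambda",
        "Tags": [{"Key": "stage", "Value": "dev"}, {"Key": "team", "Value": "serverless"}],
    }
]

tags_by_arn = defaultdict(list)
for resource in sample_page:
    for tag in resource.get("Tags", []):
        # Simplified tag formatting for the sketch only
        tags_by_arn[resource["ResourceARN"]].append(
            "{}:{}".format(tag["Key"], tag["Value"])
        )

print(dict(tags_by_arn))
# {'arn:aws:lambda:us-east-1:123497598159:function:my-test-lambda': ['stage:dev', 'team:serverless']}
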
@@ -180,6 +188,7 @@ def get_lambda_tags(self, resource_arn):
 
         return function_tags
 
+
 # Store the cache in the global scope so that it will be reused as long as
 # the log forwarder Lambda container is running
 account_lambda_tags_cache = LambdaTagsCache()
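
The comment above relies on a Lambda runtime property: objects created at module scope are built once per container and then shared by every warm invocation. A minimal sketch of that behaviour with illustrative names (the forwarder's real cache is the LambdaTagsCache instance above):

from time import time

# Illustrative module-level cache, built once per container
_TAGS_CACHE = {"tags": None, "fetched_at": 0.0}


def _fetch_tags():
    # Stand-in for the expensive tag lookup the real cache avoids repeating
    return ["team:serverless", "stage:dev"]


def handler(event, context):
    # Warm invocations reuse the cached value; this sketch refreshes it hourly
    if _TAGS_CACHE["tags"] is None or time() - _TAGS_CACHE["fetched_at"] > 3600:
        _TAGS_CACHE["tags"] = _fetch_tags()
        _TAGS_CACHE["fetched_at"] = time()
    return _TAGS_CACHE["tags"]
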
@@ -236,11 +245,15 @@ def parse_and_submit_enhanced_metrics(logs):
     """
     # Wrap everything in try/catch to prevent failing the Lambda on enhanced metrics
     try:
-        enhanced_metrics = generate_enhanced_lambda_metrics(logs, account_lambda_tags_cache)
+        enhanced_metrics = generate_enhanced_lambda_metrics(
+            logs, account_lambda_tags_cache
+        )
         for enhanced_metric in enhanced_metrics:
             enhanced_metric.submit_to_dd()
     except Exception:
-        log.exception("Encountered an error while trying to parse and submit enhanced metrics")
+        log.exception(
+            "Encountered an error while trying to parse and submit enhanced metrics"
+        )
 
 
 def generate_enhanced_lambda_metrics(logs, tags_cache):
@@ -283,12 +296,7 @@ def generate_enhanced_lambda_metrics(logs, tags_cache):
 
         # If the log dict is missing any of this data it's not a Lambda REPORT log and we move on
         if not all(
-            (
-                log_function_arn,
-                log_message,
-                timestamp,
-                log_message.startswith("REPORT"),
-            )
+            (log_function_arn, log_message, timestamp, log_message.startswith("REPORT"))
         ):
             continue
 
@@ -313,6 +321,7 @@ def generate_enhanced_lambda_metrics(logs, tags_cache):
 
     return enhanced_metrics
 
+
 # Names to use for metrics and for the named regex groups
 REQUEST_ID_FIELD_NAME = "request_id"
DURATION_METRIC_NAME = "duration"
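
These names double as named regex groups when the REPORT line is parsed further down. A rough sketch of that idea against a typical Lambda REPORT line (the pattern here is illustrative, not the one the forwarder defines):

import re

REQUEST_ID_FIELD_NAME = "request_id"
DURATION_METRIC_NAME = "duration"

# Illustrative pattern built from the constants; the forwarder ships its own
REPORT_PATTERN = re.compile(
    r"REPORT RequestId:\s+(?P<{}>[\w-]+)\s+Duration:\s+(?P<{}>[\d.]+)\s+ms".format(
        REQUEST_ID_FIELD_NAME, DURATION_METRIC_NAME
    )
)

report_line = (
    "REPORT RequestId: 8edab1f8-7d34-4a8e-a965-15ccbbb78d4c\t"
    "Duration: 0.62 ms\tBilled Duration: 100 ms\tMemory Size: 128 MB"
)
match = REPORT_PATTERN.search(report_line)
if match:
    print(match.group(REQUEST_ID_FIELD_NAME))  # 8edab1f8-7d34-4a8e-a965-15ccbbb78d4c
    print(match.group(DURATION_METRIC_NAME))  # 0.62
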
@@ -364,9 +373,9 @@ def parse_lambda_tags_from_arn(arn):
     _, _, _, region, account_id, _, function_name = split_arn
 
     return [
-        "functionname:{}".format(function_name),
-        "account_id:{}".format(account_id),
         "region:{}".format(region),
+        "account_id:{}".format(account_id),
+        "functionname:{}".format(function_name),
     ]
 
 
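
The hunk above only reorders the returned tags; the ARN handling itself is unchanged. A quick worked example using the ARN from the docstring earlier (a sketch of the split, not the forwarder's full parsing):

arn = "arn:aws:lambda:us-east-1:123497598159:function:my-test-lambda"
split_arn = arn.split(":")

# 7 parts: ['arn', 'aws', 'lambda', 'us-east-1', '123497598159', 'function', 'my-test-lambda']
_, _, _, region, account_id, _, function_name = split_arn

tags = [
    "region:{}".format(region),
    "account_id:{}".format(account_id),
    "functionname:{}".format(function_name),
]
print(tags)
# ['region:us-east-1', 'account_id:123497598159', 'functionname:my-test-lambda']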