diff --git a/newrelic/api/application.py b/newrelic/api/application.py index 9297509bc..ebc8356a7 100644 --- a/newrelic/api/application.py +++ b/newrelic/api/application.py @@ -22,7 +22,6 @@ class Application(object): - _lock = threading.Lock() _instances = {} @@ -164,7 +163,9 @@ def record_transaction(self, data): def record_log_event(self, message, level=None, timestamp=None, attributes=None, priority=None): if self.active: - self._agent.record_log_event(self._name, message, level, timestamp, attributes=attributes, priority=priority) + self._agent.record_log_event( + self._name, message, level, timestamp, attributes=attributes, priority=priority + ) def normalize_name(self, name, rule_type="url"): if self.active: diff --git a/newrelic/api/log.py b/newrelic/api/log.py index 2c372117b..cdd6737e3 100644 --- a/newrelic/api/log.py +++ b/newrelic/api/log.py @@ -21,8 +21,8 @@ from newrelic.api.time_trace import get_linking_metadata from newrelic.api.transaction import current_transaction, record_log_event from newrelic.common import agent_http -from newrelic.common.object_names import parse_exc_info from newrelic.common.encoding_utils import safe_json_encode +from newrelic.common.object_names import parse_exc_info from newrelic.core.attribute import truncate from newrelic.core.config import global_settings, is_expected_error from newrelic.packages import six @@ -133,8 +133,8 @@ def __init__( "The contributed NewRelicLogHandler has been superseded by automatic instrumentation for " "logging in the standard lib. 
If for some reason you need to manually configure a handler, " "please use newrelic.api.log.NewRelicLogForwardingHandler to take advantage of all the " - "features included in application log forwarding such as proper batching.", - DeprecationWarning + "features included in application log forwarding such as proper batching.", + DeprecationWarning, ) super(NewRelicLogHandler, self).__init__(level=level) self.license_key = license_key or self.settings.license_key diff --git a/newrelic/api/transaction.py b/newrelic/api/transaction.py index d0164eaf2..26b6bfa4a 100644 --- a/newrelic/api/transaction.py +++ b/newrelic/api/transaction.py @@ -55,6 +55,7 @@ create_attributes, create_user_attributes, process_user_attribute, + resolve_logging_context_attributes, truncate, ) from newrelic.core.attribute_filter import ( @@ -310,7 +311,7 @@ def __init__(self, application, enabled=None, source=None): self.synthetics_job_id = None self.synthetics_monitor_id = None self.synthetics_header = None - + # Synthetics Info Header self.synthetics_type = None self.synthetics_initiator = None @@ -1545,16 +1546,17 @@ def record_log_event(self, message, level=None, timestamp=None, attributes=None, message = truncate(message, MAX_LOG_MESSAGE_LENGTH) - attrs = get_linking_metadata() - if attributes and (settings and settings.application_logging and settings.application_logging.forwarding and settings.application_logging.forwarding.context_data and settings.application_logging.forwarding.context_data.enabled): - # TODO add attibute filtering - attrs.update({"context." 
+ k: safe_json_encode(v, ignore_string_types=True) for k, v in six.iteritems(attributes)}) + collected_attributes = get_linking_metadata() + if attributes and (settings and settings.application_logging.forwarding.context_data.enabled): + context_attributes = resolve_logging_context_attributes(attributes, settings.attribute_filter, "context.") + if context_attributes: + collected_attributes.update(context_attributes) event = LogEventNode( timestamp=timestamp, level=level, message=message, - attributes=attrs, + attributes=collected_attributes, ) self._log_events.add(event, priority=priority) diff --git a/newrelic/common/encoding_utils.py b/newrelic/common/encoding_utils.py index e89573324..f075d42d3 100644 --- a/newrelic/common/encoding_utils.py +++ b/newrelic/common/encoding_utils.py @@ -31,14 +31,14 @@ from newrelic.packages import six -HEXDIGLC_RE = re.compile('^[0-9a-f]+$') -DELIMITER_FORMAT_RE = re.compile('[ \t]*,[ \t]*') +HEXDIGLC_RE = re.compile("^[0-9a-f]+$") +DELIMITER_FORMAT_RE = re.compile("[ \t]*,[ \t]*") PARENT_TYPE = { - '0': 'App', - '1': 'Browser', - '2': 'Mobile', + "0": "App", + "1": "Browser", + "2": "Mobile", } -BASE64_DECODE_STR = getattr(base64, 'decodestring', None) +BASE64_DECODE_STR = getattr(base64, "decodestring", None) # Functions for encoding/decoding JSON. These wrappers are used in order @@ -48,6 +48,7 @@ # be supplied as key word arguments to allow the wrappers to supply # defaults. + def json_encode(obj, **kwargs): _kwargs = {} @@ -79,21 +80,21 @@ def json_encode(obj, **kwargs): # The third is eliminate white space after separators to trim the # size of the data being sent. 
- if type(b'') is type(''): # NOQA - _kwargs['encoding'] = 'latin-1' + if type(b"") is type(""): # NOQA + _kwargs["encoding"] = "latin-1" def _encode(o): if isinstance(o, bytes): - return o.decode('latin-1') + return o.decode("latin-1") elif isinstance(o, types.GeneratorType): return list(o) - elif hasattr(o, '__iter__'): + elif hasattr(o, "__iter__"): return list(iter(o)) - raise TypeError(repr(o) + ' is not JSON serializable') + raise TypeError(repr(o) + " is not JSON serializable") - _kwargs['default'] = _encode + _kwargs["default"] = _encode - _kwargs['separators'] = (',', ':') + _kwargs["separators"] = (",", ":") # We still allow supplied arguments to override internal defaults if # necessary, but the caller must be sure they aren't dependent on @@ -116,7 +117,7 @@ def safe_json_encode(obj, ignore_string_types=False, **kwargs): # Performs the same operation as json_encode but replaces unserializable objects with a string containing their class name. # If ignore_string_types is True, do not encode string types further. # Currently used for safely encoding logging attributes. - + if ignore_string_types and isinstance(obj, (six.string_types, six.binary_type)): return obj @@ -147,7 +148,7 @@ def xor_cipher_genkey(key, length=None): """ - return bytearray(key[:length], encoding='ascii') + return bytearray(key[:length], encoding="ascii") def xor_cipher_encrypt(text, key): @@ -213,8 +214,8 @@ def xor_cipher_encrypt_base64(text, key): # isn't UTF-8 and so fail with a Unicode decoding error. if isinstance(text, bytes): - text = text.decode('latin-1') - text = text.encode('utf-8').decode('latin-1') + text = text.decode("latin-1") + text = text.encode("utf-8").decode("latin-1") result = base64.b64encode(bytes(xor_cipher_encrypt(text, key))) @@ -225,7 +226,7 @@ def xor_cipher_encrypt_base64(text, key): # produces characters within that codeset. 
if six.PY3: - return result.decode('ascii') + return result.decode("ascii") return result @@ -246,7 +247,7 @@ def xor_cipher_decrypt_base64(text, key): result = xor_cipher_decrypt(bytearray(base64.b64decode(text)), key) - return bytes(result).decode('utf-8') + return bytes(result).decode("utf-8") obfuscate = xor_cipher_encrypt_base64 @@ -263,13 +264,13 @@ def unpack_field(field): """ if not isinstance(field, bytes): - field = field.encode('UTF-8') + field = field.encode("UTF-8") - data = getattr(base64, 'decodebytes', BASE64_DECODE_STR)(field) + data = getattr(base64, "decodebytes", BASE64_DECODE_STR)(field) data = zlib.decompress(data) if isinstance(data, bytes): - data = data.decode('Latin-1') + data = data.decode("Latin-1") data = json_decode(data) return data @@ -283,13 +284,13 @@ def generate_path_hash(name, seed): """ - rotated = ((seed << 1) | (seed >> 31)) & 0xffffffff + rotated = ((seed << 1) | (seed >> 31)) & 0xFFFFFFFF if not isinstance(name, bytes): - name = name.encode('UTF-8') + name = name.encode("UTF-8") - path_hash = (rotated ^ int(hashlib.md5(name).hexdigest()[-8:], base=16)) - return '%08x' % path_hash + path_hash = rotated ^ int(hashlib.md5(name).hexdigest()[-8:], base=16) + return "%08x" % path_hash def base64_encode(text): @@ -314,11 +315,11 @@ def base64_encode(text): # and so fail with a Unicode decoding error. if isinstance(text, bytes): - text = text.decode('latin-1') - text = text.encode('utf-8').decode('latin-1') + text = text.decode("latin-1") + text = text.encode("utf-8").decode("latin-1") # Re-encode as utf-8 when passing to b64 encoder - result = base64.b64encode(text.encode('utf-8')) + result = base64.b64encode(text.encode("utf-8")) # The result from base64 encoding will be a byte string but since # dealing with byte strings in Python 2 and Python 3 is quite @@ -327,7 +328,7 @@ def base64_encode(text): # produces characters within that codeset. 
if six.PY3: - return result.decode('ascii') + return result.decode("ascii") return result @@ -337,7 +338,7 @@ def base64_decode(text): the decoded text is UTF-8 encoded. """ - return base64.b64decode(text).decode('utf-8') + return base64.b64decode(text).decode("utf-8") def gzip_compress(text): @@ -348,9 +349,9 @@ def gzip_compress(text): compressed_data = io.BytesIO() if six.PY3 and isinstance(text, str): - text = text.encode('utf-8') + text = text.encode("utf-8") - with gzip.GzipFile(fileobj=compressed_data, mode='wb') as f: + with gzip.GzipFile(fileobj=compressed_data, mode="wb") as f: f.write(text) return compressed_data.getvalue() @@ -363,7 +364,7 @@ def gzip_decompress(payload): """ data_bytes = io.BytesIO(payload) decoded_data = gzip.GzipFile(fileobj=data_bytes).read() - return decoded_data.decode('utf-8') + return decoded_data.decode("utf-8") def serverless_payload_encode(payload): @@ -381,7 +382,7 @@ def serverless_payload_encode(payload): def ensure_str(s): if not isinstance(s, six.string_types): try: - s = s.decode('utf-8') + s = s.decode("utf-8") except Exception: return return s @@ -393,8 +394,8 @@ def serverless_payload_decode(text): Python object. 
""" - if hasattr(text, 'decode'): - text = text.decode('utf-8') + if hasattr(text, "decode"): + text = text.decode("utf-8") decoded_bytes = base64.b64decode(text) uncompressed_data = gzip_decompress(decoded_bytes) @@ -407,8 +408,7 @@ def decode_newrelic_header(encoded_header, encoding_key): decoded_header = None if encoded_header: try: - decoded_header = json_decode(deobfuscate( - encoded_header, encoding_key)) + decoded_header = json_decode(deobfuscate(encoded_header, encoding_key)) except Exception: pass @@ -425,7 +425,6 @@ def convert_to_cat_metadata_value(nr_headers): class DistributedTracePayload(dict): - version = (0, 1) def text(self): @@ -460,17 +459,16 @@ def decode(cls, payload): class W3CTraceParent(dict): - def text(self): - if 'id' in self: - guid = self['id'] + if "id" in self: + guid = self["id"] else: - guid = '{:016x}'.format(random.getrandbits(64)) + guid = "{:016x}".format(random.getrandbits(64)) - return '00-{}-{}-{:02x}'.format( - self['tr'].lower().zfill(32), + return "00-{}-{}-{:02x}".format( + self["tr"].lower().zfill(32), guid, - int(self.get('sa', 0)), + int(self.get("sa", 0)), ) @classmethod @@ -479,7 +477,7 @@ def decode(cls, payload): if len(payload) < 55: return None - fields = payload.split('-', 4) + fields = payload.split("-", 4) # Expect that there are at least 4 fields if len(fields) < 4: @@ -492,11 +490,11 @@ def decode(cls, payload): return None # Version 255 is invalid - if version == 'ff': + if version == "ff": return None # Expect exactly 4 fields if version 00 - if version == '00' and len(fields) != 4: + if version == "00" and len(fields) != 4: return None # Check field lengths and values @@ -506,18 +504,15 @@ def decode(cls, payload): # trace_id or parent_id of all 0's are invalid trace_id, parent_id = fields[1:3] - if parent_id == '0' * 16 or trace_id == '0' * 32: + if parent_id == "0" * 16 or trace_id == "0" * 32: return None return cls(tr=trace_id, id=parent_id) class W3CTraceState(OrderedDict): - def text(self, 
limit=32): - return ','.join( - '{}={}'.format(k, v) - for k, v in itertools.islice(self.items(), limit)) + return ",".join("{}={}".format(k, v) for k, v in itertools.islice(self.items(), limit)) @classmethod def decode(cls, tracestate): @@ -525,9 +520,8 @@ def decode(cls, tracestate): vendors = cls() for entry in entries: - vendor_value = entry.split('=', 2) - if (len(vendor_value) != 2 or - any(len(v) > 256 for v in vendor_value)): + vendor_value = entry.split("=", 2) + if len(vendor_value) != 2 or any(len(v) > 256 for v in vendor_value): continue vendor, value = vendor_value @@ -537,36 +531,38 @@ def decode(cls, tracestate): class NrTraceState(dict): - FIELDS = ('ty', 'ac', 'ap', 'id', 'tx', 'sa', 'pr') + FIELDS = ("ty", "ac", "ap", "id", "tx", "sa", "pr") def text(self): - pr = self.get('pr', '') + pr = self.get("pr", "") if pr: - pr = ('%.6f' % pr).rstrip('0').rstrip('.') - - payload = '-'.join(( - '0-0', - self['ac'], - self['ap'], - self.get('id', ''), - self.get('tx', ''), - '1' if self.get('sa') else '0', - pr, - str(self['ti']), - )) - return '{}@nr={}'.format( - self.get('tk', self['ac']), + pr = ("%.6f" % pr).rstrip("0").rstrip(".") + + payload = "-".join( + ( + "0-0", + self["ac"], + self["ap"], + self.get("id", ""), + self.get("tx", ""), + "1" if self.get("sa") else "0", + pr, + str(self["ti"]), + ) + ) + return "{}@nr={}".format( + self.get("tk", self["ac"]), payload, ) @classmethod def decode(cls, payload, tk): - fields = payload.split('-', 9) + fields = payload.split("-", 9) if len(fields) >= 9 and all(fields[:4]) and fields[8]: data = cls(tk=tk) try: - data['ti'] = int(fields[8]) + data["ti"] = int(fields[8]) except: return @@ -574,24 +570,24 @@ def decode(cls, payload, tk): if value: data[name] = value - if data['ty'] in PARENT_TYPE: - data['ty'] = PARENT_TYPE[data['ty']] + if data["ty"] in PARENT_TYPE: + data["ty"] = PARENT_TYPE[data["ty"]] else: return - if 'sa' in data: - if data['sa'] == '1': - data['sa'] = True - elif data['sa'] == '0': - 
data['sa'] = False + if "sa" in data: + if data["sa"] == "1": + data["sa"] = True + elif data["sa"] == "0": + data["sa"] = False else: - data['sa'] = None + data["sa"] = None - if 'pr' in data: + if "pr" in data: try: - data['pr'] = float(fields[7]) + data["pr"] = float(fields[7]) except: - data['pr'] = None + data["pr"] = None return data @@ -609,7 +605,7 @@ def capitalize(string): def camel_case(string, upper=False): """ Convert a string of snake case to camel case. - + Setting upper=True will capitalize the first letter. Defaults to False, where no change is made to the first letter. """ string = ensure_str(string) @@ -630,6 +626,8 @@ def camel_case(string, upper=False): _snake_case_re = re.compile(r"([A-Z]+[a-z]*)") + + def snake_case(string): """Convert a string of camel case to snake case. Assumes no repeated runs of capital letters.""" string = ensure_str(string) diff --git a/newrelic/config.py b/newrelic/config.py index b1bff73a8..6782a0396 100644 --- a/newrelic/config.py +++ b/newrelic/config.py @@ -553,6 +553,8 @@ def _process_configuration(section): _process_setting(section, "application_logging.forwarding.max_samples_stored", "getint", None) _process_setting(section, "application_logging.forwarding.enabled", "getboolean", None) _process_setting(section, "application_logging.forwarding.context_data.enabled", "getboolean", None) + _process_setting(section, "application_logging.forwarding.context_data.include", "get", _map_inc_excl_attributes) + _process_setting(section, "application_logging.forwarding.context_data.exclude", "get", _map_inc_excl_attributes) _process_setting(section, "application_logging.metrics.enabled", "getboolean", None) _process_setting(section, "application_logging.local_decorating.enabled", "getboolean", None) diff --git a/newrelic/core/agent.py b/newrelic/core/agent.py index 376f90c9f..31cef43e8 100644 --- a/newrelic/core/agent.py +++ b/newrelic/core/agent.py @@ -339,7 +339,6 @@ def activate_application(self, app_name, 
linked_applications=None, timeout=None, with self._lock: application = self._applications.get(app_name, None) if not application: - process_id = os.getpid() if process_id != self._process_id: @@ -449,7 +448,6 @@ def register_data_source(self, source, application=None, name=None, settings=Non instance.register_data_source(source, name, settings, **properties) def remove_thread_utilization(self): - _logger.debug("Removing thread utilization data source from all applications") source_name = thread_utilization_data_source.__name__ @@ -565,7 +563,7 @@ def record_ml_event(self, app_name, event_type, params): application.record_ml_event(event_type, params) - def record_log_event(self, app_name, message, level=None, timestamp=None, attributes=None, priority=None): + def record_log_event(self, app_name, message, level=None, timestamp=None, attributes=None, priority=None): application = self._applications.get(app_name, None) if application is None or not application.active: return diff --git a/newrelic/core/application.py b/newrelic/core/application.py index dcdb21f64..b7f6fb27d 100644 --- a/newrelic/core/application.py +++ b/newrelic/core/application.py @@ -945,7 +945,9 @@ def record_log_event(self, message, level=None, timestamp=None, attributes=None, if message: with self._stats_custom_lock: - event = self._stats_engine.record_log_event(message, level, timestamp, attributes=attributes, priority=priority) + event = self._stats_engine.record_log_event( + message, level, timestamp, attributes=attributes, priority=priority + ) if event: self._global_events_account += 1 @@ -1506,7 +1508,9 @@ def harvest(self, shutdown=False, flexible=False): # Send metrics self._active_session.send_metric_data(self._period_start, period_end, metric_data) if dimensional_metric_data: - self._active_session.send_dimensional_metric_data(self._period_start, period_end, dimensional_metric_data) + self._active_session.send_dimensional_metric_data( + self._period_start, period_end, 
dimensional_metric_data + ) _logger.debug("Done sending data for harvest of %r.", self._app_name) diff --git a/newrelic/core/attribute.py b/newrelic/core/attribute.py index 10ae8e459..04dfdec34 100644 --- a/newrelic/core/attribute.py +++ b/newrelic/core/attribute.py @@ -18,6 +18,7 @@ from newrelic.core.attribute_filter import ( DST_ALL, DST_ERROR_COLLECTOR, + DST_LOG_EVENT_CONTEXT_DATA, DST_SPAN_EVENTS, DST_TRANSACTION_EVENTS, DST_TRANSACTION_SEGMENTS, @@ -174,6 +175,32 @@ def resolve_agent_attributes(attr_dict, attribute_filter, target_destination, at return a_attrs +def resolve_logging_context_attributes(attr_dict, attribute_filter, attr_prefix, attr_class=dict): + """ + Helper function for processing logging context attributes that require a prefix. Correctly filters attribute names + before applying the required prefix, and then applies the process_user_attribute after the prefix is applied to + correctly check length requirements. + """ + c_attrs = attr_class() + + for attr_name, attr_value in attr_dict.items(): + dest = attribute_filter.apply(attr_name, DST_LOG_EVENT_CONTEXT_DATA) + + if dest & DST_LOG_EVENT_CONTEXT_DATA: + try: + attr_name, attr_value = process_user_attribute(attr_prefix + attr_name, attr_value) + if attr_name: + c_attrs[attr_name] = attr_value + except Exception: + _logger.debug( + "Log event context attribute failed to validate for unknown reason. Dropping context attribute: %s. Check traceback for clues.", + attr_name, + exc_info=True, + ) + + return c_attrs + + def create_user_attributes(attr_dict, attribute_filter): destinations = DST_ALL return create_attributes(attr_dict, destinations, attribute_filter) diff --git a/newrelic/core/attribute_filter.py b/newrelic/core/attribute_filter.py index 8d4a93843..8cd26cb30 100644 --- a/newrelic/core/attribute_filter.py +++ b/newrelic/core/attribute_filter.py @@ -15,16 +15,17 @@ # Attribute "destinations" represented as bitfields. 
DST_NONE = 0x0 -DST_ALL = 0x3F -DST_TRANSACTION_EVENTS = 1 << 0 -DST_TRANSACTION_TRACER = 1 << 1 -DST_ERROR_COLLECTOR = 1 << 2 -DST_BROWSER_MONITORING = 1 << 3 -DST_SPAN_EVENTS = 1 << 4 +DST_ALL = 0x7F +DST_TRANSACTION_EVENTS = 1 << 0 +DST_TRANSACTION_TRACER = 1 << 1 +DST_ERROR_COLLECTOR = 1 << 2 +DST_BROWSER_MONITORING = 1 << 3 +DST_SPAN_EVENTS = 1 << 4 DST_TRANSACTION_SEGMENTS = 1 << 5 +DST_LOG_EVENT_CONTEXT_DATA = 1 << 6 -class AttributeFilter(object): +class AttributeFilter(object): # Apply filtering rules to attributes. # # Upon initialization, an AttributeFilter object will take all attribute @@ -59,46 +60,45 @@ class AttributeFilter(object): # 4. Return the resulting bitfield after all rules have been applied. def __init__(self, flattened_settings): - self.enabled_destinations = self._set_enabled_destinations(flattened_settings) self.rules = self._build_rules(flattened_settings) self.cache = {} def __repr__(self): - return "" % ( - bin(self.enabled_destinations), self.rules) + return "" % (bin(self.enabled_destinations), self.rules) def _set_enabled_destinations(self, settings): - # Determines and returns bitfield representing attribute destinations enabled. 
enabled_destinations = DST_NONE - if settings.get('transaction_segments.attributes.enabled', None): + if settings.get("transaction_segments.attributes.enabled", None): enabled_destinations |= DST_TRANSACTION_SEGMENTS - if settings.get('span_events.attributes.enabled', None): + if settings.get("span_events.attributes.enabled", None): enabled_destinations |= DST_SPAN_EVENTS - if settings.get('transaction_tracer.attributes.enabled', None): + if settings.get("transaction_tracer.attributes.enabled", None): enabled_destinations |= DST_TRANSACTION_TRACER - if settings.get('transaction_events.attributes.enabled', None): + if settings.get("transaction_events.attributes.enabled", None): enabled_destinations |= DST_TRANSACTION_EVENTS - if settings.get('error_collector.attributes.enabled', None): + if settings.get("error_collector.attributes.enabled", None): enabled_destinations |= DST_ERROR_COLLECTOR - if settings.get('browser_monitoring.attributes.enabled', None): + if settings.get("browser_monitoring.attributes.enabled", None): enabled_destinations |= DST_BROWSER_MONITORING - if not settings.get('attributes.enabled', None): + if settings.get("application_logging.forwarding.context_data.enabled", None): + enabled_destinations |= DST_LOG_EVENT_CONTEXT_DATA + + if not settings.get("attributes.enabled", None): enabled_destinations = DST_NONE return enabled_destinations def _build_rules(self, settings): - # "Rule Templates" below are used for building AttributeFilterRules. # # Each tuple includes: @@ -107,26 +107,27 @@ def _build_rules(self, settings): # 3. Boolean that represents whether the setting is an "include" or not. 
rule_templates = ( - ('attributes.include', DST_ALL, True), - ('attributes.exclude', DST_ALL, False), - ('transaction_events.attributes.include', DST_TRANSACTION_EVENTS, True), - ('transaction_events.attributes.exclude', DST_TRANSACTION_EVENTS, False), - ('transaction_tracer.attributes.include', DST_TRANSACTION_TRACER, True), - ('transaction_tracer.attributes.exclude', DST_TRANSACTION_TRACER, False), - ('error_collector.attributes.include', DST_ERROR_COLLECTOR, True), - ('error_collector.attributes.exclude', DST_ERROR_COLLECTOR, False), - ('browser_monitoring.attributes.include', DST_BROWSER_MONITORING, True), - ('browser_monitoring.attributes.exclude', DST_BROWSER_MONITORING, False), - ('span_events.attributes.include', DST_SPAN_EVENTS, True), - ('span_events.attributes.exclude', DST_SPAN_EVENTS, False), - ('transaction_segments.attributes.include', DST_TRANSACTION_SEGMENTS, True), - ('transaction_segments.attributes.exclude', DST_TRANSACTION_SEGMENTS, False), + ("attributes.include", DST_ALL, True), + ("attributes.exclude", DST_ALL, False), + ("transaction_events.attributes.include", DST_TRANSACTION_EVENTS, True), + ("transaction_events.attributes.exclude", DST_TRANSACTION_EVENTS, False), + ("transaction_tracer.attributes.include", DST_TRANSACTION_TRACER, True), + ("transaction_tracer.attributes.exclude", DST_TRANSACTION_TRACER, False), + ("error_collector.attributes.include", DST_ERROR_COLLECTOR, True), + ("error_collector.attributes.exclude", DST_ERROR_COLLECTOR, False), + ("browser_monitoring.attributes.include", DST_BROWSER_MONITORING, True), + ("browser_monitoring.attributes.exclude", DST_BROWSER_MONITORING, False), + ("span_events.attributes.include", DST_SPAN_EVENTS, True), + ("span_events.attributes.exclude", DST_SPAN_EVENTS, False), + ("transaction_segments.attributes.include", DST_TRANSACTION_SEGMENTS, True), + ("transaction_segments.attributes.exclude", DST_TRANSACTION_SEGMENTS, False), + ("application_logging.forwarding.context_data.include", 
DST_LOG_EVENT_CONTEXT_DATA, True), + ("application_logging.forwarding.context_data.exclude", DST_LOG_EVENT_CONTEXT_DATA, False), ) rules = [] - for (setting_name, destination, is_include) in rule_templates: - + for setting_name, destination, is_include in rule_templates: for setting in settings.get(setting_name) or (): rule = AttributeFilterRule(setting, destination, is_include) rules.append(rule) @@ -157,16 +158,15 @@ def apply(self, name, default_destinations): self.cache[cache_index] = destinations return destinations -class AttributeFilterRule(object): +class AttributeFilterRule(object): def __init__(self, name, destinations, is_include): - self.name = name.rstrip('*') + self.name = name.rstrip("*") self.destinations = destinations self.is_include = is_include - self.is_wildcard = name.endswith('*') + self.is_wildcard = name.endswith("*") def _as_sortable(self): - # Represent AttributeFilterRule as a tuple that will sort properly. # # Sorting rules: @@ -207,8 +207,7 @@ def __ge__(self, other): return self._as_sortable() >= other._as_sortable() def __repr__(self): - return '(%s, %s, %s, %s)' % (self.name, bin(self.destinations), - self.is_wildcard, self.is_include) + return "(%s, %s, %s, %s)" % (self.name, bin(self.destinations), self.is_wildcard, self.is_include) def name_match(self, name): if self.is_wildcard: diff --git a/newrelic/core/config.py b/newrelic/core/config.py index aba1db10b..f627356c7 100644 --- a/newrelic/core/config.py +++ b/newrelic/core/config.py @@ -902,6 +902,12 @@ def default_otlp_host(host): _settings.application_logging.forwarding.context_data.enabled = _environ_as_bool( "NEW_RELIC_APPLICATION_LOGGING_FORWARDING_CONTEXT_DATA_ENABLED", default=False ) +_settings.application_logging.forwarding.context_data.include = _environ_as_set( + "NEW_RELIC_APPLICATION_LOGGING_FORWARDING_CONTEXT_DATA_INCLUDE", default="" +) +_settings.application_logging.forwarding.context_data.exclude = _environ_as_set( + 
"NEW_RELIC_APPLICATION_LOGGING_FORWARDING_CONTEXT_DATA_EXCLUDE", default="" +) _settings.application_logging.metrics.enabled = _environ_as_bool( "NEW_RELIC_APPLICATION_LOGGING_METRICS_ENABLED", default=True ) diff --git a/newrelic/core/stats_engine.py b/newrelic/core/stats_engine.py index c39ec5e6e..e09637ee9 100644 --- a/newrelic/core/stats_engine.py +++ b/newrelic/core/stats_engine.py @@ -43,6 +43,7 @@ create_agent_attributes, create_user_attributes, process_user_attribute, + resolve_logging_context_attributes, truncate, ) from newrelic.core.attribute_filter import DST_ERROR_COLLECTOR @@ -1218,7 +1219,6 @@ def record_transaction(self, transaction): ): self._log_events.merge(transaction.log_events, priority=transaction.priority) - def record_log_event(self, message, level=None, timestamp=None, attributes=None, priority=None): settings = self.__settings if not ( @@ -1239,16 +1239,17 @@ def record_log_event(self, message, level=None, timestamp=None, attributes=None, message = truncate(message, MAX_LOG_MESSAGE_LENGTH) - attrs = get_linking_metadata() - if attributes and (settings and settings.application_logging and settings.application_logging.forwarding and settings.application_logging.forwarding.context_data and settings.application_logging.forwarding.context_data.enabled): - # TODO add attibute filtering - attrs.update({"context." 
+ k: safe_json_encode(v, ignore_string_types=True) for k, v in six.iteritems(attributes)}) + collected_attributes = get_linking_metadata() + if attributes and (settings and settings.application_logging.forwarding.context_data.enabled): + context_attributes = resolve_logging_context_attributes(attributes, settings.attribute_filter, "context.") + if context_attributes: + collected_attributes.update(context_attributes) event = LogEventNode( timestamp=timestamp, level=level, message=message, - attributes=attrs, + attributes=collected_attributes, ) if priority is None: diff --git a/newrelic/hooks/logger_logging.py b/newrelic/hooks/logger_logging.py index 27fd7edc6..80cf0fd24 100644 --- a/newrelic/hooks/logger_logging.py +++ b/newrelic/hooks/logger_logging.py @@ -91,7 +91,9 @@ def wrap_callHandlers(wrapped, instance, args, kwargs): try: message = record.getMessage() attrs = filter_record_attributes(record) - record_log_event(message=message, level=level_name, timestamp=int(record.created * 1000), attributes=attrs) + record_log_event( + message=message, level=level_name, timestamp=int(record.created * 1000), attributes=attrs + ) except Exception: pass diff --git a/tests/agent_features/test_log_events.py b/tests/agent_features/test_log_events.py index b62d6a5fc..91585d2ff 100644 --- a/tests/agent_features/test_log_events.py +++ b/tests/agent_features/test_log_events.py @@ -12,22 +12,34 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from newrelic.api.background_task import background_task -from newrelic.api.time_trace import current_trace -from newrelic.api.transaction import current_transaction, record_log_event, ignore_transaction -from testing_support.fixtures import override_application_settings, reset_core_stats_engine +import pytest +from testing_support.fixtures import ( + override_application_settings, + reset_core_stats_engine, +) from testing_support.validators.validate_log_event_count import validate_log_event_count from testing_support.validators.validate_log_event_count_outside_transaction import ( validate_log_event_count_outside_transaction, ) from testing_support.validators.validate_log_events import validate_log_events -from testing_support.validators.validate_log_events_outside_transaction import validate_log_events_outside_transaction +from testing_support.validators.validate_log_events_outside_transaction import ( + validate_log_events_outside_transaction, +) + +from newrelic.api.background_task import background_task +from newrelic.api.time_trace import current_trace +from newrelic.api.transaction import ( + current_transaction, + ignore_transaction, + record_log_event, +) +from newrelic.core.config import _parse_attributes class NonPrintableObject(object): def __str__(self): raise RuntimeError("Unable to print object.") - + __repr__ = __str__ @@ -71,10 +83,11 @@ def exercise_record_log_event(): enable_log_forwarding = override_application_settings( {"application_logging.forwarding.enabled": True, "application_logging.forwarding.context_data.enabled": True} ) +disable_log_forwarding = override_application_settings({"application_logging.forwarding.enabled": False}) + disable_log_attributes = override_application_settings( {"application_logging.forwarding.enabled": True, "application_logging.forwarding.context_data.enabled": False} ) -disable_log_forwarding = override_application_settings({"application_logging.forwarding.enabled": False}) _common_attributes_service_linking 
= { "timestamp": None, @@ -92,6 +105,9 @@ def exercise_record_log_event(): "dict_attr": {"key": "value"}, "non_serializable_attr": NonSerializableObject(), "non_printable_attr": NonPrintableObject(), + "attr_value_too_long": "*" * 256, + "attr_name_too_long_" + ("*" * 237): "value", + "attr_name_with_prefix_too_long_" + ("*" * 220): "value", } _exercise_record_log_event_events = [ @@ -102,10 +118,10 @@ def exercise_record_log_event(): "message": "serialized_attributes", "context.str_attr": "Value", "context.bytes_attr": b"value", - "context.int_attr": "1", - "context.dict_attr": '{"key":"value"}', + "context.int_attr": 1, + "context.dict_attr": "{'key': 'value'}", "context.non_serializable_attr": "", - "context.non_printable_attr": "", + "context.attr_value_too_long": "*" * 255, }, ] _exercise_record_log_event_inside_transaction_events = [ @@ -114,12 +130,21 @@ def exercise_record_log_event(): _exercise_record_log_event_outside_transaction_events = [ combine_dicts(_common_attributes_service_linking, log) for log in _exercise_record_log_event_events ] +_exercise_record_log_event_forgone_attrs = [ + "context.non_printable_attr", + "attr_name_too_long_", + "attr_name_with_prefix_too_long_", +] + # Test Log Forwarding + @enable_log_forwarding def test_record_log_event_inside_transaction(): - @validate_log_events(_exercise_record_log_event_inside_transaction_events) + @validate_log_events( + _exercise_record_log_event_inside_transaction_events, forgone_attrs=_exercise_record_log_event_forgone_attrs + ) @validate_log_event_count(len(_exercise_record_log_event_inside_transaction_events)) @background_task() def test(): @@ -131,7 +156,9 @@ def test(): @enable_log_forwarding @reset_core_stats_engine() def test_record_log_event_outside_transaction(): - @validate_log_events_outside_transaction(_exercise_record_log_event_outside_transaction_events) + @validate_log_events_outside_transaction( + _exercise_record_log_event_outside_transaction_events, 
forgone_attrs=_exercise_record_log_event_forgone_attrs + ) @validate_log_event_count_outside_transaction(len(_exercise_record_log_event_outside_transaction_events)) def test(): exercise_record_log_event() @@ -154,6 +181,7 @@ def test(): _test_log_event_truncation_events = [{"message": "A" * 32768}] + @enable_log_forwarding def test_log_event_truncation_inside_transaction(): @validate_log_events(_test_log_event_truncation_events) @@ -178,6 +206,7 @@ def test(): # Test Log Forwarding Settings + @disable_log_forwarding def test_disabled_record_log_event_inside_transaction(): @validate_log_event_count(0) @@ -197,8 +226,10 @@ def test(): test() + # Test Log Attribute Settings + @disable_log_attributes def test_attributes_disabled_inside_transaction(): @validate_log_events([{"message": "A"}], forgone_attrs=["context.key"]) @@ -219,3 +250,86 @@ def test(): record_log_event("A", attributes={"key": "value"}) test() + + +_test_record_log_event_attribute_filtering_params = [ + ("", "", "context.A", True), + ("", "A", "context.A", False), + ("", "A", "context.B", True), + ("A B", "*", "context.A", True), + ("A B", "*", "context.B", True), + ("A B", "*", "context.C", False), + ("A B", "C", "context.A", True), + ("A B", "C", "context.C", False), + ("A B", "B", "context.A", True), + ("A B", "B", "context.B", False), + ("A", "A *", "context.A", False), + ("A", "A *", "context.B", False), + ("A*", "", "context.A", True), + ("A*", "", "context.AB", True), + ("", "A*", "context.A", False), + ("", "A*", "context.B", True), + ("A*", "AB", "context.AC", True), + ("A*", "AB", "context.AB", False), + ("AB", "A*", "context.AB", True), + ("A*", "AB*", "context.ACB", True), + ("A*", "AB*", "context.ABC", False), + # Linking attributes not affected by filtering + ("", "", "entity.name", True), + ("A", "*", "entity.name", True), + ("", "*", "entity.name", True), +] + + +@pytest.mark.parametrize("include,exclude,attr,expected", _test_record_log_event_attribute_filtering_params) +def 
test_record_log_event_attribute_filtering_inside_transaction(include, exclude, attr, expected):
+    if expected:
+        expected_event = {"required_attrs": [attr]}
+    else:
+        expected_event = {"forgone_attrs": [attr]}
+
+    @override_application_settings(
+        {
+            "application_logging.forwarding.enabled": True,
+            "application_logging.forwarding.context_data.enabled": True,
+            "application_logging.forwarding.context_data.include": _parse_attributes(include),
+            "application_logging.forwarding.context_data.exclude": _parse_attributes(exclude),
+        }
+    )
+    @validate_log_events(**expected_event)
+    @validate_log_event_count(1)
+    @background_task()
+    def test():
+        if attr.startswith("context."):
+            record_log_event("A", attributes={attr[len("context.") :]: 1})
+        else:
+            record_log_event("A")
+
+    test()
+
+
+@pytest.mark.parametrize("include,exclude,attr,expected", _test_record_log_event_attribute_filtering_params)
+@reset_core_stats_engine()
+def test_record_log_event_attribute_filtering_outside_transaction(include, exclude, attr, expected):
+    if expected:
+        expected_event = {"required_attrs": [attr]}
+    else:
+        expected_event = {"forgone_attrs": [attr]}
+
+    @override_application_settings(
+        {
+            "application_logging.forwarding.enabled": True,
+            "application_logging.forwarding.context_data.enabled": True,
+            "application_logging.forwarding.context_data.include": _parse_attributes(include),
+            "application_logging.forwarding.context_data.exclude": _parse_attributes(exclude),
+        }
+    )
+    @validate_log_events_outside_transaction(**expected_event)
+    @validate_log_event_count_outside_transaction(1)
+    def test():
+        if attr.startswith("context."):
+            record_log_event("A", attributes={attr[len("context.") :]: 1})
+        else:
+            record_log_event("A")
+
+    test()
diff --git a/tests/agent_features/test_logs_in_context.py b/tests/agent_features/test_logs_in_context.py
index 01abb853f..8693c0f08 100644
--- a/tests/agent_features/test_logs_in_context.py
+++ b/tests/agent_features/test_logs_in_context.py
@@ -50,7 
+50,7 @@ def log_buffer(caplog): class NonPrintableObject(object): def __str__(self): raise RuntimeError("Unable to print object.") - + __repr__ = __str__ @@ -77,7 +77,7 @@ def test_newrelic_logger_no_error(log_buffer): "second": "baz", }, } - _logger.info(u"Hello %s", u"World", extra=extra) + _logger.info("Hello %s", "World", extra=extra) log_buffer.seek(0) message = json.load(log_buffer) @@ -95,25 +95,25 @@ def test_newrelic_logger_no_error(log_buffer): assert isinstance(line_number, int) expected = { - u"entity.name": u"Python Agent Test (agent_features)", - u"entity.type": u"SERVICE", - u"message": u"Hello World", - u"log.level": u"INFO", - u"logger.name": u"test_logs_in_context", - u"thread.name": u"MainThread", - u"process.name": u"MainProcess", - u"extra.string": u"foo", - u"extra.integer": 1, - u"extra.float": 1.23, - u"extra.null": None, - u"extra.array": [1, 2, 3], - u"extra.bool": True, - u"extra.set": u'["set"]', - u"extra.non_serializable": u"", - u"extra.non_printable": u"", - u"extra.object": { - u"first": u"bar", - u"second": u"baz", + "entity.name": "Python Agent Test (agent_features)", + "entity.type": "SERVICE", + "message": "Hello World", + "log.level": "INFO", + "logger.name": "test_logs_in_context", + "thread.name": "MainThread", + "process.name": "MainProcess", + "extra.string": "foo", + "extra.integer": 1, + "extra.float": 1.23, + "extra.null": None, + "extra.array": [1, 2, 3], + "extra.bool": True, + "extra.set": '["set"]', + "extra.non_serializable": "", + "extra.non_printable": "", + "extra.object": { + "first": "bar", + "second": "baz", }, } expected_extra_txn_keys = ( @@ -127,7 +127,6 @@ def test_newrelic_logger_no_error(log_buffer): assert set(message.keys()) == set(expected_extra_txn_keys) - class ExceptionForTest(ValueError): pass @@ -137,7 +136,7 @@ def test_newrelic_logger_error_inside_transaction(log_buffer): try: raise ExceptionForTest except ExceptionForTest: - _logger.exception(u"oops") + _logger.exception("oops") 
log_buffer.seek(0) message = json.load(log_buffer) @@ -155,16 +154,16 @@ def test_newrelic_logger_error_inside_transaction(log_buffer): assert isinstance(line_number, int) expected = { - u"entity.name": u"Python Agent Test (agent_features)", - u"entity.type": u"SERVICE", - u"message": u"oops", - u"log.level": u"ERROR", - u"logger.name": u"test_logs_in_context", - u"thread.name": u"MainThread", - u"process.name": u"MainProcess", - u"error.class": u"test_logs_in_context:ExceptionForTest", - u"error.message": u"", - u"error.expected": False, + "entity.name": "Python Agent Test (agent_features)", + "entity.type": "SERVICE", + "message": "oops", + "log.level": "ERROR", + "logger.name": "test_logs_in_context", + "thread.name": "MainThread", + "process.name": "MainProcess", + "error.class": "test_logs_in_context:ExceptionForTest", + "error.message": "", + "error.expected": False, } expected_extra_txn_keys = ( "trace.id", @@ -183,7 +182,7 @@ def test_newrelic_logger_error_outside_transaction(log_buffer): try: raise ExceptionForTest except ExceptionForTest: - _logger.exception(u"oops") + _logger.exception("oops") log_buffer.seek(0) message = json.load(log_buffer) @@ -201,15 +200,15 @@ def test_newrelic_logger_error_outside_transaction(log_buffer): assert isinstance(line_number, int) expected = { - u"entity.name": u"Python Agent Test (agent_features)", - u"entity.type": u"SERVICE", - u"message": u"oops", - u"log.level": u"ERROR", - u"logger.name": u"test_logs_in_context", - u"thread.name": u"MainThread", - u"process.name": u"MainProcess", - u"error.class": u"test_logs_in_context:ExceptionForTest", - u"error.message": u"", + "entity.name": "Python Agent Test (agent_features)", + "entity.type": "SERVICE", + "message": "oops", + "log.level": "ERROR", + "logger.name": "test_logs_in_context", + "thread.name": "MainThread", + "process.name": "MainProcess", + "error.class": "test_logs_in_context:ExceptionForTest", + "error.message": "", } expected_extra_txn_keys = ( "entity.guid", 
@@ -222,14 +221,13 @@ def test_newrelic_logger_error_outside_transaction(log_buffer): assert set(message.keys()) == set(expected_extra_txn_keys) - EXPECTED_KEYS_TXN = ( "trace.id", "span.id", "entity.name", "entity.type", "entity.guid", - "hostname", + "hostname", ) EXPECTED_KEYS_NO_TXN = EXPECTED_KEYS_TRACE_ENDED = ( diff --git a/tests/logger_logging/test_attribute_forwarding.py b/tests/logger_logging/test_attribute_forwarding.py index 04879ace6..eea549e70 100644 --- a/tests/logger_logging/test_attribute_forwarding.py +++ b/tests/logger_logging/test_attribute_forwarding.py @@ -12,14 +12,20 @@ # See the License for the specific language governing permissions and # limitations under the License. -from newrelic.api.background_task import background_task -from testing_support.fixtures import reset_core_stats_engine +from testing_support.fixtures import ( + override_application_settings, + reset_core_stats_engine, +) from testing_support.validators.validate_log_event_count import validate_log_event_count -from testing_support.validators.validate_log_event_count_outside_transaction import validate_log_event_count_outside_transaction +from testing_support.validators.validate_log_event_count_outside_transaction import ( + validate_log_event_count_outside_transaction, +) from testing_support.validators.validate_log_events import validate_log_events -from testing_support.validators.validate_log_events_outside_transaction import validate_log_events_outside_transaction -from testing_support.fixtures import override_application_settings +from testing_support.validators.validate_log_events_outside_transaction import ( + validate_log_events_outside_transaction, +) +from newrelic.api.background_task import background_task _event_attributes = {"message": "A", "context.key": "value"} @@ -28,9 +34,12 @@ def exercise_logging(logger): logger.error("A", extra={"key": "value"}) assert len(logger.caplog.records) == 1 -@override_application_settings({ - 
"application_logging.forwarding.context_data.enabled": True, -}) + +@override_application_settings( + { + "application_logging.forwarding.context_data.enabled": True, + } +) def test_attributes_inside_transaction(logger): @validate_log_events([_event_attributes]) @validate_log_event_count(1) @@ -42,9 +51,11 @@ def test(): @reset_core_stats_engine() -@override_application_settings({ - "application_logging.forwarding.context_data.enabled": True, -}) +@override_application_settings( + { + "application_logging.forwarding.context_data.enabled": True, + } +) def test_attributes_outside_transaction(logger): @validate_log_events_outside_transaction([_event_attributes]) @validate_log_event_count_outside_transaction(1) diff --git a/tests/logger_logging/test_logging_handler.py b/tests/logger_logging/test_logging_handler.py index 25b1cc804..dce99f9f8 100644 --- a/tests/logger_logging/test_logging_handler.py +++ b/tests/logger_logging/test_logging_handler.py @@ -12,23 +12,35 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from conftest import logger as conf_logger import logging + import pytest +from conftest import logger as conf_logger +from testing_support.fixtures import ( + override_application_settings, + reset_core_stats_engine, +) +from testing_support.validators.validate_function_called import validate_function_called +from testing_support.validators.validate_log_event_count import validate_log_event_count +from testing_support.validators.validate_log_event_count_outside_transaction import ( + validate_log_event_count_outside_transaction, +) +from testing_support.validators.validate_log_events import validate_log_events +from testing_support.validators.validate_log_events_outside_transaction import ( + validate_log_events_outside_transaction, +) from newrelic.api.background_task import background_task from newrelic.api.log import NewRelicLogForwardingHandler -from testing_support.fixtures import reset_core_stats_engine, override_application_settings -from testing_support.validators.validate_log_event_count import validate_log_event_count -from testing_support.validators.validate_log_event_count_outside_transaction import validate_log_event_count_outside_transaction -from testing_support.validators.validate_function_called import validate_function_called -from testing_support.validators.validate_log_events import validate_log_events -from testing_support.validators.validate_log_events_outside_transaction import validate_log_events_outside_transaction -from newrelic.api.transaction import current_transaction from newrelic.api.time_trace import current_trace +from newrelic.api.transaction import current_transaction - -_common_attributes_service_linking = {"timestamp": None, "hostname": None, "entity.name": "Python Agent Test (logger_logging)", "entity.guid": None} +_common_attributes_service_linking = { + "timestamp": None, + "hostname": None, + "entity.name": "Python Agent Test (logger_logging)", + "entity.guid": None, +} _common_attributes_trace_linking = {"span.id": 
"abcdefgh", "trace.id": "abcdefgh12345678"} @@ -64,11 +76,27 @@ def exercise_logging(logger): assert len(logger.caplog.records) == 1 -@override_application_settings({ - "application_logging.forwarding.context_data.enabled": True, -}) +@override_application_settings( + { + "application_logging.forwarding.context_data.enabled": True, + } +) def test_handler_inside_transaction(logger): - @validate_log_events([{"message": "C", "level": "WARNING", "timestamp": None, "hostname": None, "entity.name": "Python Agent Test (logger_logging)", "entity.guid": None, "span.id": "abcdefgh", "trace.id": "abcdefgh12345678", "context.key": "value"}]) + @validate_log_events( + [ + { + "message": "C", + "level": "WARNING", + "timestamp": None, + "hostname": None, + "entity.name": "Python Agent Test (logger_logging)", + "entity.guid": None, + "span.id": "abcdefgh", + "trace.id": "abcdefgh12345678", + "context.key": "value", + } + ] + ) @validate_log_event_count(1) @validate_function_called("newrelic.api.log", "NewRelicLogForwardingHandler.emit") @background_task() @@ -79,11 +107,25 @@ def test(): @reset_core_stats_engine() -@override_application_settings({ - "application_logging.forwarding.context_data.enabled": True, -}) +@override_application_settings( + { + "application_logging.forwarding.context_data.enabled": True, + } +) def test_handler_outside_transaction(logger): - @validate_log_events_outside_transaction([{"message": "C", "level": "WARNING", "timestamp": None, "hostname": None, "entity.name": "Python Agent Test (logger_logging)", "entity.guid": None, "context.key": "value"}]) + @validate_log_events_outside_transaction( + [ + { + "message": "C", + "level": "WARNING", + "timestamp": None, + "hostname": None, + "entity.name": "Python Agent Test (logger_logging)", + "entity.guid": None, + "context.key": "value", + } + ] + ) @validate_log_event_count_outside_transaction(1) @validate_function_called("newrelic.api.log", "NewRelicLogForwardingHandler.emit") def test(): diff --git 
a/tests/logger_loguru/conftest.py b/tests/logger_loguru/conftest.py index 55fc7edd6..1be79f89b 100644 --- a/tests/logger_loguru/conftest.py +++ b/tests/logger_loguru/conftest.py @@ -13,10 +13,12 @@ # limitations under the License. import logging -import pytest - -from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 +import pytest +from testing_support.fixtures import ( # noqa: F401; pylint: disable=W0611 + collector_agent_registration_fixture, + collector_available_fixture, +) _default_settings = { "transaction_tracer.explain_threshold": 0.0, @@ -42,6 +44,7 @@ class CaplogHandler(logging.StreamHandler): To prevent possible issues with pytest's monkey patching use a custom Caplog handler to capture all records """ + def __init__(self, *args, **kwargs): self.records = [] super(CaplogHandler, self).__init__(*args, **kwargs) diff --git a/tests/logger_loguru/test_attribute_forwarding.py b/tests/logger_loguru/test_attribute_forwarding.py index bb5da5362..d81608e3c 100644 --- a/tests/logger_loguru/test_attribute_forwarding.py +++ b/tests/logger_loguru/test_attribute_forwarding.py @@ -12,14 +12,20 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from newrelic.api.background_task import background_task -from testing_support.fixtures import reset_core_stats_engine +from testing_support.fixtures import ( + override_application_settings, + reset_core_stats_engine, +) from testing_support.validators.validate_log_event_count import validate_log_event_count -from testing_support.validators.validate_log_event_count_outside_transaction import validate_log_event_count_outside_transaction +from testing_support.validators.validate_log_event_count_outside_transaction import ( + validate_log_event_count_outside_transaction, +) from testing_support.validators.validate_log_events import validate_log_events -from testing_support.validators.validate_log_events_outside_transaction import validate_log_events_outside_transaction -from testing_support.fixtures import override_application_settings +from testing_support.validators.validate_log_events_outside_transaction import ( + validate_log_events_outside_transaction, +) +from newrelic.api.background_task import background_task _event_attributes = {"message": "A", "context.key": "value"} @@ -28,9 +34,12 @@ def exercise_logging(logger): logger.bind(key="value").error("A") assert len(logger.caplog.records) == 1 -@override_application_settings({ - "application_logging.forwarding.context_data.enabled": True, -}) + +@override_application_settings( + { + "application_logging.forwarding.context_data.enabled": True, + } +) def test_attributes_inside_transaction(logger): @validate_log_events([_event_attributes]) @validate_log_event_count(1) @@ -42,9 +51,11 @@ def test(): @reset_core_stats_engine() -@override_application_settings({ - "application_logging.forwarding.context_data.enabled": True, -}) +@override_application_settings( + { + "application_logging.forwarding.context_data.enabled": True, + } +) def test_attributes_outside_transaction(logger): @validate_log_events_outside_transaction([_event_attributes]) @validate_log_event_count_outside_transaction(1) diff --git 
a/tests/testing_support/validators/validate_log_event_count.py b/tests/testing_support/validators/validate_log_event_count.py index e4f58a60a..13f6c4e47 100644 --- a/tests/testing_support/validators/validate_log_event_count.py +++ b/tests/testing_support/validators/validate_log_event_count.py @@ -14,14 +14,14 @@ import copy -from newrelic.common.object_wrapper import (transient_function_wrapper, - function_wrapper) from testing_support.fixtures import catch_background_exceptions +from newrelic.common.object_wrapper import function_wrapper, transient_function_wrapper + + def validate_log_event_count(count=1): @function_wrapper def _validate_wrapper(wrapped, instance, args, kwargs): - record_called = [] recorded_logs = [] @@ -43,7 +43,7 @@ def _validate_log_event_count(wrapped, instance, args, kwargs): if count: assert record_called logs = copy.copy(recorded_logs) - + record_called[:] = [] recorded_logs[:] = [] diff --git a/tests/testing_support/validators/validate_log_event_count_outside_transaction.py b/tests/testing_support/validators/validate_log_event_count_outside_transaction.py index 8dd29c22e..e04c9df85 100644 --- a/tests/testing_support/validators/validate_log_event_count_outside_transaction.py +++ b/tests/testing_support/validators/validate_log_event_count_outside_transaction.py @@ -14,14 +14,14 @@ import copy -from newrelic.common.object_wrapper import (transient_function_wrapper, - function_wrapper) from testing_support.fixtures import catch_background_exceptions +from newrelic.common.object_wrapper import function_wrapper, transient_function_wrapper + + def validate_log_event_count_outside_transaction(count=1): @function_wrapper def _validate_wrapper(wrapped, instance, args, kwargs): - record_called = [] recorded_logs = [] @@ -44,7 +44,7 @@ def _validate_log_event_count_outside_transaction(wrapped, instance, args, kwarg if count: assert record_called logs = copy.copy(recorded_logs) - + record_called[:] = [] recorded_logs[:] = [] diff --git 
a/tests/testing_support/validators/validate_log_events.py b/tests/testing_support/validators/validate_log_events.py index c8618b390..4008d0e52 100644 --- a/tests/testing_support/validators/validate_log_events.py +++ b/tests/testing_support/validators/validate_log_events.py @@ -14,19 +14,21 @@ import copy +from testing_support.fixtures import catch_background_exceptions + +from newrelic.common.object_wrapper import function_wrapper, transient_function_wrapper from newrelic.packages import six -from newrelic.common.object_wrapper import (transient_function_wrapper, - function_wrapper) -from testing_support.fixtures import catch_background_exceptions -def validate_log_events(events=None, forgone_attrs=None): - events = events or [{}] # Empty event allows assertions based on only forgone attrs to still run and validate +def validate_log_events(events=None, required_attrs=None, forgone_attrs=None): + events = events or [ + {} + ] # Empty event allows assertions based on only required or forgone attrs to still run and validate + required_attrs = required_attrs or [] forgone_attrs = forgone_attrs or [] @function_wrapper def _validate_wrapper(wrapped, instance, args, kwargs): - record_called = [] recorded_logs = [] @@ -44,12 +46,11 @@ def _validate_log_events(wrapped, instance, args, kwargs): return result - _new_wrapper = _validate_log_events(wrapped) val = _new_wrapper(*args, **kwargs) assert record_called logs = copy.copy(recorded_logs) - + record_called[:] = [] recorded_logs[:] = [] @@ -57,14 +58,13 @@ def _validate_log_events(wrapped, instance, args, kwargs): matching_log_events = 0 mismatches = [] for captured in logs: - if _check_log_attributes(expected, forgone_attrs, captured, mismatches): + if _check_log_attributes(expected, required_attrs, forgone_attrs, captured, mismatches): matching_log_events += 1 assert matching_log_events == 1, _log_details(matching_log_events, logs, mismatches) return val - - def _check_log_attributes(expected, forgone_attrs, captured, 
mismatches): + def _check_log_attributes(expected, required_attrs, forgone_attrs, captured, mismatches): for key, value in six.iteritems(expected): if hasattr(captured, key): captured_value = getattr(captured, key, None) @@ -79,12 +79,19 @@ def _check_log_attributes(expected, forgone_attrs, captured, mismatches): mismatches.append("key: %s, value:<%s><%s>" % (key, value, captured_value)) return False - for key in forgone_attrs: - if hasattr(captured, key): - mismatches.append("forgone_key: %s, value:<%s>" % (key, getattr(captured, key, None))) + for key in required_attrs: + if not hasattr(captured, key) and key not in captured.attributes: + mismatches.append("required_key: %s" % key) return False - elif key in captured.attributes: - mismatches.append("forgone_key: %s, value:<%s>" % (key, captured.attributes[key])) + + for key in forgone_attrs: + if hasattr(captured, key) or key in captured.attributes: + if hasattr(captured, key): + captured_value = getattr(captured, key, None) + elif key in captured.attributes: + captured_value = captured.attributes[key] + + mismatches.append("forgone_key: %s, value:<%s>" % (key, captured_value)) return False return True diff --git a/tests/testing_support/validators/validate_log_events_outside_transaction.py b/tests/testing_support/validators/validate_log_events_outside_transaction.py index 35c89a362..6c17d089b 100644 --- a/tests/testing_support/validators/validate_log_events_outside_transaction.py +++ b/tests/testing_support/validators/validate_log_events_outside_transaction.py @@ -20,13 +20,13 @@ from newrelic.packages import six -def validate_log_events_outside_transaction(events=None, forgone_attrs=None): +def validate_log_events_outside_transaction(events=None, required_attrs=None, forgone_attrs=None): events = events or [{}] # Empty event allows assertions based on only forgone attrs to still run and validate + required_attrs = required_attrs or [] forgone_attrs = forgone_attrs or [] @function_wrapper def 
_validate_wrapper(wrapped, instance, args, kwargs): - record_called = [] recorded_logs = [] @@ -55,14 +55,13 @@ def _validate_log_events_outside_transaction(wrapped, instance, args, kwargs): matching_log_events = 0 mismatches = [] for captured in logs: - if _check_log_attributes(expected, forgone_attrs, captured, mismatches): + if _check_log_attributes(expected, required_attrs, forgone_attrs, captured, mismatches): matching_log_events += 1 assert matching_log_events == 1, _log_details(matching_log_events, logs, mismatches) return val - - def _check_log_attributes(expected, forgone_attrs, captured, mismatches): + def _check_log_attributes(expected, required_attrs, forgone_attrs, captured, mismatches): for key, value in six.iteritems(expected): if hasattr(captured, key): captured_value = getattr(captured, key, None) @@ -77,12 +76,19 @@ def _check_log_attributes(expected, forgone_attrs, captured, mismatches): mismatches.append("key: %s, value:<%s><%s>" % (key, value, captured_value)) return False - for key in forgone_attrs: - if hasattr(captured, key): - mismatches.append("forgone_key: %s, value:<%s>" % (key, getattr(captured, key, None))) + for key in required_attrs: + if not hasattr(captured, key) and key not in captured.attributes: + mismatches.append("required_key: %s" % key) return False - elif key in captured.attributes: - mismatches.append("forgone_key: %s, value:<%s>" % (key, captured.attributes[key])) + + for key in forgone_attrs: + if hasattr(captured, key) or key in captured.attributes: + if hasattr(captured, key): + captured_value = getattr(captured, key, None) + elif key in captured.attributes: + captured_value = captured.attributes[key] + + mismatches.append("forgone_key: %s, value:<%s>" % (key, captured_value)) return False return True