From 9a56f3a267d068bc947623633a2876982f93559c Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Mon, 10 Oct 2022 10:00:25 -0700 Subject: [PATCH 001/108] Update Versioning Scheme (#651) * Update versioning scheme to 3 semver digits * Fix version indexing Co-authored-by: Hannah Stepanek Co-authored-by: Lalleh Rafeei * Remove version truncation * [Mega-Linter] Apply linters fixes * Bump tests Co-authored-by: Hannah Stepanek Co-authored-by: Lalleh Rafeei Co-authored-by: TimPansino --- newrelic/__init__.py | 7 ++++--- setup.py | 9 +++------ 2 files changed, 7 insertions(+), 9 deletions(-) diff --git a/newrelic/__init__.py b/newrelic/__init__.py index b142f593e..2a5828f8f 100644 --- a/newrelic/__init__.py +++ b/newrelic/__init__.py @@ -13,12 +13,13 @@ # limitations under the License. import os.path + THIS_DIR = os.path.dirname(__file__) try: - with open(os.path.join(THIS_DIR, 'version.txt'), 'r') as f: + with open(os.path.join(THIS_DIR, "version.txt"), "r") as f: version = f.read() except: - version = '0.0.0.0' + version = "0.0.0" -version_info = list(map(int, version.split('.'))) +version_info = list(map(int, version.split("."))) diff --git a/setup.py b/setup.py index cdb4ac091..ff1560936 100644 --- a/setup.py +++ b/setup.py @@ -45,13 +45,10 @@ def newrelic_agent_guess_next_version(tag_version): version, _, _ = str(tag_version).partition("+") version_info = list(map(int, version.split("."))) - if len(version_info) < 4: + if len(version_info) < 3: return version version_info[1] += 1 - if version_info[1] % 2: - version_info[3] = 0 - else: - version_info[3] += 1 + version_info[2] = 0 return ".".join(map(str, version_info)) @@ -134,7 +131,7 @@ def build_extension(self, ext): use_scm_version={ "version_scheme": newrelic_agent_next_version, "local_scheme": "no-local-version", - "git_describe_command": "git describe --dirty --tags --long --match *.*.*.*", + "git_describe_command": "git describe --dirty --tags --long --match 
*.*.*", "write_to": "newrelic/version.txt", }, setup_requires=["setuptools_scm>=3.2,<7"], From 65246e7f37cedc2897adb399fdab24b0a9dcafbb Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Mon, 10 Oct 2022 10:35:00 -0700 Subject: [PATCH 002/108] Fix Trace Finalizer Crashes (#652) * Patch crashes in various traces with None settings * Add tests for graphql trace types to unittests * Add test to ensure traces don't crash in finalizer * [Mega-Linter] Apply linters fixes * Bump tests Co-authored-by: TimPansino Co-authored-by: Lalleh Rafeei <84813886+lrafeei@users.noreply.github.com> --- newrelic/api/database_trace.py | 5 +- newrelic/api/graphql_trace.py | 7 +- newrelic/api/transaction.py | 36 +- tests/agent_features/test_span_events.py | 735 +++++++++--------- tests/agent_features/test_time_trace.py | 37 +- .../test_transaction_trace_segments.py | 166 ++-- 6 files changed, 548 insertions(+), 438 deletions(-) diff --git a/newrelic/api/database_trace.py b/newrelic/api/database_trace.py index 09dfa1e11..2bc497688 100644 --- a/newrelic/api/database_trace.py +++ b/newrelic/api/database_trace.py @@ -127,6 +127,7 @@ def _log_async_warning(self): def finalize_data(self, transaction, exc=None, value=None, tb=None): self.stack_trace = None + self.sql_format = "off" connect_params = None cursor_params = None @@ -206,8 +207,8 @@ def finalize_data(self, transaction, exc=None, value=None, tb=None): transaction._explain_plan_count += 1 self.sql_format = ( - tt.record_sql if tt.record_sql else "" - ) # If tt.record_sql is None, then the empty string will default to sql being obfuscated + tt.record_sql if tt.record_sql else "off" + ) # If tt.record_sql is None, then default to sql being off self.connect_params = connect_params self.cursor_params = cursor_params self.sql_parameters = sql_parameters diff --git a/newrelic/api/graphql_trace.py b/newrelic/api/graphql_trace.py index 0fef99c70..7a2c9ec02 100644 --- a/newrelic/api/graphql_trace.py 
+++ b/newrelic/api/graphql_trace.py @@ -69,8 +69,13 @@ def finalize_data(self, transaction, exc=None, value=None, tb=None): self._add_agent_attribute("graphql.operation.type", self.operation_type) self._add_agent_attribute("graphql.operation.name", self.operation_name) + settings = transaction.settings + if settings and settings.agent_limits and settings.agent_limits.sql_query_length_maximum: + limit = transaction.settings.agent_limits.sql_query_length_maximum + else: + limit = 0 + # Attach formatted graphql - limit = transaction.settings.agent_limits.sql_query_length_maximum self.graphql = graphql = self.formatted[:limit] self._add_agent_attribute("graphql.operation.query", graphql) diff --git a/newrelic/api/transaction.py b/newrelic/api/transaction.py index f486989b4..f7d7595cb 100644 --- a/newrelic/api/transaction.py +++ b/newrelic/api/transaction.py @@ -25,13 +25,12 @@ import weakref from collections import OrderedDict -from newrelic.api.application import application_instance import newrelic.core.database_node import newrelic.core.error_node -from newrelic.core.log_event_node import LogEventNode import newrelic.core.root_node import newrelic.core.transaction_node import newrelic.packages.six as six +from newrelic.api.application import application_instance from newrelic.api.time_trace import TimeTrace, get_linking_metadata from newrelic.common.encoding_utils import ( DistributedTracePayload, @@ -63,6 +62,7 @@ ) from newrelic.core.config import DEFAULT_RESERVOIR_SIZE, LOG_EVENT_RESERVOIR_SIZE from newrelic.core.custom_event import create_custom_event +from newrelic.core.log_event_node import LogEventNode from newrelic.core.stack_trace import exception_stack from newrelic.core.stats_engine import CustomMetrics, SampledDataSet from newrelic.core.thread_utilization import utilization_tracker @@ -324,8 +324,12 @@ def __init__(self, application, enabled=None, source=None): self.enabled = True if self._settings: - self._custom_events = 
SampledDataSet(capacity=self._settings.event_harvest_config.harvest_limits.custom_event_data) - self._log_events = SampledDataSet(capacity=self._settings.event_harvest_config.harvest_limits.log_event_data) + self._custom_events = SampledDataSet( + capacity=self._settings.event_harvest_config.harvest_limits.custom_event_data + ) + self._log_events = SampledDataSet( + capacity=self._settings.event_harvest_config.harvest_limits.log_event_data + ) else: self._custom_events = SampledDataSet(capacity=DEFAULT_RESERVOIR_SIZE) self._log_events = SampledDataSet(capacity=LOG_EVENT_RESERVOIR_SIZE) @@ -1473,31 +1477,35 @@ def set_transaction_name(self, name, group=None, priority=None): self._group = group self._name = name - def record_log_event(self, message, level=None, timestamp=None, priority=None): settings = self.settings - if not (settings and settings.application_logging and settings.application_logging.enabled and settings.application_logging.forwarding and settings.application_logging.forwarding.enabled): + if not ( + settings + and settings.application_logging + and settings.application_logging.enabled + and settings.application_logging.forwarding + and settings.application_logging.forwarding.enabled + ): return - + timestamp = timestamp if timestamp is not None else time.time() level = str(level) if level is not None else "UNKNOWN" - + if not message or message.isspace(): _logger.debug("record_log_event called where message was missing. 
No log event will be sent.") return - + message = truncate(message, MAX_LOG_MESSAGE_LENGTH) event = LogEventNode( timestamp=timestamp, level=level, message=message, - attributes=get_linking_metadata(), + attributes=get_linking_metadata(), ) self._log_events.add(event, priority=priority) - def record_exception(self, exc=None, value=None, tb=None, params=None, ignore_errors=None): # Deprecation Warning warnings.warn( @@ -1603,6 +1611,8 @@ def _process_node(self, node): if type(node) is newrelic.core.database_node.DatabaseNode: settings = self._settings + if not settings: + return if not settings.collect_traces: return if not settings.slow_sql.enabled and not settings.transaction_tracer.explain_enabled: @@ -1869,7 +1879,9 @@ def record_log_event(message, level=None, timestamp=None, application=None, prio "record_log_event has been called but no transaction or application was running. As a result, " "the following event has not been recorded. message: %r level: %r timestamp %r. To correct " "this problem, supply an application object as a parameter to this record_log_event call.", - message, level, timestamp, + message, + level, + timestamp, ) elif application.enabled: application.record_log_event(message, level, timestamp, priority=priority) diff --git a/tests/agent_features/test_span_events.py b/tests/agent_features/test_span_events.py index 0024c1b8b..4fda858c0 100644 --- a/tests/agent_features/test_span_events.py +++ b/tests/agent_features/test_span_events.py @@ -12,97 +12,95 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import pytest import sys -from newrelic.api.transaction import current_transaction -from newrelic.api.time_trace import (current_trace, - add_custom_span_attribute, notice_error) -from newrelic.api.background_task import background_task -from newrelic.common.object_names import callable_name +import pytest +from testing_support.fixtures import ( + dt_enabled, + function_not_called, + override_application_settings, + validate_transaction_event_attributes, + validate_transaction_metrics, + validate_tt_segment_params, +) +from testing_support.validators.validate_span_events import validate_span_events +from newrelic.api.background_task import background_task from newrelic.api.database_trace import DatabaseTrace from newrelic.api.datastore_trace import DatastoreTrace from newrelic.api.external_trace import ExternalTrace from newrelic.api.function_trace import FunctionTrace, function_trace +from newrelic.api.graphql_trace import GraphQLOperationTrace, GraphQLResolverTrace from newrelic.api.memcache_trace import MemcacheTrace from newrelic.api.message_trace import MessageTrace from newrelic.api.solr_trace import SolrTrace - -from testing_support.fixtures import (override_application_settings, - function_not_called, validate_tt_segment_params, - validate_transaction_metrics, dt_enabled, - validate_transaction_event_attributes) -from testing_support.validators.validate_span_events import ( - validate_span_events) +from newrelic.api.time_trace import ( + add_custom_span_attribute, + current_trace, + notice_error, +) +from newrelic.api.transaction import current_transaction +from newrelic.common.object_names import callable_name ERROR = ValueError("whoops") ERROR_NAME = callable_name(ERROR) -@pytest.mark.parametrize('dt_enabled', (True, False)) -@pytest.mark.parametrize('span_events_enabled', (True, False)) -@pytest.mark.parametrize('txn_sampled', (True, False)) +@pytest.mark.parametrize("dt_enabled", (True, False)) +@pytest.mark.parametrize("span_events_enabled", (True, 
False)) +@pytest.mark.parametrize("txn_sampled", (True, False)) def test_span_events(dt_enabled, span_events_enabled, txn_sampled): - guid = 'dbb536c53b749e0b' - sentinel_guid = '0687e0c371ea2c4e' - function_guid = '482439c52de807ee' - transaction_name = 'OtherTransaction/Function/transaction' + guid = "dbb536c53b749e0b" + sentinel_guid = "0687e0c371ea2c4e" + function_guid = "482439c52de807ee" + transaction_name = "OtherTransaction/Function/transaction" priority = 0.5 - @function_trace(name='child') + @function_trace(name="child") def child(): pass - @function_trace(name='function') + @function_trace(name="function") def function(): current_trace().guid = function_guid child() - _settings = { - 'distributed_tracing.enabled': dt_enabled, - 'span_events.enabled': span_events_enabled - } + _settings = {"distributed_tracing.enabled": dt_enabled, "span_events.enabled": span_events_enabled} count = 0 if dt_enabled and span_events_enabled and txn_sampled: count = 1 exact_intrinsics_common = { - 'type': 'Span', - 'transactionId': guid, - 'sampled': txn_sampled, - 'priority': priority, - 'category': 'generic', + "type": "Span", + "transactionId": guid, + "sampled": txn_sampled, + "priority": priority, + "category": "generic", } - expected_intrinsics = ('timestamp', 'duration') + expected_intrinsics = ("timestamp", "duration") exact_intrinsics_root = exact_intrinsics_common.copy() - exact_intrinsics_root['name'] = 'Function/transaction' - exact_intrinsics_root['transaction.name'] = transaction_name - exact_intrinsics_root['nr.entryPoint'] = True + exact_intrinsics_root["name"] = "Function/transaction" + exact_intrinsics_root["transaction.name"] = transaction_name + exact_intrinsics_root["nr.entryPoint"] = True exact_intrinsics_function = exact_intrinsics_common.copy() - exact_intrinsics_function['name'] = 'Function/function' - exact_intrinsics_function['parentId'] = sentinel_guid + exact_intrinsics_function["name"] = "Function/function" + 
exact_intrinsics_function["parentId"] = sentinel_guid exact_intrinsics_child = exact_intrinsics_common.copy() - exact_intrinsics_child['name'] = 'Function/child' - exact_intrinsics_child['parentId'] = function_guid - - @validate_span_events(count=count, - expected_intrinsics=['nr.entryPoint']) - @validate_span_events(count=count, - exact_intrinsics=exact_intrinsics_root, - expected_intrinsics=expected_intrinsics) - @validate_span_events(count=count, - exact_intrinsics=exact_intrinsics_function, - expected_intrinsics=expected_intrinsics) - @validate_span_events(count=count, - exact_intrinsics=exact_intrinsics_child, - expected_intrinsics=expected_intrinsics) + exact_intrinsics_child["name"] = "Function/child" + exact_intrinsics_child["parentId"] = function_guid + + @validate_span_events(count=count, expected_intrinsics=["nr.entryPoint"]) + @validate_span_events(count=count, exact_intrinsics=exact_intrinsics_root, expected_intrinsics=expected_intrinsics) + @validate_span_events( + count=count, exact_intrinsics=exact_intrinsics_function, expected_intrinsics=expected_intrinsics + ) + @validate_span_events(count=count, exact_intrinsics=exact_intrinsics_child, expected_intrinsics=expected_intrinsics) @override_application_settings(_settings) - @background_task(name='transaction') + @background_task(name="transaction") def _test(): # Force intrinsics txn = current_transaction() @@ -116,22 +114,29 @@ def _test(): _test() -@pytest.mark.parametrize('trace_type,args', ( - (DatabaseTrace, ('select * from foo', )), - (DatastoreTrace, ('db_product', 'db_target', 'db_operation')), - (ExternalTrace, ('lib', 'url')), - (FunctionTrace, ('name', )), - (MemcacheTrace, ('command', )), - (MessageTrace, ('lib', 'operation', 'dst_type', 'dst_name')), - (SolrTrace, ('lib', 'command')), -)) +@pytest.mark.parametrize( + "trace_type,args", + ( + (DatabaseTrace, ("select * from foo",)), + (DatastoreTrace, ("db_product", "db_target", "db_operation")), + (ExternalTrace, ("lib", "url")), + 
(FunctionTrace, ("name",)), + (GraphQLOperationTrace, ()), + (GraphQLResolverTrace, ()), + (MemcacheTrace, ("command",)), + (MessageTrace, ("lib", "operation", "dst_type", "dst_name")), + (SolrTrace, ("lib", "command")), + ), +) def test_each_span_type(trace_type, args): @validate_span_events(count=2) - @override_application_settings({ - 'distributed_tracing.enabled': True, - 'span_events.enabled': True, - }) - @background_task(name='test_each_span_type') + @override_application_settings( + { + "distributed_tracing.enabled": True, + "span_events.enabled": True, + } + ) + @background_task(name="test_each_span_type") def _test(): transaction = current_transaction() @@ -143,37 +148,37 @@ def _test(): _test() -@pytest.mark.parametrize('sql,sql_format,expected', ( - pytest.param( - 'a' * 2001, - 'raw', - ''.join(['a'] * 1997 + ['...']), - id='truncate'), - pytest.param( - 'a' * 2000, - 'raw', - ''.join(['a'] * 2000), - id='no_truncate'), - pytest.param( - 'select * from %s' % ''.join(['?'] * 2000), - 'obfuscated', - 'select * from %s...' % ( - ''.join(['?'] * (2000 - len('select * from ') - 3))), - id='truncate_obfuscated'), - pytest.param('select 1', 'off', ''), - pytest.param('select 1', 'raw', 'select 1'), - pytest.param('select 1', 'obfuscated', 'select ?'), -)) +@pytest.mark.parametrize( + "sql,sql_format,expected", + ( + pytest.param("a" * 2001, "raw", "".join(["a"] * 1997 + ["..."]), id="truncate"), + pytest.param("a" * 2000, "raw", "".join(["a"] * 2000), id="no_truncate"), + pytest.param( + "select * from %s" % "".join(["?"] * 2000), + "obfuscated", + "select * from %s..." 
% ("".join(["?"] * (2000 - len("select * from ") - 3))), + id="truncate_obfuscated", + ), + pytest.param("select 1", "off", ""), + pytest.param("select 1", "raw", "select 1"), + pytest.param("select 1", "obfuscated", "select ?"), + ), +) def test_database_db_statement_format(sql, sql_format, expected): - @validate_span_events(count=1, exact_agents={ - 'db.statement': expected, - }) - @override_application_settings({ - 'distributed_tracing.enabled': True, - 'span_events.enabled': True, - 'transaction_tracer.record_sql': sql_format, - }) - @background_task(name='test_database_db_statement_format') + @validate_span_events( + count=1, + exact_agents={ + "db.statement": expected, + }, + ) + @override_application_settings( + { + "distributed_tracing.enabled": True, + "span_events.enabled": True, + "transaction_tracer.record_sql": sql_format, + } + ) + @background_task(name="test_database_db_statement_format") def _test(): transaction = current_transaction() transaction._sampled = True @@ -186,115 +191,130 @@ def _test(): @validate_span_events( count=1, - exact_intrinsics={'category': 'datastore'}, - unexpected_agents=['db.statement'], + exact_intrinsics={"category": "datastore"}, + unexpected_agents=["db.statement"], +) +@override_application_settings( + { + "distributed_tracing.enabled": True, + "span_events.enabled": True, + "span_events.attributes.exclude": ["db.statement"], + } ) -@override_application_settings({ - 'distributed_tracing.enabled': True, - 'span_events.enabled': True, - 'span_events.attributes.exclude': ['db.statement'], -}) -@background_task(name='test_database_db_statement_exclude') +@background_task(name="test_database_db_statement_exclude") def test_database_db_statement_exclude(): transaction = current_transaction() transaction._sampled = True - with DatabaseTrace('select 1'): + with DatabaseTrace("select 1"): pass -@pytest.mark.parametrize('trace_type,args,attrs', ( - (DatastoreTrace, ('db_product', 'db_target', 'db_operation'), {"db.collection": 
"db_target", "db.operation": "db_operation"}), - (DatabaseTrace, ("select 1 from db_table",), {"db.collection": "db_table", "db.statement": "select ? from db_table"}), -)) +@pytest.mark.parametrize( + "trace_type,args,attrs", + ( + ( + DatastoreTrace, + ("db_product", "db_target", "db_operation"), + {"db.collection": "db_target", "db.operation": "db_operation"}, + ), + ( + DatabaseTrace, + ("select 1 from db_table",), + {"db.collection": "db_table", "db.statement": "select ? from db_table"}, + ), + ), +) def test_datastore_database_trace_attrs(trace_type, args, attrs): @validate_span_events( count=1, exact_agents=attrs, ) - @override_application_settings({ - 'distributed_tracing.enabled': True, - 'span_events.enabled': True, - }) - @background_task(name='test_database_db_statement_exclude') + @override_application_settings( + { + "distributed_tracing.enabled": True, + "span_events.enabled": True, + } + ) + @background_task(name="test_database_db_statement_exclude") def test(): transaction = current_transaction() transaction._sampled = True with trace_type(*args): pass - + test() -@pytest.mark.parametrize('exclude_url', (True, False)) +@pytest.mark.parametrize("exclude_url", (True, False)) def test_external_spans(exclude_url): override_settings = { - 'distributed_tracing.enabled': True, - 'span_events.enabled': True, + "distributed_tracing.enabled": True, + "span_events.enabled": True, } if exclude_url: - override_settings['span_events.attributes.exclude'] = ['http.url'] + override_settings["span_events.attributes.exclude"] = ["http.url"] exact_agents = {} - unexpected_agents = ['http.url'] + unexpected_agents = ["http.url"] else: - exact_agents = {'http.url': 'http://example.com/foo'} + exact_agents = {"http.url": "http://example.com/foo"} unexpected_agents = [] @validate_span_events( count=1, exact_intrinsics={ - 'name': 'External/example.com/library/get', - 'type': 'Span', - 'sampled': True, - - 'category': 'http', - 'span.kind': 'client', - 'component': 
'library', - 'http.method': 'get', + "name": "External/example.com/library/get", + "type": "Span", + "sampled": True, + "category": "http", + "span.kind": "client", + "component": "library", + "http.method": "get", }, exact_agents=exact_agents, unexpected_agents=unexpected_agents, - expected_intrinsics=('priority',), + expected_intrinsics=("priority",), ) @override_application_settings(override_settings) - @background_task(name='test_external_spans') + @background_task(name="test_external_spans") def _test(): transaction = current_transaction() transaction._sampled = True - with ExternalTrace( - library='library', - url='http://example.com/foo?secret=123', - method='get'): + with ExternalTrace(library="library", url="http://example.com/foo?secret=123", method="get"): pass _test() -@pytest.mark.parametrize('kwarg_override,attr_override', ( - ({'url': 'a' * 256}, {'http.url': 'a' * 255}), - ({'library': 'a' * 256}, {'component': 'a' * 255}), - ({'method': 'a' * 256}, {'http.method': 'a' * 255}), -)) -@override_application_settings({ - 'distributed_tracing.enabled': True, - 'span_events.enabled': True, -}) +@pytest.mark.parametrize( + "kwarg_override,attr_override", + ( + ({"url": "a" * 256}, {"http.url": "a" * 255}), + ({"library": "a" * 256}, {"component": "a" * 255}), + ({"method": "a" * 256}, {"http.method": "a" * 255}), + ), +) +@override_application_settings( + { + "distributed_tracing.enabled": True, + "span_events.enabled": True, + } +) def test_external_span_limits(kwarg_override, attr_override): exact_intrinsics = { - 'type': 'Span', - 'sampled': True, - - 'category': 'http', - 'span.kind': 'client', - 'component': 'library', - 'http.method': 'get', + "type": "Span", + "sampled": True, + "category": "http", + "span.kind": "client", + "component": "library", + "http.method": "get", } exact_agents = { - 'http.url': 'http://example.com/foo', + "http.url": "http://example.com/foo", } for attr_name, attr_value in attr_override.items(): if attr_name in 
exact_agents: @@ -303,9 +323,9 @@ def test_external_span_limits(kwarg_override, attr_override): exact_intrinsics[attr_name] = attr_value kwargs = { - 'library': 'library', - 'url': 'http://example.com/foo?secret=123', - 'method': 'get', + "library": "library", + "url": "http://example.com/foo?secret=123", + "method": "get", } kwargs.update(kwarg_override) @@ -313,9 +333,9 @@ def test_external_span_limits(kwarg_override, attr_override): count=1, exact_intrinsics=exact_intrinsics, exact_agents=exact_agents, - expected_intrinsics=('priority',), + expected_intrinsics=("priority",), ) - @background_task(name='test_external_spans') + @background_task(name="test_external_spans") def _test(): transaction = current_transaction() transaction._sampled = True @@ -326,32 +346,34 @@ def _test(): _test() -@pytest.mark.parametrize('kwarg_override,attribute_override', ( - ({'host': 'a' * 256}, - {'peer.hostname': 'a' * 255, 'peer.address': 'a' * 255}), - ({'port_path_or_id': 'a' * 256, 'host': 'a'}, - {'peer.hostname': 'a', 'peer.address': 'a:' + 'a' * 253}), - ({'database_name': 'a' * 256}, {'db.instance': 'a' * 255}), -)) -@override_application_settings({ - 'distributed_tracing.enabled': True, - 'span_events.enabled': True, -}) +@pytest.mark.parametrize( + "kwarg_override,attribute_override", + ( + ({"host": "a" * 256}, {"peer.hostname": "a" * 255, "peer.address": "a" * 255}), + ({"port_path_or_id": "a" * 256, "host": "a"}, {"peer.hostname": "a", "peer.address": "a:" + "a" * 253}), + ({"database_name": "a" * 256}, {"db.instance": "a" * 255}), + ), +) +@override_application_settings( + { + "distributed_tracing.enabled": True, + "span_events.enabled": True, + } +) def test_datastore_span_limits(kwarg_override, attribute_override): exact_intrinsics = { - 'type': 'Span', - 'sampled': True, - - 'category': 'datastore', - 'span.kind': 'client', - 'component': 'library', + "type": "Span", + "sampled": True, + "category": "datastore", + "span.kind": "client", + "component": "library", } 
exact_agents = { - 'db.instance': 'db', - 'peer.hostname': 'foo', - 'peer.address': 'foo:1234', + "db.instance": "db", + "peer.hostname": "foo", + "peer.address": "foo:1234", } for k, v in attribute_override.items(): @@ -361,22 +383,22 @@ def test_datastore_span_limits(kwarg_override, attribute_override): exact_intrinsics[k] = v kwargs = { - 'product': 'library', - 'target': 'table', - 'operation': 'operation', - 'host': 'foo', - 'port_path_or_id': 1234, - 'database_name': 'db', + "product": "library", + "target": "table", + "operation": "operation", + "host": "foo", + "port_path_or_id": 1234, + "database_name": "db", } kwargs.update(kwarg_override) @validate_span_events( count=1, exact_intrinsics=exact_intrinsics, - expected_intrinsics=('priority',), + expected_intrinsics=("priority",), exact_agents=exact_agents, ) - @background_task(name='test_external_spans') + @background_task(name="test_external_spans") def _test(): transaction = current_transaction() transaction._sampled = True @@ -387,10 +409,9 @@ def _test(): _test() -@pytest.mark.parametrize('collect_span_events', (False, True)) -@pytest.mark.parametrize('span_events_enabled', (False, True)) -def test_collect_span_events_override(collect_span_events, - span_events_enabled): +@pytest.mark.parametrize("collect_span_events", (False, True)) +@pytest.mark.parametrize("span_events_enabled", (False, True)) +def test_collect_span_events_override(collect_span_events, span_events_enabled): if collect_span_events and span_events_enabled: spans_expected = True @@ -400,63 +421,64 @@ def test_collect_span_events_override(collect_span_events, span_count = 2 if spans_expected else 0 @validate_span_events(count=span_count) - @override_application_settings({ - 'transaction_tracer.enabled': False, - 'distributed_tracing.enabled': True, - 'span_events.enabled': span_events_enabled, - 'collect_span_events': collect_span_events - }) - @background_task(name='test_collect_span_events_override') + @override_application_settings( + 
{ + "transaction_tracer.enabled": False, + "distributed_tracing.enabled": True, + "span_events.enabled": span_events_enabled, + "collect_span_events": collect_span_events, + } + ) + @background_task(name="test_collect_span_events_override") def _test(): transaction = current_transaction() transaction._sampled = True - with FunctionTrace('span_generator'): + with FunctionTrace("span_generator"): pass if not spans_expected: - _test = function_not_called( - 'newrelic.core.attribute', - 'resolve_agent_attributes')(_test) + _test = function_not_called("newrelic.core.attribute", "resolve_agent_attributes")(_test) _test() -@pytest.mark.parametrize('include_attribues', (True, False)) +@pytest.mark.parametrize("include_attribues", (True, False)) def test_span_event_agent_attributes(include_attribues): override_settings = { - 'distributed_tracing.enabled': True, - 'span_events.enabled': True, + "distributed_tracing.enabled": True, + "span_events.enabled": True, } if include_attribues: count = 1 - override_settings['attributes.include'] = ['*'] + override_settings["attributes.include"] = ["*"] else: count = 0 @override_application_settings(override_settings) + @validate_span_events(count=count, expected_agents=["webfrontend.queue.seconds"]) @validate_span_events( - count=count, expected_agents=['webfrontend.queue.seconds']) - @validate_span_events( - count=count, - exact_agents={'trace1_a': 'foobar', 'trace1_b': 'barbaz'}, - unexpected_agents=['trace2_a', 'trace2_b']) + count=count, + exact_agents={"trace1_a": "foobar", "trace1_b": "barbaz"}, + unexpected_agents=["trace2_a", "trace2_b"], + ) @validate_span_events( - count=count, - exact_agents={'trace2_a': 'foobar', 'trace2_b': 'barbaz'}, - unexpected_agents=['trace1_a', 'trace1_b']) - @background_task(name='test_span_event_agent_attributes') + count=count, + exact_agents={"trace2_a": "foobar", "trace2_b": "barbaz"}, + unexpected_agents=["trace1_a", "trace1_b"], + ) + @background_task(name="test_span_event_agent_attributes") 
def _test(): transaction = current_transaction() transaction.queue_start = 1.0 transaction._sampled = True - with FunctionTrace('trace1') as trace_1: - trace_1._add_agent_attribute('trace1_a', 'foobar') - trace_1._add_agent_attribute('trace1_b', 'barbaz') - with FunctionTrace('trace2') as trace_2: - trace_2._add_agent_attribute('trace2_a', 'foobar') - trace_2._add_agent_attribute('trace2_b', 'barbaz') + with FunctionTrace("trace1") as trace_1: + trace_1._add_agent_attribute("trace1_a", "foobar") + trace_1._add_agent_attribute("trace1_b", "barbaz") + with FunctionTrace("trace2") as trace_2: + trace_2._add_agent_attribute("trace2_a", "foobar") + trace_2._add_agent_attribute("trace2_b", "barbaz") _test() @@ -469,31 +491,36 @@ def __exit__(self, *args): pass -@pytest.mark.parametrize('trace_type,args', ( - (DatabaseTrace, ('select * from foo', )), - (DatastoreTrace, ('db_product', 'db_target', 'db_operation')), - (ExternalTrace, ('lib', 'url')), - (FunctionTrace, ('name', )), - (MemcacheTrace, ('command', )), - (MessageTrace, ('lib', 'operation', 'dst_type', 'dst_name')), - (SolrTrace, ('lib', 'command')), - (FakeTrace, ()), -)) -@pytest.mark.parametrize('exclude_attributes', (True, False)) +@pytest.mark.parametrize( + "trace_type,args", + ( + (DatabaseTrace, ("select * from foo",)), + (DatastoreTrace, ("db_product", "db_target", "db_operation")), + (ExternalTrace, ("lib", "url")), + (FunctionTrace, ("name",)), + (MemcacheTrace, ("command",)), + (MessageTrace, ("lib", "operation", "dst_type", "dst_name")), + (SolrTrace, ("lib", "command")), + (FakeTrace, ()), + ), +) +@pytest.mark.parametrize("exclude_attributes", (True, False)) def test_span_event_user_attributes(trace_type, args, exclude_attributes): _settings = { - 'distributed_tracing.enabled': True, - 'span_events.enabled': True, + "distributed_tracing.enabled": True, + "span_events.enabled": True, } - forgone_params = ['invalid_value', ] - expected_params = {'trace1_a': 'foobar', 'trace1_b': 'barbaz'} + 
forgone_params = [ + "invalid_value", + ] + expected_params = {"trace1_a": "foobar", "trace1_b": "barbaz"} # We expect user_attributes to be included by default if exclude_attributes: count = 0 - _settings['attributes.exclude'] = ['*'] - forgone_params.extend(('trace1_a', 'trace1_b')) + _settings["attributes.exclude"] = ["*"] + forgone_params.extend(("trace1_a", "trace1_b")) expected_trace_params = {} else: expected_trace_params = expected_params @@ -503,44 +530,44 @@ def test_span_event_user_attributes(trace_type, args, exclude_attributes): @validate_span_events( count=count, exact_users=expected_params, - unexpected_users=forgone_params,) - @validate_tt_segment_params(exact_params=expected_trace_params, - forgone_params=forgone_params) - @background_task(name='test_span_event_user_attributes') + unexpected_users=forgone_params, + ) + @validate_tt_segment_params(exact_params=expected_trace_params, forgone_params=forgone_params) + @background_task(name="test_span_event_user_attributes") def _test(): transaction = current_transaction() transaction._sampled = True with trace_type(*args): - add_custom_span_attribute('trace1_a', 'foobar') - add_custom_span_attribute('trace1_b', 'barbaz') - add_custom_span_attribute('invalid_value', sys.maxsize + 1) + add_custom_span_attribute("trace1_a", "foobar") + add_custom_span_attribute("trace1_b", "barbaz") + add_custom_span_attribute("invalid_value", sys.maxsize + 1) _test() -@validate_span_events(count=1, exact_users={'foo': 'b'}) +@validate_span_events(count=1, exact_users={"foo": "b"}) @dt_enabled -@background_task(name='test_span_user_attribute_overrides_transaction_attribute') +@background_task(name="test_span_user_attribute_overrides_transaction_attribute") def test_span_user_attribute_overrides_transaction_attribute(): transaction = current_transaction() - transaction.add_custom_parameter('foo', 'a') - add_custom_span_attribute('foo', 'b') - transaction.add_custom_parameter('foo', 'c') + 
transaction.add_custom_parameter("foo", "a") + add_custom_span_attribute("foo", "b") + transaction.add_custom_parameter("foo", "c") -@override_application_settings({'attributes.include': '*'}) -@validate_span_events(count=1, exact_agents={'foo': 'b'}) +@override_application_settings({"attributes.include": "*"}) +@validate_span_events(count=1, exact_agents={"foo": "b"}) @dt_enabled -@background_task(name='test_span_agent_attribute_overrides_transaction_attribute') +@background_task(name="test_span_agent_attribute_overrides_transaction_attribute") def test_span_agent_attribute_overrides_transaction_attribute(): transaction = current_transaction() trace = current_trace() - transaction._add_agent_attribute('foo', 'a') - trace._add_agent_attribute('foo', 'b') - transaction._add_agent_attribute('foo', 'c') + transaction._add_agent_attribute("foo", "a") + trace._add_agent_attribute("foo", "b") + transaction._add_agent_attribute("foo", "c") def test_span_custom_attribute_limit(): @@ -555,71 +582,71 @@ def test_span_custom_attribute_limit(): for i in range(128): if i < 64: - span_custom_attrs.append('span_attr%i' % i) - txn_custom_attrs.append('txn_attr%i' % i) + span_custom_attrs.append("span_attr%i" % i) + txn_custom_attrs.append("txn_attr%i" % i) unexpected_txn_attrs.extend(span_custom_attrs) span_custom_attrs.extend(txn_custom_attrs[:64]) - expected_txn_attrs = {'user': txn_custom_attrs, 'agent': [], - 'intrinsic': []} - expected_absent_txn_attrs = {'agent': [], - 'user': unexpected_txn_attrs, - 'intrinsic': []} - - @override_application_settings({'attributes.include': '*'}) - @validate_transaction_event_attributes(expected_txn_attrs, - expected_absent_txn_attrs) - @validate_span_events(count=1, - expected_users=span_custom_attrs, - unexpected_users=txn_custom_attrs[64:]) + expected_txn_attrs = {"user": txn_custom_attrs, "agent": [], "intrinsic": []} + expected_absent_txn_attrs = {"agent": [], "user": unexpected_txn_attrs, "intrinsic": []} + + 
@override_application_settings({"attributes.include": "*"}) + @validate_transaction_event_attributes(expected_txn_attrs, expected_absent_txn_attrs) + @validate_span_events(count=1, expected_users=span_custom_attrs, unexpected_users=txn_custom_attrs[64:]) @dt_enabled - @background_task(name='test_span_attribute_limit') + @background_task(name="test_span_attribute_limit") def _test(): transaction = current_transaction() for i in range(128): - transaction.add_custom_parameter('txn_attr%i' % i, 'txnValue') + transaction.add_custom_parameter("txn_attr%i" % i, "txnValue") if i < 64: - add_custom_span_attribute('span_attr%i' % i, 'spanValue') + add_custom_span_attribute("span_attr%i" % i, "spanValue") + _test() _span_event_metrics = [("Supportability/SpanEvent/Errors/Dropped", None)] -@pytest.mark.parametrize('trace_type,args', ( - (DatabaseTrace, ('select * from foo', )), - (DatastoreTrace, ('db_product', 'db_target', 'db_operation')), - (ExternalTrace, ('lib', 'url')), - (FunctionTrace, ('name', )), - (MemcacheTrace, ('command', )), - (MessageTrace, ('lib', 'operation', 'dst_type', 'dst_name')), - (SolrTrace, ('lib', 'command')), - (FakeTrace, ()), -)) +@pytest.mark.parametrize( + "trace_type,args", + ( + (DatabaseTrace, ("select * from foo",)), + (DatastoreTrace, ("db_product", "db_target", "db_operation")), + (ExternalTrace, ("lib", "url")), + (FunctionTrace, ("name",)), + (GraphQLOperationTrace, ()), + (GraphQLResolverTrace, ()), + (MemcacheTrace, ("command",)), + (MessageTrace, ("lib", "operation", "dst_type", "dst_name")), + (SolrTrace, ("lib", "command")), + (FakeTrace, ()), + ), +) def test_span_event_error_attributes_notice_error(trace_type, args): _settings = { - 'distributed_tracing.enabled': True, - 'span_events.enabled': True, + "distributed_tracing.enabled": True, + "span_events.enabled": True, } error = ValueError("whoops") exact_agents = { - 'error.class': callable_name(error), - 'error.message': 'whoops', + "error.class": callable_name(error), + 
"error.message": "whoops", } @override_application_settings(_settings) @validate_transaction_metrics( - 'test_span_event_error_attributes_notice_error', - background_task=True, - rollup_metrics=_span_event_metrics) + "test_span_event_error_attributes_notice_error", background_task=True, rollup_metrics=_span_event_metrics + ) @validate_span_events( count=1, - exact_agents=exact_agents,) - @background_task(name='test_span_event_error_attributes_notice_error') + exact_agents=exact_agents, + ) + @background_task(name="test_span_event_error_attributes_notice_error") def _test(): transaction = current_transaction() transaction._sampled = True @@ -633,36 +660,41 @@ def _test(): _test() -@pytest.mark.parametrize('trace_type,args', ( - (DatabaseTrace, ('select * from foo', )), - (DatastoreTrace, ('db_product', 'db_target', 'db_operation')), - (ExternalTrace, ('lib', 'url')), - (FunctionTrace, ('name', )), - (MemcacheTrace, ('command', )), - (MessageTrace, ('lib', 'operation', 'dst_type', 'dst_name')), - (SolrTrace, ('lib', 'command')), -)) +@pytest.mark.parametrize( + "trace_type,args", + ( + (DatabaseTrace, ("select * from foo",)), + (DatastoreTrace, ("db_product", "db_target", "db_operation")), + (ExternalTrace, ("lib", "url")), + (FunctionTrace, ("name",)), + (GraphQLOperationTrace, ()), + (GraphQLResolverTrace, ()), + (MemcacheTrace, ("command",)), + (MessageTrace, ("lib", "operation", "dst_type", "dst_name")), + (SolrTrace, ("lib", "command")), + ), +) def test_span_event_error_attributes_observed(trace_type, args): error = ValueError("whoops") exact_agents = { - 'error.class': callable_name(error), - 'error.message': 'whoops', + "error.class": callable_name(error), + "error.message": "whoops", } # Verify errors are not recorded since notice_error is not called - rollups = [('Errors/all', None)] + _span_event_metrics + rollups = [("Errors/all", None)] + _span_event_metrics @dt_enabled @validate_transaction_metrics( - 'test_span_event_error_attributes_observed', - 
background_task=True, - rollup_metrics=rollups) + "test_span_event_error_attributes_observed", background_task=True, rollup_metrics=rollups + ) @validate_span_events( count=1, - exact_agents=exact_agents,) - @background_task(name='test_span_event_error_attributes_observed') + exact_agents=exact_agents, + ) + @background_task(name="test_span_event_error_attributes_observed") def _test(): try: with trace_type(*args): @@ -673,20 +705,24 @@ def _test(): _test() -@pytest.mark.parametrize('trace_type,args', ( - (DatabaseTrace, ('select * from foo', )), - (DatastoreTrace, ('db_product', 'db_target', 'db_operation')), - (ExternalTrace, ('lib', 'url')), - (FunctionTrace, ('name', )), - (MemcacheTrace, ('command', )), - (MessageTrace, ('lib', 'operation', 'dst_type', 'dst_name')), - (SolrTrace, ('lib', 'command')), - (FakeTrace, ()), -)) +@pytest.mark.parametrize( + "trace_type,args", + ( + (DatabaseTrace, ("select * from foo",)), + (DatastoreTrace, ("db_product", "db_target", "db_operation")), + (ExternalTrace, ("lib", "url")), + (FunctionTrace, ("name",)), + (GraphQLOperationTrace, ()), + (GraphQLResolverTrace, ()), + (MemcacheTrace, ("command",)), + (MessageTrace, ("lib", "operation", "dst_type", "dst_name")), + (SolrTrace, ("lib", "command")), + (FakeTrace, ()), + ), +) @dt_enabled -@validate_span_events(count=1, - exact_agents={'error.class': ERROR_NAME, 'error.message': 'whoops'}) -@background_task(name='test_span_event_notice_error_overrides_observed') +@validate_span_events(count=1, exact_agents={"error.class": ERROR_NAME, "error.message": "whoops"}) +@background_task(name="test_span_event_notice_error_overrides_observed") def test_span_event_notice_error_overrides_observed(trace_type, args): try: with trace_type(*args): @@ -699,21 +735,24 @@ def test_span_event_notice_error_overrides_observed(trace_type, args): pass -@pytest.mark.parametrize('trace_type,args', ( - (DatabaseTrace, ('select * from foo', )), - (DatastoreTrace, ('db_product', 'db_target', 
'db_operation')), - (ExternalTrace, ('lib', 'url')), - (FunctionTrace, ('name', )), - (MemcacheTrace, ('command', )), - (MessageTrace, ('lib', 'operation', 'dst_type', 'dst_name')), - (SolrTrace, ('lib', 'command')), - (FakeTrace, ()), -)) -@override_application_settings({'error_collector.enabled': False}) -@validate_span_events(count=0, expected_agents=['error.class']) -@validate_span_events(count=0, expected_agents=['error.message']) +@pytest.mark.parametrize( + "trace_type,args", + ( + (DatabaseTrace, ("select * from foo",)), + (DatastoreTrace, ("db_product", "db_target", "db_operation")), + (ExternalTrace, ("lib", "url")), + (FunctionTrace, ("name",)), + (MemcacheTrace, ("command",)), + (MessageTrace, ("lib", "operation", "dst_type", "dst_name")), + (SolrTrace, ("lib", "command")), + (FakeTrace, ()), + ), +) +@override_application_settings({"error_collector.enabled": False}) +@validate_span_events(count=0, expected_agents=["error.class"]) +@validate_span_events(count=0, expected_agents=["error.message"]) @dt_enabled -@background_task(name='test_span_event_errors_disabled') +@background_task(name="test_span_event_errors_disabled") def test_span_event_errors_disabled(trace_type, args): with trace_type(*args): try: @@ -725,32 +764,34 @@ def test_span_event_errors_disabled(trace_type, args): _metrics = [("Supportability/SpanEvent/Errors/Dropped", 2)] -@pytest.mark.parametrize('trace_type,args', ( - (FunctionTrace, ('name', )), - (FakeTrace, ()), -)) +@pytest.mark.parametrize( + "trace_type,args", + ( + (FunctionTrace, ("name",)), + (FakeTrace, ()), + ), +) def test_span_event_multiple_errors(trace_type, args): _settings = { - 'distributed_tracing.enabled': True, - 'span_events.enabled': True, + "distributed_tracing.enabled": True, + "span_events.enabled": True, } error = ValueError("whoops") exact_agents = { - 'error.class': callable_name(error), - 'error.message': 'whoops', + "error.class": callable_name(error), + "error.message": "whoops", "error.expected": 
False, } @override_application_settings(_settings) @validate_span_events( count=1, - exact_agents=exact_agents,) - @validate_transaction_metrics("test_span_event_multiple_errors", - background_task=True, - rollup_metrics=_metrics) - @background_task(name='test_span_event_multiple_errors') + exact_agents=exact_agents, + ) + @validate_transaction_metrics("test_span_event_multiple_errors", background_task=True, rollup_metrics=_metrics) + @background_task(name="test_span_event_multiple_errors") def _test(): transaction = current_transaction() transaction._sampled = True diff --git a/tests/agent_features/test_time_trace.py b/tests/agent_features/test_time_trace.py index f81e8750d..449b7dc97 100644 --- a/tests/agent_features/test_time_trace.py +++ b/tests/agent_features/test_time_trace.py @@ -14,11 +14,19 @@ import logging +import pytest from testing_support.fixtures import validate_transaction_metrics from newrelic.api.background_task import background_task +from newrelic.api.database_trace import DatabaseTrace +from newrelic.api.datastore_trace import DatastoreTrace +from newrelic.api.external_trace import ExternalTrace from newrelic.api.function_trace import FunctionTrace -from newrelic.api.transaction import end_of_transaction +from newrelic.api.graphql_trace import GraphQLOperationTrace, GraphQLResolverTrace +from newrelic.api.memcache_trace import MemcacheTrace +from newrelic.api.message_trace import MessageTrace +from newrelic.api.solr_trace import SolrTrace +from newrelic.api.transaction import current_transaction, end_of_transaction @validate_transaction_metrics( @@ -34,3 +42,30 @@ def test_trace_after_end_of_transaction(caplog): error_messages = [record for record in caplog.records if record.levelno >= logging.ERROR] assert not error_messages + + +@pytest.mark.parametrize( + "trace_type,args", + ( + (DatabaseTrace, ("select * from foo",)), + (DatastoreTrace, ("db_product", "db_target", "db_operation")), + (ExternalTrace, ("lib", "url")), + (FunctionTrace, 
("name",)), + (GraphQLOperationTrace, ()), + (GraphQLResolverTrace, ()), + (MemcacheTrace, ("command",)), + (MessageTrace, ("lib", "operation", "dst_type", "dst_name")), + (SolrTrace, ("lib", "command")), + ), +) +@background_task() +def test_trace_finalizes_with_transaction_missing_settings(monkeypatch, trace_type, args): + txn = current_transaction() + try: + with trace_type(*args): + # Validate no errors are raised when finalizing trace with no settings + monkeypatch.setattr(txn, "_settings", None) + finally: + # Ensure transaction still has settings when it exits to prevent other crashes making errors hard to read + monkeypatch.undo() + assert txn.settings diff --git a/tests/agent_features/test_transaction_trace_segments.py b/tests/agent_features/test_transaction_trace_segments.py index ad4d02b18..b205afc3c 100644 --- a/tests/agent_features/test_transaction_trace_segments.py +++ b/tests/agent_features/test_transaction_trace_segments.py @@ -13,142 +13,158 @@ # limitations under the License. 
import pytest +from testing_support.fixtures import ( + override_application_settings, + validate_tt_segment_params, +) -from newrelic.api.transaction import current_transaction from newrelic.api.background_task import background_task - from newrelic.api.database_trace import DatabaseTrace from newrelic.api.datastore_trace import DatastoreTrace -from newrelic.api.external_trace import external_trace, ExternalTrace +from newrelic.api.external_trace import ExternalTrace, external_trace from newrelic.api.function_trace import FunctionTrace +from newrelic.api.graphql_trace import GraphQLOperationTrace, GraphQLResolverTrace from newrelic.api.memcache_trace import MemcacheTrace from newrelic.api.message_trace import MessageTrace from newrelic.api.solr_trace import SolrTrace - -from testing_support.fixtures import (override_application_settings, - validate_tt_segment_params) +from newrelic.api.transaction import current_transaction -@external_trace('lib', 'https://example.com/path?q=q#frag') +@external_trace("lib", "https://example.com/path?q=q#frag") def external(): pass -@validate_tt_segment_params(present_params=('http.url',)) -@background_task(name='test_external_segment_attributes_default') +@validate_tt_segment_params(present_params=("http.url",)) +@background_task(name="test_external_segment_attributes_default") def test_external_segment_attributes_default(): external() -@override_application_settings({ - 'transaction_segments.attributes.exclude': ['http.url'], -}) -@validate_tt_segment_params(forgone_params=('http.url',)) -@background_task(name='test_external_segment_attributes_disabled') +@override_application_settings( + { + "transaction_segments.attributes.exclude": ["http.url"], + } +) +@validate_tt_segment_params(forgone_params=("http.url",)) +@background_task(name="test_external_segment_attributes_disabled") def test_external_segment_attributes_disabled(): external() -@validate_tt_segment_params(exact_params={'http.url': 'http://example.org'}) 
-@background_task(name='test_external_user_params_override_url') +@validate_tt_segment_params(exact_params={"http.url": "http://example.org"}) +@background_task(name="test_external_user_params_override_url") def test_external_user_params_override_url(): - with ExternalTrace('lib', 'http://example.com') as t: + with ExternalTrace("lib", "http://example.com") as t: # Pretend like this is a user attribute and it's legal to do this - t.params['http.url'] = 'http://example.org' + t.params["http.url"] = "http://example.org" -@validate_tt_segment_params(exact_params={'db.instance': 'a' * 255}) -@background_task(name='test_datastore_db_instance_truncation') +@validate_tt_segment_params(exact_params={"db.instance": "a" * 255}) +@background_task(name="test_datastore_db_instance_truncation") def test_datastore_db_instance_truncation(): - with DatastoreTrace('db_product', 'db_target', 'db_operation', - database_name='a' * 256): + with DatastoreTrace("db_product", "db_target", "db_operation", database_name="a" * 256): pass -@validate_tt_segment_params(exact_params={'db.instance': 'a' * 255}) -@background_task(name='test_database_db_instance_truncation') +@validate_tt_segment_params(exact_params={"db.instance": "a" * 255}) +@background_task(name="test_database_db_instance_truncation") def test_database_db_instance_truncation(): - with DatabaseTrace('select * from foo', - database_name='a' * 256): + with DatabaseTrace("select * from foo", database_name="a" * 256): pass -@override_application_settings({ - 'transaction_tracer.record_sql': 'raw', -}) -@validate_tt_segment_params(exact_params={'db.statement': 'select 1'}) -@background_task(name='test_database_db_statement') +@override_application_settings( + { + "transaction_tracer.record_sql": "raw", + } +) +@validate_tt_segment_params(exact_params={"db.statement": "select 1"}) +@background_task(name="test_database_db_statement") def test_database_db_statement_default_enabled(): - with DatabaseTrace('select 1'): + with 
DatabaseTrace("select 1"): pass -@override_application_settings({ - 'transaction_tracer.record_sql': 'raw', - 'agent_limits.sql_query_length_maximum': 1, -}) -@validate_tt_segment_params(exact_params={'db.statement': 'a'}) -@background_task(name='test_database_db_statement_truncation') +@override_application_settings( + { + "transaction_tracer.record_sql": "raw", + "agent_limits.sql_query_length_maximum": 1, + } +) +@validate_tt_segment_params(exact_params={"db.statement": "a"}) +@background_task(name="test_database_db_statement_truncation") def test_database_db_statement_truncation(): - with DatabaseTrace('a' * 2): + with DatabaseTrace("a" * 2): pass -@override_application_settings({ - 'transaction_segments.attributes.exclude': ['db.*'], -}) -@validate_tt_segment_params(forgone_params=('db.instance', 'db.statement')) -@background_task(name='test_database_segment_attributes_disabled') +@override_application_settings( + { + "transaction_segments.attributes.exclude": ["db.*"], + } +) +@validate_tt_segment_params(forgone_params=("db.instance", "db.statement")) +@background_task(name="test_database_segment_attributes_disabled") def test_database_segment_attributes_disabled(): transaction = current_transaction() - with DatabaseTrace('select 1', database_name='foo'): + with DatabaseTrace("select 1", database_name="foo"): pass -@pytest.mark.parametrize('trace_type,args', ( - (DatabaseTrace, ('select * from foo', )), - (DatastoreTrace, ('db_product', 'db_target', 'db_operation')), - (ExternalTrace, ('lib', 'url')), - (FunctionTrace, ('name', )), - (MemcacheTrace, ('command', )), - (MessageTrace, ('lib', 'operation', 'dst_type', 'dst_name')), - (SolrTrace, ('lib', 'command')), -)) +@pytest.mark.parametrize( + "trace_type,args", + ( + (DatabaseTrace, ("select * from foo",)), + (DatastoreTrace, ("db_product", "db_target", "db_operation")), + (ExternalTrace, ("lib", "url")), + (FunctionTrace, ("name",)), + (GraphQLOperationTrace, ()), + (GraphQLResolverTrace, ()), + 
(MemcacheTrace, ("command",)), + (MessageTrace, ("lib", "operation", "dst_type", "dst_name")), + (SolrTrace, ("lib", "command")), + ), +) def test_each_segment_type(trace_type, args): - @validate_tt_segment_params(exact_params={'blah': 'bloo'}) - @override_application_settings({ - 'distributed_tracing.enabled': True, - 'span_events.enabled': True, - 'attributes.include': ['blah'], - }) - @background_task(name='test_each_segment_type') + @validate_tt_segment_params(exact_params={"blah": "bloo"}) + @override_application_settings( + { + "distributed_tracing.enabled": True, + "span_events.enabled": True, + "attributes.include": ["blah"], + } + ) + @background_task(name="test_each_segment_type") def _test(): transaction = current_transaction() transaction._sampled = True with trace_type(*args) as trace: - trace._add_agent_attribute('blah', 'bloo') + trace._add_agent_attribute("blah", "bloo") _test() -@override_application_settings({ - 'distributed_tracing.enabled': True, - 'span_events.enabled': True, - 'attributes.include': ['*'], -}) -@background_task(name='test_attribute_overrides') +@override_application_settings( + { + "distributed_tracing.enabled": True, + "span_events.enabled": True, + "attributes.include": ["*"], + } +) +@background_task(name="test_attribute_overrides") def test_attribute_overrides(): - with FunctionTrace('test_attribute_overrides_trace') as trace: + with FunctionTrace("test_attribute_overrides_trace") as trace: trace.exclusive = 0.1 - trace._add_agent_attribute('exclusive_duration_millis', 0.2) - trace._add_agent_attribute('test_attr', 'a') - trace.add_custom_attribute('exclusive_duration_millis', 0.3) - trace.add_custom_attribute('test_attr', 'b') + trace._add_agent_attribute("exclusive_duration_millis", 0.2) + trace._add_agent_attribute("test_attr", "a") + trace.add_custom_attribute("exclusive_duration_millis", 0.3) + trace.add_custom_attribute("test_attr", "b") node = trace.create_node() params = 
node.get_trace_segment_params(current_transaction().settings) - assert params['exclusive_duration_millis'] == 100 - assert params['test_attr'] == 'b' + assert params["exclusive_duration_millis"] == 100 + assert params["test_attr"] == "b" From b9bca3d9b5009ccc53688f696b125c75dca7ccff Mon Sep 17 00:00:00 2001 From: Uma Annamalai Date: Mon, 17 Oct 2022 12:37:41 -0400 Subject: [PATCH 003/108] Add usage tracking metrics for Kafka clients. (#658) * Add usage tracking metrics for Kafka clients. * Fix double import lint error * [Mega-Linter] Apply linters fixes * Create version util file and add metrics to consumer. * Address linting errors. * Add missing semi-colon. * [Mega-Linter] Apply linters fixes * Bump tests. Co-authored-by: Hannah Stepanek Co-authored-by: hmstepanek Co-authored-by: umaannamalai --- newrelic/api/transaction.py | 9 ++++++++ newrelic/common/package_version_utils.py | 23 +++++++++++++++++++ newrelic/core/environment.py | 17 +++----------- .../hooks/messagebroker_confluentkafka.py | 4 ++++ newrelic/hooks/messagebroker_kafkapython.py | 13 +++++++---- .../test_consumer.py | 3 +++ .../test_producer.py | 3 +++ .../test_consumer.py | 3 +++ .../test_producer.py | 3 +++ 9 files changed, 60 insertions(+), 18 deletions(-) create mode 100644 newrelic/common/package_version_utils.py diff --git a/newrelic/api/transaction.py b/newrelic/api/transaction.py index f7d7595cb..00b344ca6 100644 --- a/newrelic/api/transaction.py +++ b/newrelic/api/transaction.py @@ -186,6 +186,7 @@ def __init__(self, application, enabled=None, source=None): self._loop_time = 0.0 self._frameworks = set() + self._message_brokers = set() self._frozen_path = None @@ -545,6 +546,10 @@ def __exit__(self, exc, value, tb): for framework, version in self._frameworks: self.record_custom_metric("Python/Framework/%s/%s" % (framework, version), 1) + if self._message_brokers: + for message_broker, version in self._message_brokers: + self.record_custom_metric("Python/MessageBroker/%s/%s" % 
(message_broker, version), 1) + if self._settings.distributed_tracing.enabled: # Sampled and priority need to be computed at the end of the # transaction when distributed tracing or span events are enabled. @@ -1676,6 +1681,10 @@ def add_framework_info(self, name, version=None): if name: self._frameworks.add((name, version)) + def add_messagebroker_info(self, name, version=None): + if name: + self._message_brokers.add((name, version)) + def dump(self, file): """Dumps details about the transaction to the file object.""" diff --git a/newrelic/common/package_version_utils.py b/newrelic/common/package_version_utils.py new file mode 100644 index 000000000..c881d7ada --- /dev/null +++ b/newrelic/common/package_version_utils.py @@ -0,0 +1,23 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sys + + +def get_package_version(name): + # importlib was introduced into the standard library starting in Python3.8. 
+ if "importlib" in sys.modules and hasattr(sys.modules["importlib"], "metadata"): + return sys.modules["importlib"].metadata.version(name) # pylint: disable=E1101 + elif "pkg_resources" in sys.modules: + return sys.modules["pkg_resources"].get_distribution(name).version diff --git a/newrelic/core/environment.py b/newrelic/core/environment.py index 9fc6e2dd4..1306816ef 100644 --- a/newrelic/core/environment.py +++ b/newrelic/core/environment.py @@ -23,6 +23,7 @@ import sysconfig import newrelic +from newrelic.common.package_version_utils import get_package_version from newrelic.common.system_info import ( logical_processor_count, physical_processor_count, @@ -37,18 +38,6 @@ def environment_settings(): """Returns an array of arrays of environment settings""" - - # Find version resolver. - - get_version = None - # importlib was introduced into the standard library starting in Python3.8. - if "importlib" in sys.modules and hasattr(sys.modules["importlib"], "metadata"): - get_version = sys.modules["importlib"].metadata.version - elif "pkg_resources" in sys.modules: - - def get_version(name): # pylint: disable=function-redefined - return sys.modules["pkg_resources"].get_distribution(name).version - env = [] # Agent information. 
@@ -186,7 +175,7 @@ def get_version(name): # pylint: disable=function-redefined dispatcher.append(("Dispatcher Version", hypercorn.__version__)) else: try: - dispatcher.append(("Dispatcher Version", get_version("hypercorn"))) + dispatcher.append(("Dispatcher Version", get_package_version("hypercorn"))) except Exception: pass @@ -237,7 +226,7 @@ def get_version(name): # pylint: disable=function-redefined continue try: - version = get_version(name) + version = get_package_version(name) plugins.append("%s (%s)" % (name, version)) except Exception: plugins.append(name) diff --git a/newrelic/hooks/messagebroker_confluentkafka.py b/newrelic/hooks/messagebroker_confluentkafka.py index 965fd765b..e735b8ade 100644 --- a/newrelic/hooks/messagebroker_confluentkafka.py +++ b/newrelic/hooks/messagebroker_confluentkafka.py @@ -22,6 +22,7 @@ from newrelic.api.time_trace import notice_error from newrelic.api.transaction import current_transaction from newrelic.common.object_wrapper import function_wrapper, wrap_function_wrapper +from newrelic.common.package_version_utils import get_package_version _logger = logging.getLogger(__name__) @@ -56,6 +57,8 @@ def wrap_Producer_produce(wrapped, instance, args, kwargs): else: topic = kwargs.get("topic", None) + transaction.add_messagebroker_info("Confluent-Kafka", get_package_version("confluent-kafka")) + with MessageTrace( library="Kafka", operation="Produce", @@ -161,6 +164,7 @@ def wrap_Consumer_poll(wrapped, instance, args, kwargs): name = "Named/%s" % destination_name transaction.record_custom_metric("%s/%s/Received/Bytes" % (group, name), received_bytes) transaction.record_custom_metric("%s/%s/Received/Messages" % (group, name), message_count) + transaction.add_messagebroker_info("Confluent-Kafka", get_package_version("confluent-kafka")) return record diff --git a/newrelic/hooks/messagebroker_kafkapython.py b/newrelic/hooks/messagebroker_kafkapython.py index 697b46349..9124a16dc 100644 --- 
a/newrelic/hooks/messagebroker_kafkapython.py +++ b/newrelic/hooks/messagebroker_kafkapython.py @@ -26,6 +26,7 @@ function_wrapper, wrap_function_wrapper, ) +from newrelic.common.package_version_utils import get_package_version HEARTBEAT_POLL = "MessageBroker/Kafka/Heartbeat/Poll" HEARTBEAT_SENT = "MessageBroker/Kafka/Heartbeat/Sent" @@ -48,6 +49,8 @@ def wrap_KafkaProducer_send(wrapped, instance, args, kwargs): topic, value, key, headers, partition, timestamp_ms = _bind_send(*args, **kwargs) headers = list(headers) if headers else [] + transaction.add_messagebroker_info("Kafka-Python", get_package_version("kafka-python")) + with MessageTrace( library="Kafka", operation="Produce", @@ -112,6 +115,7 @@ def wrap_kafkaconsumer_next(wrapped, instance, args, kwargs): message_count = 1 transaction = current_transaction(active_only=False) + if not transaction: transaction = MessageTransaction( application=application_instance(), @@ -124,7 +128,7 @@ def wrap_kafkaconsumer_next(wrapped, instance, args, kwargs): source=wrapped, ) instance._nr_transaction = transaction - transaction.__enter__() + transaction.__enter__() # pylint: disable=C2801 # Obtain consumer client_id to send up as agent attribute if hasattr(instance, "config") and "client_id" in instance.config: @@ -143,12 +147,13 @@ def wrap_kafkaconsumer_next(wrapped, instance, args, kwargs): name = "Named/%s" % destination_name transaction.record_custom_metric("%s/%s/Received/Bytes" % (group, name), received_bytes) transaction.record_custom_metric("%s/%s/Received/Messages" % (group, name), message_count) + transaction.add_messagebroker_info("Kafka-Python", get_package_version("kafka-python")) return record def wrap_KafkaProducer_init(wrapped, instance, args, kwargs): - get_config_key = lambda key: kwargs.get(key, instance.DEFAULT_CONFIG[key]) # noqa: E731 + get_config_key = lambda key: kwargs.get(key, instance.DEFAULT_CONFIG[key]) # pylint: disable=C3001 # noqa: E731 kwargs["key_serializer"] = wrap_serializer( instance, 
"Serialization/Key", "MessageBroker", get_config_key("key_serializer") @@ -162,13 +167,13 @@ def wrap_KafkaProducer_init(wrapped, instance, args, kwargs): class NewRelicSerializerWrapper(ObjectProxy): def __init__(self, wrapped, serializer_name, group_prefix): - ObjectProxy.__init__.__get__(self)(wrapped) + ObjectProxy.__init__.__get__(self)(wrapped) # pylint: disable=W0231 self._nr_serializer_name = serializer_name self._nr_group_prefix = group_prefix def serialize(self, topic, object): - wrapped = self.__wrapped__.serialize + wrapped = self.__wrapped__.serialize # pylint: disable=W0622 args = (topic, object) kwargs = {} diff --git a/tests/messagebroker_confluentkafka/test_consumer.py b/tests/messagebroker_confluentkafka/test_consumer.py index 61f532a78..4313e668f 100644 --- a/tests/messagebroker_confluentkafka/test_consumer.py +++ b/tests/messagebroker_confluentkafka/test_consumer.py @@ -63,6 +63,8 @@ def _test(): def test_custom_metrics_on_existing_transaction(get_consumer_record, topic): + from confluent_kafka import __version__ as version + transaction_name = ( "test_consumer:test_custom_metrics_on_existing_transaction.._test" if six.PY3 else "test_consumer:_test" ) @@ -72,6 +74,7 @@ def test_custom_metrics_on_existing_transaction(get_consumer_record, topic): custom_metrics=[ ("Message/Kafka/Topic/Named/%s/Received/Bytes" % topic, 1), ("Message/Kafka/Topic/Named/%s/Received/Messages" % topic, 1), + ("Python/MessageBroker/Confluent-Kafka/%s" % version, 1), ], background_task=True, ) diff --git a/tests/messagebroker_confluentkafka/test_producer.py b/tests/messagebroker_confluentkafka/test_producer.py index 71b674e80..139239e73 100644 --- a/tests/messagebroker_confluentkafka/test_producer.py +++ b/tests/messagebroker_confluentkafka/test_producer.py @@ -65,6 +65,8 @@ def producer_callback(err, msg): def test_trace_metrics(topic, send_producer_message): + from confluent_kafka import __version__ as version + scoped_metrics = 
[("MessageBroker/Kafka/Topic/Produce/Named/%s" % topic, 1)] unscoped_metrics = scoped_metrics txn_name = "test_producer:test_trace_metrics..test" if six.PY3 else "test_producer:test" @@ -73,6 +75,7 @@ def test_trace_metrics(topic, send_producer_message): txn_name, scoped_metrics=scoped_metrics, rollup_metrics=unscoped_metrics, + custom_metrics=[("Python/MessageBroker/Confluent-Kafka/%s" % version, 1)], background_task=True, ) @background_task() diff --git a/tests/messagebroker_kafkapython/test_consumer.py b/tests/messagebroker_kafkapython/test_consumer.py index f53b2acb3..84cf29a04 100644 --- a/tests/messagebroker_kafkapython/test_consumer.py +++ b/tests/messagebroker_kafkapython/test_consumer.py @@ -60,6 +60,8 @@ def _test(): def test_custom_metrics_on_existing_transaction(get_consumer_record, topic): + from kafka.version import __version__ as version + transaction_name = ( "test_consumer:test_custom_metrics_on_existing_transaction.._test" if six.PY3 else "test_consumer:_test" ) @@ -69,6 +71,7 @@ def test_custom_metrics_on_existing_transaction(get_consumer_record, topic): custom_metrics=[ ("Message/Kafka/Topic/Named/%s/Received/Bytes" % topic, 1), ("Message/Kafka/Topic/Named/%s/Received/Messages" % topic, 1), + ("Python/MessageBroker/Kafka-Python/%s" % version, 1), ], background_task=True, ) diff --git a/tests/messagebroker_kafkapython/test_producer.py b/tests/messagebroker_kafkapython/test_producer.py index 927956482..280d4fd15 100644 --- a/tests/messagebroker_kafkapython/test_producer.py +++ b/tests/messagebroker_kafkapython/test_producer.py @@ -28,6 +28,8 @@ def test_trace_metrics(topic, send_producer_message): + from kafka.version import __version__ as version + scoped_metrics = [("MessageBroker/Kafka/Topic/Produce/Named/%s" % topic, 1)] unscoped_metrics = scoped_metrics txn_name = "test_producer:test_trace_metrics..test" if six.PY3 else "test_producer:test" @@ -36,6 +38,7 @@ def test_trace_metrics(topic, send_producer_message): txn_name, 
scoped_metrics=scoped_metrics, rollup_metrics=unscoped_metrics, + custom_metrics=[("Python/MessageBroker/Kafka-Python/%s" % version, 1)], background_task=True, ) @background_task() From 968f3dcacb8f0c0feafd5ee3fc9d25569e3d84b1 Mon Sep 17 00:00:00 2001 From: Lalleh Rafeei <84813886+lrafeei@users.noreply.github.com> Date: Mon, 17 Oct 2022 10:45:32 -0700 Subject: [PATCH 004/108] Deprecate add_custom_parameter(s) API (#655) * Deprecate add_custom_parameter(s) API * Fix unicode tests and some pylint errors * Fix more pylint errors * Revert "Fix more pylint errors" This reverts commit 807ec1c5c40fe421300ccdcd6fedd81f288dce2c. * Edit deprecation message in add_custom_parameters --- newrelic/admin/validate_config.py | 99 +-- newrelic/agent.py | 659 ++++++-------- newrelic/api/transaction.py | 58 +- newrelic/core/attribute.py | 158 ++-- tests/agent_features/test_asgi_browser.py | 747 ++++++++-------- tests/agent_features/test_attribute.py | 344 ++++---- .../test_attributes_in_action.py | 6 +- tests/agent_features/test_browser.py | 823 +++++++++--------- .../agent_features/test_high_security_mode.py | 14 +- tests/agent_features/test_span_events.py | 6 +- tests/cross_agent/test_rum_client_config.py | 98 ++- tests/testing_support/sample_applications.py | 107 +-- .../sample_asgi_applications.py | 6 +- 13 files changed, 1599 insertions(+), 1526 deletions(-) diff --git a/newrelic/admin/validate_config.py b/newrelic/admin/validate_config.py index a842df7be..ac25b715e 100644 --- a/newrelic/admin/validate_config.py +++ b/newrelic/admin/validate_config.py @@ -25,17 +25,15 @@ def _run_validation_test(): from newrelic.api.error_trace import error_trace from newrelic.api.external_trace import external_trace from newrelic.api.function_trace import function_trace - from newrelic.api.transaction import add_custom_parameter from newrelic.api.time_trace import notice_error + from newrelic.api.transaction import add_custom_attribute from newrelic.api.wsgi_application import wsgi_application - 
@external_trace(library='test', - url='http://localhost/test', method='GET') + @external_trace(library="test", url="http://localhost/test", method="GET") def _external1(): time.sleep(0.1) - @function_trace(label='label', - params={'fun-key-1': '1', 'fun-key-2': 2, 'fun-key-3': 3.0}) + @function_trace(label="label", params={"fun-key-1": "1", "fun-key-2": 2, "fun-key-3": 3.0}) def _function1(): _external1() @@ -47,33 +45,29 @@ def _function2(): @error_trace() @function_trace() def _function3(): - add_custom_parameter('txn-key-1', 1) + add_custom_attribute("txn-key-1", 1) _function4() - raise RuntimeError('This is a test error and can be ignored.') + raise RuntimeError("This is a test error and can be ignored.") @function_trace() def _function4(params=None, application=None): try: _function5() except: - notice_error(attributes=(params or { - 'err-key-2': 2, 'err-key-3': 3.0}), - application=application) + notice_error(attributes=(params or {"err-key-2": 2, "err-key-3": 3.0}), application=application) @function_trace() def _function5(): - raise NotImplementedError( - 'This is a test error and can be ignored.') + raise NotImplementedError("This is a test error and can be ignored.") @wsgi_application() def _wsgi_application(environ, start_response): - status = '200 OK' - output = 'Hello World!' + status = "200 OK" + output = "Hello World!" 
- response_headers = [('Content-type', 'text/plain'), - ('Content-Length', str(len(output)))] + response_headers = [("Content-type", "text/plain"), ("Content-Length", str(len(output)))] start_response(status, response_headers) for i in range(10): @@ -107,16 +101,15 @@ def _background_task(): def _start_response(*args): pass - _environ = { 'SCRIPT_NAME': '', 'PATH_INFO': '/test', - 'QUERY_STRING': 'key=value' } + _environ = {"SCRIPT_NAME": "", "PATH_INFO": "/test", "QUERY_STRING": "key=value"} _iterable = _wsgi_application(_environ, _start_response) _iterable.close() _background_task() - _function4(params={'err-key-4': 4, 'err-key-5': 5.0}, - application=application()) + _function4(params={"err-key-4": 4, "err-key-5": 5.0}, application=application()) + _user_message = """ Running Python agent test. @@ -136,19 +129,23 @@ def _start_response(*args): data to the New Relic UI. """ -@command('validate-config', 'config_file [log_file]', -"""Validates the syntax of . Also tests connectivity to New + +@command( + "validate-config", + "config_file [log_file]", + """Validates the syntax of . 
Also tests connectivity to New Relic core application by connecting to the account corresponding to the license key listed in the configuration file, and reporting test data under -the application name 'Python Agent Test'.""") +the application name 'Python Agent Test'.""", +) def validate_config(args): + import logging import os import sys - import logging import time if len(args) == 0: - usage('validate-config') + usage("validate-config") sys.exit(1) from newrelic.api.application import register_application @@ -158,7 +155,7 @@ def validate_config(args): if len(args) >= 2: log_file = args[1] else: - log_file = '/tmp/python-agent-test.log' + log_file = "/tmp/python-agent-test.log" # nosec log_level = logging.DEBUG @@ -168,21 +165,20 @@ def validate_config(args): pass config_file = args[0] - environment = os.environ.get('NEW_RELIC_ENVIRONMENT') + environment = os.environ.get("NEW_RELIC_ENVIRONMENT") - if config_file == '-': - config_file = os.environ.get('NEW_RELIC_CONFIG_FILE') + if config_file == "-": + config_file = os.environ.get("NEW_RELIC_CONFIG_FILE") - initialize(config_file, environment, ignore_errors=False, - log_file=log_file, log_level=log_level) + initialize(config_file, environment, ignore_errors=False, log_file=log_file, log_level=log_level) _logger = logging.getLogger(__name__) - _logger.debug('Starting agent validation.') + _logger.debug("Starting agent validation.") _settings = global_settings() - app_name = os.environ.get('NEW_RELIC_TEST_APP_NAME', 'Python Agent Test') + app_name = os.environ.get("NEW_RELIC_TEST_APP_NAME", "Python Agent Test") _settings.app_name = app_name _settings.transaction_tracer.transaction_threshold = 0 @@ -194,17 +190,17 @@ def validate_config(args): print(_user_message % dict(app_name=app_name, log_file=log_file)) - _logger.debug('Register test application.') + _logger.debug("Register test application.") - _logger.debug('Collector host is %r.', _settings.host) - _logger.debug('Collector port is %r.', _settings.port) + 
_logger.debug("Collector host is %r.", _settings.host) + _logger.debug("Collector port is %r.", _settings.port) - _logger.debug('Proxy scheme is %r.', _settings.proxy_scheme) - _logger.debug('Proxy host is %r.', _settings.proxy_host) - _logger.debug('Proxy port is %r.', _settings.proxy_port) - _logger.debug('Proxy user is %r.', _settings.proxy_user) + _logger.debug("Proxy scheme is %r.", _settings.proxy_scheme) + _logger.debug("Proxy host is %r.", _settings.proxy_host) + _logger.debug("Proxy port is %r.", _settings.proxy_port) + _logger.debug("Proxy user is %r.", _settings.proxy_user) - _logger.debug('License key is %r.', _settings.license_key) + _logger.debug("License key is %r.", _settings.license_key) _timeout = 30.0 @@ -215,24 +211,25 @@ def validate_config(args): _duration = _end - _start if not _application.active: - _logger.error('Unable to register application for test, ' - 'connection could not be established within %s seconds.', - _timeout) + _logger.error( + "Unable to register application for test, " "connection could not be established within %s seconds.", + _timeout, + ) return - if hasattr(_application.settings, 'messages'): + if hasattr(_application.settings, "messages"): for message in _application.settings.messages: - if message['message'].startswith('Reporting to:'): - parts = message['message'].split('Reporting to:') + if message["message"].startswith("Reporting to:"): + parts = message["message"].split("Reporting to:") url = parts[1].strip() - print('Registration successful. Reporting to:') + print("Registration successful. 
Reporting to:") print() - print(' %s' % url) + print(" %s" % url) print() break - _logger.debug('Registration took %s seconds.', _duration) + _logger.debug("Registration took %s seconds.", _duration) - _logger.debug('Run the validation test.') + _logger.debug("Run the validation test.") _run_validation_test() diff --git a/newrelic/agent.py b/newrelic/agent.py index f635b866f..e532d1c6e 100644 --- a/newrelic/agent.py +++ b/newrelic/agent.py @@ -12,412 +12,327 @@ # See the License for the specific language governing permissions and # limitations under the License. -from newrelic.config import ( - initialize as __initialize, - extra_settings as __extra_settings) - -from newrelic.core.config import global_settings as __global_settings - -from newrelic.core.agent import ( - shutdown_agent as __shutdown_agent, - register_data_source as __register_data_source) - -from newrelic.samplers.decorators import ( - data_source_generator as __data_source_generator, - data_source_factory as __data_source_factory) - -from newrelic.api.log import NewRelicContextFormatter - -from newrelic.api.application import ( - application_instance as __application, - register_application as __register_application, - application_settings as __application_settings) +from newrelic.api.application import application_instance as __application +from newrelic.api.application import application_settings as __application_settings +from newrelic.api.application import register_application as __register_application +# from newrelic.api.log import NewRelicContextFormatter from newrelic.api.time_trace import ( - current_trace as __current_trace, - get_linking_metadata as __get_linking_metadata, - add_custom_span_attribute as __add_custom_span_attribute, - record_exception as __record_exception, - notice_error as __notice_error) - + add_custom_span_attribute as __add_custom_span_attribute, +) +from newrelic.api.time_trace import current_trace as __current_trace +from newrelic.api.time_trace import 
get_linking_metadata as __get_linking_metadata +from newrelic.api.time_trace import notice_error as __notice_error +from newrelic.api.time_trace import record_exception as __record_exception from newrelic.api.transaction import ( - current_transaction as __current_transaction, - set_transaction_name as __set_transaction_name, - end_of_transaction as __end_of_transaction, - set_background_task as __set_background_task, - ignore_transaction as __ignore_transaction, - suppress_apdex_metric as __suppress_apdex_metric, - capture_request_params as __capture_request_params, - add_custom_parameter as __add_custom_parameter, - add_custom_parameters as __add_custom_parameters, - add_framework_info as __add_framework_info, - get_browser_timing_header as __get_browser_timing_header, - get_browser_timing_footer as __get_browser_timing_footer, - disable_browser_autorum as __disable_browser_autorum, - suppress_transaction_trace as __suppress_transaction_trace, - record_custom_metric as __record_custom_metric, - record_custom_metrics as __record_custom_metrics, - record_custom_event as __record_custom_event, - accept_distributed_trace_payload as __accept_distributed_trace_payload, - create_distributed_trace_payload as __create_distributed_trace_payload, - accept_distributed_trace_headers as __accept_distributed_trace_headers, - insert_distributed_trace_headers as __insert_distributed_trace_headers, - current_trace_id as __current_trace_id, - current_span_id as __current_span_id) - + accept_distributed_trace_headers as __accept_distributed_trace_headers, +) +from newrelic.api.transaction import ( + accept_distributed_trace_payload as __accept_distributed_trace_payload, +) +from newrelic.api.transaction import add_custom_attribute as __add_custom_attribute +from newrelic.api.transaction import add_custom_attributes as __add_custom_attributes +from newrelic.api.transaction import add_custom_parameter as __add_custom_parameter +from newrelic.api.transaction import 
add_custom_parameters as __add_custom_parameters +from newrelic.api.transaction import add_framework_info as __add_framework_info +from newrelic.api.transaction import capture_request_params as __capture_request_params +from newrelic.api.transaction import ( + create_distributed_trace_payload as __create_distributed_trace_payload, +) +from newrelic.api.transaction import current_span_id as __current_span_id +from newrelic.api.transaction import current_trace_id as __current_trace_id +from newrelic.api.transaction import current_transaction as __current_transaction +from newrelic.api.transaction import ( + disable_browser_autorum as __disable_browser_autorum, +) +from newrelic.api.transaction import end_of_transaction as __end_of_transaction +from newrelic.api.transaction import ( + get_browser_timing_footer as __get_browser_timing_footer, +) +from newrelic.api.transaction import ( + get_browser_timing_header as __get_browser_timing_header, +) +from newrelic.api.transaction import ignore_transaction as __ignore_transaction +from newrelic.api.transaction import ( + insert_distributed_trace_headers as __insert_distributed_trace_headers, +) +from newrelic.api.transaction import record_custom_event as __record_custom_event +from newrelic.api.transaction import record_custom_metric as __record_custom_metric +from newrelic.api.transaction import record_custom_metrics as __record_custom_metrics +from newrelic.api.transaction import set_background_task as __set_background_task +from newrelic.api.transaction import set_transaction_name as __set_transaction_name +from newrelic.api.transaction import suppress_apdex_metric as __suppress_apdex_metric +from newrelic.api.transaction import ( + suppress_transaction_trace as __suppress_transaction_trace, +) +from newrelic.api.wsgi_application import ( + WSGIApplicationWrapper as __WSGIApplicationWrapper, +) from newrelic.api.wsgi_application import ( - wsgi_application as __wsgi_application, - WSGIApplicationWrapper as 
__WSGIApplicationWrapper, - wrap_wsgi_application as __wrap_wsgi_application) + wrap_wsgi_application as __wrap_wsgi_application, +) +from newrelic.api.wsgi_application import wsgi_application as __wsgi_application +from newrelic.config import extra_settings as __extra_settings +from newrelic.config import initialize as __initialize +from newrelic.core.agent import register_data_source as __register_data_source +from newrelic.core.agent import shutdown_agent as __shutdown_agent +from newrelic.core.config import global_settings as __global_settings +from newrelic.samplers.decorators import data_source_factory as __data_source_factory +from newrelic.samplers.decorators import ( + data_source_generator as __data_source_generator, +) try: from newrelic.api.asgi_application import ( - asgi_application as __asgi_application, - ASGIApplicationWrapper as __ASGIApplicationWrapper, - wrap_asgi_application as __wrap_asgi_application) + ASGIApplicationWrapper as __ASGIApplicationWrapper, + ) + from newrelic.api.asgi_application import asgi_application as __asgi_application + from newrelic.api.asgi_application import ( + wrap_asgi_application as __wrap_asgi_application, + ) except SyntaxError: + def __asgi_application(*args, **kwargs): pass __ASGIApplicationWrapper = __asgi_application __wrap_asgi_application = __asgi_application -from newrelic.api.web_transaction import ( - WebTransaction as __WebTransaction, - web_transaction as __web_transaction, - WebTransactionWrapper as __WebTransactionWrapper, - wrap_web_transaction as __wrap_web_transaction) - +from newrelic.api.background_task import BackgroundTask as __BackgroundTask from newrelic.api.background_task import ( - background_task as __background_task, - BackgroundTask as __BackgroundTask, - BackgroundTaskWrapper as __BackgroundTaskWrapper, - wrap_background_task as __wrap_background_task) - -from newrelic.api.lambda_handler import ( - LambdaHandlerWrapper as __LambdaHandlerWrapper, - lambda_handler as __lambda_handler) - 
+ BackgroundTaskWrapper as __BackgroundTaskWrapper, +) +from newrelic.api.background_task import background_task as __background_task +from newrelic.api.background_task import wrap_background_task as __wrap_background_task +from newrelic.api.database_trace import DatabaseTrace as __DatabaseTrace +from newrelic.api.database_trace import DatabaseTraceWrapper as __DatabaseTraceWrapper +from newrelic.api.database_trace import database_trace as __database_trace +from newrelic.api.database_trace import ( + register_database_client as __register_database_client, +) +from newrelic.api.database_trace import wrap_database_trace as __wrap_database_trace +from newrelic.api.datastore_trace import DatastoreTrace as __DatastoreTrace +from newrelic.api.datastore_trace import ( + DatastoreTraceWrapper as __DatastoreTraceWrapper, +) +from newrelic.api.datastore_trace import datastore_trace as __datastore_trace +from newrelic.api.datastore_trace import wrap_datastore_trace as __wrap_datastore_trace +from newrelic.api.error_trace import ErrorTrace as __ErrorTrace +from newrelic.api.error_trace import ErrorTraceWrapper as __ErrorTraceWrapper +from newrelic.api.error_trace import error_trace as __error_trace +from newrelic.api.error_trace import wrap_error_trace as __wrap_error_trace +from newrelic.api.external_trace import ExternalTrace as __ExternalTrace +from newrelic.api.external_trace import ExternalTraceWrapper as __ExternalTraceWrapper +from newrelic.api.external_trace import external_trace as __external_trace +from newrelic.api.external_trace import wrap_external_trace as __wrap_external_trace +from newrelic.api.function_trace import FunctionTrace as __FunctionTrace +from newrelic.api.function_trace import FunctionTraceWrapper as __FunctionTraceWrapper +from newrelic.api.function_trace import function_trace as __function_trace +from newrelic.api.function_trace import wrap_function_trace as __wrap_function_trace +from newrelic.api.generator_trace import ( + GeneratorTraceWrapper 
as __GeneratorTraceWrapper, +) +from newrelic.api.generator_trace import generator_trace as __generator_trace +from newrelic.api.generator_trace import wrap_generator_trace as __wrap_generator_trace +from newrelic.api.html_insertion import insert_html_snippet as __insert_html_snippet +from newrelic.api.html_insertion import verify_body_exists as __verify_body_exists +from newrelic.api.lambda_handler import LambdaHandlerWrapper as __LambdaHandlerWrapper +from newrelic.api.lambda_handler import lambda_handler as __lambda_handler +from newrelic.api.message_trace import MessageTrace as __MessageTrace +from newrelic.api.message_trace import MessageTraceWrapper as __MessageTraceWrapper +from newrelic.api.message_trace import message_trace as __message_trace +from newrelic.api.message_trace import wrap_message_trace as __wrap_message_trace +from newrelic.api.message_transaction import MessageTransaction as __MessageTransaction +from newrelic.api.message_transaction import ( + MessageTransactionWrapper as __MessageTransactionWrapper, +) +from newrelic.api.message_transaction import ( + message_transaction as __message_transaction, +) +from newrelic.api.message_transaction import ( + wrap_message_transaction as __wrap_message_transaction, +) +from newrelic.api.profile_trace import ProfileTraceWrapper as __ProfileTraceWrapper +from newrelic.api.profile_trace import profile_trace as __profile_trace +from newrelic.api.profile_trace import wrap_profile_trace as __wrap_profile_trace +from newrelic.api.supportability import wrap_api_call as __wrap_api_call from newrelic.api.transaction_name import ( - transaction_name as __transaction_name, - TransactionNameWrapper as __TransactionNameWrapper, - wrap_transaction_name as __wrap_transaction_name) - -from newrelic.api.function_trace import ( - function_trace as __function_trace, - FunctionTrace as __FunctionTrace, - FunctionTraceWrapper as __FunctionTraceWrapper, - wrap_function_trace as __wrap_function_trace) + 
TransactionNameWrapper as __TransactionNameWrapper, +) +from newrelic.api.transaction_name import transaction_name as __transaction_name +from newrelic.api.transaction_name import ( + wrap_transaction_name as __wrap_transaction_name, +) +from newrelic.api.web_transaction import WebTransaction as __WebTransaction +from newrelic.api.web_transaction import ( + WebTransactionWrapper as __WebTransactionWrapper, +) +from newrelic.api.web_transaction import web_transaction as __web_transaction +from newrelic.api.web_transaction import wrap_web_transaction as __wrap_web_transaction +from newrelic.common.object_names import callable_name as __callable_name +from newrelic.common.object_wrapper import FunctionWrapper as __FunctionWrapper +from newrelic.common.object_wrapper import InFunctionWrapper as __InFunctionWrapper +from newrelic.common.object_wrapper import ObjectProxy as __ObjectProxy +from newrelic.common.object_wrapper import ObjectWrapper as __ObjectWrapper +from newrelic.common.object_wrapper import OutFunctionWrapper as __OutFunctionWrapper +from newrelic.common.object_wrapper import PostFunctionWrapper as __PostFunctionWrapper +from newrelic.common.object_wrapper import PreFunctionWrapper as __PreFunctionWrapper +from newrelic.common.object_wrapper import function_wrapper as __function_wrapper +from newrelic.common.object_wrapper import in_function as __in_function +from newrelic.common.object_wrapper import out_function as __out_function +from newrelic.common.object_wrapper import ( + patch_function_wrapper as __patch_function_wrapper, +) +from newrelic.common.object_wrapper import post_function as __post_function +from newrelic.common.object_wrapper import pre_function as __pre_function +from newrelic.common.object_wrapper import resolve_path as __resolve_path +from newrelic.common.object_wrapper import ( + transient_function_wrapper as __transient_function_wrapper, +) +from newrelic.common.object_wrapper import ( + wrap_function_wrapper as 
__wrap_function_wrapper, +) +from newrelic.common.object_wrapper import wrap_in_function as __wrap_in_function +from newrelic.common.object_wrapper import wrap_object as __wrap_object +from newrelic.common.object_wrapper import ( + wrap_object_attribute as __wrap_object_attribute, +) +from newrelic.common.object_wrapper import wrap_out_function as __wrap_out_function +from newrelic.common.object_wrapper import wrap_post_function as __wrap_post_function +from newrelic.common.object_wrapper import wrap_pre_function as __wrap_pre_function # EXPERIMENTAL - Generator traces are currently experimental and may not # exist in this form in future versions of the agent. -from newrelic.api.generator_trace import ( - generator_trace as __generator_trace, - GeneratorTraceWrapper as __GeneratorTraceWrapper, - wrap_generator_trace as __wrap_generator_trace) # EXPERIMENTAL - Profile traces are currently experimental and may not # exist in this form in future versions of the agent. -from newrelic.api.profile_trace import ( - profile_trace as __profile_trace, - ProfileTraceWrapper as __ProfileTraceWrapper, - wrap_profile_trace as __wrap_profile_trace) - -from newrelic.api.database_trace import ( - database_trace as __database_trace, - DatabaseTrace as __DatabaseTrace, - DatabaseTraceWrapper as __DatabaseTraceWrapper, - wrap_database_trace as __wrap_database_trace, - register_database_client as __register_database_client) - -from newrelic.api.datastore_trace import ( - datastore_trace as __datastore_trace, - DatastoreTrace as __DatastoreTrace, - DatastoreTraceWrapper as __DatastoreTraceWrapper, - wrap_datastore_trace as __wrap_datastore_trace) - -from newrelic.api.external_trace import ( - external_trace as __external_trace, - ExternalTrace as __ExternalTrace, - ExternalTraceWrapper as __ExternalTraceWrapper, - wrap_external_trace as __wrap_external_trace) - -from newrelic.api.error_trace import ( - error_trace as __error_trace, - ErrorTrace as __ErrorTrace, - ErrorTraceWrapper as 
__ErrorTraceWrapper, - wrap_error_trace as __wrap_error_trace) - -from newrelic.api.message_trace import ( - message_trace as __message_trace, - MessageTrace as __MessageTrace, - MessageTraceWrapper as __MessageTraceWrapper, - wrap_message_trace as __wrap_message_trace) - -from newrelic.api.message_transaction import ( - message_transaction as __message_transaction, - MessageTransaction as __MessageTransaction, - MessageTransactionWrapper as __MessageTransactionWrapper, - wrap_message_transaction as __wrap_message_transaction) - -from newrelic.common.object_names import callable_name as __callable_name - -from newrelic.common.object_wrapper import ( - ObjectProxy as __ObjectProxy, - wrap_object as __wrap_object, - wrap_object_attribute as __wrap_object_attribute, - resolve_path as __resolve_path, - transient_function_wrapper as __transient_function_wrapper, - FunctionWrapper as __FunctionWrapper, - function_wrapper as __function_wrapper, - wrap_function_wrapper as __wrap_function_wrapper, - patch_function_wrapper as __patch_function_wrapper, - ObjectWrapper as __ObjectWrapper, - pre_function as __pre_function, - PreFunctionWrapper as __PreFunctionWrapper, - wrap_pre_function as __wrap_pre_function, - post_function as __post_function, - PostFunctionWrapper as __PostFunctionWrapper, - wrap_post_function as __wrap_post_function, - in_function as __in_function, - InFunctionWrapper as __InFunctionWrapper, - wrap_in_function as __wrap_in_function, - out_function as __out_function, - OutFunctionWrapper as __OutFunctionWrapper, - wrap_out_function as __wrap_out_function) - -from newrelic.api.html_insertion import ( - insert_html_snippet as __insert_html_snippet, - verify_body_exists as __verify_body_exists) - -from newrelic.api.supportability import wrap_api_call as __wrap_api_call initialize = __initialize -extra_settings = __wrap_api_call(__extra_settings, - 'extra_settings') -global_settings = __wrap_api_call(__global_settings, - 'global_settings') -shutdown_agent = 
__wrap_api_call(__shutdown_agent, - 'shutdown_agent') -register_data_source = __wrap_api_call(__register_data_source, - 'register_data_source') -data_source_generator = __wrap_api_call(__data_source_generator, - 'data_source_generator') -data_source_factory = __wrap_api_call(__data_source_factory, - 'data_source_factory') -application = __wrap_api_call(__application, - 'application') +extra_settings = __wrap_api_call(__extra_settings, "extra_settings") +global_settings = __wrap_api_call(__global_settings, "global_settings") +shutdown_agent = __wrap_api_call(__shutdown_agent, "shutdown_agent") +register_data_source = __wrap_api_call(__register_data_source, "register_data_source") +data_source_generator = __wrap_api_call(__data_source_generator, "data_source_generator") +data_source_factory = __wrap_api_call(__data_source_factory, "data_source_factory") +application = __wrap_api_call(__application, "application") register_application = __register_application -application_settings = __wrap_api_call(__application_settings, - 'application_settings') -current_trace = __wrap_api_call(__current_trace, - 'current_trace') -get_linking_metadata = __wrap_api_call(__get_linking_metadata, - 'get_linking_metadata') -add_custom_span_attribute = __wrap_api_call(__add_custom_span_attribute, - 'add_custom_span_attribute') -current_transaction = __wrap_api_call(__current_transaction, - 'current_transaction') -set_transaction_name = __wrap_api_call(__set_transaction_name, - 'set_transaction_name') -end_of_transaction = __wrap_api_call(__end_of_transaction, - 'end_of_transaction') -set_background_task = __wrap_api_call(__set_background_task, - 'set_background_task') -ignore_transaction = __wrap_api_call(__ignore_transaction, - 'ignore_transaction') -suppress_apdex_metric = __wrap_api_call(__suppress_apdex_metric, - 'suppress_apdex_metric') -capture_request_params = __wrap_api_call(__capture_request_params, - 'capture_request_params') -add_custom_parameter = 
__wrap_api_call(__add_custom_parameter, - 'add_custom_parameter') -add_custom_parameters = __wrap_api_call(__add_custom_parameters, - 'add_custom_parameters') -add_framework_info = __wrap_api_call(__add_framework_info, - 'add_framework_info') -record_exception = __wrap_api_call(__record_exception, - 'record_exception') -notice_error = __wrap_api_call(__notice_error, - 'notice_error') -get_browser_timing_header = __wrap_api_call(__get_browser_timing_header, - 'get_browser_timing_header') -get_browser_timing_footer = __wrap_api_call(__get_browser_timing_footer, - 'get_browser_timing_footer') -disable_browser_autorum = __wrap_api_call(__disable_browser_autorum, - 'disable_browser_autorum') -suppress_transaction_trace = __wrap_api_call(__suppress_transaction_trace, - 'suppress_transaction_trace') -record_custom_metric = __wrap_api_call(__record_custom_metric, - 'record_custom_metric') -record_custom_metrics = __wrap_api_call(__record_custom_metrics, - 'record_custom_metrics') -record_custom_event = __wrap_api_call(__record_custom_event, - 'record_custom_event') +application_settings = __wrap_api_call(__application_settings, "application_settings") +current_trace = __wrap_api_call(__current_trace, "current_trace") +get_linking_metadata = __wrap_api_call(__get_linking_metadata, "get_linking_metadata") +add_custom_span_attribute = __wrap_api_call(__add_custom_span_attribute, "add_custom_span_attribute") +current_transaction = __wrap_api_call(__current_transaction, "current_transaction") +set_transaction_name = __wrap_api_call(__set_transaction_name, "set_transaction_name") +end_of_transaction = __wrap_api_call(__end_of_transaction, "end_of_transaction") +set_background_task = __wrap_api_call(__set_background_task, "set_background_task") +ignore_transaction = __wrap_api_call(__ignore_transaction, "ignore_transaction") +suppress_apdex_metric = __wrap_api_call(__suppress_apdex_metric, "suppress_apdex_metric") +capture_request_params = 
__wrap_api_call(__capture_request_params, "capture_request_params") +add_custom_parameter = __wrap_api_call(__add_custom_parameter, "add_custom_parameter") +add_custom_parameters = __wrap_api_call(__add_custom_parameters, "add_custom_parameters") +add_custom_attribute = __wrap_api_call(__add_custom_attribute, "add_custom_attribute") +add_custom_attributes = __wrap_api_call(__add_custom_attributes, "add_custom_attributes") +add_framework_info = __wrap_api_call(__add_framework_info, "add_framework_info") +record_exception = __wrap_api_call(__record_exception, "record_exception") +notice_error = __wrap_api_call(__notice_error, "notice_error") +get_browser_timing_header = __wrap_api_call(__get_browser_timing_header, "get_browser_timing_header") +get_browser_timing_footer = __wrap_api_call(__get_browser_timing_footer, "get_browser_timing_footer") +disable_browser_autorum = __wrap_api_call(__disable_browser_autorum, "disable_browser_autorum") +suppress_transaction_trace = __wrap_api_call(__suppress_transaction_trace, "suppress_transaction_trace") +record_custom_metric = __wrap_api_call(__record_custom_metric, "record_custom_metric") +record_custom_metrics = __wrap_api_call(__record_custom_metrics, "record_custom_metrics") +record_custom_event = __wrap_api_call(__record_custom_event, "record_custom_event") accept_distributed_trace_payload = __wrap_api_call( - __accept_distributed_trace_payload, 'accept_distributed_trace_payload') + __accept_distributed_trace_payload, "accept_distributed_trace_payload" +) create_distributed_trace_payload = __wrap_api_call( - __create_distributed_trace_payload, - 'create_distributed_trace_payload') + __create_distributed_trace_payload, "create_distributed_trace_payload" +) accept_distributed_trace_headers = __wrap_api_call( - __accept_distributed_trace_headers, - 'accept_distributed_trace_headers') + __accept_distributed_trace_headers, "accept_distributed_trace_headers" +) insert_distributed_trace_headers = __wrap_api_call( - 
__insert_distributed_trace_headers, - 'insert_distributed_trace_headers') -current_trace_id = __wrap_api_call(__current_trace_id, 'current_trace_id') -current_span_id = __wrap_api_call(__current_span_id, 'current_span_id') + __insert_distributed_trace_headers, "insert_distributed_trace_headers" +) +current_trace_id = __wrap_api_call(__current_trace_id, "current_trace_id") +current_span_id = __wrap_api_call(__current_span_id, "current_span_id") wsgi_application = __wsgi_application asgi_application = __asgi_application -WebTransaction = __wrap_api_call(__WebTransaction, - 'WebTransaction') -web_transaction = __wrap_api_call(__web_transaction, - 'web_transaction') -WebTransactionWrapper = __wrap_api_call(__WebTransactionWrapper, - 'WebTransactionWrapper') -wrap_web_transaction = __wrap_api_call(__wrap_web_transaction, - 'wrap_web_transaction') +WebTransaction = __wrap_api_call(__WebTransaction, "WebTransaction") +web_transaction = __wrap_api_call(__web_transaction, "web_transaction") +WebTransactionWrapper = __wrap_api_call(__WebTransactionWrapper, "WebTransactionWrapper") +wrap_web_transaction = __wrap_api_call(__wrap_web_transaction, "wrap_web_transaction") WSGIApplicationWrapper = __WSGIApplicationWrapper wrap_wsgi_application = __wrap_wsgi_application ASGIApplicationWrapper = __ASGIApplicationWrapper wrap_asgi_application = __wrap_asgi_application -background_task = __wrap_api_call(__background_task, - 'background_task') -BackgroundTask = __wrap_api_call(__BackgroundTask, - 'BackgroundTask') -BackgroundTaskWrapper = __wrap_api_call(__BackgroundTaskWrapper, - 'BackgroundTaskWrapper') -wrap_background_task = __wrap_api_call(__wrap_background_task, - 'wrap_background_task') -LambdaHandlerWrapper = __wrap_api_call(__LambdaHandlerWrapper, - 'LambdaHandlerWrapper') -lambda_handler = __wrap_api_call(__lambda_handler, - 'lambda_handler') -transaction_name = __wrap_api_call(__transaction_name, - 'transaction_name') -TransactionNameWrapper = 
__wrap_api_call(__TransactionNameWrapper, - 'TransactionNameWrapper') -wrap_transaction_name = __wrap_api_call(__wrap_transaction_name, - 'wrap_transaction_name') -function_trace = __wrap_api_call(__function_trace, - 'function_trace') -FunctionTrace = __wrap_api_call(__FunctionTrace, - 'FunctionTrace') -FunctionTraceWrapper = __wrap_api_call(__FunctionTraceWrapper, - 'FunctionTraceWrapper') -wrap_function_trace = __wrap_api_call(__wrap_function_trace, - 'wrap_function_trace') -generator_trace = __wrap_api_call(__generator_trace, - 'generator_trace') -GeneratorTraceWrapper = __wrap_api_call(__GeneratorTraceWrapper, - 'GeneratorTraceWrapper') -wrap_generator_trace = __wrap_api_call(__wrap_generator_trace, - 'wrap_generator_trace') -profile_trace = __wrap_api_call(__profile_trace, - 'profile_trace') -ProfileTraceWrapper = __wrap_api_call(__ProfileTraceWrapper, - 'ProfileTraceWrapper') -wrap_profile_trace = __wrap_api_call(__wrap_profile_trace, - 'wrap_profile_trace') -database_trace = __wrap_api_call(__database_trace, - 'database_trace') -DatabaseTrace = __wrap_api_call(__DatabaseTrace, - 'DatabaseTrace') -DatabaseTraceWrapper = __wrap_api_call(__DatabaseTraceWrapper, - 'DatabaseTraceWrapper') -wrap_database_trace = __wrap_api_call(__wrap_database_trace, - 'wrap_database_trace') -register_database_client = __wrap_api_call(__register_database_client, - 'register_database_client') -datastore_trace = __wrap_api_call(__datastore_trace, - 'datastore_trace') -DatastoreTrace = __wrap_api_call(__DatastoreTrace, - 'DatastoreTrace') -DatastoreTraceWrapper = __wrap_api_call(__DatastoreTraceWrapper, - 'DatastoreTraceWrapper') -wrap_datastore_trace = __wrap_api_call(__wrap_datastore_trace, - 'wrap_datastore_trace') -external_trace = __wrap_api_call(__external_trace, - 'external_trace') -ExternalTrace = __wrap_api_call(__ExternalTrace, - 'ExternalTrace') -ExternalTraceWrapper = __wrap_api_call(__ExternalTraceWrapper, - 'ExternalTraceWrapper') -wrap_external_trace = 
__wrap_api_call(__wrap_external_trace, - 'wrap_external_trace') -error_trace = __wrap_api_call(__error_trace, - 'error_trace') -ErrorTrace = __wrap_api_call(__ErrorTrace, - 'ErrorTrace') -ErrorTraceWrapper = __wrap_api_call(__ErrorTraceWrapper, - 'ErrorTraceWrapper') -wrap_error_trace = __wrap_api_call(__wrap_error_trace, - 'wrap_error_trace') -message_trace = __wrap_api_call(__message_trace, - 'message_trace') -MessageTrace = __wrap_api_call(__MessageTrace, - 'MessageTrace') -MessageTraceWrapper = __wrap_api_call(__MessageTraceWrapper, - 'MessageTraceWrapper') -wrap_message_trace = __wrap_api_call(__wrap_message_trace, - 'wrap_message_trace') -message_transaction = __wrap_api_call(__message_transaction, - 'message_trace') -MessageTransaction = __wrap_api_call(__MessageTransaction, - 'MessageTransaction') -MessageTransactionWrapper = __wrap_api_call(__MessageTransactionWrapper, - 'MessageTransactionWrapper') -wrap_message_transaction = __wrap_api_call(__wrap_message_transaction, - 'wrap_message_transaction') -callable_name = __wrap_api_call(__callable_name, - 'callable_name') -ObjectProxy = __wrap_api_call(__ObjectProxy, - 'ObjectProxy') -wrap_object = __wrap_api_call(__wrap_object, - 'wrap_object') -wrap_object_attribute = __wrap_api_call(__wrap_object_attribute, - 'wrap_object_attribute') -resolve_path = __wrap_api_call(__resolve_path, - 'resolve_path') -transient_function_wrapper = __wrap_api_call(__transient_function_wrapper, - 'transient_function_wrapper') -FunctionWrapper = __wrap_api_call(__FunctionWrapper, - 'FunctionWrapper') -function_wrapper = __wrap_api_call(__function_wrapper, - 'function_wrapper') -wrap_function_wrapper = __wrap_api_call(__wrap_function_wrapper, - 'wrap_function_wrapper') -patch_function_wrapper = __wrap_api_call(__patch_function_wrapper, - 'patch_function_wrapper') -ObjectWrapper = __wrap_api_call(__ObjectWrapper, - 'ObjectWrapper') -pre_function = __wrap_api_call(__pre_function, - 'pre_function') -PreFunctionWrapper = 
__wrap_api_call(__PreFunctionWrapper, - 'PreFunctionWrapper') -wrap_pre_function = __wrap_api_call(__wrap_pre_function, - 'wrap_pre_function') -post_function = __wrap_api_call(__post_function, - 'post_function') -PostFunctionWrapper = __wrap_api_call(__PostFunctionWrapper, - 'PostFunctionWrapper') -wrap_post_function = __wrap_api_call(__wrap_post_function, - 'wrap_post_function') -in_function = __wrap_api_call(__in_function, - 'in_function') -InFunctionWrapper = __wrap_api_call(__InFunctionWrapper, - 'InFunctionWrapper') -wrap_in_function = __wrap_api_call(__wrap_in_function, - 'wrap_in_function') -out_function = __wrap_api_call(__out_function, - 'out_function') -OutFunctionWrapper = __wrap_api_call(__OutFunctionWrapper, - 'OutFunctionWrapper') -wrap_out_function = __wrap_api_call(__wrap_out_function, - 'wrap_out_function') -insert_html_snippet = __wrap_api_call(__insert_html_snippet, - 'insert_html_snippet') -verify_body_exists = __wrap_api_call(__verify_body_exists, - 'verify_body_exists') +background_task = __wrap_api_call(__background_task, "background_task") +BackgroundTask = __wrap_api_call(__BackgroundTask, "BackgroundTask") +BackgroundTaskWrapper = __wrap_api_call(__BackgroundTaskWrapper, "BackgroundTaskWrapper") +wrap_background_task = __wrap_api_call(__wrap_background_task, "wrap_background_task") +LambdaHandlerWrapper = __wrap_api_call(__LambdaHandlerWrapper, "LambdaHandlerWrapper") +lambda_handler = __wrap_api_call(__lambda_handler, "lambda_handler") +transaction_name = __wrap_api_call(__transaction_name, "transaction_name") +TransactionNameWrapper = __wrap_api_call(__TransactionNameWrapper, "TransactionNameWrapper") +wrap_transaction_name = __wrap_api_call(__wrap_transaction_name, "wrap_transaction_name") +function_trace = __wrap_api_call(__function_trace, "function_trace") +FunctionTrace = __wrap_api_call(__FunctionTrace, "FunctionTrace") +FunctionTraceWrapper = __wrap_api_call(__FunctionTraceWrapper, "FunctionTraceWrapper") +wrap_function_trace = 
__wrap_api_call(__wrap_function_trace, "wrap_function_trace") +generator_trace = __wrap_api_call(__generator_trace, "generator_trace") +GeneratorTraceWrapper = __wrap_api_call(__GeneratorTraceWrapper, "GeneratorTraceWrapper") +wrap_generator_trace = __wrap_api_call(__wrap_generator_trace, "wrap_generator_trace") +profile_trace = __wrap_api_call(__profile_trace, "profile_trace") +ProfileTraceWrapper = __wrap_api_call(__ProfileTraceWrapper, "ProfileTraceWrapper") +wrap_profile_trace = __wrap_api_call(__wrap_profile_trace, "wrap_profile_trace") +database_trace = __wrap_api_call(__database_trace, "database_trace") +DatabaseTrace = __wrap_api_call(__DatabaseTrace, "DatabaseTrace") +DatabaseTraceWrapper = __wrap_api_call(__DatabaseTraceWrapper, "DatabaseTraceWrapper") +wrap_database_trace = __wrap_api_call(__wrap_database_trace, "wrap_database_trace") +register_database_client = __wrap_api_call(__register_database_client, "register_database_client") +datastore_trace = __wrap_api_call(__datastore_trace, "datastore_trace") +DatastoreTrace = __wrap_api_call(__DatastoreTrace, "DatastoreTrace") +DatastoreTraceWrapper = __wrap_api_call(__DatastoreTraceWrapper, "DatastoreTraceWrapper") +wrap_datastore_trace = __wrap_api_call(__wrap_datastore_trace, "wrap_datastore_trace") +external_trace = __wrap_api_call(__external_trace, "external_trace") +ExternalTrace = __wrap_api_call(__ExternalTrace, "ExternalTrace") +ExternalTraceWrapper = __wrap_api_call(__ExternalTraceWrapper, "ExternalTraceWrapper") +wrap_external_trace = __wrap_api_call(__wrap_external_trace, "wrap_external_trace") +error_trace = __wrap_api_call(__error_trace, "error_trace") +ErrorTrace = __wrap_api_call(__ErrorTrace, "ErrorTrace") +ErrorTraceWrapper = __wrap_api_call(__ErrorTraceWrapper, "ErrorTraceWrapper") +wrap_error_trace = __wrap_api_call(__wrap_error_trace, "wrap_error_trace") +message_trace = __wrap_api_call(__message_trace, "message_trace") +MessageTrace = __wrap_api_call(__MessageTrace, "MessageTrace") 
+MessageTraceWrapper = __wrap_api_call(__MessageTraceWrapper, "MessageTraceWrapper") +wrap_message_trace = __wrap_api_call(__wrap_message_trace, "wrap_message_trace") +message_transaction = __wrap_api_call(__message_transaction, "message_trace") +MessageTransaction = __wrap_api_call(__MessageTransaction, "MessageTransaction") +MessageTransactionWrapper = __wrap_api_call(__MessageTransactionWrapper, "MessageTransactionWrapper") +wrap_message_transaction = __wrap_api_call(__wrap_message_transaction, "wrap_message_transaction") +callable_name = __wrap_api_call(__callable_name, "callable_name") +ObjectProxy = __wrap_api_call(__ObjectProxy, "ObjectProxy") +wrap_object = __wrap_api_call(__wrap_object, "wrap_object") +wrap_object_attribute = __wrap_api_call(__wrap_object_attribute, "wrap_object_attribute") +resolve_path = __wrap_api_call(__resolve_path, "resolve_path") +transient_function_wrapper = __wrap_api_call(__transient_function_wrapper, "transient_function_wrapper") +FunctionWrapper = __wrap_api_call(__FunctionWrapper, "FunctionWrapper") +function_wrapper = __wrap_api_call(__function_wrapper, "function_wrapper") +wrap_function_wrapper = __wrap_api_call(__wrap_function_wrapper, "wrap_function_wrapper") +patch_function_wrapper = __wrap_api_call(__patch_function_wrapper, "patch_function_wrapper") +ObjectWrapper = __wrap_api_call(__ObjectWrapper, "ObjectWrapper") +pre_function = __wrap_api_call(__pre_function, "pre_function") +PreFunctionWrapper = __wrap_api_call(__PreFunctionWrapper, "PreFunctionWrapper") +wrap_pre_function = __wrap_api_call(__wrap_pre_function, "wrap_pre_function") +post_function = __wrap_api_call(__post_function, "post_function") +PostFunctionWrapper = __wrap_api_call(__PostFunctionWrapper, "PostFunctionWrapper") +wrap_post_function = __wrap_api_call(__wrap_post_function, "wrap_post_function") +in_function = __wrap_api_call(__in_function, "in_function") +InFunctionWrapper = __wrap_api_call(__InFunctionWrapper, "InFunctionWrapper") +wrap_in_function 
= __wrap_api_call(__wrap_in_function, "wrap_in_function") +out_function = __wrap_api_call(__out_function, "out_function") +OutFunctionWrapper = __wrap_api_call(__OutFunctionWrapper, "OutFunctionWrapper") +wrap_out_function = __wrap_api_call(__wrap_out_function, "wrap_out_function") +insert_html_snippet = __wrap_api_call(__insert_html_snippet, "insert_html_snippet") +verify_body_exists = __wrap_api_call(__verify_body_exists, "verify_body_exists") diff --git a/newrelic/api/transaction.py b/newrelic/api/transaction.py index 00b344ca6..bad9562ff 100644 --- a/newrelic/api/transaction.py +++ b/newrelic/api/transaction.py @@ -29,7 +29,6 @@ import newrelic.core.error_node import newrelic.core.root_node import newrelic.core.transaction_node -import newrelic.packages.six as six from newrelic.api.application import application_instance from newrelic.api.time_trace import TimeTrace, get_linking_metadata from newrelic.common.encoding_utils import ( @@ -71,6 +70,7 @@ TraceCacheNoActiveTraceError, trace_cache, ) +from newrelic.packages import six _logger = logging.getLogger(__name__) @@ -120,7 +120,7 @@ def complete_root(self): self.exited = True @staticmethod - def complete_trace(): + def complete_trace(): # pylint: disable=arguments-differ pass @property @@ -837,7 +837,7 @@ def trace_intrinsics(self): # Add in special CPU time value for UI to display CPU burn. - # XXX Disable cpu time value for CPU burn as was + # TODO: Disable cpu time value for CPU burn as was # previously reporting incorrect value and we need to # fix it, at least on Linux to report just the CPU time # for the executing thread. @@ -1576,7 +1576,7 @@ def _create_error_node(self, settings, fullname, message, expected, custom_param source=source, ) - # TODO Errors are recorded in time order. If + # TODO: Errors are recorded in time order. If # there are two exceptions of same type and # different message, the UI displays the first # one. 
In the PHP agent it was recording the @@ -1648,12 +1648,12 @@ def stop_recording(self): self._cpu_user_time_end = os.times()[0] - def add_custom_parameter(self, name, value): + def add_custom_attribute(self, name, value): if not self._settings: return False if self._settings.high_security: - _logger.debug("Cannot add custom parameter in High Security Mode.") + _logger.debug("Cannot add custom attribute in High Security Mode.") return False if len(self._custom_params) >= MAX_NUM_USER_ATTRIBUTES: @@ -1668,15 +1668,31 @@ def add_custom_parameter(self, name, value): self._custom_params[key] = val return True - def add_custom_parameters(self, items): + def add_custom_attributes(self, items): result = True # items is a list of (name, value) tuples. for name, value in items: - result &= self.add_custom_parameter(name, value) + result &= self.add_custom_attribute(name, value) return result + def add_custom_parameter(self, name, value): + # Deprecation warning + warnings.warn( + ("The add_custom_parameter API has been deprecated. " "Please use the add_custom_attribute API."), + DeprecationWarning, + ) + return self.add_custom_attribute(name, value) + + def add_custom_parameters(self, items): + # Deprecation warning + warnings.warn( + ("The add_custom_parameters API has been deprecated. 
" "Please use the add_custom_attributes API."), + DeprecationWarning, + ) + return self.add_custom_attributes(items) + def add_framework_info(self, name, version=None): if name: self._frameworks.add((name, version)) @@ -1753,22 +1769,40 @@ def capture_request_params(flag=True): transaction.capture_params = flag -def add_custom_parameter(key, value): +def add_custom_attribute(key, value): transaction = current_transaction() if transaction: - return transaction.add_custom_parameter(key, value) + return transaction.add_custom_attribute(key, value) else: return False -def add_custom_parameters(items): +def add_custom_attributes(items): transaction = current_transaction() if transaction: - return transaction.add_custom_parameters(items) + return transaction.add_custom_attributes(items) else: return False +def add_custom_parameter(key, value): + # Deprecation warning + warnings.warn( + ("The add_custom_parameter API has been deprecated. " "Please use the add_custom_attribute API."), + DeprecationWarning, + ) + return add_custom_attribute(key, value) + + +def add_custom_parameters(items): + # Deprecation warning + warnings.warn( + ("The add_custom_parameters API has been deprecated. " "Please use the add_custom_attributes API."), + DeprecationWarning, + ) + return add_custom_attributes(items) + + def add_framework_info(name, version=None): transaction = current_transaction() if transaction: diff --git a/newrelic/core/attribute.py b/newrelic/core/attribute.py index 4c2673939..c5f19e4c0 100644 --- a/newrelic/core/attribute.py +++ b/newrelic/core/attribute.py @@ -13,20 +13,21 @@ # limitations under the License. 
import logging - from collections import namedtuple +from newrelic.core.attribute_filter import ( + DST_ALL, + DST_ERROR_COLLECTOR, + DST_SPAN_EVENTS, + DST_TRANSACTION_EVENTS, + DST_TRANSACTION_SEGMENTS, + DST_TRANSACTION_TRACER, +) from newrelic.packages import six -from newrelic.core.attribute_filter import (DST_ALL, DST_ERROR_COLLECTOR, - DST_TRANSACTION_TRACER, DST_TRANSACTION_EVENTS, DST_SPAN_EVENTS, - DST_TRANSACTION_SEGMENTS) - - _logger = logging.getLogger(__name__) -_Attribute = namedtuple('_Attribute', - ['name', 'value', 'destinations']) +_Attribute = namedtuple("_Attribute", ["name", "value", "destinations"]) # The following destinations are created here, never changed, and only # used in create_agent_attributes. It is placed at the module level here @@ -34,61 +35,59 @@ # All agent attributes go to transaction traces and error traces by default. -_DESTINATIONS = (DST_ERROR_COLLECTOR | - DST_TRANSACTION_TRACER | - DST_TRANSACTION_SEGMENTS) -_DESTINATIONS_WITH_EVENTS = (_DESTINATIONS | - DST_TRANSACTION_EVENTS | - DST_SPAN_EVENTS) +_DESTINATIONS = DST_ERROR_COLLECTOR | DST_TRANSACTION_TRACER | DST_TRANSACTION_SEGMENTS +_DESTINATIONS_WITH_EVENTS = _DESTINATIONS | DST_TRANSACTION_EVENTS | DST_SPAN_EVENTS # The following subset goes to transaction events by default. 
-_TRANSACTION_EVENT_DEFAULT_ATTRIBUTES = set(( - 'host.displayName', - 'request.method', - 'request.headers.contentType', - 'request.headers.contentLength', - 'request.uri', - 'response.status', - 'request.headers.accept', - 'response.headers.contentLength', - 'response.headers.contentType', - 'request.headers.host', - 'request.headers.userAgent', - 'message.queueName', - 'message.routingKey', - 'http.url', - 'http.statusCode', - 'aws.requestId', - 'aws.operation', - 'aws.lambda.arn', - 'aws.lambda.coldStart', - 'aws.lambda.eventSource.arn', +_TRANSACTION_EVENT_DEFAULT_ATTRIBUTES = set( + ( + "host.displayName", + "request.method", + "request.headers.contentType", + "request.headers.contentLength", + "request.uri", + "response.status", + "request.headers.accept", + "response.headers.contentLength", + "response.headers.contentType", + "request.headers.host", + "request.headers.userAgent", + "message.queueName", + "message.routingKey", + "http.url", + "http.statusCode", + "aws.requestId", + "aws.operation", + "aws.lambda.arn", + "aws.lambda.coldStart", + "aws.lambda.eventSource.arn", "db.collection", - 'db.instance', - 'db.operation', - 'db.statement', - 'error.class', - 'error.message', - 'error.expected', - 'peer.hostname', - 'peer.address', - 'graphql.field.name', - 'graphql.field.parentType', - 'graphql.field.path', - 'graphql.field.returnType', - 'graphql.operation.name', - 'graphql.operation.type', - 'graphql.operation.query', + "db.instance", + "db.operation", + "db.statement", + "error.class", + "error.message", + "error.expected", + "peer.hostname", + "peer.address", + "graphql.field.name", + "graphql.field.parentType", + "graphql.field.path", + "graphql.field.returnType", + "graphql.operation.name", + "graphql.operation.type", + "graphql.operation.query", "code.filepath", "code.function", "code.lineno", "code.namespace", -)) + ) +) MAX_NUM_USER_ATTRIBUTES = 128 MAX_ATTRIBUTE_LENGTH = 255 -MAX_64_BIT_INT = 2 ** 63 - 1 +MAX_64_BIT_INT = 2**63 - 1 
MAX_LOG_MESSAGE_LENGTH = 32768 @@ -109,10 +108,8 @@ class CastingFailureException(Exception): class Attribute(_Attribute): - def __repr__(self): - return "Attribute(name=%r, value=%r, destinations=%r)" % ( - self.name, self.value, bin(self.destinations)) + return "Attribute(name=%r, value=%r, destinations=%r)" % (self.name, self.value, bin(self.destinations)) def create_attributes(attr_dict, destinations, attribute_filter): @@ -142,8 +139,7 @@ def create_agent_attributes(attr_dict, attribute_filter): return attributes -def resolve_user_attributes( - attr_dict, attribute_filter, target_destination, attr_class=dict): +def resolve_user_attributes(attr_dict, attribute_filter, target_destination, attr_class=dict): u_attrs = attr_class() for attr_name, attr_value in attr_dict.items(): @@ -158,8 +154,7 @@ def resolve_user_attributes( return u_attrs -def resolve_agent_attributes( - attr_dict, attribute_filter, target_destination, attr_class=dict): +def resolve_agent_attributes(attr_dict, attribute_filter, target_destination, attr_class=dict): a_attrs = attr_class() for attr_name, attr_value in attr_dict.items(): @@ -182,10 +177,9 @@ def create_user_attributes(attr_dict, attribute_filter): return create_attributes(attr_dict, destinations, attribute_filter) -def truncate( - text, maxsize=MAX_ATTRIBUTE_LENGTH, encoding='utf-8', ending=None): +def truncate(text, maxsize=MAX_ATTRIBUTE_LENGTH, encoding="utf-8", ending=None): - # Truncate text so that it's byte representation + # Truncate text so that its byte representation # is no longer than maxsize bytes. # If text is unicode (Python 2 or 3), return unicode. 
@@ -198,21 +192,21 @@ def truncate( ending = ending and ending.encode(encoding) if ending and truncated != text: - truncated = truncated[:-len(ending)] + ending + truncated = truncated[: -len(ending)] + ending return truncated -def _truncate_unicode(u, maxsize, encoding='utf-8'): +def _truncate_unicode(u, maxsize, encoding="utf-8"): encoded = u.encode(encoding)[:maxsize] - return encoded.decode(encoding, 'ignore') + return encoded.decode(encoding, "ignore") def _truncate_bytes(s, maxsize): return s[:maxsize] -def check_name_length(name, max_length=MAX_ATTRIBUTE_LENGTH, encoding='utf-8'): +def check_name_length(name, max_length=MAX_ATTRIBUTE_LENGTH, encoding="utf-8"): trunc_name = truncate(name, max_length, encoding) if name != trunc_name: raise NameTooLongException() @@ -228,8 +222,7 @@ def check_max_int(value, max_int=MAX_64_BIT_INT): raise IntTooLargeException() -def process_user_attribute( - name, value, max_length=MAX_ATTRIBUTE_LENGTH, ending=None): +def process_user_attribute(name, value, max_length=MAX_ATTRIBUTE_LENGTH, ending=None): # Perform all necessary checks on a potential attribute. # @@ -250,23 +243,19 @@ def process_user_attribute( value = sanitize(value) except NameIsNotStringException: - _logger.debug('Attribute name must be a string. Dropping ' - 'attribute: %r=%r', name, value) + _logger.debug("Attribute name must be a string. Dropping " "attribute: %r=%r", name, value) return FAILED_RESULT except NameTooLongException: - _logger.debug('Attribute name exceeds maximum length. Dropping ' - 'attribute: %r=%r', name, value) + _logger.debug("Attribute name exceeds maximum length. Dropping " "attribute: %r=%r", name, value) return FAILED_RESULT except IntTooLargeException: - _logger.debug('Attribute value exceeds maximum integer value. ' - 'Dropping attribute: %r=%r', name, value) + _logger.debug("Attribute value exceeds maximum integer value. 
" "Dropping attribute: %r=%r", name, value) return FAILED_RESULT except CastingFailureException: - _logger.debug('Attribute value cannot be cast to a string. ' - 'Dropping attribute: %r=%r', name, value) + _logger.debug("Attribute value cannot be cast to a string. " "Dropping attribute: %r=%r", name, value) return FAILED_RESULT else: @@ -278,9 +267,12 @@ def process_user_attribute( if isinstance(value, valid_types_text): trunc_value = truncate(value, maxsize=max_length, ending=ending) if value != trunc_value: - _logger.debug('Attribute value exceeds maximum length ' - '(%r bytes). Truncating value: %r=%r.', - max_length, name, trunc_value) + _logger.debug( + "Attribute value exceeds maximum length " "(%r bytes). Truncating value: %r=%r.", + max_length, + name, + trunc_value, + ) value = trunc_value @@ -294,8 +286,7 @@ def sanitize(value): # # Raise CastingFailureException, if str(value) somehow fails. - valid_value_types = (six.text_type, six.binary_type, bool, float, - six.integer_types) + valid_value_types = (six.text_type, six.binary_type, bool, float, six.integer_types) if not isinstance(value, valid_value_types): original = value @@ -305,7 +296,8 @@ def sanitize(value): except Exception: raise CastingFailureException() else: - _logger.debug('Attribute value is of type: %r. Casting %r to ' - 'string: %s', type(original), original, value) + _logger.debug( + "Attribute value is of type: %r. Casting %r to " "string: %s", type(original), original, value + ) return value diff --git a/tests/agent_features/test_asgi_browser.py b/tests/agent_features/test_asgi_browser.py index c2c7ce715..a1c3daeb7 100644 --- a/tests/agent_features/test_asgi_browser.py +++ b/tests/agent_features/test_asgi_browser.py @@ -12,48 +12,55 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import sys import json import pytest import six - -from testing_support.fixtures import (override_application_settings, - validate_transaction_errors, validate_custom_parameters) +from bs4 import BeautifulSoup from testing_support.asgi_testing import AsgiTest +from testing_support.fixtures import ( + override_application_settings, + validate_custom_parameters, + validate_transaction_errors, +) from newrelic.api.application import application_settings -from newrelic.api.transaction import (get_browser_timing_header, - get_browser_timing_footer, add_custom_parameter, - disable_browser_autorum) from newrelic.api.asgi_application import asgi_application +from newrelic.api.transaction import ( + add_custom_attribute, + disable_browser_autorum, + get_browser_timing_footer, + get_browser_timing_header, +) from newrelic.common.encoding_utils import deobfuscate -from bs4 import BeautifulSoup +_runtime_error_name = RuntimeError.__module__ + ":" + RuntimeError.__name__ -_runtime_error_name = (RuntimeError.__module__ + ':' + RuntimeError.__name__) @asgi_application() async def target_asgi_application_manual_rum(scope, receive, send): - text = '%s

RESPONSE

%s' + text = "%s

RESPONSE

%s" - output = (text % (get_browser_timing_header(), - get_browser_timing_footer())).encode('UTF-8') + output = (text % (get_browser_timing_header(), get_browser_timing_footer())).encode("UTF-8") - response_headers = [(b'content-type', b'text/html; charset=utf-8'), - (b'content-length', str(len(output)).encode('utf-8'))] + response_headers = [ + (b"content-type", b"text/html; charset=utf-8"), + (b"content-length", str(len(output)).encode("utf-8")), + ] await send({"type": "http.response.start", "status": 200, "headers": response_headers}) await send({"type": "http.response.body", "body": output}) + target_application_manual_rum = AsgiTest(target_asgi_application_manual_rum) _test_footer_attributes = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': False, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": False, + "js_agent_loader": "", } + @override_application_settings(_test_footer_attributes) def test_footer_attributes(): settings = application_settings() @@ -67,589 +74,632 @@ def test_footer_attributes(): assert settings.beacon assert settings.error_beacon - token = '0123456789ABCDEF' - headers = { 'Cookie': 'NRAGENT=tk=%s' % token } + token = "0123456789ABCDEF" # nosec + headers = {"Cookie": "NRAGENT=tk=%s" % token} - response = target_application_manual_rum.get('/', headers=headers) + response = target_application_manual_rum.get("/", headers=headers) - html = BeautifulSoup(response.body, 'html.parser') + html = BeautifulSoup(response.body, "html.parser") header = html.html.head.script.string content = html.html.body.p.string footer = html.html.body.script.string # Validate actual body content. - assert content == 'RESPONSE' + assert content == "RESPONSE" # Validate the insertion of RUM header. - assert header.find('NREUM HEADER') != -1 + assert header.find("NREUM HEADER") != -1 # Now validate the various fields of the footer. The fields are # held by a JSON dictionary. 
- data = json.loads(footer.split('NREUM.info=')[1]) + data = json.loads(footer.split("NREUM.info=")[1]) - assert data['licenseKey'] == settings.browser_key - assert data['applicationID'] == settings.application_id + assert data["licenseKey"] == settings.browser_key + assert data["applicationID"] == settings.application_id - assert data['agent'] == settings.js_agent_file - assert data['beacon'] == settings.beacon - assert data['errorBeacon'] == settings.error_beacon + assert data["agent"] == settings.js_agent_file + assert data["beacon"] == settings.beacon + assert data["errorBeacon"] == settings.error_beacon - assert data['applicationTime'] >= 0 - assert data['queueTime'] >= 0 + assert data["applicationTime"] >= 0 + assert data["queueTime"] >= 0 obfuscation_key = settings.license_key[:13] - assert type(data['transactionName']) == type(u'') + type_transaction_data = unicode if six.PY2 else str # noqa: F821 + assert isinstance(data["transactionName"], type_transaction_data) - txn_name = deobfuscate(data['transactionName'], obfuscation_key) + txn_name = deobfuscate(data["transactionName"], obfuscation_key) - assert txn_name == u'WebTransaction/Uri/' + assert txn_name == "WebTransaction/Uri/" + + assert "atts" not in data - assert 'atts' not in data _test_rum_ssl_for_http_is_none = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': False, - 'browser_monitoring.ssl_for_http': None, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": False, + "browser_monitoring.ssl_for_http": None, + "js_agent_loader": "", } + @override_application_settings(_test_rum_ssl_for_http_is_none) def test_ssl_for_http_is_none(): settings = application_settings() assert settings.browser_monitoring.ssl_for_http is None - response = target_application_manual_rum.get('/') - html = BeautifulSoup(response.body, 'html.parser') + response = target_application_manual_rum.get("/") + html = BeautifulSoup(response.body, 
"html.parser") footer = html.html.body.script.string - data = json.loads(footer.split('NREUM.info=')[1]) + data = json.loads(footer.split("NREUM.info=")[1]) + + assert "sslForHttp" not in data - assert 'sslForHttp' not in data _test_rum_ssl_for_http_is_true = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': False, - 'browser_monitoring.ssl_for_http': True, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": False, + "browser_monitoring.ssl_for_http": True, + "js_agent_loader": "", } + @override_application_settings(_test_rum_ssl_for_http_is_true) def test_ssl_for_http_is_true(): settings = application_settings() assert settings.browser_monitoring.ssl_for_http is True - response = target_application_manual_rum.get('/') - html = BeautifulSoup(response.body, 'html.parser') + response = target_application_manual_rum.get("/") + html = BeautifulSoup(response.body, "html.parser") footer = html.html.body.script.string - data = json.loads(footer.split('NREUM.info=')[1]) + data = json.loads(footer.split("NREUM.info=")[1]) + + assert data["sslForHttp"] is True - assert data['sslForHttp'] is True _test_rum_ssl_for_http_is_false = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': False, - 'browser_monitoring.ssl_for_http': False, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": False, + "browser_monitoring.ssl_for_http": False, + "js_agent_loader": "", } + @override_application_settings(_test_rum_ssl_for_http_is_false) def test_ssl_for_http_is_false(): settings = application_settings() assert settings.browser_monitoring.ssl_for_http is False - response = target_application_manual_rum.get('/') - html = BeautifulSoup(response.body, 'html.parser') + response = target_application_manual_rum.get("/") + html = BeautifulSoup(response.body, "html.parser") footer = html.html.body.script.string - data = 
json.loads(footer.split('NREUM.info=')[1]) + data = json.loads(footer.split("NREUM.info=")[1]) + + assert data["sslForHttp"] is False - assert data['sslForHttp'] is False @asgi_application() async def target_asgi_application_yield_single_no_head(scope, receive, send): - output = b'

RESPONSE

' + output = b"

RESPONSE

" - response_headers = [(b'content-type', b'text/html; charset=utf-8'), - (b'content-length', str(len(output)).encode('utf-8'))] + response_headers = [ + (b"content-type", b"text/html; charset=utf-8"), + (b"content-length", str(len(output)).encode("utf-8")), + ] await send({"type": "http.response.start", "status": 200, "headers": response_headers}) await send({"type": "http.response.body", "body": output}) -target_application_yield_single_no_head = AsgiTest( - target_asgi_application_yield_single_no_head) + +target_application_yield_single_no_head = AsgiTest(target_asgi_application_yield_single_no_head) _test_html_insertion_yield_single_no_head_settings = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': True, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": True, + "js_agent_loader": "", } + @override_application_settings(_test_html_insertion_yield_single_no_head_settings) def test_html_insertion_yield_single_no_head(): - response = target_application_yield_single_no_head.get('/') + response = target_application_yield_single_no_head.get("/") assert response.status == 200 - assert 'content-type' in response.headers - assert 'content-length' in response.headers + assert "content-type" in response.headers + assert "content-length" in response.headers # The 'NREUM HEADER' value comes from our override for the header. # The 'NREUM.info' value comes from the programmatically generated # footer added by the agent. - assert b'NREUM HEADER' in response.body - assert b'NREUM.info' in response.body + assert b"NREUM HEADER" in response.body + assert b"NREUM.info" in response.body + @asgi_application() async def target_asgi_application_yield_multi_no_head(scope, receive, send): - output = [ b'', b'

RESPONSE

' ] + output = [b"", b"

RESPONSE

"] - response_headers = [(b'content-type', b'text/html; charset=utf-8'), - (b'content-length', str(len(b''.join(output))).encode('utf-8'))] + response_headers = [ + (b"content-type", b"text/html; charset=utf-8"), + (b"content-length", str(len(b"".join(output))).encode("utf-8")), + ] await send({"type": "http.response.start", "status": 200, "headers": response_headers}) for data in output: more_body = data is not output[-1] await send({"type": "http.response.body", "body": data, "more_body": more_body}) -target_application_yield_multi_no_head = AsgiTest( - target_asgi_application_yield_multi_no_head) + +target_application_yield_multi_no_head = AsgiTest(target_asgi_application_yield_multi_no_head) _test_html_insertion_yield_multi_no_head_settings = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': True, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": True, + "js_agent_loader": "", } + @override_application_settings(_test_html_insertion_yield_multi_no_head_settings) def test_html_insertion_yield_multi_no_head(): - response = target_application_yield_multi_no_head.get('/') + response = target_application_yield_multi_no_head.get("/") assert response.status == 200 - assert 'content-type' in response.headers - assert 'content-length' in response.headers + assert "content-type" in response.headers + assert "content-length" in response.headers # The 'NREUM HEADER' value comes from our override for the header. # The 'NREUM.info' value comes from the programmatically generated # footer added by the agent. - assert b'NREUM HEADER' in response.body - assert b'NREUM.info' in response.body + assert b"NREUM HEADER" in response.body + assert b"NREUM.info" in response.body + @asgi_application() async def target_asgi_application_unnamed_attachment_header(scope, receive, send): - output = b'

RESPONSE

' + output = b"

RESPONSE

" - response_headers = [(b'content-type', b'text/html; charset=utf-8'), - (b'content-length', str(len(output)).encode('utf-8')), - (b'content-disposition', b'attachment')] + response_headers = [ + (b"content-type", b"text/html; charset=utf-8"), + (b"content-length", str(len(output)).encode("utf-8")), + (b"content-disposition", b"attachment"), + ] await send({"type": "http.response.start", "status": 200, "headers": response_headers}) await send({"type": "http.response.body", "body": output}) -target_application_unnamed_attachment_header = AsgiTest( - target_asgi_application_unnamed_attachment_header) + +target_application_unnamed_attachment_header = AsgiTest(target_asgi_application_unnamed_attachment_header) _test_html_insertion_unnamed_attachment_header_settings = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': True, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": True, + "js_agent_loader": "", } -@override_application_settings( - _test_html_insertion_unnamed_attachment_header_settings) + +@override_application_settings(_test_html_insertion_unnamed_attachment_header_settings) def test_html_insertion_unnamed_attachment_header(): - response = target_application_unnamed_attachment_header.get('/') + response = target_application_unnamed_attachment_header.get("/") assert response.status == 200 - assert 'content-type' in response.headers - assert 'content-length' in response.headers - assert 'content-disposition' in response.headers + assert "content-type" in response.headers + assert "content-length" in response.headers + assert "content-disposition" in response.headers # The 'NREUM HEADER' value comes from our override for the header. # The 'NREUM.info' value comes from the programmatically generated # footer added by the agent. 
- assert b'NREUM HEADER' not in response.body - assert b'NREUM.info' not in response.body + assert b"NREUM HEADER" not in response.body + assert b"NREUM.info" not in response.body + @asgi_application() async def target_asgi_application_named_attachment_header(scope, receive, send): - output = b'

RESPONSE

' + output = b"

RESPONSE

" - response_headers = [(b'content-type', b'text/html; charset=utf-8'), - (b'content-length', str(len(output)).encode('utf-8')), - (b'content-disposition', b'Attachment; filename="X"')] + response_headers = [ + (b"content-type", b"text/html; charset=utf-8"), + (b"content-length", str(len(output)).encode("utf-8")), + (b"content-disposition", b'Attachment; filename="X"'), + ] await send({"type": "http.response.start", "status": 200, "headers": response_headers}) await send({"type": "http.response.body", "body": output}) -target_application_named_attachment_header = AsgiTest( - target_asgi_application_named_attachment_header) + +target_application_named_attachment_header = AsgiTest(target_asgi_application_named_attachment_header) _test_html_insertion_named_attachment_header_settings = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': True, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": True, + "js_agent_loader": "", } -@override_application_settings( - _test_html_insertion_named_attachment_header_settings) + +@override_application_settings(_test_html_insertion_named_attachment_header_settings) def test_html_insertion_named_attachment_header(): - response = target_application_named_attachment_header.get('/') + response = target_application_named_attachment_header.get("/") assert response.status == 200 - assert 'content-type' in response.headers - assert 'content-length' in response.headers - assert 'content-disposition' in response.headers + assert "content-type" in response.headers + assert "content-length" in response.headers + assert "content-disposition" in response.headers # The 'NREUM HEADER' value comes from our override for the header. # The 'NREUM.info' value comes from the programmatically generated # footer added by the agent. 
- assert b'NREUM HEADER' not in response.body - assert b'NREUM.info' not in response.body + assert b"NREUM HEADER" not in response.body + assert b"NREUM.info" not in response.body + @asgi_application() async def target_asgi_application_inline_attachment_header(scope, receive, send): - output = b'

RESPONSE

' + output = b"

RESPONSE

" - response_headers = [(b'content-type', b'text/html; charset=utf-8'), - (b'content-length', str(len(output)).encode('utf-8')), - (b'content-disposition', b'inline; filename="attachment"')] + response_headers = [ + (b"content-type", b"text/html; charset=utf-8"), + (b"content-length", str(len(output)).encode("utf-8")), + (b"content-disposition", b'inline; filename="attachment"'), + ] await send({"type": "http.response.start", "status": 200, "headers": response_headers}) await send({"type": "http.response.body", "body": output}) -target_application_inline_attachment_header = AsgiTest( - target_asgi_application_inline_attachment_header) + +target_application_inline_attachment_header = AsgiTest(target_asgi_application_inline_attachment_header) _test_html_insertion_inline_attachment_header_settings = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': True, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": True, + "js_agent_loader": "", } -@override_application_settings( - _test_html_insertion_inline_attachment_header_settings) + +@override_application_settings(_test_html_insertion_inline_attachment_header_settings) def test_html_insertion_inline_attachment_header(): - response = target_application_inline_attachment_header.get('/') + response = target_application_inline_attachment_header.get("/") assert response.status == 200 - assert 'content-type' in response.headers - assert 'content-length' in response.headers - assert 'content-disposition' in response.headers + assert "content-type" in response.headers + assert "content-length" in response.headers + assert "content-disposition" in response.headers # The 'NREUM HEADER' value comes from our override for the header. # The 'NREUM.info' value comes from the programmatically generated # footer added by the agent. 
- assert b'NREUM HEADER' in response.body - assert b'NREUM.info' in response.body + assert b"NREUM HEADER" in response.body + assert b"NREUM.info" in response.body + @asgi_application() async def target_asgi_application_empty(scope, receive, send): - status = '200 OK' + status = "200 OK" - response_headers = [(b'content-type', b'text/html; charset=utf-8'), - (b'content-length', b'0')] + response_headers = [(b"content-type", b"text/html; charset=utf-8"), (b"content-length", b"0")] await send({"type": "http.response.start", "status": 200, "headers": response_headers}) await send({"type": "http.response.body"}) -target_application_empty = AsgiTest( - target_asgi_application_empty) + +target_application_empty = AsgiTest(target_asgi_application_empty) _test_html_insertion_empty_settings = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': True, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": True, + "js_agent_loader": "", } -@override_application_settings( - _test_html_insertion_empty_settings) + +@override_application_settings(_test_html_insertion_empty_settings) def test_html_insertion_empty(): - response = target_application_empty.get('/') + response = target_application_empty.get("/") assert response.status == 200 - assert 'content-type' in response.headers - assert 'content-length' in response.headers + assert "content-type" in response.headers + assert "content-length" in response.headers # The 'NREUM HEADER' value comes from our override for the header. # The 'NREUM.info' value comes from the programmatically generated # footer added by the agent. 
- assert b'NREUM HEADER' not in response.body - assert b'NREUM.info' not in response.body + assert b"NREUM HEADER" not in response.body + assert b"NREUM.info" not in response.body assert len(response.body) == 0 + @asgi_application() async def target_asgi_application_single_empty_string(scope, receive, send): - response_headers = [(b'content-type', b'text/html; charset=utf-8'), - (b'content-length', b'0')] + response_headers = [(b"content-type", b"text/html; charset=utf-8"), (b"content-length", b"0")] await send({"type": "http.response.start", "status": 200, "headers": response_headers}) await send({"type": "http.response.body", "body": b""}) -target_application_single_empty_string = AsgiTest( - target_asgi_application_single_empty_string) + +target_application_single_empty_string = AsgiTest(target_asgi_application_single_empty_string) _test_html_insertion_single_empty_string_settings = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': True, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": True, + "js_agent_loader": "", } -@override_application_settings( - _test_html_insertion_single_empty_string_settings) + +@override_application_settings(_test_html_insertion_single_empty_string_settings) def test_html_insertion_single_empty_string(): - response = target_application_single_empty_string.get('/') + response = target_application_single_empty_string.get("/") assert response.status == 200 - assert 'content-type' in response.headers - assert 'content-length' in response.headers + assert "content-type" in response.headers + assert "content-length" in response.headers # The 'NREUM HEADER' value comes from our override for the header. # The 'NREUM.info' value comes from the programmatically generated # footer added by the agent. 
- assert b'NREUM HEADER' not in response.body - assert b'NREUM.info' not in response.body + assert b"NREUM HEADER" not in response.body + assert b"NREUM.info" not in response.body assert len(response.body) == 0 + @asgi_application() async def target_asgi_application_multiple_empty_string(scope, receive, send): - response_headers = [(b'content-type', b'text/html; charset=utf-8'), - (b'content-length', b'0')] + response_headers = [(b"content-type", b"text/html; charset=utf-8"), (b"content-length", b"0")] await send({"type": "http.response.start", "status": 200, "headers": response_headers}) await send({"type": "http.response.body", "body": b"", "more_body": True}) await send({"type": "http.response.body", "body": b""}) -target_application_multiple_empty_string = AsgiTest( - target_asgi_application_multiple_empty_string) + +target_application_multiple_empty_string = AsgiTest(target_asgi_application_multiple_empty_string) _test_html_insertion_multiple_empty_string_settings = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': True, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": True, + "js_agent_loader": "", } -@override_application_settings( - _test_html_insertion_multiple_empty_string_settings) + +@override_application_settings(_test_html_insertion_multiple_empty_string_settings) def test_html_insertion_multiple_empty_string(): - response = target_application_multiple_empty_string.get('/') + response = target_application_multiple_empty_string.get("/") assert response.status == 200 - assert 'content-type' in response.headers - assert 'content-length' in response.headers + assert "content-type" in response.headers + assert "content-length" in response.headers # The 'NREUM HEADER' value comes from our override for the header. # The 'NREUM.info' value comes from the programmatically generated # footer added by the agent. 
- assert b'NREUM HEADER' not in response.body - assert b'NREUM.info' not in response.body + assert b"NREUM HEADER" not in response.body + assert b"NREUM.info" not in response.body assert len(response.body) == 0 + @asgi_application() async def target_asgi_application_single_large_prelude(scope, receive, send): - output = 64*1024*b' ' + b'' + output = 64 * 1024 * b" " + b"" - response_headers = [(b'content-type', b'text/html; charset=utf-8'), - (b'content-length', str(len(output)).encode("utf-8"))] + response_headers = [ + (b"content-type", b"text/html; charset=utf-8"), + (b"content-length", str(len(output)).encode("utf-8")), + ] await send({"type": "http.response.start", "status": 200, "headers": response_headers}) await send({"type": "http.response.body", "body": output}) -target_application_single_large_prelude = AsgiTest( - target_asgi_application_single_large_prelude) + +target_application_single_large_prelude = AsgiTest(target_asgi_application_single_large_prelude) _test_html_insertion_single_large_prelude_settings = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': True, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": True, + "js_agent_loader": "", } -@override_application_settings( - _test_html_insertion_single_large_prelude_settings) + +@override_application_settings(_test_html_insertion_single_large_prelude_settings) def test_html_insertion_single_large_prelude(): - response = target_application_single_large_prelude.get('/') + response = target_application_single_large_prelude.get("/") assert response.status == 200 # The 'NREUM HEADER' value comes from our override for the header. # The 'NREUM.info' value comes from the programmatically generated # footer added by the agent. 
- assert 'content-type' in response.headers - assert 'content-length' in response.headers + assert "content-type" in response.headers + assert "content-length" in response.headers + + assert b"NREUM HEADER" not in response.body + assert b"NREUM.info" not in response.body - assert b'NREUM HEADER' not in response.body - assert b'NREUM.info' not in response.body + output = [32 * 1024 * b" ", 32 * 1024 * b" ", b""] - output = [32*1024*b' ', 32*1024*b' ', b''] + assert len(response.body) == len(b"".join(output)) - assert len(response.body) == len(b''.join(output)) @asgi_application() async def target_asgi_application_multi_large_prelude(scope, receive, send): - output = [32*1024*b' ', 32*1024*b' ', b''] + output = [32 * 1024 * b" ", 32 * 1024 * b" ", b""] - response_headers = [(b'content-type', b'text/html; charset=utf-8'), - (b'content-length', str(len(b''.join(output))).encode("utf-8"))] + response_headers = [ + (b"content-type", b"text/html; charset=utf-8"), + (b"content-length", str(len(b"".join(output))).encode("utf-8")), + ] await send({"type": "http.response.start", "status": 200, "headers": response_headers}) for data in output: more_body = data is not output[-1] await send({"type": "http.response.body", "body": data, "more_body": more_body}) -target_application_multi_large_prelude = AsgiTest( - target_asgi_application_multi_large_prelude) + +target_application_multi_large_prelude = AsgiTest(target_asgi_application_multi_large_prelude) _test_html_insertion_multi_large_prelude_settings = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': True, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": True, + "js_agent_loader": "", } -@override_application_settings( - _test_html_insertion_multi_large_prelude_settings) + +@override_application_settings(_test_html_insertion_multi_large_prelude_settings) def test_html_insertion_multi_large_prelude(): - response = 
target_application_multi_large_prelude.get('/') + response = target_application_multi_large_prelude.get("/") assert response.status == 200 # The 'NREUM HEADER' value comes from our override for the header. # The 'NREUM.info' value comes from the programmatically generated # footer added by the agent. - assert 'content-type' in response.headers - assert 'content-length' in response.headers + assert "content-type" in response.headers + assert "content-length" in response.headers + + assert b"NREUM HEADER" not in response.body + assert b"NREUM.info" not in response.body - assert b'NREUM HEADER' not in response.body - assert b'NREUM.info' not in response.body + output = [32 * 1024 * b" ", 32 * 1024 * b" ", b""] - output = [32*1024*b' ', 32*1024*b' ', b''] + assert len(response.body) == len(b"".join(output)) - assert len(response.body) == len(b''.join(output)) @asgi_application() async def target_asgi_application_yield_before_start(scope, receive, send): # This is not legal but we should see what happens with our middleware await send({"type": "http.response.body", "body": b"", "more_body": True}) - output = b'

RESPONSE

' + output = b"

RESPONSE

" - response_headers = [(b'content-type', b'text/html; charset=utf-8'), - (b'content-length', str(len(output)).encode("utf-8"))] + response_headers = [ + (b"content-type", b"text/html; charset=utf-8"), + (b"content-length", str(len(output)).encode("utf-8")), + ] await send({"type": "http.response.start", "status": 200, "headers": response_headers}) await send({"type": "http.response.body", "body": output}) -target_application_yield_before_start = AsgiTest( - target_asgi_application_yield_before_start) + +target_application_yield_before_start = AsgiTest(target_asgi_application_yield_before_start) _test_html_insertion_yield_before_start_settings = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': True, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": True, + "js_agent_loader": "", } + @override_application_settings(_test_html_insertion_yield_before_start_settings) def test_html_insertion_yield_before_start(): # The application should complete as pass through, but an assertion error # would be raised in the AsgiTest class with pytest.raises(AssertionError): - target_application_yield_before_start.get('/') + target_application_yield_before_start.get("/") + @asgi_application() async def target_asgi_application_start_yield_start(scope, receive, send): - output = b'

RESPONSE

' + output = b"

RESPONSE

" - response_headers = [(b'content-type', b'text/html; charset=utf-8'), - (b'content-length', str(len(output)).encode("utf-8"))] + response_headers = [ + (b"content-type", b"text/html; charset=utf-8"), + (b"content-length", str(len(output)).encode("utf-8")), + ] await send({"type": "http.response.start", "status": 200, "headers": response_headers}) await send({"type": "http.response.body", "body": b""}) await send({"type": "http.response.start", "status": 200, "headers": response_headers}) -target_application_start_yield_start = AsgiTest( - target_asgi_application_start_yield_start) + +target_application_start_yield_start = AsgiTest(target_asgi_application_start_yield_start) _test_html_insertion_start_yield_start_settings = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': True, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": True, + "js_agent_loader": "", } + @override_application_settings(_test_html_insertion_start_yield_start_settings) def test_html_insertion_start_yield_start(): # The application should complete as pass through, but an assertion error # would be raised in the AsgiTest class with pytest.raises(AssertionError): - target_application_start_yield_start.get('/') + target_application_start_yield_start.get("/") + @asgi_application() async def target_asgi_application_invalid_content_length(scope, receive, send): - output = b'

RESPONSE

' + output = b"

RESPONSE

" - response_headers = [(b'content-type', b'text/html; charset=utf-8'), - (b'content-length', b'XXX')] + response_headers = [(b"content-type", b"text/html; charset=utf-8"), (b"content-length", b"XXX")] await send({"type": "http.response.start", "status": 200, "headers": response_headers}) await send({"type": "http.response.body", "body": output}) -target_application_invalid_content_length = AsgiTest( - target_asgi_application_invalid_content_length) + +target_application_invalid_content_length = AsgiTest(target_asgi_application_invalid_content_length) _test_html_insertion_invalid_content_length_settings = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': True, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": True, + "js_agent_loader": "", } + @override_application_settings(_test_html_insertion_invalid_content_length_settings) def test_html_insertion_invalid_content_length(): - response = target_application_invalid_content_length.get('/') + response = target_application_invalid_content_length.get("/") assert response.status == 200 - assert 'content-type' in response.headers - assert 'content-length' in response.headers + assert "content-type" in response.headers + assert "content-length" in response.headers - assert response.headers['content-length'] == 'XXX' + assert response.headers["content-length"] == "XXX" + + assert b"NREUM HEADER" not in response.body + assert b"NREUM.info" not in response.body - assert b'NREUM HEADER' not in response.body - assert b'NREUM.info' not in response.body @asgi_application() async def target_asgi_application_content_encoding(scope, receive, send): - output = b'

RESPONSE

' + output = b"

RESPONSE

" - response_headers = [(b'content-type', b'text/html; charset=utf-8'), - (b'content-length', str(len(output)).encode("utf-8")), - (b'content-encoding', b'identity')] + response_headers = [ + (b"content-type", b"text/html; charset=utf-8"), + (b"content-length", str(len(output)).encode("utf-8")), + (b"content-encoding", b"identity"), + ] await send({"type": "http.response.start", "status": 200, "headers": response_headers}) await send({"type": "http.response.body", "body": output}) -target_application_content_encoding = AsgiTest( - target_asgi_application_content_encoding) + +target_application_content_encoding = AsgiTest(target_asgi_application_content_encoding) _test_html_insertion_content_encoding_settings = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': True, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": True, + "js_agent_loader": "", } + @override_application_settings(_test_html_insertion_content_encoding_settings) def test_html_insertion_content_encoding(): - response = target_application_content_encoding.get('/') + response = target_application_content_encoding.get("/") assert response.status == 200 # Technically 'identity' should not be used in Content-Encoding @@ -657,181 +707,190 @@ def test_html_insertion_content_encoding(): # RUM for this test. Other option is to compress the response # and use 'gzip'. 
- assert 'content-type' in response.headers - assert 'content-length' in response.headers + assert "content-type" in response.headers + assert "content-length" in response.headers + + assert response.headers["content-encoding"] == "identity" - assert response.headers['content-encoding'] == 'identity' + assert b"NREUM HEADER" not in response.body + assert b"NREUM.info" not in response.body - assert b'NREUM HEADER' not in response.body - assert b'NREUM.info' not in response.body @asgi_application() async def target_asgi_application_no_content_type(scope, receive, send): - output = b'

RESPONSE

' + output = b"

RESPONSE

" - response_headers = [(b'content-length', str(len(output)).encode("utf-8"))] + response_headers = [(b"content-length", str(len(output)).encode("utf-8"))] await send({"type": "http.response.start", "status": 200, "headers": response_headers}) await send({"type": "http.response.body", "body": output}) -target_application_no_content_type = AsgiTest( - target_asgi_application_no_content_type) + +target_application_no_content_type = AsgiTest(target_asgi_application_no_content_type) _test_html_insertion_no_content_type_settings = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': True, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": True, + "js_agent_loader": "", } + @override_application_settings(_test_html_insertion_no_content_type_settings) def test_html_insertion_no_content_type(): - response = target_application_no_content_type.get('/') + response = target_application_no_content_type.get("/") assert response.status == 200 - assert 'content-type' not in response.headers - assert 'content-length' in response.headers + assert "content-type" not in response.headers + assert "content-length" in response.headers + + assert b"NREUM HEADER" not in response.body + assert b"NREUM.info" not in response.body - assert b'NREUM HEADER' not in response.body - assert b'NREUM.info' not in response.body @asgi_application() async def target_asgi_application_plain_text(scope, receive, send): - output = b'RESPONSE' + output = b"RESPONSE" - response_headers = [ - (b'content-type', b'text/plain'), - (b'content-length', str(len(output)).encode("utf-8"))] + response_headers = [(b"content-type", b"text/plain"), (b"content-length", str(len(output)).encode("utf-8"))] await send({"type": "http.response.start", "status": 200, "headers": response_headers}) await send({"type": "http.response.body", "body": output}) -target_application_plain_text = AsgiTest( - target_asgi_application_plain_text) + 
+target_application_plain_text = AsgiTest(target_asgi_application_plain_text) _test_html_insertion_plain_text_settings = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': True, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": True, + "js_agent_loader": "", } + @override_application_settings(_test_html_insertion_plain_text_settings) def test_html_insertion_plain_text(): - response = target_application_plain_text.get('/') + response = target_application_plain_text.get("/") assert response.status == 200 - assert 'content-type' in response.headers - assert 'content-length' in response.headers + assert "content-type" in response.headers + assert "content-length" in response.headers - assert b'NREUM HEADER' not in response.body - assert b'NREUM.info' not in response.body + assert b"NREUM HEADER" not in response.body + assert b"NREUM.info" not in response.body @asgi_application() async def target_asgi_application_param(scope, receive, send): - output = b'

RESPONSE

' + output = b"

RESPONSE

" response_headers = [ - (b'content-type', b'text/html; charset=utf-8'), - (b'content-length', str(len(output)).encode("utf-8"))] + (b"content-type", b"text/html; charset=utf-8"), + (b"content-length", str(len(output)).encode("utf-8")), + ] - add_custom_parameter('key', 'value') + add_custom_attribute("key", "value") await send({"type": "http.response.start", "status": 200, "headers": response_headers}) await send({"type": "http.response.body", "body": output}) -target_application_param = AsgiTest( - target_asgi_application_param) +target_application_param = AsgiTest(target_asgi_application_param) _test_html_insertion_param_settings = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': True, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": True, + "js_agent_loader": "", } @override_application_settings(_test_html_insertion_param_settings) -@validate_custom_parameters(required_params=[('key', 'value')]) +@validate_custom_parameters(required_params=[("key", "value")]) def test_html_insertion_param(): - response = target_application_param.get('/') + response = target_application_param.get("/") assert response.status == 200 - assert b'NREUM HEADER' in response.body - assert b'NREUM.info' in response.body + assert b"NREUM HEADER" in response.body + assert b"NREUM.info" in response.body + @asgi_application() async def target_asgi_application_param_on_error(scope, receive, send): - output = b'

RESPONSE

' + output = b"

RESPONSE

" response_headers = [ - (b'content-type', b'text/html; charset=utf-8'), - (b'content-length', str(len(output)).encode("utf-8"))] + (b"content-type", b"text/html; charset=utf-8"), + (b"content-length", str(len(output)).encode("utf-8")), + ] await send({"type": "http.response.start", "status": 200, "headers": response_headers}) try: - raise RuntimeError('ERROR') + raise RuntimeError("ERROR") finally: - add_custom_parameter('key', 'value') + add_custom_attribute("key", "value") -target_application_param_on_error = AsgiTest( - target_asgi_application_param_on_error) + +target_application_param_on_error = AsgiTest(target_asgi_application_param_on_error) _test_html_insertion_param_on_error_settings = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': True, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": True, + "js_agent_loader": "", } + @override_application_settings(_test_html_insertion_param_on_error_settings) @validate_transaction_errors(errors=[_runtime_error_name]) -@validate_custom_parameters(required_params=[('key', 'value')]) +@validate_custom_parameters(required_params=[("key", "value")]) def test_html_insertion_param_on_error(): try: - target_application_param_on_error.get('/') + target_application_param_on_error.get("/") except RuntimeError: pass + @asgi_application() async def target_asgi_application_disable_autorum_via_api(scope, receive, send): - output = b'

RESPONSE

' + output = b"

RESPONSE

" disable_browser_autorum() - response_headers = [(b'content-type', b'text/html; charset=utf-8'), - (b'content-length', str(len(output)).encode("utf-8"))] + response_headers = [ + (b"content-type", b"text/html; charset=utf-8"), + (b"content-length", str(len(output)).encode("utf-8")), + ] await send({"type": "http.response.start", "status": 200, "headers": response_headers}) await send({"type": "http.response.body", "body": output}) -target_application_disable_autorum_via_api = AsgiTest( - target_asgi_application_disable_autorum_via_api) + +target_application_disable_autorum_via_api = AsgiTest(target_asgi_application_disable_autorum_via_api) _test_html_insertion_disable_autorum_via_api_settings = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': True, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": True, + "js_agent_loader": "", } -@override_application_settings( - _test_html_insertion_disable_autorum_via_api_settings) + +@override_application_settings(_test_html_insertion_disable_autorum_via_api_settings) def test_html_insertion_disable_autorum_via_api(): - response = target_application_disable_autorum_via_api.get('/') + response = target_application_disable_autorum_via_api.get("/") assert response.status == 200 - assert 'content-type' in response.headers - assert 'content-length' in response.headers + assert "content-type" in response.headers + assert "content-length" in response.headers # The 'NREUM HEADER' value comes from our override for the header. # The 'NREUM.info' value comes from the programmatically generated # footer added by the agent. - assert b'NREUM HEADER' not in response.body - assert b'NREUM.info' not in response.body + assert b"NREUM HEADER" not in response.body + assert b"NREUM.info" not in response.body + @asgi_application() async def target_asgi_application_manual_rum_insertion(scope, receive, send): - output = b'

RESPONSE

' + output = b"

RESPONSE

" header = get_browser_timing_header() footer = get_browser_timing_footer() @@ -839,36 +898,38 @@ async def target_asgi_application_manual_rum_insertion(scope, receive, send): header = get_browser_timing_header() footer = get_browser_timing_footer() - assert header == '' - assert footer == '' + assert header == "" + assert footer == "" - response_headers = [(b'content-type', b'text/html; charset=utf-8'), - (b'content-length', str(len(output)).encode("utf-8"))] + response_headers = [ + (b"content-type", b"text/html; charset=utf-8"), + (b"content-length", str(len(output)).encode("utf-8")), + ] await send({"type": "http.response.start", "status": 200, "headers": response_headers}) await send({"type": "http.response.body", "body": output}) -target_application_manual_rum_insertion = AsgiTest( - target_asgi_application_manual_rum_insertion) + +target_application_manual_rum_insertion = AsgiTest(target_asgi_application_manual_rum_insertion) _test_html_insertion_manual_rum_insertion_settings = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': True, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": True, + "js_agent_loader": "", } -@override_application_settings( - _test_html_insertion_manual_rum_insertion_settings) + +@override_application_settings(_test_html_insertion_manual_rum_insertion_settings) def test_html_insertion_manual_rum_insertion(): - response = target_application_manual_rum_insertion.get('/') + response = target_application_manual_rum_insertion.get("/") assert response.status == 200 - assert 'content-type' in response.headers - assert 'content-length' in response.headers + assert "content-type" in response.headers + assert "content-length" in response.headers # The 'NREUM HEADER' value comes from our override for the header. # The 'NREUM.info' value comes from the programmatically generated # footer added by the agent. 
- assert b'NREUM HEADER' not in response.body - assert b'NREUM.info' not in response.body + assert b"NREUM HEADER" not in response.body + assert b"NREUM.info" not in response.body diff --git a/tests/agent_features/test_attribute.py b/tests/agent_features/test_attribute.py index edfffae2f..ab6f778dd 100644 --- a/tests/agent_features/test_attribute.py +++ b/tests/agent_features/test_attribute.py @@ -13,24 +13,31 @@ # limitations under the License. import sys + import pytest import webtest +from testing_support.fixtures import ( + override_application_settings, + validate_agent_attribute_types, + validate_attributes, + validate_attributes_complete, + validate_custom_parameters, +) +from testing_support.sample_applications import fully_featured_app from newrelic.api.background_task import background_task -from newrelic.api.transaction import (add_custom_parameter, - add_custom_parameters) +from newrelic.api.transaction import add_custom_attribute, add_custom_attributes from newrelic.api.wsgi_application import wsgi_application -from newrelic.core.attribute import (truncate, sanitize, Attribute, - CastingFailureException, MAX_64_BIT_INT, _DESTINATIONS_WITH_EVENTS) - +from newrelic.core.attribute import ( + _DESTINATIONS_WITH_EVENTS, + MAX_64_BIT_INT, + Attribute, + CastingFailureException, + sanitize, + truncate, +) from newrelic.packages import six -from testing_support.fixtures import (override_application_settings, - validate_attributes, validate_attributes_complete, - validate_custom_parameters, validate_agent_attribute_types) -from testing_support.sample_applications import fully_featured_app - - # Python 3 lacks longs if sys.version_info >= (3, 0): @@ -43,333 +50,328 @@ @wsgi_application() def target_wsgi_application(environ, start_response): - status = '200 OK' - output = b'Hello World!' + status = "200 OK" + output = b"Hello World!" 
- path = environ.get('PATH_INFO') - if path == '/user_attribute': - add_custom_parameter('test_key', 'test_value') + path = environ.get("PATH_INFO") + if path == "/user_attribute": + add_custom_attribute("test_key", "test_value") - response_headers = [('Content-Type', 'text/plain; charset=utf-8'), - ('Content-Length', str(len(output)))] + response_headers = [("Content-Type", "text/plain; charset=utf-8"), ("Content-Length", str(len(output)))] start_response(status, response_headers) return [output] -_required_intrinsics = ['trip_id', 'totalTime'] +_required_intrinsics = ["trip_id", "totalTime"] _forgone_intrinsics = [] -@validate_attributes('intrinsic', _required_intrinsics, _forgone_intrinsics) +@validate_attributes("intrinsic", _required_intrinsics, _forgone_intrinsics) def test_intrinsics(): target_application = webtest.TestApp(target_wsgi_application) - response = target_application.get('/') - assert response.body == b'Hello World!' - - -_required_agent = ['request.method', 'wsgi.output.seconds', 'response.status', - 'request.headers.host', 'request.headers.accept', 'request.uri', - 'response.headers.contentType', 'response.headers.contentLength'] + response = target_application.get("/") + assert response.body == b"Hello World!" + + +_required_agent = [ + "request.method", + "wsgi.output.seconds", + "response.status", + "request.headers.host", + "request.headers.accept", + "request.uri", + "response.headers.contentType", + "response.headers.contentLength", +] if ThreadUtilization: - _required_agent.append('thread.concurrency') + _required_agent.append("thread.concurrency") _forgone_agent = [] -@validate_attributes('agent', _required_agent, _forgone_agent) +@validate_attributes("agent", _required_agent, _forgone_agent) def test_agent(): target_application = webtest.TestApp(target_wsgi_application) - response = target_application.get('/', - extra_environ={'HTTP_ACCEPT': '*/*'}) - assert response.body == b'Hello World!' 
+ response = target_application.get("/", extra_environ={"HTTP_ACCEPT": "*/*"}) + assert response.body == b"Hello World!" _required_user = [] -_forgone_user = ['test_key'] +_forgone_user = ["test_key"] -@validate_attributes('user', _required_user, _forgone_user) +@validate_attributes("user", _required_user, _forgone_user) def test_user_default(): target_application = webtest.TestApp(target_wsgi_application) - response = target_application.get('/') - assert response.body == b'Hello World!' + response = target_application.get("/") + assert response.body == b"Hello World!" -_required_user = ['test_key'] +_required_user = ["test_key"] _forgone_user = [] -@validate_attributes('user', _required_user, _forgone_user) +@validate_attributes("user", _required_user, _forgone_user) def test_user_add_attribute(): target_application = webtest.TestApp(target_wsgi_application) - response = target_application.get('/user_attribute') - assert response.body == b'Hello World!' + response = target_application.get("/user_attribute") + assert response.body == b"Hello World!" -_settings_legacy_false = {'capture_params': False} +_settings_legacy_false = {"capture_params": False} _required_request_legacy_false = [] -_forgone_request_legacy_false = ['request.parameters.foo'] +_forgone_request_legacy_false = ["request.parameters.foo"] @override_application_settings(_settings_legacy_false) -@validate_attributes('agent', _required_request_legacy_false, - _forgone_request_legacy_false) +@validate_attributes("agent", _required_request_legacy_false, _forgone_request_legacy_false) def test_capture_request_params_legacy_false(): target_application = webtest.TestApp(target_wsgi_application) - response = target_application.get('/?foo=bar') - assert response.body == b'Hello World!' + response = target_application.get("/?foo=bar") + assert response.body == b"Hello World!" 
-_settings_legacy_true = {'capture_params': True} -_required_request_legacy_true = ['request.parameters.foo'] +_settings_legacy_true = {"capture_params": True} +_required_request_legacy_true = ["request.parameters.foo"] _forgone_request_legacy_true = [] @override_application_settings(_settings_legacy_true) -@validate_attributes('agent', _required_request_legacy_true, - _forgone_request_legacy_true) +@validate_attributes("agent", _required_request_legacy_true, _forgone_request_legacy_true) def test_capture_request_params_legacy_true(): target_application = webtest.TestApp(target_wsgi_application) - response = target_application.get('/?foo=bar') - assert response.body == b'Hello World!' + response = target_application.get("/?foo=bar") + assert response.body == b"Hello World!" -_required_request_default = ['request.parameters.foo'] +_required_request_default = ["request.parameters.foo"] _forgone_request_default = [] -@validate_attributes('agent', _required_request_default, - _forgone_request_default) +@validate_attributes("agent", _required_request_default, _forgone_request_default) def test_capture_request_params_default(): target_application = webtest.TestApp(target_wsgi_application) - response = target_application.get('/?foo=bar') - assert response.body == b'Hello World!' + response = target_application.get("/?foo=bar") + assert response.body == b"Hello World!" _required_display_host_default = [] -_forgone_display_host_default = ['host.displayName'] +_forgone_display_host_default = ["host.displayName"] -@validate_attributes('agent', _required_display_host_default, - _forgone_display_host_default) +@validate_attributes("agent", _required_display_host_default, _forgone_display_host_default) def test_display_host_default(): target_application = webtest.TestApp(target_wsgi_application) - response = target_application.get('/') - assert response.body == b'Hello World!' + response = target_application.get("/") + assert response.body == b"Hello World!" 
-_settings_display_host_custom = {'process_host.display_name': 'CUSTOM NAME'} +_settings_display_host_custom = {"process_host.display_name": "CUSTOM NAME"} -_display_name_attribute = Attribute(name='host.displayName', - value='CUSTOM NAME', destinations=_DESTINATIONS_WITH_EVENTS) +_display_name_attribute = Attribute( + name="host.displayName", value="CUSTOM NAME", destinations=_DESTINATIONS_WITH_EVENTS +) _required_display_host_custom = [_display_name_attribute] _forgone_display_host_custom = [] @override_application_settings(_settings_display_host_custom) -@validate_attributes_complete('agent', _required_display_host_custom, - _forgone_display_host_custom) +@validate_attributes_complete("agent", _required_display_host_custom, _forgone_display_host_custom) def test_display_host_custom(): target_application = webtest.TestApp(target_wsgi_application) - response = target_application.get('/') - assert response.body == b'Hello World!' + response = target_application.get("/") + assert response.body == b"Hello World!" 
# Tests for truncate() + def test_truncate_string(): - s = 'blahblah' + s = "blahblah" result = truncate(s, maxsize=4) assert isinstance(result, six.string_types) - assert result == 'blah' + assert result == "blah" def test_truncate_bytes(): - b = b'foobar' + b = b"foobar" result = truncate(b, maxsize=3) assert isinstance(result, six.binary_type) - assert result == b'foo' + assert result == b"foo" def test_truncate_unicode_snowman(): # '\u2603' is 'SNOWMAN' - u = u'snow\u2603' - assert u.encode('utf-8') == b'snow\xe2\x98\x83' + # decode("unicode-escape") is used to get Py2 unicode + u = "snow\u2603".decode("unicode-escape") if six.PY2 else "snow\u2603" + assert u.encode("utf-8") == b"snow\xe2\x98\x83" result = truncate(u, maxsize=5) assert isinstance(result, six.text_type) - assert result == u'snow' + assert result == "snow" def test_truncate_combining_characters(): # '\u0308' is 'COMBINING DIAERESIS' (AKA 'umlaut') - u = u'Zoe\u0308' - assert u.encode('utf-8') == b'Zoe\xcc\x88' + # decode("unicode-escape") is used to get Py2 unicode + u = "Zoe\u0308".decode("unicode-escape") if six.PY2 else "Zoe\u0308" + assert u.encode("utf-8") == b"Zoe\xcc\x88" # truncate will chop off 'COMBINING DIAERESIS', which leaves # 'LATIN SMALL LETTER E' by itself. 
result = truncate(u, maxsize=3) assert isinstance(result, six.text_type) - assert result == u'Zoe' + assert result == "Zoe" def test_truncate_empty_string(): - s = '' + s = "" result = truncate(s, maxsize=4) assert isinstance(result, six.string_types) - assert result == '' + assert result == "" def test_truncate_empty_bytes(): - b = b'' + b = b"" result = truncate(b, maxsize=3) assert isinstance(result, six.binary_type) - assert result == b'' + assert result == b"" def test_truncate_empty_unicode(): - u = u'' + # decode("unicode-escape") is used to get Py2 unicode + u = "".decode("unicode-escape") if six.PY2 else "" result = truncate(u, maxsize=5) assert isinstance(result, six.text_type) - assert result == u'' + assert result == "" # Tests for limits on user attributes -TOO_LONG = '*' * 256 -TRUNCATED = '*' * 255 +TOO_LONG = "*" * 256 +TRUNCATED = "*" * 255 -_required_custom_params = [('key', 'value')] +_required_custom_params = [("key", "value")] _forgone_custom_params = [] @validate_custom_parameters(_required_custom_params, _forgone_custom_params) @background_task() def test_custom_param_ok(): - result = add_custom_parameter('key', 'value') + result = add_custom_attribute("key", "value") assert result @validate_custom_parameters(_required_custom_params, _forgone_custom_params) @background_task() def test_custom_params_ok(): - result = add_custom_parameters([('key', 'value')]) + result = add_custom_attributes([("key", "value")]) assert result _required_custom_params_long_key = [] -_forgone_custom_params_long_key = [(TOO_LONG, 'value')] +_forgone_custom_params_long_key = [(TOO_LONG, "value")] -@validate_custom_parameters(_required_custom_params_long_key, - _forgone_custom_params_long_key) +@validate_custom_parameters(_required_custom_params_long_key, _forgone_custom_params_long_key) @background_task() def test_custom_param_key_too_long(): - result = add_custom_parameter(TOO_LONG, 'value') + result = add_custom_attribute(TOO_LONG, "value") assert not result 
-@validate_custom_parameters(_required_custom_params_long_key, - _forgone_custom_params_long_key) +@validate_custom_parameters(_required_custom_params_long_key, _forgone_custom_params_long_key) @background_task() def test_custom_params_key_too_long(): - result = add_custom_parameters([(TOO_LONG, 'value')]) + result = add_custom_attributes([(TOO_LONG, "value")]) assert not result -_required_custom_params_long_value = [('key', TRUNCATED)] +_required_custom_params_long_value = [("key", TRUNCATED)] _forgone_custom_params_long_value = [] -@validate_custom_parameters(_required_custom_params_long_value, - _forgone_custom_params_long_value) +@validate_custom_parameters(_required_custom_params_long_value, _forgone_custom_params_long_value) @background_task() def test_custom_param_value_too_long(): - result = add_custom_parameter('key', TOO_LONG) + result = add_custom_attribute("key", TOO_LONG) assert result -@validate_custom_parameters(_required_custom_params_long_value, - _forgone_custom_params_long_value) +@validate_custom_parameters(_required_custom_params_long_value, _forgone_custom_params_long_value) @background_task() def test_custom_params_value_too_long(): - result = add_custom_parameters([('key', TOO_LONG)]) + result = add_custom_attributes([("key", TOO_LONG)]) assert result -_required_custom_params_too_many = [('key-127', 'value')] -_forgone_custom_params_too_many = [('key-128', 'value')] +_required_custom_params_too_many = [("key-127", "value")] +_forgone_custom_params_too_many = [("key-128", "value")] -@validate_custom_parameters(_required_custom_params_too_many, - _forgone_custom_params_too_many) +@validate_custom_parameters(_required_custom_params_too_many, _forgone_custom_params_too_many) @background_task() def test_custom_param_too_many(): for i in range(129): - result = add_custom_parameter('key-%02d' % i, 'value') + result = add_custom_attribute("key-%02d" % i, "value") if i < 128: assert result else: - assert not result # Last one fails + assert not 
result # Last one fails -@validate_custom_parameters(_required_custom_params_too_many, - _forgone_custom_params_too_many) +@validate_custom_parameters(_required_custom_params_too_many, _forgone_custom_params_too_many) @background_task() def test_custom_params_too_many(): - item_list = [('key-%02d' % i, 'value') for i in range(129)] - result = add_custom_parameters(item_list) + item_list = [("key-%02d" % i, "value") for i in range(129)] + result = add_custom_attributes(item_list) assert not result _required_custom_params_name_not_string = [] -_forgone_custom_params_name_not_string = [(1, 'value')] +_forgone_custom_params_name_not_string = [(1, "value")] -@validate_custom_parameters(_required_custom_params_name_not_string, - _forgone_custom_params_name_not_string) +@validate_custom_parameters(_required_custom_params_name_not_string, _forgone_custom_params_name_not_string) @background_task() def test_custom_param_name_not_string(): - result = add_custom_parameter(1, 'value') + result = add_custom_attribute(1, "value") assert not result -@validate_custom_parameters(_required_custom_params_name_not_string, - _forgone_custom_params_name_not_string) +@validate_custom_parameters(_required_custom_params_name_not_string, _forgone_custom_params_name_not_string) @background_task() def test_custom_params_name_not_string(): - result = add_custom_parameters([(1, 'value')]) + result = add_custom_attributes([(1, "value")]) assert not result TOO_BIG = MAX_64_BIT_INT + 1 _required_custom_params_int_too_big = [] -_forgone_custom_params_int_too_big = [('key', TOO_BIG)] +_forgone_custom_params_int_too_big = [("key", TOO_BIG)] -@validate_custom_parameters(_required_custom_params_int_too_big, - _forgone_custom_params_int_too_big) +@validate_custom_parameters(_required_custom_params_int_too_big, _forgone_custom_params_int_too_big) @background_task() def test_custom_param_int_too_big(): - result = add_custom_parameter('key', TOO_BIG) + result = add_custom_attribute("key", TOO_BIG) assert 
not result -@validate_custom_parameters(_required_custom_params_int_too_big, - _forgone_custom_params_int_too_big) +@validate_custom_parameters(_required_custom_params_int_too_big, _forgone_custom_params_int_too_big) @background_task() def test_custom_params_int_too_big(): - result = add_custom_parameters([('key', TOO_BIG)]) + result = add_custom_attributes([("key", TOO_BIG)]) assert not result -OK_KEY = '*' * (255 - len('request.parameters.')) -OK_REQUEST_PARAM = 'request.parameters.' + OK_KEY -TOO_LONG_KEY = '*' * (256 - len('request.parameters.')) -TOO_LONG_REQUEST_PARAM = 'request.parameters.' + TOO_LONG_KEY +OK_KEY = "*" * (255 - len("request.parameters.")) +OK_REQUEST_PARAM = "request.parameters." + OK_KEY +TOO_LONG_KEY = "*" * (256 - len("request.parameters.")) +TOO_LONG_REQUEST_PARAM = "request.parameters." + TOO_LONG_KEY assert len(OK_REQUEST_PARAM) == 255 assert len(TOO_LONG_REQUEST_PARAM) == 256 @@ -378,36 +380,33 @@ def test_custom_params_int_too_big(): _forgone_request_key_ok = [] -@validate_attributes('agent', _required_request_key_ok, - _forgone_request_key_ok) +@validate_attributes("agent", _required_request_key_ok, _forgone_request_key_ok) def test_capture_request_params_key_ok(): target_application = webtest.TestApp(target_wsgi_application) - response = target_application.get('/?%s=bar' % OK_KEY) - assert response.body == b'Hello World!' + response = target_application.get("/?%s=bar" % OK_KEY) + assert response.body == b"Hello World!" _required_request_key_too_long = [] _forgone_request_key_too_long = [TOO_LONG_REQUEST_PARAM] -@validate_attributes('agent', _required_request_key_too_long, - _forgone_request_key_too_long) +@validate_attributes("agent", _required_request_key_too_long, _forgone_request_key_too_long) def test_capture_request_params_key_too_long(): target_application = webtest.TestApp(target_wsgi_application) - response = target_application.get('/?%s=bar' % TOO_LONG_KEY) - assert response.body == b'Hello World!' 
+ response = target_application.get("/?%s=bar" % TOO_LONG_KEY) + assert response.body == b"Hello World!" -_required_request_value_too_long = ['request.parameters.foo'] +_required_request_value_too_long = ["request.parameters.foo"] _forgone_request_value_too_long = [] -@validate_attributes('agent', _required_request_value_too_long, - _forgone_request_value_too_long) +@validate_attributes("agent", _required_request_value_too_long, _forgone_request_value_too_long) def test_capture_request_params_value_too_long(): target_application = webtest.TestApp(target_wsgi_application) - response = target_application.get('/?foo=%s' % TOO_LONG) - assert response.body == b'Hello World!' + response = target_application.get("/?foo=%s" % TOO_LONG) + assert response.body == b"Hello World!" # Test attribute types are according to Agent-Attributes spec. @@ -416,41 +415,48 @@ def test_capture_request_params_value_too_long(): # Types are only defined in the spec for agent attributes, not intrinsics. -agent_attributes = {'request.headers.accept': six.string_types, - 'request.headers.contentLength': int, - 'request.headers.contentType': six.string_types, - 'request.headers.host': six.string_types, - 'request.headers.referer': six.string_types, - 'request.headers.userAgent': six.string_types, - 'request.method': six.string_types, - 'request.parameters.test': six.string_types, - 'response.headers.contentLength': int, - 'response.headers.contentType': six.string_types, - 'response.status': six.string_types} +agent_attributes = { + "request.headers.accept": six.string_types, + "request.headers.contentLength": int, + "request.headers.contentType": six.string_types, + "request.headers.host": six.string_types, + "request.headers.referer": six.string_types, + "request.headers.userAgent": six.string_types, + "request.method": six.string_types, + "request.parameters.test": six.string_types, + "response.headers.contentLength": int, + "response.headers.contentType": six.string_types, + 
"response.status": six.string_types, +} @validate_agent_attribute_types(agent_attributes) def test_agent_attribute_types(): - test_environ = {'CONTENT_TYPE': 'HTML', 'CONTENT_LENGTH': '100', - 'HTTP_USER_AGENT': 'Firefox', 'HTTP_REFERER': 'somewhere', - 'HTTP_ACCEPT': 'everything'} - fully_featured_application.get('/?test=val', extra_environ=test_environ) + test_environ = { + "CONTENT_TYPE": "HTML", + "CONTENT_LENGTH": "100", + "HTTP_USER_AGENT": "Firefox", + "HTTP_REFERER": "somewhere", + "HTTP_ACCEPT": "everything", + } + fully_featured_application.get("/?test=val", extra_environ=test_environ) # Test sanitize() + def test_sanitize_string(): - s = 'foo' + s = "foo" assert sanitize(s) == s def test_sanitize_bytes(): - b = b'bytes' + b = b"bytes" assert sanitize(b) == b def test_sanitize_unicode(): - u = u'SMILING FACE: \u263a' + u = "SMILING FACE: \u263a" assert sanitize(u) == u @@ -467,22 +473,22 @@ def test_sanitize_int(): def test_sanitize_long(): - l = long(123456) - assert sanitize(l) == l + long_int = long(123456) + assert sanitize(long_int) == long_int def test_sanitize_dict(): - d = {1: 'foo'} + d = {1: "foo"} assert sanitize(d) == "{1: 'foo'}" def test_sanitize_list(): - l = [1, 2, 3, 4] - assert sanitize(l) == '[1, 2, 3, 4]' + list_var = [1, 2, 3, 4] + assert sanitize(list_var) == "[1, 2, 3, 4]" def test_sanitize_tuple(): - t = ('one', 'two', 'three') + t = ("one", "two", "three") assert sanitize(t) == "('one', 'two', 'three')" diff --git a/tests/agent_features/test_attributes_in_action.py b/tests/agent_features/test_attributes_in_action.py index 31c5d625d..f5ee9b229 100644 --- a/tests/agent_features/test_attributes_in_action.py +++ b/tests/agent_features/test_attributes_in_action.py @@ -33,7 +33,7 @@ from newrelic.api.application import application_instance as application from newrelic.api.message_transaction import message_transaction from newrelic.api.time_trace import notice_error -from newrelic.api.transaction import add_custom_parameter +from 
newrelic.api.transaction import add_custom_attribute from newrelic.api.wsgi_application import wsgi_application from newrelic.common.object_names import callable_name @@ -132,8 +132,8 @@ def normal_wsgi_application(environ, start_response): output = "header

RESPONSE

" output = output.encode("UTF-8") - add_custom_parameter(USER_ATTRS[0], "test_value") - add_custom_parameter(USER_ATTRS[1], "test_value") + add_custom_attribute(USER_ATTRS[0], "test_value") + add_custom_attribute(USER_ATTRS[1], "test_value") response_headers = [("Content-Type", "text/html; charset=utf-8"), ("Content-Length", str(len(output)))] start_response(status, response_headers) diff --git a/tests/agent_features/test_browser.py b/tests/agent_features/test_browser.py index 5f8492016..b5ca867d5 100644 --- a/tests/agent_features/test_browser.py +++ b/tests/agent_features/test_browser.py @@ -12,47 +12,53 @@ # See the License for the specific language governing permissions and # limitations under the License. -import sys -import webtest import json +import sys import six - -from testing_support.fixtures import (override_application_settings, - validate_transaction_errors, validate_custom_parameters) +import webtest +from testing_support.fixtures import ( + override_application_settings, + validate_custom_parameters, + validate_transaction_errors, +) from newrelic.api.application import application_settings -from newrelic.api.transaction import (get_browser_timing_header, - get_browser_timing_footer, add_custom_parameter, - disable_browser_autorum) +from newrelic.api.transaction import ( + add_custom_attribute, + disable_browser_autorum, + get_browser_timing_footer, + get_browser_timing_header, +) from newrelic.api.wsgi_application import wsgi_application from newrelic.common.encoding_utils import deobfuscate -_runtime_error_name = (RuntimeError.__module__ + ':' + RuntimeError.__name__) +_runtime_error_name = RuntimeError.__module__ + ":" + RuntimeError.__name__ + @wsgi_application() def target_wsgi_application_manual_rum(environ, start_response): - status = '200 OK' + status = "200 OK" - text = '%s

RESPONSE

%s' + text = "%s

RESPONSE

%s" - output = (text % (get_browser_timing_header(), - get_browser_timing_footer())).encode('UTF-8') + output = (text % (get_browser_timing_header(), get_browser_timing_footer())).encode("UTF-8") - response_headers = [('Content-Type', 'text/html; charset=utf-8'), - ('Content-Length', str(len(output)))] + response_headers = [("Content-Type", "text/html; charset=utf-8"), ("Content-Length", str(len(output)))] start_response(status, response_headers) return [output] + target_application_manual_rum = webtest.TestApp(target_wsgi_application_manual_rum) _test_footer_attributes = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': False, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": False, + "js_agent_loader": "", } + @override_application_settings(_test_footer_attributes) def test_footer_attributes(): settings = application_settings() @@ -66,10 +72,10 @@ def test_footer_attributes(): assert settings.beacon assert settings.error_beacon - token = '0123456789ABCDEF' - headers = { 'Cookie': 'NRAGENT=tk=%s' % token } + token = "0123456789ABCDEF" # nosec + headers = {"Cookie": "NRAGENT=tk=%s" % token} - response = target_application_manual_rum.get('/', headers=headers) + response = target_application_manual_rum.get("/", headers=headers) header = response.html.html.head.script.string content = response.html.html.body.p.string @@ -77,702 +83,731 @@ def test_footer_attributes(): # Validate actual body content. - assert content == 'RESPONSE' + assert content == "RESPONSE" # Validate the insertion of RUM header. - assert header.find('NREUM HEADER') != -1 + assert header.find("NREUM HEADER") != -1 # Now validate the various fields of the footer. The fields are # held by a JSON dictionary. 
- data = json.loads(footer.split('NREUM.info=')[1]) + data = json.loads(footer.split("NREUM.info=")[1]) - assert data['licenseKey'] == settings.browser_key - assert data['applicationID'] == settings.application_id + assert data["licenseKey"] == settings.browser_key + assert data["applicationID"] == settings.application_id - assert data['agent'] == settings.js_agent_file - assert data['beacon'] == settings.beacon - assert data['errorBeacon'] == settings.error_beacon + assert data["agent"] == settings.js_agent_file + assert data["beacon"] == settings.beacon + assert data["errorBeacon"] == settings.error_beacon - assert data['applicationTime'] >= 0 - assert data['queueTime'] >= 0 + assert data["applicationTime"] >= 0 + assert data["queueTime"] >= 0 obfuscation_key = settings.license_key[:13] - assert type(data['transactionName']) == type(u'') + type_transaction_data = unicode if six.PY2 else str # noqa: F821 + assert isinstance(data["transactionName"], type_transaction_data) + + txn_name = deobfuscate(data["transactionName"], obfuscation_key) - txn_name = deobfuscate(data['transactionName'], obfuscation_key) + assert txn_name == "WebTransaction/Uri/" - assert txn_name == u'WebTransaction/Uri/' + assert "atts" not in data - assert 'atts' not in data _test_rum_ssl_for_http_is_none = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': False, - 'browser_monitoring.ssl_for_http': None, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": False, + "browser_monitoring.ssl_for_http": None, + "js_agent_loader": "", } + @override_application_settings(_test_rum_ssl_for_http_is_none) def test_ssl_for_http_is_none(): settings = application_settings() assert settings.browser_monitoring.ssl_for_http is None - response = target_application_manual_rum.get('/') + response = target_application_manual_rum.get("/") footer = response.html.html.body.script.string - data = 
json.loads(footer.split('NREUM.info=')[1]) + data = json.loads(footer.split("NREUM.info=")[1]) + + assert "sslForHttp" not in data - assert 'sslForHttp' not in data _test_rum_ssl_for_http_is_true = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': False, - 'browser_monitoring.ssl_for_http': True, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": False, + "browser_monitoring.ssl_for_http": True, + "js_agent_loader": "", } + @override_application_settings(_test_rum_ssl_for_http_is_true) def test_ssl_for_http_is_true(): settings = application_settings() assert settings.browser_monitoring.ssl_for_http is True - response = target_application_manual_rum.get('/') + response = target_application_manual_rum.get("/") footer = response.html.html.body.script.string - data = json.loads(footer.split('NREUM.info=')[1]) + data = json.loads(footer.split("NREUM.info=")[1]) + + assert data["sslForHttp"] is True - assert data['sslForHttp'] is True _test_rum_ssl_for_http_is_false = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': False, - 'browser_monitoring.ssl_for_http': False, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": False, + "browser_monitoring.ssl_for_http": False, + "js_agent_loader": "", } + @override_application_settings(_test_rum_ssl_for_http_is_false) def test_ssl_for_http_is_false(): settings = application_settings() assert settings.browser_monitoring.ssl_for_http is False - response = target_application_manual_rum.get('/') + response = target_application_manual_rum.get("/") footer = response.html.html.body.script.string - data = json.loads(footer.split('NREUM.info=')[1]) + data = json.loads(footer.split("NREUM.info=")[1]) + + assert data["sslForHttp"] is False - assert data['sslForHttp'] is False @wsgi_application() def target_wsgi_application_yield_single_no_head(environ, start_response): - status 
= '200 OK' + status = "200 OK" - output = b'

RESPONSE

' + output = b"

RESPONSE

" - response_headers = [('Content-Type', 'text/html; charset=utf-8'), - ('Content-Length', str(len(output)))] + response_headers = [("Content-Type", "text/html; charset=utf-8"), ("Content-Length", str(len(output)))] start_response(status, response_headers) yield output -target_application_yield_single_no_head = webtest.TestApp( - target_wsgi_application_yield_single_no_head) + +target_application_yield_single_no_head = webtest.TestApp(target_wsgi_application_yield_single_no_head) _test_html_insertion_yield_single_no_head_settings = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': True, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": True, + "js_agent_loader": "", } + @override_application_settings(_test_html_insertion_yield_single_no_head_settings) def test_html_insertion_yield_single_no_head(): - response = target_application_yield_single_no_head.get('/', status=200) + response = target_application_yield_single_no_head.get("/", status=200) - assert 'Content-Type' in response.headers - assert 'Content-Length' in response.headers + assert "Content-Type" in response.headers + assert "Content-Length" in response.headers # The 'NREUM HEADER' value comes from our override for the header. # The 'NREUM.info' value comes from the programmatically generated # footer added by the agent. - response.mustcontain('NREUM HEADER', 'NREUM.info') + response.mustcontain("NREUM HEADER", "NREUM.info") + @wsgi_application() def target_wsgi_application_yield_multi_no_head(environ, start_response): - status = '200 OK' + status = "200 OK" - output = [ b'', b'

RESPONSE

' ] + output = [b"", b"

RESPONSE

"] - response_headers = [('Content-Type', 'text/html; charset=utf-8'), - ('Content-Length', str(len(b''.join(output))))] + response_headers = [("Content-Type", "text/html; charset=utf-8"), ("Content-Length", str(len(b"".join(output))))] start_response(status, response_headers) for data in output: yield data -target_application_yield_multi_no_head = webtest.TestApp( - target_wsgi_application_yield_multi_no_head) + +target_application_yield_multi_no_head = webtest.TestApp(target_wsgi_application_yield_multi_no_head) _test_html_insertion_yield_multi_no_head_settings = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': True, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": True, + "js_agent_loader": "", } + @override_application_settings(_test_html_insertion_yield_multi_no_head_settings) def test_html_insertion_yield_multi_no_head(): - response = target_application_yield_multi_no_head.get('/', status=200) + response = target_application_yield_multi_no_head.get("/", status=200) - assert 'Content-Type' in response.headers - assert 'Content-Length' in response.headers + assert "Content-Type" in response.headers + assert "Content-Length" in response.headers # The 'NREUM HEADER' value comes from our override for the header. # The 'NREUM.info' value comes from the programmatically generated # footer added by the agent. - response.mustcontain('NREUM HEADER', 'NREUM.info') + response.mustcontain("NREUM HEADER", "NREUM.info") + @wsgi_application() def target_wsgi_application_unnamed_attachment_header(environ, start_response): - status = '200 OK' + status = "200 OK" - output = b'

RESPONSE

' + output = b"

RESPONSE

" - response_headers = [('Content-Type', 'text/html; charset=utf-8'), - ('Content-Length', str(len(output))), - ('Content-Disposition', 'attachment')] + response_headers = [ + ("Content-Type", "text/html; charset=utf-8"), + ("Content-Length", str(len(output))), + ("Content-Disposition", "attachment"), + ] start_response(status, response_headers) yield output -target_application_unnamed_attachment_header = webtest.TestApp( - target_wsgi_application_unnamed_attachment_header) + +target_application_unnamed_attachment_header = webtest.TestApp(target_wsgi_application_unnamed_attachment_header) _test_html_insertion_unnamed_attachment_header_settings = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': True, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": True, + "js_agent_loader": "", } -@override_application_settings( - _test_html_insertion_unnamed_attachment_header_settings) + +@override_application_settings(_test_html_insertion_unnamed_attachment_header_settings) def test_html_insertion_unnamed_attachment_header(): - response = target_application_unnamed_attachment_header.get('/', status=200) + response = target_application_unnamed_attachment_header.get("/", status=200) - assert 'Content-Type' in response.headers - assert 'Content-Length' in response.headers - assert 'Content-Disposition' in response.headers + assert "Content-Type" in response.headers + assert "Content-Length" in response.headers + assert "Content-Disposition" in response.headers # The 'NREUM HEADER' value comes from our override for the header. # The 'NREUM.info' value comes from the programmatically generated # footer added by the agent. - response.mustcontain(no=['NREUM HEADER', 'NREUM.info']) + response.mustcontain(no=["NREUM HEADER", "NREUM.info"]) + @wsgi_application() def target_wsgi_application_named_attachment_header(environ, start_response): - status = '200 OK' + status = "200 OK" - output = b'

RESPONSE

' + output = b"

RESPONSE

" - response_headers = [('Content-Type', 'text/html; charset=utf-8'), - ('Content-Length', str(len(output))), - ('Content-Disposition', 'Attachment; filename="X"')] + response_headers = [ + ("Content-Type", "text/html; charset=utf-8"), + ("Content-Length", str(len(output))), + ("Content-Disposition", 'Attachment; filename="X"'), + ] start_response(status, response_headers) yield output -target_application_named_attachment_header = webtest.TestApp( - target_wsgi_application_named_attachment_header) + +target_application_named_attachment_header = webtest.TestApp(target_wsgi_application_named_attachment_header) _test_html_insertion_named_attachment_header_settings = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': True, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": True, + "js_agent_loader": "", } -@override_application_settings( - _test_html_insertion_named_attachment_header_settings) + +@override_application_settings(_test_html_insertion_named_attachment_header_settings) def test_html_insertion_named_attachment_header(): - response = target_application_named_attachment_header.get('/', status=200) + response = target_application_named_attachment_header.get("/", status=200) - assert 'Content-Type' in response.headers - assert 'Content-Length' in response.headers - assert 'Content-Disposition' in response.headers + assert "Content-Type" in response.headers + assert "Content-Length" in response.headers + assert "Content-Disposition" in response.headers # The 'NREUM HEADER' value comes from our override for the header. # The 'NREUM.info' value comes from the programmatically generated # footer added by the agent. - response.mustcontain(no=['NREUM HEADER', 'NREUM.info']) + response.mustcontain(no=["NREUM HEADER", "NREUM.info"]) + @wsgi_application() def target_wsgi_application_inline_attachment_header(environ, start_response): - status = '200 OK' + status = "200 OK" - output = b'

RESPONSE

' + output = b"

RESPONSE

" - response_headers = [('Content-Type', 'text/html; charset=utf-8'), - ('Content-Length', str(len(output))), - ('Content-Disposition', 'inline; filename="attachment"')] + response_headers = [ + ("Content-Type", "text/html; charset=utf-8"), + ("Content-Length", str(len(output))), + ("Content-Disposition", 'inline; filename="attachment"'), + ] start_response(status, response_headers) yield output -target_application_inline_attachment_header = webtest.TestApp( - target_wsgi_application_inline_attachment_header) + +target_application_inline_attachment_header = webtest.TestApp(target_wsgi_application_inline_attachment_header) _test_html_insertion_inline_attachment_header_settings = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': True, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": True, + "js_agent_loader": "", } -@override_application_settings( - _test_html_insertion_inline_attachment_header_settings) + +@override_application_settings(_test_html_insertion_inline_attachment_header_settings) def test_html_insertion_inline_attachment_header(): - response = target_application_inline_attachment_header.get('/', status=200) + response = target_application_inline_attachment_header.get("/", status=200) - assert 'Content-Type' in response.headers - assert 'Content-Length' in response.headers - assert 'Content-Disposition' in response.headers + assert "Content-Type" in response.headers + assert "Content-Length" in response.headers + assert "Content-Disposition" in response.headers # The 'NREUM HEADER' value comes from our override for the header. # The 'NREUM.info' value comes from the programmatically generated # footer added by the agent. 
- response.mustcontain('NREUM HEADER', 'NREUM.info') + response.mustcontain("NREUM HEADER", "NREUM.info") + @wsgi_application() def target_wsgi_application_empty_list(environ, start_response): - status = '200 OK' + status = "200 OK" - response_headers = [('Content-Type', 'text/html; charset=utf-8'), - ('Content-Length', '0')] + response_headers = [("Content-Type", "text/html; charset=utf-8"), ("Content-Length", "0")] start_response(status, response_headers) return [] -target_application_empty_list = webtest.TestApp( - target_wsgi_application_empty_list) + +target_application_empty_list = webtest.TestApp(target_wsgi_application_empty_list) _test_html_insertion_empty_list_settings = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': True, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": True, + "js_agent_loader": "", } -@override_application_settings( - _test_html_insertion_empty_list_settings) + +@override_application_settings(_test_html_insertion_empty_list_settings) def test_html_insertion_empty_list(): - response = target_application_empty_list.get('/', status=200) + response = target_application_empty_list.get("/", status=200) - assert 'Content-Type' in response.headers - assert 'Content-Length' in response.headers + assert "Content-Type" in response.headers + assert "Content-Length" in response.headers # The 'NREUM HEADER' value comes from our override for the header. # The 'NREUM.info' value comes from the programmatically generated # footer added by the agent. 
- response.mustcontain(no=['NREUM HEADER', 'NREUM.info']) + response.mustcontain(no=["NREUM HEADER", "NREUM.info"]) assert len(response.body) == 0 + @wsgi_application() def target_wsgi_application_single_empty_string(environ, start_response): - status = '200 OK' + status = "200 OK" - response_headers = [('Content-Type', 'text/html; charset=utf-8'), - ('Content-Length', '0')] + response_headers = [("Content-Type", "text/html; charset=utf-8"), ("Content-Length", "0")] start_response(status, response_headers) - return [''] + return [""] + -target_application_single_empty_string = webtest.TestApp( - target_wsgi_application_single_empty_string) +target_application_single_empty_string = webtest.TestApp(target_wsgi_application_single_empty_string) _test_html_insertion_single_empty_string_settings = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': True, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": True, + "js_agent_loader": "", } -@override_application_settings( - _test_html_insertion_single_empty_string_settings) + +@override_application_settings(_test_html_insertion_single_empty_string_settings) def test_html_insertion_single_empty_string(): - response = target_application_single_empty_string.get('/', status=200) + response = target_application_single_empty_string.get("/", status=200) - assert 'Content-Type' in response.headers - assert 'Content-Length' in response.headers + assert "Content-Type" in response.headers + assert "Content-Length" in response.headers # The 'NREUM HEADER' value comes from our override for the header. # The 'NREUM.info' value comes from the programmatically generated # footer added by the agent. 
- response.mustcontain(no=['NREUM HEADER', 'NREUM.info']) + response.mustcontain(no=["NREUM HEADER", "NREUM.info"]) assert len(response.body) == 0 + @wsgi_application() def target_wsgi_application_multiple_empty_string(environ, start_response): - status = '200 OK' + status = "200 OK" - response_headers = [('Content-Type', 'text/html; charset=utf-8'), - ('Content-Length', '0')] + response_headers = [("Content-Type", "text/html; charset=utf-8"), ("Content-Length", "0")] start_response(status, response_headers) - return ['', ''] + return ["", ""] + -target_application_multiple_empty_string = webtest.TestApp( - target_wsgi_application_multiple_empty_string) +target_application_multiple_empty_string = webtest.TestApp(target_wsgi_application_multiple_empty_string) _test_html_insertion_multiple_empty_string_settings = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': True, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": True, + "js_agent_loader": "", } -@override_application_settings( - _test_html_insertion_multiple_empty_string_settings) + +@override_application_settings(_test_html_insertion_multiple_empty_string_settings) def test_html_insertion_multiple_empty_string(): - response = target_application_multiple_empty_string.get('/', status=200) + response = target_application_multiple_empty_string.get("/", status=200) - assert 'Content-Type' in response.headers - assert 'Content-Length' in response.headers + assert "Content-Type" in response.headers + assert "Content-Length" in response.headers # The 'NREUM HEADER' value comes from our override for the header. # The 'NREUM.info' value comes from the programmatically generated # footer added by the agent. 
- response.mustcontain(no=['NREUM HEADER', 'NREUM.info']) + response.mustcontain(no=["NREUM HEADER", "NREUM.info"]) assert len(response.body) == 0 + @wsgi_application() def target_wsgi_application_single_large_prelude(environ, start_response): - status = '200 OK' + status = "200 OK" - output = [64*1024*b' ' + b''] + output = [64 * 1024 * b" " + b""] - response_headers = [('Content-Type', 'text/html; charset=utf-8'), - ('Content-Length', str(len(b''.join(output))))] + response_headers = [("Content-Type", "text/html; charset=utf-8"), ("Content-Length", str(len(b"".join(output))))] start_response(status, response_headers) return output -target_application_single_large_prelude = webtest.TestApp( - target_wsgi_application_single_large_prelude) + +target_application_single_large_prelude = webtest.TestApp(target_wsgi_application_single_large_prelude) _test_html_insertion_single_large_prelude_settings = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': True, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": True, + "js_agent_loader": "", } -@override_application_settings( - _test_html_insertion_single_large_prelude_settings) + +@override_application_settings(_test_html_insertion_single_large_prelude_settings) def test_html_insertion_single_large_prelude(): - response = target_application_single_large_prelude.get('/', status=200) + response = target_application_single_large_prelude.get("/", status=200) # The 'NREUM HEADER' value comes from our override for the header. # The 'NREUM.info' value comes from the programmatically generated # footer added by the agent. 
- assert 'Content-Type' in response.headers - assert 'Content-Length' in response.headers + assert "Content-Type" in response.headers + assert "Content-Length" in response.headers + + response.mustcontain(no=["NREUM HEADER", "NREUM.info"]) - response.mustcontain(no=['NREUM HEADER', 'NREUM.info']) + output = [32 * 1024 * b" ", 32 * 1024 * b" ", b""] - output = [32*1024*b' ', 32*1024*b' ', b''] + assert len(response.body) == len(b"".join(output)) - assert len(response.body) == len(b''.join(output)) @wsgi_application() def target_wsgi_application_multi_large_prelude(environ, start_response): - status = '200 OK' + status = "200 OK" - output = [32*1024*b' ', 32*1024*b' ', b''] + output = [32 * 1024 * b" ", 32 * 1024 * b" ", b""] - response_headers = [('Content-Type', 'text/html; charset=utf-8'), - ('Content-Length', str(len(b''.join(output))))] + response_headers = [("Content-Type", "text/html; charset=utf-8"), ("Content-Length", str(len(b"".join(output))))] start_response(status, response_headers) return output -target_application_multi_large_prelude = webtest.TestApp( - target_wsgi_application_multi_large_prelude) + +target_application_multi_large_prelude = webtest.TestApp(target_wsgi_application_multi_large_prelude) _test_html_insertion_multi_large_prelude_settings = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': True, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": True, + "js_agent_loader": "", } -@override_application_settings( - _test_html_insertion_multi_large_prelude_settings) + +@override_application_settings(_test_html_insertion_multi_large_prelude_settings) def test_html_insertion_multi_large_prelude(): - response = target_application_multi_large_prelude.get('/', status=200) + response = target_application_multi_large_prelude.get("/", status=200) # The 'NREUM HEADER' value comes from our override for the header. 
# The 'NREUM.info' value comes from the programmatically generated # footer added by the agent. - assert 'Content-Type' in response.headers - assert 'Content-Length' in response.headers + assert "Content-Type" in response.headers + assert "Content-Length" in response.headers + + response.mustcontain(no=["NREUM HEADER", "NREUM.info"]) - response.mustcontain(no=['NREUM HEADER', 'NREUM.info']) + output = [32 * 1024 * b" ", 32 * 1024 * b" ", b""] - output = [32*1024*b' ', 32*1024*b' ', b''] + assert len(response.body) == len(b"".join(output)) - assert len(response.body) == len(b''.join(output)) @wsgi_application() def target_wsgi_application_yield_before_start(environ, start_response): - status = '200 OK' + status = "200 OK" # Ambiguous whether yield an empty string before calling # start_response() is legal. Various WSGI servers allow it # We have to disable WebTest lint check to get this to run. - yield b'' + yield b"" - output = b'

RESPONSE

' + output = b"

RESPONSE

" - response_headers = [('Content-Type', 'text/html; charset=utf-8'), - ('Content-Length', str(len(output)))] + response_headers = [("Content-Type", "text/html; charset=utf-8"), ("Content-Length", str(len(output)))] start_response(status, response_headers) yield output -target_application_yield_before_start = webtest.TestApp( - target_wsgi_application_yield_before_start, lint=False) + +target_application_yield_before_start = webtest.TestApp(target_wsgi_application_yield_before_start, lint=False) _test_html_insertion_yield_before_start_settings = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': True, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": True, + "js_agent_loader": "", } + @override_application_settings(_test_html_insertion_yield_before_start_settings) def test_html_insertion_yield_before_start(): - response = target_application_yield_before_start.get('/', status=200) + response = target_application_yield_before_start.get("/", status=200) # The 'NREUM HEADER' value comes from our override for the header. # The 'NREUM.info' value comes from the programmatically generated # footer added by the agent. - response.mustcontain('NREUM HEADER', 'NREUM.info') + response.mustcontain("NREUM HEADER", "NREUM.info") + @wsgi_application() def target_wsgi_application_start_yield_start(environ, start_response): - output = b'

RESPONSE

' + output = b"

RESPONSE

" - response_headers = [('Content-Type', 'text/html; charset=utf-8'), - ('Content-Length', str(len(output)))] + response_headers = [("Content-Type", "text/html; charset=utf-8"), ("Content-Length", str(len(output)))] - start_response('200 OK', response_headers) + start_response("200 OK", response_headers) - yield '' + yield "" try: - start_response(status, response_headers) + start_response(status, response_headers) # noqa: F821 except Exception: - start_response('500 Error', response_headers, sys.exc_info()) + start_response("500 Error", response_headers, sys.exc_info()) yield output -target_application_start_yield_start = webtest.TestApp( - target_wsgi_application_start_yield_start) + +target_application_start_yield_start = webtest.TestApp(target_wsgi_application_start_yield_start) _test_html_insertion_start_yield_start_settings = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': True, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": True, + "js_agent_loader": "", } + @override_application_settings(_test_html_insertion_start_yield_start_settings) def test_html_insertion_start_yield_start(): - response = target_application_start_yield_start.get('/', status=500) + response = target_application_start_yield_start.get("/", status=500) # The 'NREUM HEADER' value comes from our override for the header. # The 'NREUM.info' value comes from the programmatically generated # footer added by the agent. - assert 'Content-Type' in response.headers - assert 'Content-Length' in response.headers + assert "Content-Type" in response.headers + assert "Content-Length" in response.headers + + response.mustcontain("NREUM HEADER", "NREUM.info") - response.mustcontain('NREUM HEADER', 'NREUM.info') @wsgi_application() def target_wsgi_application_invalid_content_length(environ, start_response): - output = b'

RESPONSE

' + output = b"

RESPONSE

" - response_headers = [('Content-Type', 'text/html; charset=utf-8'), - ('Content-Length', 'XXX')] + response_headers = [("Content-Type", "text/html; charset=utf-8"), ("Content-Length", "XXX")] - start_response('200 OK', response_headers) + start_response("200 OK", response_headers) yield output -target_application_invalid_content_length = webtest.TestApp( - target_wsgi_application_invalid_content_length) + +target_application_invalid_content_length = webtest.TestApp(target_wsgi_application_invalid_content_length) _test_html_insertion_invalid_content_length_settings = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': True, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": True, + "js_agent_loader": "", } + @override_application_settings(_test_html_insertion_invalid_content_length_settings) def test_html_insertion_invalid_content_length(): - response = target_application_invalid_content_length.get('/', status=200) + response = target_application_invalid_content_length.get("/", status=200) # This is relying on WebTest not validating the # value of the Content-Length response header # and just passing it through as is. - assert 'Content-Type' in response.headers - assert 'Content-Length' in response.headers + assert "Content-Type" in response.headers + assert "Content-Length" in response.headers - assert response.headers['Content-Length'] == 'XXX' + assert response.headers["Content-Length"] == "XXX" + + response.mustcontain(no=["NREUM HEADER", "NREUM.info"]) - response.mustcontain(no=['NREUM HEADER', 'NREUM.info']) @wsgi_application() def target_wsgi_application_content_encoding(environ, start_response): - output = b'

RESPONSE

' + output = b"

RESPONSE

" - response_headers = [('Content-Type', 'text/html; charset=utf-8'), - ('Content-Length', str(len(output))), - ('Content-Encoding', 'identity')] + response_headers = [ + ("Content-Type", "text/html; charset=utf-8"), + ("Content-Length", str(len(output))), + ("Content-Encoding", "identity"), + ] - start_response('200 OK', response_headers) + start_response("200 OK", response_headers) yield output -target_application_content_encoding = webtest.TestApp( - target_wsgi_application_content_encoding) + +target_application_content_encoding = webtest.TestApp(target_wsgi_application_content_encoding) _test_html_insertion_content_encoding_settings = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': True, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": True, + "js_agent_loader": "", } + @override_application_settings(_test_html_insertion_content_encoding_settings) def test_html_insertion_content_encoding(): - response = target_application_content_encoding.get('/', status=200) + response = target_application_content_encoding.get("/", status=200) # Technically 'identity' should not be used in Content-Encoding # but clients will still accept it. Use this fact to disable auto # RUM for this test. Other option is to compress the response # and use 'gzip'. - assert 'Content-Type' in response.headers - assert 'Content-Length' in response.headers + assert "Content-Type" in response.headers + assert "Content-Length" in response.headers - assert response.headers['Content-Encoding'] == 'identity' + assert response.headers["Content-Encoding"] == "identity" + + response.mustcontain(no=["NREUM HEADER", "NREUM.info"]) - response.mustcontain(no=['NREUM HEADER', 'NREUM.info']) @wsgi_application() def target_wsgi_application_no_content_type(environ, start_response): - output = b'

RESPONSE

' + output = b"

RESPONSE

" - response_headers = [('Content-Length', str(len(output)))] + response_headers = [("Content-Length", str(len(output)))] - start_response('200 OK', response_headers) + start_response("200 OK", response_headers) yield output -target_application_no_content_type = webtest.TestApp( - target_wsgi_application_no_content_type, lint=False) + +target_application_no_content_type = webtest.TestApp(target_wsgi_application_no_content_type, lint=False) _test_html_insertion_no_content_type_settings = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': True, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": True, + "js_agent_loader": "", } + @override_application_settings(_test_html_insertion_no_content_type_settings) def test_html_insertion_no_content_type(): - response = target_application_no_content_type.get('/', status=200) + response = target_application_no_content_type.get("/", status=200) + + assert "Content-Type" not in response.headers + assert "Content-Length" in response.headers - assert 'Content-Type' not in response.headers - assert 'Content-Length' in response.headers + response.mustcontain(no=["NREUM HEADER", "NREUM.info"]) - response.mustcontain(no=['NREUM HEADER', 'NREUM.info']) @wsgi_application() def target_wsgi_application_plain_text(environ, start_response): - output = b'RESPONSE' + output = b"RESPONSE" - response_headers = [('Content-Type', 'text/plain'), - ('Content-Length', str(len(output)))] + response_headers = [("Content-Type", "text/plain"), ("Content-Length", str(len(output)))] - start_response('200 OK', response_headers) + start_response("200 OK", response_headers) yield output -target_application_plain_text = webtest.TestApp( - target_wsgi_application_plain_text) + +target_application_plain_text = webtest.TestApp(target_wsgi_application_plain_text) _test_html_insertion_plain_text_settings = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': 
True, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": True, + "js_agent_loader": "", } + @override_application_settings(_test_html_insertion_plain_text_settings) def test_html_insertion_plain_text(): - response = target_application_plain_text.get('/', status=200) + response = target_application_plain_text.get("/", status=200) + + assert "Content-Type" in response.headers + assert "Content-Length" in response.headers - assert 'Content-Type' in response.headers - assert 'Content-Length' in response.headers + response.mustcontain(no=["NREUM HEADER", "NREUM.info"]) - response.mustcontain(no=['NREUM HEADER', 'NREUM.info']) @wsgi_application() def target_wsgi_application_write_callback(environ, start_response): - output = b'

RESPONSE

' + output = b"

RESPONSE

" - response_headers = [('Content-Type', 'text/html'), - ('Content-Length', str(len(output)))] + response_headers = [("Content-Type", "text/html"), ("Content-Length", str(len(output)))] - write = start_response('200 OK', response_headers) + write = start_response("200 OK", response_headers) write(output) return [] -target_application_write_callback = webtest.TestApp( - target_wsgi_application_write_callback) + +target_application_write_callback = webtest.TestApp(target_wsgi_application_write_callback) _test_html_insertion_write_callback_settings = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': True, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": True, + "js_agent_loader": "", } + @override_application_settings(_test_html_insertion_write_callback_settings) def test_html_insertion_write_callback(): - response = target_application_write_callback.get('/', status=200) + response = target_application_write_callback.get("/", status=200) + + assert "Content-Type" in response.headers + assert "Content-Length" in response.headers - assert 'Content-Type' in response.headers - assert 'Content-Length' in response.headers + response.mustcontain(no=["NREUM HEADER", "NREUM.info"]) - response.mustcontain(no=['NREUM HEADER', 'NREUM.info']) @wsgi_application() def target_wsgi_application_yield_before_write(environ, start_response): - output = [b'', b'

RESPONSE

'] + output = [b"", b"

RESPONSE

"] - response_headers = [('Content-Type', 'text/html; charset=utf-8'), - ('Content-Length', str(len(b''.join(output))))] + response_headers = [("Content-Type", "text/html; charset=utf-8"), ("Content-Length", str(len(b"".join(output))))] - write = start_response('200 OK', response_headers) + write = start_response("200 OK", response_headers) # Technically this is in violation of the WSGI specification # if that write() should always be before yields. @@ -781,172 +816,177 @@ def target_wsgi_application_yield_before_write(environ, start_response): write(output.pop(0)) -target_application_yield_before_write = webtest.TestApp( - target_wsgi_application_yield_before_write) + +target_application_yield_before_write = webtest.TestApp(target_wsgi_application_yield_before_write) _test_html_insertion_yield_before_write_settings = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': True, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": True, + "js_agent_loader": "", } + @override_application_settings(_test_html_insertion_yield_before_write_settings) def test_html_insertion_yield_before_write(): - response = target_application_yield_before_write.get('/', status=200) + response = target_application_yield_before_write.get("/", status=200) - assert 'Content-Type' in response.headers - assert 'Content-Length' in response.headers + assert "Content-Type" in response.headers + assert "Content-Length" in response.headers - response.mustcontain(no=['NREUM HEADER', 'NREUM.info']) + response.mustcontain(no=["NREUM HEADER", "NREUM.info"]) - expected = b'

RESPONSE

' + expected = b"

RESPONSE

" assert response.body == expected + @wsgi_application() def target_wsgi_application_write_before_yield(environ, start_response): - output = [b'', b'

RESPONSE

'] + output = [b"", b"

RESPONSE

"] - response_headers = [('Content-Type', 'text/html; charset=utf-8'), - ('Content-Length', str(len(b''.join(output))))] + response_headers = [("Content-Type", "text/html; charset=utf-8"), ("Content-Length", str(len(b"".join(output))))] - write = start_response('200 OK', response_headers) + write = start_response("200 OK", response_headers) write(output.pop(0)) yield output.pop(0) -target_application_write_before_yield = webtest.TestApp( - target_wsgi_application_write_before_yield) + +target_application_write_before_yield = webtest.TestApp(target_wsgi_application_write_before_yield) _test_html_insertion_write_before_yield_settings = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': True, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": True, + "js_agent_loader": "", } + @override_application_settings(_test_html_insertion_write_before_yield_settings) def test_html_insertion_write_before_yield(): - response = target_application_write_before_yield.get('/', status=200) + response = target_application_write_before_yield.get("/", status=200) - assert 'Content-Type' in response.headers - assert 'Content-Length' in response.headers + assert "Content-Type" in response.headers + assert "Content-Length" in response.headers - response.mustcontain(no=['NREUM HEADER', 'NREUM.info']) + response.mustcontain(no=["NREUM HEADER", "NREUM.info"]) - expected = b'

RESPONSE

' + expected = b"

RESPONSE

" assert response.body == expected + @wsgi_application() def target_wsgi_application_param_on_close(environ, start_response): - output = b'

RESPONSE

' + output = b"

RESPONSE

" - response_headers = [('Content-Type', 'text/html; charset=utf-8'), - ('Content-Length', str(len(output)))] + response_headers = [("Content-Type", "text/html; charset=utf-8"), ("Content-Length", str(len(output)))] - start_response('200 OK', response_headers) + start_response("200 OK", response_headers) try: yield output finally: - add_custom_parameter('key', 'value') + add_custom_attribute("key", "value") + -target_application_param_on_close = webtest.TestApp( - target_wsgi_application_param_on_close) +target_application_param_on_close = webtest.TestApp(target_wsgi_application_param_on_close) _test_html_insertion_param_on_close_settings = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': True, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": True, + "js_agent_loader": "", } + @override_application_settings(_test_html_insertion_param_on_close_settings) -@validate_custom_parameters(required_params=[('key', 'value')]) +@validate_custom_parameters(required_params=[("key", "value")]) def test_html_insertion_param_on_close(): - response = target_application_param_on_close.get('/', status=200) + response = target_application_param_on_close.get("/", status=200) + + response.mustcontain("NREUM HEADER", "NREUM.info") - response.mustcontain('NREUM HEADER', 'NREUM.info') @wsgi_application() def target_wsgi_application_param_on_error(environ, start_response): - output = b'

RESPONSE

' + output = b"

RESPONSE

" - response_headers = [('Content-Type', 'text/html; charset=utf-8'), - ('Content-Length', str(len(output)))] + response_headers = [("Content-Type", "text/html; charset=utf-8"), ("Content-Length", str(len(output)))] - start_response('200 OK', response_headers) + start_response("200 OK", response_headers) try: - raise RuntimeError('ERROR') + raise RuntimeError("ERROR") yield output finally: - add_custom_parameter('key', 'value') + add_custom_attribute("key", "value") + -target_application_param_on_error = webtest.TestApp( - target_wsgi_application_param_on_error) +target_application_param_on_error = webtest.TestApp(target_wsgi_application_param_on_error) _test_html_insertion_param_on_error_settings = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': True, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": True, + "js_agent_loader": "", } + @override_application_settings(_test_html_insertion_param_on_error_settings) @validate_transaction_errors(errors=[_runtime_error_name]) -@validate_custom_parameters(required_params=[('key', 'value')]) +@validate_custom_parameters(required_params=[("key", "value")]) def test_html_insertion_param_on_error(): try: - response = target_application_param_on_error.get('/', status=500) + response = target_application_param_on_error.get("/", status=500) except RuntimeError: pass + @wsgi_application() def target_wsgi_application_disable_autorum_via_api(environ, start_response): - status = '200 OK' + status = "200 OK" - output = b'

RESPONSE

' + output = b"

RESPONSE

" disable_browser_autorum() - response_headers = [('Content-Type', 'text/html; charset=utf-8'), - ('Content-Length', str(len(output)))] + response_headers = [("Content-Type", "text/html; charset=utf-8"), ("Content-Length", str(len(output)))] start_response(status, response_headers) yield output -target_application_disable_autorum_via_api = webtest.TestApp( - target_wsgi_application_disable_autorum_via_api) + +target_application_disable_autorum_via_api = webtest.TestApp(target_wsgi_application_disable_autorum_via_api) _test_html_insertion_disable_autorum_via_api_settings = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': True, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": True, + "js_agent_loader": "", } -@override_application_settings( - _test_html_insertion_disable_autorum_via_api_settings) + +@override_application_settings(_test_html_insertion_disable_autorum_via_api_settings) def test_html_insertion_disable_autorum_via_api(): - response = target_application_disable_autorum_via_api.get('/', status=200) + response = target_application_disable_autorum_via_api.get("/", status=200) - assert 'Content-Type' in response.headers - assert 'Content-Length' in response.headers + assert "Content-Type" in response.headers + assert "Content-Length" in response.headers # The 'NREUM HEADER' value comes from our override for the header. # The 'NREUM.info' value comes from the programmatically generated # footer added by the agent. - response.mustcontain(no=['NREUM HEADER', 'NREUM.info']) + response.mustcontain(no=["NREUM HEADER", "NREUM.info"]) + @wsgi_application() def target_wsgi_application_manual_rum_insertion(environ, start_response): - status = '200 OK' + status = "200 OK" - output = b'

RESPONSE

' + output = b"

RESPONSE

" header = get_browser_timing_header() footer = get_browser_timing_footer() @@ -954,34 +994,33 @@ def target_wsgi_application_manual_rum_insertion(environ, start_response): header = get_browser_timing_header() footer = get_browser_timing_footer() - assert header == '' - assert footer == '' + assert header == "" + assert footer == "" - response_headers = [('Content-Type', 'text/html; charset=utf-8'), - ('Content-Length', str(len(output)))] + response_headers = [("Content-Type", "text/html; charset=utf-8"), ("Content-Length", str(len(output)))] start_response(status, response_headers) yield output -target_application_manual_rum_insertion = webtest.TestApp( - target_wsgi_application_manual_rum_insertion) + +target_application_manual_rum_insertion = webtest.TestApp(target_wsgi_application_manual_rum_insertion) _test_html_insertion_manual_rum_insertion_settings = { - 'browser_monitoring.enabled': True, - 'browser_monitoring.auto_instrument': True, - 'js_agent_loader': u'', + "browser_monitoring.enabled": True, + "browser_monitoring.auto_instrument": True, + "js_agent_loader": "", } -@override_application_settings( - _test_html_insertion_manual_rum_insertion_settings) + +@override_application_settings(_test_html_insertion_manual_rum_insertion_settings) def test_html_insertion_manual_rum_insertion(): - response = target_application_manual_rum_insertion.get('/', status=200) + response = target_application_manual_rum_insertion.get("/", status=200) - assert 'Content-Type' in response.headers - assert 'Content-Length' in response.headers + assert "Content-Type" in response.headers + assert "Content-Length" in response.headers # The 'NREUM HEADER' value comes from our override for the header. # The 'NREUM.info' value comes from the programmatically generated # footer added by the agent. 
- response.mustcontain(no=['NREUM HEADER', 'NREUM.info']) + response.mustcontain(no=["NREUM HEADER", "NREUM.info"]) diff --git a/tests/agent_features/test_high_security_mode.py b/tests/agent_features/test_high_security_mode.py index 89499d365..51cd19931 100644 --- a/tests/agent_features/test_high_security_mode.py +++ b/tests/agent_features/test_high_security_mode.py @@ -38,7 +38,7 @@ from newrelic.api.settings import STRIP_EXCEPTION_MESSAGE from newrelic.api.time_trace import notice_error from newrelic.api.transaction import ( - add_custom_parameter, + add_custom_attribute, capture_request_params, current_transaction, record_custom_event, @@ -396,7 +396,7 @@ def test_remote_config_hsm_fixups_server_side_disabled(): @validate_custom_parameters(required_params=[("key", "value")]) @background_task() def test_other_transaction_custom_parameters_hsm_disabled(): - add_custom_parameter("key", "value") + add_custom_attribute("key", "value") @override_application_settings(_test_transaction_settings_hsm_disabled) @@ -404,14 +404,14 @@ def test_other_transaction_custom_parameters_hsm_disabled(): @background_task() def test_other_transaction_multiple_custom_parameters_hsm_disabled(): transaction = current_transaction() - transaction.add_custom_parameters([("key-1", "value-1"), ("key-2", "value-2")]) + transaction.add_custom_attributes([("key-1", "value-1"), ("key-2", "value-2")]) @override_application_settings(_test_transaction_settings_hsm_enabled) @validate_custom_parameters(forgone_params=[("key", "value")]) @background_task() def test_other_transaction_custom_parameters_hsm_enabled(): - add_custom_parameter("key", "value") + add_custom_attribute("key", "value") @override_application_settings(_test_transaction_settings_hsm_enabled) @@ -419,7 +419,7 @@ def test_other_transaction_custom_parameters_hsm_enabled(): @background_task() def test_other_transaction_multiple_custom_parameters_hsm_enabled(): transaction = current_transaction() - 
transaction.add_custom_parameters([("key-1", "value-1"), ("key-2", "value-2")]) + transaction.add_custom_attributes([("key-1", "value-1"), ("key-2", "value-2")]) class TestException(Exception): @@ -434,7 +434,7 @@ class TestException(Exception): @validate_custom_parameters(required_params=[("key-1", "value-1")]) @background_task() def test_other_transaction_error_parameters_hsm_disabled(): - add_custom_parameter("key-1", "value-1") + add_custom_attribute("key-1", "value-1") try: raise TestException("test message") except Exception: @@ -448,7 +448,7 @@ def test_other_transaction_error_parameters_hsm_disabled(): @validate_custom_parameters(forgone_params=[("key-1", "value-1")]) @background_task() def test_other_transaction_error_parameters_hsm_enabled(): - add_custom_parameter("key-1", "value-1") + add_custom_attribute("key-1", "value-1") try: raise TestException("test message") except Exception: diff --git a/tests/agent_features/test_span_events.py b/tests/agent_features/test_span_events.py index 4fda858c0..465613169 100644 --- a/tests/agent_features/test_span_events.py +++ b/tests/agent_features/test_span_events.py @@ -552,9 +552,9 @@ def _test(): def test_span_user_attribute_overrides_transaction_attribute(): transaction = current_transaction() - transaction.add_custom_parameter("foo", "a") + transaction.add_custom_attribute("foo", "a") add_custom_span_attribute("foo", "b") - transaction.add_custom_parameter("foo", "c") + transaction.add_custom_attribute("foo", "c") @override_application_settings({"attributes.include": "*"}) @@ -599,7 +599,7 @@ def _test(): transaction = current_transaction() for i in range(128): - transaction.add_custom_parameter("txn_attr%i" % i, "txnValue") + transaction.add_custom_attribute("txn_attr%i" % i, "txnValue") if i < 64: add_custom_span_attribute("span_attr%i" % i, "spanValue") diff --git a/tests/cross_agent/test_rum_client_config.py b/tests/cross_agent/test_rum_client_config.py index d60cff777..c2a4a465f 100644 --- 
a/tests/cross_agent/test_rum_client_config.py +++ b/tests/cross_agent/test_rum_client_config.py @@ -14,94 +14,118 @@ import json import os + import pytest import webtest +from testing_support.fixtures import override_application_settings -from newrelic.api.transaction import (set_transaction_name, - add_custom_parameter, get_browser_timing_footer) +from newrelic.api.transaction import ( + add_custom_attribute, + get_browser_timing_footer, + set_transaction_name, +) from newrelic.api.wsgi_application import wsgi_application -from testing_support.fixtures import override_application_settings - def _load_tests(): - fixture = os.path.join(os.curdir, 'fixtures', 'rum_client_config.json') - with open(fixture, 'r') as fh: + fixture = os.path.join(os.curdir, "fixtures", "rum_client_config.json") + with open(fixture, "r") as fh: js = fh.read() return json.loads(js) -fields = ['testname', 'apptime_milliseconds', 'queuetime_milliseconds', - 'browser_monitoring.attributes.enabled', 'transaction_name', - 'license_key', 'connect_reply', 'user_attributes', 'expected'] + +fields = [ + "testname", + "apptime_milliseconds", + "queuetime_milliseconds", + "browser_monitoring.attributes.enabled", + "transaction_name", + "license_key", + "connect_reply", + "user_attributes", + "expected", +] # Replace . 
as not a valid character in python argument names -field_names = ','.join([f.replace('.', '_') for f in fields]) +field_names = ",".join([f.replace(".", "_") for f in fields]) + def _parametrize_test(test): return tuple([test.get(f, None) for f in fields]) + _rum_tests = [_parametrize_test(t) for t in _load_tests()] + @wsgi_application() def target_wsgi_application(environ, start_response): - status = '200 OK' + status = "200 OK" - txn_name = environ.get('txn_name') - set_transaction_name(txn_name, group='') + txn_name = environ.get("txn_name") + set_transaction_name(txn_name, group="") - user_attrs = json.loads(environ.get('user_attrs')) + user_attrs = json.loads(environ.get("user_attrs")) for key, value in user_attrs.items(): - add_custom_parameter(key, value) + add_custom_attribute(key, value) - text = '%s

RESPONSE

' + text = "%s

RESPONSE

" - output = (text % get_browser_timing_footer()).encode('UTF-8') + output = (text % get_browser_timing_footer()).encode("UTF-8") - response_headers = [('Content-Type', 'text/html; charset=utf-8'), - ('Content-Length', str(len(output)))] + response_headers = [("Content-Type", "text/html; charset=utf-8"), ("Content-Length", str(len(output)))] start_response(status, response_headers) return [output] + target_application = webtest.TestApp(target_wsgi_application) + @pytest.mark.parametrize(field_names, _rum_tests) -def test_browser_montioring(testname, apptime_milliseconds, queuetime_milliseconds, - browser_monitoring_attributes_enabled, transaction_name, - license_key, connect_reply, user_attributes, expected): +def test_browser_montioring( + testname, + apptime_milliseconds, + queuetime_milliseconds, + browser_monitoring_attributes_enabled, + transaction_name, + license_key, + connect_reply, + user_attributes, + expected, +): settings = { - 'browser_monitoring.attributes.enabled': browser_monitoring_attributes_enabled, - 'license_key': license_key, - 'js_agent_loader': u'', - } + "browser_monitoring.attributes.enabled": browser_monitoring_attributes_enabled, + "license_key": license_key, + "js_agent_loader": "", + } settings.update(connect_reply) @override_application_settings(settings) def run_browser_data_test(): - response = target_application.get('/', - extra_environ={'txn_name': str(transaction_name), - 'user_attrs': json.dumps(user_attributes)}) + response = target_application.get( + "/", extra_environ={"txn_name": str(transaction_name), "user_attrs": json.dumps(user_attributes)} + ) # We actually put the "footer" in the header, the first script is the # agent "header", the second one is where the data lives, hence the [1]. 
- footer = response.html.html.head.find_all('script')[1] - footer_data = json.loads(footer.string.split('NREUM.info=')[1]) + footer = response.html.html.head.find_all("script")[1] + footer_data = json.loads(footer.string.split("NREUM.info=")[1]) # Not feasible to test the time metric values in testing - expected.pop('queueTime') - expected.pop('applicationTime') - assert footer_data['applicationTime'] >= 0 - assert footer_data['queueTime'] >= 0 + expected.pop("queueTime") + expected.pop("applicationTime") + assert footer_data["applicationTime"] >= 0 + assert footer_data["queueTime"] >= 0 # Python always prepends stuff to the transaction name, so this # doesn't match the obscured value. - expected.pop('transactionName') + expected.pop("transactionName") # Check that all other values are correct @@ -112,7 +136,7 @@ def run_browser_data_test(): # don't omit it, so we need to special case 'atts' when we compare # to 'expected'. - if key == 'atts' and value == '': + if key == "atts" and value == "": assert key not in footer_data else: assert footer_data[key] == value diff --git a/tests/testing_support/sample_applications.py b/tests/testing_support/sample_applications.py index 0d5f03b8b..74d5e6dbe 100644 --- a/tests/testing_support/sample_applications.py +++ b/tests/testing_support/sample_applications.py @@ -17,127 +17,132 @@ try: from urllib2 import urlopen # Py2.X except ImportError: - from urllib.request import urlopen # Py3.X + from urllib.request import urlopen # Py3.X import sqlite3 as db from newrelic.api.time_trace import notice_error -from newrelic.api.transaction import (add_custom_parameter, - get_browser_timing_header, get_browser_timing_footer, - record_custom_event) +from newrelic.api.transaction import ( + add_custom_attribute, + get_browser_timing_footer, + get_browser_timing_header, + record_custom_event, +) from newrelic.api.wsgi_application import wsgi_application _logger = logging.getLogger(__name__) _custom_parameters = { - 'user' : 'user-name', - 
'account' : 'account-name', - 'product' : 'product-name', - 'bytes' : b'bytes-value', - 'string' : 'string-value', - 'unicode' : u'unicode-value', - 'integer' : 1, - 'float' : 1.0, - 'invalid-utf8' : b'\xe2', - 'multibyte-utf8' : b'\xe2\x88\x9a', - 'multibyte-unicode' : b'\xe2\x88\x9a'.decode('utf-8'), - 'list' : [], - 'tuple' : (), - 'dict' : {}, + "user": "user-name", + "account": "account-name", + "product": "product-name", + "bytes": b"bytes-value", + "string": "string-value", + "unicode": "unicode-value", + "integer": 1, + "float": 1.0, + "invalid-utf8": b"\xe2", + "multibyte-utf8": b"\xe2\x88\x9a", + "multibyte-unicode": b"\xe2\x88\x9a".decode("utf-8"), + "list": [], + "tuple": (), + "dict": {}, } -_err_param = { - 'err-param' : 'value' -} +_err_param = {"err-param": "value"} + def user_attributes_added(): """Expected values when the custom parameters in this file are added as user attributes """ user_attributes = _custom_parameters.copy() - user_attributes['list'] = '[]' - user_attributes['tuple'] = '()' - user_attributes['dict'] = '{}' + user_attributes["list"] = "[]" + user_attributes["tuple"] = "()" + user_attributes["dict"] = "{}" return user_attributes + def error_user_params_added(): return _err_param.copy() + @wsgi_application() def fully_featured_app(environ, start_response): - status = '200 OK' + status = "200 OK" - path = environ.get('PATH_INFO') - use_user_attrs = environ.get('record_attributes', 'TRUE') == 'TRUE' + path = environ.get("PATH_INFO") + use_user_attrs = environ.get("record_attributes", "TRUE") == "TRUE" - environ['wsgi.input'].read() - environ['wsgi.input'].readline() - environ['wsgi.input'].readlines() + environ["wsgi.input"].read() + environ["wsgi.input"].readline() + environ["wsgi.input"].readlines() if use_user_attrs: for attr, val in _custom_parameters.items(): - add_custom_parameter(attr, val) + add_custom_attribute(attr, val) - if 'db' in environ and int(environ['db']) > 0: + if "db" in environ and int(environ["db"]) > 0: 
connection = db.connect(":memory:") - for i in range(int(environ['db']) - 1): + for i in range(int(environ["db"]) - 1): connection.execute("create table test_db%d (a, b, c)" % i) - if 'external' in environ: - for i in range(int(environ['external'])): - r = urlopen('http://www.python.org') + if "external" in environ: + for i in range(int(environ["external"])): + r = urlopen("http://www.python.org") # nosec r.read(10) - if 'err_message' in environ: - n_errors = int(environ.get('n_errors', 1)) + if "err_message" in environ: + n_errors = int(environ.get("n_errors", 1)) for i in range(n_errors): try: # append number to stats engine to get unique errors, so they # don't immediately get filtered out. - raise ValueError(environ['err_message'] + str(i)) + raise ValueError(environ["err_message"] + str(i)) except ValueError: if use_user_attrs: notice_error(attributes=_err_param) else: notice_error() - text = '%s

RESPONSE

%s' + text = "%s

RESPONSE

%s" - output = (text % (get_browser_timing_header(), - get_browser_timing_footer())).encode('UTF-8') + output = (text % (get_browser_timing_header(), get_browser_timing_footer())).encode("UTF-8") - response_headers = [('Content-type', 'text/html; charset=utf-8'), - ('Content-Length', str(len(output)))] + response_headers = [("Content-type", "text/html; charset=utf-8"), ("Content-Length", str(len(output)))] write = start_response(status, response_headers) - write(b'') + write(b"") return [output] + @wsgi_application() def simple_exceptional_app(environ, start_response): - start_response('500 :(',[]) + start_response("500 :(", []) + + raise ValueError("Transaction had bad value") - raise ValueError('Transaction had bad value') @wsgi_application() def simple_app(environ, start_response): - status = '200 OK' + status = "200 OK" _logger.info("Starting response") start_response(status, response_headers=[]) return [] + @wsgi_application() def simple_custom_event_app(environ, start_response): - params = {'snowman': u'\u2603', 'foo': 'bar'} - record_custom_event('SimpleAppEvent', params) + params = {"snowman": "\u2603", "foo": "bar"} + record_custom_event("SimpleAppEvent", params) - start_response(status='200 OK', response_headers=[]) + start_response(status="200 OK", response_headers=[]) return [] diff --git a/tests/testing_support/sample_asgi_applications.py b/tests/testing_support/sample_asgi_applications.py index 53bf40d33..9118883b0 100644 --- a/tests/testing_support/sample_asgi_applications.py +++ b/tests/testing_support/sample_asgi_applications.py @@ -15,7 +15,7 @@ from newrelic.api.asgi_application import ASGIApplicationWrapper from newrelic.api.time_trace import notice_error from newrelic.api.transaction import ( - add_custom_parameter, + add_custom_attribute, current_transaction, ignore_transaction, ) @@ -101,8 +101,8 @@ async def __call__(self, scope, receive, send): @ASGIApplicationWrapper async def normal_asgi_application(scope, receive, send): output = b"header

RESPONSE

" - add_custom_parameter("puppies", "test_value") - add_custom_parameter("sunshine", "test_value") + add_custom_attribute("puppies", "test_value") + add_custom_attribute("sunshine", "test_value") response_headers = [ (b"content-type", b"text/html; charset=utf-8"), From 922db2fed19d4e228f2583f68d4c3eae2f0b27cf Mon Sep 17 00:00:00 2001 From: Uma Annamalai Date: Thu, 20 Oct 2022 14:33:56 -0700 Subject: [PATCH 005/108] Add usage metrics for Daphne and Hypercorn. (#665) * Add usage metrics for Daphne and Hypercorn. * [Mega-Linter] Apply linters fixes Co-authored-by: umaannamalai --- newrelic/api/asgi_application.py | 10 +++++++--- newrelic/api/transaction.py | 9 +++++++++ newrelic/api/wsgi_application.py | 19 +++++++++++-------- newrelic/hooks/adapter_daphne.py | 4 +++- newrelic/hooks/adapter_hypercorn.py | 9 ++++++--- tests/adapter_daphne/test_daphne.py | 7 ++++++- tests/adapter_hypercorn/test_hypercorn.py | 9 ++++++++- 7 files changed, 50 insertions(+), 17 deletions(-) diff --git a/newrelic/api/asgi_application.py b/newrelic/api/asgi_application.py index 609a8b4b5..6ba049d9e 100644 --- a/newrelic/api/asgi_application.py +++ b/newrelic/api/asgi_application.py @@ -254,7 +254,7 @@ async def send(self, event): return await self._send(event) -def ASGIApplicationWrapper(wrapped, application=None, name=None, group=None, framework=None): +def ASGIApplicationWrapper(wrapped, application=None, name=None, group=None, framework=None, dispatcher=None): def nr_asgi_wrapper(wrapped, instance, args, kwargs): double_callable = asgiref_compatibility.is_double_callable(wrapped) if double_callable: @@ -271,9 +271,7 @@ async def nr_async_asgi(receive, send): # Check to see if any transaction is present, even an inactive # one which has been marked to be ignored or which has been # stopped already. - transaction = current_transaction(active_only=False) - if transaction: # If there is any active transaction we will return without # applying a new ASGI application wrapper context. 
In the @@ -290,6 +288,9 @@ async def nr_async_asgi(receive, send): if framework: transaction.add_framework_info(name=framework[0], version=framework[1]) + if dispatcher: + transaction.add_dispatcher_info(name=dispatcher[0], version=dispatcher[1]) + # Also override the web transaction name to be the name of # the wrapped callable if not explicitly named, and we want # the default name to be that of the ASGI component for the @@ -323,6 +324,9 @@ async def nr_async_asgi(receive, send): if framework: transaction.add_framework_info(name=framework[0], version=framework[1]) + if dispatcher: + transaction.add_dispatcher_info(name=dispatcher[0], version=dispatcher[1]) + # Override the initial web transaction name to be the supplied # name, or the name of the wrapped callable if wanting to use # the callable as the default. This will override the use of a diff --git a/newrelic/api/transaction.py b/newrelic/api/transaction.py index bad9562ff..1c0c1bd08 100644 --- a/newrelic/api/transaction.py +++ b/newrelic/api/transaction.py @@ -187,6 +187,7 @@ def __init__(self, application, enabled=None, source=None): self._frameworks = set() self._message_brokers = set() + self._dispatchers = set() self._frozen_path = None @@ -550,6 +551,10 @@ def __exit__(self, exc, value, tb): for message_broker, version in self._message_brokers: self.record_custom_metric("Python/MessageBroker/%s/%s" % (message_broker, version), 1) + if self._dispatchers: + for dispatcher, version in self._dispatchers: + self.record_custom_metric("Python/Dispatcher/%s/%s" % (dispatcher, version), 1) + if self._settings.distributed_tracing.enabled: # Sampled and priority need to be computed at the end of the # transaction when distributed tracing or span events are enabled. 
@@ -1701,6 +1706,10 @@ def add_messagebroker_info(self, name, version=None): if name: self._message_brokers.add((name, version)) + def add_dispatcher_info(self, name, version=None): + if name: + self._dispatchers.add((name, version)) + def dump(self, file): """Dumps details about the transaction to the file object.""" diff --git a/newrelic/api/wsgi_application.py b/newrelic/api/wsgi_application.py index 0f4d30454..502c9d8a0 100644 --- a/newrelic/api/wsgi_application.py +++ b/newrelic/api/wsgi_application.py @@ -18,9 +18,6 @@ import time from newrelic.api.application import application_instance -from newrelic.api.transaction import current_transaction -from newrelic.api.time_trace import notice_error -from newrelic.api.web_transaction import WSGIWebTransaction from newrelic.api.function_trace import FunctionTrace, FunctionTraceWrapper from newrelic.api.html_insertion import insert_html_snippet, verify_body_exists from newrelic.api.time_trace import notice_error @@ -80,12 +77,12 @@ def close(self): self.response_trace = None try: - with FunctionTrace(name='Finalize', group='Python/WSGI'): + with FunctionTrace(name="Finalize", group="Python/WSGI"): if isinstance(self.generator, _WSGIApplicationMiddleware): self.generator.close() - elif hasattr(self.generator, 'close'): + elif hasattr(self.generator, "close"): FunctionTraceWrapper(self.generator.close)() except: # Catch all @@ -437,7 +434,7 @@ def close(self): # Call close() on the iterable as required by the # WSGI specification. 
- if hasattr(self.iterable, 'close'): + if hasattr(self.iterable, "close"): FunctionTraceWrapper(self.iterable.close)() def __iter__(self): @@ -510,7 +507,7 @@ def __iter__(self): yield data -def WSGIApplicationWrapper(wrapped, application=None, name=None, group=None, framework=None): +def WSGIApplicationWrapper(wrapped, application=None, name=None, group=None, framework=None, dispatcher=None): # Python 2 does not allow rebinding nonlocal variables, so to fix this # framework must be stored in list so it can be edited by closure. @@ -556,6 +553,9 @@ def _nr_wsgi_application_wrapper_(wrapped, instance, args, kwargs): if framework: transaction.add_framework_info(name=framework[0], version=framework[1]) + if dispatcher: + transaction.add_dispatcher_info(name=dispatcher[0], version=dispatcher[1]) + # Also override the web transaction name to be the name of # the wrapped callable if not explicitly named, and we want # the default name to be that of the WSGI component for the @@ -618,6 +618,9 @@ def _args(environ, start_response, *args, **kwargs): if framework: transaction.add_framework_info(name=framework[0], version=framework[1]) + if dispatcher: + transaction.add_dispatcher_info(name=dispatcher[0], version=dispatcher[1]) + # Override the initial web transaction name to be the supplied # name, or the name of the wrapped callable if wanting to use # the callable as the default. 
This will override the use of a @@ -672,7 +675,7 @@ def write(data): if "wsgi.input" in environ: environ["wsgi.input"] = _WSGIInputWrapper(transaction, environ["wsgi.input"]) - with FunctionTrace(name='Application', group='Python/WSGI'): + with FunctionTrace(name="Application", group="Python/WSGI"): with FunctionTrace(name=callable_name(wrapped), source=wrapped): if settings and settings.browser_monitoring.enabled and not transaction.autorum_disabled: result = _WSGIApplicationMiddleware(wrapped, environ, _start_response, transaction) diff --git a/newrelic/hooks/adapter_daphne.py b/newrelic/hooks/adapter_daphne.py index 430d9c4b3..f18cb779a 100644 --- a/newrelic/hooks/adapter_daphne.py +++ b/newrelic/hooks/adapter_daphne.py @@ -13,6 +13,7 @@ # limitations under the License. from newrelic.api.asgi_application import ASGIApplicationWrapper +from newrelic.common.package_version_utils import get_package_version @property @@ -22,9 +23,10 @@ def application(self): @application.setter def application(self, value): + dispatcher_details = ("Daphne", get_package_version("daphne")) # Wrap app only once if value and not getattr(value, "_nr_wrapped", False): - value = ASGIApplicationWrapper(value) + value = ASGIApplicationWrapper(value, dispatcher=dispatcher_details) value._nr_wrapped = True self._nr_application = value diff --git a/newrelic/hooks/adapter_hypercorn.py b/newrelic/hooks/adapter_hypercorn.py index f22dc74f1..8dec936ef 100644 --- a/newrelic/hooks/adapter_hypercorn.py +++ b/newrelic/hooks/adapter_hypercorn.py @@ -15,6 +15,7 @@ from newrelic.api.asgi_application import ASGIApplicationWrapper from newrelic.api.wsgi_application import WSGIApplicationWrapper from newrelic.common.object_wrapper import wrap_function_wrapper +from newrelic.common.package_version_utils import get_package_version def bind_worker_serve(app, *args, **kwargs): @@ -24,6 +25,7 @@ def bind_worker_serve(app, *args, **kwargs): async def wrap_worker_serve(wrapped, instance, args, kwargs): import 
hypercorn + dispatcher_details = ("Hypercorn", get_package_version("hypercorn")) wrapper_module = getattr(hypercorn, "app_wrappers", None) asgi_wrapper_class = getattr(wrapper_module, "ASGIWrapper", None) wsgi_wrapper_class = getattr(wrapper_module, "WSGIWrapper", None) @@ -32,13 +34,14 @@ async def wrap_worker_serve(wrapped, instance, args, kwargs): # Hypercorn 0.14.1 introduced wrappers for ASGI and WSGI apps that need to be above our instrumentation. if asgi_wrapper_class is not None and isinstance(app, asgi_wrapper_class): - app.app = ASGIApplicationWrapper(app.app) + app.app = ASGIApplicationWrapper(app.app, dispatcher=dispatcher_details) elif wsgi_wrapper_class is not None and isinstance(app, wsgi_wrapper_class): - app.app = WSGIApplicationWrapper(app.app) + app.app = WSGIApplicationWrapper(app.app, dispatcher=dispatcher_details) else: - app = ASGIApplicationWrapper(app) + app = ASGIApplicationWrapper(app, dispatcher=dispatcher_details) app._nr_wrapped = True + return await wrapped(app, *args, **kwargs) diff --git a/tests/adapter_daphne/test_daphne.py b/tests/adapter_daphne/test_daphne.py index 4953e9a9f..80faac992 100644 --- a/tests/adapter_daphne/test_daphne.py +++ b/tests/adapter_daphne/test_daphne.py @@ -112,7 +112,12 @@ async def fake_app(*args, **kwargs): @override_application_settings({"transaction_name.naming_scheme": "framework"}) def test_daphne_200(port, app): - @validate_transaction_metrics(callable_name(app)) + @validate_transaction_metrics( + callable_name(app), + custom_metrics=[ + ("Python/Dispatcher/Daphne/%s" % daphne.__version__, 1), + ], + ) @raise_background_exceptions() @wait_for_background_threads() def response(): diff --git a/tests/adapter_hypercorn/test_hypercorn.py b/tests/adapter_hypercorn/test_hypercorn.py index 05bf9fdc5..12c3d7d6c 100644 --- a/tests/adapter_hypercorn/test_hypercorn.py +++ b/tests/adapter_hypercorn/test_hypercorn.py @@ -128,7 +128,14 @@ def wait_for_port(port, retries=10): 
@override_application_settings({"transaction_name.naming_scheme": "framework"}) def test_hypercorn_200(port, app): - @validate_transaction_metrics(callable_name(app)) + hypercorn_version = pkg_resources.get_distribution("hypercorn").version + + @validate_transaction_metrics( + callable_name(app), + custom_metrics=[ + ("Python/Dispatcher/Hypercorn/%s" % hypercorn_version, 1), + ], + ) @raise_background_exceptions() @wait_for_background_threads() def response(): From c96ffc53b822c94999488c911313520d6303b3a0 Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Mon, 24 Oct 2022 10:13:56 -0700 Subject: [PATCH 006/108] Fix Flask view support in Code Level Metrics (#664) * Fix Flask view support in Code Level Metrics Co-authored-by: Lalleh Rafeei Co-authored-by: Hannah Stepanek Co-authored-by: Uma Annamalai * [Mega-Linter] Apply linters fixes * Bump tests * Fix CLM tests for flaskrest * [Mega-Linter] Apply linters fixes * Bump tests Co-authored-by: Lalleh Rafeei Co-authored-by: Hannah Stepanek Co-authored-by: Uma Annamalai Co-authored-by: TimPansino Co-authored-by: Uma Annamalai --- newrelic/hooks/framework_flask.py | 18 +++++ .../component_flask_rest/test_application.py | 79 ++++++++++--------- tests/framework_flask/test_views.py | 8 ++ 3 files changed, 67 insertions(+), 38 deletions(-) diff --git a/newrelic/hooks/framework_flask.py b/newrelic/hooks/framework_flask.py index ea2f4f143..c0540a60d 100644 --- a/newrelic/hooks/framework_flask.py +++ b/newrelic/hooks/framework_flask.py @@ -16,6 +16,8 @@ """ +from inspect import isclass + from newrelic.api.function_trace import ( FunctionTrace, FunctionTraceWrapper, @@ -55,6 +57,22 @@ def _nr_wrapper_handler_(wrapped, instance, args, kwargs): name = getattr(wrapped, "_nr_view_func_name", callable_name(wrapped)) view = getattr(wrapped, "view_class", wrapped) + try: + # Attempt to narrow down class based views to the correct method + from flask import request + from flask.views 
import MethodView + + if isclass(view): + if issubclass(view, MethodView): + # For method based views, use the corresponding method if available + method = request.method.lower() + view = getattr(view, method, view) + else: + # For class based views, use the dispatch_request function if available + view = getattr(view, "dispatch_request", view) + except ImportError: + pass + # Set priority=2 so this will take precedence over any error # handler which will be at priority=1. diff --git a/tests/component_flask_rest/test_application.py b/tests/component_flask_rest/test_application.py index 94b6fbc5c..67d60bc53 100644 --- a/tests/component_flask_rest/test_application.py +++ b/tests/component_flask_rest/test_application.py @@ -13,20 +13,27 @@ # limitations under the License. import pytest +from testing_support.fixtures import ( + override_generic_settings, + override_ignore_status_codes, + validate_transaction_errors, + validate_transaction_metrics, +) +from testing_support.validators.validate_code_level_metrics import ( + validate_code_level_metrics, +) +from newrelic.common.object_names import callable_name +from newrelic.core.config import global_settings from newrelic.packages import six -from testing_support.fixtures import (validate_transaction_metrics, - validate_transaction_errors, override_ignore_status_codes, - override_generic_settings) -from testing_support.validators.validate_code_level_metrics import validate_code_level_metrics -from newrelic.core.config import global_settings -from newrelic.common.object_names import callable_name +TEST_APPLICATION_PREFIX = "_test_application.create_app." 
if six.PY3 else "_test_application" @pytest.fixture(params=["flask_restful", "flask_restplus", "flask_restx"]) def application(request): from _test_application import get_test_application + if request.param == "flask_restful": import flask_restful as module elif request.param == "flask_restplus": @@ -44,49 +51,46 @@ def application(request): _test_application_index_scoped_metrics = [ - ('Function/flask.app:Flask.wsgi_app', 1), - ('Python/WSGI/Application', 1), - ('Python/WSGI/Response', 1), - ('Python/WSGI/Finalize', 1), - ('Function/_test_application:index', 1), - ('Function/werkzeug.wsgi:ClosingIterator.close', 1), + ("Function/flask.app:Flask.wsgi_app", 1), + ("Python/WSGI/Application", 1), + ("Python/WSGI/Response", 1), + ("Python/WSGI/Finalize", 1), + ("Function/_test_application:index", 1), + ("Function/werkzeug.wsgi:ClosingIterator.close", 1), ] -@validate_code_level_metrics("_test_application.create_app." if six.PY3 else "_test_application", "IndexResource") +@validate_code_level_metrics(TEST_APPLICATION_PREFIX + ".IndexResource", "get") @validate_transaction_errors(errors=[]) -@validate_transaction_metrics('_test_application:index', - scoped_metrics=_test_application_index_scoped_metrics) +@validate_transaction_metrics("_test_application:index", scoped_metrics=_test_application_index_scoped_metrics) def test_application_index(application): - response = application.get('/index') - response.mustcontain('hello') + response = application.get("/index") + response.mustcontain("hello") _test_application_raises_scoped_metrics = [ - ('Function/flask.app:Flask.wsgi_app', 1), - ('Python/WSGI/Application', 1), - ('Function/_test_application:exception', 1), + ("Function/flask.app:Flask.wsgi_app", 1), + ("Python/WSGI/Application", 1), + ("Function/_test_application:exception", 1), ] @pytest.mark.parametrize( - 'exception,status_code,ignore_status_code,propagate_exceptions', [ - ('werkzeug.exceptions:HTTPException', 404, False, False), - 
('werkzeug.exceptions:HTTPException', 404, True, False), - ('werkzeug.exceptions:HTTPException', 503, False, False), - ('_test_application:CustomException', 500, False, False), - ('_test_application:CustomException', 500, False, True), -]) -def test_application_raises(exception, status_code, ignore_status_code, - propagate_exceptions, application): - - @validate_code_level_metrics("_test_application.create_app." if six.PY3 else "_test_application", "ExceptionResource") - @validate_transaction_metrics('_test_application:exception', - scoped_metrics=_test_application_raises_scoped_metrics) + "exception,status_code,ignore_status_code,propagate_exceptions", + [ + ("werkzeug.exceptions:HTTPException", 404, False, False), + ("werkzeug.exceptions:HTTPException", 404, True, False), + ("werkzeug.exceptions:HTTPException", 503, False, False), + ("_test_application:CustomException", 500, False, False), + ("_test_application:CustomException", 500, False, True), + ], +) +def test_application_raises(exception, status_code, ignore_status_code, propagate_exceptions, application): + @validate_code_level_metrics(TEST_APPLICATION_PREFIX + ".ExceptionResource", "get") + @validate_transaction_metrics("_test_application:exception", scoped_metrics=_test_application_raises_scoped_metrics) def _test(): try: - application.get('/exception/%s/%i' % (exception, - status_code), status=status_code, expect_errors=True) + application.get("/exception/%s/%i" % (exception, status_code), status=status_code, expect_errors=True) except Exception as e: assert propagate_exceptions @@ -108,9 +112,8 @@ def test_application_outside_transaction(application): _settings = global_settings() - @override_generic_settings(_settings, {'enabled': False}) + @override_generic_settings(_settings, {"enabled": False}) def _test(): - application.get('/exception/werkzeug.exceptions:HTTPException/404', - status=404) + application.get("/exception/werkzeug.exceptions:HTTPException/404", status=404) _test() diff --git 
a/tests/framework_flask/test_views.py b/tests/framework_flask/test_views.py index d4dd8178f..698ddd4f4 100644 --- a/tests/framework_flask/test_views.py +++ b/tests/framework_flask/test_views.py @@ -20,6 +20,9 @@ validate_transaction_errors, validate_transaction_metrics, ) +from testing_support.validators.validate_code_level_metrics import ( + validate_code_level_metrics, +) scoped_metrics = [ ("Function/flask.app:Flask.wsgi_app", 1), @@ -50,6 +53,7 @@ def target_application(): return _test_application +@validate_code_level_metrics("_test_views.TestView", "dispatch_request") @validate_transaction_errors(errors=[]) @validate_transaction_metrics("_test_views:test_view", scoped_metrics=scoped_metrics) def test_class_based_view(): @@ -59,6 +63,7 @@ def test_class_based_view(): @skip_if_not_async_handler_support +@validate_code_level_metrics("_test_views_async.TestAsyncView", "dispatch_request") @validate_transaction_errors(errors=[]) @validate_transaction_metrics("_test_views_async:test_async_view", scoped_metrics=scoped_metrics) def test_class_based_async_view(): @@ -67,6 +72,7 @@ def test_class_based_async_view(): response.mustcontain("ASYNC VIEW RESPONSE") +@validate_code_level_metrics("_test_views.TestMethodView", "get") @validate_transaction_errors(errors=[]) @validate_transaction_metrics("_test_views:test_methodview", scoped_metrics=scoped_metrics) def test_get_method_view(): @@ -75,6 +81,7 @@ def test_get_method_view(): response.mustcontain("METHODVIEW GET RESPONSE") +@validate_code_level_metrics("_test_views.TestMethodView", "post") @validate_transaction_errors(errors=[]) @validate_transaction_metrics("_test_views:test_methodview", scoped_metrics=scoped_metrics) def test_post_method_view(): @@ -84,6 +91,7 @@ def test_post_method_view(): @skip_if_not_async_handler_support +@validate_code_level_metrics("_test_views_async.TestAsyncMethodView", "get") @validate_transaction_errors(errors=[]) @validate_transaction_metrics("_test_views_async:test_async_methodview", 
scoped_metrics=scoped_metrics) def test_get_method_async_view(): From 820e0b7eb805e7a7e77a9e81fd3e77543bf812b5 Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Mon, 24 Oct 2022 10:36:59 -0700 Subject: [PATCH 007/108] Fix aioredis version crash (#661) Co-authored-by: Uma Annamalai --- newrelic/hooks/datastore_aioredis.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/newrelic/hooks/datastore_aioredis.py b/newrelic/hooks/datastore_aioredis.py index a2267960c..428787d30 100644 --- a/newrelic/hooks/datastore_aioredis.py +++ b/newrelic/hooks/datastore_aioredis.py @@ -27,9 +27,9 @@ import aioredis try: - AIOREDIS_VERSION = tuple(int(x) for x in getattr(aioredis, "__version__").split(".")) + AIOREDIS_VERSION = lambda: tuple(int(x) for x in getattr(aioredis, "__version__").split(".")) except Exception: - AIOREDIS_VERSION = (0, 0, 0) + AIOREDIS_VERSION = lambda: (0, 0, 0) def _conn_attrs_to_dict(connection): @@ -68,7 +68,7 @@ def _nr_wrapper_AioRedis_method_(wrapped, instance, args, kwargs): # Check for transaction and return early if found. # Method will return synchronously without executing, # it will be added to the command stack and run later. - if AIOREDIS_VERSION < (2,): + if AIOREDIS_VERSION() < (2,): # AioRedis v1 uses a RedisBuffer instead of a real connection for queueing up pipeline commands from aioredis.commands.transaction import _RedisBuffer if isinstance(instance._pool_or_conn, _RedisBuffer): From 03131c99e441120d70eed94c8e8a25b9afc58a83 Mon Sep 17 00:00:00 2001 From: Uma Annamalai Date: Mon, 24 Oct 2022 11:09:34 -0700 Subject: [PATCH 008/108] Add double wrapped testing for Hypercorn and Daphne and dispatcher argument to WSGI API. (#667) * Add double wrapped app tests. * Fix linting errors. * [Mega-Linter] Apply linters fixes * Add co-authors. 
Co-authored-by: Tim Pansino Co-authored-by: Lalleh Rafeei Co-authored-by: Hannah Stepanek Co-authored-by: umaannamalai Co-authored-by: Tim Pansino Co-authored-by: Lalleh Rafeei Co-authored-by: Hannah Stepanek --- newrelic/api/asgi_application.py | 9 +- newrelic/api/wsgi_application.py | 15 +- tests/adapter_daphne/test_daphne.py | 7 +- tests/agent_features/test_asgi_transaction.py | 28 +++- tests/agent_features/test_web_transaction.py | 150 +++++++++++------- tests/testing_support/sample_applications.py | 6 +- 6 files changed, 145 insertions(+), 70 deletions(-) diff --git a/newrelic/api/asgi_application.py b/newrelic/api/asgi_application.py index 6ba049d9e..2e4e4979b 100644 --- a/newrelic/api/asgi_application.py +++ b/newrelic/api/asgi_application.py @@ -371,20 +371,23 @@ async def nr_async_asgi(receive, send): return FunctionWrapper(wrapped, nr_asgi_wrapper) -def asgi_application(application=None, name=None, group=None, framework=None): +def asgi_application(application=None, name=None, group=None, framework=None, dispatcher=None): return functools.partial( ASGIApplicationWrapper, application=application, name=name, group=group, framework=framework, + dispatcher=dispatcher, ) -def wrap_asgi_application(module, object_path, application=None, name=None, group=None, framework=None): +def wrap_asgi_application( + module, object_path, application=None, name=None, group=None, framework=None, dispatcher=None +): wrap_object( module, object_path, ASGIApplicationWrapper, - (application, name, group, framework), + (application, name, group, framework, dispatcher), ) diff --git a/newrelic/api/wsgi_application.py b/newrelic/api/wsgi_application.py index 502c9d8a0..67338cbdd 100644 --- a/newrelic/api/wsgi_application.py +++ b/newrelic/api/wsgi_application.py @@ -691,11 +691,18 @@ def write(data): return FunctionWrapper(wrapped, _nr_wsgi_application_wrapper_) -def wsgi_application(application=None, name=None, group=None, framework=None): +def wsgi_application(application=None, 
name=None, group=None, framework=None, dispatcher=None): return functools.partial( - WSGIApplicationWrapper, application=application, name=name, group=group, framework=framework + WSGIApplicationWrapper, + application=application, + name=name, + group=group, + framework=framework, + dispatcher=dispatcher, ) -def wrap_wsgi_application(module, object_path, application=None, name=None, group=None, framework=None): - wrap_object(module, object_path, WSGIApplicationWrapper, (application, name, group, framework)) +def wrap_wsgi_application( + module, object_path, application=None, name=None, group=None, framework=None, dispatcher=None +): + wrap_object(module, object_path, WSGIApplicationWrapper, (application, name, group, framework, dispatcher)) diff --git a/tests/adapter_daphne/test_daphne.py b/tests/adapter_daphne/test_daphne.py index 80faac992..471e0335b 100644 --- a/tests/adapter_daphne/test_daphne.py +++ b/tests/adapter_daphne/test_daphne.py @@ -29,6 +29,7 @@ AppWithCall, AppWithCallRaw, simple_app_v2_raw, + simple_app_v3, ) from testing_support.util import get_open_port @@ -45,6 +46,10 @@ simple_app_v2_raw, marks=skip_asgi_2_unsupported, ), + pytest.param( + simple_app_v3, + marks=skip_asgi_3_unsupported, + ), pytest.param( AppWithCallRaw(), marks=skip_asgi_3_unsupported, @@ -54,7 +59,7 @@ marks=skip_asgi_3_unsupported, ), ), - ids=("raw", "class_with_call", "class_with_call_double_wrapped"), + ids=("raw", "wrapped", "class_with_call", "class_with_call_double_wrapped"), ) def app(request, server_and_port): app = request.param diff --git a/tests/agent_features/test_asgi_transaction.py b/tests/agent_features/test_asgi_transaction.py index 1820efa86..520e954bc 100644 --- a/tests/agent_features/test_asgi_transaction.py +++ b/tests/agent_features/test_asgi_transaction.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import asyncio import logging import pytest @@ -124,15 +123,34 @@ def test_asgi_application_decorator_no_params_double_callable(): assert response.body == b"" -# Test for presence of framework info based on whether framework is specified -@validate_transaction_metrics(name="test", custom_metrics=[("Python/Framework/framework/v1", 1)]) -def test_framework_metrics(): - asgi_decorator = asgi_application(name="test", framework=("framework", "v1")) +# Test for presence of framework and dispatcher info based on whether framework is specified +@validate_transaction_metrics( + name="test", custom_metrics=[("Python/Framework/framework/v1", 1), ("Python/Dispatcher/dispatcher/v1.0.0", 1)] +) +def test_dispatcher_and_framework_metrics(): + asgi_decorator = asgi_application(name="test", framework=("framework", "v1"), dispatcher=("dispatcher", "v1.0.0")) decorated_application = asgi_decorator(simple_app_v2_raw) application = AsgiTest(decorated_application) application.make_request("GET", "/") +# Test for presence of framework and dispatcher info under existing transaction +@validate_transaction_metrics( + name="test", custom_metrics=[("Python/Framework/framework/v1", 1), ("Python/Dispatcher/dispatcher/v1.0.0", 1)] +) +def test_double_wrapped_dispatcher_and_framework_metrics(): + inner_asgi_decorator = asgi_application( + name="test", framework=("framework", "v1"), dispatcher=("dispatcher", "v1.0.0") + ) + decorated_application = inner_asgi_decorator(simple_app_v2_raw) + + outer_asgi_decorator = asgi_application(name="double_wrapped") + double_decorated_application = outer_asgi_decorator(decorated_application) + + application = AsgiTest(double_decorated_application) + application.make_request("GET", "/") + + @pytest.mark.parametrize("method", ("method", "cls", "static")) @validate_transaction_metrics(name="", group="Uri") def test_app_with_descriptor(method): diff --git a/tests/agent_features/test_web_transaction.py b/tests/agent_features/test_web_transaction.py index 
22bfd6eb8..0d8b17548 100644 --- a/tests/agent_features/test_web_transaction.py +++ b/tests/agent_features/test_web_transaction.py @@ -14,67 +14,109 @@ # limitations under the License. import gc -import webtest -import pytest import time + +import pytest +import webtest +from testing_support.fixtures import validate_attributes, validate_transaction_metrics +from testing_support.sample_applications import simple_app, simple_app_raw + +import newrelic.packages.six as six from newrelic.api.application import application_instance from newrelic.api.web_transaction import WebTransaction -from testing_support.fixtures import (validate_transaction_metrics, - validate_attributes) -from testing_support.sample_applications import simple_app -import newrelic.packages.six as six +from newrelic.api.wsgi_application import wsgi_application + application = webtest.TestApp(simple_app) # TODO: WSGI metrics must not be generated for a WebTransaction METRICS = ( - ('Python/WSGI/Input/Bytes', None), - ('Python/WSGI/Input/Time', None), - ('Python/WSGI/Input/Calls/read', None), - ('Python/WSGI/Input/Calls/readline', None), - ('Python/WSGI/Input/Calls/readlines', None), - ('Python/WSGI/Output/Bytes', None), - ('Python/WSGI/Output/Time', None), - ('Python/WSGI/Output/Calls/yield', None), - ('Python/WSGI/Output/Calls/write', None), + ("Python/WSGI/Input/Bytes", None), + ("Python/WSGI/Input/Time", None), + ("Python/WSGI/Input/Calls/read", None), + ("Python/WSGI/Input/Calls/readline", None), + ("Python/WSGI/Input/Calls/readlines", None), + ("Python/WSGI/Output/Bytes", None), + ("Python/WSGI/Output/Time", None), + ("Python/WSGI/Output/Calls/yield", None), + ("Python/WSGI/Output/Calls/write", None), ) -# TODO: Add rollup_metrics=METRICS +# Test for presence of framework and dispatcher info based on whether framework is specified +@validate_transaction_metrics( + name="test", custom_metrics=[("Python/Framework/framework/v1", 1), ("Python/Dispatcher/dispatcher/v1.0.0", 1)] +) +def 
test_dispatcher_and_framework_metrics(): + inner_wsgi_decorator = wsgi_application( + name="test", framework=("framework", "v1"), dispatcher=("dispatcher", "v1.0.0") + ) + decorated_application = inner_wsgi_decorator(simple_app_raw) + + application = webtest.TestApp(decorated_application) + application.get("/") + + +# Test for presence of framework and dispatcher info under existing transaction @validate_transaction_metrics( - 'test_base_web_transaction', - group='Test') -@validate_attributes('agent', -[ - 'request.headers.accept', 'request.headers.contentLength', - 'request.headers.contentType', 'request.headers.host', - 'request.headers.referer', 'request.headers.userAgent', 'request.method', - 'request.uri', 'response.status', 'response.headers.contentLength', - 'response.headers.contentType', 'request.parameters.foo', - 'request.parameters.boo', 'webfrontend.queue.seconds', -]) -@pytest.mark.parametrize('use_bytes', (True, False)) + name="test", custom_metrics=[("Python/Framework/framework/v1", 1), ("Python/Dispatcher/dispatcher/v1.0.0", 1)] +) +def test_double_wrapped_dispatcher_and_framework_metrics(): + inner_wsgi_decorator = wsgi_application( + name="test", framework=("framework", "v1"), dispatcher=("dispatcher", "v1.0.0") + ) + decorated_application = inner_wsgi_decorator(simple_app_raw) + + outer_wsgi_decorator = wsgi_application(name="double_wrapped") + double_decorated_application = outer_wsgi_decorator(decorated_application) + + application = webtest.TestApp(double_decorated_application) + application.get("/") + + +# TODO: Add rollup_metrics=METRICS +@validate_transaction_metrics("test_base_web_transaction", group="Test") +@validate_attributes( + "agent", + [ + "request.headers.accept", + "request.headers.contentLength", + "request.headers.contentType", + "request.headers.host", + "request.headers.referer", + "request.headers.userAgent", + "request.method", + "request.uri", + "response.status", + "response.headers.contentLength", + 
"response.headers.contentType", + "request.parameters.foo", + "request.parameters.boo", + "webfrontend.queue.seconds", + ], +) +@pytest.mark.parametrize("use_bytes", (True, False)) def test_base_web_transaction(use_bytes): application = application_instance() request_headers = { - 'Accept': 'text/plain', - 'Content-Length': '0', - 'Content-Type': 'text/plain', - 'Host': 'localhost', - 'Referer': 'http://example.com?q=1&boat=⛵', - 'User-Agent': 'potato', - 'X-Request-Start': str(time.time() - 0.2), - 'newRelic': 'invalid', + "Accept": "text/plain", + "Content-Length": "0", + "Content-Type": "text/plain", + "Host": "localhost", + "Referer": "http://example.com?q=1&boat=⛵", + "User-Agent": "potato", + "X-Request-Start": str(time.time() - 0.2), + "newRelic": "invalid", } if use_bytes: byte_headers = {} for name, value in request_headers.items(): - name = name.encode('utf-8') + name = name.encode("utf-8") try: - value = value.encode('utf-8') + value = value.encode("utf-8") except UnicodeDecodeError: assert six.PY2 byte_headers[name] = value @@ -82,24 +124,22 @@ def test_base_web_transaction(use_bytes): request_headers = byte_headers transaction = WebTransaction( - application, - 'test_base_web_transaction', - group='Test', - scheme='http', - host='localhost', - port=8000, - request_method='HEAD', - request_path='/foobar', - query_string='foo=bar&boo=baz', - headers=request_headers.items(), + application, + "test_base_web_transaction", + group="Test", + scheme="http", + host="localhost", + port=8000, + request_method="HEAD", + request_path="/foobar", + query_string="foo=bar&boo=baz", + headers=request_headers.items(), ) if use_bytes: - response_headers = ((b'Content-Length', b'0'), - (b'Content-Type', b'text/plain')) + response_headers = ((b"Content-Length", b"0"), (b"Content-Type", b"text/plain")) else: - response_headers = (('Content-Length', '0'), - ('Content-Type', 'text/plain')) + response_headers = (("Content-Length", "0"), ("Content-Type", "text/plain")) with 
transaction: transaction.process_response(200, response_headers) @@ -117,8 +157,8 @@ def validate_no_garbage(): @validate_transaction_metrics( - name='', - group='Uri', + name="", + group="Uri", ) def test_wsgi_app_memory(validate_no_garbage): - application.get('/') + application.get("/") diff --git a/tests/testing_support/sample_applications.py b/tests/testing_support/sample_applications.py index 74d5e6dbe..7973a4e11 100644 --- a/tests/testing_support/sample_applications.py +++ b/tests/testing_support/sample_applications.py @@ -128,8 +128,7 @@ def simple_exceptional_app(environ, start_response): raise ValueError("Transaction had bad value") -@wsgi_application() -def simple_app(environ, start_response): +def simple_app_raw(environ, start_response): status = "200 OK" _logger.info("Starting response") @@ -138,6 +137,9 @@ def simple_app(environ, start_response): return [] +simple_app = wsgi_application()(simple_app_raw) + + @wsgi_application() def simple_custom_event_app(environ, start_response): From 1e6c93762332601b58d15847f1fc91ef6289394c Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Mon, 24 Oct 2022 11:27:19 -0700 Subject: [PATCH 009/108] Add Python 3.11 Support (#654) * Add py311 tests * Fix typo * Added 3.11 support for aiohttp framework Co-authored-by: Timothy Pansino * Set up environment to run Python 3.11 Co-authored-by: Timothy Pansino * Add Python 3.11 support for agent_features Co-authored-by: Timothy Pansino * Partial Python 3.11 support added for Tornado Co-authored-by: Timothy Pansino * Adjust postgres versions * Fix tornado install path locally * Remove aioredis py311 tests * Update 3.11 to dev in tests * Fix sanic instrumentation and imp/importlib deprecation Co-authored-by: Timothy Pansino * Simplify wheel build options * Update cibuildwheel for 3.11 * Remove falconmaster py311 test Co-authored-by: Lalleh Rafeei Co-authored-by: Timothy Pansino --- .devcontainer/dotfiles | 1 + 
.../actions/setup-python-matrix/action.yml | 5 + .github/workflows/deploy-python.yml | 4 +- newrelic/hooks/framework_aiohttp.py | 27 +- newrelic/hooks/framework_sanic.py | 2 +- setup.py | 1 + .../_test_async_coroutine_trace.py | 2 + tests/agent_features/test_async_timing.py | 12 +- tests/agent_features/test_coroutine_trace.py | 29 +-- .../test_coroutine_transaction.py | 73 +++--- .../test_event_loop_wait_time.py | 17 +- tests/agent_unittests/conftest.py | 2 +- .../test_utilization_settings.py | 246 +++++++++--------- tests/cross_agent/test_collector_hostname.py | 68 ++--- tests/cross_agent/test_utilization_configs.py | 123 ++++----- .../framework_aiohttp/_target_application.py | 130 +++++---- tests/framework_aiohttp/conftest.py | 3 +- tests/framework_aiohttp/test_client.py | 26 +- tests/framework_aiohttp/test_client_cat.py | 25 +- tests/framework_aiohttp/test_externals.py | 32 ++- tests/framework_aiohttp/test_middleware.py | 64 +++-- tests/framework_aiohttp/test_server.py | 229 ++++++++-------- tests/framework_aiohttp/test_server_cat.py | 178 +++++++------ tests/framework_aiohttp/test_ws.py | 23 +- .../framework_tornado/_target_application.py | 88 +++---- tests/framework_tornado/test_server.py | 226 ++++++++-------- tox.ini | 143 +++++----- 27 files changed, 867 insertions(+), 912 deletions(-) create mode 160000 .devcontainer/dotfiles diff --git a/.devcontainer/dotfiles b/.devcontainer/dotfiles new file mode 160000 index 000000000..4d575e4d6 --- /dev/null +++ b/.devcontainer/dotfiles @@ -0,0 +1 @@ +Subproject commit 4d575e4d60a9f195f9f315dde7f380a5ae26e27d diff --git a/.github/actions/setup-python-matrix/action.yml b/.github/actions/setup-python-matrix/action.yml index 344cf686c..c507278b4 100644 --- a/.github/actions/setup-python-matrix/action.yml +++ b/.github/actions/setup-python-matrix/action.yml @@ -33,6 +33,11 @@ runs: python-version: "3.10" architecture: x64 + - uses: actions/setup-python@v3 + with: + python-version: "3.11-dev" + architecture: x64 + - uses: 
actions/setup-python@v3 with: python-version: "2.7" diff --git a/.github/workflows/deploy-python.yml b/.github/workflows/deploy-python.yml index e8fbd4f7f..fe16ee485 100644 --- a/.github/workflows/deploy-python.yml +++ b/.github/workflows/deploy-python.yml @@ -54,10 +54,10 @@ jobs: CIBW_ENVIRONMENT: "LD_LIBRARY_PATH=/opt/rh/devtoolset-8/root/usr/lib64:/opt/rh/devtoolset-8/root/usr/lib:/opt/rh/devtoolset-8/root/usr/lib64/dyninst:/opt/rh/devtoolset-8/root/usr/lib/dyninst:/usr/local/lib64:/usr/local/lib" - name: Build Manylinux Wheels (Python 3) - uses: pypa/cibuildwheel@v2.1.3 + uses: pypa/cibuildwheel@v2.11.1 env: CIBW_PLATFORM: linux - CIBW_BUILD: cp37-manylinux_aarch64 cp38-manylinux_aarch64 cp39-manylinux_aarch64 cp310-manylinux_aarch64 cp37-manylinux_x86_64 cp38-manylinux_x86_64 cp39-manylinux_x86_64 cp310-manylinux_x86_64 + CIBW_BUILD: cp37-manylinux* cp38-manylinux* cp39-manylinux* cp310-manylinux* cp311-manylinux* CIBW_ARCHS: x86_64 aarch64 CIBW_ENVIRONMENT: "LD_LIBRARY_PATH=/opt/rh/devtoolset-8/root/usr/lib64:/opt/rh/devtoolset-8/root/usr/lib:/opt/rh/devtoolset-8/root/usr/lib64/dyninst:/opt/rh/devtoolset-8/root/usr/lib/dyninst:/usr/local/lib64:/usr/local/lib" diff --git a/newrelic/hooks/framework_aiohttp.py b/newrelic/hooks/framework_aiohttp.py index 346753167..de72ae0c5 100644 --- a/newrelic/hooks/framework_aiohttp.py +++ b/newrelic/hooks/framework_aiohttp.py @@ -11,7 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-import asyncio + import inspect import itertools @@ -163,9 +163,8 @@ def _bind_params(request): @function_wrapper def _nr_aiohttp_wrap_middleware_(wrapped, instance, args, kwargs): - @asyncio.coroutine - def _inner(): - result = yield from wrapped(*args, **kwargs) + async def _inner(): + result = await wrapped(*args, **kwargs) return function_trace()(result) return _inner() @@ -221,10 +220,9 @@ def _nr_aiohttp_add_cat_headers_(wrapped, instance, args, kwargs): if is_coroutine_callable(wrapped): - @asyncio.coroutine - def new_coro(): + async def new_coro(): try: - result = yield from wrapped(*args, **kwargs) + result = await wrapped(*args, **kwargs) return result finally: instance.headers = tmp @@ -267,10 +265,9 @@ def _nr_aiohttp_request_wrapper_(wrapped, instance, args, kwargs): method, url = _bind_request(*args, **kwargs) trace = ExternalTrace("aiohttp", str(url), method) - @asyncio.coroutine - def _coro(): + async def _coro(): try: - response = yield from wrapped(*args, **kwargs) + response = await wrapped(*args, **kwargs) try: trace.process_response_headers(response.headers.items()) @@ -332,14 +329,10 @@ def _nr_request_wrapper(wrapped, instance, args, kwargs): coro = wrapped(*args, **kwargs) - if hasattr(coro, "__await__"): - coro = coro.__await__() - - @asyncio.coroutine - def _coro(*_args, **_kwargs): + async def _coro(*_args, **_kwargs): transaction = current_transaction() if transaction is None: - response = yield from coro + response = await coro return response # Patch in should_ignore to all notice_error calls @@ -352,7 +345,7 @@ def _coro(*_args, **_kwargs): import aiohttp.web as _web try: - response = yield from coro + response = await coro except _web.HTTPException as e: _nr_process_response(e, transaction) raise diff --git a/newrelic/hooks/framework_sanic.py b/newrelic/hooks/framework_sanic.py index 745cdbf70..94b5179c2 100644 --- a/newrelic/hooks/framework_sanic.py +++ b/newrelic/hooks/framework_sanic.py @@ -311,4 +311,4 @@ def 
instrument_sanic_response(module): def instrument_sanic_touchup_service(module): if hasattr(module, "TouchUp") and hasattr(module.TouchUp, "run"): - wrap_function_wrapper(module.TouchUp, "run", _nr_wrap_touchup_run) + wrap_function_wrapper(module, "TouchUp.run", _nr_wrap_touchup_run) diff --git a/setup.py b/setup.py index ff1560936..217ba458c 100644 --- a/setup.py +++ b/setup.py @@ -121,6 +121,7 @@ def build_extension(self, ext): "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: System :: Monitoring", diff --git a/tests/agent_features/_test_async_coroutine_trace.py b/tests/agent_features/_test_async_coroutine_trace.py index 96cb3c7dc..11f018535 100644 --- a/tests/agent_features/_test_async_coroutine_trace.py +++ b/tests/agent_features/_test_async_coroutine_trace.py @@ -14,6 +14,7 @@ import asyncio import functools +import sys import time import pytest @@ -68,6 +69,7 @@ def _test(): assert full_metrics[metric_key].total_call_time >= 0.1 +@pytest.mark.skipif(sys.version_info >= (3, 11), reason="Asyncio decorator was removed in Python 3.11+.") @pytest.mark.parametrize( "trace,metric", [ diff --git a/tests/agent_features/test_async_timing.py b/tests/agent_features/test_async_timing.py index 0198f151a..f8cad864d 100644 --- a/tests/agent_features/test_async_timing.py +++ b/tests/agent_features/test_async_timing.py @@ -44,17 +44,15 @@ def _validate_total_time_value(wrapped, instance, args, kwargs): @function_trace(name="child") -@asyncio.coroutine -def child(): - yield from asyncio.sleep(0.1) +async def child(): + await asyncio.sleep(0.1) @background_task(name="parent") -@asyncio.coroutine -def parent(calls): +async def parent(calls): coros = [child() for _ in range(calls)] - yield from asyncio.gather(*coros) - yield from 
asyncio.sleep(0.1) + await asyncio.gather(*coros) + await asyncio.sleep(0.1) @validate_total_time_value_greater_than(0.2) diff --git a/tests/agent_features/test_coroutine_trace.py b/tests/agent_features/test_coroutine_trace.py index b15537573..7aba53f62 100644 --- a/tests/agent_features/test_coroutine_trace.py +++ b/tests/agent_features/test_coroutine_trace.py @@ -105,33 +105,30 @@ def test_coroutine_siblings(event_loop): # the case if child was a child of child since child is terminal) @function_trace("child", terminal=True) - @asyncio.coroutine - def child(wait, event=None): + async def child(wait, event=None): if event: event.set() - yield from wait.wait() + await wait.wait() - @asyncio.coroutine - def middle(): + async def middle(): wait = asyncio.Event() started = asyncio.Event() child_0 = asyncio.ensure_future(child(wait, started)) # Wait for the first child to start - yield from started.wait() + await started.wait() child_1 = asyncio.ensure_future(child(wait)) # Allow children to complete wait.set() - yield from child_1 - yield from child_0 + await child_1 + await child_0 @function_trace("parent") - @asyncio.coroutine - def parent(): - yield from asyncio.ensure_future(middle()) + async def parent(): + await asyncio.ensure_future(middle()) event_loop.run_until_complete(parent()) @@ -465,15 +462,13 @@ def test_trace_outlives_transaction(event_loop): running, finish = asyncio.Event(), asyncio.Event() @function_trace(name="coro") - @asyncio.coroutine - def _coro(): + async def _coro(): running.set() - yield from finish.wait() + await finish.wait() - @asyncio.coroutine - def parent(): + async def parent(): task.append(asyncio.ensure_future(_coro())) - yield from running.wait() + await running.wait() @validate_transaction_metrics( "test_trace_outlives_transaction", diff --git a/tests/agent_features/test_coroutine_transaction.py b/tests/agent_features/test_coroutine_transaction.py index 1a55385c7..dd7f281e2 100644 --- 
a/tests/agent_features/test_coroutine_transaction.py +++ b/tests/agent_features/test_coroutine_transaction.py @@ -39,8 +39,7 @@ def coroutine_test(event_loop, transaction, nr_enabled=True, does_hang=False, call_exit=False, runtime_error=False): @transaction - @asyncio.coroutine - def task(): + async def task(): txn = current_transaction() if not nr_enabled: @@ -55,15 +54,15 @@ def task(): try: if does_hang: - yield from loop.create_future() + await loop.create_future() # noqa else: - yield from asyncio.sleep(0.0) + await asyncio.sleep(0.0) if nr_enabled and txn.enabled: # Validate loop time is recorded after suspend assert txn._loop_time > 0.0 except GeneratorExit: if runtime_error: - yield from asyncio.sleep(0.0) + await asyncio.sleep(0.0) return task @@ -160,11 +159,10 @@ def test_async_coroutine_throw_cancel(event_loop, num_coroutines, create_test_ta tasks = [create_test_task(event_loop, transaction) for _ in range(num_coroutines)] - @asyncio.coroutine - def task_c(): + async def task_c(): futures = [asyncio.ensure_future(t()) for t in tasks] - yield from asyncio.sleep(0.0) + await asyncio.sleep(0.0) [f.cancel() for f in futures] @@ -195,8 +193,7 @@ def test_async_coroutine_throw_error(event_loop, num_coroutines, create_test_tas tasks = [create_test_task(event_loop, transaction) for _ in range(num_coroutines)] - @asyncio.coroutine - def task_c(): + async def task_c(): coros = [t() for t in tasks] for coro in coros: @@ -232,14 +229,13 @@ def test_async_coroutine_close(event_loop, num_coroutines, create_test_task, tra tasks = [create_test_task(event_loop, transaction) for _ in range(num_coroutines)] - @asyncio.coroutine - def task_c(): + async def task_c(): coros = [t() for t in tasks] if start_coroutines: [asyncio.ensure_future(coro) for coro in coros] - yield from asyncio.sleep(0.0) + await asyncio.sleep(0.0) [coro.close() for coro in coros] @@ -273,13 +269,12 @@ def test_async_coroutine_close_raises_error(event_loop, num_coroutines, create_t tasks = 
[create_test_task(event_loop, transaction, runtime_error=True) for _ in range(num_coroutines)] - @asyncio.coroutine - def task_c(): + async def task_c(): coros = [t() for t in tasks] [c.send(None) for c in coros] - yield from asyncio.sleep(0.0) + await asyncio.sleep(0.0) for coro in coros: with pytest.raises(RuntimeError): @@ -313,24 +308,21 @@ def test_deferred_async_background_task(event_loop, transaction, metric, argumen args, kwargs = arguments("deferred") @transaction(*args, **kwargs) - @asyncio.coroutine - def child_task(): - yield from asyncio.sleep(0) + async def child_task(): + await asyncio.sleep(0) main_metric = (metric % "main", "") args, kwargs = arguments("main") @transaction(*args, **kwargs) - @asyncio.coroutine - def parent_task(): - yield from asyncio.sleep(0) + async def parent_task(): + await asyncio.sleep(0) return event_loop.create_task(child_task()) - @asyncio.coroutine - def test_runner(): - child = yield from parent_task() - yield from child + async def test_runner(): + child = await parent_task() + await child metrics = [] @@ -362,18 +354,16 @@ def test_child_transaction_when_parent_is_running(event_loop, transaction, metri args, kwargs = arguments("deferred") @transaction(*args, **kwargs) - @asyncio.coroutine - def child_task(): - yield from asyncio.sleep(0) + async def child_task(): + await asyncio.sleep(0) main_metric = (metric % "main", "") args, kwargs = arguments("main") @transaction(*args, **kwargs) - @asyncio.coroutine - def parent_task(): - yield from event_loop.create_task(child_task()) + async def parent_task(): + await event_loop.create_task(child_task()) metrics = [] @@ -405,9 +395,8 @@ def test_nested_coroutine_inside_sync(event_loop, transaction, metric, arguments args, kwargs = arguments("child") @transaction(*args, **kwargs) - @asyncio.coroutine - def child_task(): - yield from asyncio.sleep(0) + async def child_task(): + await asyncio.sleep(0) main_metric = (metric % "main", "") args, kwargs = arguments("main") @@ -443,22 
+432,20 @@ def test_nested_coroutine_task_already_active(event_loop, transaction, metric, a args, kwargs = arguments("deferred") @transaction(*args, **kwargs) - @asyncio.coroutine - def child_task(): - yield from asyncio.sleep(0) + async def child_task(): + await asyncio.sleep(0) @function_trace() - def child_trace(): - yield from child_task() + async def child_trace(): + await child_task() main_metric = (metric % "main", "") args, kwargs = arguments("main") @transaction(*args, **kwargs) - @asyncio.coroutine - def parent_task(): - yield from event_loop.create_task(child_trace()) + async def parent_task(): + await event_loop.create_task(child_trace()) metrics = [] diff --git a/tests/agent_features/test_event_loop_wait_time.py b/tests/agent_features/test_event_loop_wait_time.py index ccf57c9a4..c92a611cc 100644 --- a/tests/agent_features/test_event_loop_wait_time.py +++ b/tests/agent_features/test_event_loop_wait_time.py @@ -30,36 +30,33 @@ @background_task(name="block") -@asyncio.coroutine -def block_loop(ready, done, blocking_transaction_active, times=1): +async def block_loop(ready, done, blocking_transaction_active, times=1): for _ in range(times): - yield from ready.wait() + await ready.wait() ready.clear() time.sleep(0.1) done.set() if blocking_transaction_active: - yield from ready.wait() + await ready.wait() @function_trace(name="waiter") -@asyncio.coroutine -def waiter(ready, done, times=1): +async def waiter(ready, done, times=1): for _ in range(times): ready.set() - yield from done.wait() + await done.wait() done.clear() @background_task(name="wait") -@asyncio.coroutine -def wait_for_loop(ready, done, times=1): +async def wait_for_loop(ready, done, times=1): transaction = current_transaction() transaction._sampled = True # Run the waiter on another task so that the sentinel for wait appears # multiple times in the trace cache - yield from asyncio.ensure_future(waiter(ready, done, times)) + await asyncio.ensure_future(waiter(ready, done, times)) # Set the 
ready to terminate the block_loop if it's running ready.set() diff --git a/tests/agent_unittests/conftest.py b/tests/agent_unittests/conftest.py index 012e6ca4b..93f3228ad 100644 --- a/tests/agent_unittests/conftest.py +++ b/tests/agent_unittests/conftest.py @@ -44,7 +44,7 @@ reload except NameError: # python 3.x - from imp import reload # pylint: disable=W0402 + from importlib import reload class FakeProtos(object): diff --git a/tests/agent_unittests/test_utilization_settings.py b/tests/agent_unittests/test_utilization_settings.py index 0cc9b4bc7..8af4bcbf1 100644 --- a/tests/agent_unittests/test_utilization_settings.py +++ b/tests/agent_unittests/test_utilization_settings.py @@ -13,29 +13,33 @@ # limitations under the License. import os -import pytest import tempfile -from newrelic.common.object_wrapper import function_wrapper -from newrelic.core.agent_protocol import AgentProtocol -from newrelic.config import initialize +import pytest # these will be reloaded for each test import newrelic.config import newrelic.core.config +from newrelic.common.object_wrapper import function_wrapper +from newrelic.config import initialize +from newrelic.core.agent_protocol import AgentProtocol # the specific methods imported here will not be changed when the modules are # reloaded -from newrelic.core.config import (_remove_ignored_configs, - finalize_application_settings, _environ_as_int, _environ_as_float, - global_settings) +from newrelic.core.config import ( + _environ_as_float, + _environ_as_int, + _remove_ignored_configs, + finalize_application_settings, + global_settings, +) try: # python 2.x reload except NameError: # python 3.x - from imp import reload + from importlib import reload INI_FILE_WITHOUT_UTIL_CONF = b""" [newrelic] @@ -56,14 +60,16 @@ """ ENV_WITHOUT_UTIL_CONF = {} -ENV_WITH_UTIL_CONF = {'NEW_RELIC_UTILIZATION_BILLING_HOSTNAME': 'env-hostname'} +ENV_WITH_UTIL_CONF = {"NEW_RELIC_UTILIZATION_BILLING_HOSTNAME": "env-hostname"} ENV_WITH_BAD_UTIL_CONF = { - 
'NEW_RELIC_UTILIZATION_LOGICAL_PROCESSORS': 'notanum', - 'NEW_RELIC_UTILIZATION_BILLING_HOSTNAME': 'env-hostname', - 'NEW_RELIC_UTILIZATION_TOTAL_RAM_MIB': '98765', + "NEW_RELIC_UTILIZATION_LOGICAL_PROCESSORS": "notanum", + "NEW_RELIC_UTILIZATION_BILLING_HOSTNAME": "env-hostname", + "NEW_RELIC_UTILIZATION_TOTAL_RAM_MIB": "98765", +} +ENV_WITH_HEROKU = { + "NEW_RELIC_HEROKU_USE_DYNO_NAMES": "false", + "NEW_RELIC_HEROKU_DYNO_NAME_PREFIXES_TO_SHORTEN": "meow wruff", } -ENV_WITH_HEROKU = {'NEW_RELIC_HEROKU_USE_DYNO_NAMES': 'false', - 'NEW_RELIC_HEROKU_DYNO_NAME_PREFIXES_TO_SHORTEN': 'meow wruff'} INITIAL_ENV = os.environ @@ -108,6 +114,7 @@ def reset(wrapped, instance, args, kwargs): returned = wrapped(*args, **kwargs) return returned + return reset @@ -115,39 +122,35 @@ def reset(wrapped, instance, args, kwargs): def test_heroku_default(): settings = global_settings() assert settings.heroku.use_dyno_names is True - assert settings.heroku.dyno_name_prefixes_to_shorten in \ - (['scheduler', 'run'], ['run', 'scheduler']) + assert settings.heroku.dyno_name_prefixes_to_shorten in (["scheduler", "run"], ["run", "scheduler"]) @reset_agent_config(INI_FILE_WITHOUT_UTIL_CONF, ENV_WITH_HEROKU) def test_heroku_override(): settings = global_settings() assert settings.heroku.use_dyno_names is False - assert settings.heroku.dyno_name_prefixes_to_shorten in \ - (['meow', 'wruff'], ['wruff', 'meow']) + assert settings.heroku.dyno_name_prefixes_to_shorten in (["meow", "wruff"], ["wruff", "meow"]) @reset_agent_config(INI_FILE_WITHOUT_UTIL_CONF, ENV_WITH_UTIL_CONF) def test_billing_hostname_from_env_vars(): settings = global_settings() - assert settings.utilization.billing_hostname == 'env-hostname' + assert settings.utilization.billing_hostname == "env-hostname" - local_config, = AgentProtocol._connect_payload( - '', [], [], settings) - util_conf = local_config['utilization'].get('config') - assert util_conf == {'hostname': 'env-hostname'} + (local_config,) = 
AgentProtocol._connect_payload("", [], [], settings) + util_conf = local_config["utilization"].get("config") + assert util_conf == {"hostname": "env-hostname"} @reset_agent_config(INI_FILE_WITH_UTIL_CONF, ENV_WITH_UTIL_CONF) def test_billing_hostname_precedence(): # ini-file takes precedence over env vars settings = global_settings() - assert settings.utilization.billing_hostname == 'file-hostname' + assert settings.utilization.billing_hostname == "file-hostname" - local_config, = AgentProtocol._connect_payload( - '', [], [], settings) - util_conf = local_config['utilization'].get('config') - assert util_conf == {'hostname': 'file-hostname'} + (local_config,) = AgentProtocol._connect_payload("", [], [], settings) + util_conf = local_config["utilization"].get("config") + assert util_conf == {"hostname": "file-hostname"} @reset_agent_config(INI_FILE_WITHOUT_UTIL_CONF, ENV_WITHOUT_UTIL_CONF) @@ -157,21 +160,19 @@ def test_billing_hostname_with_blank_ini_file_no_env(): # if no utilization config settings are set, the 'config' section is not in # the payload at all - local_config, = AgentProtocol._connect_payload( - '', [], [], settings) - util_conf = local_config['utilization'].get('config') + (local_config,) = AgentProtocol._connect_payload("", [], [], settings) + util_conf = local_config["utilization"].get("config") assert util_conf is None @reset_agent_config(INI_FILE_WITH_UTIL_CONF, ENV_WITHOUT_UTIL_CONF) def test_billing_hostname_with_set_in_ini_not_in_env(): settings = global_settings() - assert settings.utilization.billing_hostname == 'file-hostname' + assert settings.utilization.billing_hostname == "file-hostname" - local_config, = AgentProtocol._connect_payload( - '', [], [], settings) - util_conf = local_config['utilization'].get('config') - assert util_conf == {'hostname': 'file-hostname'} + (local_config,) = AgentProtocol._connect_payload("", [], [], settings) + util_conf = local_config["utilization"].get("config") + assert util_conf == {"hostname": 
"file-hostname"} @reset_agent_config(INI_FILE_WITH_BAD_UTIL_CONF, ENV_WITHOUT_UTIL_CONF) @@ -179,10 +180,9 @@ def test_bad_value_in_ini_file(): settings = global_settings() assert settings.utilization.logical_processors == 0 - local_config, = AgentProtocol._connect_payload( - '', [], [], settings) - util_conf = local_config['utilization'].get('config') - assert util_conf == {'hostname': 'file-hostname', 'total_ram_mib': 12345} + (local_config,) = AgentProtocol._connect_payload("", [], [], settings) + util_conf = local_config["utilization"].get("config") + assert util_conf == {"hostname": "file-hostname", "total_ram_mib": 12345} @reset_agent_config(INI_FILE_WITHOUT_UTIL_CONF, ENV_WITH_BAD_UTIL_CONF) @@ -190,160 +190,154 @@ def test_bad_value_in_env_var(): settings = global_settings() assert settings.utilization.logical_processors == 0 - local_config, = AgentProtocol._connect_payload( - '', [], [], settings) - util_conf = local_config['utilization'].get('config') - assert util_conf == {'hostname': 'env-hostname', 'total_ram_mib': 98765} + (local_config,) = AgentProtocol._connect_payload("", [], [], settings) + util_conf = local_config["utilization"].get("config") + assert util_conf == {"hostname": "env-hostname", "total_ram_mib": 98765} # Tests for combining with server side settings _server_side_config_settings_util_conf = [ { - 'foo': 123, - 'bar': 456, - 'agent_config': { - 'utilization.billing_hostname': 'server-side-hostname' - }, + "foo": 123, + "bar": 456, + "agent_config": {"utilization.billing_hostname": "server-side-hostname"}, }, { - 'foo': 123, - 'bar': 456, - 'agent_config': { - 'baz': 789, + "foo": 123, + "bar": 456, + "agent_config": { + "baz": 789, }, }, { - 'foo': 123, - 'bar': 456, + "foo": 123, + "bar": 456, }, ] -@pytest.mark.parametrize('server_settings', - _server_side_config_settings_util_conf) +@pytest.mark.parametrize("server_settings", _server_side_config_settings_util_conf) def test_remove_ignored_configs(server_settings): fixed_settings = 
_remove_ignored_configs(server_settings) - agent_config = fixed_settings.get('agent_config', {}) - assert 'utilization.billing_hostname' not in agent_config + agent_config = fixed_settings.get("agent_config", {}) + assert "utilization.billing_hostname" not in agent_config @reset_agent_config(INI_FILE_WITH_UTIL_CONF, ENV_WITHOUT_UTIL_CONF) -@pytest.mark.parametrize('server_settings', - _server_side_config_settings_util_conf) +@pytest.mark.parametrize("server_settings", _server_side_config_settings_util_conf) def test_finalize_application_settings(server_settings): settings = global_settings() - finalize_application_settings(server_side_config=server_settings, - settings=settings) + finalize_application_settings(server_side_config=server_settings, settings=settings) # hostname set in ini_file and not in env vars - assert settings.utilization.billing_hostname == 'file-hostname' + assert settings.utilization.billing_hostname == "file-hostname" # Tests for _environ_as_int _tests_environ_as_int = [ { - 'name': 'test no env var set, no default requested', - 'envvar_set': False, - 'envvar_val': None, # None set - 'default': None, # None requested - 'expected_value': 0, + "name": "test no env var set, no default requested", + "envvar_set": False, + "envvar_val": None, # None set + "default": None, # None requested + "expected_value": 0, }, { - 'name': 'test no env var set, default requested', - 'envvar_set': False, - 'envvar_val': None, # None set - 'default': 123, - 'expected_value': 123, + "name": "test no env var set, default requested", + "envvar_set": False, + "envvar_val": None, # None set + "default": 123, + "expected_value": 123, }, { - 'name': 'test env var is not an int, no default requested', - 'envvar_set': True, - 'envvar_val': 'testing', - 'default': None, # None requested - 'expected_value': 0, + "name": "test env var is not an int, no default requested", + "envvar_set": True, + "envvar_val": "testing", + "default": None, # None requested + "expected_value": 
0, }, { - 'name': 'test env var is not an int, default requested', - 'envvar_set': True, - 'envvar_val': 'testing-more', - 'default': 1234, - 'expected_value': 1234, + "name": "test env var is not an int, default requested", + "envvar_set": True, + "envvar_val": "testing-more", + "default": 1234, + "expected_value": 1234, }, { - 'name': 'test env var is an int', - 'envvar_set': True, - 'envvar_val': 7239, - 'default': None, # None requested - 'expected_value': 7239, + "name": "test env var is an int", + "envvar_set": True, + "envvar_val": 7239, + "default": None, # None requested + "expected_value": 7239, }, ] _tests_environ_as_float = [ { - 'name': 'test no env var set, no default requested', - 'envvar_set': False, - 'envvar_val': None, # None set - 'default': None, # None requested - 'expected_value': 0.0, + "name": "test no env var set, no default requested", + "envvar_set": False, + "envvar_val": None, # None set + "default": None, # None requested + "expected_value": 0.0, }, { - 'name': 'test no env var set, default requested', - 'envvar_set': False, - 'envvar_val': None, # None set - 'default': 123.0, - 'expected_value': 123.0, + "name": "test no env var set, default requested", + "envvar_set": False, + "envvar_val": None, # None set + "default": 123.0, + "expected_value": 123.0, }, { - 'name': 'test env var is not a float, no default requested', - 'envvar_set': True, - 'envvar_val': 'testing', - 'default': None, # None requested - 'expected_value': 0.0, + "name": "test env var is not a float, no default requested", + "envvar_set": True, + "envvar_val": "testing", + "default": None, # None requested + "expected_value": 0.0, }, { - 'name': 'test env var is not a number, default requested', - 'envvar_set': True, - 'envvar_val': 'testing-more', - 'default': 1234.0, - 'expected_value': 1234.0, + "name": "test env var is not a number, default requested", + "envvar_set": True, + "envvar_val": "testing-more", + "default": 1234.0, + "expected_value": 1234.0, }, { - 
'name': 'test env var is an int, not float', - 'envvar_set': True, - 'envvar_val': '7239', - 'default': None, # None requested - 'expected_value': 7239.0, + "name": "test env var is an int, not float", + "envvar_set": True, + "envvar_val": "7239", + "default": None, # None requested + "expected_value": 7239.0, }, { - 'name': 'test env var is a float', - 'envvar_set': True, - 'envvar_val': '7239.23234', - 'default': None, # None requested - 'expected_value': 7239.23234, + "name": "test env var is a float", + "envvar_set": True, + "envvar_val": "7239.23234", + "default": None, # None requested + "expected_value": 7239.23234, }, ] def _test_environ(env_type, test): - env = {'TESTING': test['envvar_val']} if test['envvar_set'] else {} - default = test['default'] + env = {"TESTING": test["envvar_val"]} if test["envvar_set"] else {} + default = test["default"] with Environ(env): if default: - val = env_type('TESTING', default=default) + val = env_type("TESTING", default=default) else: - val = env_type('TESTING') - assert val == test['expected_value'] + val = env_type("TESTING") + assert val == test["expected_value"] -@pytest.mark.parametrize('test', _tests_environ_as_int) +@pytest.mark.parametrize("test", _tests_environ_as_int) def test__environ_as_int(test): _test_environ(_environ_as_int, test) -@pytest.mark.parametrize('test', _tests_environ_as_float) +@pytest.mark.parametrize("test", _tests_environ_as_float) def test__environ_as_float(test): _test_environ(_environ_as_float, test) diff --git a/tests/cross_agent/test_collector_hostname.py b/tests/cross_agent/test_collector_hostname.py index d9c65e34b..2ce39a1ec 100644 --- a/tests/cross_agent/test_collector_hostname.py +++ b/tests/cross_agent/test_collector_hostname.py @@ -15,29 +15,28 @@ import json import multiprocessing import os -import pytest import sys import tempfile +import pytest + try: # python 2.x reload except NameError: # python 3.x - from imp import reload + from importlib import reload CURRENT_DIR = 
os.path.dirname(os.path.realpath(__file__)) -FIXTURE = os.path.normpath(os.path.join(CURRENT_DIR, 'fixtures', - 'collector_hostname.json')) +FIXTURE = os.path.normpath(os.path.join(CURRENT_DIR, "fixtures", "collector_hostname.json")) -_parameters_list = ['config_file_key', 'config_override_host', - 'env_key', 'env_override_host', 'hostname'] -_parameters = ','.join(_parameters_list) +_parameters_list = ["config_file_key", "config_override_host", "env_key", "env_override_host", "hostname"] +_parameters = ",".join(_parameters_list) def _load_tests(): - with open(FIXTURE, 'r') as fh: + with open(FIXTURE, "r") as fh: js = fh.read() return json.loads(js) @@ -48,37 +47,39 @@ def _parametrize_test(test): _tests_json = _load_tests() _collector_hostname_tests = [_parametrize_test(t) for t in _tests_json] -_collector_hostname_ids = [t.get('name', None) for t in _tests_json] +_collector_hostname_ids = [t.get("name", None) for t in _tests_json] -def _test_collector_hostname(config_file_key=None, config_override_host=None, - env_key=None, env_override_host=None, hostname=None, queue=None): +def _test_collector_hostname( + config_file_key=None, config_override_host=None, env_key=None, env_override_host=None, hostname=None, queue=None +): try: - ini_contents = '[newrelic]' + ini_contents = "[newrelic]" - if 'NEW_RELIC_HOST' in os.environ: - del os.environ['NEW_RELIC_HOST'] - if 'NEW_RELIC_LICENSE_KEY' in os.environ: - del os.environ['NEW_RELIC_LICENSE_KEY'] + if "NEW_RELIC_HOST" in os.environ: + del os.environ["NEW_RELIC_HOST"] + if "NEW_RELIC_LICENSE_KEY" in os.environ: + del os.environ["NEW_RELIC_LICENSE_KEY"] if env_override_host: - os.environ['NEW_RELIC_HOST'] = env_override_host + os.environ["NEW_RELIC_HOST"] = env_override_host if env_key: - os.environ['NEW_RELIC_LICENSE_KEY'] = env_key + os.environ["NEW_RELIC_LICENSE_KEY"] = env_key if config_file_key: - ini_contents += '\nlicense_key = %s' % config_file_key + ini_contents += "\nlicense_key = %s" % config_file_key if 
config_override_host: - ini_contents += '\nhost = %s' % config_override_host + ini_contents += "\nhost = %s" % config_override_host import newrelic.config as config import newrelic.core.config as core_config + reload(core_config) reload(config) ini_file = tempfile.NamedTemporaryFile() - ini_file.write(ini_contents.encode('utf-8')) + ini_file.write(ini_contents.encode("utf-8")) ini_file.seek(0) config.initialize(ini_file.name) @@ -91,13 +92,11 @@ def _test_collector_hostname(config_file_key=None, config_override_host=None, raise if queue: - queue.put('PASS') + queue.put("PASS") -@pytest.mark.parametrize(_parameters, _collector_hostname_tests, - ids=_collector_hostname_ids) -def test_collector_hostname(config_file_key, config_override_host, env_key, - env_override_host, hostname): +@pytest.mark.parametrize(_parameters, _collector_hostname_tests, ids=_collector_hostname_ids) +def test_collector_hostname(config_file_key, config_override_host, env_key, env_override_host, hostname): # We run the actual test in a subprocess because we are editing the # settings we need to connect to the data collector. With the wrong @@ -105,11 +104,18 @@ def test_collector_hostname(config_file_key, config_override_host, env_key, # run after this one. 
queue = multiprocessing.Queue() - process = multiprocessing.Process(target=_test_collector_hostname, - kwargs={'config_file_key': config_file_key, 'config_override_host': - config_override_host, 'env_key': env_key, 'env_override_host': - env_override_host, 'hostname': hostname, 'queue': queue}) + process = multiprocessing.Process( + target=_test_collector_hostname, + kwargs={ + "config_file_key": config_file_key, + "config_override_host": config_override_host, + "env_key": env_key, + "env_override_host": env_override_host, + "hostname": hostname, + "queue": queue, + }, + ) process.start() result = queue.get(timeout=2) - assert result == 'PASS' + assert result == "PASS" diff --git a/tests/cross_agent/test_utilization_configs.py b/tests/cross_agent/test_utilization_configs.py index 810631ee6..4a4adb485 100644 --- a/tests/cross_agent/test_utilization_configs.py +++ b/tests/cross_agent/test_utilization_configs.py @@ -14,36 +14,36 @@ import json import os -import pytest import sys import tempfile +import pytest # NOTE: the test_utilization_settings_from_env_vars test mocks several of the # methods in newrelic.core.data_collector and does not put them back! 
from testing_support.mock_http_client import create_client_cls -from newrelic.core.agent_protocol import AgentProtocol -from newrelic.common.system_info import BootIdUtilization -from newrelic.common.utilization import (CommonUtilization) -from newrelic.common.object_wrapper import (function_wrapper) + import newrelic.core.config +from newrelic.common.object_wrapper import function_wrapper +from newrelic.common.system_info import BootIdUtilization +from newrelic.common.utilization import CommonUtilization +from newrelic.core.agent_protocol import AgentProtocol try: # python 2.x reload except NameError: # python 3.x - from imp import reload + from importlib import reload INITIAL_ENV = os.environ CURRENT_DIR = os.path.dirname(os.path.realpath(__file__)) -FIXTURE = os.path.normpath(os.path.join( - CURRENT_DIR, 'fixtures', 'utilization', 'utilization_json.json')) +FIXTURE = os.path.normpath(os.path.join(CURRENT_DIR, "fixtures", "utilization", "utilization_json.json")) def _load_tests(): - with open(FIXTURE, 'r') as fh: + with open(FIXTURE, "r") as fh: js = fh.read() return json.loads(js) @@ -51,24 +51,28 @@ def _load_tests(): def _mock_logical_processor_count(cnt): def logical_processor_count(): return cnt + return logical_processor_count def _mock_total_physical_memory(mem): def total_physical_memory(): return mem + return total_physical_memory def _mock_gethostname(name): def gethostname(*args, **kwargs): return name + return gethostname def _mock_getips(ip_addresses): def getips(*args, **kwargs): return ip_addresses + return getips @@ -100,26 +104,32 @@ def __exit__(self, *args, **kwargs): def _get_response_body_for_test(test): - if test.get('input_aws_id'): - return json.dumps({ - 'instanceId': test.get('input_aws_id'), - 'instanceType': test.get('input_aws_type'), - 'availabilityZone': test.get('input_aws_zone'), - }).encode('utf8') - if test.get('input_azure_id'): - return json.dumps({ - 'location': test.get('input_azure_location'), - 'name': 
test.get('input_azure_name'), - 'vmId': test.get('input_azure_id'), - 'vmSize': test.get('input_azure_size'), - }).encode('utf8') - if test.get('input_gcp_id'): - return json.dumps({ - 'id': test.get('input_gcp_id'), - 'machineType': test.get('input_gcp_type'), - 'name': test.get('input_gcp_name'), - 'zone': test.get('input_gcp_zone'), - }).encode('utf8') + if test.get("input_aws_id"): + return json.dumps( + { + "instanceId": test.get("input_aws_id"), + "instanceType": test.get("input_aws_type"), + "availabilityZone": test.get("input_aws_zone"), + } + ).encode("utf8") + if test.get("input_azure_id"): + return json.dumps( + { + "location": test.get("input_azure_location"), + "name": test.get("input_azure_name"), + "vmId": test.get("input_azure_id"), + "vmSize": test.get("input_azure_size"), + } + ).encode("utf8") + if test.get("input_gcp_id"): + return json.dumps( + { + "id": test.get("input_gcp_id"), + "machineType": test.get("input_gcp_type"), + "name": test.get("input_gcp_name"), + "zone": test.get("input_gcp_zone"), + } + ).encode("utf8") def patch_boot_id_file(test): @@ -128,16 +138,16 @@ def _patch_boot_id_file(wrapped, instance, args, kwargs): boot_id_file = None initial_sys_platform = sys.platform - if test.get('input_boot_id'): + if test.get("input_boot_id"): boot_id_file = tempfile.NamedTemporaryFile() - boot_id_file.write(test.get('input_boot_id')) + boot_id_file.write(test.get("input_boot_id")) boot_id_file.seek(0) BootIdUtilization.METADATA_URL = boot_id_file.name - sys.platform = 'linux-mock-testing' # ensure boot_id is gathered + sys.platform = "linux-mock-testing" # ensure boot_id is gathered else: # do not gather boot_id at all, this will ensure there is nothing # extra in the gathered utilizations data - sys.platform = 'not-linux' + sys.platform = "not-linux" try: return wrapped(*args, **kwargs) @@ -153,38 +163,36 @@ def patch_system_info(test, monkeypatch): def _patch_system_info(wrapped, instance, args, kwargs): sys_info = 
newrelic.common.system_info - monkeypatch.setattr(sys_info, "logical_processor_count", - _mock_logical_processor_count( - test.get('input_logical_processors'))) - monkeypatch.setattr(sys_info, "total_physical_memory", - _mock_total_physical_memory( - test.get('input_total_ram_mib'))) - monkeypatch.setattr(sys_info, "gethostname", - _mock_gethostname( - test.get('input_hostname'))) - monkeypatch.setattr(sys_info, "getips", - _mock_getips( - test.get('input_ip_address'))) + monkeypatch.setattr( + sys_info, "logical_processor_count", _mock_logical_processor_count(test.get("input_logical_processors")) + ) + monkeypatch.setattr( + sys_info, "total_physical_memory", _mock_total_physical_memory(test.get("input_total_ram_mib")) + ) + monkeypatch.setattr(sys_info, "gethostname", _mock_gethostname(test.get("input_hostname"))) + monkeypatch.setattr(sys_info, "getips", _mock_getips(test.get("input_ip_address"))) return wrapped(*args, **kwargs) return _patch_system_info -@pytest.mark.parametrize('test', _load_tests()) +@pytest.mark.parametrize("test", _load_tests()) def test_utilization_settings(test, monkeypatch): - env = test.get('input_environment_variables', {}) + env = test.get("input_environment_variables", {}) - if test.get('input_pcf_guid'): - env.update({ - 'CF_INSTANCE_GUID': test.get('input_pcf_guid'), - 'CF_INSTANCE_IP': test.get('input_pcf_ip'), - 'MEMORY_LIMIT': test.get('input_pcf_mem_limit'), - }) + if test.get("input_pcf_guid"): + env.update( + { + "CF_INSTANCE_GUID": test.get("input_pcf_guid"), + "CF_INSTANCE_IP": test.get("input_pcf_ip"), + "MEMORY_LIMIT": test.get("input_pcf_mem_limit"), + } + ) for key, val in env.items(): - monkeypatch.setenv(key, val) + monkeypatch.setenv(key, str(val)) @patch_boot_id_file(test) @patch_system_info(test, monkeypatch) @@ -199,10 +207,9 @@ def _test_utilization_data(): # gathered utilization data monkeypatch.setattr(settings.utilization, "detect_docker", False) - local_config, = AgentProtocol._connect_payload( - '', [], [], 
settings) - util_output = local_config['utilization'] - expected_output = test['expected_output_json'] + (local_config,) = AgentProtocol._connect_payload("", [], [], settings) + util_output = local_config["utilization"] + expected_output = test["expected_output_json"] # The agent does not record full_hostname and it's not required expected_output.pop("full_hostname") diff --git a/tests/framework_aiohttp/_target_application.py b/tests/framework_aiohttp/_target_application.py index 895260798..77d6fef6c 100644 --- a/tests/framework_aiohttp/_target_application.py +++ b/tests/framework_aiohttp/_target_application.py @@ -14,53 +14,47 @@ import asyncio import sys -from aiohttp import web, WSMsgType, ClientSession + +from aiohttp import ClientSession, WSMsgType, web + from newrelic.api.function_trace import function_trace -@asyncio.coroutine -def index(request): - yield - resp = web.Response(text='Hello Aiohttp!') - resp.set_cookie('ExampleCookie', 'ExampleValue') +async def index(request): + await asyncio.sleep(0) + resp = web.Response(text="Hello Aiohttp!") + resp.set_cookie("ExampleCookie", "ExampleValue") return resp -@asyncio.coroutine -def hang(request): +async def hang(request): while True: - yield + await asyncio.sleep(0) -@asyncio.coroutine -def error(request): +async def error(request): raise ValueError("Value Error") -@asyncio.coroutine -def non_500_error(request): +async def non_500_error(request): raise web.HTTPGone() -@asyncio.coroutine -def raise_404(request): +async def raise_404(request): raise web.HTTPNotFound() -@asyncio.coroutine @function_trace() -def wait(): - yield from asyncio.sleep(0.1) +async def wait(): + await asyncio.sleep(0.1) -@asyncio.coroutine -def run_task(loop): - yield from wait() +async def run_task(loop): + await wait() loop.stop() -@asyncio.coroutine -def background(request): +async def background(request): try: loop = request.loop except AttributeError: @@ -68,16 +62,14 @@ def background(request): asyncio.set_event_loop(loop) 
asyncio.tasks.ensure_future(run_task(loop)) - return web.Response(text='Background Task Scheduled') + return web.Response(text="Background Task Scheduled") class HelloWorldView(web.View): - - @asyncio.coroutine - def _respond(self): - yield - resp = web.Response(text='Hello Aiohttp!') - resp.set_cookie('ExampleCookie', 'ExampleValue') + async def _respond(self): + await asyncio.sleep(0) + resp = web.Response(text="Hello Aiohttp!") + resp.set_cookie("ExampleCookie", "ExampleValue") return resp get = _respond @@ -92,15 +84,13 @@ class KnownException(Exception): class KnownErrorView(web.View): - - @asyncio.coroutine - def _respond(self): + async def _respond(self): try: - yield + await asyncio.sleep(0) except KnownException: pass finally: - return web.Response(text='Hello Aiohttp!') + return web.Response(text="Hello Aiohttp!") get = _respond post = _respond @@ -109,83 +99,79 @@ def _respond(self): delete = _respond -@asyncio.coroutine -def websocket_handler(request): +async def websocket_handler(request): ws = web.WebSocketResponse() - yield from ws.prepare(request) + await ws.prepare(request) # receive messages for all eternity! 
# (or until the client closes the socket) while not ws.closed: - msg = yield from ws.receive() + msg = await ws.receive() if msg.type == WSMsgType.TEXT: - result = ws.send_str('/' + msg.data) - if hasattr(result, '__await__'): - yield from result.__await__() + result = ws.send_str("/" + msg.data) + if hasattr(result, "__await__"): + await result return ws -@asyncio.coroutine -def fetch(method, url, loop): +async def fetch(method, url, loop): session = ClientSession(loop=loop) - if hasattr(session, '__aenter__'): - yield from session.__aenter__() + if hasattr(session, "__aenter__"): + await session.__aenter__() else: session.__enter__() try: _method = getattr(session, method) try: - response = yield from asyncio.wait_for(_method(url), timeout=None, loop=loop) + response = await asyncio.wait_for(_method(url), timeout=None, loop=loop) except TypeError: - response = yield from asyncio.wait_for(_method(url), timeout=None) - text = yield from response.text() + response = await asyncio.wait_for(_method(url), timeout=None) + text = await response.text() finally: - if hasattr(session, '__aexit__'): - yield from session.__aexit__(*sys.exc_info()) + if hasattr(session, "__aexit__"): + await session.__aexit__(*sys.exc_info()) else: session.__exit__(*sys.exc_info()) return text -@asyncio.coroutine -def fetch_multiple(method, loop, url): +async def fetch_multiple(method, loop, url): coros = [fetch(method, url, loop) for _ in range(2)] try: - responses = yield from asyncio.gather(*coros, loop=loop) + responses = await asyncio.gather(*coros, loop=loop) except TypeError: - responses = yield from asyncio.gather(*coros) - return '\n'.join(responses) + responses = await asyncio.gather(*coros) + return "\n".join(responses) -@asyncio.coroutine -def multi_fetch_handler(request): +async def multi_fetch_handler(request): try: loop = request.loop except AttributeError: loop = request.task._loop - responses = yield from fetch_multiple('get', loop, request.query['url']) - return 
web.Response(text=responses, content_type='text/html') + responses = await fetch_multiple("get", loop, request.query["url"]) + return web.Response(text=responses, content_type="text/html") def make_app(middlewares=None): app = web.Application(middlewares=middlewares) - app.router.add_route('*', '/coro', index) - app.router.add_route('*', '/class', HelloWorldView) - app.router.add_route('*', '/error', error) - app.router.add_route('*', '/known_error', KnownErrorView) - app.router.add_route('*', '/non_500_error', non_500_error) - app.router.add_route('*', '/raise_404', raise_404) - app.router.add_route('*', '/hang', hang) - app.router.add_route('*', '/background', background) - app.router.add_route('*', '/ws', websocket_handler) - app.router.add_route('*', '/multi_fetch', multi_fetch_handler) + app.router.add_route("*", "/coro", index) + app.router.add_route("*", "/class", HelloWorldView) + app.router.add_route("*", "/error", error) + app.router.add_route("*", "/known_error", KnownErrorView) + app.router.add_route("*", "/non_500_error", non_500_error) + app.router.add_route("*", "/raise_404", raise_404) + app.router.add_route("*", "/hang", hang) + app.router.add_route("*", "/background", background) + app.router.add_route("*", "/ws", websocket_handler) + app.router.add_route("*", "/multi_fetch", multi_fetch_handler) for route in app.router.routes(): handler = route.handler @@ -199,5 +185,5 @@ def make_app(middlewares=None): return app -if __name__ == '__main__': - web.run_app(make_app(), host='127.0.0.1') +if __name__ == "__main__": + web.run_app(make_app(), host="127.0.0.1") diff --git a/tests/framework_aiohttp/conftest.py b/tests/framework_aiohttp/conftest.py index b4a31d7e2..bb7e7716e 100644 --- a/tests/framework_aiohttp/conftest.py +++ b/tests/framework_aiohttp/conftest.py @@ -76,8 +76,7 @@ def tearDown(self): if hasattr(self, "asyncTearDown"): asyncio.get_event_loop().run_until_complete(self.asyncTearDown()) - @asyncio.coroutine - def _get_client(self, 
app_or_server): + async def _get_client(self, app_or_server): """Return a TestClient instance.""" client_constructor_arg = app_or_server diff --git a/tests/framework_aiohttp/test_client.py b/tests/framework_aiohttp/test_client.py index b2d23dd23..69649109d 100644 --- a/tests/framework_aiohttp/test_client.py +++ b/tests/framework_aiohttp/test_client.py @@ -28,18 +28,16 @@ ) -@asyncio.coroutine -def fetch(method, url): +async def fetch(method, url): with aiohttp.ClientSession() as session: _method = getattr(session, method) - response = yield from asyncio.wait_for(_method(url), timeout=None) + response = await asyncio.wait_for(_method(url), timeout=None) response.raise_for_status() - yield from response.text() + await response.text() @background_task(name="fetch_multiple") -@asyncio.coroutine -def fetch_multiple(method, url): +async def fetch_multiple(method, url): coros = [fetch(method, url) for _ in range(2)] return asyncio.gather(*coros, return_exceptions=True) @@ -126,8 +124,7 @@ class ThrowerException(ValueError): pass @background_task(name="test_client_throw_yield_from") - @asyncio.coroutine - def self_driving_thrower(): + async def self_driving_thrower(): with aiohttp.ClientSession() as session: coro = session._request(method.upper(), local_server_info.url) @@ -159,8 +156,7 @@ def task_test(): @pytest.mark.parametrize("method,exc_expected", test_matrix) def test_client_close_yield_from(event_loop, local_server_info, method, exc_expected): @background_task(name="test_client_close_yield_from") - @asyncio.coroutine - def self_driving_closer(): + async def self_driving_closer(): with aiohttp.ClientSession() as session: coro = session._request(method.upper(), local_server_info.url) @@ -219,17 +215,15 @@ def test_create_task_yield_from(event_loop, local_server_info, method, exc_expec # `loop.create_task` returns a Task object which uses the coroutine's # `send` method, not `__next__` - @asyncio.coroutine - def fetch_task(loop): + async def fetch_task(loop): with 
aiohttp.ClientSession() as session: coro = getattr(session, method) - resp = yield from loop.create_task(coro(local_server_info.url)) + resp = await loop.create_task(coro(local_server_info.url)) resp.raise_for_status() - yield from resp.text() + await resp.text() @background_task(name="test_create_task_yield_from") - @asyncio.coroutine - def fetch_multiple(loop): + async def fetch_multiple(loop): coros = [fetch_task(loop) for _ in range(2)] return asyncio.gather(*coros, return_exceptions=True) diff --git a/tests/framework_aiohttp/test_client_cat.py b/tests/framework_aiohttp/test_client_cat.py index a830c2269..c1177db0a 100644 --- a/tests/framework_aiohttp/test_client_cat.py +++ b/tests/framework_aiohttp/test_client_cat.py @@ -13,7 +13,6 @@ # limitations under the License. import asyncio -import os import aiohttp import pytest @@ -41,8 +40,7 @@ _expected_error_class = aiohttp.client_exceptions.ClientResponseError -@asyncio.coroutine -def fetch(url, headers=None, raise_for_status=False, connector=None): +async def fetch(url, headers=None, raise_for_status=False, connector=None): kwargs = {} if version_info >= (2, 0): @@ -53,13 +51,13 @@ def fetch(url, headers=None, raise_for_status=False, connector=None): headers = {} try: - response = yield from request + response = await request if raise_for_status and version_info < (2, 0): response.raise_for_status() except _expected_error_class: return headers - response_text = yield from response.text() + response_text = await response.text() for header in response_text.split("\n"): if not header: continue @@ -69,7 +67,7 @@ def fetch(url, headers=None, raise_for_status=False, connector=None): continue headers[h.strip()] = v.strip() f = session.close() - yield from asyncio.ensure_future(f) + await asyncio.ensure_future(f) return headers @@ -78,9 +76,8 @@ def fetch(url, headers=None, raise_for_status=False, connector=None): @pytest.mark.parametrize("span_events", (True, False)) def test_outbound_cross_process_headers(event_loop, 
cat_enabled, distributed_tracing, span_events, mock_header_server): @background_task(name="test_outbound_cross_process_headers") - @asyncio.coroutine - def _test(): - headers = yield from fetch("http://127.0.0.1:%d" % mock_header_server.port) + async def _test(): + headers = await fetch("http://127.0.0.1:%d" % mock_header_server.port) transaction = current_transaction() transaction._test_request_headers = headers @@ -144,15 +141,14 @@ def test_outbound_cross_process_headers_no_txn(event_loop, mock_header_server): def test_outbound_cross_process_headers_exception(event_loop, mock_header_server): @background_task(name="test_outbound_cross_process_headers_exception") - @asyncio.coroutine - def test(): + async def test(): # corrupt the transaction object to force an error transaction = current_transaction() guid = transaction.guid delattr(transaction, "guid") try: - headers = yield from fetch("http://127.0.0.1:%d" % mock_header_server.port) + headers = await fetch("http://127.0.0.1:%d" % mock_header_server.port) assert not headers.get(ExternalTrace.cat_id_key) assert not headers.get(ExternalTrace.cat_transaction_key) @@ -163,10 +159,9 @@ def test(): class PoorResolvingConnector(aiohttp.TCPConnector): - @asyncio.coroutine - def _resolve_host(self, host, port, *args, **kwargs): + async def _resolve_host(self, host, port, *args, **kwargs): res = [{"hostname": host, "host": host, "port": 1234, "family": self._family, "proto": 0, "flags": 0}] - hosts = yield from super(PoorResolvingConnector, self)._resolve_host(host, port, *args, **kwargs) + hosts = await super(PoorResolvingConnector, self)._resolve_host(host, port, *args, **kwargs) for hinfo in hosts: res.append(hinfo) return res diff --git a/tests/framework_aiohttp/test_externals.py b/tests/framework_aiohttp/test_externals.py index a410590ef..7cb849bb8 100644 --- a/tests/framework_aiohttp/test_externals.py +++ b/tests/framework_aiohttp/test_externals.py @@ -12,29 +12,27 @@ # See the License for the specific language 
governing permissions and # limitations under the License. -import pytest -import asyncio - -from testing_support.fixtures import (validate_transaction_metrics, - validate_tt_parenting) +from testing_support.fixtures import validate_transaction_metrics, validate_tt_parenting expected_parenting = ( - 'TransactionNode', [ - ('FunctionNode', [ - ('ExternalTrace', []), - ('ExternalTrace', []), - ]), -]) + "TransactionNode", + [ + ( + "FunctionNode", + [ + ("ExternalTrace", []), + ("ExternalTrace", []), + ], + ), + ], +) @validate_tt_parenting(expected_parenting) -@validate_transaction_metrics('_target_application:multi_fetch_handler', - rollup_metrics=[('External/all', 2)]) +@validate_transaction_metrics("_target_application:multi_fetch_handler", rollup_metrics=[("External/all", 2)]) def test_multiple_requests_within_transaction(local_server_info, aiohttp_app): - @asyncio.coroutine - def fetch(): - resp = yield from aiohttp_app.client.request('GET', '/multi_fetch', - params={'url': local_server_info.url}) + async def fetch(): + resp = await aiohttp_app.client.request("GET", "/multi_fetch", params={"url": local_server_info.url}) assert resp.status == 200 aiohttp_app.loop.run_until_complete(fetch()) diff --git a/tests/framework_aiohttp/test_middleware.py b/tests/framework_aiohttp/test_middleware.py index 47050232f..b383b4d5e 100644 --- a/tests/framework_aiohttp/test_middleware.py +++ b/tests/framework_aiohttp/test_middleware.py @@ -12,57 +12,53 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import pytest -import asyncio import aiohttp +import pytest +from testing_support.fixtures import ( + override_generic_settings, + validate_transaction_metrics, +) +from testing_support.validators.validate_code_level_metrics import ( + validate_code_level_metrics, +) from newrelic.core.config import global_settings -from testing_support.fixtures import (validate_transaction_metrics, - override_generic_settings) -from testing_support.validators.validate_code_level_metrics import validate_code_level_metrics - -version_info = tuple(int(_) for _ in aiohttp.__version__.split('.')[:2]) +version_info = tuple(int(_) for _ in aiohttp.__version__.split(".")[:2]) -@asyncio.coroutine -def middleware_factory(app, handler): - @asyncio.coroutine - def middleware_handler(request): - response = yield from handler(request) +async def middleware_factory(app, handler): + async def middleware_handler(request): + response = await handler(request) return response return middleware_handler middleware_tests = [ - (middleware_factory, 'Function/test_middleware:' - 'middleware_factory..middleware_handler'), + (middleware_factory, "Function/test_middleware:" "middleware_factory..middleware_handler"), ] if version_info >= (3, 0): + @aiohttp.web.middleware - @asyncio.coroutine - def new_style_middleware(request, handler): - response = yield from handler(request) + async def new_style_middleware(request, handler): + response = await handler(request) return response middleware_tests.append( - (new_style_middleware, - 'Function/test_middleware:new_style_middleware'), + (new_style_middleware, "Function/test_middleware:new_style_middleware"), ) -@pytest.mark.parametrize('nr_enabled', [True, False]) -@pytest.mark.parametrize('middleware,metric', middleware_tests) +@pytest.mark.parametrize("nr_enabled", [True, False]) +@pytest.mark.parametrize("middleware,metric", middleware_tests) def test_middleware(nr_enabled, aiohttp_app, middleware, metric): - - @asyncio.coroutine - def fetch(): - resp = yield 
from aiohttp_app.client.request('GET', '/coro') + async def fetch(): + resp = await aiohttp_app.client.request("GET", "/coro") assert resp.status == 200 - text = yield from resp.text() + text = await resp.text() assert "Hello Aiohttp!" in text return resp @@ -71,27 +67,27 @@ def _test(): if nr_enabled: scoped_metrics = [ - ('Function/_target_application:index', 1), + ("Function/_target_application:index", 1), (metric, 1), ] rollup_metrics = [ - ('Function/_target_application:index', 1), + ("Function/_target_application:index", 1), (metric, 1), - ('Python/Framework/aiohttp/%s' % aiohttp.__version__, 1), + ("Python/Framework/aiohttp/%s" % aiohttp.__version__, 1), ] - _test = validate_transaction_metrics('_target_application:index', - scoped_metrics=scoped_metrics, - rollup_metrics=rollup_metrics)(_test) + _test = validate_transaction_metrics( + "_target_application:index", scoped_metrics=scoped_metrics, rollup_metrics=rollup_metrics + )(_test) _test = validate_code_level_metrics("_target_application", "index")(_test) - + func_name = metric.split("/")[1].replace(":", ".").split(".") namespace, func_name = ".".join(func_name[:-1]), func_name[-1] _test = validate_code_level_metrics(namespace, func_name)(_test) else: settings = global_settings() - _test = override_generic_settings(settings, {'enabled': False})(_test) + _test = override_generic_settings(settings, {"enabled": False})(_test) _test() diff --git a/tests/framework_aiohttp/test_server.py b/tests/framework_aiohttp/test_server.py index 2b4e28b5f..70dd1e496 100644 --- a/tests/framework_aiohttp/test_server.py +++ b/tests/framework_aiohttp/test_server.py @@ -12,64 +12,57 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import pytest import asyncio + import aiohttp -from newrelic.core.config import global_settings +import pytest +from testing_support.fixtures import ( + count_transactions, + override_application_settings, + override_generic_settings, + override_ignore_status_codes, + validate_transaction_errors, + validate_transaction_event_attributes, + validate_transaction_metrics, +) +from testing_support.validators.validate_code_level_metrics import ( + validate_code_level_metrics, +) -from testing_support.fixtures import (validate_transaction_metrics, - validate_transaction_errors, validate_transaction_event_attributes, - count_transactions, override_generic_settings, - override_application_settings, override_ignore_status_codes) -from testing_support.validators.validate_code_level_metrics import validate_code_level_metrics +from newrelic.core.config import global_settings -version_info = tuple(int(_) for _ in aiohttp.__version__.split('.')[:2]) +version_info = tuple(int(_) for _ in aiohttp.__version__.split(".")[:2]) -BASE_REQUIRED_ATTRS = ['request.headers.contentType', - 'request.method'] +BASE_REQUIRED_ATTRS = ["request.headers.contentType", "request.method"] # The agent should not record these attributes in events unless the settings # explicitly say to do so -BASE_FORGONE_ATTRS = ['request.parameters.hello'] - - -@pytest.mark.parametrize('nr_enabled', [True, False]) -@pytest.mark.parametrize('method', [ - 'GET', - 'POST', - 'PUT', - 'PATCH', - 'DELETE', -]) -@pytest.mark.parametrize('uri,metric_name,error,status', [ - ( - '/error?hello=world', - '_target_application:error', - 'builtins:ValueError', - 500 - ), - - ( - '/non_500_error?hello=world', - '_target_application:non_500_error', - 'aiohttp.web_exceptions:HTTPGone', - 410 - ), - - ( - '/raise_404?hello=world', - '_target_application:raise_404', - None, - 404 - ), -]) -def test_error_exception(method, uri, metric_name, error, status, nr_enabled, - aiohttp_app): - @asyncio.coroutine - def fetch(): - resp = yield 
from aiohttp_app.client.request(method, - uri, headers={'content-type': 'text/plain'}) +BASE_FORGONE_ATTRS = ["request.parameters.hello"] + + +@pytest.mark.parametrize("nr_enabled", [True, False]) +@pytest.mark.parametrize( + "method", + [ + "GET", + "POST", + "PUT", + "PATCH", + "DELETE", + ], +) +@pytest.mark.parametrize( + "uri,metric_name,error,status", + [ + ("/error?hello=world", "_target_application:error", "builtins:ValueError", 500), + ("/non_500_error?hello=world", "_target_application:non_500_error", "aiohttp.web_exceptions:HTTPGone", 410), + ("/raise_404?hello=world", "_target_application:raise_404", None, 404), + ], +) +def test_error_exception(method, uri, metric_name, error, status, nr_enabled, aiohttp_app): + async def fetch(): + resp = await aiohttp_app.client.request(method, uri, headers={"content-type": "text/plain"}) assert resp.status == status required_attrs = list(BASE_REQUIRED_ATTRS) @@ -81,76 +74,78 @@ def fetch(): errors.append(error) @validate_transaction_errors(errors=errors) - @validate_transaction_metrics(metric_name, + @validate_transaction_metrics( + metric_name, scoped_metrics=[ - ('Function/%s' % metric_name, 1), + ("Function/%s" % metric_name, 1), ], rollup_metrics=[ - ('Function/%s' % metric_name, 1), - ('Python/Framework/aiohttp/%s' % aiohttp.__version__, 1), + ("Function/%s" % metric_name, 1), + ("Python/Framework/aiohttp/%s" % aiohttp.__version__, 1), ], ) @validate_transaction_event_attributes( required_params={ - 'agent': required_attrs, - 'user': [], - 'intrinsic': [], + "agent": required_attrs, + "user": [], + "intrinsic": [], }, forgone_params={ - 'agent': forgone_attrs, - 'user': [], - 'intrinsic': [], + "agent": forgone_attrs, + "user": [], + "intrinsic": [], }, exact_attrs={ - 'agent': { - 'response.status': str(status), + "agent": { + "response.status": str(status), }, - 'user': {}, - 'intrinsic': {}, + "user": {}, + "intrinsic": {}, }, ) @validate_code_level_metrics(*metric_name.split(":")) 
@override_ignore_status_codes([404]) def _test(): aiohttp_app.loop.run_until_complete(fetch()) + else: settings = global_settings() - @override_generic_settings(settings, {'enabled': False}) + @override_generic_settings(settings, {"enabled": False}) def _test(): aiohttp_app.loop.run_until_complete(fetch()) _test() -@pytest.mark.parametrize('nr_enabled', [True, False]) -@pytest.mark.parametrize('method', [ - 'GET', - 'POST', - 'PUT', - 'PATCH', - 'DELETE', -]) -@pytest.mark.parametrize('uri,metric_name', [ - ('/coro?hello=world', '_target_application:index'), - ('/class?hello=world', '_target_application:HelloWorldView._respond'), - ('/known_error?hello=world', - '_target_application:KnownErrorView._respond'), -]) -def test_simultaneous_requests(method, uri, metric_name, - nr_enabled, aiohttp_app): - - @asyncio.coroutine - def fetch(): - resp = yield from aiohttp_app.client.request(method, uri, - headers={'content-type': 'text/plain'}) +@pytest.mark.parametrize("nr_enabled", [True, False]) +@pytest.mark.parametrize( + "method", + [ + "GET", + "POST", + "PUT", + "PATCH", + "DELETE", + ], +) +@pytest.mark.parametrize( + "uri,metric_name", + [ + ("/coro?hello=world", "_target_application:index"), + ("/class?hello=world", "_target_application:HelloWorldView._respond"), + ("/known_error?hello=world", "_target_application:KnownErrorView._respond"), + ], +) +def test_simultaneous_requests(method, uri, metric_name, nr_enabled, aiohttp_app): + async def fetch(): + resp = await aiohttp_app.client.request(method, uri, headers={"content-type": "text/plain"}) assert resp.status == 200 - text = yield from resp.text() + text = await resp.text() assert "Hello Aiohttp!" 
in text return resp - @asyncio.coroutine - def multi_fetch(loop): + async def multi_fetch(loop): coros = [fetch() for i in range(2)] try: @@ -158,7 +153,7 @@ def multi_fetch(loop): except TypeError: combined = asyncio.gather(*coros, loop=loop) - responses = yield from combined + responses = await combined return responses required_attrs = list(BASE_REQUIRED_ATTRS) @@ -166,8 +161,7 @@ def multi_fetch(loop): required_attrs.extend(extra_required) - required_attrs.extend(['response.status', - 'response.headers.contentType']) + required_attrs.extend(["response.status", "response.headers.contentType"]) if nr_enabled: transactions = [] @@ -175,26 +169,27 @@ def multi_fetch(loop): func_name = metric_name.replace(":", ".").split(".") namespace, func_name = ".".join(func_name[:-1]), func_name[-1] - @override_application_settings({'attributes.include': ['request.*']}) - @validate_transaction_metrics(metric_name, + @override_application_settings({"attributes.include": ["request.*"]}) + @validate_transaction_metrics( + metric_name, scoped_metrics=[ - ('Function/%s' % metric_name, 1), + ("Function/%s" % metric_name, 1), ], rollup_metrics=[ - ('Function/%s' % metric_name, 1), - ('Python/Framework/aiohttp/%s' % aiohttp.__version__, 1), + ("Function/%s" % metric_name, 1), + ("Python/Framework/aiohttp/%s" % aiohttp.__version__, 1), ], ) @validate_transaction_event_attributes( required_params={ - 'agent': required_attrs, - 'user': [], - 'intrinsic': [], + "agent": required_attrs, + "user": [], + "intrinsic": [], }, forgone_params={ - 'agent': [], - 'user': [], - 'intrinsic': [], + "agent": [], + "user": [], + "intrinsic": [], }, ) @validate_code_level_metrics(namespace, func_name) @@ -202,21 +197,21 @@ def multi_fetch(loop): def _test(): aiohttp_app.loop.run_until_complete(multi_fetch(aiohttp_app.loop)) assert len(transactions) == 2 + else: settings = global_settings() - @override_generic_settings(settings, {'enabled': False}) + @override_generic_settings(settings, {"enabled": 
False}) def _test(): aiohttp_app.loop.run_until_complete(multi_fetch(aiohttp_app.loop)) _test() -@pytest.mark.parametrize('nr_enabled', [True, False]) +@pytest.mark.parametrize("nr_enabled", [True, False]) def test_system_response_creates_no_transaction(nr_enabled, aiohttp_app): - @asyncio.coroutine - def fetch(): - resp = yield from aiohttp_app.client.request('GET', '/404') + async def fetch(): + resp = await aiohttp_app.client.request("GET", "/404") assert resp.status == 404 return resp @@ -227,10 +222,11 @@ def fetch(): def _test(): aiohttp_app.loop.run_until_complete(fetch()) assert len(transactions) == 0 + else: settings = global_settings() - @override_generic_settings(settings, {'enabled': False}) + @override_generic_settings(settings, {"enabled": False}) def _test(): aiohttp_app.loop.run_until_complete(fetch()) @@ -238,18 +234,17 @@ def _test(): def test_aborted_connection_creates_transaction(aiohttp_app): - @asyncio.coroutine - def fetch(): + async def fetch(): try: - yield from aiohttp_app.client.request('GET', '/hang', timeout=0.1) + await aiohttp_app.client.request("GET", "/hang", timeout=0.1) except asyncio.TimeoutError: try: # Force the client to disconnect (while the server is hanging) - yield from aiohttp_app.client.close() + await aiohttp_app.client.close() # In aiohttp 1.X, this can result in a CancelledError being raised except asyncio.CancelledError: pass - yield + await asyncio.sleep(0) return assert False, "Request did not time out" @@ -265,13 +260,11 @@ def _test(): def test_work_after_request_not_recorded(aiohttp_app): - resp = aiohttp_app.loop.run_until_complete( - aiohttp_app.client.request('GET', '/background')) + resp = aiohttp_app.loop.run_until_complete(aiohttp_app.client.request("GET", "/background")) assert resp.status == 200 - @asyncio.coroutine - def timeout(): - yield from asyncio.sleep(1) + async def timeout(): + await asyncio.sleep(1) aiohttp_app.loop.stop() assert False diff --git a/tests/framework_aiohttp/test_server_cat.py 
b/tests/framework_aiohttp/test_server_cat.py index a09fa6b79..0c325215d 100644 --- a/tests/framework_aiohttp/test_server_cat.py +++ b/tests/framework_aiohttp/test_server_cat.py @@ -12,47 +12,52 @@ # See the License for the specific language governing permissions and # limitations under the License. -import asyncio import json + import pytest +from testing_support.fixtures import ( + make_cross_agent_headers, + override_application_settings, + validate_analytics_catmap_data, + validate_transaction_event_attributes, +) -from newrelic.common.object_wrapper import transient_function_wrapper from newrelic.common.encoding_utils import deobfuscate -from testing_support.fixtures import (override_application_settings, - make_cross_agent_headers, validate_analytics_catmap_data, - validate_transaction_event_attributes) +from newrelic.common.object_wrapper import transient_function_wrapper -ENCODING_KEY = '1234567890123456789012345678901234567890' +ENCODING_KEY = "1234567890123456789012345678901234567890" test_uris = [ - ('/error?hello=world', '_target_application:error'), - ('/coro?hello=world', '_target_application:index'), - ('/class?hello=world', '_target_application:HelloWorldView._respond'), + ("/error?hello=world", "_target_application:error"), + ("/coro?hello=world", "_target_application:index"), + ("/class?hello=world", "_target_application:HelloWorldView._respond"), ] def record_aiohttp1_raw_headers(raw_headers): try: - import aiohttp.protocol + import aiohttp.protocol # noqa except ImportError: + def pass_through(function): return function + return pass_through - @transient_function_wrapper('aiohttp.protocol', 'HttpParser.parse_headers') + @transient_function_wrapper("aiohttp.protocol", "HttpParser.parse_headers") def recorder(wrapped, instance, args, kwargs): def _bind_params(lines): return lines lines = _bind_params(*args, **kwargs) for line in lines: - line = line.decode('utf-8') + line = line.decode("utf-8") # This is the request, not the response - if 
line.startswith('GET'): + if line.startswith("GET"): break - if ':' in line: - key, value = line.split(':', maxsplit=1) + if ":" in line: + key, value = line.split(":", maxsplit=1) raw_headers[key.strip()] = value.strip() return wrapped(*args, **kwargs) @@ -61,59 +66,61 @@ def _bind_params(lines): @pytest.mark.parametrize( - 'inbound_payload,expected_intrinsics,forgone_intrinsics,cat_id', [ - - # Valid payload from trusted account - (["b854df4feb2b1f06", False, "7e249074f277923d", "5d2957be"], - {"nr.referringTransactionGuid": "b854df4feb2b1f06", - "nr.tripId": "7e249074f277923d", - "nr.referringPathHash": "5d2957be"}, - [], - '1#1'), - - # Valid payload from an untrusted account - (["b854df4feb2b1f06", False, "7e249074f277923d", "5d2957be"], - {}, - ['nr.referringTransactionGuid', 'nr.tripId', 'nr.referringPathHash'], - '80#1'), -]) -@pytest.mark.parametrize('method', ['GET']) -@pytest.mark.parametrize('uri,metric_name', test_uris) -def test_cat_headers(method, uri, metric_name, inbound_payload, - expected_intrinsics, forgone_intrinsics, cat_id, aiohttp_app): + "inbound_payload,expected_intrinsics,forgone_intrinsics,cat_id", + [ + # Valid payload from trusted account + ( + ["b854df4feb2b1f06", False, "7e249074f277923d", "5d2957be"], + { + "nr.referringTransactionGuid": "b854df4feb2b1f06", + "nr.tripId": "7e249074f277923d", + "nr.referringPathHash": "5d2957be", + }, + [], + "1#1", + ), + # Valid payload from an untrusted account + ( + ["b854df4feb2b1f06", False, "7e249074f277923d", "5d2957be"], + {}, + ["nr.referringTransactionGuid", "nr.tripId", "nr.referringPathHash"], + "80#1", + ), + ], +) +@pytest.mark.parametrize("method", ["GET"]) +@pytest.mark.parametrize("uri,metric_name", test_uris) +def test_cat_headers( + method, uri, metric_name, inbound_payload, expected_intrinsics, forgone_intrinsics, cat_id, aiohttp_app +): _raw_headers = {} - @asyncio.coroutine - def fetch(): - headers = make_cross_agent_headers(inbound_payload, ENCODING_KEY, - cat_id) - resp = 
yield from aiohttp_app.client.request(method, uri, - headers=headers) + async def fetch(): + headers = make_cross_agent_headers(inbound_payload, ENCODING_KEY, cat_id) + resp = await aiohttp_app.client.request(method, uri, headers=headers) if _raw_headers: raw_headers = _raw_headers else: - raw_headers = {k.decode('utf-8'): v.decode('utf-8') - for k, v in resp.raw_headers} + raw_headers = {k.decode("utf-8"): v.decode("utf-8") for k, v in resp.raw_headers} if expected_intrinsics: # test valid CAT response header - assert 'X-NewRelic-App-Data' in raw_headers + assert "X-NewRelic-App-Data" in raw_headers - app_data = json.loads(deobfuscate( - raw_headers['X-NewRelic-App-Data'], ENCODING_KEY)) + app_data = json.loads(deobfuscate(raw_headers["X-NewRelic-App-Data"], ENCODING_KEY)) assert app_data[0] == cat_id - assert app_data[1] == ('WebTransaction/Function/%s' % metric_name) + assert app_data[1] == ("WebTransaction/Function/%s" % metric_name) else: - assert 'X-NewRelic-App-Data' not in resp.headers + assert "X-NewRelic-App-Data" not in resp.headers _custom_settings = { - 'cross_process_id': '1#1', - 'encoding_key': ENCODING_KEY, - 'trusted_account_ids': [1], - 'cross_application_tracer.enabled': True, - 'distributed_tracing.enabled': False, + "cross_process_id": "1#1", + "encoding_key": ENCODING_KEY, + "trusted_account_ids": [1], + "cross_application_tracer.enabled": True, + "distributed_tracing.enabled": False, } # NOTE: the logic-flow of this test can be a bit confusing. @@ -125,9 +132,11 @@ def fetch(): # is received and subsequently processed. 
that code is # a fixture from conftest.py/_target_application.py - @validate_analytics_catmap_data('WebTransaction/Function/%s' % metric_name, - expected_attributes=expected_intrinsics, - non_expected_attributes=forgone_intrinsics) + @validate_analytics_catmap_data( + "WebTransaction/Function/%s" % metric_name, + expected_attributes=expected_intrinsics, + non_expected_attributes=forgone_intrinsics, + ) @override_application_settings(_custom_settings) @record_aiohttp1_raw_headers(_raw_headers) def _test(): @@ -136,8 +145,8 @@ def _test(): _test() -account_id = '33' -primary_application_id = '2827902' +account_id = "33" +primary_application_id = "2827902" inbound_payload = { "v": [0, 1], @@ -150,14 +159,14 @@ def _test(): "sa": True, "ti": 1518469636035, "tr": "d6b4ba0c3a712ca", - "ty": "App" - } + "ty": "App", + }, } expected_attributes = { - 'agent': [], - 'user': [], - 'intrinsic': { + "agent": [], + "user": [], + "intrinsic": { "traceId": "d6b4ba0c3a712ca", "priority": 1.234567, "sampled": True, @@ -166,32 +175,28 @@ def _test(): "parent.account": account_id, "parent.transportType": "HTTP", "parentId": "e8b91a159289ff74", - "parentSpanId": "7d3efb1b173fecfa" - } + "parentSpanId": "7d3efb1b173fecfa", + }, } unexpected_attributes = { - 'agent': [], - 'user': [], - 'intrinsic': [ - "grandparentId", "cross_process_id", "nr.tripId", "nr.pathHash" - ] + "agent": [], + "user": [], + "intrinsic": ["grandparentId", "cross_process_id", "nr.tripId", "nr.pathHash"], } -@pytest.mark.parametrize('uri,metric_name', test_uris) +@pytest.mark.parametrize("uri,metric_name", test_uris) def test_distributed_tracing_headers(uri, metric_name, aiohttp_app): - @asyncio.coroutine - def fetch(): - headers = {'newrelic': json.dumps(inbound_payload)} - resp = yield from aiohttp_app.client.request('GET', uri, - headers=headers) + async def fetch(): + headers = {"newrelic": json.dumps(inbound_payload)} + resp = await aiohttp_app.client.request("GET", uri, headers=headers) # better cat does not 
send a response in the headers - assert 'newrelic' not in resp.headers + assert "newrelic" not in resp.headers # old-cat headers should not be in the response - assert 'X-NewRelic-App-Data' not in resp.headers + assert "X-NewRelic-App-Data" not in resp.headers # NOTE: the logic-flow of this test can be a bit confusing. # the override settings and attribute validation occur @@ -202,14 +207,15 @@ def fetch(): # is received and subsequently processed. that code is # a fixture from conftest.py/_target_application.py - @validate_transaction_event_attributes( - expected_attributes, unexpected_attributes) - @override_application_settings({ - 'account_id': '33', - 'trusted_account_key': '33', - 'primary_application_id': primary_application_id, - 'distributed_tracing.enabled': True - }) + @validate_transaction_event_attributes(expected_attributes, unexpected_attributes) + @override_application_settings( + { + "account_id": "33", + "trusted_account_key": "33", + "primary_application_id": primary_application_id, + "distributed_tracing.enabled": True, + } + ) def _test(): aiohttp_app.loop.run_until_complete(fetch()) diff --git a/tests/framework_aiohttp/test_ws.py b/tests/framework_aiohttp/test_ws.py index 549902d6e..da908014d 100644 --- a/tests/framework_aiohttp/test_ws.py +++ b/tests/framework_aiohttp/test_ws.py @@ -12,28 +12,25 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import asyncio import aiohttp from testing_support.fixtures import function_not_called -version_info = tuple(int(_) for _ in aiohttp.__version__.split('.')[:2]) +version_info = tuple(int(_) for _ in aiohttp.__version__.split(".")[:2]) -@function_not_called('newrelic.core.stats_engine', - 'StatsEngine.record_transaction') +@function_not_called("newrelic.core.stats_engine", "StatsEngine.record_transaction") def test_websocket(aiohttp_app): - @asyncio.coroutine - def ws_write(): - ws = yield from aiohttp_app.client.ws_connect('/ws') + async def ws_write(): + ws = await aiohttp_app.client.ws_connect("/ws") try: for _ in range(2): - result = ws.send_str('Hello') - if hasattr(result, '__await__'): - yield from result.__await__() - msg = yield from ws.receive() - assert msg.data == '/Hello' + result = ws.send_str("Hello") + if hasattr(result, "__await__"): + await result + msg = await ws.receive() + assert msg.data == "/Hello" finally: - yield from ws.close(code=1000) + await ws.close(code=1000) assert ws.close_code == 1000 aiohttp_app.loop.run_until_complete(ws_write()) diff --git a/tests/framework_tornado/_target_application.py b/tests/framework_tornado/_target_application.py index 497b81efd..98db75ab9 100644 --- a/tests/framework_tornado/_target_application.py +++ b/tests/framework_tornado/_target_application.py @@ -13,12 +13,13 @@ # limitations under the License. 
import time -import tornado.ioloop -import tornado.web + import tornado.gen import tornado.httpclient -import tornado.websocket import tornado.httputil +import tornado.ioloop +import tornado.web +import tornado.websocket from tornado.routing import PathMatches @@ -36,16 +37,15 @@ def get_status(self, *args, **kwargs): class ProcessCatHeadersHandler(tornado.web.RequestHandler): def __init__(self, application, request, response_code=200, **kwargs): - super(ProcessCatHeadersHandler, self).__init__(application, request, - **kwargs) + super(ProcessCatHeadersHandler, self).__init__(application, request, **kwargs) self.response_code = response_code def get(self, client_cross_process_id, txn_header, flush=None): import newrelic.api.transaction as _transaction + txn = _transaction.current_transaction() if txn: - txn._process_incoming_cat_headers(client_cross_process_id, - txn_header) + txn._process_incoming_cat_headers(client_cross_process_id, txn_header) if self.response_code != 200: self.set_status(self.response_code) @@ -53,7 +53,7 @@ def get(self, client_cross_process_id, txn_header, flush=None): self.write("Hello, world") - if flush == 'flush': + if flush == "flush": # Force a flush prior to calling finish # This causes the headers to get written immediately. 
The tests # which hit this endpoint will check that the response has been @@ -61,17 +61,17 @@ def get(self, client_cross_process_id, txn_header, flush=None): self.flush() # change the headers to garbage - self.set_header('Content-Type', 'garbage') + self.set_header("Content-Type", "garbage") class EchoHeaderHandler(tornado.web.RequestHandler): def get(self): - response = str(self.request.headers.__dict__).encode('utf-8') + response = str(self.request.headers.__dict__).encode("utf-8") self.write(response) class SimpleHandler(tornado.web.RequestHandler): - options = {'your_command': 'options'} + options = {"your_command": "options"} def get(self): self.write("Hello, world") @@ -111,7 +111,7 @@ def get(self): @tornado.gen.coroutine def throw_exception(self): - raise ValueError('Throwing exception.') + raise ValueError("Throwing exception.") class CoroHandler(tornado.web.RequestHandler): @@ -165,7 +165,7 @@ async def get(self): def trace(self): from newrelic.api.function_trace import FunctionTrace - with FunctionTrace(name='trace', terminal=True): + with FunctionTrace(name="trace", terminal=True): pass @@ -178,12 +178,11 @@ class EnsureFutureHandler(tornado.web.RequestHandler): def get(self): import asyncio - @asyncio.coroutine - def coro_trace(): + async def coro_trace(): from newrelic.api.function_trace import FunctionTrace - with FunctionTrace(name='trace', terminal=True): - yield from tornado.gen.sleep(0) + with FunctionTrace(name="trace", terminal=True): + await tornado.gen.sleep(0) asyncio.ensure_future(coro_trace()) @@ -193,18 +192,14 @@ def on_message(self, message): super(WebNestedHandler, self).on_message(message) -class CustomApplication( - tornado.httputil.HTTPServerConnectionDelegate, - tornado.httputil.HTTPMessageDelegate): - +class CustomApplication(tornado.httputil.HTTPServerConnectionDelegate, tornado.httputil.HTTPMessageDelegate): def start_request(self, server_conn, http_conn): self.server_conn = server_conn self.http_conn = http_conn return self def 
finish(self): - response_line = tornado.httputil.ResponseStartLine( - "HTTP/1.1", 200, "OK") + response_line = tornado.httputil.ResponseStartLine("HTTP/1.1", 200, "OK") headers = tornado.httputil.HTTPHeaders() headers["Content-Type"] = "text/plain" self.http_conn.write_headers(response_line, headers) @@ -221,6 +216,7 @@ def initialize(self, yield_before_finish=False): async def get(self, total=1): import asyncio + total = int(total) cls = type(self) @@ -239,33 +235,31 @@ async def get(self, total=1): if self.yield_before_finish: await asyncio.sleep(0) - self.write('*') + self.write("*") def make_app(custom=False): handlers = [ - (PathMatches(r'/simple'), SimpleHandler), - (r'/crash', CrashHandler), - (r'/call-simple', CallSimpleHandler), - (r'/super-simple', SuperSimpleHandler), - (r'/coro', CoroHandler), - (r'/coro-throw', CoroThrowHandler), - (r'/fake-coro', FakeCoroHandler), - (r'/init', InitializeHandler), - (r'/html-insertion', HTMLInsertionHandler), - (r'/bad-get-status', BadGetStatusHandler), - (r'/force-cat-response/(\S+)/(\S+)/(\S+)', ProcessCatHeadersHandler), - (r'/304-cat-response/(\S+)/(\S+)', ProcessCatHeadersHandler, - {'response_code': 304}), - (r'/echo-headers', EchoHeaderHandler), - (r'/native-simple', NativeSimpleHandler), - (r'/multi-trace', MultiTraceHandler), - (r'/web-socket', WebSocketHandler), - (r'/ensure-future', EnsureFutureHandler), - (r'/call-web-socket', WebNestedHandler), - (r'/block/(\d+)', BlockingHandler), - (r'/block-with-yield/(\d+)', BlockingHandler, - {'yield_before_finish': True}), + (PathMatches(r"/simple"), SimpleHandler), + (r"/crash", CrashHandler), + (r"/call-simple", CallSimpleHandler), + (r"/super-simple", SuperSimpleHandler), + (r"/coro", CoroHandler), + (r"/coro-throw", CoroThrowHandler), + (r"/fake-coro", FakeCoroHandler), + (r"/init", InitializeHandler), + (r"/html-insertion", HTMLInsertionHandler), + (r"/bad-get-status", BadGetStatusHandler), + (r"/force-cat-response/(\S+)/(\S+)/(\S+)", ProcessCatHeadersHandler), 
+ (r"/304-cat-response/(\S+)/(\S+)", ProcessCatHeadersHandler, {"response_code": 304}), + (r"/echo-headers", EchoHeaderHandler), + (r"/native-simple", NativeSimpleHandler), + (r"/multi-trace", MultiTraceHandler), + (r"/web-socket", WebSocketHandler), + (r"/ensure-future", EnsureFutureHandler), + (r"/call-web-socket", WebNestedHandler), + (r"/block/(\d+)", BlockingHandler), + (r"/block-with-yield/(\d+)", BlockingHandler, {"yield_before_finish": True}), ] if custom: return CustomApplication() @@ -275,5 +269,5 @@ def make_app(custom=False): if __name__ == "__main__": app = make_app() - app.listen(8888, address='127.0.0.1') + app.listen(8888, address="127.0.0.1") tornado.ioloop.IOLoop.current().start() diff --git a/tests/framework_tornado/test_server.py b/tests/framework_tornado/test_server.py index 16aced356..6963cee56 100644 --- a/tests/framework_tornado/test_server.py +++ b/tests/framework_tornado/test_server.py @@ -13,41 +13,50 @@ # limitations under the License. import pytest -from newrelic.core.config import global_settings -from testing_support.fixtures import (validate_transaction_metrics, - override_generic_settings, function_not_called, - validate_transaction_event_attributes, - validate_transaction_errors, override_ignore_status_codes, - override_application_settings) +from testing_support.fixtures import ( + function_not_called, + override_application_settings, + override_generic_settings, + override_ignore_status_codes, + validate_transaction_errors, + validate_transaction_event_attributes, + validate_transaction_metrics, +) +from testing_support.validators.validate_code_level_metrics import ( + validate_code_level_metrics, +) from testing_support.validators.validate_transaction_count import ( - validate_transaction_count) -from testing_support.validators.validate_code_level_metrics import validate_code_level_metrics - - -@pytest.mark.parametrize('uri,name,metrics, method_metric', ( - # ('/native-simple', '_target_application:NativeSimpleHandler.get', 
None, - # True), - # ('/simple', '_target_application:SimpleHandler.get', None, True), - ('/call-simple', '_target_application:CallSimpleHandler.get', None, True), - ('/super-simple', '_target_application:SuperSimpleHandler.get', None, - True), - ('/coro', '_target_application:CoroHandler.get', None, False), - ('/fake-coro', '_target_application:FakeCoroHandler.get', None, False), - ('/coro-throw', '_target_application:CoroThrowHandler.get', None, False), - ('/init', '_target_application:InitializeHandler.get', None, True), - ('/multi-trace', '_target_application:MultiTraceHandler.get', - [('Function/trace', 2)], True), -)) -@override_application_settings({'attributes.include': ['request.*']}) + validate_transaction_count, +) + +from newrelic.core.config import global_settings + + +@pytest.mark.parametrize( + "uri,name,metrics, method_metric", + ( + # ('/native-simple', '_target_application:NativeSimpleHandler.get', None, + # True), + # ('/simple', '_target_application:SimpleHandler.get', None, True), + ("/call-simple", "_target_application:CallSimpleHandler.get", None, True), + ("/super-simple", "_target_application:SuperSimpleHandler.get", None, True), + ("/coro", "_target_application:CoroHandler.get", None, False), + ("/fake-coro", "_target_application:FakeCoroHandler.get", None, False), + ("/coro-throw", "_target_application:CoroThrowHandler.get", None, False), + ("/init", "_target_application:InitializeHandler.get", None, True), + ("/multi-trace", "_target_application:MultiTraceHandler.get", [("Function/trace", 2)], True), + ), +) +@override_application_settings({"attributes.include": ["request.*"]}) def test_server(app, uri, name, metrics, method_metric): - FRAMEWORK_METRIC = 'Python/Framework/Tornado/%s' % app.tornado_version - METHOD_METRIC = 'Function/%s' % name + FRAMEWORK_METRIC = "Python/Framework/Tornado/%s" % app.tornado_version + METHOD_METRIC = "Function/%s" % name metrics = metrics or [] metrics.append((FRAMEWORK_METRIC, 1)) 
metrics.append((METHOD_METRIC, 1 if method_metric else None)) - host = '127.0.0.1:' + str(app.get_http_port()) + host = "127.0.0.1:" + str(app.get_http_port()) namespace, func_name = name.split(".") namespace = namespace.replace(":", ".") @@ -56,21 +65,21 @@ def test_server(app, uri, name, metrics, method_metric): rollup_metrics=metrics, ) @validate_transaction_event_attributes( - required_params={ - 'agent': ('response.headers.contentType',), - 'user': (), 'intrinsic': ()}, + required_params={"agent": ("response.headers.contentType",), "user": (), "intrinsic": ()}, exact_attrs={ - 'agent': {'request.headers.contentType': '1234', - 'request.headers.host': host, - 'request.method': 'GET', - 'request.uri': uri, - 'response.status': '200'}, - 'user': {}, - 'intrinsic': {'port': app.get_http_port()}, + "agent": { + "request.headers.contentType": "1234", + "request.headers.host": host, + "request.method": "GET", + "request.uri": uri, + "response.status": "200", + }, + "user": {}, + "intrinsic": {"port": app.get_http_port()}, }, ) def _test(): - response = app.fetch(uri, headers=(('Content-Type', '1234'),)) + response = app.fetch(uri, headers=(("Content-Type", "1234"),)) assert response.code == 200 if method_metric: @@ -79,33 +88,31 @@ def _test(): _test() -@pytest.mark.parametrize('uri,name,metrics,method_metric', ( - ('/native-simple', '_target_application:NativeSimpleHandler.get', None, - True), - ('/simple', '_target_application:SimpleHandler.get', None, True), - ('/call-simple', '_target_application:CallSimpleHandler.get', None, True), - ('/super-simple', '_target_application:SuperSimpleHandler.get', None, - True), - ('/coro', '_target_application:CoroHandler.get', None, False), - ('/fake-coro', '_target_application:FakeCoroHandler.get', None, False), - ('/coro-throw', '_target_application:CoroThrowHandler.get', None, False), - ('/init', '_target_application:InitializeHandler.get', None, True), - ('/ensure-future', - '_target_application:EnsureFutureHandler.get', - 
[('Function/trace', None)], True), - ('/multi-trace', '_target_application:MultiTraceHandler.get', - [('Function/trace', 2)], True), -)) +@pytest.mark.parametrize( + "uri,name,metrics,method_metric", + ( + ("/native-simple", "_target_application:NativeSimpleHandler.get", None, True), + ("/simple", "_target_application:SimpleHandler.get", None, True), + ("/call-simple", "_target_application:CallSimpleHandler.get", None, True), + ("/super-simple", "_target_application:SuperSimpleHandler.get", None, True), + ("/coro", "_target_application:CoroHandler.get", None, False), + ("/fake-coro", "_target_application:FakeCoroHandler.get", None, False), + ("/coro-throw", "_target_application:CoroThrowHandler.get", None, False), + ("/init", "_target_application:InitializeHandler.get", None, True), + ("/ensure-future", "_target_application:EnsureFutureHandler.get", [("Function/trace", None)], True), + ("/multi-trace", "_target_application:MultiTraceHandler.get", [("Function/trace", 2)], True), + ), +) def test_concurrent_inbound_requests(app, uri, name, metrics, method_metric): from tornado import gen - FRAMEWORK_METRIC = 'Python/Framework/Tornado/%s' % app.tornado_version - METHOD_METRIC = 'Function/%s' % name + FRAMEWORK_METRIC = "Python/Framework/Tornado/%s" % app.tornado_version + METHOD_METRIC = "Function/%s" % name metrics = metrics or [] metrics.append((FRAMEWORK_METRIC, 1)) metrics.append((METHOD_METRIC, 1 if method_metric else None)) - + namespace, func_name = name.split(".") namespace = namespace.replace(":", ".") @@ -127,89 +134,92 @@ def _test(): _test() + @validate_code_level_metrics("_target_application.CrashHandler", "get") -@validate_transaction_metrics('_target_application:CrashHandler.get') -@validate_transaction_errors(['builtins:ValueError']) +@validate_transaction_metrics("_target_application:CrashHandler.get") +@validate_transaction_errors(["builtins:ValueError"]) def test_exceptions_are_recorded(app): - response = app.fetch('/crash') + response = 
app.fetch("/crash") assert response.code == 500 -@pytest.mark.parametrize('nr_enabled,ignore_status_codes', [ - (True, [405]), - (True, []), - (False, None), -]) +@pytest.mark.parametrize( + "nr_enabled,ignore_status_codes", + [ + (True, [405]), + (True, []), + (False, None), + ], +) def test_unsupported_method(app, nr_enabled, ignore_status_codes): - def _test(): - response = app.fetch('/simple', - method='TEAPOT', body=b'', allow_nonstandard_methods=True) + response = app.fetch("/simple", method="TEAPOT", body=b"", allow_nonstandard_methods=True) assert response.code == 405 if nr_enabled: _test = override_ignore_status_codes(ignore_status_codes)(_test) - _test = validate_transaction_metrics( - '_target_application:SimpleHandler')(_test) + _test = validate_transaction_metrics("_target_application:SimpleHandler")(_test) if ignore_status_codes: _test = validate_transaction_errors(errors=[])(_test) else: - _test = validate_transaction_errors( - errors=['tornado.web:HTTPError'])(_test) + _test = validate_transaction_errors(errors=["tornado.web:HTTPError"])(_test) else: settings = global_settings() - _test = override_generic_settings(settings, {'enabled': False})(_test) + _test = override_generic_settings(settings, {"enabled": False})(_test) _test() @validate_transaction_errors(errors=[]) -@validate_transaction_metrics('tornado.web:ErrorHandler') +@validate_transaction_metrics("tornado.web:ErrorHandler") @validate_transaction_event_attributes( - required_params={'agent': (), 'user': (), 'intrinsic': ()}, + required_params={"agent": (), "user": (), "intrinsic": ()}, exact_attrs={ - 'agent': {'request.uri': '/does-not-exist'}, - 'user': {}, - 'intrinsic': {}, + "agent": {"request.uri": "/does-not-exist"}, + "user": {}, + "intrinsic": {}, }, ) def test_not_found(app): - response = app.fetch('/does-not-exist') + response = app.fetch("/does-not-exist") assert response.code == 404 -@override_generic_settings(global_settings(), { - 'enabled': False, -}) 
-@function_not_called('newrelic.core.stats_engine', - 'StatsEngine.record_transaction') +@override_generic_settings( + global_settings(), + { + "enabled": False, + }, +) +@function_not_called("newrelic.core.stats_engine", "StatsEngine.record_transaction") def test_nr_disabled(app): - response = app.fetch('/simple') + response = app.fetch("/simple") assert response.code == 200 -@pytest.mark.parametrize('uri,name', ( - ('/web-socket', '_target_application:WebSocketHandler'), - ('/call-web-socket', '_target_application:WebNestedHandler'), -)) +@pytest.mark.parametrize( + "uri,name", + ( + ("/web-socket", "_target_application:WebSocketHandler"), + ("/call-web-socket", "_target_application:WebNestedHandler"), + ), +) def test_web_socket(uri, name, app): - import asyncio from tornado.websocket import websocket_connect namespace, func_name = name.split(":") @validate_transaction_metrics( name, - rollup_metrics=[('Function/%s' % name, None)], + rollup_metrics=[("Function/%s" % name, None)], ) @validate_code_level_metrics(namespace, func_name) def _test(): - url = app.get_url(uri).replace('http', 'ws') + url = app.get_url(uri).replace("http", "ws") - @asyncio.coroutine - def _connect(): - conn = yield from websocket_connect(url) + async def _connect(): + conn = await websocket_connect(url) return conn @validate_transaction_metrics( @@ -218,14 +228,13 @@ def _connect(): def connect(): return app.io_loop.run_sync(_connect) - @function_not_called('newrelic.core.stats_engine', - 'StatsEngine.record_transaction') + @function_not_called("newrelic.core.stats_engine", "StatsEngine.record_transaction") def call(call): - @asyncio.coroutine - def _call(): - yield from conn.write_message("test") - resp = yield from conn.read_message() + async def _call(): + await conn.write_message("test") + resp = await conn.read_message() assert resp == "hello test" + app.io_loop.run_sync(_call) conn = connect() @@ -235,13 +244,10 @@ def _call(): _test() -LOOP_TIME_METRICS = ( - ('EventLoop/Wait/' - 
'WebTransaction/Function/_target_application:BlockingHandler.get', 1), -) +LOOP_TIME_METRICS = (("EventLoop/Wait/" "WebTransaction/Function/_target_application:BlockingHandler.get", 1),) -@pytest.mark.parametrize('yield_before_finish', (True, False)) +@pytest.mark.parametrize("yield_before_finish", (True, False)) @validate_transaction_metrics( "_target_application:BlockingHandler.get", scoped_metrics=LOOP_TIME_METRICS, @@ -250,9 +256,9 @@ def test_io_loop_blocking_time(app, yield_before_finish): from tornado import gen if yield_before_finish: - url = app.get_url('/block-with-yield/2') + url = app.get_url("/block-with-yield/2") else: - url = app.get_url('/block/2') + url = app.get_url("/block/2") coros = (app.http_client.fetch(url) for _ in range(2)) responses = app.io_loop.run_sync(lambda: gen.multi(coros)) diff --git a/tox.ini b/tox.ini index c50a1b75b..fdb143672 100644 --- a/tox.ini +++ b/tox.ini @@ -42,122 +42,124 @@ [tox] setupdir = {toxinidir} envlist = - python-adapter_cheroot-{py27,py37,py38,py39,py310}, - python-adapter_daphne-{py37,py38,py39,py310}-daphnelatest, + python-adapter_cheroot-{py27,py37,py38,py39,py310,py311}, + python-adapter_daphne-{py37,py38,py39,py310,py311}-daphnelatest, python-adapter_daphne-py38-daphne{0204,0205}, - python-adapter_gevent-{py27,py37,py38,py310}, - python-adapter_gunicorn-{py37,py38,py39,py310}-aiohttp3-gunicornlatest, - python-adapter_hypercorn-{py37,py38,py39,py310}-hypercornlatest, + python-adapter_gevent-{py27,py37,py38,py310,py311}, + python-adapter_gunicorn-{py37,py38,py39,py310,py311}-aiohttp3-gunicornlatest, + python-adapter_hypercorn-{py37,py38,py39,py310,py311}-hypercornlatest, python-adapter_hypercorn-py38-hypercorn{0010,0011,0012,0013}, python-adapter_uvicorn-py37-uvicorn03, - python-adapter_uvicorn-{py37,py38,py39,py310}-uvicornlatest, - python-agent_features-{py27,py37,py38,py39,py310}-{with,without}_extensions, + python-adapter_uvicorn-{py37,py38,py39,py310,py311}-uvicornlatest, + 
python-agent_features-{py27,py37,py38,py39,py310,py311}-{with,without}_extensions, python-agent_features-{pypy,pypy37}-without_extensions, python-agent_streaming-py27-grpc0125-{with,without}_extensions, - python-agent_streaming-{py37,py38,py39,py310}-protobuf04-{with,without}_extensions, + python-agent_streaming-{py37,py38,py39,py310,py311}-protobuf04-{with,without}_extensions, python-agent_streaming-py39-protobuf{03,0319}-{with,without}_extensions, - python-agent_unittests-{py27,py37,py38,py39,py310}-{with,without}_extensions, + python-agent_unittests-{py27,py37,py38,py39,py310,py311}-{with,without}_extensions, python-agent_unittests-{pypy,pypy37}-without_extensions, - python-application_celery-{py27,py37,py38,py39,py310,pypy,pypy37}, + python-application_celery-{py27,py37,py38,py39,py310,py311,pypy,pypy37}, gearman-application_gearman-{py27,pypy}, python-component_djangorestframework-py27-djangorestframework0300, - python-component_djangorestframework-{py37,py38,py39,py310}-djangorestframeworklatest, + python-component_djangorestframework-{py37,py38,py39,py310,py311}-djangorestframeworklatest, python-component_flask_rest-{py27,py37,py38,py39,pypy,pypy37}, - python-component_graphqlserver-{py37,py38,py39,py310}, + python-component_graphqlserver-{py37,py38,py39,py310,py311}, python-component_tastypie-{py27,pypy}-tastypie0143, python-component_tastypie-{py37,py38,py39,pypy37}-tastypie{0143,latest}, - python-coroutines_asyncio-{py37,py38,py39,py310,pypy37}, - python-cross_agent-{py27,py37,py38,py39,py310}-{with,without}_extensions, + python-coroutines_asyncio-{py37,py38,py39,py310,py311,pypy37}, + python-cross_agent-{py27,py37,py38,py39,py310,py311}-{with,without}_extensions, python-cross_agent-pypy-without_extensions, - postgres-datastore_asyncpg-{py37,py38,py39,py310}, - memcached-datastore_bmemcached-{pypy,py27,py37,py38,py39,py310}-memcached030, + postgres-datastore_asyncpg-{py37,py38,py39,py310,py311}, + 
memcached-datastore_bmemcached-{pypy,py27,py37,py38,py39,py310,py311}-memcached030, elasticsearchserver01-datastore_pyelasticsearch-{py27,pypy}, elasticsearchserver01-datastore_elasticsearch-py27-elasticsearch{00,01,02,05}, - elasticsearchserver07-datastore_elasticsearch-{py27,py37,py38,py39,py310,pypy,pypy37}-elasticsearch{07}, - memcached-datastore_memcache-{py27,py37,py38,py39,py310,pypy,pypy37}-memcached01, + elasticsearchserver07-datastore_elasticsearch-{py27,py37,py38,py39,py310,py311,pypy,pypy37}-elasticsearch{07}, + memcached-datastore_memcache-{py27,py37,py38,py39,py310,py311,pypy,pypy37}-memcached01, mysql-datastore_mysql-mysql080023-py27, - mysql-datastore_mysql-mysqllatest-{py37,py38,py39,py310}, + mysql-datastore_mysql-mysqllatest-{py37,py38,py39,py310,py311}, postgres-datastore_postgresql-{py37,py38,py39}, - postgres-datastore_psycopg2-{py27,py37,py38,py39,py310}-psycopg20208, - postgres-datastore_psycopg2cffi-{py27,pypy}-psycopg2cffi{0207,0208}, - postgres-datastore_psycopg2cffi-{py37,py38,py39,py310}-psycopg2cffi0208, + postgres-datastore_psycopg2-{py27,py37,py38,py39,py310,py311}-psycopg2latest + postgres-datastore_psycopg2cffi-{py27,pypy,py37,py38,py39,py310,py311}-psycopg2cffilatest, memcached-datastore_pylibmc-{py27,py37}, - memcached-datastore_pymemcache-{py27,py37,py38,py39,py310,pypy,pypy37}, - mongodb-datastore_pymongo-{py27,py37,py38,py39,py310,pypy}-pymongo{03}, - mongodb-datastore_pymongo-{py37,py38,py39,py310,pypy,pypy37}-pymongo04, - mysql-datastore_pymysql-{py27,py37,py38,py39,py310,pypy,pypy37}, - solr-datastore_pysolr-{py27,py37,py38,py39,py310,pypy,pypy37}, + memcached-datastore_pymemcache-{py27,py37,py38,py39,py310,py311,pypy,pypy37}, + mongodb-datastore_pymongo-{py27,py37,py38,py39,py310,py311,pypy}-pymongo{03}, + mongodb-datastore_pymongo-{py37,py38,py39,py310,py311,pypy,pypy37}-pymongo04, + mysql-datastore_pymysql-{py27,py37,py38,py39,py310,py311,pypy,pypy37}, + 
solr-datastore_pysolr-{py27,py37,py38,py39,py310,py311,pypy,pypy37}, redis-datastore_redis-{py27,py37,py38,pypy,pypy37}-redis03, - redis-datastore_redis-{py37,py38,py39,py310,pypy37}-redis{0400,latest}, + redis-datastore_redis-{py37,py38,py39,py310,py311,pypy37}-redis{0400,latest}, redis-datastore_aioredis-{py37,py38,py39,py310,pypy37}-aioredislatest, redis-datastore_aioredis-{py37,py310}-aioredis01, redis-datastore_aredis-{py37,py38,py39,pypy37}-aredislatest, solr-datastore_solrpy-{py27,pypy}-solrpy{00,01}, - python-datastore_sqlite-{py27,py37,py38,py39,py310,pypy,pypy37}, + python-datastore_sqlite-{py27,py37,py38,py39,py310,py311,pypy,pypy37}, memcached-datastore_umemcache-{py27,pypy}, - python-external_boto3-{py27,py37,py38,py39,py310}-boto01, - python-external_botocore-{py27,py37,py38,py39,py310}, + python-external_boto3-{py27,py37,py38,py39,py310,py311}-boto01, + python-external_botocore-{py27,py37,py38,py39,py310,py311}, python-external_feedparser-py27-feedparser{05,06}, - python-external_http-{py27,py37,py38,py39,py310,pypy}, - python-external_httplib-{py27,py37,py38,py39,py310,pypy,pypy37}, - python-external_httplib2-{py27,py37,py38,py39,py310,pypy,pypy37}, - python-external_httpx-{py37,py38,py39,py310}, - python-external_requests-{py27,py37,py38,py39,py310,pypy,pypy37}, + python-external_http-{py27,py37,py38,py39,py310,py311,pypy}, + python-external_httplib-{py27,py37,py38,py39,py310,py311,pypy,pypy37}, + python-external_httplib2-{py27,py37,py38,py39,py310,py311,pypy,pypy37}, + python-external_httpx-{py37,py38,py39,py310,py311}, + python-external_requests-{py27,py37,py38,py39,py310,py311,pypy,pypy37}, python-external_urllib3-{py27,py37,pypy}-urllib3{0109}, - python-external_urllib3-{py27,py37,py38,py39,py310,pypy,pypy37}-urllib3latest, - python-framework_aiohttp-{py37,py38,py39,py310,pypy37}-aiohttp03, - python-framework_ariadne-{py37,py38,py39,py310}-ariadnelatest, + python-external_urllib3-{py27,py37,py38,py39,py310,py311,pypy,pypy37}-urllib3latest, + 
python-framework_aiohttp-{py37,py38,py39,py310,py311,pypy37}-aiohttp03, + python-framework_ariadne-{py37,py38,py39,py310,py311}-ariadnelatest, python-framework_ariadne-py37-ariadne{0011,0012,0013}, python-framework_bottle-py27-bottle{0008,0009,0010}, python-framework_bottle-{py27,py37,py38,py39,pypy37}-bottle{0011,0012}, - python-framework_bottle-py310-bottle0012, + python-framework_bottle-{py310,py311}-bottle0012, python-framework_bottle-pypy-bottle{0008,0009,0010,0011,0012}, + ; CherryPy still uses inspect.getargspec, deprecated in favor of inspect.getfullargspec. Not supported in 3.11 python-framework_cherrypy-{py37,py38,py39,py310,pypy37}-CherryPy18, python-framework_cherrypy-{py37}-CherryPy0302, python-framework_cherrypy-pypy37-CherryPy0303, python-framework_django-{pypy,py27}-Django0103, python-framework_django-{pypy,py27,py37}-Django0108, python-framework_django-{py39}-Django{0200,0201,0202,0300,0301,latest}, - python-framework_django-{py37,py38,py39,py310}-Django0302, + python-framework_django-{py37,py38,py39,py310,py311}-Django0302, python-framework_falcon-{py27,py37,py38,py39,pypy,pypy37}-falcon0103, python-framework_falcon-{py37,py38,py39,py310,pypy37}-falcon{0200,master}, - python-framework_fastapi-{py37,py38,py39,py310}, + # Falcon master branch failing on 3.11 currently. 
+ python-framework_falcon-py311-falcon0200, + python-framework_fastapi-{py37,py38,py39,py310,py311}, python-framework_flask-{pypy,py27}-flask0012, - python-framework_flask-{pypy,py27,py37,py38,py39,py310,pypy37}-flask0101, + python-framework_flask-{pypy,py27,py37,py38,py39,py310,py311,pypy37}-flask0101, ; temporarily disabling flaskmaster tests - python-framework_flask-{py37,py38,py39,py310,pypy37}-flask{latest}, - python-framework_graphene-{py37,py38,py39,py310}-graphenelatest, + python-framework_flask-{py37,py38,py39,py310,py311,pypy37}-flask{latest}, + python-framework_graphene-{py37,py38,py39,py310,py311}-graphenelatest, python-framework_graphene-{py27,py37,py38,py39,pypy,pypy37}-graphene{0200,0201}, - python-framework_graphene-py310-graphene0201, - python-framework_graphql-{py27,py37,py38,py39,py310,pypy,pypy37}-graphql02, - python-framework_graphql-{py37,py38,py39,py310,pypy37}-graphql03, + python-framework_graphene-{py310,py311}-graphene0201, + python-framework_graphql-{py27,py37,py38,py39,py310,py311,pypy,pypy37}-graphql02, + python-framework_graphql-{py37,py38,py39,py310,py311,pypy37}-graphql03, ; temporarily disabling graphqlmaster tests python-framework_graphql-py37-graphql{0202,0203,0300,0301,0302}, grpc-framework_grpc-py27-grpc0125, - grpc-framework_grpc-{py37,py38,py39,py310}-grpclatest, + grpc-framework_grpc-{py37,py38,py39,py310,py311}-grpclatest, python-framework_pyramid-{pypy,py27,py38}-Pyramid0104, - python-framework_pyramid-{pypy,py27,pypy37,py37,py38,py39,py310}-Pyramid0110-cornice, - python-framework_pyramid-{py37,py38,py39,py310,pypy37}-Pyramidmaster, + python-framework_pyramid-{pypy,py27,pypy37,py37,py38,py39,py310,py311}-Pyramid0110-cornice, + python-framework_pyramid-{py37,py38,py39,py310,py311,pypy37}-Pyramidmaster, python-framework_sanic-{py38,pypy37}-sanic{190301,1906,1812,1912,200904,210300,2109,2112,2203,2290}, - python-framework_sanic-{py37,py38,py39,py310,pypy37}-saniclatest, + 
python-framework_sanic-{py37,py38,py39,py310,py311,pypy37}-saniclatest, python-framework_starlette-{py310,pypy37}-starlette{0014,0015,0019}, python-framework_starlette-{py37,py38}-starlette{002001}, - python-framework_starlette-{py37,py38,py39,py310,pypy37}-starlettelatest, - python-framework_strawberry-{py37,py38,py39,py310}-strawberrylatest, - python-logger_logging-{py27,py37,py38,py39,py310,pypy,pypy37}, - python-logger_loguru-{py37,py38,py39,py310,pypy37}-logurulatest, + python-framework_starlette-{py37,py38,py39,py310,py311,pypy37}-starlettelatest, + python-framework_strawberry-{py37,py38,py39,py310,py311}-strawberrylatest, + python-logger_logging-{py27,py37,py38,py39,py310,py311,pypy,pypy37}, + python-logger_loguru-{py37,py38,py39,py310,py311,pypy37}-logurulatest, python-logger_loguru-py39-loguru{06,05,04,03}, - libcurl-framework_tornado-{py37,py38,py39,py310,pypy37}-tornado0600, - libcurl-framework_tornado-{py37,py38,py39,py310}-tornadomaster, + libcurl-framework_tornado-{py37,py38,py39,py310,py311,pypy37}-tornado0600, + libcurl-framework_tornado-{py37,py38,py39,py310,py311}-tornadomaster, rabbitmq-messagebroker_pika-{py27,py37,py38,py39,pypy,pypy37}-pika0.13, - rabbitmq-messagebroker_pika-{py37,py38,py39,py310,pypy37}-pikalatest, - kafka-messagebroker_confluentkafka-{py27,py37,py38,py39,py310}-confluentkafkalatest, + rabbitmq-messagebroker_pika-{py37,py38,py39,py310,py311,pypy37}-pikalatest, + kafka-messagebroker_confluentkafka-{py27,py37,py38,py39,py310,py311}-confluentkafkalatest, kafka-messagebroker_confluentkafka-{py27,py39}-confluentkafka{0107,0106}, ; confluent-kafka had a bug in 1.8.2's setup.py file which was incompatible with 2.7. 
kafka-messagebroker_confluentkafka-{py39}-confluentkafka{0108}, kafka-messagebroker_kafkapython-{pypy,py27,py37,py38,pypy37}-kafkapythonlatest, kafka-messagebroker_kafkapython-{py27,py38}-kafkapython{020001,020000,0104}, - python-template_mako-{py27,py37,py38,py39,py310} + python-template_mako-{py27,py37,py38,py39,py310,py311} [pytest] usefixtures = @@ -168,7 +170,7 @@ usefixtures = [testenv] deps = # Base Dependencies - {py37,py38,py39,py310,pypy37}: pytest==6.2.5 + {py37,py38,py39,py310,py311,pypy37}: pytest==6.2.5 {py27,pypy}: pytest==4.6.11 iniconfig pytest-cov @@ -215,10 +217,10 @@ deps = component_graphqlserver: jinja2<3.1 component_tastypie-tastypie0143: django-tastypie<0.14.4 component_tastypie-{py27,pypy}-tastypie0143: django<1.12 - component_tastypie-{py37,py38,py39,py310,pypy37}-tastypie0143: django<3.0.1 + component_tastypie-{py37,py38,py39,py310,py311,pypy37}-tastypie0143: django<3.0.1 component_tastypie-tastypielatest: django-tastypie component_tastypie-tastypielatest: django<4.1 - coroutines_asyncio-{py37,py38,py39,py310}: uvloop + coroutines_asyncio-{py37,py38,py39,py310,py311}: uvloop cross_agent: mock==1.0.1 cross_agent: requests datastore_asyncpg: asyncpg @@ -235,9 +237,8 @@ deps = datastore_mysql-mysql080023: mysql-connector-python<8.0.24 datastore_mysql: protobuf<4 datastore_postgresql: py-postgresql<1.3 - datastore_psycopg2-psycopg20208: psycopg2-binary<2.9 - datastore_psycopg2cffi-psycopg2cffi0207: psycopg2cffi<2.8 - datastore_psycopg2cffi-psycopg2cffi0208: psycopg2cffi<2.9 + datastore_psycopg2-psycopg2latest: psycopg2-binary + datastore_psycopg2cffi-psycopg2cffilatest: psycopg2cffi datastore_pyelasticsearch: pyelasticsearch<2.0 datastore_pylibmc: pylibmc datastore_pymemcache: pymemcache @@ -259,7 +260,7 @@ deps = external_boto3-boto01: moto<2.0 external_boto3-py27: rsa<4.7.1 external_botocore: botocore - external_botocore-{py37,py38,py39,py310}: moto[awslambda,ec2,iam]<3.0 + external_botocore-{py37,py38,py39,py310,py311}: 
moto[awslambda,ec2,iam]<3.0 external_botocore-py27: rsa<4.7.1 external_botocore-py27: moto[awslambda,ec2,iam]<2.0 external_feedparser-feedparser05: feedparser<6 @@ -357,7 +358,6 @@ deps = framework_tornado: pycurl framework_tornado-tornado0600: tornado<6.1 framework_tornado-tornadomaster: https://github.com/tornadoweb/tornado/archive/master.zip - framework_tornado: pycurl logger_loguru-logurulatest: loguru logger_loguru-loguru06: loguru<0.7 logger_loguru-loguru05: loguru<0.6 @@ -385,6 +385,9 @@ setenv = agent_features: NEW_RELIC_APDEX_T = 1000 datastore_umemcache: CFLAGS="-Wno-error" framework_grpc: PYTHONPATH={toxinidir}/tests/:{toxinidir}/tests/framework_grpc/sample_application + libcurl: PYCURL_SSL_LIBRARY=openssl + libcurl: LDFLAGS=-L/usr/local/opt/openssl/lib + libcurl: CPPFLAGS=-I/usr/local/opt/openssl/include passenv = NEW_RELIC_DEVELOPER_MODE @@ -399,6 +402,8 @@ commands = framework_grpc: --grpc_python_out={toxinidir}/tests/framework_grpc/sample_application \ framework_grpc: /{toxinidir}/tests/framework_grpc/sample_application/sample_application.proto + libcurl: pip install --ignore-installed --install-option="--with-openssl" pycurl + py.test -v [] install_command= From ca420b327dfce0093daab58461a583e5573d1fce Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Mon, 24 Oct 2022 11:47:48 -0700 Subject: [PATCH 010/108] Remove devcontainer submodule (#669) --- .devcontainer/dotfiles | 1 - 1 file changed, 1 deletion(-) delete mode 160000 .devcontainer/dotfiles diff --git a/.devcontainer/dotfiles b/.devcontainer/dotfiles deleted file mode 160000 index 4d575e4d6..000000000 --- a/.devcontainer/dotfiles +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 4d575e4d60a9f195f9f315dde7f380a5ae26e27d From a66a33af357659626f1c35c6c32a453eff0a399a Mon Sep 17 00:00:00 2001 From: Lalleh Rafeei <84813886+lrafeei@users.noreply.github.com> Date: Wed, 26 Oct 2022 10:37:20 -0700 Subject: [PATCH 011/108] Uncomment NewRelicContextFormatter 
from agent.py (#676) --- newrelic/agent.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/newrelic/agent.py b/newrelic/agent.py index e532d1c6e..9c4ce1035 100644 --- a/newrelic/agent.py +++ b/newrelic/agent.py @@ -15,8 +15,7 @@ from newrelic.api.application import application_instance as __application from newrelic.api.application import application_settings as __application_settings from newrelic.api.application import register_application as __register_application - -# from newrelic.api.log import NewRelicContextFormatter +from newrelic.api.log import NewRelicContextFormatter # noqa from newrelic.api.time_trace import ( add_custom_span_attribute as __add_custom_span_attribute, ) From 7b82cb95839fb8ebb630204337d87e568a7c70d1 Mon Sep 17 00:00:00 2001 From: Lalleh Rafeei <84813886+lrafeei@users.noreply.github.com> Date: Wed, 26 Oct 2022 11:16:55 -0700 Subject: [PATCH 012/108] Fix botocore tests for botocore v1.28.1+ (#675) * Fix botocore tests for botocore v1.28.1+ Co-authored-by: Timothy Pansino * Fix boto3 tests for botocore v1.28.1+ Co-authored-by: Timothy Pansino Co-authored-by: Hannah Stepanek Co-authored-by: Uma Annamalai * Fix boto3 tests for python 2.7 Co-authored-by: Timothy Pansino Co-authored-by: Hannah Stepanek Co-authored-by: Uma Annamalai Co-authored-by: Timothy Pansino Co-authored-by: Hannah Stepanek Co-authored-by: Uma Annamalai Co-authored-by: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> --- tests/external_boto3/test_boto3_s3.py | 122 ++++++++++---------- tests/external_botocore/test_botocore_s3.py | 113 +++++++++--------- tox.ini | 6 +- 3 files changed, 123 insertions(+), 118 deletions(-) diff --git a/tests/external_boto3/test_boto3_s3.py b/tests/external_boto3/test_boto3_s3.py index ba65bc950..765be1826 100644 --- a/tests/external_boto3/test_boto3_s3.py +++ b/tests/external_boto3/test_boto3_s3.py @@ -18,106 +18,106 @@ import boto3 import botocore import moto +from testing_support.fixtures import ( + 
override_application_settings, + validate_transaction_metrics, +) +from testing_support.validators.validate_span_events import validate_span_events from newrelic.api.background_task import background_task -from testing_support.fixtures import (validate_transaction_metrics, - override_application_settings) -from testing_support.validators.validate_span_events import ( - validate_span_events) -MOTO_VERSION = tuple(int(v) for v in moto.__version__.split('.')[:3]) +MOTO_VERSION = tuple(int(v) for v in moto.__version__.split(".")[:3]) # patch earlier versions of moto to support py37 if sys.version_info >= (3, 7) and MOTO_VERSION <= (1, 3, 1): import re + moto.packages.responses.responses.re._pattern_type = re.Pattern -AWS_ACCESS_KEY_ID = 'AAAAAAAAAAAACCESSKEY' -AWS_SECRET_ACCESS_KEY = 'AAAAAASECRETKEY' -AWS_REGION_NAME = 'us-west-2' +AWS_ACCESS_KEY_ID = "AAAAAAAAAAAACCESSKEY" +AWS_SECRET_ACCESS_KEY = "AAAAAASECRETKEY" # nosec +AWS_REGION_NAME = "us-west-2" + +TEST_BUCKET = "python-agent-test-%s" % uuid.uuid4() + +BOTOCORE_VERSION = tuple(map(int, botocore.__version__.split("."))) -TEST_BUCKET = 'python-agent-test-%s' % uuid.uuid4() -BOTOCORE_VERSION = tuple(map(int, botocore.__version__.split('.'))) if BOTOCORE_VERSION < (1, 7, 41): - S3_URL = 's3-us-west-2.amazonaws.com' + S3_URL = "s3-us-west-2.amazonaws.com" + EXPECTED_BUCKET_URL = "https://%s/%s" % (S3_URL, TEST_BUCKET) + EXPECTED_KEY_URL = EXPECTED_BUCKET_URL + "/hello_world" +elif BOTOCORE_VERSION < (1, 28): + S3_URL = "s3.us-west-2.amazonaws.com" + EXPECTED_BUCKET_URL = "https://%s/%s" % (S3_URL, TEST_BUCKET) + EXPECTED_KEY_URL = EXPECTED_BUCKET_URL + "/hello_world" else: - S3_URL = 's3.us-west-2.amazonaws.com' + S3_URL = "%s.s3.us-west-2.amazonaws.com" % TEST_BUCKET + EXPECTED_BUCKET_URL = "https://%s/" % S3_URL + EXPECTED_KEY_URL = EXPECTED_BUCKET_URL + "hello_world" -expected_http_url = 'https://%s/%s' % (S3_URL, TEST_BUCKET) _s3_scoped_metrics = [ - ('External/%s/botocore/GET' % S3_URL, 2), - 
('External/%s/botocore/PUT' % S3_URL, 2), - ('External/%s/botocore/DELETE' % S3_URL, 2), + ("External/%s/botocore/GET" % S3_URL, 2), + ("External/%s/botocore/PUT" % S3_URL, 2), + ("External/%s/botocore/DELETE" % S3_URL, 2), ] _s3_rollup_metrics = [ - ('External/all', 6), - ('External/allOther', 6), - ('External/%s/all' % S3_URL, 6), - ('External/%s/botocore/GET' % S3_URL, 2), - ('External/%s/botocore/PUT' % S3_URL, 2), - ('External/%s/botocore/DELETE' % S3_URL, 2), + ("External/all", 6), + ("External/allOther", 6), + ("External/%s/all" % S3_URL, 6), + ("External/%s/botocore/GET" % S3_URL, 2), + ("External/%s/botocore/PUT" % S3_URL, 2), + ("External/%s/botocore/DELETE" % S3_URL, 2), ] -@override_application_settings({'distributed_tracing.enabled': True}) -@validate_span_events(exact_agents={'aws.operation': 'CreateBucket'}, count=1) -@validate_span_events(exact_agents={'aws.operation': 'PutObject'}, count=1) -@validate_span_events(exact_agents={'aws.operation': 'ListObjects'}, count=1) -@validate_span_events(exact_agents={'aws.operation': 'GetObject'}, count=1) -@validate_span_events(exact_agents={'aws.operation': 'DeleteObject'}, count=1) -@validate_span_events(exact_agents={'aws.operation': 'DeleteBucket'}, count=1) -@validate_span_events( - exact_agents={'http.url': expected_http_url}, count=3) -@validate_span_events( - exact_agents={'http.url': expected_http_url + '/hello_world'}, count=3) +@override_application_settings({"distributed_tracing.enabled": True}) +@validate_span_events(exact_agents={"aws.operation": "CreateBucket"}, count=1) +@validate_span_events(exact_agents={"aws.operation": "PutObject"}, count=1) +@validate_span_events(exact_agents={"aws.operation": "ListObjects"}, count=1) +@validate_span_events(exact_agents={"aws.operation": "GetObject"}, count=1) +@validate_span_events(exact_agents={"aws.operation": "DeleteObject"}, count=1) +@validate_span_events(exact_agents={"aws.operation": "DeleteBucket"}, count=1) 
+@validate_span_events(exact_agents={"http.url": EXPECTED_BUCKET_URL}, count=3) +@validate_span_events(exact_agents={"http.url": EXPECTED_KEY_URL}, count=3) @validate_transaction_metrics( - 'test_boto3_s3:test_s3', - scoped_metrics=_s3_scoped_metrics, - rollup_metrics=_s3_rollup_metrics, - background_task=True) + "test_boto3_s3:test_s3", scoped_metrics=_s3_scoped_metrics, rollup_metrics=_s3_rollup_metrics, background_task=True +) @background_task() @moto.mock_s3 def test_s3(): client = boto3.client( - 's3', - aws_access_key_id=AWS_ACCESS_KEY_ID, - aws_secret_access_key=AWS_SECRET_ACCESS_KEY, - region_name=AWS_REGION_NAME, + "s3", + aws_access_key_id=AWS_ACCESS_KEY_ID, + aws_secret_access_key=AWS_SECRET_ACCESS_KEY, + region_name=AWS_REGION_NAME, ) # Create bucket - resp = client.create_bucket( - Bucket=TEST_BUCKET, - CreateBucketConfiguration={'LocationConstraint': AWS_REGION_NAME} - ) - assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 + resp = client.create_bucket(Bucket=TEST_BUCKET, CreateBucketConfiguration={"LocationConstraint": AWS_REGION_NAME}) + assert resp["ResponseMetadata"]["HTTPStatusCode"] == 200 # Put object - resp = client.put_object( - Bucket=TEST_BUCKET, - Key='hello_world', - Body=b'hello_world_content' - ) - assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 + resp = client.put_object(Bucket=TEST_BUCKET, Key="hello_world", Body=b"hello_world_content") + assert resp["ResponseMetadata"]["HTTPStatusCode"] == 200 # List bucket resp = client.list_objects(Bucket=TEST_BUCKET) - assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 - assert len(resp['Contents']) == 1 - assert resp['Contents'][0]['Key'] == 'hello_world' + assert resp["ResponseMetadata"]["HTTPStatusCode"] == 200 + assert len(resp["Contents"]) == 1 + assert resp["Contents"][0]["Key"] == "hello_world" # Get object - resp = client.get_object(Bucket=TEST_BUCKET, Key='hello_world') - assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 - assert resp['Body'].read() == 
b'hello_world_content' + resp = client.get_object(Bucket=TEST_BUCKET, Key="hello_world") + assert resp["ResponseMetadata"]["HTTPStatusCode"] == 200 + assert resp["Body"].read() == b"hello_world_content" # Delete object - resp = client.delete_object(Bucket=TEST_BUCKET, Key='hello_world') - assert resp['ResponseMetadata']['HTTPStatusCode'] == 204 + resp = client.delete_object(Bucket=TEST_BUCKET, Key="hello_world") + assert resp["ResponseMetadata"]["HTTPStatusCode"] == 204 # Delete bucket resp = client.delete_bucket(Bucket=TEST_BUCKET) - assert resp['ResponseMetadata']['HTTPStatusCode'] == 204 + assert resp["ResponseMetadata"]["HTTPStatusCode"] == 204 diff --git a/tests/external_botocore/test_botocore_s3.py b/tests/external_botocore/test_botocore_s3.py index 3cd4ecd93..51bbb12e8 100644 --- a/tests/external_botocore/test_botocore_s3.py +++ b/tests/external_botocore/test_botocore_s3.py @@ -15,101 +15,104 @@ import sys import uuid +import botocore import botocore.session import moto +from testing_support.fixtures import ( + override_application_settings, + validate_transaction_metrics, +) +from testing_support.validators.validate_span_events import validate_span_events from newrelic.api.background_task import background_task -from testing_support.fixtures import (validate_transaction_metrics, - override_application_settings) -from testing_support.validators.validate_span_events import ( - validate_span_events) -MOTO_VERSION = tuple(int(v) for v in moto.__version__.split('.')[:3]) +MOTO_VERSION = tuple(int(v) for v in moto.__version__.split(".")[:3]) +BOTOCORE_VERSION = tuple(int(v) for v in botocore.__version__.split(".")[:3]) + # patch earlier versions of moto to support py37 if sys.version_info >= (3, 7) and MOTO_VERSION <= (1, 3, 1): import re + moto.packages.responses.responses.re._pattern_type = re.Pattern -AWS_ACCESS_KEY_ID = 'AAAAAAAAAAAACCESSKEY' -AWS_SECRET_ACCESS_KEY = 'AAAAAASECRETKEY' -AWS_REGION = 'us-east-1' +AWS_ACCESS_KEY_ID = "AAAAAAAAAAAACCESSKEY" 
+AWS_SECRET_ACCESS_KEY = "AAAAAASECRETKEY" # nosec +AWS_REGION = "us-east-1" -TEST_BUCKET = 'python-agent-test-%s' % uuid.uuid4() -S3_URL = 's3.amazonaws.com' -expected_http_url = 'https://%s/%s' % (S3_URL, TEST_BUCKET) +TEST_BUCKET = "python-agent-test-%s" % uuid.uuid4() +if BOTOCORE_VERSION >= (1, 28): + S3_URL = "%s.s3.amazonaws.com" % TEST_BUCKET + EXPECTED_BUCKET_URL = "https://%s/" % S3_URL + EXPECTED_KEY_URL = EXPECTED_BUCKET_URL + "hello_world" +else: + S3_URL = "s3.amazonaws.com" + EXPECTED_BUCKET_URL = "https://%s/%s" % (S3_URL, TEST_BUCKET) + EXPECTED_KEY_URL = EXPECTED_BUCKET_URL + "/hello_world" _s3_scoped_metrics = [ - ('External/s3.amazonaws.com/botocore/GET', 2), - ('External/s3.amazonaws.com/botocore/PUT', 2), - ('External/s3.amazonaws.com/botocore/DELETE', 2), + ("External/%s/botocore/GET" % S3_URL, 2), + ("External/%s/botocore/PUT" % S3_URL, 2), + ("External/%s/botocore/DELETE" % S3_URL, 2), ] _s3_rollup_metrics = [ - ('External/all', 6), - ('External/allOther', 6), - ('External/s3.amazonaws.com/all', 6), - ('External/s3.amazonaws.com/botocore/GET', 2), - ('External/s3.amazonaws.com/botocore/PUT', 2), - ('External/s3.amazonaws.com/botocore/DELETE', 2), + ("External/all", 6), + ("External/allOther", 6), + ("External/%s/all" % S3_URL, 6), + ("External/%s/botocore/GET" % S3_URL, 2), + ("External/%s/botocore/PUT" % S3_URL, 2), + ("External/%s/botocore/DELETE" % S3_URL, 2), ] -@override_application_settings({'distributed_tracing.enabled': True}) -@validate_span_events(exact_agents={'aws.operation': 'CreateBucket'}, count=1) -@validate_span_events(exact_agents={'aws.operation': 'PutObject'}, count=1) -@validate_span_events(exact_agents={'aws.operation': 'ListObjects'}, count=1) -@validate_span_events(exact_agents={'aws.operation': 'GetObject'}, count=1) -@validate_span_events(exact_agents={'aws.operation': 'DeleteObject'}, count=1) -@validate_span_events(exact_agents={'aws.operation': 'DeleteBucket'}, count=1) -@validate_span_events( - 
exact_agents={'http.url': expected_http_url}, count=3) -@validate_span_events( - exact_agents={'http.url': expected_http_url + '/hello_world'}, count=3) +@override_application_settings({"distributed_tracing.enabled": True}) +@validate_span_events(exact_agents={"aws.operation": "CreateBucket"}, count=1) +@validate_span_events(exact_agents={"aws.operation": "PutObject"}, count=1) +@validate_span_events(exact_agents={"aws.operation": "ListObjects"}, count=1) +@validate_span_events(exact_agents={"aws.operation": "GetObject"}, count=1) +@validate_span_events(exact_agents={"aws.operation": "DeleteObject"}, count=1) +@validate_span_events(exact_agents={"aws.operation": "DeleteBucket"}, count=1) +@validate_span_events(exact_agents={"http.url": EXPECTED_BUCKET_URL}, count=3) +@validate_span_events(exact_agents={"http.url": EXPECTED_KEY_URL}, count=3) @validate_transaction_metrics( - 'test_botocore_s3:test_s3', - scoped_metrics=_s3_scoped_metrics, - rollup_metrics=_s3_rollup_metrics, - background_task=True) + "test_botocore_s3:test_s3", + scoped_metrics=_s3_scoped_metrics, + rollup_metrics=_s3_rollup_metrics, + background_task=True, +) @background_task() @moto.mock_s3 def test_s3(): session = botocore.session.get_session() client = session.create_client( - 's3', - region_name=AWS_REGION, - aws_access_key_id=AWS_ACCESS_KEY_ID, - aws_secret_access_key=AWS_SECRET_ACCESS_KEY + "s3", region_name=AWS_REGION, aws_access_key_id=AWS_ACCESS_KEY_ID, aws_secret_access_key=AWS_SECRET_ACCESS_KEY ) # Create bucket resp = client.create_bucket(Bucket=TEST_BUCKET) - assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 + assert resp["ResponseMetadata"]["HTTPStatusCode"] == 200 # Put object - resp = client.put_object( - Bucket=TEST_BUCKET, - Key='hello_world', - Body=b'hello_world_content' - ) - assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 + resp = client.put_object(Bucket=TEST_BUCKET, Key="hello_world", Body=b"hello_world_content") + assert 
resp["ResponseMetadata"]["HTTPStatusCode"] == 200 # List bucket resp = client.list_objects(Bucket=TEST_BUCKET) - assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 - assert len(resp['Contents']) == 1 - assert resp['Contents'][0]['Key'] == 'hello_world' + assert resp["ResponseMetadata"]["HTTPStatusCode"] == 200 + assert len(resp["Contents"]) == 1 + assert resp["Contents"][0]["Key"] == "hello_world" # Get object - resp = client.get_object(Bucket=TEST_BUCKET, Key='hello_world') - assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 - assert resp['Body'].read() == b'hello_world_content' + resp = client.get_object(Bucket=TEST_BUCKET, Key="hello_world") + assert resp["ResponseMetadata"]["HTTPStatusCode"] == 200 + assert resp["Body"].read() == b"hello_world_content" # Delete object - resp = client.delete_object(Bucket=TEST_BUCKET, Key='hello_world') - assert resp['ResponseMetadata']['HTTPStatusCode'] == 204 + resp = client.delete_object(Bucket=TEST_BUCKET, Key="hello_world") + assert resp["ResponseMetadata"]["HTTPStatusCode"] == 204 # Delete bucket resp = client.delete_bucket(Bucket=TEST_BUCKET) - assert resp['ResponseMetadata']['HTTPStatusCode'] == 204 + assert resp["ResponseMetadata"]["HTTPStatusCode"] == 204 diff --git a/tox.ini b/tox.ini index fdb143672..167f7beb6 100644 --- a/tox.ini +++ b/tox.ini @@ -95,7 +95,8 @@ envlist = python-datastore_sqlite-{py27,py37,py38,py39,py310,py311,pypy,pypy37}, memcached-datastore_umemcache-{py27,pypy}, python-external_boto3-{py27,py37,py38,py39,py310,py311}-boto01, - python-external_botocore-{py27,py37,py38,py39,py310,py311}, + python-external_botocore-{py27,py37,py38,py39,py310,py311}-botocorelatest, + python-external_botocore-py310-botocore0125, python-external_feedparser-py27-feedparser{05,06}, python-external_http-{py27,py37,py38,py39,py310,py311,pypy}, python-external_httplib-{py27,py37,py38,py39,py310,py311,pypy,pypy37}, @@ -259,7 +260,8 @@ deps = external_boto3-boto01: boto3<2.0 external_boto3-boto01: moto<2.0 
external_boto3-py27: rsa<4.7.1 - external_botocore: botocore + external_botocore-botocorelatest: botocore + external_botocore-botocore0125: botocore<1.26 external_botocore-{py37,py38,py39,py310,py311}: moto[awslambda,ec2,iam]<3.0 external_botocore-py27: rsa<4.7.1 external_botocore-py27: moto[awslambda,ec2,iam]<2.0 From caa9485861f23c5e9b68aca29decf8b7d8d0f741 Mon Sep 17 00:00:00 2001 From: Hannah Stepanek Date: Wed, 26 Oct 2022 11:41:41 -0700 Subject: [PATCH 013/108] Feature increased custom event limit (#674) * Update reservoir size for custom events. * [Mega-Linter] Apply linters fixes * Increase custom event limit. (#666) * Remove duplicated CUSTOM_EVENT_RESERVOIR_SIZE Co-authored-by: Tim Pansino Co-authored-by: TimPansino Co-authored-by: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Co-authored-by: Uma Annamalai --- newrelic/api/transaction.py | 4 ++-- newrelic/core/config.py | 3 ++- tests/agent_features/test_configuration.py | 8 ++++---- 3 files changed, 8 insertions(+), 7 deletions(-) diff --git a/newrelic/api/transaction.py b/newrelic/api/transaction.py index 1c0c1bd08..08638e056 100644 --- a/newrelic/api/transaction.py +++ b/newrelic/api/transaction.py @@ -59,7 +59,7 @@ DST_NONE, DST_TRANSACTION_TRACER, ) -from newrelic.core.config import DEFAULT_RESERVOIR_SIZE, LOG_EVENT_RESERVOIR_SIZE +from newrelic.core.config import CUSTOM_EVENT_RESERVOIR_SIZE, LOG_EVENT_RESERVOIR_SIZE from newrelic.core.custom_event import create_custom_event from newrelic.core.log_event_node import LogEventNode from newrelic.core.stack_trace import exception_stack @@ -333,7 +333,7 @@ def __init__(self, application, enabled=None, source=None): capacity=self._settings.event_harvest_config.harvest_limits.log_event_data ) else: - self._custom_events = SampledDataSet(capacity=DEFAULT_RESERVOIR_SIZE) + self._custom_events = SampledDataSet(capacity=CUSTOM_EVENT_RESERVOIR_SIZE) self._log_events = SampledDataSet(capacity=LOG_EVENT_RESERVOIR_SIZE) def __del__(self): diff --git 
a/newrelic/core/config.py b/newrelic/core/config.py index 60520c113..4111c7149 100644 --- a/newrelic/core/config.py +++ b/newrelic/core/config.py @@ -52,6 +52,7 @@ # reservoir. Error Events have a different default size. DEFAULT_RESERVOIR_SIZE = 1200 +CUSTOM_EVENT_RESERVOIR_SIZE = 3600 ERROR_EVENT_RESERVOIR_SIZE = 100 SPAN_EVENT_RESERVOIR_SIZE = 2000 LOG_EVENT_RESERVOIR_SIZE = 10000 @@ -738,7 +739,7 @@ def default_host(license_key): ) _settings.event_harvest_config.harvest_limits.custom_event_data = _environ_as_int( - "NEW_RELIC_CUSTOM_INSIGHTS_EVENTS_MAX_SAMPLES_STORED", DEFAULT_RESERVOIR_SIZE + "NEW_RELIC_CUSTOM_INSIGHTS_EVENTS_MAX_SAMPLES_STORED", CUSTOM_EVENT_RESERVOIR_SIZE ) _settings.event_harvest_config.harvest_limits.span_event_data = _environ_as_int( diff --git a/tests/agent_features/test_configuration.py b/tests/agent_features/test_configuration.py index 5846e3808..5df69d71e 100644 --- a/tests/agent_features/test_configuration.py +++ b/tests/agent_features/test_configuration.py @@ -438,12 +438,12 @@ def test_delete_setting_parent(): TSetting("event_harvest_config.harvest_limits.error_event_data", 100, 100), ), ( - TSetting("custom_insights_events.max_samples_stored", 1200, 1200), - TSetting("event_harvest_config.harvest_limits.custom_event_data", 9999, 1200), + TSetting("custom_insights_events.max_samples_stored", 3600, 3600), + TSetting("event_harvest_config.harvest_limits.custom_event_data", 9999, 3600), ), ( - TSetting("custom_insights_events.max_samples_stored", 9999, 1200), - TSetting("event_harvest_config.harvest_limits.custom_event_data", 1200, 1200), + TSetting("custom_insights_events.max_samples_stored", 9999, 3600), + TSetting("event_harvest_config.harvest_limits.custom_event_data", 3600, 3600), ), ( TSetting("application_logging.forwarding.max_samples_stored", 10000, 10000), From 1d4e3e536790d644751743fbaa1a47175f4fcc84 Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Wed, 26 Oct 2022 12:14:50 
-0700 Subject: [PATCH 014/108] Add python 3.11 stable release to GHA (#671) --- .github/actions/setup-python-matrix/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/setup-python-matrix/action.yml b/.github/actions/setup-python-matrix/action.yml index c507278b4..0f6b64389 100644 --- a/.github/actions/setup-python-matrix/action.yml +++ b/.github/actions/setup-python-matrix/action.yml @@ -35,7 +35,7 @@ runs: - uses: actions/setup-python@v3 with: - python-version: "3.11-dev" + python-version: "3.11" architecture: x64 - uses: actions/setup-python@v3 From 1279bdfaba39b20359bf9f43defa52a3f18d7931 Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Thu, 27 Oct 2022 09:43:37 -0700 Subject: [PATCH 015/108] Double kafka test runners (#677) Co-authored-by: Hannah Stepanek --- .github/workflows/tests.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 892bfce9a..f59b55f66 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -435,12 +435,12 @@ jobs: kafka: env: - TOTAL_GROUPS: 2 + TOTAL_GROUPS: 4 strategy: fail-fast: false matrix: - group-number: [1, 2] + group-number: [1, 2, 3, 4] runs-on: ubuntu-latest timeout-minutes: 30 From d28f0d44c10a186d535337c68b8fa8b2c6cf582b Mon Sep 17 00:00:00 2001 From: Hannah Stepanek Date: Fri, 4 Nov 2022 17:17:47 -0700 Subject: [PATCH 016/108] Fix failing flask_rest tests (#683) * Pin flask-restx in flask_rest tests for 2.7 flask-restx dropped support for 2.7 in 1.0.1. * Drop support for flask-restplus flask-restx replaced flask-restplus. flask-restplus's latest version supports 3.6 which we don't even support anymore. 
--- newrelic/config.py | 5 ----- tests/component_flask_rest/test_application.py | 4 +--- tox.ini | 13 +++++++------ 3 files changed, 8 insertions(+), 14 deletions(-) diff --git a/newrelic/config.py b/newrelic/config.py index 4e0912db8..292044a29 100644 --- a/newrelic/config.py +++ b/newrelic/config.py @@ -2162,11 +2162,6 @@ def _process_module_builtin_defaults(): ) _process_module_definition("flask_restful", "newrelic.hooks.component_flask_rest", "instrument_flask_rest") - _process_module_definition( - "flask_restplus.api", - "newrelic.hooks.component_flask_rest", - "instrument_flask_rest", - ) _process_module_definition( "flask_restx.api", "newrelic.hooks.component_flask_rest", diff --git a/tests/component_flask_rest/test_application.py b/tests/component_flask_rest/test_application.py index 67d60bc53..934eb5784 100644 --- a/tests/component_flask_rest/test_application.py +++ b/tests/component_flask_rest/test_application.py @@ -30,14 +30,12 @@ TEST_APPLICATION_PREFIX = "_test_application.create_app." 
if six.PY3 else "_test_application" -@pytest.fixture(params=["flask_restful", "flask_restplus", "flask_restx"]) +@pytest.fixture(params=["flask_restful", "flask_restx"]) def application(request): from _test_application import get_test_application if request.param == "flask_restful": import flask_restful as module - elif request.param == "flask_restplus": - import flask_restplus as module elif request.param == "flask_restx": import flask_restx as module else: diff --git a/tox.ini b/tox.ini index 167f7beb6..e06279107 100644 --- a/tox.ini +++ b/tox.ini @@ -62,7 +62,8 @@ envlist = gearman-application_gearman-{py27,pypy}, python-component_djangorestframework-py27-djangorestframework0300, python-component_djangorestframework-{py37,py38,py39,py310,py311}-djangorestframeworklatest, - python-component_flask_rest-{py27,py37,py38,py39,pypy,pypy37}, + python-component_flask_rest-{py37,py38,py39,pypy37}-flaskrestxlatest, + python-component_flask_rest-{py27,pypy}-flaskrestx051, python-component_graphqlserver-{py37,py38,py39,py310,py311}, python-component_tastypie-{py27,pypy}-tastypie0143, python-component_tastypie-{py37,py38,py39,pypy37}-tastypie{0143,latest}, @@ -205,12 +206,12 @@ deps = component_djangorestframework-djangorestframework0300: djangorestframework < 3.1 component_djangorestframework-djangorestframeworklatest: Django component_djangorestframework-djangorestframeworklatest: djangorestframework - component_flask_rest: flask<0.13 + component_flask_rest: flask component_flask_rest: flask-restful - component_flask_rest: flask-restplus - component_flask_rest: flask-restx - component_flask_rest: jinja2<3.1 - component_flask_rest: itsdangerous<2.1 + component_flask_rest: jinja2 + component_flask_rest: itsdangerous + component_flask_rest-flaskrestxlatest: flask-restx + component_flask_rest-flaskrestx051: flask-restx<1.0 component_graphqlserver: graphql-server[sanic,flask]==3.0.0b5 component_graphqlserver: sanic>20 component_graphqlserver: Flask From 
cd7e1505a9248baec9150a879fcd5a606f5ea6b2 Mon Sep 17 00:00:00 2001 From: Hannah Stepanek Date: Mon, 7 Nov 2022 12:19:59 -0800 Subject: [PATCH 017/108] Fix failing botocore tests (#684) * Change queue url for botocore>=1.29.0 botocore >=1.29.0 uses sqs.us-east-1.amazonaws.com url instead of queue.amazonaws.com. * Use tuple version instead of str * Change botocore129->botocore128 --- tests/external_botocore/test_botocore_sqs.py | 121 +++++++++---------- tox.ini | 4 +- 2 files changed, 63 insertions(+), 62 deletions(-) diff --git a/tests/external_botocore/test_botocore_sqs.py b/tests/external_botocore/test_botocore_sqs.py index 46482c675..a009e9f0b 100644 --- a/tests/external_botocore/test_botocore_sqs.py +++ b/tests/external_botocore/test_botocore_sqs.py @@ -14,132 +14,131 @@ import sys import uuid -import pytest import botocore.session import moto +import pytest +from testing_support.fixtures import ( + override_application_settings, + validate_transaction_metrics, +) +from testing_support.validators.validate_span_events import validate_span_events from newrelic.api.background_task import background_task -from testing_support.fixtures import (validate_transaction_metrics, - override_application_settings) -from testing_support.validators.validate_span_events import ( - validate_span_events) +from newrelic.common.package_version_utils import get_package_version -MOTO_VERSION = tuple(int(v) for v in moto.__version__.split('.')[:3]) +MOTO_VERSION = tuple(int(v) for v in moto.__version__.split(".")[:3]) # patch earlier versions of moto to support py37 if sys.version_info >= (3, 7) and MOTO_VERSION <= (1, 3, 1): import re + moto.packages.responses.responses.re._pattern_type = re.Pattern -AWS_ACCESS_KEY_ID = 'AAAAAAAAAAAACCESSKEY' -AWS_SECRET_ACCESS_KEY = 'AAAAAASECRETKEY' -AWS_REGION = 'us-east-1' +url = "sqs.us-east-1.amazonaws.com" +botocore_version = tuple([int(n) for n in get_package_version("botocore").split(".")]) +if botocore_version < (1, 29, 0): + url = 
"queue.amazonaws.com" + +AWS_ACCESS_KEY_ID = "AAAAAAAAAAAACCESSKEY" +AWS_SECRET_ACCESS_KEY = "AAAAAASECRETKEY" +AWS_REGION = "us-east-1" -TEST_QUEUE = 'python-agent-test-%s' % uuid.uuid4() +TEST_QUEUE = "python-agent-test-%s" % uuid.uuid4() _sqs_scoped_metrics = [ - ('MessageBroker/SQS/Queue/Produce/Named/%s' - % TEST_QUEUE, 2), - ('External/queue.amazonaws.com/botocore/POST', 3), + ("MessageBroker/SQS/Queue/Produce/Named/%s" % TEST_QUEUE, 2), + ("External/%s/botocore/POST" % url, 3), ] _sqs_rollup_metrics = [ - ('MessageBroker/SQS/Queue/Produce/Named/%s' - % TEST_QUEUE, 2), - ('MessageBroker/SQS/Queue/Consume/Named/%s' - % TEST_QUEUE, 1), - ('External/all', 3), - ('External/allOther', 3), - ('External/queue.amazonaws.com/all', 3), - ('External/queue.amazonaws.com/botocore/POST', 3), + ("MessageBroker/SQS/Queue/Produce/Named/%s" % TEST_QUEUE, 2), + ("MessageBroker/SQS/Queue/Consume/Named/%s" % TEST_QUEUE, 1), + ("External/all", 3), + ("External/allOther", 3), + ("External/%s/all" % url, 3), + ("External/%s/botocore/POST" % url, 3), ] _sqs_scoped_metrics_malformed = [ - ('MessageBroker/SQS/Queue/Produce/Named/Unknown', 1), + ("MessageBroker/SQS/Queue/Produce/Named/Unknown", 1), ] _sqs_rollup_metrics_malformed = [ - ('MessageBroker/SQS/Queue/Produce/Named/Unknown', 1), + ("MessageBroker/SQS/Queue/Produce/Named/Unknown", 1), ] -@override_application_settings({'distributed_tracing.enabled': True}) -@validate_span_events(exact_agents={'aws.operation': 'CreateQueue'}, count=1) -@validate_span_events(exact_agents={'aws.operation': 'SendMessage'}, count=1) -@validate_span_events( - exact_agents={'aws.operation': 'ReceiveMessage'}, count=1) -@validate_span_events( - exact_agents={'aws.operation': 'SendMessageBatch'}, count=1) -@validate_span_events(exact_agents={'aws.operation': 'PurgeQueue'}, count=1) -@validate_span_events(exact_agents={'aws.operation': 'DeleteQueue'}, count=1) +@override_application_settings({"distributed_tracing.enabled": True}) 
+@validate_span_events(exact_agents={"aws.operation": "CreateQueue"}, count=1) +@validate_span_events(exact_agents={"aws.operation": "SendMessage"}, count=1) +@validate_span_events(exact_agents={"aws.operation": "ReceiveMessage"}, count=1) +@validate_span_events(exact_agents={"aws.operation": "SendMessageBatch"}, count=1) +@validate_span_events(exact_agents={"aws.operation": "PurgeQueue"}, count=1) +@validate_span_events(exact_agents={"aws.operation": "DeleteQueue"}, count=1) @validate_transaction_metrics( - 'test_botocore_sqs:test_sqs', - scoped_metrics=_sqs_scoped_metrics, - rollup_metrics=_sqs_rollup_metrics, - background_task=True) + "test_botocore_sqs:test_sqs", + scoped_metrics=_sqs_scoped_metrics, + rollup_metrics=_sqs_rollup_metrics, + background_task=True, +) @background_task() @moto.mock_sqs def test_sqs(): session = botocore.session.get_session() client = session.create_client( - 'sqs', - region_name=AWS_REGION, - aws_access_key_id=AWS_ACCESS_KEY_ID, - aws_secret_access_key=AWS_SECRET_ACCESS_KEY + "sqs", region_name=AWS_REGION, aws_access_key_id=AWS_ACCESS_KEY_ID, aws_secret_access_key=AWS_SECRET_ACCESS_KEY ) # Create queue resp = client.create_queue(QueueName=TEST_QUEUE) - assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 + assert resp["ResponseMetadata"]["HTTPStatusCode"] == 200 # QueueUrl is needed for rest of methods. 
- QUEUE_URL = resp['QueueUrl'] + QUEUE_URL = resp["QueueUrl"] # Send message - resp = client.send_message(QueueUrl=QUEUE_URL, MessageBody='hello_world') - assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 + resp = client.send_message(QueueUrl=QUEUE_URL, MessageBody="hello_world") + assert resp["ResponseMetadata"]["HTTPStatusCode"] == 200 # Receive message resp = client.receive_message(QueueUrl=QUEUE_URL) - assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 + assert resp["ResponseMetadata"]["HTTPStatusCode"] == 200 # Send message batch messages = [ - {'Id': '1', 'MessageBody': 'message 1'}, - {'Id': '2', 'MessageBody': 'message 2'}, - {'Id': '3', 'MessageBody': 'message 3'}, + {"Id": "1", "MessageBody": "message 1"}, + {"Id": "2", "MessageBody": "message 2"}, + {"Id": "3", "MessageBody": "message 3"}, ] resp = client.send_message_batch(QueueUrl=QUEUE_URL, Entries=messages) - assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 + assert resp["ResponseMetadata"]["HTTPStatusCode"] == 200 # Purge queue resp = client.purge_queue(QueueUrl=QUEUE_URL) - assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 + assert resp["ResponseMetadata"]["HTTPStatusCode"] == 200 # Delete queue resp = client.delete_queue(QueueUrl=QUEUE_URL) - assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 + assert resp["ResponseMetadata"]["HTTPStatusCode"] == 200 -@override_application_settings({'distributed_tracing.enabled': True}) +@override_application_settings({"distributed_tracing.enabled": True}) @validate_transaction_metrics( - 'test_botocore_sqs:test_sqs_malformed', - scoped_metrics=_sqs_scoped_metrics_malformed, - rollup_metrics=_sqs_rollup_metrics_malformed, - background_task=True) + "test_botocore_sqs:test_sqs_malformed", + scoped_metrics=_sqs_scoped_metrics_malformed, + rollup_metrics=_sqs_rollup_metrics_malformed, + background_task=True, +) @background_task() @moto.mock_sqs def test_sqs_malformed(): session = botocore.session.get_session() client = 
session.create_client( - 'sqs', - region_name=AWS_REGION, - aws_access_key_id=AWS_ACCESS_KEY_ID, - aws_secret_access_key=AWS_SECRET_ACCESS_KEY + "sqs", region_name=AWS_REGION, aws_access_key_id=AWS_ACCESS_KEY_ID, aws_secret_access_key=AWS_SECRET_ACCESS_KEY ) # Malformed send message, uses arg instead of kwarg with pytest.raises(TypeError): - client.send_message('https://fake-url/', MessageBody='hello_world') + client.send_message("https://fake-url/", MessageBody="hello_world") diff --git a/tox.ini b/tox.ini index e06279107..118c22acc 100644 --- a/tox.ini +++ b/tox.ini @@ -96,7 +96,8 @@ envlist = python-datastore_sqlite-{py27,py37,py38,py39,py310,py311,pypy,pypy37}, memcached-datastore_umemcache-{py27,pypy}, python-external_boto3-{py27,py37,py38,py39,py310,py311}-boto01, - python-external_botocore-{py27,py37,py38,py39,py310,py311}-botocorelatest, + python-external_botocore-{py37,py38,py39,py310,py311}-botocorelatest, + python-external_botocore-{py311}-botocore128, python-external_botocore-py310-botocore0125, python-external_feedparser-py27-feedparser{05,06}, python-external_http-{py27,py37,py38,py39,py310,py311,pypy}, @@ -262,6 +263,7 @@ deps = external_boto3-boto01: moto<2.0 external_boto3-py27: rsa<4.7.1 external_botocore-botocorelatest: botocore + external_botocore-botocore128: botocore<1.29 external_botocore-botocore0125: botocore<1.26 external_botocore-{py37,py38,py39,py310,py311}: moto[awslambda,ec2,iam]<3.0 external_botocore-py27: rsa<4.7.1 From d4ff1ece843ca5dacd198faf819e8235072781d1 Mon Sep 17 00:00:00 2001 From: Hannah Stepanek Date: Mon, 7 Nov 2022 12:43:40 -0800 Subject: [PATCH 018/108] Add record_log_event to public api (#681) --- newrelic/agent.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/newrelic/agent.py b/newrelic/agent.py index 9c4ce1035..b0bf115e2 100644 --- a/newrelic/agent.py +++ b/newrelic/agent.py @@ -58,6 +58,7 @@ from newrelic.api.transaction import record_custom_event as __record_custom_event from newrelic.api.transaction import 
record_custom_metric as __record_custom_metric from newrelic.api.transaction import record_custom_metrics as __record_custom_metrics +from newrelic.api.transaction import record_log_event as __record_log_event from newrelic.api.transaction import set_background_task as __set_background_task from newrelic.api.transaction import set_transaction_name as __set_transaction_name from newrelic.api.transaction import suppress_apdex_metric as __suppress_apdex_metric @@ -242,6 +243,7 @@ def __asgi_application(*args, **kwargs): record_custom_metric = __wrap_api_call(__record_custom_metric, "record_custom_metric") record_custom_metrics = __wrap_api_call(__record_custom_metrics, "record_custom_metrics") record_custom_event = __wrap_api_call(__record_custom_event, "record_custom_event") +record_log_event = __wrap_api_call(__record_log_event, "record_log_event") accept_distributed_trace_payload = __wrap_api_call( __accept_distributed_trace_payload, "accept_distributed_trace_payload" ) From 062e7025ede1a6fb51a671c1d12b1a77151d578c Mon Sep 17 00:00:00 2001 From: Uma Annamalai Date: Thu, 10 Nov 2022 13:01:38 -0800 Subject: [PATCH 019/108] Add patch for sentry SDK to correct ASGI v2/v3 detection. (#680) * Add patch for sentry to correct ASGI v2/v3 detection. 
Co-authored-by: Tim Pansino Co-authored-by: Lalleh Rafeei Co-authored-by: Hannah Stepanek * [Mega-Linter] Apply linters fixes Co-authored-by: Tim Pansino Co-authored-by: Lalleh Rafeei Co-authored-by: Hannah Stepanek Co-authored-by: umaannamalai Co-authored-by: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> --- newrelic/config.py | 4 +++ newrelic/hooks/component_sentry.py | 41 ++++++++++++++++++++++++++++++ 2 files changed, 45 insertions(+) create mode 100644 newrelic/hooks/component_sentry.py diff --git a/newrelic/config.py b/newrelic/config.py index 292044a29..f0b638cd4 100644 --- a/newrelic/config.py +++ b/newrelic/config.py @@ -2174,6 +2174,10 @@ def _process_module_builtin_defaults(): "instrument_graphqlserver", ) + _process_module_definition( + "sentry_sdk.integrations.asgi", "newrelic.hooks.component_sentry", "instrument_sentry_sdk_integrations_asgi" + ) + # _process_module_definition('web.application', # 'newrelic.hooks.framework_webpy') # _process_module_definition('web.template', diff --git a/newrelic/hooks/component_sentry.py b/newrelic/hooks/component_sentry.py new file mode 100644 index 000000000..cc54efa9b --- /dev/null +++ b/newrelic/hooks/component_sentry.py @@ -0,0 +1,41 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from newrelic.common.object_wrapper import FunctionWrapper, wrap_function_wrapper + +# This is NOT a fully-featured instrumentation for the sentry SDK. 
Instead +# this is a monkey-patch of the sentry SDK to work around a bug that causes +# improper ASGI 2/3 version detection when inspecting our wrappers. We fix this +# by manually unwrapping the application when version detection is run. + + +def bind__looks_like_asgi3(app): + return app + + +def wrap__looks_like_asgi3(wrapped, instance, args, kwargs): + try: + app = bind__looks_like_asgi3(*args, **kwargs) + except Exception: + return wrapped(*args, **kwargs) + + while isinstance(app, FunctionWrapper) and hasattr(app, "__wrapped__"): + app = app.__wrapped__ + + return wrapped(app) + + +def instrument_sentry_sdk_integrations_asgi(module): + if hasattr(module, "_looks_like_asgi3"): + wrap_function_wrapper(module, "_looks_like_asgi3", wrap__looks_like_asgi3) From f014708b31f135e2d8b85d65f804fc9ea333eb2f Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Thu, 10 Nov 2022 15:36:55 -0800 Subject: [PATCH 020/108] Update pip install command (#688) --- tox.ini | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 118c22acc..f8e2b2e0c 100644 --- a/tox.ini +++ b/tox.ini @@ -412,7 +412,9 @@ commands = py.test -v [] install_command= - pip install {opts} {packages} + # Older pip versions that support python 2 have issues with using the cache directory and cause crashes on GitHub Actions + {py27,pypy}: pip install --no-cache-dir {opts} {packages} + !{py27,pypy}: pip install {opts} {packages} extras = agent_streaming: infinite-tracing From 52e101125c45b9cbef5c9fb19a055a7c1b89ece9 Mon Sep 17 00:00:00 2001 From: Lalleh Rafeei <84813886+lrafeei@users.noreply.github.com> Date: Thu, 10 Nov 2022 19:28:39 -0800 Subject: [PATCH 021/108] Validator transfer from fixtures.py to validators directory, Part 1 (#672) * Move validate_transaction_metrics to validators directory * Comment out original validate_transaction_metrics from fixtures.py * Move validate_time_metrics_outside_transaction to validators 
directory * Move validate_internal_metrics into validators directory and fixed validate_transaction_metrics * Move validate_transaction_errors into validators directory * Move validate_application_errors into validators directory * Move validate_custom_parameters into validators directory * Move validate_synthetics_event into validators directory * Move validate_transaction_event_attributes into validators directory * Move validate_non_transaction_error_event into validators directory * Fix import issues * Fix (more) import issues * Fix validate_transaction_metrics import in aioredis * Remove commented code from fixtures.py --- tests/adapter_cheroot/test_wsgi.py | 2 +- tests/adapter_daphne/test_daphne.py | 12 +- tests/adapter_hypercorn/test_hypercorn.py | 14 +- tests/adapter_uvicorn/test_uvicorn.py | 4 +- .../_test_async_coroutine_trace.py | 4 +- tests/agent_features/test_asgi_browser.py | 6 +- .../test_asgi_distributed_tracing.py | 4 +- tests/agent_features/test_asgi_transaction.py | 12 +- .../test_asgi_w3c_trace_context.py | 6 +- .../test_async_context_propagation.py | 3 +- tests/agent_features/test_attribute.py | 4 +- .../test_attributes_in_action.py | 4 +- tests/agent_features/test_browser.py | 6 +- tests/agent_features/test_coroutine_trace.py | 7 +- .../test_coroutine_transaction.py | 2 + .../test_distributed_tracing.py | 6 +- tests/agent_features/test_error_events.py | 4 +- .../test_event_loop_wait_time.py | 6 +- tests/agent_features/test_function_trace.py | 5 +- .../agent_features/test_high_security_mode.py | 10 +- .../test_ignore_expected_errors.py | 8 +- tests/agent_features/test_lambda_handler.py | 279 +++++----- tests/agent_features/test_notice_error.py | 4 +- tests/agent_features/test_span_events.py | 8 +- .../test_supportability_metrics.py | 2 +- tests/agent_features/test_synthetics.py | 2 +- tests/agent_features/test_time_trace.py | 4 +- ...n_event_data_and_some_browser_stuff_too.py | 272 +++++----- tests/agent_features/test_transaction_name.py | 2 +- 
.../agent_features/test_w3c_trace_context.py | 6 +- tests/agent_features/test_web_transaction.py | 5 +- tests/agent_features/test_wsgi_attributes.py | 8 +- tests/agent_unittests/test_agent_connect.py | 3 +- tests/application_celery/test_celery.py | 2 +- .../test_application.py | 6 +- .../component_flask_rest/test_application.py | 8 +- tests/component_graphqlserver/test_graphql.py | 8 +- tests/component_tastypie/test_application.py | 5 +- .../test_context_propagation.py | 2 +- .../cross_agent/test_aws_utilization_data.py | 2 +- .../test_azure_utilization_data.py | 2 +- .../test_boot_id_utilization_data.py | 2 +- tests/cross_agent/test_distributed_tracing.py | 3 +- .../cross_agent/test_gcp_utilization_data.py | 2 +- tests/cross_agent/test_lambda_event_source.py | 4 +- .../cross_agent/test_pcf_utilization_data.py | 2 +- tests/cross_agent/test_w3c_trace_context.py | 3 +- .../test_custom_conn_pool.py | 12 +- .../test_execute_command.py | 11 +- tests/datastore_aioredis/test_get_and_set.py | 10 +- tests/datastore_aioredis/test_multiple_dbs.py | 14 +- tests/datastore_aioredis/test_transactions.py | 12 +- .../datastore_aredis/test_custom_conn_pool.py | 4 +- .../datastore_aredis/test_execute_command.py | 4 +- tests/datastore_aredis/test_get_and_set.py | 4 +- tests/datastore_aredis/test_multiple_dbs.py | 6 +- tests/datastore_asyncpg/test_multiple_dbs.py | 6 +- tests/datastore_asyncpg/test_query.py | 6 +- tests/datastore_bmemcached/test_memcache.py | 2 +- .../test_elasticsearch.py | 5 +- tests/datastore_elasticsearch/test_mget.py | 4 +- .../test_multiple_dbs.py | 4 +- tests/datastore_memcache/test_memcache.py | 4 +- tests/datastore_memcache/test_multiple_dbs.py | 4 +- tests/datastore_mysql/test_database.py | 2 +- tests/datastore_postgresql/test_database.py | 2 +- tests/datastore_psycopg2/test_async.py | 6 +- tests/datastore_psycopg2/test_cursor.py | 4 +- tests/datastore_psycopg2/test_multiple_dbs.py | 4 +- tests/datastore_psycopg2/test_register.py | 4 +- 
tests/datastore_psycopg2/test_rollback.py | 4 +- tests/datastore_psycopg2cffi/test_database.py | 5 +- .../test_pyelasticsearch.py | 4 +- tests/datastore_pylibmc/test_memcache.py | 2 +- tests/datastore_pymemcache/test_memcache.py | 2 +- tests/datastore_pymongo/test_pymongo.py | 7 +- tests/datastore_pymysql/test_database.py | 3 +- tests/datastore_pysolr/test_solr.py | 2 +- .../datastore_redis/test_custom_conn_pool.py | 4 +- tests/datastore_redis/test_execute_command.py | 4 +- tests/datastore_redis/test_get_and_set.py | 4 +- tests/datastore_redis/test_multiple_dbs.py | 4 +- tests/datastore_redis/test_rb.py | 4 +- tests/datastore_solrpy/test_solr.py | 2 +- tests/datastore_sqlite/test_database.py | 2 +- tests/datastore_umemcache/test_memcache.py | 2 +- tests/external_boto3/test_boto3_iam.py | 3 +- tests/external_boto3/test_boto3_s3.py | 6 +- tests/external_boto3/test_boto3_sns.py | 6 +- .../test_botocore_dynamodb.py | 3 +- tests/external_botocore/test_botocore_ec2.py | 3 +- tests/external_botocore/test_botocore_s3.py | 6 +- tests/external_botocore/test_botocore_sqs.py | 8 +- tests/external_feedparser/test_feedparser.py | 2 +- tests/external_http/test_http.py | 2 +- tests/external_httplib/test_httplib.py | 2 +- tests/external_httplib/test_urllib.py | 3 +- tests/external_httplib/test_urllib2.py | 3 +- tests/external_httplib2/test_httplib2.py | 2 +- tests/external_httpx/test_client.py | 4 +- tests/external_requests/test_requests.py | 5 +- tests/external_urllib3/test_urllib3.py | 5 +- tests/framework_aiohttp/test_client.py | 4 +- .../test_client_async_await.py | 3 +- tests/framework_aiohttp/test_client_cat.py | 8 +- tests/framework_aiohttp/test_externals.py | 6 +- tests/framework_aiohttp/test_middleware.py | 8 +- tests/framework_aiohttp/test_server.py | 12 +- tests/framework_aiohttp/test_server_cat.py | 4 +- tests/framework_ariadne/test_application.py | 12 +- .../test_application_async.py | 3 +- tests/framework_ariadne/test_asgi.py | 3 +- 
tests/framework_ariadne/test_wsgi.py | 3 +- tests/framework_bottle/test_application.py | 7 +- tests/framework_cherrypy/test_application.py | 6 +- tests/framework_cherrypy/test_dispatch.py | 2 +- tests/framework_cherrypy/test_resource.py | 2 +- tests/framework_cherrypy/test_routes.py | 2 +- tests/framework_django/test_application.py | 6 +- .../framework_django/test_asgi_application.py | 6 +- tests/framework_falcon/test_application.py | 6 +- tests/framework_fastapi/test_application.py | 2 +- tests/framework_flask/test_application.py | 6 +- tests/framework_flask/test_blueprints.py | 5 +- tests/framework_flask/test_compress.py | 5 +- tests/framework_flask/test_middleware.py | 5 +- tests/framework_flask/test_not_found.py | 4 +- tests/framework_flask/test_user_exceptions.py | 4 +- tests/framework_flask/test_views.py | 10 +- tests/framework_graphene/test_application.py | 8 +- tests/framework_graphql/test_application.py | 8 +- .../test_application_async.py | 3 +- tests/framework_grpc/test_clients.py | 4 +- .../test_distributed_tracing.py | 5 +- tests/framework_grpc/test_server.py | 11 +- .../test_append_slash_app.py | 6 +- tests/framework_pyramid/test_application.py | 5 +- tests/framework_pyramid/test_cornice.py | 4 +- tests/framework_sanic/test_application.py | 15 +- .../framework_sanic/test_cross_application.py | 148 ++--- tests/framework_starlette/test_application.py | 8 +- tests/framework_starlette/test_bg_tasks.py | 4 +- tests/framework_starlette/test_graphql.py | 3 +- .../framework_strawberry/test_application.py | 12 +- .../test_application_async.py | 3 +- tests/framework_strawberry/test_asgi.py | 3 +- .../framework_tornado/test_custom_handler.py | 2 +- tests/framework_tornado/test_externals.py | 6 +- tests/framework_tornado/test_inbound_cat.py | 5 +- tests/framework_tornado/test_server.py | 13 +- tests/logger_logging/test_metrics.py | 5 +- tests/logger_logging/test_settings.py | 7 +- tests/logger_loguru/test_metrics.py | 4 +- tests/logger_loguru/test_settings.py | 7 
+- .../test_consumer.py | 8 +- .../test_producer.py | 10 +- .../test_serialization.py | 4 +- .../test_consumer.py | 8 +- .../test_producer.py | 10 +- .../test_serialization.py | 4 + tests/messagebroker_pika/test_cat.py | 3 +- .../test_distributed_tracing.py | 4 +- .../test_pika_async_connection_consume.py | 4 +- .../test_pika_blocking_connection_consume.py | 3 +- ...a_blocking_connection_consume_generator.py | 4 +- tests/messagebroker_pika/test_pika_produce.py | 2 +- .../test_pika_supportability.py | 2 +- tests/template_mako/test_mako.py | 2 +- tests/testing_support/fixtures.py | 507 +----------------- .../validators/validate_application_errors.py | 56 ++ .../validators/validate_custom_parameters.py | 48 ++ .../validators/validate_internal_metrics.py | 64 +++ .../validate_non_transaction_error_event.py | 71 +++ .../validators/validate_synthetics_event.py | 71 +++ ...lidate_time_metrics_outside_transaction.py | 93 ++++ .../validators/validate_transaction_errors.py | 76 +++ .../validate_transaction_event_attributes.py | 53 ++ .../validate_transaction_metrics.py | 135 +++++ 178 files changed, 1509 insertions(+), 1203 deletions(-) create mode 100644 tests/testing_support/validators/validate_application_errors.py create mode 100644 tests/testing_support/validators/validate_custom_parameters.py create mode 100644 tests/testing_support/validators/validate_internal_metrics.py create mode 100644 tests/testing_support/validators/validate_non_transaction_error_event.py create mode 100644 tests/testing_support/validators/validate_synthetics_event.py create mode 100644 tests/testing_support/validators/validate_time_metrics_outside_transaction.py create mode 100644 tests/testing_support/validators/validate_transaction_errors.py create mode 100644 tests/testing_support/validators/validate_transaction_event_attributes.py create mode 100644 tests/testing_support/validators/validate_transaction_metrics.py diff --git a/tests/adapter_cheroot/test_wsgi.py 
b/tests/adapter_cheroot/test_wsgi.py index b59a12c2d..49858e2f5 100644 --- a/tests/adapter_cheroot/test_wsgi.py +++ b/tests/adapter_cheroot/test_wsgi.py @@ -16,7 +16,7 @@ import cheroot.wsgi import newrelic.api.transaction -from testing_support.fixtures import validate_transaction_metrics +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics def get_open_port(): diff --git a/tests/adapter_daphne/test_daphne.py b/tests/adapter_daphne/test_daphne.py index 471e0335b..e5f9dd832 100644 --- a/tests/adapter_daphne/test_daphne.py +++ b/tests/adapter_daphne/test_daphne.py @@ -21,8 +21,6 @@ from testing_support.fixtures import ( override_application_settings, raise_background_exceptions, - validate_transaction_errors, - validate_transaction_metrics, wait_for_background_threads, ) from testing_support.sample_asgi_applications import ( @@ -32,6 +30,12 @@ simple_app_v3, ) from testing_support.util import get_open_port +from testing_support.validators.validate_transaction_errors import ( + validate_transaction_errors, +) +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) from newrelic.common.object_names import callable_name @@ -126,7 +130,7 @@ def test_daphne_200(port, app): @raise_background_exceptions() @wait_for_background_threads() def response(): - return urlopen("http://localhost:%d" % port, timeout=10) + return urlopen("http://localhost:%d" % port, timeout=10) # nosec assert response().status == 200 @@ -139,7 +143,7 @@ def test_daphne_500(port, app): @wait_for_background_threads() def _test(): try: - urlopen("http://localhost:%d/exc" % port) + urlopen("http://localhost:%d/exc" % port) # nosec except HTTPError: pass diff --git a/tests/adapter_hypercorn/test_hypercorn.py b/tests/adapter_hypercorn/test_hypercorn.py index 12c3d7d6c..8b53eee0a 100644 --- a/tests/adapter_hypercorn/test_hypercorn.py +++ b/tests/adapter_hypercorn/test_hypercorn.py @@ -22,8 +22,6 @@ from 
testing_support.fixtures import ( override_application_settings, raise_background_exceptions, - validate_transaction_errors, - validate_transaction_metrics, wait_for_background_threads, ) from testing_support.sample_asgi_applications import ( @@ -32,6 +30,12 @@ simple_app_v2_raw, ) from testing_support.util import get_open_port +from testing_support.validators.validate_transaction_errors import ( + validate_transaction_errors, +) +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) from newrelic.api.transaction import ignore_transaction from newrelic.common.object_names import callable_name @@ -115,7 +119,7 @@ def wait_for_port(port, retries=10): status = None for _ in range(retries): try: - status = urlopen("http://localhost:%d/ignored" % port, timeout=1).status + status = urlopen("http://localhost:%d/ignored" % port, timeout=1).status # nosec assert status == 200 return except Exception as e: @@ -139,7 +143,7 @@ def test_hypercorn_200(port, app): @raise_background_exceptions() @wait_for_background_threads() def response(): - return urlopen("http://localhost:%d" % port, timeout=10) + return urlopen("http://localhost:%d" % port, timeout=10) # nosec assert response().status == 200 @@ -152,6 +156,6 @@ def test_hypercorn_500(port, app): @wait_for_background_threads() def _test(): with pytest.raises(HTTPError): - urlopen("http://localhost:%d/exc" % port) + urlopen("http://localhost:%d/exc" % port) # nosec _test() diff --git a/tests/adapter_uvicorn/test_uvicorn.py b/tests/adapter_uvicorn/test_uvicorn.py index e3261f4e8..93d155aa8 100644 --- a/tests/adapter_uvicorn/test_uvicorn.py +++ b/tests/adapter_uvicorn/test_uvicorn.py @@ -23,10 +23,10 @@ from testing_support.fixtures import ( override_application_settings, raise_background_exceptions, - validate_transaction_errors, - validate_transaction_metrics, wait_for_background_threads, ) +from testing_support.validators.validate_transaction_metrics import 
validate_transaction_metrics +from testing_support.validators.validate_transaction_errors import validate_transaction_errors from testing_support.sample_asgi_applications import ( AppWithCall, AppWithCallRaw, diff --git a/tests/agent_features/_test_async_coroutine_trace.py b/tests/agent_features/_test_async_coroutine_trace.py index 11f018535..51b81f5f6 100644 --- a/tests/agent_features/_test_async_coroutine_trace.py +++ b/tests/agent_features/_test_async_coroutine_trace.py @@ -18,8 +18,8 @@ import time import pytest -from testing_support.fixtures import ( - capture_transaction_metrics, +from testing_support.fixtures import capture_transaction_metrics +from testing_support.validators.validate_transaction_metrics import ( validate_transaction_metrics, ) diff --git a/tests/agent_features/test_asgi_browser.py b/tests/agent_features/test_asgi_browser.py index a1c3daeb7..1e718e1e0 100644 --- a/tests/agent_features/test_asgi_browser.py +++ b/tests/agent_features/test_asgi_browser.py @@ -18,9 +18,11 @@ import six from bs4 import BeautifulSoup from testing_support.asgi_testing import AsgiTest -from testing_support.fixtures import ( - override_application_settings, +from testing_support.fixtures import override_application_settings +from testing_support.validators.validate_custom_parameters import ( validate_custom_parameters, +) +from testing_support.validators.validate_transaction_errors import ( validate_transaction_errors, ) diff --git a/tests/agent_features/test_asgi_distributed_tracing.py b/tests/agent_features/test_asgi_distributed_tracing.py index bb34aba20..90f57becc 100644 --- a/tests/agent_features/test_asgi_distributed_tracing.py +++ b/tests/agent_features/test_asgi_distributed_tracing.py @@ -22,8 +22,8 @@ from newrelic.api.asgi_application import asgi_application, ASGIWebTransaction from testing_support.asgi_testing import AsgiTest -from testing_support.fixtures import (override_application_settings, - validate_transaction_metrics) +from testing_support.fixtures 
import override_application_settings +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics distributed_trace_intrinsics = ['guid', 'traceId', 'priority', 'sampled'] diff --git a/tests/agent_features/test_asgi_transaction.py b/tests/agent_features/test_asgi_transaction.py index 520e954bc..cac075cef 100644 --- a/tests/agent_features/test_asgi_transaction.py +++ b/tests/agent_features/test_asgi_transaction.py @@ -16,11 +16,7 @@ import pytest from testing_support.asgi_testing import AsgiTest -from testing_support.fixtures import ( - override_application_settings, - validate_transaction_errors, - validate_transaction_metrics, -) +from testing_support.fixtures import override_application_settings from testing_support.sample_asgi_applications import ( AppWithDescriptor, simple_app_v2, @@ -29,6 +25,12 @@ simple_app_v3, simple_app_v3_raw, ) +from testing_support.validators.validate_transaction_errors import ( + validate_transaction_errors, +) +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) from newrelic.api.asgi_application import ASGIApplicationWrapper, asgi_application diff --git a/tests/agent_features/test_asgi_w3c_trace_context.py b/tests/agent_features/test_asgi_w3c_trace_context.py index 68c192a5d..8cec2eb7a 100644 --- a/tests/agent_features/test_asgi_w3c_trace_context.py +++ b/tests/agent_features/test_asgi_w3c_trace_context.py @@ -19,11 +19,11 @@ from newrelic.api.asgi_application import asgi_application from testing_support.asgi_testing import AsgiTest -from testing_support.fixtures import (override_application_settings, - validate_transaction_event_attributes, validate_transaction_metrics) +from testing_support.fixtures import override_application_settings +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.validators.validate_span_events import ( validate_span_events) - +from 
testing_support.validators.validate_transaction_event_attributes import validate_transaction_event_attributes @asgi_application() async def target_asgi_application(scope, receive, send): diff --git a/tests/agent_features/test_async_context_propagation.py b/tests/agent_features/test_async_context_propagation.py index ea850c095..8026cbbcc 100644 --- a/tests/agent_features/test_async_context_propagation.py +++ b/tests/agent_features/test_async_context_propagation.py @@ -16,9 +16,8 @@ from testing_support.fixtures import ( function_not_called, override_generic_settings, - validate_transaction_metrics, ) - +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from newrelic.api.application import application_instance as application from newrelic.api.background_task import BackgroundTask, background_task from newrelic.api.database_trace import database_trace diff --git a/tests/agent_features/test_attribute.py b/tests/agent_features/test_attribute.py index ab6f778dd..f4b9e896f 100644 --- a/tests/agent_features/test_attribute.py +++ b/tests/agent_features/test_attribute.py @@ -21,9 +21,11 @@ validate_agent_attribute_types, validate_attributes, validate_attributes_complete, - validate_custom_parameters, ) from testing_support.sample_applications import fully_featured_app +from testing_support.validators.validate_custom_parameters import ( + validate_custom_parameters, +) from newrelic.api.background_task import background_task from newrelic.api.transaction import add_custom_attribute, add_custom_attributes diff --git a/tests/agent_features/test_attributes_in_action.py b/tests/agent_features/test_attributes_in_action.py index f5ee9b229..fbedb302e 100644 --- a/tests/agent_features/test_attributes_in_action.py +++ b/tests/agent_features/test_attributes_in_action.py @@ -25,10 +25,12 @@ validate_error_event_attributes_outside_transaction, validate_error_trace_attributes_outside_transaction, validate_transaction_error_trace_attributes, - 
validate_transaction_event_attributes, validate_transaction_trace_attributes, ) from testing_support.validators.validate_span_events import validate_span_events +from testing_support.validators.validate_transaction_event_attributes import ( + validate_transaction_event_attributes, +) from newrelic.api.application import application_instance as application from newrelic.api.message_transaction import message_transaction diff --git a/tests/agent_features/test_browser.py b/tests/agent_features/test_browser.py index b5ca867d5..e0f562d1e 100644 --- a/tests/agent_features/test_browser.py +++ b/tests/agent_features/test_browser.py @@ -17,9 +17,11 @@ import six import webtest -from testing_support.fixtures import ( - override_application_settings, +from testing_support.fixtures import override_application_settings +from testing_support.validators.validate_custom_parameters import ( validate_custom_parameters, +) +from testing_support.validators.validate_transaction_errors import ( validate_transaction_errors, ) diff --git a/tests/agent_features/test_coroutine_trace.py b/tests/agent_features/test_coroutine_trace.py index 7aba53f62..36e365bc4 100644 --- a/tests/agent_features/test_coroutine_trace.py +++ b/tests/agent_features/test_coroutine_trace.py @@ -18,11 +18,12 @@ import time import pytest -from testing_support.fixtures import ( - capture_transaction_metrics, +from testing_support.fixtures import capture_transaction_metrics, validate_tt_parenting +from testing_support.validators.validate_transaction_errors import ( validate_transaction_errors, +) +from testing_support.validators.validate_transaction_metrics import ( validate_transaction_metrics, - validate_tt_parenting, ) from newrelic.api.background_task import background_task diff --git a/tests/agent_features/test_coroutine_transaction.py b/tests/agent_features/test_coroutine_transaction.py index dd7f281e2..8b602ffc0 100644 --- a/tests/agent_features/test_coroutine_transaction.py +++ 
b/tests/agent_features/test_coroutine_transaction.py @@ -19,6 +19,8 @@ from testing_support.fixtures import ( capture_transaction_metrics, override_generic_settings, +) +from testing_support.validators.validate_transaction_errors import ( validate_transaction_errors, ) diff --git a/tests/agent_features/test_distributed_tracing.py b/tests/agent_features/test_distributed_tracing.py index ae6b4f32d..7f795573a 100644 --- a/tests/agent_features/test_distributed_tracing.py +++ b/tests/agent_features/test_distributed_tracing.py @@ -26,8 +26,10 @@ from newrelic.api.wsgi_application import wsgi_application from testing_support.fixtures import (override_application_settings, - validate_attributes, validate_transaction_event_attributes, - validate_error_event_attributes, validate_transaction_metrics) + validate_attributes, + validate_error_event_attributes) +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics +from testing_support.validators.validate_transaction_event_attributes import validate_transaction_event_attributes distributed_trace_intrinsics = ['guid', 'traceId', 'priority', 'sampled'] inbound_payload_intrinsics = ['parent.type', 'parent.app', 'parent.account', diff --git a/tests/agent_features/test_error_events.py b/tests/agent_features/test_error_events.py index d93f9908b..99b3935be 100644 --- a/tests/agent_features/test_error_events.py +++ b/tests/agent_features/test_error_events.py @@ -23,10 +23,12 @@ override_application_settings, reset_core_stats_engine, validate_error_event_sample_data, - validate_non_transaction_error_event, validate_transaction_error_event_count, ) from testing_support.sample_applications import fully_featured_app +from testing_support.validators.validate_non_transaction_error_event import ( + validate_non_transaction_error_event, +) from newrelic.api.application import application_instance as application from newrelic.api.application import application_settings diff --git 
a/tests/agent_features/test_event_loop_wait_time.py b/tests/agent_features/test_event_loop_wait_time.py index c92a611cc..b4906337f 100644 --- a/tests/agent_features/test_event_loop_wait_time.py +++ b/tests/agent_features/test_event_loop_wait_time.py @@ -18,9 +18,13 @@ import pytest from testing_support.fixtures import ( override_application_settings, + validate_transaction_trace_attributes, +) +from testing_support.validators.validate_transaction_event_attributes import ( validate_transaction_event_attributes, +) +from testing_support.validators.validate_transaction_metrics import ( validate_transaction_metrics, - validate_transaction_trace_attributes, ) from newrelic.api.background_task import background_task diff --git a/tests/agent_features/test_function_trace.py b/tests/agent_features/test_function_trace.py index 78c012b30..f1f0cd9ac 100644 --- a/tests/agent_features/test_function_trace.py +++ b/tests/agent_features/test_function_trace.py @@ -17,9 +17,8 @@ from newrelic.api.background_task import background_task from newrelic.api.function_trace import FunctionTrace -from testing_support.fixtures import (validate_transaction_metrics, - validate_tt_parenting) - +from testing_support.fixtures import validate_tt_parenting +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics _test_function_trace_default_group_scoped_metrics = [ ('Function/FunctionTrace', 1)] diff --git a/tests/agent_features/test_high_security_mode.py b/tests/agent_features/test_high_security_mode.py index 51cd19931..dad7edc29 100644 --- a/tests/agent_features/test_high_security_mode.py +++ b/tests/agent_features/test_high_security_mode.py @@ -24,11 +24,17 @@ validate_attributes_complete, validate_custom_event_count, validate_custom_event_in_application_stats_engine, + validate_request_params_omitted, + validate_tt_segment_params, +) +from testing_support.validators.validate_custom_parameters import ( validate_custom_parameters, +) +from 
testing_support.validators.validate_non_transaction_error_event import ( validate_non_transaction_error_event, - validate_request_params_omitted, +) +from testing_support.validators.validate_transaction_errors import ( validate_transaction_errors, - validate_tt_segment_params, ) from newrelic.api.application import application_instance as application diff --git a/tests/agent_features/test_ignore_expected_errors.py b/tests/agent_features/test_ignore_expected_errors.py index d685c39c0..1a7fa266e 100644 --- a/tests/agent_features/test_ignore_expected_errors.py +++ b/tests/agent_features/test_ignore_expected_errors.py @@ -19,9 +19,15 @@ validate_error_event_attributes_outside_transaction, validate_error_event_sample_data, validate_error_trace_attributes_outside_transaction, - validate_time_metrics_outside_transaction, validate_transaction_error_trace_attributes, +) +from testing_support.validators.validate_time_metrics_outside_transaction import ( + validate_time_metrics_outside_transaction, +) +from testing_support.validators.validate_transaction_errors import ( validate_transaction_errors, +) +from testing_support.validators.validate_transaction_metrics import ( validate_transaction_metrics, ) diff --git a/tests/agent_features/test_lambda_handler.py b/tests/agent_features/test_lambda_handler.py index 4ff932e36..f388aa51b 100644 --- a/tests/agent_features/test_lambda_handler.py +++ b/tests/agent_features/test_lambda_handler.py @@ -13,11 +13,17 @@ # limitations under the License. 
import functools -import pytest from copy import deepcopy -from testing_support.fixtures import (override_application_settings, - validate_transaction_trace_attributes, - validate_transaction_event_attributes) + +import pytest +from testing_support.fixtures import ( + override_application_settings, + validate_transaction_trace_attributes, +) +from testing_support.validators.validate_transaction_event_attributes import ( + validate_transaction_event_attributes, +) + import newrelic.api.lambda_handler as lambda_handler @@ -27,7 +33,7 @@ @pytest.fixture(autouse=True) def force_cold_start_status(request): try: - is_cold_start = request.getfixturevalue('is_cold') + is_cold_start = request.getfixturevalue("is_cold") lambda_handler.COLD_START_RECORDED = not is_cold_start except Exception: lambda_handler.COLD_START_RECORDED = True @@ -36,63 +42,65 @@ def force_cold_start_status(request): @lambda_handler.lambda_handler() def handler(event, context): return { - 'statusCode': '200', - 'body': '{}', - 'headers': { - 'Content-Type': 'application/json', - 'Content-Length': 2, + "statusCode": "200", + "body": "{}", + "headers": { + "Content-Type": "application/json", + "Content-Length": 2, }, } _override_settings = { - 'attributes.include': ['request.parameters.*', 'request.headers.*'], + "attributes.include": ["request.parameters.*", "request.headers.*"], } _expected_attributes = { - 'agent': [ - 'aws.requestId', - 'aws.lambda.arn', - 'request.method', - 'request.uri', - 'response.status', - 'response.headers.contentType', - 'response.headers.contentLength', + "agent": [ + "aws.requestId", + "aws.lambda.arn", + "request.method", + "request.uri", + "response.status", + "response.headers.contentType", + "response.headers.contentLength", ], - 'user': [], - 'intrinsic': [], + "user": [], + "intrinsic": [], } _exact_attrs = { - 'agent': { - 'request.parameters.foo': 'bar', - 'request.headers.host': 'myhost', + "agent": { + "request.parameters.foo": "bar", + "request.headers.host": 
"myhost", }, - 'user': {}, - 'intrinsic': {} + "user": {}, + "intrinsic": {}, } empty_event = {} firehose_event = { - "records": [{ - "recordId": "495469866831355442", - "data": "SGVsbG8sIHRoaXMgaXMgYSB0ZXN0IDEyMy4=", - "approximateArrivalTimestamp": 1495072949453 - }], + "records": [ + { + "recordId": "495469866831355442", + "data": "SGVsbG8sIHRoaXMgaXMgYSB0ZXN0IDEyMy4=", + "approximateArrivalTimestamp": 1495072949453, + } + ], "region": "us-west-2", "deliveryStreamArn": "arn:aws:kinesis:EXAMPLE", - "invocationId": "invocationIdExample" + "invocationId": "invocationIdExample", } class Context(object): - aws_request_id = 'cookies' - invoked_function_arn = 'arn' - function_name = 'cats' - function_version = '$LATEST' + aws_request_id = "cookies" + invoked_function_arn = "arn" + function_name = "cats" + function_version = "$LATEST" memory_limit_in_mb = 128 -@pytest.mark.parametrize('is_cold', (False, True)) +@pytest.mark.parametrize("is_cold", (False, True)) def test_lambda_transaction_attributes(is_cold, monkeypatch): # setup copies of the attribute lists for this test only _forgone_params = {} @@ -101,36 +109,32 @@ def test_lambda_transaction_attributes(is_cold, monkeypatch): # if we have a cold start, then we should see aws.lambda.coldStart=True if is_cold: - _exact['agent']['aws.lambda.coldStart'] = True - _expected['agent'].append('aws.lambda.coldStart') + _exact["agent"]["aws.lambda.coldStart"] = True + _expected["agent"].append("aws.lambda.coldStart") # otherwise, then we need to make sure that we don't see it at all else: - _forgone_params = { - 'agent': ['aws.lambda.coldStart'], - 'user': [], - 'intrinsic': [] - } + _forgone_params = {"agent": ["aws.lambda.coldStart"], "user": [], "intrinsic": []} - @validate_transaction_trace_attributes( - required_params=_expected, - forgone_params=_forgone_params) + @validate_transaction_trace_attributes(required_params=_expected, forgone_params=_forgone_params) @validate_transaction_event_attributes( - 
required_params=_expected, - forgone_params=_forgone_params, - exact_attrs=_exact) + required_params=_expected, forgone_params=_forgone_params, exact_attrs=_exact + ) @override_application_settings(_override_settings) def _test(): - monkeypatch.setenv('AWS_REGION', 'earth') - handler({ - 'httpMethod': 'GET', - 'path': '/', - 'headers': { - 'HOST': 'myhost', + monkeypatch.setenv("AWS_REGION", "earth") + handler( + { + "httpMethod": "GET", + "path": "/", + "headers": { + "HOST": "myhost", + }, + "queryStringParameters": {"foo": "bar"}, + "multiValueQueryStringParameters": {"foo": ["bar"]}, }, - 'queryStringParameters': {'foo': 'bar'}, - 'multiValueQueryStringParameters': {'foo': ['bar']}, - }, Context) + Context, + ) _test() @@ -139,23 +143,26 @@ def _test(): @validate_transaction_event_attributes(_expected_attributes) @override_application_settings(_override_settings) def test_lambda_malformed_api_gateway_payload(monkeypatch): - monkeypatch.setenv('AWS_REGION', 'earth') - handler({ - 'httpMethod': 'GET', - 'path': '/', - 'headers': {}, - 'queryStringParameters': 42, - 'multiValueQueryStringParameters': 42, - }, Context) + monkeypatch.setenv("AWS_REGION", "earth") + handler( + { + "httpMethod": "GET", + "path": "/", + "headers": {}, + "queryStringParameters": 42, + "multiValueQueryStringParameters": 42, + }, + Context, + ) _malformed_request_attributes = { - 'agent': [ - 'aws.requestId', - 'aws.lambda.arn', + "agent": [ + "aws.requestId", + "aws.lambda.arn", ], - 'user': [], - 'intrinsic': [], + "user": [], + "intrinsic": [], } @@ -163,23 +170,26 @@ def test_lambda_malformed_api_gateway_payload(monkeypatch): @validate_transaction_event_attributes(_malformed_request_attributes) @override_application_settings(_override_settings) def test_lambda_malformed_request_headers(): - handler({ - 'httpMethod': 'GET', - 'path': '/', - 'headers': None, - }, Context) + handler( + { + "httpMethod": "GET", + "path": "/", + "headers": None, + }, + Context, + ) 
_malformed_response_attributes = { - 'agent': [ - 'aws.requestId', - 'aws.lambda.arn', - 'request.method', - 'request.uri', - 'response.status', + "agent": [ + "aws.requestId", + "aws.lambda.arn", + "request.method", + "request.uri", + "response.status", ], - 'user': [], - 'intrinsic': [], + "user": [], + "intrinsic": [], } @@ -187,33 +197,35 @@ def test_lambda_malformed_request_headers(): @validate_transaction_event_attributes(_malformed_response_attributes) @override_application_settings(_override_settings) def test_lambda_malformed_response_headers(): - @lambda_handler.lambda_handler() def handler(event, context): return { - 'statusCode': 200, - 'body': '{}', - 'headers': None, + "statusCode": 200, + "body": "{}", + "headers": None, } - handler({ - 'httpMethod': 'GET', - 'path': '/', - 'headers': {}, - }, Context) + handler( + { + "httpMethod": "GET", + "path": "/", + "headers": {}, + }, + Context, + ) _no_status_code_response = { - 'agent': [ - 'aws.requestId', - 'aws.lambda.arn', - 'request.method', - 'request.uri', - 'response.headers.contentType', - 'response.headers.contentLength', + "agent": [ + "aws.requestId", + "aws.lambda.arn", + "request.method", + "request.uri", + "response.headers.contentType", + "response.headers.contentLength", ], - 'user': [], - 'intrinsic': [], + "user": [], + "intrinsic": [], } @@ -221,53 +233,51 @@ def handler(event, context): @validate_transaction_event_attributes(_no_status_code_response) @override_application_settings(_override_settings) def test_lambda_no_status_code_response(): - @lambda_handler.lambda_handler() def handler(event, context): return { - 'body': '{}', - 'headers': { - 'Content-Type': 'application/json', - 'Content-Length': 2, + "body": "{}", + "headers": { + "Content-Type": "application/json", + "Content-Length": 2, }, } - handler({ - 'httpMethod': 'GET', - 'path': '/', - 'headers': {}, - }, Context) + handler( + { + "httpMethod": "GET", + "path": "/", + "headers": {}, + }, + Context, + ) 
-@pytest.mark.parametrize('event,arn', ( - (empty_event, None), - (firehose_event, 'arn:aws:kinesis:EXAMPLE'))) +@pytest.mark.parametrize("event,arn", ((empty_event, None), (firehose_event, "arn:aws:kinesis:EXAMPLE"))) def test_lambda_event_source_arn_attribute(event, arn): if arn is None: _exact = None _expected = None _forgone = { - 'user': [], 'intrinsic': [], - 'agent': ['aws.lambda.eventSource.arn'], + "user": [], + "intrinsic": [], + "agent": ["aws.lambda.eventSource.arn"], } else: _exact = { - 'user': {}, 'intrinsic': {}, - 'agent': {'aws.lambda.eventSource.arn': arn}, + "user": {}, + "intrinsic": {}, + "agent": {"aws.lambda.eventSource.arn": arn}, } _expected = { - 'user': [], 'intrinsic': [], - 'agent': ['aws.lambda.eventSource.arn'], + "user": [], + "intrinsic": [], + "agent": ["aws.lambda.eventSource.arn"], } _forgone = None - @validate_transaction_trace_attributes( - required_params=_expected, - forgone_params=_forgone) - @validate_transaction_event_attributes( - required_params=_expected, - forgone_params=_forgone, - exact_attrs=_exact) + @validate_transaction_trace_attributes(required_params=_expected, forgone_params=_forgone) + @validate_transaction_event_attributes(required_params=_expected, forgone_params=_forgone, exact_attrs=_exact) @override_application_settings(_override_settings) def _test(): handler(event, Context) @@ -275,10 +285,13 @@ def _test(): _test() -@pytest.mark.parametrize('api', ( - lambda_handler.lambda_handler, - functools.partial(lambda_handler.LambdaHandlerWrapper, handler), -)) +@pytest.mark.parametrize( + "api", + ( + lambda_handler.lambda_handler, + functools.partial(lambda_handler.LambdaHandlerWrapper, handler), + ), +) def test_deprecation_warnings(api): with pytest.deprecated_call(): api() diff --git a/tests/agent_features/test_notice_error.py b/tests/agent_features/test_notice_error.py index e052602a0..a4509e215 100644 --- a/tests/agent_features/test_notice_error.py +++ b/tests/agent_features/test_notice_error.py @@ 
-21,12 +21,12 @@ reset_core_stats_engine, validate_application_error_event_count, validate_application_error_trace_count, - validate_application_errors, validate_transaction_error_event_count, validate_transaction_error_trace_count, - validate_transaction_errors, ) +from testing_support.validators.validate_application_errors import validate_application_errors +from testing_support.validators.validate_transaction_errors import validate_transaction_errors from newrelic.api.application import application_instance as application from newrelic.api.application import application_settings from newrelic.api.background_task import background_task diff --git a/tests/agent_features/test_span_events.py b/tests/agent_features/test_span_events.py index 465613169..155642860 100644 --- a/tests/agent_features/test_span_events.py +++ b/tests/agent_features/test_span_events.py @@ -19,11 +19,15 @@ dt_enabled, function_not_called, override_application_settings, - validate_transaction_event_attributes, - validate_transaction_metrics, validate_tt_segment_params, ) from testing_support.validators.validate_span_events import validate_span_events +from testing_support.validators.validate_transaction_event_attributes import ( + validate_transaction_event_attributes, +) +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) from newrelic.api.background_task import background_task from newrelic.api.database_trace import DatabaseTrace diff --git a/tests/agent_features/test_supportability_metrics.py b/tests/agent_features/test_supportability_metrics.py index d3e4b9a69..d77502180 100644 --- a/tests/agent_features/test_supportability_metrics.py +++ b/tests/agent_features/test_supportability_metrics.py @@ -20,7 +20,7 @@ from newrelic.core.agent import agent_instance -from testing_support.fixtures import validate_transaction_metrics +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from 
testing_support.validators.validate_metric_payload import ( validate_metric_payload) diff --git a/tests/agent_features/test_synthetics.py b/tests/agent_features/test_synthetics.py index cdf02e3e4..ec7b78e96 100644 --- a/tests/agent_features/test_synthetics.py +++ b/tests/agent_features/test_synthetics.py @@ -19,10 +19,10 @@ cat_enabled, make_synthetics_header, override_application_settings, - validate_synthetics_event, validate_synthetics_transaction_trace, ) +from testing_support.validators.validate_synthetics_event import validate_synthetics_event from newrelic.api.web_transaction import web_transaction from newrelic.api.wsgi_application import wsgi_application from newrelic.common.encoding_utils import deobfuscate, json_decode diff --git a/tests/agent_features/test_time_trace.py b/tests/agent_features/test_time_trace.py index 449b7dc97..eccb4d7fe 100644 --- a/tests/agent_features/test_time_trace.py +++ b/tests/agent_features/test_time_trace.py @@ -15,7 +15,9 @@ import logging import pytest -from testing_support.fixtures import validate_transaction_metrics +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) from newrelic.api.background_task import background_task from newrelic.api.database_trace import DatabaseTrace diff --git a/tests/agent_features/test_transaction_event_data_and_some_browser_stuff_too.py b/tests/agent_features/test_transaction_event_data_and_some_browser_stuff_too.py index a99fc1cdd..c2e22b68a 100644 --- a/tests/agent_features/test_transaction_event_data_and_some_browser_stuff_too.py +++ b/tests/agent_features/test_transaction_event_data_and_some_browser_stuff_too.py @@ -13,36 +13,39 @@ # limitations under the License. 
import json + import webtest +from testing_support.fixtures import ( + override_application_settings, + validate_transaction_event_sample_data, +) +from testing_support.sample_applications import ( + fully_featured_app, + user_attributes_added, +) +from testing_support.validators.validate_transaction_event_attributes import ( + validate_transaction_event_attributes, +) from newrelic.api.application import application_settings from newrelic.api.background_task import background_task - from newrelic.common.encoding_utils import deobfuscate from newrelic.common.object_wrapper import transient_function_wrapper -from testing_support.fixtures import (override_application_settings, - validate_transaction_event_sample_data, - validate_transaction_event_attributes) -from testing_support.sample_applications import (fully_featured_app, - user_attributes_added) - - fully_featured_application = webtest.TestApp(fully_featured_app) _user_attributes = user_attributes_added() -#====================== Test cases ==================================== +# ====================== Test cases ==================================== -_test_capture_attributes_enabled_settings = { - 'browser_monitoring.attributes.enabled': True } +_test_capture_attributes_enabled_settings = {"browser_monitoring.attributes.enabled": True} _intrinsic_attributes = { - 'name': 'WebTransaction/Uri/', - 'port': 80, + "name": "WebTransaction/Uri/", + "port": 80, } -@validate_transaction_event_sample_data(required_attrs=_intrinsic_attributes, - required_user_attrs=_user_attributes) + +@validate_transaction_event_sample_data(required_attrs=_intrinsic_attributes, required_user_attrs=_user_attributes) @override_application_settings(_test_capture_attributes_enabled_settings) def test_capture_attributes_enabled(): settings = application_settings() @@ -52,7 +55,7 @@ def test_capture_attributes_enabled(): assert settings.js_agent_loader - response = fully_featured_application.get('/') + response = 
fully_featured_application.get("/") header = response.html.html.head.script.string content = response.html.html.body.p.string @@ -60,25 +63,23 @@ def test_capture_attributes_enabled(): # Validate actual body content. - assert content == 'RESPONSE' + assert content == "RESPONSE" # We no longer are in control of the JS contents of the header so # just check to make sure it contains at least the magic string # 'NREUM'. - assert header.find('NREUM') != -1 + assert header.find("NREUM") != -1 # Now validate the various fields of the footer related to analytics. # The fields are held by a JSON dictionary. - data = json.loads(footer.split('NREUM.info=')[1]) + data = json.loads(footer.split("NREUM.info=")[1]) obfuscation_key = settings.license_key[:13] - attributes = json.loads(deobfuscate(data['atts'], - obfuscation_key)) - user_attrs = attributes['u'] - + attributes = json.loads(deobfuscate(data["atts"], obfuscation_key)) + user_attrs = attributes["u"] # When you round-trip through json encoding and json decoding, you # always end up with unicode (unicode in Python 2, str in Python 3.) 
@@ -90,22 +91,18 @@ def test_capture_attributes_enabled(): browser_attributes = _user_attributes.copy() - browser_attributes['bytes'] = u'bytes-value' - browser_attributes['invalid-utf8'] = _user_attributes[ - 'invalid-utf8'].decode('latin-1') - browser_attributes['multibyte-utf8'] = _user_attributes[ - 'multibyte-utf8'].decode('latin-1') + browser_attributes["bytes"] = "bytes-value" + browser_attributes["invalid-utf8"] = _user_attributes["invalid-utf8"].decode("latin-1") + browser_attributes["multibyte-utf8"] = _user_attributes["multibyte-utf8"].decode("latin-1") for attr, value in browser_attributes.items(): - assert user_attrs[attr] == value, ( - "attribute %r expected %r, found %r" % - (attr, value, user_attrs[attr])) + assert user_attrs[attr] == value, "attribute %r expected %r, found %r" % (attr, value, user_attrs[attr]) + + +_test_no_attributes_recorded_settings = {"browser_monitoring.attributes.enabled": True} -_test_no_attributes_recorded_settings = { - 'browser_monitoring.attributes.enabled': True } -@validate_transaction_event_sample_data(required_attrs=_intrinsic_attributes, - required_user_attrs={}) +@validate_transaction_event_sample_data(required_attrs=_intrinsic_attributes, required_user_attrs={}) @override_application_settings(_test_no_attributes_recorded_settings) def test_no_attributes_recorded(): settings = application_settings() @@ -115,8 +112,7 @@ def test_no_attributes_recorded(): assert settings.js_agent_loader - response = fully_featured_application.get('/', extra_environ={ - 'record_attributes': 'FALSE'}) + response = fully_featured_application.get("/", extra_environ={"record_attributes": "FALSE"}) header = response.html.html.head.script.string content = response.html.html.body.p.string @@ -124,32 +120,33 @@ def test_no_attributes_recorded(): # Validate actual body content. 
- assert content == 'RESPONSE' + assert content == "RESPONSE" # We no longer are in control of the JS contents of the header so # just check to make sure it contains at least the magic string # 'NREUM'. - assert header.find('NREUM') != -1 + assert header.find("NREUM") != -1 # Now validate the various fields of the footer related to analytics. # The fields are held by a JSON dictionary. - data = json.loads(footer.split('NREUM.info=')[1]) + data = json.loads(footer.split("NREUM.info=")[1]) # As we are not recording any user or agent attributes, we should not # actually have an entry at all in the footer. - assert 'atts' not in data + assert "atts" not in data + _test_analytic_events_capture_attributes_disabled_settings = { - 'transaction_events.attributes.enabled': False, - 'browser_monitoring.attributes.enabled': True } + "transaction_events.attributes.enabled": False, + "browser_monitoring.attributes.enabled": True, +} -@validate_transaction_event_sample_data(required_attrs=_intrinsic_attributes, - required_user_attrs={}) -@override_application_settings( - _test_analytic_events_capture_attributes_disabled_settings) + +@validate_transaction_event_sample_data(required_attrs=_intrinsic_attributes, required_user_attrs={}) +@override_application_settings(_test_analytic_events_capture_attributes_disabled_settings) def test_analytic_events_capture_attributes_disabled(): settings = application_settings() @@ -162,7 +159,7 @@ def test_analytic_events_capture_attributes_disabled(): assert settings.js_agent_loader - response = fully_featured_application.get('/') + response = fully_featured_application.get("/") header = response.html.html.head.script.string content = response.html.html.body.p.string @@ -170,23 +167,23 @@ def test_analytic_events_capture_attributes_disabled(): # Validate actual body content. 
- assert content == 'RESPONSE' + assert content == "RESPONSE" # We no longer are in control of the JS contents of the header so # just check to make sure it contains at least the magic string # 'NREUM'. - assert header.find('NREUM') != -1 + assert header.find("NREUM") != -1 # Now validate that attributes are present, since browser monitoring should # be enabled. - data = json.loads(footer.split('NREUM.info=')[1]) + data = json.loads(footer.split("NREUM.info=")[1]) + + assert "atts" in data - assert 'atts' in data -@validate_transaction_event_sample_data(required_attrs=_intrinsic_attributes, - required_user_attrs=_user_attributes) +@validate_transaction_event_sample_data(required_attrs=_intrinsic_attributes, required_user_attrs=_user_attributes) def test_capture_attributes_default(): settings = application_settings() @@ -195,7 +192,7 @@ def test_capture_attributes_default(): assert settings.js_agent_loader - response = fully_featured_application.get('/') + response = fully_featured_application.get("/") header = response.html.html.head.script.string content = response.html.html.body.p.string @@ -203,32 +200,29 @@ def test_capture_attributes_default(): # Validate actual body content. - assert content == 'RESPONSE' + assert content == "RESPONSE" # We no longer are in control of the JS contents of the header so # just check to make sure it contains at least the magic string # 'NREUM'. - assert header.find('NREUM') != -1 + assert header.find("NREUM") != -1 # Now validate that attributes are not present, since should # be disabled. 
- data = json.loads(footer.split('NREUM.info=')[1]) + data = json.loads(footer.split("NREUM.info=")[1]) - assert 'atts' not in data + assert "atts" not in data -_test_analytic_events_background_task_settings = { - 'browser_monitoring.attributes.enabled': True } -_intrinsic_attributes = { - 'name': 'OtherTransaction/Uri/' -} +_test_analytic_events_background_task_settings = {"browser_monitoring.attributes.enabled": True} + +_intrinsic_attributes = {"name": "OtherTransaction/Uri/"} -@validate_transaction_event_sample_data(required_attrs=_intrinsic_attributes, - required_user_attrs=_user_attributes) -@override_application_settings( - _test_analytic_events_background_task_settings) + +@validate_transaction_event_sample_data(required_attrs=_intrinsic_attributes, required_user_attrs=_user_attributes) +@override_application_settings(_test_analytic_events_background_task_settings) def test_analytic_events_background_task(): settings = application_settings() @@ -240,20 +234,17 @@ def test_analytic_events_background_task(): assert settings.js_agent_loader - response = fully_featured_application.get('/', extra_environ={ - 'newrelic.set_background_task': True}) + response = fully_featured_application.get("/", extra_environ={"newrelic.set_background_task": True}) assert response.html.html.head.script is None -_test_capture_attributes_disabled_settings = { - 'browser_monitoring.attributes.enabled': False } -_intrinsic_attributes = { - 'name': 'WebTransaction/Uri/' -} +_test_capture_attributes_disabled_settings = {"browser_monitoring.attributes.enabled": False} + +_intrinsic_attributes = {"name": "WebTransaction/Uri/"} -@validate_transaction_event_sample_data(required_attrs=_intrinsic_attributes, - required_user_attrs=_user_attributes) + +@validate_transaction_event_sample_data(required_attrs=_intrinsic_attributes, required_user_attrs=_user_attributes) @override_application_settings(_test_capture_attributes_disabled_settings) def test_capture_attributes_disabled(): settings = 
application_settings() @@ -263,7 +254,7 @@ def test_capture_attributes_disabled(): assert settings.js_agent_loader - response = fully_featured_application.get('/') + response = fully_featured_application.get("/") header = response.html.html.head.script.string content = response.html.html.body.p.string @@ -271,30 +262,33 @@ def test_capture_attributes_disabled(): # Validate actual body content. - assert content == 'RESPONSE' + assert content == "RESPONSE" # We no longer are in control of the JS contents of the header so # just check to make sure it contains at least the magic string # 'NREUM'. - assert header.find('NREUM') != -1 + assert header.find("NREUM") != -1 # Now validate that attributes are not present, since should # be disabled. - data = json.loads(footer.split('NREUM.info=')[1]) + data = json.loads(footer.split("NREUM.info=")[1]) + + assert "atts" not in data - assert 'atts' not in data -@transient_function_wrapper('newrelic.core.stats_engine', - 'SampledDataSet.add') +@transient_function_wrapper("newrelic.core.stats_engine", "SampledDataSet.add") def validate_no_analytics_sample_data(wrapped, instance, args, kwargs): - assert False, 'Should not be recording analytic event.' + assert False, "Should not be recording analytic event." 
return wrapped(*args, **kwargs) + _test_collect_analytic_events_disabled_settings = { - 'collect_analytics_events': False, - 'browser_monitoring.attributes.enabled': True } + "collect_analytics_events": False, + "browser_monitoring.attributes.enabled": True, +} + @validate_no_analytics_sample_data @override_application_settings(_test_collect_analytic_events_disabled_settings) @@ -308,7 +302,7 @@ def test_collect_analytic_events_disabled(): assert settings.js_agent_loader - response = fully_featured_application.get('/') + response = fully_featured_application.get("/") header = response.html.html.head.script.string content = response.html.html.body.p.string @@ -316,24 +310,27 @@ def test_collect_analytic_events_disabled(): # Validate actual body content. - assert content == 'RESPONSE' + assert content == "RESPONSE" # We no longer are in control of the JS contents of the header so # just check to make sure it contains at least the magic string # 'NREUM'. - assert header.find('NREUM') != -1 + assert header.find("NREUM") != -1 # Now validate that attributes are present, since should # be enabled. - data = json.loads(footer.split('NREUM.info=')[1]) + data = json.loads(footer.split("NREUM.info=")[1]) + + assert "atts" in data - assert 'atts' in data _test_analytic_events_disabled_settings = { - 'transaction_events.enabled': False, - 'browser_monitoring.attributes.enabled': True } + "transaction_events.enabled": False, + "browser_monitoring.attributes.enabled": True, +} + @validate_no_analytics_sample_data @override_application_settings(_test_analytic_events_disabled_settings) @@ -348,7 +345,7 @@ def test_analytic_events_disabled(): assert settings.js_agent_loader - response = fully_featured_application.get('/') + response = fully_featured_application.get("/") header = response.html.html.head.script.string content = response.html.html.body.p.string @@ -356,25 +353,26 @@ def test_analytic_events_disabled(): # Validate actual body content. 
- assert content == 'RESPONSE' + assert content == "RESPONSE" # We no longer are in control of the JS contents of the header so # just check to make sure it contains at least the magic string # 'NREUM'. - assert header.find('NREUM') != -1 + assert header.find("NREUM") != -1 # Now validate that attributes are present, since should # be enabled. - data = json.loads(footer.split('NREUM.info=')[1]) + data = json.loads(footer.split("NREUM.info=")[1]) + + assert "atts" in data - assert 'atts' in data # -------------- Test call counts in analytic events ---------------- -@validate_transaction_event_sample_data(required_attrs=_intrinsic_attributes, - required_user_attrs=_user_attributes) + +@validate_transaction_event_sample_data(required_attrs=_intrinsic_attributes, required_user_attrs=_user_attributes) def test_no_database_or_external_attributes_in_analytics(): """Make no external calls or database calls in the transaction and check if the analytic event doesn't have the databaseCallCount, databaseDuration, @@ -385,7 +383,7 @@ def test_no_database_or_external_attributes_in_analytics(): assert settings.browser_monitoring.enabled - response = fully_featured_application.get('/') + response = fully_featured_application.get("/") # Validation of analytic data happens in the decorator. @@ -393,15 +391,16 @@ def test_no_database_or_external_attributes_in_analytics(): # Validate actual body content. 
- assert content == 'RESPONSE' + assert content == "RESPONSE" + _intrinsic_attributes = { - 'name': 'WebTransaction/Uri/db', - 'databaseCallCount': 2, + "name": "WebTransaction/Uri/db", + "databaseCallCount": 2, } -@validate_transaction_event_sample_data(required_attrs=_intrinsic_attributes, - required_user_attrs=_user_attributes) + +@validate_transaction_event_sample_data(required_attrs=_intrinsic_attributes, required_user_attrs=_user_attributes) def test_database_attributes_in_analytics(): """Make database calls in the transaction and check if the analytic event has the databaseCallCount and databaseDuration attributes. @@ -412,9 +411,9 @@ def test_database_attributes_in_analytics(): assert settings.browser_monitoring.enabled test_environ = { - 'db' : '2', + "db": "2", } - response = fully_featured_application.get('/db', extra_environ=test_environ) + response = fully_featured_application.get("/db", extra_environ=test_environ) # Validation of analytic data happens in the decorator. @@ -422,15 +421,16 @@ def test_database_attributes_in_analytics(): # Validate actual body content. - assert content == 'RESPONSE' + assert content == "RESPONSE" + _intrinsic_attributes = { - 'name': 'WebTransaction/Uri/ext', - 'externalCallCount': 2, + "name": "WebTransaction/Uri/ext", + "externalCallCount": 2, } -@validate_transaction_event_sample_data(required_attrs=_intrinsic_attributes, - required_user_attrs=_user_attributes) + +@validate_transaction_event_sample_data(required_attrs=_intrinsic_attributes, required_user_attrs=_user_attributes) def test_external_attributes_in_analytics(): """Make external calls in the transaction and check if the analytic event has the externalCallCount and externalDuration attributes. 
@@ -441,10 +441,9 @@ def test_external_attributes_in_analytics(): assert settings.browser_monitoring.enabled test_environ = { - 'external' : '2', + "external": "2", } - response = fully_featured_application.get('/ext', - extra_environ=test_environ) + response = fully_featured_application.get("/ext", extra_environ=test_environ) # Validation of analytic data happens in the decorator. @@ -452,16 +451,17 @@ def test_external_attributes_in_analytics(): # Validate actual body content. - assert content == 'RESPONSE' + assert content == "RESPONSE" + _intrinsic_attributes = { - 'name': 'WebTransaction/Uri/dbext', - 'databaseCallCount': 2, - 'externalCallCount': 2, + "name": "WebTransaction/Uri/dbext", + "databaseCallCount": 2, + "externalCallCount": 2, } -@validate_transaction_event_sample_data(required_attrs=_intrinsic_attributes, - required_user_attrs=_user_attributes) + +@validate_transaction_event_sample_data(required_attrs=_intrinsic_attributes, required_user_attrs=_user_attributes) def test_database_and_external_attributes_in_analytics(): """Make external calls and database calls in the transaction and check if the analytic event has the databaseCallCount, databaseDuration, @@ -473,11 +473,10 @@ def test_database_and_external_attributes_in_analytics(): assert settings.browser_monitoring.enabled test_environ = { - 'db' : '2', - 'external' : '2', + "db": "2", + "external": "2", } - response = fully_featured_application.get('/dbext', - extra_environ=test_environ) + response = fully_featured_application.get("/dbext", extra_environ=test_environ) # Validation of analytic data happens in the decorator. @@ -485,24 +484,25 @@ def test_database_and_external_attributes_in_analytics(): # Validate actual body content. 
- assert content == 'RESPONSE' + assert content == "RESPONSE" + # -------------- Test background tasks ---------------- _expected_attributes = { - 'user': [], - 'agent': [], - 'intrinsic' : ('name', 'duration', 'type', 'timestamp', 'totalTime'), + "user": [], + "agent": [], + "intrinsic": ("name", "duration", "type", "timestamp", "totalTime"), } _expected_absent_attributes = { - 'user': ('foo'), - 'agent': ('response.status', 'request.method'), - 'intrinsic': ('port'), + "user": ("foo"), + "agent": ("response.status", "request.method"), + "intrinsic": ("port"), } -@validate_transaction_event_attributes(_expected_attributes, - _expected_absent_attributes) + +@validate_transaction_event_attributes(_expected_attributes, _expected_absent_attributes) @background_task() def test_background_task_intrinsics_has_no_port(): pass diff --git a/tests/agent_features/test_transaction_name.py b/tests/agent_features/test_transaction_name.py index 492f64df3..beaf83b57 100644 --- a/tests/agent_features/test_transaction_name.py +++ b/tests/agent_features/test_transaction_name.py @@ -15,7 +15,7 @@ from newrelic.api.background_task import background_task from newrelic.api.transaction import set_transaction_name, set_background_task -from testing_support.fixtures import validate_transaction_metrics +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics # test diff --git a/tests/agent_features/test_w3c_trace_context.py b/tests/agent_features/test_w3c_trace_context.py index 17e40fdb0..726cf011a 100644 --- a/tests/agent_features/test_w3c_trace_context.py +++ b/tests/agent_features/test_w3c_trace_context.py @@ -19,11 +19,11 @@ from newrelic.api.transaction import current_transaction from newrelic.api.external_trace import ExternalTrace from newrelic.api.wsgi_application import wsgi_application -from testing_support.fixtures import (override_application_settings, - validate_transaction_event_attributes, validate_transaction_metrics) +from 
testing_support.fixtures import override_application_settings from testing_support.validators.validate_span_events import ( validate_span_events) - +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics +from testing_support.validators.validate_transaction_event_attributes import validate_transaction_event_attributes @wsgi_application() def target_wsgi_application(environ, start_response): diff --git a/tests/agent_features/test_web_transaction.py b/tests/agent_features/test_web_transaction.py index 0d8b17548..f2f08574a 100644 --- a/tests/agent_features/test_web_transaction.py +++ b/tests/agent_features/test_web_transaction.py @@ -18,8 +18,11 @@ import pytest import webtest -from testing_support.fixtures import validate_attributes, validate_transaction_metrics +from testing_support.fixtures import validate_attributes from testing_support.sample_applications import simple_app, simple_app_raw +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) import newrelic.packages.six as six from newrelic.api.application import application_instance diff --git a/tests/agent_features/test_wsgi_attributes.py b/tests/agent_features/test_wsgi_attributes.py index 0f7f7d6f2..7543e45d8 100644 --- a/tests/agent_features/test_wsgi_attributes.py +++ b/tests/agent_features/test_wsgi_attributes.py @@ -18,9 +18,11 @@ override_application_settings, validate_error_event_attributes, validate_transaction_error_trace_attributes, - validate_transaction_event_attributes, ) from testing_support.sample_applications import fully_featured_app +from testing_support.validators.validate_transaction_event_attributes import ( + validate_transaction_event_attributes, +) WSGI_ATTRIBUTES = [ "wsgi.input.seconds", @@ -44,6 +46,4 @@ @override_application_settings({"attributes.include": ["*"]}) @dt_enabled def test_wsgi_attributes(): - app.post_json( - "/", {"foo": "bar"}, extra_environ={"n_errors": "1", "err_message": 
"oops"} - ) + app.post_json("/", {"foo": "bar"}, extra_environ={"n_errors": "1", "err_message": "oops"}) diff --git a/tests/agent_unittests/test_agent_connect.py b/tests/agent_unittests/test_agent_connect.py index 46c4edf44..eb944c072 100644 --- a/tests/agent_unittests/test_agent_connect.py +++ b/tests/agent_unittests/test_agent_connect.py @@ -19,10 +19,9 @@ from testing_support.fixtures import ( override_generic_settings, - validate_internal_metrics, failing_endpoint ) - +from testing_support.validators.validate_internal_metrics import validate_internal_metrics SETTINGS = global_settings() diff --git a/tests/application_celery/test_celery.py b/tests/application_celery/test_celery.py index 5bde17714..c2f9177fa 100644 --- a/tests/application_celery/test_celery.py +++ b/tests/application_celery/test_celery.py @@ -15,7 +15,7 @@ from newrelic.api.background_task import background_task from newrelic.api.transaction import ignore_transaction, end_of_transaction -from testing_support.fixtures import validate_transaction_metrics +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.validators.validate_code_level_metrics import validate_code_level_metrics from tasks import add, tsum diff --git a/tests/component_djangorestframework/test_application.py b/tests/component_djangorestframework/test_application.py index 2951e0401..9ed60aa33 100644 --- a/tests/component_djangorestframework/test_application.py +++ b/tests/component_djangorestframework/test_application.py @@ -18,9 +18,11 @@ from newrelic.packages import six from newrelic.core.config import global_settings -from testing_support.fixtures import (validate_transaction_metrics, - validate_transaction_errors, override_generic_settings, +from testing_support.fixtures import ( + override_generic_settings, function_not_called) +from testing_support.validators.validate_transaction_errors import validate_transaction_errors +from 
testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.validators.validate_code_level_metrics import validate_code_level_metrics import django diff --git a/tests/component_flask_rest/test_application.py b/tests/component_flask_rest/test_application.py index 934eb5784..d463a0205 100644 --- a/tests/component_flask_rest/test_application.py +++ b/tests/component_flask_rest/test_application.py @@ -16,12 +16,16 @@ from testing_support.fixtures import ( override_generic_settings, override_ignore_status_codes, - validate_transaction_errors, - validate_transaction_metrics, ) from testing_support.validators.validate_code_level_metrics import ( validate_code_level_metrics, ) +from testing_support.validators.validate_transaction_errors import ( + validate_transaction_errors, +) +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) from newrelic.common.object_names import callable_name from newrelic.core.config import global_settings diff --git a/tests/component_graphqlserver/test_graphql.py b/tests/component_graphqlserver/test_graphql.py index 997df1174..e5566047e 100644 --- a/tests/component_graphqlserver/test_graphql.py +++ b/tests/component_graphqlserver/test_graphql.py @@ -15,11 +15,9 @@ import importlib import pytest -from testing_support.fixtures import ( - dt_enabled, - validate_transaction_errors, - validate_transaction_metrics, -) +from testing_support.fixtures import dt_enabled +from testing_support.validators.validate_transaction_errors import validate_transaction_errors +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.validators.validate_span_events import validate_span_events from testing_support.validators.validate_transaction_count import ( validate_transaction_count, diff --git a/tests/component_tastypie/test_application.py b/tests/component_tastypie/test_application.py index 
16521425c..5f81d0831 100644 --- a/tests/component_tastypie/test_application.py +++ b/tests/component_tastypie/test_application.py @@ -21,8 +21,9 @@ from newrelic.api.background_task import background_task from newrelic.api.transaction import end_of_transaction -from testing_support.fixtures import (validate_transaction_metrics, - validate_transaction_errors, override_ignore_status_codes) +from testing_support.fixtures import override_ignore_status_codes +from testing_support.validators.validate_transaction_errors import validate_transaction_errors +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.validators.validate_code_level_metrics import validate_code_level_metrics from wsgi import application diff --git a/tests/coroutines_asyncio/test_context_propagation.py b/tests/coroutines_asyncio/test_context_propagation.py index 3beef38d0..09fccffb2 100644 --- a/tests/coroutines_asyncio/test_context_propagation.py +++ b/tests/coroutines_asyncio/test_context_propagation.py @@ -18,9 +18,9 @@ from testing_support.fixtures import ( function_not_called, override_generic_settings, - validate_transaction_metrics, ) +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from newrelic.api.application import application_instance as application from newrelic.api.background_task import BackgroundTask, background_task from newrelic.api.database_trace import database_trace diff --git a/tests/cross_agent/test_aws_utilization_data.py b/tests/cross_agent/test_aws_utilization_data.py index 7ff2e623b..807b1a97b 100644 --- a/tests/cross_agent/test_aws_utilization_data.py +++ b/tests/cross_agent/test_aws_utilization_data.py @@ -18,7 +18,7 @@ from newrelic.common.utilization import AWSUtilization from testing_support.mock_http_client import create_client_cls -from testing_support.fixtures import validate_internal_metrics +from testing_support.validators.validate_internal_metrics 
import validate_internal_metrics CURRENT_DIR = os.path.dirname(os.path.realpath(__file__)) diff --git a/tests/cross_agent/test_azure_utilization_data.py b/tests/cross_agent/test_azure_utilization_data.py index 897fadec0..772a96f98 100644 --- a/tests/cross_agent/test_azure_utilization_data.py +++ b/tests/cross_agent/test_azure_utilization_data.py @@ -18,7 +18,7 @@ from newrelic.common.utilization import AzureUtilization from testing_support.mock_http_client import create_client_cls -from testing_support.fixtures import validate_internal_metrics +from testing_support.validators.validate_internal_metrics import validate_internal_metrics CURRENT_DIR = os.path.dirname(os.path.realpath(__file__)) diff --git a/tests/cross_agent/test_boot_id_utilization_data.py b/tests/cross_agent/test_boot_id_utilization_data.py index 2eaeae670..ea5b26a9e 100644 --- a/tests/cross_agent/test_boot_id_utilization_data.py +++ b/tests/cross_agent/test_boot_id_utilization_data.py @@ -20,7 +20,7 @@ from newrelic.common.system_info import BootIdUtilization -from testing_support.fixtures import validate_internal_metrics +from testing_support.validators.validate_internal_metrics import validate_internal_metrics CURRENT_DIR = os.path.dirname(os.path.realpath(__file__)) diff --git a/tests/cross_agent/test_distributed_tracing.py b/tests/cross_agent/test_distributed_tracing.py index 2ec246275..0ff46eea2 100644 --- a/tests/cross_agent/test_distributed_tracing.py +++ b/tests/cross_agent/test_distributed_tracing.py @@ -23,10 +23,11 @@ from newrelic.common.object_wrapper import transient_function_wrapper from testing_support.fixtures import (override_application_settings, - validate_transaction_metrics, validate_transaction_event_attributes, validate_error_event_attributes, validate_attributes) from testing_support.validators.validate_span_events import ( validate_span_events) +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics +from 
testing_support.validators.validate_transaction_event_attributes import validate_transaction_event_attributes CURRENT_DIR = os.path.dirname(os.path.realpath(__file__)) JSON_DIR = os.path.normpath(os.path.join(CURRENT_DIR, 'fixtures', diff --git a/tests/cross_agent/test_gcp_utilization_data.py b/tests/cross_agent/test_gcp_utilization_data.py index 1d77ed0a2..7bbf6ce0a 100644 --- a/tests/cross_agent/test_gcp_utilization_data.py +++ b/tests/cross_agent/test_gcp_utilization_data.py @@ -18,7 +18,7 @@ from newrelic.common.utilization import GCPUtilization from testing_support.mock_http_client import create_client_cls -from testing_support.fixtures import validate_internal_metrics +from testing_support.validators.validate_internal_metrics import validate_internal_metrics CURRENT_DIR = os.path.dirname(os.path.realpath(__file__)) diff --git a/tests/cross_agent/test_lambda_event_source.py b/tests/cross_agent/test_lambda_event_source.py index 3a90aec58..511294cf6 100644 --- a/tests/cross_agent/test_lambda_event_source.py +++ b/tests/cross_agent/test_lambda_event_source.py @@ -17,8 +17,8 @@ import pytest from newrelic.api.lambda_handler import lambda_handler -from testing_support.fixtures import (override_application_settings, - validate_transaction_event_attributes) +from testing_support.fixtures import override_application_settings +from testing_support.validators.validate_transaction_event_attributes import validate_transaction_event_attributes CURRENT_DIR = os.path.dirname(os.path.realpath(__file__)) FIXTURE_DIR = os.path.normpath(os.path.join(CURRENT_DIR, 'fixtures')) diff --git a/tests/cross_agent/test_pcf_utilization_data.py b/tests/cross_agent/test_pcf_utilization_data.py index ce86bfb9f..28b56f759 100644 --- a/tests/cross_agent/test_pcf_utilization_data.py +++ b/tests/cross_agent/test_pcf_utilization_data.py @@ -18,7 +18,7 @@ from newrelic.common.utilization import PCFUtilization -from testing_support.fixtures import validate_internal_metrics +from 
testing_support.validators.validate_internal_metrics import validate_internal_metrics CURRENT_DIR = os.path.dirname(os.path.realpath(__file__)) diff --git a/tests/cross_agent/test_w3c_trace_context.py b/tests/cross_agent/test_w3c_trace_context.py index 479d799bc..05f157f7b 100644 --- a/tests/cross_agent/test_w3c_trace_context.py +++ b/tests/cross_agent/test_w3c_trace_context.py @@ -24,9 +24,10 @@ from testing_support.validators.validate_span_events import ( validate_span_events) from testing_support.fixtures import (override_application_settings, - validate_transaction_metrics, validate_transaction_event_attributes, validate_attributes) from newrelic.common.encoding_utils import W3CTraceState +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics +from testing_support.validators.validate_transaction_event_attributes import validate_transaction_event_attributes CURRENT_DIR = os.path.dirname(os.path.realpath(__file__)) JSON_DIR = os.path.normpath(os.path.join(CURRENT_DIR, 'fixtures', diff --git a/tests/datastore_aioredis/test_custom_conn_pool.py b/tests/datastore_aioredis/test_custom_conn_pool.py index 7644e8ffb..e59760ea3 100644 --- a/tests/datastore_aioredis/test_custom_conn_pool.py +++ b/tests/datastore_aioredis/test_custom_conn_pool.py @@ -17,13 +17,17 @@ will not result in an error. 
""" -from newrelic.api.background_task import background_task - -# from testing_support.fixture.event_loop import event_loop as loop -from testing_support.fixtures import validate_transaction_metrics, override_application_settings from testing_support.db_settings import redis_settings +from testing_support.fixtures import override_application_settings from testing_support.util import instance_hostname +# from testing_support.fixture.event_loop import event_loop as loop +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) + +from newrelic.api.background_task import background_task + DB_SETTINGS = redis_settings()[0] diff --git a/tests/datastore_aioredis/test_execute_command.py b/tests/datastore_aioredis/test_execute_command.py index bbc8b2d4f..f6ee9eb27 100644 --- a/tests/datastore_aioredis/test_execute_command.py +++ b/tests/datastore_aioredis/test_execute_command.py @@ -13,12 +13,15 @@ # limitations under the License. import pytest -from newrelic.api.background_task import background_task - -from testing_support.fixtures import validate_transaction_metrics, override_application_settings -from conftest import AIOREDIS_VERSION +from conftest import AIOREDIS_VERSION # , event_loop, loop from testing_support.db_settings import redis_settings +from testing_support.fixtures import override_application_settings from testing_support.util import instance_hostname +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) + +from newrelic.api.background_task import background_task DB_SETTINGS = redis_settings()[0] diff --git a/tests/datastore_aioredis/test_get_and_set.py b/tests/datastore_aioredis/test_get_and_set.py index a446d5f6c..180f32578 100644 --- a/tests/datastore_aioredis/test_get_and_set.py +++ b/tests/datastore_aioredis/test_get_and_set.py @@ -12,11 +12,15 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from newrelic.api.background_task import background_task - -from testing_support.fixtures import validate_transaction_metrics, override_application_settings +# from conftest import AIOREDIS_VERSION, event_loop, loop from testing_support.db_settings import redis_settings +from testing_support.fixtures import override_application_settings from testing_support.util import instance_hostname +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) + +from newrelic.api.background_task import background_task DB_SETTINGS = redis_settings()[0] diff --git a/tests/datastore_aioredis/test_multiple_dbs.py b/tests/datastore_aioredis/test_multiple_dbs.py index cb817c9f8..3b9ea37dd 100644 --- a/tests/datastore_aioredis/test_multiple_dbs.py +++ b/tests/datastore_aioredis/test_multiple_dbs.py @@ -12,14 +12,19 @@ # See the License for the specific language governing permissions and # limitations under the License. -import pytest import aioredis -from newrelic.api.background_task import background_task +import pytest +from conftest import AIOREDIS_VERSION # , event_loop, loop -from testing_support.fixtures import validate_transaction_metrics, override_application_settings -from conftest import AIOREDIS_VERSION +# from conftest import AIOREDIS_VERSION from testing_support.db_settings import redis_settings +from testing_support.fixtures import override_application_settings from testing_support.util import instance_hostname +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) + +from newrelic.api.background_task import background_task DB_SETTINGS = redis_settings() @@ -133,7 +138,6 @@ def client_set(request, loop): raise NotImplementedError() - async def exercise_redis(client_1, client_2): await client_1.set("key", "value") await client_1.get("key") diff --git a/tests/datastore_aioredis/test_transactions.py b/tests/datastore_aioredis/test_transactions.py index 168de008b..0f84ca684 
100644 --- a/tests/datastore_aioredis/test_transactions.py +++ b/tests/datastore_aioredis/test_transactions.py @@ -13,11 +13,12 @@ # limitations under the License. import pytest +from conftest import AIOREDIS_VERSION, SKIPIF_AIOREDIS_V1, SKIPIF_AIOREDIS_V2 +from testing_support.validators.validate_transaction_errors import ( + validate_transaction_errors, +) from newrelic.api.background_task import background_task -from testing_support.fixtures import validate_transaction_errors - -from conftest import SKIPIF_AIOREDIS_V1, SKIPIF_AIOREDIS_V2, AIOREDIS_VERSION @background_task() @@ -28,7 +29,7 @@ async def exercise(): pipe = client.pipeline(transaction=in_transaction) else: pipe = client.pipeline() # Transaction kwarg unsupported - + pipe.set("TXN", 1) return await pipe.execute() @@ -68,6 +69,7 @@ async def exercise(): @background_task() def test_pipeline_immediate_execution_no_harm(client, loop): key = "TXN_WATCH" + async def exercise(): await client.set(key, 1) @@ -94,6 +96,7 @@ async def exercise(): @background_task() def test_transaction_immediate_execution_no_harm(client, loop): key = "TXN_WATCH" + async def exercise(): async def exercise_transaction(pipe): value = int(await pipe.get(key)) @@ -118,6 +121,7 @@ async def exercise_transaction(pipe): @background_task() def test_transaction_watch_error_no_harm(client, loop): key = "TXN_WATCH" + async def exercise(): async def exercise_transaction(pipe): value = int(await pipe.get(key)) diff --git a/tests/datastore_aredis/test_custom_conn_pool.py b/tests/datastore_aredis/test_custom_conn_pool.py index 344154412..70c75de9e 100644 --- a/tests/datastore_aredis/test_custom_conn_pool.py +++ b/tests/datastore_aredis/test_custom_conn_pool.py @@ -23,8 +23,8 @@ from newrelic.api.background_task import background_task from testing_support.fixture.event_loop import event_loop as loop -from testing_support.fixtures import (validate_transaction_metrics, - override_application_settings) +from testing_support.fixtures import 
override_application_settings +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.db_settings import redis_settings from testing_support.util import instance_hostname diff --git a/tests/datastore_aredis/test_execute_command.py b/tests/datastore_aredis/test_execute_command.py index 7db3c9c59..c5b0fc332 100644 --- a/tests/datastore_aredis/test_execute_command.py +++ b/tests/datastore_aredis/test_execute_command.py @@ -17,8 +17,8 @@ from newrelic.api.background_task import background_task -from testing_support.fixtures import (validate_transaction_metrics, - override_application_settings) +from testing_support.fixtures import override_application_settings +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.fixture.event_loop import event_loop as loop from testing_support.db_settings import redis_settings from testing_support.util import instance_hostname diff --git a/tests/datastore_aredis/test_get_and_set.py b/tests/datastore_aredis/test_get_and_set.py index fbde29d86..2eeee947b 100644 --- a/tests/datastore_aredis/test_get_and_set.py +++ b/tests/datastore_aredis/test_get_and_set.py @@ -17,8 +17,8 @@ from newrelic.api.background_task import background_task from testing_support.fixture.event_loop import event_loop as loop -from testing_support.fixtures import (validate_transaction_metrics, - override_application_settings) +from testing_support.fixtures import override_application_settings +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.db_settings import redis_settings from testing_support.util import instance_hostname diff --git a/tests/datastore_aredis/test_multiple_dbs.py b/tests/datastore_aredis/test_multiple_dbs.py index e16ae9483..cb4cbac5b 100644 --- a/tests/datastore_aredis/test_multiple_dbs.py +++ b/tests/datastore_aredis/test_multiple_dbs.py @@ -15,10 +15,8 
@@ import aredis import pytest from testing_support.db_settings import redis_settings -from testing_support.fixtures import ( - override_application_settings, - validate_transaction_metrics, -) +from testing_support.fixtures import override_application_settings +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.util import instance_hostname from newrelic.api.background_task import background_task diff --git a/tests/datastore_asyncpg/test_multiple_dbs.py b/tests/datastore_asyncpg/test_multiple_dbs.py index a24eaa388..a917a9e83 100644 --- a/tests/datastore_asyncpg/test_multiple_dbs.py +++ b/tests/datastore_asyncpg/test_multiple_dbs.py @@ -17,10 +17,8 @@ import asyncpg import pytest from testing_support.db_settings import postgresql_settings -from testing_support.fixtures import ( - override_application_settings, - validate_transaction_metrics, -) +from testing_support.fixtures import override_application_settings +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.util import instance_hostname from newrelic.api.background_task import background_task diff --git a/tests/datastore_asyncpg/test_query.py b/tests/datastore_asyncpg/test_query.py index eb44cfd16..a952e062a 100644 --- a/tests/datastore_asyncpg/test_query.py +++ b/tests/datastore_asyncpg/test_query.py @@ -20,10 +20,8 @@ import asyncpg import pytest from testing_support.db_settings import postgresql_settings -from testing_support.fixtures import ( - validate_transaction_metrics, - validate_tt_collector_json, -) +from testing_support.fixtures import validate_tt_collector_json +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.util import instance_hostname from newrelic.api.background_task import background_task diff --git a/tests/datastore_bmemcached/test_memcache.py b/tests/datastore_bmemcached/test_memcache.py 
index 1bef31f59..68eee0633 100644 --- a/tests/datastore_bmemcached/test_memcache.py +++ b/tests/datastore_bmemcached/test_memcache.py @@ -19,7 +19,7 @@ from newrelic.api.background_task import background_task from newrelic.api.transaction import set_background_task -from testing_support.fixtures import validate_transaction_metrics +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.db_settings import memcached_settings diff --git a/tests/datastore_elasticsearch/test_elasticsearch.py b/tests/datastore_elasticsearch/test_elasticsearch.py index 548043216..65a0374df 100644 --- a/tests/datastore_elasticsearch/test_elasticsearch.py +++ b/tests/datastore_elasticsearch/test_elasticsearch.py @@ -17,8 +17,9 @@ from newrelic.api.background_task import background_task -from testing_support.fixtures import (validate_transaction_metrics, - validate_transaction_errors, override_application_settings) +from testing_support.fixtures import override_application_settings +from testing_support.validators.validate_transaction_errors import validate_transaction_errors +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.db_settings import elasticsearch_settings from testing_support.util import instance_hostname diff --git a/tests/datastore_elasticsearch/test_mget.py b/tests/datastore_elasticsearch/test_mget.py index 417b231d6..9f0b442e8 100644 --- a/tests/datastore_elasticsearch/test_mget.py +++ b/tests/datastore_elasticsearch/test_mget.py @@ -17,8 +17,8 @@ from elasticsearch import Elasticsearch from elasticsearch.connection_pool import RoundRobinSelector -from testing_support.fixtures import (validate_transaction_metrics, - override_application_settings) +from testing_support.fixtures import override_application_settings +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.db_settings 
import elasticsearch_settings from testing_support.util import instance_hostname diff --git a/tests/datastore_elasticsearch/test_multiple_dbs.py b/tests/datastore_elasticsearch/test_multiple_dbs.py index b4b1559a7..70a7be4f1 100644 --- a/tests/datastore_elasticsearch/test_multiple_dbs.py +++ b/tests/datastore_elasticsearch/test_multiple_dbs.py @@ -16,8 +16,8 @@ from elasticsearch import Elasticsearch -from testing_support.fixtures import (validate_transaction_metrics, - override_application_settings) +from testing_support.fixtures import override_application_settings +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.db_settings import elasticsearch_settings from testing_support.util import instance_hostname diff --git a/tests/datastore_memcache/test_memcache.py b/tests/datastore_memcache/test_memcache.py index d8afab3b1..a66c114ee 100644 --- a/tests/datastore_memcache/test_memcache.py +++ b/tests/datastore_memcache/test_memcache.py @@ -14,8 +14,8 @@ import memcache -from testing_support.fixtures import (validate_transaction_metrics, - override_application_settings) +from testing_support.fixtures import override_application_settings +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.db_settings import memcached_settings from testing_support.util import instance_hostname diff --git a/tests/datastore_memcache/test_multiple_dbs.py b/tests/datastore_memcache/test_multiple_dbs.py index b83d7dfcc..dbc3ea2b3 100644 --- a/tests/datastore_memcache/test_multiple_dbs.py +++ b/tests/datastore_memcache/test_multiple_dbs.py @@ -17,8 +17,8 @@ from newrelic.api.background_task import background_task -from testing_support.fixtures import (validate_transaction_metrics, - override_application_settings) +from testing_support.fixtures import override_application_settings +from testing_support.validators.validate_transaction_metrics import 
validate_transaction_metrics from testing_support.db_settings import memcached_settings from testing_support.util import instance_hostname diff --git a/tests/datastore_mysql/test_database.py b/tests/datastore_mysql/test_database.py index 0991d6df0..2fc8ca129 100644 --- a/tests/datastore_mysql/test_database.py +++ b/tests/datastore_mysql/test_database.py @@ -14,7 +14,7 @@ import mysql.connector -from testing_support.fixtures import validate_transaction_metrics +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.validators.validate_database_trace_inputs import validate_database_trace_inputs from testing_support.db_settings import mysql_settings diff --git a/tests/datastore_postgresql/test_database.py b/tests/datastore_postgresql/test_database.py index de53808c6..2ea930b05 100644 --- a/tests/datastore_postgresql/test_database.py +++ b/tests/datastore_postgresql/test_database.py @@ -15,7 +15,7 @@ import postgresql.driver.dbapi20 -from testing_support.fixtures import validate_transaction_metrics +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.validators.validate_database_trace_inputs import ( validate_database_trace_inputs, diff --git a/tests/datastore_psycopg2/test_async.py b/tests/datastore_psycopg2/test_async.py index 78df2beca..7af9adc6a 100644 --- a/tests/datastore_psycopg2/test_async.py +++ b/tests/datastore_psycopg2/test_async.py @@ -16,11 +16,11 @@ import psycopg2.extras import pytest -from testing_support.fixtures import (validate_transaction_metrics, - validate_transaction_errors, +from testing_support.fixtures import ( validate_stats_engine_explain_plan_output_is_none, override_application_settings) - +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics +from testing_support.validators.validate_transaction_errors import validate_transaction_errors from 
testing_support.validators.validate_database_trace_inputs import validate_database_trace_inputs from testing_support.validators.validate_transaction_slow_sql_count import ( validate_transaction_slow_sql_count) diff --git a/tests/datastore_psycopg2/test_cursor.py b/tests/datastore_psycopg2/test_cursor.py index e7a549c13..d66d73ff8 100644 --- a/tests/datastore_psycopg2/test_cursor.py +++ b/tests/datastore_psycopg2/test_cursor.py @@ -21,8 +21,8 @@ except ImportError: sql = None -from testing_support.fixtures import (validate_transaction_metrics, - override_application_settings) +from testing_support.fixtures import override_application_settings +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.validators.validate_database_trace_inputs import validate_database_trace_inputs from testing_support.util import instance_hostname from utils import DB_SETTINGS diff --git a/tests/datastore_psycopg2/test_multiple_dbs.py b/tests/datastore_psycopg2/test_multiple_dbs.py index bf7629ebf..afbdd66f2 100644 --- a/tests/datastore_psycopg2/test_multiple_dbs.py +++ b/tests/datastore_psycopg2/test_multiple_dbs.py @@ -15,8 +15,8 @@ import psycopg2 import pytest -from testing_support.fixtures import (validate_transaction_metrics, - override_application_settings) +from testing_support.fixtures import override_application_settings +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.validators.validate_database_trace_inputs import validate_database_trace_inputs from testing_support.util import instance_hostname from utils import DB_MULTIPLE_SETTINGS diff --git a/tests/datastore_psycopg2/test_register.py b/tests/datastore_psycopg2/test_register.py index 03b553749..b5450c358 100644 --- a/tests/datastore_psycopg2/test_register.py +++ b/tests/datastore_psycopg2/test_register.py @@ -16,8 +16,8 @@ import psycopg2 import psycopg2.extras -from testing_support.fixtures 
import (validate_transaction_metrics, - validate_transaction_errors) +from testing_support.validators.validate_transaction_errors import validate_transaction_errors +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from utils import DB_SETTINGS from newrelic.api.background_task import background_task diff --git a/tests/datastore_psycopg2/test_rollback.py b/tests/datastore_psycopg2/test_rollback.py index f0ef8149f..0a23b1005 100644 --- a/tests/datastore_psycopg2/test_rollback.py +++ b/tests/datastore_psycopg2/test_rollback.py @@ -15,8 +15,8 @@ import psycopg2 import pytest -from testing_support.fixtures import (validate_transaction_metrics, - override_application_settings) +from testing_support.fixtures import override_application_settings +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.validators.validate_database_trace_inputs import validate_database_trace_inputs from testing_support.util import instance_hostname from utils import DB_SETTINGS diff --git a/tests/datastore_psycopg2cffi/test_database.py b/tests/datastore_psycopg2cffi/test_database.py index c0eb6e722..54ff6ad09 100644 --- a/tests/datastore_psycopg2cffi/test_database.py +++ b/tests/datastore_psycopg2cffi/test_database.py @@ -16,8 +16,9 @@ import psycopg2cffi.extensions import psycopg2cffi.extras -from testing_support.fixtures import (validate_transaction_metrics, - validate_transaction_errors, validate_stats_engine_explain_plan_output_is_none) +from testing_support.fixtures import validate_stats_engine_explain_plan_output_is_none +from testing_support.validators.validate_transaction_errors import validate_transaction_errors +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.validators.validate_transaction_slow_sql_count import \ validate_transaction_slow_sql_count from 
testing_support.validators.validate_database_trace_inputs import validate_database_trace_inputs diff --git a/tests/datastore_pyelasticsearch/test_pyelasticsearch.py b/tests/datastore_pyelasticsearch/test_pyelasticsearch.py index 837c9ae19..deb3f68ff 100644 --- a/tests/datastore_pyelasticsearch/test_pyelasticsearch.py +++ b/tests/datastore_pyelasticsearch/test_pyelasticsearch.py @@ -15,8 +15,8 @@ import sqlite3 from pyelasticsearch import ElasticSearch -from testing_support.fixtures import (validate_transaction_metrics, - validate_transaction_errors) +from testing_support.validators.validate_transaction_errors import validate_transaction_errors +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.db_settings import elasticsearch_settings from testing_support.validators.validate_database_duration import validate_database_duration diff --git a/tests/datastore_pylibmc/test_memcache.py b/tests/datastore_pylibmc/test_memcache.py index 554581fdc..769f3b483 100644 --- a/tests/datastore_pylibmc/test_memcache.py +++ b/tests/datastore_pylibmc/test_memcache.py @@ -17,7 +17,7 @@ import pylibmc from testing_support.db_settings import memcached_settings -from testing_support.fixtures import validate_transaction_metrics +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from newrelic.api.background_task import background_task from newrelic.api.transaction import set_background_task diff --git a/tests/datastore_pymemcache/test_memcache.py b/tests/datastore_pymemcache/test_memcache.py index 12bd5da1a..9aeea4d54 100644 --- a/tests/datastore_pymemcache/test_memcache.py +++ b/tests/datastore_pymemcache/test_memcache.py @@ -16,7 +16,7 @@ import pymemcache.client -from testing_support.fixtures import validate_transaction_metrics +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.db_settings import 
memcached_settings from newrelic.api.background_task import background_task diff --git a/tests/datastore_pymongo/test_pymongo.py b/tests/datastore_pymongo/test_pymongo.py index 09ea62e0b..4649062ce 100644 --- a/tests/datastore_pymongo/test_pymongo.py +++ b/tests/datastore_pymongo/test_pymongo.py @@ -16,14 +16,11 @@ import pymongo from testing_support.db_settings import mongodb_settings -from testing_support.fixtures import ( - validate_transaction_errors, - validate_transaction_metrics, -) from testing_support.validators.validate_database_duration import ( validate_database_duration, ) - +from testing_support.validators.validate_transaction_errors import validate_transaction_errors +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from newrelic.api.background_task import background_task from newrelic.packages import six diff --git a/tests/datastore_pymysql/test_database.py b/tests/datastore_pymysql/test_database.py index 2f20d8cbd..5943b1266 100644 --- a/tests/datastore_pymysql/test_database.py +++ b/tests/datastore_pymysql/test_database.py @@ -14,8 +14,7 @@ import pymysql -from testing_support.fixtures import (validate_transaction_metrics, - ) +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.validators.validate_database_trace_inputs import validate_database_trace_inputs from testing_support.db_settings import mysql_settings diff --git a/tests/datastore_pysolr/test_solr.py b/tests/datastore_pysolr/test_solr.py index 785c3cb9f..a987a29ac 100644 --- a/tests/datastore_pysolr/test_solr.py +++ b/tests/datastore_pysolr/test_solr.py @@ -14,7 +14,7 @@ from pysolr import Solr -from testing_support.fixtures import validate_transaction_metrics +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.db_settings import solr_settings from newrelic.api.background_task import background_task diff --git 
a/tests/datastore_redis/test_custom_conn_pool.py b/tests/datastore_redis/test_custom_conn_pool.py index 9700392cc..156c9ce31 100644 --- a/tests/datastore_redis/test_custom_conn_pool.py +++ b/tests/datastore_redis/test_custom_conn_pool.py @@ -22,8 +22,8 @@ from newrelic.api.background_task import background_task -from testing_support.fixtures import (validate_transaction_metrics, - override_application_settings) +from testing_support.fixtures import override_application_settings +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.db_settings import redis_settings from testing_support.util import instance_hostname diff --git a/tests/datastore_redis/test_execute_command.py b/tests/datastore_redis/test_execute_command.py index c86295b35..747588072 100644 --- a/tests/datastore_redis/test_execute_command.py +++ b/tests/datastore_redis/test_execute_command.py @@ -17,8 +17,8 @@ from newrelic.api.background_task import background_task -from testing_support.fixtures import (validate_transaction_metrics, - override_application_settings) +from testing_support.fixtures import override_application_settings +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.db_settings import redis_settings from testing_support.util import instance_hostname diff --git a/tests/datastore_redis/test_get_and_set.py b/tests/datastore_redis/test_get_and_set.py index 2c40ddc12..0e2df4bb1 100644 --- a/tests/datastore_redis/test_get_and_set.py +++ b/tests/datastore_redis/test_get_and_set.py @@ -16,8 +16,8 @@ from newrelic.api.background_task import background_task -from testing_support.fixtures import (validate_transaction_metrics, - override_application_settings) +from testing_support.fixtures import override_application_settings +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.db_settings import 
redis_settings from testing_support.util import instance_hostname diff --git a/tests/datastore_redis/test_multiple_dbs.py b/tests/datastore_redis/test_multiple_dbs.py index 67f7b7fe8..15777cc38 100644 --- a/tests/datastore_redis/test_multiple_dbs.py +++ b/tests/datastore_redis/test_multiple_dbs.py @@ -17,8 +17,8 @@ from newrelic.api.background_task import background_task -from testing_support.fixtures import (validate_transaction_metrics, - override_application_settings) +from testing_support.fixtures import override_application_settings +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.db_settings import redis_settings from testing_support.util import instance_hostname diff --git a/tests/datastore_redis/test_rb.py b/tests/datastore_redis/test_rb.py index 7f4feeda7..5678c2787 100644 --- a/tests/datastore_redis/test_rb.py +++ b/tests/datastore_redis/test_rb.py @@ -24,8 +24,8 @@ from newrelic.api.background_task import background_task -from testing_support.fixtures import (validate_transaction_metrics, - override_application_settings) +from testing_support.fixtures import override_application_settings +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.db_settings import redis_settings from testing_support.util import instance_hostname diff --git a/tests/datastore_solrpy/test_solr.py b/tests/datastore_solrpy/test_solr.py index 86dc23d4a..ee1a7e91e 100644 --- a/tests/datastore_solrpy/test_solr.py +++ b/tests/datastore_solrpy/test_solr.py @@ -14,7 +14,7 @@ from solr import SolrConnection -from testing_support.fixtures import validate_transaction_metrics +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.db_settings import solr_settings from newrelic.api.background_task import background_task diff --git a/tests/datastore_sqlite/test_database.py 
b/tests/datastore_sqlite/test_database.py index 5443ca1c9..4ca534c39 100644 --- a/tests/datastore_sqlite/test_database.py +++ b/tests/datastore_sqlite/test_database.py @@ -18,7 +18,7 @@ is_pypy = hasattr(sys, 'pypy_version_info') -from testing_support.fixtures import validate_transaction_metrics +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.validators.validate_database_trace_inputs import validate_database_trace_inputs from newrelic.api.background_task import background_task diff --git a/tests/datastore_umemcache/test_memcache.py b/tests/datastore_umemcache/test_memcache.py index ce6475a49..4683a20d5 100644 --- a/tests/datastore_umemcache/test_memcache.py +++ b/tests/datastore_umemcache/test_memcache.py @@ -17,7 +17,7 @@ import umemcache from testing_support.db_settings import memcached_settings -from testing_support.fixtures import validate_transaction_metrics +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from newrelic.api.background_task import background_task from newrelic.api.transaction import set_background_task diff --git a/tests/external_boto3/test_boto3_iam.py b/tests/external_boto3/test_boto3_iam.py index 9c6246c8c..ac49214f4 100644 --- a/tests/external_boto3/test_boto3_iam.py +++ b/tests/external_boto3/test_boto3_iam.py @@ -19,10 +19,11 @@ import moto from newrelic.api.background_task import background_task -from testing_support.fixtures import (validate_transaction_metrics, +from testing_support.fixtures import ( validate_tt_segment_params, override_application_settings) from testing_support.validators.validate_span_events import ( validate_span_events) +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics MOTO_VERSION = tuple(int(v) for v in moto.__version__.split('.')[:3]) diff --git a/tests/external_boto3/test_boto3_s3.py b/tests/external_boto3/test_boto3_s3.py index 
765be1826..a7ecf034a 100644 --- a/tests/external_boto3/test_boto3_s3.py +++ b/tests/external_boto3/test_boto3_s3.py @@ -18,11 +18,11 @@ import boto3 import botocore import moto -from testing_support.fixtures import ( - override_application_settings, +from testing_support.fixtures import override_application_settings +from testing_support.validators.validate_span_events import validate_span_events +from testing_support.validators.validate_transaction_metrics import ( validate_transaction_metrics, ) -from testing_support.validators.validate_span_events import validate_span_events from newrelic.api.background_task import background_task diff --git a/tests/external_boto3/test_boto3_sns.py b/tests/external_boto3/test_boto3_sns.py index 38c8c951c..3718d5292 100644 --- a/tests/external_boto3/test_boto3_sns.py +++ b/tests/external_boto3/test_boto3_sns.py @@ -18,10 +18,10 @@ import pytest from newrelic.api.background_task import background_task -from testing_support.fixtures import (validate_transaction_metrics, +from testing_support.fixtures import ( validate_tt_segment_params, override_application_settings) -from testing_support.validators.validate_span_events import ( - validate_span_events) +from testing_support.validators.validate_span_events import validate_span_events +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics MOTO_VERSION = tuple(int(v) for v in moto.__version__.split('.')[:3]) diff --git a/tests/external_botocore/test_botocore_dynamodb.py b/tests/external_botocore/test_botocore_dynamodb.py index eb0432aba..44862d827 100644 --- a/tests/external_botocore/test_botocore_dynamodb.py +++ b/tests/external_botocore/test_botocore_dynamodb.py @@ -19,10 +19,11 @@ import moto from newrelic.api.background_task import background_task -from testing_support.fixtures import (validate_transaction_metrics, +from testing_support.fixtures import ( validate_tt_segment_params, override_application_settings) from 
testing_support.validators.validate_span_events import ( validate_span_events) +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics MOTO_VERSION = tuple(int(v) for v in moto.__version__.split('.')[:3]) diff --git a/tests/external_botocore/test_botocore_ec2.py b/tests/external_botocore/test_botocore_ec2.py index 6f91ad75f..0cfd09b6f 100644 --- a/tests/external_botocore/test_botocore_ec2.py +++ b/tests/external_botocore/test_botocore_ec2.py @@ -19,10 +19,11 @@ import moto from newrelic.api.background_task import background_task -from testing_support.fixtures import (validate_transaction_metrics, +from testing_support.fixtures import ( validate_tt_segment_params, override_application_settings) from testing_support.validators.validate_span_events import ( validate_span_events) +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics MOTO_VERSION = tuple(int(v) for v in moto.__version__.split('.')[:3]) diff --git a/tests/external_botocore/test_botocore_s3.py b/tests/external_botocore/test_botocore_s3.py index 51bbb12e8..1984d8103 100644 --- a/tests/external_botocore/test_botocore_s3.py +++ b/tests/external_botocore/test_botocore_s3.py @@ -18,11 +18,11 @@ import botocore import botocore.session import moto -from testing_support.fixtures import ( - override_application_settings, +from testing_support.fixtures import override_application_settings +from testing_support.validators.validate_span_events import validate_span_events +from testing_support.validators.validate_transaction_metrics import ( validate_transaction_metrics, ) -from testing_support.validators.validate_span_events import validate_span_events from newrelic.api.background_task import background_task diff --git a/tests/external_botocore/test_botocore_sqs.py b/tests/external_botocore/test_botocore_sqs.py index a009e9f0b..3f7d8c022 100644 --- a/tests/external_botocore/test_botocore_sqs.py +++ 
b/tests/external_botocore/test_botocore_sqs.py @@ -18,11 +18,11 @@ import botocore.session import moto import pytest -from testing_support.fixtures import ( - override_application_settings, +from testing_support.fixtures import override_application_settings +from testing_support.validators.validate_span_events import validate_span_events +from testing_support.validators.validate_transaction_metrics import ( validate_transaction_metrics, ) -from testing_support.validators.validate_span_events import validate_span_events from newrelic.api.background_task import background_task from newrelic.common.package_version_utils import get_package_version @@ -41,7 +41,7 @@ url = "queue.amazonaws.com" AWS_ACCESS_KEY_ID = "AAAAAAAAAAAACCESSKEY" -AWS_SECRET_ACCESS_KEY = "AAAAAASECRETKEY" +AWS_SECRET_ACCESS_KEY = "AAAAAASECRETKEY" # nosec AWS_REGION = "us-east-1" TEST_QUEUE = "python-agent-test-%s" % uuid.uuid4() diff --git a/tests/external_feedparser/test_feedparser.py b/tests/external_feedparser/test_feedparser.py index 9cb175711..5e515cfc3 100644 --- a/tests/external_feedparser/test_feedparser.py +++ b/tests/external_feedparser/test_feedparser.py @@ -14,7 +14,7 @@ import pytest from newrelic.api.background_task import background_task -from testing_support.fixtures import validate_transaction_metrics +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics @pytest.fixture(scope="session") diff --git a/tests/external_http/test_http.py b/tests/external_http/test_http.py index 16bef3f2a..e08518f5f 100644 --- a/tests/external_http/test_http.py +++ b/tests/external_http/test_http.py @@ -21,8 +21,8 @@ from testing_support.fixtures import ( cat_enabled, override_application_settings, - validate_transaction_metrics, ) +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.validators.validate_cross_process_headers import ( validate_cross_process_headers, ) diff --git 
a/tests/external_httplib/test_httplib.py b/tests/external_httplib/test_httplib.py index 69f790846..c7747f8ff 100644 --- a/tests/external_httplib/test_httplib.py +++ b/tests/external_httplib/test_httplib.py @@ -26,9 +26,9 @@ from testing_support.fixtures import ( cat_enabled, override_application_settings, - validate_transaction_metrics, validate_tt_segment_params, ) +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.validators.validate_cross_process_headers import ( validate_cross_process_headers, ) diff --git a/tests/external_httplib/test_urllib.py b/tests/external_httplib/test_urllib.py index 38cf4f713..cea88a8dd 100644 --- a/tests/external_httplib/test_urllib.py +++ b/tests/external_httplib/test_urllib.py @@ -25,7 +25,8 @@ cache_outgoing_headers, insert_incoming_headers, ) -from testing_support.fixtures import cat_enabled, validate_transaction_metrics +from testing_support.fixtures import cat_enabled +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.validators.validate_cross_process_headers import ( validate_cross_process_headers, ) diff --git a/tests/external_httplib/test_urllib2.py b/tests/external_httplib/test_urllib2.py index cbcb25a2f..62ed23074 100644 --- a/tests/external_httplib/test_urllib2.py +++ b/tests/external_httplib/test_urllib2.py @@ -25,7 +25,8 @@ cache_outgoing_headers, insert_incoming_headers, ) -from testing_support.fixtures import cat_enabled, validate_transaction_metrics +from testing_support.fixtures import cat_enabled +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.validators.validate_cross_process_headers import ( validate_cross_process_headers, ) diff --git a/tests/external_httplib2/test_httplib2.py b/tests/external_httplib2/test_httplib2.py index d3e71e7b3..288aa84ee 100644 --- a/tests/external_httplib2/test_httplib2.py +++ 
b/tests/external_httplib2/test_httplib2.py @@ -21,7 +21,6 @@ from testing_support.fixtures import ( cat_enabled, override_application_settings, - validate_transaction_metrics, ) from testing_support.validators.validate_cross_process_headers import ( validate_cross_process_headers, @@ -29,6 +28,7 @@ from testing_support.validators.validate_external_node_params import ( validate_external_node_params, ) +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from newrelic.api.background_task import background_task diff --git a/tests/external_httpx/test_client.py b/tests/external_httpx/test_client.py index 1d170a5fa..87a1bc7d0 100644 --- a/tests/external_httpx/test_client.py +++ b/tests/external_httpx/test_client.py @@ -19,8 +19,6 @@ dt_enabled, override_application_settings, override_generic_settings, - validate_transaction_errors, - validate_transaction_metrics, validate_tt_segment_params, ) from testing_support.mock_external_http_server import ( @@ -30,6 +28,8 @@ validate_cross_process_headers, ) from testing_support.validators.validate_span_events import validate_span_events +from testing_support.validators.validate_transaction_errors import validate_transaction_errors +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from newrelic.api.background_task import background_task from newrelic.api.time_trace import current_trace diff --git a/tests/external_requests/test_requests.py b/tests/external_requests/test_requests.py index b61cf36df..f6f4506e5 100644 --- a/tests/external_requests/test_requests.py +++ b/tests/external_requests/test_requests.py @@ -22,8 +22,6 @@ from testing_support.fixtures import ( cat_enabled, override_application_settings, - validate_transaction_errors, - validate_transaction_metrics, validate_tt_parenting, ) from testing_support.validators.validate_cross_process_headers import ( @@ -32,7 +30,8 @@ from 
testing_support.validators.validate_external_node_params import ( validate_external_node_params, ) - +from testing_support.validators.validate_transaction_errors import validate_transaction_errors +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from newrelic.api.background_task import background_task diff --git a/tests/external_urllib3/test_urllib3.py b/tests/external_urllib3/test_urllib3.py index 93287e000..68e15d463 100644 --- a/tests/external_urllib3/test_urllib3.py +++ b/tests/external_urllib3/test_urllib3.py @@ -28,8 +28,6 @@ from testing_support.fixtures import ( cat_enabled, override_application_settings, - validate_transaction_errors, - validate_transaction_metrics, ) from testing_support.util import version2tuple from testing_support.validators.validate_cross_process_headers import ( @@ -38,7 +36,8 @@ from testing_support.validators.validate_external_node_params import ( validate_external_node_params, ) - +from testing_support.validators.validate_transaction_errors import validate_transaction_errors +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from newrelic.api.background_task import background_task diff --git a/tests/framework_aiohttp/test_client.py b/tests/framework_aiohttp/test_client.py index 69649109d..96bbb46f0 100644 --- a/tests/framework_aiohttp/test_client.py +++ b/tests/framework_aiohttp/test_client.py @@ -16,7 +16,9 @@ import aiohttp import pytest -from testing_support.fixtures import validate_transaction_metrics +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) from yarl import URL from newrelic.api.background_task import background_task diff --git a/tests/framework_aiohttp/test_client_async_await.py b/tests/framework_aiohttp/test_client_async_await.py index 1e3eb79ec..dedc64c9d 100644 --- a/tests/framework_aiohttp/test_client_async_await.py +++ 
b/tests/framework_aiohttp/test_client_async_await.py @@ -16,7 +16,8 @@ import aiohttp import pytest -from testing_support.fixtures import cat_enabled, validate_transaction_metrics +from testing_support.fixtures import cat_enabled +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from yarl import URL from newrelic.api.background_task import background_task diff --git a/tests/framework_aiohttp/test_client_cat.py b/tests/framework_aiohttp/test_client_cat.py index c1177db0a..887743429 100644 --- a/tests/framework_aiohttp/test_client_cat.py +++ b/tests/framework_aiohttp/test_client_cat.py @@ -17,16 +17,16 @@ import aiohttp import pytest from testing_support.external_fixtures import create_incoming_headers -from testing_support.fixtures import ( - override_application_settings, - validate_transaction_metrics, -) +from testing_support.fixtures import override_application_settings from testing_support.validators.validate_cross_process_headers import ( validate_cross_process_headers, ) from testing_support.validators.validate_external_node_params import ( validate_external_node_params, ) +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) from newrelic.api.background_task import background_task from newrelic.api.external_trace import ExternalTrace diff --git a/tests/framework_aiohttp/test_externals.py b/tests/framework_aiohttp/test_externals.py index 7cb849bb8..acbc4ca81 100644 --- a/tests/framework_aiohttp/test_externals.py +++ b/tests/framework_aiohttp/test_externals.py @@ -12,7 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from testing_support.fixtures import validate_transaction_metrics, validate_tt_parenting + +from testing_support.fixtures import validate_tt_parenting +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) expected_parenting = ( "TransactionNode", diff --git a/tests/framework_aiohttp/test_middleware.py b/tests/framework_aiohttp/test_middleware.py index b383b4d5e..6cbf86677 100644 --- a/tests/framework_aiohttp/test_middleware.py +++ b/tests/framework_aiohttp/test_middleware.py @@ -14,13 +14,13 @@ import aiohttp import pytest -from testing_support.fixtures import ( - override_generic_settings, - validate_transaction_metrics, -) +from testing_support.fixtures import override_generic_settings from testing_support.validators.validate_code_level_metrics import ( validate_code_level_metrics, ) +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) from newrelic.core.config import global_settings diff --git a/tests/framework_aiohttp/test_server.py b/tests/framework_aiohttp/test_server.py index 70dd1e496..aa6218c28 100644 --- a/tests/framework_aiohttp/test_server.py +++ b/tests/framework_aiohttp/test_server.py @@ -21,13 +21,19 @@ override_application_settings, override_generic_settings, override_ignore_status_codes, - validate_transaction_errors, - validate_transaction_event_attributes, - validate_transaction_metrics, ) from testing_support.validators.validate_code_level_metrics import ( validate_code_level_metrics, ) +from testing_support.validators.validate_transaction_errors import ( + validate_transaction_errors, +) +from testing_support.validators.validate_transaction_event_attributes import ( + validate_transaction_event_attributes, +) +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) from newrelic.core.config import global_settings diff --git a/tests/framework_aiohttp/test_server_cat.py 
b/tests/framework_aiohttp/test_server_cat.py index 0c325215d..44b5c7217 100644 --- a/tests/framework_aiohttp/test_server_cat.py +++ b/tests/framework_aiohttp/test_server_cat.py @@ -19,6 +19,8 @@ make_cross_agent_headers, override_application_settings, validate_analytics_catmap_data, +) +from testing_support.validators.validate_transaction_event_attributes import ( validate_transaction_event_attributes, ) @@ -35,7 +37,7 @@ def record_aiohttp1_raw_headers(raw_headers): try: - import aiohttp.protocol # noqa + import aiohttp.protocol # noqa: F401 except ImportError: def pass_through(function): diff --git a/tests/framework_ariadne/test_application.py b/tests/framework_ariadne/test_application.py index f0b3587b8..ed60397b9 100644 --- a/tests/framework_ariadne/test_application.py +++ b/tests/framework_ariadne/test_application.py @@ -13,15 +13,11 @@ # limitations under the License. import pytest -from testing_support.fixtures import ( - dt_enabled, - validate_transaction_errors, - validate_transaction_metrics, -) +from testing_support.fixtures import dt_enabled from testing_support.validators.validate_span_events import validate_span_events -from testing_support.validators.validate_transaction_count import ( - validate_transaction_count, -) +from testing_support.validators.validate_transaction_errors import validate_transaction_errors +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics +from testing_support.validators.validate_transaction_count import validate_transaction_count from newrelic.api.background_task import background_task from newrelic.common.object_names import callable_name diff --git a/tests/framework_ariadne/test_application_async.py b/tests/framework_ariadne/test_application_async.py index 8e46752f2..ada34ffad 100644 --- a/tests/framework_ariadne/test_application_async.py +++ b/tests/framework_ariadne/test_application_async.py @@ -15,7 +15,8 @@ import asyncio import pytest -from testing_support.fixtures import 
dt_enabled, validate_transaction_metrics +from testing_support.fixtures import dt_enabled +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.validators.validate_span_events import validate_span_events from newrelic.api.background_task import background_task diff --git a/tests/framework_ariadne/test_asgi.py b/tests/framework_ariadne/test_asgi.py index 6275e781f..861f2aa93 100644 --- a/tests/framework_ariadne/test_asgi.py +++ b/tests/framework_ariadne/test_asgi.py @@ -16,7 +16,8 @@ import pytest from testing_support.asgi_testing import AsgiTest -from testing_support.fixtures import dt_enabled, validate_transaction_metrics +from testing_support.fixtures import dt_enabled +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.validators.validate_span_events import validate_span_events diff --git a/tests/framework_ariadne/test_wsgi.py b/tests/framework_ariadne/test_wsgi.py index 2c11276ed..9ce2373d4 100644 --- a/tests/framework_ariadne/test_wsgi.py +++ b/tests/framework_ariadne/test_wsgi.py @@ -14,7 +14,8 @@ import pytest import webtest -from testing_support.fixtures import dt_enabled, validate_transaction_metrics +from testing_support.fixtures import dt_enabled +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.validators.validate_span_events import validate_span_events diff --git a/tests/framework_bottle/test_application.py b/tests/framework_bottle/test_application.py index f9fb0915e..28619d5eb 100644 --- a/tests/framework_bottle/test_application.py +++ b/tests/framework_bottle/test_application.py @@ -15,12 +15,13 @@ import pytest import base64 -from testing_support.fixtures import (validate_transaction_metrics, - validate_transaction_errors, override_ignore_status_codes, +from testing_support.fixtures import ( + override_ignore_status_codes, 
override_application_settings) - +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from newrelic.packages import six from testing_support.validators.validate_code_level_metrics import validate_code_level_metrics +from testing_support.validators.validate_transaction_errors import validate_transaction_errors import webtest diff --git a/tests/framework_cherrypy/test_application.py b/tests/framework_cherrypy/test_application.py index 1dd7f837d..39f8b5c16 100644 --- a/tests/framework_cherrypy/test_application.py +++ b/tests/framework_cherrypy/test_application.py @@ -17,9 +17,11 @@ from newrelic.packages import six -from testing_support.fixtures import (validate_transaction_errors, - override_application_settings, override_ignore_status_codes) +from testing_support.fixtures import ( + override_application_settings, + override_ignore_status_codes) from testing_support.validators.validate_code_level_metrics import validate_code_level_metrics +from testing_support.validators.validate_transaction_errors import validate_transaction_errors import cherrypy diff --git a/tests/framework_cherrypy/test_dispatch.py b/tests/framework_cherrypy/test_dispatch.py index 0a3f8e7c7..64dccb214 100644 --- a/tests/framework_cherrypy/test_dispatch.py +++ b/tests/framework_cherrypy/test_dispatch.py @@ -17,7 +17,7 @@ from newrelic.packages import six -from testing_support.fixtures import validate_transaction_errors +from testing_support.validators.validate_transaction_errors import validate_transaction_errors import cherrypy diff --git a/tests/framework_cherrypy/test_resource.py b/tests/framework_cherrypy/test_resource.py index 09c68f9fc..385d28d91 100644 --- a/tests/framework_cherrypy/test_resource.py +++ b/tests/framework_cherrypy/test_resource.py @@ -14,7 +14,7 @@ import webtest -from testing_support.fixtures import validate_transaction_errors +from testing_support.validators.validate_transaction_errors import validate_transaction_errors from 
testing_support.validators.validate_code_level_metrics import validate_code_level_metrics import cherrypy diff --git a/tests/framework_cherrypy/test_routes.py b/tests/framework_cherrypy/test_routes.py index 7c1b23181..9111a29ce 100644 --- a/tests/framework_cherrypy/test_routes.py +++ b/tests/framework_cherrypy/test_routes.py @@ -16,7 +16,7 @@ import sys import webtest -from testing_support.fixtures import validate_transaction_errors +from testing_support.validators.validate_transaction_errors import validate_transaction_errors from testing_support.validators.validate_code_level_metrics import validate_code_level_metrics import cherrypy diff --git a/tests/framework_django/test_application.py b/tests/framework_django/test_application.py index 1876b9a69..1f2616b0f 100644 --- a/tests/framework_django/test_application.py +++ b/tests/framework_django/test_application.py @@ -12,11 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. -from testing_support.fixtures import (validate_transaction_metrics, - validate_transaction_errors, override_application_settings, +from testing_support.fixtures import ( + override_application_settings, override_generic_settings, override_ignore_status_codes) from testing_support.validators.validate_code_level_metrics import validate_code_level_metrics from newrelic.hooks.framework_django import django_settings +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics +from testing_support.validators.validate_transaction_errors import validate_transaction_errors import os diff --git a/tests/framework_django/test_asgi_application.py b/tests/framework_django/test_asgi_application.py index 98f157f85..457042766 100644 --- a/tests/framework_django/test_asgi_application.py +++ b/tests/framework_django/test_asgi_application.py @@ -18,10 +18,12 @@ from newrelic.core.config import global_settings from newrelic.common.encoding_utils import 
gzip_decompress -from testing_support.fixtures import (validate_transaction_metrics, - validate_transaction_errors, override_application_settings, +from testing_support.fixtures import ( + override_application_settings, override_generic_settings, override_ignore_status_codes) from testing_support.validators.validate_code_level_metrics import validate_code_level_metrics +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics +from testing_support.validators.validate_transaction_errors import validate_transaction_errors DJANGO_VERSION = tuple(map(int, django.get_version().split('.')[:2])) diff --git a/tests/framework_falcon/test_application.py b/tests/framework_falcon/test_application.py index 6f59ea826..6b64c8c67 100644 --- a/tests/framework_falcon/test_application.py +++ b/tests/framework_falcon/test_application.py @@ -14,10 +14,12 @@ import pytest from newrelic.core.config import global_settings -from testing_support.fixtures import (validate_transaction_metrics, - validate_transaction_errors, override_ignore_status_codes, +from testing_support.fixtures import ( + override_ignore_status_codes, override_generic_settings) from testing_support.validators.validate_code_level_metrics import validate_code_level_metrics +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics +from testing_support.validators.validate_transaction_errors import validate_transaction_errors SETTINGS = global_settings() diff --git a/tests/framework_fastapi/test_application.py b/tests/framework_fastapi/test_application.py index 8fe4be34f..85d230e26 100644 --- a/tests/framework_fastapi/test_application.py +++ b/tests/framework_fastapi/test_application.py @@ -15,7 +15,7 @@ import logging import pytest -from testing_support.fixtures import validate_transaction_metrics +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from 
testing_support.validators.validate_code_level_metrics import validate_code_level_metrics diff --git a/tests/framework_flask/test_application.py b/tests/framework_flask/test_application.py index 98757a805..de7a43019 100644 --- a/tests/framework_flask/test_application.py +++ b/tests/framework_flask/test_application.py @@ -14,10 +14,12 @@ import pytest -from testing_support.fixtures import (validate_transaction_metrics, - validate_transaction_errors, override_application_settings, +from testing_support.fixtures import ( + override_application_settings, validate_tt_parenting) from testing_support.validators.validate_code_level_metrics import validate_code_level_metrics +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics +from testing_support.validators.validate_transaction_errors import validate_transaction_errors from newrelic.packages import six diff --git a/tests/framework_flask/test_blueprints.py b/tests/framework_flask/test_blueprints.py index 6a76d405b..4a1e361fb 100644 --- a/tests/framework_flask/test_blueprints.py +++ b/tests/framework_flask/test_blueprints.py @@ -14,9 +14,10 @@ import pytest -from testing_support.fixtures import (validate_transaction_metrics, - validate_transaction_errors, override_application_settings) +from testing_support.fixtures import override_application_settings from testing_support.validators.validate_code_level_metrics import validate_code_level_metrics +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics +from testing_support.validators.validate_transaction_errors import validate_transaction_errors from newrelic.packages import six diff --git a/tests/framework_flask/test_compress.py b/tests/framework_flask/test_compress.py index ac7f323fd..f6feb01ee 100644 --- a/tests/framework_flask/test_compress.py +++ b/tests/framework_flask/test_compress.py @@ -12,8 +12,9 @@ # See the License for the specific language governing permissions and # 
limitations under the License. -from testing_support.fixtures import (validate_transaction_metrics, - validate_transaction_errors, override_application_settings) +from testing_support.fixtures import override_application_settings +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics +from testing_support.validators.validate_transaction_errors import validate_transaction_errors from testing_support.validators.validate_code_level_metrics import validate_code_level_metrics diff --git a/tests/framework_flask/test_middleware.py b/tests/framework_flask/test_middleware.py index d92d4d851..3c81ebc47 100644 --- a/tests/framework_flask/test_middleware.py +++ b/tests/framework_flask/test_middleware.py @@ -14,9 +14,10 @@ import pytest -from testing_support.fixtures import (validate_transaction_metrics, - validate_transaction_errors, override_application_settings) +from testing_support.fixtures import override_application_settings from testing_support.validators.validate_code_level_metrics import validate_code_level_metrics +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics +from testing_support.validators.validate_transaction_errors import validate_transaction_errors def target_application(): # We need to delay Flask application creation because of ordering diff --git a/tests/framework_flask/test_not_found.py b/tests/framework_flask/test_not_found.py index 22ad5efcd..c1c55475e 100644 --- a/tests/framework_flask/test_not_found.py +++ b/tests/framework_flask/test_not_found.py @@ -14,8 +14,8 @@ import pytest -from testing_support.fixtures import (validate_transaction_metrics, - validate_transaction_errors) +from testing_support.validators.validate_transaction_errors import validate_transaction_errors +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics def target_application(): # We need to delay Flask application creation because of ordering 
diff --git a/tests/framework_flask/test_user_exceptions.py b/tests/framework_flask/test_user_exceptions.py index 5c8f3a658..844b4b9ef 100644 --- a/tests/framework_flask/test_user_exceptions.py +++ b/tests/framework_flask/test_user_exceptions.py @@ -14,8 +14,8 @@ import pytest -from testing_support.fixtures import (validate_transaction_metrics, - validate_transaction_errors) +from testing_support.validators.validate_transaction_errors import validate_transaction_errors +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics def target_application(): # We need to delay Flask application creation because of ordering diff --git a/tests/framework_flask/test_views.py b/tests/framework_flask/test_views.py index 698ddd4f4..0338169c7 100644 --- a/tests/framework_flask/test_views.py +++ b/tests/framework_flask/test_views.py @@ -16,13 +16,15 @@ async_handler_support, skip_if_not_async_handler_support, ) -from testing_support.fixtures import ( - validate_transaction_errors, - validate_transaction_metrics, -) from testing_support.validators.validate_code_level_metrics import ( validate_code_level_metrics, ) +from testing_support.validators.validate_transaction_errors import ( + validate_transaction_errors, +) +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) scoped_metrics = [ ("Function/flask.app:Flask.wsgi_app", 1), diff --git a/tests/framework_graphene/test_application.py b/tests/framework_graphene/test_application.py index b4e8e0739..b9d374a3c 100644 --- a/tests/framework_graphene/test_application.py +++ b/tests/framework_graphene/test_application.py @@ -14,15 +14,13 @@ import pytest import six -from testing_support.fixtures import ( - dt_enabled, - validate_transaction_errors, - validate_transaction_metrics, -) +from testing_support.fixtures import dt_enabled from testing_support.validators.validate_span_events import validate_span_events from 
testing_support.validators.validate_transaction_count import ( validate_transaction_count, ) +from testing_support.validators.validate_transaction_errors import validate_transaction_errors +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from newrelic.api.background_task import background_task from newrelic.common.object_names import callable_name diff --git a/tests/framework_graphql/test_application.py b/tests/framework_graphql/test_application.py index 770c6f6e1..56dc3a738 100644 --- a/tests/framework_graphql/test_application.py +++ b/tests/framework_graphql/test_application.py @@ -13,16 +13,14 @@ # limitations under the License. import pytest -from testing_support.fixtures import ( - dt_enabled, - validate_transaction_errors, - validate_transaction_metrics, -) +from testing_support.fixtures import dt_enabled from testing_support.validators.validate_span_events import validate_span_events from testing_support.validators.validate_transaction_count import ( validate_transaction_count, ) from testing_support.validators.validate_code_level_metrics import validate_code_level_metrics +from testing_support.validators.validate_transaction_errors import validate_transaction_errors +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from newrelic.api.background_task import background_task from newrelic.common.object_names import callable_name diff --git a/tests/framework_graphql/test_application_async.py b/tests/framework_graphql/test_application_async.py index 19b8b1493..28b435c43 100644 --- a/tests/framework_graphql/test_application_async.py +++ b/tests/framework_graphql/test_application_async.py @@ -16,7 +16,8 @@ import pytest from test_application import is_graphql_2 -from testing_support.fixtures import dt_enabled, validate_transaction_metrics +from testing_support.fixtures import dt_enabled +from testing_support.validators.validate_transaction_metrics import 
validate_transaction_metrics from testing_support.validators.validate_span_events import validate_span_events from newrelic.api.background_task import background_task diff --git a/tests/framework_grpc/test_clients.py b/tests/framework_grpc/test_clients.py index e8fed1da5..c6ada806b 100644 --- a/tests/framework_grpc/test_clients.py +++ b/tests/framework_grpc/test_clients.py @@ -18,8 +18,8 @@ from newrelic.api.background_task import background_task -from testing_support.fixtures import (validate_transaction_metrics, - validate_transaction_errors) +from testing_support.validators.validate_transaction_errors import validate_transaction_errors +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from _test_common import create_request, get_result diff --git a/tests/framework_grpc/test_distributed_tracing.py b/tests/framework_grpc/test_distributed_tracing.py index 7cd134ca0..7f253651d 100644 --- a/tests/framework_grpc/test_distributed_tracing.py +++ b/tests/framework_grpc/test_distributed_tracing.py @@ -21,12 +21,11 @@ from newrelic.common.encoding_utils import ( DistributedTracePayload, W3CTraceParent, W3CTraceState, NrTraceState) -from testing_support.fixtures import (override_application_settings, - validate_transaction_metrics) +from testing_support.fixtures import override_application_settings from testing_support.validators.validate_span_events import ( validate_span_events) from _test_common import create_request, wait_for_transaction_completion - +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics _test_matrix = ('method_name,streaming_request', ( ('DoUnaryUnary', False), diff --git a/tests/framework_grpc/test_server.py b/tests/framework_grpc/test_server.py index d1ed47ff5..534d78005 100644 --- a/tests/framework_grpc/test_server.py +++ b/tests/framework_grpc/test_server.py @@ -18,12 +18,13 @@ from conftest import create_stub_and_channel from _test_common import 
create_request, wait_for_transaction_completion from newrelic.core.config import global_settings -from testing_support.fixtures import (validate_transaction_metrics, - validate_transaction_event_attributes, override_application_settings, - override_generic_settings, function_not_called, - validate_transaction_errors) +from testing_support.fixtures import ( + override_application_settings, + override_generic_settings, function_not_called) from testing_support.validators.validate_code_level_metrics import validate_code_level_metrics - +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics +from testing_support.validators.validate_transaction_errors import validate_transaction_errors +from testing_support.validators.validate_transaction_event_attributes import validate_transaction_event_attributes def select_python_version(py2, py3): return six.PY3 and py3 or py2 diff --git a/tests/framework_pyramid/test_append_slash_app.py b/tests/framework_pyramid/test_append_slash_app.py index 875f91012..f09e14b55 100644 --- a/tests/framework_pyramid/test_append_slash_app.py +++ b/tests/framework_pyramid/test_append_slash_app.py @@ -34,9 +34,9 @@ import pytest import re -from testing_support.fixtures import (validate_transaction_metrics, - validate_transaction_errors, override_application_settings) - +from testing_support.fixtures import override_application_settings +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics +from testing_support.validators.validate_transaction_errors import validate_transaction_errors def _to_int(version_str): m = re.match(r'\d+', version_str) diff --git a/tests/framework_pyramid/test_application.py b/tests/framework_pyramid/test_application.py index 6b912d632..132e0f72a 100644 --- a/tests/framework_pyramid/test_application.py +++ b/tests/framework_pyramid/test_application.py @@ -14,8 +14,9 @@ import pytest -from testing_support.fixtures import 
(validate_transaction_metrics, - validate_transaction_errors, override_application_settings) +from testing_support.fixtures import override_application_settings +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics +from testing_support.validators.validate_transaction_errors import validate_transaction_errors from newrelic.packages import six from testing_support.validators.validate_code_level_metrics import validate_code_level_metrics diff --git a/tests/framework_pyramid/test_cornice.py b/tests/framework_pyramid/test_cornice.py index 8b8dc58d2..fe36831e0 100644 --- a/tests/framework_pyramid/test_cornice.py +++ b/tests/framework_pyramid/test_cornice.py @@ -14,9 +14,9 @@ import pytest -from testing_support.fixtures import (validate_transaction_errors, - validate_transaction_metrics) from testing_support.validators.validate_code_level_metrics import validate_code_level_metrics +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics +from testing_support.validators.validate_transaction_errors import validate_transaction_errors from newrelic.packages import six diff --git a/tests/framework_sanic/test_application.py b/tests/framework_sanic/test_application.py index eebbde003..5b19ada2b 100644 --- a/tests/framework_sanic/test_application.py +++ b/tests/framework_sanic/test_application.py @@ -21,19 +21,28 @@ override_application_settings, override_generic_settings, override_ignore_status_codes, - validate_transaction_errors, - validate_transaction_event_attributes, - validate_transaction_metrics, ) from testing_support.validators.validate_code_level_metrics import ( validate_code_level_metrics, ) +from testing_support.validators.validate_transaction_errors import ( + validate_transaction_errors, +) +from testing_support.validators.validate_transaction_event_attributes import ( + validate_transaction_event_attributes, +) +from testing_support.validators.validate_transaction_metrics import 
( + validate_transaction_metrics, +) from newrelic.api.application import application_instance from newrelic.api.external_trace import ExternalTrace from newrelic.api.transaction import Transaction from newrelic.core.config import global_settings +sanic_21 = int(sanic.__version__.split(".", 1)[0]) >= 21 + + BASE_METRICS = [ ("Function/_target_application:index", 1), ("Function/_target_application:request_middleware", 1 if int(sanic.__version__.split(".", 1)[0]) > 18 else 2), diff --git a/tests/framework_sanic/test_cross_application.py b/tests/framework_sanic/test_cross_application.py index 0c1c724cf..7199fae55 100644 --- a/tests/framework_sanic/test_cross_application.py +++ b/tests/framework_sanic/test_cross_application.py @@ -13,30 +13,36 @@ # limitations under the License. import json -import pytest -import re import random import string -from newrelic.common.encoding_utils import deobfuscate +import pytest +from testing_support.fixtures import ( + make_cross_agent_headers, + override_application_settings, + validate_analytics_catmap_data, +) +from testing_support.validators.validate_transaction_event_attributes import ( + validate_transaction_event_attributes, +) +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) + from newrelic.api.application import application_instance from newrelic.api.external_trace import ExternalTrace from newrelic.api.transaction import Transaction - -from testing_support.fixtures import (override_application_settings, - make_cross_agent_headers, validate_analytics_catmap_data, - validate_transaction_metrics, validate_transaction_event_attributes) - +from newrelic.common.encoding_utils import deobfuscate BASE_METRICS = [ - ('Function/_target_application:index', 1), + ("Function/_target_application:index", 1), ] DT_METRICS = [ - ('Supportability/DistributedTrace/AcceptPayload/Success', None), - ('Supportability/TraceContext/TraceParent/Accept/Success', 1), + 
("Supportability/DistributedTrace/AcceptPayload/Success", None), + ("Supportability/TraceContext/TraceParent/Accept/Success", 1), ] -BASE_ATTRS = ['response.status', 'response.headers.contentType', - 'response.headers.contentLength'] +BASE_ATTRS = ["response.status", "response.headers.contentType", "response.headers.contentLength"] + def raw_headers(response): try: @@ -50,62 +56,69 @@ def raw_headers(response): @validate_transaction_metrics( - '_target_application:index', + "_target_application:index", scoped_metrics=BASE_METRICS, rollup_metrics=BASE_METRICS + DT_METRICS, ) -@override_application_settings({ - 'distributed_tracing.enabled': True, -}) +@override_application_settings( + { + "distributed_tracing.enabled": True, + } +) @validate_transaction_event_attributes( - required_params={'agent': BASE_ATTRS, 'user': [], 'intrinsic': []}, + required_params={"agent": BASE_ATTRS, "user": [], "intrinsic": []}, ) def test_inbound_distributed_trace(app): transaction = Transaction(application_instance()) dt_headers = ExternalTrace.generate_request_headers(transaction) - response = app.fetch('get', '/', headers=dict(dt_headers)) + response = app.fetch("get", "/", headers=dict(dt_headers)) assert response.status == 200 -ENCODING_KEY = "".join(random.choice(string.ascii_lowercase) for _ in range(40)) +ENCODING_KEY = "".join(random.choice(string.ascii_lowercase) for _ in range(40)) # nosec _cat_response_header_urls_to_test = ( - ('/', '_target_application:index'), - ('/streaming', '_target_application:streaming'), - ('/error', '_target_application:error'), + ("/", "_target_application:index"), + ("/streaming", "_target_application:streaming"), + ("/error", "_target_application:error"), ) _custom_settings = { - 'cross_process_id': '1#1', - 'encoding_key': ENCODING_KEY, - 'trusted_account_ids': [1], - 'cross_application_tracer.enabled': True, - 'distributed_tracing.enabled': False, + "cross_process_id": "1#1", + "encoding_key": ENCODING_KEY, + "trusted_account_ids": [1], + 
"cross_application_tracer.enabled": True, + "distributed_tracing.enabled": False, } @pytest.mark.parametrize( - 'inbound_payload,expected_intrinsics,forgone_intrinsics,cat_id', [ - - # Valid payload from trusted account - (['b854df4feb2b1f06', False, '7e249074f277923d', '5d2957be'], - {'nr.referringTransactionGuid': 'b854df4feb2b1f06', - 'nr.tripId': '7e249074f277923d', - 'nr.referringPathHash': '5d2957be'}, - [], - '1#1'), - - # Valid payload from an untrusted account - (['b854df4feb2b1f06', False, '7e249074f277923d', '5d2957be'], - {}, - ['nr.referringTransactionGuid', 'nr.tripId', 'nr.referringPathHash'], - '80#1'), -]) -@pytest.mark.parametrize('url,metric_name', _cat_response_header_urls_to_test) -def test_cat_response_headers(app, inbound_payload, expected_intrinsics, - forgone_intrinsics, cat_id, url, metric_name): + "inbound_payload,expected_intrinsics,forgone_intrinsics,cat_id", + [ + # Valid payload from trusted account + ( + ["b854df4feb2b1f06", False, "7e249074f277923d", "5d2957be"], + { + "nr.referringTransactionGuid": "b854df4feb2b1f06", + "nr.tripId": "7e249074f277923d", + "nr.referringPathHash": "5d2957be", + }, + [], + "1#1", + ), + # Valid payload from an untrusted account + ( + ["b854df4feb2b1f06", False, "7e249074f277923d", "5d2957be"], + {}, + ["nr.referringTransactionGuid", "nr.tripId", "nr.referringPathHash"], + "80#1", + ), + ], +) +@pytest.mark.parametrize("url,metric_name", _cat_response_header_urls_to_test) +def test_cat_response_headers(app, inbound_payload, expected_intrinsics, forgone_intrinsics, cat_id, url, metric_name): _base_metrics = [ - ('Function/%s' % metric_name, 1), + ("Function/%s" % metric_name, 1), ] @validate_transaction_metrics( @@ -114,39 +127,36 @@ def test_cat_response_headers(app, inbound_payload, expected_intrinsics, rollup_metrics=_base_metrics, ) @validate_analytics_catmap_data( - 'WebTransaction/Function/%s' % metric_name, - expected_attributes=expected_intrinsics, - non_expected_attributes=forgone_intrinsics) + 
"WebTransaction/Function/%s" % metric_name, + expected_attributes=expected_intrinsics, + non_expected_attributes=forgone_intrinsics, + ) @override_application_settings(_custom_settings) def _test(): - cat_headers = make_cross_agent_headers(inbound_payload, ENCODING_KEY, - cat_id) - response = app.fetch('get', url, headers=dict(cat_headers)) + cat_headers = make_cross_agent_headers(inbound_payload, ENCODING_KEY, cat_id) + response = app.fetch("get", url, headers=dict(cat_headers)) if expected_intrinsics: # test valid CAT response header - assert b'X-NewRelic-App-Data' in raw_headers(response) + assert b"X-NewRelic-App-Data" in raw_headers(response) cat_response_header = response.headers.get("X-NewRelic-App-Data", None) - app_data = json.loads(deobfuscate(cat_response_header, - ENCODING_KEY)) + app_data = json.loads(deobfuscate(cat_response_header, ENCODING_KEY)) assert app_data[0] == cat_id - assert app_data[1] == ('WebTransaction/Function/%s' % metric_name) + assert app_data[1] == ("WebTransaction/Function/%s" % metric_name) else: - assert b'X-NewRelic-App-Data' not in raw_headers(response) + assert b"X-NewRelic-App-Data" not in raw_headers(response) _test() @override_application_settings(_custom_settings) def test_cat_response_custom_header(app): - inbound_payload = ['b854df4feb2b1f06', False, '7e249074f277923d', - '5d2957be'] - cat_id = '1#1' - custom_header_value = b'my-custom-header-value' - cat_headers = make_cross_agent_headers(inbound_payload, ENCODING_KEY, - cat_id) - - response = app.fetch('get', '/custom-header/%s/%s' % ( - 'X-NewRelic-App-Data', custom_header_value), - headers=dict(cat_headers)) + inbound_payload = ["b854df4feb2b1f06", False, "7e249074f277923d", "5d2957be"] + cat_id = "1#1" + custom_header_value = b"my-custom-header-value" + cat_headers = make_cross_agent_headers(inbound_payload, ENCODING_KEY, cat_id) + + response = app.fetch( + "get", "/custom-header/%s/%s" % ("X-NewRelic-App-Data", custom_header_value), headers=dict(cat_headers) + ) 
assert custom_header_value in raw_headers(response), raw_headers(response) diff --git a/tests/framework_starlette/test_application.py b/tests/framework_starlette/test_application.py index 9c5944bd0..7d36d66cc 100644 --- a/tests/framework_starlette/test_application.py +++ b/tests/framework_starlette/test_application.py @@ -16,14 +16,12 @@ import pytest import starlette -from testing_support.fixtures import ( - override_ignore_status_codes, - validate_transaction_errors, - validate_transaction_metrics, -) +from testing_support.fixtures import override_ignore_status_codes from newrelic.common.object_names import callable_name from testing_support.validators.validate_code_level_metrics import validate_code_level_metrics +from testing_support.validators.validate_transaction_errors import validate_transaction_errors +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics starlette_version = tuple(int(x) for x in starlette.__version__.split(".")) diff --git a/tests/framework_starlette/test_bg_tasks.py b/tests/framework_starlette/test_bg_tasks.py index 308f67d10..1b909323a 100644 --- a/tests/framework_starlette/test_bg_tasks.py +++ b/tests/framework_starlette/test_bg_tasks.py @@ -16,10 +16,12 @@ import pytest from starlette import __version__ -from testing_support.fixtures import validate_transaction_metrics from testing_support.validators.validate_transaction_count import ( validate_transaction_count, ) +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) starlette_version = tuple(int(x) for x in __version__.split(".")) diff --git a/tests/framework_starlette/test_graphql.py b/tests/framework_starlette/test_graphql.py index 241371eb1..24ec3ab38 100644 --- a/tests/framework_starlette/test_graphql.py +++ b/tests/framework_starlette/test_graphql.py @@ -15,7 +15,8 @@ import json import pytest -from testing_support.fixtures import dt_enabled, validate_transaction_metrics +from 
testing_support.fixtures import dt_enabled +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.validators.validate_span_events import validate_span_events diff --git a/tests/framework_strawberry/test_application.py b/tests/framework_strawberry/test_application.py index ff6d0e2ed..d57de74f4 100644 --- a/tests/framework_strawberry/test_application.py +++ b/tests/framework_strawberry/test_application.py @@ -13,15 +13,11 @@ # limitations under the License. import pytest -from testing_support.fixtures import ( - dt_enabled, - validate_transaction_errors, - validate_transaction_metrics, -) +from testing_support.fixtures import dt_enabled from testing_support.validators.validate_span_events import validate_span_events -from testing_support.validators.validate_transaction_count import ( - validate_transaction_count, -) +from testing_support.validators.validate_transaction_errors import validate_transaction_errors +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics +from testing_support.validators.validate_transaction_count import validate_transaction_count from newrelic.api.background_task import background_task from newrelic.common.object_names import callable_name diff --git a/tests/framework_strawberry/test_application_async.py b/tests/framework_strawberry/test_application_async.py index 8174eb36e..1354c4c01 100644 --- a/tests/framework_strawberry/test_application_async.py +++ b/tests/framework_strawberry/test_application_async.py @@ -15,7 +15,8 @@ import asyncio import pytest -from testing_support.fixtures import dt_enabled, validate_transaction_metrics +from testing_support.fixtures import dt_enabled +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.validators.validate_span_events import validate_span_events from newrelic.api.background_task import background_task diff --git 
a/tests/framework_strawberry/test_asgi.py b/tests/framework_strawberry/test_asgi.py index 0db1e8a58..8acbaedfb 100644 --- a/tests/framework_strawberry/test_asgi.py +++ b/tests/framework_strawberry/test_asgi.py @@ -16,7 +16,8 @@ import pytest from testing_support.asgi_testing import AsgiTest -from testing_support.fixtures import dt_enabled, validate_transaction_metrics +from testing_support.fixtures import dt_enabled +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.validators.validate_span_events import validate_span_events diff --git a/tests/framework_tornado/test_custom_handler.py b/tests/framework_tornado/test_custom_handler.py index 4cabc5e1f..a8cb77d76 100644 --- a/tests/framework_tornado/test_custom_handler.py +++ b/tests/framework_tornado/test_custom_handler.py @@ -13,7 +13,7 @@ # limitations under the License. import pytest -from testing_support.fixtures import validate_transaction_metrics +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.validators.validate_code_level_metrics import validate_code_level_metrics pytestmark = pytest.mark.custom_app diff --git a/tests/framework_tornado/test_externals.py b/tests/framework_tornado/test_externals.py index 7d973ac5f..0c44e4336 100644 --- a/tests/framework_tornado/test_externals.py +++ b/tests/framework_tornado/test_externals.py @@ -17,10 +17,8 @@ import sys import pytest -from testing_support.fixtures import ( - override_application_settings, - validate_transaction_metrics, -) +from testing_support.fixtures import override_application_settings +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.mock_external_http_server import ( MockExternalHTTPHResponseHeadersServer, MockExternalHTTPServer, diff --git a/tests/framework_tornado/test_inbound_cat.py b/tests/framework_tornado/test_inbound_cat.py index 
87ebe03d0..44fbf2933 100644 --- a/tests/framework_tornado/test_inbound_cat.py +++ b/tests/framework_tornado/test_inbound_cat.py @@ -15,8 +15,9 @@ import json import pytest from testing_support.fixtures import (make_cross_agent_headers, - override_application_settings, validate_transaction_event_attributes, - validate_transaction_metrics) + override_application_settings) +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics +from testing_support.validators.validate_transaction_event_attributes import validate_transaction_event_attributes ENCODING_KEY = '1234567890123456789012345678901234567890' diff --git a/tests/framework_tornado/test_server.py b/tests/framework_tornado/test_server.py index 6963cee56..1d985b938 100644 --- a/tests/framework_tornado/test_server.py +++ b/tests/framework_tornado/test_server.py @@ -18,9 +18,6 @@ override_application_settings, override_generic_settings, override_ignore_status_codes, - validate_transaction_errors, - validate_transaction_event_attributes, - validate_transaction_metrics, ) from testing_support.validators.validate_code_level_metrics import ( validate_code_level_metrics, @@ -28,6 +25,15 @@ from testing_support.validators.validate_transaction_count import ( validate_transaction_count, ) +from testing_support.validators.validate_transaction_errors import ( + validate_transaction_errors, +) +from testing_support.validators.validate_transaction_event_attributes import ( + validate_transaction_event_attributes, +) +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) from newrelic.core.config import global_settings @@ -206,6 +212,7 @@ def test_nr_disabled(app): ), ) def test_web_socket(uri, name, app): + # import asyncio from tornado.websocket import websocket_connect namespace, func_name = name.split(":") diff --git a/tests/logger_logging/test_metrics.py b/tests/logger_logging/test_metrics.py index eb9419daf..f5a1c5e8d 100644 --- 
a/tests/logger_logging/test_metrics.py +++ b/tests/logger_logging/test_metrics.py @@ -16,10 +16,7 @@ from newrelic.api.background_task import background_task from testing_support.fixtures import reset_core_stats_engine from testing_support.validators.validate_custom_metrics_outside_transaction import validate_custom_metrics_outside_transaction -from testing_support.fixtures import ( - validate_transaction_metrics, -) - +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics def exercise_logging(logger): logger.debug("A") diff --git a/tests/logger_logging/test_settings.py b/tests/logger_logging/test_settings.py index 2406d87c0..0581e6218 100644 --- a/tests/logger_logging/test_settings.py +++ b/tests/logger_logging/test_settings.py @@ -18,11 +18,8 @@ from newrelic.api.background_task import background_task from testing_support.fixtures import reset_core_stats_engine from testing_support.validators.validate_log_event_count import validate_log_event_count -from testing_support.fixtures import ( - override_application_settings, - validate_transaction_metrics, -) - +from testing_support.fixtures import override_application_settings +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics def basic_logging(logger): logger.warning("C") diff --git a/tests/logger_loguru/test_metrics.py b/tests/logger_loguru/test_metrics.py index c8a9299c8..9c02d405e 100644 --- a/tests/logger_loguru/test_metrics.py +++ b/tests/logger_loguru/test_metrics.py @@ -15,9 +15,7 @@ from newrelic.api.background_task import background_task from testing_support.fixtures import reset_core_stats_engine from testing_support.validators.validate_custom_metrics_outside_transaction import validate_custom_metrics_outside_transaction -from testing_support.fixtures import ( - validate_transaction_metrics, -) +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics def 
exercise_logging(logger): diff --git a/tests/logger_loguru/test_settings.py b/tests/logger_loguru/test_settings.py index 4e5dadf5d..43d675d56 100644 --- a/tests/logger_loguru/test_settings.py +++ b/tests/logger_loguru/test_settings.py @@ -19,11 +19,8 @@ from newrelic.api.background_task import background_task from testing_support.fixtures import reset_core_stats_engine from testing_support.validators.validate_log_event_count import validate_log_event_count -from testing_support.fixtures import ( - override_application_settings, - validate_transaction_metrics, -) - +from testing_support.fixtures import override_application_settings +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics def get_metadata_string(log_message, is_txn): host = platform.uname().node diff --git a/tests/messagebroker_confluentkafka/test_consumer.py b/tests/messagebroker_confluentkafka/test_consumer.py index 4313e668f..5478b7c80 100644 --- a/tests/messagebroker_confluentkafka/test_consumer.py +++ b/tests/messagebroker_confluentkafka/test_consumer.py @@ -18,8 +18,6 @@ reset_core_stats_engine, validate_attributes, validate_error_event_attributes_outside_transaction, - validate_transaction_errors, - validate_transaction_metrics, ) from testing_support.validators.validate_distributed_trace_accepted import ( validate_distributed_trace_accepted, @@ -27,6 +25,12 @@ from testing_support.validators.validate_transaction_count import ( validate_transaction_count, ) +from testing_support.validators.validate_transaction_errors import ( + validate_transaction_errors, +) +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) from newrelic.api.background_task import background_task from newrelic.api.transaction import end_of_transaction diff --git a/tests/messagebroker_confluentkafka/test_producer.py b/tests/messagebroker_confluentkafka/test_producer.py index 139239e73..b5dcff020 100644 --- 
a/tests/messagebroker_confluentkafka/test_producer.py +++ b/tests/messagebroker_confluentkafka/test_producer.py @@ -16,13 +16,15 @@ import pytest from conftest import cache_kafka_producer_headers -from testing_support.fixtures import ( - validate_transaction_errors, - validate_transaction_metrics, -) from testing_support.validators.validate_messagebroker_headers import ( validate_messagebroker_headers, ) +from testing_support.validators.validate_transaction_errors import ( + validate_transaction_errors, +) +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) from newrelic.api.background_task import background_task from newrelic.common.object_names import callable_name diff --git a/tests/messagebroker_confluentkafka/test_serialization.py b/tests/messagebroker_confluentkafka/test_serialization.py index 4d948713d..0b8b41d52 100644 --- a/tests/messagebroker_confluentkafka/test_serialization.py +++ b/tests/messagebroker_confluentkafka/test_serialization.py @@ -13,8 +13,10 @@ # limitations under the License. 
import pytest -from testing_support.fixtures import ( +from testing_support.validators.validate_transaction_errors import ( validate_transaction_errors, +) +from testing_support.validators.validate_transaction_metrics import ( validate_transaction_metrics, ) diff --git a/tests/messagebroker_kafkapython/test_consumer.py b/tests/messagebroker_kafkapython/test_consumer.py index 84cf29a04..47e42d6c9 100644 --- a/tests/messagebroker_kafkapython/test_consumer.py +++ b/tests/messagebroker_kafkapython/test_consumer.py @@ -18,8 +18,6 @@ reset_core_stats_engine, validate_attributes, validate_error_event_attributes_outside_transaction, - validate_transaction_errors, - validate_transaction_metrics, ) from testing_support.validators.validate_distributed_trace_accepted import ( validate_distributed_trace_accepted, @@ -27,6 +25,12 @@ from testing_support.validators.validate_transaction_count import ( validate_transaction_count, ) +from testing_support.validators.validate_transaction_errors import ( + validate_transaction_errors, +) +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) from newrelic.api.background_task import background_task from newrelic.api.transaction import end_of_transaction diff --git a/tests/messagebroker_kafkapython/test_producer.py b/tests/messagebroker_kafkapython/test_producer.py index 280d4fd15..53a31dce5 100644 --- a/tests/messagebroker_kafkapython/test_producer.py +++ b/tests/messagebroker_kafkapython/test_producer.py @@ -14,13 +14,15 @@ import pytest from conftest import cache_kafka_producer_headers -from testing_support.fixtures import ( - validate_transaction_errors, - validate_transaction_metrics, -) from testing_support.validators.validate_messagebroker_headers import ( validate_messagebroker_headers, ) +from testing_support.validators.validate_transaction_errors import ( + validate_transaction_errors, +) +from testing_support.validators.validate_transaction_metrics import ( + 
validate_transaction_metrics, +) from newrelic.api.background_task import background_task from newrelic.common.object_names import callable_name diff --git a/tests/messagebroker_kafkapython/test_serialization.py b/tests/messagebroker_kafkapython/test_serialization.py index b83b4e85c..f58d082ec 100644 --- a/tests/messagebroker_kafkapython/test_serialization.py +++ b/tests/messagebroker_kafkapython/test_serialization.py @@ -18,7 +18,11 @@ from testing_support.fixtures import ( reset_core_stats_engine, validate_error_event_attributes_outside_transaction, +) +from testing_support.validators.validate_transaction_errors import ( validate_transaction_errors, +) +from testing_support.validators.validate_transaction_metrics import ( validate_transaction_metrics, ) diff --git a/tests/messagebroker_pika/test_cat.py b/tests/messagebroker_pika/test_cat.py index 2b1aac4a7..e6ca848cc 100644 --- a/tests/messagebroker_pika/test_cat.py +++ b/tests/messagebroker_pika/test_cat.py @@ -23,9 +23,8 @@ from testing_support.fixtures import ( cat_enabled, override_application_settings, - validate_transaction_metrics, ) - +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from newrelic.api.background_task import background_task from newrelic.api.transaction import current_transaction diff --git a/tests/messagebroker_pika/test_distributed_tracing.py b/tests/messagebroker_pika/test_distributed_tracing.py index f911aaad8..548d41a0d 100644 --- a/tests/messagebroker_pika/test_distributed_tracing.py +++ b/tests/messagebroker_pika/test_distributed_tracing.py @@ -23,8 +23,8 @@ from newrelic.common.encoding_utils import DistributedTracePayload from testing_support.db_settings import rabbitmq_settings -from testing_support.fixtures import (override_application_settings, - validate_transaction_metrics) +from testing_support.fixtures import override_application_settings +from testing_support.validators.validate_transaction_metrics import 
validate_transaction_metrics DB_SETTINGS = rabbitmq_settings()[0] diff --git a/tests/messagebroker_pika/test_pika_async_connection_consume.py b/tests/messagebroker_pika/test_pika_async_connection_consume.py index 0ed76503f..7edf6b644 100644 --- a/tests/messagebroker_pika/test_pika_async_connection_consume.py +++ b/tests/messagebroker_pika/test_pika_async_connection_consume.py @@ -26,11 +26,11 @@ from conftest import (QUEUE, QUEUE_2, EXCHANGE, EXCHANGE_2, CORRELATION_ID, REPLY_TO, HEADERS, BODY) from testing_support.fixtures import (capture_transaction_metrics, - validate_transaction_metrics, validate_tt_collector_json, + validate_tt_collector_json, function_not_called, override_application_settings) from testing_support.validators.validate_code_level_metrics import validate_code_level_metrics from testing_support.db_settings import rabbitmq_settings - +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics DB_SETTINGS = rabbitmq_settings()[0] diff --git a/tests/messagebroker_pika/test_pika_blocking_connection_consume.py b/tests/messagebroker_pika/test_pika_blocking_connection_consume.py index 417055bfc..c96d42d98 100644 --- a/tests/messagebroker_pika/test_pika_blocking_connection_consume.py +++ b/tests/messagebroker_pika/test_pika_blocking_connection_consume.py @@ -24,9 +24,10 @@ from conftest import QUEUE, EXCHANGE, CORRELATION_ID, REPLY_TO, HEADERS, BODY from testing_support.fixtures import (capture_transaction_metrics, - validate_transaction_metrics, validate_tt_collector_json) + validate_tt_collector_json) from testing_support.validators.validate_code_level_metrics import validate_code_level_metrics from testing_support.db_settings import rabbitmq_settings +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics DB_SETTINGS = rabbitmq_settings()[0] diff --git a/tests/messagebroker_pika/test_pika_blocking_connection_consume_generator.py 
b/tests/messagebroker_pika/test_pika_blocking_connection_consume_generator.py index a9ee1b331..4fff11487 100644 --- a/tests/messagebroker_pika/test_pika_blocking_connection_consume_generator.py +++ b/tests/messagebroker_pika/test_pika_blocking_connection_consume_generator.py @@ -17,9 +17,9 @@ from newrelic.api.background_task import background_task from conftest import QUEUE, EXCHANGE, CORRELATION_ID, REPLY_TO, HEADERS, BODY -from testing_support.fixtures import (validate_transaction_metrics, - validate_tt_collector_json) +from testing_support.fixtures import validate_tt_collector_json from testing_support.db_settings import rabbitmq_settings +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics DB_SETTINGS = rabbitmq_settings()[0] diff --git a/tests/messagebroker_pika/test_pika_produce.py b/tests/messagebroker_pika/test_pika_produce.py index 60e6526e4..0960159fa 100644 --- a/tests/messagebroker_pika/test_pika_produce.py +++ b/tests/messagebroker_pika/test_pika_produce.py @@ -17,12 +17,12 @@ from testing_support.db_settings import rabbitmq_settings from testing_support.fixtures import ( override_application_settings, - validate_transaction_metrics, validate_tt_collector_json, ) from testing_support.validators.validate_messagebroker_headers import ( validate_messagebroker_headers, ) +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from newrelic.api.background_task import background_task from newrelic.api.transaction import current_transaction diff --git a/tests/messagebroker_pika/test_pika_supportability.py b/tests/messagebroker_pika/test_pika_supportability.py index fa0e46639..9f0d94e90 100644 --- a/tests/messagebroker_pika/test_pika_supportability.py +++ b/tests/messagebroker_pika/test_pika_supportability.py @@ -19,7 +19,7 @@ from newrelic.api.background_task import background_task from conftest import QUEUE, BODY -from testing_support.fixtures import 
validate_transaction_metrics +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.db_settings import rabbitmq_settings DB_SETTINGS = rabbitmq_settings()[0] diff --git a/tests/template_mako/test_mako.py b/tests/template_mako/test_mako.py index fd598d786..07d8f4974 100644 --- a/tests/template_mako/test_mako.py +++ b/tests/template_mako/test_mako.py @@ -13,7 +13,7 @@ # limitations under the License. from mako.template import Template -from testing_support.fixtures import validate_transaction_metrics +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from newrelic.api.background_task import background_task diff --git a/tests/testing_support/fixtures.py b/tests/testing_support/fixtures.py index 2df593abc..9e99e89b2 100644 --- a/tests/testing_support/fixtures.py +++ b/tests/testing_support/fixtures.py @@ -65,8 +65,9 @@ ) from newrelic.core.config import apply_config_setting, flatten_settings, global_settings from newrelic.core.database_utils import SQLConnections -from newrelic.core.internal_metrics import InternalTraceContext -from newrelic.core.stats_engine import CustomMetrics + +# from newrelic.core.internal_metrics import InternalTraceContext +# from newrelic.core.stats_engine import CustomMetrics from newrelic.network.exceptions import RetryDataForRequest from newrelic.packages import six @@ -385,194 +386,6 @@ def make_synthetics_header(account_id, resource_id, job_id, monitor_id, encoding return {"X-NewRelic-Synthetics": value} -def validate_transaction_metrics( - name, - group="Function", - background_task=False, - scoped_metrics=None, - rollup_metrics=None, - custom_metrics=None, - index=-1, -): - scoped_metrics = scoped_metrics or [] - rollup_metrics = rollup_metrics or [] - custom_metrics = custom_metrics or [] - - if background_task: - unscoped_metrics = [ - "OtherTransaction/all", - "OtherTransaction/%s/%s" % (group, name), - 
"OtherTransactionTotalTime", - "OtherTransactionTotalTime/%s/%s" % (group, name), - ] - transaction_scope_name = "OtherTransaction/%s/%s" % (group, name) - else: - unscoped_metrics = [ - "WebTransaction", - "WebTransaction/%s/%s" % (group, name), - "WebTransactionTotalTime", - "WebTransactionTotalTime/%s/%s" % (group, name), - "HttpDispatcher", - ] - transaction_scope_name = "WebTransaction/%s/%s" % (group, name) - - @function_wrapper - def _validate_wrapper(wrapped, instance, args, kwargs): - - record_transaction_called = [] - recorded_metrics = [] - - @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.record_transaction") - @catch_background_exceptions - def _validate_transaction_metrics(wrapped, instance, args, kwargs): - record_transaction_called.append(True) - try: - result = wrapped(*args, **kwargs) - except: - raise - else: - metrics = instance.stats_table - # Record a copy of the metric value so that the values aren't - # merged in the future - _metrics = {} - for k, v in metrics.items(): - _metrics[k] = copy.copy(v) - recorded_metrics.append(_metrics) - - return result - - def _validate(metrics, name, scope, count): - key = (name, scope) - metric = metrics.get(key) - - def _metrics_table(): - out = [""] - out.append("Expected: {0}: {1}".format(key, count)) - for metric_key, metric_value in metrics.items(): - out.append("{0}: {1}".format(metric_key, metric_value[0])) - return "\n".join(out) - - def _metric_details(): - return "metric=%r, count=%r" % (key, metric.call_count) - - if count is not None: - assert metric is not None, _metrics_table() - if count == "present": - assert metric.call_count > 0, _metric_details() - else: - assert metric.call_count == count, _metric_details() - - assert metric.total_call_time >= 0, (key, metric) - assert metric.total_exclusive_call_time >= 0, (key, metric) - assert metric.min_call_time >= 0, (key, metric) - assert metric.sum_of_squares >= 0, (key, metric) - - else: - assert metric is None, 
_metrics_table() - - _new_wrapper = _validate_transaction_metrics(wrapped) - val = _new_wrapper(*args, **kwargs) - assert record_transaction_called - metrics = recorded_metrics[index] - - record_transaction_called[:] = [] - recorded_metrics[:] = [] - - for unscoped_metric in unscoped_metrics: - _validate(metrics, unscoped_metric, "", 1) - - for scoped_name, scoped_count in scoped_metrics: - _validate(metrics, scoped_name, transaction_scope_name, scoped_count) - - for rollup_name, rollup_count in rollup_metrics: - _validate(metrics, rollup_name, "", rollup_count) - - for custom_name, custom_count in custom_metrics: - _validate(metrics, custom_name, "", custom_count) - - custom_metric_names = {name for name, _ in custom_metrics} - for name, _ in metrics: - if name not in custom_metric_names: - assert not name.startswith("Supportability/api/"), name - - return val - - return _validate_wrapper - - -def validate_time_metrics_outside_transaction(time_metrics=None, index=-1): - time_metrics = time_metrics or [] - - @function_wrapper - def _validate_wrapper(wrapped, instance, args, kwargs): - - record_time_metric_called = [] - recorded_metrics = [] - - @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.record_time_metric") - @catch_background_exceptions - def _validate_transaction_metrics(wrapped, instance, args, kwargs): - record_time_metric_called.append(True) - try: - result = wrapped(*args, **kwargs) - except: - raise - else: - metrics = instance.stats_table - # Record a copy of the metric value so that the values aren't - # merged in the future - _metrics = {} - for k, v in metrics.items(): - _metrics[k] = copy.copy(v) - recorded_metrics.append(_metrics) - - return result - - def _validate(metrics, name, count): - key = (name, "") - metric = metrics.get(key) - - def _metrics_table(): - out = [""] - out.append("Expected: {0}: {1}".format(key, count)) - for metric_key, metric_value in metrics.items(): - out.append("{0}: {1}".format(metric_key, 
metric_value[0])) - return "\n".join(out) - - def _metric_details(): - return "metric=%r, count=%r" % (key, metric.call_count) - - if count is not None: - assert metric is not None, _metrics_table() - if count == "present": - assert metric.call_count > 0, _metric_details() - else: - assert metric.call_count == count, _metric_details() - - assert metric.total_call_time >= 0, (key, metric) - assert metric.total_exclusive_call_time >= 0, (key, metric) - assert metric.min_call_time >= 0, (key, metric) - assert metric.sum_of_squares >= 0, (key, metric) - - else: - assert metric is None, _metrics_table() - - _new_wrapper = _validate_transaction_metrics(wrapped) - val = _new_wrapper(*args, **kwargs) - assert record_time_metric_called - metrics = recorded_metrics[index] - - record_time_metric_called[:] = [] - recorded_metrics[:] = [] - - for time_metric, count in time_metrics: - _validate(metrics, time_metric, count) - - return val - - return _validate_wrapper - - def capture_transaction_metrics(metrics_list, full_metrics=None): @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.record_transaction") @catch_background_exceptions @@ -594,268 +407,6 @@ def _capture_transaction_metrics(wrapped, instance, args, kwargs): return _capture_transaction_metrics -def validate_internal_metrics(metrics=None): - metrics = metrics or [] - - def no_op(wrapped, instance, args, kwargs): - pass - - @function_wrapper - def _validate_wrapper(wrapped, instance, args, kwargs): - # Apply no-op wrappers to prevent new internal trace contexts from being started, preventing capture - wrapped = transient_function_wrapper("newrelic.core.internal_metrics", "InternalTraceContext.__enter__")(no_op)( - wrapped - ) - wrapped = transient_function_wrapper("newrelic.core.internal_metrics", "InternalTraceContext.__exit__")(no_op)( - wrapped - ) - - captured_metrics = CustomMetrics() - with InternalTraceContext(captured_metrics): - result = wrapped(*args, **kwargs) - captured_metrics = 
dict(captured_metrics.metrics()) - - def _validate(name, count): - metric = captured_metrics.get(name) - - def _metrics_table(): - return "metric=%r, metrics=%r" % (name, captured_metrics) - - def _metric_details(): - return "metric=%r, count=%r" % (name, metric.call_count) - - if count is not None and count > 0: - assert metric is not None, _metrics_table() - if count == "present": - assert metric.call_count > 0, _metric_details() - else: - assert metric.call_count == count, _metric_details() - - else: - assert metric is None, _metrics_table() - - for metric, count in metrics: - _validate(metric, count) - - return result - - return _validate_wrapper - - -def validate_transaction_errors(errors=None, required_params=None, forgone_params=None): - errors = errors or [] - required_params = required_params or [] - forgone_params = forgone_params or [] - captured_errors = [] - - @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.record_transaction") - @catch_background_exceptions - def _capture_transaction_errors(wrapped, instance, args, kwargs): - def _bind_params(transaction, *args, **kwargs): - return transaction - - transaction = _bind_params(*args, **kwargs) - captured = transaction.errors - - captured_errors.append(captured) - - return wrapped(*args, **kwargs) - - @function_wrapper - def _validate_transaction_errors(wrapped, instance, args, kwargs): - _new_wrapped = _capture_transaction_errors(wrapped) - output = _new_wrapped(*args, **kwargs) - - expected = sorted(errors) - - if captured_errors: - captured = captured_errors[0] - else: - captured = [] - - if errors and isinstance(errors[0], (tuple, list)): - compare_to = sorted([(e.type, e.message) for e in captured]) - else: - compare_to = sorted([e.type for e in captured]) - - assert expected == compare_to, "expected=%r, captured=%r, errors=%r" % (expected, compare_to, captured) - - for e in captured: - assert e.span_id - for name, value in required_params: - assert name in e.custom_params, 
"name=%r, params=%r" % (name, e.custom_params) - assert e.custom_params[name] == value, "name=%r, value=%r, params=%r" % ( - name, - value, - e.custom_params, - ) - - for name, value in forgone_params: - assert name not in e.custom_params, "name=%r, params=%r" % (name, e.custom_params) - - return output - - return _validate_transaction_errors - - -def validate_application_errors(errors=None, required_params=None, forgone_params=None): - errors = errors or [] - required_params = required_params or [] - forgone_params = forgone_params or [] - - @function_wrapper - def _validate_application_errors(wrapped, instace, args, kwargs): - - try: - result = wrapped(*args, **kwargs) - except: - raise - else: - - stats = core_application_stats_engine() - - app_errors = stats.error_data() - - expected = sorted(errors) - captured = sorted([(e.type, e.message) for e in stats.error_data()]) - - assert expected == captured, "expected=%r, captured=%r, errors=%r" % (expected, captured, app_errors) - - for e in app_errors: - for name, value in required_params: - assert name in e.parameters["userAttributes"], "name=%r, params=%r" % (name, e.parameters) - assert e.parameters["userAttributes"][name] == value, "name=%r, value=%r, params=%r" % ( - name, - value, - e.parameters, - ) - - for name, value in forgone_params: - assert name not in e.parameters["userAttributes"], "name=%r, params=%r" % (name, e.parameters) - - return result - - return _validate_application_errors - - -def validate_custom_parameters(required_params=None, forgone_params=None): - required_params = required_params or [] - forgone_params = forgone_params or [] - - @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.record_transaction") - @catch_background_exceptions - def _validate_custom_parameters(wrapped, instance, args, kwargs): - def _bind_params(transaction, *args, **kwargs): - return transaction - - transaction = _bind_params(*args, **kwargs) - - # these are pre-destination applied attributes, 
so they may not - # actually end up in a transaction/error trace, we are merely testing - # for presence on the TransactionNode - - attrs = {} - for attr in transaction.user_attributes: - attrs[attr.name] = attr.value - - for name, value in required_params: - assert name in attrs, "name=%r, params=%r" % (name, attrs) - assert attrs[name] == value, "name=%r, value=%r, params=%r" % (name, value, attrs) - - for name, value in forgone_params: - assert name not in attrs, "name=%r, params=%r" % (name, attrs) - - return wrapped(*args, **kwargs) - - return _validate_custom_parameters - - -def validate_synthetics_event(required_attrs=None, forgone_attrs=None, should_exist=True): - required_attrs = required_attrs or [] - forgone_attrs = forgone_attrs or [] - failed = [] - - @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.record_transaction") - def _validate_synthetics_event(wrapped, instance, args, kwargs): - result = wrapped(*args, **kwargs) - - try: - if not should_exist: - assert instance.synthetics_events == [] - else: - assert len(instance.synthetics_events) == 1 - event = instance.synthetics_events[0] - assert event is not None - assert len(event) == 3 - - def _flatten(event): - result = {} - for elem in event: - for k, v in elem.items(): - result[k] = v - return result - - flat_event = _flatten(event) - - assert "nr.guid" in flat_event, "name=%r, event=%r" % ("nr.guid", flat_event) - - for name, value in required_attrs: - assert name in flat_event, "name=%r, event=%r" % (name, flat_event) - assert flat_event[name] == value, "name=%r, value=%r, event=%r" % (name, value, flat_event) - - for name, value in forgone_attrs: - assert name not in flat_event, "name=%r, value=%r, event=%r" % (name, value, flat_event) - except Exception as e: - failed.append(e) - - return result - - @function_wrapper - def wrapper(wrapped, instance, args, kwargs): - _new_wrapper = _validate_synthetics_event(wrapped) - result = _new_wrapper(*args, **kwargs) - if failed: - e 
= failed.pop() - raise e - return result - - return wrapper - - -def validate_transaction_event_attributes(required_params=None, forgone_params=None, exact_attrs=None, index=-1): - required_params = required_params or {} - forgone_params = forgone_params or {} - exact_attrs = exact_attrs or {} - - captured_events = [] - - @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.record_transaction") - def _capture_transaction_events(wrapped, instance, args, kwargs): - try: - result = wrapped(*args, **kwargs) - except: - raise - else: - event_data = instance.transaction_events - captured_events.append(event_data) - return result - - @function_wrapper - def _validate_transaction_event_attributes(wrapped, instance, args, kwargs): - _new_wrapper = _capture_transaction_events(wrapped) - result = _new_wrapper(*args, **kwargs) - - assert captured_events, "No events captured" - event_data = captured_events[index] - captured_events[:] = [] - - check_event_attributes(event_data, required_params, forgone_params, exact_attrs) - - return result - - return _validate_transaction_event_attributes - - def check_event_attributes(event_data, required_params=None, forgone_params=None, exact_attrs=None): """Check the event attributes from a single (first) event in a SampledDataSet. If necessary, clear out previous errors from StatsEngine @@ -893,58 +444,6 @@ def check_event_attributes(event_data, required_params=None, forgone_params=None assert intrinsics[param] == value, ((param, value), intrinsics) -def validate_non_transaction_error_event(required_intrinsics=None, num_errors=1, required_user=None, forgone_user=None): - """Validate error event data for a single error occurring outside of a - transaction. 
- """ - required_intrinsics = required_intrinsics or {} - required_user = required_user or {} - forgone_user = forgone_user or [] - - @function_wrapper - def _validate_non_transaction_error_event(wrapped, instace, args, kwargs): - - try: - result = wrapped(*args, **kwargs) - except: - raise - else: - - stats = core_application_stats_engine(None) - - assert stats.error_events.num_seen == num_errors - for event in stats.error_events: - - assert len(event) == 3 # [intrinsic, user, agent attributes] - - intrinsics = event[0] - - # The following attributes are all required, and also the only - # intrinsic attributes that can be included in an error event - # recorded outside of a transaction - - assert intrinsics["type"] == "TransactionError" - assert intrinsics["transactionName"] is None - assert intrinsics["error.class"] == required_intrinsics["error.class"] - assert intrinsics["error.message"].startswith(required_intrinsics["error.message"]) - assert intrinsics["error.expected"] == required_intrinsics["error.expected"] - now = time.time() - assert isinstance(intrinsics["timestamp"], int) - assert intrinsics["timestamp"] <= 1000.0 * now - - user_params = event[1] - for name, value in required_user.items(): - assert name in user_params, "name=%r, params=%r" % (name, user_params) - assert user_params[name] == value, "name=%r, value=%r, params=%r" % (name, value, user_params) - - for param in forgone_user: - assert param not in user_params - - return result - - return _validate_non_transaction_error_event - - def validate_application_error_trace_count(num_errors): """Validate error event data for a single error occurring outside of a transaction. diff --git a/tests/testing_support/validators/validate_application_errors.py b/tests/testing_support/validators/validate_application_errors.py new file mode 100644 index 000000000..7f653d691 --- /dev/null +++ b/tests/testing_support/validators/validate_application_errors.py @@ -0,0 +1,56 @@ +# Copyright 2010 New Relic, Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from newrelic.common.object_wrapper import function_wrapper +from testing_support.fixtures import core_application_stats_engine + +def validate_application_errors(errors=None, required_params=None, forgone_params=None): + errors = errors or [] + required_params = required_params or [] + forgone_params = forgone_params or [] + + @function_wrapper + def _validate_application_errors(wrapped, instace, args, kwargs): + + try: + result = wrapped(*args, **kwargs) + except: + raise + else: + + stats = core_application_stats_engine() + + app_errors = stats.error_data() + + expected = sorted(errors) + captured = sorted([(e.type, e.message) for e in stats.error_data()]) + + assert expected == captured, "expected=%r, captured=%r, errors=%r" % (expected, captured, app_errors) + + for e in app_errors: + for name, value in required_params: + assert name in e.parameters["userAttributes"], "name=%r, params=%r" % (name, e.parameters) + assert e.parameters["userAttributes"][name] == value, "name=%r, value=%r, params=%r" % ( + name, + value, + e.parameters, + ) + + for name, value in forgone_params: + assert name not in e.parameters["userAttributes"], "name=%r, params=%r" % (name, e.parameters) + + return result + + return _validate_application_errors diff --git a/tests/testing_support/validators/validate_custom_parameters.py b/tests/testing_support/validators/validate_custom_parameters.py new file mode 100644 index 000000000..9dc92fb12 
--- /dev/null +++ b/tests/testing_support/validators/validate_custom_parameters.py @@ -0,0 +1,48 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from newrelic.common.object_wrapper import transient_function_wrapper +from testing_support.fixtures import catch_background_exceptions + + +def validate_custom_parameters(required_params=None, forgone_params=None): + required_params = required_params or [] + forgone_params = forgone_params or [] + + @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.record_transaction") + @catch_background_exceptions + def _validate_custom_parameters(wrapped, instance, args, kwargs): + def _bind_params(transaction, *args, **kwargs): + return transaction + + transaction = _bind_params(*args, **kwargs) + + # these are pre-destination applied attributes, so they may not + # actually end up in a transaction/error trace, we are merely testing + # for presence on the TransactionNode + + attrs = {} + for attr in transaction.user_attributes: + attrs[attr.name] = attr.value + + for name, value in required_params: + assert name in attrs, "name=%r, params=%r" % (name, attrs) + assert attrs[name] == value, "name=%r, value=%r, params=%r" % (name, value, attrs) + + for name, value in forgone_params: + assert name not in attrs, "name=%r, params=%r" % (name, attrs) + + return wrapped(*args, **kwargs) + + return _validate_custom_parameters \ No newline at end of file diff --git 
a/tests/testing_support/validators/validate_internal_metrics.py b/tests/testing_support/validators/validate_internal_metrics.py new file mode 100644 index 000000000..c685d9b5f --- /dev/null +++ b/tests/testing_support/validators/validate_internal_metrics.py @@ -0,0 +1,64 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from newrelic.common.object_wrapper import ( + function_wrapper, + transient_function_wrapper, +) +from newrelic.core.stats_engine import CustomMetrics +from newrelic.core.internal_metrics import InternalTraceContext + + +def validate_internal_metrics(metrics=None): + metrics = metrics or [] + + def no_op(wrapped, instance, args, kwargs): + pass + + @function_wrapper + def _validate_wrapper(wrapped, instance, args, kwargs): + # Apply no-op wrappers to prevent new internal trace contexts from being started, preventing capture + wrapped = transient_function_wrapper("newrelic.core.internal_metrics", "InternalTraceContext.__enter__")(no_op)(wrapped) + wrapped = transient_function_wrapper("newrelic.core.internal_metrics", "InternalTraceContext.__exit__")(no_op)(wrapped) + + captured_metrics = CustomMetrics() + with InternalTraceContext(captured_metrics): + result = wrapped(*args, **kwargs) + captured_metrics = dict(captured_metrics.metrics()) + + def _validate(name, count): + metric = captured_metrics.get(name) + + def _metrics_table(): + return "metric=%r, metrics=%r" % (name, captured_metrics) + + def 
_metric_details(): + return "metric=%r, count=%r" % (name, metric.call_count) + + if count is not None and count > 0: + assert metric is not None, _metrics_table() + if count == "present": + assert metric.call_count > 0, _metric_details() + else: + assert metric.call_count == count, _metric_details() + + else: + assert metric is None, _metrics_table() + + for metric, count in metrics: + _validate(metric, count) + + return result + + return _validate_wrapper \ No newline at end of file diff --git a/tests/testing_support/validators/validate_non_transaction_error_event.py b/tests/testing_support/validators/validate_non_transaction_error_event.py new file mode 100644 index 000000000..fa14ae37d --- /dev/null +++ b/tests/testing_support/validators/validate_non_transaction_error_event.py @@ -0,0 +1,71 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import time + +from testing_support.fixtures import core_application_stats_engine + +from newrelic.common.object_wrapper import function_wrapper + + +def validate_non_transaction_error_event(required_intrinsics=None, num_errors=1, required_user=None, forgone_user=None): + """Validate error event data for a single error occurring outside of a + transaction. 
+ """ + required_intrinsics = required_intrinsics or {} + required_user = required_user or {} + forgone_user = forgone_user or [] + + @function_wrapper + def _validate_non_transaction_error_event(wrapped, instace, args, kwargs): + + try: + result = wrapped(*args, **kwargs) + except: + raise + else: + + stats = core_application_stats_engine(None) + + assert stats.error_events.num_seen == num_errors + for event in stats.error_events: + + assert len(event) == 3 # [intrinsic, user, agent attributes] + + intrinsics = event[0] + + # The following attributes are all required, and also the only + # intrinsic attributes that can be included in an error event + # recorded outside of a transaction + + assert intrinsics["type"] == "TransactionError" + assert intrinsics["transactionName"] is None + assert intrinsics["error.class"] == required_intrinsics["error.class"] + assert intrinsics["error.message"].startswith(required_intrinsics["error.message"]) + assert intrinsics["error.expected"] == required_intrinsics["error.expected"] + now = time.time() + assert isinstance(intrinsics["timestamp"], int) + assert intrinsics["timestamp"] <= 1000.0 * now + + user_params = event[1] + for name, value in required_user.items(): + assert name in user_params, "name=%r, params=%r" % (name, user_params) + assert user_params[name] == value, "name=%r, value=%r, params=%r" % (name, value, user_params) + + for param in forgone_user: + assert param not in user_params + + return result + + return _validate_non_transaction_error_event diff --git a/tests/testing_support/validators/validate_synthetics_event.py b/tests/testing_support/validators/validate_synthetics_event.py new file mode 100644 index 000000000..221cf7e6e --- /dev/null +++ b/tests/testing_support/validators/validate_synthetics_event.py @@ -0,0 +1,71 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from newrelic.common.object_wrapper import ( + function_wrapper, + transient_function_wrapper, +) + +def validate_synthetics_event(required_attrs=None, forgone_attrs=None, should_exist=True): + required_attrs = required_attrs or [] + forgone_attrs = forgone_attrs or [] + failed = [] + + @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.record_transaction") + def _validate_synthetics_event(wrapped, instance, args, kwargs): + result = wrapped(*args, **kwargs) + + try: + if not should_exist: + assert instance.synthetics_events == [] + else: + assert len(instance.synthetics_events) == 1 + event = instance.synthetics_events[0] + assert event is not None + assert len(event) == 3 + + def _flatten(event): + result = {} + for elem in event: + for k, v in elem.items(): + result[k] = v + return result + + flat_event = _flatten(event) + + assert "nr.guid" in flat_event, "name=%r, event=%r" % ("nr.guid", flat_event) + + for name, value in required_attrs: + assert name in flat_event, "name=%r, event=%r" % (name, flat_event) + assert flat_event[name] == value, "name=%r, value=%r, event=%r" % (name, value, flat_event) + + for name, value in forgone_attrs: + assert name not in flat_event, "name=%r, value=%r, event=%r" % (name, value, flat_event) + except Exception as e: + failed.append(e) + + return result + + @function_wrapper + def wrapper(wrapped, instance, args, kwargs): + _new_wrapper = _validate_synthetics_event(wrapped) + result = _new_wrapper(*args, **kwargs) + if failed: + e = failed.pop() + raise e + return result + + return wrapper + 
diff --git a/tests/testing_support/validators/validate_time_metrics_outside_transaction.py b/tests/testing_support/validators/validate_time_metrics_outside_transaction.py new file mode 100644 index 000000000..1f6173a67 --- /dev/null +++ b/tests/testing_support/validators/validate_time_metrics_outside_transaction.py @@ -0,0 +1,93 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import copy + +from newrelic.common.object_wrapper import ( + function_wrapper, + transient_function_wrapper, +) +from testing_support.fixtures import catch_background_exceptions + +def validate_time_metrics_outside_transaction(time_metrics=None, index=-1): + time_metrics = time_metrics or [] + + @function_wrapper + def _validate_wrapper(wrapped, instance, args, kwargs): + + record_time_metric_called = [] + recorded_metrics = [] + + @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.record_time_metric") + @catch_background_exceptions + def _validate_transaction_metrics(wrapped, instance, args, kwargs): + record_time_metric_called.append(True) + try: + result = wrapped(*args, **kwargs) + except: + raise + else: + metrics = instance.stats_table + # Record a copy of the metric value so that the values aren't + # merged in the future + _metrics = {} + for k, v in metrics.items(): + _metrics[k] = copy.copy(v) + recorded_metrics.append(_metrics) + + return result + + def _validate(metrics, name, count): + key = (name, "") + metric = 
metrics.get(key) + + def _metrics_table(): + out = [""] + out.append("Expected: {0}: {1}".format(key, count)) + for metric_key, metric_value in metrics.items(): + out.append("{0}: {1}".format(metric_key, metric_value[0])) + return "\n".join(out) + + def _metric_details(): + return "metric=%r, count=%r" % (key, metric.call_count) + + if count is not None: + assert metric is not None, _metrics_table() + if count == "present": + assert metric.call_count > 0, _metric_details() + else: + assert metric.call_count == count, _metric_details() + + assert metric.total_call_time >= 0, (key, metric) + assert metric.total_exclusive_call_time >= 0, (key, metric) + assert metric.min_call_time >= 0, (key, metric) + assert metric.sum_of_squares >= 0, (key, metric) + + else: + assert metric is None, _metrics_table() + + _new_wrapper = _validate_transaction_metrics(wrapped) + val = _new_wrapper(*args, **kwargs) + assert record_time_metric_called + metrics = recorded_metrics[index] + + record_time_metric_called[:] = [] + recorded_metrics[:] = [] + + for time_metric, count in time_metrics: + _validate(metrics, time_metric, count) + + return val + + return _validate_wrapper diff --git a/tests/testing_support/validators/validate_transaction_errors.py b/tests/testing_support/validators/validate_transaction_errors.py new file mode 100644 index 000000000..b00b7facd --- /dev/null +++ b/tests/testing_support/validators/validate_transaction_errors.py @@ -0,0 +1,76 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import copy + +from newrelic.common.object_wrapper import ( + function_wrapper, + transient_function_wrapper, +) +from testing_support.fixtures import catch_background_exceptions + +def validate_transaction_errors(errors=None, required_params=None, forgone_params=None): + errors = errors or [] + required_params = required_params or [] + forgone_params = forgone_params or [] + captured_errors = [] + + @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.record_transaction") + @catch_background_exceptions + def _capture_transaction_errors(wrapped, instance, args, kwargs): + def _bind_params(transaction, *args, **kwargs): + return transaction + + transaction = _bind_params(*args, **kwargs) + captured = transaction.errors + + captured_errors.append(captured) + + return wrapped(*args, **kwargs) + + @function_wrapper + def _validate_transaction_errors(wrapped, instance, args, kwargs): + _new_wrapped = _capture_transaction_errors(wrapped) + output = _new_wrapped(*args, **kwargs) + + expected = sorted(errors) + + if captured_errors: + captured = captured_errors[0] + else: + captured = [] + + if errors and isinstance(errors[0], (tuple, list)): + compare_to = sorted([(e.type, e.message) for e in captured]) + else: + compare_to = sorted([e.type for e in captured]) + + assert expected == compare_to, "expected=%r, captured=%r, errors=%r" % (expected, compare_to, captured) + + for e in captured: + assert e.span_id + for name, value in required_params: + assert name in e.custom_params, "name=%r, params=%r" % (name, e.custom_params) + assert e.custom_params[name] == value, "name=%r, value=%r, params=%r" % ( + name, + value, + e.custom_params, + ) + + for name, value in forgone_params: + assert name not in e.custom_params, "name=%r, params=%r" % (name, e.custom_params) + + return output + + return _validate_transaction_errors diff --git 
a/tests/testing_support/validators/validate_transaction_event_attributes.py b/tests/testing_support/validators/validate_transaction_event_attributes.py new file mode 100644 index 000000000..7ef8c0629 --- /dev/null +++ b/tests/testing_support/validators/validate_transaction_event_attributes.py @@ -0,0 +1,53 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from newrelic.common.object_wrapper import ( + function_wrapper, + transient_function_wrapper, +) +from testing_support.fixtures import check_event_attributes + +def validate_transaction_event_attributes(required_params=None, forgone_params=None, exact_attrs=None, index=-1): + required_params = required_params or {} + forgone_params = forgone_params or {} + exact_attrs = exact_attrs or {} + + captured_events = [] + + @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.record_transaction") + def _capture_transaction_events(wrapped, instance, args, kwargs): + try: + result = wrapped(*args, **kwargs) + except: + raise + else: + event_data = instance.transaction_events + captured_events.append(event_data) + return result + + @function_wrapper + def _validate_transaction_event_attributes(wrapped, instance, args, kwargs): + _new_wrapper = _capture_transaction_events(wrapped) + result = _new_wrapper(*args, **kwargs) + + assert captured_events, "No events captured" + event_data = captured_events[index] + captured_events[:] = [] + + check_event_attributes(event_data, 
required_params, forgone_params, exact_attrs) + + return result + + return _validate_transaction_event_attributes \ No newline at end of file diff --git a/tests/testing_support/validators/validate_transaction_metrics.py b/tests/testing_support/validators/validate_transaction_metrics.py new file mode 100644 index 000000000..7122b009a --- /dev/null +++ b/tests/testing_support/validators/validate_transaction_metrics.py @@ -0,0 +1,135 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import copy + +from newrelic.common.object_wrapper import ( + function_wrapper, + transient_function_wrapper, +) +from testing_support.fixtures import catch_background_exceptions + +def validate_transaction_metrics( + name, + group="Function", + background_task=False, + scoped_metrics=None, + rollup_metrics=None, + custom_metrics=None, + index=-1, +): + scoped_metrics = scoped_metrics or [] + rollup_metrics = rollup_metrics or [] + custom_metrics = custom_metrics or [] + + if background_task: + unscoped_metrics = [ + "OtherTransaction/all", + "OtherTransaction/%s/%s" % (group, name), + "OtherTransactionTotalTime", + "OtherTransactionTotalTime/%s/%s" % (group, name), + ] + transaction_scope_name = "OtherTransaction/%s/%s" % (group, name) + else: + unscoped_metrics = [ + "WebTransaction", + "WebTransaction/%s/%s" % (group, name), + "WebTransactionTotalTime", + "WebTransactionTotalTime/%s/%s" % (group, name), + "HttpDispatcher", + ] + transaction_scope_name = "WebTransaction/%s/%s" % (group, name) + + @function_wrapper + def _validate_wrapper(wrapped, instance, args, kwargs): + + record_transaction_called = [] + recorded_metrics = [] + + @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.record_transaction") + @catch_background_exceptions + def _validate_transaction_metrics(wrapped, instance, args, kwargs): + record_transaction_called.append(True) + try: + result = wrapped(*args, **kwargs) + except: + raise + else: + metrics = instance.stats_table + # Record a copy of the metric value so that the values aren't + # merged in the future + _metrics = {} + for k, v in metrics.items(): + _metrics[k] = copy.copy(v) + recorded_metrics.append(_metrics) + + return result + + def _validate(metrics, name, scope, count): + key = (name, scope) + metric = metrics.get(key) + + def _metrics_table(): + out = [""] + out.append("Expected: {0}: {1}".format(key, count)) + for metric_key, metric_value in metrics.items(): + out.append("{0}: {1}".format(metric_key, 
metric_value[0])) + return "\n".join(out) + + def _metric_details(): + return "metric=%r, count=%r" % (key, metric.call_count) + + if count is not None: + assert metric is not None, _metrics_table() + if count == "present": + assert metric.call_count > 0, _metric_details() + else: + assert metric.call_count == count, _metric_details() + + assert metric.total_call_time >= 0, (key, metric) + assert metric.total_exclusive_call_time >= 0, (key, metric) + assert metric.min_call_time >= 0, (key, metric) + assert metric.sum_of_squares >= 0, (key, metric) + + else: + assert metric is None, _metrics_table() + + _new_wrapper = _validate_transaction_metrics(wrapped) + val = _new_wrapper(*args, **kwargs) + assert record_transaction_called + metrics = recorded_metrics[index] + + record_transaction_called[:] = [] + recorded_metrics[:] = [] + + for unscoped_metric in unscoped_metrics: + _validate(metrics, unscoped_metric, "", 1) + + for scoped_name, scoped_count in scoped_metrics: + _validate(metrics, scoped_name, transaction_scope_name, scoped_count) + + for rollup_name, rollup_count in rollup_metrics: + _validate(metrics, rollup_name, "", rollup_count) + + for custom_name, custom_count in custom_metrics: + _validate(metrics, custom_name, "", custom_count) + + custom_metric_names = {name for name, _ in custom_metrics} + for name, _ in metrics: + if name not in custom_metric_names: + assert not name.startswith("Supportability/api/"), name + + return val + + return _validate_wrapper \ No newline at end of file From 1bb2be495bade69de2c126b689620bb4abe7ff5d Mon Sep 17 00:00:00 2001 From: Kevin Morey Date: Mon, 14 Nov 2022 14:19:00 -0600 Subject: [PATCH 022/108] Initialize ExternalNode properties (#687) Co-authored-by: Hannah Stepanek --- newrelic/core/external_node.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/newrelic/core/external_node.py b/newrelic/core/external_node.py index 16c113794..20e07e9a5 100644 --- a/newrelic/core/external_node.py +++ 
b/newrelic/core/external_node.py @@ -32,6 +32,8 @@ class ExternalNode(_ExternalNode, GenericNodeMixin): + cross_process_id = None + external_txn_name = None @property def details(self): From 7ae0c94491be64b0a92e1e1c6e91314f86513868 Mon Sep 17 00:00:00 2001 From: Lalleh Rafeei <84813886+lrafeei@users.noreply.github.com> Date: Wed, 16 Nov 2022 10:25:08 -0800 Subject: [PATCH 023/108] Fix package_version_utils.py logic (#689) * Fix package_version_utils.py logic Co-authored-by: Timothy Pansino Co-authored-by: Hannah Stepanek Co-authored-by: Uma Annamalai * Move description of func into func itself * typecast lists into tuples * Remove breakpoints * Empty _test_package_version_utils.py * Make changes to the test Co-authored-by: Timothy Pansino Co-authored-by: Hannah Stepanek Co-authored-by: Uma Annamalai --- newrelic/common/package_version_utils.py | 57 +++++++++++++-- .../test_package_version_utils.py | 71 +++++++++++++++++++ 2 files changed, 123 insertions(+), 5 deletions(-) create mode 100644 tests/agent_unittests/test_package_version_utils.py diff --git a/newrelic/common/package_version_utils.py b/newrelic/common/package_version_utils.py index c881d7ada..a785f7ab1 100644 --- a/newrelic/common/package_version_utils.py +++ b/newrelic/common/package_version_utils.py @@ -14,10 +14,57 @@ import sys +# Need to account for 4 possible variations of version declaration specified in (rejected) PEP 396 +VERSION_ATTRS = ("__version__", "version", "__version_tuple__", "version_tuple") # nosec +NULL_VERSIONS = frozenset((None, "", "0", "0.0", "0.0.0", "0.0.0.0", (0,), (0, 0), (0, 0, 0), (0, 0, 0, 0))) # nosec + def get_package_version(name): - # importlib was introduced into the standard library starting in Python3.8. 
- if "importlib" in sys.modules and hasattr(sys.modules["importlib"], "metadata"): - return sys.modules["importlib"].metadata.version(name) # pylint: disable=E1101 - elif "pkg_resources" in sys.modules: - return sys.modules["pkg_resources"].get_distribution(name).version + """Gets the version of the library. + :param name: The name of library. + :type name: str + :return: The version of the library. Returns None if can't determine version. + :type return: str or None + + Usage:: + >>> get_package_version("botocore") + "1.1.0" + """ + + def _get_package_version(name): + module = sys.modules.get(name, None) + version = None + for attr in VERSION_ATTRS: + try: + version = getattr(module, attr, None) + # Cast any version specified as a list into a tuple. + version = tuple(version) if isinstance(version, list) else version + if version not in NULL_VERSIONS: + return version + except Exception: + pass + + # importlib was introduced into the standard library starting in Python3.8. + if "importlib" in sys.modules and hasattr(sys.modules["importlib"], "metadata"): + try: + version = sys.modules["importlib"].metadata.version(name) # pylint: disable=E1101 + if version not in NULL_VERSIONS: + return version + except Exception: + pass + + if "pkg_resources" in sys.modules: + try: + version = sys.modules["pkg_resources"].get_distribution(name).version + if version not in NULL_VERSIONS: + return version + except Exception: + pass + + version = _get_package_version(name) + + # Coerce iterables into a string + if isinstance(version, tuple): + version = ".".join(str(v) for v in version) + + return version diff --git a/tests/agent_unittests/test_package_version_utils.py b/tests/agent_unittests/test_package_version_utils.py new file mode 100644 index 000000000..14ee454fd --- /dev/null +++ b/tests/agent_unittests/test_package_version_utils.py @@ -0,0 +1,71 @@ +# Copyright 2010 New Relic, Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sys + +import pytest +from testing_support.validators.validate_function_called import validate_function_called + +from newrelic.common.package_version_utils import ( + NULL_VERSIONS, + VERSION_ATTRS, + get_package_version, +) + +IS_PY38_PLUS = sys.version_info[:2] >= (3, 8) +SKIP_IF_NOT_IMPORTLIB_METADATA = pytest.mark.skipif(not IS_PY38_PLUS, reason="importlib.metadata is not supported.") +SKIP_IF_IMPORTLIB_METADATA = pytest.mark.skipif( + IS_PY38_PLUS, reason="importlib.metadata is preferred over pkg_resources." 
+) + + +@pytest.fixture(scope="function", autouse=True) +def patched_pytest_module(monkeypatch): + for attr in VERSION_ATTRS: + if hasattr(pytest, attr): + monkeypatch.delattr(pytest, attr) + + yield pytest + + +@pytest.mark.parametrize( + "attr,value,expected_value", + ( + ("version", "1.2.3.4", "1.2.3.4"), + ("__version__", "1.3.5rc2", "1.3.5rc2"), + ("__version_tuple__", (3, 5, 8), "3.5.8"), + ("version_tuple", [3, 1, "0b2"], "3.1.0b2"), + ), +) +def test_get_package_version(attr, value, expected_value): + # There is no file/module here, so we monkeypatch + # pytest instead for our purposes + setattr(pytest, attr, value) + version = get_package_version("pytest") + assert version == expected_value + delattr(pytest, attr) + + +@SKIP_IF_NOT_IMPORTLIB_METADATA +@validate_function_called("importlib.metadata", "version") +def test_importlib_metadata(): + version = get_package_version("pytest") + assert version not in NULL_VERSIONS, version + + +@SKIP_IF_IMPORTLIB_METADATA +@validate_function_called("pkg_resources", "get_distribution") +def test_pkg_resources_metadata(): + version = get_package_version("pytest") + assert version not in NULL_VERSIONS, version From eb28b52b82aa1db85e14b8ff417f179a0a98c81d Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Wed, 23 Nov 2022 14:28:45 -0800 Subject: [PATCH 024/108] Pin Github Actions Runner to Ubuntu 20 for Py27 (#698) * Pin Github Actions runner to ubuntu 20 for Py27 * Upgrade setup-python --- .../actions/setup-python-matrix/action.yml | 16 +++++----- .github/workflows/tests.yml | 30 +++++++++---------- 2 files changed, 23 insertions(+), 23 deletions(-) diff --git a/.github/actions/setup-python-matrix/action.yml b/.github/actions/setup-python-matrix/action.yml index 0f6b64389..3654f7eb2 100644 --- a/.github/actions/setup-python-matrix/action.yml +++ b/.github/actions/setup-python-matrix/action.yml @@ -3,42 +3,42 @@ description: "Sets up all versions of python required for 
matrix testing in this runs: using: "composite" steps: - - uses: actions/setup-python@v3 + - uses: actions/setup-python@v4 with: python-version: "pypy-3.7" architecture: x64 - - uses: actions/setup-python@v3 + - uses: actions/setup-python@v4 with: python-version: "pypy-2.7" architecture: x64 - - uses: actions/setup-python@v3 + - uses: actions/setup-python@v4 with: python-version: "3.7" architecture: x64 - - uses: actions/setup-python@v3 + - uses: actions/setup-python@v4 with: python-version: "3.8" architecture: x64 - - uses: actions/setup-python@v3 + - uses: actions/setup-python@v4 with: python-version: "3.9" architecture: x64 - - uses: actions/setup-python@v3 + - uses: actions/setup-python@v4 with: python-version: "3.10" architecture: x64 - - uses: actions/setup-python@v3 + - uses: actions/setup-python@v4 with: python-version: "3.11" architecture: x64 - - uses: actions/setup-python@v3 + - uses: actions/setup-python@v4 with: python-version: "2.7" architecture: x64 diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index f59b55f66..947608207 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -26,7 +26,7 @@ on: jobs: tests: # Aggregate job that provides a single check for workflow success - runs-on: ubuntu-latest + runs-on: ubuntu-20.04 needs: - python - elasticsearchserver01 @@ -78,7 +78,7 @@ jobs: 20, ] - runs-on: ubuntu-latest + runs-on: ubuntu-20.04 timeout-minutes: 30 steps: @@ -108,7 +108,7 @@ jobs: matrix: group-number: [1] - runs-on: ubuntu-latest + runs-on: ubuntu-20.04 timeout-minutes: 30 steps: @@ -138,7 +138,7 @@ jobs: matrix: group-number: [1] - runs-on: ubuntu-latest + runs-on: ubuntu-20.04 timeout-minutes: 30 steps: @@ -174,7 +174,7 @@ jobs: matrix: group-number: [1, 2] - runs-on: ubuntu-latest + runs-on: ubuntu-20.04 timeout-minutes: 30 services: @@ -219,7 +219,7 @@ jobs: matrix: group-number: [1, 2] - runs-on: ubuntu-latest + runs-on: ubuntu-20.04 timeout-minutes: 30 services: @@ -267,7 +267,7 @@ jobs: 
matrix: group-number: [1, 2] - runs-on: ubuntu-latest + runs-on: ubuntu-20.04 timeout-minutes: 30 services: @@ -310,7 +310,7 @@ jobs: matrix: group-number: [1] - runs-on: ubuntu-latest + runs-on: ubuntu-20.04 timeout-minutes: 30 services: @@ -355,7 +355,7 @@ jobs: matrix: group-number: [1, 2] - runs-on: ubuntu-latest + runs-on: ubuntu-20.04 timeout-minutes: 30 services: @@ -398,7 +398,7 @@ jobs: matrix: group-number: [1] - runs-on: ubuntu-latest + runs-on: ubuntu-20.04 timeout-minutes: 30 services: @@ -442,7 +442,7 @@ jobs: matrix: group-number: [1, 2, 3, 4] - runs-on: ubuntu-latest + runs-on: ubuntu-20.04 timeout-minutes: 30 services: @@ -507,7 +507,7 @@ jobs: matrix: group-number: [1] - runs-on: ubuntu-latest + runs-on: ubuntu-20.04 timeout-minutes: 30 services: @@ -550,7 +550,7 @@ jobs: matrix: group-number: [1] - runs-on: ubuntu-latest + runs-on: ubuntu-20.04 timeout-minutes: 30 services: @@ -595,7 +595,7 @@ jobs: matrix: group-number: [1] - runs-on: ubuntu-latest + runs-on: ubuntu-20.04 timeout-minutes: 30 services: @@ -640,7 +640,7 @@ jobs: matrix: group-number: [1] - runs-on: ubuntu-latest + runs-on: ubuntu-20.04 timeout-minutes: 30 services: From eead7a7eea559050da0b3c92af895659f0f36912 Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Wed, 23 Nov 2022 17:43:02 -0800 Subject: [PATCH 025/108] Fix Confluent Kafka Producer Arguments (#699) * Add confluentkafka test for posargs/kwargs * Fix confluent kafka topic argument bug * More sensible producer arguments --- .../hooks/messagebroker_confluentkafka.py | 2 +- .../test_producer.py | 54 ++++++++++++++----- 2 files changed, 43 insertions(+), 13 deletions(-) diff --git a/newrelic/hooks/messagebroker_confluentkafka.py b/newrelic/hooks/messagebroker_confluentkafka.py index e735b8ade..81d9fa59a 100644 --- a/newrelic/hooks/messagebroker_confluentkafka.py +++ b/newrelic/hooks/messagebroker_confluentkafka.py @@ -55,7 +55,7 @@ def wrap_Producer_produce(wrapped, instance, 
args, kwargs): topic = args[0] args = args[1:] else: - topic = kwargs.get("topic", None) + topic = kwargs.pop("topic", None) transaction.add_messagebroker_info("Confluent-Kafka", get_package_version("confluent-kafka")) diff --git a/tests/messagebroker_confluentkafka/test_producer.py b/tests/messagebroker_confluentkafka/test_producer.py index b5dcff020..2b3e74e7a 100644 --- a/tests/messagebroker_confluentkafka/test_producer.py +++ b/tests/messagebroker_confluentkafka/test_producer.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import time import threading import pytest @@ -36,34 +37,63 @@ ) @background_task() def test_produce_arguments(topic, producer, client_type, serialize, headers): - callback_called = threading.Event() + callback1_called = threading.Event() + callback2_called = threading.Event() + ts = int(time.time()) - def producer_callback(err, msg): - callback_called.set() + def producer_callback1(err, msg): + callback1_called.set() + + def producer_callback2(err, msg): + callback2_called.set() if client_type == "cimpl": + # Keyword Args producer.produce( - topic, + topic=topic, value=serialize({"foo": 1}), key=serialize("my-key"), - callback=producer_callback, - partition=1, - timestamp=1, + partition=0, + callback=producer_callback2, + timestamp=ts, headers=headers, ) - else: + # Positional Args producer.produce( topic, + serialize({"foo": 1}), + serialize("my-key"), + 0, + producer_callback1, + None, + ts, + headers, + ) + else: + # Keyword Args + producer.produce( + topic=topic, value=serialize({"foo": 1}), key=serialize("my-key"), - partition=1, - on_delivery=producer_callback, - timestamp=1, + partition=0, + on_delivery=producer_callback2, + timestamp=ts, headers=headers, ) + # Positional Args + producer.produce( + topic, + serialize("my-key"), + serialize({"foo": 1}), + 0, + producer_callback1, + ts, + headers, + ) producer.flush() - assert callback_called.wait(5), "Callback 
never called." + assert callback1_called.wait(5), "Callback never called." + assert callback2_called.wait(5), "Callback never called." def test_trace_metrics(topic, send_producer_message): From 60a5d687e00a8d661c6c77ac500bbebb7f8d17d8 Mon Sep 17 00:00:00 2001 From: Hannah Stepanek Date: Mon, 28 Nov 2022 10:32:53 -0800 Subject: [PATCH 026/108] Fix tornado master tests & instrument redis 4.3.5 (#695) * Remove 3.7 testing of tornado master tornadomaster dropped support for 3.7 * Instrument new redis 4.3.5 client methods Co-authored-by: Lalleh Rafeei <84813886+lrafeei@users.noreply.github.com> Co-authored-by: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> --- newrelic/hooks/datastore_redis.py | 17 +++++++++++++++-- tox.ini | 2 +- 2 files changed, 16 insertions(+), 3 deletions(-) diff --git a/newrelic/hooks/datastore_redis.py b/newrelic/hooks/datastore_redis.py index f25c76e65..7ea60f1fa 100644 --- a/newrelic/hooks/datastore_redis.py +++ b/newrelic/hooks/datastore_redis.py @@ -66,6 +66,8 @@ "blpop", "brpop", "brpoplpush", + "byrank", + "byrevrank", "bzmpop", "bzpopmax", "bzpopmin", @@ -289,6 +291,7 @@ "quit", "randomkey", "range", + "rank", "readonly", "readwrite", "rename", @@ -299,6 +302,7 @@ "resp", "restore", "revrange", + "revrank", "role", "rpop", "rpoplpush", @@ -376,6 +380,7 @@ "time", "toggle", "touch", + "trimmed_mean", "ttl", "type", "unlink", @@ -528,7 +533,15 @@ def _nr_Connection_send_command_wrapper_(wrapped, instance, args, kwargs): operation = _redis_operation_re.sub("_", operation) - with DatastoreTrace(product="Redis", target=None, operation=operation, host=host, port_path_or_id=port_path_or_id, database_name=db, source=wrapped): + with DatastoreTrace( + product="Redis", + target=None, + operation=operation, + host=host, + port_path_or_id=port_path_or_id, + database_name=db, + source=wrapped, + ): return wrapped(*args, **kwargs) @@ -574,7 +587,7 @@ def instrument_redis_commands_bf_commands(module): 
_instrument_redis_commands_module(module, "CMSCommands") _instrument_redis_commands_module(module, "TDigestCommands") _instrument_redis_commands_module(module, "TOPKCommands") - + def _instrument_redis_commands_module(module, class_name): for name in _redis_client_methods: diff --git a/tox.ini b/tox.ini index f8e2b2e0c..c815defc2 100644 --- a/tox.ini +++ b/tox.ini @@ -153,7 +153,7 @@ envlist = python-logger_loguru-{py37,py38,py39,py310,py311,pypy37}-logurulatest, python-logger_loguru-py39-loguru{06,05,04,03}, libcurl-framework_tornado-{py37,py38,py39,py310,py311,pypy37}-tornado0600, - libcurl-framework_tornado-{py37,py38,py39,py310,py311}-tornadomaster, + libcurl-framework_tornado-{py38,py39,py310,py311}-tornadomaster, rabbitmq-messagebroker_pika-{py27,py37,py38,py39,pypy,pypy37}-pika0.13, rabbitmq-messagebroker_pika-{py37,py38,py39,py310,py311,pypy37}-pikalatest, kafka-messagebroker_confluentkafka-{py27,py37,py38,py39,py310,py311}-confluentkafkalatest, From 67feaa7cb788440017d342e91a6d270736267530 Mon Sep 17 00:00:00 2001 From: Hannah Stepanek Date: Mon, 28 Nov 2022 12:13:43 -0800 Subject: [PATCH 027/108] Remove pylint codes from flake8 config (#701) --- setup.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index 544bdbea3..453a10eeb 100644 --- a/setup.cfg +++ b/setup.cfg @@ -5,4 +5,4 @@ license_files = [flake8] max-line-length = 120 -extend-ignore = C0103,C0115,C0116,C0415,E0401,E1120,E122,E126,E127,E128,E203,E501,E722,F841,R1725,W0613,W0613,W504 +extend-ignore = E122,E126,E127,E128,E203,E501,E722,F841,W504 From 87b649f6b2354373dbe78c275706adae2ab20eb7 Mon Sep 17 00:00:00 2001 From: Lalleh Rafeei <84813886+lrafeei@users.noreply.github.com> Date: Tue, 29 Nov 2022 17:07:46 -0800 Subject: [PATCH 028/108] Validator transfer from fixtures.py to validators directory, Part 2 (#690) * Move validate_transaction_metrics to validators directory * Comment out original validate_transaction_metrics from fixtures.py * Move 
validate_time_metrics_outside_transaction to validators directory * Move validate_internal_metrics into validators directory and fixed validate_transaction_metrics * Move validate_transaction_errors into validators directory * Move validate_application_errors into validators directory * Move validate_custom_parameters into validators directory * Move validate_synthetics_event into validators directory * Move validate_transaction_event_attributes into validators directory * Move validate_non_transaction_error_event into validators directory * Move validate_application_error_trace_count into validators directory * Move validate_application_error_event_count into validators directory * Move validate_synthetics_transaction_trace into validators directory * Move validate_tt_collector_json to validators directory * Move validate_transaction_trace_attributes into validator directory * Move validate_transaction_error_trace_attributes into validator directory * Move validate_error_trace_collector_json into validator directory * Move validate_error_event_collector_json into validator directory * Move validate_transaction_event_collector_json into validator directory * Fix import issues from merge * Fix some pylint errors * Revert 'raise ValueError' to be PY2 compatible * Delete commented lines --- .../test_attributes_in_action.py | 8 +- .../agent_features/test_collector_payloads.py | 51 +- .../test_event_loop_wait_time.py | 12 +- .../test_ignore_expected_errors.py | 4 +- tests/agent_features/test_lambda_handler.py | 10 +- tests/agent_features/test_notice_error.py | 16 +- tests/agent_features/test_span_events.py | 29 +- tests/agent_features/test_synthetics.py | 6 +- tests/agent_features/test_web_transaction.py | 2 +- tests/agent_features/test_wsgi_attributes.py | 4 +- .../test_custom_conn_pool.py | 7 +- .../test_execute_command.py | 12 +- tests/datastore_aioredis/test_multiple_dbs.py | 12 +- tests/datastore_aioredis/test_trace_node.py | 10 +- 
tests/datastore_aredis/test_trace_node.py | 86 ++-- tests/datastore_asyncpg/test_query.py | 10 +- .../test_trace_node.py | 64 +-- tests/datastore_psycopg2/test_trace_node.py | 60 +-- tests/datastore_redis/test_trace_node.py | 76 ++- .../framework_sanic/test_cross_application.py | 2 + tests/framework_tornado/test_server.py | 1 + .../test_pika_async_connection_consume.py | 327 ++++++------ .../test_pika_blocking_connection_consume.py | 213 ++++---- ...a_blocking_connection_consume_generator.py | 219 ++++---- tests/messagebroker_pika/test_pika_produce.py | 14 +- tests/testing_support/fixtures.py | 469 ------------------ .../validate_application_error_event_count.py | 39 ++ .../validate_application_error_trace_count.py | 39 ++ .../validate_error_event_collector_json.py | 69 +++ .../validate_error_trace_collector_json.py | 58 +++ .../validate_non_transaction_error_event.py | 4 +- .../validate_synthetics_transaction_trace.py | 67 +++ ...date_transaction_error_trace_attributes.py | 49 ++ ...lidate_transaction_event_collector_json.py | 56 +++ .../validate_transaction_trace_attributes.py | 69 +++ .../validators/validate_tt_collector_json.py | 184 +++++++ 36 files changed, 1310 insertions(+), 1048 deletions(-) create mode 100644 tests/testing_support/validators/validate_application_error_event_count.py create mode 100644 tests/testing_support/validators/validate_application_error_trace_count.py create mode 100644 tests/testing_support/validators/validate_error_event_collector_json.py create mode 100644 tests/testing_support/validators/validate_error_trace_collector_json.py create mode 100644 tests/testing_support/validators/validate_synthetics_transaction_trace.py create mode 100644 tests/testing_support/validators/validate_transaction_error_trace_attributes.py create mode 100644 tests/testing_support/validators/validate_transaction_event_collector_json.py create mode 100644 tests/testing_support/validators/validate_transaction_trace_attributes.py create mode 100644 
tests/testing_support/validators/validate_tt_collector_json.py diff --git a/tests/agent_features/test_attributes_in_action.py b/tests/agent_features/test_attributes_in_action.py index fbedb302e..e51298dbf 100644 --- a/tests/agent_features/test_attributes_in_action.py +++ b/tests/agent_features/test_attributes_in_action.py @@ -24,13 +24,17 @@ validate_error_event_attributes, validate_error_event_attributes_outside_transaction, validate_error_trace_attributes_outside_transaction, - validate_transaction_error_trace_attributes, - validate_transaction_trace_attributes, ) from testing_support.validators.validate_span_events import validate_span_events +from testing_support.validators.validate_transaction_error_trace_attributes import ( + validate_transaction_error_trace_attributes, +) from testing_support.validators.validate_transaction_event_attributes import ( validate_transaction_event_attributes, ) +from testing_support.validators.validate_transaction_trace_attributes import ( + validate_transaction_trace_attributes, +) from newrelic.api.application import application_instance as application from newrelic.api.message_transaction import message_transaction diff --git a/tests/agent_features/test_collector_payloads.py b/tests/agent_features/test_collector_payloads.py index 17b46ce49..0c1b2367c 100644 --- a/tests/agent_features/test_collector_payloads.py +++ b/tests/agent_features/test_collector_payloads.py @@ -14,17 +14,30 @@ import pytest import webtest - -from testing_support.fixtures import (validate_error_trace_collector_json, - validate_tt_collector_json, validate_transaction_event_collector_json, - validate_error_event_collector_json, - validate_custom_event_collector_json, override_application_settings) - -from testing_support.sample_applications import (simple_app, - simple_exceptional_app, simple_custom_event_app) - -from testing_support.validators.validate_log_event_collector_json import validate_log_event_collector_json - +from testing_support.fixtures import 
( + override_application_settings, + validate_custom_event_collector_json, +) +from testing_support.sample_applications import ( + simple_app, + simple_custom_event_app, + simple_exceptional_app, +) +from testing_support.validators.validate_error_event_collector_json import ( + validate_error_event_collector_json, +) +from testing_support.validators.validate_error_trace_collector_json import ( + validate_error_trace_collector_json, +) +from testing_support.validators.validate_log_event_collector_json import ( + validate_log_event_collector_json, +) +from testing_support.validators.validate_transaction_event_collector_json import ( + validate_transaction_event_collector_json, +) +from testing_support.validators.validate_tt_collector_json import ( + validate_tt_collector_json, +) exceptional_application = webtest.TestApp(simple_exceptional_app) normal_application = webtest.TestApp(simple_app) @@ -34,7 +47,7 @@ @validate_error_trace_collector_json() def test_error_trace_json(): try: - exceptional_application.get('/') + exceptional_application.get("/") except ValueError: pass @@ -42,34 +55,34 @@ def test_error_trace_json(): @validate_error_event_collector_json() def test_error_event_json(): try: - exceptional_application.get('/') + exceptional_application.get("/") except ValueError: pass @validate_tt_collector_json() def test_transaction_trace_json(): - normal_application.get('/') + normal_application.get("/") @validate_tt_collector_json(exclude_request_uri=True) -@override_application_settings({'attributes.exclude': set(('request.uri',))}) +@override_application_settings({"attributes.exclude": set(("request.uri",))}) def test_transaction_trace_json_no_request_uri(): - normal_application.get('/') + normal_application.get("/") @validate_transaction_event_collector_json() def test_transaction_event_json(): - normal_application.get('/') + normal_application.get("/") @validate_custom_event_collector_json() def test_custom_event_json(): - custom_event_application.get('/') + 
custom_event_application.get("/") @pytest.mark.xfail(reason="Unwritten validator") @validate_log_event_collector_json def test_log_event_json(): - normal_application.get('/') + normal_application.get("/") raise NotImplementedError("Fix my validator") diff --git a/tests/agent_features/test_event_loop_wait_time.py b/tests/agent_features/test_event_loop_wait_time.py index b4906337f..69e6fc102 100644 --- a/tests/agent_features/test_event_loop_wait_time.py +++ b/tests/agent_features/test_event_loop_wait_time.py @@ -16,16 +16,16 @@ import time import pytest -from testing_support.fixtures import ( - override_application_settings, - validate_transaction_trace_attributes, -) +from testing_support.fixtures import override_application_settings from testing_support.validators.validate_transaction_event_attributes import ( validate_transaction_event_attributes, ) from testing_support.validators.validate_transaction_metrics import ( validate_transaction_metrics, ) +from testing_support.validators.validate_transaction_trace_attributes import ( + validate_transaction_trace_attributes, +) from newrelic.api.background_task import background_task from newrelic.api.function_trace import FunctionTrace, function_trace @@ -75,7 +75,7 @@ async def wait_for_loop(ready, done, times=1): ), ) def test_record_event_loop_wait(event_loop, blocking_transaction_active, event_loop_visibility_enabled): - import asyncio + # import asyncio metric_count = 2 if event_loop_visibility_enabled else None execute_attributes = {"intrinsic": ("eventLoopTime",), "agent": (), "user": ()} @@ -184,7 +184,7 @@ def test_blocking_task_on_different_loop(): def test_record_event_loop_wait_on_different_task(event_loop): - import asyncio + # import asyncio async def recorder(ready, wait): ready.set() diff --git a/tests/agent_features/test_ignore_expected_errors.py b/tests/agent_features/test_ignore_expected_errors.py index 1a7fa266e..5cf61eced 100644 --- a/tests/agent_features/test_ignore_expected_errors.py +++ 
b/tests/agent_features/test_ignore_expected_errors.py @@ -19,11 +19,13 @@ validate_error_event_attributes_outside_transaction, validate_error_event_sample_data, validate_error_trace_attributes_outside_transaction, - validate_transaction_error_trace_attributes, ) from testing_support.validators.validate_time_metrics_outside_transaction import ( validate_time_metrics_outside_transaction, ) +from testing_support.validators.validate_transaction_error_trace_attributes import ( + validate_transaction_error_trace_attributes, +) from testing_support.validators.validate_transaction_errors import ( validate_transaction_errors, ) diff --git a/tests/agent_features/test_lambda_handler.py b/tests/agent_features/test_lambda_handler.py index f388aa51b..40b694407 100644 --- a/tests/agent_features/test_lambda_handler.py +++ b/tests/agent_features/test_lambda_handler.py @@ -16,15 +16,15 @@ from copy import deepcopy import pytest -from testing_support.fixtures import ( - override_application_settings, - validate_transaction_trace_attributes, -) +from testing_support.fixtures import override_application_settings from testing_support.validators.validate_transaction_event_attributes import ( validate_transaction_event_attributes, ) +from testing_support.validators.validate_transaction_trace_attributes import ( + validate_transaction_trace_attributes, +) -import newrelic.api.lambda_handler as lambda_handler +from newrelic.api import lambda_handler # NOTE: this fixture will force all tests in this file to assume that a cold diff --git a/tests/agent_features/test_notice_error.py b/tests/agent_features/test_notice_error.py index a4509e215..913ee9289 100644 --- a/tests/agent_features/test_notice_error.py +++ b/tests/agent_features/test_notice_error.py @@ -19,14 +19,22 @@ error_is_saved, override_application_settings, reset_core_stats_engine, - validate_application_error_event_count, - validate_application_error_trace_count, validate_transaction_error_event_count, 
validate_transaction_error_trace_count, ) +from testing_support.validators.validate_application_error_event_count import ( + validate_application_error_event_count, +) +from testing_support.validators.validate_application_error_trace_count import ( + validate_application_error_trace_count, +) +from testing_support.validators.validate_application_errors import ( + validate_application_errors, +) +from testing_support.validators.validate_transaction_errors import ( + validate_transaction_errors, +) -from testing_support.validators.validate_application_errors import validate_application_errors -from testing_support.validators.validate_transaction_errors import validate_transaction_errors from newrelic.api.application import application_instance as application from newrelic.api.application import application_settings from newrelic.api.background_task import background_task diff --git a/tests/agent_features/test_span_events.py b/tests/agent_features/test_span_events.py index 155642860..655efee8c 100644 --- a/tests/agent_features/test_span_events.py +++ b/tests/agent_features/test_span_events.py @@ -34,7 +34,6 @@ from newrelic.api.datastore_trace import DatastoreTrace from newrelic.api.external_trace import ExternalTrace from newrelic.api.function_trace import FunctionTrace, function_trace -from newrelic.api.graphql_trace import GraphQLOperationTrace, GraphQLResolverTrace from newrelic.api.memcache_trace import MemcacheTrace from newrelic.api.message_trace import MessageTrace from newrelic.api.solr_trace import SolrTrace @@ -125,8 +124,6 @@ def _test(): (DatastoreTrace, ("db_product", "db_target", "db_operation")), (ExternalTrace, ("lib", "url")), (FunctionTrace, ("name",)), - (GraphQLOperationTrace, ()), - (GraphQLResolverTrace, ()), (MemcacheTrace, ("command",)), (MessageTrace, ("lib", "operation", "dst_type", "dst_name")), (SolrTrace, ("lib", "command")), @@ -416,11 +413,11 @@ def _test(): @pytest.mark.parametrize("collect_span_events", (False, True)) 
@pytest.mark.parametrize("span_events_enabled", (False, True)) def test_collect_span_events_override(collect_span_events, span_events_enabled): - - if collect_span_events and span_events_enabled: - spans_expected = True - else: - spans_expected = False + spans_expected = collect_span_events and span_events_enabled + # if collect_span_events and span_events_enabled: + # spans_expected = True + # else: + # spans_expected = False span_count = 2 if spans_expected else 0 @@ -556,9 +553,9 @@ def _test(): def test_span_user_attribute_overrides_transaction_attribute(): transaction = current_transaction() - transaction.add_custom_attribute("foo", "a") + transaction.add_custom_parameter("foo", "a") add_custom_span_attribute("foo", "b") - transaction.add_custom_attribute("foo", "c") + transaction.add_custom_parameter("foo", "c") @override_application_settings({"attributes.include": "*"}) @@ -603,7 +600,7 @@ def _test(): transaction = current_transaction() for i in range(128): - transaction.add_custom_attribute("txn_attr%i" % i, "txnValue") + transaction.add_custom_parameter("txn_attr%i" % i, "txnValue") if i < 64: add_custom_span_attribute("span_attr%i" % i, "spanValue") @@ -620,8 +617,6 @@ def _test(): (DatastoreTrace, ("db_product", "db_target", "db_operation")), (ExternalTrace, ("lib", "url")), (FunctionTrace, ("name",)), - (GraphQLOperationTrace, ()), - (GraphQLResolverTrace, ()), (MemcacheTrace, ("command",)), (MessageTrace, ("lib", "operation", "dst_type", "dst_name")), (SolrTrace, ("lib", "command")), @@ -671,8 +666,6 @@ def _test(): (DatastoreTrace, ("db_product", "db_target", "db_operation")), (ExternalTrace, ("lib", "url")), (FunctionTrace, ("name",)), - (GraphQLOperationTrace, ()), - (GraphQLResolverTrace, ()), (MemcacheTrace, ("command",)), (MessageTrace, ("lib", "operation", "dst_type", "dst_name")), (SolrTrace, ("lib", "command")), @@ -716,8 +709,6 @@ def _test(): (DatastoreTrace, ("db_product", "db_target", "db_operation")), (ExternalTrace, ("lib", "url")), 
(FunctionTrace, ("name",)), - (GraphQLOperationTrace, ()), - (GraphQLResolverTrace, ()), (MemcacheTrace, ("command",)), (MessageTrace, ("lib", "operation", "dst_type", "dst_name")), (SolrTrace, ("lib", "command")), @@ -732,9 +723,9 @@ def test_span_event_notice_error_overrides_observed(trace_type, args): with trace_type(*args): try: raise ERROR - except: + except Exception: notice_error() - raise ValueError + raise ValueError # pylint: disable except ValueError: pass diff --git a/tests/agent_features/test_synthetics.py b/tests/agent_features/test_synthetics.py index ec7b78e96..2e08144cc 100644 --- a/tests/agent_features/test_synthetics.py +++ b/tests/agent_features/test_synthetics.py @@ -19,10 +19,14 @@ cat_enabled, make_synthetics_header, override_application_settings, +) +from testing_support.validators.validate_synthetics_event import ( + validate_synthetics_event, +) +from testing_support.validators.validate_synthetics_transaction_trace import ( validate_synthetics_transaction_trace, ) -from testing_support.validators.validate_synthetics_event import validate_synthetics_event from newrelic.api.web_transaction import web_transaction from newrelic.api.wsgi_application import wsgi_application from newrelic.common.encoding_utils import deobfuscate, json_decode diff --git a/tests/agent_features/test_web_transaction.py b/tests/agent_features/test_web_transaction.py index f2f08574a..66cf25858 100644 --- a/tests/agent_features/test_web_transaction.py +++ b/tests/agent_features/test_web_transaction.py @@ -24,10 +24,10 @@ validate_transaction_metrics, ) -import newrelic.packages.six as six from newrelic.api.application import application_instance from newrelic.api.web_transaction import WebTransaction from newrelic.api.wsgi_application import wsgi_application +from newrelic.packages import six application = webtest.TestApp(simple_app) diff --git a/tests/agent_features/test_wsgi_attributes.py b/tests/agent_features/test_wsgi_attributes.py index 7543e45d8..e90410b6d 100644 
--- a/tests/agent_features/test_wsgi_attributes.py +++ b/tests/agent_features/test_wsgi_attributes.py @@ -17,9 +17,11 @@ dt_enabled, override_application_settings, validate_error_event_attributes, - validate_transaction_error_trace_attributes, ) from testing_support.sample_applications import fully_featured_app +from testing_support.validators.validate_transaction_error_trace_attributes import ( + validate_transaction_error_trace_attributes, +) from testing_support.validators.validate_transaction_event_attributes import ( validate_transaction_event_attributes, ) diff --git a/tests/datastore_aioredis/test_custom_conn_pool.py b/tests/datastore_aioredis/test_custom_conn_pool.py index e59760ea3..b09cf0bdd 100644 --- a/tests/datastore_aioredis/test_custom_conn_pool.py +++ b/tests/datastore_aioredis/test_custom_conn_pool.py @@ -18,10 +18,9 @@ """ from testing_support.db_settings import redis_settings +from testing_support.fixture.event_loop import event_loop as loop # noqa from testing_support.fixtures import override_application_settings from testing_support.util import instance_hostname - -# from testing_support.fixture.event_loop import event_loop as loop from testing_support.validators.validate_transaction_metrics import ( validate_transaction_metrics, ) @@ -111,7 +110,7 @@ async def exercise_redis(client): background_task=True, ) @background_task() -def test_fake_conn_pool_enable_instance(client, loop, monkeypatch): +def test_fake_conn_pool_enable_instance(client, loop, monkeypatch): # noqa # Get a real connection conn = getattr(client, "_pool_or_conn", None) if conn is None: @@ -136,7 +135,7 @@ def test_fake_conn_pool_enable_instance(client, loop, monkeypatch): background_task=True, ) @background_task() -def test_fake_conn_pool_disable_instance(client, loop, monkeypatch): +def test_fake_conn_pool_disable_instance(client, loop, monkeypatch): # noqa # Get a real connection conn = getattr(client, "_pool_or_conn", None) if conn is None: diff --git 
a/tests/datastore_aioredis/test_execute_command.py b/tests/datastore_aioredis/test_execute_command.py index f6ee9eb27..54851a659 100644 --- a/tests/datastore_aioredis/test_execute_command.py +++ b/tests/datastore_aioredis/test_execute_command.py @@ -13,7 +13,9 @@ # limitations under the License. import pytest -from conftest import AIOREDIS_VERSION # , event_loop, loop + +# import aioredis +from conftest import AIOREDIS_VERSION, loop # noqa # pylint: disable=E0611,W0611 from testing_support.db_settings import redis_settings from testing_support.fixtures import override_application_settings from testing_support.util import instance_hostname @@ -81,7 +83,7 @@ async def exercise_redis_single_arg(client): background_task=True, ) @background_task() -def test_redis_execute_command_as_one_arg_enable(client, loop): +def test_redis_execute_command_as_one_arg_enable(client, loop): # noqa loop.run_until_complete(exercise_redis_single_arg(client)) @@ -94,7 +96,7 @@ def test_redis_execute_command_as_one_arg_enable(client, loop): background_task=True, ) @background_task() -def test_redis_execute_command_as_one_arg_disable(client, loop): +def test_redis_execute_command_as_one_arg_disable(client, loop): # noqa loop.run_until_complete(exercise_redis_single_arg(client)) @@ -106,7 +108,7 @@ def test_redis_execute_command_as_one_arg_disable(client, loop): background_task=True, ) @background_task() -def test_redis_execute_command_as_two_args_enable(client, loop): +def test_redis_execute_command_as_two_args_enable(client, loop): # noqa loop.run_until_complete(exercise_redis_multi_args(client)) @@ -118,5 +120,5 @@ def test_redis_execute_command_as_two_args_enable(client, loop): background_task=True, ) @background_task() -def test_redis_execute_command_as_two_args_disable(client, loop): +def test_redis_execute_command_as_two_args_disable(client, loop): # noqa loop.run_until_complete(exercise_redis_multi_args(client)) diff --git a/tests/datastore_aioredis/test_multiple_dbs.py 
b/tests/datastore_aioredis/test_multiple_dbs.py index 3b9ea37dd..61d99d3ae 100644 --- a/tests/datastore_aioredis/test_multiple_dbs.py +++ b/tests/datastore_aioredis/test_multiple_dbs.py @@ -14,9 +14,7 @@ import aioredis import pytest -from conftest import AIOREDIS_VERSION # , event_loop, loop - -# from conftest import AIOREDIS_VERSION +from conftest import AIOREDIS_VERSION, loop # noqa from testing_support.db_settings import redis_settings from testing_support.fixtures import override_application_settings from testing_support.util import instance_hostname @@ -107,7 +105,7 @@ @pytest.fixture(params=("Redis", "StrictRedis")) -def client_set(request, loop): +def client_set(request, loop): # noqa if len(DB_SETTINGS) > 1: if AIOREDIS_VERSION >= (2, 0): if request.param == "Redis": @@ -157,7 +155,7 @@ async def exercise_redis(client_1, client_2): background_task=True, ) @background_task() -def test_multiple_datastores_enabled(client_set, loop): +def test_multiple_datastores_enabled(client_set, loop): # noqa loop.run_until_complete(exercise_redis(client_set[0], client_set[1])) @@ -170,7 +168,7 @@ def test_multiple_datastores_enabled(client_set, loop): background_task=True, ) @background_task() -def test_multiple_datastores_disabled(client_set, loop): +def test_multiple_datastores_disabled(client_set, loop): # noqa loop.run_until_complete(exercise_redis(client_set[0], client_set[1])) @@ -183,7 +181,7 @@ def test_multiple_datastores_disabled(client_set, loop): ) @override_application_settings(_enable_instance_settings) @background_task() -def test_concurrent_calls(client_set, loop): +def test_concurrent_calls(client_set, loop): # noqa # Concurrent calls made with original instrumenation taken from synchonous Redis # instrumentation had a bug where datastore info on concurrent calls to multiple instances # would result in all instances reporting as the host/port of the final call made. 
diff --git a/tests/datastore_aioredis/test_trace_node.py b/tests/datastore_aioredis/test_trace_node.py index e4fa1e3ba..92235f793 100644 --- a/tests/datastore_aioredis/test_trace_node.py +++ b/tests/datastore_aioredis/test_trace_node.py @@ -12,9 +12,15 @@ # See the License for the specific language governing permissions and # limitations under the License. -from testing_support.fixtures import validate_tt_collector_json, override_application_settings -from testing_support.util import instance_hostname +# import aioredis +# import pytest +# from conftest import AIOREDIS_VERSION, event_loop from testing_support.db_settings import redis_settings +from testing_support.fixtures import override_application_settings +from testing_support.util import instance_hostname +from testing_support.validators.validate_tt_collector_json import ( + validate_tt_collector_json, +) from newrelic.api.background_task import background_task diff --git a/tests/datastore_aredis/test_trace_node.py b/tests/datastore_aredis/test_trace_node.py index 9741bfbd6..9d5d86162 100644 --- a/tests/datastore_aredis/test_trace_node.py +++ b/tests/datastore_aredis/test_trace_node.py @@ -13,12 +13,13 @@ # limitations under the License. 
import aredis - -from testing_support.fixture.event_loop import event_loop as loop -from testing_support.fixtures import (validate_tt_collector_json, - override_application_settings) -from testing_support.util import instance_hostname from testing_support.db_settings import redis_settings +from testing_support.fixture.event_loop import event_loop as loop # noqa: F401 +from testing_support.fixtures import override_application_settings +from testing_support.util import instance_hostname +from testing_support.validators.validate_tt_collector_json import ( + validate_tt_collector_json, +) from newrelic.api.background_task import background_task @@ -28,100 +29,93 @@ # Settings _enable_instance_settings = { - 'datastore_tracer.instance_reporting.enabled': True, - 'datastore_tracer.database_name_reporting.enabled': True, + "datastore_tracer.instance_reporting.enabled": True, + "datastore_tracer.database_name_reporting.enabled": True, } _disable_instance_settings = { - 'datastore_tracer.instance_reporting.enabled': False, - 'datastore_tracer.database_name_reporting.enabled': False, + "datastore_tracer.instance_reporting.enabled": False, + "datastore_tracer.database_name_reporting.enabled": False, } _instance_only_settings = { - 'datastore_tracer.instance_reporting.enabled': True, - 'datastore_tracer.database_name_reporting.enabled': False, + "datastore_tracer.instance_reporting.enabled": True, + "datastore_tracer.database_name_reporting.enabled": False, } _database_only_settings = { - 'datastore_tracer.instance_reporting.enabled': False, - 'datastore_tracer.database_name_reporting.enabled': True, + "datastore_tracer.instance_reporting.enabled": False, + "datastore_tracer.database_name_reporting.enabled": True, } # Expected parameters _enabled_required = { - 'host': instance_hostname(DB_SETTINGS['host']), - 'port_path_or_id': str(DB_SETTINGS['port']), - 'db.instance': str(DATABASE_NUMBER), + "host": instance_hostname(DB_SETTINGS["host"]), + "port_path_or_id": 
str(DB_SETTINGS["port"]), + "db.instance": str(DATABASE_NUMBER), } _enabled_forgone = {} _disabled_required = {} _disabled_forgone = { - 'host': 'VALUE NOT USED', - 'port_path_or_id': 'VALUE NOT USED', - 'db.instance': 'VALUE NOT USED', + "host": "VALUE NOT USED", + "port_path_or_id": "VALUE NOT USED", + "db.instance": "VALUE NOT USED", } _instance_only_required = { - 'host': instance_hostname(DB_SETTINGS['host']), - 'port_path_or_id': str(DB_SETTINGS['port']), + "host": instance_hostname(DB_SETTINGS["host"]), + "port_path_or_id": str(DB_SETTINGS["port"]), } _instance_only_forgone = { - 'db.instance': str(DATABASE_NUMBER), + "db.instance": str(DATABASE_NUMBER), } _database_only_required = { - 'db.instance': str(DATABASE_NUMBER), + "db.instance": str(DATABASE_NUMBER), } _database_only_forgone = { - 'host': 'VALUE NOT USED', - 'port_path_or_id': 'VALUE NOT USED', + "host": "VALUE NOT USED", + "port_path_or_id": "VALUE NOT USED", } # Query + async def _exercise_db(): - client = aredis.StrictRedis(host=DB_SETTINGS['host'], - port=DB_SETTINGS['port'], db=DATABASE_NUMBER) + client = aredis.StrictRedis(host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=DATABASE_NUMBER) - await client.set('key', 'value') - await client.get('key') + await client.set("key", "value") + await client.get("key") - await client.execute_command('CLIENT', 'LIST', parse='LIST') + await client.execute_command("CLIENT", "LIST", parse="LIST") # Tests + @override_application_settings(_enable_instance_settings) -@validate_tt_collector_json( - datastore_params=_enabled_required, - datastore_forgone_params=_enabled_forgone) +@validate_tt_collector_json(datastore_params=_enabled_required, datastore_forgone_params=_enabled_forgone) @background_task() -def test_trace_node_datastore_params_enable_instance(loop): +def test_trace_node_datastore_params_enable_instance(loop): # noqa: F811 loop.run_until_complete(_exercise_db()) @override_application_settings(_disable_instance_settings) 
-@validate_tt_collector_json( - datastore_params=_disabled_required, - datastore_forgone_params=_disabled_forgone) +@validate_tt_collector_json(datastore_params=_disabled_required, datastore_forgone_params=_disabled_forgone) @background_task() -def test_trace_node_datastore_params_disable_instance(loop): +def test_trace_node_datastore_params_disable_instance(loop): # noqa: F811 loop.run_until_complete(_exercise_db()) @override_application_settings(_instance_only_settings) -@validate_tt_collector_json( - datastore_params=_instance_only_required, - datastore_forgone_params=_instance_only_forgone) +@validate_tt_collector_json(datastore_params=_instance_only_required, datastore_forgone_params=_instance_only_forgone) @background_task() -def test_trace_node_datastore_params_instance_only(loop): +def test_trace_node_datastore_params_instance_only(loop): # noqa: F811 loop.run_until_complete(_exercise_db()) @override_application_settings(_database_only_settings) -@validate_tt_collector_json( - datastore_params=_database_only_required, - datastore_forgone_params=_database_only_forgone) +@validate_tt_collector_json(datastore_params=_database_only_required, datastore_forgone_params=_database_only_forgone) @background_task() -def test_trace_node_datastore_params_database_only(loop): +def test_trace_node_datastore_params_database_only(loop): # noqa: F811 loop.run_until_complete(_exercise_db()) diff --git a/tests/datastore_asyncpg/test_query.py b/tests/datastore_asyncpg/test_query.py index a952e062a..838ced61d 100644 --- a/tests/datastore_asyncpg/test_query.py +++ b/tests/datastore_asyncpg/test_query.py @@ -12,17 +12,19 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import asyncio import os -import random from io import BytesIO import asyncpg import pytest from testing_support.db_settings import postgresql_settings -from testing_support.fixtures import validate_tt_collector_json -from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.util import instance_hostname +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) +from testing_support.validators.validate_tt_collector_json import ( + validate_tt_collector_json, +) from newrelic.api.background_task import background_task diff --git a/tests/datastore_elasticsearch/test_trace_node.py b/tests/datastore_elasticsearch/test_trace_node.py index 65e773340..445b4a4eb 100644 --- a/tests/datastore_elasticsearch/test_trace_node.py +++ b/tests/datastore_elasticsearch/test_trace_node.py @@ -13,77 +13,81 @@ # limitations under the License. from elasticsearch import Elasticsearch - -from testing_support.fixtures import (validate_tt_collector_json, - override_application_settings, validate_tt_parenting) from testing_support.db_settings import elasticsearch_settings +from testing_support.fixtures import ( + override_application_settings, + validate_tt_parenting, +) from testing_support.util import instance_hostname +from testing_support.validators.validate_tt_collector_json import ( + validate_tt_collector_json, +) from newrelic.api.background_task import background_task ES_SETTINGS = elasticsearch_settings()[0] -ES_URL = 'http://%s:%s' % (ES_SETTINGS['host'], ES_SETTINGS['port']) +ES_URL = "http://%s:%s" % (ES_SETTINGS["host"], ES_SETTINGS["port"]) # Settings _enable_instance_settings = { - 'datastore_tracer.instance_reporting.enabled': True, - 'datastore_tracer.database_name_reporting.enabled': True, + "datastore_tracer.instance_reporting.enabled": True, + "datastore_tracer.database_name_reporting.enabled": True, } _disable_instance_settings = { - 
'datastore_tracer.instance_reporting.enabled': False, - 'datastore_tracer.database_name_reporting.enabled': False, + "datastore_tracer.instance_reporting.enabled": False, + "datastore_tracer.database_name_reporting.enabled": False, } _instance_only_settings = { - 'datastore_tracer.instance_reporting.enabled': True, - 'datastore_tracer.database_name_reporting.enabled': False, + "datastore_tracer.instance_reporting.enabled": True, + "datastore_tracer.database_name_reporting.enabled": False, } # Expected parameters _enabled_required = { - 'host': instance_hostname(ES_SETTINGS['host']), - 'port_path_or_id': str(ES_SETTINGS['port']), + "host": instance_hostname(ES_SETTINGS["host"]), + "port_path_or_id": str(ES_SETTINGS["port"]), } _enabled_forgone = { - 'db.instance': 'VALUE NOT USED', + "db.instance": "VALUE NOT USED", } _disabled_required = {} _disabled_forgone = { - 'host': 'VALUE NOT USED', - 'port_path_or_id': 'VALUE NOT USED', - 'db.instance': 'VALUE NOT USED', + "host": "VALUE NOT USED", + "port_path_or_id": "VALUE NOT USED", + "db.instance": "VALUE NOT USED", } _instance_only_required = { - 'host': instance_hostname(ES_SETTINGS['host']), - 'port_path_or_id': str(ES_SETTINGS['port']), + "host": instance_hostname(ES_SETTINGS["host"]), + "port_path_or_id": str(ES_SETTINGS["port"]), } _instance_only_forgone = { - 'db.instance': 'VALUE NOT USED', + "db.instance": "VALUE NOT USED", } _tt_parenting = ( - 'TransactionNode', [ - ('DatastoreNode', []), + "TransactionNode", + [ + ("DatastoreNode", []), ], ) # Query + def _exercise_es(es): - es.index(index='contacts', doc_type='person', - body={'name': 'Joe Tester', 'age': 25, 'title': 'QA Master'}, id=1) + es.index(index="contacts", doc_type="person", body={"name": "Joe Tester", "age": 25, "title": "QA Master"}, id=1) # Tests + @override_application_settings(_enable_instance_settings) -@validate_tt_collector_json( - datastore_params=_enabled_required, - datastore_forgone_params=_enabled_forgone) 
+@validate_tt_collector_json(datastore_params=_enabled_required, datastore_forgone_params=_enabled_forgone) @validate_tt_parenting(_tt_parenting) @background_task() def test_trace_node_datastore_params_enable_instance(): @@ -92,9 +96,7 @@ def test_trace_node_datastore_params_enable_instance(): @override_application_settings(_disable_instance_settings) -@validate_tt_collector_json( - datastore_params=_disabled_required, - datastore_forgone_params=_disabled_forgone) +@validate_tt_collector_json(datastore_params=_disabled_required, datastore_forgone_params=_disabled_forgone) @validate_tt_parenting(_tt_parenting) @background_task() def test_trace_node_datastore_params_disable_instance(): @@ -103,9 +105,7 @@ def test_trace_node_datastore_params_disable_instance(): @override_application_settings(_instance_only_settings) -@validate_tt_collector_json( - datastore_params=_instance_only_required, - datastore_forgone_params=_instance_only_forgone) +@validate_tt_collector_json(datastore_params=_instance_only_required, datastore_forgone_params=_instance_only_forgone) @validate_tt_parenting(_tt_parenting) @background_task() def test_trace_node_datastore_params_instance_only(): diff --git a/tests/datastore_psycopg2/test_trace_node.py b/tests/datastore_psycopg2/test_trace_node.py index b9cd45788..9bfbcf42b 100644 --- a/tests/datastore_psycopg2/test_trace_node.py +++ b/tests/datastore_psycopg2/test_trace_node.py @@ -13,72 +13,78 @@ # limitations under the License. 
import psycopg2 - -from testing_support.fixtures import (validate_tt_collector_json, - override_application_settings, validate_tt_parenting) +from testing_support.fixtures import ( + override_application_settings, + validate_tt_parenting, +) from testing_support.util import instance_hostname +from testing_support.validators.validate_tt_collector_json import ( + validate_tt_collector_json, +) from utils import DB_SETTINGS from newrelic.api.background_task import background_task - # Settings _enable_instance_settings = { - 'datastore_tracer.instance_reporting.enabled': True, - 'datastore_tracer.database_name_reporting.enabled': True, + "datastore_tracer.instance_reporting.enabled": True, + "datastore_tracer.database_name_reporting.enabled": True, } _disable_instance_settings = { - 'datastore_tracer.instance_reporting.enabled': False, - 'datastore_tracer.database_name_reporting.enabled': False, + "datastore_tracer.instance_reporting.enabled": False, + "datastore_tracer.database_name_reporting.enabled": False, } # Expected parameters _enabled_required = { - 'host': instance_hostname(DB_SETTINGS['host']), - 'port_path_or_id': str(DB_SETTINGS['port']), - 'db.instance': DB_SETTINGS['name'], + "host": instance_hostname(DB_SETTINGS["host"]), + "port_path_or_id": str(DB_SETTINGS["port"]), + "db.instance": DB_SETTINGS["name"], } _enabled_forgone = {} _disabled_required = {} _disabled_forgone = { - 'host': 'VALUE NOT USED', - 'port_path_or_id': 'VALUE NOT USED', - 'db.instance': 'VALUE NOT USED', + "host": "VALUE NOT USED", + "port_path_or_id": "VALUE NOT USED", + "db.instance": "VALUE NOT USED", } _tt_parenting = ( - 'TransactionNode', [ - ('FunctionNode', []), - ('DatabaseNode', []), + "TransactionNode", + [ + ("FunctionNode", []), + ("DatabaseNode", []), ], ) # Query + def _exercise_db(): connection = psycopg2.connect( - database=DB_SETTINGS['name'], user=DB_SETTINGS['user'], - password=DB_SETTINGS['password'], host=DB_SETTINGS['host'], - port=DB_SETTINGS['port']) + 
database=DB_SETTINGS["name"], + user=DB_SETTINGS["user"], + password=DB_SETTINGS["password"], + host=DB_SETTINGS["host"], + port=DB_SETTINGS["port"], + ) try: cursor = connection.cursor() - cursor.execute("""SELECT setting from pg_settings where name=%s""", - ('server_version',)) + cursor.execute("""SELECT setting from pg_settings where name=%s""", ("server_version",)) finally: connection.close() # Tests + @override_application_settings(_enable_instance_settings) -@validate_tt_collector_json( - datastore_params=_enabled_required, - datastore_forgone_params=_enabled_forgone) +@validate_tt_collector_json(datastore_params=_enabled_required, datastore_forgone_params=_enabled_forgone) @validate_tt_parenting(_tt_parenting) @background_task() def test_trace_node_datastore_params_enable_instance(): @@ -86,9 +92,7 @@ def test_trace_node_datastore_params_enable_instance(): @override_application_settings(_disable_instance_settings) -@validate_tt_collector_json( - datastore_params=_disabled_required, - datastore_forgone_params=_disabled_forgone) +@validate_tt_collector_json(datastore_params=_disabled_required, datastore_forgone_params=_disabled_forgone) @validate_tt_parenting(_tt_parenting) @background_task() def test_trace_node_datastore_params_disable_instance(): diff --git a/tests/datastore_redis/test_trace_node.py b/tests/datastore_redis/test_trace_node.py index 39b7763ba..cc0d59919 100644 --- a/tests/datastore_redis/test_trace_node.py +++ b/tests/datastore_redis/test_trace_node.py @@ -13,11 +13,12 @@ # limitations under the License. 
import redis - -from testing_support.fixtures import (validate_tt_collector_json, - override_application_settings) -from testing_support.util import instance_hostname from testing_support.db_settings import redis_settings +from testing_support.fixtures import override_application_settings +from testing_support.util import instance_hostname +from testing_support.validators.validate_tt_collector_json import ( + validate_tt_collector_json, +) from newrelic.api.background_task import background_task @@ -27,100 +28,93 @@ # Settings _enable_instance_settings = { - 'datastore_tracer.instance_reporting.enabled': True, - 'datastore_tracer.database_name_reporting.enabled': True, + "datastore_tracer.instance_reporting.enabled": True, + "datastore_tracer.database_name_reporting.enabled": True, } _disable_instance_settings = { - 'datastore_tracer.instance_reporting.enabled': False, - 'datastore_tracer.database_name_reporting.enabled': False, + "datastore_tracer.instance_reporting.enabled": False, + "datastore_tracer.database_name_reporting.enabled": False, } _instance_only_settings = { - 'datastore_tracer.instance_reporting.enabled': True, - 'datastore_tracer.database_name_reporting.enabled': False, + "datastore_tracer.instance_reporting.enabled": True, + "datastore_tracer.database_name_reporting.enabled": False, } _database_only_settings = { - 'datastore_tracer.instance_reporting.enabled': False, - 'datastore_tracer.database_name_reporting.enabled': True, + "datastore_tracer.instance_reporting.enabled": False, + "datastore_tracer.database_name_reporting.enabled": True, } # Expected parameters _enabled_required = { - 'host': instance_hostname(DB_SETTINGS['host']), - 'port_path_or_id': str(DB_SETTINGS['port']), - 'db.instance': str(DATABASE_NUMBER), + "host": instance_hostname(DB_SETTINGS["host"]), + "port_path_or_id": str(DB_SETTINGS["port"]), + "db.instance": str(DATABASE_NUMBER), } _enabled_forgone = {} _disabled_required = {} _disabled_forgone = { - 'host': 'VALUE NOT USED', 
- 'port_path_or_id': 'VALUE NOT USED', - 'db.instance': 'VALUE NOT USED', + "host": "VALUE NOT USED", + "port_path_or_id": "VALUE NOT USED", + "db.instance": "VALUE NOT USED", } _instance_only_required = { - 'host': instance_hostname(DB_SETTINGS['host']), - 'port_path_or_id': str(DB_SETTINGS['port']), + "host": instance_hostname(DB_SETTINGS["host"]), + "port_path_or_id": str(DB_SETTINGS["port"]), } _instance_only_forgone = { - 'db.instance': str(DATABASE_NUMBER), + "db.instance": str(DATABASE_NUMBER), } _database_only_required = { - 'db.instance': str(DATABASE_NUMBER), + "db.instance": str(DATABASE_NUMBER), } _database_only_forgone = { - 'host': 'VALUE NOT USED', - 'port_path_or_id': 'VALUE NOT USED', + "host": "VALUE NOT USED", + "port_path_or_id": "VALUE NOT USED", } # Query + def _exercise_db(): - client = redis.StrictRedis(host=DB_SETTINGS['host'], - port=DB_SETTINGS['port'], db=DATABASE_NUMBER) + client = redis.StrictRedis(host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=DATABASE_NUMBER) - client.set('key', 'value') - client.get('key') + client.set("key", "value") + client.get("key") - client.execute_command('CLIENT', 'LIST', parse='LIST') + client.execute_command("CLIENT", "LIST", parse="LIST") # Tests + @override_application_settings(_enable_instance_settings) -@validate_tt_collector_json( - datastore_params=_enabled_required, - datastore_forgone_params=_enabled_forgone) +@validate_tt_collector_json(datastore_params=_enabled_required, datastore_forgone_params=_enabled_forgone) @background_task() def test_trace_node_datastore_params_enable_instance(): _exercise_db() @override_application_settings(_disable_instance_settings) -@validate_tt_collector_json( - datastore_params=_disabled_required, - datastore_forgone_params=_disabled_forgone) +@validate_tt_collector_json(datastore_params=_disabled_required, datastore_forgone_params=_disabled_forgone) @background_task() def test_trace_node_datastore_params_disable_instance(): _exercise_db() 
@override_application_settings(_instance_only_settings) -@validate_tt_collector_json( - datastore_params=_instance_only_required, - datastore_forgone_params=_instance_only_forgone) +@validate_tt_collector_json(datastore_params=_instance_only_required, datastore_forgone_params=_instance_only_forgone) @background_task() def test_trace_node_datastore_params_instance_only(): _exercise_db() @override_application_settings(_database_only_settings) -@validate_tt_collector_json( - datastore_params=_database_only_required, - datastore_forgone_params=_database_only_forgone) +@validate_tt_collector_json(datastore_params=_database_only_required, datastore_forgone_params=_database_only_forgone) @background_task() def test_trace_node_datastore_params_database_only(): _exercise_db() diff --git a/tests/framework_sanic/test_cross_application.py b/tests/framework_sanic/test_cross_application.py index 7199fae55..31dc3b9b9 100644 --- a/tests/framework_sanic/test_cross_application.py +++ b/tests/framework_sanic/test_cross_application.py @@ -14,6 +14,8 @@ import json import random + +# import re import string import pytest diff --git a/tests/framework_tornado/test_server.py b/tests/framework_tornado/test_server.py index 1d985b938..6f8e6bf2a 100644 --- a/tests/framework_tornado/test_server.py +++ b/tests/framework_tornado/test_server.py @@ -213,6 +213,7 @@ def test_nr_disabled(app): ) def test_web_socket(uri, name, app): # import asyncio + from tornado.websocket import websocket_connect namespace, func_name = name.split(":") diff --git a/tests/messagebroker_pika/test_pika_async_connection_consume.py b/tests/messagebroker_pika/test_pika_async_connection_consume.py index 7edf6b644..4e44c7ed7 100644 --- a/tests/messagebroker_pika/test_pika_async_connection_consume.py +++ b/tests/messagebroker_pika/test_pika_async_connection_consume.py @@ -12,39 +12,57 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from minversion import pika_version_info -from compat import basic_consume import functools + import pika -from pika.adapters.tornado_connection import TornadoConnection import pytest import six import tornado +from compat import basic_consume +from conftest import ( + BODY, + CORRELATION_ID, + EXCHANGE, + EXCHANGE_2, + HEADERS, + QUEUE, + QUEUE_2, + REPLY_TO, +) +from minversion import pika_version_info +from pika.adapters.tornado_connection import TornadoConnection +from testing_support.db_settings import rabbitmq_settings +from testing_support.fixtures import ( + capture_transaction_metrics, + function_not_called, + override_application_settings, +) +from testing_support.validators.validate_code_level_metrics import ( + validate_code_level_metrics, +) +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) +from testing_support.validators.validate_tt_collector_json import ( + validate_tt_collector_json, +) from newrelic.api.background_task import background_task -from conftest import (QUEUE, QUEUE_2, EXCHANGE, EXCHANGE_2, CORRELATION_ID, - REPLY_TO, HEADERS, BODY) -from testing_support.fixtures import (capture_transaction_metrics, - validate_tt_collector_json, - function_not_called, override_application_settings) -from testing_support.validators.validate_code_level_metrics import validate_code_level_metrics -from testing_support.db_settings import rabbitmq_settings -from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics - DB_SETTINGS = rabbitmq_settings()[0] _message_broker_tt_params = { - 'queue_name': QUEUE, - 'routing_key': QUEUE, - 'correlation_id': CORRELATION_ID, - 'reply_to': REPLY_TO, - 'headers': HEADERS.copy(), + "queue_name": QUEUE, + "routing_key": QUEUE, + "correlation_id": CORRELATION_ID, + "reply_to": REPLY_TO, + "headers": HEADERS.copy(), } # Tornado's IO loop is not configurable in versions 5.x and up try: + class 
MyIOLoop(tornado.ioloop.IOLoop.configured_class()): def handle_callback_exception(self, *args, **kwargs): raise @@ -55,38 +73,44 @@ def handle_callback_exception(self, *args, **kwargs): connection_classes = [pika.SelectConnection, TornadoConnection] -parametrized_connection = pytest.mark.parametrize('ConnectionClass', - connection_classes) +parametrized_connection = pytest.mark.parametrize("ConnectionClass", connection_classes) _test_select_conn_basic_get_inside_txn_metrics = [ - ('MessageBroker/RabbitMQ/Exchange/Produce/Named/%s' % EXCHANGE, None), - ('MessageBroker/RabbitMQ/Exchange/Consume/Named/%s' % EXCHANGE, 1), + ("MessageBroker/RabbitMQ/Exchange/Produce/Named/%s" % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Consume/Named/%s" % EXCHANGE, 1), ] if six.PY3: _test_select_conn_basic_get_inside_txn_metrics.append( - (('Function/test_pika_async_connection_consume:' - 'test_async_connection_basic_get_inside_txn.' - '.on_message'), 1)) + ( + ( + "Function/test_pika_async_connection_consume:" + "test_async_connection_basic_get_inside_txn." + ".on_message" + ), + 1, + ) + ) else: - _test_select_conn_basic_get_inside_txn_metrics.append( - ('Function/test_pika_async_connection_consume:on_message', 1)) + _test_select_conn_basic_get_inside_txn_metrics.append(("Function/test_pika_async_connection_consume:on_message", 1)) @parametrized_connection -@pytest.mark.parametrize('callback_as_partial', [True, False]) -@validate_code_level_metrics("test_pika_async_connection_consume" + (".test_async_connection_basic_get_inside_txn." if six.PY3 else ""), "on_message") +@pytest.mark.parametrize("callback_as_partial", [True, False]) +@validate_code_level_metrics( + "test_pika_async_connection_consume" + (".test_async_connection_basic_get_inside_txn." 
if six.PY3 else ""), + "on_message", +) @validate_transaction_metrics( - ('test_pika_async_connection_consume:' - 'test_async_connection_basic_get_inside_txn'), - scoped_metrics=_test_select_conn_basic_get_inside_txn_metrics, - rollup_metrics=_test_select_conn_basic_get_inside_txn_metrics, - background_task=True) + ("test_pika_async_connection_consume:" "test_async_connection_basic_get_inside_txn"), + scoped_metrics=_test_select_conn_basic_get_inside_txn_metrics, + rollup_metrics=_test_select_conn_basic_get_inside_txn_metrics, + background_task=True, +) @validate_tt_collector_json(message_broker_params=_message_broker_tt_params) @background_task() -def test_async_connection_basic_get_inside_txn(producer, ConnectionClass, - callback_as_partial): +def test_async_connection_basic_get_inside_txn(producer, ConnectionClass, callback_as_partial): def on_message(channel, method_frame, header_frame, body): assert method_frame assert body == BODY @@ -104,9 +128,7 @@ def on_open_channel(channel): def on_open_connection(connection): connection.channel(on_open_callback=on_open_channel) - connection = ConnectionClass( - pika.ConnectionParameters(DB_SETTINGS['host']), - on_open_callback=on_open_connection) + connection = ConnectionClass(pika.ConnectionParameters(DB_SETTINGS["host"]), on_open_callback=on_open_connection) try: connection.ioloop.start() @@ -117,9 +139,8 @@ def on_open_connection(connection): @parametrized_connection -@pytest.mark.parametrize('callback_as_partial', [True, False]) -def test_select_connection_basic_get_outside_txn(producer, ConnectionClass, - callback_as_partial): +@pytest.mark.parametrize("callback_as_partial", [True, False]) +def test_select_connection_basic_get_outside_txn(producer, ConnectionClass, callback_as_partial): metrics_list = [] @capture_transaction_metrics(metrics_list) @@ -142,8 +163,8 @@ def on_open_connection(connection): connection.channel(on_open_callback=on_open_channel) connection = ConnectionClass( - 
pika.ConnectionParameters(DB_SETTINGS['host']), - on_open_callback=on_open_connection) + pika.ConnectionParameters(DB_SETTINGS["host"]), on_open_callback=on_open_connection + ) try: connection.ioloop.start() @@ -160,25 +181,24 @@ def on_open_connection(connection): _test_select_conn_basic_get_inside_txn_no_callback_metrics = [ - ('MessageBroker/RabbitMQ/Exchange/Produce/Named/%s' % EXCHANGE, None), - ('MessageBroker/RabbitMQ/Exchange/Consume/Named/%s' % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Produce/Named/%s" % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Consume/Named/%s" % EXCHANGE, None), ] @pytest.mark.skipif( - condition=pika_version_info[0] > 0, - reason='pika 1.0 removed the ability to use basic_get with callback=None') + condition=pika_version_info[0] > 0, reason="pika 1.0 removed the ability to use basic_get with callback=None" +) @parametrized_connection @validate_transaction_metrics( - ('test_pika_async_connection_consume:' - 'test_async_connection_basic_get_inside_txn_no_callback'), + ("test_pika_async_connection_consume:" "test_async_connection_basic_get_inside_txn_no_callback"), scoped_metrics=_test_select_conn_basic_get_inside_txn_no_callback_metrics, rollup_metrics=_test_select_conn_basic_get_inside_txn_no_callback_metrics, - background_task=True) + background_task=True, +) @validate_tt_collector_json(message_broker_params=_message_broker_tt_params) @background_task() -def test_async_connection_basic_get_inside_txn_no_callback(producer, - ConnectionClass): +def test_async_connection_basic_get_inside_txn_no_callback(producer, ConnectionClass): def on_open_channel(channel): channel.basic_get(callback=None, queue=QUEUE) channel.close() @@ -188,9 +208,7 @@ def on_open_channel(channel): def on_open_connection(connection): connection.channel(on_open_callback=on_open_channel) - connection = ConnectionClass( - pika.ConnectionParameters(DB_SETTINGS['host']), - on_open_callback=on_open_connection) + connection = 
ConnectionClass(pika.ConnectionParameters(DB_SETTINGS["host"]), on_open_callback=on_open_connection) try: connection.ioloop.start() @@ -201,27 +219,26 @@ def on_open_connection(connection): _test_async_connection_basic_get_empty_metrics = [ - ('MessageBroker/RabbitMQ/Exchange/Produce/Named/%s' % EXCHANGE, None), - ('MessageBroker/RabbitMQ/Exchange/Consume/Named/%s' % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Produce/Named/%s" % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Consume/Named/%s" % EXCHANGE, None), ] @parametrized_connection -@pytest.mark.parametrize('callback_as_partial', [True, False]) +@pytest.mark.parametrize("callback_as_partial", [True, False]) @validate_transaction_metrics( - ('test_pika_async_connection_consume:' - 'test_async_connection_basic_get_empty'), - scoped_metrics=_test_async_connection_basic_get_empty_metrics, - rollup_metrics=_test_async_connection_basic_get_empty_metrics, - background_task=True) + ("test_pika_async_connection_consume:" "test_async_connection_basic_get_empty"), + scoped_metrics=_test_async_connection_basic_get_empty_metrics, + rollup_metrics=_test_async_connection_basic_get_empty_metrics, + background_task=True, +) @validate_tt_collector_json(message_broker_params=_message_broker_tt_params) @background_task() -def test_async_connection_basic_get_empty(ConnectionClass, - callback_as_partial): - QUEUE = 'test_async_empty' +def test_async_connection_basic_get_empty(ConnectionClass, callback_as_partial): + QUEUE = "test_async_empty" def on_message(channel, method_frame, header_frame, body): - assert False, body.decode('UTF-8') + assert False, body.decode("UTF-8") if callback_as_partial: on_message = functools.partial(on_message) @@ -235,9 +252,7 @@ def on_open_channel(channel): def on_open_connection(connection): connection.channel(on_open_callback=on_open_channel) - connection = ConnectionClass( - pika.ConnectionParameters(DB_SETTINGS['host']), - on_open_callback=on_open_connection) + connection = 
ConnectionClass(pika.ConnectionParameters(DB_SETTINGS["host"]), on_open_callback=on_open_connection) try: connection.ioloop.start() @@ -248,33 +263,42 @@ def on_open_connection(connection): _test_select_conn_basic_consume_in_txn_metrics = [ - ('MessageBroker/RabbitMQ/Exchange/Produce/Named/%s' % EXCHANGE, None), - ('MessageBroker/RabbitMQ/Exchange/Consume/Named/%s' % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Produce/Named/%s" % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Consume/Named/%s" % EXCHANGE, None), ] if six.PY3: _test_select_conn_basic_consume_in_txn_metrics.append( - (('Function/test_pika_async_connection_consume:' - 'test_async_connection_basic_consume_inside_txn.' - '.on_message'), 1)) + ( + ( + "Function/test_pika_async_connection_consume:" + "test_async_connection_basic_consume_inside_txn." + ".on_message" + ), + 1, + ) + ) else: - _test_select_conn_basic_consume_in_txn_metrics.append( - ('Function/test_pika_async_connection_consume:on_message', 1)) + _test_select_conn_basic_consume_in_txn_metrics.append(("Function/test_pika_async_connection_consume:on_message", 1)) @parametrized_connection @validate_transaction_metrics( - ('test_pika_async_connection_consume:' - 'test_async_connection_basic_consume_inside_txn'), - scoped_metrics=_test_select_conn_basic_consume_in_txn_metrics, - rollup_metrics=_test_select_conn_basic_consume_in_txn_metrics, - background_task=True) -@validate_code_level_metrics("test_pika_async_connection_consume" + (".test_async_connection_basic_consume_inside_txn." if six.PY3 else ""), "on_message") + ("test_pika_async_connection_consume:" "test_async_connection_basic_consume_inside_txn"), + scoped_metrics=_test_select_conn_basic_consume_in_txn_metrics, + rollup_metrics=_test_select_conn_basic_consume_in_txn_metrics, + background_task=True, +) +@validate_code_level_metrics( + "test_pika_async_connection_consume" + + (".test_async_connection_basic_consume_inside_txn." 
if six.PY3 else ""), + "on_message", +) @validate_tt_collector_json(message_broker_params=_message_broker_tt_params) @background_task() def test_async_connection_basic_consume_inside_txn(producer, ConnectionClass): def on_message(channel, method_frame, header_frame, body): - assert hasattr(method_frame, '_nr_start_time') + assert hasattr(method_frame, "_nr_start_time") assert body == BODY channel.basic_ack(method_frame.delivery_tag) channel.close() @@ -287,9 +311,7 @@ def on_open_channel(channel): def on_open_connection(connection): connection.channel(on_open_callback=on_open_channel) - connection = ConnectionClass( - pika.ConnectionParameters(DB_SETTINGS['host']), - on_open_callback=on_open_connection) + connection = ConnectionClass(pika.ConnectionParameters(DB_SETTINGS["host"]), on_open_callback=on_open_connection) try: connection.ioloop.start() @@ -300,46 +322,67 @@ def on_open_connection(connection): _test_select_conn_basic_consume_two_exchanges = [ - ('MessageBroker/RabbitMQ/Exchange/Produce/Named/%s' % EXCHANGE, None), - ('MessageBroker/RabbitMQ/Exchange/Consume/Named/%s' % EXCHANGE, None), - ('MessageBroker/RabbitMQ/Exchange/Produce/Named/%s' % EXCHANGE_2, None), - ('MessageBroker/RabbitMQ/Exchange/Consume/Named/%s' % EXCHANGE_2, None), + ("MessageBroker/RabbitMQ/Exchange/Produce/Named/%s" % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Consume/Named/%s" % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Produce/Named/%s" % EXCHANGE_2, None), + ("MessageBroker/RabbitMQ/Exchange/Consume/Named/%s" % EXCHANGE_2, None), ] if six.PY3: _test_select_conn_basic_consume_two_exchanges.append( - (('Function/test_pika_async_connection_consume:' - 'test_async_connection_basic_consume_two_exchanges.' - '.on_message_1'), 1)) + ( + ( + "Function/test_pika_async_connection_consume:" + "test_async_connection_basic_consume_two_exchanges." 
+ ".on_message_1" + ), + 1, + ) + ) _test_select_conn_basic_consume_two_exchanges.append( - (('Function/test_pika_async_connection_consume:' - 'test_async_connection_basic_consume_two_exchanges.' - '.on_message_2'), 1)) + ( + ( + "Function/test_pika_async_connection_consume:" + "test_async_connection_basic_consume_two_exchanges." + ".on_message_2" + ), + 1, + ) + ) else: _test_select_conn_basic_consume_two_exchanges.append( - ('Function/test_pika_async_connection_consume:on_message_1', 1)) + ("Function/test_pika_async_connection_consume:on_message_1", 1) + ) _test_select_conn_basic_consume_two_exchanges.append( - ('Function/test_pika_async_connection_consume:on_message_2', 1)) + ("Function/test_pika_async_connection_consume:on_message_2", 1) + ) @parametrized_connection @validate_transaction_metrics( - ('test_pika_async_connection_consume:' - 'test_async_connection_basic_consume_two_exchanges'), - scoped_metrics=_test_select_conn_basic_consume_two_exchanges, - rollup_metrics=_test_select_conn_basic_consume_two_exchanges, - background_task=True) -@validate_code_level_metrics("test_pika_async_connection_consume" + (".test_async_connection_basic_consume_two_exchanges." if six.PY3 else ""), "on_message_1") -@validate_code_level_metrics("test_pika_async_connection_consume" + (".test_async_connection_basic_consume_two_exchanges." if six.PY3 else ""), "on_message_2") + ("test_pika_async_connection_consume:" "test_async_connection_basic_consume_two_exchanges"), + scoped_metrics=_test_select_conn_basic_consume_two_exchanges, + rollup_metrics=_test_select_conn_basic_consume_two_exchanges, + background_task=True, +) +@validate_code_level_metrics( + "test_pika_async_connection_consume" + + (".test_async_connection_basic_consume_two_exchanges." if six.PY3 else ""), + "on_message_1", +) +@validate_code_level_metrics( + "test_pika_async_connection_consume" + + (".test_async_connection_basic_consume_two_exchanges." 
if six.PY3 else ""), + "on_message_2", +) @background_task() -def test_async_connection_basic_consume_two_exchanges(producer, producer_2, - ConnectionClass): +def test_async_connection_basic_consume_two_exchanges(producer, producer_2, ConnectionClass): global events_received events_received = 0 def on_message_1(channel, method_frame, header_frame, body): channel.basic_ack(method_frame.delivery_tag) - assert hasattr(method_frame, '_nr_start_time') + assert hasattr(method_frame, "_nr_start_time") assert body == BODY global events_received @@ -352,7 +395,7 @@ def on_message_1(channel, method_frame, header_frame, body): def on_message_2(channel, method_frame, header_frame, body): channel.basic_ack(method_frame.delivery_tag) - assert hasattr(method_frame, '_nr_start_time') + assert hasattr(method_frame, "_nr_start_time") assert body == BODY global events_received @@ -370,9 +413,7 @@ def on_open_channel(channel): def on_open_connection(connection): connection.channel(on_open_callback=on_open_channel) - connection = ConnectionClass( - pika.ConnectionParameters(DB_SETTINGS['host']), - on_open_callback=on_open_connection) + connection = ConnectionClass(pika.ConnectionParameters(DB_SETTINGS["host"]), on_open_callback=on_open_connection) try: connection.ioloop.start() @@ -383,12 +424,11 @@ def on_open_connection(connection): # This should not create a transaction -@function_not_called('newrelic.core.stats_engine', - 'StatsEngine.record_transaction') -@override_application_settings({'debug.record_transaction_failure': True}) +@function_not_called("newrelic.core.stats_engine", "StatsEngine.record_transaction") +@override_application_settings({"debug.record_transaction_failure": True}) def test_tornado_connection_basic_consume_outside_transaction(producer): def on_message(channel, method_frame, header_frame, body): - assert hasattr(method_frame, '_nr_start_time') + assert hasattr(method_frame, "_nr_start_time") assert body == BODY channel.basic_ack(method_frame.delivery_tag) 
channel.close() @@ -401,9 +441,7 @@ def on_open_channel(channel): def on_open_connection(connection): connection.channel(on_open_callback=on_open_channel) - connection = TornadoConnection( - pika.ConnectionParameters(DB_SETTINGS['host']), - on_open_callback=on_open_connection) + connection = TornadoConnection(pika.ConnectionParameters(DB_SETTINGS["host"]), on_open_callback=on_open_connection) try: connection.ioloop.start() @@ -414,31 +452,44 @@ def on_open_connection(connection): if six.PY3: - _txn_name = ('test_pika_async_connection_consume:' - 'test_select_connection_basic_consume_outside_transaction.' - '.on_message') + _txn_name = ( + "test_pika_async_connection_consume:" + "test_select_connection_basic_consume_outside_transaction." + ".on_message" + ) _test_select_connection_consume_outside_txn_metrics = [ - (('Function/test_pika_async_connection_consume:' - 'test_select_connection_basic_consume_outside_transaction.' - '.on_message'), None)] + ( + ( + "Function/test_pika_async_connection_consume:" + "test_select_connection_basic_consume_outside_transaction." + ".on_message" + ), + None, + ) + ] else: - _txn_name = ( - 'test_pika_async_connection_consume:on_message') + _txn_name = "test_pika_async_connection_consume:on_message" _test_select_connection_consume_outside_txn_metrics = [ - ('Function/test_pika_async_connection_consume:on_message', None)] + ("Function/test_pika_async_connection_consume:on_message", None) + ] # This should create a transaction @validate_transaction_metrics( - _txn_name, - scoped_metrics=_test_select_connection_consume_outside_txn_metrics, - rollup_metrics=_test_select_connection_consume_outside_txn_metrics, - background_task=True, - group='Message/RabbitMQ/Exchange/%s' % EXCHANGE) -@validate_code_level_metrics("test_pika_async_connection_consume" + (".test_select_connection_basic_consume_outside_transaction." 
if six.PY3 else ""), "on_message") + _txn_name, + scoped_metrics=_test_select_connection_consume_outside_txn_metrics, + rollup_metrics=_test_select_connection_consume_outside_txn_metrics, + background_task=True, + group="Message/RabbitMQ/Exchange/%s" % EXCHANGE, +) +@validate_code_level_metrics( + "test_pika_async_connection_consume" + + (".test_select_connection_basic_consume_outside_transaction." if six.PY3 else ""), + "on_message", +) def test_select_connection_basic_consume_outside_transaction(producer): def on_message(channel, method_frame, header_frame, body): - assert hasattr(method_frame, '_nr_start_time') + assert hasattr(method_frame, "_nr_start_time") assert body == BODY channel.basic_ack(method_frame.delivery_tag) channel.close() @@ -452,8 +503,8 @@ def on_open_connection(connection): connection.channel(on_open_callback=on_open_channel) connection = pika.SelectConnection( - pika.ConnectionParameters(DB_SETTINGS['host']), - on_open_callback=on_open_connection) + pika.ConnectionParameters(DB_SETTINGS["host"]), on_open_callback=on_open_connection + ) try: connection.ioloop.start() diff --git a/tests/messagebroker_pika/test_pika_blocking_connection_consume.py b/tests/messagebroker_pika/test_pika_blocking_connection_consume.py index c96d42d98..7b41674a2 100644 --- a/tests/messagebroker_pika/test_pika_blocking_connection_consume.py +++ b/tests/messagebroker_pika/test_pika_blocking_connection_consume.py @@ -12,52 +12,56 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from compat import basic_consume import functools +import os + import pika import pytest import six -import os +from compat import basic_consume +from conftest import BODY, CORRELATION_ID, EXCHANGE, HEADERS, QUEUE, REPLY_TO +from testing_support.db_settings import rabbitmq_settings +from testing_support.fixtures import capture_transaction_metrics +from testing_support.validators.validate_code_level_metrics import ( + validate_code_level_metrics, +) +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) +from testing_support.validators.validate_tt_collector_json import ( + validate_tt_collector_json, +) from newrelic.api.background_task import background_task from newrelic.api.transaction import end_of_transaction -from conftest import QUEUE, EXCHANGE, CORRELATION_ID, REPLY_TO, HEADERS, BODY -from testing_support.fixtures import (capture_transaction_metrics, - validate_tt_collector_json) -from testing_support.validators.validate_code_level_metrics import validate_code_level_metrics -from testing_support.db_settings import rabbitmq_settings -from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics - DB_SETTINGS = rabbitmq_settings()[0] _message_broker_tt_params = { - 'queue_name': QUEUE, - 'routing_key': QUEUE, - 'correlation_id': CORRELATION_ID, - 'reply_to': REPLY_TO, - 'headers': HEADERS.copy(), + "queue_name": QUEUE, + "routing_key": QUEUE, + "correlation_id": CORRELATION_ID, + "reply_to": REPLY_TO, + "headers": HEADERS.copy(), } _test_blocking_connection_basic_get_metrics = [ - ('MessageBroker/RabbitMQ/Exchange/Produce/Named/%s' % EXCHANGE, None), - ('MessageBroker/RabbitMQ/Exchange/Consume/Named/%s' % EXCHANGE, 1), - (('Function/pika.adapters.blocking_connection:' - '_CallbackResult.set_value_once'), 1) + ("MessageBroker/RabbitMQ/Exchange/Produce/Named/%s" % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Consume/Named/%s" % EXCHANGE, 1), + 
(("Function/pika.adapters.blocking_connection:" "_CallbackResult.set_value_once"), 1), ] @validate_transaction_metrics( - ('test_pika_blocking_connection_consume:' - 'test_blocking_connection_basic_get'), - scoped_metrics=_test_blocking_connection_basic_get_metrics, - rollup_metrics=_test_blocking_connection_basic_get_metrics, - background_task=True) + ("test_pika_blocking_connection_consume:" "test_blocking_connection_basic_get"), + scoped_metrics=_test_blocking_connection_basic_get_metrics, + rollup_metrics=_test_blocking_connection_basic_get_metrics, + background_task=True, +) @validate_tt_collector_json(message_broker_params=_message_broker_tt_params) @background_task() def test_blocking_connection_basic_get(producer): - with pika.BlockingConnection( - pika.ConnectionParameters(DB_SETTINGS['host'])) as connection: + with pika.BlockingConnection(pika.ConnectionParameters(DB_SETTINGS["host"])) as connection: channel = connection.channel() method_frame, _, _ = channel.basic_get(QUEUE) assert method_frame @@ -65,23 +69,22 @@ def test_blocking_connection_basic_get(producer): _test_blocking_connection_basic_get_empty_metrics = [ - ('MessageBroker/RabbitMQ/Exchange/Produce/Named/%s' % EXCHANGE, None), - ('MessageBroker/RabbitMQ/Exchange/Consume/Named/%s' % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Produce/Named/%s" % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Consume/Named/%s" % EXCHANGE, None), ] @validate_transaction_metrics( - ('test_pika_blocking_connection_consume:' - 'test_blocking_connection_basic_get_empty'), - scoped_metrics=_test_blocking_connection_basic_get_empty_metrics, - rollup_metrics=_test_blocking_connection_basic_get_empty_metrics, - background_task=True) + ("test_pika_blocking_connection_consume:" "test_blocking_connection_basic_get_empty"), + scoped_metrics=_test_blocking_connection_basic_get_empty_metrics, + rollup_metrics=_test_blocking_connection_basic_get_empty_metrics, + background_task=True, +) 
@validate_tt_collector_json(message_broker_params=_message_broker_tt_params) @background_task() def test_blocking_connection_basic_get_empty(): - QUEUE = 'test_blocking_empty-%s' % os.getpid() - with pika.BlockingConnection( - pika.ConnectionParameters(DB_SETTINGS['host'])) as connection: + QUEUE = "test_blocking_empty-%s" % os.getpid() + with pika.BlockingConnection(pika.ConnectionParameters(DB_SETTINGS["host"])) as connection: channel = connection.channel() channel.queue_declare(queue=QUEUE) @@ -97,8 +100,7 @@ def test_blocking_connection_basic_get_outside_transaction(producer): @capture_transaction_metrics(metrics_list) def test_basic_get(): - with pika.BlockingConnection( - pika.ConnectionParameters(DB_SETTINGS['host'])) as connection: + with pika.BlockingConnection(pika.ConnectionParameters(DB_SETTINGS["host"])) as connection: channel = connection.channel() channel.queue_declare(queue=QUEUE) @@ -114,46 +116,57 @@ def test_basic_get(): _test_blocking_conn_basic_consume_no_txn_metrics = [ - ('MessageBroker/RabbitMQ/Exchange/Produce/Named/%s' % EXCHANGE, None), - ('MessageBroker/RabbitMQ/Exchange/Consume/Named/%s' % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Produce/Named/%s" % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Consume/Named/%s" % EXCHANGE, None), ] if six.PY3: - _txn_name = ('test_pika_blocking_connection_consume:' - 'test_blocking_connection_basic_consume_outside_transaction.' - '.on_message') + _txn_name = ( + "test_pika_blocking_connection_consume:" + "test_blocking_connection_basic_consume_outside_transaction." + ".on_message" + ) _test_blocking_conn_basic_consume_no_txn_metrics.append( - (('Function/test_pika_blocking_connection_consume:' - 'test_blocking_connection_basic_consume_outside_transaction.' - '.on_message'), None)) + ( + ( + "Function/test_pika_blocking_connection_consume:" + "test_blocking_connection_basic_consume_outside_transaction." 
+ ".on_message" + ), + None, + ) + ) else: - _txn_name = ('test_pika_blocking_connection_consume:' - 'on_message') + _txn_name = "test_pika_blocking_connection_consume:" "on_message" _test_blocking_conn_basic_consume_no_txn_metrics.append( - ('Function/test_pika_blocking_connection_consume:on_message', None)) + ("Function/test_pika_blocking_connection_consume:on_message", None) + ) -@pytest.mark.parametrize('as_partial', [True, False]) -@validate_code_level_metrics("test_pika_blocking_connection_consume" + (".test_blocking_connection_basic_consume_outside_transaction." if six.PY3 else ""), "on_message") +@pytest.mark.parametrize("as_partial", [True, False]) +@validate_code_level_metrics( + "test_pika_blocking_connection_consume" + + (".test_blocking_connection_basic_consume_outside_transaction." if six.PY3 else ""), + "on_message", +) @validate_transaction_metrics( - _txn_name, - scoped_metrics=_test_blocking_conn_basic_consume_no_txn_metrics, - rollup_metrics=_test_blocking_conn_basic_consume_no_txn_metrics, - background_task=True, - group='Message/RabbitMQ/Exchange/%s' % EXCHANGE) + _txn_name, + scoped_metrics=_test_blocking_conn_basic_consume_no_txn_metrics, + rollup_metrics=_test_blocking_conn_basic_consume_no_txn_metrics, + background_task=True, + group="Message/RabbitMQ/Exchange/%s" % EXCHANGE, +) @validate_tt_collector_json(message_broker_params=_message_broker_tt_params) -def test_blocking_connection_basic_consume_outside_transaction(producer, - as_partial): +def test_blocking_connection_basic_consume_outside_transaction(producer, as_partial): def on_message(channel, method_frame, header_frame, body): - assert hasattr(method_frame, '_nr_start_time') + assert hasattr(method_frame, "_nr_start_time") assert body == BODY channel.stop_consuming() if as_partial: on_message = functools.partial(on_message) - with pika.BlockingConnection( - pika.ConnectionParameters(DB_SETTINGS['host'])) as connection: + with 
pika.BlockingConnection(pika.ConnectionParameters(DB_SETTINGS["host"])) as connection: channel = connection.channel() basic_consume(channel, QUEUE, on_message) @@ -165,41 +178,51 @@ def on_message(channel, method_frame, header_frame, body): _test_blocking_conn_basic_consume_in_txn_metrics = [ - ('MessageBroker/RabbitMQ/Exchange/Produce/Named/%s' % EXCHANGE, None), - ('MessageBroker/RabbitMQ/Exchange/Consume/Named/%s' % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Produce/Named/%s" % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Consume/Named/%s" % EXCHANGE, None), ] if six.PY3: _test_blocking_conn_basic_consume_in_txn_metrics.append( - (('Function/test_pika_blocking_connection_consume:' - 'test_blocking_connection_basic_consume_inside_txn.' - '.on_message'), 1)) + ( + ( + "Function/test_pika_blocking_connection_consume:" + "test_blocking_connection_basic_consume_inside_txn." + ".on_message" + ), + 1, + ) + ) else: _test_blocking_conn_basic_consume_in_txn_metrics.append( - ('Function/test_pika_blocking_connection_consume:on_message', 1)) + ("Function/test_pika_blocking_connection_consume:on_message", 1) + ) -@pytest.mark.parametrize('as_partial', [True, False]) -@validate_code_level_metrics("test_pika_blocking_connection_consume" + (".test_blocking_connection_basic_consume_inside_txn." if six.PY3 else ""), "on_message") +@pytest.mark.parametrize("as_partial", [True, False]) +@validate_code_level_metrics( + "test_pika_blocking_connection_consume" + + (".test_blocking_connection_basic_consume_inside_txn." 
if six.PY3 else ""), + "on_message", +) @validate_transaction_metrics( - ('test_pika_blocking_connection_consume:' - 'test_blocking_connection_basic_consume_inside_txn'), - scoped_metrics=_test_blocking_conn_basic_consume_in_txn_metrics, - rollup_metrics=_test_blocking_conn_basic_consume_in_txn_metrics, - background_task=True) + ("test_pika_blocking_connection_consume:" "test_blocking_connection_basic_consume_inside_txn"), + scoped_metrics=_test_blocking_conn_basic_consume_in_txn_metrics, + rollup_metrics=_test_blocking_conn_basic_consume_in_txn_metrics, + background_task=True, +) @validate_tt_collector_json(message_broker_params=_message_broker_tt_params) @background_task() def test_blocking_connection_basic_consume_inside_txn(producer, as_partial): def on_message(channel, method_frame, header_frame, body): - assert hasattr(method_frame, '_nr_start_time') + assert hasattr(method_frame, "_nr_start_time") assert body == BODY channel.stop_consuming() if as_partial: on_message = functools.partial(on_message) - with pika.BlockingConnection( - pika.ConnectionParameters(DB_SETTINGS['host'])) as connection: + with pika.BlockingConnection(pika.ConnectionParameters(DB_SETTINGS["host"])) as connection: channel = connection.channel() basic_consume(channel, QUEUE, on_message) try: @@ -210,33 +233,40 @@ def on_message(channel, method_frame, header_frame, body): _test_blocking_conn_basic_consume_stopped_txn_metrics = [ - ('MessageBroker/RabbitMQ/Exchange/Produce/Named/%s' % EXCHANGE, None), - ('MessageBroker/RabbitMQ/Exchange/Consume/Named/%s' % EXCHANGE, None), - ('OtherTransaction/Message/RabbitMQ/Exchange/Named/%s' % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Produce/Named/%s" % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Consume/Named/%s" % EXCHANGE, None), + ("OtherTransaction/Message/RabbitMQ/Exchange/Named/%s" % EXCHANGE, None), ] if six.PY3: _test_blocking_conn_basic_consume_stopped_txn_metrics.append( - 
(('Function/test_pika_blocking_connection_consume:' - 'test_blocking_connection_basic_consume_stopped_txn.' - '.on_message'), None)) + ( + ( + "Function/test_pika_blocking_connection_consume:" + "test_blocking_connection_basic_consume_stopped_txn." + ".on_message" + ), + None, + ) + ) else: _test_blocking_conn_basic_consume_stopped_txn_metrics.append( - ('Function/test_pika_blocking_connection_consume:on_message', None)) + ("Function/test_pika_blocking_connection_consume:on_message", None) + ) -@pytest.mark.parametrize('as_partial', [True, False]) +@pytest.mark.parametrize("as_partial", [True, False]) @validate_transaction_metrics( - ('test_pika_blocking_connection_consume:' - 'test_blocking_connection_basic_consume_stopped_txn'), - scoped_metrics=_test_blocking_conn_basic_consume_stopped_txn_metrics, - rollup_metrics=_test_blocking_conn_basic_consume_stopped_txn_metrics, - background_task=True) + ("test_pika_blocking_connection_consume:" "test_blocking_connection_basic_consume_stopped_txn"), + scoped_metrics=_test_blocking_conn_basic_consume_stopped_txn_metrics, + rollup_metrics=_test_blocking_conn_basic_consume_stopped_txn_metrics, + background_task=True, +) @validate_tt_collector_json(message_broker_params=_message_broker_tt_params) @background_task() def test_blocking_connection_basic_consume_stopped_txn(producer, as_partial): def on_message(channel, method_frame, header_frame, body): - assert hasattr(method_frame, '_nr_start_time') + assert hasattr(method_frame, "_nr_start_time") assert body == BODY channel.stop_consuming() @@ -245,8 +275,7 @@ def on_message(channel, method_frame, header_frame, body): if as_partial: on_message = functools.partial(on_message) - with pika.BlockingConnection( - pika.ConnectionParameters(DB_SETTINGS['host'])) as connection: + with pika.BlockingConnection(pika.ConnectionParameters(DB_SETTINGS["host"])) as connection: channel = connection.channel() basic_consume(channel, QUEUE, on_message) try: diff --git 
a/tests/messagebroker_pika/test_pika_blocking_connection_consume_generator.py b/tests/messagebroker_pika/test_pika_blocking_connection_consume_generator.py index 4fff11487..816b28323 100644 --- a/tests/messagebroker_pika/test_pika_blocking_connection_consume_generator.py +++ b/tests/messagebroker_pika/test_pika_blocking_connection_consume_generator.py @@ -13,65 +13,66 @@ # limitations under the License. import pika +from conftest import BODY, CORRELATION_ID, EXCHANGE, HEADERS, QUEUE, REPLY_TO +from testing_support.db_settings import rabbitmq_settings +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) +from testing_support.validators.validate_tt_collector_json import ( + validate_tt_collector_json, +) from newrelic.api.background_task import background_task -from conftest import QUEUE, EXCHANGE, CORRELATION_ID, REPLY_TO, HEADERS, BODY -from testing_support.fixtures import validate_tt_collector_json -from testing_support.db_settings import rabbitmq_settings -from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics - DB_SETTINGS = rabbitmq_settings()[0] _message_broker_tt_params = { - 'queue_name': QUEUE, - 'routing_key': QUEUE, - 'correlation_id': CORRELATION_ID, - 'reply_to': REPLY_TO, - 'headers': HEADERS.copy(), + "queue_name": QUEUE, + "routing_key": QUEUE, + "correlation_id": CORRELATION_ID, + "reply_to": REPLY_TO, + "headers": HEADERS.copy(), } _test_blocking_connection_consume_metrics = [ - ('MessageBroker/RabbitMQ/Exchange/Produce/Named/%s' % EXCHANGE, None), - ('MessageBroker/RabbitMQ/Exchange/Consume/Named/%s' % EXCHANGE, None), - ('MessageBroker/RabbitMQ/Exchange/Consume/Named/Unknown', None), + ("MessageBroker/RabbitMQ/Exchange/Produce/Named/%s" % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Consume/Named/%s" % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Consume/Named/Unknown", None), ] @validate_transaction_metrics( - 
('test_pika_blocking_connection_consume_generator:' - 'test_blocking_connection_consume_break'), - scoped_metrics=_test_blocking_connection_consume_metrics, - rollup_metrics=_test_blocking_connection_consume_metrics, - background_task=True) + ("test_pika_blocking_connection_consume_generator:" "test_blocking_connection_consume_break"), + scoped_metrics=_test_blocking_connection_consume_metrics, + rollup_metrics=_test_blocking_connection_consume_metrics, + background_task=True, +) @validate_tt_collector_json(message_broker_params=_message_broker_tt_params) @background_task() def test_blocking_connection_consume_break(producer): - with pika.BlockingConnection( - pika.ConnectionParameters(DB_SETTINGS['host'])) as connection: + with pika.BlockingConnection(pika.ConnectionParameters(DB_SETTINGS["host"])) as connection: channel = connection.channel() for method_frame, properties, body in channel.consume(QUEUE): - assert hasattr(method_frame, '_nr_start_time') + assert hasattr(method_frame, "_nr_start_time") assert body == BODY break @validate_transaction_metrics( - ('test_pika_blocking_connection_consume_generator:' - 'test_blocking_connection_consume_connection_close'), - scoped_metrics=_test_blocking_connection_consume_metrics, - rollup_metrics=_test_blocking_connection_consume_metrics, - background_task=True) + ("test_pika_blocking_connection_consume_generator:" "test_blocking_connection_consume_connection_close"), + scoped_metrics=_test_blocking_connection_consume_metrics, + rollup_metrics=_test_blocking_connection_consume_metrics, + background_task=True, +) @validate_tt_collector_json(message_broker_params=_message_broker_tt_params) @background_task() def test_blocking_connection_consume_connection_close(producer): - connection = pika.BlockingConnection( - pika.ConnectionParameters(DB_SETTINGS['host'])) + connection = pika.BlockingConnection(pika.ConnectionParameters(DB_SETTINGS["host"])) channel = connection.channel() try: for method_frame, properties, body in 
channel.consume(QUEUE): - assert hasattr(method_frame, '_nr_start_time') + assert hasattr(method_frame, "_nr_start_time") assert body == BODY channel.close() connection.close() @@ -82,16 +83,15 @@ def test_blocking_connection_consume_connection_close(producer): @validate_transaction_metrics( - ('test_pika_blocking_connection_consume_generator:' - 'test_blocking_connection_consume_timeout'), - scoped_metrics=_test_blocking_connection_consume_metrics, - rollup_metrics=_test_blocking_connection_consume_metrics, - background_task=True) + ("test_pika_blocking_connection_consume_generator:" "test_blocking_connection_consume_timeout"), + scoped_metrics=_test_blocking_connection_consume_metrics, + rollup_metrics=_test_blocking_connection_consume_metrics, + background_task=True, +) @validate_tt_collector_json(message_broker_params=_message_broker_tt_params) @background_task() def test_blocking_connection_consume_timeout(producer): - with pika.BlockingConnection( - pika.ConnectionParameters(DB_SETTINGS['host'])) as connection: + with pika.BlockingConnection(pika.ConnectionParameters(DB_SETTINGS["host"])) as connection: channel = connection.channel() for result in channel.consume(QUEUE, inactivity_timeout=0.01): @@ -99,7 +99,7 @@ def test_blocking_connection_consume_timeout(producer): if result and any(result): method_frame, properties, body = result channel.basic_ack(method_frame.delivery_tag) - assert hasattr(method_frame, '_nr_start_time') + assert hasattr(method_frame, "_nr_start_time") assert body == BODY else: # timeout hit! 
@@ -107,16 +107,15 @@ def test_blocking_connection_consume_timeout(producer): @validate_transaction_metrics( - ('test_pika_blocking_connection_consume_generator:' - 'test_blocking_connection_consume_exception_in_for_loop'), - scoped_metrics=_test_blocking_connection_consume_metrics, - rollup_metrics=_test_blocking_connection_consume_metrics, - background_task=True) + ("test_pika_blocking_connection_consume_generator:" "test_blocking_connection_consume_exception_in_for_loop"), + scoped_metrics=_test_blocking_connection_consume_metrics, + rollup_metrics=_test_blocking_connection_consume_metrics, + background_task=True, +) @validate_tt_collector_json(message_broker_params=_message_broker_tt_params) @background_task() def test_blocking_connection_consume_exception_in_for_loop(producer): - with pika.BlockingConnection( - pika.ConnectionParameters(DB_SETTINGS['host'])) as connection: + with pika.BlockingConnection(pika.ConnectionParameters(DB_SETTINGS["host"])) as connection: channel = connection.channel() try: @@ -128,29 +127,28 @@ def test_blocking_connection_consume_exception_in_for_loop(producer): # Expected error pass except Exception as e: - assert False, 'Wrong exception was raised: %s' % e + assert False, "Wrong exception was raised: %s" % e else: - assert False, 'No exception was raised!' + assert False, "No exception was raised!" 
_test_blocking_connection_consume_empty_metrics = [ - ('MessageBroker/RabbitMQ/Exchange/Produce/Named/%s' % EXCHANGE, None), - ('MessageBroker/RabbitMQ/Exchange/Consume/Named/%s' % EXCHANGE, None), - ('MessageBroker/RabbitMQ/Exchange/Consume/Named/Unknown', None), + ("MessageBroker/RabbitMQ/Exchange/Produce/Named/%s" % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Consume/Named/%s" % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Consume/Named/Unknown", None), ] @validate_transaction_metrics( - ('test_pika_blocking_connection_consume_generator:' - 'test_blocking_connection_consume_exception_in_generator'), - scoped_metrics=_test_blocking_connection_consume_empty_metrics, - rollup_metrics=_test_blocking_connection_consume_empty_metrics, - background_task=True) + ("test_pika_blocking_connection_consume_generator:" "test_blocking_connection_consume_exception_in_generator"), + scoped_metrics=_test_blocking_connection_consume_empty_metrics, + rollup_metrics=_test_blocking_connection_consume_empty_metrics, + background_task=True, +) @validate_tt_collector_json(message_broker_params=_message_broker_tt_params) @background_task() def test_blocking_connection_consume_exception_in_generator(): - with pika.BlockingConnection( - pika.ConnectionParameters(DB_SETTINGS['host'])) as connection: + with pika.BlockingConnection(pika.ConnectionParameters(DB_SETTINGS["host"])) as connection: channel = connection.channel() try: @@ -161,29 +159,28 @@ def test_blocking_connection_consume_exception_in_generator(): # Expected error pass except Exception as e: - assert False, 'Wrong exception was raised: %s' % e + assert False, "Wrong exception was raised: %s" % e else: - assert False, 'No exception was raised!' + assert False, "No exception was raised!" 
_test_blocking_connection_consume_many_metrics = [ - ('MessageBroker/RabbitMQ/Exchange/Produce/Named/%s' % EXCHANGE, None), - ('MessageBroker/RabbitMQ/Exchange/Consume/Named/%s' % EXCHANGE, None), - ('MessageBroker/RabbitMQ/Exchange/Consume/Named/Unknown', None), + ("MessageBroker/RabbitMQ/Exchange/Produce/Named/%s" % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Consume/Named/%s" % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Consume/Named/Unknown", None), ] @validate_transaction_metrics( - ('test_pika_blocking_connection_consume_generator:' - 'test_blocking_connection_consume_many'), - scoped_metrics=_test_blocking_connection_consume_many_metrics, - rollup_metrics=_test_blocking_connection_consume_many_metrics, - background_task=True) + ("test_pika_blocking_connection_consume_generator:" "test_blocking_connection_consume_many"), + scoped_metrics=_test_blocking_connection_consume_many_metrics, + rollup_metrics=_test_blocking_connection_consume_many_metrics, + background_task=True, +) @validate_tt_collector_json(message_broker_params=_message_broker_tt_params) @background_task() def test_blocking_connection_consume_many(produce_five): - with pika.BlockingConnection( - pika.ConnectionParameters(DB_SETTINGS['host'])) as connection: + with pika.BlockingConnection(pika.ConnectionParameters(DB_SETTINGS["host"])) as connection: channel = connection.channel() consumed = 0 @@ -196,22 +193,21 @@ def test_blocking_connection_consume_many(produce_five): @validate_transaction_metrics( - ('test_pika_blocking_connection_consume_generator:' - 'test_blocking_connection_consume_using_methods'), - scoped_metrics=_test_blocking_connection_consume_metrics, - rollup_metrics=_test_blocking_connection_consume_metrics, - background_task=True) + ("test_pika_blocking_connection_consume_generator:" "test_blocking_connection_consume_using_methods"), + scoped_metrics=_test_blocking_connection_consume_metrics, + rollup_metrics=_test_blocking_connection_consume_metrics, + 
background_task=True, +) @validate_tt_collector_json(message_broker_params=_message_broker_tt_params) @background_task() def test_blocking_connection_consume_using_methods(producer): - with pika.BlockingConnection( - pika.ConnectionParameters(DB_SETTINGS['host'])) as connection: + with pika.BlockingConnection(pika.ConnectionParameters(DB_SETTINGS["host"])) as connection: channel = connection.channel() consumer = channel.consume(QUEUE, inactivity_timeout=0.01) method, properties, body = next(consumer) - assert hasattr(method, '_nr_start_time') + assert hasattr(method, "_nr_start_time") assert body == BODY result = next(consumer) @@ -224,28 +220,28 @@ def test_blocking_connection_consume_using_methods(producer): pass else: # this is not - assert False, 'No exception was raised!' + assert False, "No exception was raised!" result = consumer.close() assert result is None @validate_transaction_metrics( - 'Named/%s' % EXCHANGE, - scoped_metrics=_test_blocking_connection_consume_metrics, - rollup_metrics=_test_blocking_connection_consume_metrics, - background_task=True, - group='Message/RabbitMQ/Exchange') + "Named/%s" % EXCHANGE, + scoped_metrics=_test_blocking_connection_consume_metrics, + rollup_metrics=_test_blocking_connection_consume_metrics, + background_task=True, + group="Message/RabbitMQ/Exchange", +) @validate_tt_collector_json(message_broker_params=_message_broker_tt_params) def test_blocking_connection_consume_outside_txn(producer): - with pika.BlockingConnection( - pika.ConnectionParameters(DB_SETTINGS['host'])) as connection: + with pika.BlockingConnection(pika.ConnectionParameters(DB_SETTINGS["host"])) as connection: channel = connection.channel() consumer = channel.consume(QUEUE) try: for method_frame, properties, body in consumer: - assert hasattr(method_frame, '_nr_start_time') + assert hasattr(method_frame, "_nr_start_time") assert body == BODY break finally: @@ -254,26 +250,24 @@ def test_blocking_connection_consume_outside_txn(producer): def 
test_blocking_connection_consume_many_outside_txn(produce_five): - @validate_transaction_metrics( - 'Named/%s' % EXCHANGE, - scoped_metrics=_test_blocking_connection_consume_metrics, - rollup_metrics=_test_blocking_connection_consume_metrics, - background_task=True, - group='Message/RabbitMQ/Exchange') - @validate_tt_collector_json( - message_broker_params=_message_broker_tt_params) + "Named/%s" % EXCHANGE, + scoped_metrics=_test_blocking_connection_consume_metrics, + rollup_metrics=_test_blocking_connection_consume_metrics, + background_task=True, + group="Message/RabbitMQ/Exchange", + ) + @validate_tt_collector_json(message_broker_params=_message_broker_tt_params) def consume_it(consumer, up_next=None): if up_next is None: method_frame, properties, body = next(consumer) else: method_frame, properties, body = up_next - assert hasattr(method_frame, '_nr_start_time') + assert hasattr(method_frame, "_nr_start_time") assert body == BODY return next(consumer) - with pika.BlockingConnection( - pika.ConnectionParameters(DB_SETTINGS['host'])) as connection: + with pika.BlockingConnection(pika.ConnectionParameters(DB_SETTINGS["host"])) as connection: channel = connection.channel() consumer = channel.consume(QUEUE) @@ -288,21 +282,21 @@ def consume_it(consumer, up_next=None): @validate_transaction_metrics( - 'Named/%s' % EXCHANGE, - scoped_metrics=_test_blocking_connection_consume_metrics, - rollup_metrics=_test_blocking_connection_consume_metrics, - background_task=True, - group='Message/RabbitMQ/Exchange') + "Named/%s" % EXCHANGE, + scoped_metrics=_test_blocking_connection_consume_metrics, + rollup_metrics=_test_blocking_connection_consume_metrics, + background_task=True, + group="Message/RabbitMQ/Exchange", +) @validate_tt_collector_json(message_broker_params=_message_broker_tt_params) def test_blocking_connection_consume_using_methods_outside_txn(producer): - with pika.BlockingConnection( - pika.ConnectionParameters(DB_SETTINGS['host'])) as connection: + with 
pika.BlockingConnection(pika.ConnectionParameters(DB_SETTINGS["host"])) as connection: channel = connection.channel() consumer = channel.consume(QUEUE, inactivity_timeout=0.01) method, properties, body = next(consumer) - assert hasattr(method, '_nr_start_time') + assert hasattr(method, "_nr_start_time") assert body == BODY result = next(consumer) @@ -315,22 +309,21 @@ def test_blocking_connection_consume_using_methods_outside_txn(producer): pass else: # this is not - assert False, 'No exception was raised!' + assert False, "No exception was raised!" result = consumer.close() assert result is None @validate_transaction_metrics( - ('test_pika_blocking_connection_consume_generator:' - 'test_blocking_connection_consume_exception_on_creation'), - scoped_metrics=_test_blocking_connection_consume_empty_metrics, - rollup_metrics=_test_blocking_connection_consume_empty_metrics, - background_task=True) + ("test_pika_blocking_connection_consume_generator:" "test_blocking_connection_consume_exception_on_creation"), + scoped_metrics=_test_blocking_connection_consume_empty_metrics, + rollup_metrics=_test_blocking_connection_consume_empty_metrics, + background_task=True, +) @background_task() def test_blocking_connection_consume_exception_on_creation(): - with pika.BlockingConnection( - pika.ConnectionParameters(DB_SETTINGS['host'])) as connection: + with pika.BlockingConnection(pika.ConnectionParameters(DB_SETTINGS["host"])) as connection: channel = connection.channel() try: @@ -340,4 +333,4 @@ def test_blocking_connection_consume_exception_on_creation(): pass else: # this is not - assert False, 'TypeError was not raised' + assert False, "TypeError was not raised" diff --git a/tests/messagebroker_pika/test_pika_produce.py b/tests/messagebroker_pika/test_pika_produce.py index 0960159fa..dbc9af030 100644 --- a/tests/messagebroker_pika/test_pika_produce.py +++ b/tests/messagebroker_pika/test_pika_produce.py @@ -15,14 +15,16 @@ import pika import pytest from 
testing_support.db_settings import rabbitmq_settings -from testing_support.fixtures import ( - override_application_settings, - validate_tt_collector_json, -) +from testing_support.fixtures import override_application_settings from testing_support.validators.validate_messagebroker_headers import ( validate_messagebroker_headers, ) -from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) +from testing_support.validators.validate_tt_collector_json import ( + validate_tt_collector_json, +) from newrelic.api.background_task import background_task from newrelic.api.transaction import current_transaction @@ -46,7 +48,7 @@ def cache_pika_headers(wrapped, instance, args, kwargs): QUEUE = "test-pika-queue" CORRELATION_ID = "testingpika" REPLY_TO = "testing" -HEADERS = {u"MYHEADER": u"pikatest"} +HEADERS = {"MYHEADER": "pikatest"} _message_broker_tt_included_params = { "routing_key": QUEUE, diff --git a/tests/testing_support/fixtures.py b/tests/testing_support/fixtures.py index 9e99e89b2..f642d1f6f 100644 --- a/tests/testing_support/fixtures.py +++ b/tests/testing_support/fixtures.py @@ -54,7 +54,6 @@ transient_function_wrapper, wrap_function_wrapper, ) -from newrelic.common.system_info import LOCALHOST_EQUIVALENTS from newrelic.config import initialize from newrelic.core.agent import shutdown_agent from newrelic.core.attribute import create_attributes @@ -89,14 +88,6 @@ def _environ_as_bool(name, default=False): return flag -def _lookup_string_table(name, string_table, default=None): - try: - index = int(name.lstrip("`")) - return string_table[index] - except ValueError: - return default - - if _environ_as_bool("NEW_RELIC_HIGH_SECURITY"): DeveloperModeClient.RESPONSES["connect"]["high_security"] = True @@ -444,338 +435,6 @@ def check_event_attributes(event_data, required_params=None, forgone_params=None assert intrinsics[param] 
== value, ((param, value), intrinsics) -def validate_application_error_trace_count(num_errors): - """Validate error event data for a single error occurring outside of a - transaction. - """ - - @function_wrapper - def _validate_application_error_trace_count(wrapped, instace, args, kwargs): - - try: - result = wrapped(*args, **kwargs) - except: - raise - else: - - stats = core_application_stats_engine(None) - assert len(stats.error_data()) == num_errors - - return result - - return _validate_application_error_trace_count - - -def validate_application_error_event_count(num_errors): - """Validate error event data for a single error occurring outside of a - transaction. - """ - - @function_wrapper - def _validate_application_error_event_count(wrapped, instace, args, kwargs): - - try: - result = wrapped(*args, **kwargs) - except: - raise - else: - - stats = core_application_stats_engine(None) - assert len(list(stats.error_events)) == num_errors - - return result - - return _validate_application_error_event_count - - -def validate_synthetics_transaction_trace(required_params=None, forgone_params=None, should_exist=True): - required_params = required_params or {} - forgone_params = forgone_params or {} - - @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.record_transaction") - def _validate_synthetics_transaction_trace(wrapped, instance, args, kwargs): - try: - result = wrapped(*args, **kwargs) - except: - raise - else: - - # Now that transaction has been recorded, generate - # a transaction trace - - connections = SQLConnections() - trace_data = instance.transaction_trace_data(connections) - - # Check that synthetics resource id is in TT header - - header = trace_data[0] - header_key = "synthetics_resource_id" - - if should_exist: - assert header_key in required_params - assert header[9] == required_params[header_key], "name=%r, header=%r" % (header_key, header) - else: - assert header[9] is None - - # Check that synthetics ids are in TT custom 
params - - pack_data = unpack_field(trace_data[0][4]) - tt_intrinsics = pack_data[0][4]["intrinsics"] - - for name in required_params: - assert name in tt_intrinsics, "name=%r, intrinsics=%r" % (name, tt_intrinsics) - assert tt_intrinsics[name] == required_params[name], "name=%r, value=%r, intrinsics=%r" % ( - name, - required_params[name], - tt_intrinsics, - ) - - for name in forgone_params: - assert name not in tt_intrinsics, "name=%r, intrinsics=%r" % (name, tt_intrinsics) - - return result - - return _validate_synthetics_transaction_trace - - -def validate_tt_collector_json( - required_params=None, - forgone_params=None, - should_exist=True, - datastore_params=None, - datastore_forgone_params=None, - message_broker_params=None, - message_broker_forgone_params=None, - exclude_request_uri=False, -): - """make assertions based off the cross-agent spec on transaction traces""" - required_params = required_params or {} - forgone_params = forgone_params or {} - datastore_params = datastore_params or {} - datastore_forgone_params = datastore_forgone_params or {} - message_broker_params = message_broker_params or {} - message_broker_forgone_params = message_broker_forgone_params or [] - - @function_wrapper - def _validate_wrapper(wrapped, instance, args, kwargs): - - traces_recorded = [] - - @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.record_transaction") - def _validate_tt_collector_json(wrapped, instance, args, kwargs): - - result = wrapped(*args, **kwargs) - - # Now that transaction has been recorded, generate - # a transaction trace - - connections = SQLConnections() - trace_data = instance.transaction_trace_data(connections) - traces_recorded.append(trace_data) - - return result - - def _validate_trace(trace): - assert isinstance(trace[0], float) # absolute start time (ms) - assert isinstance(trace[1], float) # duration (ms) - assert trace[0] > 0 # absolute time (ms) - assert isinstance(trace[2], six.string_types) # transaction name - if 
trace[2].startswith("WebTransaction"): - if exclude_request_uri: - assert trace[3] is None # request url - else: - assert isinstance(trace[3], six.string_types) - # query parameters should not be captured - assert "?" not in trace[3] - - # trace details -- python agent always uses condensed trace array - - trace_details, string_table = unpack_field(trace[4]) - assert len(trace_details) == 5 - assert isinstance(trace_details[0], float) # start time (ms) - - # the next two items should be empty dicts, old parameters stuff, - # placeholders for now - - assert isinstance(trace_details[1], dict) - assert len(trace_details[1]) == 0 - assert isinstance(trace_details[2], dict) - assert len(trace_details[2]) == 0 - - # root node in slot 3 - - root_node = trace_details[3] - assert isinstance(root_node[0], float) # entry timestamp - assert isinstance(root_node[1], float) # exit timestamp - assert root_node[2] == "ROOT" - assert isinstance(root_node[3], dict) - assert len(root_node[3]) == 0 # spec shows empty (for root) - children = root_node[4] - assert isinstance(children, list) - - # there are two optional items at the end of trace segments, - # class name that segment is in, and method name function is in; - # Python agent does not use these (only Java does) - - # let's just test the first child - trace_segment = children[0] - assert isinstance(trace_segment[0], float) # entry timestamp - assert isinstance(trace_segment[1], float) # exit timestamp - assert isinstance(trace_segment[2], six.string_types) # scope - assert isinstance(trace_segment[3], dict) # request params - assert isinstance(trace_segment[4], list) # children - - assert trace_segment[0] >= root_node[0] # trace starts after root - - def _check_params_and_start_time(node): - children = node[4] - for child in children: - assert child[0] >= node[0] # child started after parent - _check_params_and_start_time(child) - - params = node[3] - assert isinstance(params, dict) - - # We should always report 
exclusive_duration_millis on a - # segment. This allows us to override exclusive time - # calculations on APM. - assert "exclusive_duration_millis" in params - assert isinstance(params["exclusive_duration_millis"], float) - - segment_name = _lookup_string_table(node[2], string_table, default=node[2]) - if segment_name.startswith("Datastore"): - for key in datastore_params: - assert key in params, key - assert params[key] == datastore_params[key] - for key in datastore_forgone_params: - assert key not in params, key - - # if host is reported, it cannot be localhost - if "host" in params: - assert params["host"] not in LOCALHOST_EQUIVALENTS - - elif segment_name.startswith("MessageBroker"): - for key in message_broker_params: - assert key in params, key - assert params[key] == message_broker_params[key] - for key in message_broker_forgone_params: - assert key not in params, key - - _check_params_and_start_time(trace_segment) - - attributes = trace_details[4] - - assert "intrinsics" in attributes - assert "userAttributes" in attributes - assert "agentAttributes" in attributes - - assert isinstance(trace[5], six.string_types) # GUID - assert trace[6] is None # reserved for future use - assert trace[7] is False # deprecated force persist flag - - # x-ray session ID - - assert trace[8] is None - - # Synthetics ID - - assert trace[9] is None or isinstance(trace[9], six.string_types) - - assert isinstance(string_table, list) - for name in string_table: - assert isinstance(name, six.string_types) # metric name - - _new_wrapper = _validate_tt_collector_json(wrapped) - val = _new_wrapper(*args, **kwargs) - trace_data = traces_recorded.pop() - trace = trace_data[0] # 1st trace - _validate_trace(trace) - return val - - return _validate_wrapper - - -def validate_transaction_trace_attributes( - required_params=None, forgone_params=None, should_exist=True, url=None, index=-1 -): - required_params = required_params or {} - forgone_params = forgone_params or {} - - trace_data = [] - 
- @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.record_transaction") - def _validate_transaction_trace_attributes(wrapped, instance, args, kwargs): - - result = wrapped(*args, **kwargs) - - # Now that transaction has been recorded, generate - # a transaction trace - - connections = SQLConnections() - _trace_data = instance.transaction_trace_data(connections) - trace_data.append(_trace_data) - - return result - - @function_wrapper - def wrapper(wrapped, instance, args, kwargs): - _new_wrapper = _validate_transaction_trace_attributes(wrapped) - result = _new_wrapper(*args, **kwargs) - - _trace_data = trace_data[index] - trace_data[:] = [] - - if url is not None: - trace_url = _trace_data[0][3] - assert url == trace_url - - pack_data = unpack_field(_trace_data[0][4]) - assert len(pack_data) == 2 - assert len(pack_data[0]) == 5 - parameters = pack_data[0][4] - - assert "intrinsics" in parameters - assert "userAttributes" in parameters - assert "agentAttributes" in parameters - - check_attributes(parameters, required_params, forgone_params) - - return result - - return wrapper - - -def validate_transaction_error_trace_attributes(required_params=None, forgone_params=None, exact_attrs=None): - """Check the error trace for attributes, expect only one error to be - present in the transaction. 
- """ - required_params = required_params or {} - forgone_params = forgone_params or {} - exact_attrs = exact_attrs or {} - - @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.record_transaction") - def _validate_transaction_error_trace(wrapped, instance, args, kwargs): - try: - result = wrapped(*args, **kwargs) - except: - raise - else: - - error_data = instance.error_data() - - # there should be only one error - assert len(error_data) == 1 - traced_error = error_data[0] - - check_error_attributes( - traced_error.parameters, required_params, forgone_params, exact_attrs, is_transaction=True - ) - - return result - - return _validate_transaction_error_trace - - def check_error_attributes( parameters, required_params=None, forgone_params=None, exact_attrs=None, is_transaction=True ): @@ -833,134 +492,6 @@ def check_attributes(parameters, required_params=None, forgone_params=None, exac assert intrinsics[param] == value, ((param, value), intrinsics) -def validate_error_trace_collector_json(): - @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.record_transaction") - def _validate_error_trace_collector_json(wrapped, instance, args, kwargs): - try: - result = wrapped(*args, **kwargs) - except: - raise - else: - errors = instance.error_data() - - # recreate what happens right before data is sent to the collector - # in data_collector.py via ApplicationSession.send_errors - agent_run_id = 666 - payload = (agent_run_id, errors) - collector_json = json_encode(payload) - - decoded_json = json.loads(collector_json) - - assert decoded_json[0] == agent_run_id - err = decoded_json[1][0] - assert len(err) == 5 - assert isinstance(err[0], (int, float)) - assert isinstance(err[1], six.string_types) # path - assert isinstance(err[2], six.string_types) # error message - assert isinstance(err[3], six.string_types) # exception name - parameters = err[4] - - parameter_fields = ["userAttributes", "stack_trace", "agentAttributes", "intrinsics"] - - 
for field in parameter_fields: - assert field in parameters - - assert "request_uri" not in parameters - - return result - - return _validate_error_trace_collector_json - - -def validate_error_event_collector_json(num_errors=1): - """Validate the format, types and number of errors of the data we - send to the collector for harvest. - """ - - @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.record_transaction") - def _validate_error_event_collector_json(wrapped, instance, args, kwargs): - try: - result = wrapped(*args, **kwargs) - except: - raise - else: - - samples = list(instance.error_events) - s_info = instance.error_events.sampling_info - agent_run_id = 666 - - # emulate the payload used in data_collector.py - - payload = (agent_run_id, s_info, samples) - collector_json = json_encode(payload) - - decoded_json = json.loads(collector_json) - - assert decoded_json[0] == agent_run_id - - sampling_info = decoded_json[1] - - harvest_config = instance.settings.event_harvest_config - reservoir_size = harvest_config.harvest_limits.error_event_data - - assert sampling_info["reservoir_size"] == reservoir_size - assert sampling_info["events_seen"] == num_errors - - error_events = decoded_json[2] - - assert len(error_events) == num_errors - for event in error_events: - - # event is an array containing intrinsics, user-attributes, - # and agent-attributes - - assert len(event) == 3 - for d in event: - assert isinstance(d, dict) - - return result - - return _validate_error_event_collector_json - - -def validate_transaction_event_collector_json(): - @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.record_transaction") - def _validate_transaction_event_collector_json(wrapped, instance, args, kwargs): - try: - result = wrapped(*args, **kwargs) - except: - raise - else: - samples = list(instance.transaction_events) - - # recreate what happens right before data is sent to the collector - # in data_collector.py during the harvest via 
analytic_event_data - agent_run_id = 666 - payload = (agent_run_id, samples) - collector_json = json_encode(payload) - - decoded_json = json.loads(collector_json) - - assert decoded_json[0] == agent_run_id - - # list of events - - events = decoded_json[1] - - for event in events: - - # event is an array containing intrinsics, user-attributes, - # and agent-attributes - - assert len(event) == 3 - for d in event: - assert isinstance(d, dict) - - return result - - return _validate_transaction_event_collector_json - - def validate_custom_event_collector_json(num_events=1): """Validate the format, types and number of custom events.""" diff --git a/tests/testing_support/validators/validate_application_error_event_count.py b/tests/testing_support/validators/validate_application_error_event_count.py new file mode 100644 index 000000000..8812ed7ed --- /dev/null +++ b/tests/testing_support/validators/validate_application_error_event_count.py @@ -0,0 +1,39 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from testing_support.fixtures import core_application_stats_engine + +from newrelic.common.object_wrapper import function_wrapper + + +def validate_application_error_event_count(num_errors): + """Validate error event data for a single error occurring outside of a + transaction. 
+ """ + + @function_wrapper + def _validate_application_error_event_count(wrapped, instace, args, kwargs): + + try: + result = wrapped(*args, **kwargs) + except: + raise + else: + + stats = core_application_stats_engine(None) + assert len(list(stats.error_events)) == num_errors + + return result + + return _validate_application_error_event_count diff --git a/tests/testing_support/validators/validate_application_error_trace_count.py b/tests/testing_support/validators/validate_application_error_trace_count.py new file mode 100644 index 000000000..d700ea5eb --- /dev/null +++ b/tests/testing_support/validators/validate_application_error_trace_count.py @@ -0,0 +1,39 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from testing_support.fixtures import core_application_stats_engine + +from newrelic.common.object_wrapper import function_wrapper + + +def validate_application_error_trace_count(num_errors): + """Validate error event data for a single error occurring outside of a + transaction. 
+ """ + + @function_wrapper + def _validate_application_error_trace_count(wrapped, instace, args, kwargs): + + try: + result = wrapped(*args, **kwargs) + except: + raise + else: + + stats = core_application_stats_engine(None) + assert len(stats.error_data()) == num_errors + + return result + + return _validate_application_error_trace_count diff --git a/tests/testing_support/validators/validate_error_event_collector_json.py b/tests/testing_support/validators/validate_error_event_collector_json.py new file mode 100644 index 000000000..44940818f --- /dev/null +++ b/tests/testing_support/validators/validate_error_event_collector_json.py @@ -0,0 +1,69 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import json + +from newrelic.common.encoding_utils import json_encode +from newrelic.common.object_wrapper import transient_function_wrapper + + +def validate_error_event_collector_json(num_errors=1): + """Validate the format, types and number of errors of the data we + send to the collector for harvest. 
+ """ + + @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.record_transaction") + def _validate_error_event_collector_json(wrapped, instance, args, kwargs): + try: + result = wrapped(*args, **kwargs) + except: + raise + else: + + samples = list(instance.error_events) + s_info = instance.error_events.sampling_info + agent_run_id = 666 + + # emulate the payload used in data_collector.py + + payload = (agent_run_id, s_info, samples) + collector_json = json_encode(payload) + + decoded_json = json.loads(collector_json) + + assert decoded_json[0] == agent_run_id + + sampling_info = decoded_json[1] + + harvest_config = instance.settings.event_harvest_config + reservoir_size = harvest_config.harvest_limits.error_event_data + + assert sampling_info["reservoir_size"] == reservoir_size + assert sampling_info["events_seen"] == num_errors + + error_events = decoded_json[2] + + assert len(error_events) == num_errors + for event in error_events: + + # event is an array containing intrinsics, user-attributes, + # and agent-attributes + + assert len(event) == 3 + for d in event: + assert isinstance(d, dict) + + return result + + return _validate_error_event_collector_json diff --git a/tests/testing_support/validators/validate_error_trace_collector_json.py b/tests/testing_support/validators/validate_error_trace_collector_json.py new file mode 100644 index 000000000..e4d14ee21 --- /dev/null +++ b/tests/testing_support/validators/validate_error_trace_collector_json.py @@ -0,0 +1,58 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import json + +from newrelic.common.encoding_utils import json_encode +from newrelic.common.object_wrapper import transient_function_wrapper +from newrelic.packages import six + + +def validate_error_trace_collector_json(): + @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.record_transaction") + def _validate_error_trace_collector_json(wrapped, instance, args, kwargs): + try: + result = wrapped(*args, **kwargs) + except: + raise + else: + errors = instance.error_data() + + # recreate what happens right before data is sent to the collector + # in data_collector.py via ApplicationSession.send_errors + agent_run_id = 666 + payload = (agent_run_id, errors) + collector_json = json_encode(payload) + + decoded_json = json.loads(collector_json) + + assert decoded_json[0] == agent_run_id + err = decoded_json[1][0] + assert len(err) == 5 + assert isinstance(err[0], (int, float)) + assert isinstance(err[1], six.string_types) # path + assert isinstance(err[2], six.string_types) # error message + assert isinstance(err[3], six.string_types) # exception name + parameters = err[4] + + parameter_fields = ["userAttributes", "stack_trace", "agentAttributes", "intrinsics"] + + for field in parameter_fields: + assert field in parameters + + assert "request_uri" not in parameters + + return result + + return _validate_error_trace_collector_json diff --git a/tests/testing_support/validators/validate_non_transaction_error_event.py b/tests/testing_support/validators/validate_non_transaction_error_event.py index fa14ae37d..97048d103 100644 --- a/tests/testing_support/validators/validate_non_transaction_error_event.py +++ b/tests/testing_support/validators/validate_non_transaction_error_event.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import time +from time import time from testing_support.fixtures import core_application_stats_engine @@ -54,7 +54,7 @@ def _validate_non_transaction_error_event(wrapped, instace, args, kwargs): assert intrinsics["error.class"] == required_intrinsics["error.class"] assert intrinsics["error.message"].startswith(required_intrinsics["error.message"]) assert intrinsics["error.expected"] == required_intrinsics["error.expected"] - now = time.time() + now = time() assert isinstance(intrinsics["timestamp"], int) assert intrinsics["timestamp"] <= 1000.0 * now diff --git a/tests/testing_support/validators/validate_synthetics_transaction_trace.py b/tests/testing_support/validators/validate_synthetics_transaction_trace.py new file mode 100644 index 000000000..7227d0327 --- /dev/null +++ b/tests/testing_support/validators/validate_synthetics_transaction_trace.py @@ -0,0 +1,67 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from newrelic.common.encoding_utils import unpack_field +from newrelic.common.object_wrapper import transient_function_wrapper +from newrelic.core.database_utils import SQLConnections + + +def validate_synthetics_transaction_trace(required_params=None, forgone_params=None, should_exist=True): + required_params = required_params or {} + forgone_params = forgone_params or {} + + @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.record_transaction") + def _validate_synthetics_transaction_trace(wrapped, instance, args, kwargs): + try: + result = wrapped(*args, **kwargs) + except: + raise + else: + + # Now that transaction has been recorded, generate + # a transaction trace + + connections = SQLConnections() + trace_data = instance.transaction_trace_data(connections) + + # Check that synthetics resource id is in TT header + + header = trace_data[0] + header_key = "synthetics_resource_id" + + if should_exist: + assert header_key in required_params + assert header[9] == required_params[header_key], "name=%r, header=%r" % (header_key, header) + else: + assert header[9] is None + + # Check that synthetics ids are in TT custom params + + pack_data = unpack_field(trace_data[0][4]) + tt_intrinsics = pack_data[0][4]["intrinsics"] + + for name in required_params: + assert name in tt_intrinsics, "name=%r, intrinsics=%r" % (name, tt_intrinsics) + assert tt_intrinsics[name] == required_params[name], "name=%r, value=%r, intrinsics=%r" % ( + name, + required_params[name], + tt_intrinsics, + ) + + for name in forgone_params: + assert name not in tt_intrinsics, "name=%r, intrinsics=%r" % (name, tt_intrinsics) + + return result + + return _validate_synthetics_transaction_trace diff --git a/tests/testing_support/validators/validate_transaction_error_trace_attributes.py b/tests/testing_support/validators/validate_transaction_error_trace_attributes.py new file mode 100644 index 000000000..90734d348 --- /dev/null +++ 
b/tests/testing_support/validators/validate_transaction_error_trace_attributes.py @@ -0,0 +1,49 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from testing_support.fixtures import check_error_attributes + +from newrelic.common.object_wrapper import transient_function_wrapper + + +def validate_transaction_error_trace_attributes(required_params=None, forgone_params=None, exact_attrs=None): + """Check the error trace for attributes, expect only one error to be + present in the transaction. 
+ """ + required_params = required_params or {} + forgone_params = forgone_params or {} + exact_attrs = exact_attrs or {} + + @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.record_transaction") + def _validate_transaction_error_trace(wrapped, instance, args, kwargs): + try: + result = wrapped(*args, **kwargs) + except: + raise + else: + + error_data = instance.error_data() + + # there should be only one error + assert len(error_data) == 1 + traced_error = error_data[0] + + check_error_attributes( + traced_error.parameters, required_params, forgone_params, exact_attrs, is_transaction=True + ) + + return result + + return _validate_transaction_error_trace diff --git a/tests/testing_support/validators/validate_transaction_event_collector_json.py b/tests/testing_support/validators/validate_transaction_event_collector_json.py new file mode 100644 index 000000000..765a3b8e0 --- /dev/null +++ b/tests/testing_support/validators/validate_transaction_event_collector_json.py @@ -0,0 +1,56 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import json + +from newrelic.common.encoding_utils import json_encode +from newrelic.common.object_wrapper import transient_function_wrapper + + +def validate_transaction_event_collector_json(): + @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.record_transaction") + def _validate_transaction_event_collector_json(wrapped, instance, args, kwargs): + try: + result = wrapped(*args, **kwargs) + except: + raise + else: + samples = list(instance.transaction_events) + + # recreate what happens right before data is sent to the collector + # in data_collector.py during the harvest via analytic_event_data + agent_run_id = 666 + payload = (agent_run_id, samples) + collector_json = json_encode(payload) + + decoded_json = json.loads(collector_json) + + assert decoded_json[0] == agent_run_id + + # list of events + + events = decoded_json[1] + + for event in events: + + # event is an array containing intrinsics, user-attributes, + # and agent-attributes + + assert len(event) == 3 + for d in event: + assert isinstance(d, dict) + + return result + + return _validate_transaction_event_collector_json diff --git a/tests/testing_support/validators/validate_transaction_trace_attributes.py b/tests/testing_support/validators/validate_transaction_trace_attributes.py new file mode 100644 index 000000000..dafe9b920 --- /dev/null +++ b/tests/testing_support/validators/validate_transaction_trace_attributes.py @@ -0,0 +1,69 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from testing_support.fixtures import check_attributes + +from newrelic.common.encoding_utils import unpack_field +from newrelic.common.object_wrapper import function_wrapper, transient_function_wrapper +from newrelic.core.database_utils import SQLConnections + + +def validate_transaction_trace_attributes( + required_params=None, forgone_params=None, should_exist=True, url=None, index=-1 +): + required_params = required_params or {} + forgone_params = forgone_params or {} + + trace_data = [] + + @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.record_transaction") + def _validate_transaction_trace_attributes(wrapped, instance, args, kwargs): + + result = wrapped(*args, **kwargs) + + # Now that transaction has been recorded, generate + # a transaction trace + + connections = SQLConnections() + _trace_data = instance.transaction_trace_data(connections) + trace_data.append(_trace_data) + + return result + + @function_wrapper + def wrapper(wrapped, instance, args, kwargs): + _new_wrapper = _validate_transaction_trace_attributes(wrapped) + result = _new_wrapper(*args, **kwargs) + + _trace_data = trace_data[index] + trace_data[:] = [] + + if url is not None: + trace_url = _trace_data[0][3] + assert url == trace_url + + pack_data = unpack_field(_trace_data[0][4]) + assert len(pack_data) == 2 + assert len(pack_data[0]) == 5 + parameters = pack_data[0][4] + + assert "intrinsics" in parameters + assert "userAttributes" in parameters + assert "agentAttributes" in parameters + + check_attributes(parameters, required_params, forgone_params) + + return result + + return wrapper diff --git a/tests/testing_support/validators/validate_tt_collector_json.py b/tests/testing_support/validators/validate_tt_collector_json.py new file mode 100644 index 000000000..85e393280 --- /dev/null +++ b/tests/testing_support/validators/validate_tt_collector_json.py @@ -0,0 
+1,184 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from newrelic.common.encoding_utils import unpack_field +from newrelic.common.object_wrapper import function_wrapper, transient_function_wrapper +from newrelic.common.system_info import LOCALHOST_EQUIVALENTS +from newrelic.core.database_utils import SQLConnections +from newrelic.packages import six + + +def _lookup_string_table(name, string_table, default=None): + try: + index = int(name.lstrip("`")) + return string_table[index] + except ValueError: + return default + + +def validate_tt_collector_json( + required_params=None, + forgone_params=None, + should_exist=True, + datastore_params=None, + datastore_forgone_params=None, + message_broker_params=None, + message_broker_forgone_params=None, + exclude_request_uri=False, +): + """make assertions based off the cross-agent spec on transaction traces""" + required_params = required_params or {} + forgone_params = forgone_params or {} + datastore_params = datastore_params or {} + datastore_forgone_params = datastore_forgone_params or {} + message_broker_params = message_broker_params or {} + message_broker_forgone_params = message_broker_forgone_params or [] + + @function_wrapper + def _validate_wrapper(wrapped, instance, args, kwargs): + + traces_recorded = [] + + @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.record_transaction") + def _validate_tt_collector_json(wrapped, instance, args, kwargs): + + 
result = wrapped(*args, **kwargs) + + # Now that transaction has been recorded, generate + # a transaction trace + + connections = SQLConnections() + trace_data = instance.transaction_trace_data(connections) + traces_recorded.append(trace_data) + + return result + + def _validate_trace(trace): + assert isinstance(trace[0], float) # absolute start time (ms) + assert isinstance(trace[1], float) # duration (ms) + assert trace[0] > 0 # absolute time (ms) + assert isinstance(trace[2], six.string_types) # transaction name + if trace[2].startswith("WebTransaction"): + if exclude_request_uri: + assert trace[3] is None # request url + else: + assert isinstance(trace[3], six.string_types) + # query parameters should not be captured + assert "?" not in trace[3] + + # trace details -- python agent always uses condensed trace array + + trace_details, string_table = unpack_field(trace[4]) + assert len(trace_details) == 5 + assert isinstance(trace_details[0], float) # start time (ms) + + # the next two items should be empty dicts, old parameters stuff, + # placeholders for now + + assert isinstance(trace_details[1], dict) + assert len(trace_details[1]) == 0 + assert isinstance(trace_details[2], dict) + assert len(trace_details[2]) == 0 + + # root node in slot 3 + + root_node = trace_details[3] + assert isinstance(root_node[0], float) # entry timestamp + assert isinstance(root_node[1], float) # exit timestamp + assert root_node[2] == "ROOT" + assert isinstance(root_node[3], dict) + assert len(root_node[3]) == 0 # spec shows empty (for root) + children = root_node[4] + assert isinstance(children, list) + + # there are two optional items at the end of trace segments, + # class name that segment is in, and method name function is in; + # Python agent does not use these (only Java does) + + # let's just test the first child + trace_segment = children[0] + assert isinstance(trace_segment[0], float) # entry timestamp + assert isinstance(trace_segment[1], float) # exit timestamp + assert 
isinstance(trace_segment[2], six.string_types) # scope + assert isinstance(trace_segment[3], dict) # request params + assert isinstance(trace_segment[4], list) # children + + assert trace_segment[0] >= root_node[0] # trace starts after root + + def _check_params_and_start_time(node): + children = node[4] + for child in children: + assert child[0] >= node[0] # child started after parent + _check_params_and_start_time(child) + + params = node[3] + assert isinstance(params, dict) + + # We should always report exclusive_duration_millis on a + # segment. This allows us to override exclusive time + # calculations on APM. + assert "exclusive_duration_millis" in params + assert isinstance(params["exclusive_duration_millis"], float) + + segment_name = _lookup_string_table(node[2], string_table, default=node[2]) + if segment_name.startswith("Datastore"): + for key in datastore_params: + assert key in params, key + assert params[key] == datastore_params[key] + for key in datastore_forgone_params: + assert key not in params, key + + # if host is reported, it cannot be localhost + if "host" in params: + assert params["host"] not in LOCALHOST_EQUIVALENTS + + elif segment_name.startswith("MessageBroker"): + for key in message_broker_params: + assert key in params, key + assert params[key] == message_broker_params[key] + for key in message_broker_forgone_params: + assert key not in params, key + + _check_params_and_start_time(trace_segment) + + attributes = trace_details[4] + + assert "intrinsics" in attributes + assert "userAttributes" in attributes + assert "agentAttributes" in attributes + + assert isinstance(trace[5], six.string_types) # GUID + assert trace[6] is None # reserved for future use + assert trace[7] is False # deprecated force persist flag + + # x-ray session ID + + assert trace[8] is None + + # Synthetics ID + + assert trace[9] is None or isinstance(trace[9], six.string_types) + + assert isinstance(string_table, list) + for name in string_table: + assert 
isinstance(name, six.string_types) # metric name + + _new_wrapper = _validate_tt_collector_json(wrapped) + val = _new_wrapper(*args, **kwargs) + trace_data = traces_recorded.pop() + trace = trace_data[0] # 1st trace + _validate_trace(trace) + return val + + return _validate_wrapper From 864f0865d998e49e23c84f12a2023cf421bd2b49 Mon Sep 17 00:00:00 2001 From: Hannah Stepanek Date: Tue, 29 Nov 2022 17:59:06 -0800 Subject: [PATCH 029/108] Fix bug in celery where works don't report data (#696) This fixes Missing information from Celery workers when using MAX_TASKS_PER_CHILD issue. Previously, if celery was run with the --loglevel=INFO flag, an agent instance would be created for the main celery process and after the first worker shutdown, all following worker's agent instances would point to that agent instance instead of creating a new instance. This was root caused to incorrectly creating an agent instance when application activate was not set. Now no agent instance will be created for the main celery process. --- newrelic/api/application.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/newrelic/api/application.py b/newrelic/api/application.py index 41a1b0cd3..ea57829f2 100644 --- a/newrelic/api/application.py +++ b/newrelic/api/application.py @@ -33,18 +33,18 @@ def _instance(name, activate=True): if name is None: name = newrelic.core.config.global_settings().app_name - # Ensure we grab a reference to the agent before grabbing - # the lock, else startup callback on agent initialisation - # could deadlock as it tries to create a application when - # we already have the lock held. - - agent = newrelic.core.agent.agent_instance() - # Try first without lock. If we find it we can return. 
instance = Application._instances.get(name, None) if not instance and activate: + # Ensure we grab a reference to the agent before grabbing + # the lock, else startup callback on agent initialisation + # could deadlock as it tries to create a application when + # we already have the lock held. + + agent = newrelic.core.agent.agent_instance() + with Application._lock: # Now try again with lock so that only one gets # to create and add it. From b849010f037af7b45c6005dc8d131094b257b280 Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Thu, 1 Dec 2022 15:57:50 -0800 Subject: [PATCH 030/108] Reverts removal of flask_restful hooks. (#705) --- newrelic/config.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/newrelic/config.py b/newrelic/config.py index f0b638cd4..0bfb7daa0 100644 --- a/newrelic/config.py +++ b/newrelic/config.py @@ -2162,6 +2162,11 @@ def _process_module_builtin_defaults(): ) _process_module_definition("flask_restful", "newrelic.hooks.component_flask_rest", "instrument_flask_rest") + _process_module_definition( + "flask_restplus.api", + "newrelic.hooks.component_flask_rest", + "instrument_flask_rest", + ) _process_module_definition( "flask_restx.api", "newrelic.hooks.component_flask_rest", From a63e33f277af0b10a9b48c1743587c00ec990633 Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Mon, 5 Dec 2022 16:18:08 -0800 Subject: [PATCH 031/108] Update instrumented methods in redis. 
(#707) Co-authored-by: Lalleh Rafeei Co-authored-by: Uma Annamalai Co-authored-by: Hannah Stepanek Co-authored-by: Lalleh Rafeei Co-authored-by: Uma Annamalai Co-authored-by: Hannah Stepanek --- newrelic/hooks/datastore_redis.py | 1 + .../test_uninstrumented_methods.py | 43 ++++++++++--------- 2 files changed, 23 insertions(+), 21 deletions(-) diff --git a/newrelic/hooks/datastore_redis.py b/newrelic/hooks/datastore_redis.py index 7ea60f1fa..26d1facb0 100644 --- a/newrelic/hooks/datastore_redis.py +++ b/newrelic/hooks/datastore_redis.py @@ -55,6 +55,7 @@ "bgsave", "bitcount", "bitfield", + "bitfield_ro", "bitop_and", "bitop_not", "bitop_or", diff --git a/tests/datastore_redis/test_uninstrumented_methods.py b/tests/datastore_redis/test_uninstrumented_methods.py index 314f9f203..f9c675659 100644 --- a/tests/datastore_redis/test_uninstrumented_methods.py +++ b/tests/datastore_redis/test_uninstrumented_methods.py @@ -27,6 +27,8 @@ 'MODULE_CALLBACKS', 'MODULE_VERSION', 'NAME', + "add_edge", + "add_node", "append_bucket_size", "append_capacity", "append_error", @@ -38,6 +40,10 @@ "append_no_scale", "append_values_and_weights", "append_weights", + "batch_indexer", + "BatchIndexer", + "bulk", + "call_procedure", "client_tracking_off", "client_tracking_on", "client", @@ -46,45 +52,40 @@ "connection_pool", "connection", "debug_segfault", + "edges", "execute_command", + "flush", "from_url", "get_connection_kwargs", "get_encoder", + "get_label", + "get_params_args", + "get_property", + "get_relation", + "get_retry", "hscan_iter", + "index_name", + "labels", + "list_keys", + "load_document", "load_external_module", "lock", + "name", + "nodes", "parse_response", "pipeline", + "property_keys", "register_script", + "relationship_types", "response_callbacks", "RESPONSE_CALLBACKS", "sentinel", "set_file", "set_path", "set_response_callback", + "set_retry", "transaction", - "BatchIndexer", - "batch_indexer", - "get_params_args", - "index_name", - "load_document", - "add_edge", - 
"add_node", - "bulk", - "call_procedure", - "edges", - "flush", - "get_label", - "get_property", - "get_relation", - "labels", - "list_keys", - "name", - "nodes", - "property_keys", - "relationship_types", "version", - } REDIS_MODULES = { From f977ba6b903d095432677f703e3084af71d109e2 Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Tue, 6 Dec 2022 16:20:13 -0800 Subject: [PATCH 032/108] Add TraceCache Guarded Iteration (#704) * Add MutableMapping API to TraceCache * Update trace cache usage to use guarded APIs. * [Mega-Linter] Apply linters fixes * Bump tests * Fix keys iterator * Comments for trace cache methods * Reorganize tests * Fix fixture refs * Fix testing refs * [Mega-Linter] Apply linters fixes * Bump tests * Upper case constant Co-authored-by: TimPansino Co-authored-by: Lalleh Rafeei <84813886+lrafeei@users.noreply.github.com> --- newrelic/core/context.py | 10 +- newrelic/core/trace_cache.py | 103 +++++++++++--- .../test_async_context_propagation.py | 12 +- .../test_event_loop_wait_time.py | 2 +- tests/agent_unittests/test_trace_cache.py | 129 ++++++++++++++++++ .../test_context_propagation.py | 11 +- 6 files changed, 228 insertions(+), 39 deletions(-) create mode 100644 tests/agent_unittests/test_trace_cache.py diff --git a/newrelic/core/context.py b/newrelic/core/context.py index 95de15b4e..7560855ae 100644 --- a/newrelic/core/context.py +++ b/newrelic/core/context.py @@ -46,7 +46,7 @@ def log_propagation_failure(s): elif trace is not None: self.trace = trace elif trace_cache_id is not None: - self.trace = self.trace_cache._cache.get(trace_cache_id, None) + self.trace = self.trace_cache.get(trace_cache_id, None) if self.trace is None: log_propagation_failure("No trace with id %d." 
% trace_cache_id) elif hasattr(request, "_nr_trace") and request._nr_trace is not None: @@ -60,11 +60,11 @@ def __enter__(self): self.thread_id = self.trace_cache.current_thread_id() # Save previous cache contents - self.restore = self.trace_cache._cache.get(self.thread_id, None) + self.restore = self.trace_cache.get(self.thread_id, None) self.should_restore = True # Set context in trace cache - self.trace_cache._cache[self.thread_id] = self.trace + self.trace_cache[self.thread_id] = self.trace return self @@ -72,10 +72,10 @@ def __exit__(self, exc, value, tb): if self.should_restore: if self.restore is not None: # Restore previous contents - self.trace_cache._cache[self.thread_id] = self.restore + self.trace_cache[self.thread_id] = self.restore else: # Remove entry from cache - self.trace_cache._cache.pop(self.thread_id) + self.trace_cache.pop(self.thread_id) def context_wrapper(func, trace=None, request=None, trace_cache_id=None, strict=True): diff --git a/newrelic/core/trace_cache.py b/newrelic/core/trace_cache.py index 1634d0d0b..5f0ddcd3d 100644 --- a/newrelic/core/trace_cache.py +++ b/newrelic/core/trace_cache.py @@ -28,6 +28,11 @@ except ImportError: import _thread as thread +try: + from collections.abc import MutableMapping +except ImportError: + from collections import MutableMapping + from newrelic.core.config import global_settings from newrelic.core.loop_node import LoopNode @@ -92,7 +97,7 @@ class TraceCacheActiveTraceError(RuntimeError): pass -class TraceCache(object): +class TraceCache(MutableMapping): asyncio = cached_module("asyncio") greenlet = cached_module("greenlet") @@ -100,7 +105,7 @@ def __init__(self): self._cache = weakref.WeakValueDictionary() def __repr__(self): - return "<%s object at 0x%x %s>" % (self.__class__.__name__, id(self), str(dict(self._cache.items()))) + return "<%s object at 0x%x %s>" % (self.__class__.__name__, id(self), str(dict(self.items()))) def current_thread_id(self): """Returns the thread ID for the caller. 
@@ -135,10 +140,10 @@ def current_thread_id(self): def task_start(self, task): trace = self.current_trace() if trace: - self._cache[id(task)] = trace + self[id(task)] = trace def task_stop(self, task): - self._cache.pop(id(task), None) + self.pop(id(task), None) def current_transaction(self): """Return the transaction object if one exists for the currently @@ -146,11 +151,11 @@ def current_transaction(self): """ - trace = self._cache.get(self.current_thread_id()) + trace = self.get(self.current_thread_id()) return trace and trace.transaction def current_trace(self): - return self._cache.get(self.current_thread_id()) + return self.get(self.current_thread_id()) def active_threads(self): """Returns an iterator over all current stack frames for all @@ -169,7 +174,7 @@ def active_threads(self): # First yield up those for real Python threads. for thread_id, frame in sys._current_frames().items(): - trace = self._cache.get(thread_id) + trace = self.get(thread_id) transaction = trace and trace.transaction if transaction is not None: if transaction.background_task: @@ -197,7 +202,7 @@ def active_threads(self): debug = global_settings().debug if debug.enable_coroutine_profiling: - for thread_id, trace in list(self._cache.items()): + for thread_id, trace in self.items(): transaction = trace.transaction if transaction and transaction._greenlet is not None: gr = transaction._greenlet() @@ -212,7 +217,7 @@ def prepare_for_root(self): trace in the cache is from a different task (for asyncio). 
Returns the current trace after the cache is updated.""" thread_id = self.current_thread_id() - trace = self._cache.get(thread_id) + trace = self.get(thread_id) if not trace: return None @@ -221,11 +226,11 @@ def prepare_for_root(self): task = current_task(self.asyncio) if task is not None and id(trace._task) != id(task): - self._cache.pop(thread_id, None) + self.pop(thread_id, None) return None if trace.root and trace.root.exited: - self._cache.pop(thread_id, None) + self.pop(thread_id, None) return None return trace @@ -240,8 +245,8 @@ def save_trace(self, trace): thread_id = trace.thread_id - if thread_id in self._cache: - cache_root = self._cache[thread_id].root + if thread_id in self: + cache_root = self[thread_id].root if cache_root and cache_root is not trace.root and not cache_root.exited: # Cached trace exists and has a valid root still _logger.error( @@ -253,7 +258,7 @@ def save_trace(self, trace): raise TraceCacheActiveTraceError("transaction already active") - self._cache[thread_id] = trace + self[thread_id] = trace # We judge whether we are actually running in a coroutine by # seeing if the current thread ID is actually listed in the set @@ -284,7 +289,7 @@ def pop_current(self, trace): thread_id = trace.thread_id parent = trace.parent - self._cache[thread_id] = parent + self[thread_id] = parent def complete_root(self, root): """Completes a trace specified by the given root @@ -301,7 +306,7 @@ def complete_root(self, root): to_complete = [] for task_id in task_ids: - entry = self._cache.get(task_id) + entry = self.get(task_id) if entry and entry is not root and entry.root is root: to_complete.append(entry) @@ -316,12 +321,12 @@ def complete_root(self, root): thread_id = root.thread_id - if thread_id not in self._cache: + if thread_id not in self: thread_id = self.current_thread_id() - if thread_id not in self._cache: + if thread_id not in self: raise TraceCacheNoActiveTraceError("no active trace") - current = self._cache.get(thread_id) + current = 
self.get(thread_id) if root is not current: _logger.error( @@ -333,7 +338,7 @@ def complete_root(self, root): raise RuntimeError("not the current trace") - del self._cache[thread_id] + del self[thread_id] root._greenlet = None def record_event_loop_wait(self, start_time, end_time): @@ -359,7 +364,7 @@ def record_event_loop_wait(self, start_time, end_time): task = getattr(transaction.root_span, "_task", None) loop = get_event_loop(task) - for trace in list(self._cache.values()): + for trace in self.values(): if trace in seen: continue @@ -390,6 +395,62 @@ def record_event_loop_wait(self, start_time, end_time): root.increment_child_count() root.add_child(node) + # MutableMapping methods + + def items(self): + """ + Safely iterates on self._cache.items() indirectly using a list of value references + to avoid RuntimeErrors from size changes during iteration. + """ + for wr in self._cache.valuerefs(): + value = wr() # Dereferenced value is potentially no longer live. + if ( + value is not None + ): # weakref is None means weakref has been garbage collected and is no longer live. Ignore. + yield wr.key, value # wr.key is the original dict key + + def keys(self): + """ + Iterates on self._cache.keys() indirectly using a list of value references + to avoid RuntimeErrors from size changes during iteration. + + NOTE: Returned keys are keys to weak references which may at any point be garbage collected. + It is only safe to retrieve values from the trace cache using trace_cache.get(key, None). + Retrieving values using trace_cache[key] can cause a KeyError if the item has been garbage collected. + """ + for wr in self._cache.valuerefs(): + yield wr.key # wr.key is the original dict key + + def values(self): + """ + Safely iterates on self._cache.values() indirectly using a list of value references + to avoid RuntimeErrors from size changes during iteration. + """ + for wr in self._cache.valuerefs(): + value = wr() # Dereferenced value is potentially no longer live. 
+ if ( + value is not None + ): # weakref is None means weakref has been garbage collected and is no longer live. Ignore. + yield value + + def __getitem__(self, key): + return self._cache.__getitem__(key) + + def __setitem__(self, key, value): + self._cache.__setitem__(key, value) + + def __delitem__(self, key): + self._cache.__delitem__(key) + + def __iter__(self): + return self.keys() + + def __len__(self): + return self._cache.__len__() + + def __bool__(self): + return bool(self._cache.__len__()) + _trace_cache = TraceCache() diff --git a/tests/agent_features/test_async_context_propagation.py b/tests/agent_features/test_async_context_propagation.py index 8026cbbcc..47d16cfc5 100644 --- a/tests/agent_features/test_async_context_propagation.py +++ b/tests/agent_features/test_async_context_propagation.py @@ -13,11 +13,11 @@ # limitations under the License. import pytest -from testing_support.fixtures import ( - function_not_called, - override_generic_settings, +from testing_support.fixtures import function_not_called, override_generic_settings +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, ) -from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics + from newrelic.api.application import application_instance as application from newrelic.api.background_task import BackgroundTask, background_task from newrelic.api.database_trace import database_trace @@ -131,7 +131,7 @@ def handle_exception(loop, context): # The agent should have removed all traces from the cache since # run_until_complete has terminated (all callbacks scheduled inside the # task have run) - assert not trace_cache()._cache + assert not trace_cache() # Assert that no exceptions have occurred assert not exceptions, exceptions @@ -286,7 +286,7 @@ def _test(): # The agent should have removed all traces from the cache since # run_until_complete has terminated - assert not trace_cache()._cache + assert not 
trace_cache() # Assert that no exceptions have occurred assert not exceptions, exceptions diff --git a/tests/agent_features/test_event_loop_wait_time.py b/tests/agent_features/test_event_loop_wait_time.py index 69e6fc102..84c65dcdc 100644 --- a/tests/agent_features/test_event_loop_wait_time.py +++ b/tests/agent_features/test_event_loop_wait_time.py @@ -140,7 +140,7 @@ def _test(): def test_record_event_loop_wait_outside_task(): # Insert a random trace into the trace cache trace = FunctionTrace(name="testing") - trace_cache()._cache[0] = trace + trace_cache()[0] = trace @background_task(name="test_record_event_loop_wait_outside_task") def _test(): diff --git a/tests/agent_unittests/test_trace_cache.py b/tests/agent_unittests/test_trace_cache.py new file mode 100644 index 000000000..e0f7db84f --- /dev/null +++ b/tests/agent_unittests/test_trace_cache.py @@ -0,0 +1,129 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import threading + +import pytest + +from newrelic.core.trace_cache import TraceCache + +_TEST_CONCURRENT_ITERATION_TC_SIZE = 20 + + +class DummyTrace(object): + pass + + +@pytest.fixture(scope="function") +def trace_cache(): + return TraceCache() + + +def test_trace_cache_methods(trace_cache): + """Test MutableMapping methods functional for trace_cache""" + obj = DummyTrace() # weakref compatible object + + trace_cache[1] = obj + assert 1 in trace_cache + assert bool(trace_cache) + assert list(trace_cache) + + del trace_cache[1] + assert 1 not in trace_cache + assert not bool(trace_cache) + + trace_cache[1] = obj + assert trace_cache.get(1, None) + assert trace_cache.pop(1, None) + + trace_cache[1] = obj + assert len(trace_cache) == 1 + assert len(list(trace_cache.items())) == 1 + assert len(list(trace_cache.keys())) == 1 + assert len(list(trace_cache.values())) == 1 + + +@pytest.fixture(scope="function") +def iterate_trace_cache(trace_cache): + def _iterate_trace_cache(shutdown): + while True: + if shutdown.is_set(): + return + for k, v in trace_cache.items(): + pass + for v in trace_cache.values(): + pass + for v in trace_cache.keys(): + pass + + return _iterate_trace_cache + + +@pytest.fixture(scope="function") +def change_weakref_dict_size(trace_cache): + def _change_weakref_dict_size(shutdown, obj_refs): + """ + Cause RuntimeErrors when iterating on the trace_cache by: + - Repeatedly pop and add batches of keys to cause size changes. + - Randomly delete and replace some object refs so the weak references are deleted, + causing the weakref dict to delete them and forcing further size changes. 
+ """ + + dict_size_change = _TEST_CONCURRENT_ITERATION_TC_SIZE // 2 # Remove up to half of items + while True: + if shutdown.is_set(): + return + + # Delete and re-add keys + for i in range(dict_size_change): + trace_cache._cache.pop(i, None) + for i in range(dict_size_change): + trace_cache._cache[i] = obj_refs[i] + + # Replace every 3rd obj ref causing the WeakValueDictionary to drop it. + for i, _ in enumerate(obj_refs[::3]): + obj_refs[i] = DummyTrace() + + return _change_weakref_dict_size + + +def test_concurrent_iteration(iterate_trace_cache, change_weakref_dict_size): + """ + Test for exceptions related to trace_cache changing size during iteration. + + The WeakValueDictionary used internally is particularly prone to this, as iterating + on it in any way other than indirectly through WeakValueDictionary.valuerefs() + will cause RuntimeErrors due to the unguarded iteration on a dictionary internally. + """ + obj_refs = [DummyTrace() for _ in range(_TEST_CONCURRENT_ITERATION_TC_SIZE)] + shutdown = threading.Event() + + t1 = threading.Thread(target=change_weakref_dict_size, args=(shutdown, obj_refs)) + t2 = threading.Thread(target=iterate_trace_cache, args=(shutdown,)) + t1.daemon = True + t2.daemon = True + t1.start() + t2.start() + + # Run for 1 second, then shutdown. Stop immediately for exceptions. + t2.join(timeout=1) + assert t1.is_alive(), "Thread exited with exception." + assert t2.is_alive(), "Thread exited with exception." + shutdown.set() + + # Ensure threads shutdown with a timeout to prevent hangs + t1.join(timeout=1) + t2.join(timeout=1) + assert not t1.is_alive(), "Thread failed to exit." + assert not t2.is_alive(), "Thread failed to exit." 
diff --git a/tests/coroutines_asyncio/test_context_propagation.py b/tests/coroutines_asyncio/test_context_propagation.py index 09fccffb2..ef26aacc1 100644 --- a/tests/coroutines_asyncio/test_context_propagation.py +++ b/tests/coroutines_asyncio/test_context_propagation.py @@ -15,12 +15,11 @@ import sys import pytest -from testing_support.fixtures import ( - function_not_called, - override_generic_settings, +from testing_support.fixtures import function_not_called, override_generic_settings +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, ) -from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from newrelic.api.application import application_instance as application from newrelic.api.background_task import BackgroundTask, background_task from newrelic.api.database_trace import database_trace @@ -132,7 +131,7 @@ def handle_exception(loop, context): # The agent should have removed all traces from the cache since # run_until_complete has terminated (all callbacks scheduled inside the # task have run) - assert not trace_cache()._cache + assert not trace_cache() # Assert that no exceptions have occurred assert not exceptions, exceptions @@ -290,7 +289,7 @@ def _test(): # The agent should have removed all traces from the cache since # run_until_complete has terminated - assert not trace_cache()._cache + assert not trace_cache() # Assert that no exceptions have occurred assert not exceptions, exceptions From fd0fd01e40b00c9c8b4130ee884ee70a82fe8b5e Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Wed, 7 Dec 2022 13:50:37 -0800 Subject: [PATCH 033/108] Fix Type Constructor Classes in Code Level Metrics (#708) * Fix CLM exception catching * Reorganize CLM Tests * Add type constructor tests to CLM * Fix line number * Pin tox version * Fix lambda tests in CLM * Fix lint issues * Turn helper func into pytest fixture Co-authored-by: 
Hannah Stepanek --- .../actions/setup-python-matrix/action.yml | 2 +- newrelic/core/code_level_metrics.py | 2 +- .../_test_code_level_metrics.py | 41 +- .../agent_features/test_code_level_metrics.py | 437 ++++++++++++------ 4 files changed, 340 insertions(+), 142 deletions(-) diff --git a/.github/actions/setup-python-matrix/action.yml b/.github/actions/setup-python-matrix/action.yml index 3654f7eb2..bcb5cbc78 100644 --- a/.github/actions/setup-python-matrix/action.yml +++ b/.github/actions/setup-python-matrix/action.yml @@ -47,4 +47,4 @@ runs: shell: bash run: | python3.10 -m pip install -U pip - python3.10 -m pip install -U wheel setuptools tox virtualenv!=20.0.24 + python3.10 -m pip install -U wheel setuptools 'tox<4' virtualenv!=20.0.24 diff --git a/newrelic/core/code_level_metrics.py b/newrelic/core/code_level_metrics.py index 652715eab..ba00d93af 100644 --- a/newrelic/core/code_level_metrics.py +++ b/newrelic/core/code_level_metrics.py @@ -89,7 +89,7 @@ def extract_code_from_callable(func): # Use inspect to get file and line number file_path = inspect.getsourcefile(func) line_number = inspect.getsourcelines(func)[1] - except TypeError: + except Exception: pass # Split function path to extract class name diff --git a/tests/agent_features/_test_code_level_metrics.py b/tests/agent_features/_test_code_level_metrics.py index 90529320d..bbe3363f4 100644 --- a/tests/agent_features/_test_code_level_metrics.py +++ b/tests/agent_features/_test_code_level_metrics.py @@ -13,11 +13,12 @@ # limitations under the License. 
import functools + def exercise_function(): return -class ExerciseClass(): +class ExerciseClass(object): def exercise_method(self): return @@ -30,12 +31,46 @@ def exercise_class_method(cls): return -class ExerciseClassCallable(): +class ExerciseClassCallable(object): def __call__(self): return + +def exercise_method(self): + return + + +@staticmethod +def exercise_static_method(): + return + + +@classmethod +def exercise_class_method(cls): + return + + +def __call__(self): + return + + +type_dict = { + "exercise_method": exercise_method, + "exercise_static_method": exercise_static_method, + "exercise_class_method": exercise_class_method, + "exercise_lambda": lambda: None, +} +callable_type_dict = type_dict.copy() +callable_type_dict["__call__"] = __call__ + +ExerciseTypeConstructor = type("ExerciseTypeConstructor", (object,), type_dict) +ExerciseTypeConstructorCallable = type("ExerciseTypeConstructorCallable", (object,), callable_type_dict) + + CLASS_INSTANCE = ExerciseClass() CLASS_INSTANCE_CALLABLE = ExerciseClassCallable() +TYPE_CONSTRUCTOR_CLASS_INSTANCE = ExerciseTypeConstructor() +TYPE_CONSTRUCTOR_CALLABLE_CLASS_INSTANCE = ExerciseTypeConstructorCallable() -exercise_lambda = lambda: None +exercise_lambda = lambda: None # noqa: E731 exercise_partial = functools.partial(exercise_function) diff --git a/tests/agent_features/test_code_level_metrics.py b/tests/agent_features/test_code_level_metrics.py index 1d2bd6c3a..a7aeaa39a 100644 --- a/tests/agent_features/test_code_level_metrics.py +++ b/tests/agent_features/test_code_level_metrics.py @@ -12,25 +12,40 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import sys import sqlite3 -import newrelic.packages.six as six -import pytest +import sys -from testing_support.fixtures import override_application_settings, dt_enabled +import pytest +from _test_code_level_metrics import ( + CLASS_INSTANCE, + CLASS_INSTANCE_CALLABLE, + TYPE_CONSTRUCTOR_CALLABLE_CLASS_INSTANCE, + TYPE_CONSTRUCTOR_CLASS_INSTANCE, + ExerciseClass, + ExerciseClassCallable, + ExerciseTypeConstructor, + ExerciseTypeConstructorCallable, +) +from _test_code_level_metrics import __file__ as FILE_PATH +from _test_code_level_metrics import ( + exercise_function, + exercise_lambda, + exercise_partial, +) +from testing_support.fixtures import dt_enabled, override_application_settings from testing_support.validators.validate_span_events import validate_span_events +import newrelic.packages.six as six from newrelic.api.background_task import background_task -from newrelic.api.function_trace import FunctionTrace, FunctionTraceWrapper - -from _test_code_level_metrics import exercise_function, CLASS_INSTANCE, CLASS_INSTANCE_CALLABLE, exercise_lambda, exercise_partial, ExerciseClass, ExerciseClassCallable, __file__ as FILE_PATH - +from newrelic.api.function_trace import FunctionTrace is_pypy = hasattr(sys, "pypy_version_info") NAMESPACE = "_test_code_level_metrics" CLASS_NAMESPACE = ".".join((NAMESPACE, "ExerciseClass")) CALLABLE_CLASS_NAMESPACE = ".".join((NAMESPACE, "ExerciseClassCallable")) +TYPE_CONSTRUCTOR_NAMESPACE = ".".join((NAMESPACE, "ExerciseTypeConstructor")) +TYPE_CONSTRUCTOR_CALLABLE_NAMESPACE = ".".join((NAMESPACE, "ExerciseTypeConstructorCallable")) FUZZY_NAMESPACE = CLASS_NAMESPACE if six.PY3 else NAMESPACE if FILE_PATH.endswith(".pyc"): FILE_PATH = FILE_PATH[:-1] @@ -39,115 +54,166 @@ BUILTIN_ATTRS = {"code.filepath": "", "code.lineno": None} if not is_pypy else {} + def merge_dicts(A, B): d = {} d.update(A) d.update(B) return d -@pytest.mark.parametrize( - "func,args,agents", - ( - ( # Function - exercise_function, - (), - { - "code.filepath": 
FILE_PATH, - "code.function": "exercise_function", - "code.lineno": 16, - "code.namespace": NAMESPACE, - }, - ), - ( # Method - CLASS_INSTANCE.exercise_method, - (), - { - "code.filepath": FILE_PATH, - "code.function": "exercise_method", - "code.lineno": 21, - "code.namespace": CLASS_NAMESPACE, - }, - ), - ( # Static Method - CLASS_INSTANCE.exercise_static_method, - (), - { - "code.filepath": FILE_PATH, - "code.function": "exercise_static_method", - "code.lineno": 24, - "code.namespace": FUZZY_NAMESPACE, - }, - ), - ( # Class Method - ExerciseClass.exercise_class_method, - (), - { - "code.filepath": FILE_PATH, - "code.function": "exercise_class_method", - "code.lineno": 28, - "code.namespace": CLASS_NAMESPACE, - }, - ), - ( # Callable object - CLASS_INSTANCE_CALLABLE, - (), - { - "code.filepath": FILE_PATH, - "code.function": "__call__", - "code.lineno": 34, - "code.namespace": CALLABLE_CLASS_NAMESPACE, - }, - ), - ( # Lambda - exercise_lambda, - (), - { - "code.filepath": FILE_PATH, - "code.function": "", - "code.lineno": 40, - "code.namespace": NAMESPACE, - }, - ), - ( # Functools Partials - exercise_partial, - (), + +@pytest.fixture +def extract(): + def _extract(obj): + with FunctionTrace("_test", source=obj): + pass + + return _extract + + +_TEST_BASIC_CALLABLES = { + "function": ( + exercise_function, + (), + { + "code.filepath": FILE_PATH, + "code.function": "exercise_function", + "code.lineno": 17, + "code.namespace": NAMESPACE, + }, + ), + "lambda": ( + exercise_lambda, + (), + { + "code.filepath": FILE_PATH, + "code.function": "", + "code.lineno": 75, + "code.namespace": NAMESPACE, + }, + ), + "partial": ( + exercise_partial, + (), + { + "code.filepath": FILE_PATH, + "code.function": "exercise_function", + "code.lineno": 17, + "code.namespace": NAMESPACE, + }, + ), + "builtin_function": ( + max, + (1, 2), + merge_dicts( { - "code.filepath": FILE_PATH, - "code.function": "exercise_function", - "code.lineno": 16, - "code.namespace": NAMESPACE, - }, - ), - ( 
# Top Level Builtin - max, - (1, 2), - merge_dicts({ "code.function": "max", "code.namespace": "builtins" if six.PY3 else "__builtin__", - }, BUILTIN_ATTRS), + }, + BUILTIN_ATTRS, ), - ( # Module Level Builtin - sqlite3.connect, - (":memory:",), - merge_dicts({ + ), + "builtin_module_function": ( + sqlite3.connect, + (":memory:",), + merge_dicts( + { "code.function": "connect", "code.namespace": "_sqlite3", - }, BUILTIN_ATTRS), + }, + BUILTIN_ATTRS, ), - ( # Builtin Method - SQLITE_CONNECTION.__enter__, - (), - merge_dicts({ + ), +} + + +@pytest.mark.parametrize( + "func,args,agents", + [pytest.param(*args, id=id_) for id_, args in six.iteritems(_TEST_BASIC_CALLABLES)], +) +def test_code_level_metrics_basic_callables(func, args, agents, extract): + @override_application_settings( + { + "code_level_metrics.enabled": True, + } + ) + @dt_enabled + @validate_span_events( + count=1, + exact_agents=agents, + ) + @background_task() + def _test(): + extract(func) + + _test() + + +_TEST_METHODS = { + "method": ( + CLASS_INSTANCE.exercise_method, + (), + { + "code.filepath": FILE_PATH, + "code.function": "exercise_method", + "code.lineno": 22, + "code.namespace": CLASS_NAMESPACE, + }, + ), + "static_method": ( + CLASS_INSTANCE.exercise_static_method, + (), + { + "code.filepath": FILE_PATH, + "code.function": "exercise_static_method", + "code.lineno": 25, + "code.namespace": FUZZY_NAMESPACE, + }, + ), + "class_method": ( + ExerciseClass.exercise_class_method, + (), + { + "code.filepath": FILE_PATH, + "code.function": "exercise_class_method", + "code.lineno": 29, + "code.namespace": CLASS_NAMESPACE, + }, + ), + "call_method": ( + CLASS_INSTANCE_CALLABLE, + (), + { + "code.filepath": FILE_PATH, + "code.function": "__call__", + "code.lineno": 35, + "code.namespace": CALLABLE_CLASS_NAMESPACE, + }, + ), + "builtin_method": ( + SQLITE_CONNECTION.__enter__, + (), + merge_dicts( + { "code.function": "__enter__", "code.namespace": "sqlite3.Connection" if not is_pypy else 
"_sqlite3.Connection", - }, BUILTIN_ATTRS), + }, + BUILTIN_ATTRS, ), ), +} + + +@pytest.mark.parametrize( + "func,args,agents", + [pytest.param(*args, id=id_) for id_, args in six.iteritems(_TEST_METHODS)], ) -def test_code_level_metrics_callables(func, args, agents): - @override_application_settings({ - "code_level_metrics.enabled": True, - }) +def test_code_level_metrics_methods(func, args, agents, extract): + @override_application_settings( + { + "code_level_metrics.enabled": True, + } + ) @dt_enabled @validate_span_events( count=1, @@ -155,47 +221,145 @@ def test_code_level_metrics_callables(func, args, agents): ) @background_task() def _test(): - FunctionTraceWrapper(func)(*args) + extract(func) _test() +_TEST_TYPE_CONSTRUCTOR_METHODS = { + "method": ( + TYPE_CONSTRUCTOR_CLASS_INSTANCE.exercise_method, + (), + { + "code.filepath": FILE_PATH, + "code.function": "exercise_method", + "code.lineno": 39, + "code.namespace": TYPE_CONSTRUCTOR_NAMESPACE, + }, + ), + "static_method": ( + TYPE_CONSTRUCTOR_CLASS_INSTANCE.exercise_static_method, + (), + { + "code.filepath": FILE_PATH, + "code.function": "exercise_static_method", + "code.lineno": 43, + "code.namespace": NAMESPACE, + }, + ), + "class_method": ( + ExerciseTypeConstructor.exercise_class_method, + (), + { + "code.filepath": FILE_PATH, + "code.function": "exercise_class_method", + "code.lineno": 48, + "code.namespace": TYPE_CONSTRUCTOR_NAMESPACE, + }, + ), + "lambda_method": ( + ExerciseTypeConstructor.exercise_lambda, + (), + { + "code.filepath": FILE_PATH, + "code.function": "", + "code.lineno": 61, + # Lambdas behave strangely in type constructors on Python 2 and use the class namespace. 
+ "code.namespace": NAMESPACE if six.PY3 else TYPE_CONSTRUCTOR_NAMESPACE, + }, + ), + "call_method": ( + TYPE_CONSTRUCTOR_CALLABLE_CLASS_INSTANCE, + (), + { + "code.filepath": FILE_PATH, + "code.function": "__call__", + "code.lineno": 53, + "code.namespace": TYPE_CONSTRUCTOR_CALLABLE_NAMESPACE, + }, + ), +} + + @pytest.mark.parametrize( - "obj,agents", - ( - ( # Class with __call__ - ExerciseClassCallable, - { - "code.filepath": FILE_PATH, - "code.function": "ExerciseClassCallable", - "code.lineno": 33, - "code.namespace":NAMESPACE, - }, - ), - ( # Class without __call__ - ExerciseClass, - { - "code.filepath": FILE_PATH, - "code.function": "ExerciseClass", - "code.lineno": 20, - "code.namespace": NAMESPACE, - }, - ), - ( # Non-callable Object instance - CLASS_INSTANCE, - { - "code.filepath": FILE_PATH, - "code.function": "ExerciseClass", - "code.lineno": 20, - "code.namespace": NAMESPACE, - }, - ), + "func,args,agents", + [pytest.param(*args, id=id_) for id_, args in six.iteritems(_TEST_TYPE_CONSTRUCTOR_METHODS)], +) +def test_code_level_metrics_type_constructor_methods(func, args, agents, extract): + @override_application_settings( + { + "code_level_metrics.enabled": True, + } + ) + @dt_enabled + @validate_span_events( + count=1, + exact_agents=agents, + ) + @background_task() + def _test(): + extract(func) + + _test() + + +_TEST_OBJECTS = { + "class": ( + ExerciseClass, + { + "code.filepath": FILE_PATH, + "code.function": "ExerciseClass", + "code.lineno": 21, + "code.namespace": NAMESPACE, + }, + ), + "callable_class": ( + ExerciseClassCallable, + { + "code.filepath": FILE_PATH, + "code.function": "ExerciseClassCallable", + "code.lineno": 34, + "code.namespace": NAMESPACE, + }, + ), + "type_constructor_class": ( + ExerciseTypeConstructor, + { + "code.filepath": FILE_PATH, + "code.function": "ExerciseTypeConstructor", + "code.namespace": NAMESPACE, + }, + ), + "type_constructor_class_callable_class": ( + ExerciseTypeConstructorCallable, + { + "code.filepath": 
FILE_PATH, + "code.function": "ExerciseTypeConstructorCallable", + "code.namespace": NAMESPACE, + }, ), + "non_callable_object": ( + CLASS_INSTANCE, + { + "code.filepath": FILE_PATH, + "code.function": "ExerciseClass", + "code.lineno": 21, + "code.namespace": NAMESPACE, + }, + ), +} + + +@pytest.mark.parametrize( + "obj,agents", + [pytest.param(*args, id=id_) for id_, args in six.iteritems(_TEST_OBJECTS)], ) -def test_code_level_metrics_objects(obj, agents): - @override_application_settings({ - "code_level_metrics.enabled": True, - }) +def test_code_level_metrics_objects(obj, agents, extract): + @override_application_settings( + { + "code_level_metrics.enabled": True, + } + ) @dt_enabled @validate_span_events( count=1, @@ -203,7 +367,6 @@ def test_code_level_metrics_objects(obj, agents): ) @background_task() def _test(): - with FunctionTrace("_test", source=obj): - pass - - _test() \ No newline at end of file + extract(obj) + + _test() From 9cbae0106069b4c7f2aec818b53a31e48908ee0d Mon Sep 17 00:00:00 2001 From: Lalleh Rafeei <84813886+lrafeei@users.noreply.github.com> Date: Tue, 3 Jan 2023 14:07:44 -0800 Subject: [PATCH 034/108] Fix sanic and starlette tests (#734) * Fix sanic tests * Tweak test fix for sanic * Remove test for v18.12 in sanic (no longer supported) * Pin starlette latest to v0.23.1 (for now) * Add comment in tox about pinned starlette version --- tests/framework_sanic/test_application.py | 7 +++++-- tox.ini | 5 +++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/tests/framework_sanic/test_application.py b/tests/framework_sanic/test_application.py index 5b19ada2b..a949d91d0 100644 --- a/tests/framework_sanic/test_application.py +++ b/tests/framework_sanic/test_application.py @@ -38,14 +38,17 @@ from newrelic.api.application import application_instance from newrelic.api.external_trace import ExternalTrace from newrelic.api.transaction import Transaction +from newrelic.common.package_version_utils import get_package_version from 
newrelic.core.config import global_settings -sanic_21 = int(sanic.__version__.split(".", 1)[0]) >= 21 +SANIC_VERSION = tuple(map(int, get_package_version("sanic").split("."))) +sanic_21 = SANIC_VERSION >= (21,) +sanic_v19_to_v22_12 = SANIC_VERSION >= (19,) and SANIC_VERSION < (22, 12) BASE_METRICS = [ ("Function/_target_application:index", 1), - ("Function/_target_application:request_middleware", 1 if int(sanic.__version__.split(".", 1)[0]) > 18 else 2), + ("Function/_target_application:request_middleware", 1 if sanic_v19_to_v22_12 else 2), ] FRAMEWORK_METRICS = [ ("Python/Framework/Sanic/%s" % sanic.__version__, 1), diff --git a/tox.ini b/tox.ini index c815defc2..2fd6d201a 100644 --- a/tox.ini +++ b/tox.ini @@ -143,7 +143,7 @@ envlist = python-framework_pyramid-{pypy,py27,py38}-Pyramid0104, python-framework_pyramid-{pypy,py27,pypy37,py37,py38,py39,py310,py311}-Pyramid0110-cornice, python-framework_pyramid-{py37,py38,py39,py310,py311,pypy37}-Pyramidmaster, - python-framework_sanic-{py38,pypy37}-sanic{190301,1906,1812,1912,200904,210300,2109,2112,2203,2290}, + python-framework_sanic-{py38,pypy37}-sanic{190301,1906,1912,200904,210300,2109,2112,2203,2290}, python-framework_sanic-{py37,py38,py39,py310,py311,pypy37}-saniclatest, python-framework_starlette-{py310,pypy37}-starlette{0014,0015,0019}, python-framework_starlette-{py37,py38}-starlette{002001}, @@ -357,7 +357,8 @@ deps = framework_starlette-starlette0015: starlette<0.16 framework_starlette-starlette0019: starlette<0.20 framework_starlette-starlette002001: starlette==0.20.1 - framework_starlette-starlettelatest: starlette + ; Starlette latest version temporarily pinned + framework_starlette-starlettelatest: starlette<0.23.1 framework_strawberry: starlette framework_strawberry-strawberrylatest: strawberry-graphql framework_tornado: pycurl From d7987b9fd50e4c5c80100e540a1c94115417c5c4 Mon Sep 17 00:00:00 2001 From: Lalleh Rafeei <84813886+lrafeei@users.noreply.github.com> Date: Mon, 9 Jan 2023 09:50:16 -0800 
Subject: [PATCH 035/108] Add methods to instrument (#738) --- newrelic/hooks/datastore_redis.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/newrelic/hooks/datastore_redis.py b/newrelic/hooks/datastore_redis.py index 26d1facb0..8c8b6f7a6 100644 --- a/newrelic/hooks/datastore_redis.py +++ b/newrelic/hooks/datastore_redis.py @@ -218,7 +218,12 @@ "insertnx", "keys", "lastsave", + "latency_doctor", + "latency_graph", "latency_histogram", + "latency_history", + "latency_latest", + "latency_reset", "lcs", "lindex", "linsert", From 045b8f8d5d65ed5496816a959e4cd2059066b7b0 Mon Sep 17 00:00:00 2001 From: Lalleh Rafeei <84813886+lrafeei@users.noreply.github.com> Date: Wed, 11 Jan 2023 14:58:23 -0800 Subject: [PATCH 036/108] Add card to instrumented methods in Redis (#740) --- newrelic/hooks/datastore_redis.py | 1 + 1 file changed, 1 insertion(+) diff --git a/newrelic/hooks/datastore_redis.py b/newrelic/hooks/datastore_redis.py index 8c8b6f7a6..23a6dfb77 100644 --- a/newrelic/hooks/datastore_redis.py +++ b/newrelic/hooks/datastore_redis.py @@ -72,6 +72,7 @@ "bzmpop", "bzpopmax", "bzpopmin", + "card", "cdf", "clear", "client_getname", From 05d8569b1994fd9bfdea18b22259c58221e6cf59 Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Wed, 18 Jan 2023 11:33:04 -0800 Subject: [PATCH 037/108] Add DevContainer (#711) * Add devcontainer setup * Add newrelic env vars to passenv * Add default extensions * Add devcontainer instructions to contributing docs * Convert smart quotes in contributing docs. 
* Apply proper RST formatting * [Mega-Linter] Apply linters fixes * Add GHCR to prerequisites * [Mega-Linter] Apply linters fixes * Bump tests Co-authored-by: TimPansino --- .devcontainer/Dockerfile | 4 + .devcontainer/devcontainer.json | 35 +++++ CONTRIBUTING.rst | 244 +++++++++++++++++++++++++------- 3 files changed, 228 insertions(+), 55 deletions(-) create mode 100644 .devcontainer/Dockerfile create mode 100644 .devcontainer/devcontainer.json diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile new file mode 100644 index 000000000..f42af328e --- /dev/null +++ b/.devcontainer/Dockerfile @@ -0,0 +1,4 @@ +ARG IMAGE=ghcr.io/newrelic-experimental/pyenv-devcontainer:latest + +# To target other architectures, change the --platform directive in the Dockerfile. +FROM --platform=linux/amd64 ${IMAGE} diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json new file mode 100644 index 000000000..92a8cdee4 --- /dev/null +++ b/.devcontainer/devcontainer.json @@ -0,0 +1,35 @@ +// For format details, see https://containers.dev/implementors/json_reference/. +{ + "name": "pyenv", + "build":{ + // To target other architectures, change the --platform directive in the Dockerfile. 
+ "dockerfile": "Dockerfile", + "args": { + "IMAGE": "ghcr.io/newrelic-experimental/pyenv-devcontainer:latest" + } + }, + "remoteUser": "vscode", + "runArgs": ["--network=host"], + "features": { + // Available Features: https://containers.dev/features + // "ghcr.io/devcontainers/features/docker-from-docker:1": {"moby": false}, + // "ghcr.io/devcontainers/features/aws-cli:1": {}, + // "ghcr.io/devcontainers/features/github-cli:1": {} + }, + "containerEnv": { + "NEW_RELIC_HOST": "${localEnv:NEW_RELIC_HOST}", + "NEW_RELIC_LICENSE_KEY": "${localEnv:NEW_RELIC_LICENSE_KEY}", + "NEW_RELIC_INSERT_KEY": "${localEnv:NEW_RELIC_INSERT_KEY}", + "NEW_RELIC_DEVELOPER_MODE": "${localEnv:NEW_RELIC_DEVELOPER_MODE}" + }, + "customizations": { + "vscode": { + "settings": {}, + "extensions": [ + "ms-python.python", + "ms-vsliveshare.vsliveshare", + "eamodio.gitlens" + ] + } + } +} diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index af5082362..5e95d3806 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -1,54 +1,85 @@ -Contributing to the Python Agent -================================= +################################## + Contributing to the Python Agent +################################## -Thanks for your interest in contributing to the ``New Relic Python Agent``! We look forward to engaging with you. +Thanks for your interest in contributing to the ``New Relic Python +Agent``! We look forward to engaging with you. -How to Contribute ------------------ +******************* + How to Contribute +******************* Contributions are always welcome. Before contributing please read the -`code of conduct `__ and `search the issue tracker <../../issues>`__; your issue may have already been discussed or fixed in `main`. To contribute, `fork `__ this repository, commit your changes, and `send a Pull Request `__. 
- -Note that our `code of conduct `__ applies to all platforms and venues related to this project; please follow it in all your interactions with the project and its participants. - -How to Get Help or Ask Questions --------------------------------- +`code of conduct +`__ +and `search the issue tracker <../../issues>`__; your issue may have +already been discussed or fixed in `main`. To contribute, `fork +`__ this repository, +commit your changes, and `send a Pull Request +`__. + +Note that our `code of conduct +`__ +applies to all platforms and venues related to this project; please +follow it in all your interactions with the project and its +participants. + +********************************** + How to Get Help or Ask Questions +********************************** Do you have questions or are you experiencing unexpected behaviors after modifying this Open Source Software? Please engage with the “Build on -New Relic” space in the `Explorers -Hub `__, -New Relic’s Forum. Posts are publicly viewable by anyone, please do not +New Relic” space in the `Explorers Hub +`__, +New Relic's Forum. Posts are publicly viewable by anyone, please do not include PII or sensitive information in your forum post. -Contributor License Agreement (“CLA”) -------------------------------------- - -We’d love to get your contributions to improve the Python Agent! Keep in mind that when you submit your Pull Request, you'll need to sign the CLA via the click-through using CLA-Assistant. You only have to sign the CLA one time per project. If you'd like to execute our corporate CLA, or if you have any questions, please drop us an email at opensource@newrelic.com. - -For more information about CLAs, please check out Alex Russell’s excellent post, -`Why Do I Need to Sign This? `__. 
- -Feature Requests ----------------- - -Feature requests should be submitted in the `Issue tracker <../../issues>`__, with a description of the expected behavior & use case, where they’ll remain closed until sufficient interest, `e.g. :+1: reactions `__, has been `shown by the community <../../issues?q=label%3A%22votes+needed%22+sort%3Areactions-%2B1-desc>`__. Before submitting an Issue, please search for similar ones in the -`closed issues <../../issues?q=is%3Aissue+is%3Aclosed+label%3Aenhancement>`__. - -Filing Issues & Bug Reports ---------------------------- +*************************************** + Contributor License Agreement (“CLA”) +*************************************** + +We'd love to get your contributions to improve the Python Agent! Keep in +mind that when you submit your Pull Request, you'll need to sign the CLA +via the click-through using CLA-Assistant. You only have to sign the CLA +one time per project. If you'd like to execute our corporate CLA, or if +you have any questions, please drop us an email at +opensource@newrelic.com. + +For more information about CLAs, please check out Alex Russell's +excellent post, `Why Do I Need to Sign This? +`__. + +****************** + Feature Requests +****************** + +Feature requests should be submitted in the `Issue tracker +<../../issues>`__, with a description of the expected behavior & use +case, where they'll remain closed until sufficient interest, `e.g. :+1: +reactions +`__, +has been `shown by the community +<../../issues?q=label%3A%22votes+needed%22+sort%3Areactions-%2B1-desc>`__. +Before submitting an Issue, please search for similar ones in the +`closed issues +<../../issues?q=is%3Aissue+is%3Aclosed+label%3Aenhancement>`__. + +***************************** + Filing Issues & Bug Reports +***************************** We use GitHub issues to track public issues and bugs. If possible, please provide a link to an example app or gist that reproduces the issue. 
When filing an issue, please ensure your description is clear and includes the following information. -* Project version (ex: 1.4.0) -* Custom configurations (ex: flag=true) -* Any modifications made to the Python Agent +- Project version (ex: 1.4.0) +- Custom configurations (ex: flag=true) +- Any modifications made to the Python Agent A note about vulnerabilities -^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +============================ New Relic is committed to the security of our customers and their data. We believe that providing coordinated disclosure by security researchers @@ -56,12 +87,13 @@ and engaging with the security community are important means to achieve our security goals. If you believe you have found a security vulnerability in this project -or any of New Relic’s products or websites, we welcome and greatly -appreciate you reporting it to New Relic through -`HackerOne `__. +or any of New Relic's products or websites, we welcome and greatly +appreciate you reporting it to New Relic through `HackerOne +`__. -Setting Up Your Environment ---------------------------- +***************************** + Setting Up Your Environment +***************************** This Open Source Software can be used in a large number of environments, all of which have their own quirks and best practices. As such, while we @@ -69,8 +101,95 @@ are happy to provide documentation and assistance for unmodified Open Source Software, we cannot provide support for your specific environment. -Pull Request Guidelines ------------------------ +******************************* + Developing Inside a Container +******************************* + +To avoid the issues involved with setting up a local environment, +consider using our prebuilt development container to easily create an +environment on demand with a wide selection of Python versions +installed. This also comes with the `tox +`__ tool (See Testing Guidelines) and a +few packages preinstalled. 
+ +While we cannot provide direct support in setting up your environment to +work with this container, we develop it in the open and provide this +documentation to help reduce the setup burden on new contributors. + +Prerequisites: +============== + +#. Install `Docker `__ for you local operating + system. + +#. Login to the `GitHub Container Registry + `__ + through Docker. + +#. Install Either: + - `VS Code `__ onto your local + system (recommended). + + - The `Dev Container CLI + `__ in your terminal. + (Requires a local copy of `npm + `__.) + +Steps for VS Code: +================== + +#. Ensure Docker is running. + +#. Install the `VS Code Extension for Dev Containers + `__ + into VS Code. + +#. In VS Code, open the command pallette (Ctrl-Shift-P on Windows/Linux + or Cmd-Shift-P on Mac) and search for and run "Dev Containers: + Rebuild and Reopen in Container". + +#. Wait for the container to build and start. This may take a long time + to pull the first time the container is run, subsequent runs should + be faster thanks to caching. + +#. To update your container, open the command pallette and run "Dev + Containers: Rebuild Without Cache and Reopen in Container". + +Steps for Command Line Editor Users (vim, etc.): +================================================ + +#. Ensure Docker is running. + +#. From the root of this repository, run ``devcontainer up + --workspace-folder=.`` to start the container. The running container + ID will be displayed, which is useful for subsequent steps. + +#. To gain shell access to the container, run ``docker exec -it + /bin/bash``. Alternative shells include ``zsh`` and + ``fish``. + +#. Navigate to the ``/workspaces`` folder to find your source code. + +#. To stop the container, run ``exit`` on any open shells and then run + ``docker stop ``. ``docker ps`` may be helpful for + finding the ID if you've lost it. + +Personalizing Your Container: +============================= + +#. 
If you use a dotfiles repository (such as `chezmoi + `__), you can configure your container to + clone and install your dotfiles using `VS Code dotfile settings + `__. + +#. To install extra packages and features, you can edit your local copy + of the .devcontainer/devcontainer.json file to use specific `Dev + Container Features `__. A few common + needs are already included but commented out. + +************************* + Pull Request Guidelines +************************* Before we can accept a pull request, you must sign our `Contributor Licensing Agreement <#contributor-license-agreement-cla>`__, if you have @@ -79,28 +198,43 @@ same Apache 2.0 license as we use for this project in general. Minimally, the `test suite <#testing-guidelines>`__ must pass for us to accept a PR. Ideally, we would love it if you also added appropriate -tests if you’re implementing a feature! +tests if you're implementing a feature! -Please note that integration tests will be run internally before contributions are accepted. +Please note that integration tests will be run internally before +contributions are accepted. Additionally: -1. Ensure any install or build dependencies are removed before the end of the layer when doing a build. -2. Increase the version numbers in any examples files and the README.md to the new version that this Pull Request would represent. The versioning scheme we use is `SemVer `__. -3. You may merge the Pull Request in once you have the sign-off of two other developers, or if you do not have permission to do that, you may request the second reviewer to merge it for you. +#. Ensure any install or build dependencies are removed before the end + of the layer when doing a build. + +#. Increase the version numbers in any examples files and the README.md + to the new version that this Pull Request would represent. The + versioning scheme we use is `SemVer `__. + +#. 
You may merge the Pull Request in once you have the sign-off of two + other developers, or if you do not have permission to do that, you + may request the second reviewer to merge it for you. -Testing Guidelines ------------------- +******************** + Testing Guidelines +******************** The Python Agent uses `tox `__ for -testing. The repository uses tests in -`tests/ `__. +testing. The repository uses tests in tests/. -You can run these tests by entering the `tests/ `__ directory and then entering the directory of the tests you want to run. Then, run the following command: +You can run these tests by entering the tests/ directory and then +entering the directory of the tests you want to run. Then, run the +following command: -tox -c tox.ini -e [test environment] +``tox -c tox.ini -e [test environment]`` -Slack ------ +******* + Slack +******* -We host a public Slack with a dedicated channel for contributors and maintainers of open source projects hosted by New Relic. If you are contributing to this project, you're welcome to request access to the #oss-contributors channel in the newrelicusers.slack.com workspace. To request access, see https://newrelicusers-signup.herokuapp.com/. +We host a public Slack with a dedicated channel for contributors and +maintainers of open source projects hosted by New Relic. If you are +contributing to this project, you're welcome to request access to the +#oss-contributors channel in the newrelicusers.slack.com workspace. To +request access, see https://newrelicusers-signup.herokuapp.com/. 
From b45bdd8bfbbee5ade09a0b981f60c9955358a21f Mon Sep 17 00:00:00 2001 From: Lalleh Rafeei <84813886+lrafeei@users.noreply.github.com> Date: Wed, 18 Jan 2023 13:41:42 -0800 Subject: [PATCH 038/108] Module classmethod fix (#662) * Fix function_wrapper calls to module * Fix wrapper in pika hook * Revert elasticsearch instrumentation * Revert some wrap_function_wrappers to orig * Remove comments/breakpoints * Fix hooks in elasticsearch Co-authored-by: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> --- newrelic/hooks/datastore_aredis.py | 2 +- newrelic/hooks/datastore_bmemcached.py | 23 +- newrelic/hooks/datastore_elasticsearch.py | 362 +++++++++++--------- newrelic/hooks/datastore_memcache.py | 46 ++- newrelic/hooks/datastore_pyelasticsearch.py | 91 ++--- newrelic/hooks/datastore_pylibmc.py | 29 +- newrelic/hooks/datastore_pymemcache.py | 28 +- newrelic/hooks/datastore_pymongo.py | 73 ++-- newrelic/hooks/datastore_pysolr.py | 18 +- newrelic/hooks/datastore_solrpy.py | 17 +- newrelic/hooks/messagebroker_pika.py | 235 ++++++------- 11 files changed, 516 insertions(+), 408 deletions(-) diff --git a/newrelic/hooks/datastore_aredis.py b/newrelic/hooks/datastore_aredis.py index a63da57c7..236cbf3f8 100644 --- a/newrelic/hooks/datastore_aredis.py +++ b/newrelic/hooks/datastore_aredis.py @@ -98,4 +98,4 @@ def instrument_aredis_client(module): def instrument_aredis_connection(module): - wrap_function_wrapper(module.Connection, "send_command", wrap_Connection_send_command) + wrap_function_wrapper(module, "Connection.send_command", wrap_Connection_send_command) diff --git a/newrelic/hooks/datastore_bmemcached.py b/newrelic/hooks/datastore_bmemcached.py index c54947ab7..3091f0992 100644 --- a/newrelic/hooks/datastore_bmemcached.py +++ b/newrelic/hooks/datastore_bmemcached.py @@ -14,12 +14,25 @@ from newrelic.api.datastore_trace import wrap_datastore_trace -_memcache_client_methods = ('get', 'gets', 'get_multi', 'set', 'cas', - 'set_multi', 'add', 'replace', 
'delete', 'delete_multi', 'incr', - 'decr', 'flush_all', 'stats') +_memcache_client_methods = ( + "get", + "gets", + "get_multi", + "set", + "cas", + "set_multi", + "add", + "replace", + "delete", + "delete_multi", + "incr", + "decr", + "flush_all", + "stats", +) + def instrument_bmemcached_client(module): for name in _memcache_client_methods: if hasattr(module.Client, name): - wrap_datastore_trace(module.Client, name, - product='Memcached', target=None, operation=name) + wrap_datastore_trace(module, "Client.%s" % name, product="Memcached", target=None, operation=name) diff --git a/newrelic/hooks/datastore_elasticsearch.py b/newrelic/hooks/datastore_elasticsearch.py index 3db62bb90..b4c6f3bb6 100644 --- a/newrelic/hooks/datastore_elasticsearch.py +++ b/newrelic/hooks/datastore_elasticsearch.py @@ -12,11 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -from newrelic.packages import six - from newrelic.api.datastore_trace import DatastoreTrace from newrelic.api.transaction import current_transaction from newrelic.common.object_wrapper import wrap_function_wrapper +from newrelic.packages import six # An index name can be a string, None or a sequence. In the case of None # an empty string or '*', it is the same as using '_all'. When a string @@ -24,40 +23,48 @@ # obviously can also be more than one index name. Where we are certain # there is only a single index name we use it, otherwise we use 'other'. 
+ def _index_name(index): - if not index or index == '*': - return '_all' - if not isinstance(index, six.string_types) or ',' in index: - return 'other' + if not index or index == "*": + return "_all" + if not isinstance(index, six.string_types) or "," in index: + return "other" return index + def _extract_kwargs_index(*args, **kwargs): - return _index_name(kwargs.get('index')) + return _index_name(kwargs.get("index")) + def _extract_args_index(index=None, *args, **kwargs): return _index_name(index) + def _extract_args_body_index(body=None, index=None, *args, **kwargs): return _index_name(index) -def _extract_args_doctype_body_index(doc_type=None, body=None, index=None, - *args, **kwargs): + +def _extract_args_doctype_body_index(doc_type=None, body=None, index=None, *args, **kwargs): return _index_name(index) + def _extract_args_field_index(field=None, index=None, *args, **kwargs): return _index_name(index) -def _extract_args_name_body_index(name=None, body=None, index=None, - *args, **kwargs): + +def _extract_args_name_body_index(name=None, body=None, index=None, *args, **kwargs): return _index_name(index) + def _extract_args_name_index(name=None, index=None, *args, **kwargs): return _index_name(index) + def _extract_args_metric_index(metric=None, index=None, *args, **kwargs): return _index_name(index) -def wrap_elasticsearch_client_method(owner, name, arg_extractor, prefix=None): + +def wrap_elasticsearch_client_method(module, class_name, method_name, arg_extractor, prefix=None): def _nr_wrapper_Elasticsearch_method_(wrapped, instance, args, kwargs): transaction = current_transaction() @@ -76,18 +83,13 @@ def _nr_wrapper_Elasticsearch_method_(wrapped, instance, args, kwargs): index = arg_extractor(*args, **kwargs) if prefix: - operation = '%s.%s' % (prefix, name) + operation = "%s.%s" % (prefix, method_name) else: - operation = name + operation = method_name transaction._nr_datastore_instance_info = (None, None, None) - dt = DatastoreTrace( - 
product='Elasticsearch', - target=index, - operation=operation, - source=wrapped - ) + dt = DatastoreTrace(product="Elasticsearch", target=index, operation=operation, source=wrapped) with dt: result = wrapped(*args, **kwargs) @@ -100,200 +102,217 @@ def _nr_wrapper_Elasticsearch_method_(wrapped, instance, args, kwargs): return result - if hasattr(owner, name): - wrap_function_wrapper(owner, name, _nr_wrapper_Elasticsearch_method_) + wrap_function_wrapper(module, "%s.%s" % (class_name, method_name), _nr_wrapper_Elasticsearch_method_) + _elasticsearch_client_methods = ( - ('abort_benchmark', None), - ('benchmark', _extract_args_index), - ('bulk', None), - ('clear_scroll', None), - ('count', _extract_args_index), - ('count_percolate', _extract_args_index), - ('create', _extract_args_index), - ('delete', _extract_args_index), - ('delete_by_query', _extract_args_index), - ('delete_script', None), - ('delete_template', None), - ('exists', _extract_args_index), - ('explain', _extract_args_index), - ('get', _extract_args_index), - ('get_script', None), - ('get_source', _extract_args_index), - ('get_template', None), - ('index', _extract_args_index), - ('info', None), - ('list_benchmarks', _extract_args_index), - ('mget', None), - ('mlt', _extract_args_index), - ('mpercolate', _extract_args_body_index), - ('msearch', None), - ('mtermvectors', None), - ('percolate', _extract_args_index), - ('ping', None), - ('put_script', None), - ('put_template', None), - ('scroll', None), - ('search', _extract_args_index), - ('search_exists', _extract_args_index), - ('search_shards', _extract_args_index), - ('search_template', _extract_args_index), - ('suggest', _extract_args_body_index), - ('termvector', _extract_args_index), - ('termvectors', None), - ('update', _extract_args_index), + ("abort_benchmark", None), + ("benchmark", _extract_args_index), + ("bulk", None), + ("clear_scroll", None), + ("count", _extract_args_index), + ("count_percolate", _extract_args_index), + ("create", 
_extract_args_index), + ("delete", _extract_args_index), + ("delete_by_query", _extract_args_index), + ("delete_script", None), + ("delete_template", None), + ("exists", _extract_args_index), + ("explain", _extract_args_index), + ("get", _extract_args_index), + ("get_script", None), + ("get_source", _extract_args_index), + ("get_template", None), + ("index", _extract_args_index), + ("info", None), + ("list_benchmarks", _extract_args_index), + ("mget", None), + ("mlt", _extract_args_index), + ("mpercolate", _extract_args_body_index), + ("msearch", None), + ("mtermvectors", None), + ("percolate", _extract_args_index), + ("ping", None), + ("put_script", None), + ("put_template", None), + ("scroll", None), + ("search", _extract_args_index), + ("search_exists", _extract_args_index), + ("search_shards", _extract_args_index), + ("search_template", _extract_args_index), + ("suggest", _extract_args_body_index), + ("termvector", _extract_args_index), + ("termvectors", None), + ("update", _extract_args_index), ) + def instrument_elasticsearch_client(module): - for name, arg_extractor in _elasticsearch_client_methods: - wrap_elasticsearch_client_method(module.Elasticsearch, name, - arg_extractor) + for method_name, arg_extractor in _elasticsearch_client_methods: + if hasattr(getattr(module, "Elasticsearch"), method_name): + wrap_elasticsearch_client_method(module, "Elasticsearch", method_name, arg_extractor) + _elasticsearch_client_indices_methods = ( - ('analyze', _extract_args_index), - ('clear_cache', _extract_args_index), - ('close', _extract_args_index), - ('create', _extract_args_index), - ('delete', _extract_args_index), - ('delete_alias', _extract_args_index), - ('delete_mapping', _extract_args_index), - ('delete_template', None), - ('delete_warmer', _extract_args_index), - ('exists', _extract_args_index), - ('exists_alias', _extract_args_name_index), - ('exists_template', None), - ('exists_type', _extract_args_index), - ('flush', _extract_args_index), - ('get', 
_extract_args_index), - ('get_alias', _extract_args_index), - ('get_aliases', _extract_args_index), - ('get_mapping', _extract_args_index), - ('get_field_mapping', _extract_args_field_index), - ('get_settings', _extract_args_index), - ('get_template', None), - ('get_upgrade', _extract_args_index), - ('get_warmer', _extract_args_index), - ('open', _extract_args_index), - ('optimize', _extract_args_index), - ('put_alias', _extract_args_name_index), - ('put_mapping', _extract_args_doctype_body_index), - ('put_settings', _extract_args_body_index), - ('put_template', None), - ('put_warmer', _extract_args_name_body_index), - ('recovery', _extract_args_index), - ('refresh', _extract_args_index), - ('segments', _extract_args_index), - ('snapshot_index', _extract_args_index), - ('stats', _extract_args_index), - ('status', _extract_args_index), - ('update_aliases', None), - ('upgrade', _extract_args_index), - ('validate_query', _extract_args_index), + ("analyze", _extract_args_index), + ("clear_cache", _extract_args_index), + ("close", _extract_args_index), + ("create", _extract_args_index), + ("delete", _extract_args_index), + ("delete_alias", _extract_args_index), + ("delete_mapping", _extract_args_index), + ("delete_template", None), + ("delete_warmer", _extract_args_index), + ("exists", _extract_args_index), + ("exists_alias", _extract_args_name_index), + ("exists_template", None), + ("exists_type", _extract_args_index), + ("flush", _extract_args_index), + ("get", _extract_args_index), + ("get_alias", _extract_args_index), + ("get_aliases", _extract_args_index), + ("get_mapping", _extract_args_index), + ("get_field_mapping", _extract_args_field_index), + ("get_settings", _extract_args_index), + ("get_template", None), + ("get_upgrade", _extract_args_index), + ("get_warmer", _extract_args_index), + ("open", _extract_args_index), + ("optimize", _extract_args_index), + ("put_alias", _extract_args_name_index), + ("put_mapping", _extract_args_doctype_body_index), + 
("put_settings", _extract_args_body_index), + ("put_template", None), + ("put_warmer", _extract_args_name_body_index), + ("recovery", _extract_args_index), + ("refresh", _extract_args_index), + ("segments", _extract_args_index), + ("snapshot_index", _extract_args_index), + ("stats", _extract_args_index), + ("status", _extract_args_index), + ("update_aliases", None), + ("upgrade", _extract_args_index), + ("validate_query", _extract_args_index), ) + def instrument_elasticsearch_client_indices(module): - for name, arg_extractor in _elasticsearch_client_indices_methods: - wrap_elasticsearch_client_method(module.IndicesClient, name, - arg_extractor, 'indices') + for method_name, arg_extractor in _elasticsearch_client_indices_methods: + if hasattr(getattr(module, "IndicesClient"), method_name): + wrap_elasticsearch_client_method(module, "IndicesClient", method_name, arg_extractor, "indices") + _elasticsearch_client_cat_methods = ( - ('aliases', None), - ('allocation', None), - ('count', _extract_args_index), - ('fielddata', None), - ('health', None), - ('help', None), - ('indices', _extract_args_index), - ('master', None), - ('nodes', None), - ('pending_tasks', None), - ('plugins', None), - ('recovery', _extract_args_index), - ('shards', _extract_args_index), - ('segments', _extract_args_index), - ('thread_pool', None), + ("aliases", None), + ("allocation", None), + ("count", _extract_args_index), + ("fielddata", None), + ("health", None), + ("help", None), + ("indices", _extract_args_index), + ("master", None), + ("nodes", None), + ("pending_tasks", None), + ("plugins", None), + ("recovery", _extract_args_index), + ("shards", _extract_args_index), + ("segments", _extract_args_index), + ("thread_pool", None), ) + def instrument_elasticsearch_client_cat(module): - for name, arg_extractor in _elasticsearch_client_cat_methods: - wrap_elasticsearch_client_method(module.CatClient, name, - arg_extractor, 'cat') + for method_name, arg_extractor in 
_elasticsearch_client_cat_methods: + if hasattr(getattr(module, "CatClient"), method_name): + wrap_elasticsearch_client_method(module, "CatClient", method_name, arg_extractor, "cat") + _elasticsearch_client_cluster_methods = ( - ('get_settings', None), - ('health', _extract_args_index), - ('pending_tasks', None), - ('put_settings', None), - ('reroute', None), - ('state', _extract_args_metric_index), - ('stats', None), + ("get_settings", None), + ("health", _extract_args_index), + ("pending_tasks", None), + ("put_settings", None), + ("reroute", None), + ("state", _extract_args_metric_index), + ("stats", None), ) + def instrument_elasticsearch_client_cluster(module): - for name, arg_extractor in _elasticsearch_client_cluster_methods: - wrap_elasticsearch_client_method(module.ClusterClient, name, - arg_extractor, 'cluster') + for method_name, arg_extractor in _elasticsearch_client_cluster_methods: + if hasattr(getattr(module, "ClusterClient"), method_name): + wrap_elasticsearch_client_method(module, "ClusterClient", method_name, arg_extractor, "cluster") + _elasticsearch_client_nodes_methods = ( - ('hot_threads', None), - ('info', None), - ('shutdown', None), - ('stats', None), + ("hot_threads", None), + ("info", None), + ("shutdown", None), + ("stats", None), ) + def instrument_elasticsearch_client_nodes(module): - for name, arg_extractor in _elasticsearch_client_nodes_methods: - wrap_elasticsearch_client_method(module.NodesClient, name, - arg_extractor, 'nodes') + for method_name, arg_extractor in _elasticsearch_client_nodes_methods: + if hasattr(getattr(module, "NodesClient"), method_name): + wrap_elasticsearch_client_method(module, "NodesClient", method_name, arg_extractor, "nodes") + _elasticsearch_client_snapshot_methods = ( - ('create', None), - ('create_repository', None), - ('delete', None), - ('delete_repository', None), - ('get', None), - ('get_repository', None), - ('restore', None), - ('status', None), - ('verify_repository', None), + ("create", None), + 
("create_repository", None), + ("delete", None), + ("delete_repository", None), + ("get", None), + ("get_repository", None), + ("restore", None), + ("status", None), + ("verify_repository", None), ) + def instrument_elasticsearch_client_snapshot(module): - for name, arg_extractor in _elasticsearch_client_snapshot_methods: - wrap_elasticsearch_client_method(module.SnapshotClient, name, - arg_extractor, 'snapshot') + for method_name, arg_extractor in _elasticsearch_client_snapshot_methods: + if hasattr(getattr(module, "SnapshotClient"), method_name): + wrap_elasticsearch_client_method(module, "SnapshotClient", method_name, arg_extractor, "snapshot") + _elasticsearch_client_tasks_methods = ( - ('list', None), - ('cancel', None), - ('get', None), + ("list", None), + ("cancel", None), + ("get", None), ) + def instrument_elasticsearch_client_tasks(module): - for name, arg_extractor in _elasticsearch_client_tasks_methods: - wrap_elasticsearch_client_method(module.TasksClient, name, - arg_extractor, 'tasks') + for method_name, arg_extractor in _elasticsearch_client_tasks_methods: + if hasattr(getattr(module, "TasksClient"), method_name): + wrap_elasticsearch_client_method(module, "TasksClient", method_name, arg_extractor, "tasks") + _elasticsearch_client_ingest_methods = ( - ('get_pipeline', None), - ('put_pipeline', None), - ('delete_pipeline', None), - ('simulate', None), + ("get_pipeline", None), + ("put_pipeline", None), + ("delete_pipeline", None), + ("simulate", None), ) + def instrument_elasticsearch_client_ingest(module): - for name, arg_extractor in _elasticsearch_client_ingest_methods: - wrap_elasticsearch_client_method(module.IngestClient, name, - arg_extractor, 'ingest') + for method_name, arg_extractor in _elasticsearch_client_ingest_methods: + if hasattr(getattr(module, "IngestClient"), method_name): + wrap_elasticsearch_client_method(module, "IngestClient", method_name, arg_extractor, "ingest") + # # Instrumentation to get Datastore Instance Information # + 
def _nr_Connection__init__wrapper(wrapped, instance, args, kwargs): """Cache datastore instance info on Connection object""" - def _bind_params(host='localhost', port=9200, *args, **kwargs): + def _bind_params(host="localhost", port=9200, *args, **kwargs): return host, port host, port = _bind_params(*args, **kwargs) @@ -302,9 +321,10 @@ def _bind_params(host='localhost', port=9200, *args, **kwargs): return wrapped(*args, **kwargs) + def instrument_elasticsearch_connection_base(module): - wrap_function_wrapper(module.Connection, '__init__', - _nr_Connection__init__wrapper) + wrap_function_wrapper(module, "Connection.__init__", _nr_Connection__init__wrapper) + def _nr_get_connection_wrapper(wrapped, instance, args, kwargs): """Read instance info from Connection and stash on Transaction.""" @@ -324,12 +344,12 @@ def _nr_get_connection_wrapper(wrapped, instance, args, kwargs): host, port_path_or_id = conn._nr_host_port instance_info = (host, port_path_or_id, None) except: - instance_info = ('unknown', 'unknown', None) + instance_info = ("unknown", "unknown", None) transaction._nr_datastore_instance_info = instance_info return conn + def instrument_elasticsearch_transport(module): - wrap_function_wrapper(module.Transport, 'get_connection', - _nr_get_connection_wrapper) + wrap_function_wrapper(module, "Transport.get_connection", _nr_get_connection_wrapper) diff --git a/newrelic/hooks/datastore_memcache.py b/newrelic/hooks/datastore_memcache.py index 9d51aead4..90b2d43dc 100644 --- a/newrelic/hooks/datastore_memcache.py +++ b/newrelic/hooks/datastore_memcache.py @@ -14,19 +14,24 @@ from newrelic.api.datastore_trace import DatastoreTrace, wrap_datastore_trace from newrelic.api.transaction import current_transaction -from newrelic.common.object_wrapper import (wrap_object, FunctionWrapper, - wrap_function_wrapper) +from newrelic.common.object_wrapper import ( + FunctionWrapper, + wrap_function_wrapper, + wrap_object, +) + def _instance_info(memcache_host): try: host = 
memcache_host.ip port_path_or_id = str(memcache_host.port) except AttributeError: - host = 'localhost' + host = "localhost" port_path_or_id = str(memcache_host.address) return (host, port_path_or_id, None) + def _nr_get_server_wrapper(wrapped, instance, args, kwargs): transaction = current_transaction() @@ -46,14 +51,14 @@ def _nr_get_server_wrapper(wrapped, instance, args, kwargs): if tracer_settings.instance_reporting.enabled and host is not None: instance_info = _instance_info(host) except: - instance_info = ('unknown', 'unknown', None) + instance_info = ("unknown", "unknown", None) transaction._nr_datastore_instance_info = instance_info return result -def MemcacheSingleWrapper(wrapped, product, target, operation, module): +def MemcacheSingleWrapper(wrapped, product, target, operation, module): def _nr_datastore_trace_wrapper_(wrapped, instance, args, kwargs): transaction = current_transaction() @@ -76,26 +81,35 @@ def _nr_datastore_trace_wrapper_(wrapped, instance, args, kwargs): return FunctionWrapper(wrapped, _nr_datastore_trace_wrapper_) + def wrap_memcache_single(module, object_path, product, target, operation): - wrap_object(module.Client, object_path, MemcacheSingleWrapper, - (product, target, operation, module)) + wrap_object(module, "Client.%s" % object_path, MemcacheSingleWrapper, (product, target, operation, module)) + -_memcache_client_methods = ('delete', 'incr', 'decr', 'add', - 'append', 'prepend', 'replace', 'set', 'cas', 'get', 'gets') +_memcache_client_methods = ( + "delete", + "incr", + "decr", + "add", + "append", + "prepend", + "replace", + "set", + "cas", + "get", + "gets", +) -_memcache_multi_methods = ('delete_multi', 'get_multi', 'set_multi', - 'get_stats', 'get_slabs', 'flush_all') +_memcache_multi_methods = ("delete_multi", "get_multi", "set_multi", "get_stats", "get_slabs", "flush_all") def instrument_memcache(module): - wrap_function_wrapper(module.Client, '_get_server', _nr_get_server_wrapper) + wrap_function_wrapper(module, 
"Client._get_server", _nr_get_server_wrapper) for name in _memcache_client_methods: if hasattr(module.Client, name): - wrap_memcache_single(module, name, - product='Memcached', target=None, operation=name) + wrap_memcache_single(module, name, product="Memcached", target=None, operation=name) for name in _memcache_multi_methods: if hasattr(module.Client, name): - wrap_datastore_trace(module.Client, name, - product='Memcached', target=None, operation=name) + wrap_datastore_trace(module, "Client.%s" % name, product="Memcached", target=None, operation=name) diff --git a/newrelic/hooks/datastore_pyelasticsearch.py b/newrelic/hooks/datastore_pyelasticsearch.py index 37b7f3176..63e33a9bb 100644 --- a/newrelic/hooks/datastore_pyelasticsearch.py +++ b/newrelic/hooks/datastore_pyelasticsearch.py @@ -12,11 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -from newrelic.packages import six - from newrelic.api.datastore_trace import DatastoreTraceWrapper from newrelic.api.transaction import current_transaction from newrelic.common.object_wrapper import wrap_function_wrapper +from newrelic.packages import six # An index name can be a string, None or a sequence. In the case of None # an empty string or '*', it is the same as using '_all'. When a string @@ -24,58 +23,64 @@ # obviously can also be more than one index name. Where we are certain # there is only a single index name we use it, otherwise we use 'other'. 
+ def _index_name(index): - if not index or index == '*': - return '_all' - if not isinstance(index, six.string_types) or ',' in index: - return 'other' + if not index or index == "*": + return "_all" + if not isinstance(index, six.string_types) or "," in index: + return "other" return index + def _extract_kwargs_index(*args, **kwargs): - return _index_name(kwargs.get('index')) + return _index_name(kwargs.get("index")) + def _extract_args_index(index=None, *args, **kwargs): return _index_name(index) + def _extract_args_metric_index(metric=None, index=None, *args, **kwargs): return _index_name(index) + _elasticsearch_client_methods = ( - ('bulk', None), - ('bulk_index', _extract_args_index), - ('close_index', None), - ('cluster_state', _extract_args_metric_index), - ('count', _extract_kwargs_index), - ('create_index', _extract_args_index), - ('delete', _extract_args_index), - ('delete_all', _extract_args_index), - ('delete_all_indexes', None), - ('delete_by_query', _extract_args_index), - ('delete_index', _extract_args_index), - ('flush', _extract_args_index), - ('gateway_snapshot', _extract_args_index), - ('get', _extract_args_index), - ('get_aliases', _extract_args_index), - ('get_mapping', _extract_args_index), - ('get_settings', _extract_args_index), - ('health', _extract_args_index), - ('index', _extract_args_index), - ('more_like_this', _extract_args_index), - ('multi_get', None), - ('open_index', _extract_args_index), - ('optimize', _extract_args_index), - ('percolate', _extract_args_index), - ('put_mapping', _extract_args_index), - ('refresh', _extract_args_index), - ('search', _extract_kwargs_index), - ('send_request', None), - ('status', _extract_args_index), - ('update', _extract_args_index), - ('update_aliases', None), - ('update_all_settings', None), - ('update_settings', _extract_args_index), + ("bulk", None), + ("bulk_index", _extract_args_index), + ("close_index", None), + ("cluster_state", _extract_args_metric_index), + ("count", 
_extract_kwargs_index), + ("create_index", _extract_args_index), + ("delete", _extract_args_index), + ("delete_all", _extract_args_index), + ("delete_all_indexes", None), + ("delete_by_query", _extract_args_index), + ("delete_index", _extract_args_index), + ("flush", _extract_args_index), + ("gateway_snapshot", _extract_args_index), + ("get", _extract_args_index), + ("get_aliases", _extract_args_index), + ("get_mapping", _extract_args_index), + ("get_settings", _extract_args_index), + ("health", _extract_args_index), + ("index", _extract_args_index), + ("more_like_this", _extract_args_index), + ("multi_get", None), + ("open_index", _extract_args_index), + ("optimize", _extract_args_index), + ("percolate", _extract_args_index), + ("put_mapping", _extract_args_index), + ("refresh", _extract_args_index), + ("search", _extract_kwargs_index), + ("send_request", None), + ("status", _extract_args_index), + ("update", _extract_args_index), + ("update_aliases", None), + ("update_all_settings", None), + ("update_settings", _extract_args_index), ) + def wrap_elasticsearch_client_method(module, name, arg_extractor): def _nr_wrapper_ElasticSearch_method_(wrapped, instance, args, kwargs): transaction = current_transaction() @@ -94,11 +99,11 @@ def _nr_wrapper_ElasticSearch_method_(wrapped, instance, args, kwargs): else: index = arg_extractor(*args, **kwargs) - return DatastoreTraceWrapper(wrapped, product='Elasticsearch',target=index, operation=name)(*args, **kwargs) + return DatastoreTraceWrapper(wrapped, product="Elasticsearch", target=index, operation=name)(*args, **kwargs) if hasattr(module.ElasticSearch, name): - wrap_function_wrapper(module.ElasticSearch, name, - _nr_wrapper_ElasticSearch_method_) + wrap_function_wrapper(module, "ElasticSearch.%s" % name, _nr_wrapper_ElasticSearch_method_) + def instrument_pyelasticsearch_client(module): for name, arg_extractor in _elasticsearch_client_methods: diff --git a/newrelic/hooks/datastore_pylibmc.py 
b/newrelic/hooks/datastore_pylibmc.py index 3c8ab636b..3d42a70fb 100644 --- a/newrelic/hooks/datastore_pylibmc.py +++ b/newrelic/hooks/datastore_pylibmc.py @@ -14,13 +14,30 @@ from newrelic.api.datastore_trace import wrap_datastore_trace -_memcache_client_methods = ('get', 'gets', 'set', 'replace', 'add', - 'prepend', 'append', 'cas', 'delete', 'incr', 'decr', 'incr_multi', - 'get_multi', 'set_multi', 'add_multi', 'delete_multi', 'get_stats', - 'flush_all', 'touch') +_memcache_client_methods = ( + "get", + "gets", + "set", + "replace", + "add", + "prepend", + "append", + "cas", + "delete", + "incr", + "decr", + "incr_multi", + "get_multi", + "set_multi", + "add_multi", + "delete_multi", + "get_stats", + "flush_all", + "touch", +) + def instrument_pylibmc_client(module): for name in _memcache_client_methods: if hasattr(module.Client, name): - wrap_datastore_trace(module.Client, name, - product='Memcached', target=None, operation=name) + wrap_datastore_trace(module, "Client.%s" % name, product="Memcached", target=None, operation=name) diff --git a/newrelic/hooks/datastore_pymemcache.py b/newrelic/hooks/datastore_pymemcache.py index f33f135de..690e95d61 100644 --- a/newrelic/hooks/datastore_pymemcache.py +++ b/newrelic/hooks/datastore_pymemcache.py @@ -14,12 +14,30 @@ from newrelic.api.datastore_trace import wrap_datastore_trace -_memcache_client_methods = ('set', 'set_many', 'add', 'replace', 'append', - 'prepend', 'cas', 'get', 'get_many', 'gets', 'gets_many', 'delete', - 'delete_many', 'incr', 'decr', 'touch', 'stats', 'flush_all', 'quit') +_memcache_client_methods = ( + "set", + "set_many", + "add", + "replace", + "append", + "prepend", + "cas", + "get", + "get_many", + "gets", + "gets_many", + "delete", + "delete_many", + "incr", + "decr", + "touch", + "stats", + "flush_all", + "quit", +) + def instrument_pymemcache_client(module): for name in _memcache_client_methods: if hasattr(module.Client, name): - wrap_datastore_trace(module.Client, name, - 
product='Memcached', target=None, operation=name) + wrap_datastore_trace(module, "Client.%s" % name, product="Memcached", target=None, operation=name) diff --git a/newrelic/hooks/datastore_pymongo.py b/newrelic/hooks/datastore_pymongo.py index 66a86b7ab..c9c34b1fc 100644 --- a/newrelic/hooks/datastore_pymongo.py +++ b/newrelic/hooks/datastore_pymongo.py @@ -15,38 +15,70 @@ from newrelic.api.datastore_trace import wrap_datastore_trace from newrelic.api.function_trace import wrap_function_trace -_pymongo_client_methods = ('save', 'insert', 'update', 'drop', 'remove', - 'find_one', 'find', 'count', 'create_index', 'ensure_index', - 'drop_indexes', 'drop_index', 'reindex', 'index_information', - 'options', 'group', 'rename', 'distinct', 'map_reduce', - 'inline_map_reduce', 'find_and_modify', 'initialize_unordered_bulk_op', - 'initialize_ordered_bulk_op', 'bulk_write', 'insert_one', 'insert_many', - 'replace_one', 'update_one', 'update_many', 'delete_one', 'delete_many', - 'find_raw_batches', 'parallel_scan', 'create_indexes', 'list_indexes', - 'aggregate', 'aggregate_raw_batches', 'find_one_and_delete', - 'find_one_and_replace', 'find_one_and_update') +_pymongo_client_methods = ( + "save", + "insert", + "update", + "drop", + "remove", + "find_one", + "find", + "count", + "create_index", + "ensure_index", + "drop_indexes", + "drop_index", + "reindex", + "index_information", + "options", + "group", + "rename", + "distinct", + "map_reduce", + "inline_map_reduce", + "find_and_modify", + "initialize_unordered_bulk_op", + "initialize_ordered_bulk_op", + "bulk_write", + "insert_one", + "insert_many", + "replace_one", + "update_one", + "update_many", + "delete_one", + "delete_many", + "find_raw_batches", + "parallel_scan", + "create_indexes", + "list_indexes", + "aggregate", + "aggregate_raw_batches", + "find_one_and_delete", + "find_one_and_replace", + "find_one_and_update", +) def instrument_pymongo_connection(module): # Must name function explicitly as pymongo overrides the 
# __getattr__() method in a way that breaks introspection. - rollup = ('Datastore/all', 'Datastore/MongoDB/all') + rollup = ("Datastore/all", "Datastore/MongoDB/all") - wrap_function_trace(module, 'Connection.__init__', - name='%s:Connection.__init__' % module.__name__, - terminal=True, rollup=rollup) + wrap_function_trace( + module, "Connection.__init__", name="%s:Connection.__init__" % module.__name__, terminal=True, rollup=rollup + ) def instrument_pymongo_mongo_client(module): # Must name function explicitly as pymongo overrides the # __getattr__() method in a way that breaks introspection. - rollup = ('Datastore/all', 'Datastore/MongoDB/all') + rollup = ("Datastore/all", "Datastore/MongoDB/all") - wrap_function_trace(module, 'MongoClient.__init__', - name='%s:MongoClient.__init__' % module.__name__, - terminal=True, rollup=rollup) + wrap_function_trace( + module, "MongoClient.__init__", name="%s:MongoClient.__init__" % module.__name__, terminal=True, rollup=rollup + ) def instrument_pymongo_collection(module): @@ -55,5 +87,6 @@ def _collection_name(collection, *args, **kwargs): for name in _pymongo_client_methods: if hasattr(module.Collection, name): - wrap_datastore_trace(module.Collection, name, product='MongoDB', - target=_collection_name, operation=name) + wrap_datastore_trace( + module, "Collection.%s" % name, product="MongoDB", target=_collection_name, operation=name + ) diff --git a/newrelic/hooks/datastore_pysolr.py b/newrelic/hooks/datastore_pysolr.py index da29e37ed..7d4e8697d 100644 --- a/newrelic/hooks/datastore_pysolr.py +++ b/newrelic/hooks/datastore_pysolr.py @@ -14,21 +14,19 @@ from newrelic.api.datastore_trace import wrap_datastore_trace -_pysolr_client_methods = ('search', 'more_like_this', 'suggest_terms', 'add', -'delete', 'commit', 'optimize', 'extract') +_pysolr_client_methods = ("search", "more_like_this", "suggest_terms", "add", "delete", "commit", "optimize", "extract") + +_pysolr_admin_methods = ("status", "create", "reload", 
"rename", "swap", "unload", "load") -_pysolr_admin_methods = ('status', 'create', 'reload', 'rename', 'swap', - 'unload', 'load') def instrument_pysolr(module): for name in _pysolr_client_methods: if hasattr(module.Solr, name): - wrap_datastore_trace(module.Solr, name, - product='Solr', target=None, operation=name) + wrap_datastore_trace(module, "Solr.%s" % name, product="Solr", target=None, operation=name) - if hasattr(module, 'SolrCoreAdmin'): + if hasattr(module, "SolrCoreAdmin"): for name in _pysolr_admin_methods: if hasattr(module.SolrCoreAdmin, name): - wrap_datastore_trace(module.SolrCoreAdmin, name, - product='Solr', target=None, - operation='admin.%s' % name) + wrap_datastore_trace( + module, "SolrCoreAdmin.%s" % name, product="Solr", target=None, operation="admin.%s" % name + ) diff --git a/newrelic/hooks/datastore_solrpy.py b/newrelic/hooks/datastore_solrpy.py index 3b2ac9c17..74e808ae5 100644 --- a/newrelic/hooks/datastore_solrpy.py +++ b/newrelic/hooks/datastore_solrpy.py @@ -14,11 +14,20 @@ from newrelic.api.datastore_trace import wrap_datastore_trace -_solrpy_client_methods = ('query', 'add', 'add_many', 'delete', 'delete_many', -'delete_query', 'commit', 'optimize', 'raw_query') +_solrpy_client_methods = ( + "query", + "add", + "add_many", + "delete", + "delete_many", + "delete_query", + "commit", + "optimize", + "raw_query", +) + def instrument_solrpy(module): for name in _solrpy_client_methods: if hasattr(module.SolrConnection, name): - wrap_datastore_trace(module.SolrConnection, name, - product='Solr', target=None, operation=name) + wrap_datastore_trace(module, "SolrConnection.%s" % name, product="Solr", target=None, operation=name) diff --git a/newrelic/hooks/messagebroker_pika.py b/newrelic/hooks/messagebroker_pika.py index 625302ba1..cecc1b934 100644 --- a/newrelic/hooks/messagebroker_pika.py +++ b/newrelic/hooks/messagebroker_pika.py @@ -17,73 +17,74 @@ import types from newrelic.api.application import application_instance -from 
newrelic.api.message_transaction import MessageTransaction from newrelic.api.function_trace import FunctionTraceWrapper from newrelic.api.message_trace import MessageTrace +from newrelic.api.message_transaction import MessageTransaction from newrelic.api.transaction import current_transaction from newrelic.common.object_names import callable_name -from newrelic.common.object_wrapper import (wrap_function_wrapper, wrap_object, - FunctionWrapper, function_wrapper, resolve_path, apply_patch) - +from newrelic.common.object_wrapper import ( + FunctionWrapper, + apply_patch, + function_wrapper, + resolve_path, + wrap_function_wrapper, + wrap_object, +) -_START_KEY = '_nr_start_time' -KWARGS_ERROR = 'Supportability/hooks/pika/kwargs_error' +_START_KEY = "_nr_start_time" +KWARGS_ERROR = "Supportability/hooks/pika/kwargs_error" -def _add_consume_rabbitmq_trace(transaction, method, properties, - nr_start_time, queue_name=None): +def _add_consume_rabbitmq_trace(transaction, method, properties, nr_start_time, queue_name=None): routing_key = None - if hasattr(method, 'routing_key'): + if hasattr(method, "routing_key"): routing_key = method.routing_key properties = properties and properties.__dict__ or {} - correlation_id = properties.get('correlation_id') - reply_to = properties.get('reply_to') - headers = properties.get('headers') + correlation_id = properties.get("correlation_id") + reply_to = properties.get("reply_to") + headers = properties.get("headers") # Do not record dt headers in the segment parameters if headers: - headers.pop( - MessageTrace.cat_id_key, None) - headers.pop( - MessageTrace.cat_transaction_key, None) - headers.pop( - MessageTrace.cat_distributed_trace_key, None) - headers.pop('traceparent', None) - headers.pop('tracestate', None) + headers.pop(MessageTrace.cat_id_key, None) + headers.pop(MessageTrace.cat_transaction_key, None) + headers.pop(MessageTrace.cat_distributed_trace_key, None) + headers.pop("traceparent", None) + headers.pop("tracestate", 
None) # The transaction may have started after the message was received. In this # case, the start time is reset to the true transaction start time. - transaction.start_time = min(nr_start_time, - transaction.start_time) + transaction.start_time = min(nr_start_time, transaction.start_time) params = {} if routing_key is not None: - params['routing_key'] = routing_key + params["routing_key"] = routing_key if correlation_id is not None: - params['correlation_id'] = correlation_id + params["correlation_id"] = correlation_id if reply_to is not None: - params['reply_to'] = reply_to + params["reply_to"] = reply_to if headers is not None: - params['headers'] = headers + params["headers"] = headers if queue_name is not None: - params['queue_name'] = queue_name + params["queue_name"] = queue_name # create a trace starting at the time the message was received - trace = MessageTrace(library='RabbitMQ', - operation='Consume', - destination_type='Exchange', - destination_name=method.exchange or 'Default', - params=params) + trace = MessageTrace( + library="RabbitMQ", + operation="Consume", + destination_type="Exchange", + destination_name=method.exchange or "Default", + params=params, + ) trace.__enter__() trace.start_time = nr_start_time trace.__exit__(None, None, None) -def _bind_basic_publish( - exchange, routing_key, body, properties=None, *args, **kwargs): +def _bind_basic_publish(exchange, routing_key, body, properties=None, *args, **kwargs): return (exchange, routing_key, body, properties, args, kwargs) @@ -95,8 +96,7 @@ def _nr_wrapper_basic_publish(wrapped, instance, args, kwargs): from pika import BasicProperties - (exchange, routing_key, body, properties, args, kwargs) = ( - _bind_basic_publish(*args, **kwargs)) + (exchange, routing_key, body, properties, args, kwargs) = _bind_basic_publish(*args, **kwargs) properties = properties or BasicProperties() properties.headers = properties.headers or {} user_headers = properties.headers.copy() @@ -112,20 +112,22 @@ def 
_nr_wrapper_basic_publish(wrapped, instance, args, kwargs): params = {} if routing_key is not None: - params['routing_key'] = routing_key + params["routing_key"] = routing_key if properties.correlation_id is not None: - params['correlation_id'] = properties.correlation_id + params["correlation_id"] = properties.correlation_id if properties.reply_to is not None: - params['reply_to'] = properties.reply_to + params["reply_to"] = properties.reply_to if user_headers: - params['headers'] = user_headers - - with MessageTrace(library='RabbitMQ', - operation='Produce', - destination_type='Exchange', - destination_name=exchange or 'Default', - params=params, - source=wrapped): + params["headers"] = user_headers + + with MessageTrace( + library="RabbitMQ", + operation="Produce", + destination_type="Exchange", + destination_name=exchange or "Default", + params=params, + source=wrapped, + ): cat_headers = MessageTrace.generate_request_headers(transaction) properties.headers.update(cat_headers) return wrapped(*args, **kwargs) @@ -133,7 +135,6 @@ def _nr_wrapper_basic_publish(wrapped, instance, args, kwargs): def _wrap_Channel_get_callback(module, obj, wrap_get): def _nr_wrapper_basic_get(wrapped, instance, args, kwargs): - @function_wrapper def callback_wrapper(callback, _instance, _args, _kwargs): transaction = current_transaction() @@ -143,13 +144,11 @@ def callback_wrapper(callback, _instance, _args, _kwargs): if not _kwargs: method, properties = _args[1:3] - start_time = getattr(callback_wrapper, '_nr_start_time', None) + start_time = getattr(callback_wrapper, "_nr_start_time", None) - _add_consume_rabbitmq_trace(transaction, - method=method, - properties=properties, - nr_start_time=start_time, - queue_name=queue) + _add_consume_rabbitmq_trace( + transaction, method=method, properties=properties, nr_start_time=start_time, queue_name=queue + ) else: m = transaction._transaction_metrics.get(KWARGS_ERROR, 0) transaction._transaction_metrics[KWARGS_ERROR] = m + 1 @@ -172,26 
+171,23 @@ def _nr_wrapper_Basic_Deliver_init_(wrapper, instance, args, kwargs): def _nr_wrap_BlockingChannel___init__(wrapped, instance, args, kwargs): ret = wrapped(*args, **kwargs) - impl = getattr(instance, '_impl', None) + impl = getattr(instance, "_impl", None) # Patch in the original basic_consume to avoid wrapping twice - if impl and hasattr(impl, '_nr_basic_consume'): + if impl and hasattr(impl, "_nr_basic_consume"): impl.basic_consume = impl.basic_consume.__wrapped__ return ret -def _wrap_basic_consume_BlockingChannel_old(wrapper, - consumer_callback, queue, *args, **kwargs): +def _wrap_basic_consume_BlockingChannel_old(wrapper, consumer_callback, queue, *args, **kwargs): args = (wrapper(consumer_callback), queue) + args return queue, args, kwargs -def _wrap_basic_consume_Channel_old(wrapper, consumer_callback, queue='', - *args, **kwargs): +def _wrap_basic_consume_Channel_old(wrapper, consumer_callback, queue="", *args, **kwargs): return queue, (wrapper(consumer_callback), queue) + args, kwargs -def _wrap_basic_consume_Channel(wrapper, queue, on_message_callback, *args, - **kwargs): +def _wrap_basic_consume_Channel(wrapper, queue, on_message_callback, *args, **kwargs): args = (queue, wrapper(on_message_callback)) + args return queue, args, kwargs @@ -201,8 +197,7 @@ def _wrap_basic_get_Channel(wrapper, queue, callback, *args, **kwargs): return queue, args, kwargs -def _wrap_basic_get_Channel_old(wrapper, callback=None, queue='', - *args, **kwargs): +def _wrap_basic_get_Channel_old(wrapper, callback=None, queue="", *args, **kwargs): if callback is not None: callback = wrapper(callback) args = (callback, queue) + args @@ -210,7 +205,6 @@ def _wrap_basic_get_Channel_old(wrapper, callback=None, queue='', def _ConsumeGeneratorWrapper(wrapped): - def wrapper(wrapped, instance, args, kwargs): def _possibly_create_traces(yielded): # This generator can be called either outside of a transaction, or @@ -245,16 +239,15 @@ def _possibly_create_traces(yielded): else: 
# 3. Outside of a transaction - exchange = method.exchange or 'Default' - routing_key = getattr(method, 'routing_key', None) + exchange = method.exchange or "Default" + routing_key = getattr(method, "routing_key", None) headers = None reply_to = None correlation_id = None if properties is not None: - headers = getattr(properties, 'headers', None) - reply_to = getattr(properties, 'reply_to', None) - correlation_id = getattr( - properties, 'correlation_id', None) + headers = getattr(properties, "headers", None) + reply_to = getattr(properties, "reply_to", None) + correlation_id = getattr(properties, "correlation_id", None) # Create a messagebroker task for each iteration through the # generator. This is important because it is foreseeable that @@ -262,15 +255,16 @@ def _possibly_create_traces(yielded): # many messages. bt = MessageTransaction( - application=application_instance(), - library='RabbitMQ', - destination_type='Exchange', - destination_name=exchange, - routing_key=routing_key, - headers=headers, - reply_to=reply_to, - correlation_id=correlation_id, - source=wrapped) + application=application_instance(), + library="RabbitMQ", + destination_type="Exchange", + destination_name=exchange, + routing_key=routing_key, + headers=headers, + reply_to=reply_to, + correlation_id=correlation_id, + source=wrapped, + ) bt.__enter__() return bt @@ -327,28 +321,25 @@ def _generator(generator): def _wrap_Channel_consume_callback(module, obj, wrap_consume): - @function_wrapper def _nr_wrapper_Channel_consume_(wrapped, channel, args, kwargs): - @function_wrapper def callback_wrapper(wrapped, instance, args, kwargs): name = callable_name(wrapped) transaction = current_transaction(active_only=False) - if transaction and (transaction.ignore_transaction or - transaction.stopped): + if transaction and (transaction.ignore_transaction or transaction.stopped): return wrapped(*args, **kwargs) elif transaction: return FunctionTraceWrapper(wrapped, name=name)(*args, **kwargs) else: - if 
hasattr(channel, '_nr_disable_txn_tracing'): + if hasattr(channel, "_nr_disable_txn_tracing"): return wrapped(*args, **kwargs) # Keyword arguments are unknown since this is a user # defined callback - exchange = 'Unknown' + exchange = "Unknown" routing_key = None headers = None reply_to = None @@ -356,32 +347,31 @@ def callback_wrapper(wrapped, instance, args, kwargs): unknown_kwargs = False if not kwargs: method, properties = args[1:3] - exchange = method.exchange or 'Default' - routing_key = getattr(method, 'routing_key', None) + exchange = method.exchange or "Default" + routing_key = getattr(method, "routing_key", None) if properties is not None: - headers = getattr(properties, 'headers', None) - reply_to = getattr(properties, 'reply_to', None) - correlation_id = getattr( - properties, 'correlation_id', None) + headers = getattr(properties, "headers", None) + reply_to = getattr(properties, "reply_to", None) + correlation_id = getattr(properties, "correlation_id", None) else: unknown_kwargs = True with MessageTransaction( - application=application_instance(), - library='RabbitMQ', - destination_type='Exchange', - destination_name=exchange, - routing_key=routing_key, - headers=headers, - queue_name=queue, - reply_to=reply_to, - correlation_id=correlation_id, - source=wrapped) as mt: + application=application_instance(), + library="RabbitMQ", + destination_type="Exchange", + destination_name=exchange, + routing_key=routing_key, + headers=headers, + queue_name=queue, + reply_to=reply_to, + correlation_id=correlation_id, + source=wrapped, + ) as mt: # Improve transaction naming - _new_txn_name = 'RabbitMQ/Exchange/%s/%s' % (exchange, - name) - mt.set_transaction_name(_new_txn_name, group='Message') + _new_txn_name = "RabbitMQ/Exchange/%s/%s" % (exchange, name) + mt.set_transaction_name(_new_txn_name, group="Message") # Record that something went horribly wrong if unknown_kwargs: @@ -411,35 +401,30 @@ def _disable_channel_transactions(wrapped, instance, args, kwargs): 
def instrument_pika_adapters(module): import pika - version = tuple(int(num) for num in pika.__version__.split('.', 1)[0]) + + version = tuple(int(num) for num in pika.__version__.split(".", 1)[0]) if version[0] < 1: wrap_consume = _wrap_basic_consume_BlockingChannel_old else: wrap_consume = _wrap_basic_consume_Channel - _wrap_Channel_consume_callback( - module.blocking_connection, - 'BlockingChannel.basic_consume', - wrap_consume) - wrap_function_wrapper(module.blocking_connection, - 'BlockingChannel.__init__', _nr_wrap_BlockingChannel___init__) - wrap_object(module.blocking_connection, 'BlockingChannel.consume', - _ConsumeGeneratorWrapper) + _wrap_Channel_consume_callback(module, "blocking_connection.BlockingChannel.basic_consume", wrap_consume) + wrap_function_wrapper(module, "blocking_connection.BlockingChannel.__init__", _nr_wrap_BlockingChannel___init__) + wrap_object(module, "blocking_connection.BlockingChannel.consume", _ConsumeGeneratorWrapper) - if hasattr(module, 'tornado_connection'): - wrap_function_wrapper(module.tornado_connection, - 'TornadoConnection.channel', _disable_channel_transactions) + if hasattr(module, "tornado_connection"): + wrap_function_wrapper(module, "tornado_connection.TornadoConnection.channel", _disable_channel_transactions) def instrument_pika_spec(module): - wrap_function_wrapper(module.Basic.Deliver, '__init__', - _nr_wrapper_Basic_Deliver_init_) + wrap_function_wrapper(module, "Basic.Deliver.__init__", _nr_wrapper_Basic_Deliver_init_) def instrument_pika_channel(module): import pika - version = tuple(int(num) for num in pika.__version__.split('.', 1)[0]) + + version = tuple(int(num) for num in pika.__version__.split(".", 1)[0]) if version[0] < 1: wrap_consume = _wrap_basic_consume_Channel_old @@ -448,11 +433,7 @@ def instrument_pika_channel(module): wrap_consume = _wrap_basic_consume_Channel wrap_get = _wrap_basic_get_Channel - wrap_function_wrapper(module, 'Channel.basic_publish', - _nr_wrapper_basic_publish) + 
wrap_function_wrapper(module, "Channel.basic_publish", _nr_wrapper_basic_publish) - _wrap_Channel_get_callback(module, 'Channel.basic_get', wrap_get) - _wrap_Channel_consume_callback( - module, - 'Channel.basic_consume', - wrap_consume) + _wrap_Channel_get_callback(module, "Channel.basic_get", wrap_get) + _wrap_Channel_consume_callback(module, "Channel.basic_consume", wrap_consume) From 0db5fee1e5d44b0791dc517ac9f5d88d1240a340 Mon Sep 17 00:00:00 2001 From: Lalleh Rafeei <84813886+lrafeei@users.noreply.github.com> Date: Thu, 19 Jan 2023 19:05:50 -0800 Subject: [PATCH 039/108] Fix log decorating to be JSON compatible (#736) * Initial addition of JSON capability * Add NR-LINKING metadata JSON combatibility * Remove breakpoint * Hardcode local log decorating tests * Tweak linking metatdata parsing/adding --- newrelic/hooks/logger_logging.py | 22 ++++-- tests/logger_logging/test_local_decorating.py | 79 +++++++++++++++---- 2 files changed, 80 insertions(+), 21 deletions(-) diff --git a/newrelic/hooks/logger_logging.py b/newrelic/hooks/logger_logging.py index 22cdc8c78..5de8fc293 100644 --- a/newrelic/hooks/logger_logging.py +++ b/newrelic/hooks/logger_logging.py @@ -12,13 +12,14 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import json + from newrelic.api.application import application_instance from newrelic.api.time_trace import get_linking_metadata from newrelic.api.transaction import current_transaction, record_log_event -from newrelic.common.object_wrapper import wrap_function_wrapper, function_wrapper +from newrelic.common.object_wrapper import function_wrapper, wrap_function_wrapper from newrelic.core.config import global_settings - try: from urllib import quote except ImportError: @@ -28,13 +29,22 @@ def add_nr_linking_metadata(message): available_metadata = get_linking_metadata() entity_name = quote(available_metadata.get("entity.name", "")) - entity_guid = available_metadata.get("entity.guid", "") + entity_guid = available_metadata.get("entity.guid", "") span_id = available_metadata.get("span.id", "") trace_id = available_metadata.get("trace.id", "") hostname = available_metadata.get("hostname", "") - nr_linking_str = "|".join(("NR-LINKING", entity_guid, hostname, trace_id, span_id, entity_name)) - return "%s %s|" % (message, nr_linking_str) + try: + # See if the message is in JSON format + nr_linking_value = "|".join((entity_guid, hostname, trace_id, span_id, entity_name)) + "|" + edited_message = json.loads(message) + edited_message["NR-LINKING"] = nr_linking_value + message = json.dumps(edited_message) + return message + except ValueError: + # Previous functionality of adding NR Linking Metadata + nr_linking_str = "|".join(("NR-LINKING", entity_guid, hostname, trace_id, span_id, entity_name)) + "|" + return "%s %s" % (message, nr_linking_str) @function_wrapper @@ -72,7 +82,7 @@ def wrap_callHandlers(wrapped, instance, args, kwargs): if application and application.enabled: application.record_custom_metric("Logging/lines", {"count": 1}) application.record_custom_metric("Logging/lines/%s" % level_name, {"count": 1}) - + if settings.application_logging.forwarding and settings.application_logging.forwarding.enabled: try: message = record.getMessage() diff --git 
a/tests/logger_logging/test_local_decorating.py b/tests/logger_logging/test_local_decorating.py index 32a47e904..8b58ea585 100644 --- a/tests/logger_logging/test_local_decorating.py +++ b/tests/logger_logging/test_local_decorating.py @@ -14,13 +14,16 @@ import platform +from testing_support.fixtures import reset_core_stats_engine +from testing_support.validators.validate_log_event_count import validate_log_event_count +from testing_support.validators.validate_log_event_count_outside_transaction import ( + validate_log_event_count_outside_transaction, +) + from newrelic.api.application import application_settings from newrelic.api.background_task import background_task from newrelic.api.time_trace import current_trace from newrelic.api.transaction import current_transaction -from testing_support.fixtures import reset_core_stats_engine -from testing_support.validators.validate_log_event_count import validate_log_event_count -from testing_support.validators.validate_log_event_count_outside_transaction import validate_log_event_count_outside_transaction def set_trace_ids(): @@ -31,22 +34,17 @@ def set_trace_ids(): if trace: trace.guid = "abcdefgh" + def exercise_logging(logger): set_trace_ids() logger.warning("C") -def get_metadata_string(log_message, is_txn): - host = platform.uname()[1] - assert host - entity_guid = application_settings().entity_guid - if is_txn: - metadata_string = "".join(('NR-LINKING|', entity_guid, '|', host, '|abcdefgh12345678|abcdefgh|Python%20Agent%20Test%20%28logger_logging%29|')) - else: - metadata_string = "".join(('NR-LINKING|', entity_guid, '|', host, '|||Python%20Agent%20Test%20%28logger_logging%29|')) - formatted_string = log_message + " " + metadata_string - return formatted_string +def exercise_logging_json(logger): + set_trace_ids() + + logger.warning('{"first_name": "Hugh", "last_name": "Man"}') @reset_core_stats_engine() @@ -54,8 +52,37 @@ def test_local_log_decoration_inside_transaction(logger): @validate_log_event_count(1) 
@background_task() def test(): + host = platform.uname()[1] + assert host + entity_guid = application_settings().entity_guid + entity_name = "Python%20Agent%20Test%20%28logger_logging%29" exercise_logging(logger) - assert logger.caplog.records[0] == get_metadata_string('C', True) + assert logger.caplog.records[0] == "C NR-LINKING|%s|%s|abcdefgh12345678|abcdefgh|%s|" % ( + entity_guid, + host, + entity_name, + ) + + test() + + +@reset_core_stats_engine() +def test_local_log_decoration_inside_transaction_with_json(logger): + @validate_log_event_count(1) + @background_task() + def test(): + host = platform.uname()[1] + assert host + entity_guid = application_settings().entity_guid + entity_name = "Python%20Agent%20Test%20%28logger_logging%29" + exercise_logging_json(logger) + assert logger.caplog.records[ + 0 + ] == '{"first_name": "Hugh", "last_name": "Man", "NR-LINKING": "%s|%s|abcdefgh12345678|abcdefgh|%s|"}' % ( + entity_guid, + host, + entity_name, + ) test() @@ -64,7 +91,29 @@ def test(): def test_local_log_decoration_outside_transaction(logger): @validate_log_event_count_outside_transaction(1) def test(): + host = platform.uname()[1] + assert host + entity_guid = application_settings().entity_guid + entity_name = "Python%20Agent%20Test%20%28logger_logging%29" exercise_logging(logger) - assert logger.caplog.records[0] == get_metadata_string('C', False) + assert logger.caplog.records[0] == "C NR-LINKING|%s|%s|||%s|" % (entity_guid, host, entity_name) + + test() + + +@reset_core_stats_engine() +def test_local_log_decoration_outside_transaction_with_json(logger): + @validate_log_event_count_outside_transaction(1) + def test(): + host = platform.uname()[1] + assert host + entity_guid = application_settings().entity_guid + entity_name = "Python%20Agent%20Test%20%28logger_logging%29" + exercise_logging_json(logger) + assert logger.caplog.records[0] == '{"first_name": "Hugh", "last_name": "Man", "NR-LINKING": "%s|%s|||%s|"}' % ( + entity_guid, + host, + entity_name, + 
) test() From 01ceca79b90ed093fbde3e6d8878a2688b7089b9 Mon Sep 17 00:00:00 2001 From: Hannah Stepanek Date: Thu, 26 Jan 2023 09:41:37 -0800 Subject: [PATCH 040/108] Revert "Fix log decorating to be JSON compatible" (#746) * Revert "Fix log decorating to be JSON compatible (#736)" This reverts commit 0db5fee1e5d44b0791dc517ac9f5d88d1240a340. * [Mega-Linter] Apply linters fixes * Trigger tests Co-authored-by: hmstepanek --- newrelic/hooks/logger_logging.py | 15 +--- tests/logger_logging/test_local_decorating.py | 79 ++++++------------- 2 files changed, 24 insertions(+), 70 deletions(-) diff --git a/newrelic/hooks/logger_logging.py b/newrelic/hooks/logger_logging.py index 5de8fc293..67fb46525 100644 --- a/newrelic/hooks/logger_logging.py +++ b/newrelic/hooks/logger_logging.py @@ -12,8 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -import json - from newrelic.api.application import application_instance from newrelic.api.time_trace import get_linking_metadata from newrelic.api.transaction import current_transaction, record_log_event @@ -34,17 +32,8 @@ def add_nr_linking_metadata(message): trace_id = available_metadata.get("trace.id", "") hostname = available_metadata.get("hostname", "") - try: - # See if the message is in JSON format - nr_linking_value = "|".join((entity_guid, hostname, trace_id, span_id, entity_name)) + "|" - edited_message = json.loads(message) - edited_message["NR-LINKING"] = nr_linking_value - message = json.dumps(edited_message) - return message - except ValueError: - # Previous functionality of adding NR Linking Metadata - nr_linking_str = "|".join(("NR-LINKING", entity_guid, hostname, trace_id, span_id, entity_name)) + "|" - return "%s %s" % (message, nr_linking_str) + nr_linking_str = "|".join(("NR-LINKING", entity_guid, hostname, trace_id, span_id, entity_name)) + return "%s %s|" % (message, nr_linking_str) @function_wrapper diff --git 
a/tests/logger_logging/test_local_decorating.py b/tests/logger_logging/test_local_decorating.py index 8b58ea585..d4917eff5 100644 --- a/tests/logger_logging/test_local_decorating.py +++ b/tests/logger_logging/test_local_decorating.py @@ -41,10 +41,26 @@ def exercise_logging(logger): logger.warning("C") -def exercise_logging_json(logger): - set_trace_ids() - - logger.warning('{"first_name": "Hugh", "last_name": "Man"}') +def get_metadata_string(log_message, is_txn): + host = platform.uname()[1] + assert host + entity_guid = application_settings().entity_guid + if is_txn: + metadata_string = "".join( + ( + "NR-LINKING|", + entity_guid, + "|", + host, + "|abcdefgh12345678|abcdefgh|Python%20Agent%20Test%20%28logger_logging%29|", + ) + ) + else: + metadata_string = "".join( + ("NR-LINKING|", entity_guid, "|", host, "|||Python%20Agent%20Test%20%28logger_logging%29|") + ) + formatted_string = log_message + " " + metadata_string + return formatted_string @reset_core_stats_engine() @@ -52,37 +68,8 @@ def test_local_log_decoration_inside_transaction(logger): @validate_log_event_count(1) @background_task() def test(): - host = platform.uname()[1] - assert host - entity_guid = application_settings().entity_guid - entity_name = "Python%20Agent%20Test%20%28logger_logging%29" exercise_logging(logger) - assert logger.caplog.records[0] == "C NR-LINKING|%s|%s|abcdefgh12345678|abcdefgh|%s|" % ( - entity_guid, - host, - entity_name, - ) - - test() - - -@reset_core_stats_engine() -def test_local_log_decoration_inside_transaction_with_json(logger): - @validate_log_event_count(1) - @background_task() - def test(): - host = platform.uname()[1] - assert host - entity_guid = application_settings().entity_guid - entity_name = "Python%20Agent%20Test%20%28logger_logging%29" - exercise_logging_json(logger) - assert logger.caplog.records[ - 0 - ] == '{"first_name": "Hugh", "last_name": "Man", "NR-LINKING": "%s|%s|abcdefgh12345678|abcdefgh|%s|"}' % ( - entity_guid, - host, - entity_name, - ) + 
assert logger.caplog.records[0] == get_metadata_string("C", True) test() @@ -91,29 +78,7 @@ def test(): def test_local_log_decoration_outside_transaction(logger): @validate_log_event_count_outside_transaction(1) def test(): - host = platform.uname()[1] - assert host - entity_guid = application_settings().entity_guid - entity_name = "Python%20Agent%20Test%20%28logger_logging%29" exercise_logging(logger) - assert logger.caplog.records[0] == "C NR-LINKING|%s|%s|||%s|" % (entity_guid, host, entity_name) - - test() - - -@reset_core_stats_engine() -def test_local_log_decoration_outside_transaction_with_json(logger): - @validate_log_event_count_outside_transaction(1) - def test(): - host = platform.uname()[1] - assert host - entity_guid = application_settings().entity_guid - entity_name = "Python%20Agent%20Test%20%28logger_logging%29" - exercise_logging_json(logger) - assert logger.caplog.records[0] == '{"first_name": "Hugh", "last_name": "Man", "NR-LINKING": "%s|%s|||%s|"}' % ( - entity_guid, - host, - entity_name, - ) + assert logger.caplog.records[0] == get_metadata_string("C", False) test() From 1d8d078e712d0f617dc34a4f228bf977867151ab Mon Sep 17 00:00:00 2001 From: Kate Anderson <90657569+kanderson250@users.noreply.github.com> Date: Thu, 26 Jan 2023 16:39:09 -0800 Subject: [PATCH 041/108] Add apdexPerfZone attribute to Transaction. 
(#753) Co-authored-by: Enriqueta De Leon Co-authored-by: Kate Anderson Co-authored-by: Mary Martinez Co-authored-by: Enriqueta De Leon Co-authored-by: Kate Anderson Co-authored-by: Mary Martinez Co-authored-by: Hannah Stepanek --- newrelic/core/transaction_node.py | 6 ++++-- tests/agent_features/test_attributes_in_action.py | 2 +- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/newrelic/core/transaction_node.py b/newrelic/core/transaction_node.py index 97f6f3ebb..11b254b8a 100644 --- a/newrelic/core/transaction_node.py +++ b/newrelic/core/transaction_node.py @@ -452,6 +452,10 @@ def transaction_event_intrinsics(self, stats_table): def _add_if_not_empty(key, value): if value: intrinsics[key] = value + + apdex_perf_zone = self.apdex_perf_zone() + _add_if_not_empty('apdexPerfZone', apdex_perf_zone) + _add_if_not_empty('nr.apdexPerfZone', apdex_perf_zone) if self.errors: intrinsics['error'] = True @@ -467,8 +471,6 @@ def _add_if_not_empty(key, value): ','.join(self.alternate_path_hashes)) _add_if_not_empty('nr.referringTransactionGuid', self.referring_transaction_guid) - _add_if_not_empty('nr.apdexPerfZone', - self.apdex_perf_zone()) if self.synthetics_resource_id: intrinsics['nr.guid'] = self.guid diff --git a/tests/agent_features/test_attributes_in_action.py b/tests/agent_features/test_attributes_in_action.py index e51298dbf..aa44d3e2d 100644 --- a/tests/agent_features/test_attributes_in_action.py +++ b/tests/agent_features/test_attributes_in_action.py @@ -93,7 +93,7 @@ AGENT_KEYS_ALL = TRACE_ERROR_AGENT_KEYS + REQ_PARAMS -TRANS_EVENT_INTRINSICS = ("name", "duration", "type", "timestamp", "totalTime", "error") +TRANS_EVENT_INTRINSICS = ("name", "duration", "type", "timestamp", "totalTime", "error", "nr.apdexPerfZone", "apdexPerfZone") TRANS_EVENT_AGENT_KEYS = [ "response.status", "request.method", From 5844bb18cd7de4b3719f5bf021f69ee2aee0c09c Mon Sep 17 00:00:00 2001 From: Lalleh Rafeei <84813886+lrafeei@users.noreply.github.com> Date: Tue, 31 Jan 2023 
15:09:50 -0800 Subject: [PATCH 042/108] Fix tests in starlette v0.23.1 (#752) * Fix tests in starlette v0.23.1 * Fix conditional tests * Add comment to bg_task test --- tests/framework_starlette/test_bg_tasks.py | 9 ++++++--- tox.ini | 3 +-- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/tests/framework_starlette/test_bg_tasks.py b/tests/framework_starlette/test_bg_tasks.py index 1b909323a..07a70131b 100644 --- a/tests/framework_starlette/test_bg_tasks.py +++ b/tests/framework_starlette/test_bg_tasks.py @@ -87,10 +87,12 @@ def _test(): response = app.get("/" + route) assert response.status == 200 - BUG_COMPLETELY_FIXED = (starlette_version >= (0, 21, 0)) or ( - starlette_version >= (0, 20, 1) and sys.version_info[:2] > (3, 7) + # The bug was fixed in version 0.21.0 but re-occured in 0.23.1. + # The bug was also not present on 0.20.1 to 0.23.1 if using Python3.7. + BUG_COMPLETELY_FIXED = (0, 21, 0) <= starlette_version < (0, 23, 1) or ( + (0, 20, 1) <= starlette_version < (0, 23, 1) and sys.version_info[:2] > (3, 7) ) - BUG_PARTIALLY_FIXED = (0, 20, 1) <= starlette_version < (0, 21, 0) and sys.version_info[:2] <= (3, 7) + BUG_PARTIALLY_FIXED = (0, 20, 1) <= starlette_version < (0, 21, 0) or starlette_version >= (0, 23, 1) if BUG_COMPLETELY_FIXED: # Assert both web transaction and background task transactions are present. @@ -103,6 +105,7 @@ def _test(): # The background task no longer blocks the completion of the web request/web transaction. # However, the BaseHTTPMiddleware causes the task to be cancelled when the web request disconnects, so there are no # longer function traces or background task transactions. 
+ # In version 0.23.1, the check to see if more_body exists is removed, reverting behavior to this model _test = validate_transaction_metrics("_test_bg_tasks:run_%s_bg_task" % route, scoped_metrics=[route_metric])( _test ) diff --git a/tox.ini b/tox.ini index 2fd6d201a..1691740f7 100644 --- a/tox.ini +++ b/tox.ini @@ -357,8 +357,7 @@ deps = framework_starlette-starlette0015: starlette<0.16 framework_starlette-starlette0019: starlette<0.20 framework_starlette-starlette002001: starlette==0.20.1 - ; Starlette latest version temporarily pinned - framework_starlette-starlettelatest: starlette<0.23.1 + framework_starlette-starlettelatest: starlette framework_strawberry: starlette framework_strawberry-strawberrylatest: strawberry-graphql framework_tornado: pycurl From a76580963919163c00089fb9e2ab4f0e050a3ee4 Mon Sep 17 00:00:00 2001 From: Dmitry Kolyagin Date: Thu, 26 Jan 2023 20:22:43 +0100 Subject: [PATCH 043/108] Support `redis.asyncio` (#744) * Support `redis.asyncio` * Fix `flake8` issues Co-authored-by: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> --- newrelic/config.py | 8 ++++++++ newrelic/hooks/datastore_aioredis.py | 28 +++++++++++++++++----------- 2 files changed, 25 insertions(+), 11 deletions(-) diff --git a/newrelic/config.py b/newrelic/config.py index 0bfb7daa0..f19318247 100644 --- a/newrelic/config.py +++ b/newrelic/config.py @@ -2665,6 +2665,14 @@ def _process_module_builtin_defaults(): "aioredis.connection", "newrelic.hooks.datastore_aioredis", "instrument_aioredis_connection" ) + _process_module_definition("redis.asyncio.client", "newrelic.hooks.datastore_aioredis", "instrument_aioredis_client") + + _process_module_definition("redis.asyncio.commands", "newrelic.hooks.datastore_aioredis", "instrument_aioredis_client") + + _process_module_definition( + "redis.asyncio.connection", "newrelic.hooks.datastore_aioredis", "instrument_aioredis_connection" + ) + _process_module_definition( "elasticsearch.client", 
"newrelic.hooks.datastore_elasticsearch", diff --git a/newrelic/hooks/datastore_aioredis.py b/newrelic/hooks/datastore_aioredis.py index 428787d30..e42fd3b29 100644 --- a/newrelic/hooks/datastore_aioredis.py +++ b/newrelic/hooks/datastore_aioredis.py @@ -12,24 +12,26 @@ # See the License for the specific language governing permissions and # limitations under the License. -from newrelic.api.datastore_trace import DatastoreTrace, DatastoreTraceWrapper +from newrelic.api.datastore_trace import DatastoreTrace from newrelic.api.time_trace import current_trace from newrelic.api.transaction import current_transaction -from newrelic.common.object_wrapper import wrap_function_wrapper, function_wrapper, FunctionWrapper +from newrelic.common.object_wrapper import wrap_function_wrapper, function_wrapper from newrelic.hooks.datastore_redis import ( _redis_client_methods, _redis_multipart_commands, _redis_operation_re, ) -from newrelic.common.async_wrapper import async_wrapper -import aioredis - -try: - AIOREDIS_VERSION = lambda: tuple(int(x) for x in getattr(aioredis, "__version__").split(".")) -except Exception: - AIOREDIS_VERSION = lambda: (0, 0, 0) +def get_aioredis_version(): + try: + import aioredis as aioredis_legacy + except ModuleNotFoundError: + return None + try: + return tuple(int(x) for x in getattr(aioredis_legacy, "__version__").split(".")) + except Exception: + return 0, 0, 0 def _conn_attrs_to_dict(connection): @@ -68,7 +70,8 @@ def _nr_wrapper_AioRedis_method_(wrapped, instance, args, kwargs): # Check for transaction and return early if found. # Method will return synchronously without executing, # it will be added to the command stack and run later. 
- if AIOREDIS_VERSION() < (2,): + aioredis_version = get_aioredis_version() + if aioredis_version and aioredis_version < (2,): # AioRedis v1 uses a RedisBuffer instead of a real connection for queueing up pipeline commands from aioredis.commands.transaction import _RedisBuffer if isinstance(instance._pool_or_conn, _RedisBuffer): @@ -77,7 +80,10 @@ def _nr_wrapper_AioRedis_method_(wrapped, instance, args, kwargs): return wrapped(*args, **kwargs) else: # AioRedis v2 uses a Pipeline object for a client and internally queues up pipeline commands - from aioredis.client import Pipeline + if aioredis_version: + from aioredis.client import Pipeline + else: + from redis.asyncio.client import Pipeline if isinstance(instance, Pipeline): return wrapped(*args, **kwargs) From daf57b639fa5085ada89960143002cbee06a9ea0 Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Tue, 31 Jan 2023 13:01:58 -0800 Subject: [PATCH 044/108] Redis Asyncio Testing (#750) * Add standardized method for package version tuples * Adapt aioredis tests to redis.asyncio * Standardize version tuple * Refresh uninstrumented redis methods * Fix aioredis version checking * Remove aioredis version function --- newrelic/common/package_version_utils.py | 90 ++++++++++++------- newrelic/hooks/datastore_aioredis.py | 14 +-- .../test_package_version_utils.py | 19 ++++ tests/datastore_aioredis/conftest.py | 14 ++- .../datastore_aioredis/test_instance_info.py | 7 +- tests/datastore_aioredis/test_multiple_dbs.py | 3 +- .../test_uninstrumented_methods.py | 23 +++++ tox.ini | 2 + 8 files changed, 124 insertions(+), 48 deletions(-) diff --git a/newrelic/common/package_version_utils.py b/newrelic/common/package_version_utils.py index a785f7ab1..13b816878 100644 --- a/newrelic/common/package_version_utils.py +++ b/newrelic/common/package_version_utils.py @@ -20,7 +20,7 @@ def get_package_version(name): - """Gets the version of the library. 
+ """Gets the version string of the library. :param name: The name of library. :type name: str :return: The version of the library. Returns None if can't determine version. @@ -31,36 +31,6 @@ def get_package_version(name): "1.1.0" """ - def _get_package_version(name): - module = sys.modules.get(name, None) - version = None - for attr in VERSION_ATTRS: - try: - version = getattr(module, attr, None) - # Cast any version specified as a list into a tuple. - version = tuple(version) if isinstance(version, list) else version - if version not in NULL_VERSIONS: - return version - except Exception: - pass - - # importlib was introduced into the standard library starting in Python3.8. - if "importlib" in sys.modules and hasattr(sys.modules["importlib"], "metadata"): - try: - version = sys.modules["importlib"].metadata.version(name) # pylint: disable=E1101 - if version not in NULL_VERSIONS: - return version - except Exception: - pass - - if "pkg_resources" in sys.modules: - try: - version = sys.modules["pkg_resources"].get_distribution(name).version - if version not in NULL_VERSIONS: - return version - except Exception: - pass - version = _get_package_version(name) # Coerce iterables into a string @@ -68,3 +38,61 @@ def _get_package_version(name): version = ".".join(str(v) for v in version) return version + + +def get_package_version_tuple(name): + """Gets the version tuple of the library. + :param name: The name of library. + :type name: str + :return: The version of the library. Returns None if can't determine version. + :type return: tuple or None + + Usage:: + >>> get_package_version_tuple("botocore") + (1, 1, 0) + """ + + def int_or_str(value): + try: + return int(value) + except Exception: + return str(value) + + version = _get_package_version(name) + + # Split "." 
separated strings and cast fields to ints + if isinstance(version, str): + version = tuple(int_or_str(v) for v in version.split(".")) + + return version + + +def _get_package_version(name): + module = sys.modules.get(name, None) + version = None + for attr in VERSION_ATTRS: + try: + version = getattr(module, attr, None) + # Cast any version specified as a list into a tuple. + version = tuple(version) if isinstance(version, list) else version + if version not in NULL_VERSIONS: + return version + except Exception: + pass + + # importlib was introduced into the standard library starting in Python3.8. + if "importlib" in sys.modules and hasattr(sys.modules["importlib"], "metadata"): + try: + version = sys.modules["importlib"].metadata.version(name) # pylint: disable=E1101 + if version not in NULL_VERSIONS: + return version + except Exception: + pass + + if "pkg_resources" in sys.modules: + try: + version = sys.modules["pkg_resources"].get_distribution(name).version + if version not in NULL_VERSIONS: + return version + except Exception: + pass \ No newline at end of file diff --git a/newrelic/hooks/datastore_aioredis.py b/newrelic/hooks/datastore_aioredis.py index e42fd3b29..9bd5b17b0 100644 --- a/newrelic/hooks/datastore_aioredis.py +++ b/newrelic/hooks/datastore_aioredis.py @@ -21,17 +21,7 @@ _redis_multipart_commands, _redis_operation_re, ) - - -def get_aioredis_version(): - try: - import aioredis as aioredis_legacy - except ModuleNotFoundError: - return None - try: - return tuple(int(x) for x in getattr(aioredis_legacy, "__version__").split(".")) - except Exception: - return 0, 0, 0 +from newrelic.common.package_version_utils import get_package_version_tuple def _conn_attrs_to_dict(connection): @@ -70,7 +60,7 @@ def _nr_wrapper_AioRedis_method_(wrapped, instance, args, kwargs): # Check for transaction and return early if found. # Method will return synchronously without executing, # it will be added to the command stack and run later. 
- aioredis_version = get_aioredis_version() + aioredis_version = get_package_version_tuple("aioredis") if aioredis_version and aioredis_version < (2,): # AioRedis v1 uses a RedisBuffer instead of a real connection for queueing up pipeline commands from aioredis.commands.transaction import _RedisBuffer diff --git a/tests/agent_unittests/test_package_version_utils.py b/tests/agent_unittests/test_package_version_utils.py index 14ee454fd..d80714d77 100644 --- a/tests/agent_unittests/test_package_version_utils.py +++ b/tests/agent_unittests/test_package_version_utils.py @@ -21,6 +21,7 @@ NULL_VERSIONS, VERSION_ATTRS, get_package_version, + get_package_version_tuple, ) IS_PY38_PLUS = sys.version_info[:2] >= (3, 8) @@ -57,6 +58,24 @@ def test_get_package_version(attr, value, expected_value): delattr(pytest, attr) +@pytest.mark.parametrize( + "attr,value,expected_value", + ( + ("version", "1.2.3.4", (1, 2, 3, 4)), + ("__version__", "1.3.5rc2", (1, 3, "5rc2")), + ("__version_tuple__", (3, 5, 8), (3, 5, 8)), + ("version_tuple", [3, 1, "0b2"], (3, 1, "0b2")), + ), +) +def test_get_package_version_tuple(attr, value, expected_value): + # There is no file/module here, so we monkeypatch + # pytest instead for our purposes + setattr(pytest, attr, value) + version = get_package_version_tuple("pytest") + assert version == expected_value + delattr(pytest, attr) + + @SKIP_IF_NOT_IMPORTLIB_METADATA @validate_function_called("importlib.metadata", "version") def test_importlib_metadata(): diff --git a/tests/datastore_aioredis/conftest.py b/tests/datastore_aioredis/conftest.py index de9c6c04d..3d341f2b6 100644 --- a/tests/datastore_aioredis/conftest.py +++ b/tests/datastore_aioredis/conftest.py @@ -12,9 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import aioredis import pytest +from newrelic.common.package_version_utils import get_package_version_tuple from testing_support.db_settings import redis_settings from testing_support.fixture.event_loop import event_loop as loop @@ -24,7 +24,17 @@ collector_available_fixture, ) -AIOREDIS_VERSION = tuple(int(x) for x in aioredis.__version__.split(".")[:2]) +try: + import aioredis + + AIOREDIS_VERSION = get_package_version_tuple("aioredis") +except ImportError: + import redis.asyncio as aioredis + + # Fake aioredis version to show when it was moved to redis.asyncio + AIOREDIS_VERSION = (2, 0, 2) + + SKIPIF_AIOREDIS_V1 = pytest.mark.skipif(AIOREDIS_VERSION < (2,), reason="Unsupported aioredis version.") SKIPIF_AIOREDIS_V2 = pytest.mark.skipif(AIOREDIS_VERSION >= (2,), reason="Unsupported aioredis version.") DB_SETTINGS = redis_settings()[0] diff --git a/tests/datastore_aioredis/test_instance_info.py b/tests/datastore_aioredis/test_instance_info.py index 4bb744149..d43366ab5 100644 --- a/tests/datastore_aioredis/test_instance_info.py +++ b/tests/datastore_aioredis/test_instance_info.py @@ -14,10 +14,9 @@ from inspect import isawaitable import pytest -import aioredis from newrelic.hooks.datastore_aioredis import _conn_attrs_to_dict, _instance_info -from conftest import AIOREDIS_VERSION, SKIPIF_AIOREDIS_V1 +from conftest import aioredis, AIOREDIS_VERSION, SKIPIF_AIOREDIS_V1 _instance_info_tests = [ ({}, ("localhost", "6379", "0")), @@ -35,6 +34,10 @@ class DisabledConnection(aioredis.Connection): @staticmethod async def connect(*args, **kwargs): pass + + async def can_read_destructive(self, *args, **kwargs): + return False + class DisabledUnixConnection(aioredis.UnixDomainSocketConnection, DisabledConnection): diff --git a/tests/datastore_aioredis/test_multiple_dbs.py b/tests/datastore_aioredis/test_multiple_dbs.py index 61d99d3ae..d490c1f58 100644 --- a/tests/datastore_aioredis/test_multiple_dbs.py +++ b/tests/datastore_aioredis/test_multiple_dbs.py @@ -12,7 +12,8 @@ # 
See the License for the specific language governing permissions and # limitations under the License. -import aioredis +from conftest import aioredis + import pytest from conftest import AIOREDIS_VERSION, loop # noqa from testing_support.db_settings import redis_settings diff --git a/tests/datastore_aioredis/test_uninstrumented_methods.py b/tests/datastore_aioredis/test_uninstrumented_methods.py index f1b36b1ca..c48b5a442 100644 --- a/tests/datastore_aioredis/test_uninstrumented_methods.py +++ b/tests/datastore_aioredis/test_uninstrumented_methods.py @@ -15,7 +15,10 @@ IGNORED_METHODS = { "address", + "auto_close_connection_pool", "channels", + "client_tracking_off", + "client_tracking_on", "close", "closed", "connection_pool", @@ -25,6 +28,9 @@ "execute_command", "execute", "from_url", + "get_connection_kwargs", + "get_encoder", + "get_retry", "hscan_iter", "ihscan", "in_pubsub", @@ -33,6 +39,7 @@ "iscan", "isscan", "izscan", + "load_external_module", "lock", "multi_exec", "parse_response", @@ -42,9 +49,11 @@ "register_script", "response_callbacks", "RESPONSE_CALLBACKS", + "sentinel", "SET_IF_EXIST", "SET_IF_NOT_EXIST", "set_response_callback", + "set_retry", "SHUTDOWN_NOSAVE", "SHUTDOWN_SAVE", "single_connection_client", @@ -61,6 +70,20 @@ "ZSET_IF_NOT_EXIST", } +REDIS_MODULES = { + "bf", + "cf", + "cms", + "ft", + "graph", + "json", + "tdigest", + "topk", + "ts", +} + +IGNORED_METHODS |= REDIS_MODULES + def test_uninstrumented_methods(client): methods = {m for m in dir(client) if not m[0] == "_"} diff --git a/tox.ini b/tox.ini index 1691740f7..c77a3d9a9 100644 --- a/tox.ini +++ b/tox.ini @@ -91,6 +91,7 @@ envlist = redis-datastore_redis-{py37,py38,py39,py310,py311,pypy37}-redis{0400,latest}, redis-datastore_aioredis-{py37,py38,py39,py310,pypy37}-aioredislatest, redis-datastore_aioredis-{py37,py310}-aioredis01, + redis-datastore_aioredis-{py37,py38,py39,py310,py311,pypy37}-redislatest, redis-datastore_aredis-{py37,py38,py39,pypy37}-aredislatest, 
solr-datastore_solrpy-{py27,pypy}-solrpy{00,01}, python-datastore_sqlite-{py27,py37,py38,py39,py310,py311,pypy,pypy37}, @@ -253,6 +254,7 @@ deps = datastore_redis-redis0400: redis<4.1 datastore_redis-redis03: redis<4.0 datastore_redis-{py27,pypy}: rb + datastore_aioredis-redislatest: redis datastore_aioredis-aioredislatest: aioredis datastore_aioredis-aioredis01: aioredis<2 datastore_aredis-aredislatest: aredis From 0608ca47fc9c99783b1ae51517c9af4c4eb3e538 Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Thu, 9 Feb 2023 10:10:32 -0800 Subject: [PATCH 045/108] CodeCov Integration (#710) * Add aggregate coverage settings to tox.ini * Refactor coverage fixture for GHA * Send coverage data files * Linter fixes * Configure codecov report * Yield cov handle from fixture * Fix empty coverage fixture * Specify artifact download dir * Find coverage files with find command * Add concurrency cancelling to github actions * uncomment test deps * Fix or symbol * Fix concurrency groups * Linter fixes * Add comment for yield None in fixture * [Mega-Linter] Apply linters fixes * Bump Tests --------- Co-authored-by: TimPansino --- .github/workflows/mega-linter.yml | 2 +- .github/workflows/tests.yml | 138 +++++++++++++++++++++++++++++- tests/testing_support/fixtures.py | 56 ++++++------ tox.ini | 5 ++ 4 files changed, 167 insertions(+), 34 deletions(-) diff --git a/.github/workflows/mega-linter.yml b/.github/workflows/mega-linter.yml index d378752dc..cd0930507 100644 --- a/.github/workflows/mega-linter.yml +++ b/.github/workflows/mega-linter.yml @@ -15,7 +15,7 @@ env: # Comment env block if you do not want to apply fixes APPLY_FIXES_MODE: commit # If APPLY_FIXES is used, defines if the fixes are directly committed (commit) or posted in a PR (pull_request) concurrency: - group: ${{ github.ref }}-${{ github.workflow }} + group: ${{ github.ref || github.run_id }}-${{ github.workflow }} cancel-in-progress: true jobs: diff --git 
a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 947608207..131e7c5c3 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -24,8 +24,13 @@ on: schedule: - cron: "0 15 * * *" +concurrency: + group: ${{ github.ref || github.run_id }}-${{ github.workflow }} + cancel-in-progress: true + jobs: - tests: # Aggregate job that provides a single check for workflow success + # Aggregate job that provides a single check for all tests passing + tests: runs-on: ubuntu-20.04 needs: - python @@ -47,6 +52,39 @@ jobs: - name: Success run: echo "Success!" + # Combine and upload coverage data + coverage: + if: success() || failure() # Does not run on cancelled workflows + runs-on: ubuntu-20.04 + needs: + - tests + + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: "3.10" + architecture: x64 + + - name: Download Coverage Artifacts + uses: actions/download-artifact@v3 + with: + path: ./ + + - name: Combine Coverage + run: | + pip install coverage + find . 
-name ".coverage.*" -exec mv {} ./ \; + coverage combine + coverage xml + + - name: Upload Coverage to Codecov + uses: codecov/codecov-action@v3 + with: + files: coverage.xml + fail_ci_if_error: true + + # Tests python: env: TOTAL_GROUPS: 20 @@ -99,6 +137,13 @@ jobs: TOX_PARALLEL_NO_SPINNER: 1 PY_COLORS: 0 + - name: Upload Coverage Artifacts + uses: actions/upload-artifact@v3 + with: + name: coverage-${{ github.job }}-${{ strategy.job-index }} + path: ./**/.coverage.* + retention-days: 1 + grpc: env: TOTAL_GROUPS: 1 @@ -129,6 +174,13 @@ jobs: TOX_PARALLEL_NO_SPINNER: 1 PY_COLORS: 0 + - name: Upload Coverage Artifacts + uses: actions/upload-artifact@v3 + with: + name: coverage-${{ github.job }}-${{ strategy.job-index }} + path: ./**/.coverage.* + retention-days: 1 + libcurl: env: TOTAL_GROUPS: 1 @@ -165,6 +217,13 @@ jobs: TOX_PARALLEL_NO_SPINNER: 1 PY_COLORS: 0 + - name: Upload Coverage Artifacts + uses: actions/upload-artifact@v3 + with: + name: coverage-${{ github.job }}-${{ strategy.job-index }} + path: ./**/.coverage.* + retention-days: 1 + postgres: env: TOTAL_GROUPS: 2 @@ -210,6 +269,13 @@ jobs: TOX_PARALLEL_NO_SPINNER: 1 PY_COLORS: 0 + - name: Upload Coverage Artifacts + uses: actions/upload-artifact@v3 + with: + name: coverage-${{ github.job }}-${{ strategy.job-index }} + path: ./**/.coverage.* + retention-days: 1 + mysql: env: TOTAL_GROUPS: 2 @@ -258,6 +324,13 @@ jobs: TOX_PARALLEL_NO_SPINNER: 1 PY_COLORS: 0 + - name: Upload Coverage Artifacts + uses: actions/upload-artifact@v3 + with: + name: coverage-${{ github.job }}-${{ strategy.job-index }} + path: ./**/.coverage.* + retention-days: 1 + redis: env: TOTAL_GROUPS: 2 @@ -301,6 +374,13 @@ jobs: TOX_PARALLEL_NO_SPINNER: 1 PY_COLORS: 0 + - name: Upload Coverage Artifacts + uses: actions/upload-artifact@v3 + with: + name: coverage-${{ github.job }}-${{ strategy.job-index }} + path: ./**/.coverage.* + retention-days: 1 + solr: env: TOTAL_GROUPS: 1 @@ -346,6 +426,13 @@ jobs: TOX_PARALLEL_NO_SPINNER: 1 
PY_COLORS: 0 + - name: Upload Coverage Artifacts + uses: actions/upload-artifact@v3 + with: + name: coverage-${{ github.job }}-${{ strategy.job-index }} + path: ./**/.coverage.* + retention-days: 1 + memcached: env: TOTAL_GROUPS: 2 @@ -389,6 +476,13 @@ jobs: TOX_PARALLEL_NO_SPINNER: 1 PY_COLORS: 0 + - name: Upload Coverage Artifacts + uses: actions/upload-artifact@v3 + with: + name: coverage-${{ github.job }}-${{ strategy.job-index }} + path: ./**/.coverage.* + retention-days: 1 + rabbitmq: env: TOTAL_GROUPS: 1 @@ -433,6 +527,13 @@ jobs: TOX_PARALLEL_NO_SPINNER: 1 PY_COLORS: 0 + - name: Upload Coverage Artifacts + uses: actions/upload-artifact@v3 + with: + name: coverage-${{ github.job }}-${{ strategy.job-index }} + path: ./**/.coverage.* + retention-days: 1 + kafka: env: TOTAL_GROUPS: 4 @@ -498,6 +599,13 @@ jobs: TOX_PARALLEL_NO_SPINNER: 1 PY_COLORS: 0 + - name: Upload Coverage Artifacts + uses: actions/upload-artifact@v3 + with: + name: coverage-${{ github.job }}-${{ strategy.job-index }} + path: ./**/.coverage.* + retention-days: 1 + mongodb: env: TOTAL_GROUPS: 1 @@ -541,6 +649,13 @@ jobs: TOX_PARALLEL_NO_SPINNER: 1 PY_COLORS: 0 + - name: Upload Coverage Artifacts + uses: actions/upload-artifact@v3 + with: + name: coverage-${{ github.job }}-${{ strategy.job-index }} + path: ./**/.coverage.* + retention-days: 1 + elasticsearchserver01: env: TOTAL_GROUPS: 1 @@ -586,6 +701,13 @@ jobs: TOX_PARALLEL_NO_SPINNER: 1 PY_COLORS: 0 + - name: Upload Coverage Artifacts + uses: actions/upload-artifact@v3 + with: + name: coverage-${{ github.job }}-${{ strategy.job-index }} + path: ./**/.coverage.* + retention-days: 1 + elasticsearchserver07: env: TOTAL_GROUPS: 1 @@ -631,6 +753,13 @@ jobs: TOX_PARALLEL_NO_SPINNER: 1 PY_COLORS: 0 + - name: Upload Coverage Artifacts + uses: actions/upload-artifact@v3 + with: + name: coverage-${{ github.job }}-${{ strategy.job-index }} + path: ./**/.coverage.* + retention-days: 1 + gearman: env: TOTAL_GROUPS: 1 @@ -672,3 +801,10 @@ jobs: env: 
TOX_PARALLEL_NO_SPINNER: 1 PY_COLORS: 0 + + - name: Upload Coverage Artifacts + uses: actions/upload-artifact@v3 + with: + name: coverage-${{ github.job }}-${{ strategy.job-index }} + path: ./**/.coverage.* + retention-days: 1 diff --git a/tests/testing_support/fixtures.py b/tests/testing_support/fixtures.py index f642d1f6f..ebb0da830 100644 --- a/tests/testing_support/fixtures.py +++ b/tests/testing_support/fixtures.py @@ -211,7 +211,6 @@ def collector_agent_registration_fixture( @pytest.fixture(scope="session") def _collector_agent_registration_fixture(request): - if should_initialize_agent: initialize_agent(app_name=app_name, default_settings=default_settings) @@ -529,7 +528,7 @@ def _validate_custom_event_collector_json(wrapped, instance, args, kwargs): assert decoded_sampling_info["events_seen"] == num_events assert len(decoded_events) == num_events - for (intrinsics, attributes) in decoded_events: + for intrinsics, attributes in decoded_events: assert isinstance(intrinsics, dict) assert isinstance(attributes, dict) @@ -549,7 +548,6 @@ def _validate_tt_parameters(wrapped, instance, args, kwargs): except: raise else: - # Now that transaction has been recorded, generate # a transaction trace @@ -673,7 +671,6 @@ def _validate_browser_attributes(wrapped, instance, args, kwargs): obfuscation_key = instance._settings.license_key[:13] attributes = json_decode(deobfuscate(footer_data["atts"], obfuscation_key)) else: - # if there are no user or agent attributes, there will be no dict # for them in the browser data @@ -722,7 +719,6 @@ def _validate_error_event_attributes(wrapped, instance, args, kwargs): except: raise else: - event_data = instance.error_events for sample in event_data: error_data_samples.append(sample) @@ -863,7 +859,6 @@ def _bind_params(transaction, *args, **kwargs): def validate_attributes_complete(attr_type, required_attrs=None, forgone_attrs=None): - # This differs from `validate_attributes` in that all fields of # Attribute must match (name, 
value, and destinations), not just # name. It's a more thorough test, but it's more of a pain to set @@ -895,7 +890,6 @@ def _bind_params(transaction, *args, **kwargs): attribute_filter = transaction.settings.attribute_filter if attr_type == "intrinsic": - # Intrinsics are stored as a dict, so for consistency's sake # in this test, we convert them to Attributes. @@ -1095,7 +1089,6 @@ def _bind_params(transaction, *args, **kwargs): error_events = transaction.error_events(instance.stats_table) assert len(error_events) == num_errors for sample in error_events: - assert isinstance(sample, list) assert len(sample) == 3 @@ -1127,7 +1120,6 @@ def _bind_params(transaction, *args, **kwargs): def _validate_event_attributes(intrinsics, user_attributes, required_intrinsics, required_user): - now = time.time() assert isinstance(intrinsics["timestamp"], int) assert intrinsics["timestamp"] <= 1000.0 * now @@ -1191,7 +1183,6 @@ def _validate_transaction_exception_message(wrapped, instance, args, kwargs): except: raise else: - error_data = instance.error_data() assert len(error_data) == 1 error = error_data[0] @@ -1221,13 +1212,11 @@ def validate_application_exception_message(expected_message): @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.notice_error") def _validate_application_exception_message(wrapped, instance, args, kwargs): - try: result = wrapped(*args, **kwargs) except: raise else: - error_data = instance.error_data() assert len(error_data) == 1 error = error_data[0] @@ -1483,35 +1472,41 @@ def code_coverage_fixture(source=None): if source is None: source = ["newrelic"] + github_actions = bool(os.environ.get("GITHUB_ACTIONS", None)) + tox_env_directory = os.environ.get("TOX_ENVDIR", None) + + if tox_env_directory: + data_file = os.path.join(tox_env_directory, ".coverage") + data_suffix = os.path.split(tox_env_directory)[-1] + coverage_directory = os.path.join(tox_env_directory, "htmlcov") + xml_report = os.path.join(tox_env_directory, 
"coverage.xml") + else: + data_file = ".coverage" + data_suffix = None + coverage_directory = "htmlcov" + xml_report = "coverage.xml" + @pytest.fixture(scope="session") def _code_coverage_fixture(request): if not source: - return - - if os.environ.get("GITHUB_ACTIONS") is not None: + yield None # Required, generator based fixtures must yield 1 value or pytest will throw an exception. return from coverage import coverage - env_directory = os.environ.get("TOX_ENVDIR", None) + cov = coverage(source=source, data_file=data_file, data_suffix=data_suffix, branch=True) + cov.start() - if env_directory is not None: - coverage_directory = os.path.join(env_directory, "htmlcov") - xml_report = os.path.join(env_directory, "coverage.xml") - else: - coverage_directory = "htmlcov" - xml_report = "coverage.xml" + yield cov - def finalize(): - cov.stop() + # At exit, stop coverage and save to data file + cov.stop() + cov.save() + if not github_actions: + # Run html and xml reports locally cov.html_report(directory=coverage_directory) cov.xml_report(outfile=xml_report) - request.addfinalizer(finalize) - - cov = coverage(source=source, branch=True) - cov.start() - return _code_coverage_fixture @@ -1609,7 +1604,6 @@ def set_default_encoding(encoding): @function_wrapper def _set_default_encoding(wrapped, instance, args, kwargs): - # This technique of reloading the sys module is necessary because the # method is removed during initialization of Python. 
Doing this is # highly frowned upon, but it is the only way to test how our agent @@ -1657,7 +1651,6 @@ def wrapper(wrapped, instance, args, kwargs): def validate_analytics_catmap_data(name, expected_attributes=(), non_expected_attributes=()): - samples = [] @transient_function_wrapper("newrelic.core.stats_engine", "SampledDataSet.add") @@ -1709,7 +1702,6 @@ def _increment_count(wrapped, instance, args, kwargs): def failing_endpoint(endpoint, raises=RetryDataForRequest, call_number=1): - called_list = [] @transient_function_wrapper("newrelic.core.agent_protocol", "AgentProtocol.send") diff --git a/tox.ini b/tox.ini index c77a3d9a9..3e5a58cd8 100644 --- a/tox.ini +++ b/tox.ini @@ -171,6 +171,11 @@ usefixtures = collector_agent_registration code_coverage +[coverage:paths] +source = + newrelic/ + .tox/**/site-packages/newrelic/ + [testenv] deps = # Base Dependencies From b0d4e73f018145012cd6e4fdcd7aa22c0f623199 Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Thu, 9 Feb 2023 10:55:14 -0800 Subject: [PATCH 046/108] Mergify (#761) * Add mergify config file * Remove priority * Clean up mergify rules * Add non-draft requirement for merge * Add merge method * [Mega-Linter] Apply linters fixes * Don't update draft PRs. 
* Remove merge rules for develop branches * Linting --------- Co-authored-by: TimPansino --- .github/mergify.yml | 75 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 75 insertions(+) create mode 100644 .github/mergify.yml diff --git a/.github/mergify.yml b/.github/mergify.yml new file mode 100644 index 000000000..dfd9c88f5 --- /dev/null +++ b/.github/mergify.yml @@ -0,0 +1,75 @@ +# For condition grammar see: https://docs.mergify.com/conditions/#grammar + +shared: + conditions: + - and: &pr_ready_checks + - "#approved-reviews-by>=1" # A '#' pulls the length of the underlying list + - "label=ready-to-merge" + - "check-success=tests" + - "-draft" # Don't include draft PRs + - or: # Only handle branches that target main or develop branches + - "base=main" + - "base~=^develop" + +queue_rules: + - name: default + conditions: + - and: *pr_ready_checks + merge_method: squash + +pull_request_rules: + # Merge Queue PR Rules + - name: Regular PRs - Add to merge queue on approval (squash) + conditions: + - and: *pr_ready_checks + - "-head~=^develop" # Don't include PRs from develop branches + actions: + queue: + method: squash + + # Automatic PR Updates + - name: Automatic PR branch updates + conditions: + - "queue-position=-1" # Not queued + - "-draft" # Don't include draft PRs + actions: + update: + + # Automatic Labeling + - name: Clean up after merge + conditions: + - merged + actions: + delete_head_branch: + label: + remove: + - "merge-conflicts" + - "ready-to-merge" + - "tests-failing" + + - name: Toggle label on merge conflicts + conditions: + - conflict + actions: + label: + toggle: + - "merge-conflicts" + + # Don't use a toggle for this, as the label constantly gets applied and removed when tests are rerun. 
+ - name: Add label on test failures + conditions: + - or: + - check-failure=tests + - check-skipped=tests + actions: + label: + add: + - "tests-failing" + + - name: Remove label on test success + conditions: + - check-success=tests + actions: + label: + remove: + - "tests-failing" From 1bf6a65a02dac45a73b5369b4db187906e51bba0 Mon Sep 17 00:00:00 2001 From: Lalleh Rafeei <84813886+lrafeei@users.noreply.github.com> Date: Thu, 9 Feb 2023 12:02:27 -0800 Subject: [PATCH 047/108] Elasticsearch v8 support (#741) * Fix function_wrapper calls to module * Fix wrapper in pika hook * Revert elasticsearch instrumentation * Revert some wrap_function_wrappers to orig * Remove comments/breakpoints * Fix hooks in elasticsearch * Add new client methods from v8 and their hooks * Add elasticsearch v8 to workflow and tox * Fix indices for elasticsearch01 * Disable xpack security in elasticsearch v8.0 * Start to add try/except blocks in tests * Add support for v8 transport * add support for v8 connection * Add tests-WIP * Clean up most tests * Clean up unused instrumentation Co-authored-by: Lalleh Rafeei Co-authored-by: Enriqueta De Leon Co-authored-by: Uma Annamalai Co-authored-by: Kate Anderson Co-authored-by: Mary Martinez Co-authored-by: Hannah Stepanek * Remove elastic search source code * Elasticsearch v8 testing Co-authored-by: Lalleh Rafeei Co-authored-by: Enriqueta De Leon Co-authored-by: Uma Annamalai Co-authored-by: Kate Anderson Co-authored-by: Mary Martinez Co-authored-by: Hannah Stepanek * Scope ES fixture * ES v8 only supports Python3.6+ * Refactor transport tests for v8 Co-authored-by: Lalleh Rafeei Co-authored-by: Uma Annamalai Co-authored-by: Hannah Stepanek Co-authored-by: Kate Anderson Co-authored-by: Enriqueta De Leon * Remove extra comments * Added perform_request_kwargs to test_transport * Fix some linter issues * Remove extra newline * Group es v7 v8 process modules together * Add auto signature detection & binding * Use bind_arguments in ES * Add test for 
wrapped function * Add validator for datastore trace inputs * Use common bind_arguments for PY3 * Fix tests in starlette v0.23.1 (#752) * Fix tests in starlette v0.23.1 * Fix conditional tests * Add comment to bg_task test * Split below es 8 methods from es 8 methods Note the previous tests in this file to check whether a method was instrumented, did not test anything because they were checking whether the list of methods that we instrumented were instrumented instead of whether there were uninstrumented methods on the es client that we missed. Because we decided due to lack of reporting of bugs by our customers, to not support the buggy wrapping on previous es versions (below es8), we only added tests to assert all methods were wrapped from es8+. We also are only testing es8+ wrapping of methods since the previous versions wrapping behavior may not have been correct due to the signature of the methods changing without us detecting it due to lack of tests. Since our customers have not reported any issues, it seems not worth it at this time to go back and fix these bugs. 
* Remove signature auto detection implementation * Fixup: remove signature autodetection * Fixup: cleanup * Test method calls on all es versions * Fixup: don't run some methods on es7 --------- Co-authored-by: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Co-authored-by: mary-martinez Co-authored-by: enriqueta Co-authored-by: Tim Pansino Co-authored-by: Lalleh Rafeei Co-authored-by: Enriqueta De Leon Co-authored-by: Uma Annamalai Co-authored-by: Kate Anderson Co-authored-by: Mary Martinez Co-authored-by: Hannah Stepanek Co-authored-by: Hannah Stepanek Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> --- .github/workflows/tests.yml | 15 +- .gitignore | 3 + newrelic/config.py | 87 +++- newrelic/hooks/datastore_elasticsearch.py | 391 ++++++++++++++++-- tests/datastore_elasticsearch/conftest.py | 40 +- .../test_connection.py | 50 ++- .../test_database_duration.py | 44 +- .../test_elasticsearch.py | 289 +++++++------ .../test_instrumented_methods.py | 144 +++++-- tests/datastore_elasticsearch/test_mget.py | 159 +++---- .../test_multiple_dbs.py | 96 +++-- .../test_trace_node.py | 22 +- .../datastore_elasticsearch/test_transport.py | 132 +++--- .../validate_datastore_trace_inputs.py | 50 +++ tox.ini | 10 +- 15 files changed, 1093 insertions(+), 439 deletions(-) create mode 100644 tests/testing_support/validators/validate_datastore_trace_inputs.py diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 131e7c5c3..d20a7c02a 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -34,8 +34,8 @@ jobs: runs-on: ubuntu-20.04 needs: - python - - elasticsearchserver01 - elasticsearchserver07 + - elasticsearchserver08 - gearman - grpc - kafka @@ -656,7 +656,7 @@ jobs: path: ./**/.coverage.* retention-days: 1 - elasticsearchserver01: + elasticsearchserver07: env: TOTAL_GROUPS: 1 @@ -669,8 +669,8 @@ jobs: timeout-minutes: 30 services: - es01: - image: elasticsearch:1.4.4 + es07: + image: 
elasticsearch:7.17.8 env: "discovery.type": "single-node" ports: @@ -708,7 +708,7 @@ jobs: path: ./**/.coverage.* retention-days: 1 - elasticsearchserver07: + elasticsearchserver08: env: TOTAL_GROUPS: 1 @@ -721,9 +721,10 @@ jobs: timeout-minutes: 30 services: - es01: - image: elasticsearch:7.13.2 + es08: + image: elasticsearch:8.6.0 env: + "xpack.security.enabled": "false" "discovery.type": "single-node" ports: - 8080:9200 diff --git a/.gitignore b/.gitignore index 8226b0e97..d4550713f 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,6 @@ +.DS_Store +.DS_Store/ + # Linter megalinter-reports/ diff --git a/newrelic/config.py b/newrelic/config.py index f19318247..203318287 100644 --- a/newrelic/config.py +++ b/newrelic/config.py @@ -2665,64 +2665,147 @@ def _process_module_builtin_defaults(): "aioredis.connection", "newrelic.hooks.datastore_aioredis", "instrument_aioredis_connection" ) - _process_module_definition("redis.asyncio.client", "newrelic.hooks.datastore_aioredis", "instrument_aioredis_client") + _process_module_definition( + "redis.asyncio.client", "newrelic.hooks.datastore_aioredis", "instrument_aioredis_client" + ) - _process_module_definition("redis.asyncio.commands", "newrelic.hooks.datastore_aioredis", "instrument_aioredis_client") + _process_module_definition( + "redis.asyncio.commands", "newrelic.hooks.datastore_aioredis", "instrument_aioredis_client" + ) _process_module_definition( "redis.asyncio.connection", "newrelic.hooks.datastore_aioredis", "instrument_aioredis_connection" ) + # v7 and below _process_module_definition( "elasticsearch.client", "newrelic.hooks.datastore_elasticsearch", "instrument_elasticsearch_client", ) + # v8 and above + _process_module_definition( + "elasticsearch._sync.client", + "newrelic.hooks.datastore_elasticsearch", + "instrument_elasticsearch_client_v8", + ) + + # v7 and below _process_module_definition( "elasticsearch.client.cat", "newrelic.hooks.datastore_elasticsearch", "instrument_elasticsearch_client_cat", ) + # 
v8 and above + _process_module_definition( + "elasticsearch._sync.client.cat", + "newrelic.hooks.datastore_elasticsearch", + "instrument_elasticsearch_client_cat_v8", + ) + + # v7 and below _process_module_definition( "elasticsearch.client.cluster", "newrelic.hooks.datastore_elasticsearch", "instrument_elasticsearch_client_cluster", ) + # v8 and above + _process_module_definition( + "elasticsearch._sync.client.cluster", + "newrelic.hooks.datastore_elasticsearch", + "instrument_elasticsearch_client_cluster_v8", + ) + + # v7 and below _process_module_definition( "elasticsearch.client.indices", "newrelic.hooks.datastore_elasticsearch", "instrument_elasticsearch_client_indices", ) + # v8 and above + _process_module_definition( + "elasticsearch._sync.client.indices", + "newrelic.hooks.datastore_elasticsearch", + "instrument_elasticsearch_client_indices_v8", + ) + + # v7 and below _process_module_definition( "elasticsearch.client.nodes", "newrelic.hooks.datastore_elasticsearch", "instrument_elasticsearch_client_nodes", ) + # v8 and above + _process_module_definition( + "elasticsearch._sync.client.nodes", + "newrelic.hooks.datastore_elasticsearch", + "instrument_elasticsearch_client_nodes_v8", + ) + + # v7 and below _process_module_definition( "elasticsearch.client.snapshot", "newrelic.hooks.datastore_elasticsearch", "instrument_elasticsearch_client_snapshot", ) + # v8 and above + _process_module_definition( + "elasticsearch._sync.client.snapshot", + "newrelic.hooks.datastore_elasticsearch", + "instrument_elasticsearch_client_snapshot_v8", + ) + + # v7 and below _process_module_definition( "elasticsearch.client.tasks", "newrelic.hooks.datastore_elasticsearch", "instrument_elasticsearch_client_tasks", ) + # v8 and above + _process_module_definition( + "elasticsearch._sync.client.tasks", + "newrelic.hooks.datastore_elasticsearch", + "instrument_elasticsearch_client_tasks_v8", + ) + + # v7 and below _process_module_definition( "elasticsearch.client.ingest", 
"newrelic.hooks.datastore_elasticsearch", "instrument_elasticsearch_client_ingest", ) + # v8 and above + _process_module_definition( + "elasticsearch._sync.client.ingest", + "newrelic.hooks.datastore_elasticsearch", + "instrument_elasticsearch_client_ingest_v8", + ) + + # v7 and below _process_module_definition( "elasticsearch.connection.base", "newrelic.hooks.datastore_elasticsearch", "instrument_elasticsearch_connection_base", ) + # v8 and above + _process_module_definition( + "elastic_transport._node._base", + "newrelic.hooks.datastore_elasticsearch", + "instrument_elastic_transport__node__base", + ) + + # v7 and below _process_module_definition( "elasticsearch.transport", "newrelic.hooks.datastore_elasticsearch", "instrument_elasticsearch_transport", ) + # v8 and above + _process_module_definition( + "elastic_transport._transport", + "newrelic.hooks.datastore_elasticsearch", + "instrument_elastic_transport__transport", + ) _process_module_definition("pika.adapters", "newrelic.hooks.messagebroker_pika", "instrument_pika_adapters") _process_module_definition("pika.channel", "newrelic.hooks.messagebroker_pika", "instrument_pika_channel") diff --git a/newrelic/hooks/datastore_elasticsearch.py b/newrelic/hooks/datastore_elasticsearch.py index b4c6f3bb6..2417aabfe 100644 --- a/newrelic/hooks/datastore_elasticsearch.py +++ b/newrelic/hooks/datastore_elasticsearch.py @@ -14,7 +14,8 @@ from newrelic.api.datastore_trace import DatastoreTrace from newrelic.api.transaction import current_transaction -from newrelic.common.object_wrapper import wrap_function_wrapper +from newrelic.common.object_wrapper import function_wrapper, wrap_function_wrapper +from newrelic.common.package_version_utils import get_package_version_tuple from newrelic.packages import six # An index name can be a string, None or a sequence. In the case of None @@ -23,6 +24,8 @@ # obviously can also be more than one index name. 
Where we are certain # there is only a single index name we use it, otherwise we use 'other'. +ES_VERSION = get_package_version_tuple("elasticsearch") + def _index_name(index): if not index or index == "*": @@ -32,11 +35,25 @@ def _index_name(index): return index -def _extract_kwargs_index(*args, **kwargs): - return _index_name(kwargs.get("index")) +def _extract_args_index(index=None, *args, **kwargs): + return _index_name(index) -def _extract_args_index(index=None, *args, **kwargs): +def _extract_args_allocation_explain_index( + current_node=None, + error_trace=None, + filter_path=None, + human=None, + include_disk_info=None, + include_yes_decisions=None, + index=None, + *args, + **kwargs +): + return _index_name(index) + + +def _extract_args_name_index(name=None, index=None, *args, **kwargs): return _index_name(index) @@ -44,6 +61,22 @@ def _extract_args_body_index(body=None, index=None, *args, **kwargs): return _index_name(index) +def _extract_args_requests_index(requests=None, index=None, *args, **kwargs): + return _index_name(index) + + +def _extract_args_searches_index(searches=None, index=None, *args, **kwargs): + return _index_name(index) + + +def _extract_args_search_templates_index(search_templates=None, index=None, *args, **kwargs): + return _index_name(index) + + +def _extract_args_operations_index(operations=None, index=None, *args, **kwargs): + return _index_name(index) + + def _extract_args_doctype_body_index(doc_type=None, body=None, index=None, *args, **kwargs): return _index_name(index) @@ -52,11 +85,11 @@ def _extract_args_field_index(field=None, index=None, *args, **kwargs): return _index_name(index) -def _extract_args_name_body_index(name=None, body=None, index=None, *args, **kwargs): +def _extract_args_fields_index(fields=None, index=None, *args, **kwargs): return _index_name(index) -def _extract_args_name_index(name=None, index=None, *args, **kwargs): +def _extract_args_name_body_index(name=None, body=None, index=None, *args, **kwargs): 
return _index_name(index) @@ -64,19 +97,27 @@ def _extract_args_metric_index(metric=None, index=None, *args, **kwargs): return _index_name(index) +def _extract_args_settings_index(settings=None, index=None, *args, **kwargs): + return _index_name(index) + + +def instrument_es_methods(module, _class, client_methods, prefix=None): + for method_name, arg_extractor in client_methods: + if hasattr(getattr(module, _class), method_name): + wrap_elasticsearch_client_method(module, _class, method_name, arg_extractor, prefix) + + def wrap_elasticsearch_client_method(module, class_name, method_name, arg_extractor, prefix=None): def _nr_wrapper_Elasticsearch_method_(wrapped, instance, args, kwargs): transaction = current_transaction() if transaction is None: return wrapped(*args, **kwargs) - - # When arg_extractor is None, it means there is no target field + # When index is None, it means there is no target field # associated with this method. Hence this method will only # create an operation metric and no statement metric. This is # handled by setting the target to None when calling the # DatastoreTraceWrapper. 
- if arg_extractor is None: index = None else: @@ -105,7 +146,7 @@ def _nr_wrapper_Elasticsearch_method_(wrapped, instance, args, kwargs): wrap_function_wrapper(module, "%s.%s" % (class_name, method_name), _nr_wrapper_Elasticsearch_method_) -_elasticsearch_client_methods = ( +_elasticsearch_client_methods_below_v8 = ( ("abort_benchmark", None), ("benchmark", _extract_args_index), ("bulk", None), @@ -147,13 +188,68 @@ def _nr_wrapper_Elasticsearch_method_(wrapped, instance, args, kwargs): ) +_elasticsearch_client_methods_v8 = ( + ("bulk", _extract_args_operations_index), + ("clear_scroll", None), + ("close", None), + ("close_point_in_time", None), + ("count", _extract_args_index), + ("create", _extract_args_index), + ("delete", _extract_args_index), + ("delete_by_query", _extract_args_index), + ("delete_by_query_rethrottle", None), + ("delete_script", None), + ("exists", _extract_args_index), + ("exists_source", _extract_args_index), + ("explain", _extract_args_index), + ("field_caps", _extract_args_index), + ("get", _extract_args_index), + ("get_script", None), + ("get_script_context", None), + ("get_script_languages", None), + ("get_source", _extract_args_index), + ("index", _extract_args_index), + ("info", None), + ("knn_search", _extract_args_index), + ("mget", _extract_args_index), + ("msearch", _extract_args_searches_index), + ("msearch_template", _extract_args_search_templates_index), + ("mtermvectors", _extract_args_index), + ("open_point_in_time", _extract_args_index), + ("options", None), + ("ping", None), + ("put_script", None), + ("rank_eval", _extract_args_requests_index), + ("reindex", None), + ("reindex_rethrottle", None), + ("render_search_template", None), + ("scripts_painless_execute", None), + ("scroll", None), + ("search", _extract_args_index), + ("search_mvt", _extract_args_index), + ("search_shards", _extract_args_index), + ("terms_enum", _extract_args_index), + ("termvector", _extract_args_index), + ("termvectors", _extract_args_index), + 
("update", _extract_args_index), + ("update_by_query", _extract_args_index), + ("update_by_query_rethrottle", None), +) + + def instrument_elasticsearch_client(module): - for method_name, arg_extractor in _elasticsearch_client_methods: - if hasattr(getattr(module, "Elasticsearch"), method_name): - wrap_elasticsearch_client_method(module, "Elasticsearch", method_name, arg_extractor) + # The module path was remapped in v8 to match previous versions. + # In order to avoid double wrapping we check the version before + # wrapping. + if ES_VERSION < (8,): + instrument_es_methods(module, "Elasticsearch", _elasticsearch_client_methods_below_v8) + + +def instrument_elasticsearch_client_v8(module): + instrument_es_methods(module, "Elasticsearch", _elasticsearch_client_methods_v8) -_elasticsearch_client_indices_methods = ( +_elasticsearch_client_indices_methods_below_v8 = ( ("analyze", _extract_args_index), ("clear_cache", _extract_args_index), ("close", _extract_args_index), @@ -196,13 +292,77 @@ def instrument_elasticsearch_client(module): ) +_elasticsearch_client_indices_methods_v8 = ( + ("add_block", _extract_args_index), + ("analyze", _extract_args_index), + ("clear_cache", _extract_args_index), + ("clone", _extract_args_index), + ("close", _extract_args_index), + ("create", _extract_args_index), + ("create_data_stream", None), + ("data_streams_stats", None), + ("delete", _extract_args_index), + ("delete_alias", _extract_args_index), + ("delete_data_stream", None), + ("delete_index_template", None), + ("delete_template", None), + ("disk_usage", _extract_args_index), + ("downsample", _extract_args_index), + ("exists", _extract_args_index), + ("exists_alias", _extract_args_name_index), + ("exists_index_template", None), + ("exists_template", None), + ("field_usage_stats", _extract_args_index), + ("flush", _extract_args_index), + ("forcemerge", _extract_args_index), + ("get", _extract_args_index), + ("get_alias", _extract_args_index), + ("get_data_stream", None), + 
("get_field_mapping", _extract_args_fields_index), + ("get_index_template", None), + ("get_mapping", _extract_args_index), + ("get_settings", _extract_args_index), + ("get_template", None), + ("migrate_to_data_stream", None), + ("modify_data_stream", None), + ("open", _extract_args_index), + ("promote_data_stream", None), + ("put_alias", _extract_args_index), + ("put_index_template", None), + ("put_mapping", _extract_args_index), + ("put_settings", _extract_args_settings_index), + ("put_template", None), + ("recovery", _extract_args_index), + ("refresh", _extract_args_index), + ("reload_search_analyzers", _extract_args_index), + ("resolve_index", None), + ("rollover", None), + ("segments", _extract_args_index), + ("shard_stores", _extract_args_index), + ("shrink", _extract_args_index), + ("simulate_index_template", None), + ("simulate_template", None), + ("split", _extract_args_index), + ("stats", _extract_args_index), + ("unfreeze", _extract_args_index), + ("update_aliases", None), + ("validate_query", _extract_args_index), +) + + def instrument_elasticsearch_client_indices(module): - for method_name, arg_extractor in _elasticsearch_client_indices_methods: - if hasattr(getattr(module, "IndicesClient"), method_name): - wrap_elasticsearch_client_method(module, "IndicesClient", method_name, arg_extractor, "indices") + # The module path was remapped in v8 to match previous versions. + # In order to avoid double wrapping we check the version before + # wrapping. 
+ if ES_VERSION < (8,): + instrument_es_methods(module, "IndicesClient", _elasticsearch_client_indices_methods_below_v8, "indices") + +def instrument_elasticsearch_client_indices_v8(module): + instrument_es_methods(module, "IndicesClient", _elasticsearch_client_indices_methods_v8, "indices") -_elasticsearch_client_cat_methods = ( + +_elasticsearch_client_cat_methods_below_v8 = ( ("aliases", None), ("allocation", None), ("count", _extract_args_index), @@ -220,18 +380,72 @@ def instrument_elasticsearch_client_indices(module): ("thread_pool", None), ) +_elasticsearch_client_cat_methods_v8 = ( + ("aliases", None), + ("allocation", None), + ("component_templates", None), + ("count", _extract_args_index), + ("fielddata", None), + ("health", None), + ("help", None), + ("indices", _extract_args_index), + ("master", None), + ("ml_data_frame_analytics", None), + ("ml_datafeeds", None), + ("ml_jobs", None), + ("ml_trained_models", None), + ("nodeattrs", None), + ("nodes", None), + ("pending_tasks", None), + ("plugins", None), + ("recovery", _extract_args_index), + ("repositories", None), + ("segments", _extract_args_index), + ("shards", _extract_args_index), + ("snapshots", None), + ("tasks", None), + ("templates", None), + ("thread_pool", None), + ("transforms", None), +) + def instrument_elasticsearch_client_cat(module): - for method_name, arg_extractor in _elasticsearch_client_cat_methods: - if hasattr(getattr(module, "CatClient"), method_name): - wrap_elasticsearch_client_method(module, "CatClient", method_name, arg_extractor, "cat") + # The module path was remapped in v8 to match previous versions. + # In order to avoid double wrapping we check the version before + # wrapping. 
+ if ES_VERSION < (8,): + instrument_es_methods(module, "CatClient", _elasticsearch_client_cat_methods_below_v8, "cat") + + +def instrument_elasticsearch_client_cat_v8(module): + instrument_es_methods(module, "CatClient", _elasticsearch_client_cat_methods_v8, "cat") + + +_elasticsearch_client_cluster_methods_below_v8 = ( + ("get_settings", None), + ("health", _extract_args_index), + ("pending_tasks", None), + ("put_settings", None), + ("reroute", None), + ("state", _extract_args_metric_index), + ("stats", None), +) -_elasticsearch_client_cluster_methods = ( +_elasticsearch_client_cluster_methods_v8 = ( + ("allocation_explain", _extract_args_allocation_explain_index), + ("delete_component_template", None), + ("delete_voting_config_exclusions", None), + ("exists_component_template", None), + ("get_component_template", None), ("get_settings", None), ("health", _extract_args_index), ("pending_tasks", None), + ("post_voting_config_exclusions", None), + ("put_component_template", None), ("put_settings", None), + ("remote_info", None), ("reroute", None), ("state", _extract_args_metric_index), ("stats", None), @@ -239,26 +453,60 @@ def instrument_elasticsearch_client_cat(module): def instrument_elasticsearch_client_cluster(module): - for method_name, arg_extractor in _elasticsearch_client_cluster_methods: - if hasattr(getattr(module, "ClusterClient"), method_name): - wrap_elasticsearch_client_method(module, "ClusterClient", method_name, arg_extractor, "cluster") + # The module path was remapped in v8 to match previous versions. + # In order to avoid double wrapping we check the version before + # wrapping. 
+ if ES_VERSION < (8,): + instrument_es_methods(module, "ClusterClient", _elasticsearch_client_cluster_methods_below_v8, "cluster") + + +def instrument_elasticsearch_client_cluster_v8(module): + instrument_es_methods(module, "ClusterClient", _elasticsearch_client_cluster_methods_v8, "cluster") -_elasticsearch_client_nodes_methods = ( +_elasticsearch_client_nodes_methods_below_v8 = ( ("hot_threads", None), ("info", None), ("shutdown", None), ("stats", None), ) +_elasticsearch_client_nodes_methods_v8 = ( + ("clear_repositories_metering_archive", None), + ("get_repositories_metering_info", None), + ("hot_threads", None), + ("info", None), + ("reload_secure_settings", None), + ("stats", None), + ("usage", None), +) def instrument_elasticsearch_client_nodes(module): - for method_name, arg_extractor in _elasticsearch_client_nodes_methods: - if hasattr(getattr(module, "NodesClient"), method_name): - wrap_elasticsearch_client_method(module, "NodesClient", method_name, arg_extractor, "nodes") + # The module path was remapped in v8 to match previous versions. + # In order to avoid double wrapping we check the version before + # wrapping. 
+ if ES_VERSION < (8,): + instrument_es_methods(module, "NodesClient", _elasticsearch_client_nodes_methods_below_v8, "nodes") + +def instrument_elasticsearch_client_nodes_v8(module): + instrument_es_methods(module, "NodesClient", _elasticsearch_client_nodes_methods_v8, "nodes") -_elasticsearch_client_snapshot_methods = ( + +_elasticsearch_client_snapshot_methods_below_v8 = ( + ("create", None), + ("create_repository", None), + ("delete", None), + ("delete_repository", None), + ("get", None), + ("get_repository", None), + ("restore", None), + ("status", None), + ("verify_repository", None), +) +_elasticsearch_client_snapshot_methods_v8 = ( + ("cleanup_repository", None), + ("clone", None), ("create", None), ("create_repository", None), ("delete", None), @@ -272,9 +520,15 @@ def instrument_elasticsearch_client_nodes(module): def instrument_elasticsearch_client_snapshot(module): - for method_name, arg_extractor in _elasticsearch_client_snapshot_methods: - if hasattr(getattr(module, "SnapshotClient"), method_name): - wrap_elasticsearch_client_method(module, "SnapshotClient", method_name, arg_extractor, "snapshot") + # The module path was remapped in v8 to match previous versions. + # In order to avoid double wrapping we check the version before + # wrapping. 
+ if ES_VERSION < (8,): + instrument_es_methods(module, "SnapshotClient", _elasticsearch_client_snapshot_methods_below_v8, "snapshot") + + +def instrument_elasticsearch_client_snapshot_v8(module): + instrument_es_methods(module, "SnapshotClient", _elasticsearch_client_snapshot_methods_v8, "snapshot") _elasticsearch_client_tasks_methods = ( @@ -285,23 +539,44 @@ def instrument_elasticsearch_client_snapshot(module): def instrument_elasticsearch_client_tasks(module): - for method_name, arg_extractor in _elasticsearch_client_tasks_methods: - if hasattr(getattr(module, "TasksClient"), method_name): - wrap_elasticsearch_client_method(module, "TasksClient", method_name, arg_extractor, "tasks") + # The module path was remapped in v8 to match previous versions. + # In order to avoid double wrapping we check the version before + # wrapping. + if ES_VERSION < (8,): + instrument_es_methods(module, "TasksClient", _elasticsearch_client_tasks_methods, "tasks") + + +def instrument_elasticsearch_client_tasks_v8(module): + instrument_es_methods(module, "TasksClient", _elasticsearch_client_tasks_methods, "tasks") -_elasticsearch_client_ingest_methods = ( +_elasticsearch_client_ingest_methods_below_v8 = ( ("get_pipeline", None), ("put_pipeline", None), ("delete_pipeline", None), ("simulate", None), ) +_elasticsearch_client_ingest_methods_v8 = ( + ("delete_pipeline", None), + ("geo_ip_stats", None), + ("get_pipeline", None), + ("processor_grok", None), + ("put_pipeline", None), + ("simulate", None), +) + def instrument_elasticsearch_client_ingest(module): - for method_name, arg_extractor in _elasticsearch_client_ingest_methods: - if hasattr(getattr(module, "IngestClient"), method_name): - wrap_elasticsearch_client_method(module, "IngestClient", method_name, arg_extractor, "ingest") + # The module path was remapped in v8 to match previous versions. + # In order to avoid double wrapping we check the version before + # wrapping. 
+ if ES_VERSION < (8,): + instrument_es_methods(module, "IngestClient", _elasticsearch_client_ingest_methods_below_v8, "ingest") + + +def instrument_elasticsearch_client_ingest_v8(module): + instrument_es_methods(module, "IngestClient", _elasticsearch_client_ingest_methods_v8, "ingest") # @@ -326,6 +601,17 @@ def instrument_elasticsearch_connection_base(module): wrap_function_wrapper(module, "Connection.__init__", _nr_Connection__init__wrapper) +def BaseNode__init__wrapper(wrapped, instance, args, kwargs): + result = wrapped(*args, **kwargs) + instance._nr_host_port = (instance.host, str(instance.port)) + return result + + +def instrument_elastic_transport__node__base(module): + if hasattr(module, "BaseNode"): + wrap_function_wrapper(module, "BaseNode.__init__", BaseNode__init__wrapper) + + def _nr_get_connection_wrapper(wrapped, instance, args, kwargs): """Read instance info from Connection and stash on Transaction.""" @@ -343,7 +629,7 @@ def _nr_get_connection_wrapper(wrapped, instance, args, kwargs): if tracer_settings.instance_reporting.enabled: host, port_path_or_id = conn._nr_host_port instance_info = (host, port_path_or_id, None) - except: + except Exception: instance_info = ("unknown", "unknown", None) transaction._nr_datastore_instance_info = instance_info @@ -351,5 +637,26 @@ def _nr_get_connection_wrapper(wrapped, instance, args, kwargs): return conn +def _nr_perform_request_wrapper(wrapped, instance, args, kwargs): + """Read instance info from Connection and stash on Transaction.""" + + transaction = current_transaction() + + if transaction is None: + return wrapped(*args, **kwargs) + + if not hasattr(instance.node_pool.get, "_nr_wrapped"): + instance.node_pool.get = function_wrapper(_nr_get_connection_wrapper)(instance.node_pool.get) + instance.node_pool.get._nr_wrapped = True + + return wrapped(*args, **kwargs) + + def instrument_elasticsearch_transport(module): - wrap_function_wrapper(module, "Transport.get_connection", _nr_get_connection_wrapper) + 
if hasattr(module, "Transport") and hasattr(module.Transport, "get_connection"): + wrap_function_wrapper(module, "Transport.get_connection", _nr_get_connection_wrapper) + + +def instrument_elastic_transport__transport(module): + if hasattr(module, "Transport") and hasattr(module.Transport, "perform_request"): + wrap_function_wrapper(module, "Transport.perform_request", _nr_perform_request_wrapper) diff --git a/tests/datastore_elasticsearch/conftest.py b/tests/datastore_elasticsearch/conftest.py index d665bce87..5cb0b0824 100644 --- a/tests/datastore_elasticsearch/conftest.py +++ b/tests/datastore_elasticsearch/conftest.py @@ -13,25 +13,43 @@ # limitations under the License. import pytest +from testing_support.db_settings import elasticsearch_settings +from testing_support.fixtures import ( # noqa + code_coverage_fixture, + collector_agent_registration_fixture, + collector_available_fixture, +) -from testing_support.fixtures import (code_coverage_fixture, - collector_agent_registration_fixture, collector_available_fixture) +from newrelic.common.package_version_utils import get_package_version _coverage_source = [ - 'newrelic.hooks.datastore_elasticsearch', + "newrelic.hooks.datastore_elasticsearch", ] code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { - 'transaction_tracer.explain_threshold': 0.0, - 'transaction_tracer.transaction_threshold': 0.0, - 'transaction_tracer.stack_trace_threshold': 0.0, - 'debug.log_data_collector_payloads': True, - 'debug.record_transaction_failure': True + "transaction_tracer.explain_threshold": 0.0, + "transaction_tracer.transaction_threshold": 0.0, + "transaction_tracer.stack_trace_threshold": 0.0, + "debug.log_data_collector_payloads": True, + "debug.record_transaction_failure": True, } collector_agent_registration = collector_agent_registration_fixture( - app_name='Python Agent Test (datastore_elasticsearch)', - default_settings=_default_settings, - linked_applications=['Python Agent Test 
(datastore)']) + app_name="Python Agent Test (datastore_elasticsearch)", + default_settings=_default_settings, + linked_applications=["Python Agent Test (datastore)"], +) + +ES_VERSION = tuple([int(n) for n in get_package_version("elasticsearch").split(".")]) +ES_SETTINGS = elasticsearch_settings()[0] +ES_MULTIPLE_SETTINGS = elasticsearch_settings() +ES_URL = "http://%s:%s" % (ES_SETTINGS["host"], ES_SETTINGS["port"]) + + +@pytest.fixture(scope="session") +def client(): + from elasticsearch import Elasticsearch + + return Elasticsearch(ES_URL) diff --git a/tests/datastore_elasticsearch/test_connection.py b/tests/datastore_elasticsearch/test_connection.py index 37df49b80..2e888af9b 100644 --- a/tests/datastore_elasticsearch/test_connection.py +++ b/tests/datastore_elasticsearch/test_connection.py @@ -12,21 +12,53 @@ # See the License for the specific language governing permissions and # limitations under the License. -from elasticsearch.connection.base import Connection +import pytest + +try: + from elasticsearch.connection.base import Connection +except ImportError: + from elastic_transport._models import NodeConfig + from elastic_transport._node._base import BaseNode as Connection + +from conftest import ES_VERSION, ES_SETTINGS + + +HOST = {"scheme": "http", "host": ES_SETTINGS["host"], "port": int(ES_SETTINGS["port"])} + +IS_V8 = ES_VERSION >= (8,) +SKIP_IF_V7 = pytest.mark.skipif(not IS_V8, reason="Skipping v8 tests.") +SKIP_IF_V8 = pytest.mark.skipif(IS_V8, reason="Skipping v7 tests.") def test_connection_default(): - conn = Connection() - assert conn._nr_host_port == ('localhost', '9200') + if IS_V8: + conn = Connection(NodeConfig(**HOST)) + else: + conn = Connection(**HOST) + + assert conn._nr_host_port == ("localhost", ES_SETTINGS["port"]) + +@SKIP_IF_V7 +def test_connection_config(): + conn = Connection(NodeConfig(scheme="http", host="foo", port=8888)) + assert conn._nr_host_port == ("foo", "8888") + + +@SKIP_IF_V8 def test_connection_host_arg(): - conn = 
Connection('the_host') - assert conn._nr_host_port == ('the_host', '9200') + conn = Connection("the_host") + assert conn._nr_host_port == ("the_host", "9200") + +@SKIP_IF_V8 def test_connection_args(): - conn = Connection('the_host', 9999) - assert conn._nr_host_port == ('the_host', '9999') + conn = Connection("the_host", 9999) + assert conn._nr_host_port == ("the_host", "9999") + +@SKIP_IF_V8 def test_connection_kwargs(): - conn = Connection(host='foo', port=8888) - assert conn._nr_host_port == ('foo', '8888') + conn = Connection(host="foo", port=8888) + assert conn._nr_host_port == ("foo", "8888") + diff --git a/tests/datastore_elasticsearch/test_database_duration.py b/tests/datastore_elasticsearch/test_database_duration.py index a76f700b1..e2599c67b 100644 --- a/tests/datastore_elasticsearch/test_database_duration.py +++ b/tests/datastore_elasticsearch/test_database_duration.py @@ -14,38 +14,48 @@ import sqlite3 -from elasticsearch import Elasticsearch +from testing_support.validators.validate_database_duration import ( + validate_database_duration, +) from newrelic.api.background_task import background_task -from testing_support.db_settings import elasticsearch_settings -from testing_support.validators.validate_database_duration import validate_database_duration +from conftest import ES_VERSION -ES_SETTINGS = elasticsearch_settings()[0] -ES_URL = 'http://%s:%s' % (ES_SETTINGS['host'], ES_SETTINGS['port']) -def _exercise_es(es): - es.index(index="contacts", doc_type="person", - body={"name": "Joe Tester", "age": 25, "title": "QA Engineer"}, id=1) - es.index(index="contacts", doc_type="person", - body={"name": "Jessica Coder", "age": 32, "title": "Programmer"}, id=2) - es.index(index="contacts", doc_type="person", - body={"name": "Freddy Tester", "age": 29, "title": "Assistant"}, id=3) - es.indices.refresh('contacts') + + +def _exercise_es_v7(es): + es.index(index="contacts", doc_type="person", body={"name": "Joe Tester", "age": 25, "title": "QA Engineer"}, id=1) 
+ es.index( + index="contacts", doc_type="person", body={"name": "Jessica Coder", "age": 32, "title": "Programmer"}, id=2 + ) + es.index(index="contacts", doc_type="person", body={"name": "Freddy Tester", "age": 29, "title": "Assistant"}, id=3) + es.indices.refresh("contacts") + + +def _exercise_es_v8(es): + es.index(index="contacts", body={"name": "Joe Tester", "age": 25, "title": "QA Engineer"}, id=1) + es.index(index="contacts", body={"name": "Jessica Coder", "age": 32, "title": "Programmer"}, id=2) + es.index(index="contacts", body={"name": "Freddy Tester", "age": 29, "title": "Assistant"}, id=3) + es.indices.refresh(index="contacts") + + +_exercise_es = _exercise_es_v7 if ES_VERSION < (8, 0, 0) else _exercise_es_v8 + @validate_database_duration() @background_task() -def test_elasticsearch_database_duration(): - client = Elasticsearch(ES_URL) +def test_elasticsearch_database_duration(client): _exercise_es(client) + @validate_database_duration() @background_task() -def test_elasticsearch_and_sqlite_database_duration(): +def test_elasticsearch_and_sqlite_database_duration(client): # Make Elasticsearch queries - client = Elasticsearch(ES_URL) _exercise_es(client) # Make sqlite queries diff --git a/tests/datastore_elasticsearch/test_elasticsearch.py b/tests/datastore_elasticsearch/test_elasticsearch.py index 65a0374df..d2c892ea9 100644 --- a/tests/datastore_elasticsearch/test_elasticsearch.py +++ b/tests/datastore_elasticsearch/test_elasticsearch.py @@ -12,122 +12,120 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from elasticsearch import Elasticsearch import elasticsearch.client +from testing_support.fixtures import override_application_settings +from testing_support.util import instance_hostname +from testing_support.validators.validate_transaction_errors import ( + validate_transaction_errors, +) +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) from newrelic.api.background_task import background_task -from testing_support.fixtures import override_application_settings -from testing_support.validators.validate_transaction_errors import validate_transaction_errors -from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics -from testing_support.db_settings import elasticsearch_settings -from testing_support.util import instance_hostname +from conftest import ES_VERSION, ES_SETTINGS -ES_SETTINGS = elasticsearch_settings()[0] -ES_URL = 'http://%s:%s' % (ES_SETTINGS['host'], ES_SETTINGS['port']) # Settings _enable_instance_settings = { - 'datastore_tracer.instance_reporting.enabled': True, + "datastore_tracer.instance_reporting.enabled": True, } _disable_instance_settings = { - 'datastore_tracer.instance_reporting.enabled': False, + "datastore_tracer.instance_reporting.enabled": False, } # Metrics _base_scoped_metrics = [ - ('Datastore/statement/Elasticsearch/_all/cluster.health', 1), - ('Datastore/statement/Elasticsearch/_all/search', 2), - ('Datastore/statement/Elasticsearch/address/index', 2), - ('Datastore/statement/Elasticsearch/address/search', 1), - ('Datastore/statement/Elasticsearch/contacts/index', 3), - ('Datastore/statement/Elasticsearch/contacts/indices.refresh', 1), - ('Datastore/statement/Elasticsearch/contacts/search', 2), - ('Datastore/statement/Elasticsearch/other/search', 2), + ("Datastore/statement/Elasticsearch/_all/cluster.health", 1), + ("Datastore/statement/Elasticsearch/_all/search", 2), + ("Datastore/statement/Elasticsearch/address/index", 2), + 
("Datastore/statement/Elasticsearch/address/search", 1), + ("Datastore/statement/Elasticsearch/contacts/index", 3), + ("Datastore/statement/Elasticsearch/contacts/indices.refresh", 1), + ("Datastore/statement/Elasticsearch/contacts/search", 2), + ("Datastore/statement/Elasticsearch/other/search", 2), ] _base_rollup_metrics = [ - ('Datastore/operation/Elasticsearch/cluster.health', 1), - ('Datastore/operation/Elasticsearch/index', 5), - ('Datastore/operation/Elasticsearch/indices.refresh', 1), - ('Datastore/operation/Elasticsearch/search', 7), - ('Datastore/statement/Elasticsearch/_all/cluster.health', 1), - ('Datastore/statement/Elasticsearch/_all/search', 2), - ('Datastore/statement/Elasticsearch/address/index', 2), - ('Datastore/statement/Elasticsearch/address/search', 1), - ('Datastore/statement/Elasticsearch/contacts/index', 3), - ('Datastore/statement/Elasticsearch/contacts/indices.refresh', 1), - ('Datastore/statement/Elasticsearch/contacts/search', 2), - ('Datastore/statement/Elasticsearch/other/search', 2), + ("Datastore/operation/Elasticsearch/cluster.health", 1), + ("Datastore/operation/Elasticsearch/index", 5), + ("Datastore/operation/Elasticsearch/indices.refresh", 1), + ("Datastore/operation/Elasticsearch/search", 7), + ("Datastore/statement/Elasticsearch/_all/cluster.health", 1), + ("Datastore/statement/Elasticsearch/_all/search", 2), + ("Datastore/statement/Elasticsearch/address/index", 2), + ("Datastore/statement/Elasticsearch/address/search", 1), + ("Datastore/statement/Elasticsearch/contacts/index", 3), + ("Datastore/statement/Elasticsearch/contacts/indices.refresh", 1), + ("Datastore/statement/Elasticsearch/contacts/search", 2), + ("Datastore/statement/Elasticsearch/other/search", 2), ] # Version support +def is_importable(module_path): + try: + __import__(module_path) + return True + except ImportError: + return False + + _all_count = 14 -try: - import elasticsearch.client.cat - _base_scoped_metrics.append( - 
('Datastore/operation/Elasticsearch/cat.health', 1)) - _base_rollup_metrics.append( - ('Datastore/operation/Elasticsearch/cat.health', 1)) +if is_importable("elasticsearch.client.cat") or is_importable("elasticsearch._sync.client.cat"): + _base_scoped_metrics.append(("Datastore/operation/Elasticsearch/cat.health", 1)) + _base_rollup_metrics.append(("Datastore/operation/Elasticsearch/cat.health", 1)) _all_count += 1 -except ImportError: - _base_scoped_metrics.append( - ('Datastore/operation/Elasticsearch/cat.health', None)) - _base_rollup_metrics.append( - ('Datastore/operation/Elasticsearch/cat.health', None)) - -try: - import elasticsearch.client.nodes - _base_scoped_metrics.append( - ('Datastore/operation/Elasticsearch/nodes.info', 1)) - _base_rollup_metrics.append( - ('Datastore/operation/Elasticsearch/nodes.info', 1)) +else: + _base_scoped_metrics.append(("Datastore/operation/Elasticsearch/cat.health", None)) + _base_rollup_metrics.append(("Datastore/operation/Elasticsearch/cat.health", None)) + +if is_importable("elasticsearch.client.nodes") or is_importable("elasticsearch._sync.client.nodes"): + _base_scoped_metrics.append(("Datastore/operation/Elasticsearch/nodes.info", 1)) + _base_rollup_metrics.append(("Datastore/operation/Elasticsearch/nodes.info", 1)) _all_count += 1 -except ImportError: - _base_scoped_metrics.append( - ('Datastore/operation/Elasticsearch/nodes.info', None)) - _base_rollup_metrics.append( - ('Datastore/operation/Elasticsearch/nodes.info', None)) - -if (hasattr(elasticsearch.client, 'SnapshotClient') and - hasattr(elasticsearch.client.SnapshotClient, 'status')): - _base_scoped_metrics.append( - ('Datastore/operation/Elasticsearch/snapshot.status', 1)) - _base_rollup_metrics.append( - ('Datastore/operation/Elasticsearch/snapshot.status', 1)) +else: + _base_scoped_metrics.append(("Datastore/operation/Elasticsearch/nodes.info", None)) + _base_rollup_metrics.append(("Datastore/operation/Elasticsearch/nodes.info", None)) + +if 
hasattr(elasticsearch.client, "SnapshotClient") and hasattr(elasticsearch.client.SnapshotClient, "status"): + _base_scoped_metrics.append(("Datastore/operation/Elasticsearch/snapshot.status", 1)) + _base_rollup_metrics.append(("Datastore/operation/Elasticsearch/snapshot.status", 1)) _all_count += 1 else: - _base_scoped_metrics.append( - ('Datastore/operation/Elasticsearch/snapshot.status', None)) - _base_rollup_metrics.append( - ('Datastore/operation/Elasticsearch/snapshot.status', None)) - -if hasattr(elasticsearch.client.IndicesClient, 'status'): - _base_scoped_metrics.append( - ('Datastore/statement/Elasticsearch/_all/indices.status', 1)) - _base_rollup_metrics.extend([ - ('Datastore/operation/Elasticsearch/indices.status', 1), - ('Datastore/statement/Elasticsearch/_all/indices.status', 1), - ]) + _base_scoped_metrics.append(("Datastore/operation/Elasticsearch/snapshot.status", None)) + _base_rollup_metrics.append(("Datastore/operation/Elasticsearch/snapshot.status", None)) + +if hasattr(elasticsearch.client.IndicesClient, "status"): + _base_scoped_metrics.append(("Datastore/statement/Elasticsearch/_all/indices.status", 1)) + _base_rollup_metrics.extend( + [ + ("Datastore/operation/Elasticsearch/indices.status", 1), + ("Datastore/statement/Elasticsearch/_all/indices.status", 1), + ] + ) _all_count += 1 else: - _base_scoped_metrics.append( - ('Datastore/operation/Elasticsearch/indices.status', None)) - _base_rollup_metrics.extend([ - ('Datastore/operation/Elasticsearch/indices.status', None), - ('Datastore/statement/Elasticsearch/_all/indices.status', None), - ]) - -_base_rollup_metrics.extend([ - ('Datastore/all', _all_count), - ('Datastore/allOther', _all_count), - ('Datastore/Elasticsearch/all', _all_count), - ('Datastore/Elasticsearch/allOther', _all_count), -]) + _base_scoped_metrics.append(("Datastore/operation/Elasticsearch/indices.status", None)) + _base_rollup_metrics.extend( + [ + ("Datastore/operation/Elasticsearch/indices.status", None), + 
("Datastore/statement/Elasticsearch/_all/indices.status", None), + ] + ) + +_base_rollup_metrics.extend( + [ + ("Datastore/all", _all_count), + ("Datastore/allOther", _all_count), + ("Datastore/Elasticsearch/all", _all_count), + ("Datastore/Elasticsearch/allOther", _all_count), + ] +) # Instance info @@ -137,74 +135,105 @@ _enable_scoped_metrics = list(_base_scoped_metrics) _enable_rollup_metrics = list(_base_rollup_metrics) -_host = instance_hostname(ES_SETTINGS['host']) -_port = ES_SETTINGS['port'] +_host = instance_hostname(ES_SETTINGS["host"]) +_port = ES_SETTINGS["port"] -_instance_metric_name = 'Datastore/instance/Elasticsearch/%s/%s' % ( - _host, _port) +_instance_metric_name = "Datastore/instance/Elasticsearch/%s/%s" % (_host, _port) -_enable_rollup_metrics.append( - (_instance_metric_name, _all_count) -) +_enable_rollup_metrics.append((_instance_metric_name, _all_count)) -_disable_rollup_metrics.append( - (_instance_metric_name, None) -) +_disable_rollup_metrics.append((_instance_metric_name, None)) # Query -def _exercise_es(es): - es.index(index="contacts", doc_type="person", - body={"name": "Joe Tester", "age": 25, "title": "QA Engineer"}, id=1) - es.index(index="contacts", doc_type="person", - body={"name": "Jessica Coder", "age": 32, "title": "Programmer"}, id=2) - es.index(index="contacts", doc_type="person", - body={"name": "Freddy Tester", "age": 29, "title": "Assistant"}, id=3) - es.indices.refresh('contacts') - es.index(index="address", doc_type="employee", body={"name": "Sherlock", - "address": "221B Baker Street, London"}, id=1) - es.index(index="address", doc_type="employee", body={"name": "Bilbo", - "address": "Bag End, Bagshot row, Hobbiton, Shire"}, id=2) - es.search(index='contacts', q='name:Joe') - es.search(index='contacts', q='name:jessica') - es.search(index='address', q='name:Sherlock') - es.search(index=['contacts', 'address'], q='name:Bilbo') - es.search(index='contacts,address', q='name:Bilbo') - es.search(index='*', q='name:Bilbo') 
- es.search(q='name:Bilbo') + +def _exercise_es_v7(es): + es.index(index="contacts", doc_type="person", body={"name": "Joe Tester", "age": 25, "title": "QA Engineer"}, id=1) + es.index( + index="contacts", doc_type="person", body={"name": "Jessica Coder", "age": 32, "title": "Programmer"}, id=2 + ) + es.index(index="contacts", doc_type="person", body={"name": "Freddy Tester", "age": 29, "title": "Assistant"}, id=3) + es.indices.refresh("contacts") + es.index( + index="address", doc_type="employee", body={"name": "Sherlock", "address": "221B Baker Street, London"}, id=1 + ) + es.index( + index="address", + doc_type="employee", + body={"name": "Bilbo", "address": "Bag End, Bagshot row, Hobbiton, Shire"}, + id=2, + ) + es.search(index="contacts", q="name:Joe") + es.search(index="contacts", q="name:jessica") + es.search(index="address", q="name:Sherlock") + es.search(index=["contacts", "address"], q="name:Bilbo") + es.search(index="contacts,address", q="name:Bilbo") + es.search(index="*", q="name:Bilbo") + es.search(q="name:Bilbo") es.cluster.health() - if hasattr(es, 'cat'): + if hasattr(es, "cat"): es.cat.health() - if hasattr(es, 'nodes'): + if hasattr(es, "nodes"): es.nodes.info() - if hasattr(es, 'snapshot') and hasattr(es.snapshot, 'status'): + if hasattr(es, "snapshot") and hasattr(es.snapshot, "status"): es.snapshot.status() - if hasattr(es.indices, 'status'): + if hasattr(es.indices, "status"): es.indices.status() + +def _exercise_es_v8(es): + es.index(index="contacts", body={"name": "Joe Tester", "age": 25, "title": "QA Engineer"}, id=1) + es.index(index="contacts", body={"name": "Jessica Coder", "age": 32, "title": "Programmer"}, id=2) + es.index(index="contacts", body={"name": "Freddy Tester", "age": 29, "title": "Assistant"}, id=3) + es.indices.refresh(index="contacts") + es.index(index="address", body={"name": "Sherlock", "address": "221B Baker Street, London"}, id=1) + es.index(index="address", body={"name": "Bilbo", "address": "Bag End, Bagshot row, 
Hobbiton, Shire"}, id=2) + es.search(index="contacts", q="name:Joe") + es.search(index="contacts", q="name:jessica") + es.search(index="address", q="name:Sherlock") + es.search(index=["contacts", "address"], q="name:Bilbo") + es.search(index="contacts,address", q="name:Bilbo") + es.search(index="*", q="name:Bilbo") + es.search(q="name:Bilbo") + es.cluster.health() + + if hasattr(es, "cat"): + es.cat.health() + if hasattr(es, "nodes"): + es.nodes.info() + if hasattr(es, "snapshot") and hasattr(es.snapshot, "status"): + es.snapshot.status() + if hasattr(es.indices, "status"): + es.indices.status() + + +_exercise_es = _exercise_es_v7 if ES_VERSION < (8, 0, 0) else _exercise_es_v8 + + # Test @validate_transaction_errors(errors=[]) @validate_transaction_metrics( - 'test_elasticsearch:test_elasticsearch_operation_disabled', - scoped_metrics=_disable_scoped_metrics, - rollup_metrics=_disable_rollup_metrics, - background_task=True) + "test_elasticsearch:test_elasticsearch_operation_disabled", + scoped_metrics=_disable_scoped_metrics, + rollup_metrics=_disable_rollup_metrics, + background_task=True, +) @override_application_settings(_disable_instance_settings) @background_task() -def test_elasticsearch_operation_disabled(): - client = Elasticsearch(ES_URL) +def test_elasticsearch_operation_disabled(client): _exercise_es(client) + @validate_transaction_errors(errors=[]) @validate_transaction_metrics( - 'test_elasticsearch:test_elasticsearch_operation_enabled', - scoped_metrics=_enable_scoped_metrics, - rollup_metrics=_enable_rollup_metrics, - background_task=True) + "test_elasticsearch:test_elasticsearch_operation_enabled", + scoped_metrics=_enable_scoped_metrics, + rollup_metrics=_enable_rollup_metrics, + background_task=True, +) @override_application_settings(_enable_instance_settings) @background_task() -def test_elasticsearch_operation_enabled(): - client = Elasticsearch(ES_URL) +def test_elasticsearch_operation_enabled(client): _exercise_es(client) diff --git 
a/tests/datastore_elasticsearch/test_instrumented_methods.py b/tests/datastore_elasticsearch/test_instrumented_methods.py index 28ca8f975..4ad88c2a5 100644 --- a/tests/datastore_elasticsearch/test_instrumented_methods.py +++ b/tests/datastore_elasticsearch/test_instrumented_methods.py @@ -11,61 +11,131 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - import elasticsearch import elasticsearch.client +import pytest +from conftest import ES_VERSION +from testing_support.validators.validate_datastore_trace_inputs import ( + validate_datastore_trace_inputs, +) + +from newrelic.api.background_task import background_task -from newrelic.hooks.datastore_elasticsearch import ( - _elasticsearch_client_methods, - _elasticsearch_client_indices_methods, - _elasticsearch_client_cat_methods, - _elasticsearch_client_cluster_methods, - _elasticsearch_client_nodes_methods, - _elasticsearch_client_snapshot_methods, - _elasticsearch_client_tasks_methods, - _elasticsearch_client_ingest_methods, +RUN_IF_V8 = pytest.mark.skipif( + ES_VERSION < (8,), reason="Only run for v8+. We don't support all methods in previous versions." 
) -def _test_methods_wrapped(object, method_name_tuples): - for method_name, _ in method_name_tuples: - method = getattr(object, method_name, None) - if method is not None: - err = '%s.%s isnt being wrapped' % (object, method) - assert hasattr(method, '__wrapped__'), err +@pytest.fixture +def client(client): + if ES_VERSION < (8, 0): + client.index( + index="contacts", doc_type="person", body={"name": "Joe Tester", "age": 25, "title": "QA Engineer"}, id=1 + ) + else: + client.index(index="contacts", body={"name": "Joe Tester", "age": 25, "title": "QA Engineer"}, id=1) + return client + + +@pytest.mark.parametrize( + "sub_module,method,args,kwargs,expected_index", + [ + (None, "exists", (), {"index": "contacts", "id": 1}, "contacts"), + (None, "info", (), {}, None), + pytest.param( + None, + "msearch", + (), + {"searches": [{}, {"query": {"match": {"message": "this is a test"}}}], "index": "contacts"}, + "contacts", + marks=RUN_IF_V8, + ), + ("indices", "exists", (), {"index": "contacts"}, "contacts"), + ("indices", "exists_template", (), {"name": "no-exist"}, None), + ("cat", "count", (), {"index": "contacts"}, "contacts"), + ("cat", "health", (), {}, None), + pytest.param( + "cluster", + "allocation_explain", + (), + {"index": "contacts", "shard": 0, "primary": True}, + "contacts", + marks=RUN_IF_V8, + ), + ("cluster", "get_settings", (), {}, None), + ("cluster", "health", (), {"index": "contacts"}, "contacts"), + ("nodes", "info", (), {}, None), + ("snapshot", "status", (), {}, None), + ("tasks", "list", (), {}, None), + ("ingest", "geo_ip_stats", (), {}, None), + ], +) +def test_method_on_client_datastore_trace_inputs(client, sub_module, method, args, kwargs, expected_index): + expected_operation = "%s.%s" % (sub_module, method) if sub_module else method + + @validate_datastore_trace_inputs(target=expected_index, operation=expected_operation) + @background_task() + def _test(): + if not sub_module: + getattr(client, method)(*args, **kwargs) + else: + 
getattr(getattr(client, sub_module), method)(*args, **kwargs) + + _test() + + +def _test_methods_wrapped(_object, ignored_methods=None): + if not ignored_methods: + ignored_methods = {"perform_request", "transport"} + + def is_wrapped(m): + return hasattr(getattr(_object, m), "__wrapped__") + + methods = {m for m in dir(_object) if not m[0] == "_"} + uninstrumented = {m for m in (methods - ignored_methods) if not is_wrapped(m)} + assert not uninstrumented, "There are uninstrumented methods: %s" % uninstrumented + + +@RUN_IF_V8 def test_instrumented_methods_client(): - _test_methods_wrapped(elasticsearch.Elasticsearch, - _elasticsearch_client_methods) + _test_methods_wrapped(elasticsearch.Elasticsearch) + +@RUN_IF_V8 def test_instrumented_methods_client_indices(): - _test_methods_wrapped(elasticsearch.client.IndicesClient, - _elasticsearch_client_indices_methods) + _test_methods_wrapped(elasticsearch.client.IndicesClient) + +@RUN_IF_V8 def test_instrumented_methods_client_cluster(): - _test_methods_wrapped(elasticsearch.client.ClusterClient, - _elasticsearch_client_cluster_methods) + _test_methods_wrapped(elasticsearch.client.ClusterClient) + +@RUN_IF_V8 def test_instrumented_methods_client_cat(): - if hasattr(elasticsearch.client, 'CatClient'): - _test_methods_wrapped(elasticsearch.client.CatClient, - _elasticsearch_client_cat_methods) + if hasattr(elasticsearch.client, "CatClient"): + _test_methods_wrapped(elasticsearch.client.CatClient) + +@RUN_IF_V8 def test_instrumented_methods_client_nodes(): - if hasattr(elasticsearch.client, 'NodesClient'): - _test_methods_wrapped(elasticsearch.client.NodesClient, - _elasticsearch_client_nodes_methods) + if hasattr(elasticsearch.client, "NodesClient"): + _test_methods_wrapped(elasticsearch.client.NodesClient) + +@RUN_IF_V8 def test_instrumented_methods_client_snapshot(): - if hasattr(elasticsearch.client, 'SnapshotClient'): - _test_methods_wrapped(elasticsearch.client.SnapshotClient, - _elasticsearch_client_snapshot_methods) 
+ if hasattr(elasticsearch.client, "SnapshotClient"): + _test_methods_wrapped(elasticsearch.client.SnapshotClient) + +@RUN_IF_V8 def test_instrumented_methods_client_tasks(): - if hasattr(elasticsearch.client, 'TasksClient'): - _test_methods_wrapped(elasticsearch.client.TasksClient, - _elasticsearch_client_tasks_methods) + if hasattr(elasticsearch.client, "TasksClient"): + _test_methods_wrapped(elasticsearch.client.TasksClient) + +@RUN_IF_V8 def test_instrumented_methods_client_ingest(): - if hasattr(elasticsearch.client, 'IngestClient'): - _test_methods_wrapped(elasticsearch.client.IngestClient, - _elasticsearch_client_ingest_methods) + if hasattr(elasticsearch.client, "IngestClient"): + _test_methods_wrapped(elasticsearch.client.IngestClient) diff --git a/tests/datastore_elasticsearch/test_mget.py b/tests/datastore_elasticsearch/test_mget.py index 9f0b442e8..f3f7c0979 100644 --- a/tests/datastore_elasticsearch/test_mget.py +++ b/tests/datastore_elasticsearch/test_mget.py @@ -13,42 +13,43 @@ # limitations under the License. 
import pytest - from elasticsearch import Elasticsearch -from elasticsearch.connection_pool import RoundRobinSelector +try: + from elastic_transport import RoundRobinSelector +except ImportError: + from elasticsearch.connection_pool import RoundRobinSelector + +from conftest import ES_MULTIPLE_SETTINGS, ES_VERSION from testing_support.fixtures import override_application_settings -from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics -from testing_support.db_settings import elasticsearch_settings from testing_support.util import instance_hostname +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) from newrelic.api.background_task import background_task -ES_MULTIPLE_SETTINGS = elasticsearch_settings() - # Settings _enable_instance_settings = { - 'datastore_tracer.instance_reporting.enabled': True, + "datastore_tracer.instance_reporting.enabled": True, } _disable_instance_settings = { - 'datastore_tracer.instance_reporting.enabled': False, + "datastore_tracer.instance_reporting.enabled": False, } # Metrics -_base_scoped_metrics = ( - ('Datastore/statement/Elasticsearch/contacts/index', 2), -) +_base_scoped_metrics = (("Datastore/statement/Elasticsearch/contacts/index", 2),) _base_rollup_metrics = ( - ('Datastore/all', 3), - ('Datastore/allOther', 3), - ('Datastore/Elasticsearch/all', 3), - ('Datastore/Elasticsearch/allOther', 3), - ('Datastore/operation/Elasticsearch/index', 2), - ('Datastore/operation/Elasticsearch/mget', 1), - ('Datastore/statement/Elasticsearch/contacts/index', 2), + ("Datastore/all", 3), + ("Datastore/allOther", 3), + ("Datastore/Elasticsearch/all", 3), + ("Datastore/Elasticsearch/allOther", 3), + ("Datastore/operation/Elasticsearch/index", 2), + ("Datastore/operation/Elasticsearch/mget", 1), + ("Datastore/statement/Elasticsearch/contacts/index", 2), ) _disable_scoped_metrics = list(_base_scoped_metrics) @@ -61,89 +62,101 @@ es_1 = 
ES_MULTIPLE_SETTINGS[0] es_2 = ES_MULTIPLE_SETTINGS[1] - host_1 = instance_hostname(es_1['host']) - port_1 = es_1['port'] + host_1 = instance_hostname(es_1["host"]) + port_1 = es_1["port"] - host_2 = instance_hostname(es_2['host']) - port_2 = es_2['port'] + host_2 = instance_hostname(es_2["host"]) + port_2 = es_2["port"] - instance_metric_name_1 = 'Datastore/instance/Elasticsearch/%s/%s' % ( - host_1, port_1) - instance_metric_name_2 = 'Datastore/instance/Elasticsearch/%s/%s' % ( - host_2, port_2) + instance_metric_name_1 = "Datastore/instance/Elasticsearch/%s/%s" % (host_1, port_1) + instance_metric_name_2 = "Datastore/instance/Elasticsearch/%s/%s" % (host_2, port_2) - _enable_rollup_metrics.extend([ + _enable_rollup_metrics.extend( + [ (instance_metric_name_1, 2), (instance_metric_name_2, 1), - ]) + ] + ) - _disable_rollup_metrics.extend([ + _disable_rollup_metrics.extend( + [ (instance_metric_name_1, None), (instance_metric_name_2, None), - ]) + ] + ) + + +@pytest.fixture(scope="module") +def client(): + urls = ["http://%s:%s" % (db["host"], db["port"]) for db in ES_MULTIPLE_SETTINGS] + # When selecting a connection from the pool, use the round robin method. + # This is actually the default already. Using round robin will ensure that + # doing two db calls will mean elastic search is talking to two different + # dbs. 
+ if ES_VERSION >= (8,): + client = Elasticsearch(urls, node_selector_class=RoundRobinSelector, randomize_hosts=False) + else: + client = Elasticsearch(urls, selector_class=RoundRobinSelector, randomize_hosts=False) + return client + # Query + def _exercise_es_multi(es): # set on db 1 - es.index(index='contacts', doc_type='person', - body={'name': 'Joe Tester', 'age': 25, 'title': 'QA Engineer'}, - id=1) - - # set on db 2 - es.index(index='contacts', doc_type='person', - body={'name': 'Jane Tester', 'age': 22, 'title': 'Senior QA Engineer'}, - id=2) + if ES_VERSION >= (8,): + es.index(index="contacts", body={"name": "Joe Tester", "age": 25, "title": "QA Engineer"}, id=1) + # set on db 2 + es.index(index="contacts", body={"name": "Jane Tester", "age": 22, "title": "Senior QA Engineer"}, id=2) + else: + es.index( + index="contacts", doc_type="person", body={"name": "Joe Tester", "age": 25, "title": "QA Engineer"}, id=1 + ) + # set on db 2 + es.index( + index="contacts", + doc_type="person", + body={"name": "Jane Tester", "age": 22, "title": "Senior QA Engineer"}, + id=2, + ) # ask db 1, will return info from db 1 and 2 mget_body = { - 'docs': [ - {'_id': 1, '_index': 'contacts'}, - {'_id': 2, '_index': 'contacts'}, + "docs": [ + {"_id": 1, "_index": "contacts"}, + {"_id": 2, "_index": "contacts"}, ] } - results = es.mget(mget_body) - assert len(results['docs']) == 2 + results = es.mget(body=mget_body) + assert len(results["docs"]) == 2 + # Test -@pytest.mark.skipif(len(ES_MULTIPLE_SETTINGS) < 2, - reason='Test environment not configured with multiple databases.') + +@pytest.mark.skipif(len(ES_MULTIPLE_SETTINGS) < 2, reason="Test environment not configured with multiple databases.") @override_application_settings(_enable_instance_settings) @validate_transaction_metrics( - 'test_mget:test_multi_get_enabled', - scoped_metrics=_enable_scoped_metrics, - rollup_metrics=_enable_rollup_metrics, - background_task=True) + "test_mget:test_multi_get_enabled", + 
scoped_metrics=_enable_scoped_metrics, + rollup_metrics=_enable_rollup_metrics, + background_task=True, +) @background_task() -def test_multi_get_enabled(): - urls = ['http://%s:%s' % (db['host'], db['port']) for db in - ES_MULTIPLE_SETTINGS] - # When selecting a connection from the pool, use the round robin method. - # This is actually the default already. Using round robin will ensure that - # doing two db calls will mean elastic search is talking to two different - # dbs. - client = Elasticsearch(urls, selector_class=RoundRobinSelector, - randomize_hosts=False) +def test_multi_get_enabled(client): _exercise_es_multi(client) -@pytest.mark.skipif(len(ES_MULTIPLE_SETTINGS) < 2, - reason='Test environment not configured with multiple databases.') + +@pytest.mark.skipif(len(ES_MULTIPLE_SETTINGS) < 2, reason="Test environment not configured with multiple databases.") @override_application_settings(_disable_instance_settings) @validate_transaction_metrics( - 'test_mget:test_multi_get_disabled', - scoped_metrics=_disable_scoped_metrics, - rollup_metrics=_disable_rollup_metrics, - background_task=True) + "test_mget:test_multi_get_disabled", + scoped_metrics=_disable_scoped_metrics, + rollup_metrics=_disable_rollup_metrics, + background_task=True, +) @background_task() -def test_multi_get_disabled(): - urls = ['http://%s:%s' % (db['host'], db['port']) for db in - ES_MULTIPLE_SETTINGS] - # When selecting a connection from the pool, use the round robin method. - # This is actually the default already. Using round robin will ensure that - # doing two db calls will mean elastic search is talking to two different - # dbs. 
- client = Elasticsearch(urls, selector_class=RoundRobinSelector, - randomize_hosts=False) +def test_multi_get_disabled(client): _exercise_es_multi(client) diff --git a/tests/datastore_elasticsearch/test_multiple_dbs.py b/tests/datastore_elasticsearch/test_multiple_dbs.py index 70a7be4f1..71c47b168 100644 --- a/tests/datastore_elasticsearch/test_multiple_dbs.py +++ b/tests/datastore_elasticsearch/test_multiple_dbs.py @@ -13,40 +13,36 @@ # limitations under the License. import pytest - +from conftest import ES_MULTIPLE_SETTINGS, ES_VERSION from elasticsearch import Elasticsearch - from testing_support.fixtures import override_application_settings -from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics -from testing_support.db_settings import elasticsearch_settings from testing_support.util import instance_hostname +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) from newrelic.api.background_task import background_task -ES_MULTIPLE_SETTINGS = elasticsearch_settings() - # Settings _enable_instance_settings = { - 'datastore_tracer.instance_reporting.enabled': True, + "datastore_tracer.instance_reporting.enabled": True, } _disable_instance_settings = { - 'datastore_tracer.instance_reporting.enabled': False, + "datastore_tracer.instance_reporting.enabled": False, } # Metrics -_base_scoped_metrics = ( - ('Datastore/statement/Elasticsearch/contacts/index', 2), -) +_base_scoped_metrics = (("Datastore/statement/Elasticsearch/contacts/index", 2),) _base_rollup_metrics = ( - ('Datastore/all', 2), - ('Datastore/allOther', 2), - ('Datastore/Elasticsearch/all', 2), - ('Datastore/Elasticsearch/allOther', 2), - ('Datastore/operation/Elasticsearch/index', 2), - ('Datastore/statement/Elasticsearch/contacts/index', 2), + ("Datastore/all", 2), + ("Datastore/allOther", 2), + ("Datastore/Elasticsearch/all", 2), + ("Datastore/Elasticsearch/allOther", 2), + 
("Datastore/operation/Elasticsearch/index", 2), + ("Datastore/statement/Elasticsearch/contacts/index", 2), ) _disable_scoped_metrics = list(_base_scoped_metrics) @@ -59,61 +55,71 @@ es_1 = ES_MULTIPLE_SETTINGS[0] es_2 = ES_MULTIPLE_SETTINGS[1] - host_1 = instance_hostname(es_1['host']) - port_1 = es_1['port'] + host_1 = instance_hostname(es_1["host"]) + port_1 = es_1["port"] - host_2 = instance_hostname(es_2['host']) - port_2 = es_2['port'] + host_2 = instance_hostname(es_2["host"]) + port_2 = es_2["port"] - instance_metric_name_1 = 'Datastore/instance/Elasticsearch/%s/%s' % ( - host_1, port_1) - instance_metric_name_2 = 'Datastore/instance/Elasticsearch/%s/%s' % ( - host_2, port_2) + instance_metric_name_1 = "Datastore/instance/Elasticsearch/%s/%s" % (host_1, port_1) + instance_metric_name_2 = "Datastore/instance/Elasticsearch/%s/%s" % (host_2, port_2) - _enable_rollup_metrics.extend([ + _enable_rollup_metrics.extend( + [ (instance_metric_name_1, 1), (instance_metric_name_2, 1), - ]) + ] + ) - _disable_rollup_metrics.extend([ + _disable_rollup_metrics.extend( + [ (instance_metric_name_1, None), (instance_metric_name_2, None), - ]) + ] + ) # Query + def _exercise_es(es): - es.index(index='contacts', doc_type='person', - body={'name': 'Joe Tester', 'age': 25, 'title': 'QA Engineer'}, id=1) + if ES_VERSION >= (8,): + es.index(index="contacts", body={"name": "Joe Tester", "age": 25, "title": "QA Engineer"}, id=1) + else: + es.index( + index="contacts", doc_type="person", body={"name": "Joe Tester", "age": 25, "title": "QA Engineer"}, id=1 + ) + # Test -@pytest.mark.skipif(len(ES_MULTIPLE_SETTINGS) < 2, - reason='Test environment not configured with multiple databases.') + +@pytest.mark.skipif(len(ES_MULTIPLE_SETTINGS) < 2, reason="Test environment not configured with multiple databases.") @override_application_settings(_enable_instance_settings) @validate_transaction_metrics( - 'test_multiple_dbs:test_multiple_dbs_enabled', - scoped_metrics=_enable_scoped_metrics, - 
rollup_metrics=_enable_rollup_metrics, - background_task=True) + "test_multiple_dbs:test_multiple_dbs_enabled", + scoped_metrics=_enable_scoped_metrics, + rollup_metrics=_enable_rollup_metrics, + background_task=True, +) @background_task() def test_multiple_dbs_enabled(): for db in ES_MULTIPLE_SETTINGS: - es_url = 'http://%s:%s' % (db['host'], db['port']) + es_url = "http://%s:%s" % (db["host"], db["port"]) client = Elasticsearch(es_url) _exercise_es(client) -@pytest.mark.skipif(len(ES_MULTIPLE_SETTINGS) < 2, - reason='Test environment not configured with multiple databases.') + +@pytest.mark.skipif(len(ES_MULTIPLE_SETTINGS) < 2, reason="Test environment not configured with multiple databases.") @override_application_settings(_disable_instance_settings) @validate_transaction_metrics( - 'test_multiple_dbs:test_multiple_dbs_disabled', - scoped_metrics=_disable_scoped_metrics, - rollup_metrics=_disable_rollup_metrics, - background_task=True) + "test_multiple_dbs:test_multiple_dbs_disabled", + scoped_metrics=_disable_scoped_metrics, + rollup_metrics=_disable_rollup_metrics, + background_task=True, +) @background_task() def test_multiple_dbs_disabled(): for db in ES_MULTIPLE_SETTINGS: - es_url = 'http://%s:%s' % (db['host'], db['port']) + es_url = "http://%s:%s" % (db["host"], db["port"]) client = Elasticsearch(es_url) _exercise_es(client) diff --git a/tests/datastore_elasticsearch/test_trace_node.py b/tests/datastore_elasticsearch/test_trace_node.py index 445b4a4eb..af96b80b4 100644 --- a/tests/datastore_elasticsearch/test_trace_node.py +++ b/tests/datastore_elasticsearch/test_trace_node.py @@ -12,8 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from elasticsearch import Elasticsearch -from testing_support.db_settings import elasticsearch_settings from testing_support.fixtures import ( override_application_settings, validate_tt_parenting, @@ -25,8 +23,7 @@ from newrelic.api.background_task import background_task -ES_SETTINGS = elasticsearch_settings()[0] -ES_URL = "http://%s:%s" % (ES_SETTINGS["host"], ES_SETTINGS["port"]) +from conftest import ES_SETTINGS, ES_VERSION # Settings @@ -79,10 +76,16 @@ # Query -def _exercise_es(es): +def _exercise_es_v7(es): es.index(index="contacts", doc_type="person", body={"name": "Joe Tester", "age": 25, "title": "QA Master"}, id=1) +def _exercise_es_v8(es): + es.index(index="contacts", body={"name": "Joe Tester", "age": 25, "title": "QA Master"}, id=1) + + +_exercise_es = _exercise_es_v7 if ES_VERSION < (8, 0, 0) else _exercise_es_v8 + # Tests @@ -90,8 +93,7 @@ def _exercise_es(es): @validate_tt_collector_json(datastore_params=_enabled_required, datastore_forgone_params=_enabled_forgone) @validate_tt_parenting(_tt_parenting) @background_task() -def test_trace_node_datastore_params_enable_instance(): - client = Elasticsearch(ES_URL) +def test_trace_node_datastore_params_enable_instance(client): _exercise_es(client) @@ -99,8 +101,7 @@ def test_trace_node_datastore_params_enable_instance(): @validate_tt_collector_json(datastore_params=_disabled_required, datastore_forgone_params=_disabled_forgone) @validate_tt_parenting(_tt_parenting) @background_task() -def test_trace_node_datastore_params_disable_instance(): - client = Elasticsearch(ES_URL) +def test_trace_node_datastore_params_disable_instance(client): _exercise_es(client) @@ -108,6 +109,5 @@ def test_trace_node_datastore_params_disable_instance(): @validate_tt_collector_json(datastore_params=_instance_only_required, datastore_forgone_params=_instance_only_forgone) @validate_tt_parenting(_tt_parenting) @background_task() -def test_trace_node_datastore_params_instance_only(): - client = Elasticsearch(ES_URL) +def 
test_trace_node_datastore_params_instance_only(client): _exercise_es(client) diff --git a/tests/datastore_elasticsearch/test_transport.py b/tests/datastore_elasticsearch/test_transport.py index 49896ba07..a091a9a92 100644 --- a/tests/datastore_elasticsearch/test_transport.py +++ b/tests/datastore_elasticsearch/test_transport.py @@ -1,6 +1,6 @@ # Copyright 2010 New Relic, Inc. # -# Licensed under the Apache License, Version 2.0 (the "License"); +# Licensed under the Apache License, ES_VERSION 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # @@ -12,63 +12,99 @@ # See the License for the specific language governing permissions and # limitations under the License. -from elasticsearch import VERSION -from elasticsearch.client.utils import _make_path -from elasticsearch.transport import Transport -from elasticsearch.connection.http_requests import RequestsHttpConnection -from elasticsearch.connection.http_urllib3 import Urllib3HttpConnection +import pytest +from conftest import ES_SETTINGS, ES_VERSION from elasticsearch.serializer import JSONSerializer -from newrelic.api.application import application_instance as application -from newrelic.api.background_task import BackgroundTask +from newrelic.api.background_task import background_task +from newrelic.api.transaction import current_transaction -from testing_support.db_settings import elasticsearch_settings +try: + from elasticsearch.connection.http_requests import RequestsHttpConnection + from elasticsearch.connection.http_urllib3 import Urllib3HttpConnection + from elasticsearch.transport import Transport -ES_SETTINGS = elasticsearch_settings()[0] -HOST = { - 'host':ES_SETTINGS['host'], - 'port': int(ES_SETTINGS['port']) -} -INDEX = 'contacts' -DOC_TYPE = 'person' -ID = 1 -METHOD = _make_path(INDEX, DOC_TYPE, ID) -PARAMS = {} -HEADERS = {"Content-Type": "application/json"} -DATA = {"name": "Joe Tester"} -BODY = 
JSONSerializer().dumps(DATA).encode('utf-8') + NodeConfig = dict +except ImportError: + from elastic_transport._models import NodeConfig + from elastic_transport._node._http_requests import ( + RequestsHttpNode as RequestsHttpConnection, + ) + from elastic_transport._node._http_urllib3 import ( + Urllib3HttpNode as Urllib3HttpConnection, + ) + from elastic_transport._transport import Transport -def test_transport_get_connection(): - app = application() - with BackgroundTask(app, 'transport_perform_request') as transaction: - transport = Transport([HOST]) - transport.get_connection() +IS_V8 = ES_VERSION >= (8,) +IS_V7 = ES_VERSION >= (7,) and ES_VERSION < (8, 0) +IS_BELOW_V7 = ES_VERSION < (7,) - expected = (ES_SETTINGS['host'], ES_SETTINGS['port'], None) - assert transaction._nr_datastore_instance_info == expected +RUN_IF_V8 = pytest.mark.skipif(IS_V7 or IS_BELOW_V7, reason="Only run for v8+") +RUN_IF_V7 = pytest.mark.skipif(IS_V8 or IS_BELOW_V7, reason="Only run for v7") +RUN_IF_BELOW_V7 = pytest.mark.skipif(not IS_BELOW_V7, reason="Only run for versions below v7") -def test_transport_perform_request_urllib3(): - app = application() - with BackgroundTask(app, 'perform_request_urllib3') as transaction: - transport = Transport([HOST], connection_class=Urllib3HttpConnection) - if VERSION >= (7, 16, 0): - transport.perform_request('POST', METHOD, headers=HEADERS, params=PARAMS, body=DATA) - else: - transport.perform_request('POST', METHOD, params=PARAMS, body=DATA) - expected = (ES_SETTINGS['host'], ES_SETTINGS['port'], None) - assert transaction._nr_datastore_instance_info == expected +HOST = NodeConfig(scheme="http", host=ES_SETTINGS["host"], port=int(ES_SETTINGS["port"])) + +METHOD = "/contacts/person/1" +HEADERS = {"Content-Type": "application/json"} +DATA = {"name": "Joe Tester"} + +BODY = JSONSerializer().dumps(DATA) +if hasattr(BODY, "encode"): + BODY = BODY.encode("utf-8") + +@pytest.mark.parametrize( + "transport_kwargs, perform_request_kwargs", + [ + 
pytest.param({}, {"body": DATA}, id="DefaultTransport_below_v7", marks=RUN_IF_BELOW_V7), + pytest.param({}, {"headers": HEADERS, "body": DATA}, id="DefaultTransport_v7+", marks=RUN_IF_V7 or RUN_IF_V8), + pytest.param( + {"connection_class": Urllib3HttpConnection}, + {"body": DATA}, + id="Urllib3HttpConnectionv7", + marks=RUN_IF_BELOW_V7, + ), + pytest.param( + {"connection_class": RequestsHttpConnection}, + {"body": DATA}, + id="RequestsHttpConnectionv7", + marks=RUN_IF_BELOW_V7, + ), + pytest.param( + {"connection_class": Urllib3HttpConnection}, + {"headers": HEADERS, "body": DATA}, + id="Urllib3HttpConnectionv7", + marks=RUN_IF_V7, + ), + pytest.param( + {"connection_class": RequestsHttpConnection}, + {"headers": HEADERS, "body": DATA}, + id="RequestsHttpConnectionv7", + marks=RUN_IF_V7, + ), + pytest.param( + {"node_class": Urllib3HttpConnection}, + {"headers": HEADERS, "body": DATA}, + id="Urllib3HttpNodev8", + marks=RUN_IF_V8, + ), + pytest.param( + {"node_class": RequestsHttpConnection}, + {"headers": HEADERS, "body": DATA}, + id="RequestsHttpNodev8", + marks=RUN_IF_V8, + ), + ], +) +@background_task() +def test_transport_connection_classes(transport_kwargs, perform_request_kwargs): + transaction = current_transaction() -def test_transport_perform_request_requests(): - app = application() - with BackgroundTask(app, 'perform_request_requests') as transaction: - transport = Transport([HOST], connection_class=RequestsHttpConnection) - if VERSION >= (7, 16, 0): - transport.perform_request('POST', METHOD, headers=HEADERS, params=PARAMS, body=DATA) - else: - transport.perform_request('POST', METHOD, params=PARAMS, body=DATA) + transport = Transport([HOST], **transport_kwargs) + transport.perform_request("POST", METHOD, **perform_request_kwargs) - expected = (ES_SETTINGS['host'], ES_SETTINGS['port'], None) + expected = (ES_SETTINGS["host"], ES_SETTINGS["port"], None) assert transaction._nr_datastore_instance_info == expected diff --git 
a/tests/testing_support/validators/validate_datastore_trace_inputs.py b/tests/testing_support/validators/validate_datastore_trace_inputs.py new file mode 100644 index 000000000..ade4ebea6 --- /dev/null +++ b/tests/testing_support/validators/validate_datastore_trace_inputs.py @@ -0,0 +1,50 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from testing_support.fixtures import catch_background_exceptions + +from newrelic.common.object_wrapper import transient_function_wrapper + +""" +operation: method name +target: search argument +""" + + +def validate_datastore_trace_inputs(operation=None, target=None): + @transient_function_wrapper("newrelic.api.datastore_trace", "DatastoreTrace.__init__") + @catch_background_exceptions + def _validate_datastore_trace_inputs(wrapped, instance, args, kwargs): + def _bind_params(product, target, operation, host=None, port_path_or_id=None, database_name=None, **kwargs): + return (product, target, operation, host, port_path_or_id, database_name, kwargs) + + ( + captured_product, + captured_target, + captured_operation, + captured_host, + captured_port_path_or_id, + captured_database_name, + captured_kwargs, + ) = _bind_params(*args, **kwargs) + + if target is not None: + assert captured_target == target, "%s didn't match expected %s" % (captured_target, target) + if operation is not None: + assert captured_operation == operation, "%s didn't match expected %s" % (captured_operation, operation) + + 
return wrapped(*args, **kwargs) + + return _validate_datastore_trace_inputs diff --git a/tox.ini b/tox.ini index 3e5a58cd8..07c040d29 100644 --- a/tox.ini +++ b/tox.ini @@ -72,9 +72,8 @@ envlist = python-cross_agent-pypy-without_extensions, postgres-datastore_asyncpg-{py37,py38,py39,py310,py311}, memcached-datastore_bmemcached-{pypy,py27,py37,py38,py39,py310,py311}-memcached030, - elasticsearchserver01-datastore_pyelasticsearch-{py27,pypy}, - elasticsearchserver01-datastore_elasticsearch-py27-elasticsearch{00,01,02,05}, - elasticsearchserver07-datastore_elasticsearch-{py27,py37,py38,py39,py310,py311,pypy,pypy37}-elasticsearch{07}, + elasticsearchserver07-datastore_elasticsearch-{py27,py37,py38,py39,py310,py311,pypy,pypy37}-elasticsearch07, + elasticsearchserver08-datastore_elasticsearch-{py37,py38,py39,py310,py311,pypy37}-elasticsearch08, memcached-datastore_memcache-{py27,py37,py38,py39,py310,py311,pypy,pypy37}-memcached01, mysql-datastore_mysql-mysql080023-py27, mysql-datastore_mysql-mysqllatest-{py37,py38,py39,py310,py311}, @@ -236,11 +235,8 @@ deps = datastore_bmemcached-memcached030: python-binary-memcached<0.31 datastore_bmemcached-memcached030: uhashring<2.0 datastore_elasticsearch: requests - datastore_elasticsearch-elasticsearch00: elasticsearch<1.0 - datastore_elasticsearch-elasticsearch01: elasticsearch<2.0 - datastore_elasticsearch-elasticsearch02: elasticsearch<3.0 - datastore_elasticsearch-elasticsearch05: elasticsearch<6.0 datastore_elasticsearch-elasticsearch07: elasticsearch<8.0 + datastore_elasticsearch-elasticsearch08: elasticsearch<9.0 datastore_memcache-memcached01: python-memcached<2 datastore_mysql-mysqllatest: mysql-connector-python datastore_mysql-mysql080023: mysql-connector-python<8.0.24 From 061ad59fd89d647e0cea06bb56269a5d40f4a7a1 Mon Sep 17 00:00:00 2001 From: Uma Annamalai Date: Thu, 9 Feb 2023 13:13:29 -0800 Subject: [PATCH 048/108] Update contributors workspace link in CONTRIBUTING.rst. (#760) * Update link in CONTRIBUTING.rst. 
* Update to RST syntax. * [Mega-Linter] Apply linters fixes --------- Co-authored-by: umaannamalai Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> --- CONTRIBUTING.rst | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 5e95d3806..12081d1ee 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -237,4 +237,5 @@ We host a public Slack with a dedicated channel for contributors and maintainers of open source projects hosted by New Relic. If you are contributing to this project, you're welcome to request access to the #oss-contributors channel in the newrelicusers.slack.com workspace. To -request access, see https://newrelicusers-signup.herokuapp.com/. +request access, please use this `link +`__. From 300de2a86b2975c12eae7ae3eed75d1ffd8712ec Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Thu, 9 Feb 2023 14:32:08 -0800 Subject: [PATCH 049/108] Add Retry to Pip Install (#763) * Add retry to pip install * Fix retry backoff constant * Fix script failures --------- Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> --- .github/scripts/retry.sh | 28 ++++++++++++++++++++++++++++ tox.ini | 8 ++++---- 2 files changed, 32 insertions(+), 4 deletions(-) create mode 100755 .github/scripts/retry.sh diff --git a/.github/scripts/retry.sh b/.github/scripts/retry.sh new file mode 100755 index 000000000..1cb17836e --- /dev/null +++ b/.github/scripts/retry.sh @@ -0,0 +1,28 @@ +#!/bin/bash + +# Time in seconds to backoff after the initial attempt. +INITIAL_BACKOFF=10 + +# Grab first arg as number of retries +retries=$1 +shift + +# Use for loop to repeatedly try the wrapped command, breaking on success +for i in $(seq 1 $retries); do + echo "Running: $@" + + # Exponential backoff + if [[ i -gt 1 ]]; then + # Starts with the initial backoff then doubles every retry. 
+ backoff=$(($INITIAL_BACKOFF * (2 ** (i - 2)))) + echo "Command failed, retrying in $backoff seconds..." + sleep $backoff + fi + + # Run wrapped command, and exit on success + $@ && break + result=$? +done + +# Exit with status code of wrapped command +exit $? diff --git a/tox.ini b/tox.ini index 07c040d29..83f70c30b 100644 --- a/tox.ini +++ b/tox.ini @@ -208,8 +208,8 @@ deps = application_celery: celery<6.0 application_celery-py{py37,37}: importlib-metadata<5.0 application_gearman: gearman<3.0.0 - component_djangorestframework-djangorestframework0300: Django < 1.9 - component_djangorestframework-djangorestframework0300: djangorestframework < 3.1 + component_djangorestframework-djangorestframework0300: Django<1.9 + component_djangorestframework-djangorestframework0300: djangorestframework<3.1 component_djangorestframework-djangorestframeworklatest: Django component_djangorestframework-djangorestframeworklatest: djangorestframework component_flask_rest: flask @@ -416,8 +416,8 @@ commands = install_command= # Older pip versions that support python 2 have issues with using the cache directory and cause crashes on GitHub Actions - {py27,pypy}: pip install --no-cache-dir {opts} {packages} - !{py27,pypy}: pip install {opts} {packages} + {py27,pypy}: {toxinidir}/.github/scripts/retry.sh 3 pip install --no-cache-dir {opts} {packages} + !{py27,pypy}: {toxinidir}/.github/scripts/retry.sh 3 pip install {opts} {packages} extras = agent_streaming: infinite-tracing From 34102707a7569144d1fee573e151b03893b89f8f Mon Sep 17 00:00:00 2001 From: Justin Richert Date: Thu, 9 Feb 2023 18:43:09 -0600 Subject: [PATCH 050/108] Add aiohttp support for expected status codes (#735) * Add aiohttp support for expected status codes * Adjust naming convention * Fix expected tests for new validator behavior --------- Co-authored-by: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Co-authored-by: Tim Pansino --- newrelic/api/time_trace.py | 6 ++++- 
newrelic/hooks/framework_aiohttp.py | 18 ++++++++++++- .../test_ignore_expected_errors.py | 12 ++++++--- .../framework_aiohttp/_target_application.py | 5 ++++ tests/framework_aiohttp/test_server.py | 7 +++++- tests/testing_support/fixtures.py | 25 +++++++++++++++++++ .../validators/validate_transaction_errors.py | 13 +++++++--- 7 files changed, 76 insertions(+), 10 deletions(-) diff --git a/newrelic/api/time_trace.py b/newrelic/api/time_trace.py index dc010674c..31de73536 100644 --- a/newrelic/api/time_trace.py +++ b/newrelic/api/time_trace.py @@ -327,6 +327,10 @@ def _observe_exception(self, exc_info=None, ignore=None, expected=None, status_c if is_expected is None and callable(expected): is_expected = expected(exc, value, tb) + # Callable on transaction + if is_expected is None and hasattr(transaction, "_expect_errors"): + is_expected = transaction._expect_errors(exc, value, tb) + # List of class names if is_expected is None and expected is not None and not callable(expected): # Do not set is_expected to False @@ -631,7 +635,7 @@ def get_service_linking_metadata(application=None, settings=None): if application is None: from newrelic.api.application import application_instance application = application_instance(activate=False) - + if application is not None: settings = application.settings diff --git a/newrelic/hooks/framework_aiohttp.py b/newrelic/hooks/framework_aiohttp.py index de72ae0c5..68f4e70f1 100644 --- a/newrelic/hooks/framework_aiohttp.py +++ b/newrelic/hooks/framework_aiohttp.py @@ -26,7 +26,7 @@ function_wrapper, wrap_function_wrapper, ) -from newrelic.core.config import should_ignore_error +from newrelic.core.config import is_expected_error, should_ignore_error SUPPORTED_METHODS = ("connect", "head", "get", "delete", "options", "patch", "post", "put", "trace") @@ -61,6 +61,19 @@ def _should_ignore(exc, value, tb): return _should_ignore +def is_expected(transaction): + settings = transaction.settings + + def _is_expected(exc, value, tb): + from 
aiohttp import web + + if isinstance(value, web.HTTPException): + status_code = value.status_code + return is_expected_error((exc, value, tb), status_code, settings=settings) + + return _is_expected + + def _nr_process_response_proxy(response, transaction): nr_headers = transaction.process_response(response.status, response.headers) @@ -338,6 +351,9 @@ async def _coro(*_args, **_kwargs): # Patch in should_ignore to all notice_error calls transaction._ignore_errors = should_ignore(transaction) + # Patch in is_expected to all notice_error calls + transaction._expect_errors = is_expected(transaction) + import aiohttp transaction.add_framework_info(name="aiohttp", version=aiohttp.__version__) diff --git a/tests/agent_features/test_ignore_expected_errors.py b/tests/agent_features/test_ignore_expected_errors.py index 5cf61eced..93595aa35 100644 --- a/tests/agent_features/test_ignore_expected_errors.py +++ b/tests/agent_features/test_ignore_expected_errors.py @@ -94,8 +94,9 @@ def test_classes_error_event_inside_transaction(settings, expected, ignore): error_count = 1 if not ignore else 0 errors = _test_runtime_error if not ignore else [] + expected_errors = _runtime_error_name if expected and not ignore else None - @validate_transaction_errors(errors=errors) + @validate_transaction_errors(errors=errors, expected_errors=expected_errors) @validate_error_event_sample_data( required_attrs=attributes, required_user_attrs=False, @@ -268,8 +269,9 @@ def test_status_codes_inside_transaction(settings, expected, ignore, status_code error_count = 1 if not ignore else 0 errors = _test_teapot_error if not ignore else [] + expected_errors = _teapot_error_name if expected and not ignore else None - @validate_transaction_errors(errors=errors) + @validate_transaction_errors(errors=errors, expected_errors=expected_errors) @validate_error_event_sample_data( required_attrs=attributes, required_user_attrs=False, @@ -359,8 +361,9 @@ def test_mixed_ignore_expected_settings_inside_transaction( 
error_count = 1 if not ignore else 0 errors = _test_runtime_error if not ignore else [] + expected_errors = _runtime_error_name if expected and not ignore else None - @validate_transaction_errors(errors=errors) + @validate_transaction_errors(errors=errors, expected_errors=expected_errors) @validate_error_event_sample_data( required_attrs=attributes, required_user_attrs=False, @@ -428,8 +431,9 @@ def test_overrides_inside_transaction(override, result, parameter): error_count = 1 if not ignore else 0 errors = _test_runtime_error if not ignore else [] + expected_errors = _runtime_error_name if expected and not ignore else None - @validate_transaction_errors(errors=errors) + @validate_transaction_errors(errors=errors, expected_errors=expected_errors) @validate_error_event_sample_data( required_attrs=attributes, required_user_attrs=False, diff --git a/tests/framework_aiohttp/_target_application.py b/tests/framework_aiohttp/_target_application.py index 77d6fef6c..f15e7fd65 100644 --- a/tests/framework_aiohttp/_target_application.py +++ b/tests/framework_aiohttp/_target_application.py @@ -40,6 +40,10 @@ async def non_500_error(request): raise web.HTTPGone() +async def raise_403(request): + raise web.HTTPForbidden() + + async def raise_404(request): raise web.HTTPNotFound() @@ -167,6 +171,7 @@ def make_app(middlewares=None): app.router.add_route("*", "/error", error) app.router.add_route("*", "/known_error", KnownErrorView) app.router.add_route("*", "/non_500_error", non_500_error) + app.router.add_route("*", "/raise_403", raise_403) app.router.add_route("*", "/raise_404", raise_404) app.router.add_route("*", "/hang", hang) app.router.add_route("*", "/background", background) diff --git a/tests/framework_aiohttp/test_server.py b/tests/framework_aiohttp/test_server.py index aa6218c28..6a5ef0d10 100644 --- a/tests/framework_aiohttp/test_server.py +++ b/tests/framework_aiohttp/test_server.py @@ -19,6 +19,7 @@ from testing_support.fixtures import ( count_transactions, 
override_application_settings, + override_expected_status_codes, override_generic_settings, override_ignore_status_codes, ) @@ -64,6 +65,7 @@ ("/error?hello=world", "_target_application:error", "builtins:ValueError", 500), ("/non_500_error?hello=world", "_target_application:non_500_error", "aiohttp.web_exceptions:HTTPGone", 410), ("/raise_404?hello=world", "_target_application:raise_404", None, 404), + ("/raise_403?hello=world", "_target_application:raise_403", "aiohttp.web_exceptions:HTTPForbidden", 403), ], ) def test_error_exception(method, uri, metric_name, error, status, nr_enabled, aiohttp_app): @@ -79,7 +81,9 @@ async def fetch(): if error: errors.append(error) - @validate_transaction_errors(errors=errors) + @validate_transaction_errors( + errors=errors, expected_errors=["aiohttp.web_exceptions:HTTPForbidden"] + ) @validate_transaction_metrics( metric_name, scoped_metrics=[ @@ -111,6 +115,7 @@ async def fetch(): ) @validate_code_level_metrics(*metric_name.split(":")) @override_ignore_status_codes([404]) + @override_expected_status_codes([403]) def _test(): aiohttp_app.loop.run_until_complete(fetch()) diff --git a/tests/testing_support/fixtures.py b/tests/testing_support/fixtures.py index ebb0da830..ccbca9885 100644 --- a/tests/testing_support/fixtures.py +++ b/tests/testing_support/fixtures.py @@ -1468,6 +1468,31 @@ def _override_ignore_status_codes(wrapped, instance, args, kwargs): return _override_ignore_status_codes +def override_expected_status_codes(status_codes): + @function_wrapper + def _override_expected_status_codes(wrapped, instance, args, kwargs): + # Updates can be made to expected status codes in server + # side configs. 
Changes will be applied to application + # settings so we first check there and if they don't + # exist, we default to global settings + + application = application_instance() + settings = application and application.settings + + if not settings: + settings = global_settings() + + original = settings.error_collector.expected_status_codes + + try: + settings.error_collector.expected_status_codes = status_codes + return wrapped(*args, **kwargs) + finally: + settings.error_collector.expected_status_codes = original + + return _override_expected_status_codes + + def code_coverage_fixture(source=None): if source is None: source = ["newrelic"] diff --git a/tests/testing_support/validators/validate_transaction_errors.py b/tests/testing_support/validators/validate_transaction_errors.py index b00b7facd..04a34d262 100644 --- a/tests/testing_support/validators/validate_transaction_errors.py +++ b/tests/testing_support/validators/validate_transaction_errors.py @@ -12,18 +12,20 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import copy - from newrelic.common.object_wrapper import ( function_wrapper, transient_function_wrapper, ) from testing_support.fixtures import catch_background_exceptions -def validate_transaction_errors(errors=None, required_params=None, forgone_params=None): + +def validate_transaction_errors( + errors=None, required_params=None, forgone_params=None, expected_errors=None +): errors = errors or [] required_params = required_params or [] forgone_params = forgone_params or [] + expected_errors = expected_errors or [] captured_errors = [] @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.record_transaction") @@ -71,6 +73,11 @@ def _validate_transaction_errors(wrapped, instance, args, kwargs): for name, value in forgone_params: assert name not in e.custom_params, "name=%r, params=%r" % (name, e.custom_params) + if e.type in expected_errors: + assert e.expected is True + else: + assert e.expected is False + return output return _validate_transaction_errors From 6c06f9b75f5f6414422f0a91d560405d57481432 Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Fri, 10 Feb 2023 12:34:48 -0800 Subject: [PATCH 051/108] Fix PyPy Priority Sampling Test (#766) * Fix pypy priority sampling * [Mega-Linter] Apply linters fixes * Bump tests --------- Co-authored-by: TimPansino --- .../agent_features/test_priority_sampling.py | 59 ++++++++++--------- 1 file changed, 30 insertions(+), 29 deletions(-) diff --git a/tests/agent_features/test_priority_sampling.py b/tests/agent_features/test_priority_sampling.py index 41f7fc1f3..f73824f71 100644 --- a/tests/agent_features/test_priority_sampling.py +++ b/tests/agent_features/test_priority_sampling.py @@ -13,16 +13,18 @@ # limitations under the License. 
import pytest +from testing_support.fixtures import ( + core_application_stats_engine, + override_application_settings, + reset_core_stats_engine, +) -from testing_support.fixtures import (reset_core_stats_engine, - core_application_stats_engine, override_application_settings) from newrelic.api.application import application_instance as application from newrelic.api.background_task import BackgroundTask -@override_application_settings( - {'event_harvest_config.harvest_limits.analytic_event_data': 1}) -@pytest.mark.parametrize('first_transaction_saved', [True, False]) +@override_application_settings({"event_harvest_config.harvest_limits.analytic_event_data": 1}) +@pytest.mark.parametrize("first_transaction_saved", [True, False]) def test_priority_used_in_transaction_events(first_transaction_saved): first_priority = 1 if first_transaction_saved else 0 second_priority = 0 if first_transaction_saved else 1 @@ -32,46 +34,46 @@ def _test(): # Stats engine stats_engine = core_application_stats_engine() - with BackgroundTask(application(), name='T1') as txn: + with BackgroundTask(application(), name="T1") as txn: txn._priority = first_priority - with BackgroundTask(application(), name='T2') as txn: + with BackgroundTask(application(), name="T2") as txn: txn._priority = second_priority transaction_events = list(stats_engine.transaction_events) assert len(transaction_events) == 1 - # highest priority should win - assert stats_engine.transaction_events.pq[0][0] == 1 + # Highest priority should win. + # Priority can be 1 or 2 depending on randomness in sampling computation. 
+ assert stats_engine.transaction_events.pq[0][0] >= 1 if first_transaction_saved: - assert transaction_events[0][0]['name'].endswith('/T1') + assert transaction_events[0][0]["name"].endswith("/T1") else: - assert transaction_events[0][0]['name'].endswith('/T2') + assert transaction_events[0][0]["name"].endswith("/T2") _test() -@override_application_settings({ - 'event_harvest_config.harvest_limits.error_event_data': 1}) -@pytest.mark.parametrize('first_transaction_saved', [True, False]) +@override_application_settings({"event_harvest_config.harvest_limits.error_event_data": 1}) +@pytest.mark.parametrize("first_transaction_saved", [True, False]) def test_priority_used_in_transaction_error_events(first_transaction_saved): first_priority = 1 if first_transaction_saved else 0 second_priority = 0 if first_transaction_saved else 1 @reset_core_stats_engine() def _test(): - with BackgroundTask(application(), name='T1') as txn: + with BackgroundTask(application(), name="T1") as txn: txn._priority = first_priority try: - raise ValueError('OOPS') + raise ValueError("OOPS") except ValueError: txn.notice_error() - with BackgroundTask(application(), name='T2') as txn: + with BackgroundTask(application(), name="T2") as txn: txn._priority = second_priority try: - raise ValueError('OOPS') + raise ValueError("OOPS") except ValueError: txn.notice_error() @@ -85,29 +87,28 @@ def _test(): assert stats_engine.error_events.pq[0][0] == 1 if first_transaction_saved: - assert error_events[0][0]['transactionName'].endswith('/T1') + assert error_events[0][0]["transactionName"].endswith("/T1") else: - assert error_events[0][0]['transactionName'].endswith('/T2') + assert error_events[0][0]["transactionName"].endswith("/T2") _test() -@override_application_settings({ - 'event_harvest_config.harvest_limits.custom_event_data': 1}) -@pytest.mark.parametrize('first_transaction_saved', [True, False]) +@override_application_settings({"event_harvest_config.harvest_limits.custom_event_data": 1}) 
+@pytest.mark.parametrize("first_transaction_saved", [True, False]) def test_priority_used_in_transaction_custom_events(first_transaction_saved): first_priority = 1 if first_transaction_saved else 0 second_priority = 0 if first_transaction_saved else 1 @reset_core_stats_engine() def _test(): - with BackgroundTask(application(), name='T1') as txn: + with BackgroundTask(application(), name="T1") as txn: txn._priority = first_priority - txn.record_custom_event('foobar', {'foo': 'bar'}) + txn.record_custom_event("foobar", {"foo": "bar"}) - with BackgroundTask(application(), name='T2') as txn: + with BackgroundTask(application(), name="T2") as txn: txn._priority = second_priority - txn.record_custom_event('barbaz', {'foo': 'bar'}) + txn.record_custom_event("barbaz", {"foo": "bar"}) # Stats engine stats_engine = core_application_stats_engine() @@ -119,8 +120,8 @@ def _test(): assert stats_engine.custom_events.pq[0][0] == 1 if first_transaction_saved: - assert custom_events[0][0]['type'] == 'foobar' + assert custom_events[0][0]["type"] == "foobar" else: - assert custom_events[0][0]['type'] == 'barbaz' + assert custom_events[0][0]["type"] == "barbaz" _test() From aae9563ff73d39f2ad341e4a6769beb288cdf783 Mon Sep 17 00:00:00 2001 From: Lalleh Rafeei <84813886+lrafeei@users.noreply.github.com> Date: Wed, 15 Feb 2023 09:36:10 -0800 Subject: [PATCH 052/108] Config linter fixes (#768) * Fix default value and lazy logging pylint * Fix default value and lazy logging pylint * Fix unnecessary 'else' in pylint * Fix logging-not-lazy in pylint * Fix redefined built-in error in Pylint * Fix implicit string concatenation in Pylint * Fix dict() to {} in Pylint * Make sure eval is OK to use for Pylint * Fix logging format string for Pylint * Change list comprehension to generator expression * [Mega-Linter] Apply linters fixes * Rerun tests --------- Co-authored-by: lrafeei --- newrelic/config.py | 140 ++++++++++++++++++++++----------------------- 1 file changed, 67 insertions(+), 73 
deletions(-) diff --git a/newrelic/config.py b/newrelic/config.py index 203318287..7176c3ac8 100644 --- a/newrelic/config.py +++ b/newrelic/config.py @@ -42,9 +42,9 @@ import newrelic.console import newrelic.core.agent import newrelic.core.config -import newrelic.core.trace_cache as trace_cache from newrelic.common.log_file import initialize_logging from newrelic.common.object_names import expand_builtin_exception_name +from newrelic.core import trace_cache from newrelic.core.config import ( Settings, apply_config_setting, @@ -106,7 +106,11 @@ # instrumentation modules and extensions. -def extra_settings(section, types={}, defaults={}): +def extra_settings(section, types=None, defaults=None): + if types is None: + types = {} + if defaults is None: + defaults = {} settings = {} if _config_object.has_section(section): @@ -219,12 +223,12 @@ def _map_default_host_value(license_key): def _raise_configuration_error(section, option=None): _logger.error("CONFIGURATION ERROR") if section: - _logger.error("Section = %s" % section) + _logger.error("Section = %s", section) if option is None: options = _config_object.options(section) - _logger.error("Options = %s" % options) + _logger.error("Options = %s", options) _logger.exception("Exception Details") if not _ignore_errors: @@ -234,13 +238,12 @@ def _raise_configuration_error(section, option=None): "Check New Relic agent log file for further " "details." % section ) - else: - raise newrelic.api.exceptions.ConfigurationError( - "Invalid configuration. Check New Relic agent log file for further details." - ) + raise newrelic.api.exceptions.ConfigurationError( + "Invalid configuration. Check New Relic agent log file for further details." + ) else: - _logger.error("Option = %s" % option) + _logger.error("Option = %s", option) _logger.exception("Exception Details") if not _ignore_errors: @@ -250,12 +253,11 @@ def _raise_configuration_error(section, option=None): 'section "%s". 
Check New Relic agent log ' "file for further details." % (option, section) ) - else: - raise newrelic.api.exceptions.ConfigurationError( - 'Invalid configuration for option "%s". ' - "Check New Relic agent log file for further " - "details." % option - ) + raise newrelic.api.exceptions.ConfigurationError( + 'Invalid configuration for option "%s". ' + "Check New Relic agent log file for further " + "details." % option + ) def _process_setting(section, option, getter, mapper): @@ -285,9 +287,8 @@ def _process_setting(section, option, getter, mapper): if len(fields) == 1: setattr(target, fields[0], value) break - else: - target = getattr(target, fields[0]) - fields = fields[1].split(".", 1) + target = getattr(target, fields[0]) + fields = fields[1].split(".", 1) # Cache the configuration so can be dumped out to # log file when whole main configuration has been @@ -568,7 +569,7 @@ def _process_app_name_setting(): def _link_applications(application): for altname in linked: - _logger.debug("link to %s" % ((name, altname),)) + _logger.debug("link to %s", ((name, altname),)) application.link_to_application(altname) if linked: @@ -594,21 +595,21 @@ def _process_labels_setting(labels=None): deduped = {} for key, value in labels: - if len(key) > length_limit: _logger.warning( - "Improper configuration. Label key %s is too long. Truncating key to: %s" % (key, key[:length_limit]) + "Improper configuration. Label key %s is too long. Truncating key to: %s", key, key[:length_limit] ) if len(value) > length_limit: _logger.warning( - "Improper configuration. Label value %s is too " - "long. Truncating value to: %s" % (value, value[:length_limit]) + "Improper configuration. Label value %s is too long. Truncating value to: %s", + value, + value[:length_limit], ) if len(deduped) >= count_limit: _logger.warning( - "Improper configuration. Maximum number of labels reached. Using first %d labels." % count_limit + "Improper configuration. Maximum number of labels reached. 
Using first %d labels.", count_limit ) break @@ -728,8 +729,7 @@ def translate_deprecated_settings(settings, cached_settings): ), ] - for (old_key, new_key) in deprecated_settings_map: - + for old_key, new_key in deprecated_settings_map: if old_key in cached: _logger.info( "Deprecated setting found: %r. Please use new setting: %r.", @@ -753,7 +753,6 @@ def translate_deprecated_settings(settings, cached_settings): # deprecated settings, so it gets handled separately. if "ignored_params" in cached: - _logger.info( "Deprecated setting found: ignored_params. Please use " "new setting: attributes.exclude. For the new setting, an " @@ -885,7 +884,6 @@ def _load_configuration( log_file=None, log_level=None, ): - global _configuration_done global _config_file @@ -908,8 +906,7 @@ def _load_configuration( 'Prior configuration file used was "%s" and ' 'environment "%s".' % (_config_file, _environment) ) - else: - return + return _configuration_done = True @@ -923,7 +920,6 @@ def _load_configuration( # If no configuration file then nothing more to be done. if not config_file: - _logger.debug("no agent configuration file") # Force initialisation of the logging system now in case @@ -963,7 +959,7 @@ def _load_configuration( return - _logger.debug("agent configuration file was %s" % config_file) + _logger.debug("agent configuration file was %s", config_file) # Now read in the configuration file. Cache the config file # name in internal settings object as indication of succeeding. @@ -1014,7 +1010,7 @@ def _load_configuration( # against the internal settings object. for option, value in _cache_object: - _logger.debug("agent config %s = %s" % (option, repr(value))) + _logger.debug("agent config %s = %s", option, repr(value)) # Validate provided feature flags and log a warning if get one # which isn't valid. 
@@ -1063,7 +1059,7 @@ def _load_configuration( terminal = False rollup = None - _logger.debug("register function-trace %s" % ((module, object_path, name, group),)) + _logger.debug("register function-trace %s", ((module, object_path, name, group),)) hook = _function_trace_import_hook(object_path, name, group, label, params, terminal, rollup) newrelic.api.import_hook.register_import_hook(module, hook) @@ -1080,7 +1076,7 @@ def _load_configuration( name = None group = "Function" - _logger.debug("register generator-trace %s" % ((module, object_path, name, group),)) + _logger.debug("register generator-trace %s", ((module, object_path, name, group),)) hook = _generator_trace_import_hook(object_path, name, group) newrelic.api.import_hook.register_import_hook(module, hook) @@ -1091,10 +1087,10 @@ def _load_configuration( # Generic error reporting functions. -def _raise_instrumentation_error(type, locals): +def _raise_instrumentation_error(instrumentation_type, locals_dict): _logger.error("INSTRUMENTATION ERROR") - _logger.error("Type = %s" % type) - _logger.error("Locals = %s" % locals) + _logger.error("Type = %s", instrumentation_type) + _logger.error("Locals = %s", locals_dict) _logger.exception("Exception Details") if not _ignore_errors: @@ -1115,7 +1111,7 @@ def module_import_hook_results(): def _module_import_hook(target, module, function): def _instrument(target): - _logger.debug("instrument module %s" % ((target, module, function),)) + _logger.debug("instrument module %s", ((target, module, function),)) try: instrumented = target._nr_instrumented @@ -1123,7 +1119,7 @@ def _instrument(target): instrumented = target._nr_instrumented = set() if (module, function) in instrumented: - _logger.debug("instrumentation already run %s" % ((target, module, function),)) + _logger.debug("instrumentation already run %s", ((target, module, function),)) return instrumented.add((module, function)) @@ -1173,7 +1169,7 @@ def _process_module_configuration(): if target not in 
_module_import_hook_registry: _module_import_hook_registry[target] = (module, function) - _logger.debug("register module %s" % ((target, module, function),)) + _logger.debug("register module %s", ((target, module, function),)) hook = _module_import_hook(target, module, function) newrelic.api.import_hook.register_import_hook(target, hook) @@ -1186,7 +1182,7 @@ def _process_module_configuration(): def _module_function_glob(module, object_path): """Match functions and class methods in a module to file globbing syntax.""" - if not any([c in object_path for c in {"*", "?", "["}]): # Identify globbing patterns + if not any((c in object_path for c in ("*", "?", "["))): # Identify globbing patterns return (object_path,) # Returned value must be iterable else: # Gather module functions @@ -1194,7 +1190,7 @@ def _module_function_glob(module, object_path): available_functions = {k: v for k, v in module.__dict__.items() if callable(v) and not isinstance(v, type)} except Exception: # Default to empty dict if no functions available - available_functions = dict() + available_functions = {} # Gather module classes and methods try: @@ -1262,7 +1258,7 @@ def _process_wsgi_application_configuration(): if _config_object.has_option(section, "application"): application = _config_object.get(section, "application") - _logger.debug("register wsgi-application %s" % ((module, object_path, application),)) + _logger.debug("register wsgi-application %s", ((module, object_path, application),)) hook = _wsgi_application_import_hook(object_path, application) newrelic.api.import_hook.register_import_hook(module, hook) @@ -1318,10 +1314,10 @@ def _process_background_task_configuration(): group = _config_object.get(section, "group") if name and name.startswith("lambda "): - vars = {"callable_name": newrelic.api.object_wrapper.callable_name} - name = eval(name, vars) # nosec + callable_vars = {"callable_name": newrelic.api.object_wrapper.callable_name} + name = eval(name, callable_vars) # nosec, 
pylint: disable=W0123 - _logger.debug("register background-task %s" % ((module, object_path, application, name, group),)) + _logger.debug("register background-task %s", ((module, object_path, application, name, group),)) hook = _background_task_import_hook(object_path, application, name, group) newrelic.api.import_hook.register_import_hook(module, hook) @@ -1368,10 +1364,10 @@ def _process_database_trace_configuration(): sql = _config_object.get(section, "sql") if sql.startswith("lambda "): - vars = {"callable_name": newrelic.api.object_wrapper.callable_name} - sql = eval(sql, vars) # nosec + callable_vars = {"callable_name": newrelic.api.object_wrapper.callable_name} + sql = eval(sql, callable_vars) # nosec, pylint: disable=W0123 - _logger.debug("register database-trace %s" % ((module, object_path, sql),)) + _logger.debug("register database-trace %s", ((module, object_path, sql),)) hook = _database_trace_import_hook(object_path, sql) newrelic.api.import_hook.register_import_hook(module, hook) @@ -1423,14 +1419,14 @@ def _process_external_trace_configuration(): method = _config_object.get(section, "method") if url.startswith("lambda "): - vars = {"callable_name": newrelic.api.object_wrapper.callable_name} - url = eval(url, vars) # nosec + callable_vars = {"callable_name": newrelic.api.object_wrapper.callable_name} + url = eval(url, callable_vars) # nosec, pylint: disable=W0123 if method and method.startswith("lambda "): - vars = {"callable_name": newrelic.api.object_wrapper.callable_name} - method = eval(method, vars) # nosec + callable_vars = {"callable_name": newrelic.api.object_wrapper.callable_name} + method = eval(method, callable_vars) # nosec, pylint: disable=W0123 - _logger.debug("register external-trace %s" % ((module, object_path, library, url, method),)) + _logger.debug("register external-trace %s", ((module, object_path, library, url, method),)) hook = _external_trace_import_hook(object_path, library, url, method) 
newrelic.api.import_hook.register_import_hook(module, hook) @@ -1495,11 +1491,11 @@ def _process_function_trace_configuration(): rollup = _config_object.get(section, "rollup") if name and name.startswith("lambda "): - vars = {"callable_name": newrelic.api.object_wrapper.callable_name} - name = eval(name, vars) # nosec + callable_vars = {"callable_name": newrelic.api.object_wrapper.callable_name} + name = eval(name, callable_vars) # nosec, pylint: disable=W0123 _logger.debug( - "register function-trace %s" % ((module, object_path, name, group, label, params, terminal, rollup),) + "register function-trace %s", ((module, object_path, name, group, label, params, terminal, rollup),) ) hook = _function_trace_import_hook(object_path, name, group, label, params, terminal, rollup) @@ -1553,10 +1549,10 @@ def _process_generator_trace_configuration(): group = _config_object.get(section, "group") if name and name.startswith("lambda "): - vars = {"callable_name": newrelic.api.object_wrapper.callable_name} - name = eval(name, vars) # nosec + callable_vars = {"callable_name": newrelic.api.object_wrapper.callable_name} + name = eval(name, callable_vars) # nosec, pylint: disable=W0123 - _logger.debug("register generator-trace %s" % ((module, object_path, name, group),)) + _logger.debug("register generator-trace %s", ((module, object_path, name, group),)) hook = _generator_trace_import_hook(object_path, name, group) newrelic.api.import_hook.register_import_hook(module, hook) @@ -1612,10 +1608,10 @@ def _process_profile_trace_configuration(): depth = _config_object.get(section, "depth") if name and name.startswith("lambda "): - vars = {"callable_name": newrelic.api.object_wrapper.callable_name} - name = eval(name, vars) # nosec + callable_vars = {"callable_name": newrelic.api.object_wrapper.callable_name} + name = eval(name, callable_vars) # nosec, pylint: disable=W0123 - _logger.debug("register profile-trace %s" % ((module, object_path, name, group, depth),)) + 
_logger.debug("register profile-trace %s", ((module, object_path, name, group, depth),)) hook = _profile_trace_import_hook(object_path, name, group, depth=depth) newrelic.api.import_hook.register_import_hook(module, hook) @@ -1662,8 +1658,8 @@ def _process_memcache_trace_configuration(): command = _config_object.get(section, "command") if command.startswith("lambda "): - vars = {"callable_name": newrelic.api.object_wrapper.callable_name} - command = eval(command, vars) # nosec + callable_vars = {"callable_name": newrelic.api.object_wrapper.callable_name} + command = eval(command, callable_vars) # nosec, pylint: disable=W0123 _logger.debug("register memcache-trace %s", (module, object_path, command)) @@ -1680,7 +1676,7 @@ def _transaction_name_import_hook(object_path, name, group, priority): def _instrument(target): try: for func in _module_function_glob(target, object_path): - _logger.debug("wrap transaction-name %s" % ((target, func, name, group, priority),)) + _logger.debug("wrap transaction-name %s", ((target, func, name, group, priority),)) newrelic.api.transaction_name.wrap_transaction_name(target, func, name, group, priority) except Exception: _raise_instrumentation_error("transaction-name", locals()) @@ -1722,10 +1718,10 @@ def _process_transaction_name_configuration(): priority = _config_object.getint(section, "priority") if name and name.startswith("lambda "): - vars = {"callable_name": newrelic.api.object_wrapper.callable_name} - name = eval(name, vars) # nosec + callable_vars = {"callable_name": newrelic.api.object_wrapper.callable_name} + name = eval(name, callable_vars) # nosec, pylint: disable=W0123 - _logger.debug("register transaction-name %s" % ((module, object_path, name, group, priority),)) + _logger.debug("register transaction-name %s", ((module, object_path, name, group, priority),)) hook = _transaction_name_import_hook(object_path, name, group, priority) newrelic.api.import_hook.register_import_hook(module, hook) @@ -1883,7 +1879,6 @@ def 
_startup_data_source(): def _setup_data_source(): - global _data_sources_done if _data_sources_done: @@ -1942,7 +1937,7 @@ def _process_function_profile_configuration(): if _config_object.has_option(section, "checkpoint"): checkpoint = _config_object.getfloat(section, "checkpoint") - _logger.debug("register function-profile %s" % ((module, object_path, filename, delay, checkpoint),)) + _logger.debug("register function-profile %s", ((module, object_path, filename, delay, checkpoint),)) hook = _function_profile_import_hook(object_path, filename, delay, checkpoint) newrelic.api.import_hook.register_import_hook(module, hook) @@ -3123,7 +3118,6 @@ def _process_module_entry_points(): def _setup_instrumentation(): - global _instrumentation_done if _instrumentation_done: From a75c076648cafb2b5b6e8909b57ac1071af27281 Mon Sep 17 00:00:00 2001 From: Hannah Stepanek Date: Wed, 15 Feb 2023 10:48:56 -0700 Subject: [PATCH 053/108] Sync tests w/ agents/cross_agent_tests/pull/150 (#770) Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> --- .../utilization/utilization_json.json | 6 ++-- .../utilization_vendor_specific/gcp.json | 28 +++++++++---------- 2 files changed, 17 insertions(+), 17 deletions(-) diff --git a/tests/cross_agent/fixtures/utilization/utilization_json.json b/tests/cross_agent/fixtures/utilization/utilization_json.json index a5ed101bc..384464631 100644 --- a/tests/cross_agent/fixtures/utilization/utilization_json.json +++ b/tests/cross_agent/fixtures/utilization/utilization_json.json @@ -183,7 +183,7 @@ "input_hostname": "myotherhost", "input_full_hostname": "myotherhost.com", "input_ip_address": ["1.2.3.4"], - "input_gcp_id": "3161347020215157000", + "input_gcp_id": "3161347020215157123", "input_gcp_type": "projects/492690098729/machineTypes/custom-1-1024", "input_gcp_name": "aef-default-20170501t160547-7gh8", "input_gcp_zone": "projects/492690098729/zones/us-central1-c", @@ -196,7 +196,7 @@ "ip_address": ["1.2.3.4"], "vendors": { "gcp": 
{ - "id": "3161347020215157000", + "id": "3161347020215157123", "machineType": "custom-1-1024", "name": "aef-default-20170501t160547-7gh8", "zone": "us-central1-c" @@ -211,7 +211,7 @@ "input_hostname": "myotherhost", "input_full_hostname": "myotherhost.com", "input_ip_address": ["1.2.3.4"], - "input_gcp_id": "3161347020215157000", + "input_gcp_id": "3161347020215157123", "input_gcp_type": "projects/492690098729/machineTypes/custom-1-1024", "input_gcp_name": null, "input_gcp_zone": "projects/492690098729/zones/us-central1-c", diff --git a/tests/cross_agent/fixtures/utilization_vendor_specific/gcp.json b/tests/cross_agent/fixtures/utilization_vendor_specific/gcp.json index 9912790c0..090b410ea 100644 --- a/tests/cross_agent/fixtures/utilization_vendor_specific/gcp.json +++ b/tests/cross_agent/fixtures/utilization_vendor_specific/gcp.json @@ -24,7 +24,7 @@ "uri": { "http://metadata.google.internal/computeMetadata/v1/instance/?recursive=true": { "response": { - "id": 3161347020215157000, + "id": 3161347020215157123, "machineType": "projects/492690098729/machineTypes/custom-1-1024", "name": "aef-default-20170501t160547-7gh8", "zone": "projects/492690098729/zones/us-central1-c" @@ -34,7 +34,7 @@ }, "expected_vendors_hash": { "gcp": { - "id": "3161347020215157000", + "id": "3161347020215157123", "machineType": "custom-1-1024", "name": "aef-default-20170501t160547-7gh8", "zone": "us-central1-c" @@ -46,7 +46,7 @@ "uri": { "http://metadata.google.internal/computeMetadata/v1/instance/?recursive=true": { "response": { - "id": 3161347020215157000, + "id": 3161347020215157123, "machineType": "", "name": "aef-default-20170501t160547-7gh8", "zone": "projects/492690098729/zones/us-central1-c" @@ -66,7 +66,7 @@ "uri": { "http://metadata.google.internal/computeMetadata/v1/instance/?recursive=true": { "response": { - "id": 3161347020215157000, + "id": 3161347020215157123, "machineType": 
"zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz", "name": "aef-default-20170501t160547-7gh8", "zone": "projects/492690098729/zones/us-central1-c" @@ -126,7 +126,7 @@ "uri": { "http://metadata.google.internal/computeMetadata/v1/instance/?recursive=true": { "response": { - "id": 3161347020215157000, + "id": 3161347020215157123, "machineType": "projects/492690098729/machineTypes/custom-1-1024", "name": "aef-default-20170501t160547-7gh8", "zone": "" @@ -146,7 +146,7 @@ "uri": { "http://metadata.google.internal/computeMetadata/v1/instance/?recursive=true": { "response": { - "id": 3161347020215157000, + "id": 3161347020215157123, "machineType": "projects/492690098729/machineTypes/custom-1-1024", "name": "aef-default-20170501t160547-7gh8", "zone": "zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz" @@ -166,7 +166,7 @@ "uri": { "http://metadata.google.internal/computeMetadata/v1/instance/?recursive=true": { "response": { - "id": 3161347020215157000, + "id": 3161347020215157123, "machineType": "projects/492690098729/machineTypes/custom-1-1024", "name": "", "zone": "projects/492690098729/zones/us-central1-c" @@ -186,7 +186,7 @@ "uri": { "http://metadata.google.internal/computeMetadata/v1/instance/?recursive=true": { "response": { - "id": 3161347020215157000, + "id": 3161347020215157123, "machineType": "projects/492690098729/machineTypes/custom-1-1024", "name": 
"zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz", "zone": "projects/492690098729/zones/us-central1-c" @@ -206,7 +206,7 @@ "uri": { "http://metadata.google.internal/computeMetadata/v1/instance/?recursive=true": { "response": { - "id": 3161347020215157000, + "id": 3161347020215157123, "machineType": "projects/492690098729/machineTypes/custom-1-1024", "name": "滈 橀槶澉 鞻饙騴 鱙鷭黂 甗糲 紁羑 嗂 蛶觢豥 餤駰鬳 釂鱞鸄", "zone": "projects/492690098729/zones/us-central1-c" @@ -216,7 +216,7 @@ }, "expected_vendors_hash": { "gcp": { - "id": "3161347020215157000", + "id": "3161347020215157123", "machineType": "custom-1-1024", "name": "滈 橀槶澉 鞻饙騴 鱙鷭黂 甗糲 紁羑 嗂 蛶觢豥 餤駰鬳 釂鱞鸄", "zone": "us-central1-c" @@ -228,7 +228,7 @@ "uri": { "http://metadata.google.internal/computeMetadata/v1/instance/?recursive=true": { "response": { - "id": 3161347020215157000, + "id": 3161347020215157123, "machineType": "projects/492690098729/machineTypes/custom-1-1024", "name": "滈 橀槶澉 鞻饙騴 鱙鷭黂 甗糲, 紁羑 嗂 蛶觢豥 餤駰鬳 釂鱞鸄", "zone": "projects/492690098729/zones/us-central1-c" @@ -248,7 +248,7 @@ "uri": { "http://metadata.google.internal/computeMetadata/v1/instance/?recursive=true": { "response": { - "id": 3161347020215157000, + "id": 3161347020215157123, "machineType": "projects/492690098729/machineTypes/custom-1-1024", "name": "Bang!", "zone": "projects/492690098729/zones/us-central1-c" @@ -268,7 +268,7 @@ "uri": { "http://metadata.google.internal/computeMetadata/v1/instance/?recursive=true": { "response": { - "id": 3161347020215157000, + "id": 3161347020215157123, "machineType": "projects/492690098729/machineTypes/custom-1-1024", "name": "a-b_c.3... 
and/or 503 867-5309", "zone": "projects/492690098729/zones/us-central1-c" @@ -278,7 +278,7 @@ }, "expected_vendors_hash": { "gcp": { - "id": "3161347020215157000", + "id": "3161347020215157123", "machineType": "custom-1-1024", "name": "a-b_c.3... and/or 503 867-5309", "zone": "us-central1-c" From 59abb6ac4f9d49aa11d6cd6b3993fbdfd04be270 Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Thu, 16 Feb 2023 09:56:35 -0800 Subject: [PATCH 054/108] Infinite Tracing Batching & Compression (#762) * Infinite Tracing Batching and Compression settings (#756) * Add compression setting * Add batching setting * Infinite Tracing Compression (#758) * Initial commit * Add compression option in StreamingRPC * Add compression default to tests * Add test to confirm compression settings * Remove commented out code * Set compression settings from settings override * Infinite Tracing Batching (#759) * Initial infinite tracing batching implementation * Add RecordSpanBatch method to mock grpc server * Span batching settings and testing. Co-authored-by: Lalleh Rafeei Co-authored-by: Hannah Stepanek Co-authored-by: Uma Annamalai * Add final 8t batching tests * Rename serialization test * Formatting * Guard unittests from failing due to batching * Linting * Simplify batching algorithm * Properly wire batching parametrization * Fix incorrect validator use * Data loss on reconnect regression testing Co-authored-by: Uma Annamalai Co-authored-by: Hannah Stepanek * Test stream buffer batch sizes * Fix logic in supportability metrics for spans * Clean up nested conditionals in stream buffer * Compression parametrization in serialization test * Formatting * Update 8t test_no_delay_on_ok * Update protobufs * Remove unnecessary patching from test * Fix waiting in supportability metric tests * Add sleep to waiting in test * Reorder sleep and condition check * Mark no data loss xfail for py2. 
* Fix conditional check * Fix flake8 linter issues --------- Co-authored-by: Lalleh Rafeei Co-authored-by: Hannah Stepanek Co-authored-by: Uma Annamalai Co-authored-by: Hannah Stepanek * Infinite Tracing Supportability Feature Toggle Metrics (#769) * Add 8T feature toggle supportability metrics * Remove supportability metrics when 8t is disabled. * Formatting --------- Co-authored-by: Lalleh Rafeei <84813886+lrafeei@users.noreply.github.com> Co-authored-by: Lalleh Rafeei Co-authored-by: Hannah Stepanek Co-authored-by: Uma Annamalai Co-authored-by: Hannah Stepanek --- newrelic/common/streaming_utils.py | 49 +- newrelic/config.py | 2 + newrelic/core/agent_streaming.py | 31 +- newrelic/core/application.py | 89 +++- newrelic/core/config.py | 8 +- newrelic/core/data_collector.py | 23 +- newrelic/core/infinite_tracing_pb2.py | 21 +- newrelic/core/infinite_tracing_v3_pb2.py | 498 +++++------------- newrelic/core/infinite_tracing_v4_pb2.py | 147 ++++-- newrelic/core/stats_engine.py | 51 +- tests/agent_streaming/_test_handler.py | 64 ++- tests/agent_streaming/conftest.py | 48 +- .../agent_streaming/test_infinite_tracing.py | 381 ++++++++++---- tests/agent_streaming/test_stream_buffer.py | 87 +++ tests/agent_streaming/test_streaming_rpc.py | 115 +++- tests/agent_unittests/conftest.py | 1 + tests/agent_unittests/test_harvest_loop.py | 18 +- tests/testing_support/util.py | 15 + .../validators/validate_metric_payload.py | 6 +- 19 files changed, 1022 insertions(+), 632 deletions(-) create mode 100644 tests/agent_streaming/test_stream_buffer.py diff --git a/newrelic/common/streaming_utils.py b/newrelic/common/streaming_utils.py index ccd0b44ef..ad1b371dc 100644 --- a/newrelic/common/streaming_utils.py +++ b/newrelic/common/streaming_utils.py @@ -17,20 +17,24 @@ import threading try: - from newrelic.core.infinite_tracing_pb2 import AttributeValue + from newrelic.core.infinite_tracing_pb2 import AttributeValue, SpanBatch except: - AttributeValue = None + AttributeValue, SpanBatch = 
None, None + _logger = logging.getLogger(__name__) class StreamBuffer(object): - def __init__(self, maxlen): + def __init__(self, maxlen, batching=False): self._queue = collections.deque(maxlen=maxlen) self._notify = self.condition() self._shutdown = False self._seen = 0 self._dropped = 0 + self._settings = None + + self.batching = batching @staticmethod def condition(*args, **kwargs): @@ -66,14 +70,23 @@ def stats(self): return seen, dropped + def __bool__(self): + return bool(self._queue) + + def __len__(self): + return len(self._queue) + def __iter__(self): return StreamBufferIterator(self) class StreamBufferIterator(object): + MAX_BATCH_SIZE = 100 + def __init__(self, stream_buffer): self.stream_buffer = stream_buffer self._notify = self.stream_buffer._notify + self.batching = self.stream_buffer.batching self._shutdown = False self._stream = None @@ -100,12 +113,30 @@ def __next__(self): self.shutdown() raise StopIteration - try: - return self.stream_buffer._queue.popleft() - except IndexError: - pass - - if not self.stream_closed() and not self.stream_buffer._queue: + if self.batching: + stream_buffer_len = len(self.stream_buffer) + if stream_buffer_len > self.MAX_BATCH_SIZE: + # Ensure batch size is never more than 100 to prevent issues with serializing large numbers + # of spans causing their age to exceed 10 seconds. That would cause them to be rejected + # by the trace observer. + batch = [self.stream_buffer._queue.popleft() for _ in range(self.MAX_BATCH_SIZE)] + return SpanBatch(spans=batch) + elif stream_buffer_len: + # For small span batches empty stream buffer into list and clear queue. + # This is only safe to do under lock which prevents items being added to the queue. + batch = list(self.stream_buffer._queue) + self.stream_buffer._queue.clear() + return SpanBatch(spans=batch) + + else: + # Send items from stream buffer one at a time. 
+ try: + return self.stream_buffer._queue.popleft() + except IndexError: + pass + + # Wait until items are added to the stream buffer. + if not self.stream_closed() and not self.stream_buffer: self._notify.wait() next = __next__ diff --git a/newrelic/config.py b/newrelic/config.py index 7176c3ac8..dfdf058f4 100644 --- a/newrelic/config.py +++ b/newrelic/config.py @@ -531,6 +531,8 @@ def _process_configuration(section): _process_setting(section, "event_harvest_config.harvest_limits.log_event_data", "getint", None) _process_setting(section, "infinite_tracing.trace_observer_host", "get", None) _process_setting(section, "infinite_tracing.trace_observer_port", "getint", None) + _process_setting(section, "infinite_tracing.compression", "getboolean", None) + _process_setting(section, "infinite_tracing.batching", "getboolean", None) _process_setting(section, "infinite_tracing.span_queue_size", "getint", None) _process_setting(section, "code_level_metrics.enabled", "getboolean", None) diff --git a/newrelic/core/agent_streaming.py b/newrelic/core/agent_streaming.py index 5cd7117e6..b581f5d17 100644 --- a/newrelic/core/agent_streaming.py +++ b/newrelic/core/agent_streaming.py @@ -18,9 +18,9 @@ try: import grpc - from newrelic.core.infinite_tracing_pb2 import RecordStatus, Span + from newrelic.core.infinite_tracing_pb2 import RecordStatus, Span, SpanBatch except Exception: - grpc, RecordStatus, Span = None, None, None + grpc, RecordStatus, Span, SpanBatch = None, None, None, None _logger = logging.getLogger(__name__) @@ -33,7 +33,6 @@ class StreamingRpc(object): retry will not occur. 
""" - PATH = "/com.newrelic.trace.v1.IngestService/RecordSpan" RETRY_POLICY = ( (15, False), (15, False), @@ -44,7 +43,7 @@ class StreamingRpc(object): ) OPTIONS = [("grpc.enable_retries", 0)] - def __init__(self, endpoint, stream_buffer, metadata, record_metric, ssl=True): + def __init__(self, endpoint, stream_buffer, metadata, record_metric, ssl=True, compression=None): self._endpoint = endpoint self._ssl = ssl self.metadata = metadata @@ -57,17 +56,35 @@ def __init__(self, endpoint, stream_buffer, metadata, record_metric, ssl=True): self.notify = self.condition() self.record_metric = record_metric self.closed = False + # If this is not set, None is still a falsy value. + self.compression_setting = grpc.Compression.Gzip if compression else grpc.Compression.NoCompression + + if self.batching: # Stream buffer will be sending span batches + self.path = "/com.newrelic.trace.v1.IngestService/RecordSpanBatch" + self.serializer = SpanBatch.SerializeToString + else: + self.path = "/com.newrelic.trace.v1.IngestService/RecordSpan" + self.serializer = Span.SerializeToString self.create_channel() + @property + def batching(self): + # Determine batching by stream buffer settings + return self.stream_buffer.batching + def create_channel(self): if self._ssl: credentials = grpc.ssl_channel_credentials() - self.channel = grpc.secure_channel(self._endpoint, credentials, options=self.OPTIONS) + self.channel = grpc.secure_channel( + self._endpoint, credentials, compression=self.compression_setting, options=self.OPTIONS + ) else: - self.channel = grpc.insecure_channel(self._endpoint, options=self.OPTIONS) + self.channel = grpc.insecure_channel( + self._endpoint, compression=self.compression_setting, options=self.OPTIONS + ) - self.rpc = self.channel.stream_stream(self.PATH, Span.SerializeToString, RecordStatus.FromString) + self.rpc = self.channel.stream_stream(self.path, self.serializer, RecordStatus.FromString) def create_response_iterator(self): with self.stream_buffer._notify: 
diff --git a/newrelic/core/application.py b/newrelic/core/application.py index 419211a35..7be217428 100644 --- a/newrelic/core/application.py +++ b/newrelic/core/application.py @@ -520,30 +520,65 @@ def connect_to_data_collector(self, activate_agent): self._global_events_account = 0 - # Record metrics for how long it took us to connect and how - # many attempts we made. Also record metrics for the final - # successful attempt. If we went through multiple attempts, - # individual details of errors before the final one that - # worked are not recorded as recording them all in the - # initial harvest would possibly skew first harvest metrics - # and cause confusion as we cannot properly mark the time over - # which they were recorded. Make sure we do this before we - # mark the session active so we don't have to grab a lock on - # merging the internal metrics. - with InternalTraceContext(internal_metrics): + # Record metrics for how long it took us to connect and how + # many attempts we made. Also record metrics for the final + # successful attempt. If we went through multiple attempts, + # individual details of errors before the final one that + # worked are not recorded as recording them all in the + # initial harvest would possibly skew first harvest metrics + # and cause confusion as we cannot properly mark the time over + # which they were recorded. Make sure we do this before we + # mark the session active so we don't have to grab a lock on + # merging the internal metrics. 
+ internal_metric( "Supportability/Python/Application/Registration/Duration", self._period_start - connect_start ) internal_metric("Supportability/Python/Application/Registration/Attempts", connect_attempts) - # Logging feature toggle supportability metrics - application_logging_metrics = configuration.application_logging.enabled and configuration.application_logging.metrics.enabled - application_logging_forwarding = configuration.application_logging.enabled and configuration.application_logging.forwarding.enabled - application_logging_local_decorating = configuration.application_logging.enabled and configuration.application_logging.local_decorating.enabled - internal_metric("Supportability/Logging/Forwarding/Python/%s" % ("enabled" if application_logging_forwarding else "disabled"), 1) - internal_metric("Supportability/Logging/LocalDecorating/Python/%s" % ("enabled" if application_logging_local_decorating else "disabled"), 1) - internal_metric("Supportability/Logging/Metrics/Python/%s" % ("enabled" if application_logging_metrics else "disabled"), 1) + # Record metrics for feature toggles from settings + + # Logging feature toggle metrics + application_logging_metrics = ( + configuration.application_logging.enabled and configuration.application_logging.metrics.enabled + ) + application_logging_forwarding = ( + configuration.application_logging.enabled and configuration.application_logging.forwarding.enabled + ) + application_logging_local_decorating = ( + configuration.application_logging.enabled and configuration.application_logging.local_decorating.enabled + ) + internal_metric( + "Supportability/Logging/Forwarding/Python/%s" + % ("enabled" if application_logging_forwarding else "disabled"), + 1, + ) + internal_metric( + "Supportability/Logging/LocalDecorating/Python/%s" + % ("enabled" if application_logging_local_decorating else "disabled"), + 1, + ) + internal_metric( + "Supportability/Logging/Metrics/Python/%s" % ("enabled" if application_logging_metrics else 
"disabled"), + 1, + ) + + # Infinite tracing feature toggle metrics + infinite_tracing = configuration.infinite_tracing.enabled # Property that checks trace observer host + if infinite_tracing: + infinite_tracing_batching = configuration.infinite_tracing.batching + infinite_tracing_compression = configuration.infinite_tracing.compression + internal_metric( + "Supportability/InfiniteTracing/gRPC/Batching/%s" + % ("enabled" if infinite_tracing_batching else "disabled"), + 1, + ) + internal_metric( + "Supportability/InfiniteTracing/gRPC/Compression/%s" + % ("enabled" if infinite_tracing_compression else "disabled"), + 1, + ) self._stats_engine.merge_custom_metrics(internal_metrics.metrics()) @@ -724,11 +759,9 @@ def stop_data_samplers(self): def remove_data_source(self, name): with self._data_samplers_lock: - data_sampler = [x for x in self._data_samplers if x.name == name] if len(data_sampler) > 0: - # Should be at most one data sampler for a given name. data_sampler = data_sampler[0] @@ -741,7 +774,6 @@ def remove_data_source(self, name): data_sampler.stop() except Exception: - # If sampler has not started yet, it may throw an error. 
_logger.debug( @@ -1066,7 +1098,6 @@ def harvest(self, shutdown=False, flexible=False): with InternalTraceContext(internal_metrics): with InternalTrace("Supportability/Python/Harvest/Calls/" + call_metric): - self._harvest_count += 1 start = time.time() @@ -1204,7 +1235,6 @@ def harvest(self, shutdown=False, flexible=False): stats.reset_synthetics_events() if configuration.collect_analytics_events and configuration.transaction_events.enabled: - transaction_events = stats.transaction_events if transaction_events: @@ -1235,7 +1265,7 @@ def harvest(self, shutdown=False, flexible=False): if configuration.infinite_tracing.enabled: span_stream = stats.span_stream # Only merge stats as part of default harvest - if span_stream and not flexible: + if span_stream is not None and not flexible: spans_seen, spans_dropped = span_stream.stats() spans_sent = spans_seen - spans_dropped @@ -1267,7 +1297,6 @@ def harvest(self, shutdown=False, flexible=False): and configuration.error_collector.capture_events and configuration.error_collector.enabled ): - error_events = stats.error_events if error_events: num_error_samples = error_events.num_samples @@ -1289,7 +1318,6 @@ def harvest(self, shutdown=False, flexible=False): # Send custom events if configuration.collect_custom_events and configuration.custom_insights_events.enabled: - customs = stats.custom_events if customs: @@ -1309,8 +1337,13 @@ def harvest(self, shutdown=False, flexible=False): # Send log events - if configuration and configuration.application_logging and configuration.application_logging.enabled and configuration.application_logging.forwarding and configuration.application_logging.forwarding.enabled: - + if ( + configuration + and configuration.application_logging + and configuration.application_logging.enabled + and configuration.application_logging.forwarding + and configuration.application_logging.forwarding.enabled + ): logs = stats.log_events if logs: diff --git a/newrelic/core/config.py b/newrelic/core/config.py 
index 4111c7149..72c4de03d 100644 --- a/newrelic/core/config.py +++ b/newrelic/core/config.py @@ -458,7 +458,6 @@ def _environ_as_mapping(name, default=""): return result for item in items.split(";"): - try: key, value = item.split(":") except ValueError: @@ -731,6 +730,8 @@ def default_host(license_key): _settings.infinite_tracing.trace_observer_host = os.environ.get("NEW_RELIC_INFINITE_TRACING_TRACE_OBSERVER_HOST", None) _settings.infinite_tracing.trace_observer_port = _environ_as_int("NEW_RELIC_INFINITE_TRACING_TRACE_OBSERVER_PORT", 443) +_settings.infinite_tracing.compression = _environ_as_bool("NEW_RELIC_INFINITE_TRACING_COMPRESSION", default=True) +_settings.infinite_tracing.batching = _environ_as_bool("NEW_RELIC_INFINITE_TRACING_BATCHING", default=True) _settings.infinite_tracing.ssl = True _settings.infinite_tracing.span_queue_size = _environ_as_int("NEW_RELIC_INFINITE_TRACING_SPAN_QUEUE_SIZE", 10000) @@ -939,7 +940,6 @@ def global_settings_dump(settings_object=None, serializable=False): components = urlparse.urlparse(proxy_host) if components.scheme: - netloc = create_obfuscated_netloc(components.username, components.password, components.hostname, obfuscated) if components.port: @@ -1062,14 +1062,14 @@ def apply_server_side_settings(server_side_config=None, settings=_settings): # Overlay with agent server side configuration settings. - for (name, value) in agent_config.items(): + for name, value in agent_config.items(): apply_config_setting(settings_snapshot, name, value) # Overlay with global server side configuration settings. # global server side configuration always takes precedence over the global # server side configuration settings. 
- for (name, value) in server_side_config.items(): + for name, value in server_side_config.items(): apply_config_setting(settings_snapshot, name, value) event_harvest_config = server_side_config.get("event_harvest_config", {}) diff --git a/newrelic/core/data_collector.py b/newrelic/core/data_collector.py index f8947927d..985e37240 100644 --- a/newrelic/core/data_collector.py +++ b/newrelic/core/data_collector.py @@ -61,6 +61,7 @@ def connect_span_stream(self, span_iterator, record_metric): port = self.configuration.infinite_tracing.trace_observer_port ssl = self.configuration.infinite_tracing.ssl + compression_setting = self.configuration.infinite_tracing.compression endpoint = "{}:{}".format(host, port) if ( @@ -68,14 +69,13 @@ def connect_span_stream(self, span_iterator, record_metric): and self.configuration.span_events.enabled and self.configuration.collect_span_events ): - metadata = ( ("agent_run_token", self.configuration.agent_run_id), ("license_key", self.configuration.license_key), ) rpc = self._rpc = StreamingRpc( - endpoint, span_iterator, metadata, record_metric, ssl=ssl + endpoint, span_iterator, metadata, record_metric, ssl=ssl, compression=compression_setting ) rpc.connect() return rpc @@ -135,9 +135,7 @@ def send_log_events(self, sampling_info, log_event_data): return self._protocol.send("log_event_data", payload) def get_agent_commands(self): - """Receive agent commands from the data collector. - - """ + """Receive agent commands from the data collector.""" payload = (self.agent_run_id,) return self._protocol.send("get_agent_commands", payload) @@ -180,8 +178,7 @@ def send_agent_command_results(self, cmd_results): return self._protocol.send("agent_command_results", payload) def send_profile_data(self, profile_data): - """Called to submit Profile Data. 
- """ + """Called to submit Profile Data.""" payload = (self.agent_run_id, profile_data) return self._protocol.send("profile_data", payload) @@ -206,9 +203,7 @@ class DeveloperModeSession(Session): def connect_span_stream(self, span_iterator, record_metric): if self.configuration.debug.connect_span_stream_in_developer_mode: - super(DeveloperModeSession, self).connect_span_stream( - span_iterator, record_metric - ) + super(DeveloperModeSession, self).connect_span_stream(span_iterator, record_metric) class ServerlessModeSession(Session): @@ -231,12 +226,8 @@ def shutdown_session(): def create_session(license_key, app_name, linked_applications, environment): settings = global_settings() if settings.serverless_mode.enabled: - return ServerlessModeSession( - app_name, linked_applications, environment, settings - ) + return ServerlessModeSession(app_name, linked_applications, environment, settings) elif settings.developer_mode: - return DeveloperModeSession( - app_name, linked_applications, environment, settings - ) + return DeveloperModeSession(app_name, linked_applications, environment, settings) else: return Session(app_name, linked_applications, environment, settings) diff --git a/newrelic/core/infinite_tracing_pb2.py b/newrelic/core/infinite_tracing_pb2.py index a0fa9dc54..278dc6d18 100644 --- a/newrelic/core/infinite_tracing_pb2.py +++ b/newrelic/core/infinite_tracing_pb2.py @@ -13,13 +13,24 @@ # limitations under the License. 
try: - from google.protobuf import __version__ - PROTOBUF_VERSION = tuple(int(v) for v in __version__.split(".")) + from google.protobuf import __version__ + + PROTOBUF_VERSION = tuple(int(v) for v in __version__.split(".")) except Exception: - PROTOBUF_VERSION = (0, 0, 0) + PROTOBUF_VERSION = (0, 0, 0) # Import appropriate generated pb2 file for protobuf version if PROTOBUF_VERSION >= (4,): - from newrelic.core.infinite_tracing_v4_pb2 import * + from newrelic.core.infinite_tracing_v4_pb2 import ( # noqa: F401; pylint: disable=W0611 + AttributeValue, + RecordStatus, + Span, + SpanBatch, + ) else: - from newrelic.core.infinite_tracing_v3_pb2 import * + from newrelic.core.infinite_tracing_v3_pb2 import ( # noqa: F401; pylint: disable=W0611 + AttributeValue, + RecordStatus, + Span, + SpanBatch, + ) diff --git a/newrelic/core/infinite_tracing_v3_pb2.py b/newrelic/core/infinite_tracing_v3_pb2.py index 987c96303..79e3ec4eb 100644 --- a/newrelic/core/infinite_tracing_v3_pb2.py +++ b/newrelic/core/infinite_tracing_v3_pb2.py @@ -12,375 +12,129 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-try: - from google.protobuf import descriptor as _descriptor - from google.protobuf import message as _message - from google.protobuf import reflection as _reflection - from google.protobuf import symbol_database as _symbol_database - # @@protoc_insertion_point(imports) -except ImportError: - pass -else: - _sym_db = _symbol_database.Default() - - - DESCRIPTOR = _descriptor.FileDescriptor( - name='infinite_tracing.proto', - package='com.newrelic.trace.v1', - syntax='proto3', - serialized_options=None, - serialized_pb=b'\n\x16infinite_tracing.proto\x12\x15\x63om.newrelic.trace.v1\"\x86\x04\n\x04Span\x12\x10\n\x08trace_id\x18\x01 \x01(\t\x12?\n\nintrinsics\x18\x02 \x03(\x0b\x32+.com.newrelic.trace.v1.Span.IntrinsicsEntry\x12H\n\x0fuser_attributes\x18\x03 \x03(\x0b\x32/.com.newrelic.trace.v1.Span.UserAttributesEntry\x12J\n\x10\x61gent_attributes\x18\x04 \x03(\x0b\x32\x30.com.newrelic.trace.v1.Span.AgentAttributesEntry\x1aX\n\x0fIntrinsicsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x34\n\x05value\x18\x02 \x01(\x0b\x32%.com.newrelic.trace.v1.AttributeValue:\x02\x38\x01\x1a\\\n\x13UserAttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x34\n\x05value\x18\x02 \x01(\x0b\x32%.com.newrelic.trace.v1.AttributeValue:\x02\x38\x01\x1a]\n\x14\x41gentAttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x34\n\x05value\x18\x02 \x01(\x0b\x32%.com.newrelic.trace.v1.AttributeValue:\x02\x38\x01\"t\n\x0e\x41ttributeValue\x12\x16\n\x0cstring_value\x18\x01 \x01(\tH\x00\x12\x14\n\nbool_value\x18\x02 \x01(\x08H\x00\x12\x13\n\tint_value\x18\x03 \x01(\x03H\x00\x12\x16\n\x0c\x64ouble_value\x18\x04 \x01(\x01H\x00\x42\x07\n\x05value\"%\n\x0cRecordStatus\x12\x15\n\rmessages_seen\x18\x01 \x01(\x04\x32\x65\n\rIngestService\x12T\n\nRecordSpan\x12\x1b.com.newrelic.trace.v1.Span\x1a#.com.newrelic.trace.v1.RecordStatus\"\x00(\x01\x30\x01\x62\x06proto3' - ) - - - - - _SPAN_INTRINSICSENTRY = _descriptor.Descriptor( - name='IntrinsicsEntry', - full_name='com.newrelic.trace.v1.Span.IntrinsicsEntry', - 
filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='key', full_name='com.newrelic.trace.v1.Span.IntrinsicsEntry.key', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='value', full_name='com.newrelic.trace.v1.Span.IntrinsicsEntry.value', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=b'8\001', - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=291, - serialized_end=379, - ) - - _SPAN_USERATTRIBUTESENTRY = _descriptor.Descriptor( - name='UserAttributesEntry', - full_name='com.newrelic.trace.v1.Span.UserAttributesEntry', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='key', full_name='com.newrelic.trace.v1.Span.UserAttributesEntry.key', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='value', full_name='com.newrelic.trace.v1.Span.UserAttributesEntry.value', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - 
nested_types=[], - enum_types=[ - ], - serialized_options=b'8\001', - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=381, - serialized_end=473, - ) - - _SPAN_AGENTATTRIBUTESENTRY = _descriptor.Descriptor( - name='AgentAttributesEntry', - full_name='com.newrelic.trace.v1.Span.AgentAttributesEntry', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='key', full_name='com.newrelic.trace.v1.Span.AgentAttributesEntry.key', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='value', full_name='com.newrelic.trace.v1.Span.AgentAttributesEntry.value', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=b'8\001', - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=475, - serialized_end=568, - ) - - _SPAN = _descriptor.Descriptor( - name='Span', - full_name='com.newrelic.trace.v1.Span', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='trace_id', full_name='com.newrelic.trace.v1.Span.trace_id', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='intrinsics', 
full_name='com.newrelic.trace.v1.Span.intrinsics', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='user_attributes', full_name='com.newrelic.trace.v1.Span.user_attributes', index=2, - number=3, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='agent_attributes', full_name='com.newrelic.trace.v1.Span.agent_attributes', index=3, - number=4, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[_SPAN_INTRINSICSENTRY, _SPAN_USERATTRIBUTESENTRY, _SPAN_AGENTATTRIBUTESENTRY, ], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=50, - serialized_end=568, - ) - - - _ATTRIBUTEVALUE = _descriptor.Descriptor( - name='AttributeValue', - full_name='com.newrelic.trace.v1.AttributeValue', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='string_value', full_name='com.newrelic.trace.v1.AttributeValue.string_value', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='bool_value', full_name='com.newrelic.trace.v1.AttributeValue.bool_value', 
index=1, - number=2, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='int_value', full_name='com.newrelic.trace.v1.AttributeValue.int_value', index=2, - number=3, type=3, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='double_value', full_name='com.newrelic.trace.v1.AttributeValue.double_value', index=3, - number=4, type=1, cpp_type=5, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='value', full_name='com.newrelic.trace.v1.AttributeValue.value', - index=0, containing_type=None, fields=[]), - ], - serialized_start=570, - serialized_end=686, - ) - - - _RECORDSTATUS = _descriptor.Descriptor( - name='RecordStatus', - full_name='com.newrelic.trace.v1.RecordStatus', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='messages_seen', full_name='com.newrelic.trace.v1.RecordStatus.messages_seen', index=0, - number=1, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - 
syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=688, - serialized_end=725, - ) - - _SPAN_INTRINSICSENTRY.fields_by_name['value'].message_type = _ATTRIBUTEVALUE - _SPAN_INTRINSICSENTRY.containing_type = _SPAN - _SPAN_USERATTRIBUTESENTRY.fields_by_name['value'].message_type = _ATTRIBUTEVALUE - _SPAN_USERATTRIBUTESENTRY.containing_type = _SPAN - _SPAN_AGENTATTRIBUTESENTRY.fields_by_name['value'].message_type = _ATTRIBUTEVALUE - _SPAN_AGENTATTRIBUTESENTRY.containing_type = _SPAN - _SPAN.fields_by_name['intrinsics'].message_type = _SPAN_INTRINSICSENTRY - _SPAN.fields_by_name['user_attributes'].message_type = _SPAN_USERATTRIBUTESENTRY - _SPAN.fields_by_name['agent_attributes'].message_type = _SPAN_AGENTATTRIBUTESENTRY - _ATTRIBUTEVALUE.oneofs_by_name['value'].fields.append( - _ATTRIBUTEVALUE.fields_by_name['string_value']) - _ATTRIBUTEVALUE.fields_by_name['string_value'].containing_oneof = _ATTRIBUTEVALUE.oneofs_by_name['value'] - _ATTRIBUTEVALUE.oneofs_by_name['value'].fields.append( - _ATTRIBUTEVALUE.fields_by_name['bool_value']) - _ATTRIBUTEVALUE.fields_by_name['bool_value'].containing_oneof = _ATTRIBUTEVALUE.oneofs_by_name['value'] - _ATTRIBUTEVALUE.oneofs_by_name['value'].fields.append( - _ATTRIBUTEVALUE.fields_by_name['int_value']) - _ATTRIBUTEVALUE.fields_by_name['int_value'].containing_oneof = _ATTRIBUTEVALUE.oneofs_by_name['value'] - _ATTRIBUTEVALUE.oneofs_by_name['value'].fields.append( - _ATTRIBUTEVALUE.fields_by_name['double_value']) - _ATTRIBUTEVALUE.fields_by_name['double_value'].containing_oneof = _ATTRIBUTEVALUE.oneofs_by_name['value'] - DESCRIPTOR.message_types_by_name['Span'] = _SPAN - DESCRIPTOR.message_types_by_name['AttributeValue'] = _ATTRIBUTEVALUE - DESCRIPTOR.message_types_by_name['RecordStatus'] = _RECORDSTATUS - _sym_db.RegisterFileDescriptor(DESCRIPTOR) - - Span = _reflection.GeneratedProtocolMessageType('Span', (_message.Message,), { - - 'IntrinsicsEntry' : _reflection.GeneratedProtocolMessageType('IntrinsicsEntry', 
(_message.Message,), { - 'DESCRIPTOR' : _SPAN_INTRINSICSENTRY, - '__module__' : 'infinite_tracing_pb2' - # @@protoc_insertion_point(class_scope:com.newrelic.trace.v1.Span.IntrinsicsEntry) - }) - , - - 'UserAttributesEntry' : _reflection.GeneratedProtocolMessageType('UserAttributesEntry', (_message.Message,), { - 'DESCRIPTOR' : _SPAN_USERATTRIBUTESENTRY, - '__module__' : 'infinite_tracing_pb2' - # @@protoc_insertion_point(class_scope:com.newrelic.trace.v1.Span.UserAttributesEntry) - }) - , - - 'AgentAttributesEntry' : _reflection.GeneratedProtocolMessageType('AgentAttributesEntry', (_message.Message,), { - 'DESCRIPTOR' : _SPAN_AGENTATTRIBUTESENTRY, - '__module__' : 'infinite_tracing_pb2' - # @@protoc_insertion_point(class_scope:com.newrelic.trace.v1.Span.AgentAttributesEntry) - }) - , - 'DESCRIPTOR' : _SPAN, - '__module__' : 'infinite_tracing_pb2' - # @@protoc_insertion_point(class_scope:com.newrelic.trace.v1.Span) - }) - _sym_db.RegisterMessage(Span) - _sym_db.RegisterMessage(Span.IntrinsicsEntry) - _sym_db.RegisterMessage(Span.UserAttributesEntry) - _sym_db.RegisterMessage(Span.AgentAttributesEntry) - - AttributeValue = _reflection.GeneratedProtocolMessageType('AttributeValue', (_message.Message,), { - 'DESCRIPTOR' : _ATTRIBUTEVALUE, - '__module__' : 'infinite_tracing_pb2' - # @@protoc_insertion_point(class_scope:com.newrelic.trace.v1.AttributeValue) - }) - _sym_db.RegisterMessage(AttributeValue) - - RecordStatus = _reflection.GeneratedProtocolMessageType('RecordStatus', (_message.Message,), { - 'DESCRIPTOR' : _RECORDSTATUS, - '__module__' : 'infinite_tracing_pb2' - # @@protoc_insertion_point(class_scope:com.newrelic.trace.v1.RecordStatus) - }) - _sym_db.RegisterMessage(RecordStatus) - - - _SPAN_INTRINSICSENTRY._options = None - _SPAN_USERATTRIBUTESENTRY._options = None - _SPAN_AGENTATTRIBUTESENTRY._options = None - - _INGESTSERVICE = _descriptor.ServiceDescriptor( - name='IngestService', - full_name='com.newrelic.trace.v1.IngestService', - file=DESCRIPTOR, - 
index=0, - serialized_options=None, - serialized_start=727, - serialized_end=828, - methods=[ - _descriptor.MethodDescriptor( - name='RecordSpan', - full_name='com.newrelic.trace.v1.IngestService.RecordSpan', - index=0, - containing_service=None, - input_type=_SPAN, - output_type=_RECORDSTATUS, - serialized_options=None, - ), - ]) - _sym_db.RegisterServiceDescriptor(_INGESTSERVICE) - - DESCRIPTOR.services_by_name['IngestService'] = _INGESTSERVICE - - # @@protoc_insertion_point(module_scope) - +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: v1.proto +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n\x08v1.proto\x12\x15\x63om.newrelic.trace.v1"7\n\tSpanBatch\x12*\n\x05spans\x18\x01 \x03(\x0b\x32\x1b.com.newrelic.trace.v1.Span"\x86\x04\n\x04Span\x12\x10\n\x08trace_id\x18\x01 \x01(\t\x12?\n\nintrinsics\x18\x02 \x03(\x0b\x32+.com.newrelic.trace.v1.Span.IntrinsicsEntry\x12H\n\x0fuser_attributes\x18\x03 \x03(\x0b\x32/.com.newrelic.trace.v1.Span.UserAttributesEntry\x12J\n\x10\x61gent_attributes\x18\x04 \x03(\x0b\x32\x30.com.newrelic.trace.v1.Span.AgentAttributesEntry\x1aX\n\x0fIntrinsicsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x34\n\x05value\x18\x02 \x01(\x0b\x32%.com.newrelic.trace.v1.AttributeValue:\x02\x38\x01\x1a\\\n\x13UserAttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x34\n\x05value\x18\x02 \x01(\x0b\x32%.com.newrelic.trace.v1.AttributeValue:\x02\x38\x01\x1a]\n\x14\x41gentAttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x34\n\x05value\x18\x02 
\x01(\x0b\x32%.com.newrelic.trace.v1.AttributeValue:\x02\x38\x01"t\n\x0e\x41ttributeValue\x12\x16\n\x0cstring_value\x18\x01 \x01(\tH\x00\x12\x14\n\nbool_value\x18\x02 \x01(\x08H\x00\x12\x13\n\tint_value\x18\x03 \x01(\x03H\x00\x12\x16\n\x0c\x64ouble_value\x18\x04 \x01(\x01H\x00\x42\x07\n\x05value"%\n\x0cRecordStatus\x12\x15\n\rmessages_seen\x18\x01 \x01(\x04\x32\xc5\x01\n\rIngestService\x12T\n\nRecordSpan\x12\x1b.com.newrelic.trace.v1.Span\x1a#.com.newrelic.trace.v1.RecordStatus"\x00(\x01\x30\x01\x12^\n\x0fRecordSpanBatch\x12 .com.newrelic.trace.v1.SpanBatch\x1a#.com.newrelic.trace.v1.RecordStatus"\x00(\x01\x30\x01\x62\x06proto3' +) + + +_SPANBATCH = DESCRIPTOR.message_types_by_name["SpanBatch"] +_SPAN = DESCRIPTOR.message_types_by_name["Span"] +_SPAN_INTRINSICSENTRY = _SPAN.nested_types_by_name["IntrinsicsEntry"] +_SPAN_USERATTRIBUTESENTRY = _SPAN.nested_types_by_name["UserAttributesEntry"] +_SPAN_AGENTATTRIBUTESENTRY = _SPAN.nested_types_by_name["AgentAttributesEntry"] +_ATTRIBUTEVALUE = DESCRIPTOR.message_types_by_name["AttributeValue"] +_RECORDSTATUS = DESCRIPTOR.message_types_by_name["RecordStatus"] +SpanBatch = _reflection.GeneratedProtocolMessageType( + "SpanBatch", + (_message.Message,), + { + "DESCRIPTOR": _SPANBATCH, + "__module__": "v1_pb2" + # @@protoc_insertion_point(class_scope:com.newrelic.trace.v1.SpanBatch) + }, +) +_sym_db.RegisterMessage(SpanBatch) + +Span = _reflection.GeneratedProtocolMessageType( + "Span", + (_message.Message,), + { + "IntrinsicsEntry": _reflection.GeneratedProtocolMessageType( + "IntrinsicsEntry", + (_message.Message,), + { + "DESCRIPTOR": _SPAN_INTRINSICSENTRY, + "__module__": "v1_pb2" + # @@protoc_insertion_point(class_scope:com.newrelic.trace.v1.Span.IntrinsicsEntry) + }, + ), + "UserAttributesEntry": _reflection.GeneratedProtocolMessageType( + "UserAttributesEntry", + (_message.Message,), + { + "DESCRIPTOR": _SPAN_USERATTRIBUTESENTRY, + "__module__": "v1_pb2" + # 
@@protoc_insertion_point(class_scope:com.newrelic.trace.v1.Span.UserAttributesEntry) + }, + ), + "AgentAttributesEntry": _reflection.GeneratedProtocolMessageType( + "AgentAttributesEntry", + (_message.Message,), + { + "DESCRIPTOR": _SPAN_AGENTATTRIBUTESENTRY, + "__module__": "v1_pb2" + # @@protoc_insertion_point(class_scope:com.newrelic.trace.v1.Span.AgentAttributesEntry) + }, + ), + "DESCRIPTOR": _SPAN, + "__module__": "v1_pb2" + # @@protoc_insertion_point(class_scope:com.newrelic.trace.v1.Span) + }, +) +_sym_db.RegisterMessage(Span) +_sym_db.RegisterMessage(Span.IntrinsicsEntry) +_sym_db.RegisterMessage(Span.UserAttributesEntry) +_sym_db.RegisterMessage(Span.AgentAttributesEntry) + +AttributeValue = _reflection.GeneratedProtocolMessageType( + "AttributeValue", + (_message.Message,), + { + "DESCRIPTOR": _ATTRIBUTEVALUE, + "__module__": "v1_pb2" + # @@protoc_insertion_point(class_scope:com.newrelic.trace.v1.AttributeValue) + }, +) +_sym_db.RegisterMessage(AttributeValue) + +RecordStatus = _reflection.GeneratedProtocolMessageType( + "RecordStatus", + (_message.Message,), + { + "DESCRIPTOR": _RECORDSTATUS, + "__module__": "v1_pb2" + # @@protoc_insertion_point(class_scope:com.newrelic.trace.v1.RecordStatus) + }, +) +_sym_db.RegisterMessage(RecordStatus) + +_INGESTSERVICE = DESCRIPTOR.services_by_name["IngestService"] +if _descriptor._USE_C_DESCRIPTORS is False: + DESCRIPTOR._options = None + _SPAN_INTRINSICSENTRY._options = None + _SPAN_INTRINSICSENTRY._serialized_options = b"8\001" + _SPAN_USERATTRIBUTESENTRY._options = None + _SPAN_USERATTRIBUTESENTRY._serialized_options = b"8\001" + _SPAN_AGENTATTRIBUTESENTRY._options = None + _SPAN_AGENTATTRIBUTESENTRY._serialized_options = b"8\001" + _SPANBATCH._serialized_start = 35 + _SPANBATCH._serialized_end = 90 + _SPAN._serialized_start = 93 + _SPAN._serialized_end = 611 + _SPAN_INTRINSICSENTRY._serialized_start = 334 + _SPAN_INTRINSICSENTRY._serialized_end = 422 + _SPAN_USERATTRIBUTESENTRY._serialized_start = 424 + 
_SPAN_USERATTRIBUTESENTRY._serialized_end = 516 + _SPAN_AGENTATTRIBUTESENTRY._serialized_start = 518 + _SPAN_AGENTATTRIBUTESENTRY._serialized_end = 611 + _ATTRIBUTEVALUE._serialized_start = 613 + _ATTRIBUTEVALUE._serialized_end = 729 + _RECORDSTATUS._serialized_start = 731 + _RECORDSTATUS._serialized_end = 768 + _INGESTSERVICE._serialized_start = 771 + _INGESTSERVICE._serialized_end = 968 +# @@protoc_insertion_point(module_scope) diff --git a/newrelic/core/infinite_tracing_v4_pb2.py b/newrelic/core/infinite_tracing_v4_pb2.py index ae1739670..79e3ec4eb 100644 --- a/newrelic/core/infinite_tracing_v4_pb2.py +++ b/newrelic/core/infinite_tracing_v4_pb2.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - # Copyright 2010 New Relic, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -17,41 +15,126 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! # source: v1.proto """Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x08v1.proto\x12\x15\x63om.newrelic.trace.v1\"7\n\tSpanBatch\x12*\n\x05spans\x18\x01 \x03(\x0b\x32\x1b.com.newrelic.trace.v1.Span\"\x86\x04\n\x04Span\x12\x10\n\x08trace_id\x18\x01 \x01(\t\x12?\n\nintrinsics\x18\x02 \x03(\x0b\x32+.com.newrelic.trace.v1.Span.IntrinsicsEntry\x12H\n\x0fuser_attributes\x18\x03 \x03(\x0b\x32/.com.newrelic.trace.v1.Span.UserAttributesEntry\x12J\n\x10\x61gent_attributes\x18\x04 \x03(\x0b\x32\x30.com.newrelic.trace.v1.Span.AgentAttributesEntry\x1aX\n\x0fIntrinsicsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x34\n\x05value\x18\x02 
\x01(\x0b\x32%.com.newrelic.trace.v1.AttributeValue:\x02\x38\x01\x1a\\\n\x13UserAttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x34\n\x05value\x18\x02 \x01(\x0b\x32%.com.newrelic.trace.v1.AttributeValue:\x02\x38\x01\x1a]\n\x14\x41gentAttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x34\n\x05value\x18\x02 \x01(\x0b\x32%.com.newrelic.trace.v1.AttributeValue:\x02\x38\x01\"t\n\x0e\x41ttributeValue\x12\x16\n\x0cstring_value\x18\x01 \x01(\tH\x00\x12\x14\n\nbool_value\x18\x02 \x01(\x08H\x00\x12\x13\n\tint_value\x18\x03 \x01(\x03H\x00\x12\x16\n\x0c\x64ouble_value\x18\x04 \x01(\x01H\x00\x42\x07\n\x05value\"%\n\x0cRecordStatus\x12\x15\n\rmessages_seen\x18\x01 \x01(\x04\x32\xc5\x01\n\rIngestService\x12T\n\nRecordSpan\x12\x1b.com.newrelic.trace.v1.Span\x1a#.com.newrelic.trace.v1.RecordStatus\"\x00(\x01\x30\x01\x12^\n\x0fRecordSpanBatch\x12 .com.newrelic.trace.v1.SpanBatch\x1a#.com.newrelic.trace.v1.RecordStatus\"\x00(\x01\x30\x01\x62\x06proto3') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'v1_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - _SPAN_INTRINSICSENTRY._options = None - _SPAN_INTRINSICSENTRY._serialized_options = b'8\001' - _SPAN_USERATTRIBUTESENTRY._options = None - _SPAN_USERATTRIBUTESENTRY._serialized_options = b'8\001' - _SPAN_AGENTATTRIBUTESENTRY._options = None - _SPAN_AGENTATTRIBUTESENTRY._serialized_options = b'8\001' - _SPANBATCH._serialized_start=35 - _SPANBATCH._serialized_end=90 - _SPAN._serialized_start=93 - _SPAN._serialized_end=611 - _SPAN_INTRINSICSENTRY._serialized_start=334 - _SPAN_INTRINSICSENTRY._serialized_end=422 - _SPAN_USERATTRIBUTESENTRY._serialized_start=424 - _SPAN_USERATTRIBUTESENTRY._serialized_end=516 - _SPAN_AGENTATTRIBUTESENTRY._serialized_start=518 - _SPAN_AGENTATTRIBUTESENTRY._serialized_end=611 - _ATTRIBUTEVALUE._serialized_start=613 - _ATTRIBUTEVALUE._serialized_end=729 - 
_RECORDSTATUS._serialized_start=731 - _RECORDSTATUS._serialized_end=768 - _INGESTSERVICE._serialized_start=771 - _INGESTSERVICE._serialized_end=968 + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n\x08v1.proto\x12\x15\x63om.newrelic.trace.v1"7\n\tSpanBatch\x12*\n\x05spans\x18\x01 \x03(\x0b\x32\x1b.com.newrelic.trace.v1.Span"\x86\x04\n\x04Span\x12\x10\n\x08trace_id\x18\x01 \x01(\t\x12?\n\nintrinsics\x18\x02 \x03(\x0b\x32+.com.newrelic.trace.v1.Span.IntrinsicsEntry\x12H\n\x0fuser_attributes\x18\x03 \x03(\x0b\x32/.com.newrelic.trace.v1.Span.UserAttributesEntry\x12J\n\x10\x61gent_attributes\x18\x04 \x03(\x0b\x32\x30.com.newrelic.trace.v1.Span.AgentAttributesEntry\x1aX\n\x0fIntrinsicsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x34\n\x05value\x18\x02 \x01(\x0b\x32%.com.newrelic.trace.v1.AttributeValue:\x02\x38\x01\x1a\\\n\x13UserAttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x34\n\x05value\x18\x02 \x01(\x0b\x32%.com.newrelic.trace.v1.AttributeValue:\x02\x38\x01\x1a]\n\x14\x41gentAttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x34\n\x05value\x18\x02 \x01(\x0b\x32%.com.newrelic.trace.v1.AttributeValue:\x02\x38\x01"t\n\x0e\x41ttributeValue\x12\x16\n\x0cstring_value\x18\x01 \x01(\tH\x00\x12\x14\n\nbool_value\x18\x02 \x01(\x08H\x00\x12\x13\n\tint_value\x18\x03 \x01(\x03H\x00\x12\x16\n\x0c\x64ouble_value\x18\x04 \x01(\x01H\x00\x42\x07\n\x05value"%\n\x0cRecordStatus\x12\x15\n\rmessages_seen\x18\x01 \x01(\x04\x32\xc5\x01\n\rIngestService\x12T\n\nRecordSpan\x12\x1b.com.newrelic.trace.v1.Span\x1a#.com.newrelic.trace.v1.RecordStatus"\x00(\x01\x30\x01\x12^\n\x0fRecordSpanBatch\x12 .com.newrelic.trace.v1.SpanBatch\x1a#.com.newrelic.trace.v1.RecordStatus"\x00(\x01\x30\x01\x62\x06proto3' +) + + +_SPANBATCH = DESCRIPTOR.message_types_by_name["SpanBatch"] +_SPAN = DESCRIPTOR.message_types_by_name["Span"] +_SPAN_INTRINSICSENTRY = _SPAN.nested_types_by_name["IntrinsicsEntry"] +_SPAN_USERATTRIBUTESENTRY = _SPAN.nested_types_by_name["UserAttributesEntry"] 
+_SPAN_AGENTATTRIBUTESENTRY = _SPAN.nested_types_by_name["AgentAttributesEntry"] +_ATTRIBUTEVALUE = DESCRIPTOR.message_types_by_name["AttributeValue"] +_RECORDSTATUS = DESCRIPTOR.message_types_by_name["RecordStatus"] +SpanBatch = _reflection.GeneratedProtocolMessageType( + "SpanBatch", + (_message.Message,), + { + "DESCRIPTOR": _SPANBATCH, + "__module__": "v1_pb2" + # @@protoc_insertion_point(class_scope:com.newrelic.trace.v1.SpanBatch) + }, +) +_sym_db.RegisterMessage(SpanBatch) + +Span = _reflection.GeneratedProtocolMessageType( + "Span", + (_message.Message,), + { + "IntrinsicsEntry": _reflection.GeneratedProtocolMessageType( + "IntrinsicsEntry", + (_message.Message,), + { + "DESCRIPTOR": _SPAN_INTRINSICSENTRY, + "__module__": "v1_pb2" + # @@protoc_insertion_point(class_scope:com.newrelic.trace.v1.Span.IntrinsicsEntry) + }, + ), + "UserAttributesEntry": _reflection.GeneratedProtocolMessageType( + "UserAttributesEntry", + (_message.Message,), + { + "DESCRIPTOR": _SPAN_USERATTRIBUTESENTRY, + "__module__": "v1_pb2" + # @@protoc_insertion_point(class_scope:com.newrelic.trace.v1.Span.UserAttributesEntry) + }, + ), + "AgentAttributesEntry": _reflection.GeneratedProtocolMessageType( + "AgentAttributesEntry", + (_message.Message,), + { + "DESCRIPTOR": _SPAN_AGENTATTRIBUTESENTRY, + "__module__": "v1_pb2" + # @@protoc_insertion_point(class_scope:com.newrelic.trace.v1.Span.AgentAttributesEntry) + }, + ), + "DESCRIPTOR": _SPAN, + "__module__": "v1_pb2" + # @@protoc_insertion_point(class_scope:com.newrelic.trace.v1.Span) + }, +) +_sym_db.RegisterMessage(Span) +_sym_db.RegisterMessage(Span.IntrinsicsEntry) +_sym_db.RegisterMessage(Span.UserAttributesEntry) +_sym_db.RegisterMessage(Span.AgentAttributesEntry) + +AttributeValue = _reflection.GeneratedProtocolMessageType( + "AttributeValue", + (_message.Message,), + { + "DESCRIPTOR": _ATTRIBUTEVALUE, + "__module__": "v1_pb2" + # @@protoc_insertion_point(class_scope:com.newrelic.trace.v1.AttributeValue) + }, +) 
+_sym_db.RegisterMessage(AttributeValue) + +RecordStatus = _reflection.GeneratedProtocolMessageType( + "RecordStatus", + (_message.Message,), + { + "DESCRIPTOR": _RECORDSTATUS, + "__module__": "v1_pb2" + # @@protoc_insertion_point(class_scope:com.newrelic.trace.v1.RecordStatus) + }, +) +_sym_db.RegisterMessage(RecordStatus) + +_INGESTSERVICE = DESCRIPTOR.services_by_name["IngestService"] +if _descriptor._USE_C_DESCRIPTORS is False: + DESCRIPTOR._options = None + _SPAN_INTRINSICSENTRY._options = None + _SPAN_INTRINSICSENTRY._serialized_options = b"8\001" + _SPAN_USERATTRIBUTESENTRY._options = None + _SPAN_USERATTRIBUTESENTRY._serialized_options = b"8\001" + _SPAN_AGENTATTRIBUTESENTRY._options = None + _SPAN_AGENTATTRIBUTESENTRY._serialized_options = b"8\001" + _SPANBATCH._serialized_start = 35 + _SPANBATCH._serialized_end = 90 + _SPAN._serialized_start = 93 + _SPAN._serialized_end = 611 + _SPAN_INTRINSICSENTRY._serialized_start = 334 + _SPAN_INTRINSICSENTRY._serialized_end = 422 + _SPAN_USERATTRIBUTESENTRY._serialized_start = 424 + _SPAN_USERATTRIBUTESENTRY._serialized_end = 516 + _SPAN_AGENTATTRIBUTESENTRY._serialized_start = 518 + _SPAN_AGENTATTRIBUTESENTRY._serialized_end = 611 + _ATTRIBUTEVALUE._serialized_start = 613 + _ATTRIBUTEVALUE._serialized_end = 729 + _RECORDSTATUS._serialized_start = 731 + _RECORDSTATUS._serialized_end = 768 + _INGESTSERVICE._serialized_start = 771 + _INGESTSERVICE._serialized_end = 968 # @@protoc_insertion_point(module_scope) diff --git a/newrelic/core/stats_engine.py b/newrelic/core/stats_engine.py index 0e8e74546..959d4ffae 100644 --- a/newrelic/core/stats_engine.py +++ b/newrelic/core/stats_engine.py @@ -36,15 +36,20 @@ from newrelic.common.encoding_utils import json_encode from newrelic.common.object_names import parse_exc_info from newrelic.common.streaming_utils import StreamBuffer -from newrelic.core.attribute import create_user_attributes, process_user_attribute, truncate, MAX_LOG_MESSAGE_LENGTH +from newrelic.core.attribute 
import ( + MAX_LOG_MESSAGE_LENGTH, + create_user_attributes, + process_user_attribute, + truncate, +) from newrelic.core.attribute_filter import DST_ERROR_COLLECTOR from newrelic.core.code_level_metrics import extract_code_from_traceback from newrelic.core.config import is_expected_error, should_ignore_error from newrelic.core.database_utils import explain_plan from newrelic.core.error_collector import TracedError +from newrelic.core.log_event_node import LogEventNode from newrelic.core.metric import TimeMetric from newrelic.core.stack_trace import exception_stack -from newrelic.core.log_event_node import LogEventNode _logger = logging.getLogger(__name__) @@ -751,7 +756,6 @@ def notice_error(self, error=None, attributes=None, expected=None, ignore=None, self.record_time_metric(TimeMetric(name="Errors/all", scope="", duration=0.0, exclusive=None)) def _error_event(self, error): - # This method is for recording error events outside of transactions, # don't let the poorly named 'type' attribute fool you. @@ -772,7 +776,6 @@ def _error_event(self, error): return error_event def record_custom_event(self, event): - settings = self.__settings if not settings: @@ -964,7 +967,6 @@ def record_transaction(self, transaction): transaction_tracer = settings.transaction_tracer if not transaction.suppress_transaction_trace and transaction_tracer.enabled and settings.collect_traces: - # Transactions saved for Synthetics transactions # do not depend on the transaction threshold. 
@@ -987,7 +989,6 @@ def record_transaction(self, transaction): self._synthetics_events.add(event) elif settings.collect_analytics_events and settings.transaction_events.enabled: - event = transaction.transaction_event(self.__stats_table) self._transaction_events.add(event, priority=transaction.priority) @@ -1008,40 +1009,50 @@ def record_transaction(self, transaction): # Merge in log events - if settings and settings.application_logging and settings.application_logging.enabled and settings.application_logging.forwarding and settings.application_logging.forwarding.enabled: + if ( + settings + and settings.application_logging + and settings.application_logging.enabled + and settings.application_logging.forwarding + and settings.application_logging.forwarding.enabled + ): self._log_events.merge(transaction.log_events, priority=transaction.priority) - def record_log_event(self, message, level=None, timestamp=None, priority=None): settings = self.__settings - if not (settings and settings.application_logging and settings.application_logging.enabled and settings.application_logging.forwarding and settings.application_logging.forwarding.enabled): + if not ( + settings + and settings.application_logging + and settings.application_logging.enabled + and settings.application_logging.forwarding + and settings.application_logging.forwarding.enabled + ): return - + timestamp = timestamp if timestamp is not None else time.time() level = str(level) if level is not None else "UNKNOWN" if not message or message.isspace(): _logger.debug("record_log_event called where message was missing. 
No log event will be sent.") return - + message = truncate(message, MAX_LOG_MESSAGE_LENGTH) event = LogEventNode( timestamp=timestamp, level=level, message=message, - attributes=get_linking_metadata(), + attributes=get_linking_metadata(), ) if priority is None: # Base priority for log events outside transactions is below those inside transactions - priority = random.random() - 1 + priority = random.random() - 1 # nosec self._log_events.add(event, priority=priority) return event - def metric_data(self, normalizer=None): """Returns a list containing the low level metric data for sending to the core application pertaining to the reporting @@ -1115,7 +1126,6 @@ def error_data(self): return self.__transaction_errors def slow_sql_data(self, connections): - _logger.debug("Generating slow SQL data.") if not self.__settings: @@ -1134,7 +1144,6 @@ def slow_sql_data(self, connections): result = [] for stats_node in slow_sql_nodes: - slow_sql_node = stats_node.slow_sql_node params = slow_sql_node.params or {} @@ -1398,7 +1407,9 @@ def reset_stats(self, settings, reset_stream=False): self.reset_synthetics_events() # streams are never reset after instantiation if reset_stream: - self._span_stream = StreamBuffer(settings.infinite_tracing.span_queue_size) + self._span_stream = StreamBuffer( + settings.infinite_tracing.span_queue_size, batching=settings.infinite_tracing.batching + ) def reset_metric_stats(self): """Resets the accumulated statistics back to initial state for @@ -1612,7 +1623,6 @@ def merge_metric_stats(self, snapshot): stats.merge_stats(other) def _merge_transaction_events(self, snapshot, rollback=False): - # Merge in transaction events. In the normal case snapshot is a # StatsEngine from a single transaction, and should only have one # event. Just to avoid issues, if there is more than one, don't merge. 
@@ -1631,7 +1641,6 @@ def _merge_transaction_events(self, snapshot, rollback=False): self._transaction_events.merge(events) def _merge_synthetics_events(self, snapshot, rollback=False): - # Merge Synthetic analytic events, appending to the list # that contains events from previous transactions. In the normal # case snapshot is a StatsEngine from a single transaction, and should @@ -1648,7 +1657,6 @@ def _merge_synthetics_events(self, snapshot, rollback=False): self._synthetics_events.merge(events) def _merge_error_events(self, snapshot): - # Merge in error events. Since we are using reservoir sampling that # gives equal probability to keeping each event, merge is the same as # rollback. There may be multiple error events per transaction. @@ -1676,7 +1684,6 @@ def _merge_log_events(self, snapshot, rollback=False): self._log_events.merge(events) def _merge_error_traces(self, snapshot): - # Append snapshot error details at end to maintain time # based order and then trim at maximum to be kept. snapshot will # always have newer data. @@ -1686,7 +1693,6 @@ def _merge_error_traces(self, snapshot): self.__transaction_errors = self.__transaction_errors[:maximum] def _merge_sql(self, snapshot): - # Add sql traces to the set of existing entries. If over # the limit of how many to collect, only merge in if already # seen the specific SQL. @@ -1701,7 +1707,6 @@ def _merge_sql(self, snapshot): stats.merge_stats(slow_sql_stats) def _merge_traces(self, snapshot): - # Limit number of Synthetics transactions maximum = self.__settings.agent_limits.synthetics_transactions diff --git a/tests/agent_streaming/_test_handler.py b/tests/agent_streaming/_test_handler.py index 9fa8b19f8..d46e72f4a 100644 --- a/tests/agent_streaming/_test_handler.py +++ b/tests/agent_streaming/_test_handler.py @@ -12,36 +12,84 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import time +from collections import deque from concurrent import futures +from threading import Event import grpc -from newrelic.core.infinite_tracing_pb2 import RecordStatus, Span + +from newrelic.core.infinite_tracing_pb2 import RecordStatus, Span, SpanBatch + +SPANS_PROCESSED_EVENT = Event() +SPANS_RECEIVED = deque() +SPAN_BATCHES_RECEIVED = deque() def record_span(request, context): metadata = dict(context.invocation_metadata()) - assert 'agent_run_token' in metadata - assert 'license_key' in metadata + assert "agent_run_token" in metadata + assert "license_key" in metadata for span in request: - status_code = span.intrinsics.get('status_code', None) - status_code = status_code and getattr( - grpc.StatusCode, status_code.string_value) + SPANS_RECEIVED.append(span) + SPANS_PROCESSED_EVENT.set() + + # Handle injecting status codes. + status_code = span.intrinsics.get("status_code", None) + status_code = status_code and getattr(grpc.StatusCode, status_code.string_value) if status_code is grpc.StatusCode.OK: - break + return elif status_code: context.abort(status_code, "Abort triggered by client") + # Give the client time to enter the wait condition before closing the server. + if span.intrinsics.get("wait_then_ok", None): + # Wait long enough that the client is now waiting for more spans and stuck in notify.wait(). + time.sleep(1) + return + yield RecordStatus(messages_seen=1) +def record_span_batch(request, context): + metadata = dict(context.invocation_metadata()) + assert "agent_run_token" in metadata + assert "license_key" in metadata + + for span_batch in request: + SPAN_BATCHES_RECEIVED.append(span_batch) + SPANS_PROCESSED_EVENT.set() + batch_size = 0 + + for span in span_batch.spans: + # Handle injecting status codes. 
+ status_code = span.intrinsics.get("status_code", None) + status_code = status_code and getattr(grpc.StatusCode, status_code.string_value) + if status_code is grpc.StatusCode.OK: + return + elif status_code: + context.abort(status_code, "Abort triggered by client") + + # Give the client time to enter the wait condition before closing the server. + if span.intrinsics.get("wait_then_ok", None): + # Wait long enough that the client is now waiting for more spans and stuck in notify.wait(). + time.sleep(1) + return + + yield RecordStatus(messages_seen=batch_size) + + HANDLERS = ( grpc.method_handlers_generic_handler( "com.newrelic.trace.v1.IngestService", { "RecordSpan": grpc.stream_stream_rpc_method_handler( record_span, Span.FromString, RecordStatus.SerializeToString - ) + ), + "RecordSpanBatch": grpc.stream_stream_rpc_method_handler( + record_span_batch, SpanBatch.FromString, RecordStatus.SerializeToString + ), }, ), ) diff --git a/tests/agent_streaming/conftest.py b/tests/agent_streaming/conftest.py index 13e31da60..761aea644 100644 --- a/tests/agent_streaming/conftest.py +++ b/tests/agent_streaming/conftest.py @@ -12,17 +12,17 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import pytest -import random +import threading +import pytest +from testing_support.fixtures import collector_available_fixture # noqa from testing_support.fixtures import ( code_coverage_fixture, collector_agent_registration_fixture, - collector_available_fixture, ) from testing_support.mock_external_grpc_server import MockExternalgRPCServer + from newrelic.common.streaming_utils import StreamBuffer -import threading CONDITION_CLS = type(threading.Condition()) @@ -40,20 +40,19 @@ "agent_limits.errors_per_harvest": 100, "distributed_tracing.enabled": True, "infinite_tracing.trace_observer_host": "nr-internal.aws-us-east-2.tracing.staging-edge.nr-data.net", + "infinite_tracing.compression": True, "debug.connect_span_stream_in_developer_mode": True, } collector_agent_registration = collector_agent_registration_fixture( - app_name="Python Agent Test (agent_streaming)", - default_settings=_default_settings + app_name="Python Agent Test (agent_streaming)", default_settings=_default_settings ) @pytest.fixture(scope="module") def grpc_app_server(): - port = random.randint(50000, 50099) - with MockExternalgRPCServer(port=port) as server: - yield server, port + with MockExternalgRPCServer() as server: + yield server, server.port @pytest.fixture(scope="module") @@ -83,5 +82,34 @@ def buffer_empty_event(monkeypatch): def condition(*args, **kwargs): return SetEventOnWait(event, *args, **kwargs) - monkeypatch.setattr(StreamBuffer, 'condition', condition) + monkeypatch.setattr(StreamBuffer, "condition", condition) return event + + +@pytest.fixture(scope="session", params=[pytest.param(True, id="batching"), pytest.param(False, id="nonbatching")]) +def batching(request): + return request.param + + +@pytest.fixture(scope="function") +def spans_received(): + from _test_handler import SPANS_RECEIVED + + SPANS_RECEIVED.clear() + return SPANS_RECEIVED + + +@pytest.fixture(scope="function") +def span_batches_received(): + from _test_handler import SPAN_BATCHES_RECEIVED + + 
SPAN_BATCHES_RECEIVED.clear() + return SPAN_BATCHES_RECEIVED + + +@pytest.fixture(scope="function") +def spans_processed_event(): + from _test_handler import SPANS_PROCESSED_EVENT + + SPANS_PROCESSED_EVENT.clear() + return SPANS_PROCESSED_EVENT diff --git a/tests/agent_streaming/test_infinite_tracing.py b/tests/agent_streaming/test_infinite_tracing.py index 8942e6a75..f1119c38c 100644 --- a/tests/agent_streaming/test_infinite_tracing.py +++ b/tests/agent_streaming/test_infinite_tracing.py @@ -12,17 +12,20 @@ # See the License for the specific language governing permissions and # limitations under the License. -import pytest import threading +import time -from newrelic.core.config import global_settings +import pytest from testing_support.fixtures import override_generic_settings +from testing_support.util import conditional_decorator +from testing_support.validators.validate_metric_payload import validate_metric_payload -from newrelic.core.application import Application +from newrelic.common.streaming_utils import StreamBuffer from newrelic.core.agent_streaming import StreamingRpc -from newrelic.core.infinite_tracing_pb2 import Span, AttributeValue -from testing_support.validators.validate_metric_payload import ( - validate_metric_payload) +from newrelic.core.application import Application +from newrelic.core.config import global_settings +from newrelic.core.infinite_tracing_pb2 import AttributeValue, Span +from newrelic.packages import six settings = global_settings() @@ -31,7 +34,7 @@ @pytest.fixture() def app(): - app = Application('Python Agent Test (Infinite Tracing)') + app = Application("Python Agent Test (Infinite Tracing)") yield app # Calling internal_agent_shutdown on an application that is already closed # will raise an exception. 
@@ -40,22 +43,31 @@ def app(): app.internal_agent_shutdown(restart=False) except: pass - if active_session: + if active_session and active_session._rpc is not None: assert not active_session._rpc.response_processing_thread.is_alive() assert not active_session._rpc.channel @pytest.mark.parametrize( - 'status_code, metrics', ( - ('UNIMPLEMENTED', [ - ('Supportability/InfiniteTracing/Span/gRPC/UNIMPLEMENTED', 1), - ('Supportability/InfiniteTracing/Span/Response/Error', 1)]), - ('INTERNAL', [ - ('Supportability/InfiniteTracing/Span/gRPC/INTERNAL', 1), - ('Supportability/InfiniteTracing/Span/Response/Error', 1)]), - )) -def test_infinite_tracing_span_streaming(mock_grpc_server, - status_code, metrics, monkeypatch, app): + "status_code, metrics", + ( + ( + "UNIMPLEMENTED", + [ + ("Supportability/InfiniteTracing/Span/gRPC/UNIMPLEMENTED", 1), + ("Supportability/InfiniteTracing/Span/Response/Error", 1), + ], + ), + ( + "INTERNAL", + [ + ("Supportability/InfiniteTracing/Span/gRPC/INTERNAL", 1), + ("Supportability/InfiniteTracing/Span/Response/Error", 1), + ], + ), + ), +) +def test_infinite_tracing_span_streaming(mock_grpc_server, status_code, metrics, monkeypatch, app, batching): event = threading.Event() class TerminateOnWait(CONDITION_CLS): @@ -71,21 +83,24 @@ def wait(self, *args, **kwargs): def condition(*args, **kwargs): return TerminateOnWait(*args, **kwargs) - monkeypatch.setattr(StreamingRpc, 'condition', condition) + monkeypatch.setattr(StreamingRpc, "condition", condition) span = Span( - intrinsics={'status_code': AttributeValue(string_value=status_code)}, - agent_attributes={}, - user_attributes={}) - - @override_generic_settings(settings, { - 'distributed_tracing.enabled': True, - 'span_events.enabled': True, - 'infinite_tracing.trace_observer_host': 'localhost', - 'infinite_tracing.trace_observer_port': mock_grpc_server, - 'infinite_tracing.ssl': False, - }) - @validate_metric_payload(metrics=metrics) + intrinsics={"status_code": 
AttributeValue(string_value=status_code)}, agent_attributes={}, user_attributes={} + ) + + @override_generic_settings( + settings, + { + "distributed_tracing.enabled": True, + "span_events.enabled": True, + "infinite_tracing.trace_observer_host": "localhost", + "infinite_tracing.trace_observer_port": mock_grpc_server, + "infinite_tracing.ssl": False, + "infinite_tracing.batching": batching, + }, + ) + @validate_metric_payload(metrics) def _test(): app.connect_to_data_collector(None) @@ -98,9 +113,7 @@ def _test(): _test() -def test_reconnect_on_failure(monkeypatch, mock_grpc_server, - buffer_empty_event, app): - +def test_reconnect_on_failure(monkeypatch, mock_grpc_server, buffer_empty_event, app, batching): status_code = "INTERNAL" wait_event = threading.Event() continue_event = threading.Event() @@ -115,25 +128,25 @@ def wait(self, *args, **kwargs): def condition(*args, **kwargs): return WaitOnWait(*args, **kwargs) - monkeypatch.setattr(StreamingRpc, 'condition', condition) + monkeypatch.setattr(StreamingRpc, "condition", condition) terminating_span = Span( - intrinsics={'status_code': AttributeValue(string_value=status_code)}, - agent_attributes={}, - user_attributes={}) + intrinsics={"status_code": AttributeValue(string_value=status_code)}, agent_attributes={}, user_attributes={} + ) - span = Span( - intrinsics={}, - agent_attributes={}, - user_attributes={}) - - @override_generic_settings(settings, { - 'distributed_tracing.enabled': True, - 'span_events.enabled': True, - 'infinite_tracing.trace_observer_host': 'localhost', - 'infinite_tracing.trace_observer_port': mock_grpc_server, - 'infinite_tracing.ssl': False, - }) + span = Span(intrinsics={}, agent_attributes={}, user_attributes={}) + + @override_generic_settings( + settings, + { + "distributed_tracing.enabled": True, + "span_events.enabled": True, + "infinite_tracing.trace_observer_host": "localhost", + "infinite_tracing.trace_observer_port": mock_grpc_server, + "infinite_tracing.ssl": False, + 
"infinite_tracing.batching": batching, + }, + ) def _test(): app.connect_to_data_collector(None) @@ -182,7 +195,7 @@ def test_agent_restart(app): assert rpc.response_processing_thread.is_alive() -def test_disconnect_on_UNIMPLEMENTED(mock_grpc_server, monkeypatch, app): +def test_disconnect_on_UNIMPLEMENTED(mock_grpc_server, monkeypatch, app, batching): event = threading.Event() class WaitOnNotify(CONDITION_CLS): @@ -194,21 +207,25 @@ def notify_all(self, *args, **kwargs): def condition(*args, **kwargs): return WaitOnNotify(*args, **kwargs) - monkeypatch.setattr(StreamingRpc, 'condition', condition) + monkeypatch.setattr(StreamingRpc, "condition", condition) terminating_span = Span( - intrinsics={'status_code': AttributeValue( - string_value='UNIMPLEMENTED')}, + intrinsics={"status_code": AttributeValue(string_value="UNIMPLEMENTED")}, agent_attributes={}, - user_attributes={}) - - @override_generic_settings(settings, { - 'distributed_tracing.enabled': True, - 'span_events.enabled': True, - 'infinite_tracing.trace_observer_host': 'localhost', - 'infinite_tracing.trace_observer_port': mock_grpc_server, - 'infinite_tracing.ssl': False, - }) + user_attributes={}, + ) + + @override_generic_settings( + settings, + { + "distributed_tracing.enabled": True, + "span_events.enabled": True, + "infinite_tracing.trace_observer_host": "localhost", + "infinite_tracing.trace_observer_port": mock_grpc_server, + "infinite_tracing.ssl": False, + "infinite_tracing.batching": batching, + }, + ) def _test(): app.connect_to_data_collector(None) @@ -228,7 +245,7 @@ def _test(): def test_agent_shutdown(): # Get the application connected to the actual 8T endpoint - app = Application('Python Agent Test (Infinite Tracing)') + app = Application("Python Agent Test (Infinite Tracing)") app.connect_to_data_collector(None) rpc = app._active_session._rpc # Store references to the original rpc and threads @@ -239,39 +256,57 @@ def test_agent_shutdown(): @pytest.mark.xfail(reason="This test is flaky", 
strict=False) -def test_no_delay_on_ok(mock_grpc_server, monkeypatch, app): +def test_no_delay_on_ok(mock_grpc_server, monkeypatch, app, batching): wait_event = threading.Event() connect_event = threading.Event() - metrics = [('Supportability/InfiniteTracing/Span/gRPC/OK', 1), - ('Supportability/InfiniteTracing/Span/Response/Error', None)] + metrics = [ + ("Supportability/InfiniteTracing/Span/gRPC/OK", 1), + ("Supportability/InfiniteTracing/Span/Response/Error", None), + ] class SetFlagOnWait(CONDITION_CLS): + def __init__(self, event, *args, **kwargs): + super(SetFlagOnWait, self).__init__(*args, **kwargs) + self.event = event + def wait(self, *args, **kwargs): - wait_event.set() + self.event.set() return super(SetFlagOnWait, self).wait(*args, **kwargs) @staticmethod def condition(*args, **kwargs): - return SetFlagOnWait(*args, **kwargs) + return SetFlagOnWait(wait_event, *args, **kwargs) + + _create_channel = StreamingRpc.create_channel + + def create_channel(self, *args, **kwargs): + ret = _create_channel(self, *args, **kwargs) + connect_event.set() + return ret + + monkeypatch.setattr(StreamingRpc, "condition", condition) + monkeypatch.setattr(StreamingRpc, "create_channel", create_channel) - monkeypatch.setattr(StreamingRpc, 'condition', condition) span = Span( intrinsics={"status_code": AttributeValue(string_value="OK")}, agent_attributes={}, user_attributes={}, ) - @override_generic_settings(settings, { - 'distributed_tracing.enabled': True, - 'span_events.enabled': True, - 'infinite_tracing.trace_observer_host': 'localhost', - 'infinite_tracing.trace_observer_port': mock_grpc_server, - 'infinite_tracing.ssl': False, - }) - @validate_metric_payload(metrics=metrics) + @override_generic_settings( + settings, + { + "distributed_tracing.enabled": True, + "span_events.enabled": True, + "infinite_tracing.trace_observer_host": "localhost", + "infinite_tracing.trace_observer_port": mock_grpc_server, + "infinite_tracing.ssl": False, + "infinite_tracing.batching": 
batching, + }, + ) + @validate_metric_payload(metrics) def _test(): - def connect_complete(): connect_event.set() @@ -284,15 +319,6 @@ def connect_complete(): stream_buffer = app._stats_engine.span_stream rpc = app._active_session._rpc - _rpc = rpc.rpc - - def patched_rpc(*args, **kwargs): - connect_event.set() - return _rpc(*args, **kwargs) - - rpc.rpc = patched_rpc - - # Put a span that will trigger an OK status code and wait for an attempted # reconnect. stream_buffer.put(span) @@ -302,3 +328,182 @@ def patched_rpc(*args, **kwargs): app.harvest() _test() + + +@conditional_decorator( + condition=six.PY2, decorator=pytest.mark.xfail(reason="Test frequently times out on Py2.", strict=False) +) +def test_no_data_loss_on_reconnect(mock_grpc_server, app, buffer_empty_event, batching, spans_processed_event): + """ + Test for data loss when channel is closed by the server while waiting for more data in a request iterator. + + This is a bug that's caused by the periodic (15 second) disconnects issued by the trace observer. To observe, + wait long enough in __next__'s notify.wait() call until the server issues a grpc.StatusCode.OK causing a + disconnect and reconnect. Alternatively in the case of this test, we use a mock server to issue one at the + appropriate moment rather than waiting for a real trace observer to issue a disconnect. + + While in this state, the very next span placed in the StreamBuffer would wake up the request_iterator for the + now closed channel (which was waiting in the __next__ function) and be consumed. The channel, being closed, + would discard the data and finish shutting down. This is now prevented by guards checking if the channel is + closed before popping any data inside the request iterator, which instead raises a StopIteration. 
+ + Relevant GitHub issue: https://github.com/grpc/grpc/issues/29110 + """ + + terminating_span = Span( + intrinsics={"wait_then_ok": AttributeValue(string_value="OK")}, agent_attributes={}, user_attributes={} + ) + + span = Span(intrinsics={}, agent_attributes={}, user_attributes={}) + + @override_generic_settings( + settings, + { + "distributed_tracing.enabled": True, + "span_events.enabled": True, + "infinite_tracing.trace_observer_host": "localhost", + "infinite_tracing.trace_observer_port": mock_grpc_server, + "infinite_tracing.ssl": False, + "infinite_tracing.batching": batching, + }, + ) + def _test(): + # Connect to app and retrieve references to various components + app.connect_to_data_collector(None) + + stream_buffer = app._stats_engine.span_stream + rpc = app._active_session._rpc + request_iterator = rpc.request_iterator + + # Wait until iterator is waiting on spans + assert buffer_empty_event.wait(timeout=5) + buffer_empty_event.clear() + + # Send a span that will trigger disconnect + stream_buffer.put(terminating_span) + + # Wait for spans to be processed by server + assert spans_processed_event.wait(timeout=5) + spans_processed_event.clear() + + # Wait for OK status code to close the channel + start_time = time.time() + while not (request_iterator._stream and request_iterator._stream.done()): + assert time.time() - start_time < 5, "Timed out waiting for OK status code." + time.sleep(0.5) + + # Put new span and wait until buffer has been emptied and either sent or lost + stream_buffer.put(span) + assert spans_processed_event.wait(timeout=5), "Data lost in stream buffer iterator." 
+ + _test() + + +@pytest.mark.parametrize("dropped_spans", [0, 1]) +def test_span_supportability_metrics(mock_grpc_server, monkeypatch, app, dropped_spans, batching): + wait_event = threading.Event() + continue_event = threading.Event() + + total_spans = 3 + metrics = [ + ("Supportability/InfiniteTracing/Span/Seen", total_spans), + ( + "Supportability/InfiniteTracing/Span/Sent", + (total_spans - dropped_spans) or None, + ), # Replace 0 with None to indicate metric will not be sent + ] + + class WaitOnWait(CONDITION_CLS): + def wait(self, *args, **kwargs): + wait_event.set() + ret = super(WaitOnWait, self).wait(*args, **kwargs) + assert continue_event.wait(timeout=5) + return ret + + @staticmethod + def condition(*args, **kwargs): + return WaitOnWait(*args, **kwargs) + + monkeypatch.setattr(StreamBuffer, "condition", condition) + + span = Span( + intrinsics={}, + agent_attributes={}, + user_attributes={}, + ) + + @override_generic_settings( + settings, + { + "distributed_tracing.enabled": True, + "span_events.enabled": True, + "infinite_tracing.trace_observer_host": "localhost", + "infinite_tracing.trace_observer_port": mock_grpc_server, + "infinite_tracing.ssl": False, + "infinite_tracing.batching": batching, + "infinite_tracing.span_queue_size": total_spans - dropped_spans, + }, + ) + @validate_metric_payload(metrics) + def _test(): + app.connect_to_data_collector(None) + + assert wait_event.wait(timeout=5) + + stream_buffer = app._stats_engine.span_stream + + # Put enough spans to overflow buffer + for _ in range(total_spans): + stream_buffer.put(span) + + # Harvest all spans simultaneously + wait_event.clear() + continue_event.set() + assert wait_event.wait(timeout=5) + wait_event.clear() + + app.harvest() + + _test() + + +@pytest.mark.parametrize("trace_observer_host", ["localhost", None]) +@pytest.mark.parametrize("batching", [True, False]) +@pytest.mark.parametrize("compression", [True, False]) +def test_settings_supportability_metrics(mock_grpc_server, app, 
trace_observer_host, batching, compression): + connect_event = threading.Event() + + enabled = bool(trace_observer_host) + + metrics = [ + ("Supportability/InfiniteTracing/gRPC/Batching/enabled", 1 if enabled and batching else None), + ("Supportability/InfiniteTracing/gRPC/Batching/disabled", 1 if enabled and not batching else None), + ("Supportability/InfiniteTracing/gRPC/Compression/enabled", 1 if enabled and compression else None), + ("Supportability/InfiniteTracing/gRPC/Compression/disabled", 1 if enabled and not compression else None), + ] + + @override_generic_settings( + settings, + { + "distributed_tracing.enabled": True, + "span_events.enabled": True, + "infinite_tracing.trace_observer_host": trace_observer_host, + "infinite_tracing.trace_observer_port": mock_grpc_server, + "infinite_tracing.ssl": False, + "infinite_tracing.batching": batching, + "infinite_tracing.compression": compression, + }, + ) + @validate_metric_payload(metrics) + def _test(): + def connect_complete(): + connect_event.set() + + app.connect_to_data_collector(connect_complete) + + assert connect_event.wait(timeout=5) + connect_event.clear() + + app.harvest() + + _test() diff --git a/tests/agent_streaming/test_stream_buffer.py b/tests/agent_streaming/test_stream_buffer.py new file mode 100644 index 000000000..80551e9d3 --- /dev/null +++ b/tests/agent_streaming/test_stream_buffer.py @@ -0,0 +1,87 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import pytest +from conftest import CONDITION_CLS + +from newrelic.common.streaming_utils import StreamBuffer, StreamBufferIterator +from newrelic.core.infinite_tracing_pb2 import Span, SpanBatch + + +class StopIterationOnWait(CONDITION_CLS): + def wait(self, *args, **kwargs): + raise StopIteration() + + +@staticmethod +def stop_iteration_condition(*args, **kwargs): + return StopIterationOnWait(*args, **kwargs) + + +@pytest.fixture(scope="function") +def stop_iteration_on_wait(monkeypatch): + monkeypatch.setattr(StreamBuffer, "condition", stop_iteration_condition) + + +def test_stream_buffer_iterator_batching(stop_iteration_on_wait, batching): + stream_buffer = StreamBuffer(5, batching=batching) + + for _ in range(5): + span = Span(intrinsics={}, agent_attributes={}, user_attributes={}) + stream_buffer.put(span) + + buffer_contents = list(stream_buffer) + if batching: + assert len(buffer_contents) == 1 + assert isinstance(buffer_contents.pop(), SpanBatch) + else: + assert len(buffer_contents) == 5 + assert all(isinstance(element, Span) for element in stream_buffer) + + +def test_stream_buffer_iterator_max_batch_size(stop_iteration_on_wait): + stream_buffer = StreamBuffer(StreamBufferIterator.MAX_BATCH_SIZE + 1, batching=True) + + # Add 1 more span than the maximum batch size + for _ in range(StreamBufferIterator.MAX_BATCH_SIZE + 1): + span = Span(intrinsics={}, agent_attributes={}, user_attributes={}) + stream_buffer.put(span) + + # Pull all span batches out of buffer + buffer_contents = list(stream_buffer) + assert len(buffer_contents) == 2 + + # Large batch + batch = buffer_contents.pop(0) + assert isinstance(batch, SpanBatch) + assert len(batch.spans) == StreamBufferIterator.MAX_BATCH_SIZE + + # Single span batch + batch = buffer_contents.pop(0) + assert isinstance(batch, SpanBatch) + assert len(batch.spans) == 1 + + +def test_stream_buffer_queue_size(): + stream_buffer = StreamBuffer(1) + + # Add more spans than queue can hold + for _ in range(2): + span = 
Span(intrinsics={}, agent_attributes={}, user_attributes={}) + stream_buffer.put(span) + + # Ensure spans are dropped and not stored + assert len(stream_buffer) == 1 + assert stream_buffer._dropped == 1 + assert stream_buffer._seen == 2 diff --git a/tests/agent_streaming/test_streaming_rpc.py b/tests/agent_streaming/test_streaming_rpc.py index 3cf5ccc25..3ab74086e 100644 --- a/tests/agent_streaming/test_streaming_rpc.py +++ b/tests/agent_streaming/test_streaming_rpc.py @@ -14,10 +14,13 @@ import threading -from newrelic.core.agent_streaming import StreamingRpc -from newrelic.common.streaming_utils import StreamBuffer -from newrelic.core.infinite_tracing_pb2 import Span, AttributeValue +import pytest +from testing_support.fixtures import override_generic_settings +from newrelic.common.streaming_utils import StreamBuffer +from newrelic.core.agent_streaming import StreamingRpc +from newrelic.core.config import global_settings +from newrelic.core.infinite_tracing_pb2 import AttributeValue, Span CONDITION_CLS = type(threading.Condition()) DEFAULT_METADATA = (("agent_run_token", ""), ("license_key", "")) @@ -27,13 +30,54 @@ def record_metric(*args, **kwargs): pass -def test_close_before_connect(mock_grpc_server): +# This enumeration is taken from gRPC's implementation for compression: +# https://grpc.github.io/grpc/python/grpc.html#compression +@pytest.mark.parametrize( + "compression_setting, gRPC_compression_val", + ( + (None, 0), + (True, 2), + (False, 0), + ), +) +def test_correct_settings(mock_grpc_server, compression_setting, gRPC_compression_val): + settings = global_settings() + + @override_generic_settings( + settings, + { + "distributed_tracing.enabled": True, + "infinite_tracing.trace_observer_host": "localhost", + "infinite_tracing.trace_observer_port": mock_grpc_server, + "infinite_tracing.ssl": False, + "infinite_tracing.compression": compression_setting, + }, + ) + def _test(): + endpoint = "localhost:%s" % mock_grpc_server + stream_buffer = 
StreamBuffer(1) + + rpc = StreamingRpc( + endpoint, + stream_buffer, + DEFAULT_METADATA, + record_metric, + ssl=False, + compression=settings.infinite_tracing.compression, + ) + + rpc.connect() + assert rpc.compression_setting.value == gRPC_compression_val + rpc.close() + + _test() + + +def test_close_before_connect(mock_grpc_server, batching): endpoint = "localhost:%s" % mock_grpc_server - stream_buffer = StreamBuffer(0) + stream_buffer = StreamBuffer(0, batching=batching) - rpc = StreamingRpc( - endpoint, stream_buffer, DEFAULT_METADATA, record_metric, ssl=False - ) + rpc = StreamingRpc(endpoint, stream_buffer, DEFAULT_METADATA, record_metric, ssl=False) # Calling close will close the grpc channel rpc.close() @@ -44,13 +88,11 @@ def test_close_before_connect(mock_grpc_server): assert not rpc.response_processing_thread.is_alive() -def test_close_while_connected(mock_grpc_server, buffer_empty_event): +def test_close_while_connected(mock_grpc_server, buffer_empty_event, batching): endpoint = "localhost:%s" % mock_grpc_server - stream_buffer = StreamBuffer(1) + stream_buffer = StreamBuffer(1, batching=batching) - rpc = StreamingRpc( - endpoint, stream_buffer, DEFAULT_METADATA, record_metric, ssl=False - ) + rpc = StreamingRpc(endpoint, stream_buffer, DEFAULT_METADATA, record_metric, ssl=False) rpc.connect() # Check the processing thread is alive and spans are being sent @@ -67,7 +109,7 @@ def test_close_while_connected(mock_grpc_server, buffer_empty_event): assert not rpc.response_processing_thread.is_alive() -def test_close_while_awaiting_reconnect(mock_grpc_server, monkeypatch): +def test_close_while_awaiting_reconnect(mock_grpc_server, monkeypatch, batching): event = threading.Event() class WaitOnWait(CONDITION_CLS): @@ -89,11 +131,9 @@ def condition(*args, **kwargs): ) endpoint = "localhost:%s" % mock_grpc_server - stream_buffer = StreamBuffer(1) + stream_buffer = StreamBuffer(1, batching=batching) - rpc = StreamingRpc( - endpoint, stream_buffer, 
DEFAULT_METADATA, record_metric, ssl=False - ) + rpc = StreamingRpc(endpoint, stream_buffer, DEFAULT_METADATA, record_metric, ssl=False) rpc.connect() # Send a span to trigger reconnect @@ -104,3 +144,42 @@ def condition(*args, **kwargs): rpc.close() # Make sure the processing_thread is closed assert not rpc.response_processing_thread.is_alive() + + +@pytest.mark.parametrize("compression", (True, False)) +def test_rpc_serialization_and_deserialization( + mock_grpc_server, + batching, + compression, + buffer_empty_event, + spans_received, + span_batches_received, + spans_processed_event, +): + """StreamingRPC sends deserializable span to correct endpoint.""" + + endpoint = "localhost:%s" % mock_grpc_server + stream_buffer = StreamBuffer(1, batching=batching) + + span = Span( + intrinsics={}, + agent_attributes={}, + user_attributes={}, + ) + + rpc = StreamingRpc(endpoint, stream_buffer, DEFAULT_METADATA, record_metric, compression=compression, ssl=False) + + rpc.connect() + + buffer_empty_event.clear() + stream_buffer.put(span) + + assert buffer_empty_event.wait(5) + assert spans_processed_event.wait(5) + + if batching: + assert not spans_received, "Spans incorrectly received." + assert span_batches_received, "No span batches received." + else: + assert not span_batches_received, "Span batches incorrectly received." + assert spans_received, "No spans received." 
diff --git a/tests/agent_unittests/conftest.py b/tests/agent_unittests/conftest.py index 93f3228ad..fd5630f81 100644 --- a/tests/agent_unittests/conftest.py +++ b/tests/agent_unittests/conftest.py @@ -49,6 +49,7 @@ class FakeProtos(object): Span = object() + SpanBatch = object() sys.modules["grpc"] = object() diff --git a/tests/agent_unittests/test_harvest_loop.py b/tests/agent_unittests/test_harvest_loop.py index 7760e1307..0df575b9f 100644 --- a/tests/agent_unittests/test_harvest_loop.py +++ b/tests/agent_unittests/test_harvest_loop.py @@ -163,7 +163,6 @@ def transaction_node(request): def validate_metric_payload(metrics=[], endpoints_called=[]): - sent_metrics = {} @transient_function_wrapper("newrelic.core.agent_protocol", "AgentProtocol.send") @@ -204,7 +203,6 @@ def validate(): def validate_transaction_event_payloads(payload_validators): @function_wrapper def _wrapper(wrapped, instance, args, kwargs): - payloads = [] @transient_function_wrapper("newrelic.core.agent_protocol", "AgentProtocol.send") @@ -319,7 +317,6 @@ def test_serverless_application_harvest(): ], ) def test_application_harvest_with_spans(distributed_tracing_enabled, span_events_enabled, spans_created): - span_endpoints_called = [] max_samples_stored = 10 @@ -381,7 +378,10 @@ def _test(): (7, 10, 10, 7), ), ) -def test_application_harvest_with_span_streaming(span_queue_size, spans_to_send, expected_sent, expected_seen): +@pytest.mark.parametrize("span_batching", (True, False)) +def test_application_harvest_with_span_streaming( + span_batching, span_queue_size, spans_to_send, expected_sent, expected_seen +): @override_generic_settings( settings, { @@ -390,6 +390,7 @@ def test_application_harvest_with_span_streaming(span_queue_size, spans_to_send, "span_events.enabled": True, "infinite_tracing._trace_observer_host": "x", "infinite_tracing.span_queue_size": span_queue_size, + "infinite_tracing.batching": span_batching, }, ) @validate_metric_payload( @@ -600,7 +601,6 @@ def 
test_reservoir_size_zeros(harvest_name, event_name): @pytest.mark.parametrize("events_seen", (1, 5, 10)) def test_error_event_sampling_info(events_seen): - reservoir_size = 5 endpoints_called = [] @@ -671,7 +671,6 @@ def test_compute_sampled_no_reset(): def test_analytic_event_sampling_info(): - synthetics_limit = 10 transactions_limit = 20 @@ -756,7 +755,6 @@ def _test(): }, ) def test_transaction_events_disabled(): - endpoints_called = [] expected_metrics = ( ("Supportability/Python/RequestSampler/requests", None), @@ -798,7 +796,8 @@ def test_reset_synthetics_events(): @pytest.mark.parametrize( - "allowlist_event", ("analytic_event_data", "custom_event_data", "log_event_data", "error_event_data", "span_event_data") + "allowlist_event", + ("analytic_event_data", "custom_event_data", "log_event_data", "error_event_data", "span_event_data"), ) @override_generic_settings( settings, @@ -850,7 +849,8 @@ def test_flexible_events_harvested(allowlist_event): @pytest.mark.parametrize( - "allowlist_event", ("analytic_event_data", "custom_event_data", "log_event_data", "error_event_data", "span_event_data") + "allowlist_event", + ("analytic_event_data", "custom_event_data", "log_event_data", "error_event_data", "span_event_data"), ) @override_generic_settings( settings, diff --git a/tests/testing_support/util.py b/tests/testing_support/util.py index 8ec13e427..a792a95de 100644 --- a/tests/testing_support/util.py +++ b/tests/testing_support/util.py @@ -41,3 +41,18 @@ def get_open_port(): port = s.getsockname()[1] s.close() return port + +def conditional_decorator(condition, decorator): + """Applies a decorator if the condition is true. 
Accepts 0 argument callables for the condition.""" + def _conditional_decorator(func): + if callable(condition): + condition_eval = condition() + else: + condition_eval = condition + + if condition_eval: + return decorator(func) + else: + return func + + return _conditional_decorator diff --git a/tests/testing_support/validators/validate_metric_payload.py b/tests/testing_support/validators/validate_metric_payload.py index db0cc51bb..00e655191 100644 --- a/tests/testing_support/validators/validate_metric_payload.py +++ b/tests/testing_support/validators/validate_metric_payload.py @@ -52,11 +52,11 @@ def _bind_params(method, payload=(), *args, **kwargs): # only look for unscoped metrics unscoped_metric = (metric, '') if not count: - assert unscoped_metric not in sent_metrics + assert unscoped_metric not in sent_metrics, unscoped_metric else: - assert unscoped_metric in sent_metrics + assert unscoped_metric in sent_metrics, unscoped_metric metric_values = sent_metrics[unscoped_metric] - assert metric_values[0] == count + assert metric_values[0] == count, "%s: Expected: %d Got: %d" % (metric, count, metric_values[0]) return val From b54a7e9d0f3ad98179fa1a1fab7ac2298375cef8 Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Thu, 16 Feb 2023 14:05:35 -0800 Subject: [PATCH 055/108] Fix DT settings for txn feature tests (#771) --- .../test_transaction_event_data_and_some_browser_stuff_too.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/agent_features/test_transaction_event_data_and_some_browser_stuff_too.py b/tests/agent_features/test_transaction_event_data_and_some_browser_stuff_too.py index c2e22b68a..73bdfcf53 100644 --- a/tests/agent_features/test_transaction_event_data_and_some_browser_stuff_too.py +++ b/tests/agent_features/test_transaction_event_data_and_some_browser_stuff_too.py @@ -286,6 +286,7 @@ def validate_no_analytics_sample_data(wrapped, instance, args, kwargs): 
_test_collect_analytic_events_disabled_settings = { "collect_analytics_events": False, + "distributed_tracing.enabled": False, "browser_monitoring.attributes.enabled": True, } @@ -328,6 +329,7 @@ def test_collect_analytic_events_disabled(): _test_analytic_events_disabled_settings = { "transaction_events.enabled": False, + "distributed_tracing.enabled": False, "browser_monitoring.attributes.enabled": True, } From 86994c01ca5b96baf759274faa205def50b1628b Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Thu, 16 Feb 2023 14:15:00 -0800 Subject: [PATCH 056/108] Fix pyramid testing versions (#764) Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> --- tox.ini | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tox.ini b/tox.ini index 83f70c30b..933786c3e 100644 --- a/tox.ini +++ b/tox.ini @@ -142,7 +142,7 @@ envlist = grpc-framework_grpc-{py37,py38,py39,py310,py311}-grpclatest, python-framework_pyramid-{pypy,py27,py38}-Pyramid0104, python-framework_pyramid-{pypy,py27,pypy37,py37,py38,py39,py310,py311}-Pyramid0110-cornice, - python-framework_pyramid-{py37,py38,py39,py310,py311,pypy37}-Pyramidmaster, + python-framework_pyramid-{py37,py38,py39,py310,py311,pypy37}-Pyramidlatest, python-framework_sanic-{py38,pypy37}-sanic{190301,1906,1912,200904,210300,2109,2112,2203,2290}, python-framework_sanic-{py37,py38,py39,py310,py311,pypy37}-saniclatest, python-framework_starlette-{py310,pypy37}-starlette{0014,0015,0019}, @@ -342,7 +342,7 @@ deps = framework_pyramid-cornice: cornice!=5.0.0 framework_pyramid-Pyramid0104: Pyramid<1.5 framework_pyramid-Pyramid0110: Pyramid<1.11 - framework_pyramid-Pyramidmaster: https://github.com/Pylons/pyramid/archive/master.zip + framework_pyramid-Pyramidlatest: Pyramid framework_sanic-sanic1812: sanic<18.13 framework_sanic-sanic190301: sanic<19.3.2 framework_sanic-sanic1906: sanic<19.7 From b97575431dc152c34fbd446895af0888d5fb6ff1 Mon Sep 17 00:00:00 2001 
From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Wed, 8 Mar 2023 10:48:04 -0800 Subject: [PATCH 057/108] Fix Ariadne Middleware Testing (#776) * Fix ariadne middleware testing Co-authored-by: Uma Annamalai Co-authored-by: Lalleh Rafeei * [Mega-Linter] Apply linters fixes * Bump tests --------- Co-authored-by: Uma Annamalai Co-authored-by: Lalleh Rafeei Co-authored-by: TimPansino --- tests/framework_ariadne/test_application.py | 31 +++++++++++++++++---- 1 file changed, 25 insertions(+), 6 deletions(-) diff --git a/tests/framework_ariadne/test_application.py b/tests/framework_ariadne/test_application.py index ed60397b9..0f16da449 100644 --- a/tests/framework_ariadne/test_application.py +++ b/tests/framework_ariadne/test_application.py @@ -15,12 +15,19 @@ import pytest from testing_support.fixtures import dt_enabled from testing_support.validators.validate_span_events import validate_span_events -from testing_support.validators.validate_transaction_errors import validate_transaction_errors -from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics -from testing_support.validators.validate_transaction_count import validate_transaction_count +from testing_support.validators.validate_transaction_count import ( + validate_transaction_count, +) +from testing_support.validators.validate_transaction_errors import ( + validate_transaction_errors, +) +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) from newrelic.api.background_task import background_task from newrelic.common.object_names import callable_name +from newrelic.common.package_version_utils import get_package_version_tuple @pytest.fixture(scope="session") @@ -194,7 +201,13 @@ def test_middleware(app, graphql_run, is_graphql_2): def _test(): from graphql import MiddlewareManager - ok, response = graphql_run(app, "{ hello }", middleware=MiddlewareManager(example_middleware)) + middleware = ( + 
[example_middleware] + if get_package_version_tuple("ariadne") >= (0, 18) + else MiddlewareManager(example_middleware) + ) + + ok, response = graphql_run(app, "{ hello }", middleware=middleware) assert ok and not response.get("errors") assert "Hello!" in str(response["data"]) @@ -244,7 +257,13 @@ def test_exception_in_middleware(app, graphql_run): def _test(): from graphql import MiddlewareManager - _, response = graphql_run(app, query, middleware=MiddlewareManager(error_middleware)) + middleware = ( + [error_middleware] + if get_package_version_tuple("ariadne") >= (0, 18) + else MiddlewareManager(error_middleware) + ) + + _, response = graphql_run(app, query, middleware=middleware) assert response["errors"] _test() @@ -322,7 +341,7 @@ def test_exception_in_validation(app, graphql_run, is_graphql_2, query, exc_clas exc_class = callable_name(GraphQLError) _test_exception_scoped_metrics = [ - ('GraphQL/operation/Ariadne///', 1), + ("GraphQL/operation/Ariadne///", 1), ] _test_exception_rollup_metrics = [ ("Errors/all", 1), From 16a0b87699213bee64c67349bfd6d0a2cd2055cd Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Wed, 8 Mar 2023 11:03:07 -0800 Subject: [PATCH 058/108] Exclude merged PRs from automatic mergify actions. 
(#774) Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> --- .github/mergify.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.github/mergify.yml b/.github/mergify.yml index dfd9c88f5..9536b3039 100644 --- a/.github/mergify.yml +++ b/.github/mergify.yml @@ -7,6 +7,7 @@ shared: - "label=ready-to-merge" - "check-success=tests" - "-draft" # Don't include draft PRs + - "-merged" - or: # Only handle branches that target main or develop branches - "base=main" - "base~=^develop" @@ -32,6 +33,7 @@ pull_request_rules: conditions: - "queue-position=-1" # Not queued - "-draft" # Don't include draft PRs + - "-merged" actions: update: @@ -49,6 +51,7 @@ pull_request_rules: - name: Toggle label on merge conflicts conditions: + - "-merged" - conflict actions: label: @@ -58,6 +61,7 @@ pull_request_rules: # Don't use a toggle for this, as the label constantly gets applied and removed when tests are rerun. - name: Add label on test failures conditions: + - "-merged" - or: - check-failure=tests - check-skipped=tests @@ -68,6 +72,7 @@ pull_request_rules: - name: Remove label on test success conditions: + - "-merged" - check-success=tests actions: label: From 572b24b2f772f1a637e9eaf756e49f8ae8d604a1 Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Wed, 15 Mar 2023 10:01:34 -0700 Subject: [PATCH 059/108] Refactor Code Coverage (#765) * Reorder dependency of code coverage fixture * Fix tests with coverage disabled * Refactor code coverage fixture * Clean out old coverage settings * Fix missing code coverage fixture * Fix pypy priority sampling * Start coverage from pytest-cov for better tracking * Refactor coverage config file * Ripping out coverage fixtures * Move tool config to bottom of tox.ini * Disabling py27 warning * Renaming env var --------- Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> --- tests/adapter_cheroot/conftest.py | 8 +--- 
tests/adapter_daphne/conftest.py | 11 +---- tests/adapter_gevent/conftest.py | 3 +- tests/adapter_gevent/pytest.ini | 2 - tests/adapter_gunicorn/conftest.py | 3 +- tests/adapter_gunicorn/pytest.ini | 2 - .../test_aiohttp_app_factory.py | 4 +- tests/adapter_gunicorn/test_asgi_app.py | 4 +- tests/adapter_gunicorn/test_gaiohttp.py | 4 +- tests/adapter_hypercorn/conftest.py | 11 +---- tests/adapter_uvicorn/conftest.py | 10 +--- tests/agent_features/conftest.py | 15 +----- tests/agent_streaming/conftest.py | 10 +--- tests/agent_unittests/conftest.py | 6 +-- tests/application_celery/conftest.py | 8 +--- tests/application_gearman/conftest.py | 8 +--- .../component_djangorestframework/conftest.py | 9 +--- tests/component_flask_rest/conftest.py | 9 +--- tests/component_graphqlserver/conftest.py | 11 +---- tests/component_tastypie/conftest.py | 8 +--- tests/coroutines_asyncio/conftest.py | 12 +---- tests/cross_agent/conftest.py | 10 +--- tests/datastore_aioredis/conftest.py | 12 +---- tests/datastore_aredis/conftest.py | 8 +--- tests/datastore_asyncpg/conftest.py | 13 +----- tests/datastore_bmemcached/conftest.py | 9 +--- tests/datastore_elasticsearch/conftest.py | 12 +---- tests/datastore_memcache/conftest.py | 9 +--- tests/datastore_mysql/conftest.py | 9 +--- tests/datastore_postgresql/conftest.py | 9 +--- tests/datastore_psycopg2/conftest.py | 9 +--- tests/datastore_psycopg2cffi/conftest.py | 10 +--- tests/datastore_pyelasticsearch/conftest.py | 8 +--- tests/datastore_pylibmc/conftest.py | 9 +--- tests/datastore_pymemcache/conftest.py | 9 +--- tests/datastore_pymongo/conftest.py | 11 +---- tests/datastore_pymysql/conftest.py | 10 +--- tests/datastore_pysolr/conftest.py | 8 +--- tests/datastore_redis/conftest.py | 8 +--- tests/datastore_solrpy/conftest.py | 8 +--- tests/datastore_sqlite/conftest.py | 9 +--- tests/datastore_sqlite/test_database.py | 2 +- tests/datastore_umemcache/conftest.py | 9 +--- tests/external_boto3/conftest.py | 8 +--- 
tests/external_botocore/conftest.py | 8 +--- tests/external_feedparser/conftest.py | 10 +--- tests/external_http/conftest.py | 9 +--- tests/external_httplib/conftest.py | 11 +---- tests/external_httplib2/conftest.py | 8 +--- tests/external_httpx/conftest.py | 12 +---- tests/external_requests/conftest.py | 9 +--- tests/external_urllib3/conftest.py | 9 +--- tests/framework_aiohttp/conftest.py | 12 +---- tests/framework_ariadne/conftest.py | 11 +---- tests/framework_bottle/conftest.py | 8 +--- tests/framework_cherrypy/conftest.py | 8 +--- tests/framework_django/conftest.py | 9 +--- tests/framework_falcon/conftest.py | 8 +--- tests/framework_fastapi/conftest.py | 11 +---- tests/framework_flask/conftest.py | 8 +--- tests/framework_graphene/conftest.py | 11 +---- tests/framework_graphql/conftest.py | 10 +--- tests/framework_grpc/conftest.py | 12 +---- tests/framework_pyramid/conftest.py | 8 +--- tests/framework_sanic/conftest.py | 12 +---- tests/framework_starlette/conftest.py | 11 +---- tests/framework_strawberry/conftest.py | 10 +--- tests/framework_tornado/conftest.py | 9 +--- tests/logger_logging/conftest.py | 11 +---- tests/logger_loguru/conftest.py | 11 +---- .../messagebroker_confluentkafka/conftest.py | 12 +---- tests/messagebroker_kafkapython/conftest.py | 12 +---- tests/messagebroker_pika/conftest.py | 12 +---- tests/template_mako/conftest.py | 8 +--- tests/testing_support/fixtures.py | 46 +------------------ tox.ini | 40 ++++++++++------ 76 files changed, 117 insertions(+), 636 deletions(-) delete mode 100644 tests/adapter_gevent/pytest.ini delete mode 100644 tests/adapter_gunicorn/pytest.ini diff --git a/tests/adapter_cheroot/conftest.py b/tests/adapter_cheroot/conftest.py index 7e255783e..37d9d4df4 100644 --- a/tests/adapter_cheroot/conftest.py +++ b/tests/adapter_cheroot/conftest.py @@ -14,14 +14,8 @@ import pytest -from testing_support.fixtures import (code_coverage_fixture, - collector_agent_registration_fixture, collector_available_fixture) +from 
testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 -_coverage_source = [ - 'newrelic.hooks.adapter_cheroot', -] - -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { 'transaction_tracer.explain_threshold': 0.0, diff --git a/tests/adapter_daphne/conftest.py b/tests/adapter_daphne/conftest.py index cda62f22e..3b35b2ee6 100644 --- a/tests/adapter_daphne/conftest.py +++ b/tests/adapter_daphne/conftest.py @@ -12,17 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -from testing_support.fixtures import ( # noqa: F401; pylint: disable=W0611 - code_coverage_fixture, - collector_agent_registration_fixture, - collector_available_fixture, -) - -_coverage_source = [ - "newrelic.hooks.adapter_daphne", -] +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { "transaction_tracer.explain_threshold": 0.0, diff --git a/tests/adapter_gevent/conftest.py b/tests/adapter_gevent/conftest.py index e521babe5..01dacc9e6 100644 --- a/tests/adapter_gevent/conftest.py +++ b/tests/adapter_gevent/conftest.py @@ -15,8 +15,7 @@ import pytest import webtest -from testing_support.fixtures import (collector_agent_registration_fixture, - collector_available_fixture) +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 _default_settings = { 'transaction_tracer.explain_threshold': 0.0, diff --git a/tests/adapter_gevent/pytest.ini b/tests/adapter_gevent/pytest.ini deleted file mode 100644 index 458f898d8..000000000 --- a/tests/adapter_gevent/pytest.ini +++ /dev/null @@ -1,2 +0,0 @@ -[pytest] -usefixtures = collector_available_fixture collector_agent_registration diff --git 
a/tests/adapter_gunicorn/conftest.py b/tests/adapter_gunicorn/conftest.py index ca1c5fb22..228742c96 100644 --- a/tests/adapter_gunicorn/conftest.py +++ b/tests/adapter_gunicorn/conftest.py @@ -14,8 +14,7 @@ import pytest -from testing_support.fixtures import (collector_agent_registration_fixture, - collector_available_fixture) +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 _default_settings = { 'transaction_tracer.explain_threshold': 0.0, diff --git a/tests/adapter_gunicorn/pytest.ini b/tests/adapter_gunicorn/pytest.ini deleted file mode 100644 index 458f898d8..000000000 --- a/tests/adapter_gunicorn/pytest.ini +++ /dev/null @@ -1,2 +0,0 @@ -[pytest] -usefixtures = collector_available_fixture collector_agent_registration diff --git a/tests/adapter_gunicorn/test_aiohttp_app_factory.py b/tests/adapter_gunicorn/test_aiohttp_app_factory.py index 2e0729001..dc16b1231 100644 --- a/tests/adapter_gunicorn/test_aiohttp_app_factory.py +++ b/tests/adapter_gunicorn/test_aiohttp_app_factory.py @@ -30,8 +30,8 @@ reason='aiohttp app factories were implement in 3.1') @pytest.mark.parametrize('nr_enabled', (True, False)) def test_aiohttp_app_factory(nr_enabled): - nr_admin = os.path.join(os.environ['TOX_ENVDIR'], 'bin', 'newrelic-admin') - gunicorn = os.path.join(os.environ['TOX_ENVDIR'], 'bin', 'gunicorn') + nr_admin = os.path.join(os.environ['TOX_ENV_DIR'], 'bin', 'newrelic-admin') + gunicorn = os.path.join(os.environ['TOX_ENV_DIR'], 'bin', 'gunicorn') # Restart the server if it dies during testing for _ in range(5): diff --git a/tests/adapter_gunicorn/test_asgi_app.py b/tests/adapter_gunicorn/test_asgi_app.py index 2e3445303..93e348465 100644 --- a/tests/adapter_gunicorn/test_asgi_app.py +++ b/tests/adapter_gunicorn/test_asgi_app.py @@ -27,8 +27,8 @@ @pytest.mark.parametrize('nr_enabled', (True, False)) def test_asgi_app(nr_enabled): - nr_admin = os.path.join(os.environ['TOX_ENVDIR'], 
'bin', 'newrelic-admin') - gunicorn = os.path.join(os.environ['TOX_ENVDIR'], 'bin', 'gunicorn') + nr_admin = os.path.join(os.environ['TOX_ENV_DIR'], 'bin', 'newrelic-admin') + gunicorn = os.path.join(os.environ['TOX_ENV_DIR'], 'bin', 'gunicorn') PORT = get_open_port() cmd = [gunicorn, '-b', '127.0.0.1:%d' % PORT, '--worker-class', diff --git a/tests/adapter_gunicorn/test_gaiohttp.py b/tests/adapter_gunicorn/test_gaiohttp.py index 3a421b039..9f205bad9 100644 --- a/tests/adapter_gunicorn/test_gaiohttp.py +++ b/tests/adapter_gunicorn/test_gaiohttp.py @@ -30,8 +30,8 @@ @pytest.mark.parametrize('nr_enabled', [True, False]) def test_gunicorn_gaiohttp_worker(nr_enabled): - nr_admin = os.path.join(os.environ['TOX_ENVDIR'], 'bin', 'newrelic-admin') - gunicorn = os.path.join(os.environ['TOX_ENVDIR'], 'bin', 'gunicorn') + nr_admin = os.path.join(os.environ['TOX_ENV_DIR'], 'bin', 'newrelic-admin') + gunicorn = os.path.join(os.environ['TOX_ENV_DIR'], 'bin', 'gunicorn') # Restart the server if it dies during testing for _ in range(5): diff --git a/tests/adapter_hypercorn/conftest.py b/tests/adapter_hypercorn/conftest.py index 50e8bad10..2276e9415 100644 --- a/tests/adapter_hypercorn/conftest.py +++ b/tests/adapter_hypercorn/conftest.py @@ -15,17 +15,8 @@ from testing_support.fixture.event_loop import ( # noqa: F401; pylint: disable=W0611 event_loop as loop, ) -from testing_support.fixtures import ( # noqa: F401; pylint: disable=W0611 - code_coverage_fixture, - collector_agent_registration_fixture, - collector_available_fixture, -) - -_coverage_source = [ - "newrelic.hooks.adapter_hypercorn", -] +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { "transaction_tracer.explain_threshold": 0.0, diff --git a/tests/adapter_uvicorn/conftest.py b/tests/adapter_uvicorn/conftest.py index f44b3016d..4f2f7c2df 100644 --- 
a/tests/adapter_uvicorn/conftest.py +++ b/tests/adapter_uvicorn/conftest.py @@ -13,17 +13,9 @@ # limitations under the License. import pytest -from testing_support.fixtures import ( - code_coverage_fixture, - collector_agent_registration_fixture, - collector_available_fixture, -) -_coverage_source = [ - "newrelic.hooks.adapter_uvicorn", -] +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { "transaction_tracer.explain_threshold": 0.0, diff --git a/tests/agent_features/conftest.py b/tests/agent_features/conftest.py index d3cadbd46..57263238b 100644 --- a/tests/agent_features/conftest.py +++ b/tests/agent_features/conftest.py @@ -13,25 +13,14 @@ # limitations under the License. import pytest -from testing_support.fixtures import ( # noqa: F401; pylint: disable=W0611 - code_coverage_fixture, - collector_agent_registration_fixture, - collector_available_fixture, -) + +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 from testing_support.fixtures import ( # noqa: F401; pylint: disable=W0611 newrelic_caplog as caplog, ) from newrelic.packages import six -_coverage_source = [ - "newrelic.api.transaction", - "newrelic.api.web_transaction", - "newrelic.common.coroutine", - "newrelic.api.lambda_handler", -] - -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { "transaction_tracer.explain_threshold": 0.0, diff --git a/tests/agent_streaming/conftest.py b/tests/agent_streaming/conftest.py index 761aea644..390aeda9c 100644 --- a/tests/agent_streaming/conftest.py +++ b/tests/agent_streaming/conftest.py @@ -15,20 +15,14 @@ import threading import pytest -from testing_support.fixtures import collector_available_fixture # noqa -from testing_support.fixtures import ( - code_coverage_fixture, 
- collector_agent_registration_fixture, -) + +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 from testing_support.mock_external_grpc_server import MockExternalgRPCServer from newrelic.common.streaming_utils import StreamBuffer CONDITION_CLS = type(threading.Condition()) -_coverage_source = [] - -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { "transaction_tracer.explain_threshold": 0.0, diff --git a/tests/agent_unittests/conftest.py b/tests/agent_unittests/conftest.py index fd5630f81..1504d1b8d 100644 --- a/tests/agent_unittests/conftest.py +++ b/tests/agent_unittests/conftest.py @@ -16,10 +16,8 @@ import tempfile import pytest -from testing_support.fixtures import ( # noqa: F401; pylint: disable=W0611 - collector_agent_registration_fixture, - collector_available_fixture, -) + +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 from testing_support.fixtures import ( # noqa: F401; pylint: disable=W0611 newrelic_caplog as caplog, ) diff --git a/tests/application_celery/conftest.py b/tests/application_celery/conftest.py index 92b9aabd0..49f0fe477 100644 --- a/tests/application_celery/conftest.py +++ b/tests/application_celery/conftest.py @@ -14,14 +14,8 @@ import pytest -from testing_support.fixtures import (code_coverage_fixture, - collector_agent_registration_fixture, collector_available_fixture) +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 -_coverage_source = [ - 'newrelic.hooks.application_celery', -] - -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { 'transaction_tracer.explain_threshold': 0.0, diff --git a/tests/application_gearman/conftest.py b/tests/application_gearman/conftest.py index ec469dcae..6a38806e2 100644 --- 
a/tests/application_gearman/conftest.py +++ b/tests/application_gearman/conftest.py @@ -14,14 +14,8 @@ import pytest -from testing_support.fixtures import (code_coverage_fixture, - collector_agent_registration_fixture, collector_available_fixture) +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 -_coverage_source = [ - 'newrelic.hooks.application_gearman', -] - -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { 'transaction_tracer.explain_threshold': 0.0, diff --git a/tests/component_djangorestframework/conftest.py b/tests/component_djangorestframework/conftest.py index cdd3d2e8f..a4b37571d 100644 --- a/tests/component_djangorestframework/conftest.py +++ b/tests/component_djangorestframework/conftest.py @@ -14,15 +14,8 @@ import pytest -from testing_support.fixtures import (code_coverage_fixture, - collector_agent_registration_fixture, collector_available_fixture) +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 -_coverage_source = [ - 'newrelic.hooks.framework_django', - 'newrelic.hooks.component_djangorestframework', -] - -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { 'transaction_tracer.explain_threshold': 0.0, diff --git a/tests/component_flask_rest/conftest.py b/tests/component_flask_rest/conftest.py index 12e985893..ff00973ab 100644 --- a/tests/component_flask_rest/conftest.py +++ b/tests/component_flask_rest/conftest.py @@ -14,15 +14,8 @@ import pytest -from testing_support.fixtures import (code_coverage_fixture, # noqa - collector_agent_registration_fixture, collector_available_fixture) +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 -_coverage_source = [ - 'newrelic.hooks.framework_flask', - 
'newrelic.hooks.component_flask_rest', -] - -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { 'transaction_tracer.explain_threshold': 0.0, diff --git a/tests/component_graphqlserver/conftest.py b/tests/component_graphqlserver/conftest.py index c2b5f7d92..f62af8210 100644 --- a/tests/component_graphqlserver/conftest.py +++ b/tests/component_graphqlserver/conftest.py @@ -12,17 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -from testing_support.fixtures import ( - code_coverage_fixture, - collector_agent_registration_fixture, - collector_available_fixture, -) - -_coverage_source = [ - "newrelic.hooks.component_graphqlserver", -] +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { "transaction_tracer.explain_threshold": 0.0, diff --git a/tests/component_tastypie/conftest.py b/tests/component_tastypie/conftest.py index da01fa46b..e38e3b2f3 100644 --- a/tests/component_tastypie/conftest.py +++ b/tests/component_tastypie/conftest.py @@ -14,14 +14,8 @@ import pytest -from testing_support.fixtures import (code_coverage_fixture, - collector_agent_registration_fixture, collector_available_fixture) +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 -_coverage_source = [ - 'newrelic.hooks.component_tastypie', -] - -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { 'transaction_tracer.explain_threshold': 0.0, diff --git a/tests/coroutines_asyncio/conftest.py b/tests/coroutines_asyncio/conftest.py index aa412d38a..5d3d843d0 100644 --- a/tests/coroutines_asyncio/conftest.py +++ b/tests/coroutines_asyncio/conftest.py @@ -13,18 +13,10 @@ # limitations under the License. 
import pytest -from testing_support.fixture.event_loop import event_loop -from testing_support.fixtures import ( - code_coverage_fixture, - collector_agent_registration_fixture, - collector_available_fixture, -) -_coverage_source = [ - "newrelic.hooks.coroutines_asyncio", -] +from testing_support.fixture.event_loop import event_loop +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { "transaction_tracer.explain_threshold": 0.0, diff --git a/tests/cross_agent/conftest.py b/tests/cross_agent/conftest.py index 0f0f7e3b9..d21ebf236 100644 --- a/tests/cross_agent/conftest.py +++ b/tests/cross_agent/conftest.py @@ -14,16 +14,8 @@ import pytest -from testing_support.fixtures import (code_coverage_fixture, - collector_agent_registration_fixture, collector_available_fixture) +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 -_coverage_source = [ - 'newrelic.api.transaction', - 'newrelic.api.web_transaction', - 'newrelic.core.attribute_filter', -] - -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { 'transaction_tracer.explain_threshold': 0.0, diff --git a/tests/datastore_aioredis/conftest.py b/tests/datastore_aioredis/conftest.py index 3d341f2b6..d50129255 100644 --- a/tests/datastore_aioredis/conftest.py +++ b/tests/datastore_aioredis/conftest.py @@ -18,11 +18,7 @@ from testing_support.db_settings import redis_settings from testing_support.fixture.event_loop import event_loop as loop -from testing_support.fixtures import ( # noqa: F401 - code_coverage_fixture, - collector_agent_registration_fixture, - collector_available_fixture, -) +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 try: 
import aioredis @@ -40,12 +36,6 @@ DB_SETTINGS = redis_settings()[0] -_coverage_source = [ - "newrelic.hooks.datastore_aioredis", -] - -code_coverage = code_coverage_fixture(source=_coverage_source) - _default_settings = { "transaction_tracer.explain_threshold": 0.0, "transaction_tracer.transaction_threshold": 0.0, diff --git a/tests/datastore_aredis/conftest.py b/tests/datastore_aredis/conftest.py index 8f43d088b..78067e0fe 100644 --- a/tests/datastore_aredis/conftest.py +++ b/tests/datastore_aredis/conftest.py @@ -15,14 +15,8 @@ import pytest from testing_support.fixture.event_loop import event_loop as loop # noqa: F401 -from testing_support.fixtures import (code_coverage_fixture, - collector_agent_registration_fixture, collector_available_fixture) +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 -_coverage_source = [ - 'newrelic.hooks.datastore_aredis', -] - -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { 'transaction_tracer.explain_threshold': 0.0, diff --git a/tests/datastore_asyncpg/conftest.py b/tests/datastore_asyncpg/conftest.py index 00720a55f..69bc0501a 100644 --- a/tests/datastore_asyncpg/conftest.py +++ b/tests/datastore_asyncpg/conftest.py @@ -13,11 +13,8 @@ # limitations under the License. 
from testing_support.fixture.event_loop import event_loop -from testing_support.fixtures import code_coverage_fixture # noqa -from testing_support.fixtures import ( - collector_agent_registration_fixture, - collector_available_fixture, -) + +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 _default_settings = { "transaction_tracer.explain_threshold": 0.0, @@ -30,9 +27,3 @@ collector_agent_registration = collector_agent_registration_fixture( app_name="Python Agent Test (datastore_asyncpg)", default_settings=_default_settings ) - -_coverage_source = [ - "newrelic.hooks.database_asyncpg", -] - -code_coverage = code_coverage_fixture(source=_coverage_source) diff --git a/tests/datastore_bmemcached/conftest.py b/tests/datastore_bmemcached/conftest.py index 3d41ed930..c970c1c34 100644 --- a/tests/datastore_bmemcached/conftest.py +++ b/tests/datastore_bmemcached/conftest.py @@ -14,15 +14,8 @@ import pytest -from testing_support.fixtures import (code_coverage_fixture, - collector_agent_registration_fixture, collector_available_fixture) -_coverage_source = [ - 'newrelic.api.memcache_trace', - 'newrelic.hooks.datastore_bmemcached', -] - -code_coverage = code_coverage_fixture(source=_coverage_source) +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 _default_settings = { 'transaction_tracer.explain_threshold': 0.0, diff --git a/tests/datastore_elasticsearch/conftest.py b/tests/datastore_elasticsearch/conftest.py index 5cb0b0824..53fa6fcdc 100644 --- a/tests/datastore_elasticsearch/conftest.py +++ b/tests/datastore_elasticsearch/conftest.py @@ -14,19 +14,11 @@ import pytest from testing_support.db_settings import elasticsearch_settings -from testing_support.fixtures import ( # noqa - code_coverage_fixture, - collector_agent_registration_fixture, - collector_available_fixture, -) -from 
newrelic.common.package_version_utils import get_package_version +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 -_coverage_source = [ - "newrelic.hooks.datastore_elasticsearch", -] +from newrelic.common.package_version_utils import get_package_version -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { "transaction_tracer.explain_threshold": 0.0, diff --git a/tests/datastore_memcache/conftest.py b/tests/datastore_memcache/conftest.py index d19451200..835e895bd 100644 --- a/tests/datastore_memcache/conftest.py +++ b/tests/datastore_memcache/conftest.py @@ -17,17 +17,10 @@ import pytest import memcache -from testing_support.fixtures import (code_coverage_fixture, - collector_agent_registration_fixture, collector_available_fixture) +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 from testing_support.db_settings import memcached_settings -_coverage_source = [ - 'newrelic.api.memcache_trace', - 'newrelic.hooks.datastore_memcache', -] - -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { 'transaction_tracer.explain_threshold': 0.0, diff --git a/tests/datastore_mysql/conftest.py b/tests/datastore_mysql/conftest.py index 9b490b057..a2f74c398 100644 --- a/tests/datastore_mysql/conftest.py +++ b/tests/datastore_mysql/conftest.py @@ -15,15 +15,8 @@ import pytest import os -from testing_support.fixtures import (code_coverage_fixture, - collector_agent_registration_fixture, collector_available_fixture) +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 -_coverage_source = [ - 'newrelic.hooks.database_mysql', - 'newrelic.hooks.database_dbapi2', -] - -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { 
'transaction_tracer.explain_threshold': 0.0, diff --git a/tests/datastore_postgresql/conftest.py b/tests/datastore_postgresql/conftest.py index 741b53078..624fb4726 100644 --- a/tests/datastore_postgresql/conftest.py +++ b/tests/datastore_postgresql/conftest.py @@ -14,15 +14,8 @@ import pytest -from testing_support.fixtures import (code_coverage_fixture, - collector_agent_registration_fixture, collector_available_fixture) +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 -_coverage_source = [ - 'newrelic.hooks.database_postgresql', - 'newrelic.hooks.database_dbapi2', -] - -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { 'transaction_tracer.explain_threshold': 0.0, diff --git a/tests/datastore_psycopg2/conftest.py b/tests/datastore_psycopg2/conftest.py index 47bfd9f16..dd271909d 100644 --- a/tests/datastore_psycopg2/conftest.py +++ b/tests/datastore_psycopg2/conftest.py @@ -14,15 +14,8 @@ import pytest -from testing_support.fixtures import (code_coverage_fixture, - collector_agent_registration_fixture, collector_available_fixture) +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 -_coverage_source = [ - 'newrelic.hooks.database_psycopg2', - 'newrelic.hooks.database_dbapi2', -] - -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { 'transaction_tracer.explain_threshold': 0.0, diff --git a/tests/datastore_psycopg2cffi/conftest.py b/tests/datastore_psycopg2cffi/conftest.py index 9cff099f8..c9df1369b 100644 --- a/tests/datastore_psycopg2cffi/conftest.py +++ b/tests/datastore_psycopg2cffi/conftest.py @@ -14,16 +14,8 @@ import pytest -from testing_support.fixtures import (code_coverage_fixture, - collector_agent_registration_fixture, collector_available_fixture) +from testing_support.fixtures import 
collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 -_coverage_source = [ - 'newrelic.hooks.database_psycopg2cffi', - 'newrelic.hooks.database_psycopg2', - 'newrelic.hooks.database_dbapi2', -] - -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { 'transaction_tracer.explain_threshold': 0.0, diff --git a/tests/datastore_pyelasticsearch/conftest.py b/tests/datastore_pyelasticsearch/conftest.py index 101bf444a..192642135 100644 --- a/tests/datastore_pyelasticsearch/conftest.py +++ b/tests/datastore_pyelasticsearch/conftest.py @@ -14,14 +14,8 @@ import pytest -from testing_support.fixtures import (code_coverage_fixture, - collector_agent_registration_fixture, collector_available_fixture) +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 -_coverage_source = [ - 'newrelic.hooks.datastore_pyelasticsearch', -] - -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { 'transaction_tracer.explain_threshold': 0.0, diff --git a/tests/datastore_pylibmc/conftest.py b/tests/datastore_pylibmc/conftest.py index 4dd03c77a..40970bdca 100644 --- a/tests/datastore_pylibmc/conftest.py +++ b/tests/datastore_pylibmc/conftest.py @@ -14,15 +14,8 @@ import pytest -from testing_support.fixtures import (code_coverage_fixture, - collector_agent_registration_fixture, collector_available_fixture) +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 -_coverage_source = [ - 'newrelic.api.memcache_trace', - 'newrelic.hooks.datastore_pylibmc', -] - -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { 'transaction_tracer.explain_threshold': 0.0, diff --git a/tests/datastore_pymemcache/conftest.py b/tests/datastore_pymemcache/conftest.py index ff5420903..3d4e1ce76 100644 --- 
a/tests/datastore_pymemcache/conftest.py +++ b/tests/datastore_pymemcache/conftest.py @@ -14,15 +14,8 @@ import pytest -from testing_support.fixtures import (code_coverage_fixture, - collector_agent_registration_fixture, collector_available_fixture) +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 -_coverage_source = [ - 'newrelic.api.memcache_trace', - 'newrelic.hooks.datastore_pymemcache', -] - -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { 'transaction_tracer.explain_threshold': 0.0, diff --git a/tests/datastore_pymongo/conftest.py b/tests/datastore_pymongo/conftest.py index 8d279f2e2..d269182b0 100644 --- a/tests/datastore_pymongo/conftest.py +++ b/tests/datastore_pymongo/conftest.py @@ -12,17 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -from testing_support.fixtures import ( # noqa: F401; pylint: disable=W0611 - code_coverage_fixture, - collector_agent_registration_fixture, - collector_available_fixture, -) - -_coverage_source = [ - "newrelic.hooks.datastore_pymongo", -] +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { "transaction_tracer.explain_threshold": 0.0, diff --git a/tests/datastore_pymysql/conftest.py b/tests/datastore_pymysql/conftest.py index 0aeb282a4..51d037432 100644 --- a/tests/datastore_pymysql/conftest.py +++ b/tests/datastore_pymysql/conftest.py @@ -14,16 +14,8 @@ import pytest -from testing_support.fixtures import (code_coverage_fixture, - collector_agent_registration_fixture, collector_available_fixture) +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 -_coverage_source = [ - 
'newrelic.hooks.database_pymysql', - 'newrelic.hooks.database_mysqldb', - 'newrelic.hooks.database_dbapi2', -] - -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { 'transaction_tracer.explain_threshold': 0.0, diff --git a/tests/datastore_pysolr/conftest.py b/tests/datastore_pysolr/conftest.py index 1f5419454..07851b698 100644 --- a/tests/datastore_pysolr/conftest.py +++ b/tests/datastore_pysolr/conftest.py @@ -14,14 +14,8 @@ import pytest -from testing_support.fixtures import (code_coverage_fixture, - collector_agent_registration_fixture, collector_available_fixture) +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 -_coverage_source = [ - 'newrelic.hooks.datastore_pysolr', -] - -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { 'transaction_tracer.explain_threshold': 0.0, diff --git a/tests/datastore_redis/conftest.py b/tests/datastore_redis/conftest.py index 802924c75..53ff2658d 100644 --- a/tests/datastore_redis/conftest.py +++ b/tests/datastore_redis/conftest.py @@ -14,14 +14,8 @@ import pytest -from testing_support.fixtures import (code_coverage_fixture, - collector_agent_registration_fixture, collector_available_fixture) +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 -_coverage_source = [ - 'newrelic.hooks.datastore_redis', -] - -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { 'transaction_tracer.explain_threshold': 0.0, diff --git a/tests/datastore_solrpy/conftest.py b/tests/datastore_solrpy/conftest.py index 52248065c..4418e5d9a 100644 --- a/tests/datastore_solrpy/conftest.py +++ b/tests/datastore_solrpy/conftest.py @@ -14,14 +14,8 @@ import pytest -from testing_support.fixtures import (code_coverage_fixture, - collector_agent_registration_fixture, 
collector_available_fixture) +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 -_coverage_source = [ - 'newrelic.hooks.datastore_solrpy', -] - -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { 'transaction_tracer.explain_threshold': 0.0, diff --git a/tests/datastore_sqlite/conftest.py b/tests/datastore_sqlite/conftest.py index 270b9b8cf..ed695b251 100644 --- a/tests/datastore_sqlite/conftest.py +++ b/tests/datastore_sqlite/conftest.py @@ -14,15 +14,8 @@ import pytest -from testing_support.fixtures import (code_coverage_fixture, - collector_agent_registration_fixture, collector_available_fixture) +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 -_coverage_source = [ - 'newrelic.hooks.database_sqlite', - 'newrelic.hooks.database_dbapi2', -] - -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { 'transaction_tracer.explain_threshold': 0.0, diff --git a/tests/datastore_sqlite/test_database.py b/tests/datastore_sqlite/test_database.py index 4ca534c39..584ce57bc 100644 --- a/tests/datastore_sqlite/test_database.py +++ b/tests/datastore_sqlite/test_database.py @@ -23,7 +23,7 @@ from newrelic.api.background_task import background_task -DATABASE_DIR = os.environ.get('TOX_ENVDIR', '.') +DATABASE_DIR = os.environ.get('TOX_ENV_DIR', '.') DATABASE_NAME = ':memory:' _test_execute_via_cursor_scoped_metrics = [ diff --git a/tests/datastore_umemcache/conftest.py b/tests/datastore_umemcache/conftest.py index 1e945141d..aa61ca26a 100644 --- a/tests/datastore_umemcache/conftest.py +++ b/tests/datastore_umemcache/conftest.py @@ -14,15 +14,8 @@ import pytest -from testing_support.fixtures import (code_coverage_fixture, - collector_agent_registration_fixture, collector_available_fixture) +from testing_support.fixtures import 
collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 -_coverage_source = [ - 'newrelic.api.memcache_trace', - 'newrelic.hooks.datastore_umemcache', -] - -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { 'transaction_tracer.explain_threshold': 0.0, diff --git a/tests/external_boto3/conftest.py b/tests/external_boto3/conftest.py index 4daa3678b..90d82f007 100644 --- a/tests/external_boto3/conftest.py +++ b/tests/external_boto3/conftest.py @@ -14,14 +14,8 @@ import pytest -from testing_support.fixtures import (code_coverage_fixture, - collector_agent_registration_fixture, collector_available_fixture) +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 -_coverage_source = [ - 'newrelic.hooks.external_botocore', -] - -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { 'transaction_tracer.explain_threshold': 0.0, diff --git a/tests/external_botocore/conftest.py b/tests/external_botocore/conftest.py index 738b51f5a..e5cf15533 100644 --- a/tests/external_botocore/conftest.py +++ b/tests/external_botocore/conftest.py @@ -14,14 +14,8 @@ import pytest -from testing_support.fixtures import (code_coverage_fixture, - collector_agent_registration_fixture, collector_available_fixture) +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 -_coverage_source = [ - 'newrelic.hooks.external_botocore', -] - -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { 'transaction_tracer.explain_threshold': 0.0, diff --git a/tests/external_feedparser/conftest.py b/tests/external_feedparser/conftest.py index 818889161..11d19f1cd 100644 --- a/tests/external_feedparser/conftest.py +++ b/tests/external_feedparser/conftest.py @@ -13,8 +13,8 @@ # limitations under the License. 
import pytest -from testing_support.fixtures import (code_coverage_fixture, # noqa - collector_agent_registration_fixture, collector_available_fixture) + +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 from testing_support.mock_external_http_server import MockExternalHTTPServer _default_settings = { @@ -29,12 +29,6 @@ app_name='Python Agent Test (external_feedparser)', default_settings=_default_settings) -_coverage_source = [ - 'newrelic.hooks.external_feedparser', -] - -code_coverage = code_coverage_fixture(source=_coverage_source) - def create_handler(response): def handler(self): diff --git a/tests/external_http/conftest.py b/tests/external_http/conftest.py index c4f768907..f8afb49f3 100644 --- a/tests/external_http/conftest.py +++ b/tests/external_http/conftest.py @@ -14,17 +14,10 @@ import pytest -from testing_support.fixtures import (code_coverage_fixture, - collector_agent_registration_fixture, collector_available_fixture) +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 from testing_support.mock_external_http_server import ( MockExternalHTTPHResponseHeadersServer) -_coverage_source = [ - 'newrelic.api.external_trace', - 'newrelic.hooks.external_httplib', -] - -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { 'transaction_tracer.explain_threshold': 0.0, diff --git a/tests/external_httplib/conftest.py b/tests/external_httplib/conftest.py index 1c9a69713..2edbeab91 100644 --- a/tests/external_httplib/conftest.py +++ b/tests/external_httplib/conftest.py @@ -14,19 +14,10 @@ import pytest -from testing_support.fixtures import (code_coverage_fixture, - collector_agent_registration_fixture, collector_available_fixture) +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: 
disable=W0611 from testing_support.mock_external_http_server import ( MockExternalHTTPHResponseHeadersServer) -_coverage_source = [ - 'newrelic.api.external_trace', - 'newrelic.hooks.external_httplib', - 'newrelic.hooks.external_urllib', - 'newrelic.hooks.external_urllib2', -] - -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { 'transaction_tracer.explain_threshold': 0.0, diff --git a/tests/external_httplib2/conftest.py b/tests/external_httplib2/conftest.py index b8bc40e6c..cf3501da5 100644 --- a/tests/external_httplib2/conftest.py +++ b/tests/external_httplib2/conftest.py @@ -14,17 +14,11 @@ import pytest -from testing_support.fixtures import (code_coverage_fixture, - collector_agent_registration_fixture, collector_available_fixture) +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 from testing_support.mock_external_http_server import ( MockExternalHTTPHResponseHeadersServer) -_coverage_source = [ - 'newrelic.hooks.external_httplib2', -] - -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { 'transaction_tracer.explain_threshold': 0.0, diff --git a/tests/external_httpx/conftest.py b/tests/external_httpx/conftest.py index bad35d45e..87ea1bec0 100644 --- a/tests/external_httpx/conftest.py +++ b/tests/external_httpx/conftest.py @@ -16,18 +16,8 @@ import pytest from testing_support.fixture.event_loop import event_loop as loop -from testing_support.fixtures import ( - code_coverage_fixture, - collector_agent_registration_fixture, - collector_available_fixture, -) - -_coverage_source = [ - "newrelic.api.external_trace", - "newrelic.hooks.external_httpx", -] +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { 
"transaction_tracer.explain_threshold": 0.0, diff --git a/tests/external_requests/conftest.py b/tests/external_requests/conftest.py index 02fee2419..10a2ccf05 100644 --- a/tests/external_requests/conftest.py +++ b/tests/external_requests/conftest.py @@ -14,17 +14,10 @@ import pytest -from testing_support.fixtures import (code_coverage_fixture, - collector_agent_registration_fixture, collector_available_fixture) +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 from testing_support.mock_external_http_server import ( MockExternalHTTPHResponseHeadersServer) -_coverage_source = [ - 'newrelic.api.external_trace', - 'newrelic.hooks.external_requests', -] - -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { 'transaction_tracer.explain_threshold': 0.0, diff --git a/tests/external_urllib3/conftest.py b/tests/external_urllib3/conftest.py index b71263dda..19d3f394b 100644 --- a/tests/external_urllib3/conftest.py +++ b/tests/external_urllib3/conftest.py @@ -14,18 +14,11 @@ import pytest -from testing_support.fixtures import (code_coverage_fixture, - collector_agent_registration_fixture, collector_available_fixture) +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 from testing_support.mock_external_http_server import ( MockExternalHTTPHResponseHeadersServer) -_coverage_source = [ - 'newrelic.api.external_trace', - 'newrelic.hooks.external_urllib3', -] - -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { 'transaction_tracer.explain_threshold': 0.0, diff --git a/tests/framework_aiohttp/conftest.py b/tests/framework_aiohttp/conftest.py index bb7e7716e..3bb814a9b 100644 --- a/tests/framework_aiohttp/conftest.py +++ b/tests/framework_aiohttp/conftest.py @@ -22,21 +22,13 @@ from testing_support.fixture.event_loop import ( # noqa: F401 
pylint: disable=W0611 event_loop, ) -from testing_support.fixtures import ( # noqa: F401 pylint: disable=W0611 - code_coverage_fixture, - collector_agent_registration_fixture, - collector_available_fixture, -) + +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 from testing_support.mock_external_http_server import ( MockExternalHTTPHResponseHeadersServer, MockExternalHTTPServer, ) -_coverage_source = [ - "newrelic.hooks.framework_aiohttp", -] - -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { "transaction_tracer.explain_threshold": 0.0, diff --git a/tests/framework_ariadne/conftest.py b/tests/framework_ariadne/conftest.py index f7c94ed26..93623a685 100644 --- a/tests/framework_ariadne/conftest.py +++ b/tests/framework_ariadne/conftest.py @@ -14,17 +14,8 @@ import pytest import six -from testing_support.fixtures import ( # noqa: F401; pylint: disable=W0611 - code_coverage_fixture, - collector_agent_registration_fixture, - collector_available_fixture, -) - -_coverage_source = [ - "newrelic.hooks.framework_graphql", -] +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { "transaction_tracer.explain_threshold": 0.0, diff --git a/tests/framework_bottle/conftest.py b/tests/framework_bottle/conftest.py index 04ed187f3..095a3331f 100644 --- a/tests/framework_bottle/conftest.py +++ b/tests/framework_bottle/conftest.py @@ -14,14 +14,8 @@ import pytest -from testing_support.fixtures import (code_coverage_fixture, - collector_agent_registration_fixture, collector_available_fixture) +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 -_coverage_source = [ - 'newrelic.hooks.framework_bottle', -] - 
-code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { 'transaction_tracer.explain_threshold': 0.0, diff --git a/tests/framework_cherrypy/conftest.py b/tests/framework_cherrypy/conftest.py index 0de6238ea..bc730bb1f 100644 --- a/tests/framework_cherrypy/conftest.py +++ b/tests/framework_cherrypy/conftest.py @@ -14,14 +14,8 @@ import pytest -from testing_support.fixtures import (code_coverage_fixture, - collector_agent_registration_fixture, collector_available_fixture) +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 -_coverage_source = [ - 'newrelic.hooks.framework_cherrypy', -] - -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { 'transaction_tracer.explain_threshold': 0.0, diff --git a/tests/framework_django/conftest.py b/tests/framework_django/conftest.py index 6f807c766..8a43ef5c9 100644 --- a/tests/framework_django/conftest.py +++ b/tests/framework_django/conftest.py @@ -14,15 +14,8 @@ import pytest -from testing_support.fixtures import (code_coverage_fixture, - collector_agent_registration_fixture, collector_available_fixture) +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 -_coverage_source = [ - 'newrelic.hooks.framework_django', - 'newrelic.hooks.framework_django_py3', -] - -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { 'transaction_tracer.explain_threshold': 0.0, diff --git a/tests/framework_falcon/conftest.py b/tests/framework_falcon/conftest.py index 3df2656cf..fd43715c6 100644 --- a/tests/framework_falcon/conftest.py +++ b/tests/framework_falcon/conftest.py @@ -14,14 +14,8 @@ import pytest -from testing_support.fixtures import (code_coverage_fixture, - collector_agent_registration_fixture, collector_available_fixture) +from testing_support.fixtures import 
collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 -_coverage_source = [ - 'newrelic.hooks.framework_falcon', -] - -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { 'transaction_tracer.explain_threshold': 0.0, diff --git a/tests/framework_fastapi/conftest.py b/tests/framework_fastapi/conftest.py index e976b05ed..d65398ffb 100644 --- a/tests/framework_fastapi/conftest.py +++ b/tests/framework_fastapi/conftest.py @@ -13,20 +13,11 @@ # limitations under the License. import pytest -from testing_support.fixtures import ( # noqa: F401; pylint: disable=W0611 - code_coverage_fixture, - collector_agent_registration_fixture, - collector_available_fixture, -) +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 from testing_support.fixtures import ( # noqa: F401; pylint: disable=W0611 newrelic_caplog as caplog, ) -_coverage_source = [ - "newrelic.hooks.framework_fastapi", -] - -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { "transaction_tracer.explain_threshold": 0.0, diff --git a/tests/framework_flask/conftest.py b/tests/framework_flask/conftest.py index abf124817..f90ed9b51 100644 --- a/tests/framework_flask/conftest.py +++ b/tests/framework_flask/conftest.py @@ -17,14 +17,8 @@ import pytest from flask import __version__ as flask_version -from testing_support.fixtures import (code_coverage_fixture, - collector_agent_registration_fixture, collector_available_fixture) +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 -_coverage_source = [ - 'newrelic.hooks.framework_flask', -] - -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { 'transaction_tracer.explain_threshold': 0.0, diff --git a/tests/framework_graphene/conftest.py 
b/tests/framework_graphene/conftest.py index 73eccda4a..a097f9750 100644 --- a/tests/framework_graphene/conftest.py +++ b/tests/framework_graphene/conftest.py @@ -14,17 +14,8 @@ import pytest import six -from testing_support.fixtures import ( # noqa: F401; pylint: disable=W0611 - code_coverage_fixture, - collector_agent_registration_fixture, - collector_available_fixture, -) - -_coverage_source = [ - "newrelic.hooks.framework_graphql", -] +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { "transaction_tracer.explain_threshold": 0.0, diff --git a/tests/framework_graphql/conftest.py b/tests/framework_graphql/conftest.py index 2084a5bb4..4d9e06758 100644 --- a/tests/framework_graphql/conftest.py +++ b/tests/framework_graphql/conftest.py @@ -14,17 +14,9 @@ import pytest import six -from testing_support.fixtures import ( - code_coverage_fixture, - collector_agent_registration_fixture, - collector_available_fixture, -) -_coverage_source = [ - "newrelic.hooks.framework_graphql", -] +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { "transaction_tracer.explain_threshold": 0.0, diff --git a/tests/framework_grpc/conftest.py b/tests/framework_grpc/conftest.py index 3e27d134d..970a096cf 100644 --- a/tests/framework_grpc/conftest.py +++ b/tests/framework_grpc/conftest.py @@ -16,20 +16,12 @@ import grpc import pytest -from testing_support.fixtures import ( # noqa - code_coverage_fixture, - collector_agent_registration_fixture, - collector_available_fixture, -) + +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 from 
testing_support.mock_external_grpc_server import MockExternalgRPCServer import newrelic.packages.six as six -_coverage_source = [ - "newrelic.hooks.framework_grpc", -] - -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { "transaction_tracer.explain_threshold": 0.0, diff --git a/tests/framework_pyramid/conftest.py b/tests/framework_pyramid/conftest.py index 6ca07b90a..289eabeaf 100644 --- a/tests/framework_pyramid/conftest.py +++ b/tests/framework_pyramid/conftest.py @@ -14,14 +14,8 @@ import pytest -from testing_support.fixtures import (code_coverage_fixture, - collector_agent_registration_fixture, collector_available_fixture) +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 -_coverage_source = [ - 'newrelic.hooks.framework_pyramid', -] - -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { 'transaction_tracer.explain_threshold': 0.0, diff --git a/tests/framework_sanic/conftest.py b/tests/framework_sanic/conftest.py index 434528bac..5152887b6 100644 --- a/tests/framework_sanic/conftest.py +++ b/tests/framework_sanic/conftest.py @@ -15,21 +15,13 @@ import asyncio import pytest -from testing_support.fixtures import ( # noqa: F401 pylint: disable=W0611 - code_coverage_fixture, - collector_agent_registration_fixture, - collector_available_fixture, -) + +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 from newrelic.common.object_wrapper import ( # noqa: F401 pylint: disable=W0611 transient_function_wrapper, ) -_coverage_source = [ - "newrelic.hooks.framework_sanic", -] - -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { "transaction_tracer.explain_threshold": 0.0, diff --git a/tests/framework_starlette/conftest.py b/tests/framework_starlette/conftest.py index a760fe847..7c843cb08 100644 
--- a/tests/framework_starlette/conftest.py +++ b/tests/framework_starlette/conftest.py @@ -12,17 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -from testing_support.fixtures import ( - code_coverage_fixture, - collector_agent_registration_fixture, - collector_available_fixture, -) - -_coverage_source = [ - "newrelic.hooks.framework_starlette", -] +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { "transaction_tracer.explain_threshold": 0.0, diff --git a/tests/framework_strawberry/conftest.py b/tests/framework_strawberry/conftest.py index 6cbf75b87..130866bcb 100644 --- a/tests/framework_strawberry/conftest.py +++ b/tests/framework_strawberry/conftest.py @@ -14,17 +14,9 @@ import pytest import six -from testing_support.fixtures import ( # noqa: F401; pylint: disable=W0611 - code_coverage_fixture, - collector_agent_registration_fixture, - collector_available_fixture, -) -_coverage_source = [ - "newrelic.hooks.framework_graphql", -] +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { "transaction_tracer.explain_threshold": 0.0, diff --git a/tests/framework_tornado/conftest.py b/tests/framework_tornado/conftest.py index cec4549d2..920b916ee 100644 --- a/tests/framework_tornado/conftest.py +++ b/tests/framework_tornado/conftest.py @@ -14,8 +14,7 @@ import pytest -from testing_support.fixtures import (code_coverage_fixture, # noqa - collector_agent_registration_fixture, collector_available_fixture) +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 _default_settings = { 
'transaction_tracer.explain_threshold': 0.0, @@ -29,12 +28,6 @@ app_name='Python Agent Test (framework_tornado)', default_settings=_default_settings) -_coverage_source = [ - 'newrelic.hooks.framework_tornado', -] - -code_coverage = code_coverage_fixture(source=_coverage_source) - @pytest.fixture(scope='module') def app(request): diff --git a/tests/logger_logging/conftest.py b/tests/logger_logging/conftest.py index 514a14595..46e8f4ec3 100644 --- a/tests/logger_logging/conftest.py +++ b/tests/logger_logging/conftest.py @@ -15,17 +15,8 @@ import logging import pytest -from testing_support.fixtures import ( - code_coverage_fixture, - collector_agent_registration_fixture, - collector_available_fixture, -) - -_coverage_source = [ - "newrelic.hooks.logger_logging", -] +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { "transaction_tracer.explain_threshold": 0.0, diff --git a/tests/logger_loguru/conftest.py b/tests/logger_loguru/conftest.py index af632e300..65eaf4ab8 100644 --- a/tests/logger_loguru/conftest.py +++ b/tests/logger_loguru/conftest.py @@ -15,17 +15,8 @@ import logging import pytest -from testing_support.fixtures import ( - code_coverage_fixture, - collector_agent_registration_fixture, - collector_available_fixture, -) - -_coverage_source = [ - "newrelic.hooks.logger_loguru", -] +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { "transaction_tracer.explain_threshold": 0.0, diff --git a/tests/messagebroker_confluentkafka/conftest.py b/tests/messagebroker_confluentkafka/conftest.py index a86af3ff9..e29596d55 100644 --- a/tests/messagebroker_confluentkafka/conftest.py +++ 
b/tests/messagebroker_confluentkafka/conftest.py @@ -17,11 +17,8 @@ import pytest from testing_support.db_settings import kafka_settings -from testing_support.fixtures import ( # noqa: F401, pylint: disable=W0611 - code_coverage_fixture, - collector_agent_registration_fixture, - collector_available_fixture, -) + +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 from newrelic.api.transaction import current_transaction from newrelic.common.object_wrapper import transient_function_wrapper @@ -30,11 +27,6 @@ BROKER = "%s:%s" % (DB_SETTINGS["host"], DB_SETTINGS["port"]) -_coverage_source = [ - "newrelic.hooks.messagebroker_confluentkafka", -] - -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { "transaction_tracer.explain_threshold": 0.0, diff --git a/tests/messagebroker_kafkapython/conftest.py b/tests/messagebroker_kafkapython/conftest.py index 098486f34..becef31a0 100644 --- a/tests/messagebroker_kafkapython/conftest.py +++ b/tests/messagebroker_kafkapython/conftest.py @@ -18,11 +18,8 @@ import kafka import pytest from testing_support.db_settings import kafka_settings -from testing_support.fixtures import ( # noqa: F401, pylint: disable=W0611 - code_coverage_fixture, - collector_agent_registration_fixture, - collector_available_fixture, -) + +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 from newrelic.api.transaction import current_transaction from newrelic.common.object_wrapper import transient_function_wrapper @@ -32,11 +29,6 @@ BOOTSTRAP_SERVER = "%s:%s" % (DB_SETTINGS["host"], DB_SETTINGS["port"]) BROKER = [BOOTSTRAP_SERVER] -_coverage_source = [ - "newrelic.hooks.messagebroker_kafkapython", -] - -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { "transaction_tracer.explain_threshold": 0.0, diff --git 
a/tests/messagebroker_pika/conftest.py b/tests/messagebroker_pika/conftest.py index 9849ee014..67246f9c5 100644 --- a/tests/messagebroker_pika/conftest.py +++ b/tests/messagebroker_pika/conftest.py @@ -17,11 +17,8 @@ import pika import pytest from testing_support.db_settings import rabbitmq_settings -from testing_support.fixtures import ( # noqa: F401 - code_coverage_fixture, - collector_agent_registration_fixture, - collector_available_fixture, -) + +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 QUEUE = "test_pika-%s" % uuid.uuid4() QUEUE_2 = "test_pika-%s" % uuid.uuid4() @@ -36,11 +33,6 @@ DB_SETTINGS = rabbitmq_settings()[0] -_coverage_source = [ - "newrelic.hooks.messagebroker_pika", -] - -code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { "transaction_tracer.explain_threshold": 0.0, diff --git a/tests/template_mako/conftest.py b/tests/template_mako/conftest.py index b639a9851..623af56c0 100644 --- a/tests/template_mako/conftest.py +++ b/tests/template_mako/conftest.py @@ -12,8 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from testing_support.fixtures import (code_coverage_fixture, # noqa - collector_agent_registration_fixture, collector_available_fixture) +from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 _default_settings = { 'transaction_tracer.explain_threshold': 0.0, @@ -27,8 +26,3 @@ app_name='Python Agent Test (template_mako)', default_settings=_default_settings) -_coverage_source = [ - 'newrelic.hooks.template_mako', -] - -code_coverage = code_coverage_fixture(source=_coverage_source) diff --git a/tests/testing_support/fixtures.py b/tests/testing_support/fixtures.py index ccbca9885..8d05bf405 100644 --- a/tests/testing_support/fixtures.py +++ b/tests/testing_support/fixtures.py @@ -115,7 +115,7 @@ def initialize_agent(app_name=None, default_settings=None): for name, value in default_settings.items(): apply_config_setting(settings, name, value) - env_directory = os.environ.get("TOX_ENVDIR", None) + env_directory = os.environ.get("TOX_ENV_DIR", None) if env_directory is not None: log_directory = os.path.join(env_directory, "log") @@ -281,7 +281,7 @@ def finalize(): @pytest.fixture(scope="function") -def collector_available_fixture(request): +def collector_available_fixture(request, collector_agent_registration): application = application_instance() active = application.active assert active @@ -1493,48 +1493,6 @@ def _override_expected_status_codes(wrapped, instance, args, kwargs): return _override_expected_status_codes -def code_coverage_fixture(source=None): - if source is None: - source = ["newrelic"] - - github_actions = bool(os.environ.get("GITHUB_ACTIONS", None)) - tox_env_directory = os.environ.get("TOX_ENVDIR", None) - - if tox_env_directory: - data_file = os.path.join(tox_env_directory, ".coverage") - data_suffix = os.path.split(tox_env_directory)[-1] - coverage_directory = os.path.join(tox_env_directory, "htmlcov") - xml_report = os.path.join(tox_env_directory, "coverage.xml") - 
else: - data_file = ".coverage" - data_suffix = None - coverage_directory = "htmlcov" - xml_report = "coverage.xml" - - @pytest.fixture(scope="session") - def _code_coverage_fixture(request): - if not source: - yield None # Required, generator based fixtures must yield 1 value or pytest will throw an exception. - return - - from coverage import coverage - - cov = coverage(source=source, data_file=data_file, data_suffix=data_suffix, branch=True) - cov.start() - - yield cov - - # At exit, stop coverage and save to data file - cov.stop() - cov.save() - if not github_actions: - # Run html and xml reports locally - cov.html_report(directory=coverage_directory) - cov.xml_report(outfile=xml_report) - - return _code_coverage_fixture - - def reset_core_stats_engine(): """Reset the StatsEngine and custom StatsEngine of the core application.""" diff --git a/tox.ini b/tox.ini index 933786c3e..2160e9e9c 100644 --- a/tox.ini +++ b/tox.ini @@ -164,17 +164,6 @@ envlist = kafka-messagebroker_kafkapython-{py27,py38}-kafkapython{020001,020000,0104}, python-template_mako-{py27,py37,py38,py39,py310,py311} -[pytest] -usefixtures = - collector_available_fixture - collector_agent_registration - code_coverage - -[coverage:paths] -source = - newrelic/ - .tox/**/site-packages/newrelic/ - [testenv] deps = # Base Dependencies @@ -386,8 +375,10 @@ deps = template_mako: mako<1.2 setenv = - PYTHONPATH = {toxinidir}/tests - TOX_ENVDIR = {envdir} + PYTHONPATH={toxinidir}/tests + TOX_ENV_DIR={envdir} + COVERAGE_FILE={envdir}/.coverage.{envname} + COVERAGE_RCFILE={toxinidir}/tox.ini with_extensions: NEW_RELIC_EXTENSIONS = true without_extensions: NEW_RELIC_EXTENSIONS = false agent_features: NEW_RELIC_APDEX_T = 1000 @@ -491,3 +482,26 @@ changedir = messagebroker_confluentkafka: tests/messagebroker_confluentkafka messagebroker_kafkapython: tests/messagebroker_kafkapython template_mako: tests/template_mako + +[pytest] +addopts = --cov="newrelic" --cov-report=html --cov-report=xml +usefixtures = + 
collector_available_fixture + collector_agent_registration + +[coverage:run] +branch = True +omit = "newrelic/packages/**/*.py" +parallel = True +disable_warnings = couldnt-parse + +[coverage:paths] +source = + newrelic/ + .tox/**/site-packages/newrelic/ + +[coverage:html] +directory = ${TOX_ENV_DIR-.}/htmlcov + +[coverage:xml] +output = ${TOX_ENV_DIR-.}/coverage.xml From df9701c3576e966e296931e87243430a1d2f4e22 Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Mon, 20 Mar 2023 12:44:17 -0700 Subject: [PATCH 060/108] Add GraphQL Introspection Setting (#783) * Add graphql introspection setting * Sort settings object hierarchy * Add test for introspection queries setting * Expand introspection queries testing * [Mega-Linter] Apply linters fixes * Adjust introspection detection for graphql --------- Co-authored-by: TimPansino --- newrelic/core/config.py | 70 +++++++++++-------- newrelic/hooks/framework_graphql.py | 6 +- tests/framework_ariadne/test_application.py | 21 ++++-- tests/framework_graphene/test_application.py | 29 +++++--- tests/framework_graphql/test_application.py | 41 +++++++---- .../framework_strawberry/test_application.py | 39 +++++++---- 6 files changed, 136 insertions(+), 70 deletions(-) diff --git a/newrelic/core/config.py b/newrelic/core/config.py index 72c4de03d..80e9ccec0 100644 --- a/newrelic/core/config.py +++ b/newrelic/core/config.py @@ -334,6 +334,14 @@ def _can_enable_infinite_tracing(): return True +class InstrumentationSettings(Settings): + pass + + +class InstrumentationGraphQLSettings(Settings): + pass + + class EventHarvestConfigSettings(Settings): nested = True _lock = threading.Lock() @@ -355,50 +363,52 @@ class EventHarvestConfigHarvestLimitSettings(Settings): _settings = TopLevelSettings() +_settings.agent_limits = AgentLimitsSettings() _settings.application_logging = ApplicationLoggingSettings() _settings.application_logging.forwarding = ApplicationLoggingForwardingSettings() 
-_settings.application_logging.metrics = ApplicationLoggingMetricsSettings() _settings.application_logging.local_decorating = ApplicationLoggingLocalDecoratingSettings() +_settings.application_logging.metrics = ApplicationLoggingMetricsSettings() _settings.attributes = AttributesSettings() -_settings.gc_runtime_metrics = GCRuntimeMetricsSettings() -_settings.code_level_metrics = CodeLevelMetricsSettings() -_settings.thread_profiler = ThreadProfilerSettings() -_settings.transaction_tracer = TransactionTracerSettings() -_settings.transaction_tracer.attributes = TransactionTracerAttributesSettings() -_settings.error_collector = ErrorCollectorSettings() -_settings.error_collector.attributes = ErrorCollectorAttributesSettings() _settings.browser_monitoring = BrowserMonitorSettings() _settings.browser_monitoring.attributes = BrowserMonitorAttributesSettings() -_settings.transaction_name = TransactionNameSettings() -_settings.transaction_metrics = TransactionMetricsSettings() -_settings.event_loop_visibility = EventLoopVisibilitySettings() -_settings.rum = RumSettings() -_settings.slow_sql = SlowSqlSettings() -_settings.agent_limits = AgentLimitsSettings() +_settings.code_level_metrics = CodeLevelMetricsSettings() _settings.console = ConsoleSettings() -_settings.debug = DebugSettings() _settings.cross_application_tracer = CrossApplicationTracerSettings() -_settings.transaction_events = TransactionEventsSettings() -_settings.transaction_events.attributes = TransactionEventsAttributesSettings() _settings.custom_insights_events = CustomInsightsEventsSettings() -_settings.process_host = ProcessHostSettings() -_settings.synthetics = SyntheticsSettings() -_settings.message_tracer = MessageTracerSettings() -_settings.utilization = UtilizationSettings() -_settings.strip_exception_messages = StripExceptionMessageSettings() _settings.datastore_tracer = DatastoreTracerSettings() -_settings.datastore_tracer.instance_reporting = DatastoreTracerInstanceReportingSettings() 
_settings.datastore_tracer.database_name_reporting = DatastoreTracerDatabaseNameReportingSettings() +_settings.datastore_tracer.instance_reporting = DatastoreTracerInstanceReportingSettings() +_settings.debug = DebugSettings() +_settings.distributed_tracing = DistributedTracingSettings() +_settings.error_collector = ErrorCollectorSettings() +_settings.error_collector.attributes = ErrorCollectorAttributesSettings() +_settings.event_harvest_config = EventHarvestConfigSettings() +_settings.event_harvest_config.harvest_limits = EventHarvestConfigHarvestLimitSettings() +_settings.event_loop_visibility = EventLoopVisibilitySettings() +_settings.gc_runtime_metrics = GCRuntimeMetricsSettings() _settings.heroku = HerokuSettings() +_settings.infinite_tracing = InfiniteTracingSettings() +_settings.instrumentation = InstrumentationSettings() +_settings.instrumentation.graphql = InstrumentationGraphQLSettings() +_settings.message_tracer = MessageTracerSettings() +_settings.process_host = ProcessHostSettings() +_settings.rum = RumSettings() +_settings.serverless_mode = ServerlessModeSettings() +_settings.slow_sql = SlowSqlSettings() _settings.span_events = SpanEventSettings() _settings.span_events.attributes = SpanEventAttributesSettings() +_settings.strip_exception_messages = StripExceptionMessageSettings() +_settings.synthetics = SyntheticsSettings() +_settings.thread_profiler = ThreadProfilerSettings() +_settings.transaction_events = TransactionEventsSettings() +_settings.transaction_events.attributes = TransactionEventsAttributesSettings() +_settings.transaction_metrics = TransactionMetricsSettings() +_settings.transaction_name = TransactionNameSettings() _settings.transaction_segments = TransactionSegmentSettings() _settings.transaction_segments.attributes = TransactionSegmentAttributesSettings() -_settings.distributed_tracing = DistributedTracingSettings() -_settings.serverless_mode = ServerlessModeSettings() -_settings.infinite_tracing = InfiniteTracingSettings() 
-_settings.event_harvest_config = EventHarvestConfigSettings() -_settings.event_harvest_config.harvest_limits = EventHarvestConfigHarvestLimitSettings() +_settings.transaction_tracer = TransactionTracerSettings() +_settings.transaction_tracer.attributes = TransactionTracerAttributesSettings() +_settings.utilization = UtilizationSettings() _settings.log_file = os.environ.get("NEW_RELIC_LOG", None) @@ -735,6 +745,10 @@ def default_host(license_key): _settings.infinite_tracing.ssl = True _settings.infinite_tracing.span_queue_size = _environ_as_int("NEW_RELIC_INFINITE_TRACING_SPAN_QUEUE_SIZE", 10000) +_settings.instrumentation.graphql.capture_introspection_queries = os.environ.get( + "NEW_RELIC_INSTRUMENTATION_GRAPHQL_CAPTURE_INTROSPECTION_QUERIES", False +) + _settings.event_harvest_config.harvest_limits.analytic_event_data = _environ_as_int( "NEW_RELIC_ANALYTICS_EVENTS_MAX_SAMPLES_STORED", DEFAULT_RESERVOIR_SIZE ) diff --git a/newrelic/hooks/framework_graphql.py b/newrelic/hooks/framework_graphql.py index 378b714b8..30d8a2e19 100644 --- a/newrelic/hooks/framework_graphql.py +++ b/newrelic/hooks/framework_graphql.py @@ -105,6 +105,7 @@ def bind_operation_v2(exe_context, operation, root_value): def wrap_execute_operation(wrapped, instance, args, kwargs): transaction = current_transaction() trace = current_trace() + settings = transaction.settings if not transaction: return wrapped(*args, **kwargs) @@ -135,8 +136,9 @@ def wrap_execute_operation(wrapped, instance, args, kwargs): if operation.selection_set is not None: fields = operation.selection_set.selections # Ignore transactions for introspection queries - for field in fields: - if get_node_value(field, "name") in GRAPHQL_INTROSPECTION_FIELDS: + if not settings.instrumentation.graphql.capture_introspection_queries: + # If all selected fields are introspection fields + if all(get_node_value(field, "name") in GRAPHQL_INTROSPECTION_FIELDS for field in fields): ignore_transaction() fragments = execution_context.fragments 
diff --git a/tests/framework_ariadne/test_application.py b/tests/framework_ariadne/test_application.py index 0f16da449..cf8501a7a 100644 --- a/tests/framework_ariadne/test_application.py +++ b/tests/framework_ariadne/test_application.py @@ -13,7 +13,7 @@ # limitations under the License. import pytest -from testing_support.fixtures import dt_enabled +from testing_support.fixtures import dt_enabled, override_application_settings from testing_support.validators.validate_span_events import validate_span_events from testing_support.validators.validate_transaction_count import ( validate_transaction_count, @@ -520,8 +520,17 @@ def _test(): _test() -@validate_transaction_count(0) -@background_task() -def test_ignored_introspection_transactions(app, graphql_run): - ok, response = graphql_run(app, "{ __schema { types { name } } }") - assert ok and not response.get("errors") +@pytest.mark.parametrize("capture_introspection_setting", (True, False)) +def test_introspection_transactions(app, graphql_run, capture_introspection_setting): + txn_ct = 1 if capture_introspection_setting else 0 + + @override_application_settings( + {"instrumentation.graphql.capture_introspection_queries": capture_introspection_setting} + ) + @validate_transaction_count(txn_ct) + @background_task() + def _test(): + ok, response = graphql_run(app, "{ __schema { types { name } } }") + assert ok and not response.get("errors") + + _test() diff --git a/tests/framework_graphene/test_application.py b/tests/framework_graphene/test_application.py index b9d374a3c..fd02d992a 100644 --- a/tests/framework_graphene/test_application.py +++ b/tests/framework_graphene/test_application.py @@ -14,13 +14,17 @@ import pytest import six -from testing_support.fixtures import dt_enabled +from testing_support.fixtures import dt_enabled, override_application_settings from testing_support.validators.validate_span_events import validate_span_events from testing_support.validators.validate_transaction_count import ( 
validate_transaction_count, ) -from testing_support.validators.validate_transaction_errors import validate_transaction_errors -from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics +from testing_support.validators.validate_transaction_errors import ( + validate_transaction_errors, +) +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) from newrelic.api.background_task import background_task from newrelic.common.object_names import callable_name @@ -510,8 +514,17 @@ def _test(): _test() -@validate_transaction_count(0) -@background_task() -def test_ignored_introspection_transactions(app, graphql_run): - response = graphql_run(app, "{ __schema { types { name } } }") - assert not response.errors +@pytest.mark.parametrize("capture_introspection_setting", (True, False)) +def test_introspection_transactions(app, graphql_run, capture_introspection_setting): + txn_ct = 1 if capture_introspection_setting else 0 + + @override_application_settings( + {"instrumentation.graphql.capture_introspection_queries": capture_introspection_setting} + ) + @validate_transaction_count(txn_ct) + @background_task() + def _test(): + response = graphql_run(app, "{ __schema { types { name } } }") + assert not response.errors + + _test() diff --git a/tests/framework_graphql/test_application.py b/tests/framework_graphql/test_application.py index 56dc3a738..65d8cee3a 100644 --- a/tests/framework_graphql/test_application.py +++ b/tests/framework_graphql/test_application.py @@ -13,14 +13,20 @@ # limitations under the License. 
import pytest -from testing_support.fixtures import dt_enabled +from testing_support.fixtures import dt_enabled, override_application_settings +from testing_support.validators.validate_code_level_metrics import ( + validate_code_level_metrics, +) from testing_support.validators.validate_span_events import validate_span_events from testing_support.validators.validate_transaction_count import ( validate_transaction_count, ) -from testing_support.validators.validate_code_level_metrics import validate_code_level_metrics -from testing_support.validators.validate_transaction_errors import validate_transaction_errors -from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics +from testing_support.validators.validate_transaction_errors import ( + validate_transaction_errors, +) +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) from newrelic.api.background_task import background_task from newrelic.common.object_names import callable_name @@ -99,9 +105,9 @@ def test_basic(app, graphql_run): ) @background_task() def _test(): - response = graphql_run(app, '{ hello }') + response = graphql_run(app, "{ hello }") assert not response.errors - + _test() @@ -376,9 +382,7 @@ def test_operation_metrics_and_attrs(app, graphql_run): @validate_span_events(exact_agents=operation_attrs) @background_task() def _test(): - response = graphql_run( - app, "query MyQuery { library(index: 0) { branch, book { id, name } } }" - ) + response = graphql_run(app, "query MyQuery { library(index: 0) { branch, book { id, name } } }") assert not response.errors _test() @@ -507,8 +511,17 @@ def _test(): _test() -@validate_transaction_count(0) -@background_task() -def test_ignored_introspection_transactions(app, graphql_run): - response = graphql_run(app, "{ __schema { types { name } } }") - assert not response.errors +@pytest.mark.parametrize("capture_introspection_setting", (True, False)) +def 
test_introspection_transactions(app, graphql_run, capture_introspection_setting): + txn_ct = 1 if capture_introspection_setting else 0 + + @override_application_settings( + {"instrumentation.graphql.capture_introspection_queries": capture_introspection_setting} + ) + @validate_transaction_count(txn_ct) + @background_task() + def _test(): + response = graphql_run(app, "{ __schema { types { name } } }") + assert not response.errors + + _test() diff --git a/tests/framework_strawberry/test_application.py b/tests/framework_strawberry/test_application.py index d57de74f4..ac60a33e0 100644 --- a/tests/framework_strawberry/test_application.py +++ b/tests/framework_strawberry/test_application.py @@ -13,11 +13,17 @@ # limitations under the License. import pytest -from testing_support.fixtures import dt_enabled +from testing_support.fixtures import dt_enabled, override_application_settings from testing_support.validators.validate_span_events import validate_span_events -from testing_support.validators.validate_transaction_errors import validate_transaction_errors -from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics -from testing_support.validators.validate_transaction_count import validate_transaction_count +from testing_support.validators.validate_transaction_count import ( + validate_transaction_count, +) +from testing_support.validators.validate_transaction_errors import ( + validate_transaction_errors, +) +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) from newrelic.api.background_task import background_task from newrelic.common.object_names import callable_name @@ -61,12 +67,12 @@ def delay_import(): return delay_import -def example_middleware(next, root, info, **args): #pylint: disable=W0622 +def example_middleware(next, root, info, **args): # pylint: disable=W0622 return_value = next(root, info, **args) return return_value -def error_middleware(next, root, info, 
**args): #pylint: disable=W0622 +def error_middleware(next, root, info, **args): # pylint: disable=W0622 raise RuntimeError("Runtime Error!") @@ -248,7 +254,7 @@ def test_exception_in_validation(app, graphql_run, is_graphql_2, query, exc_clas exc_class = callable_name(GraphQLError) _test_exception_scoped_metrics = [ - ('GraphQL/operation/Strawberry///', 1), + ("GraphQL/operation/Strawberry///", 1), ] _test_exception_rollup_metrics = [ ("Errors/all", 1), @@ -434,8 +440,17 @@ def _test(): _test() -@validate_transaction_count(0) -@background_task() -def test_ignored_introspection_transactions(app, graphql_run): - response = graphql_run(app, "{ __schema { types { name } } }") - assert not response.errors +@pytest.mark.parametrize("capture_introspection_setting", (True, False)) +def test_introspection_transactions(app, graphql_run, capture_introspection_setting): + txn_ct = 1 if capture_introspection_setting else 0 + + @override_application_settings( + {"instrumentation.graphql.capture_introspection_queries": capture_introspection_setting} + ) + @validate_transaction_count(txn_ct) + @background_task() + def _test(): + response = graphql_run(app, "{ __schema { types { name } } }") + assert not response.errors + + _test() From fbb851e75d0456a0138c0265ba431d54856f5111 Mon Sep 17 00:00:00 2001 From: Uma Annamalai Date: Mon, 20 Mar 2023 15:26:13 -0700 Subject: [PATCH 061/108] Fix instance info tests for redis. (#784) * Fix instance info tests for redis. 
* [Mega-Linter] Apply linters fixes --------- Co-authored-by: umaannamalai --- tests/datastore_redis/test_instance_info.py | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/tests/datastore_redis/test_instance_info.py b/tests/datastore_redis/test_instance_info.py index b3e9a0d5d..c2edb31fc 100644 --- a/tests/datastore_redis/test_instance_info.py +++ b/tests/datastore_redis/test_instance_info.py @@ -100,6 +100,7 @@ def test_strict_redis_connection_instance_info(args, kwargs, expected): if (3, 5, 3) >= REDIS_PY_VERSION >= (2, 7, 5): _instance_info_from_url_tests.append((("redis://127.0.0.1",), {}, ("127.0.0.1", "6379", "0"))) + if REDIS_PY_VERSION >= (2, 10): _instance_info_from_url_tests.extend( [ @@ -115,6 +116,13 @@ def test_strict_redis_connection_instance_info(args, kwargs, expected): ] ) +if REDIS_PY_VERSION >= (4, 5, 2): + _instance_info_from_url_tests_4_5_2 = _instance_info_from_url_tests[:-3] + [ + (("unix:///path/to/socket.sock",), {}, ("localhost", "6379", "0")), + (("unix:///path/to/socket.sock?db=2",), {}, ("localhost", "6379", "2")), + (("unix:///path/to/socket.sock",), {"db": 2}, ("localhost", "6379", "2")), + ] + @pytest.mark.skipif(REDIS_PY_VERSION < (2, 6), reason="from_url not yet implemented in this redis-py version") @pytest.mark.parametrize("args,kwargs,expected", _instance_info_from_url_tests) @@ -133,7 +141,10 @@ def test_strict_redis_client_from_url(args, kwargs, expected): @pytest.mark.skipif(REDIS_PY_VERSION < (2, 6), reason="from_url not yet implemented in this redis-py version") -@pytest.mark.parametrize("args,kwargs,expected", _instance_info_from_url_tests) +@pytest.mark.parametrize( + "args,kwargs,expected", + _instance_info_from_url_tests if REDIS_PY_VERSION < (4, 5, 2) else _instance_info_from_url_tests_4_5_2, +) def test_redis_connection_from_url(args, kwargs, expected): r = redis.Redis.from_url(*args, **kwargs) if r.connection_pool.connection_class is redis.Connection: @@ -153,7 +164,10 @@ def 
test_redis_connection_from_url(args, kwargs, expected): @pytest.mark.skipif(REDIS_PY_VERSION < (2, 6), reason="from_url not yet implemented in this redis-py version") -@pytest.mark.parametrize("args,kwargs,expected", _instance_info_from_url_tests) +@pytest.mark.parametrize( + "args,kwargs,expected", + _instance_info_from_url_tests if REDIS_PY_VERSION < (4, 5, 2) else _instance_info_from_url_tests_4_5_2, +) def test_strict_redis_connection_from_url(args, kwargs, expected): r = redis.StrictRedis.from_url(*args, **kwargs) if r.connection_pool.connection_class is redis.Connection: From 912d08805bc66184facdfd5d5b2ec9d9d49272b2 Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Thu, 30 Mar 2023 09:04:55 -0700 Subject: [PATCH 062/108] Fix Redis Instance Info (#790) * Fix failing redis test for new default behavior * Revert "Fix instance info tests for redis. (#784)" This reverts commit f7108e3c2a54ab02a1104f6c16bd5fd799b9fc7e. --- newrelic/hooks/datastore_redis.py | 2 +- tests/datastore_redis/test_instance_info.py | 18 ++---------------- 2 files changed, 3 insertions(+), 17 deletions(-) diff --git a/newrelic/hooks/datastore_redis.py b/newrelic/hooks/datastore_redis.py index 23a6dfb77..b32c848b3 100644 --- a/newrelic/hooks/datastore_redis.py +++ b/newrelic/hooks/datastore_redis.py @@ -472,7 +472,7 @@ def _conn_attrs_to_dict(connection): def _instance_info(kwargs): host = kwargs.get("host") or "localhost" - port_path_or_id = str(kwargs.get("port") or kwargs.get("path", "unknown")) + port_path_or_id = str(kwargs.get("path") or kwargs.get("port", "unknown")) db = str(kwargs.get("db") or 0) return (host, port_path_or_id, db) diff --git a/tests/datastore_redis/test_instance_info.py b/tests/datastore_redis/test_instance_info.py index c2edb31fc..b3e9a0d5d 100644 --- a/tests/datastore_redis/test_instance_info.py +++ b/tests/datastore_redis/test_instance_info.py @@ -100,7 +100,6 @@ def test_strict_redis_connection_instance_info(args, 
kwargs, expected): if (3, 5, 3) >= REDIS_PY_VERSION >= (2, 7, 5): _instance_info_from_url_tests.append((("redis://127.0.0.1",), {}, ("127.0.0.1", "6379", "0"))) - if REDIS_PY_VERSION >= (2, 10): _instance_info_from_url_tests.extend( [ @@ -116,13 +115,6 @@ def test_strict_redis_connection_instance_info(args, kwargs, expected): ] ) -if REDIS_PY_VERSION >= (4, 5, 2): - _instance_info_from_url_tests_4_5_2 = _instance_info_from_url_tests[:-3] + [ - (("unix:///path/to/socket.sock",), {}, ("localhost", "6379", "0")), - (("unix:///path/to/socket.sock?db=2",), {}, ("localhost", "6379", "2")), - (("unix:///path/to/socket.sock",), {"db": 2}, ("localhost", "6379", "2")), - ] - @pytest.mark.skipif(REDIS_PY_VERSION < (2, 6), reason="from_url not yet implemented in this redis-py version") @pytest.mark.parametrize("args,kwargs,expected", _instance_info_from_url_tests) @@ -141,10 +133,7 @@ def test_strict_redis_client_from_url(args, kwargs, expected): @pytest.mark.skipif(REDIS_PY_VERSION < (2, 6), reason="from_url not yet implemented in this redis-py version") -@pytest.mark.parametrize( - "args,kwargs,expected", - _instance_info_from_url_tests if REDIS_PY_VERSION < (4, 5, 2) else _instance_info_from_url_tests_4_5_2, -) +@pytest.mark.parametrize("args,kwargs,expected", _instance_info_from_url_tests) def test_redis_connection_from_url(args, kwargs, expected): r = redis.Redis.from_url(*args, **kwargs) if r.connection_pool.connection_class is redis.Connection: @@ -164,10 +153,7 @@ def test_redis_connection_from_url(args, kwargs, expected): @pytest.mark.skipif(REDIS_PY_VERSION < (2, 6), reason="from_url not yet implemented in this redis-py version") -@pytest.mark.parametrize( - "args,kwargs,expected", - _instance_info_from_url_tests if REDIS_PY_VERSION < (4, 5, 2) else _instance_info_from_url_tests_4_5_2, -) +@pytest.mark.parametrize("args,kwargs,expected", _instance_info_from_url_tests) def test_strict_redis_connection_from_url(args, kwargs, expected): r = 
redis.StrictRedis.from_url(*args, **kwargs) if r.connection_pool.connection_class is redis.Connection: From 637879a8bacd9a7ff1b8fdea1f00d40628d692f7 Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Thu, 30 Mar 2023 12:16:44 -0700 Subject: [PATCH 063/108] Guard GraphQL Settings Lookup (#787) * Guard graphql settings lookup * [Mega-Linter] Apply linters fixes * Bump tests * Update graphql settings test --------- Co-authored-by: TimPansino Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> --- newrelic/hooks/framework_graphql.py | 3 +-- tests/framework_graphql/test_application.py | 8 ++++++++ 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/newrelic/hooks/framework_graphql.py b/newrelic/hooks/framework_graphql.py index 30d8a2e19..d261b2e9f 100644 --- a/newrelic/hooks/framework_graphql.py +++ b/newrelic/hooks/framework_graphql.py @@ -105,7 +105,6 @@ def bind_operation_v2(exe_context, operation, root_value): def wrap_execute_operation(wrapped, instance, args, kwargs): transaction = current_transaction() trace = current_trace() - settings = transaction.settings if not transaction: return wrapped(*args, **kwargs) @@ -136,7 +135,7 @@ def wrap_execute_operation(wrapped, instance, args, kwargs): if operation.selection_set is not None: fields = operation.selection_set.selections # Ignore transactions for introspection queries - if not settings.instrumentation.graphql.capture_introspection_queries: + if not (transaction.settings and transaction.settings.instrumentation.graphql.capture_introspection_queries): # If all selected fields are introspection fields if all(get_node_value(field, "name") in GRAPHQL_INTROSPECTION_FIELDS for field in fields): ignore_transaction() diff --git a/tests/framework_graphql/test_application.py b/tests/framework_graphql/test_application.py index 65d8cee3a..dd49ee37f 100644 --- a/tests/framework_graphql/test_application.py +++ 
b/tests/framework_graphql/test_application.py @@ -79,6 +79,14 @@ def error_middleware(next, root, info, **args): raise RuntimeError("Runtime Error!") +def test_no_harm_no_transaction(app, graphql_run): + def _test(): + response = graphql_run(app, "{ __schema { types { name } } }") + assert not response.errors + + _test() + + _runtime_error_name = callable_name(RuntimeError) _test_runtime_error = [(_runtime_error_name, "Runtime Error!")] _graphql_base_rollup_metrics = [ From 107c0a69fd568860bb762a9b790793ad75dccbae Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Thu, 30 Mar 2023 14:01:29 -0700 Subject: [PATCH 064/108] Errors Inbox Improvements (#791) * Errors inbox attributes and tests (#778) * Initial errors inbox commit Co-authored-by: Timothy Pansino Co-authored-by: Hannah Stepanek Co-authored-by: Uma Annamalai * Add enduser.id field * Move validate_error_trace_attributes into validators directory * Add error callback attributes test * Add tests for enduser.id & error.group.name Co-authored-by: Timothy Pansino * Uncomment code_coverage * Drop commented out line --------- Co-authored-by: Timothy Pansino Co-authored-by: Hannah Stepanek Co-authored-by: Uma Annamalai Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> Co-authored-by: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> * Error Group Callback API (#785) * Error group initial implementation * Rewrite error callback to pass map of info * Fixed incorrect validators causing errors Co-authored-by: Uma Annamalai Co-authored-by: Hannah Stepanek * Fix validation of error trace attributes * Expanded error callback test * Add incorrect type to error callback testing * Change error group callback to private setting * Add testing for error group callback inputs * Separate error group callback tests * Add explicit testing for the set API * Ensure error group is string * Fix python 2 type validation --------- Co-authored-by: Uma 
Annamalai Co-authored-by: Hannah Stepanek Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> * User Tracking for Errors Inbox (#789) * Add user tracking feature for errors inbox. * Address review comments, * Add high_security test. * Cleanup invalid tests test. * Update user_id string check. * Remove set_id outside txn test. --------- Co-authored-by: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> --------- Co-authored-by: Lalleh Rafeei <84813886+lrafeei@users.noreply.github.com> Co-authored-by: Timothy Pansino Co-authored-by: Hannah Stepanek Co-authored-by: Uma Annamalai Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> Co-authored-by: Uma Annamalai --- newrelic/agent.py | 4 + newrelic/api/settings.py | 30 +- newrelic/api/time_trace.py | 62 ++- newrelic/api/transaction.py | 23 +- newrelic/core/attribute.py | 52 ++- newrelic/core/config.py | 5 +- newrelic/core/error_node.py | 3 +- newrelic/core/stats_engine.py | 55 ++- newrelic/core/transaction_node.py | 435 +++++++++--------- .../test_attributes_in_action.py | 59 ++- tests/agent_features/test_error_events.py | 4 + .../test_error_group_callback.py | 238 ++++++++++ tests/agent_unittests/test_harvest_loop.py | 3 +- tests/testing_support/fixtures.py | 19 +- .../validate_error_trace_attributes.py | 47 ++ 15 files changed, 778 insertions(+), 261 deletions(-) create mode 100644 tests/agent_features/test_error_group_callback.py create mode 100644 tests/testing_support/validators/validate_error_trace_attributes.py diff --git a/newrelic/agent.py b/newrelic/agent.py index b0bf115e2..95a540780 100644 --- a/newrelic/agent.py +++ b/newrelic/agent.py @@ -155,7 +155,9 @@ def __asgi_application(*args, **kwargs): from newrelic.api.profile_trace import ProfileTraceWrapper as __ProfileTraceWrapper from newrelic.api.profile_trace import profile_trace as __profile_trace from newrelic.api.profile_trace import wrap_profile_trace as __wrap_profile_trace +from 
newrelic.api.settings import set_error_group_callback as __set_error_group_callback from newrelic.api.supportability import wrap_api_call as __wrap_api_call +from newrelic.api.transaction import set_user_id as __set_user_id from newrelic.api.transaction_name import ( TransactionNameWrapper as __TransactionNameWrapper, ) @@ -223,6 +225,8 @@ def __asgi_application(*args, **kwargs): get_linking_metadata = __wrap_api_call(__get_linking_metadata, "get_linking_metadata") add_custom_span_attribute = __wrap_api_call(__add_custom_span_attribute, "add_custom_span_attribute") current_transaction = __wrap_api_call(__current_transaction, "current_transaction") +set_user_id = __wrap_api_call(__set_user_id, "set_user_id") +set_error_group_callback = __wrap_api_call(__set_error_group_callback, "set_error_group_callback") set_transaction_name = __wrap_api_call(__set_transaction_name, "set_transaction_name") end_of_transaction = __wrap_api_call(__end_of_transaction, "end_of_transaction") set_background_task = __wrap_api_call(__set_background_task, "set_background_task") diff --git a/newrelic/api/settings.py b/newrelic/api/settings.py index fc70eb0d4..5cc9ba79f 100644 --- a/newrelic/api/settings.py +++ b/newrelic/api/settings.py @@ -12,10 +12,15 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import logging + import newrelic.core.config settings = newrelic.core.config.global_settings +_logger = logging.getLogger(__name__) + + RECORDSQL_OFF = 'off' RECORDSQL_RAW = 'raw' RECORDSQL_OBFUSCATED = 'obfuscated' @@ -23,5 +28,26 @@ COMPRESSED_CONTENT_ENCODING_DEFLATE = 'deflate' COMPRESSED_CONTENT_ENCODING_GZIP = 'gzip' -STRIP_EXCEPTION_MESSAGE = ("Message removed by New Relic " - "'strip_exception_messages' setting") +STRIP_EXCEPTION_MESSAGE = ("Message removed by New Relic 'strip_exception_messages' setting") + + +def set_error_group_callback(callback, application=None): + """Set the current callback to be used to determine error groups.""" + from newrelic.api.application import application_instance + + if callback is not None and not callable(callback): + _logger.error("Error group callback must be a callable, or None to unset this setting.") + return + + # Check for activated application if it exists and was not given. + application = application_instance(activate=False) if application is None else application + + # Get application settings if it exists, or fallback to global settings object + _settings = application.settings if application is not None else settings() + + if _settings is None: + _logger.error("Failed to set error_group_callback in application settings. 
Report this issue to New Relic support.") + return + + if _settings.error_collector: + _settings.error_collector._error_group_callback = callback diff --git a/newrelic/api/time_trace.py b/newrelic/api/time_trace.py index 31de73536..24be0e00f 100644 --- a/newrelic/api/time_trace.py +++ b/newrelic/api/time_trace.py @@ -30,6 +30,8 @@ from newrelic.core.config import is_expected_error, should_ignore_error from newrelic.core.trace_cache import trace_cache +from newrelic.packages import six + _logger = logging.getLogger(__name__) @@ -255,13 +257,15 @@ def _observe_exception(self, exc_info=None, ignore=None, expected=None, status_c if getattr(value, "_nr_ignored", None): return - module, name, fullnames, message = parse_exc_info((exc, value, tb)) + module, name, fullnames, message_raw = parse_exc_info((exc, value, tb)) fullname = fullnames[0] # Check to see if we need to strip the message before recording it. if settings.strip_exception_messages.enabled and fullname not in settings.strip_exception_messages.allowlist: message = STRIP_EXCEPTION_MESSAGE + else: + message = message_raw # Where expected or ignore are a callable they should return a # tri-state variable with the following behavior. @@ -344,7 +348,7 @@ def _observe_exception(self, exc_info=None, ignore=None, expected=None, status_c is_expected = is_expected_error(exc_info, status_code=status_code, settings=settings) # Record a supportability metric if error attributes are being - # overiden. + # overridden. 
if "error.class" in self.agent_attributes: transaction._record_supportability("Supportability/SpanEvent/Errors/Dropped") @@ -353,11 +357,23 @@ def _observe_exception(self, exc_info=None, ignore=None, expected=None, status_c self._add_agent_attribute("error.message", message) self._add_agent_attribute("error.expected", is_expected) - return fullname, message, tb, is_expected + return fullname, message, message_raw, tb, is_expected def notice_error(self, error=None, attributes=None, expected=None, ignore=None, status_code=None): attributes = attributes if attributes is not None else {} + # If no exception details provided, use current exception. + + # Pull from sys.exc_info if no exception is passed + if not error or None in error: + error = sys.exc_info() + + # If no exception to report, exit + if not error or None in error: + return + + exc, value, tb = error + recorded = self._observe_exception( error, ignore=ignore, @@ -365,7 +381,7 @@ def notice_error(self, error=None, attributes=None, expected=None, ignore=None, status_code=status_code, ) if recorded: - fullname, message, tb, is_expected = recorded + fullname, message, message_raw, tb, is_expected = recorded transaction = self.transaction settings = transaction and transaction.settings @@ -392,16 +408,45 @@ def notice_error(self, error=None, attributes=None, expected=None, ignore=None, ) custom_params = {} - if settings and settings.code_level_metrics and settings.code_level_metrics.enabled: - source = extract_code_from_traceback(tb) - else: - source = None + # Extract additional details about the exception + + source = None + error_group_name = None + if settings: + if settings.code_level_metrics and settings.code_level_metrics.enabled: + source = extract_code_from_traceback(tb) + + if settings.error_collector and settings.error_collector.error_group_callback is not None: + try: + # Call callback to obtain error group name + input_attributes = {} + input_attributes.update(transaction._custom_params) + 
input_attributes.update(attributes) + error_group_name_raw = settings.error_collector.error_group_callback(value, { + "traceback": tb, + "error.class": exc, + "error.message": message_raw, + "error.expected": is_expected, + "custom_params": input_attributes, + "transactionName": getattr(transaction, "name", None), + "response.status": getattr(transaction, "_response_code", None), + "request.method": getattr(transaction, "_request_method", None), + "request.uri": getattr(transaction, "_request_uri", None), + }) + if error_group_name_raw: + _, error_group_name = process_user_attribute("error.group.name", error_group_name_raw) + if error_group_name is None or not isinstance(error_group_name, six.string_types): + raise ValueError("Invalid attribute value for error.group.name. Expected string, got: %s" % repr(error_group_name_raw)) + except Exception: + _logger.error("Encountered error when calling error group callback:\n%s", "".join(traceback.format_exception(*sys.exc_info()))) + error_group_name = None transaction._create_error_node( settings, fullname, message, is_expected, + error_group_name, custom_params, self.guid, tb, @@ -634,6 +679,7 @@ def get_service_linking_metadata(application=None, settings=None): if not settings: if application is None: from newrelic.api.application import application_instance + application = application_instance(activate=False) if application is not None: diff --git a/newrelic/api/transaction.py b/newrelic/api/transaction.py index 08638e056..f04bcba84 100644 --- a/newrelic/api/transaction.py +++ b/newrelic/api/transaction.py @@ -46,6 +46,7 @@ obfuscate, ) from newrelic.core.attribute import ( + MAX_ATTRIBUTE_LENGTH, MAX_LOG_MESSAGE_LENGTH, MAX_NUM_USER_ATTRIBUTES, create_agent_attributes, @@ -1547,7 +1548,9 @@ def notice_error(self, error=None, attributes=None, expected=None, ignore=None, status_code=status_code, ) - def _create_error_node(self, settings, fullname, message, expected, custom_params, span_id, tb, source): + def 
_create_error_node( + self, settings, fullname, message, expected, error_group_name, custom_params, span_id, tb, source + ): # Only remember up to limit of what can be caught for a # single transaction. This could be trimmed further # later if there are already recorded errors and would @@ -1576,9 +1579,8 @@ def _create_error_node(self, settings, fullname, message, expected, custom_param span_id=span_id, stack_trace=exception_stack(tb), custom_params=custom_params, - file_name=None, - line_number=None, source=source, + error_group_name=error_group_name, ) # TODO: Errors are recorded in time order. If @@ -1812,6 +1814,21 @@ def add_custom_parameters(items): return add_custom_attributes(items) +def set_user_id(user_id): + transaction = current_transaction() + + if not user_id or not transaction: + return + + if not isinstance(user_id, six.string_types): + _logger.warning("The set_user_id API requires a string-based user ID.") + return + + user_id = truncate(user_id, MAX_ATTRIBUTE_LENGTH) + + transaction._add_agent_attribute("enduser.id", user_id) + + def add_framework_info(name, version=None): transaction = current_transaction() if transaction: diff --git a/newrelic/core/attribute.py b/newrelic/core/attribute.py index c5f19e4c0..372711369 100644 --- a/newrelic/core/attribute.py +++ b/newrelic/core/attribute.py @@ -42,46 +42,48 @@ _TRANSACTION_EVENT_DEFAULT_ATTRIBUTES = set( ( - "host.displayName", - "request.method", - "request.headers.contentType", - "request.headers.contentLength", - "request.uri", - "response.status", - "request.headers.accept", - "response.headers.contentLength", - "response.headers.contentType", - "request.headers.host", - "request.headers.userAgent", - "message.queueName", - "message.routingKey", - "http.url", - "http.statusCode", - "aws.requestId", - "aws.operation", "aws.lambda.arn", "aws.lambda.coldStart", "aws.lambda.eventSource.arn", + "aws.operation", + "aws.requestId", + "code.filepath", + "code.function", + "code.lineno", + 
"code.namespace", "db.collection", "db.instance", "db.operation", "db.statement", + "enduser.id", "error.class", - "error.message", "error.expected", - "peer.hostname", - "peer.address", + "error.message", + "error.group.name", "graphql.field.name", "graphql.field.parentType", "graphql.field.path", "graphql.field.returnType", "graphql.operation.name", - "graphql.operation.type", "graphql.operation.query", - "code.filepath", - "code.function", - "code.lineno", - "code.namespace", + "graphql.operation.type", + "host.displayName", + "http.statusCode", + "http.url", + "message.queueName", + "message.routingKey", + "peer.address", + "peer.hostname", + "request.headers.accept", + "request.headers.contentLength", + "request.headers.contentType", + "request.headers.host", + "request.headers.userAgent", + "request.method", + "request.uri", + "response.headers.contentLength", + "response.headers.contentType", + "response.status", ) ) diff --git a/newrelic/core/config.py b/newrelic/core/config.py index 80e9ccec0..7489be222 100644 --- a/newrelic/core/config.py +++ b/newrelic/core/config.py @@ -138,7 +138,9 @@ class TransactionTracerAttributesSettings(Settings): class ErrorCollectorSettings(Settings): - pass + @property + def error_group_callback(self): + return self._error_group_callback class ErrorCollectorAttributesSettings(Settings): @@ -698,6 +700,7 @@ def default_host(license_key): _settings.error_collector.ignore_status_codes = _parse_status_codes("100-102 200-208 226 300-308 404", set()) _settings.error_collector.expected_classes = [] _settings.error_collector.expected_status_codes = set() +_settings.error_collector._error_group_callback = None _settings.error_collector.attributes.enabled = True _settings.error_collector.attributes.exclude = [] _settings.error_collector.attributes.include = [] diff --git a/newrelic/core/error_node.py b/newrelic/core/error_node.py index 67c1b449a..fe0157b81 100644 --- a/newrelic/core/error_node.py +++ b/newrelic/core/error_node.py @@ 
-24,8 +24,7 @@ "span_id", "stack_trace", "custom_params", - "file_name", - "line_number", "source", + "error_group_name", ], ) diff --git a/newrelic/core/stats_engine.py b/newrelic/core/stats_engine.py index 959d4ffae..203e3e796 100644 --- a/newrelic/core/stats_engine.py +++ b/newrelic/core/stats_engine.py @@ -26,6 +26,7 @@ import random import sys import time +import traceback import warnings import zlib from heapq import heapify, heapreplace @@ -38,6 +39,7 @@ from newrelic.common.streaming_utils import StreamBuffer from newrelic.core.attribute import ( MAX_LOG_MESSAGE_LENGTH, + create_agent_attributes, create_user_attributes, process_user_attribute, truncate, @@ -610,13 +612,15 @@ def notice_error(self, error=None, attributes=None, expected=None, ignore=None, if getattr(value, "_nr_ignored", None): return - module, name, fullnames, message = parse_exc_info(error) + module, name, fullnames, message_raw = parse_exc_info(error) fullname = fullnames[0] # Check to see if we need to strip the message before recording it. if settings.strip_exception_messages.enabled and fullname not in settings.strip_exception_messages.allowlist: message = STRIP_EXCEPTION_MESSAGE + else: + message = message_raw # Where expected or ignore are a callable they should return a # tri-state variable with the following behavior. 
@@ -712,6 +716,42 @@ def notice_error(self, error=None, attributes=None, expected=None, ignore=None, user_attributes = create_user_attributes(custom_attributes, settings.attribute_filter) + + # Extract additional details about the exception as agent attributes + agent_attributes = {} + + if settings: + if settings.code_level_metrics and settings.code_level_metrics.enabled: + extract_code_from_traceback(tb).add_attrs(agent_attributes.__setitem__) + + if settings.error_collector and settings.error_collector.error_group_callback is not None: + error_group_name = None + try: + # Call callback to obtain error group name + error_group_name_raw = settings.error_collector.error_group_callback(value, { + "traceback": tb, + "error.class": exc, + "error.message": message_raw, + "error.expected": is_expected, + "custom_params": attributes, + # Transaction specific items should be set to None + "transactionName": None, + "response.status": None, + "request.method": None, + "request.uri": None, + }) + if error_group_name_raw: + _, error_group_name = process_user_attribute("error.group.name", error_group_name_raw) + if error_group_name is None or not isinstance(error_group_name, six.string_types): + raise ValueError("Invalid attribute value for error.group.name. Expected string, got: %s" % repr(error_group_name_raw)) + else: + agent_attributes["error.group.name"] = error_group_name + + except Exception: + _logger.error("Encountered error when calling error group callback:\n%s", "".join(traceback.format_exception(*sys.exc_info()))) + + agent_attributes = create_agent_attributes(agent_attributes, settings.attribute_filter) + # Record the exception details. 
attributes = {} @@ -731,9 +771,10 @@ def notice_error(self, error=None, attributes=None, expected=None, ignore=None, # set source code attributes attributes["agentAttributes"] = {} - if settings and settings.code_level_metrics and settings.code_level_metrics.enabled: - extract_code_from_traceback(tb).add_attrs(attributes["agentAttributes"].__setitem__) - + for attr in agent_attributes: + if attr.destinations & DST_ERROR_COLLECTOR: + attributes["agentAttributes"][attr.name] = attr.value + error_details = TracedError( start_time=time.time(), path="Exception", message=message, type=fullname, parameters=attributes ) @@ -771,7 +812,11 @@ def _error_event(self, error): # Leave agent attributes field blank since not a transaction - error_event = [error.parameters["intrinsics"], error.parameters["userAttributes"], {}] + error_event = [ + error.parameters["intrinsics"], + error.parameters["userAttributes"], + error.parameters["agentAttributes"], + ] return error_event diff --git a/newrelic/core/transaction_node.py b/newrelic/core/transaction_node.py index 11b254b8a..0faae3790 100644 --- a/newrelic/core/transaction_node.py +++ b/newrelic/core/transaction_node.py @@ -22,35 +22,81 @@ import newrelic.core.error_collector import newrelic.core.trace_node - +from newrelic.common.streaming_utils import SpanProtoAttrs +from newrelic.core.attribute import create_agent_attributes, create_user_attributes +from newrelic.core.attribute_filter import ( + DST_ERROR_COLLECTOR, + DST_TRANSACTION_EVENTS, + DST_TRANSACTION_TRACER, +) from newrelic.core.metric import ApdexMetric, TimeMetric from newrelic.core.string_table import StringTable -from newrelic.core.attribute import create_user_attributes -from newrelic.core.attribute_filter import (DST_ERROR_COLLECTOR, - DST_TRANSACTION_TRACER, DST_TRANSACTION_EVENTS) - -from newrelic.common.streaming_utils import SpanProtoAttrs try: from newrelic.core.infinite_tracing_pb2 import Span except: pass -_TransactionNode = namedtuple('_TransactionNode', - 
['settings', 'path', 'type', 'group', 'base_name', 'name_for_metric', - 'port', 'request_uri', 'queue_start', 'start_time', - 'end_time', 'last_byte_time', 'response_time', 'total_time', - 'duration', 'exclusive', 'root', 'errors', 'slow_sql', - 'custom_events', 'log_events', 'apdex_t', 'suppress_apdex', 'custom_metrics', - 'guid', 'cpu_time', 'suppress_transaction_trace', 'client_cross_process_id', - 'referring_transaction_guid', 'record_tt', 'synthetics_resource_id', - 'synthetics_job_id', 'synthetics_monitor_id', 'synthetics_header', - 'is_part_of_cat', 'trip_id', 'path_hash', 'referring_path_hash', - 'alternate_path_hashes', 'trace_intrinsics', 'agent_attributes', - 'distributed_trace_intrinsics', 'user_attributes', 'priority', - 'sampled', 'parent_transport_duration', 'parent_span', 'parent_type', - 'parent_account', 'parent_app', 'parent_tx', 'parent_transport_type', - 'root_span_guid', 'trace_id', 'loop_time']) +_TransactionNode = namedtuple( + "_TransactionNode", + [ + "settings", + "path", + "type", + "group", + "base_name", + "name_for_metric", + "port", + "request_uri", + "queue_start", + "start_time", + "end_time", + "last_byte_time", + "response_time", + "total_time", + "duration", + "exclusive", + "root", + "errors", + "slow_sql", + "custom_events", + "log_events", + "apdex_t", + "suppress_apdex", + "custom_metrics", + "guid", + "cpu_time", + "suppress_transaction_trace", + "client_cross_process_id", + "referring_transaction_guid", + "record_tt", + "synthetics_resource_id", + "synthetics_job_id", + "synthetics_monitor_id", + "synthetics_header", + "is_part_of_cat", + "trip_id", + "path_hash", + "referring_path_hash", + "alternate_path_hashes", + "trace_intrinsics", + "agent_attributes", + "distributed_trace_intrinsics", + "user_attributes", + "priority", + "sampled", + "parent_transport_duration", + "parent_span", + "parent_type", + "parent_account", + "parent_app", + "parent_tx", + "parent_transport_type", + "root_span_guid", + "trace_id", + 
"loop_time", + ], +) class TransactionNode(_TransactionNode): @@ -71,7 +117,7 @@ def __hash__(self): @property def string_table(self): - result = getattr(self, '_string_table', None) + result = getattr(self, "_string_table", None) if result is not None: return result self._string_table = StringTable() @@ -96,7 +142,7 @@ def time_metrics(self, stats): if not self.base_name: return - if self.type == 'WebTransaction': + if self.type == "WebTransaction": # Report time taken by request dispatcher. We don't # know upstream time distinct from actual request # time so can't report time exclusively in the @@ -109,11 +155,7 @@ def time_metrics(self, stats): # and how the exclusive component would appear in # the overview graphs. - yield TimeMetric( - name='HttpDispatcher', - scope='', - duration=self.response_time, - exclusive=None) + yield TimeMetric(name="HttpDispatcher", scope="", duration=self.response_time, exclusive=None) # Upstream queue time within any web server front end. @@ -128,114 +170,84 @@ def time_metrics(self, stats): if queue_wait < 0: queue_wait = 0 - yield TimeMetric( - name='WebFrontend/QueueTime', - scope='', - duration=queue_wait, - exclusive=None) + yield TimeMetric(name="WebFrontend/QueueTime", scope="", duration=queue_wait, exclusive=None) # Generate the full transaction metric. - yield TimeMetric( - name=self.path, - scope='', - duration=self.response_time, - exclusive=self.exclusive) + yield TimeMetric(name=self.path, scope="", duration=self.response_time, exclusive=self.exclusive) # Generate the rollup metric. - if self.type != 'WebTransaction': - rollup = '%s/all' % self.type + if self.type != "WebTransaction": + rollup = "%s/all" % self.type else: rollup = self.type - yield TimeMetric( - name=rollup, - scope='', - duration=self.response_time, - exclusive=self.exclusive) + yield TimeMetric(name=rollup, scope="", duration=self.response_time, exclusive=self.exclusive) # Generate Unscoped Total Time metrics. 
- if self.type == 'WebTransaction': - metric_prefix = 'WebTransactionTotalTime' - metric_suffix = 'Web' + if self.type == "WebTransaction": + metric_prefix = "WebTransactionTotalTime" + metric_suffix = "Web" else: - metric_prefix = 'OtherTransactionTotalTime' - metric_suffix = 'Other' + metric_prefix = "OtherTransactionTotalTime" + metric_suffix = "Other" yield TimeMetric( - name='%s/%s' % (metric_prefix, self.name_for_metric), - scope='', - duration=self.total_time, - exclusive=self.total_time) + name="%s/%s" % (metric_prefix, self.name_for_metric), + scope="", + duration=self.total_time, + exclusive=self.total_time, + ) - yield TimeMetric( - name=metric_prefix, - scope='', - duration=self.total_time, - exclusive=self.total_time) + yield TimeMetric(name=metric_prefix, scope="", duration=self.total_time, exclusive=self.total_time) # Generate Distributed Tracing metrics if self.settings.distributed_tracing.enabled: dt_tag = "%s/%s/%s/%s/all" % ( - self.parent_type or 'Unknown', - self.parent_account or 'Unknown', - self.parent_app or 'Unknown', - self.parent_transport_type or 'Unknown') + self.parent_type or "Unknown", + self.parent_account or "Unknown", + self.parent_app or "Unknown", + self.parent_transport_type or "Unknown", + ) - for bonus_tag in ('', metric_suffix): + for bonus_tag in ("", metric_suffix): yield TimeMetric( name="DurationByCaller/%s%s" % (dt_tag, bonus_tag), - scope='', + scope="", duration=self.duration, - exclusive=self.duration) + exclusive=self.duration, + ) if self.parent_transport_duration is not None: yield TimeMetric( name="TransportDuration/%s%s" % (dt_tag, bonus_tag), - scope='', + scope="", duration=self.parent_transport_duration, - exclusive=self.parent_transport_duration) + exclusive=self.parent_transport_duration, + ) if self.errors: yield TimeMetric( - name='ErrorsByCaller/%s%s' % (dt_tag, bonus_tag), - scope='', - duration=0.0, - exclusive=None) + name="ErrorsByCaller/%s%s" % (dt_tag, bonus_tag), scope="", duration=0.0, 
exclusive=None + ) # Generate Error metrics if self.errors: if False in (error.expected for error in self.errors): # Generate overall rollup metric indicating if errors present. - yield TimeMetric( - name='Errors/all', - scope='', - duration=0.0, - exclusive=None) + yield TimeMetric(name="Errors/all", scope="", duration=0.0, exclusive=None) # Generate individual error metric for transaction. - yield TimeMetric( - name='Errors/%s' % self.path, - scope='', - duration=0.0, - exclusive=None) + yield TimeMetric(name="Errors/%s" % self.path, scope="", duration=0.0, exclusive=None) # Generate rollup metric for WebTransaction errors. - yield TimeMetric( - name='Errors/all%s' % metric_suffix, - scope='', - duration=0.0, - exclusive=None) + yield TimeMetric(name="Errors/all%s" % metric_suffix, scope="", duration=0.0, exclusive=None) else: - yield TimeMetric( - name='ErrorsExpected/all', - scope='', - duration=0.0, - exclusive=None) + yield TimeMetric(name="ErrorsExpected/all", scope="", duration=0.0, exclusive=None) # Now for the children. for child in self.root.children: @@ -243,9 +255,7 @@ def time_metrics(self, stats): yield metric def apdex_metrics(self, stats): - """Return a generator yielding the apdex metrics for this node. - - """ + """Return a generator yielding the apdex metrics for this node.""" if not self.base_name: return @@ -255,7 +265,7 @@ def apdex_metrics(self, stats): # The apdex metrics are only relevant to web transactions. - if self.type != 'WebTransaction': + if self.type != "WebTransaction": return # The magic calculations based on apdex_t. The apdex_t @@ -280,20 +290,18 @@ def apdex_metrics(self, stats): # Generate the full apdex metric. 
yield ApdexMetric( - name='Apdex/%s' % self.name_for_metric, - satisfying=satisfying, - tolerating=tolerating, - frustrating=frustrating, - apdex_t=self.apdex_t) + name="Apdex/%s" % self.name_for_metric, + satisfying=satisfying, + tolerating=tolerating, + frustrating=frustrating, + apdex_t=self.apdex_t, + ) # Generate the rollup metric. yield ApdexMetric( - name='Apdex', - satisfying=satisfying, - tolerating=tolerating, - frustrating=frustrating, - apdex_t=self.apdex_t) + name="Apdex", satisfying=satisfying, tolerating=tolerating, frustrating=frustrating, apdex_t=self.apdex_t + ) def error_details(self): """Return a generator yielding the details for each unique error @@ -318,38 +326,49 @@ def error_details(self): params = {} params["stack_trace"] = error.stack_trace - intrinsics = {'spanId': error.span_id, 'error.expected': error.expected} + intrinsics = {"spanId": error.span_id, "error.expected": error.expected} intrinsics.update(self.trace_intrinsics) - params['intrinsics'] = intrinsics + params["intrinsics"] = intrinsics - params['agentAttributes'] = {} + params["agentAttributes"] = {} for attr in self.agent_attributes: if attr.destinations & DST_ERROR_COLLECTOR: - params['agentAttributes'][attr.name] = attr.value + params["agentAttributes"][attr.name] = attr.value - params['userAttributes'] = {} + params["userAttributes"] = {} for attr in self.user_attributes: if attr.destinations & DST_ERROR_COLLECTOR: - params['userAttributes'][attr.name] = attr.value + params["userAttributes"][attr.name] = attr.value - # add source context attrs for error - if self.settings and self.settings.code_level_metrics and self.settings.code_level_metrics.enabled and getattr(error, "source", None): - error.source.add_attrs(params['agentAttributes'].__setitem__) + # add error specific agent attributes to this error's agentAttributes - # add error specific custom params to this error's userAttributes + err_agent_attrs = {} + error_group_name = error.error_group_name + if 
error_group_name: + err_agent_attrs["error.group.name"] = error_group_name + + # add source context attrs for error + if ( + self.settings + and self.settings.code_level_metrics + and self.settings.code_level_metrics.enabled + and getattr(error, "source", None) + ): + error.source.add_attrs(err_agent_attrs.__setitem__) + + err_agent_attrs = create_agent_attributes(err_agent_attrs, self.settings.attribute_filter) + for attr in err_agent_attrs: + if attr.destinations & DST_ERROR_COLLECTOR: + params["agentAttributes"][attr.name] = attr.value - err_attrs = create_user_attributes(error.custom_params, - self.settings.attribute_filter) + err_attrs = create_user_attributes(error.custom_params, self.settings.attribute_filter) for attr in err_attrs: if attr.destinations & DST_ERROR_COLLECTOR: - params['userAttributes'][attr.name] = attr.value + params["userAttributes"][attr.name] = attr.value yield newrelic.core.error_collector.TracedError( - start_time=error.timestamp, - path=self.path, - message=error.message, - type=error.type, - parameters=params) + start_time=error.timestamp, path=self.path, message=error.message, type=error.type, parameters=params + ) def transaction_trace(self, stats, limit, connections): @@ -362,19 +381,19 @@ def transaction_trace(self, stats, limit, connections): attributes = {} - attributes['intrinsics'] = self.trace_intrinsics + attributes["intrinsics"] = self.trace_intrinsics - attributes['agentAttributes'] = {} + attributes["agentAttributes"] = {} for attr in self.agent_attributes: if attr.destinations & DST_TRANSACTION_TRACER: - attributes['agentAttributes'][attr.name] = attr.value - if attr.name == 'request.uri': + attributes["agentAttributes"][attr.name] = attr.value + if attr.name == "request.uri": self.include_transaction_trace_request_uri = True - attributes['userAttributes'] = {} + attributes["userAttributes"] = {} for attr in self.user_attributes: if attr.destinations & DST_TRANSACTION_TRACER: - attributes['userAttributes'][attr.name] = 
attr.value + attributes["userAttributes"][attr.name] = attr.value # There is an additional trace node labeled as 'ROOT' # that needs to be inserted below the root node object @@ -382,19 +401,17 @@ def transaction_trace(self, stats, limit, connections): # from the actual top node for the transaction. root = newrelic.core.trace_node.TraceNode( - start_time=trace_node.start_time, - end_time=trace_node.end_time, - name='ROOT', - params={}, - children=[trace_node], - label=None) + start_time=trace_node.start_time, + end_time=trace_node.end_time, + name="ROOT", + params={}, + children=[trace_node], + label=None, + ) return newrelic.core.trace_node.RootNode( - start_time=start_time, - empty0={}, - empty1={}, - root=root, - attributes=attributes) + start_time=start_time, empty0={}, empty1={}, root=root, attributes=attributes + ) def slow_sql_nodes(self, stats): for item in self.slow_sql: @@ -405,18 +422,18 @@ def apdex_perf_zone(self): # Apdex is only valid for WebTransactions. - if self.type != 'WebTransaction': + if self.type != "WebTransaction": return None if self.errors and False in (error.expected for error in self.errors): - return 'F' + return "F" else: if self.duration <= self.apdex_t: - return 'S' + return "S" elif self.duration <= 4 * self.apdex_t: - return 'T' + return "T" else: - return 'F' + return "F" def transaction_event(self, stats_table): # Create the transaction event, which is a list of attributes. 
@@ -445,41 +462,38 @@ def transaction_event_intrinsics(self, stats_table): intrinsics = self._event_intrinsics(stats_table) - intrinsics['type'] = 'Transaction' - intrinsics['name'] = self.path - intrinsics['totalTime'] = self.total_time + intrinsics["type"] = "Transaction" + intrinsics["name"] = self.path + intrinsics["totalTime"] = self.total_time def _add_if_not_empty(key, value): if value: intrinsics[key] = value - + apdex_perf_zone = self.apdex_perf_zone() - _add_if_not_empty('apdexPerfZone', apdex_perf_zone) - _add_if_not_empty('nr.apdexPerfZone', apdex_perf_zone) + _add_if_not_empty("apdexPerfZone", apdex_perf_zone) + _add_if_not_empty("nr.apdexPerfZone", apdex_perf_zone) if self.errors: - intrinsics['error'] = True + intrinsics["error"] = True if self.path_hash: - intrinsics['nr.guid'] = self.guid - intrinsics['nr.tripId'] = self.trip_id - intrinsics['nr.pathHash'] = self.path_hash + intrinsics["nr.guid"] = self.guid + intrinsics["nr.tripId"] = self.trip_id + intrinsics["nr.pathHash"] = self.path_hash - _add_if_not_empty('nr.referringPathHash', - self.referring_path_hash) - _add_if_not_empty('nr.alternatePathHashes', - ','.join(self.alternate_path_hashes)) - _add_if_not_empty('nr.referringTransactionGuid', - self.referring_transaction_guid) + _add_if_not_empty("nr.referringPathHash", self.referring_path_hash) + _add_if_not_empty("nr.alternatePathHashes", ",".join(self.alternate_path_hashes)) + _add_if_not_empty("nr.referringTransactionGuid", self.referring_transaction_guid) if self.synthetics_resource_id: - intrinsics['nr.guid'] = self.guid + intrinsics["nr.guid"] = self.guid if self.parent_tx: - intrinsics['parentId'] = self.parent_tx + intrinsics["parentId"] = self.parent_tx if self.parent_span: - intrinsics['parentSpanId'] = self.parent_span + intrinsics["parentSpanId"] = self.parent_span return intrinsics @@ -502,10 +516,21 @@ def error_events(self, stats_table): if attr.destinations & DST_ERROR_COLLECTOR: user_attributes[attr.name] = attr.value + # add 
error specific agent attributes to this error's agentAttributes + + err_agent_attrs = {} + error_group_name = error.error_group_name + if error_group_name: + err_agent_attrs["error.group.name"] = error_group_name + + err_agent_attrs = create_agent_attributes(err_agent_attrs, self.settings.attribute_filter) + for attr in err_agent_attrs: + if attr.destinations & DST_ERROR_COLLECTOR: + agent_attributes[attr.name] = attr.value + # add error specific custom params to this error's userAttributes - err_attrs = create_user_attributes(error.custom_params, - self.settings.attribute_filter) + err_attrs = create_user_attributes(error.custom_params, self.settings.attribute_filter) for attr in err_attrs: if attr.destinations & DST_ERROR_COLLECTOR: user_attributes[attr.name] = attr.value @@ -519,24 +544,24 @@ def error_event_intrinsics(self, error, stats_table): intrinsics = self._event_intrinsics(stats_table) - intrinsics['type'] = "TransactionError" - intrinsics['error.class'] = error.type - intrinsics['error.message'] = error.message - intrinsics['error.expected'] = error.expected - intrinsics['transactionName'] = self.path - intrinsics['spanId'] = error.span_id + intrinsics["type"] = "TransactionError" + intrinsics["error.class"] = error.type + intrinsics["error.message"] = error.message + intrinsics["error.expected"] = error.expected + intrinsics["transactionName"] = self.path + intrinsics["spanId"] = error.span_id - intrinsics['nr.transactionGuid'] = self.guid + intrinsics["nr.transactionGuid"] = self.guid if self.referring_transaction_guid: guid = self.referring_transaction_guid - intrinsics['nr.referringTransactionGuid'] = guid + intrinsics["nr.referringTransactionGuid"] = guid return intrinsics def _event_intrinsics(self, stats_table): """Common attributes for analytics events""" - cache = getattr(self, '_event_intrinsics_cache', None) + cache = getattr(self, "_event_intrinsics_cache", None) if cache is not None: # We don't want to execute this function more than once, 
since @@ -546,24 +571,24 @@ def _event_intrinsics(self, stats_table): intrinsics = self.distributed_trace_intrinsics.copy() - intrinsics['timestamp'] = int(1000.0 * self.start_time) - intrinsics['duration'] = self.response_time + intrinsics["timestamp"] = int(1000.0 * self.start_time) + intrinsics["duration"] = self.response_time if self.port: - intrinsics['port'] = self.port + intrinsics["port"] = self.port # Add the Synthetics attributes to the intrinsics dict. if self.synthetics_resource_id: - intrinsics['nr.syntheticsResourceId'] = self.synthetics_resource_id - intrinsics['nr.syntheticsJobId'] = self.synthetics_job_id - intrinsics['nr.syntheticsMonitorId'] = self.synthetics_monitor_id + intrinsics["nr.syntheticsResourceId"] = self.synthetics_resource_id + intrinsics["nr.syntheticsJobId"] = self.synthetics_job_id + intrinsics["nr.syntheticsMonitorId"] = self.synthetics_monitor_id def _add_call_time(source, target): # include time for keys previously added to stats table via # stats_engine.record_transaction - if (source, '') in stats_table: - call_time = stats_table[(source, '')].total_call_time + if (source, "") in stats_table: + call_time = stats_table[(source, "")].total_call_time if target in intrinsics: intrinsics[target] += call_time else: @@ -572,45 +597,43 @@ def _add_call_time(source, target): def _add_call_count(source, target): # include counts for keys previously added to stats table via # stats_engine.record_transaction - if (source, '') in stats_table: - call_count = stats_table[(source, '')].call_count + if (source, "") in stats_table: + call_count = stats_table[(source, "")].call_count if target in intrinsics: intrinsics[target] += call_count else: intrinsics[target] = call_count - _add_call_time('WebFrontend/QueueTime', 'queueDuration') + _add_call_time("WebFrontend/QueueTime", "queueDuration") - _add_call_time('External/all', 'externalDuration') - _add_call_time('Datastore/all', 'databaseDuration') - _add_call_time('Memcache/all', 
'memcacheDuration') + _add_call_time("External/all", "externalDuration") + _add_call_time("Datastore/all", "databaseDuration") + _add_call_time("Memcache/all", "memcacheDuration") - _add_call_count('External/all', 'externalCallCount') - _add_call_count('Datastore/all', 'databaseCallCount') + _add_call_count("External/all", "externalCallCount") + _add_call_count("Datastore/all", "databaseCallCount") if self.loop_time: - intrinsics['eventLoopTime'] = self.loop_time - _add_call_time('EventLoop/Wait/all', 'eventLoopWait') + intrinsics["eventLoopTime"] = self.loop_time + _add_call_time("EventLoop/Wait/all", "eventLoopWait") self._event_intrinsics_cache = intrinsics.copy() return intrinsics def span_protos(self, settings): - for i_attrs, u_attrs, a_attrs in self.span_events( - settings, attr_class=SpanProtoAttrs): - yield Span(trace_id=self.trace_id, - intrinsics=i_attrs, - user_attributes=u_attrs, - agent_attributes=a_attrs) + for i_attrs, u_attrs, a_attrs in self.span_events(settings, attr_class=SpanProtoAttrs): + yield Span(trace_id=self.trace_id, intrinsics=i_attrs, user_attributes=u_attrs, agent_attributes=a_attrs) def span_events(self, settings, attr_class=dict): - base_attrs = attr_class(( - ('transactionId', self.guid), - ('traceId', self.trace_id), - ('sampled', self.sampled), - ('priority', self.priority), - )) + base_attrs = attr_class( + ( + ("transactionId", self.guid), + ("traceId", self.trace_id), + ("sampled", self.sampled), + ("priority", self.priority), + ) + ) for event in self.root.span_events( settings, diff --git a/tests/agent_features/test_attributes_in_action.py b/tests/agent_features/test_attributes_in_action.py index aa44d3e2d..e56994d0a 100644 --- a/tests/agent_features/test_attributes_in_action.py +++ b/tests/agent_features/test_attributes_in_action.py @@ -25,6 +25,9 @@ validate_error_event_attributes_outside_transaction, validate_error_trace_attributes_outside_transaction, ) +from testing_support.validators.validate_error_trace_attributes 
import ( + validate_error_trace_attributes, +) from testing_support.validators.validate_span_events import validate_span_events from testing_support.validators.validate_transaction_error_trace_attributes import ( validate_transaction_error_trace_attributes, @@ -37,9 +40,10 @@ ) from newrelic.api.application import application_instance as application +from newrelic.api.background_task import background_task from newrelic.api.message_transaction import message_transaction from newrelic.api.time_trace import notice_error -from newrelic.api.transaction import add_custom_attribute +from newrelic.api.transaction import add_custom_attribute, current_transaction, set_user_id from newrelic.api.wsgi_application import wsgi_application from newrelic.common.object_names import callable_name @@ -93,7 +97,16 @@ AGENT_KEYS_ALL = TRACE_ERROR_AGENT_KEYS + REQ_PARAMS -TRANS_EVENT_INTRINSICS = ("name", "duration", "type", "timestamp", "totalTime", "error", "nr.apdexPerfZone", "apdexPerfZone") +TRANS_EVENT_INTRINSICS = ( + "name", + "duration", + "type", + "timestamp", + "totalTime", + "error", + "nr.apdexPerfZone", + "apdexPerfZone", +) TRANS_EVENT_AGENT_KEYS = [ "response.status", "request.method", @@ -911,3 +924,45 @@ def test_routing_key_agent_attribute(): @message_transaction(library="RabbitMQ", destination_type="Exchange", destination_name="x") def test_none_type_routing_key_agent_attribute(): pass + + +_required_agent_attributes = ["enduser.id"] +_forgone_agent_attributes = [] + + +@pytest.mark.parametrize('input_user_id, reported_user_id, high_security',( + ("1234", "1234", True), + ("a" * 260, "a" * 255, False), +)) +def test_enduser_id_attribute_api_valid_types(input_user_id, reported_user_id, high_security): + @reset_core_stats_engine() + @validate_error_trace_attributes( + callable_name(ValueError), exact_attrs={"user": {}, "intrinsic": {}, "agent": {"enduser.id": reported_user_id}} + ) + @validate_error_event_attributes(exact_attrs={"user": {}, "intrinsic": {}, "agent": 
{"enduser.id": reported_user_id}}) + @validate_attributes("agent", _required_agent_attributes, _forgone_agent_attributes) + @background_task() + @override_application_settings({"high_security": high_security}) + def _test(): + set_user_id(input_user_id) + + try: + raise ValueError() + except Exception: + notice_error() + _test() + + +@pytest.mark.parametrize('input_user_id',(None, '', 123)) +def test_enduser_id_attribute_api_invalid_types(input_user_id): + @reset_core_stats_engine() + @validate_attributes("agent", [], ["enduser.id"]) + @background_task() + def _test(): + set_user_id(input_user_id) + + try: + raise ValueError() + except Exception: + notice_error() + _test() diff --git a/tests/agent_features/test_error_events.py b/tests/agent_features/test_error_events.py index 99b3935be..72bdb14f7 100644 --- a/tests/agent_features/test_error_events.py +++ b/tests/agent_features/test_error_events.py @@ -16,6 +16,7 @@ import time import webtest + from testing_support.fixtures import ( cat_enabled, make_cross_agent_headers, @@ -26,6 +27,9 @@ validate_transaction_error_event_count, ) from testing_support.sample_applications import fully_featured_app +from testing_support.validators.validate_error_trace_attributes import ( + validate_error_trace_attributes, +) from testing_support.validators.validate_non_transaction_error_event import ( validate_non_transaction_error_event, ) diff --git a/tests/agent_features/test_error_group_callback.py b/tests/agent_features/test_error_group_callback.py new file mode 100644 index 000000000..742391162 --- /dev/null +++ b/tests/agent_features/test_error_group_callback.py @@ -0,0 +1,238 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import threading +import traceback +import sys + +import pytest + +from testing_support.fixtures import ( + override_application_settings, + reset_core_stats_engine, + validate_error_event_attributes, + validate_error_event_attributes_outside_transaction, + validate_error_trace_attributes_outside_transaction, +) +from testing_support.validators.validate_error_trace_attributes import ( + validate_error_trace_attributes, +) + +from newrelic.api.application import application_instance as application +from newrelic.api.background_task import background_task +from newrelic.api.time_trace import notice_error +from newrelic.api.transaction import current_transaction +from newrelic.api.settings import set_error_group_callback +from newrelic.api.web_transaction import web_transaction +from newrelic.common.object_names import callable_name + + +_callback_called = threading.Event() +_truncated_value = "A" * 300 + +def error_group_callback(exc, data): + _callback_called.set() + + if isinstance(exc, ValueError): + return "value" + elif isinstance(exc, ZeroDivisionError): + return _truncated_value + elif isinstance(exc, IndexError): + return [] + elif isinstance(exc, LookupError): + return 123 + elif isinstance(exc, TypeError): + return "" + + +def test_clear_error_group_callback(): + settings = application().settings + set_error_group_callback(lambda x, y: None) + assert settings.error_collector.error_group_callback is not None, "Failed to set callback." 
+ set_error_group_callback(None) + assert settings.error_collector.error_group_callback is None, "Failed to clear callback." + + +@pytest.mark.parametrize("callback,accepted", [ + (error_group_callback, True), + (lambda x, y: None, True), + (None, False), + ("string", False) +]) +def test_set_error_group_callback(callback, accepted): + try: + set_error_group_callback(callback) + settings = application().settings + if accepted: + assert settings.error_collector.error_group_callback is not None, "Failed to set callback." + else: + assert settings.error_collector.error_group_callback is None, "Accepted bad callback." + finally: + set_error_group_callback(None) + + +@pytest.mark.parametrize("exc_class,group_name,high_security", [ + (ValueError, "value", False), + (ValueError, "value", True), + (TypeError, None, False), + (RuntimeError, None, False), + (IndexError, None, False), + (LookupError, None, False), + (ZeroDivisionError, _truncated_value[:255], False), +], ids=("standard", "high-security", "empty-string", "None-value", "list-type", "int-type", "truncated-value")) +@reset_core_stats_engine() +def test_error_group_name_callback(exc_class, group_name, high_security): + _callback_called.clear() + + if group_name is not None: + exact = {"user": {}, "intrinsic": {}, "agent": {"error.group.name": group_name}} + forgone = None + else: + exact = None + forgone = {"user": [], "intrinsic": [], "agent": ["error.group.name"]} + + @validate_error_trace_attributes( + callable_name(exc_class), forgone_params=forgone, exact_attrs=exact + ) + @validate_error_event_attributes(forgone_params=forgone, exact_attrs=exact) + @override_application_settings({"high_security": high_security}) + @background_task() + def _test(): + + try: + raise exc_class() + except Exception: + notice_error() + + assert _callback_called.is_set() + + try: + set_error_group_callback(error_group_callback) + _test() + finally: + set_error_group_callback(None) + + 
+@pytest.mark.parametrize("exc_class,group_name,high_security", [ + (ValueError, "value", False), + (ValueError, "value", True), + (TypeError, None, False), + (RuntimeError, None, False), + (IndexError, None, False), + (LookupError, None, False), + (ZeroDivisionError, _truncated_value[:255], False), +], ids=("standard", "high-security", "empty-string", "None-value", "list-type", "int-type", "truncated-value")) +@reset_core_stats_engine() +def test_error_group_name_callback_outside_transaction(exc_class, group_name, high_security): + _callback_called.clear() + + if group_name is not None: + exact = {"user": {}, "intrinsic": {}, "agent": {"error.group.name": group_name}} + forgone = None + else: + exact = None + forgone = {"user": [], "intrinsic": [], "agent": ["error.group.name"]} + + @validate_error_trace_attributes_outside_transaction( + callable_name(exc_class), forgone_params=forgone, exact_attrs=exact + ) + @validate_error_event_attributes_outside_transaction(forgone_params=forgone, exact_attrs=exact) + @override_application_settings({"high_security": high_security}) + def _test(): + try: + raise exc_class() + except Exception: + app = application() + notice_error(application=app) + + assert _callback_called.is_set() + + try: + set_error_group_callback(error_group_callback) + _test() + finally: + set_error_group_callback(None) + + +@pytest.mark.parametrize("transaction_decorator", [ + background_task(name="TestBackgroundTask"), + web_transaction(name="TestWebTransaction", host="localhost", port=1234, request_method="GET", request_path="/", headers=[],), + None, +], ids=("background_task", "web_transation", "outside_transaction")) +@reset_core_stats_engine() +def test_error_group_name_callback_attributes(transaction_decorator): + callback_errors = [] + _data = [] + + def callback(error, data): + def _callback(): + import types + _data.append(data) + txn = current_transaction() + + # Standard attributes + assert isinstance(error, Exception) + assert 
isinstance(data["traceback"], types.TracebackType) + assert data["error.class"] is type(error) + assert data["error.message"] == "text" + assert data["error.expected"] is False + + # All attributes should always be included, but set to None when not relevant. + if txn is None: # Outside transaction + assert data["transactionName"] is None + assert data["custom_params"] == {'notice_error_attribute': 1} + assert data["response.status"] is None + assert data["request.method"] is None + assert data["request.uri"] is None + elif txn.background_task: # Background task + assert data["transactionName"] == "TestBackgroundTask" + assert data["custom_params"] == {'notice_error_attribute': 1, 'txn_attribute': 2} + assert data["response.status"] is None + assert data["request.method"] is None + assert data["request.uri"] is None + else: # Web transaction + assert data["transactionName"] == "TestWebTransaction" + assert data["custom_params"] == {'notice_error_attribute': 1, 'txn_attribute': 2} + assert data["response.status"] == 200 + assert data["request.method"] == "GET" + assert data["request.uri"] == "/" + + try: + _callback() + except Exception: + callback_errors.append(sys.exc_info()) + raise + + def _test(): + try: + txn = current_transaction() + if txn: + txn.add_custom_attribute("txn_attribute", 2) + if not txn.background_task: + txn.process_response(200, []) + raise Exception("text") + except Exception: + app = application() if transaction_decorator is None else None # Only set outside transaction + notice_error(application=app, attributes={"notice_error_attribute": 1}) + + assert not callback_errors, "Callback inputs failed to validate.\nerror: %s\ndata: %s" % (traceback.format_exception(*callback_errors[0]), str(_data[0])) + + if transaction_decorator is not None: + _test = transaction_decorator(_test) # Manually decorate test function + + try: + set_error_group_callback(callback) + _test() + finally: + set_error_group_callback(None) diff --git 
a/tests/agent_unittests/test_harvest_loop.py b/tests/agent_unittests/test_harvest_loop.py index 0df575b9f..305622107 100644 --- a/tests/agent_unittests/test_harvest_loop.py +++ b/tests/agent_unittests/test_harvest_loop.py @@ -61,9 +61,8 @@ def transaction_node(request): expected=False, span_id=None, stack_trace="", + error_group_name=None, custom_params={}, - file_name=None, - line_number=None, source=None, ) diff --git a/tests/testing_support/fixtures.py b/tests/testing_support/fixtures.py index 8d05bf405..07de22cf0 100644 --- a/tests/testing_support/fixtures.py +++ b/tests/testing_support/fixtures.py @@ -742,6 +742,8 @@ def validate_error_trace_attributes_outside_transaction( forgone_params = forgone_params or {} exact_attrs = exact_attrs or {} + target_error = [] + @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.notice_error") def _validate_error_trace_attributes_outside_transaction(wrapped, instance, args, kwargs): try: @@ -749,15 +751,22 @@ def _validate_error_trace_attributes_outside_transaction(wrapped, instance, args except: raise else: - target_error = core_application_stats_engine_error(err_name) + target_error.append(core_application_stats_engine_error(err_name)) + + return result - check_error_attributes( - target_error.parameters, required_params, forgone_params, exact_attrs, is_transaction=False - ) + + @function_wrapper + def _validator_wrapper(wrapped, instance, args, kwargs): + result = _validate_error_trace_attributes_outside_transaction(wrapped)(*args, **kwargs) + + assert target_error and target_error[0] is not None, "No error found with name %s" % err_name + check_error_attributes(target_error[0].parameters, required_params, forgone_params, exact_attrs) return result - return _validate_error_trace_attributes_outside_transaction + + return _validator_wrapper def validate_error_event_attributes_outside_transaction( diff --git a/tests/testing_support/validators/validate_error_trace_attributes.py 
b/tests/testing_support/validators/validate_error_trace_attributes.py new file mode 100644 index 000000000..2b4ddf4ae --- /dev/null +++ b/tests/testing_support/validators/validate_error_trace_attributes.py @@ -0,0 +1,47 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from testing_support.fixtures import check_error_attributes + +from newrelic.common.object_wrapper import transient_function_wrapper, function_wrapper + + +def validate_error_trace_attributes(err_name, required_params=None, forgone_params=None, exact_attrs=None): + required_params = required_params or {} + forgone_params = forgone_params or {} + exact_attrs = exact_attrs or {} + + target_error = [] + + @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.record_transaction") + def _validate_error_trace_attributes(wrapped, instance, args, kwargs): + try: + result = wrapped(*args, **kwargs) + except Exception: + raise + else: + target_error.append(next((e for e in instance.error_data() if e.type == err_name), None)) + + return result + + @function_wrapper + def _validator_wrapper(wrapped, instance, args, kwargs): + result = _validate_error_trace_attributes(wrapped)(*args, **kwargs) + + assert target_error and target_error[0] is not None, "No error found with name %s" % err_name + check_error_attributes(target_error[0].parameters, required_params, forgone_params, exact_attrs) + + return result + + return _validator_wrapper From 
11c1cdae32dd255de62b92bf0a70daf9342ca506 Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Wed, 5 Apr 2023 09:36:43 -0700 Subject: [PATCH 065/108] Update Packages (#793) * Update urllib3 to v1.26.15 * Update six to v1.16.0 * Update coverage exclude for newrelic/packages * [Mega-Linter] Apply linters fixes * Drop removed package from urllib3 * Update pytest * Downgrade websockets version for old sanic testing --------- Co-authored-by: TimPansino --- newrelic/packages/six.py | 689 ++++++++++++++++-- newrelic/packages/urllib3/LICENSE.txt | 2 +- newrelic/packages/urllib3/__init__.py | 1 + newrelic/packages/urllib3/_version.py | 2 +- newrelic/packages/urllib3/connection.py | 15 +- newrelic/packages/urllib3/connectionpool.py | 44 +- .../packages/urllib3/contrib/appengine.py | 2 +- newrelic/packages/urllib3/contrib/ntlmpool.py | 4 +- .../packages/urllib3/contrib/pyopenssl.py | 17 +- .../urllib3/contrib/securetransport.py | 1 - .../packages/urllib3/packages/__init__.py | 5 - newrelic/packages/urllib3/packages/six.py | 1 - .../packages/ssl_match_hostname/__init__.py | 24 - newrelic/packages/urllib3/poolmanager.py | 1 + newrelic/packages/urllib3/response.py | 72 +- newrelic/packages/urllib3/util/connection.py | 3 +- newrelic/packages/urllib3/util/request.py | 5 +- newrelic/packages/urllib3/util/retry.py | 62 +- .../ssl_match_hostname.py} | 15 +- newrelic/packages/urllib3/util/timeout.py | 9 +- newrelic/packages/urllib3/util/url.py | 11 +- newrelic/packages/urllib3/util/wait.py | 1 - setup.py | 3 - tox.ini | 5 +- 24 files changed, 834 insertions(+), 160 deletions(-) delete mode 100644 newrelic/packages/urllib3/packages/ssl_match_hostname/__init__.py rename newrelic/packages/urllib3/{packages/ssl_match_hostname/_implementation.py => util/ssl_match_hostname.py} (92%) diff --git a/newrelic/packages/six.py b/newrelic/packages/six.py index 8a877b174..4e15675d8 100644 --- a/newrelic/packages/six.py +++ b/newrelic/packages/six.py @@ 
-1,6 +1,4 @@ -"""Utilities for writing code that runs on Python 2 and 3""" - -# Copyright (c) 2010-2013 Benjamin Peterson +# Copyright (c) 2010-2020 Benjamin Peterson # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal @@ -20,17 +18,24 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. +"""Utilities for writing code that runs on Python 2 and 3""" + +from __future__ import absolute_import + +import functools +import itertools import operator import sys import types __author__ = "Benjamin Peterson " -__version__ = "1.3.0" +__version__ = "1.16.0" # Useful for very coarse version differentiation. PY2 = sys.version_info[0] == 2 PY3 = sys.version_info[0] == 3 +PY34 = sys.version_info[0:2] >= (3, 4) if PY3: string_types = str, @@ -53,6 +58,7 @@ else: # It's possible to have sizeof(long) != sizeof(Py_ssize_t). class X(object): + def __len__(self): return 1 << 31 try: @@ -65,6 +71,11 @@ def __len__(self): MAXSIZE = int((1 << 63) - 1) del X +if PY34: + from importlib.util import spec_from_loader +else: + spec_from_loader = None + def _add_doc(func, doc): """Add documentation to a function.""" @@ -84,9 +95,13 @@ def __init__(self, name): def __get__(self, obj, tp): result = self._resolve() - setattr(obj, self.name, result) - # This is a bit ugly, but it avoids running this again. - delattr(tp, self.name) + setattr(obj, self.name, result) # Invokes __set__. + try: + # This is a bit ugly, but it avoids running this again by + # removing this descriptor. 
+ delattr(obj.__class__, self.name) + except AttributeError: + pass return result @@ -104,6 +119,27 @@ def __init__(self, name, old, new=None): def _resolve(self): return _import_module(self.mod) + def __getattr__(self, attr): + _module = self._resolve() + value = getattr(_module, attr) + setattr(self, attr, value) + return value + + +class _LazyModule(types.ModuleType): + + def __init__(self, name): + super(_LazyModule, self).__init__(name) + self.__doc__ = self.__class__.__doc__ + + def __dir__(self): + attrs = ["__doc__", "__name__"] + attrs += [attr.name for attr in self._moved_attributes] + return attrs + + # Subclasses should override this + _moved_attributes = [] + class MovedAttribute(_LazyDescr): @@ -130,34 +166,126 @@ def _resolve(self): return getattr(module, self.attr) +class _SixMetaPathImporter(object): + + """ + A meta path importer to import six.moves and its submodules. + + This class implements a PEP302 finder and loader. It should be compatible + with Python 2.5 and all existing versions of Python3 + """ + + def __init__(self, six_module_name): + self.name = six_module_name + self.known_modules = {} + + def _add_module(self, mod, *fullnames): + for fullname in fullnames: + self.known_modules[self.name + "." + fullname] = mod + + def _get_module(self, fullname): + return self.known_modules[self.name + "." 
+ fullname] + + def find_module(self, fullname, path=None): + if fullname in self.known_modules: + return self + return None + + def find_spec(self, fullname, path, target=None): + if fullname in self.known_modules: + return spec_from_loader(fullname, self) + return None + + def __get_module(self, fullname): + try: + return self.known_modules[fullname] + except KeyError: + raise ImportError("This loader does not know module " + fullname) + + def load_module(self, fullname): + try: + # in case of a reload + return sys.modules[fullname] + except KeyError: + pass + mod = self.__get_module(fullname) + if isinstance(mod, MovedModule): + mod = mod._resolve() + else: + mod.__loader__ = self + sys.modules[fullname] = mod + return mod + + def is_package(self, fullname): + """ + Return true, if the named module is a package. + + We need this method to get correct spec objects with + Python 3.4 (see PEP451) + """ + return hasattr(self.__get_module(fullname), "__path__") + + def get_code(self, fullname): + """Return None + + Required, if is_package is implemented""" + self.__get_module(fullname) # eventually raises ImportError + return None + get_source = get_code # same as get_code + + def create_module(self, spec): + return self.load_module(spec.name) + + def exec_module(self, module): + pass + +_importer = _SixMetaPathImporter(__name__) + + +class _MovedItems(_LazyModule): -class _MovedItems(types.ModuleType): """Lazy loading of moved objects""" + __path__ = [] # mark as package _moved_attributes = [ MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), + MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), + MovedAttribute("intern", "__builtin__", "sys"), MovedAttribute("map", "itertools", "builtins", "imap", "map"), + MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), + 
MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), + MovedAttribute("getoutput", "commands", "subprocess"), MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), - MovedAttribute("reload_module", "__builtin__", "imp", "reload"), + MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), MovedAttribute("reduce", "__builtin__", "functools"), + MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), MovedAttribute("StringIO", "StringIO", "io"), + MovedAttribute("UserDict", "UserDict", "collections"), + MovedAttribute("UserList", "UserList", "collections"), + MovedAttribute("UserString", "UserString", "collections"), MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), - + MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), MovedModule("builtins", "__builtin__"), MovedModule("configparser", "ConfigParser"), + MovedModule("collections_abc", "collections", "collections.abc" if sys.version_info >= (3, 3) else "collections"), MovedModule("copyreg", "copy_reg"), + MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), + MovedModule("dbm_ndbm", "dbm", "dbm.ndbm"), + MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread" if sys.version_info < (3, 9) else "_thread"), MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), MovedModule("http_cookies", "Cookie", "http.cookies"), MovedModule("html_entities", "htmlentitydefs", "html.entities"), MovedModule("html_parser", "HTMLParser", "html.parser"), MovedModule("http_client", "httplib", "http.client"), + MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), + MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"), MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), + MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), MovedModule("email_mime_text", 
"email.MIMEText", "email.mime.text"), - MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), @@ -165,12 +293,14 @@ class _MovedItems(types.ModuleType): MovedModule("queue", "Queue"), MovedModule("reprlib", "repr"), MovedModule("socketserver", "SocketServer"), + MovedModule("_thread", "thread", "_thread"), MovedModule("tkinter", "Tkinter"), MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), MovedModule("tkinter_tix", "Tix", "tkinter.tix"), + MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), MovedModule("tkinter_colorchooser", "tkColorChooser", @@ -182,14 +312,199 @@ class _MovedItems(types.ModuleType): MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", "tkinter.simpledialog"), + MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), + MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), + MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), - MovedModule("winreg", "_winreg"), + MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), + MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), ] +# Add windows specific modules. 
+if sys.platform == "win32": + _moved_attributes += [ + MovedModule("winreg", "_winreg"), + ] + for attr in _moved_attributes: setattr(_MovedItems, attr.name, attr) + if isinstance(attr, MovedModule): + _importer._add_module(attr, "moves." + attr.name) +del attr + +_MovedItems._moved_attributes = _moved_attributes + +moves = _MovedItems(__name__ + ".moves") +_importer._add_module(moves, "moves") + + +class Module_six_moves_urllib_parse(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_parse""" + + +_urllib_parse_moved_attributes = [ + MovedAttribute("ParseResult", "urlparse", "urllib.parse"), + MovedAttribute("SplitResult", "urlparse", "urllib.parse"), + MovedAttribute("parse_qs", "urlparse", "urllib.parse"), + MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), + MovedAttribute("urldefrag", "urlparse", "urllib.parse"), + MovedAttribute("urljoin", "urlparse", "urllib.parse"), + MovedAttribute("urlparse", "urlparse", "urllib.parse"), + MovedAttribute("urlsplit", "urlparse", "urllib.parse"), + MovedAttribute("urlunparse", "urlparse", "urllib.parse"), + MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), + MovedAttribute("quote", "urllib", "urllib.parse"), + MovedAttribute("quote_plus", "urllib", "urllib.parse"), + MovedAttribute("unquote", "urllib", "urllib.parse"), + MovedAttribute("unquote_plus", "urllib", "urllib.parse"), + MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"), + MovedAttribute("urlencode", "urllib", "urllib.parse"), + MovedAttribute("splitquery", "urllib", "urllib.parse"), + MovedAttribute("splittag", "urllib", "urllib.parse"), + MovedAttribute("splituser", "urllib", "urllib.parse"), + MovedAttribute("splitvalue", "urllib", "urllib.parse"), + MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), + MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), + MovedAttribute("uses_params", "urlparse", "urllib.parse"), + MovedAttribute("uses_query", "urlparse", 
"urllib.parse"), + MovedAttribute("uses_relative", "urlparse", "urllib.parse"), +] +for attr in _urllib_parse_moved_attributes: + setattr(Module_six_moves_urllib_parse, attr.name, attr) del attr -moves = sys.modules[__name__ + ".moves"] = _MovedItems("moves") +Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes + +_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), + "moves.urllib_parse", "moves.urllib.parse") + + +class Module_six_moves_urllib_error(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_error""" + + +_urllib_error_moved_attributes = [ + MovedAttribute("URLError", "urllib2", "urllib.error"), + MovedAttribute("HTTPError", "urllib2", "urllib.error"), + MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), +] +for attr in _urllib_error_moved_attributes: + setattr(Module_six_moves_urllib_error, attr.name, attr) +del attr + +Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes + +_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), + "moves.urllib_error", "moves.urllib.error") + + +class Module_six_moves_urllib_request(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_request""" + + +_urllib_request_moved_attributes = [ + MovedAttribute("urlopen", "urllib2", "urllib.request"), + MovedAttribute("install_opener", "urllib2", "urllib.request"), + MovedAttribute("build_opener", "urllib2", "urllib.request"), + MovedAttribute("pathname2url", "urllib", "urllib.request"), + MovedAttribute("url2pathname", "urllib", "urllib.request"), + MovedAttribute("getproxies", "urllib", "urllib.request"), + MovedAttribute("Request", "urllib2", "urllib.request"), + MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), + MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), + 
MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"), + MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), + MovedAttribute("BaseHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), + MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), + MovedAttribute("FileHandler", "urllib2", "urllib.request"), + MovedAttribute("FTPHandler", "urllib2", "urllib.request"), + MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), + MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), + MovedAttribute("urlretrieve", "urllib", "urllib.request"), + MovedAttribute("urlcleanup", "urllib", "urllib.request"), + MovedAttribute("URLopener", "urllib", "urllib.request"), + MovedAttribute("FancyURLopener", "urllib", "urllib.request"), + MovedAttribute("proxy_bypass", "urllib", "urllib.request"), + MovedAttribute("parse_http_list", "urllib2", "urllib.request"), + MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"), +] +for attr in _urllib_request_moved_attributes: + setattr(Module_six_moves_urllib_request, attr.name, attr) +del attr + +Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes + +_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), + "moves.urllib_request", 
"moves.urllib.request") + + +class Module_six_moves_urllib_response(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_response""" + + +_urllib_response_moved_attributes = [ + MovedAttribute("addbase", "urllib", "urllib.response"), + MovedAttribute("addclosehook", "urllib", "urllib.response"), + MovedAttribute("addinfo", "urllib", "urllib.response"), + MovedAttribute("addinfourl", "urllib", "urllib.response"), +] +for attr in _urllib_response_moved_attributes: + setattr(Module_six_moves_urllib_response, attr.name, attr) +del attr + +Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes + +_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), + "moves.urllib_response", "moves.urllib.response") + + +class Module_six_moves_urllib_robotparser(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_robotparser""" + + +_urllib_robotparser_moved_attributes = [ + MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), +] +for attr in _urllib_robotparser_moved_attributes: + setattr(Module_six_moves_urllib_robotparser, attr.name, attr) +del attr + +Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes + +_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), + "moves.urllib_robotparser", "moves.urllib.robotparser") + + +class Module_six_moves_urllib(types.ModuleType): + + """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" + __path__ = [] # mark as package + parse = _importer._get_module("moves.urllib_parse") + error = _importer._get_module("moves.urllib_error") + request = _importer._get_module("moves.urllib_request") + response = _importer._get_module("moves.urllib_response") + robotparser = _importer._get_module("moves.urllib_robotparser") + + def __dir__(self): + return ['parse', 'error', 'request', 'response', 'robotparser'] + 
+_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), + "moves.urllib") def add_move(move): @@ -216,11 +531,6 @@ def remove_move(name): _func_code = "__code__" _func_defaults = "__defaults__" _func_globals = "__globals__" - - _iterkeys = "keys" - _itervalues = "values" - _iteritems = "items" - _iterlists = "lists" else: _meth_func = "im_func" _meth_self = "im_self" @@ -230,11 +540,6 @@ def remove_move(name): _func_defaults = "func_defaults" _func_globals = "func_globals" - _iterkeys = "iterkeys" - _itervalues = "itervalues" - _iteritems = "iteritems" - _iterlists = "iterlists" - try: advance_iterator = next @@ -257,6 +562,9 @@ def get_unbound_function(unbound): create_bound_method = types.MethodType + def create_unbound_method(func, cls): + return func + Iterator = object else: def get_unbound_function(unbound): @@ -265,6 +573,9 @@ def get_unbound_function(unbound): def create_bound_method(func, obj): return types.MethodType(func, obj, obj.__class__) + def create_unbound_method(func, cls): + return types.MethodType(func, None, cls) + class Iterator(object): def next(self): @@ -283,73 +594,132 @@ def next(self): get_function_globals = operator.attrgetter(_func_globals) -def iterkeys(d, **kw): - """Return an iterator over the keys of a dictionary.""" - return iter(getattr(d, _iterkeys)(**kw)) +if PY3: + def iterkeys(d, **kw): + return iter(d.keys(**kw)) + + def itervalues(d, **kw): + return iter(d.values(**kw)) -def itervalues(d, **kw): - """Return an iterator over the values of a dictionary.""" - return iter(getattr(d, _itervalues)(**kw)) + def iteritems(d, **kw): + return iter(d.items(**kw)) -def iteritems(d, **kw): - """Return an iterator over the (key, value) pairs of a dictionary.""" - return iter(getattr(d, _iteritems)(**kw)) + def iterlists(d, **kw): + return iter(d.lists(**kw)) -def iterlists(d, **kw): - """Return an iterator over the (key, [values]) pairs of a dictionary.""" - return iter(getattr(d, _iterlists)(**kw)) + viewkeys = 
operator.methodcaller("keys") + + viewvalues = operator.methodcaller("values") + + viewitems = operator.methodcaller("items") +else: + def iterkeys(d, **kw): + return d.iterkeys(**kw) + + def itervalues(d, **kw): + return d.itervalues(**kw) + + def iteritems(d, **kw): + return d.iteritems(**kw) + + def iterlists(d, **kw): + return d.iterlists(**kw) + + viewkeys = operator.methodcaller("viewkeys") + + viewvalues = operator.methodcaller("viewvalues") + + viewitems = operator.methodcaller("viewitems") + +_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") +_add_doc(itervalues, "Return an iterator over the values of a dictionary.") +_add_doc(iteritems, + "Return an iterator over the (key, value) pairs of a dictionary.") +_add_doc(iterlists, + "Return an iterator over the (key, [values]) pairs of a dictionary.") if PY3: def b(s): return s.encode("latin-1") + def u(s): return s unichr = chr - if sys.version_info[1] <= 1: - def int2byte(i): - return bytes((i,)) - else: - # This is about 2x faster than the implementation above on 3.2+ - int2byte = operator.methodcaller("to_bytes", 1, "big") + import struct + int2byte = struct.Struct(">B").pack + del struct byte2int = operator.itemgetter(0) indexbytes = operator.getitem iterbytes = iter import io StringIO = io.StringIO BytesIO = io.BytesIO + del io + _assertCountEqual = "assertCountEqual" + if sys.version_info[1] <= 1: + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" + _assertNotRegex = "assertNotRegexpMatches" + else: + _assertRaisesRegex = "assertRaisesRegex" + _assertRegex = "assertRegex" + _assertNotRegex = "assertNotRegex" else: def b(s): return s + # Workaround for standalone backslash + def u(s): - return unicode(s, "unicode_escape") + return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") unichr = unichr int2byte = chr + def byte2int(bs): return ord(bs[0]) + def indexbytes(buf, i): return ord(buf[i]) - def iterbytes(buf): - return (ord(byte) for byte in buf) 
+ iterbytes = functools.partial(itertools.imap, ord) import StringIO StringIO = BytesIO = StringIO.StringIO + _assertCountEqual = "assertItemsEqual" + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" + _assertNotRegex = "assertNotRegexpMatches" _add_doc(b, """Byte literal""") _add_doc(u, """Text literal""") -if PY3: - import builtins - exec_ = getattr(builtins, "exec") +def assertCountEqual(self, *args, **kwargs): + return getattr(self, _assertCountEqual)(*args, **kwargs) - def reraise(tp, value, tb=None): - if value.__traceback__ is not tb: - raise value.with_traceback(tb) - raise value +def assertRaisesRegex(self, *args, **kwargs): + return getattr(self, _assertRaisesRegex)(*args, **kwargs) + +def assertRegex(self, *args, **kwargs): + return getattr(self, _assertRegex)(*args, **kwargs) - print_ = getattr(builtins, "print") - del builtins + +def assertNotRegex(self, *args, **kwargs): + return getattr(self, _assertNotRegex)(*args, **kwargs) + + +if PY3: + exec_ = getattr(moves.builtins, "exec") + + def reraise(tp, value, tb=None): + try: + if value is None: + value = tp() + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + finally: + value = None + tb = None else: def exec_(_code_, _globs_=None, _locs_=None): @@ -364,20 +734,45 @@ def exec_(_code_, _globs_=None, _locs_=None): _locs_ = _globs_ exec("""exec _code_ in _globs_, _locs_""") - exec_("""def reraise(tp, value, tb=None): - raise tp, value, tb + try: + raise tp, value, tb + finally: + tb = None +""") + + +if sys.version_info[:2] > (3,): + exec_("""def raise_from(value, from_value): + try: + raise value from from_value + finally: + value = None """) +else: + def raise_from(value, from_value): + raise value +print_ = getattr(moves.builtins, "print", None) +if print_ is None: def print_(*args, **kwargs): - """The new-style print function.""" + """The new-style print function for Python 2.4 and 2.5.""" fp = kwargs.pop("file", sys.stdout) if fp is 
None: return + def write(data): if not isinstance(data, basestring): data = str(data) + # If the file has an encoding, encode unicode with it. + if (isinstance(fp, file) and + isinstance(data, unicode) and + fp.encoding is not None): + errors = getattr(fp, "errors", None) + if errors is None: + errors = "strict" + data = data.encode(fp.encoding, errors) fp.write(data) want_unicode = False sep = kwargs.pop("sep", None) @@ -414,10 +809,190 @@ def write(data): write(sep) write(arg) write(end) +if sys.version_info[:2] < (3, 3): + _print = print_ + + def print_(*args, **kwargs): + fp = kwargs.get("file", sys.stdout) + flush = kwargs.pop("flush", False) + _print(*args, **kwargs) + if flush and fp is not None: + fp.flush() _add_doc(reraise, """Reraise an exception.""") +if sys.version_info[0:2] < (3, 4): + # This does exactly the same what the :func:`py3:functools.update_wrapper` + # function does on Python versions after 3.2. It sets the ``__wrapped__`` + # attribute on ``wrapper`` object and it doesn't raise an error if any of + # the attributes mentioned in ``assigned`` and ``updated`` are missing on + # ``wrapped`` object. 
+ def _update_wrapper(wrapper, wrapped, + assigned=functools.WRAPPER_ASSIGNMENTS, + updated=functools.WRAPPER_UPDATES): + for attr in assigned: + try: + value = getattr(wrapped, attr) + except AttributeError: + continue + else: + setattr(wrapper, attr, value) + for attr in updated: + getattr(wrapper, attr).update(getattr(wrapped, attr, {})) + wrapper.__wrapped__ = wrapped + return wrapper + _update_wrapper.__doc__ = functools.update_wrapper.__doc__ + + def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, + updated=functools.WRAPPER_UPDATES): + return functools.partial(_update_wrapper, wrapped=wrapped, + assigned=assigned, updated=updated) + wraps.__doc__ = functools.wraps.__doc__ + +else: + wraps = functools.wraps + def with_metaclass(meta, *bases): """Create a base class with a metaclass.""" - return meta("NewBase", bases, {}) + # This requires a bit of explanation: the basic idea is to make a dummy + # metaclass for one level of class instantiation that replaces itself with + # the actual metaclass. + class metaclass(type): + + def __new__(cls, name, this_bases, d): + if sys.version_info[:2] >= (3, 7): + # This version introduced PEP 560 that requires a bit + # of extra care (we mimic what is done by __build_class__). 
+ resolved_bases = types.resolve_bases(bases) + if resolved_bases is not bases: + d['__orig_bases__'] = bases + else: + resolved_bases = bases + return meta(name, resolved_bases, d) + + @classmethod + def __prepare__(cls, name, this_bases): + return meta.__prepare__(name, bases) + return type.__new__(metaclass, 'temporary_class', (), {}) + + +def add_metaclass(metaclass): + """Class decorator for creating a class with a metaclass.""" + def wrapper(cls): + orig_vars = cls.__dict__.copy() + slots = orig_vars.get('__slots__') + if slots is not None: + if isinstance(slots, str): + slots = [slots] + for slots_var in slots: + orig_vars.pop(slots_var) + orig_vars.pop('__dict__', None) + orig_vars.pop('__weakref__', None) + if hasattr(cls, '__qualname__'): + orig_vars['__qualname__'] = cls.__qualname__ + return metaclass(cls.__name__, cls.__bases__, orig_vars) + return wrapper + + +def ensure_binary(s, encoding='utf-8', errors='strict'): + """Coerce **s** to six.binary_type. + + For Python 2: + - `unicode` -> encoded to `str` + - `str` -> `str` + + For Python 3: + - `str` -> encoded to `bytes` + - `bytes` -> `bytes` + """ + if isinstance(s, binary_type): + return s + if isinstance(s, text_type): + return s.encode(encoding, errors) + raise TypeError("not expecting type '%s'" % type(s)) + + +def ensure_str(s, encoding='utf-8', errors='strict'): + """Coerce *s* to `str`. + + For Python 2: + - `unicode` -> encoded to `str` + - `str` -> `str` + + For Python 3: + - `str` -> `str` + - `bytes` -> decoded to `str` + """ + # Optimization: Fast return for the common case. + if type(s) is str: + return s + if PY2 and isinstance(s, text_type): + return s.encode(encoding, errors) + elif PY3 and isinstance(s, binary_type): + return s.decode(encoding, errors) + elif not isinstance(s, (text_type, binary_type)): + raise TypeError("not expecting type '%s'" % type(s)) + return s + + +def ensure_text(s, encoding='utf-8', errors='strict'): + """Coerce *s* to six.text_type. 
+ + For Python 2: + - `unicode` -> `unicode` + - `str` -> `unicode` + + For Python 3: + - `str` -> `str` + - `bytes` -> decoded to `str` + """ + if isinstance(s, binary_type): + return s.decode(encoding, errors) + elif isinstance(s, text_type): + return s + else: + raise TypeError("not expecting type '%s'" % type(s)) + + +def python_2_unicode_compatible(klass): + """ + A class decorator that defines __unicode__ and __str__ methods under Python 2. + Under Python 3 it does nothing. + + To support Python 2 and 3 with a single code base, define a __str__ method + returning text and apply this decorator to the class. + """ + if PY2: + if '__str__' not in klass.__dict__: + raise ValueError("@python_2_unicode_compatible cannot be applied " + "to %s because it doesn't define __str__()." % + klass.__name__) + klass.__unicode__ = klass.__str__ + klass.__str__ = lambda self: self.__unicode__().encode('utf-8') + return klass + + +# Complete the moves implementation. +# This code is at the end of this module to speed up module loading. +# Turn this module into a package. +__path__ = [] # required for PEP 302 and PEP 451 +__package__ = __name__ # see PEP 366 @ReservedAssignment +if globals().get("__spec__") is not None: + __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable +# Remove other six meta path importers, since they cause problems. This can +# happen if six is removed from sys.modules and then reloaded. (Setuptools does +# this for some reason.) +if sys.meta_path: + for i, importer in enumerate(sys.meta_path): + # Here's some real nastiness: Another "instance" of the six module might + # be floating around. Therefore, we can't use isinstance() to check for + # the six meta path importer, since the other six instance will have + # inserted an importer with different class. 
+ if (type(importer).__name__ == "_SixMetaPathImporter" and + importer.name == __name__): + del sys.meta_path[i] + break + del i, importer +# Finally, add the importer to the meta path import hook. +sys.meta_path.append(_importer) diff --git a/newrelic/packages/urllib3/LICENSE.txt b/newrelic/packages/urllib3/LICENSE.txt index c89cf27b8..429a1767e 100644 --- a/newrelic/packages/urllib3/LICENSE.txt +++ b/newrelic/packages/urllib3/LICENSE.txt @@ -1,6 +1,6 @@ MIT License -Copyright (c) 2008-2019 Andrey Petrov and contributors (see CONTRIBUTORS.txt) +Copyright (c) 2008-2020 Andrey Petrov and contributors (see CONTRIBUTORS.txt) Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/newrelic/packages/urllib3/__init__.py b/newrelic/packages/urllib3/__init__.py index fe86b59d7..c8c7ce691 100644 --- a/newrelic/packages/urllib3/__init__.py +++ b/newrelic/packages/urllib3/__init__.py @@ -19,6 +19,7 @@ from .util.timeout import Timeout from .util.url import get_host + __author__ = "Andrey Petrov (andrey.petrov@shazow.net)" __license__ = "MIT" __version__ = __version__ diff --git a/newrelic/packages/urllib3/_version.py b/newrelic/packages/urllib3/_version.py index 5141d980b..e12dd0e78 100644 --- a/newrelic/packages/urllib3/_version.py +++ b/newrelic/packages/urllib3/_version.py @@ -1,2 +1,2 @@ # This file is protected via CODEOWNERS -__version__ = "1.26.7" +__version__ = "1.26.15" diff --git a/newrelic/packages/urllib3/connection.py b/newrelic/packages/urllib3/connection.py index 60f70f794..54b96b191 100644 --- a/newrelic/packages/urllib3/connection.py +++ b/newrelic/packages/urllib3/connection.py @@ -51,7 +51,6 @@ class BrokenPipeError(Exception): SubjectAltNameWarning, SystemTimeWarning, ) -from .packages.ssl_match_hostname import CertificateError, match_hostname from .util import SKIP_HEADER, SKIPPABLE_HEADERS, connection from .util.ssl_ import ( 
assert_fingerprint, @@ -61,6 +60,7 @@ class BrokenPipeError(Exception): resolve_ssl_version, ssl_wrap_socket, ) +from .util.ssl_match_hostname import CertificateError, match_hostname log = logging.getLogger(__name__) @@ -68,7 +68,7 @@ class BrokenPipeError(Exception): # When it comes time to update this value as a part of regular maintenance # (ie test_recent_date is failing) update it to ~6 months before the current date. -RECENT_DATE = datetime.date(2020, 7, 1) +RECENT_DATE = datetime.date(2022, 1, 1) _CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]") @@ -229,6 +229,11 @@ def putheader(self, header, *values): ) def request(self, method, url, body=None, headers=None): + # Update the inner socket's timeout value to send the request. + # This only triggers if the connection is re-used. + if getattr(self, "sock", None) is not None: + self.sock.settimeout(self.timeout) + if headers is None: headers = {} else: @@ -355,17 +360,15 @@ def set_cert( def connect(self): # Add certificate verification - conn = self._new_conn() + self.sock = conn = self._new_conn() hostname = self.host tls_in_tls = False if self._is_using_tunnel(): if self.tls_in_tls_required: - conn = self._connect_tls_proxy(hostname, conn) + self.sock = conn = self._connect_tls_proxy(hostname, conn) tls_in_tls = True - self.sock = conn - # Calls self._set_hostport(), so self.host is # self._tunnel_host below. 
self._tunnel() diff --git a/newrelic/packages/urllib3/connectionpool.py b/newrelic/packages/urllib3/connectionpool.py index 8dccf4bc2..c23d736b1 100644 --- a/newrelic/packages/urllib3/connectionpool.py +++ b/newrelic/packages/urllib3/connectionpool.py @@ -2,6 +2,7 @@ import errno import logging +import re import socket import sys import warnings @@ -35,7 +36,6 @@ ) from .packages import six from .packages.six.moves import queue -from .packages.ssl_match_hostname import CertificateError from .request import RequestMethods from .response import HTTPResponse from .util.connection import is_connection_dropped @@ -44,6 +44,7 @@ from .util.request import set_file_position from .util.response import assert_header_parsing from .util.retry import Retry +from .util.ssl_match_hostname import CertificateError from .util.timeout import Timeout from .util.url import Url, _encode_target from .util.url import _normalize_host as normalize_host @@ -301,8 +302,11 @@ def _put_conn(self, conn): pass except queue.Full: # This should never happen if self.block == True - log.warning("Connection pool is full, discarding connection: %s", self.host) - + log.warning( + "Connection pool is full, discarding connection: %s. Connection pool size: %s", + self.host, + self.pool.qsize(), + ) # Connection never got put back into the pool, close it. if conn: conn.close() @@ -375,7 +379,7 @@ def _make_request( timeout_obj = self._get_timeout(timeout) timeout_obj.start_connect() - conn.timeout = timeout_obj.connect_timeout + conn.timeout = Timeout.resolve_default_timeout(timeout_obj.connect_timeout) # Trigger any extra validation we need to do. try: @@ -745,7 +749,35 @@ def urlopen( # Discard the connection for these exceptions. It will be # replaced during the next _get_conn() call. 
clean_exit = False - if isinstance(e, (BaseSSLError, CertificateError)): + + def _is_ssl_error_message_from_http_proxy(ssl_error): + # We're trying to detect the message 'WRONG_VERSION_NUMBER' but + # SSLErrors are kinda all over the place when it comes to the message, + # so we try to cover our bases here! + message = " ".join(re.split("[^a-z]", str(ssl_error).lower())) + return ( + "wrong version number" in message or "unknown protocol" in message + ) + + # Try to detect a common user error with proxies which is to + # set an HTTP proxy to be HTTPS when it should be 'http://' + # (ie {'http': 'http://proxy', 'https': 'https://proxy'}) + # Instead we add a nice error message and point to a URL. + if ( + isinstance(e, BaseSSLError) + and self.proxy + and _is_ssl_error_message_from_http_proxy(e) + and conn.proxy + and conn.proxy.scheme == "https" + ): + e = ProxyError( + "Your proxy appears to only use HTTP and not HTTPS, " + "try changing your proxy URL to be HTTP. See: " + "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html" + "#https-proxy-error-http-proxy", + SSLError(e), + ) + elif isinstance(e, (BaseSSLError, CertificateError)): e = SSLError(e) elif isinstance(e, (SocketError, NewConnectionError)) and self.proxy: e = ProxyError("Cannot connect to proxy.", e) @@ -830,7 +862,7 @@ def urlopen( ) # Check if we should retry the HTTP response. - has_retry_after = bool(response.getheader("Retry-After")) + has_retry_after = bool(response.headers.get("Retry-After")) if retries.is_retry(method, response.status, has_retry_after): try: retries = retries.increment(method, url, response=response, _pool=self) diff --git a/newrelic/packages/urllib3/contrib/appengine.py b/newrelic/packages/urllib3/contrib/appengine.py index f91bdd6e7..a5a6d9103 100644 --- a/newrelic/packages/urllib3/contrib/appengine.py +++ b/newrelic/packages/urllib3/contrib/appengine.py @@ -224,7 +224,7 @@ def urlopen( ) # Check if we should retry the HTTP response. 
- has_retry_after = bool(http_response.getheader("Retry-After")) + has_retry_after = bool(http_response.headers.get("Retry-After")) if retries.is_retry(method, http_response.status, has_retry_after): retries = retries.increment(method, url, response=http_response, _pool=self) log.debug("Retry: %s", url) diff --git a/newrelic/packages/urllib3/contrib/ntlmpool.py b/newrelic/packages/urllib3/contrib/ntlmpool.py index 41a8fd174..471665754 100644 --- a/newrelic/packages/urllib3/contrib/ntlmpool.py +++ b/newrelic/packages/urllib3/contrib/ntlmpool.py @@ -69,7 +69,7 @@ def _new_conn(self): log.debug("Request headers: %s", headers) conn.request("GET", self.authurl, None, headers) res = conn.getresponse() - reshdr = dict(res.getheaders()) + reshdr = dict(res.headers) log.debug("Response status: %s %s", res.status, res.reason) log.debug("Response headers: %s", reshdr) log.debug("Response data: %s [...]", res.read(100)) @@ -101,7 +101,7 @@ def _new_conn(self): conn.request("GET", self.authurl, None, headers) res = conn.getresponse() log.debug("Response status: %s %s", res.status, res.reason) - log.debug("Response headers: %s", dict(res.getheaders())) + log.debug("Response headers: %s", dict(res.headers)) log.debug("Response data: %s [...]", res.read()[:100]) if res.status != 200: if res.status == 401: diff --git a/newrelic/packages/urllib3/contrib/pyopenssl.py b/newrelic/packages/urllib3/contrib/pyopenssl.py index def83afdb..1ed214b1d 100644 --- a/newrelic/packages/urllib3/contrib/pyopenssl.py +++ b/newrelic/packages/urllib3/contrib/pyopenssl.py @@ -47,10 +47,10 @@ """ from __future__ import absolute_import +import OpenSSL.crypto import OpenSSL.SSL from cryptography import x509 from cryptography.hazmat.backends.openssl import backend as openssl_backend -from cryptography.hazmat.backends.openssl.x509 import _Certificate try: from cryptography.x509 import UnsupportedExtension @@ -73,11 +73,20 @@ class UnsupportedExtension(Exception): import logging import ssl import sys +import 
warnings from .. import util from ..packages import six from ..util.ssl_ import PROTOCOL_TLS_CLIENT +warnings.warn( + "'urllib3.contrib.pyopenssl' module is deprecated and will be removed " + "in a future release of urllib3 2.x. Read more in this issue: " + "https://github.com/urllib3/urllib3/issues/2680", + category=DeprecationWarning, + stacklevel=2, +) + __all__ = ["inject_into_urllib3", "extract_from_urllib3"] # SNI always works. @@ -219,9 +228,8 @@ def get_subj_alt_name(peer_cert): if hasattr(peer_cert, "to_cryptography"): cert = peer_cert.to_cryptography() else: - # This is technically using private APIs, but should work across all - # relevant versions before PyOpenSSL got a proper API for this. - cert = _Certificate(openssl_backend, peer_cert._x509) + der = OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_ASN1, peer_cert) + cert = x509.load_der_x509_certificate(der, openssl_backend) # We want to find the SAN extension. Ask Cryptography to locate it (it's # faster than looping in Python) @@ -406,7 +414,6 @@ def makefile(self, mode, bufsize=-1): self._makefile_refs += 1 return _fileobject(self, mode, bufsize, close=True) - else: # Platform-specific: Python 3 makefile = backport_makefile diff --git a/newrelic/packages/urllib3/contrib/securetransport.py b/newrelic/packages/urllib3/contrib/securetransport.py index 554c015fe..6c46a3b9f 100644 --- a/newrelic/packages/urllib3/contrib/securetransport.py +++ b/newrelic/packages/urllib3/contrib/securetransport.py @@ -770,7 +770,6 @@ def makefile(self, mode, bufsize=-1): self._makefile_refs += 1 return _fileobject(self, mode, bufsize, close=True) - else: # Platform-specific: Python 3 def makefile(self, mode="r", buffering=None, *args, **kwargs): diff --git a/newrelic/packages/urllib3/packages/__init__.py b/newrelic/packages/urllib3/packages/__init__.py index fce4caa65..e69de29bb 100644 --- a/newrelic/packages/urllib3/packages/__init__.py +++ b/newrelic/packages/urllib3/packages/__init__.py @@ -1,5 +0,0 @@ -from 
__future__ import absolute_import - -from . import ssl_match_hostname - -__all__ = ("ssl_match_hostname",) diff --git a/newrelic/packages/urllib3/packages/six.py b/newrelic/packages/urllib3/packages/six.py index ba50acb06..f099a3dcd 100644 --- a/newrelic/packages/urllib3/packages/six.py +++ b/newrelic/packages/urllib3/packages/six.py @@ -772,7 +772,6 @@ def reraise(tp, value, tb=None): value = None tb = None - else: def exec_(_code_, _globs_=None, _locs_=None): diff --git a/newrelic/packages/urllib3/packages/ssl_match_hostname/__init__.py b/newrelic/packages/urllib3/packages/ssl_match_hostname/__init__.py deleted file mode 100644 index ef3fde520..000000000 --- a/newrelic/packages/urllib3/packages/ssl_match_hostname/__init__.py +++ /dev/null @@ -1,24 +0,0 @@ -import sys - -try: - # Our match_hostname function is the same as 3.10's, so we only want to - # import the match_hostname function if it's at least that good. - # We also fallback on Python 3.10+ because our code doesn't emit - # deprecation warnings and is the same as Python 3.10 otherwise. - if sys.version_info < (3, 5) or sys.version_info >= (3, 10): - raise ImportError("Fallback to vendored code") - - from ssl import CertificateError, match_hostname -except ImportError: - try: - # Backport of the function from a pypi module - from backports.ssl_match_hostname import ( # type: ignore - CertificateError, - match_hostname, - ) - except ImportError: - # Our vendored copy - from ._implementation import CertificateError, match_hostname # type: ignore - -# Not needed, but documenting what we provide. 
-__all__ = ("CertificateError", "match_hostname") diff --git a/newrelic/packages/urllib3/poolmanager.py b/newrelic/packages/urllib3/poolmanager.py index 3a31a285b..ca4ec3411 100644 --- a/newrelic/packages/urllib3/poolmanager.py +++ b/newrelic/packages/urllib3/poolmanager.py @@ -34,6 +34,7 @@ "ca_cert_dir", "ssl_context", "key_password", + "server_hostname", ) # All known keyword arguments that could be provided to the pool manager, its diff --git a/newrelic/packages/urllib3/response.py b/newrelic/packages/urllib3/response.py index 38693f4fc..0bd13d40b 100644 --- a/newrelic/packages/urllib3/response.py +++ b/newrelic/packages/urllib3/response.py @@ -2,16 +2,22 @@ import io import logging +import sys +import warnings import zlib from contextlib import contextmanager from socket import error as SocketError from socket import timeout as SocketTimeout try: - import brotli + try: + import brotlicffi as brotli + except ImportError: + import brotli except ImportError: brotli = None +from . import util from ._collections import HTTPHeaderDict from .connection import BaseSSLError, HTTPException from .exceptions import ( @@ -478,6 +484,54 @@ def _error_catcher(self): if self._original_response and self._original_response.isclosed(): self.release_conn() + def _fp_read(self, amt): + """ + Read a response with the thought that reading the number of bytes + larger than can fit in a 32-bit int at a time via SSL in some + known cases leads to an overflow error that has to be prevented + if `amt` or `self.length_remaining` indicate that a problem may + happen. + + The known cases: + * 3.8 <= CPython < 3.9.7 because of a bug + https://github.com/urllib3/urllib3/issues/2513#issuecomment-1152559900. + * urllib3 injected with pyOpenSSL-backed SSL-support. + * CPython < 3.10 only when `amt` does not fit 32-bit int. 
+ """ + assert self._fp + c_int_max = 2 ** 31 - 1 + if ( + ( + (amt and amt > c_int_max) + or (self.length_remaining and self.length_remaining > c_int_max) + ) + and not util.IS_SECURETRANSPORT + and (util.IS_PYOPENSSL or sys.version_info < (3, 10)) + ): + buffer = io.BytesIO() + # Besides `max_chunk_amt` being a maximum chunk size, it + # affects memory overhead of reading a response by this + # method in CPython. + # `c_int_max` equal to 2 GiB - 1 byte is the actual maximum + # chunk size that does not lead to an overflow error, but + # 256 MiB is a compromise. + max_chunk_amt = 2 ** 28 + while amt is None or amt != 0: + if amt is not None: + chunk_amt = min(amt, max_chunk_amt) + amt -= chunk_amt + else: + chunk_amt = max_chunk_amt + data = self._fp.read(chunk_amt) + if not data: + break + buffer.write(data) + del data # to reduce peak memory usage by `max_chunk_amt`. + return buffer.getvalue() + else: + # StringIO doesn't like amt=None + return self._fp.read(amt) if amt is not None else self._fp.read() + def read(self, amt=None, decode_content=None, cache_content=False): """ Similar to :meth:`http.client.HTTPResponse.read`, but with two additional @@ -510,13 +564,11 @@ def read(self, amt=None, decode_content=None, cache_content=False): fp_closed = getattr(self._fp, "closed", False) with self._error_catcher(): + data = self._fp_read(amt) if not fp_closed else b"" if amt is None: - # cStringIO doesn't like amt=None - data = self._fp.read() if not fp_closed else b"" flush_decoder = True else: cache_content = False - data = self._fp.read(amt) if not fp_closed else b"" if ( amt != 0 and not data ): # Platform-specific: Buggy versions of Python. @@ -612,9 +664,21 @@ def from_httplib(ResponseCls, r, **response_kw): # Backwards-compatibility methods for http.client.HTTPResponse def getheaders(self): + warnings.warn( + "HTTPResponse.getheaders() is deprecated and will be removed " + "in urllib3 v2.1.0. 
Instead access HTTPResponse.headers directly.", + category=DeprecationWarning, + stacklevel=2, + ) return self.headers def getheader(self, name, default=None): + warnings.warn( + "HTTPResponse.getheader() is deprecated and will be removed " + "in urllib3 v2.1.0. Instead use HTTPResponse.headers.get(name, default).", + category=DeprecationWarning, + stacklevel=2, + ) return self.headers.get(name, default) # Backwards compatibility for http.cookiejar diff --git a/newrelic/packages/urllib3/util/connection.py b/newrelic/packages/urllib3/util/connection.py index 30b2d174d..6af1138f2 100644 --- a/newrelic/packages/urllib3/util/connection.py +++ b/newrelic/packages/urllib3/util/connection.py @@ -2,9 +2,8 @@ import socket -from ..exceptions import LocationParseError - from ..contrib import _appengine_environ +from ..exceptions import LocationParseError from ..packages import six from .wait import NoWayToWaitForSocketError, wait_for_read diff --git a/newrelic/packages/urllib3/util/request.py b/newrelic/packages/urllib3/util/request.py index 25103383e..b574b081e 100644 --- a/newrelic/packages/urllib3/util/request.py +++ b/newrelic/packages/urllib3/util/request.py @@ -14,7 +14,10 @@ ACCEPT_ENCODING = "gzip,deflate" try: - import brotli as _unused_module_brotli # noqa: F401 + try: + import brotlicffi as _unused_module_brotli # noqa: F401 + except ImportError: + import brotli as _unused_module_brotli # noqa: F401 except ImportError: pass else: diff --git a/newrelic/packages/urllib3/util/retry.py b/newrelic/packages/urllib3/util/retry.py index 0ccc767bd..2490d5e5b 100644 --- a/newrelic/packages/urllib3/util/retry.py +++ b/newrelic/packages/urllib3/util/retry.py @@ -69,6 +69,24 @@ def DEFAULT_REDIRECT_HEADERS_BLACKLIST(cls, value): ) cls.DEFAULT_REMOVE_HEADERS_ON_REDIRECT = value + @property + def BACKOFF_MAX(cls): + warnings.warn( + "Using 'Retry.BACKOFF_MAX' is deprecated and " + "will be removed in v2.0. 
Use 'Retry.DEFAULT_BACKOFF_MAX' instead", + DeprecationWarning, + ) + return cls.DEFAULT_BACKOFF_MAX + + @BACKOFF_MAX.setter + def BACKOFF_MAX(cls, value): + warnings.warn( + "Using 'Retry.BACKOFF_MAX' is deprecated and " + "will be removed in v2.0. Use 'Retry.DEFAULT_BACKOFF_MAX' instead", + DeprecationWarning, + ) + cls.DEFAULT_BACKOFF_MAX = value + @six.add_metaclass(_RetryMeta) class Retry(object): @@ -162,7 +180,7 @@ class Retry(object): .. warning:: - Previously this parameter was named ``method_allowlist``, that + Previously this parameter was named ``method_whitelist``, that usage is deprecated in v1.26.0 and will be removed in v2.0. :param iterable status_forcelist: @@ -181,7 +199,7 @@ class Retry(object): seconds. If the backoff_factor is 0.1, then :func:`.sleep` will sleep for [0.0s, 0.2s, 0.4s, ...] between retries. It will never be longer - than :attr:`Retry.BACKOFF_MAX`. + than :attr:`Retry.DEFAULT_BACKOFF_MAX`. By default, backoff is disabled (set to 0). @@ -220,7 +238,7 @@ class Retry(object): DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(["Authorization"]) #: Maximum backoff time. - BACKOFF_MAX = 120 + DEFAULT_BACKOFF_MAX = 120 def __init__( self, @@ -239,23 +257,23 @@ def __init__( respect_retry_after_header=True, remove_headers_on_redirect=_Default, # TODO: Deprecated, remove in v2.0 - method_allowlist=_Default, + method_whitelist=_Default, ): - if method_allowlist is not _Default: + if method_whitelist is not _Default: if allowed_methods is not _Default: raise ValueError( "Using both 'allowed_methods' and " - "'method_allowlist' together is not allowed. " + "'method_whitelist' together is not allowed. " "Instead only use 'allowed_methods'" ) warnings.warn( - "Using 'method_allowlist' with Retry is deprecated and " + "Using 'method_whitelist' with Retry is deprecated and " "will be removed in v2.0. 
Use 'allowed_methods' instead", DeprecationWarning, stacklevel=2, ) - allowed_methods = method_allowlist + allowed_methods = method_whitelist if allowed_methods is _Default: allowed_methods = self.DEFAULT_ALLOWED_METHODS if remove_headers_on_redirect is _Default: @@ -302,17 +320,17 @@ def new(self, **kw): # TODO: If already given in **kw we use what's given to us # If not given we need to figure out what to pass. We decide - # based on whether our class has the 'method_allowlist' property - # and if so we pass the deprecated 'method_allowlist' otherwise + # based on whether our class has the 'method_whitelist' property + # and if so we pass the deprecated 'method_whitelist' otherwise # we use 'allowed_methods'. Remove in v2.0 - if "method_allowlist" not in kw and "allowed_methods" not in kw: - if "method_allowlist" in self.__dict__: + if "method_whitelist" not in kw and "allowed_methods" not in kw: + if "method_whitelist" in self.__dict__: warnings.warn( - "Using 'method_allowlist' with Retry is deprecated and " + "Using 'method_whitelist' with Retry is deprecated and " "will be removed in v2.0. 
Use 'allowed_methods' instead", DeprecationWarning, ) - params["method_allowlist"] = self.allowed_methods + params["method_whitelist"] = self.allowed_methods else: params["allowed_methods"] = self.allowed_methods @@ -348,7 +366,7 @@ def get_backoff_time(self): return 0 backoff_value = self.backoff_factor * (2 ** (consecutive_errors_len - 1)) - return min(self.BACKOFF_MAX, backoff_value) + return min(self.DEFAULT_BACKOFF_MAX, backoff_value) def parse_retry_after(self, retry_after): # Whitespace: https://tools.ietf.org/html/rfc7230#section-3.2.4 @@ -376,7 +394,7 @@ def parse_retry_after(self, retry_after): def get_retry_after(self, response): """Get the value of Retry-After in seconds.""" - retry_after = response.getheader("Retry-After") + retry_after = response.headers.get("Retry-After") if retry_after is None: return None @@ -431,15 +449,15 @@ def _is_method_retryable(self, method): """Checks if a given HTTP method should be retried upon, depending if it is included in the allowed_methods """ - # TODO: For now favor if the Retry implementation sets its own method_allowlist + # TODO: For now favor if the Retry implementation sets its own method_whitelist # property outside of our constructor to avoid breaking custom implementations. - if "method_allowlist" in self.__dict__: + if "method_whitelist" in self.__dict__: warnings.warn( - "Using 'method_allowlist' with Retry is deprecated and " + "Using 'method_whitelist' with Retry is deprecated and " "will be removed in v2.0. 
Use 'allowed_methods' instead", DeprecationWarning, ) - allowed_methods = self.method_allowlist + allowed_methods = self.method_whitelist else: allowed_methods = self.allowed_methods @@ -584,10 +602,10 @@ def __repr__(self): ).format(cls=type(self), self=self) def __getattr__(self, item): - if item == "method_allowlist": + if item == "method_whitelist": # TODO: Remove this deprecated alias in v2.0 warnings.warn( - "Using 'method_allowlist' with Retry is deprecated and " + "Using 'method_whitelist' with Retry is deprecated and " "will be removed in v2.0. Use 'allowed_methods' instead", DeprecationWarning, ) diff --git a/newrelic/packages/urllib3/packages/ssl_match_hostname/_implementation.py b/newrelic/packages/urllib3/util/ssl_match_hostname.py similarity index 92% rename from newrelic/packages/urllib3/packages/ssl_match_hostname/_implementation.py rename to newrelic/packages/urllib3/util/ssl_match_hostname.py index 689208d3c..1dd950c48 100644 --- a/newrelic/packages/urllib3/packages/ssl_match_hostname/_implementation.py +++ b/newrelic/packages/urllib3/util/ssl_match_hostname.py @@ -9,7 +9,7 @@ # ipaddress has been backported to 2.6+ in pypi. If it is installed on the # system, use it to handle IPAddress ServerAltnames (this was added in # python-3.5) otherwise only do DNS matching. This allows -# backports.ssl_match_hostname to continue to be used in Python 2.7. +# util.ssl_match_hostname to continue to be used in Python 2.7. 
try: import ipaddress except ImportError: @@ -78,7 +78,8 @@ def _dnsname_match(dn, hostname, max_wildcards=1): def _to_unicode(obj): if isinstance(obj, str) and sys.version_info < (3,): - obj = unicode(obj, encoding="ascii", errors="strict") + # ignored flake8 # F821 to support python 2.7 function + obj = unicode(obj, encoding="ascii", errors="strict") # noqa: F821 return obj @@ -111,11 +112,9 @@ def match_hostname(cert, hostname): try: # Divergence from upstream: ipaddress can't handle byte str host_ip = ipaddress.ip_address(_to_unicode(hostname)) - except ValueError: - # Not an IP address (common case) - host_ip = None - except UnicodeError: - # Divergence from upstream: Have to deal with ipaddress not taking + except (UnicodeError, ValueError): + # ValueError: Not an IP address (common case) + # UnicodeError: Divergence from upstream: Have to deal with ipaddress not taking # byte strings. addresses should be all ascii, so we consider it not # an ipaddress in this case host_ip = None @@ -123,7 +122,7 @@ def match_hostname(cert, hostname): # Divergence from upstream: Make ipaddress library optional if ipaddress is None: host_ip = None - else: + else: # Defensive raise dnsnames = [] san = cert.get("subjectAltName", ()) diff --git a/newrelic/packages/urllib3/util/timeout.py b/newrelic/packages/urllib3/util/timeout.py index ff69593b0..78e18a627 100644 --- a/newrelic/packages/urllib3/util/timeout.py +++ b/newrelic/packages/urllib3/util/timeout.py @@ -2,9 +2,8 @@ import time -# The default socket timeout, used by httplib to indicate that no timeout was -# specified by the user -from socket import _GLOBAL_DEFAULT_TIMEOUT +# The default socket timeout, used by httplib to indicate that no timeout was; specified by the user +from socket import _GLOBAL_DEFAULT_TIMEOUT, getdefaulttimeout from ..exceptions import TimeoutStateError @@ -116,6 +115,10 @@ def __repr__(self): # __str__ provided for backwards compatibility __str__ = __repr__ + @classmethod + def 
resolve_default_timeout(cls, timeout): + return getdefaulttimeout() if timeout is cls.DEFAULT_TIMEOUT else timeout + @classmethod def _validate_timeout(cls, value, name): """Check that a timeout attribute is valid. diff --git a/newrelic/packages/urllib3/util/url.py b/newrelic/packages/urllib3/util/url.py index 81a03da9e..e5682d3be 100644 --- a/newrelic/packages/urllib3/util/url.py +++ b/newrelic/packages/urllib3/util/url.py @@ -50,7 +50,7 @@ "(?:(?:%(hex)s:){0,6}%(hex)s)?::", ] -UNRESERVED_PAT = r"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._!\-~" +UNRESERVED_PAT = r"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._\-~" IPV6_PAT = "(?:" + "|".join([x % _subs for x in _variations]) + ")" ZONE_ID_PAT = "(?:%25|%)(?:[" + UNRESERVED_PAT + "]|%[a-fA-F0-9]{2})+" IPV6_ADDRZ_PAT = r"\[" + IPV6_PAT + r"(?:" + ZONE_ID_PAT + r")?\]" @@ -63,7 +63,7 @@ BRACELESS_IPV6_ADDRZ_RE = re.compile("^" + IPV6_ADDRZ_PAT[2:-2] + "$") ZONE_ID_RE = re.compile("(" + ZONE_ID_PAT + r")\]$") -_HOST_PORT_PAT = ("^(%s|%s|%s)(?::([0-9]{0,5}))?$") % ( +_HOST_PORT_PAT = ("^(%s|%s|%s)(?::0*?(|0|[1-9][0-9]{0,4}))?$") % ( REG_NAME_PAT, IPV4_PAT, IPV6_ADDRZ_PAT, @@ -279,6 +279,9 @@ def _normalize_host(host, scheme): if scheme in NORMALIZABLE_SCHEMES: is_ipv6 = IPV6_ADDRZ_RE.match(host) if is_ipv6: + # IPv6 hosts of the form 'a::b%zone' are encoded in a URL as + # such per RFC 6874: 'a::b%25zone'. Unquote the ZoneID + # separator as necessary to return a valid RFC 4007 scoped IP. match = ZONE_ID_RE.search(host) if match: start, end = match.span(1) @@ -300,7 +303,7 @@ def _normalize_host(host, scheme): def _idna_encode(name): - if name and any([ord(x) > 128 for x in name]): + if name and any(ord(x) >= 128 for x in name): try: import idna except ImportError: @@ -331,7 +334,7 @@ def parse_url(url): """ Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is performed to parse incomplete urls. Fields not provided will be None. 
- This parser is RFC 3986 compliant. + This parser is RFC 3986 and RFC 6874 compliant. The parser logic and helper functions are based heavily on work done in the ``rfc3986`` module. diff --git a/newrelic/packages/urllib3/util/wait.py b/newrelic/packages/urllib3/util/wait.py index c280646c7..21b4590b3 100644 --- a/newrelic/packages/urllib3/util/wait.py +++ b/newrelic/packages/urllib3/util/wait.py @@ -42,7 +42,6 @@ class NoWayToWaitForSocketError(Exception): def _retry_on_intr(fn, timeout): return fn(timeout) - else: # Old and broken Pythons. def _retry_on_intr(fn, timeout): diff --git a/setup.py b/setup.py index 217ba458c..044125a23 100644 --- a/setup.py +++ b/setup.py @@ -108,7 +108,6 @@ def build_extension(self, ext): "newrelic/packages/urllib3/contrib/_securetransport", "newrelic/packages/urllib3/packages", "newrelic/packages/urllib3/packages/backports", - "newrelic/packages/urllib3/packages/ssl_match_hostname", "newrelic/packages/wrapt", "newrelic.samplers", ] @@ -175,7 +174,6 @@ def with_librt(): def run_setup(with_extensions): def _run_setup(): - # Create a local copy of kwargs, if there is no c compiler run_setup # will need to be re-run, and these arguments can not be present. 
@@ -244,7 +242,6 @@ def _run_setup(): run_setup(with_extensions=True) except BuildExtFailed: - print(75 * "*") print(WARNING) diff --git a/tox.ini b/tox.ini index 2160e9e9c..dde6b4f3d 100644 --- a/tox.ini +++ b/tox.ini @@ -167,7 +167,7 @@ envlist = [testenv] deps = # Base Dependencies - {py37,py38,py39,py310,py311,pypy37}: pytest==6.2.5 + {py37,py38,py39,py310,py311,pypy37}: pytest==7.2.2 {py27,pypy}: pytest==4.6.11 iniconfig pytest-cov @@ -344,6 +344,7 @@ deps = framework_sanic-sanic2290: sanic<22.9.1 framework_sanic-saniclatest: sanic framework_sanic-sanic{1812,190301,1906}: aiohttp + framework_sanic-sanic{1812,190301,1906,1912,200904,210300,2109,2112,2203,2290}: websockets<11 framework_starlette: graphene<3 framework_starlette-starlette0014: starlette<0.15 framework_starlette-starlette0015: starlette<0.16 @@ -491,7 +492,7 @@ usefixtures = [coverage:run] branch = True -omit = "newrelic/packages/**/*.py" +omit = "newrelic/packages/*" parallel = True disable_warnings = couldnt-parse From 4d644c6f4df479808c889a60094a46e2e1dd87d6 Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Wed, 5 Apr 2023 10:04:41 -0700 Subject: [PATCH 066/108] Remove Unused Instrumentation and Tests (#794) * Remove unused instrumentation files * Remove testing for deprecated CherryPy versions * Remove unused pyelasticsearch tests --------- Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> --- newrelic/hooks/memcache_pylibmc.py | 57 --------- newrelic/hooks/memcache_umemcache.py | 82 ------------- newrelic/hooks/nosql_pymongo.py | 44 ------- newrelic/hooks/nosql_redis.py | 62 ---------- newrelic/hooks/solr_pysolr.py | 39 ------ newrelic/hooks/solr_solrpy.py | 50 -------- tests/datastore_pyelasticsearch/conftest.py | 31 ----- .../test_pyelasticsearch.py | 116 ------------------ tox.ini | 10 +- 9 files changed, 2 insertions(+), 489 deletions(-) delete mode 100644 newrelic/hooks/memcache_pylibmc.py delete mode 100644 
newrelic/hooks/memcache_umemcache.py delete mode 100644 newrelic/hooks/nosql_pymongo.py delete mode 100644 newrelic/hooks/nosql_redis.py delete mode 100644 newrelic/hooks/solr_pysolr.py delete mode 100644 newrelic/hooks/solr_solrpy.py delete mode 100644 tests/datastore_pyelasticsearch/conftest.py delete mode 100644 tests/datastore_pyelasticsearch/test_pyelasticsearch.py diff --git a/newrelic/hooks/memcache_pylibmc.py b/newrelic/hooks/memcache_pylibmc.py deleted file mode 100644 index 190980f08..000000000 --- a/newrelic/hooks/memcache_pylibmc.py +++ /dev/null @@ -1,57 +0,0 @@ -# Copyright 2010 New Relic, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import newrelic.api.memcache_trace - -def instrument(module): - - if hasattr(module.Client, 'add'): - newrelic.api.memcache_trace.wrap_memcache_trace( - module, 'Client.add', 'add') - if hasattr(module.Client, 'append'): - newrelic.api.memcache_trace.wrap_memcache_trace( - module, 'Client.append', 'replace') - if hasattr(module.Client, 'decr'): - newrelic.api.memcache_trace.wrap_memcache_trace( - module, 'Client.decr', 'decr') - if hasattr(module.Client, 'delete'): - newrelic.api.memcache_trace.wrap_memcache_trace( - module, 'Client.delete', 'delete') - if hasattr(module.Client, 'delete_multi'): - newrelic.api.memcache_trace.wrap_memcache_trace( - module, 'Client.delete_multi', 'delete') - if hasattr(module.Client, 'get'): - newrelic.api.memcache_trace.wrap_memcache_trace( - module, 'Client.get', 'get') - if hasattr(module.Client, 'get_multi'): - newrelic.api.memcache_trace.wrap_memcache_trace( - module, 'Client.get_multi', 'get') - if hasattr(module.Client, 'incr'): - newrelic.api.memcache_trace.wrap_memcache_trace( - module, 'Client.incr', 'incr') - if hasattr(module.Client, 'incr_multi'): - newrelic.api.memcache_trace.wrap_memcache_trace( - module, 'Client.incr_multi', 'incr') - if hasattr(module.Client, 'prepend'): - newrelic.api.memcache_trace.wrap_memcache_trace( - module, 'Client.prepend', 'replace') - if hasattr(module.Client, 'replace'): - newrelic.api.memcache_trace.wrap_memcache_trace( - module, 'Client.replace', 'replace') - if hasattr(module.Client, 'set'): - newrelic.api.memcache_trace.wrap_memcache_trace( - module, 'Client.set', 'set') - if hasattr(module.Client, 'set_multi'): - newrelic.api.memcache_trace.wrap_memcache_trace( - module, 'Client.set_multi', 'set') diff --git a/newrelic/hooks/memcache_umemcache.py b/newrelic/hooks/memcache_umemcache.py deleted file mode 100644 index 5241374e8..000000000 --- a/newrelic/hooks/memcache_umemcache.py +++ /dev/null @@ -1,82 +0,0 @@ -# Copyright 2010 New Relic, Inc. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from newrelic.api.memcache_trace import memcache_trace -from newrelic.api.object_wrapper import ObjectWrapper - -class Client(ObjectWrapper): - - def __init__(self, wrapped): - super(Client, self).__init__(wrapped, None, None) - - @memcache_trace('set') - def set(self, *args, **kwargs): - return self._nr_next_object.set(*args, **kwargs) - - @memcache_trace('get') - def get(self, *args, **kwargs): - return self._nr_next_object.get(*args, **kwargs) - - @memcache_trace('get') - def gets(self, *args, **kwargs): - return self._nr_next_object.gets(*args, **kwargs) - - @memcache_trace('get') - def get_multi(self, *args, **kwargs): - return self._nr_next_object.get_multi(*args, **kwargs) - - @memcache_trace('get') - def gets_multi(self, *args, **kwargs): - return self._nr_next_object.gets_multi(*args, **kwargs) - - @memcache_trace('add') - def add(self, *args, **kwargs): - return self._nr_next_object.add(*args, **kwargs) - - @memcache_trace('replace') - def replace(self, *args, **kwargs): - return self._nr_next_object.replace(*args, **kwargs) - - @memcache_trace('replace') - def append(self, *args, **kwargs): - return self._nr_next_object.append(*args, **kwargs) - - @memcache_trace('replace') - def prepend(self, *args, **kwargs): - return self._nr_next_object.prepend(*args, **kwargs) - - @memcache_trace('delete') - def delete(self, *args, **kwargs): - return self._nr_next_object.delete(*args, **kwargs) - - 
@memcache_trace('replace') - def cas(self, *args, **kwargs): - return self._nr_next_object.cas(*args, **kwargs) - - @memcache_trace('incr') - def incr(self, *args, **kwargs): - return self._nr_next_object.incr(*args, **kwargs) - - @memcache_trace('decr') - def decr(self, *args, **kwargs): - return self._nr_next_object.decr(*args, **kwargs) - -def instrument(module): - - _Client = module.Client - - def _client(*args, **kwargs): - return Client(_Client(*args, **kwargs)) - - module.Client = _client diff --git a/newrelic/hooks/nosql_pymongo.py b/newrelic/hooks/nosql_pymongo.py deleted file mode 100644 index b2d747b28..000000000 --- a/newrelic/hooks/nosql_pymongo.py +++ /dev/null @@ -1,44 +0,0 @@ -# Copyright 2010 New Relic, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import newrelic.api.function_trace - -_methods = ['save', 'insert', 'update', 'drop', 'remove', 'find_one', - 'find', 'count', 'create_index', 'ensure_index', 'drop_indexes', - 'drop_index', 'reindex', 'index_information', 'options', - 'group', 'rename', 'distinct', 'map_reduce', 'inline_map_reduce', - 'find_and_modify'] - -def instrument_pymongo_connection(module): - - # Must name function explicitly as pymongo overrides the - # __getattr__() method in a way that breaks introspection. 
- - newrelic.api.function_trace.wrap_function_trace( - module, 'Connection.__init__', - name='%s:Connection.__init__' % module.__name__) - -def instrument_pymongo_collection(module): - - # Must name function explicitly as pymongo overrides the - # __getattr__() method in a way that breaks introspection. - - for method in _methods: - if hasattr(module.Collection, method): - #newrelic.api.function_trace.wrap_function_trace( - # module, 'Collection.%s' % method, - # name=method, group='Custom/MongoDB') - newrelic.api.function_trace.wrap_function_trace( - module, 'Collection.%s' % method, - name='%s:Collection.%s' % (module.__name__, method)) diff --git a/newrelic/hooks/nosql_redis.py b/newrelic/hooks/nosql_redis.py deleted file mode 100644 index 1f099ec97..000000000 --- a/newrelic/hooks/nosql_redis.py +++ /dev/null @@ -1,62 +0,0 @@ -# Copyright 2010 New Relic, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import newrelic.api.function_trace - -_methods_1 = ['bgrewriteaof', 'bgsave', 'config_get', 'config_set', - 'dbsize', 'debug_object', 'delete', 'echo', 'flushall', - 'flushdb', 'info', 'lastsave', 'object', 'ping', 'save', - 'shutdown', 'slaveof', 'append', 'decr', 'exists', - 'expire', 'expireat', 'get', 'getbit', 'getset', 'incr', - 'keys', 'mget', 'mset', 'msetnx', 'move', 'persist', - 'randomkey', 'rename', 'renamenx', 'set', 'setbit', - 'setex', 'setnx', 'setrange', 'strlen', 'substr', 'ttl', - 'type', 'blpop', 'brpop', 'brpoplpush', 'lindex', - 'linsert', 'llen', 'lpop', 'lpush', 'lpushx', 'lrange', - 'lrem', 'lset', 'ltrim', 'rpop', 'rpoplpush', 'rpush', - 'rpushx', 'sort', 'sadd', 'scard', 'sdiff', 'sdiffstore', - 'sinter', 'sinterstore', 'sismember', 'smembers', - 'smove', 'spop', 'srandmember', 'srem', 'sunion', - 'sunionstore', 'zadd', 'zcard', 'zcount', 'zincrby', - 'zinterstore', 'zrange', 'zrangebyscore', 'zrank', 'zrem', - 'zremrangebyrank', 'zremrangebyscore', 'zrevrange', - 'zrevrangebyscore', 'zrevrank', 'zscore', 'zunionstore', - 'hdel', 'hexists', 'hget', 'hgetall', 'hincrby', 'hkeys', - 'hlen', 'hset', 'hsetnx', 'hmset', 'hmget', 'hvals', - 'publish'] - -_methods_2 = ['setex', 'lrem', 'zadd'] - -def instrument_redis_connection(module): - - newrelic.api.function_trace.wrap_function_trace( - module, 'Connection.connect') - -def instrument_redis_client(module): - - if hasattr(module, 'StrictRedis'): - for method in _methods_1: - if hasattr(module.StrictRedis, method): - newrelic.api.function_trace.wrap_function_trace( - module, 'StrictRedis.%s' % method) - else: - for method in _methods_1: - if hasattr(module.Redis, method): - newrelic.api.function_trace.wrap_function_trace( - module, 'Redis.%s' % method) - - for method in _methods_2: - if hasattr(module.Redis, method): - newrelic.api.function_trace.wrap_function_trace( - module, 'Redis.%s' % method) diff --git a/newrelic/hooks/solr_pysolr.py b/newrelic/hooks/solr_pysolr.py deleted file mode 
100644 index a4972681b..000000000 --- a/newrelic/hooks/solr_pysolr.py +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright 2010 New Relic, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import newrelic.api.solr_trace - -def instrument(module): - - if hasattr(module.Solr, 'search'): - newrelic.api.solr_trace.wrap_solr_trace( - module, 'Solr.search', 'pysolr', 'query') - if hasattr(module.Solr, 'more_like_this'): - newrelic.api.solr_trace.wrap_solr_trace( - module, 'Solr.more_like_this', 'pysolr', 'query') - if hasattr(module.Solr, 'suggest_terms'): - newrelic.api.solr_trace.wrap_solr_trace( - module, 'Solr.suggest_terms', 'pysolr', 'query') - if hasattr(module.Solr, 'add'): - newrelic.api.solr_trace.wrap_solr_trace( - module, 'Solr.add', 'pysolr', 'add') - if hasattr(module.Solr, 'delete'): - newrelic.api.solr_trace.wrap_solr_trace( - module, 'Solr.delete', 'pysolr', 'delete') - if hasattr(module.Solr, 'commit'): - newrelic.api.solr_trace.wrap_solr_trace( - module, 'Solr.commit', 'pysolr', 'commit') - if hasattr(module.Solr, 'optimize'): - newrelic.api.solr_trace.wrap_solr_trace( - module, 'Solr.optimize', 'pysolr', 'optimize') diff --git a/newrelic/hooks/solr_solrpy.py b/newrelic/hooks/solr_solrpy.py deleted file mode 100644 index e8b87ad47..000000000 --- a/newrelic/hooks/solr_solrpy.py +++ /dev/null @@ -1,50 +0,0 @@ -# Copyright 2010 New Relic, Inc. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import newrelic.api.solr_trace - -def instrument(module): - - if hasattr(module.Solr, 'delete'): - newrelic.api.solr_trace.wrap_solr_trace( - module, 'Solr.delete', 'solrpy', 'delete') - if hasattr(module.Solr, 'delete_many'): - newrelic.api.solr_trace.wrap_solr_trace( - module, 'Solr.delete_many', 'solrpy', 'delete') - if hasattr(module.Solr, 'delete_query'): - newrelic.api.solr_trace.wrap_solr_trace( - module, 'Solr.delete_query', 'solrpy', 'delete') - if hasattr(module.Solr, 'add'): - newrelic.api.solr_trace.wrap_solr_trace( - module, 'Solr.add', 'solrpy', 'add') - if hasattr(module.Solr, 'add_many'): - newrelic.api.solr_trace.wrap_solr_trace( - module, 'Solr.add_many', 'solrpy', 'add') - if hasattr(module.Solr, 'commit'): - newrelic.api.solr_trace.wrap_solr_trace( - module, 'Solr.commit', 'solrpy', 'commit') - if hasattr(module.Solr, 'optimize'): - newrelic.api.solr_trace.wrap_solr_trace( - module, 'Solr.optimize', 'solrpy', 'optimize') - - if hasattr(module.SolrConnection, 'query'): - newrelic.api.solr_trace.wrap_solr_trace( - module, 'SolrConnection.query', 'solrpy', 'query') - if hasattr(module.SolrConnection, 'raw_query'): - newrelic.api.solr_trace.wrap_solr_trace( - module, 'SolrConnection.raw_query', 'solrpy', 'query') - - if hasattr(module, 'SearchHandler'): - newrelic.api.solr_trace.wrap_solr_trace( - module, 'SearchHandler.__call__', 'solrpy', 'query') diff --git 
a/tests/datastore_pyelasticsearch/conftest.py b/tests/datastore_pyelasticsearch/conftest.py deleted file mode 100644 index 192642135..000000000 --- a/tests/datastore_pyelasticsearch/conftest.py +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright 2010 New Relic, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import pytest - -from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture # noqa: F401; pylint: disable=W0611 - - -_default_settings = { - 'transaction_tracer.explain_threshold': 0.0, - 'transaction_tracer.transaction_threshold': 0.0, - 'transaction_tracer.stack_trace_threshold': 0.0, - 'debug.log_data_collector_payloads': True, - 'debug.record_transaction_failure': True -} - -collector_agent_registration = collector_agent_registration_fixture( - app_name='Python Agent Test (datastore_pyelasticsearch)', - default_settings=_default_settings, - linked_applications=['Python Agent Test (datastore)']) diff --git a/tests/datastore_pyelasticsearch/test_pyelasticsearch.py b/tests/datastore_pyelasticsearch/test_pyelasticsearch.py deleted file mode 100644 index deb3f68ff..000000000 --- a/tests/datastore_pyelasticsearch/test_pyelasticsearch.py +++ /dev/null @@ -1,116 +0,0 @@ -# Copyright 2010 New Relic, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import sqlite3 -from pyelasticsearch import ElasticSearch - -from testing_support.validators.validate_transaction_errors import validate_transaction_errors -from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics -from testing_support.db_settings import elasticsearch_settings -from testing_support.validators.validate_database_duration import validate_database_duration - -from newrelic.api.background_task import background_task - -ES_SETTINGS = elasticsearch_settings()[0] -ES_URL = 'http://%s:%s' % (ES_SETTINGS['host'], ES_SETTINGS['port']) - -def _exercise_es(es): - es.index("contacts", "person", - {"name": "Joe Tester", "age": 25, "title": "QA Engineer"}, id=1) - es.index("contacts", "person", - {"name": "Jessica Coder", "age": 32, "title": "Programmer"}, id=2) - es.index("contacts", "person", - {"name": "Freddy Tester", "age": 29, "title": "Assistant"}, id=3) - es.refresh('contacts') - es.index("address", "employee", {"name": "Sherlock", - "address": "221B Baker Street, London"}, id=1) - es.index("address", "employee", {"name": "Bilbo", - "address": "Bag End, Bagshot row, Hobbiton, Shire"}, id=2) - es.search('name:Joe', index='contacts') - es.search('name:jessica', index='contacts') - es.search('name:Sherlock', index='address') - es.search('name:Bilbo', index=['contacts', 'address']) - es.search('name:Bilbo', index='contacts,address') - es.search('name:Bilbo', index='*') - es.search('name:Bilbo') - es.status() - -# Common Metrics for tests that use _exercise_es(). 
- -_test_pyelasticsearch_scoped_metrics = [ - ('Datastore/statement/Elasticsearch/contacts/index', 3), - ('Datastore/statement/Elasticsearch/contacts/search', 2), - ('Datastore/statement/Elasticsearch/address/index', 2), - ('Datastore/statement/Elasticsearch/address/search', 1), - ('Datastore/statement/Elasticsearch/_all/search', 2), - ('Datastore/statement/Elasticsearch/other/search', 2), - ('Datastore/statement/Elasticsearch/contacts/refresh', 1), - ('Datastore/statement/Elasticsearch/_all/status', 1), -] - -_test_pyelasticsearch_rollup_metrics = [ - ('Datastore/all', 14), - ('Datastore/allOther', 14), - ('Datastore/Elasticsearch/all', 14), - ('Datastore/Elasticsearch/allOther', 14), - ('Datastore/operation/Elasticsearch/index', 5), - ('Datastore/operation/Elasticsearch/search', 7), - ('Datastore/operation/Elasticsearch/refresh', 1), - ('Datastore/operation/Elasticsearch/status', 1), - ('Datastore/statement/Elasticsearch/contacts/index', 3), - ('Datastore/statement/Elasticsearch/contacts/search', 2), - ('Datastore/statement/Elasticsearch/address/index', 2), - ('Datastore/statement/Elasticsearch/address/search', 1), - ('Datastore/statement/Elasticsearch/_all/search', 2), - ('Datastore/statement/Elasticsearch/other/search', 2), - ('Datastore/statement/Elasticsearch/contacts/refresh', 1), - ('Datastore/statement/Elasticsearch/_all/status', 1), -] - -@validate_transaction_errors(errors=[]) -@validate_transaction_metrics( - 'test_pyelasticsearch:test_pyelasticsearch_operation', - scoped_metrics=_test_pyelasticsearch_scoped_metrics, - rollup_metrics=_test_pyelasticsearch_rollup_metrics, - background_task=True) -@background_task() -def test_pyelasticsearch_operation(): - client = ElasticSearch(ES_URL) - _exercise_es(client) - -@validate_database_duration() -@background_task() -def test_elasticsearch_database_duration(): - client = ElasticSearch(ES_URL) - _exercise_es(client) - -@validate_database_duration() -@background_task() -def 
test_elasticsearch_and_sqlite_database_duration(): - - # Make ElasticSearch queries - - client = ElasticSearch(ES_URL) - _exercise_es(client) - - # Make sqlite queries - - conn = sqlite3.connect(":memory:") - cur = conn.cursor() - - cur.execute("CREATE TABLE contacts (name text, age int)") - cur.execute("INSERT INTO contacts VALUES ('Bob', 22)") - - conn.commit() - conn.close() diff --git a/tox.ini b/tox.ini index dde6b4f3d..5d9593803 100644 --- a/tox.ini +++ b/tox.ini @@ -115,9 +115,7 @@ envlist = python-framework_bottle-{py310,py311}-bottle0012, python-framework_bottle-pypy-bottle{0008,0009,0010,0011,0012}, ; CherryPy still uses inspect.getargspec, deprecated in favor of inspect.getfullargspec. Not supported in 3.11 - python-framework_cherrypy-{py37,py38,py39,py310,pypy37}-CherryPy18, - python-framework_cherrypy-{py37}-CherryPy0302, - python-framework_cherrypy-pypy37-CherryPy0303, + python-framework_cherrypy-{py37,py38,py39,py310,py311,pypy37}-CherryPylatest, python-framework_django-{pypy,py27}-Django0103, python-framework_django-{pypy,py27,py37}-Django0108, python-framework_django-{py39}-Django{0200,0201,0202,0300,0301,latest}, @@ -233,7 +231,6 @@ deps = datastore_postgresql: py-postgresql<1.3 datastore_psycopg2-psycopg2latest: psycopg2-binary datastore_psycopg2cffi-psycopg2cffilatest: psycopg2cffi - datastore_pyelasticsearch: pyelasticsearch<2.0 datastore_pylibmc: pylibmc datastore_pymemcache: pymemcache datastore_pymongo-pymongo03: pymongo<4.0 @@ -281,9 +278,7 @@ deps = framework_bottle: jinja2<3.1 framework_bottle: markupsafe<2.1 framework_cherrypy: routes - framework_cherrypy-CherryPy0302: CherryPy<3.3.0 - framework_cherrypy-CherryPy0303: CherryPy<3.4.0 - framework_cherrypy-CherryPy18: CherryPy<18.6.0 + framework_cherrypy-CherryPylatest: CherryPy framework_django-Django0103: Django<1.4 framework_django-Django0108: Django<1.9 framework_django-Django0200: Django<2.1 @@ -440,7 +435,6 @@ changedir = datastore_postgresql: tests/datastore_postgresql 
datastore_psycopg2: tests/datastore_psycopg2 datastore_psycopg2cffi: tests/datastore_psycopg2cffi - datastore_pyelasticsearch: tests/datastore_pyelasticsearch datastore_pylibmc: tests/datastore_pylibmc datastore_pymemcache: tests/datastore_pymemcache datastore_pymongo: tests/datastore_pymongo From 153690f53d5a1060b994ebfe26a3f8243d0f3a30 Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Wed, 12 Apr 2023 13:36:39 -0700 Subject: [PATCH 067/108] Fix Loguru Instrumentation for v0.7.0 (#798) * Add autosignature implementation * Fix loguru with auto-signature * [Mega-Linter] Apply linters fixes * Fix tests for Py2 * [Mega-Linter] Apply linters fixes * Bump tests * Remove unwrap from signature utils * Fix arg unpacking * Remove unwrap arg from bind_args * Fix linter errors --------- Co-authored-by: TimPansino Co-authored-by: Lalleh Rafeei --- newrelic/common/signature.py | 31 +++++++++++++++++++++++++ newrelic/hooks/logger_loguru.py | 20 +++++++++------- tests/agent_unittests/test_signature.py | 31 +++++++++++++++++++++++++ 3 files changed, 73 insertions(+), 9 deletions(-) create mode 100644 newrelic/common/signature.py create mode 100644 tests/agent_unittests/test_signature.py diff --git a/newrelic/common/signature.py b/newrelic/common/signature.py new file mode 100644 index 000000000..314998196 --- /dev/null +++ b/newrelic/common/signature.py @@ -0,0 +1,31 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from newrelic.packages import six + +if six.PY3: + from inspect import Signature + + def bind_args(func, args, kwargs): + """Bind arguments and apply defaults to missing arugments for a callable.""" + bound_args = Signature.from_callable(func).bind(*args, **kwargs) + bound_args.apply_defaults() + return bound_args.arguments + +else: + from inspect import getcallargs + + def bind_args(func, args, kwargs): + """Bind arguments and apply defaults to missing arugments for a callable.""" + return getcallargs(func, *args, **kwargs) diff --git a/newrelic/hooks/logger_loguru.py b/newrelic/hooks/logger_loguru.py index 801a1c8cd..9e7ed3eae 100644 --- a/newrelic/hooks/logger_loguru.py +++ b/newrelic/hooks/logger_loguru.py @@ -18,15 +18,18 @@ from newrelic.api.application import application_instance from newrelic.api.transaction import current_transaction, record_log_event from newrelic.common.object_wrapper import wrap_function_wrapper +from newrelic.common.signature import bind_args from newrelic.core.config import global_settings from newrelic.hooks.logger_logging import add_nr_linking_metadata from newrelic.packages import six -_logger = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) is_pypy = hasattr(sys, "pypy_version_info") + def loguru_version(): from loguru import __version__ + return tuple(int(x) for x in __version__.split(".")) @@ -54,7 +57,7 @@ def _nr_log_forwarder(message_instance): if application and application.enabled: application.record_custom_metric("Logging/lines", {"count": 1}) application.record_custom_metric("Logging/lines/%s" % level_name, {"count": 1}) - + if settings.application_logging.forwarding and settings.application_logging.forwarding.enabled: try: record_log_event(message, level_name, int(record["time"].timestamp())) @@ -64,14 +67,13 @@ def _nr_log_forwarder(message_instance): ALLOWED_LOGURU_OPTIONS_LENGTHS = 
frozenset((8, 9)) -def bind_log(level_id, static_level_no, from_decorator, options, message, args, kwargs): - assert len(options) in ALLOWED_LOGURU_OPTIONS_LENGTHS # Assert the options signature we expect - return level_id, static_level_no, from_decorator, list(options), message, args, kwargs - def wrap_log(wrapped, instance, args, kwargs): try: - level_id, static_level_no, from_decorator, options, message, subargs, subkwargs = bind_log(*args, **kwargs) + bound_args = bind_args(wrapped, args, kwargs) + options = bound_args["options"] = list(bound_args["options"]) + assert len(options) in ALLOWED_LOGURU_OPTIONS_LENGTHS # Assert the options signature we expect + options[-2] = nr_log_patcher(options[-2]) # Loguru looks into the stack trace to find the caller's module and function names. # options[1] tells loguru how far up to look in the stack trace to find the caller. @@ -87,14 +89,14 @@ def wrap_log(wrapped, instance, args, kwargs): _logger.debug("Exception in loguru handling: %s" % str(e)) return wrapped(*args, **kwargs) else: - return wrapped(level_id, static_level_no, from_decorator, options, message, subargs, subkwargs) + return wrapped(**bound_args) def nr_log_patcher(original_patcher=None): def _nr_log_patcher(record): if original_patcher: record = original_patcher(record) - + transaction = current_transaction() if transaction: diff --git a/tests/agent_unittests/test_signature.py b/tests/agent_unittests/test_signature.py new file mode 100644 index 000000000..8d44896f3 --- /dev/null +++ b/tests/agent_unittests/test_signature.py @@ -0,0 +1,31 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest + +from newrelic.common.signature import bind_args + + +@pytest.mark.parametrize( + "func,args,kwargs,expected", + [ + (lambda x, y: None, (1,), {"y": 2}, {"x": 1, "y": 2}), + (lambda x=1, y=2: None, (1,), {"y": 2}, {"x": 1, "y": 2}), + (lambda x=1: None, (), {}, {"x": 1}), + ], + ids=("posargs", "kwargs", "defaults"), +) +def test_signature_binding(func, args, kwargs, expected): + bound_args = bind_args(func, args, kwargs) + assert bound_args == expected From caef2cc4a3e3ad0033e8ecfafeab1a6c1295c4e4 Mon Sep 17 00:00:00 2001 From: Lalleh Rafeei <84813886+lrafeei@users.noreply.github.com> Date: Tue, 25 Apr 2023 16:09:46 -0700 Subject: [PATCH 068/108] Remove Twisted framework (#800) * Initial twisted commit * Remove Twisted Framework --- newrelic/config.py | 13 - newrelic/hooks/framework_twisted.py | 560 ---------------------------- 2 files changed, 573 deletions(-) delete mode 100644 newrelic/hooks/framework_twisted.py diff --git a/newrelic/config.py b/newrelic/config.py index dfdf058f4..df95db029 100644 --- a/newrelic/config.py +++ b/newrelic/config.py @@ -3014,19 +3014,6 @@ def _process_module_builtin_defaults(): "instrument_cornice_service", ) - # _process_module_definition('twisted.web.server', - # 'newrelic.hooks.framework_twisted', - # 'instrument_twisted_web_server') - # _process_module_definition('twisted.web.http', - # 'newrelic.hooks.framework_twisted', - # 'instrument_twisted_web_http') - # _process_module_definition('twisted.web.resource', - # 'newrelic.hooks.framework_twisted', - # 'instrument_twisted_web_resource') - # 
_process_module_definition('twisted.internet.defer', - # 'newrelic.hooks.framework_twisted', - # 'instrument_twisted_internet_defer') - _process_module_definition("gevent.monkey", "newrelic.hooks.coroutines_gevent", "instrument_gevent_monkey") _process_module_definition( diff --git a/newrelic/hooks/framework_twisted.py b/newrelic/hooks/framework_twisted.py deleted file mode 100644 index 0270282a6..000000000 --- a/newrelic/hooks/framework_twisted.py +++ /dev/null @@ -1,560 +0,0 @@ -# Copyright 2010 New Relic, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import logging -import sys -import weakref -import UserList - -import newrelic.api.application -import newrelic.api.object_wrapper -import newrelic.api.transaction -import newrelic.api.web_transaction -import newrelic.api.function_trace -import newrelic.api.error_trace - -from newrelic.api.time_trace import notice_error - -_logger = logging.getLogger(__name__) - -class RequestProcessWrapper(object): - - def __init__(self, wrapped): - if isinstance(wrapped, tuple): - (instance, wrapped) = wrapped - else: - instance = None - - newrelic.api.object_wrapper.update_wrapper(self, wrapped) - - self._nr_instance = instance - self._nr_next_object = wrapped - - if not hasattr(self, '_nr_last_object'): - self._nr_last_object = wrapped - - def __get__(self, instance, klass): - if instance is None: - return self - descriptor = self._nr_next_object.__get__(instance, klass) - return self.__class__((instance, descriptor)) - - def __call__(self): - assert self._nr_instance != None - - transaction = newrelic.api.transaction.current_transaction() - - # Check to see if we are being called within the context of any - # sort of transaction. If we are, then we don't bother doing - # anything and just call the wrapped function. This should not - # really ever occur with Twisted.Web wrapper but check anyway. - - if transaction: - return self._nr_next_object() - - # Always use the default application specified in the agent - # configuration. - - application = newrelic.api.application.application_instance() - - # We need to fake up a WSGI like environ dictionary with the key - # bits of information we need. - - environ = {} - - environ['REQUEST_URI'] = self._nr_instance.path - - # Now start recording the actual web transaction. 
- - transaction = newrelic.api.web_transaction.WSGIWebTransaction( - application, environ, source=self._nr_next_object) - - if not transaction.enabled: - return self._nr_next_object() - - transaction.__enter__() - - self._nr_instance._nr_transaction = transaction - - self._nr_instance._nr_is_deferred_callback = False - self._nr_instance._nr_is_request_finished = False - self._nr_instance._nr_wait_function_trace = None - - # We need to add a reference to the Twisted.Web request object - # in the transaction as only able to stash the transaction in a - # deferred. Need to use a weakref to avoid an object cycle which - # may prevent cleanup of transaction. - - transaction._nr_current_request = weakref.ref(self._nr_instance) - - try: - # Call the original method in a trace object to give better - # context in transaction traces. Three things can happen - # within this call. The render() function which is in turn - # called can return a result immediately which means user - # code should have called finish() on the request, it can - # raise an exception which is caught in process() function - # where error handling calls finish(), or it can return that - # it is not done yet and register deferred callbacks to - # complete the request. - - result = newrelic.api.function_trace.FunctionTraceWrapper(self._nr_next_object, name='Request/Process', group='Python/Twisted') - - # In the case of a result having being returned or an - # exception occuring, then finish() will have been called. - # We can't just exit the transaction in the finish call - # however as need to still pop back up through the above - # function trace. So if flagged that have finished, then we - # exit the transaction here. Otherwise we setup a function - # trace to track wait time for deferred and manually pop the - # transaction as being the current one for this thread. 
- - if self._nr_instance._nr_is_request_finished: - transaction.__exit__(None, None, None) - self._nr_instance._nr_transaction = None - self._nr_instance = None - - else: - self._nr_instance._nr_wait_function_trace = \ - newrelic.api.function_trace.FunctionTrace( - name='Deferred/Wait', - group='Python/Twisted', - source=self._nr_next_object) - - self._nr_instance._nr_wait_function_trace.__enter__() - transaction.drop_transaction() - - except: # Catch all - # If an error occurs assume that transaction should be - # exited. Technically don't believe this should ever occur - # unless our code here has an error or Twisted.Web is - # broken. - - _logger.exception('Unexpected exception raised by Twisted.Web ' - 'Request.process() exception.') - - transaction.__exit__(*sys.exc_info()) - self._nr_instance._nr_transaction = None - self._nr_instance = None - - raise - - return result - -class RequestFinishWrapper(object): - - def __init__(self, wrapped): - if isinstance(wrapped, tuple): - (instance, wrapped) = wrapped - else: - instance = None - - newrelic.api.object_wrapper.update_wrapper(self, wrapped) - - self._nr_instance = instance - self._nr_next_object = wrapped - - if not hasattr(self, '_nr_last_object'): - self._nr_last_object = wrapped - - def __get__(self, instance, klass): - if instance is None: - return self - descriptor = self._nr_next_object.__get__(instance, klass) - return self.__class__((instance, descriptor)) - - def __call__(self): - assert self._nr_instance != None - - # Call finish() method straight away if request is not even - # associated with a transaction. - - if not hasattr(self._nr_instance, '_nr_transaction'): - return self._nr_next_object() - - # Technically we should only be able to be called here without - # an active transaction if we are in the wait state. If we - # are called in context of original request process() function - # or a deferred the transaction should already be registered. 
- - transaction = self._nr_instance._nr_transaction - - if self._nr_instance._nr_wait_function_trace: - if newrelic.api.transaction.current_transaction(): - _logger.debug('The Twisted.Web request finish() method is ' - 'being called while in wait state but there is ' - 'already a current transaction.') - else: - transaction.save_transaction() - - elif not newrelic.api.transaction.current_transaction(): - _logger.debug('The Twisted.Web request finish() method is ' - 'being called from request process() method or a ' - 'deferred but there is not a current transaction.') - - # Except for case of being called when in wait state, we can't - # actually exit the transaction at this point as may be called - # in context of an outer function trace node. We thus flag that - # are finished and pop back out allowing outer scope to actually - # exit the transaction. - - self._nr_instance._nr_is_request_finished = True - - # Now call the original finish() function. - - if self._nr_instance._nr_is_deferred_callback: - - # If we are in a deferred callback log any error against the - # transaction here so we know we will capture it. We - # possibly don't need to do it here as outer scope may catch - # it anyway. Duplicate will be ignored so not too important. - # Most likely the finish() call would never fail anyway. - - try: - result = newrelic.api.function_trace.FunctionTraceWrapper(self._nr_next_object, name='Request/Finish', group='Python/Twisted') - except: # Catch all - notice_error(sys.exc_info()) - raise - - elif self._nr_instance._nr_wait_function_trace: - - # Now handle the special case where finish() was called - # while in the wait state. We might get here through - # Twisted.Web itself somehow calling finish() when still - # waiting for a deferred. If this were to occur though then - # the transaction will not be popped if we simply marked - # request as finished as no outer scope to see that and - # clean up. 
We will thus need to end the function trace and - # exit the transaction. We end function trace here and then - # the transaction down below. - - try: - self._nr_instance._nr_wait_function_trace.__exit__( - None, None, None) - - result = newrelic.api.function_trace.FunctionTraceWrapper(self._nr_next_object, name='Request/Finish', group='Python/Twisted') - - - transaction.__exit__(None, None, None) - - except: # Catch all - transaction.__exit__(*sys.exc_info()) - raise - - finally: - self._nr_instance._nr_wait_function_trace = None - self._nr_instance._nr_transaction = None - self._nr_instance = None - - else: - # This should be the case where finish() is being called in - # the original render() function. - - result = newrelic.api.function_trace.FunctionTraceWrapper(self._nr_next_object, name='Request/Finish', group='Python/Twisted') - - return result - -class ResourceRenderWrapper(object): - - def __init__(self, wrapped): - if isinstance(wrapped, tuple): - (instance, wrapped) = wrapped - else: - instance = None - - newrelic.api.object_wrapper.update_wrapper(self, wrapped) - - self._nr_instance = instance - self._nr_next_object = wrapped - - if not hasattr(self, '_nr_last_object'): - self._nr_last_object = wrapped - - def __get__(self, instance, klass): - if instance is None: - return self - descriptor = self._nr_next_object.__get__(instance, klass) - return self.__class__((instance, descriptor)) - - def __call__(self, *args): - - # Temporary work around due to customer calling class method - # directly with 'self' as first argument. Need to work out best - # practice for dealing with this. - - if len(args) == 2: - # Assume called as unbound method with (self, request). - instance, request = args - else: - # Assume called as bound method with (request). 
- instance = self._nr_instance - request = args[-1] - - assert instance != None - - transaction = newrelic.api.transaction.current_transaction() - - if transaction is None: - return self._nr_next_object(*args) - - # This is wrapping the render() function of the resource. We - # name the function node and the web transaction after the name - # of the handler function augmented with the method type for the - # request. - - name = "%s.render_%s" % ( - newrelic.api.object_wrapper.callable_name( - instance), request.method) - transaction.set_transaction_name(name, priority=1) - - return newrelic.api.function_trace.FunctionTraceWrapper(self._nr_next_object, name)(*args) - - -class DeferredUserList(UserList.UserList): - - def pop(self, i=-1): - import twisted.internet.defer - item = super(DeferredUserList, self).pop(i) - - item0 = item[0] - item1 = item[1] - - if item0[0] != twisted.internet.defer._CONTINUE: - item0 = (newrelic.api.function_trace.FunctionTraceWrapper( - item0[0], group='Python/Twisted/Callback'), - item0[1], item0[2]) - - if item1[0] != twisted.internet.defer._CONTINUE: - item1 = (newrelic.api.function_trace.FunctionTraceWrapper( - item1[0], group='Python/Twisted/Errback'), - item1[1], item1[2]) - - return (item0, item1) - -class DeferredWrapper(object): - - def __init__(self, wrapped): - if isinstance(wrapped, tuple): - (instance, wrapped) = wrapped - else: - instance = None - - newrelic.api.object_wrapper.update_wrapper(self, wrapped) - - self._nr_instance = instance - self._nr_next_object = wrapped - - if not hasattr(self, '_nr_last_object'): - self._nr_last_object = wrapped - - def __get__(self, instance, klass): - if instance is None: - return self - descriptor = self._nr_next_object.__get__(instance, klass) - return self.__class__((instance, descriptor)) - - def __call__(self, *args, **kwargs): - - # This is wrapping the __init__() function so call that first. 
- - self._nr_next_object(*args, **kwargs) - - # We now wrap the list of deferred callbacks so can track when - # each callback is actually called. - - if self._nr_instance: - transaction = newrelic.api.transaction.current_transaction() - if transaction: - self._nr_instance._nr_transaction = transaction - self._nr_instance.callbacks = DeferredUserList( - self._nr_instance.callbacks) - -class DeferredCallbacksWrapper(object): - - def __init__(self, wrapped): - if isinstance(wrapped, tuple): - (instance, wrapped) = wrapped - else: - instance = None - - newrelic.api.object_wrapper.update_wrapper(self, wrapped) - - self._nr_instance = instance - self._nr_next_object = wrapped - - if not hasattr(self, '_nr_last_object'): - self._nr_last_object = wrapped - - def __get__(self, instance, klass): - if instance is None: - return self - descriptor = self._nr_next_object.__get__(instance, klass) - return self.__class__((instance, descriptor)) - - def __call__(self): - assert self._nr_instance != None - - transaction = newrelic.api.transaction.current_transaction() - - # If there is an active transaction then deferred is being - # called within a context of another deferred so simply call the - # callback and return. - - if transaction: - return self._nr_next_object() - - # If there is no transaction recorded against the deferred then - # don't need to do anything and can simply call the callback and - # return. - - if not hasattr(self._nr_instance, '_nr_transaction'): - return self._nr_next_object() - - transaction = self._nr_instance._nr_transaction - - # If we can't find a Twisted.Web request object associated with - # the transaction or it is no longer valid then simply call the - # callback and return. 
- - if not hasattr(transaction, '_nr_current_request'): - return self._nr_next_object() - - request = transaction._nr_current_request() - - if not request: - return self._nr_next_object() - - try: - # Save the transaction recorded against the deferred as the - # active transaction. - - transaction.save_transaction() - - # Record that are calling a deferred. This changes what we - # do if the request finish() method is being called. - - request._nr_is_deferred_callback = True - - # We should always be calling into a deferred when we are - # in the wait state for the request. We need to exit that - # wait state. - - if request._nr_wait_function_trace: - request._nr_wait_function_trace.__exit__(None, None, None) - request._nr_wait_function_trace = None - - else: - _logger.debug('Called a Twisted.Web deferred when we were ' - 'not in a wait state.') - - # Call the deferred and capture any errors that may come - # back from it. - - with newrelic.api.error_trace.ErrorTrace(): - return newrelic.api.function_trace.FunctionTraceWrapper(self._nr_next_object, name='Deferred/Call', group='Python/Twisted') - - finally: - # If the request finish() method was called from the - # deferred then we need to exit the transaction. Other wise - # we need to create a new function trace node for a new wait - # state and pop the transaction. - - if request._nr_is_request_finished: - transaction.__exit__(None, None, None) - self._nr_instance._nr_transaction = None - - else: - # XXX Should we be removing the transaction from the - # deferred object as well. Can the same deferred be - # called multiple times for same request. It probably - # can be reregistered. 
- - request._nr_wait_function_trace = \ - newrelic.api.function_trace.FunctionTrace( - name='Deferred/Wait', - group='Python/Twisted', - source=self._nr_next_object) - - request._nr_wait_function_trace.__enter__() - transaction.drop_transaction() - - request._nr_is_deferred_callback = False - -class InlineGeneratorWrapper(object): - - def __init__(self, wrapped, generator): - self._nr_wrapped = wrapped - self._nr_generator = generator - - def __iter__(self): - name = newrelic.api.object_wrapper.callable_name(self._nr_wrapped) - iterable = iter(self._nr_generator) - while 1: - with newrelic.api.function_trace.FunctionTrace( - name, group='Python/Twisted/Generator', source=self._nr_wrapped): - yield next(iterable) - -class InlineCallbacksWrapper(object): - - def __init__(self, wrapped): - if isinstance(wrapped, tuple): - (instance, wrapped) = wrapped - else: - instance = None - - newrelic.api.object_wrapper.update_wrapper(self, wrapped) - - self._nr_instance = instance - self._nr_next_object = wrapped - - if not hasattr(self, '_nr_last_object'): - self._nr_last_object = wrapped - - def __get__(self, instance, klass): - if instance is None: - return self - descriptor = self._nr_next_object.__get__(instance, klass) - return self.__class__((instance, descriptor)) - - def __call__(self, *args, **kwargs): - transaction = newrelic.api.transaction.current_transaction() - - if not transaction: - return self._nr_next_object(*args, **kwargs) - - result = self._nr_next_object(*args, **kwargs) - - if not result: - return result - - return iter(InlineGeneratorWrapper(self._nr_next_object, result)) - -def instrument_twisted_web_server(module): - module.Request.process = RequestProcessWrapper(module.Request.process) - -def instrument_twisted_web_http(module): - module.Request.finish = RequestFinishWrapper(module.Request.finish) - -def instrument_twisted_web_resource(module): - module.Resource.render = ResourceRenderWrapper(module.Resource.render) - -def 
instrument_twisted_internet_defer(module): - module.Deferred.__init__ = DeferredWrapper(module.Deferred.__init__) - module.Deferred._runCallbacks = DeferredCallbacksWrapper( - module.Deferred._runCallbacks) - - #_inlineCallbacks = module.inlineCallbacks - #def inlineCallbacks(f): - # return _inlineCallbacks(InlineCallbacksWrapper(f)) - #module.inlineCallbacks = inlineCallbacks From 363122a0efe0ad9f4784fc1f67fda046cb9bb7e8 Mon Sep 17 00:00:00 2001 From: Hannah Stepanek Date: Mon, 1 May 2023 13:34:35 -0700 Subject: [PATCH 069/108] Pin virtualenv, fix pip arg deprecation & disable kafka tests (#803) * Pin virtualenv * Fixup: use 20.21.1 instead * Replace install-options with config-settings See https://github.com/pypa/pip/issues/11358. * Temporarily disable kafka tests --- .../actions/setup-python-matrix/action.yml | 2 +- .github/workflows/tests.yml | 144 +++++++++--------- tox.ini | 2 +- 3 files changed, 74 insertions(+), 74 deletions(-) diff --git a/.github/actions/setup-python-matrix/action.yml b/.github/actions/setup-python-matrix/action.yml index bcb5cbc78..a2dc77e60 100644 --- a/.github/actions/setup-python-matrix/action.yml +++ b/.github/actions/setup-python-matrix/action.yml @@ -47,4 +47,4 @@ runs: shell: bash run: | python3.10 -m pip install -U pip - python3.10 -m pip install -U wheel setuptools 'tox<4' virtualenv!=20.0.24 + python3.10 -m pip install -U wheel setuptools 'tox<4' 'virtualenv<20.22.0' diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index d20a7c02a..15d105b83 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -38,7 +38,7 @@ jobs: - elasticsearchserver08 - gearman - grpc - - kafka + #- kafka - libcurl - memcached - mongodb @@ -534,77 +534,77 @@ jobs: path: ./**/.coverage.* retention-days: 1 - kafka: - env: - TOTAL_GROUPS: 4 - - strategy: - fail-fast: false - matrix: - group-number: [1, 2, 3, 4] - - runs-on: ubuntu-20.04 - timeout-minutes: 30 - - services: - zookeeper: - image: bitnami/zookeeper:3.7 
- env: - ALLOW_ANONYMOUS_LOGIN: yes - - ports: - - 2181:2181 - - kafka: - image: bitnami/kafka:3.2 - ports: - - 8080:8080 - - 8081:8081 - env: - ALLOW_PLAINTEXT_LISTENER: yes - KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 - KAFKA_CFG_AUTO_CREATE_TOPICS_ENABLE: true - KAFKA_CFG_LISTENERS: L1://:8080,L2://:8081 - KAFKA_CFG_ADVERTISED_LISTENERS: L1://127.0.0.1:8080,L2://kafka:8081, - KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP: L1:PLAINTEXT,L2:PLAINTEXT - KAFKA_CFG_INTER_BROKER_LISTENER_NAME: L2 - - steps: - - uses: actions/checkout@v3 - - uses: ./.github/actions/setup-python-matrix - - # Special case packages - - name: Install librdkafka-dev - run: | - # Use lsb-release to find the codename of Ubuntu to use to install the correct library name - sudo apt-get update - sudo ln -fs /usr/share/zoneinfo/America/Los_Angeles /etc/localtime - sudo apt-get install -y wget gnupg2 software-properties-common - sudo wget -qO - https://packages.confluent.io/deb/7.2/archive.key | sudo apt-key add - - sudo add-apt-repository "deb https://packages.confluent.io/clients/deb $(lsb_release -cs) main" - sudo apt-get update - sudo apt-get install -y librdkafka-dev/$(lsb_release -c | cut -f 2) - - - name: Get Environments - id: get-envs - run: | - echo "::set-output name=envs::$(tox -l | grep "^${{ github.job }}\-" | ./.github/workflows/get-envs.py)" - env: - GROUP_NUMBER: ${{ matrix.group-number }} - - - name: Test - run: | - tox -vv -e ${{ steps.get-envs.outputs.envs }} - env: - TOX_PARALLEL_NO_SPINNER: 1 - PY_COLORS: 0 - - - name: Upload Coverage Artifacts - uses: actions/upload-artifact@v3 - with: - name: coverage-${{ github.job }}-${{ strategy.job-index }} - path: ./**/.coverage.* - retention-days: 1 + #kafka: + # env: + # TOTAL_GROUPS: 4 + + # strategy: + # fail-fast: false + # matrix: + # group-number: [1, 2, 3, 4] + + # runs-on: ubuntu-20.04 + # timeout-minutes: 30 + + # services: + # zookeeper: + # image: bitnami/zookeeper:3.7 + # env: + # ALLOW_ANONYMOUS_LOGIN: yes + + # ports: + # - 
2181:2181 + + # kafka: + # image: bitnami/kafka:3.2 + # ports: + # - 8080:8080 + # - 8081:8081 + # env: + # ALLOW_PLAINTEXT_LISTENER: yes + # KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 + # KAFKA_CFG_AUTO_CREATE_TOPICS_ENABLE: true + # KAFKA_CFG_LISTENERS: L1://:8080,L2://:8081 + # KAFKA_CFG_ADVERTISED_LISTENERS: L1://127.0.0.1:8080,L2://kafka:8081, + # KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP: L1:PLAINTEXT,L2:PLAINTEXT + # KAFKA_CFG_INTER_BROKER_LISTENER_NAME: L2 + + # steps: + # - uses: actions/checkout@v3 + # - uses: ./.github/actions/setup-python-matrix + + # # Special case packages + # - name: Install librdkafka-dev + # run: | + # # Use lsb-release to find the codename of Ubuntu to use to install the correct library name + # sudo apt-get update + # sudo ln -fs /usr/share/zoneinfo/America/Los_Angeles /etc/localtime + # sudo apt-get install -y wget gnupg2 software-properties-common + # sudo wget -qO - https://packages.confluent.io/deb/7.2/archive.key | sudo apt-key add - + # sudo add-apt-repository "deb https://packages.confluent.io/clients/deb $(lsb_release -cs) main" + # sudo apt-get update + # sudo apt-get install -y librdkafka-dev/$(lsb_release -c | cut -f 2) + + # - name: Get Environments + # id: get-envs + # run: | + # echo "::set-output name=envs::$(tox -l | grep "^${{ github.job }}\-" | ./.github/workflows/get-envs.py)" + # env: + # GROUP_NUMBER: ${{ matrix.group-number }} + + # - name: Test + # run: | + # tox -vv -e ${{ steps.get-envs.outputs.envs }} + # env: + # TOX_PARALLEL_NO_SPINNER: 1 + # PY_COLORS: 0 + + # - name: Upload Coverage Artifacts + # uses: actions/upload-artifact@v3 + # with: + # name: coverage-${{ github.job }}-${{ strategy.job-index }} + # path: ./**/.coverage.* + # retention-days: 1 mongodb: env: diff --git a/tox.ini b/tox.ini index 5d9593803..1d9110e4f 100644 --- a/tox.ini +++ b/tox.ini @@ -397,7 +397,7 @@ commands = framework_grpc: --grpc_python_out={toxinidir}/tests/framework_grpc/sample_application \ framework_grpc: 
/{toxinidir}/tests/framework_grpc/sample_application/sample_application.proto - libcurl: pip install --ignore-installed --install-option="--with-openssl" pycurl + libcurl: pip install --ignore-installed --config-settings="--build-option=--with-openssl" pycurl py.test -v [] From 7103506ca5639d339e3e47dfb9e4affb546c839b Mon Sep 17 00:00:00 2001 From: Hannah Stepanek Date: Mon, 1 May 2023 14:12:31 -0700 Subject: [PATCH 070/108] Add tests for pyodbc (#796) * Add tests for pyodbc * Move imports into tests to get import coverage * Fixup: remove time import * Trigger tests --------- Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> --- .github/workflows/tests.yml | 8 ++ tests/datastore_pyodbc/conftest.py | 33 +++++++ tests/datastore_pyodbc/test_pyodbc.py | 120 ++++++++++++++++++++++++++ tox.ini | 3 + 4 files changed, 164 insertions(+) create mode 100644 tests/datastore_pyodbc/conftest.py create mode 100644 tests/datastore_pyodbc/test_pyodbc.py diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 15d105b83..dc73168eb 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -255,6 +255,14 @@ jobs: - uses: actions/checkout@v3 - uses: ./.github/actions/setup-python-matrix + - name: Install odbc driver for postgresql + run: | + sudo apt-get update + sudo sudo apt-get install odbc-postgresql + sudo sed -i 's/Driver=psqlodbca.so/Driver=\/usr\/lib\/x86_64-linux-gnu\/odbc\/psqlodbca.so/g' /etc/odbcinst.ini + sudo sed -i 's/Driver=psqlodbcw.so/Driver=\/usr\/lib\/x86_64-linux-gnu\/odbc\/psqlodbcw.so/g' /etc/odbcinst.ini + sudo sed -i 's/Setup=libodbcpsqlS.so/Setup=\/usr\/lib\/x86_64-linux-gnu\/odbc\/libodbcpsqlS.so/g' /etc/odbcinst.ini + - name: Get Environments id: get-envs run: | diff --git a/tests/datastore_pyodbc/conftest.py b/tests/datastore_pyodbc/conftest.py new file mode 100644 index 000000000..b00a0a663 --- /dev/null +++ b/tests/datastore_pyodbc/conftest.py @@ -0,0 +1,33 @@ +# Copyright 2010 New Relic, 
Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from testing_support.fixtures import ( # noqa: F401; pylint: disable=W0611 + collector_agent_registration_fixture, + collector_available_fixture, +) + +_default_settings = { + "transaction_tracer.explain_threshold": 0.0, + "transaction_tracer.transaction_threshold": 0.0, + "transaction_tracer.stack_trace_threshold": 0.0, + "debug.log_data_collector_payloads": True, + "debug.record_transaction_failure": True, + "debug.log_explain_plan_queries": True, +} + +collector_agent_registration = collector_agent_registration_fixture( + app_name="Python Agent Test (datastore_pyodbc)", + default_settings=_default_settings, + linked_applications=["Python Agent Test (datastore)"], +) diff --git a/tests/datastore_pyodbc/test_pyodbc.py b/tests/datastore_pyodbc/test_pyodbc.py new file mode 100644 index 000000000..119908e4d --- /dev/null +++ b/tests/datastore_pyodbc/test_pyodbc.py @@ -0,0 +1,120 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +import pytest +from testing_support.db_settings import postgresql_settings +from testing_support.validators.validate_database_trace_inputs import ( + validate_database_trace_inputs, +) +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) + +from newrelic.api.background_task import background_task + +DB_SETTINGS = postgresql_settings()[0] + + +@validate_transaction_metrics( + "test_pyodbc:test_execute_via_cursor", + scoped_metrics=[ + ("Function/pyodbc:connect", 1), + ], + rollup_metrics=[ + ("Datastore/all", 1), + ("Datastore/allOther", 1), + ("Datastore/ODBC/all", 1), + ("Datastore/ODBC/allOther", 1), + ], + background_task=True, +) +@validate_database_trace_inputs(sql_parameters_type=tuple) +@background_task() +def test_execute_via_cursor(pyodbc_driver): + import pyodbc + + with pyodbc.connect( + "DRIVER={%s};SERVER=%s;PORT=%s;DATABASE=%s;UID=%s;PWD=%s" + % ( + pyodbc_driver, + DB_SETTINGS["host"], + DB_SETTINGS["port"], + DB_SETTINGS["name"], + DB_SETTINGS["user"], + DB_SETTINGS["password"], + ) + ) as connection: + cursor = connection.cursor() + cursor.execute("""drop table if exists %s""" % DB_SETTINGS["table_name"]) + cursor.execute("""create table %s """ % DB_SETTINGS["table_name"] + """(a integer, b real, c text)""") + cursor.executemany( + """insert into %s """ % DB_SETTINGS["table_name"] + """values (?, ?, ?)""", + [(1, 1.0, "1.0"), (2, 2.2, "2.2"), (3, 3.3, "3.3")], + ) + cursor.execute("""select * from %s""" % DB_SETTINGS["table_name"]) + for row in cursor: + pass + cursor.execute( + """update %s """ % DB_SETTINGS["table_name"] + """set a=?, b=?, c=? 
where a=?""", + (4, 4.0, "4.0", 1), + ) + cursor.execute("""delete from %s where a=2""" % DB_SETTINGS["table_name"]) + connection.commit() + + cursor.execute("SELECT now()") + cursor.execute("SELECT pg_sleep(0.25)") + + connection.rollback() + connection.commit() + + +@validate_transaction_metrics( + "test_pyodbc:test_rollback_on_exception", + scoped_metrics=[ + ("Function/pyodbc:connect", 1), + ], + rollup_metrics=[ + ("Datastore/all", 1), + ("Datastore/allOther", 1), + ("Datastore/ODBC/all", 1), + ("Datastore/ODBC/allOther", 1), + ], + background_task=True, +) +@validate_database_trace_inputs(sql_parameters_type=tuple) +@background_task() +def test_rollback_on_exception(pyodbc_driver): + import pyodbc + + with pytest.raises(RuntimeError): + with pyodbc.connect( + "DRIVER={%s};SERVER=%s;PORT=%s;DATABASE=%s;UID=%s;PWD=%s" + % ( + pyodbc_driver, + DB_SETTINGS["host"], + DB_SETTINGS["port"], + DB_SETTINGS["name"], + DB_SETTINGS["user"], + DB_SETTINGS["password"], + ) + ) as connection: + raise RuntimeError("error") + + +@pytest.fixture +def pyodbc_driver(): + import pyodbc + + driver_name = "PostgreSQL Unicode" + assert driver_name in pyodbc.drivers() + return driver_name diff --git a/tox.ini b/tox.ini index 1d9110e4f..122daa68b 100644 --- a/tox.ini +++ b/tox.ini @@ -80,6 +80,7 @@ envlist = postgres-datastore_postgresql-{py37,py38,py39}, postgres-datastore_psycopg2-{py27,py37,py38,py39,py310,py311}-psycopg2latest postgres-datastore_psycopg2cffi-{py27,pypy,py37,py38,py39,py310,py311}-psycopg2cffilatest, + postgres-datastore_pyodbc-{py27,py37,py311}-pyodbclatest memcached-datastore_pylibmc-{py27,py37}, memcached-datastore_pymemcache-{py27,py37,py38,py39,py310,py311,pypy,pypy37}, mongodb-datastore_pymongo-{py27,py37,py38,py39,py310,py311,pypy}-pymongo{03}, @@ -231,6 +232,7 @@ deps = datastore_postgresql: py-postgresql<1.3 datastore_psycopg2-psycopg2latest: psycopg2-binary datastore_psycopg2cffi-psycopg2cffilatest: psycopg2cffi + datastore_pyodbc-pyodbclatest: pyodbc 
datastore_pylibmc: pylibmc datastore_pymemcache: pymemcache datastore_pymongo-pymongo03: pymongo<4.0 @@ -434,6 +436,7 @@ changedir = datastore_mysql: tests/datastore_mysql datastore_postgresql: tests/datastore_postgresql datastore_psycopg2: tests/datastore_psycopg2 + datastore_pyodbc: tests/datastore_pyodbc datastore_psycopg2cffi: tests/datastore_psycopg2cffi datastore_pylibmc: tests/datastore_pylibmc datastore_pymemcache: tests/datastore_pymemcache From be4fb3dda0e734889acd6bc53cf91f26c18c2118 Mon Sep 17 00:00:00 2001 From: Lalleh Rafeei <84813886+lrafeei@users.noreply.github.com> Date: Mon, 1 May 2023 16:01:09 -0700 Subject: [PATCH 071/108] Add tests for Waitress (#797) * Change import format * Initial commit * Add more tests to adapter_waitress * Remove commented out code * [Mega-Linter] Apply linters fixes * Add assertions to all tests * Add more NR testing to waitress --------- Co-authored-by: lrafeei Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> --- newrelic/hooks/adapter_waitress.py | 17 ++-- tests/adapter_waitress/_application.py | 54 ++++++++++ tests/adapter_waitress/conftest.py | 40 ++++++++ tests/adapter_waitress/test_wsgi.py | 101 +++++++++++++++++++ tests/testing_support/sample_applications.py | 43 +++++++- tox.ini | 8 ++ 6 files changed, 250 insertions(+), 13 deletions(-) create mode 100644 tests/adapter_waitress/_application.py create mode 100644 tests/adapter_waitress/conftest.py create mode 100644 tests/adapter_waitress/test_wsgi.py diff --git a/newrelic/hooks/adapter_waitress.py b/newrelic/hooks/adapter_waitress.py index bdeb15b37..2353510e3 100644 --- a/newrelic/hooks/adapter_waitress.py +++ b/newrelic/hooks/adapter_waitress.py @@ -12,17 +12,16 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import newrelic.api.wsgi_application -import newrelic.api.in_function +from newrelic.api.in_function import wrap_in_function +from newrelic.api.wsgi_application import WSGIApplicationWrapper +from newrelic.common.package_version_utils import get_package_version -def instrument_waitress_server(module): - def wrap_wsgi_application_entry_point(server, application, - *args, **kwargs): - application = newrelic.api.wsgi_application.WSGIApplicationWrapper( - application) +def instrument_waitress_server(module): + def wrap_wsgi_application_entry_point(server, application, *args, **kwargs): + dispatcher_details = ("Waitress", get_package_version("waitress")) + application = WSGIApplicationWrapper(application, dispatcher=dispatcher_details) args = [server, application] + list(args) return (args, kwargs) - newrelic.api.in_function.wrap_in_function(module, - 'WSGIServer.__init__', wrap_wsgi_application_entry_point) + wrap_in_function(module, "WSGIServer.__init__", wrap_wsgi_application_entry_point) diff --git a/tests/adapter_waitress/_application.py b/tests/adapter_waitress/_application.py new file mode 100644 index 000000000..c3b36f0c2 --- /dev/null +++ b/tests/adapter_waitress/_application.py @@ -0,0 +1,54 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from threading import Thread +from time import sleep + +from testing_support.sample_applications import ( + raise_exception_application, + raise_exception_finalize, + raise_exception_response, + simple_app_raw, +) +from testing_support.util import get_open_port + + +def sample_application(environ, start_response): + path_info = environ.get("PATH_INFO") + + if path_info.startswith("/raise-exception-application"): + return raise_exception_application(environ, start_response) + elif path_info.startswith("/raise-exception-response"): + return raise_exception_response(environ, start_response) + elif path_info.startswith("/raise-exception-finalize"): + return raise_exception_finalize(environ, start_response) + + return simple_app_raw(environ, start_response) + + +def setup_application(): + port = get_open_port() + + def run_wsgi(): + from waitress import serve + + serve(sample_application, host="127.0.0.1", port=port) + + wsgi_thread = Thread(target=run_wsgi) + wsgi_thread.daemon = True + wsgi_thread.start() + + sleep(1) + + return port diff --git a/tests/adapter_waitress/conftest.py b/tests/adapter_waitress/conftest.py new file mode 100644 index 000000000..aecbfd86d --- /dev/null +++ b/tests/adapter_waitress/conftest.py @@ -0,0 +1,40 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import pytest +import webtest +from testing_support.fixtures import ( # noqa: F401; pylint: disable=W0611 + collector_agent_registration_fixture, + collector_available_fixture, +) + +_default_settings = { + "transaction_tracer.explain_threshold": 0.0, + "transaction_tracer.transaction_threshold": 0.0, + "transaction_tracer.stack_trace_threshold": 0.0, + "debug.log_data_collector_payloads": True, + "debug.record_transaction_failure": True, +} + +collector_agent_registration = collector_agent_registration_fixture( + app_name="Python Agent Test (Waitress)", default_settings=_default_settings +) + + +@pytest.fixture(autouse=True, scope="session") +def target_application(): + import _application + + port = _application.setup_application() + return webtest.TestApp("http://localhost:%d" % port) diff --git a/tests/adapter_waitress/test_wsgi.py b/tests/adapter_waitress/test_wsgi.py new file mode 100644 index 000000000..c9fa42719 --- /dev/null +++ b/tests/adapter_waitress/test_wsgi.py @@ -0,0 +1,101 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +from testing_support.fixtures import ( + override_application_settings, + raise_background_exceptions, + wait_for_background_threads, +) +from testing_support.validators.validate_transaction_errors import ( + validate_transaction_errors, +) +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) + +from newrelic.common.package_version_utils import get_package_version + +WAITRESS_VERSION = get_package_version("waitress") + + +@override_application_settings({"transaction_name.naming_scheme": "framework"}) +def test_wsgi_application_index(target_application): + @validate_transaction_metrics( + "_application:sample_application", + custom_metrics=[ + ("Python/Dispatcher/Waitress/%s" % WAITRESS_VERSION, 1), + ], + ) + @raise_background_exceptions() + @wait_for_background_threads() + def _test(): + response = target_application.get("/") + assert response.status == "200 OK" + + _test() + + +@override_application_settings({"transaction_name.naming_scheme": "framework"}) +def test_raise_exception_application(target_application): + @validate_transaction_errors(["builtins:RuntimeError"]) + @validate_transaction_metrics( + "_application:sample_application", + custom_metrics=[ + ("Python/Dispatcher/Waitress/%s" % WAITRESS_VERSION, 1), + ], + ) + @raise_background_exceptions() + @wait_for_background_threads() + def _test(): + response = target_application.get("/raise-exception-application/", status=500) + assert response.status == "500 Internal Server Error" + + _test() + + +@override_application_settings({"transaction_name.naming_scheme": "framework"}) +def test_raise_exception_response(target_application): + @validate_transaction_errors(["builtins:RuntimeError"]) + @validate_transaction_metrics( + "_application:sample_application", + custom_metrics=[ + ("Python/Dispatcher/Waitress/%s" % WAITRESS_VERSION, 1), + ], + ) + @raise_background_exceptions() + @wait_for_background_threads() + def _test(): + response = 
target_application.get("/raise-exception-response/", status=500) + assert response.status == "500 Internal Server Error" + + _test() + + +@override_application_settings({"transaction_name.naming_scheme": "framework"}) +def test_raise_exception_finalize(target_application): + @validate_transaction_errors(["builtins:RuntimeError"]) + @validate_transaction_metrics( + "_application:sample_application", + custom_metrics=[ + ("Python/Dispatcher/Waitress/%s" % WAITRESS_VERSION, 1), + ], + ) + @raise_background_exceptions() + @wait_for_background_threads() + def _test(): + response = target_application.get("/raise-exception-finalize/", status=500) + assert response.status == "500 Internal Server Error" + + _test() diff --git a/tests/testing_support/sample_applications.py b/tests/testing_support/sample_applications.py index 7973a4e11..3c8cc6a9c 100644 --- a/tests/testing_support/sample_applications.py +++ b/tests/testing_support/sample_applications.py @@ -79,7 +79,6 @@ def fully_featured_app(environ, start_response): environ["wsgi.input"].readlines() if use_user_attrs: - for attr, val in _custom_parameters.items(): add_custom_attribute(attr, val) @@ -97,7 +96,6 @@ def fully_featured_app(environ, start_response): n_errors = int(environ.get("n_errors", 1)) for i in range(n_errors): try: - # append number to stats engine to get unique errors, so they # don't immediately get filtered out. 
@@ -122,7 +120,6 @@ def fully_featured_app(environ, start_response): @wsgi_application() def simple_exceptional_app(environ, start_response): - start_response("500 :(", []) raise ValueError("Transaction had bad value") @@ -140,9 +137,47 @@ def simple_app_raw(environ, start_response): simple_app = wsgi_application()(simple_app_raw) +def raise_exception_application(environ, start_response): + raise RuntimeError("raise_exception_application") + + status = "200 OK" + output = b"WSGI RESPONSE" + + response_headers = [("Content-type", "text/plain"), ("Content-Length", str(len(output)))] + start_response(status, response_headers) + + return [output] + + +def raise_exception_response(environ, start_response): + status = "200 OK" + + response_headers = [("Content-type", "text/plain")] + start_response(status, response_headers) + + yield b"WSGI" + + raise RuntimeError("raise_exception_response") + + yield b" " + yield b"RESPONSE" + + +def raise_exception_finalize(environ, start_response): + status = "200 OK" + + response_headers = [("Content-type", "text/plain")] + start_response(status, response_headers) + + try: + yield b"WSGI RESPONSE" + + finally: + raise RuntimeError("raise_exception_finalize") + + @wsgi_application() def simple_custom_event_app(environ, start_response): - params = {"snowman": "\u2603", "foo": "bar"} record_custom_event("SimpleAppEvent", params) diff --git a/tox.ini b/tox.ini index 122daa68b..2c6051730 100644 --- a/tox.ini +++ b/tox.ini @@ -51,6 +51,9 @@ envlist = python-adapter_hypercorn-py38-hypercorn{0010,0011,0012,0013}, python-adapter_uvicorn-py37-uvicorn03, python-adapter_uvicorn-{py37,py38,py39,py310,py311}-uvicornlatest, + python-adapter_waitress-{py37,py38,py39}-waitress010404, + python-adapter_waitress-{py37,py38,py39,py310}-waitress02, + python-adapter_waitress-{py37,py38,py39,py310,py311}-waitresslatest, python-agent_features-{py27,py37,py38,py39,py310,py311}-{with,without}_extensions, python-agent_features-{pypy,pypy37}-without_extensions, 
python-agent_streaming-py27-grpc0125-{with,without}_extensions, @@ -192,6 +195,10 @@ deps = adapter_uvicorn-uvicorn03: uvicorn<0.4 adapter_uvicorn-uvicorn014: uvicorn<0.15 adapter_uvicorn-uvicornlatest: uvicorn + adapter_waitress: WSGIProxy2 + adapter_waitress-waitress010404: waitress<1.4.5 + adapter_waitress-waitress02: waitress<2.1 + adapter_waitress-waitresslatest: waitress agent_features: beautifulsoup4 application_celery: celery<6.0 application_celery-py{py37,37}: importlib-metadata<5.0 @@ -418,6 +425,7 @@ changedir = adapter_gunicorn: tests/adapter_gunicorn adapter_hypercorn: tests/adapter_hypercorn adapter_uvicorn: tests/adapter_uvicorn + adapter_waitress: tests/adapter_waitress agent_features: tests/agent_features agent_streaming: tests/agent_streaming agent_unittests: tests/agent_unittests From fd0fa35466b630e34e8476cc53ad0e163564e2de Mon Sep 17 00:00:00 2001 From: Uma Annamalai Date: Tue, 2 May 2023 10:55:36 -0700 Subject: [PATCH 072/108] Add testing for genshi and mako. (#799) * Add testing for genshi and mako. * [Mega-Linter] Apply linters fixes --------- Co-authored-by: umaannamalai Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> --- tests/template_genshi/conftest.py | 30 ++++++++++++++++++++++ tests/template_genshi/test_genshi.py | 38 ++++++++++++++++++++++++++++ tests/template_mako/test_mako.py | 17 ++++++++++--- tox.ini | 7 +++-- 4 files changed, 86 insertions(+), 6 deletions(-) create mode 100644 tests/template_genshi/conftest.py create mode 100644 tests/template_genshi/test_genshi.py diff --git a/tests/template_genshi/conftest.py b/tests/template_genshi/conftest.py new file mode 100644 index 000000000..932ec9bae --- /dev/null +++ b/tests/template_genshi/conftest.py @@ -0,0 +1,30 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from testing_support.fixtures import ( # noqa: F401; pylint: disable=W0611 + collector_agent_registration_fixture, + collector_available_fixture, +) + +_default_settings = { + "transaction_tracer.explain_threshold": 0.0, + "transaction_tracer.transaction_threshold": 0.0, + "transaction_tracer.stack_trace_threshold": 0.0, + "debug.log_data_collector_payloads": True, + "debug.record_transaction_failure": True, +} + +collector_agent_registration = collector_agent_registration_fixture( + app_name="Python Agent Test (template_genshi)", default_settings=_default_settings +) diff --git a/tests/template_genshi/test_genshi.py b/tests/template_genshi/test_genshi.py new file mode 100644 index 000000000..03420579e --- /dev/null +++ b/tests/template_genshi/test_genshi.py @@ -0,0 +1,38 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from genshi.template import MarkupTemplate +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) + +from newrelic.api.background_task import background_task + + +@validate_transaction_metrics( + "test_render", + background_task=True, + scoped_metrics=(("Template/Render/genshi.core:Stream.render", 1),), +) +@background_task(name="test_render") +def test_render(): + template_to_render = MarkupTemplate("

hello, $name!

") + result = template_to_render.generate(name="NR").render("xhtml") + assert result == "

hello, NR!

" + + +def test_render_outside_txn(): + template_to_render = MarkupTemplate("

hello, $name!

") + result = template_to_render.generate(name="NR").render("xhtml") + assert result == "

hello, NR!

" diff --git a/tests/template_mako/test_mako.py b/tests/template_mako/test_mako.py index 07d8f4974..2b6da130a 100644 --- a/tests/template_mako/test_mako.py +++ b/tests/template_mako/test_mako.py @@ -13,17 +13,26 @@ # limitations under the License. from mako.template import Template -from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) + from newrelic.api.background_task import background_task @validate_transaction_metrics( - 'test_render', + "test_render", background_task=True, - scoped_metrics=(('Template/Render/