From fee51a4b31040cf4fed7c45a191876de0f14705e Mon Sep 17 00:00:00 2001 From: Jair Henrique Date: Thu, 10 Oct 2024 17:51:21 -0300 Subject: [PATCH 1/6] Adds Valkey support --- .github/workflows/tests.yml | 63 ++ newrelic/config.py | 54 ++ newrelic/hooks/datastore_valkey.py | 761 ++++++++++++++++++ tests/datastore_valkey/conftest.py | 37 + tests/datastore_valkey/test_asyncio.py | 161 ++++ .../datastore_valkey/test_custom_conn_pool.py | 160 ++++ .../datastore_valkey/test_execute_command.py | 182 +++++ tests/datastore_valkey/test_generators.py | 294 +++++++ tests/datastore_valkey/test_get_and_set.py | 129 +++ tests/datastore_valkey/test_instance_info.py | 175 ++++ tests/datastore_valkey/test_multiple_dbs.py | 145 ++++ tests/datastore_valkey/test_span_event.py | 146 ++++ tests/datastore_valkey/test_trace_node.py | 132 +++ .../test_uninstrumented_methods.py | 128 +++ tests/testing_support/db_settings.py | 47 ++ tox.ini | 6 + 16 files changed, 2620 insertions(+) create mode 100644 newrelic/hooks/datastore_valkey.py create mode 100644 tests/datastore_valkey/conftest.py create mode 100644 tests/datastore_valkey/test_asyncio.py create mode 100644 tests/datastore_valkey/test_custom_conn_pool.py create mode 100644 tests/datastore_valkey/test_execute_command.py create mode 100644 tests/datastore_valkey/test_generators.py create mode 100644 tests/datastore_valkey/test_get_and_set.py create mode 100644 tests/datastore_valkey/test_instance_info.py create mode 100644 tests/datastore_valkey/test_multiple_dbs.py create mode 100644 tests/datastore_valkey/test_span_event.py create mode 100644 tests/datastore_valkey/test_trace_node.py create mode 100644 tests/datastore_valkey/test_uninstrumented_methods.py diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 4bb929904..4510b04fa 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -49,6 +49,7 @@ jobs: - redis - rediscluster - solr + - valkey steps: - name: Success @@ -1198,3 +1199,65 @@ 
jobs: name: coverage-${{ github.job }}-${{ strategy.job-index }} path: ./**/.coverage.* retention-days: 1 + + valkey: + env: + TOTAL_GROUPS: 2 + + strategy: + fail-fast: false + matrix: + group-number: [1, 2] + + runs-on: ubuntu-latest + container: + image: ghcr.io/newrelic/newrelic-python-agent-ci:latest + options: >- + --add-host=host.docker.internal:host-gateway + timeout-minutes: 30 + services: + valkey: + image: valkey/valkey + ports: + - 8080:6379 + - 8081:6379 + # Set health checks to wait until valkey has started + options: >- + --health-cmd "valkey-cli ping" + --health-interval 10s + --health-timeout 5s + --health-retries 5 + + steps: + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # 4.1.1 + + - name: Fetch git tags + run: | + git config --global --add safe.directory "$GITHUB_WORKSPACE" + git fetch --tags origin + + - name: Configure pip cache + run: | + mkdir -p /github/home/.cache/pip + chown -R $(whoami) /github/home/.cache/pip + + - name: Get Environments + id: get-envs + run: | + echo "envs=$(tox -l | grep '^${{ github.job }}\-' | ./.github/workflows/get-envs.py)" >> $GITHUB_OUTPUT + env: + GROUP_NUMBER: ${{ matrix.group-number }} + + - name: Test + run: | + tox -vv -e ${{ steps.get-envs.outputs.envs }} -p auto + env: + TOX_PARALLEL_NO_SPINNER: 1 + PY_COLORS: 0 + + - name: Upload Coverage Artifacts + uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # 4.3.1 + with: + name: coverage-${{ github.job }}-${{ strategy.job-index }} + path: ./**/.coverage.* + retention-days: 1 diff --git a/newrelic/config.py b/newrelic/config.py index 7fd6f31b8..fcd1a526a 100644 --- a/newrelic/config.py +++ b/newrelic/config.py @@ -3621,6 +3621,60 @@ def _process_module_builtin_defaults(): "redis.commands.graph.commands", "newrelic.hooks.datastore_redis", "instrument_redis_commands_graph_commands" ) + _process_module_definition( + "valkey.asyncio.client", "newrelic.hooks.datastore_valkey", "instrument_asyncio_valkey_client" + ) + + 
_process_module_definition( + "valkey.asyncio.commands", "newrelic.hooks.datastore_valkey", "instrument_asyncio_valkey_client" + ) + + _process_module_definition( + "valkey.asyncio.connection", "newrelic.hooks.datastore_valkey", "instrument_asyncio_valkey_connection" + ) + + _process_module_definition( + "valkey.connection", + "newrelic.hooks.datastore_valkey", + "instrument_valkey_connection", + ) + _process_module_definition("valkey.client", "newrelic.hooks.datastore_valkey", "instrument_valkey_client") + + _process_module_definition( + "valkey.commands.cluster", "newrelic.hooks.datastore_valkey", "instrument_valkey_commands_cluster" + ) + + _process_module_definition( + "valkey.commands.core", "newrelic.hooks.datastore_valkey", "instrument_valkey_commands_core" + ) + + _process_module_definition( + "valkey.commands.sentinel", "newrelic.hooks.datastore_valkey", "instrument_valkey_commands_sentinel" + ) + + _process_module_definition( + "valkey.commands.json.commands", "newrelic.hooks.datastore_valkey", "instrument_valkey_commands_json_commands" + ) + + _process_module_definition( + "valkey.commands.search.commands", "newrelic.hooks.datastore_valkey", "instrument_valkey_commands_search_commands" + ) + + _process_module_definition( + "valkey.commands.timeseries.commands", + "newrelic.hooks.datastore_valkey", + "instrument_valkey_commands_timeseries_commands", + ) + + _process_module_definition( + "valkey.commands.bf.commands", "newrelic.hooks.datastore_valkey", "instrument_valkey_commands_bf_commands" + ) + + _process_module_definition( + "valkey.commands.graph.commands", "newrelic.hooks.datastore_valkey", "instrument_valkey_commands_graph_commands" + ) + + _process_module_definition("motor", "newrelic.hooks.datastore_motor", "patch_motor") _process_module_definition( diff --git a/newrelic/hooks/datastore_valkey.py b/newrelic/hooks/datastore_valkey.py new file mode 100644 index 000000000..2794656ce --- /dev/null +++ b/newrelic/hooks/datastore_valkey.py @@ -0,0 
+1,761 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import re + +from newrelic.api.datastore_trace import ( + DatastoreTrace, + DatastoreTraceWrapper, + wrap_datastore_trace, +) +from newrelic.api.time_trace import current_trace +from newrelic.api.transaction import current_transaction +from newrelic.common.async_wrapper import ( + async_generator_wrapper, + coroutine_wrapper, + generator_wrapper, +) +from newrelic.common.object_wrapper import wrap_function_wrapper + +_valkey_client_sync_methods = { + "acl_dryrun", + "auth", + "bgrewriteaof", + "bitfield", + "blmpop", + "bzmpop", + "client", + "command", + "command_docs", + "command_getkeysandflags", + "command_info", + "debug_segfault", + "expiretime", + "failover", + "hello", + "hexpire", + "hexpireat", + "hexpiretime", + "hpersist", + "hpexpire", + "hpexpireat", + "hpexpiretime", + "hpttl", + "httl", + "latency_doctor", + "latency_graph", + "latency_histogram", + "lcs", + "lpop", + "lpos", + "memory_doctor", + "memory_help", + "monitor", + "pexpiretime", + "psetex", + "psync", + "pubsub", + "renamenx", + "rpop", + "script_debug", + "sentinel_ckquorum", + "sentinel_failover", + "sentinel_flushconfig", + "sentinel_get_master_addr_by_name", + "sentinel_master", + "sentinel_masters", + "sentinel_monitor", + "sentinel_remove", + "sentinel_reset", + "sentinel_sentinels", + "sentinel_set", + "sentinel_slaves", + "shutdown", + "sort", + "sort_ro", + "spop", + "srandmember", + 
"unwatch", + "watch", + "zlexcount", + "zrevrangebyscore", +} + + +_valkey_client_async_methods = { + "acl_cat", + "acl_deluser", + "acl_genpass", + "acl_getuser", + "acl_help", + "acl_list", + "acl_load", + "acl_log_reset", + "acl_log", + "acl_save", + "acl_setuser", + "acl_users", + "acl_whoami", + "add_document_hash", + "add_document", + "add", + "addnx", + "aggregate", + "aliasadd", + "aliasdel", + "aliasupdate", + "alter_schema_add", + "alter", + "append", + "arrappend", + "arrindex", + "arrinsert", + "arrlen", + "arrpop", + "arrtrim", + "bgsave", + "bitcount", + "bitfield_ro", + "bitop_and", + "bitop_not", + "bitop_or", + "bitop_xor", + "bitop", + "bitpos", + "blmove", + "blpop", + "brpop", + "brpoplpush", + "byrank", + "byrevrank", + "bzpopmax", + "bzpopmin", + "card", + "cdf", + "clear", + "client_getname", + "client_getredir", + "client_id", + "client_info", + "client_kill_filter", + "client_kill", + "client_list", + "client_no_evict", + "client_pause", + "client_reply", + "client_setinfo", + "client_setname", + "client_tracking", + "client_trackinginfo", + "client_unblock", + "client_unpause", + "cluster_add_slots", + "cluster_addslots", + "cluster_count_failure_report", + "cluster_count_failure_reports", + "cluster_count_key_in_slots", + "cluster_countkeysinslot", + "cluster_del_slots", + "cluster_delslots", + "cluster_failover", + "cluster_forget", + "cluster_get_keys_in_slot", + "cluster_get_keys_in_slots", + "cluster_info", + "cluster_keyslot", + "cluster_meet", + "cluster_nodes", + "cluster_replicate", + "cluster_reset_all_nodes", + "cluster_reset", + "cluster_save_config", + "cluster_set_config_epoch", + "cluster_setslot", + "cluster_slaves", + "cluster_slots", + "cluster", + "command_count", + "command_getkeys", + "command_list", + "command", + "commit", + "config_get", + "config_resetstat", + "config_rewrite", + "config_set", + "config", + "copy", + "count", + "create_index", + "create", + "createrule", + "dbsize", + "debug_object", + 
"debug_sleep", + "debug", + "decr", + "decrby", + "delete_document", + "delete", + "deleterule", + "dict_add", + "dict_del", + "dict_dump", + "drop_index", + "dropindex", + "dump", + "echo", + "eval_ro", + "eval", + "evalsha_ro", + "evalsha", + "execution_plan", + "exists", + "expire", + "expireat", + "explain_cli", + "explain", + "fcall_ro", + "fcall", + "flushall", + "flushdb", + "forget", + "function_delete", + "function_dump", + "function_flush", + "function_kill", + "function_list", + "function_load", + "function_restore", + "function_stats", + "gears_refresh_cluster", + "geoadd", + "geodist", + "geohash", + "geopos", + "georadius", + "georadiusbymember", + "geosearch", + "geosearchstore", + "get", + "getbit", + "getdel", + "getex", + "getrange", + "getset", + "hdel", + "hexists", + "hget", + "hgetall", + "hincrby", + "hincrbyfloat", + "hkeys", + "hlen", + "hmget", + "hmset_dict", + "hmset", + "hrandfield", + "hscan_iter", + "hscan", + "hset", + "hsetnx", + "hstrlen", + "hvals", + "incr", + "incrby", + "incrbyfloat", + "info", + "initbydim", + "initbyprob", + "insert", + "insertnx", + "keys", + "lastsave", + "latency_history", + "latency_latest", + "latency_reset", + "lindex", + "linsert", + "list", + "llen", + "lmove", + "lmpop", + "loadchunk", + "lolwut", + "lpush", + "lpushx", + "lrange", + "lrem", + "lset", + "ltrim", + "madd", + "max", + "memory_malloc_stats", + "memory_purge", + "memory_stats", + "memory_usage", + "merge", + "mexists", + "mget", + "migrate_keys", + "migrate", + "min", + "module_list", + "module_load", + "module_loadex", + "module_unload", + "move", + "mrange", + "mrevrange", + "mset", + "msetnx", + "numincrby", + "object_encoding", + "object_idletime", + "object_refcount", + "object", + "objkeys", + "objlen", + "persist", + "pexpire", + "pexpireat", + "pfadd", + "pfcount", + "pfmerge", + "ping", + "profile", + "psubscribe", + "pttl", + "publish", + "pubsub_channels", + "pubsub_numpat", + "pubsub_numsub", + "pubsub_shardchannels", + 
"pubsub_shardnumsub", + "punsubscribe", + "quantile", + "query", + "queryindex", + "quit", + "randomkey", + "range", + "rank", + "readonly", + "readwrite", + "rename", + "replicaof", + "reserve", + "reset", + "resp", + "restore", + "revrange", + "revrank", + "role", + "rpoplpush", + "rpush", + "rpushx", + "sadd", + "save", + "scan_iter", + "scan", + "scandump", + "scard", + "script_exists", + "script_flush", + "script_kill", + "script_load", + "sdiff", + "sdiffstore", + "search", + "select", + "set", + "setbit", + "setex", + "setnx", + "setrange", + "sinter", + "sintercard", + "sinterstore", + "sismember", + "slaveof", + "slowlog_get", + "slowlog_len", + "slowlog_reset", + "slowlog", + "smembers", + "smismember", + "smove", + "spellcheck", + "spublish", + "srem", + "sscan_iter", + "sscan", + "stralgo", + "strappend", + "strlen", + "subscribe", + "substr", + "sugadd", + "sugdel", + "sugget", + "suglen", + "sunion", + "sunionstore", + "swapdb", + "sync", + "syndump", + "synupdate", + "tagvals", + "tfcall_async", + "tfcall", + "tfunction_delete", + "tfunction_list", + "tfunction_load", + "time", + "toggle", + "touch", + "trimmed_mean", + "ttl", + "type", + "unlink", + "unsubscribe", + "wait", + "waitaof", + "xack", + "xadd", + "xautoclaim", + "xclaim", + "xdel", + "xgroup_create", + "xgroup_createconsumer", + "xgroup_del_consumer", + "xgroup_delconsumer", + "xgroup_destroy", + "xgroup_set_id", + "xgroup_setid", + "xinfo_consumers", + "xinfo_groups", + "xinfo_help", + "xinfo_stream", + "xlen", + "xpending_range", + "xpending", + "xrange", + "xread_group", + "xread", + "xreadgroup", + "xrevrange", + "xtrim", + "zadd", + "zaddoption", + "zcard", + "zcount", + "zdiff", + "zdiffstore", + "zincrby", + "zinter", + "zintercard", + "zinterstore", + "zmpop", + "zmscore", + "zpopmax", + "zpopmin", + "zrandmember", + "zrange", + "zrangebylex", + "zrangebyscore", + "zrangestore", + "zrank", + "zrem", + "zremrangebylex", + "zremrangebyrank", + "zremrangebyscore", + "zrevrange", + 
"zrevrangebylex", + "zrevrank", + "zscan_iter", + "zscan", + "zscore", + "zunion", + "zunionstore", +} + +_valkey_client_gen_methods = { + "scan_iter", + "hscan_iter", + "sscan_iter", + "zscan_iter", +} + +_valkey_client_methods = _valkey_client_sync_methods.union(_valkey_client_async_methods) + +_valkey_multipart_commands = set( + ["client", "cluster", "command", "config", "debug", "sentinel", "slowlog", "script"] +) + +_valkey_operation_re = re.compile(r"[-\s]+") + + +def _conn_attrs_to_dict(connection): + return { + "host": getattr(connection, "host", None), + "port": getattr(connection, "port", None), + "path": getattr(connection, "path", None), + "db": getattr(connection, "db", None), + } + + +def _instance_info(kwargs): + host = kwargs.get("host") or "localhost" + port_path_or_id = str(kwargs.get("path") or kwargs.get("port", "unknown")) + db = str(kwargs.get("db") or 0) + + return (host, port_path_or_id, db) + + +def _wrap_Valkey_method_wrapper_(module, instance_class_name, operation): + name = f"{instance_class_name}.{operation}" + if operation in _valkey_client_gen_methods: + async_wrapper = generator_wrapper + else: + async_wrapper = None + + wrap_datastore_trace( + module, + name, + product="Valkey", + target=None, + operation=operation, + async_wrapper=async_wrapper, + ) + + +def _wrap_asyncio_Valkey_method_wrapper(module, instance_class_name, operation): + def _nr_wrapper_asyncio_Valkey_method_(wrapped, instance, args, kwargs): + from valkey.asyncio.client import Pipeline + + if isinstance(instance, Pipeline): + return wrapped(*args, **kwargs) + + # Method should be run when awaited or iterated, therefore we wrap in an async wrapper. 
+ return DatastoreTraceWrapper( + wrapped, + product="Valkey", + target=None, + operation=operation, + async_wrapper=async_wrapper, + )(*args, **kwargs) + + name = f"{instance_class_name}.{operation}" + if operation in _valkey_client_gen_methods: + async_wrapper = async_generator_wrapper + else: + async_wrapper = coroutine_wrapper + + wrap_function_wrapper(module, name, _nr_wrapper_asyncio_Valkey_method_) + + +async def wrap_async_Connection_send_command(wrapped, instance, args, kwargs): + transaction = current_transaction() + if not transaction or not args: + return await wrapped(*args, **kwargs) + + host, port_path_or_id, db = (None, None, None) + + try: + dt = transaction.settings.datastore_tracer + if dt.instance_reporting.enabled or dt.database_name_reporting.enabled: + conn_kwargs = _conn_attrs_to_dict(instance) + host, port_path_or_id, db = _instance_info(conn_kwargs) + except Exception: + pass + + # Older Valkey clients would when sending multi part commands pass + # them in as separate arguments to send_command(). Need to therefore + # detect those and grab the next argument from the set of arguments. + + operation = args[0].strip().lower() + + # If it's not a multi part command, there's no need to trace it, so + # we can return early. + + if ( + operation.split()[0] not in _valkey_multipart_commands + ): # Set the datastore info on the DatastoreTrace containing this function call. 
+ trace = current_trace() + + # Find DatastoreTrace no matter how many other traces are inbetween + while trace is not None and not isinstance(trace, DatastoreTrace): + trace = getattr(trace, "parent", None) + + if trace is not None: + trace.host = host + trace.port_path_or_id = port_path_or_id + trace.database_name = db + + return await wrapped(*args, **kwargs) + + # Convert multi args to single arg string + + if operation in _valkey_multipart_commands and len(args) > 1: + operation = f"{operation} {args[1].strip().lower()}" + + operation = _valkey_operation_re.sub("_", operation) + + with DatastoreTrace( + product="Valkey", + target=None, + operation=operation, + host=host, + port_path_or_id=port_path_or_id, + database_name=db, + ): + return await wrapped(*args, **kwargs) + + +def _nr_Connection_send_command_wrapper_(wrapped, instance, args, kwargs): + transaction = current_transaction() + + if transaction is None or not args: + return wrapped(*args, **kwargs) + + host, port_path_or_id, db = (None, None, None) + + try: + dt = transaction.settings.datastore_tracer + if dt.instance_reporting.enabled or dt.database_name_reporting.enabled: + conn_kwargs = _conn_attrs_to_dict(instance) + host, port_path_or_id, db = _instance_info(conn_kwargs) + except Exception: + pass + + # Find DatastoreTrace no matter how many other traces are inbetween + trace = current_trace() + while trace is not None and not isinstance(trace, DatastoreTrace): + trace = getattr(trace, "parent", None) + + if trace is not None: + trace.host = host + trace.port_path_or_id = port_path_or_id + trace.database_name = db + + # Older Valkey clients would when sending multi part commands pass + # them in as separate arguments to send_command(). Need to therefore + # detect those and grab the next argument from the set of arguments. + + operation = args[0].strip().lower() + + # If it's not a multi part command, there's no need to trace it, so + # we can return early. 
+ + if operation.split()[0] not in _valkey_multipart_commands: + return wrapped(*args, **kwargs) + + # Convert multi args to single arg string + + if operation in _valkey_multipart_commands and len(args) > 1: + operation = f"{operation} {args[1].strip().lower()}" + + operation = _valkey_operation_re.sub("_", operation) + + with DatastoreTrace( + product="Valkey", + target=None, + operation=operation, + host=host, + port_path_or_id=port_path_or_id, + database_name=db, + source=wrapped, + ): + return wrapped(*args, **kwargs) + + +def instrument_valkey_client(module): + if hasattr(module, "StrictValkey"): + for name in _valkey_client_methods: + if name in vars(module.StrictValkey): + _wrap_Valkey_method_wrapper_(module, "StrictValkey", name) + + if hasattr(module, "Valkey"): + for name in _valkey_client_methods: + if name in vars(module.Valkey): + _wrap_Valkey_method_wrapper_(module, "Valkey", name) + + +def instrument_asyncio_valkey_client(module): + if hasattr(module, "Valkey"): + class_ = getattr(module, "Valkey") + for operation in _valkey_client_async_methods: + if hasattr(class_, operation): + _wrap_asyncio_Valkey_method_wrapper(module, "Valkey", operation) + + +def instrument_valkey_commands_core(module): + _instrument_valkey_commands_module(module, "CoreCommands") + + +def instrument_valkey_commands_sentinel(module): + _instrument_valkey_commands_module(module, "SentinelCommands") + + +def instrument_valkey_commands_json_commands(module): + _instrument_valkey_commands_module(module, "JSONCommands") + + +def instrument_valkey_commands_search_commands(module): + _instrument_valkey_commands_module(module, "SearchCommands") + + +def instrument_valkey_commands_timeseries_commands(module): + _instrument_valkey_commands_module(module, "TimeSeriesCommands") + + +def instrument_valkey_commands_graph_commands(module): + _instrument_valkey_commands_module(module, "GraphCommands") + + +def instrument_valkey_commands_bf_commands(module): + 
_instrument_valkey_commands_module(module, "BFCommands") + _instrument_valkey_commands_module(module, "CFCommands") + _instrument_valkey_commands_module(module, "CMSCommands") + _instrument_valkey_commands_module(module, "TDigestCommands") + _instrument_valkey_commands_module(module, "TOPKCommands") + + +def instrument_valkey_commands_cluster(module): + _instrument_valkey_commands_module(module, "ValkeyClusterCommands") + + +def _instrument_valkey_commands_module(module, class_name): + for name in _valkey_client_methods: + if hasattr(module, class_name): + class_instance = getattr(module, class_name) + if hasattr(class_instance, name): + _wrap_Valkey_method_wrapper_(module, class_name, name) + + +def instrument_valkey_connection(module): + if hasattr(module, "Connection"): + if hasattr(module.Connection, "send_command"): + wrap_function_wrapper( + module, "Connection.send_command", _nr_Connection_send_command_wrapper_ + ) + + +def instrument_asyncio_valkey_connection(module): + if hasattr(module, "Connection"): + if hasattr(module.Connection, "send_command"): + wrap_function_wrapper( + module, "Connection.send_command", wrap_async_Connection_send_command + ) diff --git a/tests/datastore_valkey/conftest.py b/tests/datastore_valkey/conftest.py new file mode 100644 index 000000000..41ab651d1 --- /dev/null +++ b/tests/datastore_valkey/conftest.py @@ -0,0 +1,37 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +from testing_support.fixture.event_loop import ( # noqa: F401; pylint: disable=W0611 + event_loop as loop, +) +from testing_support.fixtures import ( # noqa: F401; pylint: disable=W0611 + collector_agent_registration_fixture, + collector_available_fixture, +) + +_default_settings = { + "package_reporting.enabled": False, # Turn off package reporting for testing as it causes slow downs. + "transaction_tracer.explain_threshold": 0.0, + "transaction_tracer.transaction_threshold": 0.0, + "transaction_tracer.stack_trace_threshold": 0.0, + "debug.log_data_collector_payloads": True, + "debug.record_transaction_failure": True, +} + +collector_agent_registration = collector_agent_registration_fixture( + app_name="Python Agent Test (datastore_valkey)", + default_settings=_default_settings, + linked_applications=["Python Agent Test (datastore)"], +) diff --git a/tests/datastore_valkey/test_asyncio.py b/tests/datastore_valkey/test_asyncio.py new file mode 100644 index 000000000..165815457 --- /dev/null +++ b/tests/datastore_valkey/test_asyncio.py @@ -0,0 +1,161 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import asyncio + +import pytest +from testing_support.db_settings import valkey_settings +from testing_support.fixture.event_loop import event_loop as loop # noqa: F401 +from testing_support.util import instance_hostname +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) + +from newrelic.api.background_task import background_task + +# Settings +DB_SETTINGS = valkey_settings()[0] + +# Metrics for publish test +datastore_all_metric_count = 5 + +_base_scoped_metrics = [("Datastore/operation/Valkey/publish", 3)] + + +_base_scoped_metrics.append( + ("Datastore/operation/Valkey/client_setinfo", 2), +) + +_base_rollup_metrics = [ + ("Datastore/all", datastore_all_metric_count), + ("Datastore/allOther", datastore_all_metric_count), + ("Datastore/Valkey/all", datastore_all_metric_count), + ("Datastore/Valkey/allOther", datastore_all_metric_count), + ("Datastore/operation/Valkey/publish", 3), + ( + f"Datastore/instance/Valkey/{instance_hostname(DB_SETTINGS['host'])}/{DB_SETTINGS['port']}", + datastore_all_metric_count, + ), +] +_base_rollup_metrics.append( + ("Datastore/operation/Valkey/client_setinfo", 2), +) + + +# Metrics for connection pool test + +_base_pool_scoped_metrics = [ + ("Datastore/operation/Valkey/get", 1), + ("Datastore/operation/Valkey/set", 1), + ("Datastore/operation/Valkey/client_list", 1), +] + +_base_pool_rollup_metrics = [ + ("Datastore/all", 3), + ("Datastore/allOther", 3), + ("Datastore/Valkey/all", 3), + ("Datastore/Valkey/allOther", 3), + ("Datastore/operation/Valkey/get", 1), + ("Datastore/operation/Valkey/set", 1), + ("Datastore/operation/Valkey/client_list", 1), + ( + f"Datastore/instance/Valkey/{instance_hostname(DB_SETTINGS['host'])}/{DB_SETTINGS['port']}", + 3, + ), +] + + +# Tests + + +@pytest.fixture() +def client(loop): # noqa + import valkey.asyncio + + return loop.run_until_complete( + valkey.asyncio.Valkey(host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0) + ) + + 
+@pytest.fixture() +def client_pool(loop): # noqa + import valkey.asyncio + + connection_pool = valkey.asyncio.ConnectionPool( + host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0 + ) + return loop.run_until_complete( + valkey.asyncio.Valkey(connection_pool=connection_pool) + ) + + +@validate_transaction_metrics( + "test_asyncio:test_async_connection_pool", + scoped_metrics=_base_pool_scoped_metrics, + rollup_metrics=_base_pool_rollup_metrics, + background_task=True, +) +@background_task() +def test_async_connection_pool(client_pool, loop): # noqa + async def _test_async_pool(client_pool): + await client_pool.set("key1", "value1") + await client_pool.get("key1") + await client_pool.execute_command("CLIENT", "LIST") + + loop.run_until_complete(_test_async_pool(client_pool)) + + +@validate_transaction_metrics("test_asyncio:test_async_pipeline", background_task=True) +@background_task() +def test_async_pipeline(client, loop): # noqa + async def _test_pipeline(client): + async with client.pipeline(transaction=True) as pipe: + await pipe.set("key1", "value1") + await pipe.execute() + + loop.run_until_complete(_test_pipeline(client)) + + +@validate_transaction_metrics( + "test_asyncio:test_async_pubsub", + scoped_metrics=_base_scoped_metrics, + rollup_metrics=_base_rollup_metrics, + background_task=True, +) +@background_task() +def test_async_pubsub(client, loop): # noqa + messages_received = [] + + async def reader(pubsub): + while True: + message = await pubsub.get_message(ignore_subscribe_messages=True) + if message: + messages_received.append(message["data"].decode()) + if message["data"].decode() == "NOPE": + break + + async def _test_pubsub(): + async with client.pubsub() as pubsub: + await pubsub.psubscribe("channel:*") + + future = asyncio.create_task(reader(pubsub)) + + await client.publish("channel:1", "Hello") + await client.publish("channel:2", "World") + await client.publish("channel:1", "NOPE") + + await future + + 
loop.run_until_complete(_test_pubsub()) + assert messages_received == ["Hello", "World", "NOPE"] diff --git a/tests/datastore_valkey/test_custom_conn_pool.py b/tests/datastore_valkey/test_custom_conn_pool.py new file mode 100644 index 000000000..632e8b42c --- /dev/null +++ b/tests/datastore_valkey/test_custom_conn_pool.py @@ -0,0 +1,160 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""The purpose of these tests is to confirm that using a non-standard +connection pool that does not have a `connection_kwargs` attribute +will not result in an error. 
+""" + +import pytest +import valkey +from testing_support.db_settings import valkey_settings +from testing_support.fixtures import override_application_settings +from testing_support.util import instance_hostname +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) + +from newrelic.api.background_task import background_task + +DB_SETTINGS = valkey_settings()[0] + + +class FakeConnectionPool: + """Connection Pool without connection_kwargs attribute.""" + + def __init__(self, connection): + self.connection = connection + + def get_connection(self, name, *keys, **options): + return self.connection + + def release(self, connection): + self.connection.disconnect() + + def disconnect(self): + self.connection.disconnect() + + +# Settings + +_enable_instance_settings = { + "datastore_tracer.instance_reporting.enabled": True, +} +_disable_instance_settings = { + "datastore_tracer.instance_reporting.enabled": False, +} + +# Metrics + +datastore_all_metric_count = 5 + +_base_scoped_metrics = [ + ("Datastore/operation/Valkey/get", 1), + ("Datastore/operation/Valkey/set", 1), + ("Datastore/operation/Valkey/client_list", 1), +] + +_base_scoped_metrics.append( + ("Datastore/operation/Valkey/client_setinfo", 2), +) + +_base_rollup_metrics = [ + ("Datastore/all", datastore_all_metric_count), + ("Datastore/allOther", datastore_all_metric_count), + ("Datastore/Valkey/all", datastore_all_metric_count), + ("Datastore/Valkey/allOther", datastore_all_metric_count), + ("Datastore/operation/Valkey/get", 1), + ("Datastore/operation/Valkey/set", 1), + ("Datastore/operation/Valkey/client_list", 1), +] +_base_rollup_metrics.append( + ("Datastore/operation/Valkey/client_setinfo", 2), +) + +_host = instance_hostname(DB_SETTINGS["host"]) +_port = DB_SETTINGS["port"] + +_instance_metric_name = f"Datastore/instance/Valkey/{_host}/{_port}" + +instance_metric_count = 5 + +_enable_rollup_metrics = _base_rollup_metrics.append( + 
(_instance_metric_name, instance_metric_count) +) + +_disable_rollup_metrics = _base_rollup_metrics.append((_instance_metric_name, None)) + +# Operations + + +def exercise_valkey(client): + client.set("key", "value") + client.get("key") + client.execute_command("CLIENT", "LIST", parse="LIST") + + +# Tests +@override_application_settings(_enable_instance_settings) +@validate_transaction_metrics( + "test_custom_conn_pool:test_fake_conn_pool_enable_instance", + scoped_metrics=_base_scoped_metrics, + rollup_metrics=_enable_rollup_metrics, + background_task=True, +) +@background_task() +def test_fake_conn_pool_enable_instance(): + client = valkey.StrictValkey( + host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0 + ) + + # Get a real connection + + conn = client.connection_pool.get_connection("GET") + + # Replace the original connection pool with one that doesn't + # have the `connection_kwargs` attribute. + + fake_pool = FakeConnectionPool(conn) + client.connection_pool = fake_pool + assert not hasattr(client.connection_pool, "connection_kwargs") + + exercise_valkey(client) + + +@override_application_settings(_disable_instance_settings) +@validate_transaction_metrics( + "test_custom_conn_pool:test_fake_conn_pool_disable_instance", + scoped_metrics=_base_scoped_metrics, + rollup_metrics=_disable_rollup_metrics, + background_task=True, +) +@background_task() +def test_fake_conn_pool_disable_instance(): + client = valkey.StrictValkey( + host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0 + ) + + # Get a real connection + + conn = client.connection_pool.get_connection("GET") + + # Replace the original connection pool with one that doesn't + # have the `connection_kwargs` attribute. 
+ + fake_pool = FakeConnectionPool(conn) + client.connection_pool = fake_pool + assert not hasattr(client.connection_pool, "connection_kwargs") + + exercise_valkey(client) diff --git a/tests/datastore_valkey/test_execute_command.py b/tests/datastore_valkey/test_execute_command.py new file mode 100644 index 000000000..c23cb0ce0 --- /dev/null +++ b/tests/datastore_valkey/test_execute_command.py @@ -0,0 +1,182 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 

import valkey

from newrelic.api.background_task import background_task

from testing_support.fixtures import override_application_settings
from testing_support.validators.validate_transaction_metrics import (
    validate_transaction_metrics,
)
from testing_support.db_settings import valkey_settings
from testing_support.util import instance_hostname

DB_SETTINGS = valkey_settings()[0]


# Settings

_enable_instance_settings = {
    "datastore_tracer.instance_reporting.enabled": True,
}
_disable_instance_settings = {
    "datastore_tracer.instance_reporting.enabled": False,
}

# Metrics

_base_scoped_metrics = [
    ("Datastore/operation/Valkey/client_list", 1),
    ("Datastore/operation/Valkey/client_setinfo", 2),
]

_base_rollup_metrics = [
    ("Datastore/all", 3),
    ("Datastore/allOther", 3),
    ("Datastore/Valkey/all", 3),
    ("Datastore/Valkey/allOther", 3),
    ("Datastore/operation/Valkey/client_list", 1),
    ("Datastore/operation/Valkey/client_setinfo", 2),
]

_host = instance_hostname(DB_SETTINGS["host"])
_port = DB_SETTINGS["port"]

_instance_metric_name = f"Datastore/instance/Valkey/{_host}/{_port}"

instance_metric_count = 3

# BUG FIX: `list.append()` mutates in place and returns None, so the previous
# `_enable_rollup_metrics = _base_rollup_metrics.append(...)` left both
# rollup-metric variables as None, silently disabling rollup-metric
# validation below. Build new lists instead (same pattern used by
# test_generators.py and test_get_and_set.py).
_enable_rollup_metrics = _base_rollup_metrics + [
    (_instance_metric_name, instance_metric_count)
]

_disable_rollup_metrics = _base_rollup_metrics + [(_instance_metric_name, None)]


def exercise_valkey_multi_args(client):
    # CLIENT LIST issued as separate command/subcommand arguments.
    client.execute_command("CLIENT", "LIST", parse="LIST")


def exercise_valkey_single_arg(client):
    # CLIENT LIST issued as one space-separated string.
    client.execute_command("CLIENT LIST")


@override_application_settings(_enable_instance_settings)
@validate_transaction_metrics(
    "test_execute_command:test_strict_valkey_execute_command_two_args_enable",
    scoped_metrics=_base_scoped_metrics,
    rollup_metrics=_enable_rollup_metrics,
    background_task=True,
)
@background_task()
def test_strict_valkey_execute_command_two_args_enable():
    r = valkey.StrictValkey(host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0)
    exercise_valkey_multi_args(r)


@override_application_settings(_disable_instance_settings)
@validate_transaction_metrics(
    "test_execute_command:test_strict_valkey_execute_command_two_args_disabled",
    scoped_metrics=_base_scoped_metrics,
    rollup_metrics=_disable_rollup_metrics,
    background_task=True,
)
@background_task()
def test_strict_valkey_execute_command_two_args_disabled():
    r = valkey.StrictValkey(host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0)
    exercise_valkey_multi_args(r)


@override_application_settings(_enable_instance_settings)
@validate_transaction_metrics(
    "test_execute_command:test_valkey_execute_command_two_args_enable",
    scoped_metrics=_base_scoped_metrics,
    rollup_metrics=_enable_rollup_metrics,
    background_task=True,
)
@background_task()
def test_valkey_execute_command_two_args_enable():
    r = valkey.Valkey(host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0)
    exercise_valkey_multi_args(r)


@override_application_settings(_disable_instance_settings)
@validate_transaction_metrics(
    "test_execute_command:test_valkey_execute_command_two_args_disabled",
    scoped_metrics=_base_scoped_metrics,
    rollup_metrics=_disable_rollup_metrics,
    background_task=True,
)
@background_task()
def test_valkey_execute_command_two_args_disabled():
    r = valkey.Valkey(host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0)
    exercise_valkey_multi_args(r)


@override_application_settings(_enable_instance_settings)
@validate_transaction_metrics(
    "test_execute_command:test_strict_valkey_execute_command_as_one_arg_enable",
    scoped_metrics=_base_scoped_metrics,
    rollup_metrics=_enable_rollup_metrics,
    background_task=True,
)
@background_task()
def test_strict_valkey_execute_command_as_one_arg_enable():
    r = valkey.StrictValkey(host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0)
    exercise_valkey_single_arg(r)

+@override_application_settings(_disable_instance_settings) +@validate_transaction_metrics( + "test_execute_command:test_strict_valkey_execute_command_as_one_arg_disabled", + scoped_metrics=_base_scoped_metrics, + rollup_metrics=_disable_rollup_metrics, + background_task=True, +) +@background_task() +def test_strict_valkey_execute_command_as_one_arg_disabled(): + r = valkey.StrictValkey(host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0) + exercise_valkey_single_arg(r) + + +@override_application_settings(_enable_instance_settings) +@validate_transaction_metrics( + "test_execute_command:test_valkey_execute_command_as_one_arg_enable", + scoped_metrics=_base_scoped_metrics, + rollup_metrics=_enable_rollup_metrics, + background_task=True, +) +@background_task() +def test_valkey_execute_command_as_one_arg_enable(): + r = valkey.Valkey(host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0) + exercise_valkey_single_arg(r) + + +@override_application_settings(_disable_instance_settings) +@validate_transaction_metrics( + "test_execute_command:test_valkey_execute_command_as_one_arg_disabled", + scoped_metrics=_base_scoped_metrics, + rollup_metrics=_disable_rollup_metrics, + background_task=True, +) +@background_task() +def test_valkey_execute_command_as_one_arg_disabled(): + r = valkey.Valkey(host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0) + exercise_valkey_single_arg(r) diff --git a/tests/datastore_valkey/test_generators.py b/tests/datastore_valkey/test_generators.py new file mode 100644 index 000000000..fb1800bb6 --- /dev/null +++ b/tests/datastore_valkey/test_generators.py @@ -0,0 +1,294 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest +import valkey +from testing_support.db_settings import valkey_settings +from testing_support.fixtures import override_application_settings +from testing_support.util import instance_hostname +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) + +from newrelic.api.background_task import background_task +from newrelic.api.datastore_trace import DatastoreTrace +from newrelic.api.time_trace import current_trace + +DB_SETTINGS = valkey_settings()[0] + +# Settings + +_enable_instance_settings = { + "datastore_tracer.instance_reporting.enabled": True, +} +_disable_instance_settings = { + "datastore_tracer.instance_reporting.enabled": False, +} + +# Metrics + +_base_scoped_metrics = ( + ("Datastore/operation/Valkey/scan_iter", 1), + ("Datastore/operation/Valkey/sscan_iter", 1), + ("Datastore/operation/Valkey/zscan_iter", 1), + ("Datastore/operation/Valkey/hscan_iter", 1), + ("Datastore/operation/Valkey/set", 1), + ("Datastore/operation/Valkey/sadd", 1), + ("Datastore/operation/Valkey/zadd", 1), + ("Datastore/operation/Valkey/hset", 1), +) + +_base_rollup_metrics = ( + ("Datastore/all", 8), + ("Datastore/allOther", 8), + ("Datastore/Valkey/all", 8), + ("Datastore/Valkey/allOther", 8), + ("Datastore/operation/Valkey/scan_iter", 1), + ("Datastore/operation/Valkey/sscan_iter", 1), + ("Datastore/operation/Valkey/zscan_iter", 1), + ("Datastore/operation/Valkey/hscan_iter", 1), + ("Datastore/operation/Valkey/set", 1), + ("Datastore/operation/Valkey/sadd", 1), + ("Datastore/operation/Valkey/zadd", 1), 
+ ("Datastore/operation/Valkey/hset", 1), +) + +_disable_rollup_metrics = list(_base_rollup_metrics) +_enable_rollup_metrics = list(_base_rollup_metrics) + +_host = instance_hostname(DB_SETTINGS["host"]) +_port = DB_SETTINGS["port"] + +_instance_metric_name = f"Datastore/instance/Valkey/{_host}/{_port}" + +_enable_rollup_metrics.append((_instance_metric_name, 8)) + +_disable_rollup_metrics.append((_instance_metric_name, None)) + +# Operations + + +def exercise_valkey(client): + """ + Exercise client generators by iterating on various methods and ensuring they are + non-empty, and that traces are started and stopped with the generator. + """ + + # Set existing values + client.set("scan-key", "value") + client.sadd("sscan-key", "value") + client.zadd("zscan-key", {"value": 1}) + client.hset("hscan-key", "field", "value") + + # Check generators + flag = False + assert not isinstance( + current_trace(), DatastoreTrace + ) # Assert no active DatastoreTrace + for k in client.scan_iter("scan-*"): + assert k == b"scan-key" + assert isinstance( + current_trace(), DatastoreTrace + ) # Assert DatastoreTrace now active + flag = True + assert flag + + flag = False + assert not isinstance( + current_trace(), DatastoreTrace + ) # Assert no active DatastoreTrace + for k in client.sscan_iter("sscan-key"): + assert k == b"value" + assert isinstance( + current_trace(), DatastoreTrace + ) # Assert DatastoreTrace now active + flag = True + assert flag + + flag = False + assert not isinstance( + current_trace(), DatastoreTrace + ) # Assert no active DatastoreTrace + for k, _ in client.zscan_iter("zscan-key"): + assert k == b"value" + assert isinstance( + current_trace(), DatastoreTrace + ) # Assert DatastoreTrace now active + flag = True + assert flag + + flag = False + assert not isinstance( + current_trace(), DatastoreTrace + ) # Assert no active DatastoreTrace + for f, v in client.hscan_iter("hscan-key"): + assert f == b"field" + assert v == b"value" + assert isinstance( + 
current_trace(), DatastoreTrace + ) # Assert DatastoreTrace now active + flag = True + assert flag + + +async def exercise_valkey_async(client): + """ + Exercise client generators by iterating on various methods and ensuring they are + non-empty, and that traces are started and stopped with the generator. + """ + + # Set existing values + await client.set("scan-key", "value") + await client.sadd("sscan-key", "value") + await client.zadd("zscan-key", {"value": 1}) + await client.hset("hscan-key", "field", "value") + + # Check generators + flag = False + assert not isinstance( + current_trace(), DatastoreTrace + ) # Assert no active DatastoreTrace + async for k in client.scan_iter("scan-*"): + assert k == b"scan-key" + assert isinstance( + current_trace(), DatastoreTrace + ) # Assert DatastoreTrace now active + flag = True + assert flag + + flag = False + assert not isinstance( + current_trace(), DatastoreTrace + ) # Assert no active DatastoreTrace + async for k in client.sscan_iter("sscan-key"): + assert k == b"value" + assert isinstance( + current_trace(), DatastoreTrace + ) # Assert DatastoreTrace now active + flag = True + assert flag + + flag = False + assert not isinstance( + current_trace(), DatastoreTrace + ) # Assert no active DatastoreTrace + async for k, _ in client.zscan_iter("zscan-key"): + assert k == b"value" + assert isinstance( + current_trace(), DatastoreTrace + ) # Assert DatastoreTrace now active + flag = True + assert flag + + flag = False + assert not isinstance( + current_trace(), DatastoreTrace + ) # Assert no active DatastoreTrace + async for f, v in client.hscan_iter("hscan-key"): + assert f == b"field" + assert v == b"value" + assert isinstance( + current_trace(), DatastoreTrace + ) # Assert DatastoreTrace now active + flag = True + assert flag + + +# Tests + + +@override_application_settings(_enable_instance_settings) +@validate_transaction_metrics( + "test_generators:test_strict_valkey_generator_enable_instance", + 
scoped_metrics=_base_scoped_metrics, + rollup_metrics=_enable_rollup_metrics, + background_task=True, +) +@background_task() +def test_strict_valkey_generator_enable_instance(): + client = valkey.StrictValkey( + host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0 + ) + exercise_valkey(client) + + +@override_application_settings(_disable_instance_settings) +@validate_transaction_metrics( + "test_generators:test_strict_valkey_generator_disable_instance", + scoped_metrics=_base_scoped_metrics, + rollup_metrics=_disable_rollup_metrics, + background_task=True, +) +@background_task() +def test_strict_valkey_generator_disable_instance(): + client = valkey.StrictValkey( + host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0 + ) + exercise_valkey(client) + + +@override_application_settings(_enable_instance_settings) +@validate_transaction_metrics( + "test_generators:test_valkey_generator_enable_instance", + scoped_metrics=_base_scoped_metrics, + rollup_metrics=_enable_rollup_metrics, + background_task=True, +) +@background_task() +def test_valkey_generator_enable_instance(): + client = valkey.Valkey(host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0) + exercise_valkey(client) + + +@override_application_settings(_disable_instance_settings) +@validate_transaction_metrics( + "test_generators:test_valkey_generator_disable_instance", + scoped_metrics=_base_scoped_metrics, + rollup_metrics=_disable_rollup_metrics, + background_task=True, +) +@background_task() +def test_valkey_generator_disable_instance(): + client = valkey.Valkey(host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0) + exercise_valkey(client) + + +@override_application_settings(_enable_instance_settings) +@validate_transaction_metrics( + "test_generators:test_valkey_async_generator_enable_instance", + scoped_metrics=_base_scoped_metrics, + rollup_metrics=_enable_rollup_metrics, + background_task=True, +) +@background_task() +def test_valkey_async_generator_enable_instance(loop): + client = 
valkey.asyncio.Valkey( + host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0 + ) + loop.run_until_complete(exercise_valkey_async(client)) + + +@override_application_settings(_disable_instance_settings) +@validate_transaction_metrics( + "test_generators:test_valkey_async_generator_disable_instance", + scoped_metrics=_base_scoped_metrics, + rollup_metrics=_disable_rollup_metrics, + background_task=True, +) +@background_task() +def test_valkey_async_generator_disable_instance(loop): + client = valkey.asyncio.Valkey( + host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0 + ) + loop.run_until_complete(exercise_valkey_async(client)) diff --git a/tests/datastore_valkey/test_get_and_set.py b/tests/datastore_valkey/test_get_and_set.py new file mode 100644 index 000000000..676e307a8 --- /dev/null +++ b/tests/datastore_valkey/test_get_and_set.py @@ -0,0 +1,129 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import valkey + +from newrelic.api.background_task import background_task + +from testing_support.fixtures import override_application_settings +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) +from testing_support.db_settings import valkey_settings +from testing_support.util import instance_hostname + +DB_SETTINGS = valkey_settings()[0] + +# Settings + +_enable_instance_settings = { + "datastore_tracer.instance_reporting.enabled": True, +} +_disable_instance_settings = { + "datastore_tracer.instance_reporting.enabled": False, +} + +# Metrics + +_base_scoped_metrics = ( + ("Datastore/operation/Valkey/get", 1), + ("Datastore/operation/Valkey/set", 1), +) + +_base_rollup_metrics = ( + ("Datastore/all", 2), + ("Datastore/allOther", 2), + ("Datastore/Valkey/all", 2), + ("Datastore/Valkey/allOther", 2), + ("Datastore/operation/Valkey/get", 1), + ("Datastore/operation/Valkey/set", 1), +) + +_disable_rollup_metrics = list(_base_rollup_metrics) +_enable_rollup_metrics = list(_base_rollup_metrics) + +_host = instance_hostname(DB_SETTINGS["host"]) +_port = DB_SETTINGS["port"] + +_instance_metric_name = f"Datastore/instance/Valkey/{_host}/{_port}" + +_enable_rollup_metrics.append((_instance_metric_name, 2)) + +_disable_rollup_metrics.append((_instance_metric_name, None)) + +# Operations + + +def exercise_valkey(client): + client.set("key", "value") + client.get("key") + + +# Tests + + +@override_application_settings(_enable_instance_settings) +@validate_transaction_metrics( + "test_get_and_set:test_strict_valkey_operation_enable_instance", + scoped_metrics=_base_scoped_metrics, + rollup_metrics=_enable_rollup_metrics, + background_task=True, +) +@background_task() +def test_strict_valkey_operation_enable_instance(): + client = valkey.StrictValkey( + host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0 + ) + exercise_valkey(client) + + +@override_application_settings(_disable_instance_settings) 
+@validate_transaction_metrics( + "test_get_and_set:test_strict_valkey_operation_disable_instance", + scoped_metrics=_base_scoped_metrics, + rollup_metrics=_disable_rollup_metrics, + background_task=True, +) +@background_task() +def test_strict_valkey_operation_disable_instance(): + client = valkey.StrictValkey( + host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0 + ) + exercise_valkey(client) + + +@override_application_settings(_enable_instance_settings) +@validate_transaction_metrics( + "test_get_and_set:test_valkey_operation_enable_instance", + scoped_metrics=_base_scoped_metrics, + rollup_metrics=_enable_rollup_metrics, + background_task=True, +) +@background_task() +def test_valkey_operation_enable_instance(): + client = valkey.Valkey(host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0) + exercise_valkey(client) + + +@override_application_settings(_disable_instance_settings) +@validate_transaction_metrics( + "test_get_and_set:test_valkey_operation_disable_instance", + scoped_metrics=_base_scoped_metrics, + rollup_metrics=_disable_rollup_metrics, + background_task=True, +) +@background_task() +def test_valkey_operation_disable_instance(): + client = valkey.Valkey(host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0) + exercise_valkey(client) diff --git a/tests/datastore_valkey/test_instance_info.py b/tests/datastore_valkey/test_instance_info.py new file mode 100644 index 000000000..677fbd047 --- /dev/null +++ b/tests/datastore_valkey/test_instance_info.py @@ -0,0 +1,175 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest +import valkey + +from newrelic.hooks.datastore_valkey import _conn_attrs_to_dict, _instance_info + +_instance_info_tests = [ + ((), {}, ("localhost", "6379", "0")), + ((), {"host": None}, ("localhost", "6379", "0")), + ((), {"host": ""}, ("localhost", "6379", "0")), + ((), {"db": None}, ("localhost", "6379", "0")), + ((), {"db": ""}, ("localhost", "6379", "0")), + ((), {"host": "127.0.0.1", "port": 1234, "db": 2}, ("127.0.0.1", "1234", "2")), + (("127.0.0.1", 1234, 2), {}, ("127.0.0.1", "1234", "2")), +] + + +class DisabledConnection(valkey.Connection): + @staticmethod + def connect(*args, **kwargs): + pass + + +class DisabledUnixConnection(valkey.UnixDomainSocketConnection, DisabledConnection): + pass + + +class DisabledSSLConnection(valkey.SSLConnection, DisabledConnection): + pass + + +@pytest.mark.parametrize("args,kwargs,expected", _instance_info_tests) +def test_valkey_client_instance_info(args, kwargs, expected): + r = valkey.Valkey(*args, **kwargs) + conn_kwargs = r.connection_pool.connection_kwargs + assert _instance_info(conn_kwargs) == expected + + +@pytest.mark.parametrize("args,kwargs,expected", _instance_info_tests) +def test_strict_valkey_client_instance_info(args, kwargs, expected): + r = valkey.StrictValkey(*args, **kwargs) + conn_kwargs = r.connection_pool.connection_kwargs + assert _instance_info(conn_kwargs) == expected + + +@pytest.mark.parametrize("args,kwargs,expected", _instance_info_tests) +def test_valkey_connection_instance_info(args, kwargs, expected): + r = valkey.Valkey(*args, **kwargs) + r.connection_pool.connection_class = DisabledConnection + connection = r.connection_pool.get_connection("SELECT") + try: + conn_kwargs = _conn_attrs_to_dict(connection) + assert _instance_info(conn_kwargs) == expected + finally: + r.connection_pool.release(connection) + + +@pytest.mark.parametrize("args,kwargs,expected", 
_instance_info_tests) +def test_strict_valkey_connection_instance_info(args, kwargs, expected): + r = valkey.StrictValkey(*args, **kwargs) + r.connection_pool.connection_class = DisabledConnection + connection = r.connection_pool.get_connection("SELECT") + try: + conn_kwargs = _conn_attrs_to_dict(connection) + assert _instance_info(conn_kwargs) == expected + finally: + r.connection_pool.release(connection) + + +_instance_info_from_url_tests = [ + (("valkey://localhost:1234/",), {}, ("localhost", "1234", "0")), + (("valkey://localhost:1234",), {}, ("localhost", "1234", "0")), + (("valkey://user:password@localhost:6379",), {}, ("localhost", "6379", "0")), + (("valkey://localhost:6379/2",), {}, ("localhost", "6379", "2")), + (("valkey://localhost:6379",), {"db": 2}, ("localhost", "6379", "2")), + (("valkey://@127.0.0.1:6379",), {}, ("127.0.0.1", "6379", "0")), + (("valkey://:1234/",), {}, ("localhost", "1234", "0")), + (("valkey://@:1234/",), {}, ("localhost", "1234", "0")), + (("valkey://localhost:1234/garbage",), {}, ("localhost", "1234", "0")), +] + +_instance_info_from_url_tests.extend( + [ + (("valkeys://localhost:6379/2/",), {}, ("localhost", "6379", "2")), + ( + ("valkey://localhost:6379",), + {"host": "someotherhost"}, + ("localhost", "6379", "0"), + ), + (("valkey://localhost:6379/2",), {"db": 3}, ("localhost", "6379", "2")), + (("valkey://localhost:6379/2/?db=111",), {}, ("localhost", "6379", "111")), + (("valkey://localhost:6379?db=2",), {}, ("localhost", "6379", "2")), + (("valkey://localhost:6379/2?db=111",), {}, ("localhost", "6379", "111")), + ( + ("unix:///path/to/socket.sock",), + {}, + ("localhost", "/path/to/socket.sock", "0"), + ), + ( + ("unix:///path/to/socket.sock?db=2",), + {}, + ("localhost", "/path/to/socket.sock", "2"), + ), + ( + ("unix:///path/to/socket.sock",), + {"db": 2}, + ("localhost", "/path/to/socket.sock", "2"), + ), + ] +) + + +@pytest.mark.parametrize("args,kwargs,expected", _instance_info_from_url_tests) +def 
test_valkey_client_from_url(args, kwargs, expected): + r = valkey.Valkey.from_url(*args, **kwargs) + conn_kwargs = r.connection_pool.connection_kwargs + assert _instance_info(conn_kwargs) == expected + + +@pytest.mark.parametrize("args,kwargs,expected", _instance_info_from_url_tests) +def test_strict_valkey_client_from_url(args, kwargs, expected): + r = valkey.StrictValkey.from_url(*args, **kwargs) + conn_kwargs = r.connection_pool.connection_kwargs + assert _instance_info(conn_kwargs) == expected + + +@pytest.mark.parametrize("args,kwargs,expected", _instance_info_from_url_tests) +def test_valkey_connection_from_url(args, kwargs, expected): + r = valkey.Valkey.from_url(*args, **kwargs) + if r.connection_pool.connection_class is valkey.Connection: + r.connection_pool.connection_class = DisabledConnection + elif r.connection_pool.connection_class is valkey.UnixDomainSocketConnection: + r.connection_pool.connection_class = DisabledUnixConnection + elif r.connection_pool.connection_class is valkey.SSLConnection: + r.connection_pool.connection_class = DisabledSSLConnection + else: + assert False, r.connection_pool.connection_class + connection = r.connection_pool.get_connection("SELECT") + try: + conn_kwargs = _conn_attrs_to_dict(connection) + assert _instance_info(conn_kwargs) == expected + finally: + r.connection_pool.release(connection) + + +@pytest.mark.parametrize("args,kwargs,expected", _instance_info_from_url_tests) +def test_strict_valkey_connection_from_url(args, kwargs, expected): + r = valkey.StrictValkey.from_url(*args, **kwargs) + if r.connection_pool.connection_class is valkey.Connection: + r.connection_pool.connection_class = DisabledConnection + elif r.connection_pool.connection_class is valkey.UnixDomainSocketConnection: + r.connection_pool.connection_class = DisabledUnixConnection + elif r.connection_pool.connection_class is valkey.SSLConnection: + r.connection_pool.connection_class = DisabledSSLConnection + else: + assert False, 
r.connection_pool.connection_class + connection = r.connection_pool.get_connection("SELECT") + try: + conn_kwargs = _conn_attrs_to_dict(connection) + assert _instance_info(conn_kwargs) == expected + finally: + r.connection_pool.release(connection) diff --git a/tests/datastore_valkey/test_multiple_dbs.py b/tests/datastore_valkey/test_multiple_dbs.py new file mode 100644 index 000000000..9bbcea995 --- /dev/null +++ b/tests/datastore_valkey/test_multiple_dbs.py @@ -0,0 +1,145 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import pytest +import valkey + +from newrelic.api.background_task import background_task + +from testing_support.fixtures import override_application_settings +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) +from testing_support.db_settings import valkey_settings +from testing_support.util import instance_hostname + +DB_MULTIPLE_SETTINGS = valkey_settings() + + +# Settings + +_enable_instance_settings = { + "datastore_tracer.instance_reporting.enabled": True, +} +_disable_instance_settings = { + "datastore_tracer.instance_reporting.enabled": False, +} + +# Metrics + +_base_scoped_metrics = [ + ("Datastore/operation/Valkey/get", 1), + ("Datastore/operation/Valkey/set", 1), + ("Datastore/operation/Valkey/client_list", 1), +] + +_base_scoped_metrics.append( + ("Datastore/operation/Valkey/client_setinfo", 2), +) + +datastore_all_metric_count = 5 + +_base_rollup_metrics = [ + ("Datastore/all", datastore_all_metric_count), + ("Datastore/allOther", datastore_all_metric_count), + ("Datastore/Valkey/all", datastore_all_metric_count), + ("Datastore/Valkey/allOther", datastore_all_metric_count), + ("Datastore/operation/Valkey/get", 1), + ("Datastore/operation/Valkey/set", 1), + ("Datastore/operation/Valkey/client_list", 1), +] + +_base_rollup_metrics.append( + ("Datastore/operation/Valkey/client_setinfo", 2), +) + + +if len(DB_MULTIPLE_SETTINGS) > 1: + valkey_1 = DB_MULTIPLE_SETTINGS[0] + valkey_2 = DB_MULTIPLE_SETTINGS[1] + + host_1 = instance_hostname(valkey_1["host"]) + port_1 = valkey_1["port"] + + host_2 = instance_hostname(valkey_2["host"]) + port_2 = valkey_2["port"] + + instance_metric_name_1 = f"Datastore/instance/Valkey/{host_1}/{port_1}" + instance_metric_name_2 = f"Datastore/instance/Valkey/{host_2}/{port_2}" + + instance_metric_name_1_count = 2 + instance_metric_name_2_count = 3 + + _enable_rollup_metrics = _base_rollup_metrics.extend( + [ + (instance_metric_name_1, instance_metric_name_1_count), + 
(instance_metric_name_2, instance_metric_name_2_count), + ] + ) + + _disable_rollup_metrics = _base_rollup_metrics.extend( + [ + (instance_metric_name_1, None), + (instance_metric_name_2, None), + ] + ) + + +def exercise_valkey(client_1, client_2): + client_1.set("key", "value") + client_1.get("key") + + client_2.execute_command("CLIENT", "LIST", parse="LIST") + + +@pytest.mark.skipif( + len(DB_MULTIPLE_SETTINGS) < 2, + reason="Test environment not configured with multiple databases.", +) +@override_application_settings(_enable_instance_settings) +@validate_transaction_metrics( + "test_multiple_dbs:test_multiple_datastores_enabled", + scoped_metrics=_base_scoped_metrics, + rollup_metrics=_enable_rollup_metrics, + background_task=True, +) +@background_task() +def test_multiple_datastores_enabled(): + valkey1 = DB_MULTIPLE_SETTINGS[0] + valkey2 = DB_MULTIPLE_SETTINGS[1] + + client_1 = valkey.StrictValkey(host=valkey1["host"], port=valkey1["port"], db=0) + client_2 = valkey.StrictValkey(host=valkey2["host"], port=valkey2["port"], db=1) + exercise_valkey(client_1, client_2) + + +@pytest.mark.skipif( + len(DB_MULTIPLE_SETTINGS) < 2, + reason="Test environment not configured with multiple databases.", +) +@override_application_settings(_disable_instance_settings) +@validate_transaction_metrics( + "test_multiple_dbs:test_multiple_datastores_disabled", + scoped_metrics=_base_scoped_metrics, + rollup_metrics=_disable_rollup_metrics, + background_task=True, +) +@background_task() +def test_multiple_datastores_disabled(): + valkey1 = DB_MULTIPLE_SETTINGS[0] + valkey2 = DB_MULTIPLE_SETTINGS[1] + + client_1 = valkey.StrictValkey(host=valkey1["host"], port=valkey1["port"], db=0) + client_2 = valkey.StrictValkey(host=valkey2["host"], port=valkey2["port"], db=1) + exercise_valkey(client_1, client_2) diff --git a/tests/datastore_valkey/test_span_event.py b/tests/datastore_valkey/test_span_event.py new file mode 100644 index 000000000..200551a92 --- /dev/null +++ 
b/tests/datastore_valkey/test_span_event.py @@ -0,0 +1,146 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest +import valkey + +from newrelic.api.transaction import current_transaction +from newrelic.api.background_task import background_task + +from testing_support.db_settings import valkey_settings +from testing_support.fixtures import override_application_settings +from testing_support.validators.validate_span_events import validate_span_events +from testing_support.util import instance_hostname + + +DB_SETTINGS = valkey_settings()[0] +DATABASE_NUMBER = 0 + + +# Settings + +_enable_instance_settings = { + "datastore_tracer.instance_reporting.enabled": True, + "datastore_tracer.database_name_reporting.enabled": True, + "distributed_tracing.enabled": True, + "span_events.enabled": True, +} +_disable_instance_settings = { + "datastore_tracer.instance_reporting.enabled": False, + "datastore_tracer.database_name_reporting.enabled": False, + "distributed_tracing.enabled": True, + "span_events.enabled": True, +} + + +def _exercise_db(): + client = valkey.StrictValkey( + host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=DATABASE_NUMBER + ) + + client.set("key", "value") + client.get("key") + + client.execute_command("CLIENT", "LIST", parse="LIST") + + +# Tests + + +@pytest.mark.parametrize("db_instance_enabled", (True, False)) +@pytest.mark.parametrize("instance_enabled", (True, False)) +def 
test_span_events(instance_enabled, db_instance_enabled):
+    guid = "dbb533c53b749e0b"
+    priority = 0.5
+
+    common = {
+        "type": "Span",
+        "transactionId": guid,
+        "priority": priority,
+        "sampled": True,
+        "category": "datastore",
+        "component": "Valkey",
+        "span.kind": "client",
+    }
+    exact_agents = {}
+
+    if instance_enabled:
+        settings = _enable_instance_settings.copy()
+        hostname = instance_hostname(DB_SETTINGS["host"])
+        exact_agents.update(
+            {
+                "peer.address": f"{hostname}:{DB_SETTINGS['port']}",
+                "peer.hostname": hostname,
+            }
+        )
+    else:
+        settings = _disable_instance_settings.copy()
+        exact_agents.update(
+            {
+                "peer.address": "Unknown:Unknown",
+                "peer.hostname": "Unknown",
+            }
+        )
+
+    if db_instance_enabled and instance_enabled:
+        exact_agents.update(
+            {
+                "db.instance": str(DATABASE_NUMBER),
+            }
+        )
+        unexpected_agents = ()
+    else:
+        settings["attributes.exclude"] = ["db.instance"]
+        unexpected_agents = ("db.instance",)
+
+    query_1 = common.copy()
+    query_1["name"] = "Datastore/operation/Valkey/set"
+
+    query_2 = common.copy()
+    query_2["name"] = "Datastore/operation/Valkey/get"
+
+    query_3 = common.copy()
+    query_3["name"] = "Datastore/operation/Valkey/client_list"
+
+    @validate_span_events(
+        count=1,
+        exact_intrinsics=query_1,
+        unexpected_intrinsics=("db.instance",),
+        exact_agents=exact_agents,
+        unexpected_agents=unexpected_agents,
+    )
+    @validate_span_events(
+        count=1,
+        exact_intrinsics=query_2,
+        unexpected_intrinsics=("db.instance",),
+        exact_agents=exact_agents,
+        unexpected_agents=unexpected_agents,
+    )
+    @validate_span_events(
+        count=1,
+        exact_intrinsics=query_3,
+        unexpected_intrinsics=("db.instance",),
+        exact_agents=exact_agents,
+        unexpected_agents=unexpected_agents,
+    )
+    @override_application_settings(settings)
+    @background_task(name="span_events")
+    def _test():
+        txn = current_transaction()
+        txn.guid = guid
+        txn._priority = priority
+        txn._sampled = True
+        _exercise_db()
+
+    _test()
diff --git 
a/tests/datastore_valkey/test_trace_node.py b/tests/datastore_valkey/test_trace_node.py new file mode 100644 index 000000000..54108e549 --- /dev/null +++ b/tests/datastore_valkey/test_trace_node.py @@ -0,0 +1,132 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import valkey +from testing_support.db_settings import valkey_settings +from testing_support.fixtures import override_application_settings +from testing_support.util import instance_hostname +from testing_support.validators.validate_tt_collector_json import ( + validate_tt_collector_json, +) + +from newrelic.api.background_task import background_task + +DB_SETTINGS = valkey_settings()[0] +DATABASE_NUMBER = 0 + +# Settings + +_enable_instance_settings = { + "datastore_tracer.instance_reporting.enabled": True, + "datastore_tracer.database_name_reporting.enabled": True, +} +_disable_instance_settings = { + "datastore_tracer.instance_reporting.enabled": False, + "datastore_tracer.database_name_reporting.enabled": False, +} +_instance_only_settings = { + "datastore_tracer.instance_reporting.enabled": True, + "datastore_tracer.database_name_reporting.enabled": False, +} +_database_only_settings = { + "datastore_tracer.instance_reporting.enabled": False, + "datastore_tracer.database_name_reporting.enabled": True, +} + +# Expected parameters + +_enabled_required = { + "host": instance_hostname(DB_SETTINGS["host"]), + "port_path_or_id": str(DB_SETTINGS["port"]), + "db.instance": 
str(DATABASE_NUMBER), +} +_enabled_forgone = {} + +_disabled_required = {} +_disabled_forgone = { + "host": "VALUE NOT USED", + "port_path_or_id": "VALUE NOT USED", + "db.instance": "VALUE NOT USED", +} + +_instance_only_required = { + "host": instance_hostname(DB_SETTINGS["host"]), + "port_path_or_id": str(DB_SETTINGS["port"]), +} +_instance_only_forgone = { + "db.instance": str(DATABASE_NUMBER), +} + +_database_only_required = { + "db.instance": str(DATABASE_NUMBER), +} +_database_only_forgone = { + "host": "VALUE NOT USED", + "port_path_or_id": "VALUE NOT USED", +} + + +# Query + + +def _exercise_db(): + client = valkey.StrictValkey( + host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=DATABASE_NUMBER + ) + + client.set("key", "value") + client.get("key") + + client.execute_command("CLIENT", "LIST", parse="LIST") + + +# Tests + + +@override_application_settings(_enable_instance_settings) +@validate_tt_collector_json( + datastore_params=_enabled_required, datastore_forgone_params=_enabled_forgone +) +@background_task() +def test_trace_node_datastore_params_enable_instance(): + _exercise_db() + + +@override_application_settings(_disable_instance_settings) +@validate_tt_collector_json( + datastore_params=_disabled_required, datastore_forgone_params=_disabled_forgone +) +@background_task() +def test_trace_node_datastore_params_disable_instance(): + _exercise_db() + + +@override_application_settings(_instance_only_settings) +@validate_tt_collector_json( + datastore_params=_instance_only_required, + datastore_forgone_params=_instance_only_forgone, +) +@background_task() +def test_trace_node_datastore_params_instance_only(): + _exercise_db() + + +@override_application_settings(_database_only_settings) +@validate_tt_collector_json( + datastore_params=_database_only_required, + datastore_forgone_params=_database_only_forgone, +) +@background_task() +def test_trace_node_datastore_params_database_only(): + _exercise_db() diff --git 
a/tests/datastore_valkey/test_uninstrumented_methods.py b/tests/datastore_valkey/test_uninstrumented_methods.py new file mode 100644 index 000000000..972ab7291 --- /dev/null +++ b/tests/datastore_valkey/test_uninstrumented_methods.py @@ -0,0 +1,128 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest +import valkey +from testing_support.db_settings import valkey_settings + +DB_SETTINGS = valkey_settings()[0] + +valkey_client = valkey.Valkey(host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0) +strict_valkey_client = valkey.StrictValkey( + host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0 +) + + +IGNORED_METHODS = { + "MODULE_CALLBACKS", + "MODULE_VERSION", + "NAME", + "add_edge", + "add_node", + "append_bucket_size", + "append_capacity", + "append_error", + "append_expansion", + "append_items_and_increments", + "append_items", + "append_max_iterations", + "append_no_create", + "append_no_scale", + "append_values_and_weights", + "append_weights", + "auto_close_connection_pool", + "batch_indexer", + "BatchIndexer", + "bulk", + "call_procedure", + "client_no_touch", + "client_tracking_off", + "client_tracking_on", + "client", + "close", + "commandmixin", + "connection_pool", + "connection", + "debug_segfault", + "edges", + "execute_command", + "flush", + "from_pool", + "from_url", + "get_cache", + "get_connection_kwargs", + "get_encoder", + "get_label", + "get_params_args", + "get_property", + "get_relation", + 
"get_retry", + "index_name", + "labels", + "list_keys", + "load_document", + "load_external_module", + "lock", + "name", + "nodes", + "parse_response", + "pipeline", + "property_keys", + "register_script", + "relationship_types", + "response_callbacks", + "RESPONSE_CALLBACKS", + "sentinel", + "set_file", + "set_path", + "set_response_callback", + "set_retry", + "transaction", + "version", + "delete_command_from_cache", + "flush_cache", + "invalidate_key_from_cache", +} + +VALKEY_MODULES = { + "bf", + "cf", + "cms", + "ft", + "graph", + "json", + "tdigest", + "topk", + "ts", +} + +IGNORED_METHODS |= VALKEY_MODULES + + +@pytest.mark.parametrize("client", (valkey_client, strict_valkey_client)) +def test_uninstrumented_methods(client): + methods = {m for m in dir(client) if not m[0] == "_"} + is_wrapped = lambda m: hasattr(getattr(client, m), "__wrapped__") + uninstrumented = {m for m in methods - IGNORED_METHODS if not is_wrapped(m)} + + for module in VALKEY_MODULES: + if hasattr(client, module): + module_client = getattr(client, module)() + module_methods = {m for m in dir(module_client) if not m[0] == "_"} + is_wrapped = lambda m: hasattr(getattr(module_client, m), "__wrapped__") + uninstrumented |= { + m for m in module_methods - IGNORED_METHODS if not is_wrapped(m) + } + + assert not uninstrumented, f"Uninstrumented methods: {sorted(uninstrumented)}" diff --git a/tests/testing_support/db_settings.py b/tests/testing_support/db_settings.py index 441170477..8b9af3059 100644 --- a/tests/testing_support/db_settings.py +++ b/tests/testing_support/db_settings.py @@ -147,6 +147,53 @@ def redis_cluster_settings(): ] return settings +def valkey_settings(): + """Return a list of dict of settings for connecting to valkey. + + Will return the correct settings, depending on which of the environments it + is running in. It attempts to set variables in the following order, where + later environments override earlier ones. + + 1. Local + 2. 
Github Actions + """ + + host = "host.docker.internal" if "GITHUB_ACTIONS" in os.environ else "localhost" + instances = 2 + settings = [ + { + "host": host, + "port": 8080 + instance_num, + } + for instance_num in range(instances) + ] + return settings + + +def valkey_cluster_settings(): + """Return a list of dict of settings for connecting to valkey cluster. + + Will return the correct settings, depending on which of the environments it + is running in. It attempts to set variables in the following order, where + later environments override earlier ones. + + 1. Local + 2. Github Actions + """ + + host = "host.docker.internal" if "GITHUB_ACTIONS" in os.environ else "localhost" + instances = 1 + base_port = 6379 + + settings = [ + { + "host": host, + "port": base_port + instance_num, + } + for instance_num in range(instances) + ] + return settings + def memcached_settings(): """Return a list of dict of settings for connecting to memcached. diff --git a/tox.ini b/tox.ini index d31ab3653..700bd35f8 100644 --- a/tox.ini +++ b/tox.ini @@ -175,6 +175,8 @@ envlist = redis-datastore_redis-{py37,py311,pypy310}-redis04, redis-datastore_redis-{py37,py38,py39,py310,py311,py312,py313,pypy310}-redislatest, rediscluster-datastore_rediscluster-{py37,py312,py313,pypy310}-redislatest, + valkey-datastore_valkey-{py37,py38,py39,py310,py311,py312,py313,pypy310}-valkeylatest, + valkeycluster-datastore_valkeycluster-{py37,py312,py313,pypy310}-valkeylatest, solr-datastore_pysolr-{py37,py38,py39,py310,py311,py312,py313,pypy310}, [testenv] @@ -281,6 +283,8 @@ deps = datastore_redis-redis04: redis<5 datastore_redis-redislatest: redis datastore_rediscluster-redislatest: redis + datastore_valkey-valkeylatest: valkey + datastore_valkeycluster-valkeylatest: valkey external_aiobotocore-aiobotocorelatest: aiobotocore[awscli] external_aiobotocore-aiobotocorelatest: flask external_aiobotocore-aiobotocorelatest: flask-cors @@ -486,6 +490,8 @@ changedir = datastore_pysolr: tests/datastore_pysolr 
datastore_redis: tests/datastore_redis datastore_rediscluster: tests/datastore_rediscluster + datastore_valkey: tests/datastore_valkey + datastore_valkeycluster: tests/datastore_valkeycluster datastore_sqlite: tests/datastore_sqlite external_aiobotocore: tests/external_aiobotocore external_botocore: tests/external_botocore From 1a454cf2437e050d55897a0dda3b14ab761c7253 Mon Sep 17 00:00:00 2001 From: Hannah Stepanek Date: Tue, 22 Oct 2024 16:45:29 -0700 Subject: [PATCH 2/6] Fixup: flake8 errors --- newrelic/config.py | 7 +- .../datastore_valkey/test_custom_conn_pool.py | 13 +-- tests/datastore_valkey/test_generators.py | 81 +++++-------------- 3 files changed, 27 insertions(+), 74 deletions(-) diff --git a/newrelic/config.py b/newrelic/config.py index 8831be1b8..32b767c0e 100644 --- a/newrelic/config.py +++ b/newrelic/config.py @@ -3622,7 +3622,7 @@ def _process_module_builtin_defaults(): "redis.commands.graph.commands", "newrelic.hooks.datastore_redis", "instrument_redis_commands_graph_commands" ) - _process_module_definition( + _process_module_definition( "valkey.asyncio.client", "newrelic.hooks.datastore_valkey", "instrument_asyncio_valkey_client" ) @@ -3658,7 +3658,9 @@ def _process_module_builtin_defaults(): ) _process_module_definition( - "valkey.commands.search.commands", "newrelic.hooks.datastore_valkey", "instrument_valkey_commands_search_commands" + "valkey.commands.search.commands", + "newrelic.hooks.datastore_valkey", + "instrument_valkey_commands_search_commands", ) _process_module_definition( @@ -3675,7 +3677,6 @@ def _process_module_builtin_defaults(): "valkey.commands.graph.commands", "newrelic.hooks.datastore_valkey", "instrument_valkey_commands_graph_commands" ) - _process_module_definition("motor", "newrelic.hooks.datastore_motor", "patch_motor") _process_module_definition( diff --git a/tests/datastore_valkey/test_custom_conn_pool.py b/tests/datastore_valkey/test_custom_conn_pool.py index 632e8b42c..2b77d0b81 100644 --- 
a/tests/datastore_valkey/test_custom_conn_pool.py +++ b/tests/datastore_valkey/test_custom_conn_pool.py @@ -17,7 +17,6 @@ will not result in an error. """ -import pytest import valkey from testing_support.db_settings import valkey_settings from testing_support.fixtures import override_application_settings @@ -90,9 +89,7 @@ def disconnect(self): instance_metric_count = 5 -_enable_rollup_metrics = _base_rollup_metrics.append( - (_instance_metric_name, instance_metric_count) -) +_enable_rollup_metrics = _base_rollup_metrics.append((_instance_metric_name, instance_metric_count)) _disable_rollup_metrics = _base_rollup_metrics.append((_instance_metric_name, None)) @@ -115,9 +112,7 @@ def exercise_valkey(client): ) @background_task() def test_fake_conn_pool_enable_instance(): - client = valkey.StrictValkey( - host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0 - ) + client = valkey.StrictValkey(host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0) # Get a real connection @@ -142,9 +137,7 @@ def test_fake_conn_pool_enable_instance(): ) @background_task() def test_fake_conn_pool_disable_instance(): - client = valkey.StrictValkey( - host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0 - ) + client = valkey.StrictValkey(host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0) # Get a real connection diff --git a/tests/datastore_valkey/test_generators.py b/tests/datastore_valkey/test_generators.py index fb1800bb6..53e457311 100644 --- a/tests/datastore_valkey/test_generators.py +++ b/tests/datastore_valkey/test_generators.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import pytest import valkey from testing_support.db_settings import valkey_settings from testing_support.fixtures import override_application_settings @@ -93,51 +92,35 @@ def exercise_valkey(client): # Check generators flag = False - assert not isinstance( - current_trace(), DatastoreTrace - ) # Assert no active DatastoreTrace + assert not isinstance(current_trace(), DatastoreTrace) # Assert no active DatastoreTrace for k in client.scan_iter("scan-*"): assert k == b"scan-key" - assert isinstance( - current_trace(), DatastoreTrace - ) # Assert DatastoreTrace now active + assert isinstance(current_trace(), DatastoreTrace) # Assert DatastoreTrace now active flag = True assert flag flag = False - assert not isinstance( - current_trace(), DatastoreTrace - ) # Assert no active DatastoreTrace + assert not isinstance(current_trace(), DatastoreTrace) # Assert no active DatastoreTrace for k in client.sscan_iter("sscan-key"): assert k == b"value" - assert isinstance( - current_trace(), DatastoreTrace - ) # Assert DatastoreTrace now active + assert isinstance(current_trace(), DatastoreTrace) # Assert DatastoreTrace now active flag = True assert flag flag = False - assert not isinstance( - current_trace(), DatastoreTrace - ) # Assert no active DatastoreTrace + assert not isinstance(current_trace(), DatastoreTrace) # Assert no active DatastoreTrace for k, _ in client.zscan_iter("zscan-key"): assert k == b"value" - assert isinstance( - current_trace(), DatastoreTrace - ) # Assert DatastoreTrace now active + assert isinstance(current_trace(), DatastoreTrace) # Assert DatastoreTrace now active flag = True assert flag flag = False - assert not isinstance( - current_trace(), DatastoreTrace - ) # Assert no active DatastoreTrace + assert not isinstance(current_trace(), DatastoreTrace) # Assert no active DatastoreTrace for f, v in client.hscan_iter("hscan-key"): assert f == b"field" assert v == b"value" - assert isinstance( - current_trace(), DatastoreTrace - ) # Assert DatastoreTrace 
now active + assert isinstance(current_trace(), DatastoreTrace) # Assert DatastoreTrace now active flag = True assert flag @@ -156,51 +139,35 @@ async def exercise_valkey_async(client): # Check generators flag = False - assert not isinstance( - current_trace(), DatastoreTrace - ) # Assert no active DatastoreTrace + assert not isinstance(current_trace(), DatastoreTrace) # Assert no active DatastoreTrace async for k in client.scan_iter("scan-*"): assert k == b"scan-key" - assert isinstance( - current_trace(), DatastoreTrace - ) # Assert DatastoreTrace now active + assert isinstance(current_trace(), DatastoreTrace) # Assert DatastoreTrace now active flag = True assert flag flag = False - assert not isinstance( - current_trace(), DatastoreTrace - ) # Assert no active DatastoreTrace + assert not isinstance(current_trace(), DatastoreTrace) # Assert no active DatastoreTrace async for k in client.sscan_iter("sscan-key"): assert k == b"value" - assert isinstance( - current_trace(), DatastoreTrace - ) # Assert DatastoreTrace now active + assert isinstance(current_trace(), DatastoreTrace) # Assert DatastoreTrace now active flag = True assert flag flag = False - assert not isinstance( - current_trace(), DatastoreTrace - ) # Assert no active DatastoreTrace + assert not isinstance(current_trace(), DatastoreTrace) # Assert no active DatastoreTrace async for k, _ in client.zscan_iter("zscan-key"): assert k == b"value" - assert isinstance( - current_trace(), DatastoreTrace - ) # Assert DatastoreTrace now active + assert isinstance(current_trace(), DatastoreTrace) # Assert DatastoreTrace now active flag = True assert flag flag = False - assert not isinstance( - current_trace(), DatastoreTrace - ) # Assert no active DatastoreTrace + assert not isinstance(current_trace(), DatastoreTrace) # Assert no active DatastoreTrace async for f, v in client.hscan_iter("hscan-key"): assert f == b"field" assert v == b"value" - assert isinstance( - current_trace(), DatastoreTrace - ) # Assert 
DatastoreTrace now active + assert isinstance(current_trace(), DatastoreTrace) # Assert DatastoreTrace now active flag = True assert flag @@ -217,9 +184,7 @@ async def exercise_valkey_async(client): ) @background_task() def test_strict_valkey_generator_enable_instance(): - client = valkey.StrictValkey( - host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0 - ) + client = valkey.StrictValkey(host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0) exercise_valkey(client) @@ -232,9 +197,7 @@ def test_strict_valkey_generator_enable_instance(): ) @background_task() def test_strict_valkey_generator_disable_instance(): - client = valkey.StrictValkey( - host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0 - ) + client = valkey.StrictValkey(host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0) exercise_valkey(client) @@ -273,9 +236,7 @@ def test_valkey_generator_disable_instance(): ) @background_task() def test_valkey_async_generator_enable_instance(loop): - client = valkey.asyncio.Valkey( - host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0 - ) + client = valkey.asyncio.Valkey(host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0) loop.run_until_complete(exercise_valkey_async(client)) @@ -288,7 +249,5 @@ def test_valkey_async_generator_enable_instance(loop): ) @background_task() def test_valkey_async_generator_disable_instance(loop): - client = valkey.asyncio.Valkey( - host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0 - ) + client = valkey.asyncio.Valkey(host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0) loop.run_until_complete(exercise_valkey_async(client)) From e6c19e560ead3e695f45ac735ba862dea771fc49 Mon Sep 17 00:00:00 2001 From: Hannah Stepanek Date: Tue, 22 Oct 2024 17:04:51 -0700 Subject: [PATCH 3/6] Fixup: path to valkey docker image --- .github/workflows/tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 4510b04fa..0c4661079 100644 --- 
a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -1217,7 +1217,7 @@ jobs: timeout-minutes: 30 services: valkey: - image: valkey + image: valkey/valkey ports: - 8080:6379 - 8081:6379 From e9a6ad1a7f0aa8c3c871730f16920be927ea3ed9 Mon Sep 17 00:00:00 2001 From: Hannah Stepanek Date: Tue, 22 Oct 2024 17:09:10 -0700 Subject: [PATCH 4/6] Remove py37 from valkey as it doesn't support it --- tox.ini | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tox.ini b/tox.ini index 84a3fe818..6021c3193 100644 --- a/tox.ini +++ b/tox.ini @@ -176,8 +176,8 @@ envlist = redis-datastore_redis-{py37,py311,pypy310}-redis04, redis-datastore_redis-{py37,py38,py39,py310,py311,py312,py313,pypy310}-redislatest, rediscluster-datastore_rediscluster-{py37,py312,py313,pypy310}-redislatest, - valkey-datastore_valkey-{py37,py38,py39,py310,py311,py312,py313,pypy310}-valkeylatest, - valkeycluster-datastore_valkeycluster-{py37,py312,py313,pypy310}-valkeylatest, + valkey-datastore_valkey-{py38,py39,py310,py311,py312,py313,pypy310}-valkeylatest, + valkeycluster-datastore_valkeycluster-{py38,py313,pypy310}-valkeylatest, solr-datastore_pysolr-{py37,py38,py39,py310,py311,py312,py313,pypy310}, [testenv] From d3be1a15c4bc9e8c9d5227519629dc095a8bbfcd Mon Sep 17 00:00:00 2001 From: Jair Henrique Date: Wed, 23 Oct 2024 08:31:27 -0300 Subject: [PATCH 5/6] Removes valkey cluster references --- tests/testing_support/db_settings.py | 36 +++++++--------------------- tox.ini | 3 --- 2 files changed, 9 insertions(+), 30 deletions(-) diff --git a/tests/testing_support/db_settings.py b/tests/testing_support/db_settings.py index 8b9af3059..4de2cb661 100644 --- a/tests/testing_support/db_settings.py +++ b/tests/testing_support/db_settings.py @@ -147,6 +147,7 @@ def redis_cluster_settings(): ] return settings + def valkey_settings(): """Return a list of dict of settings for connecting to valkey. 
@@ -170,31 +171,6 @@ def valkey_settings(): return settings -def valkey_cluster_settings(): - """Return a list of dict of settings for connecting to valkey cluster. - - Will return the correct settings, depending on which of the environments it - is running in. It attempts to set variables in the following order, where - later environments override earlier ones. - - 1. Local - 2. Github Actions - """ - - host = "host.docker.internal" if "GITHUB_ACTIONS" in os.environ else "localhost" - instances = 1 - base_port = 6379 - - settings = [ - { - "host": host, - "port": base_port + instance_num, - } - for instance_num in range(instances) - ] - return settings - - def memcached_settings(): """Return a list of dict of settings for connecting to memcached. @@ -233,7 +209,11 @@ def mongodb_settings(): host = "host.docker.internal" if "GITHUB_ACTIONS" in os.environ else "127.0.0.1" instances = 2 settings = [ - {"host": host, "port": 8080 + instance_num, "collection": f"mongodb_collection_{str(os.getpid())}"} + { + "host": host, + "port": 8080 + instance_num, + "collection": f"mongodb_collection_{str(os.getpid())}", + } for instance_num in range(instances) ] return settings @@ -254,7 +234,9 @@ def firestore_settings(): host = "host.docker.internal" if "GITHUB_ACTIONS" in os.environ else "127.0.0.1" instances = 2 - settings = [{"host": host, "port": 8080 + instance_num} for instance_num in range(instances)] + settings = [ + {"host": host, "port": 8080 + instance_num} for instance_num in range(instances) + ] return settings diff --git a/tox.ini b/tox.ini index 6021c3193..df3f937c4 100644 --- a/tox.ini +++ b/tox.ini @@ -177,7 +177,6 @@ envlist = redis-datastore_redis-{py37,py38,py39,py310,py311,py312,py313,pypy310}-redislatest, rediscluster-datastore_rediscluster-{py37,py312,py313,pypy310}-redislatest, valkey-datastore_valkey-{py38,py39,py310,py311,py312,py313,pypy310}-valkeylatest, - valkeycluster-datastore_valkeycluster-{py38,py313,pypy310}-valkeylatest, 
solr-datastore_pysolr-{py37,py38,py39,py310,py311,py312,py313,pypy310}, [testenv] @@ -288,7 +287,6 @@ deps = datastore_redis-redislatest: redis datastore_rediscluster-redislatest: redis datastore_valkey-valkeylatest: valkey - datastore_valkeycluster-valkeylatest: valkey external_aiobotocore-aiobotocorelatest: aiobotocore[awscli] external_aiobotocore-aiobotocorelatest: flask external_aiobotocore-aiobotocorelatest: flask-cors @@ -492,7 +490,6 @@ changedir = datastore_redis: tests/datastore_redis datastore_rediscluster: tests/datastore_rediscluster datastore_valkey: tests/datastore_valkey - datastore_valkeycluster: tests/datastore_valkeycluster datastore_sqlite: tests/datastore_sqlite external_aiobotocore: tests/external_aiobotocore external_botocore: tests/external_botocore From d7a9f4c11f0e7fd4ab2762d098e5592a03e74ee1 Mon Sep 17 00:00:00 2001 From: Jair Henrique Date: Fri, 25 Oct 2024 08:47:12 -0300 Subject: [PATCH 6/6] Includes valkey docker image tag --- .github/workflows/tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 0c4661079..4ae76803e 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -1217,7 +1217,7 @@ jobs: timeout-minutes: 30 services: valkey: - image: valkey/valkey + image: valkey/valkey:latest ports: - 8080:6379 - 8081:6379