From 9f2326e88d4ee192d62b2e1c9f17ad9d6eecb896 Mon Sep 17 00:00:00 2001
From: Vivek Reddy
Date: Tue, 7 Sep 2021 12:21:12 -0700
Subject: [PATCH] [debug dump util] Base Skeleton and Click Class added (#1668)

What I did
HLD for the Dump Utility: HLD.
Added the top-level CLI command, i.e. "dump state"
Added the corresponding UTs
Added the bash autocompletion support files
Added the implementation for the customization options provided, such as --db, --key-map, --show, --table & --namespace

How I did it

How to verify it
UTs are implemented
---
 doc/Command-Reference.md                    |  55 +++++
 dump/main.py                                | 229 ++++++++++++++++++
 setup.py                                    |   6 +-
 sonic-utilities-data/bash_completion.d/dump |   8 +
 tests/dump_tests/dump_state_test.py         | 246 ++++++++++++++++++++
 tests/mock_tables/asic_db.json              |  19 +-
 6 files changed, 561 insertions(+), 2 deletions(-)
 create mode 100644 dump/main.py
 create mode 100644 sonic-utilities-data/bash_completion.d/dump
 create mode 100644 tests/dump_tests/dump_state_test.py

diff --git a/doc/Command-Reference.md b/doc/Command-Reference.md
index 160ae8b7b4bd..15efe35ca64c 100644
--- a/doc/Command-Reference.md
+++ b/doc/Command-Reference.md
@@ -159,6 +159,7 @@
 * [SONiC Package Manager](#sonic-package-manager)
 * [SONiC Installer](#sonic-installer)
 * [Troubleshooting Commands](#troubleshooting-commands)
+  * [Debug Dumps](#debug-dumps)
 * [Routing Stack](#routing-stack)
 * [Quagga BGP Show Commands](#Quagga-BGP-Show-Commands)
 * [ZTP Configuration And Show Commands](#ztp-configuration-and-show-commands)
@@ -9712,6 +9713,60 @@ If the SONiC system was running for quite some time `show techsupport` will prod
 admin@sonic:~$ show techsupport --since='hour ago'  # Will collect syslog and core files for the last one hour
 ```
 
+### Debug Dumps
+
+In SONiC, a module usually has a set of related tables spread across one or more Redis databases, and all of them may need to be checked to confirm that a configuration update was properly applied and propagated. This utility prints a unified view of the redis-state for a given module.
+
+- Usage:
+  ```
+  Usage: dump state [OPTIONS] MODULE IDENTIFIER
+    Dump the redis-state of the identifier for the module specified
+
+  Options:
+    -s, --show            Display Modules Available
+    -d, --db TEXT         Only dump from these Databases or the CONFIG_FILE
+    -t, --table           Print in tabular format  [default: False]
+    -k, --key-map         Only fetch the keys matched, don't extract field-value dumps  [default: False]
+    -v, --verbose         Prints any intermediate output to stdout useful for dev & troubleshooting  [default: False]
+    -n, --namespace TEXT  Dump the redis-state for this namespace.  [default: DEFAULT_NAMESPACE]
+    --help                Show this message and exit.
+ ``` + + +- Examples: + ``` + root@sonic# dump state --show + Module Identifier + -------- ------------ + port port_name + copp trap_id + ``` + + ``` + admin@sonic:~$ dump state copp arp_req --key-map --db ASIC_DB + { + "arp_req": { + "ASIC_DB": { + "keys": [ + "ASIC_STATE:SAI_OBJECT_TYPE_HOSTIF_TRAP:oid:0x22000000000c5b", + "ASIC_STATE:SAI_OBJECT_TYPE_HOSTIF_TRAP_GROUP:oid:0x11000000000c59", + "ASIC_STATE:SAI_OBJECT_TYPE_POLICER:oid:0x12000000000c5a", + "ASIC_STATE:SAI_OBJECT_TYPE_QUEUE:oid:0x15000000000626" + ], + "tables_not_found": [], + "vidtorid": { + "oid:0x22000000000c5b": "oid:0x200000000022", + "oid:0x11000000000c59": "oid:0x300000011", + "oid:0x12000000000c5a": "oid:0x200000012", + "oid:0x15000000000626": "oid:0x12e0000040015" + } + } + } + } + ``` + + + Go Back To [Beginning of the document](#) or [Beginning of this section](#troubleshooting-commands) ## Routing Stack diff --git a/dump/main.py b/dump/main.py new file mode 100644 index 000000000000..3a183520c3e8 --- /dev/null +++ b/dump/main.py @@ -0,0 +1,229 @@ +import os +import sys +import json +import re +import click +from tabulate import tabulate +from sonic_py_common import multi_asic +from utilities_common.constants import DEFAULT_NAMESPACE +from dump.match_infra import RedisSource, JsonSource, ConnectionPool +from dump import plugins + + +# Autocompletion Helper +def get_available_modules(ctx, args, incomplete): + return [k for k in plugins.dump_modules.keys() if incomplete in k] + + +# Display Modules Callback +def show_modules(ctx, param, value): + if not value or ctx.resilient_parsing: + return + header = ["Module", "Identifier"] + display = [] + for mod in plugins.dump_modules: + display.append((mod, plugins.dump_modules[mod].ARG_NAME)) + click.echo(tabulate(display, header)) + ctx.exit() + + +@click.group() +def dump(): + pass + + +@dump.command() +@click.pass_context +@click.argument('module', required=True, type=str, autocompletion=get_available_modules) +@click.argument('identifier', required=True, type=str) +@click.option('--show', '-s', is_flag=True, default=False, expose_value=False, + callback=show_modules, help='Display Modules Available', is_eager=True) +@click.option('--db', '-d', multiple=True, + help='Only dump from these Databases or the CONFIG_FILE') +@click.option('--table', '-t', is_flag=True, default=False, + help='Print in tabular format', show_default=True) +@click.option('--key-map', '-k', is_flag=True, default=False, show_default=True, + help="Only fetch the keys matched, don't extract field-value dumps") +@click.option('--verbose', '-v', is_flag=True, default=False, show_default=True, + help="Prints any intermediate output to stdout useful for dev & troubleshooting") +@click.option('--namespace', '-n', default=DEFAULT_NAMESPACE, type=str, + show_default=True, help='Dump the redis-state for this namespace.') +def state(ctx, module, identifier, db, table, key_map, verbose, namespace): + """ + Dump the current state of the identifier for the specified module from Redis DB or CONFIG_FILE + """ + if not multi_asic.is_multi_asic() and namespace != DEFAULT_NAMESPACE: + click.echo("Namespace option is not valid for a single-ASIC device") + ctx.exit() + + if multi_asic.is_multi_asic() and (namespace != DEFAULT_NAMESPACE and namespace not in multi_asic.get_namespace_list()): + click.echo("Namespace option is not valid. 
Choose one of {}".format(multi_asic.get_namespace_list()))
+        ctx.exit()
+
+    if module not in plugins.dump_modules:
+        click.echo("No Matching Plugin has been Implemented")
+        ctx.exit()
+
+    if verbose:
+        os.environ["VERBOSE"] = "1"
+    else:
+        os.environ["VERBOSE"] = "0"
+
+    ctx.module = module
+    obj = plugins.dump_modules[module]()
+
+    if identifier == "all":
+        ids = obj.get_all_args(namespace)
+    else:
+        ids = identifier.split(",")
+
+    params = {}
+    collected_info = {}
+    params['namespace'] = namespace
+    for arg in ids:
+        params[plugins.dump_modules[module].ARG_NAME] = arg
+        collected_info[arg] = obj.execute(params)
+
+    if len(db) > 0:
+        collected_info = filter_out_dbs(db, collected_info)
+
+    vidtorid = extract_rid(collected_info, namespace)
+
+    if not key_map:
+        collected_info = populate_fv(collected_info, module, namespace)
+
+    for id in vidtorid.keys():
+        collected_info[id]["ASIC_DB"]["vidtorid"] = vidtorid[id]
+
+    print_dump(collected_info, table, module, identifier, key_map)
+
+    return
+
+
+def extract_rid(info, ns):
+    r = RedisSource(ConnectionPool())
+    r.connect("ASIC_DB", ns)
+    vidtorid = {}
+    vid_cache = {}  # Cache entries to reduce the number of Redis calls
+    for arg in info.keys():
+        mp = get_v_r_map(r, info[arg], vid_cache)
+        if mp:
+            vidtorid[arg] = mp
+    return vidtorid
+
+
+# Map the SAI virtual OIDs found in the ASIC_DB keys to their real OIDs
+# using the VIDTORID hash in ASIC_DB
+def get_v_r_map(r, single_dict, vid_cache):
+    v_r_map = {}
+    asic_obj_ptrn = r"ASIC_STATE:.*:oid:0x\w{1,14}"
+
+    if "ASIC_DB" in single_dict and "keys" in single_dict["ASIC_DB"]:
+        for redis_key in single_dict["ASIC_DB"]["keys"]:
+            if re.match(asic_obj_ptrn, redis_key):
+                matches = re.findall(r"oid:0x\w{1,14}", redis_key)
+                if matches:
+                    vid = matches[0]
+                    if vid in vid_cache:
+                        rid = vid_cache[vid]
+                    else:
+                        rid = r.hget("ASIC_DB", "VIDTORID", vid)
+                        vid_cache[vid] = rid
+                    v_r_map[vid] = rid if rid else "Real ID Not Found"
+    return v_r_map
+
+
+# Filter out the dbs which were not requested
+def filter_out_dbs(db_list, collected_info):
+    args_ = list(collected_info.keys())
+    for arg in args_:
+        dbs = list(collected_info[arg].keys())
+        for db in dbs:
+            if db not in db_list:
+                del collected_info[arg][db]
+    return collected_info
+
+
+def populate_fv(info, module, namespace):
+    all_dbs = set()
+    for id in info.keys():
+        for db_name in info[id].keys():
+            all_dbs.add(db_name)
+
+    db_cfg_file = JsonSource()
+    db_conn = ConnectionPool().initialize_connector(namespace)
+    for db_name in all_dbs:
+        if db_name == "CONFIG_FILE":
+            db_cfg_file.connect(plugins.dump_modules[module].CONFIG_FILE, namespace)
+        else:
+            db_conn.connect(db_name)
+
+    # Replace every matched key with a {key: field-value dict} entry
+    final_info = {}
+    for id in info.keys():
+        final_info[id] = {}
+        for db_name in info[id].keys():
+            final_info[id][db_name] = {}
+            final_info[id][db_name]["keys"] = []
+            final_info[id][db_name]["tables_not_found"] = info[id][db_name]["tables_not_found"]
+            for key in info[id][db_name]["keys"]:
+                if db_name == "CONFIG_FILE":
+                    fv = db_cfg_file.get(db_name, key)
+                else:
+                    fv = db_conn.get_all(db_name, key)
+                final_info[id][db_name]["keys"].append({key: fv})
+
+    return final_info
+
+
+def get_dict_str(key_obj):
+    table = []
+    for pair in key_obj.items():
+        table.append(list(pair))
+    return tabulate(table, headers=["field", "value"], tablefmt="psql")
+
+
+# print dump
+def print_dump(collected_info, table, module, identifier, key_map):
+    if not table:
+        click.echo(json.dumps(collected_info, indent=4))
+        return
+
+    top_header = [plugins.dump_modules[module].ARG_NAME, "DB_NAME", "DUMP"]
+    final_collection = []
+    for ids in collected_info.keys():
+        for db in collected_info[ids].keys():
+            total_info = ""
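+            # Assemble the per-DB cell of the table: tables that were not found,
+            # then either full field-value tables or just the matched keys, and
+            # finally the vid-to-rid mapping when ASIC_DB data is present.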
+ + if collected_info[ids][db]["tables_not_found"]: + tabulate_fmt = [] + for tab in collected_info[ids][db]["tables_not_found"]: + tabulate_fmt.append([tab]) + total_info += tabulate(tabulate_fmt, ["Tables Not Found"], tablefmt="grid") + total_info += "\n" + + if not key_map: + values = [] + hdrs = ["Keys", "field-value pairs"] + for key_obj in collected_info[ids][db]["keys"]: + if isinstance(key_obj, dict) and key_obj: + key = list(key_obj.keys())[0] + values.append([key, get_dict_str(key_obj[key])]) + total_info += str(tabulate(values, hdrs, tablefmt="grid")) + else: + temp = [] + for key_ in collected_info[ids][db]["keys"]: + temp.append([key_]) + total_info += str(tabulate(temp, headers=["Keys Collected"], tablefmt="grid")) + + total_info += "\n" + if "vidtorid" in collected_info[ids][db]: + temp = [] + for pair in collected_info[ids][db]["vidtorid"].items(): + temp.append(list(pair)) + total_info += str(tabulate(temp, headers=["vid", "rid"], tablefmt="grid")) + final_collection.append([ids, db, total_info]) + + click.echo(tabulate(final_collection, top_header, tablefmt="grid")) + return + + +if __name__ == '__main__': + dump() diff --git a/setup.py b/setup.py index 806b5bcb0b80..04d3f6cee096 100644 --- a/setup.py +++ b/setup.py @@ -31,6 +31,8 @@ 'crm', 'debug', 'generic_config_updater', + 'dump', + 'dump.plugins', 'pfcwd', 'sfputil', 'ssdutil', @@ -71,7 +73,8 @@ 'filter_fdb_input/*', 'pfcwd_input/*', 'wm_input/*', - 'ecn_input/*'] + 'ecn_input/*', + 'dump_input/*'] }, scripts=[ 'scripts/aclshow', @@ -143,6 +146,7 @@ 'counterpoll = counterpoll.main:cli', 'crm = crm.main:cli', 'debug = debug.main:cli', + 'dump = dump.main:dump', 'filter_fdb_entries = fdbutil.filter_fdb_entries:main', 'pfcwd = pfcwd.main:cli', 'sfputil = sfputil.main:cli', diff --git a/sonic-utilities-data/bash_completion.d/dump b/sonic-utilities-data/bash_completion.d/dump new file mode 100644 index 000000000000..8644671ef16f --- /dev/null +++ b/sonic-utilities-data/bash_completion.d/dump @@ -0,0 +1,8 @@ +_dump_completion() { + COMPREPLY=( $( env COMP_WORDS="${COMP_WORDS[*]}" \ + COMP_CWORD=$COMP_CWORD \ + _DUMP_COMPLETE=complete $1 ) ) + return 0 +} + +complete -F _dump_completion -o default dump diff --git a/tests/dump_tests/dump_state_test.py b/tests/dump_tests/dump_state_test.py new file mode 100644 index 000000000000..49217b8cf8c9 --- /dev/null +++ b/tests/dump_tests/dump_state_test.py @@ -0,0 +1,246 @@ +import os +import sys +import json +import pytest +from unittest import mock, TestCase +from click.testing import CliRunner +import dump.main as dump +from deepdiff import DeepDiff +from importlib import reload +from utilities_common.db import Db +import traceback +from utilities_common.constants import DEFAULT_NAMESPACE + + +def compare_json_output(exp_json, rec, exclude_paths=None): + print("EXPECTED: \n") + print(json.dumps(exp_json, indent=4)) + try: + rec_json = json.loads(rec) + except Exception as e: + print(rec) + assert False, "CLI Output is not in JSON Format" + return DeepDiff(exp_json, rec_json, exclude_paths=exclude_paths) + + +table_display_output = '''\ ++-------------+-----------+----------------------------------------------------------------------------+ +| port_name | DB_NAME | DUMP | ++=============+===========+============================================================================+ +| Ethernet0 | STATE_DB | +----------------------+-------------------------------------------------+ | +| | | | Keys | field-value pairs | | +| | | 
+======================+=================================================+ | +| | | | PORT_TABLE|Ethernet0 | +------------------+--------------------------+ | | +| | | | | | field | value | | | +| | | | | |------------------+--------------------------| | | +| | | | | | supported_speeds | 10000,25000,40000,100000 | | | +| | | | | +------------------+--------------------------+ | | +| | | +----------------------+-------------------------------------------------+ | ++-------------+-----------+----------------------------------------------------------------------------+ +''' + + +table_display_output_no_filtering = '''\ ++-------------+-----------+-----------------------------------------------------------+ +| port_name | DB_NAME | DUMP | ++=============+===========+===========================================================+ +| Ethernet0 | CONFIG_DB | +------------------+ | +| | | | Keys Collected | | +| | | +==================+ | +| | | | PORT|Ethernet0 | | +| | | +------------------+ | ++-------------+-----------+-----------------------------------------------------------+ +| Ethernet0 | APPL_DB | +----------------------+ | +| | | | Keys Collected | | +| | | +======================+ | +| | | | PORT_TABLE:Ethernet0 | | +| | | +----------------------+ | ++-------------+-----------+-----------------------------------------------------------+ +| Ethernet0 | ASIC_DB | +-------------------------------------------------------+ | +| | | | Keys Collected | | +| | | +=======================================================+ | +| | | | ASIC_STATE:SAI_OBJECT_TYPE_HOSTIF:oid:0xd00000000056d | | +| | | +-------------------------------------------------------+ | +| | | | ASIC_STATE:SAI_OBJECT_TYPE_PORT:oid:0x10000000004a4 | | +| | | +-------------------------------------------------------+ | +| | | +---------------------+---------------------+ | +| | | | vid | rid | | +| | | +=====================+=====================+ | +| | | | oid:0xd00000000056d | oid:0xd | | +| | | +---------------------+---------------------+ | +| | | | oid:0x10000000004a4 | oid:0x1690000000001 | | +| | | +---------------------+---------------------+ | ++-------------+-----------+-----------------------------------------------------------+ +| Ethernet0 | STATE_DB | +----------------------+ | +| | | | Keys Collected | | +| | | +======================+ | +| | | | PORT_TABLE|Ethernet0 | | +| | | +----------------------+ | ++-------------+-----------+-----------------------------------------------------------+ +''' + + +class TestDumpState(object): + + @classmethod + def setup_class(cls): + print("SETUP") + os.environ["UTILITIES_UNIT_TESTING"] = "1" + mock_db_path = os.path.join(os.path.dirname(__file__), "../mock_tables/") + + def test_identifier_single(self): + runner = CliRunner() + result = runner.invoke(dump.state, ["port", "Ethernet0"]) + expected = {'Ethernet0': {'CONFIG_DB': {'keys': [{'PORT|Ethernet0': {'alias': 'etp1', 'description': 'etp1', 'index': '0', 'lanes': '25,26,27,28', 'mtu': '9100', 'pfc_asym': 'off', 'speed': '40000'}}], 'tables_not_found': []}, + 'APPL_DB': {'keys': [{'PORT_TABLE:Ethernet0': {'index': '0', 'lanes': '0', 'alias': 'Ethernet0', 'description': 'ARISTA01T2:Ethernet1', 'speed': '25000', 'oper_status': 'down', 'pfc_asym': 'off', 'mtu': '9100', 'fec': 'rs', 'admin_status': 'up'}}], 'tables_not_found': []}, + 'ASIC_DB': {'keys': [{'ASIC_STATE:SAI_OBJECT_TYPE_HOSTIF:oid:0xd00000000056d': {'SAI_HOSTIF_ATTR_NAME': 'Ethernet0', 'SAI_HOSTIF_ATTR_OBJ_ID': 'oid:0x10000000004a4', 'SAI_HOSTIF_ATTR_OPER_STATUS': 
'true', 'SAI_HOSTIF_ATTR_TYPE': 'SAI_HOSTIF_TYPE_NETDEV', 'SAI_HOSTIF_ATTR_VLAN_TAG': 'SAI_HOSTIF_VLAN_TAG_STRIP'}}, {'ASIC_STATE:SAI_OBJECT_TYPE_PORT:oid:0x10000000004a4': {'NULL': 'NULL', 'SAI_PORT_ATTR_ADMIN_STATE': 'true', 'SAI_PORT_ATTR_MTU': '9122', 'SAI_PORT_ATTR_SPEED': '100000'}}], 'tables_not_found': [], 'vidtorid': {'oid:0xd00000000056d': 'oid:0xd', 'oid:0x10000000004a4': 'oid:0x1690000000001'}}, + 'STATE_DB': {'keys': [{'PORT_TABLE|Ethernet0': {'supported_speeds': '10000,25000,40000,100000'}}], 'tables_not_found': []}}} + + assert result.exit_code == 0, "exit code: {}, Exception: {}, Traceback: {}".format(result.exit_code, result.exception, result.exc_info) + # Cause other tests depend and change these paths in the mock_db, this test would fail everytime when a field or a value in changed in this path, creating noise + # and therefore ignoring these paths. field-value dump capability of the utility is nevertheless verified using f-v dumps of ASIC_DB & STATE_DB + pths = ["root['Ethernet0']['CONFIG_DB']['keys'][0]['PORT|Ethernet0']", "root['Ethernet0']['APPL_DB']['keys'][0]['PORT_TABLE:Ethernet0']"] + ddiff = compare_json_output(expected, result.output, exclude_paths=pths) + assert not ddiff, ddiff + + def test_identifier_multiple(self): + runner = CliRunner() + result = runner.invoke(dump.state, ["port", "Ethernet0,Ethernet4"]) + print(result.output) + expected = {"Ethernet0": + {"CONFIG_DB": {"keys": [{"PORT|Ethernet0": {"alias": "etp1", "description": "etp1", "index": "0", "lanes": "25,26,27,28", "mtu": "9100", "pfc_asym": "off", "speed": "40000"}}], "tables_not_found": []}, + "APPL_DB": {"keys": [{"PORT_TABLE:Ethernet0": {"index": "0", "lanes": "0", "alias": "Ethernet0", "description": "ARISTA01T2:Ethernet1", "speed": "25000", "oper_status": "down", "pfc_asym": "off", "mtu": "9100", "fec": "rs", "admin_status": "up"}}], "tables_not_found": []}, + "ASIC_DB": {"keys": [{"ASIC_STATE:SAI_OBJECT_TYPE_HOSTIF:oid:0xd00000000056d": {"SAI_HOSTIF_ATTR_NAME": "Ethernet0", "SAI_HOSTIF_ATTR_OBJ_ID": "oid:0x10000000004a4", "SAI_HOSTIF_ATTR_OPER_STATUS": "true", "SAI_HOSTIF_ATTR_TYPE": "SAI_HOSTIF_TYPE_NETDEV", "SAI_HOSTIF_ATTR_VLAN_TAG": "SAI_HOSTIF_VLAN_TAG_STRIP"}}, {"ASIC_STATE:SAI_OBJECT_TYPE_PORT:oid:0x10000000004a4": {"NULL": "NULL", "SAI_PORT_ATTR_ADMIN_STATE": "true", "SAI_PORT_ATTR_MTU": "9122", "SAI_PORT_ATTR_SPEED": "100000"}}], "tables_not_found": [], "vidtorid": {"oid:0xd00000000056d": "oid:0xd", "oid:0x10000000004a4": "oid:0x1690000000001"}}, + "STATE_DB": {"keys": [{"PORT_TABLE|Ethernet0": {"supported_speeds": "10000,25000,40000,100000"}}], "tables_not_found": []}}, + "Ethernet4": + {"CONFIG_DB": {"keys": [{"PORT|Ethernet4": {"admin_status": "up", "alias": "etp2", "description": "Servers0:eth0", "index": "1", "lanes": "29,30,31,32", "mtu": "9100", "pfc_asym": "off", "speed": "40000"}}], "tables_not_found": []}, + "APPL_DB": {"keys": [], "tables_not_found": ["PORT_TABLE"]}, + "ASIC_DB": {"keys": [], "tables_not_found": ["ASIC_STATE:SAI_OBJECT_TYPE_HOSTIF", "ASIC_STATE:SAI_OBJECT_TYPE_PORT"]}, + "STATE_DB": {"keys": [], "tables_not_found": ["PORT_TABLE"]}}} + assert result.exit_code == 0, "exit code: {}, Exception: {}, Traceback: {}".format(result.exit_code, result.exception, result.exc_info) + pths = ["root['Ethernet0']['CONFIG_DB']['keys'][0]['PORT|Ethernet0']", "root['Ethernet0']['APPL_DB']['keys'][0]['PORT_TABLE:Ethernet0']"] + pths.extend(["root['Ethernet4']['CONFIG_DB']['keys'][0]['PORT|Ethernet4']", "root['Ethernet4']['APPL_DB']['keys'][0]['PORT_TABLE:Ethernet4']"]) + 
ddiff = compare_json_output(expected, result.output, pths) + assert not ddiff, ddiff + + def test_option_key_map(self): + runner = CliRunner() + result = runner.invoke(dump.state, ["port", "Ethernet0", "--key-map"]) + print(result.output) + expected = {"Ethernet0": {"CONFIG_DB": {"keys": ["PORT|Ethernet0"], "tables_not_found": []}, + "APPL_DB": {"keys": ["PORT_TABLE:Ethernet0"], "tables_not_found": []}, + "ASIC_DB": {"keys": ["ASIC_STATE:SAI_OBJECT_TYPE_HOSTIF:oid:0xd00000000056d", "ASIC_STATE:SAI_OBJECT_TYPE_PORT:oid:0x10000000004a4"], "tables_not_found": [], "vidtorid": {"oid:0xd00000000056d": "oid:0xd", "oid:0x10000000004a4": "oid:0x1690000000001"}}, + "STATE_DB": {"keys": ["PORT_TABLE|Ethernet0"], "tables_not_found": []}}} + assert result.exit_code == 0, "exit code: {}, Exception: {}, Traceback: {}".format(result.exit_code, result.exception, result.exc_info) + ddiff = compare_json_output(expected, result.output) + assert not ddiff, ddiff + + def test_option_db_filtering(self): + runner = CliRunner() + result = runner.invoke(dump.state, ["port", "Ethernet0", "--db", "ASIC_DB", "--db", "STATE_DB"]) + print(result.output) + expected = {"Ethernet0": {"ASIC_DB": {"keys": [{"ASIC_STATE:SAI_OBJECT_TYPE_HOSTIF:oid:0xd00000000056d": {"SAI_HOSTIF_ATTR_NAME": "Ethernet0", "SAI_HOSTIF_ATTR_OBJ_ID": "oid:0x10000000004a4", "SAI_HOSTIF_ATTR_OPER_STATUS": "true", "SAI_HOSTIF_ATTR_TYPE": "SAI_HOSTIF_TYPE_NETDEV", "SAI_HOSTIF_ATTR_VLAN_TAG": "SAI_HOSTIF_VLAN_TAG_STRIP"}}, {"ASIC_STATE:SAI_OBJECT_TYPE_PORT:oid:0x10000000004a4": {"NULL": "NULL", "SAI_PORT_ATTR_ADMIN_STATE": "true", "SAI_PORT_ATTR_MTU": "9122", "SAI_PORT_ATTR_SPEED": "100000"}}], "tables_not_found": [], "vidtorid": {"oid:0xd00000000056d": "oid:0xd", "oid:0x10000000004a4": "oid:0x1690000000001"}}, + "STATE_DB": {"keys": [{"PORT_TABLE|Ethernet0": {"supported_speeds": "10000,25000,40000,100000"}}], "tables_not_found": []}}} + assert result.exit_code == 0, "exit code: {}, Exception: {}, Traceback: {}".format(result.exit_code, result.exception, result.exc_info) + ddiff = compare_json_output(expected, result.output) + assert not ddiff, ddiff + + def test_option_tabular_display(self): + runner = CliRunner() + result = runner.invoke(dump.state, ["port", "Ethernet0", "--db", "STATE_DB", "--table"]) + print(result.output) + assert result.exit_code == 0, "exit code: {}, Exception: {}, Traceback: {}".format(result.exit_code, result.exception, result.exc_info) + assert table_display_output == result.output + + def test_option_tabular_display_no_db_filter(self): + runner = CliRunner() + result = runner.invoke(dump.state, ["port", "Ethernet0", "--table", "--key-map"]) + print(result.output) + assert result.exit_code == 0, "exit code: {}, Exception: {}, Traceback: {}".format(result.exit_code, result.exception, result.exc_info) + assert table_display_output_no_filtering == result.output + + def test_identifier_all_with_filtering(self): + runner = CliRunner() + expected_entries = [] + for i in range(0, 125, 4): + expected_entries.append("Ethernet" + str(i)) + result = runner.invoke(dump.state, ["port", "all", "--db", "CONFIG_DB", "--key-map"]) + print(result.output) + try: + rec_json = json.loads(result.output) + except Exception as e: + assert 0, "CLI Output is not in JSON Format" + ddiff = DeepDiff(set(expected_entries), set(rec_json.keys())) + assert not ddiff, "Expected Entries were not recieved when passing all keyword" + + def test_namespace_single_asic(self): + runner = CliRunner() + result = runner.invoke(dump.state, ["port", "Ethernet0", "--table", 
"--key-map", "--namespace", "asic0"]) + print(result.output) + assert result.output == "Namespace option is not valid for a single-ASIC device\n" + + @classmethod + def teardown(cls): + print("TEARDOWN") + os.environ["UTILITIES_UNIT_TESTING"] = "0" + + +class TestDumpStateMultiAsic(object): + + @classmethod + def setup_class(cls): + print("SETUP") + os.environ["UTILITIES_UNIT_TESTING"] = "2" + os.environ["UTILITIES_UNIT_TESTING_TOPOLOGY"] = "multi_asic" + from ..mock_tables import mock_multi_asic + reload(mock_multi_asic) + from ..mock_tables import dbconnector + dbconnector.load_namespace_config() + + def test_default_namespace(self): + runner = CliRunner() + db = Db() + result = runner.invoke(dump.state, ["port", "Ethernet0", "--key-map"], obj=db) + expected = {"Ethernet0": {"CONFIG_DB": {"keys": ["PORT|Ethernet0"], "tables_not_found": []}, + "APPL_DB": {"keys": ["PORT_TABLE:Ethernet0"], "tables_not_found": []}, + "ASIC_DB": {"keys": ["ASIC_STATE:SAI_OBJECT_TYPE_HOSTIF:oid:0xd00000000056d", "ASIC_STATE:SAI_OBJECT_TYPE_PORT:oid:0x10000000004a4"], "tables_not_found": [], "vidtorid": {"oid:0xd00000000056d": "oid:0xd", "oid:0x10000000004a4": "oid:0x1690000000001"}}, + "STATE_DB": {"keys": ["PORT_TABLE|Ethernet0"], "tables_not_found": []}}} + assert result.exit_code == 0, "exit code: {}, Exception: {}, Output: {}".format(result.exit_code, result.exception, result.output) + ddiff = compare_json_output(expected, result.output) + assert not ddiff, ddiff + + def test_namespace_asic0(self): + runner = CliRunner() + db = Db() + result = runner.invoke(dump.state, ["port", "Ethernet0", "--namespace", "asic0"], obj=db) + expected = {"Ethernet0": {"CONFIG_DB": {"keys": [{"PORT|Ethernet0": {"admin_status": "up", "alias": "Ethernet1/1", "asic_port_name": "Eth0-ASIC0", "description": "ARISTA01T2:Ethernet3/1/1", "lanes": "33,34,35,36", "mtu": "9100", "pfc_asym": "off", "role": "Ext", "speed": "40000"}}], "tables_not_found": []}, + "APPL_DB": {"keys": [{"PORT_TABLE:Ethernet0": {"lanes": "33,34,35,36", "description": "ARISTA01T2:Ethernet3/1/1", "pfc_asym": "off", "mtu": "9100", "alias": "Ethernet1/1", "oper_status": "up", "admin_status": "up", "role": "Ext", "speed": "40000", "asic_port_name": "Eth0-ASIC0"}}], "tables_not_found": []}, + "ASIC_DB": {"keys": [], "tables_not_found": ["ASIC_STATE:SAI_OBJECT_TYPE_HOSTIF", "ASIC_STATE:SAI_OBJECT_TYPE_PORT"]}, "STATE_DB": {"keys": [], "tables_not_found": ["PORT_TABLE"]}}} + print(expected) + assert result.exit_code == 0, "exit code: {}, Exception: {}, Output: {}".format(result.exit_code, result.exception, result.output) + ddiff = compare_json_output(expected, result.output) + assert not ddiff, ddiff + + def test_namespace_asic1(self): + runner = CliRunner() + db = Db() + result = runner.invoke(dump.state, ["port", "Ethernet-BP256", "--namespace", "asic1"], obj=db) + expected = {"Ethernet-BP256": + {"CONFIG_DB": {"keys": [{"PORT|Ethernet-BP256": {"admin_status": "up", "alias": "Ethernet-BP256", "asic_port_name": "Eth0-ASIC1", "description": "ASIC0:Eth16-ASIC0", "lanes": "61,62,63,64", "mtu": "9100", "pfc_asym": "off", "role": "Int", "speed": "40000"}}], "tables_not_found": []}, + "APPL_DB": {"keys": [{"PORT_TABLE:Ethernet-BP256": {"oper_status": "up", "lanes": "61,62,63,64", "description": "ASIC0:Eth16-ASIC0", "pfc_asym": "off", "mtu": "9100", "alias": "Ethernet-BP256", "admin_status": "up", "speed": "40000", "asic_port_name": "Eth0-ASIC1"}}], "tables_not_found": []}, + "ASIC_DB": {"keys": [], "tables_not_found": ["ASIC_STATE:SAI_OBJECT_TYPE_HOSTIF", 
"ASIC_STATE:SAI_OBJECT_TYPE_PORT"]}, + "STATE_DB": {"keys": [], "tables_not_found": ["PORT_TABLE"]}}} + assert result.exit_code == 0, "exit code: {}, Exception: {}, Output: {}".format(result.exit_code, result.exception, result.output) + ddiff = compare_json_output(expected, result.output) + assert not ddiff, ddiff + + def test_invalid_namespace(self): + runner = CliRunner() + db = Db() + result = runner.invoke(dump.state, ["port", "Ethernet0", "--namespace", "asic3"], obj=db) + assert result.output == "Namespace option is not valid. Choose one of ['asic0', 'asic1']\n", result + + def teardown_class(cls): + print("TEARDOWN") + os.environ["UTILITIES_UNIT_TESTING"] = "0" + os.environ["UTILITIES_UNIT_TESTING_TOPOLOGY"] = "" diff --git a/tests/mock_tables/asic_db.json b/tests/mock_tables/asic_db.json index 1a769b82b567..333899f27376 100644 --- a/tests/mock_tables/asic_db.json +++ b/tests/mock_tables/asic_db.json @@ -2,5 +2,22 @@ "ASIC_STATE:SAI_OBJECT_TYPE_SWITCH:oid:0x21000000000000": { "SAI_SWITCH_ATTR_INIT_SWITCH": "true", "SAI_SWITCH_ATTR_SRC_MAC_ADDRESS": "DE:AD:BE:EF:CA:FE" - } + }, + "ASIC_STATE:SAI_OBJECT_TYPE_HOSTIF:oid:0xd00000000056d": { + "SAI_HOSTIF_ATTR_NAME": "Ethernet0", + "SAI_HOSTIF_ATTR_OBJ_ID": "oid:0x10000000004a4", + "SAI_HOSTIF_ATTR_OPER_STATUS": "true", + "SAI_HOSTIF_ATTR_TYPE": "SAI_HOSTIF_TYPE_NETDEV", + "SAI_HOSTIF_ATTR_VLAN_TAG": "SAI_HOSTIF_VLAN_TAG_STRIP" + }, + "ASIC_STATE:SAI_OBJECT_TYPE_PORT:oid:0x10000000004a4": { + "NULL": "NULL", + "SAI_PORT_ATTR_ADMIN_STATE": "true", + "SAI_PORT_ATTR_MTU": "9122", + "SAI_PORT_ATTR_SPEED": "100000" + }, + "VIDTORID":{ + "oid:0xd00000000056d": "oid:0xd", + "oid:0x10000000004a4": "oid:0x1690000000001" + } }