
Swssconfig #11

Closed · wants to merge 4 commits
2 changes: 1 addition & 1 deletion Makefile.am
@@ -1,4 +1,4 @@
-SUBDIRS = neighsyncd intfsyncd portsyncd orchagent
+SUBDIRS = neighsyncd intfsyncd portsyncd orchagent swssconfig

if FPM
SUBDIRS += fpmsyncd
1 change: 1 addition & 0 deletions configure.ac
@@ -45,6 +45,7 @@ AC_CONFIG_FILES([
neighsyncd/Makefile
intfsyncd/Makefile
portsyncd/Makefile
+swssconfig/Makefile
])

AC_OUTPUT
2 changes: 1 addition & 1 deletion debian/rules
@@ -28,7 +28,7 @@ include /usr/share/dpkg/default.mk
# -DCMAKE_LIBRARY_PATH=$(DEB_HOST_MULTIARCH)

override_dh_auto_configure:
-dh_auto_configure -- --with-fpm=fpm/
+dh_auto_configure -- --with-fpm=fpm/

override_dh_auto_install:
dh_auto_install
15 changes: 15 additions & 0 deletions swssconfig/Makefile.am
@@ -0,0 +1,15 @@
INCLUDES = -I $(top_srcdir)

bin_PROGRAMS = swssconfig

if DEBUG
DBGFLAGS = -ggdb -DDEBUG
else
DBGFLAGS = -g
endif

swssconfig_SOURCES = swssconfig.cpp

swssconfig_CFLAGS = $(DBGFLAGS) $(AM_CFLAGS) $(CFLAGS_COMMON)
swssconfig_CPPFLAGS = $(DBGFLAGS) $(AM_CFLAGS) $(CFLAGS_COMMON)
swssconfig_LDADD = -lnl-3 -lnl-route-3 -lhiredis -lswsscommon
54 changes: 54 additions & 0 deletions swssconfig/sample/sample.json
@@ -0,0 +1,54 @@
[
    {
        "QOS_TABLE:TC_TO_QUEUE_MAP_TABLE:AZURE": {
            "5": "1",
            "6": "1"
        },
        "OP": "SET"
    },
    {
        "QOS_TABLE:DSCP_TO_TC_MAP_TABLE:AZURE": {
            "7": "5",
            "6": "5",
            "3": "3",
            "8": "7",
            "9": "8"
        },
        "OP": "SET"
    },
    {
        "QOS_TABLE:PORT_TABLE:ETHERNET4": {
            "dscp_to_tc_map": "[DSCP_TO_TC_MAP_TABLE:AZURE]",
            "tc_to_queue_map": "[TC_TO_QUEUE_MAP_TABLE:AZURE]"
        },
        "OP": "SET"
    },
    {
        "QOS_TABLE:SCHEDULER_TABLE:SCAVENGER": {
            "algorithm": "DWRR",
            "weight": "35"
        },
        "OP": "SET"
    },
    {
        "QOS_TABLE:SCHEDULER_TABLE:BEST_EFFORT": {
            "algorithm": "PRIORITY",
            "priority": "7"
        },
        "OP": "SET"
    },
    {
        "QOS_TABLE:WRED_PROFILE_TABLE:AZURE": {
            "yellow_max_threshold": "200",
            "green_max_threshold": "100"
        },
        "OP": "SET"
    },
    {
        "QUEUE_TABLE:ETHERNET4:1": {
            "scheduler": "[SCHEDULER_TABLE:BEST_EFFORT]",
            "wred_profile": "[WRED_PROFILE_TABLE:AZURE]"
        },
        "OP": "SET"
    }
]
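
Each top-level key inside an entry names a Redis hash, and swssconfig (in the diff below) splits that name at the first ":" into a table name and a key, so any further delimiters stay inside the key. A minimal standalone sketch of that convention, not part of the PR, using the first hash name from the sample:

#include <iostream>
#include <string>

int main()
{
    // Hash name taken from the first sample entry above.
    std::string hash = "QOS_TABLE:TC_TO_QUEUE_MAP_TABLE:AZURE";

    // swssconfig splits at the first ':' only; it rejects hash names
    // that have no delimiter or end with one.
    std::size_t pos = hash.find(':');
    std::string table = hash.substr(0, pos);   // "QOS_TABLE"
    std::string key   = hash.substr(pos + 1);  // "TC_TO_QUEUE_MAP_TABLE:AZURE"

    std::cout << table << " / " << key << "\n";
}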
86 changes: 86 additions & 0 deletions swssconfig/sample/sample.json.output.txt
@@ -0,0 +1,86 @@
127.0.0.1:6379> keys *
1) "QUEUE_TABLE_KEY_QUEUE"
2) "DSCP_TO_TC_MAP_TABLE_VALUE_QUEUE"
3) "SCHEDULER_TABLE_OP_QUEUE"
4) "DSCP_TO_TC_MAP_TABLE_OP_QUEUE"
5) "SCHEDULER_TABLE_VALUE_QUEUE"
6) "PORT_TABLE_OP_QUEUE"
7) "WRED_PROFILE_TABLE_KEY_QUEUE"
8) "TC_TO_QUEUE_MAP_TABLE_VALUE_QUEUE"
9) "TC_TO_QUEUE_MAP_TABLE_OP_QUEUE"
10) "PORT_TABLE_KEY_QUEUE"
11) "TC_TO_QUEUE_MAP_TABLE:AZURE"
12) "TC_TO_QUEUE_MAP_TABLE_KEY_QUEUE"
13) "DSCP_TO_TC_MAP_TABLE_KEY_QUEUE"
14) "QUEUE_TABLE_OP_QUEUE"
15) "QUEUE_TABLE_VALUE_QUEUE"
16) "SCHEDULER_TABLE:BEST_EFFORT"
17) "SCHEDULER_TABLE:SCAVENGER"
18) "WRED_PROFILE_TABLE_OP_QUEUE"
19) "DSCP_TO_TC_MAP_TABLE:AZURE"
20) "WRED_PROFILE_TABLE:AZURE"
21) "PORT_TABLE_VALUE_QUEUE"
22) "QUEUE_TABLE:ETHERNET4:1"
23) "WRED_PROFILE_TABLE_VALUE_QUEUE"
24) "PORT_TABLE:ETHERNET4"
25) "SCHEDULER_TABLE_KEY_QUEUE"

hgetall TC_TO_QUEUE_MAP_TABLE:AZURE
hgetall SCHEDULER_TABLE:BEST_EFFORT
hgetall SCHEDULER_TABLE:SCAVENGER
hgetall DSCP_TO_TC_MAP_TABLE:AZURE
hgetall QUEUE_TABLE:ETHERNET4:1
hgetall PORT_TABLE:ETHERNET4
hgetall WRED_PROFILE_TABLE:AZURE

127.0.0.1:6379> hgetall "WRED_PROFILE_TABLE:AZURE"
1) "green_max_threshold"
2) "100"
3) "yellow_max_threshold"
4) "200"
127.0.0.1:6379>
127.0.0.1:6379> hgetall TC_TO_QUEUE_MAP_TABLE:AZURE
1) "5"
2) "1"
3) "6"
4) "1"
127.0.0.1:6379> hgetall SCHEDULER_TABLE:BEST_EFFORT
1) "algorithm"
2) "PRIORITY"
3) "priority"
4) "7"
127.0.0.1:6379> hgetall SCHEDULER_TABLE:SCAVENGER
1) "algorithm"
2) "DWRR"
3) "weight"
4) "35"
127.0.0.1:6379> hgetall DSCP_TO_TC_MAP_TABLE:AZURE
1) "3"
2) "3"
3) "6"
4) "5"
5) "7"
6) "5"
7) "8"
8) "7"
9) "9"
10) "8"
127.0.0.1:6379> hgetall QUEUE_TABLE:ETHERNET4:1
1) "scheduler"
2) "[SCHEDULER_TABLE:BEST_EFFORT]"
3) "wred_profile"
4) "[WRED_PROFILE_TABLE:AZURE]"
127.0.0.1:6379> hgetall PORT_TABLE:ETHERNET4
1) "dscp_to_tc_map"
2) "[DSCP_TO_TC_MAP_TABLE:AZURE]"
3) "tc_to_queue_map"
4) "[TC_TO_QUEUE_MAP_TABLE:AZURE]"
127.0.0.1:6379>

hgetall TC_TO_QUEUE_MAP_TABLE:AZURE
hgetall SCHEDULER_TABLE:BEST_EFFORT
hgetall SCHEDULER_TABLE:SCAVENGER
hgetall DSCP_TO_TC_MAP_TABLE:AZURE
hgetall QUEUE_TABLE:ETHERNET4:1
hgetall PORT_QOS_MAP_TABLE:ETHERNET4
hgetall WRED_PROFILE_TABLE:AZURE
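
The same verification can be scripted rather than typed into redis-cli. A minimal sketch, assuming a local Redis on the default port, using hiredis (one of the libraries swssconfig links against in its Makefile.am above); the hash name is taken from the sample:

#include <cstdio>
#include <hiredis/hiredis.h>

int main()
{
    // Connect to the local Redis instance used by the swss daemons.
    redisContext *ctx = redisConnect("127.0.0.1", 6379);
    if (ctx == nullptr || ctx->err)
        return 1;

    // Equivalent of: 127.0.0.1:6379> hgetall WRED_PROFILE_TABLE:AZURE
    redisReply *reply = (redisReply *)redisCommand(ctx, "HGETALL %s",
                                                   "WRED_PROFILE_TABLE:AZURE");
    if (reply && reply->type == REDIS_REPLY_ARRAY) {
        // HGETALL replies alternate field, value, field, value, ...
        for (size_t i = 0; i + 1 < reply->elements; i += 2)
            printf("%s = %s\n", reply->element[i]->str,
                   reply->element[i + 1]->str);
    }
    freeReplyObject(reply);
    redisFree(ctx);
    return 0;
}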
201 changes: 201 additions & 0 deletions swssconfig/swssconfig.cpp
@@ -0,0 +1,201 @@
#include <stdlib.h>
#include <iostream>
#include <vector>
#include "logger.h"
#include <fstream>
#include "dbconnector.h"
#include "producertable.h"
#include "json.hpp"
Contributor: Where is json.hpp?

Contributor: Would suggest adding "with-nlohmann-json=<include path>" to configure.ac.
https://github.com/nlohmann/json

Contributor: After discussion, we're still putting the json.hpp file into our common library. It's only one file, and keeping it source-controlled on our side avoids future updates breaking the build.

using namespace std;
using namespace swss;
using json = nlohmann::json;
Contributor: Please don't use "using namespace"; write fully qualified names such as "std::string" instead of "string". This avoids conflicts between identically named symbols in different namespaces.

Contributor: It's a source file (.cpp), not a header, so it shouldn't cause conflicts for other translation units. I think it's okay.
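
For reference, a brief sketch of the fully qualified style the first reviewer is asking for, applied to declarations from this file; a narrow type alias for nlohmann::json would normally stay:

// Sketch: the same declarations without using-directives. Fully qualified
// names cannot collide with same-named symbols from other namespaces.
#include <string>
#include <vector>
#include "producertable.h"  // FieldValueTuple comes in through the swss headers, as in swssconfig.cpp
#include "json.hpp"

using json = nlohmann::json;  // a narrow alias, unlike a using-directive

typedef struct _sonic_db_item_t {
    std::string op_val;
    std::string hash_name;
    std::vector<swss::FieldValueTuple> fvVector;
} sonic_db_item_t;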


int db_port = 6379;
const char* const hostname = "localhost";
const char* const op_name = "OP";
const char* const name_delimiter = ":";
const int el_count = 2;

#define _in_
#define _out_
#define _inout_
Contributor: I don't think we should need this.

Author: I think it's a valuable annotation. We should actually move this into common code.

typedef struct _sonic_db_item_t {
string op_val;
string hash_name;
std::vector<FieldValueTuple> fvVector;
} sonic_db_item_t;

void usage(char **argv)
{
cout <<"Usage: " << argv[0] << " json_file_path\n";
}

void dump_db_item_cout(_in_ sonic_db_item_t &db_item)
{
cout << "db_item [\n";
cout << "operation: " << db_item.op_val << "\n";
cout << "hash: " << db_item.hash_name << "\n";
cout << "[\n";
for(auto &fv: db_item.fvVector) {
cout << "field: " << fvField(fv);
cout << "value: " << fvValue(fv) << "\n";
}
cout << "]\n";
cout << "]\n";
}

void dump_db_item(_in_ sonic_db_item_t &db_item)
{
SWSS_LOG_NOTICE("db_item: [\n");
SWSS_LOG_NOTICE("operation: %s", db_item.op_val.c_str());
SWSS_LOG_NOTICE("hash: %s\n", db_item.hash_name.c_str());
SWSS_LOG_NOTICE("fields: [\n");
for(auto &fv: db_item.fvVector) {
SWSS_LOG_NOTICE("field: %s", fvField(fv).c_str());
SWSS_LOG_NOTICE("value: %s\n", fvValue(fv).c_str());
}
SWSS_LOG_NOTICE("]\n");
SWSS_LOG_NOTICE("]\n");
}

bool write_db_data(_in_ std::vector<sonic_db_item_t> &db_items)
{
DBConnector db(APPL_DB, hostname, db_port, 0);
#ifdef _DUMP_TO_COUT_
for (sonic_db_item_t &db_item : db_items) {
dump_db_item_cout(db_item);
}
#endif //_DUMP_TO_COUT_
for (sonic_db_item_t &db_item : db_items) {
dump_db_item(db_item);

std::size_t pos = db_item.hash_name.find(name_delimiter);
if((string::npos == pos) || ((db_item.hash_name.size() - 1) == pos)) {
SWSS_LOG_ERROR("Invalid formatted hash:%s\n", db_item.hash_name.c_str());
return false;
}
string table_name = db_item.hash_name.substr(0, pos);
string key_name = db_item.hash_name.substr(pos + 1);
ProducerTable producer(&db, table_name);

if(db_item.op_val == SET_COMMAND) {
producer.set(key_name, db_item.fvVector, SET_COMMAND);
}
else if(db_item.op_val == DEL_COMMAND) {
producer.del(key_name, DEL_COMMAND);
}
}
return true;
}

bool load_json_db_data(
_in_ std::iostream &fs,
_out_ std::vector<sonic_db_item_t> &db_items)
{
json json_array;
fs >> json_array;
if(!json_array.is_array()) {
SWSS_LOG_ERROR("root element must be an array\n");
return false;
}

for (size_t i = 0; i < json_array.size(); i++) {

auto &arr_item = json_array[i];

if(arr_item.is_object()) {
if(el_count != arr_item.size()) {
SWSS_LOG_ERROR("root element must be an array\n");
return false;
}

db_items.push_back(sonic_db_item_t());
sonic_db_item_t &cur_db_item = db_items.back();

//
// iterate over array items
// each item must have following structure:
// {
// "OP":"SET/DEL",
// db_key_name {
// 1*Nfield:value
// }
// }
//
//
for (json::iterator child_it = arr_item.begin(); child_it != arr_item.end(); ++child_it) {
auto cur_obj_key = child_it.key();
auto &cur_obj = child_it.value();

string field_str;
int val;
string value_str;

if(cur_obj.is_object()) {
cur_db_item.hash_name = cur_obj_key;
for (json::iterator cur_obj_it = cur_obj.begin(); cur_obj_it != cur_obj.end(); ++cur_obj_it) {

field_str = cur_obj_it.key();
if((*cur_obj_it).is_number()) {
val = (*cur_obj_it).get<int>();
value_str = std::to_string(val);
}
if((*cur_obj_it).is_string()) {
value_str = (*cur_obj_it).get<string>();
}
cur_db_item.fvVector.push_back(FieldValueTuple(field_str, value_str));
}
}
else {
if(op_name != child_it.key()) {
SWSS_LOG_ERROR("Invalid entry. expected item:%s\n", op_name);
return false;
}
cur_db_item.op_val = cur_obj.get<std::string>();
}
}
}
else {
SWSS_LOG_WARN("Skipping array item that is not an object: %s\n", arr_item.dump().c_str());
}
}
return true;
}

int main(int argc, char **argv)
{
Logger::setMinPrio(Logger::SWSS_DEBUG);

if (argc != 2)
{
usage(argv);
exit(EXIT_FAILURE);
}
std::vector<sonic_db_item_t> db_items;
std::fstream fs;
try {
fs.open(argv[1], std::fstream::in); // read-only; opening with out|app would silently create a missing file
if(!load_json_db_data(fs, db_items)) {
SWSS_LOG_ERROR("Failed loading data from json file\n");
fs.close();
return EXIT_FAILURE;
}
fs.close();
}
catch(...) {
cout << "Failed loading json file: " << argv[1] << " Please refer to logs\n";
return EXIT_FAILURE;
}
try {
if(!write_db_data(db_items)) {
SWSS_LOG_ERROR("Failed writing data to db\n");
return EXIT_FAILURE;
}
}
catch(...) {
cout << "Failed applying settings from json file: " << argv[1] << " Please refer to logs\n";
return EXIT_FAILURE;
}
return EXIT_SUCCESS;
}