From 6afddad247619c48d72e845f89578b6d67d3fcf8 Mon Sep 17 00:00:00 2001 From: zhenghaoz Date: Fri, 25 Oct 2024 22:31:42 +0800 Subject: [PATCH 01/14] fix: remove score check (#873) --- storage/cache/database.go | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/storage/cache/database.go b/storage/cache/database.go index e8816fd6c..6fa1ce143 100644 --- a/storage/cache/database.go +++ b/storage/cache/database.go @@ -16,6 +16,7 @@ package cache import ( "context" + "math" "sort" "strconv" "strings" @@ -228,9 +229,7 @@ func (aggregator *DocumentAggregator) Add(category string, values []string, scor Timestamp: aggregator.Timestamp, } } else { - if aggregator.Documents[value].Score != scores[i] { - panic("score should be the same") - } + aggregator.Documents[value].Score = math.Max(aggregator.Documents[value].Score, scores[i]) aggregator.Documents[value].Categories = append(aggregator.Documents[value].Categories, category) } } From 76a3ca5c56cc28a7d5dfe602c99949c24a9fa317 Mon Sep 17 00:00:00 2001 From: zhenghaoz Date: Sat, 26 Oct 2024 01:50:44 +0800 Subject: [PATCH 02/14] data: support ClickHouse back (#874) --- .circleci/config.yml | 4 +- .github/workflows/build_test.yml | 14 +- CONTRIBUTING.md | 1 + README.md | 2 +- client/docker-compose.yml.j2 | 21 +++ config/config.go | 3 + config/config.toml | 5 +- docker-compose.yml | 14 ++ go.mod | 14 +- go.sum | 8 + server/bench_test.go | 32 ++-- server/bench_test.sh | 3 + storage/data/database.go | 29 +++ storage/data/database_test.go | 48 ++++- storage/data/mongodb.go | 5 + storage/data/no_database.go | 5 + storage/data/no_database_test.go | 7 +- storage/data/sql.go | 310 ++++++++++++++++++++++++------- storage/data/sql_test.go | 34 +++- storage/docker-compose.yml | 5 + storage/scheme.go | 6 + 21 files changed, 470 insertions(+), 100 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index ddb7037f0..342b85159 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -100,7 +100,7 @@ 
jobs: - run: name: Run tests no_output_timeout: 20m - command: go test -timeout 20m -v ./... -skip "TestPostgres|TestMySQL|TestMongo|TestRedis" + command: go test -timeout 20m -v ./... -skip "TestPostgres|TestMySQL|TestMongo|TestRedis|TestClickHouse" unit-test-windows: executor: win/server-2022 @@ -139,7 +139,7 @@ jobs: - run: name: Run tests no_output_timeout: 20m - command: go test -timeout 20m -v ./... -skip "TestPostgres|TestMySQL|TestMongo|TestRedis" + command: go test -timeout 20m -v ./... -skip "TestPostgres|TestMySQL|TestMongo|TestRedis|TestClickHouse" workflows: unit-test: diff --git a/.github/workflows/build_test.yml b/.github/workflows/build_test.yml index 7fefe409a..2fda49a54 100644 --- a/.github/workflows/build_test.yml +++ b/.github/workflows/build_test.yml @@ -50,6 +50,16 @@ jobs: --health-timeout 5s --health-retries 5 + clickhouse: + image: clickhouse/clickhouse-server:21.10 + ports: + - 8123 + options: >- + --health-cmd="clickhouse-client --query 'SELECT 1'" + --health-interval=10s + --health-timeout=5s + --health-retries=5 + redis: image: redis/redis-stack:6.2.6-v9 ports: @@ -102,6 +112,8 @@ jobs: POSTGRES_URI: postgres://gorse:gorse_pass@localhost:${{ job.services.postgres.ports[5432] }}/ # MongoDB MONGO_URI: mongodb://root:password@localhost:${{ job.services.mongo.ports[27017] }}/ + # ClickHouse + CLICKHOUSE_URI: clickhouse://localhost:${{ job.services.clickhouse.ports[8123] }}/ # Redis REDIS_URI: redis://localhost:${{ job.services.redis.ports[6379] }}/ @@ -113,7 +125,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - database: [mysql, postgres, mongo] + database: [mysql, postgres, mongo, clickhouse] steps: - uses: actions/checkout@v1 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index f2d0a0ec6..061fc961d 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -80,6 +80,7 @@ The default database URLs are directed to these databases in `storage/docker-com | `MYSQL_URI` | `mysql://root:password@tcp(127.0.0.1:3306)/` | | `POSTGRES_URI` | 
`postgres://gorse:gorse_pass@127.0.0.1/` | | `MONGO_URI` | `mongodb://root:password@127.0.0.1:27017/` | +| `CLICKHOUSE_URI` | `clickhouse://127.0.0.1:8123/` | | `REDIS_URI` | `redis://127.0.0.1:6379/` | For example, use TiDB as a test database by: diff --git a/README.md b/README.md index 8190f5e58..52b2478e0 100644 --- a/README.md +++ b/README.md @@ -95,7 +95,7 @@ For more information: ## Architecture -Gorse is a single-node training and distributed prediction recommender system. Gorse stores data in MySQL, MongoDB, or Postgres, with intermediate results cached in Redis, MySQL, MongoDB and Postgres. +Gorse is a single-node training and distributed prediction recommender system. Gorse stores data in MySQL, MongoDB, Postgres, or ClickHouse, with intermediate results cached in Redis, MySQL, MongoDB and Postgres. 1. The cluster consists of a master node, multiple worker nodes, and server nodes. 1. The master node is responsible for model training, non-personalized item recommendation, configuration management, and membership management. 
diff --git a/client/docker-compose.yml.j2 b/client/docker-compose.yml.j2 index ac3de45b0..8ad3454af 100644 --- a/client/docker-compose.yml.j2 +++ b/client/docker-compose.yml.j2 @@ -63,6 +63,22 @@ services: timeout: 5s retries: 5 + {% elif database == 'clickhouse' %} + + clickhouse: + image: clickhouse/clickhouse-server:21.10 + ports: + - 8123:8123 + environment: + CLICKHOUSE_DB: gorse + CLICKHOUSE_USER: gorse + CLICKHOUSE_PASSWORD: gorse_pass + healthcheck: + test: clickhouse-client --user $$CLICKHOUSE_USER --password $$CLICKHOUSE_PASSWORD --query "SELECT 1" + interval: 10s + timeout: 5s + retries: 5 + {% endif %} worker: @@ -117,6 +133,8 @@ services: GORSE_DATA_STORE: postgres://gorse:gorse_pass@postgres/gorse?sslmode=disable {% elif database == 'mongo' %} GORSE_DATA_STORE: mongodb://root:password@mongo:27017/gorse?authSource=admin&connect=direct + {% elif database == 'clickhouse' %} + GORSE_DATA_STORE: clickhouse://gorse:gorse_pass@clickhouse:8123/gorse?mutations_sync=2 {% endif %} command: > -c /etc/gorse/config.toml @@ -138,6 +156,9 @@ services: {% elif database == 'mongo' %} mongo: condition: service_healthy + {% elif database == 'clickhouse' %} + clickhouse: + condition: service_healthy {% endif %} volumes: diff --git a/config/config.go b/config/config.go index 5a44e2f70..f13d92f0b 100644 --- a/config/config.go +++ b/config/config.go @@ -607,6 +607,9 @@ func (config *Config) Validate(oneModel bool) error { storage.MySQLPrefix, storage.PostgresPrefix, storage.PostgreSQLPrefix, + storage.ClickhousePrefix, + storage.CHHTTPPrefix, + storage.CHHTTPSPrefix, } if oneModel { prefixes = append(prefixes, storage.SQLitePrefix) diff --git a/config/config.toml b/config/config.toml index e0dd91ee0..e6fb194a5 100644 --- a/config/config.toml +++ b/config/config.toml @@ -9,10 +9,13 @@ # mongodb+srv://[username:password@]host1[:port1][,...hostN[:portN]][/[defaultauthdb][?options]] cache_store = "redis://localhost:6379/0" -# The database for persist data, support MySQL, 
Postgres and MongoDB: +# The database for persist data, support MySQL, Postgres, ClickHouse and MongoDB: # mysql://[username[:password]@][protocol[(address)]]/dbname[?param1=value1&...¶mN=valueN] # postgres://bob:secret@1.2.3.4:5432/mydb?sslmode=verify-full # postgresql://bob:secret@1.2.3.4:5432/mydb?sslmode=verify-full +# clickhouse://user:password@host[:port]/database?param1=value1&...¶mN=valueN +# chhttp://user:password@host[:port]/database?param1=value1&...¶mN=valueN +# chhttps://user:password@host[:port]/database?param1=value1&...¶mN=valueN # mongodb://[username:password@]host1[:port1][,...hostN[:portN]][/[defaultauthdb][?options]] # mongodb+srv://[username:password@]host1[:port1][,...hostN[:portN]][/[defaultauthdb][?options]] data_store = "mysql://gorse:gorse_pass@tcp(localhost:3306)/gorse" diff --git a/docker-compose.yml b/docker-compose.yml index 55f9d76c4..62e100ed7 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -41,6 +41,17 @@ services: # volumes: # - mongo_data:/data/db + # clickhouse: + # image: clickhouse/clickhouse-server:21.10 + # ports: + # - 8123:8123 + # environment: + # CLICKHOUSE_DB: gorse + # CLICKHOUSE_USER: gorse + # CLICKHOUSE_PASSWORD: gorse_pass + # volumes: + # - clickhouse_data:/var/lib/clickhouse + worker: image: zhenghaoz/gorse-worker restart: unless-stopped @@ -84,6 +95,7 @@ services: GORSE_DATA_STORE: mysql://gorse:gorse_pass@tcp(mysql:3306)/gorse # GORSE_DATA_STORE: postgres://gorse:gorse_pass@postgres/gorse?sslmode=disable # GORSE_DATA_STORE: mongodb://root:password@mongo:27017/gorse?authSource=admin&connect=direct + # GORSE_DATA_STORE: clickhouse://gorse:gorse_pass@clickhouse:8123/gorse command: > -c /etc/gorse/config.toml --log-path /var/log/gorse/master.log @@ -97,6 +109,7 @@ services: - mysql # - postgres # - mongo + # - clickhouse volumes: worker_data: @@ -106,3 +119,4 @@ volumes: mysql_data: # postgres_data: # mongo_data: + # clickhouse_data: diff --git a/go.mod b/go.mod index d03af06a5..ad9b3ee3d 100644 --- 
a/go.mod +++ b/go.mod @@ -32,6 +32,7 @@ require ( github.com/klauspost/cpuid/v2 v2.2.3 github.com/lafikl/consistent v0.0.0-20220512074542-bdd3606bfc3e github.com/lib/pq v1.10.6 + github.com/mailru/go-clickhouse/v2 v2.0.1-0.20221121001540-b259988ad8e5 github.com/mitchellh/mapstructure v1.5.0 github.com/orcaman/concurrent-map v1.0.0 github.com/prometheus/client_golang v1.13.0 @@ -68,6 +69,7 @@ require ( gopkg.in/yaml.v2 v2.4.0 gorgonia.org/gorgonia v0.9.18-0.20230327110624-d1c17944ed22 gorgonia.org/tensor v0.9.23 + gorm.io/driver/clickhouse v0.4.2 gorm.io/driver/mysql v1.3.4 gorm.io/driver/postgres v1.3.5 gorm.io/driver/sqlite v1.3.4 @@ -101,6 +103,7 @@ require ( github.com/golang/snappy v0.0.4 // indirect github.com/google/flatbuffers v2.0.6+incompatible // indirect github.com/grpc-ecosystem/grpc-gateway/v2 v2.22.0 // indirect + github.com/hashicorp/go-version v1.6.0 // indirect github.com/hashicorp/hcl v1.0.0 // indirect github.com/inconshreveable/mousetrap v1.0.0 // indirect github.com/jackc/chunkreader/v2 v2.0.1 // indirect @@ -179,8 +182,9 @@ require ( modernc.org/token v1.0.1 // indirect ) -replace gorm.io/driver/sqlite v1.3.4 => github.com/gorse-io/sqlite v1.3.3-0.20220713123255-c322aec4e59e - -replace gorgonia.org/tensor v0.9.23 => github.com/gorse-io/tensor v0.0.0-20230617102451-4c006ddc5162 - -replace gorgonia.org/gorgonia v0.9.18-0.20230327110624-d1c17944ed22 => github.com/gorse-io/gorgonia v0.0.0-20230817132253-6dd1dbf95849 +replace ( + gorgonia.org/gorgonia v0.9.18-0.20230327110624-d1c17944ed22 => github.com/gorse-io/gorgonia v0.0.0-20230817132253-6dd1dbf95849 + gorgonia.org/tensor v0.9.23 => github.com/gorse-io/tensor v0.0.0-20230617102451-4c006ddc5162 + gorm.io/driver/clickhouse v0.4.2 => github.com/gorse-io/clickhouse v0.3.3-0.20220715124633-688011a495bb + gorm.io/driver/sqlite v1.3.4 => github.com/gorse-io/sqlite v1.3.3-0.20220713123255-c322aec4e59e +) diff --git a/go.sum b/go.sum index 49488f278..ab7e3b29f 100644 --- a/go.sum +++ b/go.sum @@ -288,6 
+288,7 @@ github.com/google/pprof v0.0.0-20221118152302-e6195bd50e26/go.mod h1:dDKJzRmX4S3 github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.2.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= @@ -298,6 +299,8 @@ github.com/gorgonia/bindgen v0.0.0-20180812032444-09626750019e/go.mod h1:YzKk63P github.com/gorgonia/bindgen v0.0.0-20210223094355-432cd89e7765/go.mod h1:BLHSe436vhQKRfm6wxJgebeK4fDY+ER/8jV3vVH9yYU= github.com/gorilla/securecookie v1.1.1 h1:miw7JPhV+b/lAHSXz4qd/nN9jRiAFV5FwjeKyCS8BvQ= github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4= +github.com/gorse-io/clickhouse v0.3.3-0.20220715124633-688011a495bb h1:z/oOWE+Vy0PLcwIulZmIug4FtmvE3dJ1YOGprLeHwwY= +github.com/gorse-io/clickhouse v0.3.3-0.20220715124633-688011a495bb/go.mod h1:iILWzbul8U+gsf4kqbheF2QzBmdvVp63mloGGK8emDI= github.com/gorse-io/dashboard v0.0.0-20230729051855-6c53a42d2bd4 h1:x0bLXsLkjEZdztd0Tw+Hx38vIjzabyj2Fk0EDitKcLk= github.com/gorse-io/dashboard v0.0.0-20230729051855-6c53a42d2bd4/go.mod h1:bv2Yg9Pn4Dca4xPJbvibpF6LH6BjoxcjsEdIuojNano= github.com/gorse-io/gorgonia v0.0.0-20230817132253-6dd1dbf95849 h1:Hwywr6NxzYeZYn35KwOsw7j8ZiMT60TBzpbn1MbEido= @@ -309,6 +312,9 @@ github.com/gorse-io/tensor v0.0.0-20230617102451-4c006ddc5162/go.mod h1:1dsOegMm github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= github.com/grpc-ecosystem/grpc-gateway/v2 v2.22.0 h1:asbCHRVmodnJTuQ3qamDwqVOIjwqUPTYmYuemVOx+Ys= 
github.com/grpc-ecosystem/grpc-gateway/v2 v2.22.0/go.mod h1:ggCgvZ2r7uOoQjOyu2Y1NhHmEPPzzuhWgcza5M1Ji1I= +github.com/hashicorp/go-version v1.5.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= +github.com/hashicorp/go-version v1.6.0 h1:feTTfFNnjP967rlCxM/I9g701jU+RN74YKx2mOkIeek= +github.com/hashicorp/go-version v1.6.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= @@ -442,6 +448,8 @@ github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN github.com/mailru/easyjson v0.7.6/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= +github.com/mailru/go-clickhouse/v2 v2.0.1-0.20221121001540-b259988ad8e5 h1:JgQ+kJg8uKs6JjnDxnMgkKT4PPH36uU6chpYw2PQc9Q= +github.com/mailru/go-clickhouse/v2 v2.0.1-0.20221121001540-b259988ad8e5/go.mod h1:TwxN829KnFZ7jAka9l9EoCV+U0CBFq83SFev4oLbnNU= github.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcnceauSikq3lYCQ= github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= diff --git a/server/bench_test.go b/server/bench_test.go index 6a107b6ca..417345aa7 100644 --- a/server/bench_test.go +++ b/server/bench_test.go @@ -19,16 +19,6 @@ import ( "database/sql" "encoding/json" "fmt" - "math/rand" - "net" - "net/http" - "os" - "runtime" - "strconv" - "strings" - "testing" - "time" - "github.com/emicklei/go-restful/v3" "github.com/go-resty/resty/v2" "github.com/redis/go-redis/v9" @@ -41,6 +31,15 @@ import ( 
"go.mongodb.org/mongo-driver/mongo" "go.mongodb.org/mongo-driver/mongo/options" "google.golang.org/protobuf/proto" + "math/rand" + "net" + "net/http" + "os" + "runtime" + "strconv" + "strings" + "testing" + "time" ) const ( @@ -61,7 +60,7 @@ func init() { } return defaultValue } - benchDataStore = env("BENCH_DATA_STORE", "mysql://root:password@tcp(127.0.0.1:3306)/") + benchDataStore = env("BENCH_DATA_STORE", "clickhouse://127.0.0.1:8123/") benchCacheStore = env("BENCH_CACHE_STORE", "redis://127.0.0.1:6379/") } @@ -192,6 +191,17 @@ func (s *benchServer) prepareData(b *testing.B, url, benchName string) string { err = db.Close() require.NoError(b, err) return url + strings.ToLower(dbName) + "?sslmode=disable&TimeZone=UTC" + } else if strings.HasPrefix(url, "clickhouse://") { + uri := "http://" + url[len("clickhouse://"):] + db, err := sql.Open("clickhouse", uri) + require.NoError(b, err) + _, err = db.Exec("DROP DATABASE IF EXISTS " + dbName) + require.NoError(b, err) + _, err = db.Exec("CREATE DATABASE " + dbName) + require.NoError(b, err) + err = db.Close() + require.NoError(b, err) + return url + dbName + "?mutations_sync=2" } else if strings.HasPrefix(url, "mongodb://") { ctx := context.Background() cli, err := mongo.Connect(ctx, options.Client().ApplyURI(url+"?authSource=admin&connect=direct")) diff --git a/server/bench_test.sh b/server/bench_test.sh index ad7bacd34..f3e380079 100644 --- a/server/bench_test.sh +++ b/server/bench_test.sh @@ -40,6 +40,9 @@ case $CACHE_ARG in esac case $DATA_ARG in + clickhouse) + export BENCH_DATA_STORE='clickhouse://127.0.0.1:8123/' + ;; mysql) export BENCH_DATA_STORE='mysql://root:password@tcp(127.0.0.1:3306)/' ;; diff --git a/storage/data/database.go b/storage/data/database.go index 8c4088ea9..4803b873b 100644 --- a/storage/data/database.go +++ b/storage/data/database.go @@ -18,6 +18,7 @@ import ( "context" "encoding/json" "reflect" + "net/url" "sort" "strings" "time" @@ -33,6 +34,7 @@ import ( 
"go.mongodb.org/mongo-driver/x/mongo/driver/connstring" "go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo" semconv "go.opentelemetry.io/otel/semconv/v1.12.0" + "gorm.io/driver/clickhouse" "gorm.io/driver/mysql" "gorm.io/driver/postgres" "gorm.io/driver/sqlite" @@ -223,6 +225,7 @@ type Database interface { Init() error Ping() error Close() error + Optimize() error Purge() error BatchInsertItems(ctx context.Context, items []Item) error BatchGetItems(ctx context.Context, itemIds []string) ([]Item, error) @@ -297,6 +300,32 @@ func Open(path, tablePrefix string) (Database, error) { return nil, errors.Trace(err) } return database, nil + } else if strings.HasPrefix(path, storage.ClickhousePrefix) || strings.HasPrefix(path, storage.CHHTTPPrefix) || strings.HasPrefix(path, storage.CHHTTPSPrefix) { + // replace schema + parsed, err := url.Parse(path) + if err != nil { + return nil, errors.Trace(err) + } + if strings.HasPrefix(path, storage.CHHTTPSPrefix) { + parsed.Scheme = "https" + } else { + parsed.Scheme = "http" + } + uri := parsed.String() + database := new(SQLDatabase) + database.driver = ClickHouse + database.TablePrefix = storage.TablePrefix(tablePrefix) + if database.client, err = otelsql.Open("chhttp", uri, + otelsql.WithAttributes(semconv.DBSystemKey.String("clickhouse")), + otelsql.WithSpanOptions(otelsql.SpanOptions{DisableErrSkip: true}), + ); err != nil { + return nil, errors.Trace(err) + } + database.gormDB, err = gorm.Open(clickhouse.New(clickhouse.Config{Conn: database.client}), storage.NewGORMConfig(tablePrefix)) + if err != nil { + return nil, errors.Trace(err) + } + return database, nil } else if strings.HasPrefix(path, storage.MongoPrefix) || strings.HasPrefix(path, storage.MongoSrvPrefix) { // connect to database database := new(MongoDB) diff --git a/storage/data/database_test.go b/storage/data/database_test.go index c19cbf99e..06c671af3 100644 --- a/storage/data/database_test.go +++ 
b/storage/data/database_test.go @@ -126,6 +126,14 @@ func (suite *baseTestSuite) getFeedbackStream(ctx context.Context, batchSize int return feedbacks } +func (suite *baseTestSuite) isClickHouse() bool { + if sqlDB, isSQL := suite.Database.(*SQLDatabase); !isSQL { + return false + } else { + return sqlDB.driver == ClickHouse + } +} + func (suite *baseTestSuite) TearDownSuite() { err := suite.Database.Close() suite.NoError(err) @@ -183,6 +191,8 @@ func (suite *baseTestSuite) TestUsers() { // test override err = suite.Database.BatchInsertUsers(ctx, []User{{UserId: "1", Comment: "override"}}) suite.NoError(err) + err = suite.Database.Optimize() + suite.NoError(err) user, err = suite.Database.GetUser(ctx, "1") suite.NoError(err) suite.Equal("override", user.Comment) @@ -193,6 +203,8 @@ func (suite *baseTestSuite) TestUsers() { suite.NoError(err) err = suite.Database.ModifyUser(ctx, "1", UserPatch{Subscribe: []string{"d", "e", "f"}}) suite.NoError(err) + err = suite.Database.Optimize() + suite.NoError(err) user, err = suite.Database.GetUser(ctx, "1") suite.NoError(err) suite.Equal("modify", user.Comment) @@ -259,12 +271,19 @@ func (suite *baseTestSuite) TestFeedback() { feedbackFromStream = suite.getFeedbackStream(ctx, 3, WithBeginUserId("1"), WithEndUserId("3"), WithEndTime(time.Now()), WithFeedbackTypes(positiveFeedbackType)) suite.Equal(feedback[1:4], feedbackFromStream) // Get items + err = suite.Database.Optimize() + suite.NoError(err) items := suite.getItems(ctx, 3) suite.Equal(5, len(items)) for i, item := range items { suite.Equal(strconv.Itoa(i*2), item.ItemId) if item.ItemId != "0" { - suite.Zero(item.Timestamp) + if suite.isClickHouse() { + // ClickHouse returns 1970-01-01 as zero date. 
+ suite.Zero(item.Timestamp.Unix()) + } else { + suite.Zero(item.Timestamp) + } suite.Empty(item.Labels) suite.Empty(item.Comment) } @@ -314,6 +333,8 @@ func (suite *baseTestSuite) TestFeedback() { Comment: "override", }}, true, true, true) suite.NoError(err) + err = suite.Database.Optimize() + suite.NoError(err) ret, err = suite.Database.GetUserFeedback(ctx, "0", lo.ToPtr(time.Now()), positiveFeedbackType) suite.NoError(err) suite.Equal(1, len(ret)) @@ -324,6 +345,8 @@ func (suite *baseTestSuite) TestFeedback() { Comment: "not_override", }}, true, true, false) suite.NoError(err) + err = suite.Database.Optimize() + suite.NoError(err) ret, err = suite.Database.GetUserFeedback(ctx, "0", lo.ToPtr(time.Now()), positiveFeedbackType) suite.NoError(err) suite.Equal(1, len(ret)) @@ -436,6 +459,8 @@ func (suite *baseTestSuite) TestItems() { // test override err = suite.Database.BatchInsertItems(ctx, []Item{{ItemId: "4", IsHidden: false, Categories: []string{"b"}, Labels: []string{"o"}, Comment: "override"}}) suite.NoError(err) + err = suite.Database.Optimize() + suite.NoError(err) item, err := suite.Database.GetItem(ctx, "4") suite.NoError(err) suite.False(item.IsHidden) @@ -455,6 +480,8 @@ func (suite *baseTestSuite) TestItems() { suite.NoError(err) err = suite.Database.ModifyItem(ctx, "2", ItemPatch{Timestamp: ×tamp}) suite.NoError(err) + err = suite.Database.Optimize() + suite.NoError(err) item, err = suite.Database.GetItem(ctx, "2") suite.NoError(err) suite.True(item.IsHidden) @@ -548,14 +575,20 @@ func (suite *baseTestSuite) TestDeleteFeedback() { // delete user-item feedback deleteCount, err := suite.Database.DeleteUserItemFeedback(ctx, "2", "3") suite.NoError(err) - suite.Equal(3, deleteCount) + if !suite.isClickHouse() { + // RowAffected isn't supported by ClickHouse, + suite.Equal(3, deleteCount) + } ret, err = suite.Database.GetUserItemFeedback(ctx, "2", "3") suite.NoError(err) suite.Empty(ret) feedbackType1 := "type1" deleteCount, err = 
suite.Database.DeleteUserItemFeedback(ctx, "1", "3", feedbackType1) suite.NoError(err) - suite.Equal(1, deleteCount) + if !suite.isClickHouse() { + // RowAffected isn't supported by ClickHouse, + suite.Equal(1, deleteCount) + } ret, err = suite.Database.GetUserItemFeedback(ctx, "1", "3", feedbackType2) suite.NoError(err) suite.Empty(ret) @@ -662,6 +695,8 @@ func (suite *baseTestSuite) TestTimezone() { suite.NoError(err) err = suite.Database.ModifyItem(ctx, "200", ItemPatch{Timestamp: &now}) suite.NoError(err) + err = suite.Database.Optimize() + suite.NoError(err) switch database := suite.Database.(type) { case *SQLDatabase: switch suite.Database.(*SQLDatabase).driver { @@ -672,6 +707,13 @@ func (suite *baseTestSuite) TestTimezone() { item, err = suite.Database.GetItem(ctx, "200") suite.NoError(err) suite.Equal(now.Round(time.Microsecond).In(time.UTC), item.Timestamp) + case ClickHouse: + item, err := suite.Database.GetItem(ctx, "100") + suite.NoError(err) + suite.Equal(now.Truncate(time.Second).In(time.UTC), item.Timestamp) + item, err = suite.Database.GetItem(ctx, "200") + suite.NoError(err) + suite.Equal(now.Truncate(time.Second).In(time.UTC), item.Timestamp) case SQLite: item, err := suite.Database.GetItem(ctx, "100") suite.NoError(err) diff --git a/storage/data/mongodb.go b/storage/data/mongodb.go index c3c35a80d..3848e6b0a 100644 --- a/storage/data/mongodb.go +++ b/storage/data/mongodb.go @@ -65,6 +65,11 @@ type MongoDB struct { dbName string } +// Optimize is used by ClickHouse only. +func (db *MongoDB) Optimize() error { + return nil +} + // Init collections and indices in MongoDB. func (db *MongoDB) Init() error { ctx := context.Background() diff --git a/storage/data/no_database.go b/storage/data/no_database.go index b79e3921f..f936e84fe 100644 --- a/storage/data/no_database.go +++ b/storage/data/no_database.go @@ -22,6 +22,11 @@ import ( // NoDatabase means that no database used. type NoDatabase struct{} +// Optimize is used by ClickHouse only. 
+func (NoDatabase) Optimize() error { + return ErrNoDatabase +} + // Init method of NoDatabase returns ErrNoDatabase. func (NoDatabase) Init() error { return ErrNoDatabase diff --git a/storage/data/no_database_test.go b/storage/data/no_database_test.go index 11f90f5f9..776e7edb6 100644 --- a/storage/data/no_database_test.go +++ b/storage/data/no_database_test.go @@ -16,11 +16,10 @@ package data import ( "context" - "testing" - "time" - "github.com/samber/lo" "github.com/stretchr/testify/assert" + "testing" + "time" ) func TestNoDatabase(t *testing.T) { @@ -29,6 +28,8 @@ func TestNoDatabase(t *testing.T) { err := database.Close() assert.ErrorIs(t, err, ErrNoDatabase) + err = database.Optimize() + assert.ErrorIs(t, err, ErrNoDatabase) err = database.Init() assert.ErrorIs(t, err, ErrNoDatabase) err = database.Ping() diff --git a/storage/data/sql.go b/storage/data/sql.go index f37af786b..922b56693 100644 --- a/storage/data/sql.go +++ b/storage/data/sql.go @@ -25,6 +25,7 @@ import ( _ "github.com/go-sql-driver/mysql" "github.com/juju/errors" _ "github.com/lib/pq" + _ "github.com/mailru/go-clickhouse/v2" "github.com/samber/lo" "github.com/zhenghaoz/gorse/base/jsonutil" "github.com/zhenghaoz/gorse/base/log" @@ -41,6 +42,7 @@ type SQLDriver int const ( MySQL SQLDriver = iota Postgres + ClickHouse SQLite ) @@ -84,6 +86,34 @@ func NewSQLUser(user User) (sqlUser SQLUser) { return } +type ClickHouseItem struct { + SQLItem `gorm:"embedded"` + Version time.Time `gorm:"column:version"` +} + +func NewClickHouseItem(item Item) (clickHouseItem ClickHouseItem) { + clickHouseItem.SQLItem = NewSQLItem(item) + clickHouseItem.Timestamp = item.Timestamp.In(time.UTC) + clickHouseItem.Version = time.Now().In(time.UTC) + return +} + +type ClickhouseUser struct { + SQLUser `gorm:"embedded"` + Version time.Time `gorm:"column:version"` +} + +func NewClickhouseUser(user User) (clickhouseUser ClickhouseUser) { + clickhouseUser.SQLUser = NewSQLUser(user) + clickhouseUser.Version = 
time.Now().In(time.UTC) + return +} + +type ClickHouseFeedback struct { + Feedback `gorm:"embedded"` + Version time.Time `gorm:"column:version"` +} + // SQLDatabase use MySQL as data storage. type SQLDatabase struct { storage.TablePrefix @@ -92,6 +122,19 @@ type SQLDatabase struct { driver SQLDriver } +// Optimize is used by ClickHouse only. +func (d *SQLDatabase) Optimize() error { + if d.driver == ClickHouse { + for _, tableName := range []string{d.UsersTable(), d.ItemsTable(), d.FeedbackTable()} { + _, err := d.client.Exec("OPTIMIZE TABLE " + tableName) + if err != nil { + return errors.Trace(err) + } + } + } + return nil +} + // Init tables and indices in MySQL. func (d *SQLDatabase) Init() error { switch d.driver { @@ -176,6 +219,44 @@ func (d *SQLDatabase) Init() error { if err != nil { return errors.Trace(err) } + case ClickHouse: + // create tables + type Items struct { + ItemId string `gorm:"column:item_id;type:String"` + IsHidden int `gorm:"column:is_hidden;type:Boolean;default:0"` + Categories string `gorm:"column:categories;type:String;default:'[]'"` + Timestamp time.Time `gorm:"column:time_stamp;type:Datetime"` + Labels string `gorm:"column:labels;type:String;default:'[]'"` + Comment string `gorm:"column:comment;type:String"` + Version struct{} `gorm:"column:version;type:DateTime"` + } + err := d.gormDB.Set("gorm:table_options", "ENGINE = ReplacingMergeTree(version) ORDER BY item_id").AutoMigrate(Items{}) + if err != nil { + return errors.Trace(err) + } + type Users struct { + UserId string `gorm:"column:user_id;type:String"` + Labels string `gorm:"column:labels;type:String;default:'[]'"` + Subscribe string `gorm:"column:subscribe;type:String;default:'[]'"` + Comment string `gorm:"column:comment;type:String"` + Version struct{} `gorm:"column:version;type:DateTime"` + } + err = d.gormDB.Set("gorm:table_options", "ENGINE = ReplacingMergeTree(version) ORDER BY user_id").AutoMigrate(Users{}) + if err != nil { + return errors.Trace(err) + } + type Feedback 
struct { + FeedbackType string `gorm:"column:feedback_type;type:String"` + UserId string `gorm:"column:user_id;type:String;index:user_index,type:bloom_filter(0.01),granularity:1"` + ItemId string `gorm:"column:item_id;type:String;index:item_index,type:bloom_filter(0.01),granularity:1"` + Timestamp time.Time `gorm:"column:time_stamp;type:DateTime"` + Comment string `gorm:"column:comment;type:String"` + Version struct{} `gorm:"column:version;type:DateTime"` + } + err = d.gormDB.Set("gorm:table_options", "ENGINE = ReplacingMergeTree(version) ORDER BY (feedback_type, user_id, item_id)").AutoMigrate(Feedback{}) + if err != nil { + return errors.Trace(err) + } } return nil } @@ -191,10 +272,19 @@ func (d *SQLDatabase) Close() error { func (d *SQLDatabase) Purge() error { tables := []string{d.ItemsTable(), d.FeedbackTable(), d.UsersTable()} - for _, tableName := range tables { - err := d.gormDB.Exec(fmt.Sprintf("DELETE FROM %s", tableName)).Error - if err != nil { - return errors.Trace(err) + if d.driver == ClickHouse { + for _, tableName := range tables { + err := d.gormDB.Exec(fmt.Sprintf("alter table %s delete where 1=1", tableName)).Error + if err != nil { + return errors.Trace(err) + } + } + } else { + for _, tableName := range tables { + err := d.gormDB.Exec(fmt.Sprintf("DELETE FROM %s", tableName)).Error + if err != nil { + return errors.Trace(err) + } } } return nil @@ -205,23 +295,36 @@ func (d *SQLDatabase) BatchInsertItems(ctx context.Context, items []Item) error if len(items) == 0 { return nil } - rows := make([]SQLItem, 0, len(items)) - memo := mapset.NewSet[string]() - for _, item := range items { - if !memo.Contains(item.ItemId) { - memo.Add(item.ItemId) - row := NewSQLItem(item) - if d.driver == SQLite { - row.Timestamp = row.Timestamp.In(time.UTC) + if d.driver == ClickHouse { + rows := make([]ClickHouseItem, 0, len(items)) + memo := mapset.NewSet[string]() + for _, item := range items { + if !memo.Contains(item.ItemId) { + memo.Add(item.ItemId) + rows = 
append(rows, NewClickHouseItem(item)) + } + } + err := d.gormDB.Create(rows).Error + return errors.Trace(err) + } else { + rows := make([]SQLItem, 0, len(items)) + memo := mapset.NewSet[string]() + for _, item := range items { + if !memo.Contains(item.ItemId) { + memo.Add(item.ItemId) + row := NewSQLItem(item) + if d.driver == SQLite { + row.Timestamp = row.Timestamp.In(time.UTC) + } + rows = append(rows, row) } - rows = append(rows, row) } + err := d.gormDB.WithContext(ctx).Clauses(clause.OnConflict{ + Columns: []clause.Column{{Name: "item_id"}}, + DoUpdates: clause.AssignmentColumns([]string{"is_hidden", "categories", "time_stamp", "labels", "comment"}), + }).Create(rows).Error + return errors.Trace(err) } - err := d.gormDB.WithContext(ctx).Clauses(clause.OnConflict{ - Columns: []clause.Column{{Name: "item_id"}}, - DoUpdates: clause.AssignmentColumns([]string{"is_hidden", "categories", "time_stamp", "labels", "comment"}), - }).Create(rows).Error - return errors.Trace(err) } func (d *SQLDatabase) BatchGetItems(ctx context.Context, itemIds []string) ([]Item, error) { @@ -304,7 +407,7 @@ func (d *SQLDatabase) ModifyItem(ctx context.Context, itemId string, patch ItemP } if patch.Timestamp != nil { switch d.driver { - case SQLite: + case ClickHouse, SQLite: attributes["time_stamp"] = patch.Timestamp.In(time.UTC) default: attributes["time_stamp"] = patch.Timestamp @@ -420,19 +523,32 @@ func (d *SQLDatabase) BatchInsertUsers(ctx context.Context, users []User) error if len(users) == 0 { return nil } - rows := make([]SQLUser, 0, len(users)) - memo := mapset.NewSet[string]() - for _, user := range users { - if !memo.Contains(user.UserId) { - memo.Add(user.UserId) - rows = append(rows, NewSQLUser(user)) + if d.driver == ClickHouse { + rows := make([]ClickhouseUser, 0, len(users)) + memo := mapset.NewSet[string]() + for _, user := range users { + if !memo.Contains(user.UserId) { + memo.Add(user.UserId) + rows = append(rows, NewClickhouseUser(user)) + } } + err := 
d.gormDB.Create(rows).Error + return errors.Trace(err) + } else { + rows := make([]SQLUser, 0, len(users)) + memo := mapset.NewSet[string]() + for _, user := range users { + if !memo.Contains(user.UserId) { + memo.Add(user.UserId) + rows = append(rows, NewSQLUser(user)) + } + } + err := d.gormDB.WithContext(ctx).Clauses(clause.OnConflict{ + Columns: []clause.Column{{Name: "user_id"}}, + DoUpdates: clause.AssignmentColumns([]string{"labels", "subscribe", "comment"}), + }).Create(rows).Error + return errors.Trace(err) } - err := d.gormDB.WithContext(ctx).Clauses(clause.OnConflict{ - Columns: []clause.Column{{Name: "user_id"}}, - DoUpdates: clause.AssignmentColumns([]string{"labels", "subscribe", "comment"}), - }).Create(rows).Error - return errors.Trace(err) } // DeleteUser deletes a user from MySQL. @@ -602,18 +718,33 @@ func (d *SQLDatabase) BatchInsertFeedback(ctx context.Context, feedback []Feedba // insert users if insertUser { userList := users.ToSlice() - err := tx.Clauses(clause.OnConflict{ - Columns: []clause.Column{{Name: "user_id"}}, - DoNothing: true, - }).Create(lo.Map(userList, func(userId string, _ int) SQLUser { - return SQLUser{ - UserId: userId, - Labels: "null", - Subscribe: "null", + if d.driver == ClickHouse { + err := tx.Create(lo.Map(userList, func(userId string, _ int) ClickhouseUser { + return ClickhouseUser{ + SQLUser: SQLUser{ + UserId: userId, + Labels: "[]", + Subscribe: "[]", + }, + } + })).Error + if err != nil { + return errors.Trace(err) + } + } else { + err := tx.Clauses(clause.OnConflict{ + Columns: []clause.Column{{Name: "user_id"}}, + DoNothing: true, + }).Create(lo.Map(userList, func(userId string, _ int) SQLUser { + return SQLUser{ + UserId: userId, + Labels: "null", + Subscribe: "null", + } + })).Error + if err != nil { + return errors.Trace(err) } - })).Error - if err != nil { - return errors.Trace(err) } } else { for _, user := range users.ToSlice() { @@ -631,18 +762,33 @@ func (d *SQLDatabase) BatchInsertFeedback(ctx 
context.Context, feedback []Feedba // insert items if insertItem { itemList := items.ToSlice() - err := tx.Clauses(clause.OnConflict{ - Columns: []clause.Column{{Name: "item_id"}}, - DoNothing: true, - }).Create(lo.Map(itemList, func(itemId string, _ int) SQLItem { - return SQLItem{ - ItemId: itemId, - Labels: "null", - Categories: "null", + if d.driver == ClickHouse { + err := tx.Create(lo.Map(itemList, func(itemId string, _ int) ClickHouseItem { + return ClickHouseItem{ + SQLItem: SQLItem{ + ItemId: itemId, + Labels: "[]", + Categories: "[]", + }, + } + })).Error + if err != nil { + return errors.Trace(err) + } + } else { + err := tx.Clauses(clause.OnConflict{ + Columns: []clause.Column{{Name: "item_id"}}, + DoNothing: true, + }).Create(lo.Map(itemList, func(itemId string, _ int) SQLItem { + return SQLItem{ + ItemId: itemId, + Labels: "null", + Categories: "null", + } + })).Error + if err != nil { + return errors.Trace(err) } - })).Error - if err != nil { - return errors.Trace(err) } } else { for _, item := range items.ToSlice() { @@ -658,28 +804,50 @@ func (d *SQLDatabase) BatchInsertFeedback(ctx context.Context, feedback []Feedba } } // insert feedback - rows := make([]Feedback, 0, len(feedback)) - memo := make(map[lo.Tuple3[string, string, string]]struct{}) - for _, f := range feedback { - if users.Contains(f.UserId) && items.Contains(f.ItemId) { - if _, exist := memo[lo.Tuple3[string, string, string]{f.FeedbackType, f.UserId, f.ItemId}]; !exist { - memo[lo.Tuple3[string, string, string]{f.FeedbackType, f.UserId, f.ItemId}] = struct{}{} - if d.driver == SQLite { + if d.driver == ClickHouse { + rows := make([]ClickHouseFeedback, 0, len(feedback)) + memo := make(map[lo.Tuple3[string, string, string]]struct{}) + for _, f := range feedback { + if users.Contains(f.UserId) && items.Contains(f.ItemId) { + if _, exist := memo[lo.Tuple3[string, string, string]{f.FeedbackType, f.UserId, f.ItemId}]; !exist { + memo[lo.Tuple3[string, string, string]{f.FeedbackType, 
f.UserId, f.ItemId}] = struct{}{} f.Timestamp = f.Timestamp.In(time.UTC) + rows = append(rows, ClickHouseFeedback{ + Feedback: f, + Version: lo.If(overwrite, time.Now().In(time.UTC)).Else(time.Time{}), + }) } - rows = append(rows, f) } } + if len(rows) == 0 { + return nil + } + err := tx.Create(rows).Error + return errors.Trace(err) + } else { + rows := make([]Feedback, 0, len(feedback)) + memo := make(map[lo.Tuple3[string, string, string]]struct{}) + for _, f := range feedback { + if users.Contains(f.UserId) && items.Contains(f.ItemId) { + if _, exist := memo[lo.Tuple3[string, string, string]{f.FeedbackType, f.UserId, f.ItemId}]; !exist { + memo[lo.Tuple3[string, string, string]{f.FeedbackType, f.UserId, f.ItemId}] = struct{}{} + if d.driver == SQLite { + f.Timestamp = f.Timestamp.In(time.UTC) + } + rows = append(rows, f) + } + } + } + if len(rows) == 0 { + return nil + } + err := tx.Clauses(clause.OnConflict{ + Columns: []clause.Column{{Name: "feedback_type"}, {Name: "user_id"}, {Name: "item_id"}}, + DoNothing: !overwrite, + DoUpdates: lo.If(overwrite, clause.AssignmentColumns([]string{"time_stamp", "comment"})).Else(nil), + }).Create(rows).Error + return errors.Trace(err) } - if len(rows) == 0 { - return nil - } - err := tx.Clauses(clause.OnConflict{ - Columns: []clause.Column{{Name: "feedback_type"}, {Name: "user_id"}, {Name: "item_id"}}, - DoNothing: !overwrite, - DoUpdates: lo.If(overwrite, clause.AssignmentColumns([]string{"time_stamp", "comment"})).Else(nil), - }).Create(rows).Error - return errors.Trace(err) } // GetFeedback returns feedback from MySQL. 
@@ -819,7 +987,7 @@ func (d *SQLDatabase) DeleteUserItemFeedback(ctx context.Context, userId, itemId if tx.Error != nil { return 0, errors.Trace(tx.Error) } - if tx.Error != nil { + if tx.Error != nil && d.driver != ClickHouse { return 0, errors.Trace(tx.Error) } return int(tx.RowsAffected), nil @@ -827,7 +995,7 @@ func (d *SQLDatabase) DeleteUserItemFeedback(ctx context.Context, userId, itemId func (d *SQLDatabase) convertTimeZone(timestamp *time.Time) time.Time { switch d.driver { - case SQLite: + case ClickHouse, SQLite: return timestamp.In(time.UTC) default: return *timestamp diff --git a/storage/data/sql_test.go b/storage/data/sql_test.go index dccff5bd8..fd9938db0 100644 --- a/storage/data/sql_test.go +++ b/storage/data/sql_test.go @@ -27,8 +27,9 @@ import ( ) var ( - mySqlDSN string - postgresDSN string + mySqlDSN string + postgresDSN string + clickhouseDSN string ) func init() { @@ -41,6 +42,7 @@ func init() { } mySqlDSN = env("MYSQL_URI", "mysql://root:password@tcp(127.0.0.1:3306)/") postgresDSN = env("POSTGRES_URI", "postgres://gorse:gorse_pass@127.0.0.1/") + clickhouseDSN = env("CLICKHOUSE_URI", "clickhouse://127.0.0.1:8123/") } type MySQLTestSuite struct { @@ -106,6 +108,34 @@ func TestPostgres(t *testing.T) { suite.Run(t, new(PostgresTestSuite)) } +type ClickHouseTestSuite struct { + baseTestSuite +} + +func (suite *ClickHouseTestSuite) SetupSuite() { + var err error + // create database + databaseComm, err := sql.Open("chhttp", "http://"+clickhouseDSN[len(storage.ClickhousePrefix):]) + suite.NoError(err) + const dbName = "gorse_data_test" + _, err = databaseComm.Exec("DROP DATABASE IF EXISTS " + dbName) + suite.NoError(err) + _, err = databaseComm.Exec("CREATE DATABASE " + dbName) + suite.NoError(err) + err = databaseComm.Close() + suite.NoError(err) + // connect database + suite.Database, err = Open(clickhouseDSN+dbName+"?mutations_sync=2", "gorse_") + suite.NoError(err) + // create schema + err = suite.Database.Init() + suite.NoError(err) +} + +func 
TestClickHouse(t *testing.T) { + suite.Run(t, new(ClickHouseTestSuite)) +} + type SQLiteTestSuite struct { baseTestSuite } diff --git a/storage/docker-compose.yml b/storage/docker-compose.yml index 18d768152..fa3f7edfb 100644 --- a/storage/docker-compose.yml +++ b/storage/docker-compose.yml @@ -30,3 +30,8 @@ services: environment: MONGO_INITDB_ROOT_USERNAME: root MONGO_INITDB_ROOT_PASSWORD: password + + clickhouse: + image: clickhouse/clickhouse-server:22 + ports: + - 8123:8123 diff --git a/storage/scheme.go b/storage/scheme.go index 03f62f825..74accf9db 100644 --- a/storage/scheme.go +++ b/storage/scheme.go @@ -34,6 +34,9 @@ const ( MongoSrvPrefix = "mongodb+srv://" PostgresPrefix = "postgres://" PostgreSQLPrefix = "postgresql://" + ClickhousePrefix = "clickhouse://" + CHHTTPPrefix = "chhttp://" + CHHTTPSPrefix = "chhttps://" SQLitePrefix = "sqlite://" RedisPrefix = "redis://" RedissPrefix = "rediss://" @@ -143,6 +146,9 @@ func NewGORMConfig(tablePrefix string) *gorm.Config { "SQLDocument", "Documents", "PostgresDocument", "Documents", "TimeSeriesPoint", "time_series_points", + "ClickhouseUser", "Users", + "ClickHouseItem", "Items", + "ClickHouseFeedback", "Feedback", ), }, } From b959e382eedb8f65b15a5d53e03581d6f9d84a83 Mon Sep 17 00:00:00 2001 From: zhenghaoz Date: Sat, 26 Oct 2024 18:59:40 +0800 Subject: [PATCH 03/14] data: optimize ClickHouse queries via materialized views (#875) --- .github/workflows/build_test.yml | 2 +- client/docker-compose.yml.j2 | 2 +- docker-compose.yml | 2 +- storage/data/database.go | 6 +- storage/data/database_test.go | 14 ++-- storage/data/sql.go | 110 +++++++++++++++++++++++++------ storage/scheme.go | 10 +++ 7 files changed, 117 insertions(+), 29 deletions(-) diff --git a/.github/workflows/build_test.yml b/.github/workflows/build_test.yml index 2fda49a54..680ad4108 100644 --- a/.github/workflows/build_test.yml +++ b/.github/workflows/build_test.yml @@ -51,7 +51,7 @@ jobs: --health-retries 5 clickhouse: - image: 
clickhouse/clickhouse-server:21.10 + image: clickhouse/clickhouse-server:22 ports: - 8123 options: >- diff --git a/client/docker-compose.yml.j2 b/client/docker-compose.yml.j2 index 8ad3454af..62e745903 100644 --- a/client/docker-compose.yml.j2 +++ b/client/docker-compose.yml.j2 @@ -66,7 +66,7 @@ services: {% elif database == 'clickhouse' %} clickhouse: - image: clickhouse/clickhouse-server:21.10 + image: clickhouse/clickhouse-server:22 ports: - 8123:8123 environment: diff --git a/docker-compose.yml b/docker-compose.yml index 62e100ed7..27354f54c 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -42,7 +42,7 @@ services: # - mongo_data:/data/db # clickhouse: - # image: clickhouse/clickhouse-server:21.10 + # image: clickhouse/clickhouse-server:22 # ports: # - 8123:8123 # environment: diff --git a/storage/data/database.go b/storage/data/database.go index 4803b873b..3dfaeee5c 100644 --- a/storage/data/database.go +++ b/storage/data/database.go @@ -17,8 +17,8 @@ package data import ( "context" "encoding/json" - "reflect" "net/url" + "reflect" "sort" "strings" "time" @@ -147,6 +147,10 @@ type Feedback struct { Comment string `gorm:"column:comment" mapsstructure:"comment"` } +type UserFeedback Feedback + +type ItemFeedback Feedback + // SortFeedbacks sorts feedback from latest to oldest. 
func SortFeedbacks(feedback []Feedback) { sort.Sort(feedbackSorter(feedback)) diff --git a/storage/data/database_test.go b/storage/data/database_test.go index 06c671af3..ac9fcad1a 100644 --- a/storage/data/database_test.go +++ b/storage/data/database_test.go @@ -35,6 +35,7 @@ var ( positiveFeedbackType = "positiveFeedbackType" negativeFeedbackType = "negativeFeedbackType" duplicateFeedbackType = "duplicateFeedbackType" + dateTime64Zero = time.Date(1900, 1, 1, 0, 0, 0, 0, time.UTC) ) type baseTestSuite struct { @@ -279,8 +280,8 @@ func (suite *baseTestSuite) TestFeedback() { suite.Equal(strconv.Itoa(i*2), item.ItemId) if item.ItemId != "0" { if suite.isClickHouse() { - // ClickHouse returns 1970-01-01 as zero date. - suite.Zero(item.Timestamp.Unix()) + // ClickHouse returns 1900-01-01 00:00:00 +0000 UTC as zero date. + suite.Equal(dateTime64Zero, item.Timestamp) } else { suite.Zero(item.Timestamp) } @@ -310,9 +311,10 @@ func (suite *baseTestSuite) TestFeedback() { // Get typed feedback by user ret, err = suite.Database.GetUserFeedback(ctx, "2", lo.ToPtr(time.Now()), positiveFeedbackType) suite.NoError(err) - suite.Equal(1, len(ret)) - suite.Equal("2", ret[0].UserId) - suite.Equal("4", ret[0].ItemId) + if suite.Equal(1, len(ret)) { + suite.Equal("2", ret[0].UserId) + suite.Equal("4", ret[0].ItemId) + } // Get all feedback by user ret, err = suite.Database.GetUserFeedback(ctx, "2", lo.ToPtr(time.Now())) suite.NoError(err) @@ -579,6 +581,8 @@ func (suite *baseTestSuite) TestDeleteFeedback() { // RowAffected isn't supported by ClickHouse, suite.Equal(3, deleteCount) } + err = suite.Database.Optimize() + suite.NoError(err) ret, err = suite.Database.GetUserItemFeedback(ctx, "2", "3") suite.NoError(err) suite.Empty(ret) diff --git a/storage/data/sql.go b/storage/data/sql.go index 922b56693..caf5a85fc 100644 --- a/storage/data/sql.go +++ b/storage/data/sql.go @@ -125,7 +125,7 @@ type SQLDatabase struct { // Optimize is used by ClickHouse only. 
func (d *SQLDatabase) Optimize() error { if d.driver == ClickHouse { - for _, tableName := range []string{d.UsersTable(), d.ItemsTable(), d.FeedbackTable()} { + for _, tableName := range []string{d.UsersTable(), d.ItemsTable(), d.FeedbackTable(), d.UserFeedbackTable(), d.ItemFeedbackTable()} { _, err := d.client.Exec("OPTIMIZE TABLE " + tableName) if err != nil { return errors.Trace(err) @@ -225,7 +225,7 @@ func (d *SQLDatabase) Init() error { ItemId string `gorm:"column:item_id;type:String"` IsHidden int `gorm:"column:is_hidden;type:Boolean;default:0"` Categories string `gorm:"column:categories;type:String;default:'[]'"` - Timestamp time.Time `gorm:"column:time_stamp;type:Datetime"` + Timestamp time.Time `gorm:"column:time_stamp;type:Datetime64(9,'UTC')"` Labels string `gorm:"column:labels;type:String;default:'[]'"` Comment string `gorm:"column:comment;type:String"` Version struct{} `gorm:"column:version;type:DateTime"` @@ -247,9 +247,9 @@ func (d *SQLDatabase) Init() error { } type Feedback struct { FeedbackType string `gorm:"column:feedback_type;type:String"` - UserId string `gorm:"column:user_id;type:String;index:user_index,type:bloom_filter(0.01),granularity:1"` - ItemId string `gorm:"column:item_id;type:String;index:item_index,type:bloom_filter(0.01),granularity:1"` - Timestamp time.Time `gorm:"column:time_stamp;type:DateTime"` + UserId string `gorm:"column:user_id;type:String"` + ItemId string `gorm:"column:item_id;type:String"` + Timestamp time.Time `gorm:"column:time_stamp;type:DateTime64(9,'UTC')"` Comment string `gorm:"column:comment;type:String"` Version struct{} `gorm:"column:version;type:DateTime"` } @@ -257,6 +257,27 @@ func (d *SQLDatabase) Init() error { if err != nil { return errors.Trace(err) } + // create materialized views + type UserFeedback Feedback + err = d.gormDB.Set("gorm:table_options", "ENGINE = ReplacingMergeTree(version) ORDER BY (user_id, item_id, feedback_type)").AutoMigrate(UserFeedback{}) + if err != nil { + return 
errors.Trace(err) + } + err = d.gormDB.Exec(fmt.Sprintf("CREATE MATERIALIZED VIEW IF NOT EXISTS %s_mv TO %s AS SELECT * FROM %s", + d.UserFeedbackTable(), d.UserFeedbackTable(), d.FeedbackTable())).Error + if err != nil { + return errors.Trace(err) + } + type ItemFeedback Feedback + err = d.gormDB.Set("gorm:table_options", "ENGINE = ReplacingMergeTree(version) ORDER BY (item_id, user_id, feedback_type)").AutoMigrate(ItemFeedback{}) + if err != nil { + return errors.Trace(err) + } + err = d.gormDB.Exec(fmt.Sprintf("CREATE MATERIALIZED VIEW IF NOT EXISTS %s_mv TO %s AS SELECT * FROM %s", + d.ItemFeedbackTable(), d.ItemFeedbackTable(), d.FeedbackTable())).Error + if err != nil { + return errors.Trace(err) + } } return nil } @@ -271,8 +292,8 @@ func (d *SQLDatabase) Close() error { } func (d *SQLDatabase) Purge() error { - tables := []string{d.ItemsTable(), d.FeedbackTable(), d.UsersTable()} if d.driver == ClickHouse { + tables := []string{d.ItemsTable(), d.FeedbackTable(), d.UsersTable(), d.UserFeedbackTable(), d.ItemFeedbackTable()} for _, tableName := range tables { err := d.gormDB.Exec(fmt.Sprintf("alter table %s delete where 1=1", tableName)).Error if err != nil { @@ -280,6 +301,7 @@ func (d *SQLDatabase) Purge() error { } } } else { + tables := []string{d.ItemsTable(), d.FeedbackTable(), d.UsersTable()} for _, tableName := range tables { err := d.gormDB.Exec(fmt.Sprintf("DELETE FROM %s", tableName)).Error if err != nil { @@ -357,6 +379,14 @@ func (d *SQLDatabase) DeleteItem(ctx context.Context, itemId string) error { if err := d.gormDB.WithContext(ctx).Delete(&Feedback{}, "item_id = ?", itemId).Error; err != nil { return errors.Trace(err) } + if d.driver == ClickHouse { + if err := d.gormDB.WithContext(ctx).Delete(&ItemFeedback{}, "item_id = ?", itemId).Error; err != nil { + return errors.Trace(err) + } + if err := d.gormDB.WithContext(ctx).Delete(&UserFeedback{}, "item_id = ?", itemId).Error; err != nil { + return errors.Trace(err) + } + } return nil } @@ 
-492,10 +522,18 @@ func (d *SQLDatabase) GetItemStream(ctx context.Context, batchSize int, timeLimi // GetItemFeedback returns feedback of a item from MySQL. func (d *SQLDatabase) GetItemFeedback(ctx context.Context, itemId string, feedbackTypes ...string) ([]Feedback, error) { - tx := d.gormDB.WithContext(ctx).Table(d.FeedbackTable()).Select("user_id, item_id, feedback_type, time_stamp") + tx := d.gormDB.WithContext(ctx) + if d.driver == ClickHouse { + tx = tx.Table(d.ItemFeedbackTable()) + } else { + tx = tx.Table(d.FeedbackTable()) + } + tx = tx.Select("user_id, item_id, feedback_type, time_stamp") switch d.driver { case SQLite: tx.Where("time_stamp <= DATETIME() AND item_id = ?", itemId) + case ClickHouse: + tx.Where("time_stamp <= NOW('UTC') AND item_id = ?", itemId) default: tx.Where("time_stamp <= NOW() AND item_id = ?", itemId) } @@ -559,6 +597,14 @@ func (d *SQLDatabase) DeleteUser(ctx context.Context, userId string) error { if err := d.gormDB.WithContext(ctx).Delete(&Feedback{}, "user_id = ?", userId).Error; err != nil { return errors.Trace(err) } + if d.driver == ClickHouse { + if err := d.gormDB.WithContext(ctx).Delete(&ItemFeedback{}, "user_id = ?", userId).Error; err != nil { + return errors.Trace(err) + } + if err := d.gormDB.WithContext(ctx).Delete(&UserFeedback{}, "user_id = ?", userId).Error; err != nil { + return errors.Trace(err) + } + } return nil } @@ -674,8 +720,13 @@ func (d *SQLDatabase) GetUserStream(ctx context.Context, batchSize int) (chan [] // GetUserFeedback returns feedback of a user from MySQL. func (d *SQLDatabase) GetUserFeedback(ctx context.Context, userId string, endTime *time.Time, feedbackTypes ...string) ([]Feedback, error) { - tx := d.gormDB.WithContext(ctx).Table(d.FeedbackTable()). - Select("feedback_type, user_id, item_id, time_stamp, comment"). 
+ tx := d.gormDB.WithContext(ctx) + if d.driver == ClickHouse { + tx = tx.Table(d.UserFeedbackTable()) + } else { + tx = tx.Table(d.FeedbackTable()) + } + tx = tx.Select("feedback_type, user_id, item_id, time_stamp, comment"). Where("user_id = ?", userId) if endTime != nil { tx.Where("time_stamp <= ?", d.convertTimeZone(endTime)) @@ -955,8 +1006,13 @@ func (d *SQLDatabase) GetFeedbackStream(ctx context.Context, batchSize int, scan // GetUserItemFeedback gets a feedback by user id and item id from MySQL. func (d *SQLDatabase) GetUserItemFeedback(ctx context.Context, userId, itemId string, feedbackTypes ...string) ([]Feedback, error) { - tx := d.gormDB.WithContext(ctx).Table(d.FeedbackTable()). - Select("feedback_type, user_id, item_id, time_stamp, comment"). + tx := d.gormDB.WithContext(ctx) + if d.driver == ClickHouse { + tx = tx.Table(d.UserFeedbackTable()) + } else { + tx = tx.Table(d.FeedbackTable()) + } + tx = tx.Select("feedback_type, user_id, item_id, time_stamp, comment"). Where("user_id = ? AND item_id = ?", userId, itemId) if len(feedbackTypes) > 0 { tx.Where("feedback_type IN ?", feedbackTypes) @@ -979,18 +1035,32 @@ func (d *SQLDatabase) GetUserItemFeedback(ctx context.Context, userId, itemId st // DeleteUserItemFeedback deletes a feedback by user id and item id from MySQL. func (d *SQLDatabase) DeleteUserItemFeedback(ctx context.Context, userId, itemId string, feedbackTypes ...string) (int, error) { - tx := d.gormDB.WithContext(ctx).Where("user_id = ? AND item_id = ?", userId, itemId) - if len(feedbackTypes) > 0 { - tx.Where("feedback_type IN ?", feedbackTypes) + deleteUserItemFeedback := func(value any) (int, error) { + tx := d.gormDB.WithContext(ctx).Where("user_id = ? 
AND item_id = ?", userId, itemId) + if len(feedbackTypes) > 0 { + tx.Where("feedback_type IN ?", feedbackTypes) + } + tx.Delete(value) + if tx.Error != nil { + return 0, errors.Trace(tx.Error) + } + return int(tx.RowsAffected), nil } - tx.Delete(&Feedback{}) - if tx.Error != nil { - return 0, errors.Trace(tx.Error) + rowAffected, err := deleteUserItemFeedback(&Feedback{}) + if err != nil { + return 0, errors.Trace(err) } - if tx.Error != nil && d.driver != ClickHouse { - return 0, errors.Trace(tx.Error) + if d.driver == ClickHouse { + _, err = deleteUserItemFeedback(&UserFeedback{}) + if err != nil { + return 0, errors.Trace(err) + } + _, err = deleteUserItemFeedback(&ItemFeedback{}) + if err != nil { + return 0, errors.Trace(err) + } } - return int(tx.RowsAffected), nil + return rowAffected, nil } func (d *SQLDatabase) convertTimeZone(timestamp *time.Time) time.Time { diff --git a/storage/scheme.go b/storage/scheme.go index 74accf9db..ae60fdc9d 100644 --- a/storage/scheme.go +++ b/storage/scheme.go @@ -125,6 +125,16 @@ func (tp TablePrefix) FeedbackTable() string { return string(tp) + "feedback" } +// UserFeedbackTable returns the materialized view of user feedback. +func (tp TablePrefix) UserFeedbackTable() string { + return string(tp) + "user_feedback" +} + +// ItemFeedbackTable returns the materialized view of item feedback. 
+func (tp TablePrefix) ItemFeedbackTable() string { + return string(tp) + "item_feedback" +} + func (tp TablePrefix) Key(key string) string { return string(tp) + key } From 4d39637fdbfee181d44ec33c3cdfd58b699711e0 Mon Sep 17 00:00:00 2001 From: zhenghaoz Date: Thu, 31 Oct 2024 22:25:55 +0800 Subject: [PATCH 04/14] ci: add Apple M1 runner (#879) --- .github/workflows/build_test.yml | 44 ++++++++++++++++++++++++++++++++ 1 file changed, 44 insertions(+) diff --git a/.github/workflows/build_test.yml b/.github/workflows/build_test.yml index 680ad4108..4cc97fff6 100644 --- a/.github/workflows/build_test.yml +++ b/.github/workflows/build_test.yml @@ -120,6 +120,50 @@ jobs: - name: Upload run: bash <(curl -s https://codecov.io/bash) + unit_test_m1: + name: unit tests (M1) + runs-on: macos-latest + steps: + - name: Set up dataset + run: | + mkdir -p ~/.gorse/dataset + mkdir -p ~/.gorse/download + wget https://cdn.gorse.io/datasets/ml-100k.zip -P ~/.gorse/download + wget https://cdn.gorse.io/datasets/ml-1m.zip -P ~/.gorse/download + wget https://cdn.gorse.io/datasets/pinterest-20.zip -P ~/.gorse/download + wget https://cdn.gorse.io/datasets/frappe.zip -P ~/.gorse/download + wget https://cdn.gorse.io/datasets/ml-tag.zip -P ~/.gorse/download + wget https://cdn.gorse.io/datasets/criteo.zip -P ~/.gorse/download + unzip ~/.gorse/download/ml-100k.zip -d ~/.gorse/dataset + unzip ~/.gorse/download/ml-1m.zip -d ~/.gorse/dataset + unzip ~/.gorse/download/pinterest-20.zip -d ~/.gorse/dataset + unzip ~/.gorse/download/frappe.zip -d ~/.gorse/dataset + unzip ~/.gorse/download/ml-tag.zip -d ~/.gorse/dataset + unzip ~/.gorse/download/criteo.zip -d ~/.gorse/dataset + + - name: Set up Go 1.23.x + uses: actions/setup-go@v4 + with: + go-version: 1.23.x + id: go + + - name: Check out code into the Go module directory + uses: actions/checkout@v2 + + - name: Get dependencies + run: | + go get -v -t -d ./... 
+ if [ -f Gopkg.toml ]; then + curl https://raw.githubusercontent.com/golang/dep/master/install.sh | sh + dep ensure + fi + + - name: Build + run: go build -v ./... + + - name: Test + run: go test -timeout 20m -v ./... -skip "TestPostgres|TestMySQL|TestMongo|TestRedis|TestClickHouse" + integrate_test: name: integrate tests runs-on: ubuntu-latest From debc903cff6fdd5afe477bb28735df76a14d423c Mon Sep 17 00:00:00 2001 From: Kursat Aktas Date: Sun, 3 Nov 2024 05:01:56 +0300 Subject: [PATCH 05/14] Introducing Gorse Guru on Gurubase.io (#882) --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 52b2478e0..c59abf640 100644 --- a/README.md +++ b/README.md @@ -9,6 +9,7 @@ [![GoDoc](https://godoc.org/github.com/zhenghaoz/gorse?status.svg)](https://godoc.org/github.com/zhenghaoz/gorse) [![Discord](https://img.shields.io/discord/830635934210588743)](https://discord.gg/x6gAtNNkAE) [![Twitter Follow](https://img.shields.io/twitter/follow/gorse_io?label=Follow&style=social)](https://twitter.com/gorse_io) +[![Gurubase](https://img.shields.io/badge/Gurubase-Ask%20Gorse%20Guru-006BFF)](https://gurubase.io/g/gorse) Gorse is an open-source recommendation system written in Go. Gorse aims to be a universal open-source recommender system that can be quickly introduced into a wide variety of online services. By importing items, users, and interaction data into Gorse, the system will automatically train models to generate recommendations for each user. Project features are as follows. 
From 795761c2adbc8bfe8dfdf484b724aaa77c09fc0b Mon Sep 17 00:00:00 2001 From: zhenghaoz Date: Wed, 6 Nov 2024 22:20:49 +0800 Subject: [PATCH 06/14] refactor: rename Document to Score (#883) --- master/rest.go | 2 +- master/rest_test.go | 20 ++--- master/tasks.go | 28 +++--- master/tasks_test.go | 84 +++++++++--------- server/bench_test.go | 16 ++-- server/rest.go | 82 +++++++++--------- server/rest_test.go | 118 ++++++++++++------------- storage/cache/database.go | 30 +++---- storage/cache/database_test.go | 90 +++++++++---------- storage/cache/mongodb.go | 12 +-- storage/cache/no_database.go | 8 +- storage/cache/no_database_test.go | 8 +- storage/cache/redis.go | 12 +-- storage/cache/redis_test.go | 16 ++-- storage/cache/sql.go | 18 ++-- worker/worker.go | 68 +++++++-------- worker/worker_test.go | 138 +++++++++++++++--------------- 17 files changed, 375 insertions(+), 375 deletions(-) diff --git a/master/rest.go b/master/rest.go index 77e4b95c8..8cc351d40 100644 --- a/master/rest.go +++ b/master/rest.go @@ -881,7 +881,7 @@ func (m *Master) searchDocuments(collection, subset, category string, request *r return } // Get the popular list - scores, err := m.CacheClient.SearchDocuments(ctx, collection, subset, []string{category}, offset, m.Config.Recommend.CacheSize) + scores, err := m.CacheClient.SearchScores(ctx, collection, subset, []string{category}, offset, m.Config.Recommend.CacheSize) if err != nil { server.InternalServerError(response, err) return diff --git a/master/rest_test.go b/master/rest_test.go index cb23f7656..bb5e4d843 100644 --- a/master/rest_test.go +++ b/master/rest_test.go @@ -648,14 +648,14 @@ func TestServer_SearchDocumentsOfItems(t *testing.T) { for i, operator := range operators { t.Run(operator.Name, func(t *testing.T) { // Put scores - scores := []cache.Document{ + scores := []cache.Score{ {Id: strconv.Itoa(i) + "0", Score: 100, Categories: []string{operator.Category}}, {Id: strconv.Itoa(i) + "1", Score: 99, Categories: 
[]string{operator.Category}}, {Id: strconv.Itoa(i) + "2", Score: 98, Categories: []string{operator.Category}}, {Id: strconv.Itoa(i) + "3", Score: 97, Categories: []string{operator.Category}}, {Id: strconv.Itoa(i) + "4", Score: 96, Categories: []string{operator.Category}}, } - err := s.CacheClient.AddDocuments(ctx, operator.Collection, operator.Subset, scores) + err := s.CacheClient.AddScores(ctx, operator.Collection, operator.Subset, scores) assert.NoError(t, err) items := make([]ScoredItem, 0) for _, score := range scores { @@ -699,14 +699,14 @@ func TestServer_SearchDocumentsOfUsers(t *testing.T) { for _, operator := range operators { t.Logf("test RESTful API: %v", operator.Get) // Put scores - scores := []cache.Document{ + scores := []cache.Score{ {Id: "0", Score: 100, Categories: []string{""}}, {Id: "1", Score: 99, Categories: []string{""}}, {Id: "2", Score: 98, Categories: []string{""}}, {Id: "3", Score: 97, Categories: []string{""}}, {Id: "4", Score: 96, Categories: []string{""}}, } - err := s.CacheClient.AddDocuments(ctx, operator.Prefix, operator.Label, scores) + err := s.CacheClient.AddScores(ctx, operator.Prefix, operator.Label, scores) assert.NoError(t, err) users := make([]ScoreUser, 0) for _, score := range scores { @@ -758,7 +758,7 @@ func TestServer_GetRecommends(t *testing.T) { s, cookie := newMockServer(t) defer s.Close(t) // inset recommendation - itemIds := []cache.Document{ + itemIds := []cache.Score{ {Id: "1", Score: 99, Categories: []string{""}}, {Id: "2", Score: 98, Categories: []string{""}}, {Id: "3", Score: 97, Categories: []string{""}}, @@ -769,7 +769,7 @@ func TestServer_GetRecommends(t *testing.T) { {Id: "8", Score: 92, Categories: []string{""}}, } ctx := context.Background() - err := s.CacheClient.AddDocuments(ctx, cache.OfflineRecommend, "0", itemIds) + err := s.CacheClient.AddScores(ctx, cache.OfflineRecommend, "0", itemIds) assert.NoError(t, err) // insert feedback feedback := []data.Feedback{ @@ -825,14 +825,14 @@ func 
TestMaster_Purge(t *testing.T) { assert.NoError(t, err) assert.ElementsMatch(t, []string{"a", "b", "c"}, set) - err = s.CacheClient.AddDocuments(ctx, "sorted", "", []cache.Document{ + err = s.CacheClient.AddScores(ctx, "sorted", "", []cache.Score{ {Id: "a", Score: 1, Categories: []string{""}}, {Id: "b", Score: 2, Categories: []string{""}}, {Id: "c", Score: 3, Categories: []string{""}}}) assert.NoError(t, err) - z, err := s.CacheClient.SearchDocuments(ctx, "sorted", "", []string{""}, 0, -1) + z, err := s.CacheClient.SearchScores(ctx, "sorted", "", []string{""}, 0, -1) assert.NoError(t, err) - assert.ElementsMatch(t, []cache.Document{ + assert.ElementsMatch(t, []cache.Score{ {Id: "a", Score: 1, Categories: []string{""}}, {Id: "b", Score: 2, Categories: []string{""}}, {Id: "c", Score: 3, Categories: []string{""}}}, z) @@ -869,7 +869,7 @@ func TestMaster_Purge(t *testing.T) { set, err = s.CacheClient.GetSet(ctx, "set") assert.NoError(t, err) assert.Empty(t, set) - z, err = s.CacheClient.SearchDocuments(ctx, "sorted", "", []string{""}, 0, -1) + z, err = s.CacheClient.SearchScores(ctx, "sorted", "", []string{""}, 0, -1) assert.NoError(t, err) assert.Empty(t, z) diff --git a/master/tasks.go b/master/tasks.go index e8ba09203..7116d8196 100644 --- a/master/tasks.go +++ b/master/tasks.go @@ -91,10 +91,10 @@ func (m *Master) runLoadDatasetTask() error { } // save popular items to cache - if err = m.CacheClient.AddDocuments(ctx, cache.PopularItems, "", popularItems.ToSlice()); err != nil { + if err = m.CacheClient.AddScores(ctx, cache.PopularItems, "", popularItems.ToSlice()); err != nil { log.Logger().Error("failed to cache popular items", zap.Error(err)) } - if err = m.CacheClient.DeleteDocuments(ctx, []string{cache.PopularItems}, cache.DocumentCondition{Before: &popularItems.Timestamp}); err != nil { + if err = m.CacheClient.DeleteScores(ctx, []string{cache.PopularItems}, cache.ScoreCondition{Before: &popularItems.Timestamp}); err != nil { log.Logger().Error("failed to 
reclaim outdated items", zap.Error(err)) } if err = m.CacheClient.Set(ctx, cache.Time(cache.Key(cache.GlobalMeta, cache.LastUpdatePopularItemsTime), time.Now())); err != nil { @@ -102,10 +102,10 @@ func (m *Master) runLoadDatasetTask() error { } // save the latest items to cache - if err = m.CacheClient.AddDocuments(ctx, cache.LatestItems, "", latestItems.ToSlice()); err != nil { + if err = m.CacheClient.AddScores(ctx, cache.LatestItems, "", latestItems.ToSlice()); err != nil { log.Logger().Error("failed to cache latest items", zap.Error(err)) } - if err = m.CacheClient.DeleteDocuments(ctx, []string{cache.LatestItems}, cache.DocumentCondition{Before: &latestItems.Timestamp}); err != nil { + if err = m.CacheClient.DeleteScores(ctx, []string{cache.LatestItems}, cache.ScoreCondition{Before: &latestItems.Timestamp}); err != nil { log.Logger().Error("failed to reclaim outdated items", zap.Error(err)) } if err = m.CacheClient.Set(ctx, cache.Time(cache.Key(cache.GlobalMeta, cache.LastUpdateLatestItemsTime), time.Now())); err != nil { @@ -397,10 +397,10 @@ func (m *Master) findItemNeighborsBruteForce(dataset *ranking.DataSet, labeledIt } aggregator.Add(category, recommends, scores) } - if err := m.CacheClient.AddDocuments(ctx, cache.ItemNeighbors, itemId, aggregator.ToSlice()); err != nil { + if err := m.CacheClient.AddScores(ctx, cache.ItemNeighbors, itemId, aggregator.ToSlice()); err != nil { return errors.Trace(err) } - if err := m.CacheClient.DeleteDocuments(ctx, []string{cache.ItemNeighbors}, cache.DocumentCondition{ + if err := m.CacheClient.DeleteScores(ctx, []string{cache.ItemNeighbors}, cache.ScoreCondition{ Subset: proto.String(itemId), Before: &aggregator.Timestamp, }); err != nil { @@ -502,10 +502,10 @@ func (m *Master) findItemNeighborsIVF(dataset *ranking.DataSet, labelIDF, userID aggregator.Add(category, resultValues, resultScores) } } - if err := m.CacheClient.AddDocuments(ctx, cache.ItemNeighbors, itemId, aggregator.ToSlice()); err != nil { + if err := 
m.CacheClient.AddScores(ctx, cache.ItemNeighbors, itemId, aggregator.ToSlice()); err != nil { return errors.Trace(err) } - if err := m.CacheClient.DeleteDocuments(ctx, []string{cache.ItemNeighbors}, cache.DocumentCondition{ + if err := m.CacheClient.DeleteScores(ctx, []string{cache.ItemNeighbors}, cache.ScoreCondition{ Subset: proto.String(itemId), Before: &aggregator.Timestamp, }); err != nil { @@ -716,10 +716,10 @@ func (m *Master) findUserNeighborsBruteForce(ctx context.Context, dataset *ranki } aggregator := cache.NewDocumentAggregator(startSearchTime) aggregator.Add("", recommends, scores) - if err := m.CacheClient.AddDocuments(ctx, cache.UserNeighbors, userId, aggregator.ToSlice()); err != nil { + if err := m.CacheClient.AddScores(ctx, cache.UserNeighbors, userId, aggregator.ToSlice()); err != nil { return errors.Trace(err) } - if err := m.CacheClient.DeleteDocuments(ctx, []string{cache.UserNeighbors}, cache.DocumentCondition{ + if err := m.CacheClient.DeleteScores(ctx, []string{cache.UserNeighbors}, cache.ScoreCondition{ Subset: proto.String(userId), Before: &aggregator.Timestamp, }); err != nil { @@ -808,10 +808,10 @@ func (m *Master) findUserNeighborsIVF(ctx context.Context, dataset *ranking.Data } aggregator := cache.NewDocumentAggregator(startSearchTime) aggregator.Add("", resultValues, resultScores) - if err := m.CacheClient.AddDocuments(ctx, cache.UserNeighbors, userId, aggregator.ToSlice()); err != nil { + if err := m.CacheClient.AddScores(ctx, cache.UserNeighbors, userId, aggregator.ToSlice()); err != nil { return errors.Trace(err) } - if err := m.CacheClient.DeleteDocuments(ctx, []string{cache.UserNeighbors}, cache.DocumentCondition{ + if err := m.CacheClient.DeleteScores(ctx, []string{cache.UserNeighbors}, cache.ScoreCondition{ Subset: proto.String(userId), Before: &aggregator.Timestamp, }); err != nil { @@ -872,7 +872,7 @@ func (m *Master) checkUserNeighborCacheTimeout(userId string) bool { ) ctx := context.Background() // check cache - if items, 
err := m.CacheClient.SearchDocuments(ctx, cache.UserNeighbors, userId, []string{""}, 0, -1); err != nil { + if items, err := m.CacheClient.SearchScores(ctx, cache.UserNeighbors, userId, []string{""}, 0, -1); err != nil { log.Logger().Error("failed to load user neighbors", zap.String("user_id", userId), zap.Error(err)) return true } else if len(items) == 0 { @@ -927,7 +927,7 @@ func (m *Master) checkItemNeighborCacheTimeout(itemId string, categories []strin // check cache for _, category := range append([]string{""}, categories...) { - items, err := m.CacheClient.SearchDocuments(ctx, cache.ItemNeighbors, itemId, []string{category}, 0, -1) + items, err := m.CacheClient.SearchScores(ctx, cache.ItemNeighbors, itemId, []string{category}, 0, -1) if err != nil { log.Logger().Error("failed to load item neighbors", zap.String("item_id", itemId), zap.Error(err)) return true diff --git a/master/tasks_test.go b/master/tasks_test.go index 6750c43d0..8fa037047 100644 --- a/master/tasks_test.go +++ b/master/tasks_test.go @@ -89,11 +89,11 @@ func (s *MasterTestSuite) TestFindItemNeighborsBruteForce() { s.Config.Recommend.ItemNeighbors.NeighborType = config.NeighborTypeRelated neighborTask := NewFindItemNeighborsTask(&s.Master) s.NoError(neighborTask.run(context.Background(), nil)) - similar, err := s.CacheClient.SearchDocuments(ctx, cache.ItemNeighbors, "9", []string{""}, 0, 100) + similar, err := s.CacheClient.SearchScores(ctx, cache.ItemNeighbors, "9", []string{""}, 0, 100) s.NoError(err) s.Equal([]string{"7", "5", "3"}, cache.ConvertDocumentsToValues(similar)) // similar items in category (common users) - similar, err = s.CacheClient.SearchDocuments(ctx, cache.ItemNeighbors, "9", []string{"*"}, 0, 100) + similar, err = s.CacheClient.SearchScores(ctx, cache.ItemNeighbors, "9", []string{"*"}, 0, 100) s.NoError(err) s.Equal([]string{"7", "5", "1"}, cache.ConvertDocumentsToValues(similar)) @@ -103,11 +103,11 @@ func (s *MasterTestSuite) TestFindItemNeighborsBruteForce() { 
s.Config.Recommend.ItemNeighbors.NeighborType = config.NeighborTypeSimilar neighborTask = NewFindItemNeighborsTask(&s.Master) s.NoError(neighborTask.run(context.Background(), nil)) - similar, err = s.CacheClient.SearchDocuments(ctx, cache.ItemNeighbors, "8", []string{""}, 0, 100) + similar, err = s.CacheClient.SearchScores(ctx, cache.ItemNeighbors, "8", []string{""}, 0, 100) s.NoError(err) s.Equal([]string{"0", "2", "4"}, cache.ConvertDocumentsToValues(similar)) // similar items in category (common labels) - similar, err = s.CacheClient.SearchDocuments(ctx, cache.ItemNeighbors, "8", []string{"*"}, 0, 100) + similar, err = s.CacheClient.SearchScores(ctx, cache.ItemNeighbors, "8", []string{"*"}, 0, 100) s.NoError(err) s.Equal([]string{"0", "2", "6"}, cache.ConvertDocumentsToValues(similar)) @@ -119,10 +119,10 @@ func (s *MasterTestSuite) TestFindItemNeighborsBruteForce() { s.Config.Recommend.ItemNeighbors.NeighborType = config.NeighborTypeAuto neighborTask = NewFindItemNeighborsTask(&s.Master) s.NoError(neighborTask.run(context.Background(), nil)) - similar, err = s.CacheClient.SearchDocuments(ctx, cache.ItemNeighbors, "8", []string{""}, 0, 100) + similar, err = s.CacheClient.SearchScores(ctx, cache.ItemNeighbors, "8", []string{""}, 0, 100) s.NoError(err) s.Equal([]string{"0", "2", "4"}, cache.ConvertDocumentsToValues(similar)) - similar, err = s.CacheClient.SearchDocuments(ctx, cache.ItemNeighbors, "9", []string{""}, 0, 100) + similar, err = s.CacheClient.SearchScores(ctx, cache.ItemNeighbors, "9", []string{""}, 0, 100) s.NoError(err) s.Equal([]string{"7", "5", "3"}, cache.ConvertDocumentsToValues(similar)) } @@ -194,11 +194,11 @@ func (s *MasterTestSuite) TestFindItemNeighborsIVF() { s.Config.Recommend.ItemNeighbors.NeighborType = config.NeighborTypeRelated neighborTask := NewFindItemNeighborsTask(&s.Master) s.NoError(neighborTask.run(context.Background(), nil)) - similar, err := s.CacheClient.SearchDocuments(ctx, cache.ItemNeighbors, "9", []string{""}, 0, 100) + 
similar, err := s.CacheClient.SearchScores(ctx, cache.ItemNeighbors, "9", []string{""}, 0, 100) s.NoError(err) s.Equal([]string{"7", "5", "3"}, cache.ConvertDocumentsToValues(similar)) // similar items in category (common users) - similar, err = s.CacheClient.SearchDocuments(ctx, cache.ItemNeighbors, "9", []string{"*"}, 0, 100) + similar, err = s.CacheClient.SearchScores(ctx, cache.ItemNeighbors, "9", []string{"*"}, 0, 100) s.NoError(err) s.Equal([]string{"7", "5", "1"}, cache.ConvertDocumentsToValues(similar)) @@ -208,11 +208,11 @@ func (s *MasterTestSuite) TestFindItemNeighborsIVF() { s.Config.Recommend.ItemNeighbors.NeighborType = config.NeighborTypeSimilar neighborTask = NewFindItemNeighborsTask(&s.Master) s.NoError(neighborTask.run(context.Background(), nil)) - similar, err = s.CacheClient.SearchDocuments(ctx, cache.ItemNeighbors, "8", []string{""}, 0, 100) + similar, err = s.CacheClient.SearchScores(ctx, cache.ItemNeighbors, "8", []string{""}, 0, 100) s.NoError(err) s.Equal([]string{"0", "2", "4"}, cache.ConvertDocumentsToValues(similar)) // similar items in category (common labels) - similar, err = s.CacheClient.SearchDocuments(ctx, cache.ItemNeighbors, "8", []string{"*"}, 0, 100) + similar, err = s.CacheClient.SearchScores(ctx, cache.ItemNeighbors, "8", []string{"*"}, 0, 100) s.NoError(err) s.Equal([]string{"0", "2", "6"}, cache.ConvertDocumentsToValues(similar)) @@ -224,10 +224,10 @@ func (s *MasterTestSuite) TestFindItemNeighborsIVF() { s.Config.Recommend.ItemNeighbors.NeighborType = config.NeighborTypeAuto neighborTask = NewFindItemNeighborsTask(&s.Master) s.NoError(neighborTask.run(context.Background(), nil)) - similar, err = s.CacheClient.SearchDocuments(ctx, cache.ItemNeighbors, "8", []string{""}, 0, 100) + similar, err = s.CacheClient.SearchScores(ctx, cache.ItemNeighbors, "8", []string{""}, 0, 100) s.NoError(err) s.Equal([]string{"0", "2", "4"}, cache.ConvertDocumentsToValues(similar)) - similar, err = s.CacheClient.SearchDocuments(ctx, 
cache.ItemNeighbors, "9", []string{""}, 0, 100) + similar, err = s.CacheClient.SearchScores(ctx, cache.ItemNeighbors, "9", []string{""}, 0, 100) s.NoError(err) s.Equal([]string{"7", "5", "3"}, cache.ConvertDocumentsToValues(similar)) } @@ -261,7 +261,7 @@ func (s *MasterTestSuite) TestFindItemNeighborsIVF_ZeroIDF() { s.Config.Recommend.ItemNeighbors.NeighborType = config.NeighborTypeRelated neighborTask := NewFindItemNeighborsTask(&s.Master) s.NoError(neighborTask.run(context.Background(), nil)) - similar, err := s.CacheClient.SearchDocuments(ctx, cache.ItemNeighbors, "0", []string{""}, 0, 100) + similar, err := s.CacheClient.SearchScores(ctx, cache.ItemNeighbors, "0", []string{""}, 0, 100) s.NoError(err) s.Equal([]string{"1"}, cache.ConvertDocumentsToValues(similar)) @@ -269,7 +269,7 @@ func (s *MasterTestSuite) TestFindItemNeighborsIVF_ZeroIDF() { s.Config.Recommend.ItemNeighbors.NeighborType = config.NeighborTypeSimilar neighborTask = NewFindItemNeighborsTask(&s.Master) s.NoError(neighborTask.run(context.Background(), nil)) - similar, err = s.CacheClient.SearchDocuments(ctx, cache.ItemNeighbors, "0", []string{""}, 0, 100) + similar, err = s.CacheClient.SearchScores(ctx, cache.ItemNeighbors, "0", []string{""}, 0, 100) s.NoError(err) s.Equal([]string{"1"}, cache.ConvertDocumentsToValues(similar)) } @@ -321,7 +321,7 @@ func (s *MasterTestSuite) TestFindUserNeighborsBruteForce() { s.Config.Recommend.UserNeighbors.NeighborType = config.NeighborTypeRelated neighborTask := NewFindUserNeighborsTask(&s.Master) s.NoError(neighborTask.run(context.Background(), nil)) - similar, err := s.CacheClient.SearchDocuments(ctx, cache.UserNeighbors, "9", []string{""}, 0, 100) + similar, err := s.CacheClient.SearchScores(ctx, cache.UserNeighbors, "9", []string{""}, 0, 100) s.NoError(err) s.Equal([]string{"7", "5", "3"}, cache.ConvertDocumentsToValues(similar)) @@ -331,7 +331,7 @@ func (s *MasterTestSuite) TestFindUserNeighborsBruteForce() { 
s.Config.Recommend.UserNeighbors.NeighborType = config.NeighborTypeSimilar neighborTask = NewFindUserNeighborsTask(&s.Master) s.NoError(neighborTask.run(context.Background(), nil)) - similar, err = s.CacheClient.SearchDocuments(ctx, cache.UserNeighbors, "8", []string{""}, 0, 100) + similar, err = s.CacheClient.SearchScores(ctx, cache.UserNeighbors, "8", []string{""}, 0, 100) s.NoError(err) s.Equal([]string{"0", "2", "4"}, cache.ConvertDocumentsToValues(similar)) @@ -343,10 +343,10 @@ func (s *MasterTestSuite) TestFindUserNeighborsBruteForce() { s.Config.Recommend.UserNeighbors.NeighborType = config.NeighborTypeAuto neighborTask = NewFindUserNeighborsTask(&s.Master) s.NoError(neighborTask.run(context.Background(), nil)) - similar, err = s.CacheClient.SearchDocuments(ctx, cache.UserNeighbors, "8", []string{""}, 0, 100) + similar, err = s.CacheClient.SearchScores(ctx, cache.UserNeighbors, "8", []string{""}, 0, 100) s.NoError(err) s.Equal([]string{"0", "2", "4"}, cache.ConvertDocumentsToValues(similar)) - similar, err = s.CacheClient.SearchDocuments(ctx, cache.UserNeighbors, "9", []string{""}, 0, 100) + similar, err = s.CacheClient.SearchScores(ctx, cache.UserNeighbors, "9", []string{""}, 0, 100) s.NoError(err) s.Equal([]string{"7", "5", "3"}, cache.ConvertDocumentsToValues(similar)) } @@ -401,7 +401,7 @@ func (s *MasterTestSuite) TestFindUserNeighborsIVF() { s.Config.Recommend.UserNeighbors.NeighborType = config.NeighborTypeRelated neighborTask := NewFindUserNeighborsTask(&s.Master) s.NoError(neighborTask.run(context.Background(), nil)) - similar, err := s.CacheClient.SearchDocuments(ctx, cache.UserNeighbors, "9", []string{""}, 0, 100) + similar, err := s.CacheClient.SearchScores(ctx, cache.UserNeighbors, "9", []string{""}, 0, 100) s.NoError(err) s.Equal([]string{"7", "5", "3"}, cache.ConvertDocumentsToValues(similar)) @@ -411,7 +411,7 @@ func (s *MasterTestSuite) TestFindUserNeighborsIVF() { s.Config.Recommend.UserNeighbors.NeighborType = config.NeighborTypeSimilar 
neighborTask = NewFindUserNeighborsTask(&s.Master) s.NoError(neighborTask.run(context.Background(), nil)) - similar, err = s.CacheClient.SearchDocuments(ctx, cache.UserNeighbors, "8", []string{""}, 0, 100) + similar, err = s.CacheClient.SearchScores(ctx, cache.UserNeighbors, "8", []string{""}, 0, 100) s.NoError(err) s.Equal([]string{"0", "2", "4"}, cache.ConvertDocumentsToValues(similar)) @@ -423,10 +423,10 @@ func (s *MasterTestSuite) TestFindUserNeighborsIVF() { s.Config.Recommend.UserNeighbors.NeighborType = config.NeighborTypeAuto neighborTask = NewFindUserNeighborsTask(&s.Master) s.NoError(neighborTask.run(context.Background(), nil)) - similar, err = s.CacheClient.SearchDocuments(ctx, cache.UserNeighbors, "8", []string{""}, 0, 100) + similar, err = s.CacheClient.SearchScores(ctx, cache.UserNeighbors, "8", []string{""}, 0, 100) s.NoError(err) s.Equal([]string{"0", "2", "4"}, cache.ConvertDocumentsToValues(similar)) - similar, err = s.CacheClient.SearchDocuments(ctx, cache.UserNeighbors, "9", []string{""}, 0, 100) + similar, err = s.CacheClient.SearchScores(ctx, cache.UserNeighbors, "9", []string{""}, 0, 100) s.NoError(err) s.Equal([]string{"7", "5", "3"}, cache.ConvertDocumentsToValues(similar)) } @@ -460,7 +460,7 @@ func (s *MasterTestSuite) TestFindUserNeighborsIVF_ZeroIDF() { s.Config.Recommend.UserNeighbors.NeighborType = config.NeighborTypeRelated neighborTask := NewFindUserNeighborsTask(&s.Master) s.NoError(neighborTask.run(context.Background(), nil)) - similar, err := s.CacheClient.SearchDocuments(ctx, cache.UserNeighbors, "0", []string{""}, 0, 100) + similar, err := s.CacheClient.SearchScores(ctx, cache.UserNeighbors, "0", []string{""}, 0, 100) s.NoError(err) s.Equal([]string{"1"}, cache.ConvertDocumentsToValues(similar)) @@ -468,7 +468,7 @@ func (s *MasterTestSuite) TestFindUserNeighborsIVF_ZeroIDF() { s.Config.Recommend.UserNeighbors.NeighborType = config.NeighborTypeSimilar neighborTask = NewFindUserNeighborsTask(&s.Master) 
s.NoError(neighborTask.run(context.Background(), nil)) - similar, err = s.CacheClient.SearchDocuments(ctx, cache.UserNeighbors, "0", []string{""}, 0, 100) + similar, err = s.CacheClient.SearchScores(ctx, cache.UserNeighbors, "0", []string{""}, 0, 100) s.NoError(err) s.Equal([]string{"1"}, cache.ConvertDocumentsToValues(similar)) } @@ -568,43 +568,43 @@ func (s *MasterTestSuite) TestLoadDataFromDatabase() { s.Equal(45, s.clickTrainSet.NegativeCount+s.clickTestSet.NegativeCount) // check latest items - latest, err := s.CacheClient.SearchDocuments(ctx, cache.LatestItems, "", []string{""}, 0, 100) + latest, err := s.CacheClient.SearchScores(ctx, cache.LatestItems, "", []string{""}, 0, 100) s.NoError(err) - s.Equal([]cache.Document{ + s.Equal([]cache.Score{ {Id: items[8].ItemId, Score: float64(items[8].Timestamp.Unix())}, {Id: items[7].ItemId, Score: float64(items[7].Timestamp.Unix())}, {Id: items[6].ItemId, Score: float64(items[6].Timestamp.Unix())}, - }, lo.Map(latest, func(document cache.Document, _ int) cache.Document { - return cache.Document{Id: document.Id, Score: document.Score} + }, lo.Map(latest, func(document cache.Score, _ int) cache.Score { + return cache.Score{Id: document.Id, Score: document.Score} })) - latest, err = s.CacheClient.SearchDocuments(ctx, cache.LatestItems, "", []string{"2"}, 0, 100) + latest, err = s.CacheClient.SearchScores(ctx, cache.LatestItems, "", []string{"2"}, 0, 100) s.NoError(err) - s.Equal([]cache.Document{ + s.Equal([]cache.Score{ {Id: items[8].ItemId, Score: float64(items[8].Timestamp.Unix())}, {Id: items[5].ItemId, Score: float64(items[5].Timestamp.Unix())}, {Id: items[2].ItemId, Score: float64(items[2].Timestamp.Unix())}, - }, lo.Map(latest, func(document cache.Document, _ int) cache.Document { - return cache.Document{Id: document.Id, Score: document.Score} + }, lo.Map(latest, func(document cache.Score, _ int) cache.Score { + return cache.Score{Id: document.Id, Score: document.Score} })) // check popular items - popular, err 
:= s.CacheClient.SearchDocuments(ctx, cache.PopularItems, "", []string{""}, 0, 3) + popular, err := s.CacheClient.SearchScores(ctx, cache.PopularItems, "", []string{""}, 0, 3) s.NoError(err) - s.Equal([]cache.Document{ + s.Equal([]cache.Score{ {Id: items[8].ItemId, Score: 9}, {Id: items[7].ItemId, Score: 8}, {Id: items[6].ItemId, Score: 7}, - }, lo.Map(popular, func(document cache.Document, _ int) cache.Document { - return cache.Document{Id: document.Id, Score: document.Score} + }, lo.Map(popular, func(document cache.Score, _ int) cache.Score { + return cache.Score{Id: document.Id, Score: document.Score} })) - popular, err = s.CacheClient.SearchDocuments(ctx, cache.PopularItems, "", []string{"2"}, 0, 3) + popular, err = s.CacheClient.SearchScores(ctx, cache.PopularItems, "", []string{"2"}, 0, 3) s.NoError(err) - s.Equal([]cache.Document{ + s.Equal([]cache.Score{ {Id: items[8].ItemId, Score: 9}, {Id: items[5].ItemId, Score: 6}, {Id: items[2].ItemId, Score: 3}, - }, lo.Map(popular, func(document cache.Document, _ int) cache.Document { - return cache.Document{Id: document.Id, Score: document.Score} + }, lo.Map(popular, func(document cache.Score, _ int) cache.Score { + return cache.Score{Id: document.Id, Score: document.Score} })) // check categories @@ -619,7 +619,7 @@ func (s *MasterTestSuite) TestCheckItemNeighborCacheTimeout() { // empty cache s.True(s.checkItemNeighborCacheTimeout("1", nil)) - err := s.CacheClient.AddDocuments(ctx, cache.ItemNeighbors, "1", []cache.Document{ + err := s.CacheClient.AddScores(ctx, cache.ItemNeighbors, "1", []cache.Score{ {Id: "2", Score: 1, Categories: []string{""}}, {Id: "3", Score: 2, Categories: []string{""}}, {Id: "4", Score: 3, Categories: []string{""}}, @@ -654,7 +654,7 @@ func (s *MasterTestSuite) TestCheckUserNeighborCacheTimeout() { // empty cache s.True(s.checkUserNeighborCacheTimeout("1")) - err := s.CacheClient.AddDocuments(ctx, cache.UserNeighbors, "1", []cache.Document{ + err := s.CacheClient.AddScores(ctx, 
cache.UserNeighbors, "1", []cache.Score{ {Id: "1", Score: 1, Categories: []string{""}}, {Id: "2", Score: 2, Categories: []string{""}}, {Id: "3", Score: 3, Categories: []string{""}}, diff --git a/server/bench_test.go b/server/bench_test.go index 417345aa7..53afe0493 100644 --- a/server/bench_test.go +++ b/server/bench_test.go @@ -797,14 +797,14 @@ func BenchmarkGetRecommendCache(b *testing.B) { ctx := context.Background() for batchSize := 10; batchSize <= 1000; batchSize *= 10 { b.Run(strconv.Itoa(batchSize), func(b *testing.B) { - documents := make([]cache.Document, batchSize) + documents := make([]cache.Score, batchSize) for i := range documents { documents[i].Id = strconv.Itoa(i) documents[i].Score = float64(i) documents[i].Categories = []string{""} } lo.Reverse(documents) - err := s.CacheClient.AddDocuments(ctx, cache.PopularItems, "", documents) + err := s.CacheClient.AddScores(ctx, cache.PopularItems, "", documents) require.NoError(b, err) s.Config.Recommend.CacheSize = len(documents) @@ -835,7 +835,7 @@ func BenchmarkRecommendFromOfflineCache(b *testing.B) { ctx := context.Background() for batchSize := 10; batchSize <= 1000; batchSize *= 10 { b.Run(strconv.Itoa(batchSize), func(b *testing.B) { - documents := make([]cache.Document, batchSize*2) + documents := make([]cache.Score, batchSize*2) expects := make([]string, batchSize) feedbacks := make([]data.Feedback, batchSize) for i := range documents { @@ -852,7 +852,7 @@ func BenchmarkRecommendFromOfflineCache(b *testing.B) { } lo.Reverse(documents) lo.Reverse(expects) - err := s.CacheClient.AddDocuments(ctx, cache.OfflineRecommend, "init_user_1", documents) + err := s.CacheClient.AddScores(ctx, cache.OfflineRecommend, "init_user_1", documents) require.NoError(b, err) err = s.DataClient.BatchInsertFeedback(ctx, feedbacks, true, true, true) require.NoError(b, err) @@ -886,7 +886,7 @@ func BenchmarkRecommendFromLatest(b *testing.B) { for batchSize := 10; batchSize <= 1000; batchSize *= 10 { 
b.Run(strconv.Itoa(batchSize), func(b *testing.B) { - documents := make([]cache.Document, batchSize*2) + documents := make([]cache.Score, batchSize*2) expects := make([]string, batchSize) feedbacks := make([]data.Feedback, batchSize) for i := range documents { @@ -903,7 +903,7 @@ func BenchmarkRecommendFromLatest(b *testing.B) { } lo.Reverse(documents) lo.Reverse(expects) - err := s.CacheClient.AddDocuments(ctx, cache.LatestItems, "", documents) + err := s.CacheClient.AddScores(ctx, cache.LatestItems, "", documents) require.NoError(b, err) err = s.DataClient.BatchInsertFeedback(ctx, feedbacks, true, true, true) require.NoError(b, err) @@ -938,7 +938,7 @@ func BenchmarkRecommendFromItemBased(b *testing.B) { for batchSize := 10; batchSize <= 1000; batchSize *= 10 { b.Run(strconv.Itoa(batchSize), func(b *testing.B) { // insert user feedbacks - documents := make([]cache.Document, batchSize*2) + documents := make([]cache.Score, batchSize*2) for i := range documents { documents[i].Id = fmt.Sprintf("init_item_%d", i) documents[i].Score = float64(i) @@ -958,7 +958,7 @@ func BenchmarkRecommendFromItemBased(b *testing.B) { // insert user neighbors for i := 0; i < s.Config.Recommend.Online.NumFeedbackFallbackItemBased; i++ { - err := s.CacheClient.AddDocuments(ctx, cache.ItemNeighbors, fmt.Sprintf("init_item_%d", i), documents) + err := s.CacheClient.AddScores(ctx, cache.ItemNeighbors, fmt.Sprintf("init_item_%d", i), documents) require.NoError(b, err) } diff --git a/server/rest.go b/server/rest.go index 98ae7c40c..c0bab8ebb 100644 --- a/server/rest.go +++ b/server/rest.go @@ -418,8 +418,8 @@ func (s *RestServer) CreateWebService() { Param(ws.PathParameter("user-id", "ID of the user to get recommendation").DataType("string")). Param(ws.QueryParameter("n", "Number of returned items").DataType("integer")). Param(ws.QueryParameter("offset", "Offset of returned items").DataType("integer")). - Returns(http.StatusOK, "OK", []cache.Document{}). 
- Writes([]cache.Document{})) + Returns(http.StatusOK, "OK", []cache.Score{}). + Writes([]cache.Score{})) ws.Route(ws.GET("/intermediate/recommend/{user-id}/{category}").To(s.getCollaborative). Doc("Get the collaborative filtering recommendation for a user"). Metadata(restfulspec.KeyOpenAPITags, []string{DetractedAPITag}). @@ -428,8 +428,8 @@ func (s *RestServer) CreateWebService() { Param(ws.PathParameter("category", "Category of returned items.").DataType("string")). Param(ws.QueryParameter("n", "Number of returned items").DataType("integer")). Param(ws.QueryParameter("offset", "Offset of returned items").DataType("integer")). - Returns(http.StatusOK, "OK", []cache.Document{}). - Writes([]cache.Document{})) + Returns(http.StatusOK, "OK", []cache.Score{}). + Writes([]cache.Score{})) // Get popular items ws.Route(ws.GET("/popular").To(s.getPopular). @@ -439,8 +439,8 @@ func (s *RestServer) CreateWebService() { Param(ws.QueryParameter("n", "Number of returned recommendations").DataType("integer")). Param(ws.QueryParameter("offset", "Offset of returned recommendations").DataType("integer")). Param(ws.QueryParameter("user-id", "Remove read items of a user").DataType("string")). - Returns(http.StatusOK, "OK", []cache.Document{}). - Writes([]cache.Document{})) + Returns(http.StatusOK, "OK", []cache.Score{}). + Writes([]cache.Score{})) ws.Route(ws.GET("/popular/{category}").To(s.getPopular). Doc("Get popular items in category."). Metadata(restfulspec.KeyOpenAPITags, []string{RecommendationAPITag}). @@ -449,8 +449,8 @@ func (s *RestServer) CreateWebService() { Param(ws.QueryParameter("n", "Number of returned items").DataType("integer")). Param(ws.QueryParameter("offset", "Offset of returned items").DataType("integer")). Param(ws.QueryParameter("user-id", "Remove read items of a user").DataType("string")). - Returns(http.StatusOK, "OK", []cache.Document{}). - Writes([]cache.Document{})) + Returns(http.StatusOK, "OK", []cache.Score{}). 
+ Writes([]cache.Score{})) // Get latest items ws.Route(ws.GET("/latest").To(s.getLatest). Doc("Get the latest items."). @@ -459,8 +459,8 @@ func (s *RestServer) CreateWebService() { Param(ws.QueryParameter("n", "Number of returned items").DataType("integer")). Param(ws.QueryParameter("offset", "Offset of returned items").DataType("integer")). Param(ws.QueryParameter("user-id", "Remove read items of a user").DataType("string")). - Returns(http.StatusOK, "OK", []cache.Document{}). - Writes([]cache.Document{})) + Returns(http.StatusOK, "OK", []cache.Score{}). + Writes([]cache.Score{})) ws.Route(ws.GET("/latest/{category}").To(s.getLatest). Doc("Get the latest items in category."). Metadata(restfulspec.KeyOpenAPITags, []string{RecommendationAPITag}). @@ -469,8 +469,8 @@ func (s *RestServer) CreateWebService() { Param(ws.QueryParameter("n", "Number of returned items").DataType("integer")). Param(ws.QueryParameter("offset", "Offset of returned items").DataType("integer")). Param(ws.QueryParameter("user-id", "Remove read items of a user").DataType("string")). - Returns(http.StatusOK, "OK", []cache.Document{}). - Writes([]cache.Document{})) + Returns(http.StatusOK, "OK", []cache.Score{}). + Writes([]cache.Score{})) // Get neighbors ws.Route(ws.GET("/item/{item-id}/neighbors/").To(s.getItemNeighbors). Doc("Get neighbors of a item"). @@ -479,8 +479,8 @@ func (s *RestServer) CreateWebService() { Param(ws.PathParameter("item-id", "ID of the item to get neighbors").DataType("string")). Param(ws.QueryParameter("n", "Number of returned items").DataType("integer")). Param(ws.QueryParameter("offset", "Offset of returned items").DataType("integer")). - Returns(http.StatusOK, "OK", []cache.Document{}). - Writes([]cache.Document{})) + Returns(http.StatusOK, "OK", []cache.Score{}). + Writes([]cache.Score{})) ws.Route(ws.GET("/item/{item-id}/neighbors/{category}").To(s.getItemNeighbors). Doc("Get neighbors of a item in category."). 
Metadata(restfulspec.KeyOpenAPITags, []string{RecommendationAPITag}). @@ -489,8 +489,8 @@ func (s *RestServer) CreateWebService() { Param(ws.PathParameter("category", "Category of returned items").DataType("string")). Param(ws.QueryParameter("n", "Number of returned items").DataType("integer")). Param(ws.QueryParameter("offset", "Offset of returned items").DataType("integer")). - Returns(http.StatusOK, "OK", []cache.Document{}). - Writes([]cache.Document{})) + Returns(http.StatusOK, "OK", []cache.Score{}). + Writes([]cache.Score{})) ws.Route(ws.GET("/user/{user-id}/neighbors/").To(s.getUserNeighbors). Doc("Get neighbors of a user."). Metadata(restfulspec.KeyOpenAPITags, []string{RecommendationAPITag}). @@ -498,8 +498,8 @@ func (s *RestServer) CreateWebService() { Param(ws.PathParameter("user-id", "ID of the user to get neighbors").DataType("string")). Param(ws.QueryParameter("n", "Number of returned users").DataType("integer")). Param(ws.QueryParameter("offset", "Offset of returned users").DataType("integer")). - Returns(http.StatusOK, "OK", []cache.Document{}). - Writes([]cache.Document{})) + Returns(http.StatusOK, "OK", []cache.Score{}). + Writes([]cache.Score{})) ws.Route(ws.GET("/recommend/{user-id}").To(s.getRecommend). Doc("Get recommendation for user."). Metadata(restfulspec.KeyOpenAPITags, []string{RecommendationAPITag}). @@ -531,8 +531,8 @@ func (s *RestServer) CreateWebService() { Param(ws.QueryParameter("n", "Number of returned items").DataType("integer")). Param(ws.QueryParameter("offset", "Offset of returned items").DataType("integer")). Reads([]Feedback{}). - Returns(http.StatusOK, "OK", []cache.Document{}). - Writes([]cache.Document{})) + Returns(http.StatusOK, "OK", []cache.Score{}). + Writes([]cache.Score{})) ws.Route(ws.POST("/session/recommend/{category}").To(s.sessionRecommend). Doc("Get recommendation for session."). Metadata(restfulspec.KeyOpenAPITags, []string{RecommendationAPITag}). 
@@ -541,8 +541,8 @@ func (s *RestServer) CreateWebService() { Param(ws.QueryParameter("n", "Number of returned items").DataType("integer")). Param(ws.QueryParameter("offset", "Offset of returned items").DataType("integer")). Reads([]Feedback{}). - Returns(http.StatusOK, "OK", []cache.Document{}). - Writes([]cache.Document{})) + Returns(http.StatusOK, "OK", []cache.Score{}). + Writes([]cache.Score{})) ws.Route(ws.GET("/measurements/{name}").To(s.getMeasurements). Doc("Get measurements."). @@ -612,7 +612,7 @@ func (s *RestServer) searchDocuments(collection, subset, category string, isItem } // Get the sorted list - items, err := s.CacheClient.SearchDocuments(ctx, collection, subset, []string{category}, offset, end) + items, err := s.CacheClient.SearchScores(ctx, collection, subset, []string{category}, offset, end) if err != nil { InternalServerError(response, err) return @@ -620,7 +620,7 @@ func (s *RestServer) searchDocuments(collection, subset, category string, isItem // Remove read items if userId != "" { - prunedItems := make([]cache.Document, 0, len(items)) + prunedItems := make([]cache.Score, 0, len(items)) for _, item := range items { if !readItems.Contains(item.Id) { prunedItems = append(prunedItems, item) @@ -799,7 +799,7 @@ type Recommender func(ctx *recommendContext) error func (s *RestServer) RecommendOffline(ctx *recommendContext) error { if len(ctx.results) < ctx.n { start := time.Now() - recommendation, err := s.CacheClient.SearchDocuments(ctx.context, cache.OfflineRecommend, ctx.userId, ctx.categories, 0, s.Config.Recommend.CacheSize) + recommendation, err := s.CacheClient.SearchScores(ctx.context, cache.OfflineRecommend, ctx.userId, ctx.categories, 0, s.Config.Recommend.CacheSize) if err != nil { return errors.Trace(err) } @@ -819,7 +819,7 @@ func (s *RestServer) RecommendOffline(ctx *recommendContext) error { func (s *RestServer) RecommendCollaborative(ctx *recommendContext) error { if len(ctx.results) < ctx.n { start := time.Now() - 
collaborativeRecommendation, err := s.CacheClient.SearchDocuments(ctx.context, cache.CollaborativeRecommend, ctx.userId, ctx.categories, 0, s.Config.Recommend.CacheSize) + collaborativeRecommendation, err := s.CacheClient.SearchScores(ctx.context, cache.CollaborativeRecommend, ctx.userId, ctx.categories, 0, s.Config.Recommend.CacheSize) if err != nil { return errors.Trace(err) } @@ -841,7 +841,7 @@ func (s *RestServer) RecommendUserBased(ctx *recommendContext) error { start := time.Now() candidates := make(map[string]float64) // load similar users - similarUsers, err := s.CacheClient.SearchDocuments(ctx.context, cache.UserNeighbors, ctx.userId, []string{""}, 0, s.Config.Recommend.CacheSize) + similarUsers, err := s.CacheClient.SearchScores(ctx.context, cache.UserNeighbors, ctx.userId, []string{""}, 0, s.Config.Recommend.CacheSize) if err != nil { return errors.Trace(err) } @@ -898,7 +898,7 @@ func (s *RestServer) RecommendItemBased(ctx *recommendContext) error { candidates := make(map[string]float64) for _, feedback := range userFeedback { // load similar items - similarItems, err := s.CacheClient.SearchDocuments(ctx.context, cache.ItemNeighbors, feedback.ItemId, ctx.categories, 0, s.Config.Recommend.CacheSize) + similarItems, err := s.CacheClient.SearchScores(ctx.context, cache.ItemNeighbors, feedback.ItemId, ctx.categories, 0, s.Config.Recommend.CacheSize) if err != nil { return errors.Trace(err) } @@ -928,7 +928,7 @@ func (s *RestServer) RecommendItemBased(ctx *recommendContext) error { func (s *RestServer) RecommendLatest(ctx *recommendContext) error { if len(ctx.results) < ctx.n { start := time.Now() - items, err := s.CacheClient.SearchDocuments(ctx.context, cache.LatestItems, "", ctx.categories, 0, s.Config.Recommend.CacheSize) + items, err := s.CacheClient.SearchScores(ctx.context, cache.LatestItems, "", ctx.categories, 0, s.Config.Recommend.CacheSize) if err != nil { return errors.Trace(err) } @@ -948,7 +948,7 @@ func (s *RestServer) RecommendLatest(ctx 
*recommendContext) error { func (s *RestServer) RecommendPopular(ctx *recommendContext) error { if len(ctx.results) < ctx.n { start := time.Now() - items, err := s.CacheClient.SearchDocuments(ctx.context, cache.PopularItems, "", ctx.categories, 0, s.Config.Recommend.CacheSize) + items, err := s.CacheClient.SearchScores(ctx.context, cache.PopularItems, "", ctx.categories, 0, s.Config.Recommend.CacheSize) if err != nil { return errors.Trace(err) } @@ -1090,7 +1090,7 @@ func (s *RestServer) sessionRecommend(request *restful.Request, response *restfu usedFeedbackCount := 0 for _, feedback := range userFeedback { // load similar items - similarItems, err := s.CacheClient.SearchDocuments(ctx, cache.ItemNeighbors, feedback.ItemId, []string{category}, 0, s.Config.Recommend.CacheSize) + similarItems, err := s.CacheClient.SearchScores(ctx, cache.ItemNeighbors, feedback.ItemId, []string{category}, 0, s.Config.Recommend.CacheSize) if err != nil { BadRequest(response, err) return @@ -1116,8 +1116,8 @@ func (s *RestServer) sessionRecommend(request *restful.Request, response *restfu filter.Push(id, score) } names, scores := filter.PopAll() - result := lo.Map(names, func(_ string, i int) cache.Document { - return cache.Document{ + result := lo.Map(names, func(_ string, i int) cache.Score { + return cache.Score{ Id: names[i], Score: scores[i], } @@ -1378,7 +1378,7 @@ func (s *RestServer) batchInsertItems(ctx context.Context, response *restful.Res Comment: item.Comment, }) // insert to latest items cache - if err = s.CacheClient.AddDocuments(ctx, cache.LatestItems, "", []cache.Document{{ + if err = s.CacheClient.AddScores(ctx, cache.LatestItems, "", []cache.Score{{ Id: item.ItemId, Score: float64(timestamp.Unix()), Categories: withWildCard(item.Categories), @@ -1388,7 +1388,7 @@ func (s *RestServer) batchInsertItems(ctx context.Context, response *restful.Res return } // update items cache - if err = s.CacheClient.UpdateDocuments(ctx, cache.ItemCache, item.ItemId, 
cache.DocumentPatch{ + if err = s.CacheClient.UpdateScores(ctx, cache.ItemCache, item.ItemId, cache.ScorePatch{ Categories: withWildCard(item.Categories), IsHidden: &item.IsHidden, }); err != nil { @@ -1492,21 +1492,21 @@ func (s *RestServer) modifyItem(request *restful.Request, response *restful.Resp } // remove hidden item from cache if patch.IsHidden != nil { - if err := s.CacheClient.UpdateDocuments(ctx, cache.ItemCache, itemId, cache.DocumentPatch{IsHidden: patch.IsHidden}); err != nil { + if err := s.CacheClient.UpdateScores(ctx, cache.ItemCache, itemId, cache.ScorePatch{IsHidden: patch.IsHidden}); err != nil { InternalServerError(response, err) return } } // add item to latest items cache if patch.Timestamp != nil { - if err := s.CacheClient.UpdateDocuments(ctx, []string{cache.LatestItems}, itemId, cache.DocumentPatch{Score: proto.Float64(float64(patch.Timestamp.Unix()))}); err != nil { + if err := s.CacheClient.UpdateScores(ctx, []string{cache.LatestItems}, itemId, cache.ScorePatch{Score: proto.Float64(float64(patch.Timestamp.Unix()))}); err != nil { InternalServerError(response, err) return } } // update categories in cache if patch.Categories != nil { - if err := s.CacheClient.UpdateDocuments(ctx, cache.ItemCache, itemId, cache.DocumentPatch{Categories: withWildCard(patch.Categories)}); err != nil { + if err := s.CacheClient.UpdateScores(ctx, cache.ItemCache, itemId, cache.ScorePatch{Categories: withWildCard(patch.Categories)}); err != nil { InternalServerError(response, err) return } @@ -1580,7 +1580,7 @@ func (s *RestServer) deleteItem(request *restful.Request, response *restful.Resp return } // delete item from cache - if err := s.CacheClient.DeleteDocuments(ctx, cache.ItemCache, cache.DocumentCondition{Id: &itemId}); err != nil { + if err := s.CacheClient.DeleteScores(ctx, cache.ItemCache, cache.ScoreCondition{Id: &itemId}); err != nil { InternalServerError(response, err) return } @@ -1610,7 +1610,7 @@ func (s *RestServer) insertItemCategory(request 
*restful.Request, response *rest return } // insert category to cache - if err = s.CacheClient.UpdateDocuments(ctx, cache.ItemCache, itemId, cache.DocumentPatch{Categories: withWildCard(item.Categories)}); err != nil { + if err = s.CacheClient.UpdateScores(ctx, cache.ItemCache, itemId, cache.ScorePatch{Categories: withWildCard(item.Categories)}); err != nil { InternalServerError(response, err) return } @@ -1639,7 +1639,7 @@ func (s *RestServer) deleteItemCategory(request *restful.Request, response *rest } item.Categories = categories // delete category from cache - if err = s.CacheClient.UpdateDocuments(ctx, cache.ItemCache, itemId, cache.DocumentPatch{Categories: withWildCard(categories)}); err != nil { + if err = s.CacheClient.UpdateScores(ctx, cache.ItemCache, itemId, cache.ScorePatch{Categories: withWildCard(categories)}); err != nil { InternalServerError(response, err) return } diff --git a/server/rest_test.go b/server/rest_test.go index a30670e4e..95c544fb9 100644 --- a/server/rest_test.go +++ b/server/rest_test.go @@ -243,7 +243,7 @@ func (suite *ServerTestSuite) TestItems() { }, } // insert popular scores - err := suite.CacheClient.AddDocuments(ctx, cache.PopularItems, "", []cache.Document{ + err := suite.CacheClient.AddScores(ctx, cache.PopularItems, "", []cache.Score{ {Id: "0", Score: 10}, {Id: "2", Score: 12}, {Id: "4", Score: 14}, @@ -297,7 +297,7 @@ func (suite *ServerTestSuite) TestItems() { }). Expect(t). Status(http.StatusOK). - Body(suite.marshal([]cache.Document{ + Body(suite.marshal([]cache.Score{ {Id: items[3].ItemId, Score: float64(items[3].Timestamp.Unix())}, {Id: items[1].ItemId, Score: float64(items[1].Timestamp.Unix())}, })). @@ -311,7 +311,7 @@ func (suite *ServerTestSuite) TestItems() { }). Expect(t). Status(http.StatusOK). - Body(suite.marshal([]cache.Document{ + Body(suite.marshal([]cache.Score{ {Id: items[3].ItemId, Score: float64(items[3].Timestamp.Unix())}, {Id: items[1].ItemId, Score: float64(items[1].Timestamp.Unix())}, })). 
@@ -326,7 +326,7 @@ func (suite *ServerTestSuite) TestItems() { }). Expect(t). Status(http.StatusOK). - Body(suite.marshal([]cache.Document{ + Body(suite.marshal([]cache.Score{ {Id: items[3].ItemId, Score: 16}, {Id: items[1].ItemId, Score: 12}, })). @@ -340,7 +340,7 @@ func (suite *ServerTestSuite) TestItems() { }). Expect(t). Status(http.StatusOK). - Body(suite.marshal([]cache.Document{ + Body(suite.marshal([]cache.Score{ {Id: items[3].ItemId, Score: 16}, {Id: items[1].ItemId, Score: 12}, })). @@ -377,7 +377,7 @@ func (suite *ServerTestSuite) TestItems() { }). Expect(t). Status(http.StatusOK). - Body(suite.marshal([]cache.Document{ + Body(suite.marshal([]cache.Score{ {Id: items[1].ItemId, Score: float64(items[1].Timestamp.Unix())}, })). End() @@ -390,7 +390,7 @@ func (suite *ServerTestSuite) TestItems() { }). Expect(t). Status(http.StatusOK). - Body(suite.marshal([]cache.Document{ + Body(suite.marshal([]cache.Score{ {Id: items[1].ItemId, Score: float64(items[1].Timestamp.Unix())}, })). End() @@ -404,7 +404,7 @@ func (suite *ServerTestSuite) TestItems() { }). Expect(t). Status(http.StatusOK). - Body(suite.marshal([]cache.Document{ + Body(suite.marshal([]cache.Score{ {Id: items[1].ItemId, Score: 12}, })). End() @@ -417,7 +417,7 @@ func (suite *ServerTestSuite) TestItems() { }). Expect(t). Status(http.StatusOK). - Body(suite.marshal([]cache.Document{ + Body(suite.marshal([]cache.Score{ {Id: items[1].ItemId, Score: 12}, })). End() @@ -475,7 +475,7 @@ func (suite *ServerTestSuite) TestItems() { }). Expect(t). Status(http.StatusOK). - Body(suite.marshal([]cache.Document{ + Body(suite.marshal([]cache.Score{ {Id: "2", Score: float64(timestamp.Unix())}, })). End() @@ -488,7 +488,7 @@ func (suite *ServerTestSuite) TestItems() { }). Expect(t). Status(http.StatusOK). - Body(suite.marshal([]cache.Document{})). + Body(suite.marshal([]cache.Score{})). End() // get popular items apitest.New(). @@ -500,7 +500,7 @@ func (suite *ServerTestSuite) TestItems() { }). Expect(t). 
Status(http.StatusOK). - Body(suite.marshal([]cache.Document{ + Body(suite.marshal([]cache.Score{ {Id: "2", Score: 12}, })). End() @@ -513,7 +513,7 @@ func (suite *ServerTestSuite) TestItems() { }). Expect(t). Status(http.StatusOK). - Body(suite.marshal([]cache.Document{})). + Body(suite.marshal([]cache.Score{})). End() // insert category @@ -550,7 +550,7 @@ func (suite *ServerTestSuite) TestItems() { }). Expect(t). Status(http.StatusOK). - Body(suite.marshal([]cache.Document{ + Body(suite.marshal([]cache.Score{ {Id: "2", Score: float64(timestamp.Unix())}, })). End() @@ -564,7 +564,7 @@ func (suite *ServerTestSuite) TestItems() { }). Expect(t). Status(http.StatusOK). - Body(suite.marshal([]cache.Document{ + Body(suite.marshal([]cache.Score{ {Id: "2", Score: 12}, })). End() @@ -603,7 +603,7 @@ func (suite *ServerTestSuite) TestItems() { }). Expect(t). Status(http.StatusOK). - Body(suite.marshal([]cache.Document{})). + Body(suite.marshal([]cache.Score{})). End() // get popular items apitest.New(). @@ -615,7 +615,7 @@ func (suite *ServerTestSuite) TestItems() { }). Expect(t). Status(http.StatusOK). - Body(suite.marshal([]cache.Document{})). + Body(suite.marshal([]cache.Score{})). 
End() // insert items without timestamp @@ -833,14 +833,14 @@ func (suite *ServerTestSuite) TestNonPersonalizedRecommend() { for i, operator := range operators { suite.T().Run(operator.Name, func(t *testing.T) { // insert documents - documents := []cache.Document{ + documents := []cache.Score{ {Id: strconv.Itoa(i) + "0", Score: 100, Categories: []string{operator.Category}}, {Id: strconv.Itoa(i) + "1", Score: 99, Categories: []string{operator.Category}}, {Id: strconv.Itoa(i) + "2", Score: 98, Categories: []string{operator.Category}}, {Id: strconv.Itoa(i) + "3", Score: 97, Categories: []string{operator.Category}}, {Id: strconv.Itoa(i) + "4", Score: 96, Categories: []string{operator.Category}}, } - err := suite.CacheClient.AddDocuments(ctx, operator.Collection, operator.Subset, documents) + err := suite.CacheClient.AddScores(ctx, operator.Collection, operator.Subset, documents) assert.NoError(t, err) // hidden item apitest.New(). @@ -868,7 +868,7 @@ func (suite *ServerTestSuite) TestNonPersonalizedRecommend() { Header("X-API-Key", apiKey). Expect(t). Status(http.StatusOK). - Body(suite.marshal(([]cache.Document{documents[0], documents[1], documents[2], documents[4]}))). + Body(suite.marshal(([]cache.Score{documents[0], documents[1], documents[2], documents[4]}))). End() apitest.New(). Handler(suite.handler). @@ -879,7 +879,7 @@ func (suite *ServerTestSuite) TestNonPersonalizedRecommend() { "n": "3"}). Expect(t). Status(http.StatusOK). - Body(suite.marshal(([]cache.Document{documents[0], documents[1], documents[2]}))). + Body(suite.marshal(([]cache.Score{documents[0], documents[1], documents[2]}))). End() apitest.New(). Handler(suite.handler). @@ -890,7 +890,7 @@ func (suite *ServerTestSuite) TestNonPersonalizedRecommend() { "n": "3"}). Expect(t). Status(http.StatusOK). - Body(suite.marshal(([]cache.Document{documents[1], documents[2], documents[4]}))). + Body(suite.marshal(([]cache.Score{documents[1], documents[2], documents[4]}))). End() apitest.New(). 
Handler(suite.handler). @@ -901,7 +901,7 @@ func (suite *ServerTestSuite) TestNonPersonalizedRecommend() { "n": "0"}). Expect(t). Status(http.StatusOK). - Body(suite.marshal(([]cache.Document{documents[0], documents[1], documents[2], documents[4]}))). + Body(suite.marshal(([]cache.Score{documents[0], documents[1], documents[2], documents[4]}))). End() apitest.New(). Handler(suite.handler). @@ -913,7 +913,7 @@ func (suite *ServerTestSuite) TestNonPersonalizedRecommend() { "n": "0"}). Expect(t). Status(http.StatusOK). - Body(suite.marshal(([]cache.Document{documents[0], documents[2], documents[4]}))). + Body(suite.marshal(([]cache.Score{documents[0], documents[2], documents[4]}))). End() }) } @@ -1017,7 +1017,7 @@ func (suite *ServerTestSuite) TestGetRecommends() { ctx := context.Background() t := suite.T() // insert hidden items - err := suite.CacheClient.AddDocuments(ctx, cache.OfflineRecommend, "0", []cache.Document{{Id: "0", Score: 100, Categories: []string{""}}}) + err := suite.CacheClient.AddScores(ctx, cache.OfflineRecommend, "0", []cache.Score{{Id: "0", Score: 100, Categories: []string{""}}}) assert.NoError(t, err) // hide item apitest.New(). 
@@ -1041,7 +1041,7 @@ func (suite *ServerTestSuite) TestGetRecommends() { }) assert.NoError(t, err) // insert recommendation - err = suite.CacheClient.AddDocuments(ctx, cache.OfflineRecommend, "0", []cache.Document{ + err = suite.CacheClient.AddScores(ctx, cache.OfflineRecommend, "0", []cache.Score{ {Id: "1", Score: 99, Categories: []string{""}}, {Id: "2", Score: 98, Categories: []string{""}}, {Id: "3", Score: 97, Categories: []string{""}}, @@ -1144,7 +1144,7 @@ func (suite *ServerTestSuite) TestGetRecommendsWithMultiCategories() { ctx := context.Background() t := suite.T() // insert recommendation - err := suite.CacheClient.AddDocuments(ctx, cache.OfflineRecommend, "0", []cache.Document{ + err := suite.CacheClient.AddScores(ctx, cache.OfflineRecommend, "0", []cache.Score{ {Id: "1", Score: 1, Categories: []string{""}}, {Id: "2", Score: 2, Categories: []string{"", "2"}}, {Id: "3", Score: 3, Categories: []string{"", "3"}}, @@ -1175,7 +1175,7 @@ func (suite *ServerTestSuite) TestGetRecommendsWithReplacement() { t := suite.T() suite.Config.Recommend.Replacement.EnableReplacement = true // insert recommendation - err := suite.CacheClient.AddDocuments(ctx, cache.OfflineRecommend, "0", []cache.Document{ + err := suite.CacheClient.AddScores(ctx, cache.OfflineRecommend, "0", []cache.Score{ {Id: "0", Score: 100, Categories: []string{""}}, {Id: "1", Score: 99, Categories: []string{""}}, {Id: "2", Score: 98, Categories: []string{""}}, @@ -1230,7 +1230,7 @@ func (suite *ServerTestSuite) TestServerGetRecommendsFallbackItemBasedSimilar() suite.Config.Recommend.Online.NumFeedbackFallbackItemBased = 4 suite.Config.Recommend.DataSource.PositiveFeedbackTypes = []string{"a"} // insert recommendation - err := suite.CacheClient.AddDocuments(ctx, cache.OfflineRecommend, "0", []cache.Document{ + err := suite.CacheClient.AddScores(ctx, cache.OfflineRecommend, "0", []cache.Score{ {Id: "1", Score: 99}, {Id: "2", Score: 98}, {Id: "3", Score: 97}, @@ -1255,25 +1255,25 @@ func (suite 
*ServerTestSuite) TestServerGetRecommendsFallbackItemBasedSimilar() End() // insert similar items - err = suite.CacheClient.AddDocuments(ctx, cache.ItemNeighbors, "1", []cache.Document{ + err = suite.CacheClient.AddScores(ctx, cache.ItemNeighbors, "1", []cache.Score{ {Id: "2", Score: 100000, Categories: []string{""}}, {Id: "9", Score: 1, Categories: []string{"", "*"}}, }) assert.NoError(t, err) - err = suite.CacheClient.AddDocuments(ctx, cache.ItemNeighbors, "2", []cache.Document{ + err = suite.CacheClient.AddScores(ctx, cache.ItemNeighbors, "2", []cache.Score{ {Id: "3", Score: 100000, Categories: []string{"", "*"}}, {Id: "8", Score: 1, Categories: []string{""}}, {Id: "9", Score: 1, Categories: []string{"", "*"}}, }) assert.NoError(t, err) - err = suite.CacheClient.AddDocuments(ctx, cache.ItemNeighbors, "3", []cache.Document{ + err = suite.CacheClient.AddScores(ctx, cache.ItemNeighbors, "3", []cache.Score{ {Id: "4", Score: 100000, Categories: []string{""}}, {Id: "7", Score: 1, Categories: []string{"", "*"}}, {Id: "8", Score: 1, Categories: []string{""}}, {Id: "9", Score: 1, Categories: []string{"", "*"}}, }) assert.NoError(t, err) - err = suite.CacheClient.AddDocuments(ctx, cache.ItemNeighbors, "4", []cache.Document{ + err = suite.CacheClient.AddScores(ctx, cache.ItemNeighbors, "4", []cache.Score{ {Id: "1", Score: 100000, Categories: []string{"", "*"}}, {Id: "6", Score: 1, Categories: []string{""}}, {Id: "7", Score: 1, Categories: []string{"", "*"}}, @@ -1281,7 +1281,7 @@ func (suite *ServerTestSuite) TestServerGetRecommendsFallbackItemBasedSimilar() {Id: "9", Score: 1, Categories: []string{"", "*"}}, }) assert.NoError(t, err) - err = suite.CacheClient.AddDocuments(ctx, cache.ItemNeighbors, "5", []cache.Document{ + err = suite.CacheClient.AddScores(ctx, cache.ItemNeighbors, "5", []cache.Score{ {Id: "1", Score: 1, Categories: []string{""}}, {Id: "6", Score: 1, Categories: []string{""}}, {Id: "7", Score: 100000, Categories: []string{""}}, @@ -1321,8 +1321,8 @@ func 
(suite *ServerTestSuite) TestGetRecommendsFallbackUserBasedSimilar() { ctx := context.Background() t := suite.T() // insert recommendation - err := suite.CacheClient.AddDocuments(ctx, cache.OfflineRecommend, "0", - []cache.Document{{Id: "1", Score: 99}, {Id: "2", Score: 98}, {Id: "3", Score: 97}, {Id: "4", Score: 96}}) + err := suite.CacheClient.AddScores(ctx, cache.OfflineRecommend, "0", + []cache.Score{{Id: "1", Score: 99}, {Id: "2", Score: 98}, {Id: "3", Score: 97}, {Id: "4", Score: 96}}) assert.NoError(t, err) // insert feedback feedback := []data.Feedback{ @@ -1341,7 +1341,7 @@ func (suite *ServerTestSuite) TestGetRecommendsFallbackUserBasedSimilar() { Body(`{"RowAffected": 4}`). End() // insert similar users - err = suite.CacheClient.AddDocuments(ctx, cache.UserNeighbors, "0", []cache.Document{ + err = suite.CacheClient.AddScores(ctx, cache.UserNeighbors, "0", []cache.Score{ {Id: "1", Score: 2, Categories: []string{""}}, {Id: "2", Score: 1.5, Categories: []string{""}}, {Id: "3", Score: 1, Categories: []string{""}}, @@ -1397,52 +1397,52 @@ func (suite *ServerTestSuite) TestGetRecommendsFallbackPreCached() { ctx := context.Background() t := suite.T() // insert offline recommendation - err := suite.CacheClient.AddDocuments(ctx, cache.OfflineRecommend, "0", []cache.Document{ + err := suite.CacheClient.AddScores(ctx, cache.OfflineRecommend, "0", []cache.Score{ {Id: "1", Score: 99, Categories: []string{""}}, {Id: "2", Score: 98, Categories: []string{""}}, {Id: "3", Score: 97, Categories: []string{""}}, {Id: "4", Score: 96, Categories: []string{""}}}) assert.NoError(t, err) - err = suite.CacheClient.AddDocuments(ctx, cache.OfflineRecommend, "0", []cache.Document{ + err = suite.CacheClient.AddScores(ctx, cache.OfflineRecommend, "0", []cache.Score{ {Id: "101", Score: 99, Categories: []string{"*"}}, {Id: "102", Score: 98, Categories: []string{"*"}}, {Id: "103", Score: 97, Categories: []string{"*"}}, {Id: "104", Score: 96, Categories: []string{"*"}}}) assert.NoError(t, 
err) // insert latest - err = suite.CacheClient.AddDocuments(ctx, cache.LatestItems, "", []cache.Document{ + err = suite.CacheClient.AddScores(ctx, cache.LatestItems, "", []cache.Score{ {Id: "5", Score: 95, Categories: []string{""}}, {Id: "6", Score: 94, Categories: []string{""}}, {Id: "7", Score: 93, Categories: []string{""}}, {Id: "8", Score: 92, Categories: []string{""}}}) assert.NoError(t, err) - err = suite.CacheClient.AddDocuments(ctx, cache.LatestItems, "", []cache.Document{ + err = suite.CacheClient.AddScores(ctx, cache.LatestItems, "", []cache.Score{ {Id: "105", Score: 95, Categories: []string{"*"}}, {Id: "106", Score: 94, Categories: []string{"*"}}, {Id: "107", Score: 93, Categories: []string{"*"}}, {Id: "108", Score: 92, Categories: []string{"*"}}}) assert.NoError(t, err) // insert popular - err = suite.CacheClient.AddDocuments(ctx, cache.PopularItems, "", []cache.Document{ + err = suite.CacheClient.AddScores(ctx, cache.PopularItems, "", []cache.Score{ {Id: "9", Score: 91, Categories: []string{""}}, {Id: "10", Score: 90, Categories: []string{""}}, {Id: "11", Score: 89, Categories: []string{""}}, {Id: "12", Score: 88, Categories: []string{""}}}) assert.NoError(t, err) - err = suite.CacheClient.AddDocuments(ctx, cache.PopularItems, "", []cache.Document{ + err = suite.CacheClient.AddScores(ctx, cache.PopularItems, "", []cache.Score{ {Id: "109", Score: 91, Categories: []string{"*"}}, {Id: "110", Score: 90, Categories: []string{"*"}}, {Id: "111", Score: 89, Categories: []string{"*"}}, {Id: "112", Score: 88, Categories: []string{"*"}}}) assert.NoError(t, err) // insert collaborative filtering - err = suite.CacheClient.AddDocuments(ctx, cache.CollaborativeRecommend, "0", []cache.Document{ + err = suite.CacheClient.AddScores(ctx, cache.CollaborativeRecommend, "0", []cache.Score{ {Id: "13", Score: 79, Categories: []string{""}}, {Id: "14", Score: 78, Categories: []string{""}}, {Id: "15", Score: 77, Categories: []string{""}}, {Id: "16", Score: 76, Categories: 
[]string{""}}}) assert.NoError(t, err) - err = suite.CacheClient.AddDocuments(ctx, cache.CollaborativeRecommend, "0", []cache.Document{ + err = suite.CacheClient.AddScores(ctx, cache.CollaborativeRecommend, "0", []cache.Score{ {Id: "113", Score: 79, Categories: []string{"*"}}, {Id: "114", Score: 78, Categories: []string{"*"}}, {Id: "115", Score: 77, Categories: []string{"*"}}, @@ -1541,26 +1541,26 @@ func (suite *ServerTestSuite) TestSessionRecommend() { suite.Config.Recommend.DataSource.PositiveFeedbackTypes = []string{"a"} // insert similar items - err := suite.CacheClient.AddDocuments(ctx, cache.ItemNeighbors, "1", []cache.Document{ + err := suite.CacheClient.AddScores(ctx, cache.ItemNeighbors, "1", []cache.Score{ {Id: "2", Score: 100000, Categories: []string{""}}, {Id: "9", Score: 1, Categories: []string{"", "*"}}, {Id: "100", Score: 100000, Categories: []string{""}}, }) assert.NoError(t, err) - err = suite.CacheClient.AddDocuments(ctx, cache.ItemNeighbors, "2", []cache.Document{ + err = suite.CacheClient.AddScores(ctx, cache.ItemNeighbors, "2", []cache.Score{ {Id: "3", Score: 100000, Categories: []string{"", "*"}}, {Id: "8", Score: 1, Categories: []string{""}}, {Id: "9", Score: 1, Categories: []string{"", "*"}}, }) assert.NoError(t, err) - err = suite.CacheClient.AddDocuments(ctx, cache.ItemNeighbors, "3", []cache.Document{ + err = suite.CacheClient.AddScores(ctx, cache.ItemNeighbors, "3", []cache.Score{ {Id: "4", Score: 100000, Categories: []string{""}}, {Id: "7", Score: 1, Categories: []string{"", "*"}}, {Id: "8", Score: 1, Categories: []string{""}}, {Id: "9", Score: 1, Categories: []string{"", "*"}}, }) assert.NoError(t, err) - err = suite.CacheClient.AddDocuments(ctx, cache.ItemNeighbors, "4", []cache.Document{ + err = suite.CacheClient.AddScores(ctx, cache.ItemNeighbors, "4", []cache.Score{ {Id: "1", Score: 100000, Categories: []string{"", "*"}}, {Id: "6", Score: 1, Categories: []string{""}}, {Id: "7", Score: 1, Categories: []string{"", "*"}}, @@ -1568,7 
+1568,7 @@ func (suite *ServerTestSuite) TestSessionRecommend() { {Id: "9", Score: 1, Categories: []string{"", "*"}}, }) assert.NoError(t, err) - err = suite.CacheClient.AddDocuments(ctx, cache.ItemNeighbors, "5", []cache.Document{ + err = suite.CacheClient.AddScores(ctx, cache.ItemNeighbors, "5", []cache.Score{ {Id: "1", Score: 1, Categories: []string{""}}, {Id: "6", Score: 1, Categories: []string{""}}, {Id: "7", Score: 100000, Categories: []string{""}}, @@ -1606,7 +1606,7 @@ func (suite *ServerTestSuite) TestSessionRecommend() { JSON(feedback). Expect(t). Status(http.StatusOK). - Body(suite.marshal([]cache.Document{{Id: "9", Score: 4}, {Id: "8", Score: 3}, {Id: "7", Score: 2}})). + Body(suite.marshal([]cache.Score{{Id: "9", Score: 4}, {Id: "8", Score: 3}, {Id: "7", Score: 2}})). End() apitest.New(). Handler(suite.handler). @@ -1618,7 +1618,7 @@ func (suite *ServerTestSuite) TestSessionRecommend() { JSON(feedback). Expect(t). Status(http.StatusOK). - Body(suite.marshal([]cache.Document(nil))). + Body(suite.marshal([]cache.Score(nil))). End() suite.Config.Recommend.Online.FallbackRecommend = []string{"item_based"} apitest.New(). @@ -1631,7 +1631,7 @@ func (suite *ServerTestSuite) TestSessionRecommend() { JSON(feedback). Expect(t). Status(http.StatusOK). - Body(suite.marshal([]cache.Document{{Id: "9", Score: 4}, {Id: "7", Score: 2}})). + Body(suite.marshal([]cache.Score{{Id: "9", Score: 4}, {Id: "7", Score: 2}})). 
End() } @@ -1660,22 +1660,22 @@ func (suite *ServerTestSuite) TestVisibility() { End() // insert cache - var documents []cache.Document + var documents []cache.Score for i := range items { - documents = append(documents, cache.Document{ + documents = append(documents, cache.Score{ Id: strconv.Itoa(i), Score: float64(time.Date(1989, 6, i+1, 1, 1, 1, 1, time.UTC).Unix()), Categories: []string{"", "a"}, }) } lo.Reverse(documents) - err := suite.CacheClient.AddDocuments(ctx, cache.LatestItems, "", documents) + err := suite.CacheClient.AddScores(ctx, cache.LatestItems, "", documents) assert.NoError(t, err) - err = suite.CacheClient.AddDocuments(ctx, cache.PopularItems, "", documents) + err = suite.CacheClient.AddScores(ctx, cache.PopularItems, "", documents) assert.NoError(t, err) - err = suite.CacheClient.AddDocuments(ctx, cache.ItemNeighbors, "100", documents) + err = suite.CacheClient.AddScores(ctx, cache.ItemNeighbors, "100", documents) assert.NoError(t, err) - err = suite.CacheClient.AddDocuments(ctx, cache.OfflineRecommend, "100", documents) + err = suite.CacheClient.AddScores(ctx, cache.OfflineRecommend, "100", documents) assert.NoError(t, err) // delete item diff --git a/storage/cache/database.go b/storage/cache/database.go index 6fa1ce143..60b58d636 100644 --- a/storage/cache/database.go +++ b/storage/cache/database.go @@ -182,7 +182,7 @@ func (r *ReturnValue) Time() (time.Time, error) { return t.In(time.UTC), nil } -type Document struct { +type Score struct { Id string Score float64 IsHidden bool `json:"-"` @@ -190,13 +190,13 @@ type Document struct { Timestamp time.Time `json:"-"` } -func SortDocuments(documents []Document) { +func SortDocuments(documents []Score) { sort.Slice(documents, func(i, j int) bool { return documents[i].Score > documents[j].Score }) } -func ConvertDocumentsToValues(documents []Document) []string { +func ConvertDocumentsToValues(documents []Score) []string { values := make([]string, len(documents)) for i := range values { values[i] = 
documents[i].Id @@ -208,13 +208,13 @@ func ConvertDocumentsToValues(documents []Document) []string { // In old recommender system, the recommendation is genereated per category. // In the new recommender system, the recommendation is generated globally. type DocumentAggregator struct { - Documents map[string]*Document + Documents map[string]*Score Timestamp time.Time } func NewDocumentAggregator(timestamp time.Time) *DocumentAggregator { return &DocumentAggregator{ - Documents: make(map[string]*Document), + Documents: make(map[string]*Score), Timestamp: timestamp, } } @@ -222,7 +222,7 @@ func NewDocumentAggregator(timestamp time.Time) *DocumentAggregator { func (aggregator *DocumentAggregator) Add(category string, values []string, scores []float64) { for i, value := range values { if _, ok := aggregator.Documents[value]; !ok { - aggregator.Documents[value] = &Document{ + aggregator.Documents[value] = &Score{ Id: value, Score: scores[i], Categories: []string{category}, @@ -235,8 +235,8 @@ func (aggregator *DocumentAggregator) Add(category string, values []string, scor } } -func (aggregator *DocumentAggregator) ToSlice() []Document { - documents := make([]Document, 0, len(aggregator.Documents)) +func (aggregator *DocumentAggregator) ToSlice() []Score { + documents := make([]Score, 0, len(aggregator.Documents)) for _, document := range aggregator.Documents { sort.Strings(document.Categories) documents = append(documents, *document) @@ -244,20 +244,20 @@ func (aggregator *DocumentAggregator) ToSlice() []Document { return documents } -type DocumentCondition struct { +type ScoreCondition struct { Subset *string Id *string Before *time.Time } -func (condition *DocumentCondition) Check() error { +func (condition *ScoreCondition) Check() error { if condition.Id == nil && condition.Before == nil && condition.Subset == nil { return errors.NotValidf("document condition") } return nil } -type DocumentPatch struct { +type ScorePatch struct { IsHidden *bool Categories []string 
Score *float64 @@ -290,10 +290,10 @@ type Database interface { Pop(ctx context.Context, name string) (string, error) Remain(ctx context.Context, name string) (int64, error) - AddDocuments(ctx context.Context, collection, subset string, documents []Document) error - SearchDocuments(ctx context.Context, collection, subset string, query []string, begin, end int) ([]Document, error) - DeleteDocuments(ctx context.Context, collection []string, condition DocumentCondition) error - UpdateDocuments(ctx context.Context, collection []string, id string, patch DocumentPatch) error + AddScores(ctx context.Context, collection, subset string, documents []Score) error + SearchScores(ctx context.Context, collection, subset string, query []string, begin, end int) ([]Score, error) + DeleteScores(ctx context.Context, collection []string, condition ScoreCondition) error + UpdateScores(ctx context.Context, collection []string, id string, patch ScorePatch) error AddTimeSeriesPoints(ctx context.Context, points []TimeSeriesPoint) error GetTimeSeriesPoints(ctx context.Context, name string, begin, end time.Time) ([]TimeSeriesPoint, error) diff --git a/storage/cache/database_test.go b/storage/cache/database_test.go index 9bd8235d7..5046d603d 100644 --- a/storage/cache/database_test.go +++ b/storage/cache/database_test.go @@ -228,7 +228,7 @@ func (suite *baseTestSuite) TestPushPop() { func (suite *baseTestSuite) TestDocument() { ts := time.Date(2023, 1, 1, 0, 0, 0, 0, time.UTC) ctx := context.Background() - err := suite.AddDocuments(ctx, "a", "", []Document{{ + err := suite.AddScores(ctx, "a", "", []Score{{ Id: "0", Score: math.MaxFloat64, IsHidden: true, @@ -236,14 +236,14 @@ func (suite *baseTestSuite) TestDocument() { Timestamp: ts, }}) suite.NoError(err) - err = suite.AddDocuments(ctx, "a", "", []Document{{ + err = suite.AddScores(ctx, "a", "", []Score{{ Id: "1", Score: 100, Categories: []string{"a", "b"}, Timestamp: ts, }}) suite.NoError(err) - err = suite.AddDocuments(ctx, "a", "", 
[]Document{ + err = suite.AddScores(ctx, "a", "", []Score{ { Id: "1", Score: 1, @@ -276,7 +276,7 @@ func (suite *baseTestSuite) TestDocument() { }, }) suite.NoError(err) - err = suite.AddDocuments(ctx, "b", "", []Document{{ + err = suite.AddScores(ctx, "b", "", []Score{{ Id: "6", Score: 6, Categories: []string{"b"}, @@ -285,15 +285,15 @@ func (suite *baseTestSuite) TestDocument() { suite.NoError(err) // search documents - documents, err := suite.SearchDocuments(ctx, "a", "", []string{"b"}, 1, 3) + documents, err := suite.SearchScores(ctx, "a", "", []string{"b"}, 1, 3) suite.NoError(err) - suite.Equal([]Document{ + suite.Equal([]Score{ {Id: "3", Score: 3, Categories: []string{"b"}, Timestamp: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC)}, {Id: "2", Score: 2, Categories: []string{"b", "c"}, Timestamp: ts}, }, documents) - documents, err = suite.SearchDocuments(ctx, "a", "", []string{"b"}, 0, -1) + documents, err = suite.SearchScores(ctx, "a", "", []string{"b"}, 0, -1) suite.NoError(err) - suite.Equal([]Document{ + suite.Equal([]Score{ {Id: "5", Score: 5, Categories: []string{"b"}, Timestamp: ts}, {Id: "3", Score: 3, Categories: []string{"b"}, Timestamp: time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC)}, {Id: "2", Score: 2, Categories: []string{"b", "c"}, Timestamp: ts}, @@ -301,45 +301,45 @@ func (suite *baseTestSuite) TestDocument() { }, documents) // search documents with empty category - documents, err = suite.SearchDocuments(ctx, "a", "", []string{""}, 0, -1) + documents, err = suite.SearchScores(ctx, "a", "", []string{""}, 0, -1) suite.NoError(err) - suite.Equal([]Document{{Id: "4", Score: 4, Categories: []string{""}, Timestamp: ts}}, documents) + suite.Equal([]Score{{Id: "4", Score: 4, Categories: []string{""}, Timestamp: ts}}, documents) // delete nothing - err = suite.DeleteDocuments(ctx, []string{"a"}, DocumentCondition{}) + err = suite.DeleteScores(ctx, []string{"a"}, ScoreCondition{}) suite.ErrorIs(err, errors.NotValid) // delete by value - err = 
suite.DeleteDocuments(ctx, []string{"a"}, DocumentCondition{Id: proto.String("5")}) + err = suite.DeleteScores(ctx, []string{"a"}, ScoreCondition{Id: proto.String("5")}) suite.NoError(err) - documents, err = suite.SearchDocuments(ctx, "a", "", []string{"b"}, 0, 1) + documents, err = suite.SearchScores(ctx, "a", "", []string{"b"}, 0, 1) suite.NoError(err) suite.Len(documents, 1) suite.Equal("3", documents[0].Id) // delete by timestamp - err = suite.DeleteDocuments(ctx, []string{"a"}, DocumentCondition{Before: lo.ToPtr(time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC))}) + err = suite.DeleteScores(ctx, []string{"a"}, ScoreCondition{Before: lo.ToPtr(time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC))}) suite.NoError(err) - documents, err = suite.SearchDocuments(ctx, "a", "", []string{"b"}, 0, 1) + documents, err = suite.SearchScores(ctx, "a", "", []string{"b"}, 0, 1) suite.NoError(err) suite.Len(documents, 1) suite.Equal("2", documents[0].Id) // update categories - err = suite.UpdateDocuments(ctx, []string{"a"}, "2", DocumentPatch{Categories: []string{"c", "s"}}) + err = suite.UpdateScores(ctx, []string{"a"}, "2", ScorePatch{Categories: []string{"c", "s"}}) suite.NoError(err) - documents, err = suite.SearchDocuments(ctx, "a", "", []string{"s"}, 0, 1) + documents, err = suite.SearchScores(ctx, "a", "", []string{"s"}, 0, 1) suite.NoError(err) suite.Len(documents, 1) suite.Equal("2", documents[0].Id) - err = suite.UpdateDocuments(ctx, []string{"a"}, "2", DocumentPatch{Categories: []string{"c"}}) + err = suite.UpdateScores(ctx, []string{"a"}, "2", ScorePatch{Categories: []string{"c"}}) suite.NoError(err) - documents, err = suite.SearchDocuments(ctx, "a", "", []string{"s"}, 0, 1) + documents, err = suite.SearchScores(ctx, "a", "", []string{"s"}, 0, 1) suite.NoError(err) suite.Empty(documents) // update is hidden - err = suite.UpdateDocuments(ctx, []string{"a"}, "0", DocumentPatch{IsHidden: proto.Bool(false)}) + err = suite.UpdateScores(ctx, []string{"a"}, "0", ScorePatch{IsHidden: 
proto.Bool(false)}) suite.NoError(err) - documents, err = suite.SearchDocuments(ctx, "a", "", []string{"b"}, 0, 1) + documents, err = suite.SearchScores(ctx, "a", "", []string{"b"}, 0, 1) suite.NoError(err) suite.Len(documents, 1) suite.Equal("0", documents[0].Id) @@ -348,7 +348,7 @@ func (suite *baseTestSuite) TestDocument() { func (suite *baseTestSuite) TestSubsetDocument() { ts := time.Date(2023, 1, 1, 0, 0, 0, 0, time.UTC) ctx := context.Background() - err := suite.AddDocuments(ctx, "a", "a", []Document{ + err := suite.AddScores(ctx, "a", "a", []Score{ { Id: "1", Score: 1, @@ -369,7 +369,7 @@ func (suite *baseTestSuite) TestSubsetDocument() { }, }) suite.NoError(err) - err = suite.AddDocuments(ctx, "b", "", []Document{ + err = suite.AddScores(ctx, "b", "", []Score{ { Id: "4", Score: 4, @@ -392,49 +392,49 @@ func (suite *baseTestSuite) TestSubsetDocument() { suite.NoError(err) // search documents - documents, err := suite.SearchDocuments(ctx, "a", "a", []string{"b"}, 0, -1) + documents, err := suite.SearchScores(ctx, "a", "a", []string{"b"}, 0, -1) suite.NoError(err) - suite.Equal([]Document{ + suite.Equal([]Score{ {Id: "3", Score: 3, Categories: []string{"b"}, Timestamp: ts}, {Id: "2", Score: 2, Categories: []string{"b", "c"}, Timestamp: ts}, {Id: "1", Score: 1, Categories: []string{"a", "b"}, Timestamp: ts}, }, documents) // update categories - err = suite.UpdateDocuments(ctx, []string{"a", "b"}, "2", DocumentPatch{Categories: []string{"b", "s"}}) + err = suite.UpdateScores(ctx, []string{"a", "b"}, "2", ScorePatch{Categories: []string{"b", "s"}}) suite.NoError(err) - documents, err = suite.SearchDocuments(ctx, "a", "a", []string{"s"}, 0, 1) + documents, err = suite.SearchScores(ctx, "a", "a", []string{"s"}, 0, 1) suite.NoError(err) suite.Len(documents, 1) suite.Equal("2", documents[0].Id) - documents, err = suite.SearchDocuments(ctx, "b", "", []string{"s"}, 0, 1) + documents, err = suite.SearchScores(ctx, "b", "", []string{"s"}, 0, 1) suite.NoError(err) 
suite.Len(documents, 1) suite.Equal("2", documents[0].Id) // delete by value - err = suite.DeleteDocuments(ctx, []string{"a", "b"}, DocumentCondition{Id: proto.String("3")}) + err = suite.DeleteScores(ctx, []string{"a", "b"}, ScoreCondition{Id: proto.String("3")}) suite.NoError(err) - documents, err = suite.SearchDocuments(ctx, "a", "a", []string{"b"}, 0, 1) + documents, err = suite.SearchScores(ctx, "a", "a", []string{"b"}, 0, 1) suite.NoError(err) suite.Len(documents, 1) suite.Equal("2", documents[0].Id) - documents, err = suite.SearchDocuments(ctx, "b", "", []string{"b"}, 0, 1) + documents, err = suite.SearchScores(ctx, "b", "", []string{"b"}, 0, 1) suite.NoError(err) suite.Len(documents, 1) suite.Equal("2", documents[0].Id) // delete in subset - err = suite.DeleteDocuments(ctx, []string{"a", "b"}, DocumentCondition{ + err = suite.DeleteScores(ctx, []string{"a", "b"}, ScoreCondition{ Subset: proto.String("a"), Id: proto.String("2"), }) suite.NoError(err) - documents, err = suite.SearchDocuments(ctx, "a", "a", []string{"b"}, 0, 1) + documents, err = suite.SearchScores(ctx, "a", "a", []string{"b"}, 0, 1) suite.NoError(err) suite.Len(documents, 1) suite.Equal("1", documents[0].Id) - documents, err = suite.SearchDocuments(ctx, "b", "", []string{"b"}, 0, 1) + documents, err = suite.SearchScores(ctx, "b", "", []string{"b"}, 0, 1) suite.NoError(err) suite.Len(documents, 1) suite.Equal("2", documents[0].Id) @@ -491,22 +491,22 @@ func primeFactor(n int) []int { } func benchmark(b *testing.B, database Database) { - b.Run("AddDocuments", func(b *testing.B) { + b.Run("AddScores", func(b *testing.B) { benchmarkAddDocuments(b, database) }) - b.Run("SearchDocuments", func(b *testing.B) { + b.Run("SearchScores", func(b *testing.B) { benchmarkSearchDocuments(b, database) }) - b.Run("UpdateDocuments", func(b *testing.B) { + b.Run("UpdateScores", func(b *testing.B) { benchmarkUpdateDocuments(b, database) }) } func benchmarkAddDocuments(b *testing.B, database Database) { ctx := 
context.Background() - var documents []Document + var documents []Score for i := 1; i <= b.N; i++ { - documents = append(documents, Document{ + documents = append(documents, Score{ Id: strconv.Itoa(i), Score: float64(-i), Categories: lo.Map(primeFactor(i), func(n, _ int) string { return strconv.Itoa(n) }), @@ -514,23 +514,23 @@ func benchmarkAddDocuments(b *testing.B, database Database) { }) } b.ResetTimer() - err := database.AddDocuments(ctx, "a", "", documents) + err := database.AddScores(ctx, "a", "", documents) assert.NoError(b, err) } func benchmarkSearchDocuments(b *testing.B, database Database) { // insert data ctx := context.Background() - var documents []Document + var documents []Score for i := 1; i <= benchmarkDataSize; i++ { - documents = append(documents, Document{ + documents = append(documents, Score{ Id: strconv.Itoa(i), Score: float64(-i), Categories: lo.Map(primeFactor(i), func(n, _ int) string { return strconv.Itoa(n) }), Timestamp: time.Date(2023, 1, 1, 0, 0, 0, 0, time.UTC), }) } - err := database.AddDocuments(ctx, "a", "", documents) + err := database.AddScores(ctx, "a", "", documents) assert.NoError(b, err) // search data b.ResetTimer() @@ -538,7 +538,7 @@ func benchmarkSearchDocuments(b *testing.B, database Database) { // select a random prime p := primeTable[rand.Intn(len(primeTable))] // search documents - r, err := database.SearchDocuments(ctx, "a", "", []string{strconv.Itoa(p)}, 0, 10) + r, err := database.SearchScores(ctx, "a", "", []string{strconv.Itoa(p)}, 0, 10) assert.NoError(b, err) assert.NotEmpty(b, r) } @@ -551,7 +551,7 @@ func benchmarkUpdateDocuments(b *testing.B, database Database) { // select a random number n := rand.Intn(benchmarkDataSize) + 1 // update documents - err := database.UpdateDocuments(ctx, []string{"a"}, strconv.Itoa(n), DocumentPatch{ + err := database.UpdateScores(ctx, []string{"a"}, strconv.Itoa(n), ScorePatch{ Score: proto.Float64(float64(n)), }) assert.NoError(b, err) diff --git a/storage/cache/mongodb.go 
b/storage/cache/mongodb.go index cfbc8195b..54dfb831f 100644 --- a/storage/cache/mongodb.go +++ b/storage/cache/mongodb.go @@ -317,7 +317,7 @@ func (m MongoDB) Remain(ctx context.Context, name string) (int64, error) { }) } -func (m MongoDB) AddDocuments(ctx context.Context, collection, subset string, documents []Document) error { +func (m MongoDB) AddScores(ctx context.Context, collection, subset string, documents []Score) error { if len(documents) == 0 { return nil } @@ -341,7 +341,7 @@ func (m MongoDB) AddDocuments(ctx context.Context, collection, subset string, do return errors.Trace(err) } -func (m MongoDB) SearchDocuments(ctx context.Context, collection, subset string, query []string, begin, end int) ([]Document, error) { +func (m MongoDB) SearchScores(ctx context.Context, collection, subset string, query []string, begin, end int) ([]Score, error) { if len(query) == 0 { return nil, nil } @@ -358,9 +358,9 @@ func (m MongoDB) SearchDocuments(ctx context.Context, collection, subset string, if err != nil { return nil, errors.Trace(err) } - documents := make([]Document, 0) + documents := make([]Score, 0) for cur.Next(ctx) { - var document Document + var document Score if err = cur.Decode(&document); err != nil { return nil, errors.Trace(err) } @@ -369,7 +369,7 @@ func (m MongoDB) SearchDocuments(ctx context.Context, collection, subset string, return documents, nil } -func (m MongoDB) UpdateDocuments(ctx context.Context, collections []string, id string, patch DocumentPatch) error { +func (m MongoDB) UpdateScores(ctx context.Context, collections []string, id string, patch ScorePatch) error { if len(collections) == 0 { return nil } @@ -393,7 +393,7 @@ func (m MongoDB) UpdateDocuments(ctx context.Context, collections []string, id s return errors.Trace(err) } -func (m MongoDB) DeleteDocuments(ctx context.Context, collections []string, condition DocumentCondition) error { +func (m MongoDB) DeleteScores(ctx context.Context, collections []string, condition ScoreCondition) 
error { if err := condition.Check(); err != nil { return errors.Trace(err) } diff --git a/storage/cache/no_database.go b/storage/cache/no_database.go index c00f9b565..3ca70e932 100644 --- a/storage/cache/no_database.go +++ b/storage/cache/no_database.go @@ -90,19 +90,19 @@ func (NoDatabase) Remain(_ context.Context, _ string) (int64, error) { return 0, ErrNoDatabase } -func (NoDatabase) AddDocuments(_ context.Context, _, _ string, _ []Document) error { +func (NoDatabase) AddScores(_ context.Context, _, _ string, _ []Score) error { return ErrNoDatabase } -func (NoDatabase) SearchDocuments(_ context.Context, _, _ string, _ []string, _, _ int) ([]Document, error) { +func (NoDatabase) SearchScores(_ context.Context, _, _ string, _ []string, _, _ int) ([]Score, error) { return nil, ErrNoDatabase } -func (NoDatabase) UpdateDocuments(_ context.Context, _ []string, _ string, _ DocumentPatch) error { +func (NoDatabase) UpdateScores(_ context.Context, _ []string, _ string, _ ScorePatch) error { return ErrNoDatabase } -func (NoDatabase) DeleteDocuments(_ context.Context, _ []string, _ DocumentCondition) error { +func (NoDatabase) DeleteScores(_ context.Context, _ []string, _ ScoreCondition) error { return ErrNoDatabase } diff --git a/storage/cache/no_database_test.go b/storage/cache/no_database_test.go index 150c9b8fe..b49127754 100644 --- a/storage/cache/no_database_test.go +++ b/storage/cache/no_database_test.go @@ -62,13 +62,13 @@ func TestNoDatabase(t *testing.T) { _, err = database.Remain(ctx, "") assert.ErrorIs(t, err, ErrNoDatabase) - err = database.AddDocuments(ctx, "", "", nil) + err = database.AddScores(ctx, "", "", nil) assert.ErrorIs(t, err, ErrNoDatabase) - _, err = database.SearchDocuments(ctx, "", "", nil, 0, 0) + _, err = database.SearchScores(ctx, "", "", nil, 0, 0) assert.ErrorIs(t, err, ErrNoDatabase) - err = database.UpdateDocuments(ctx, nil, "", DocumentPatch{}) + err = database.UpdateScores(ctx, nil, "", ScorePatch{}) assert.ErrorIs(t, err, 
ErrNoDatabase) - err = database.DeleteDocuments(ctx, nil, DocumentCondition{}) + err = database.DeleteScores(ctx, nil, ScoreCondition{}) assert.ErrorIs(t, err, ErrNoDatabase) err = database.AddTimeSeriesPoints(ctx, nil) diff --git a/storage/cache/redis.go b/storage/cache/redis.go index 409f7b086..18c74c56f 100644 --- a/storage/cache/redis.go +++ b/storage/cache/redis.go @@ -237,7 +237,7 @@ func (r *Redis) documentKey(collection, subset, value string) string { return r.DocumentTable() + ":" + collection + ":" + subset + ":" + value } -func (r *Redis) AddDocuments(ctx context.Context, collection, subset string, documents []Document) error { +func (r *Redis) AddScores(ctx context.Context, collection, subset string, documents []Score) error { p := r.client.Pipeline() for _, document := range documents { p.HSet(ctx, r.documentKey(collection, subset, document.Id), @@ -253,7 +253,7 @@ func (r *Redis) AddDocuments(ctx context.Context, collection, subset string, doc return errors.Trace(err) } -func (r *Redis) SearchDocuments(ctx context.Context, collection, subset string, query []string, begin, end int) ([]Document, error) { +func (r *Redis) SearchScores(ctx context.Context, collection, subset string, query []string, begin, end int) ([]Score, error) { if len(query) == 0 { return nil, nil } @@ -278,9 +278,9 @@ func (r *Redis) SearchDocuments(ctx context.Context, collection, subset string, if err != nil { return nil, errors.Trace(err) } - documents := make([]Document, 0, len(result.Docs)) + documents := make([]Score, 0, len(result.Docs)) for _, doc := range result.Docs { - var document Document + var document Score document.Id = doc.Fields["id"] score, err := strconv.ParseFloat(doc.Fields["score"], 64) if err != nil { @@ -307,7 +307,7 @@ func (r *Redis) SearchDocuments(ctx context.Context, collection, subset string, return documents, nil } -func (r *Redis) UpdateDocuments(ctx context.Context, collections []string, id string, patch DocumentPatch) error { +func (r *Redis) 
UpdateScores(ctx context.Context, collections []string, id string, patch ScorePatch) error { if len(collections) == 0 { return nil } @@ -359,7 +359,7 @@ func (r *Redis) UpdateDocuments(ctx context.Context, collections []string, id st return nil } -func (r *Redis) DeleteDocuments(ctx context.Context, collections []string, condition DocumentCondition) error { +func (r *Redis) DeleteScores(ctx context.Context, collections []string, condition ScoreCondition) error { if err := condition.Check(); err != nil { return errors.Trace(err) } diff --git a/storage/cache/redis_test.go b/storage/cache/redis_test.go index 10b03ae05..a63ab6949 100644 --- a/storage/cache/redis_test.go +++ b/storage/cache/redis_test.go @@ -67,29 +67,29 @@ func (suite *RedisTestSuite) TestEscapeCharacters() { collection := fmt.Sprintf("a%s1", c) subset := fmt.Sprintf("b%s2", c) id := fmt.Sprintf("c%s3", c) - err := suite.AddDocuments(ctx, collection, subset, []Document{{ + err := suite.AddScores(ctx, collection, subset, []Score{{ Id: id, Score: math.MaxFloat64, Categories: []string{"a", "b"}, Timestamp: ts, }}) suite.NoError(err) - documents, err := suite.SearchDocuments(ctx, collection, subset, []string{"b"}, 0, -1) + documents, err := suite.SearchScores(ctx, collection, subset, []string{"b"}, 0, -1) suite.NoError(err) - suite.Equal([]Document{{Id: id, Score: math.MaxFloat64, Categories: []string{"a", "b"}, Timestamp: ts}}, documents) + suite.Equal([]Score{{Id: id, Score: math.MaxFloat64, Categories: []string{"a", "b"}, Timestamp: ts}}, documents) - err = suite.UpdateDocuments(ctx, []string{collection}, id, DocumentPatch{Score: proto.Float64(1)}) + err = suite.UpdateScores(ctx, []string{collection}, id, ScorePatch{Score: proto.Float64(1)}) suite.NoError(err) - documents, err = suite.SearchDocuments(ctx, collection, subset, []string{"b"}, 0, -1) + documents, err = suite.SearchScores(ctx, collection, subset, []string{"b"}, 0, -1) suite.NoError(err) - suite.Equal([]Document{{Id: id, Score: 1, Categories: 
[]string{"a", "b"}, Timestamp: ts}}, documents) + suite.Equal([]Score{{Id: id, Score: 1, Categories: []string{"a", "b"}, Timestamp: ts}}, documents) - err = suite.DeleteDocuments(ctx, []string{collection}, DocumentCondition{ + err = suite.DeleteScores(ctx, []string{collection}, ScoreCondition{ Subset: proto.String(subset), Id: proto.String(id), }) suite.NoError(err) - documents, err = suite.SearchDocuments(ctx, collection, subset, []string{"b"}, 0, -1) + documents, err = suite.SearchScores(ctx, collection, subset, []string{"b"}, 0, -1) suite.NoError(err) suite.Empty(documents) }) diff --git a/storage/cache/sql.go b/storage/cache/sql.go index 274291dcc..bef865d75 100644 --- a/storage/cache/sql.go +++ b/storage/cache/sql.go @@ -391,11 +391,11 @@ func (db *SQLDatabase) Remain(ctx context.Context, name string) (count int64, er return } -func (db *SQLDatabase) AddDocuments(ctx context.Context, collection, subset string, documents []Document) error { +func (db *SQLDatabase) AddScores(ctx context.Context, collection, subset string, documents []Score) error { var rows any switch db.driver { case Postgres: - rows = lo.Map(documents, func(document Document, _ int) PostgresDocument { + rows = lo.Map(documents, func(document Score, _ int) PostgresDocument { return PostgresDocument{ Collection: collection, Subset: subset, @@ -407,7 +407,7 @@ func (db *SQLDatabase) AddDocuments(ctx context.Context, collection, subset stri } }) case SQLite, MySQL: - rows = lo.Map(documents, func(document Document, _ int) SQLDocument { + rows = lo.Map(documents, func(document Score, _ int) SQLDocument { return SQLDocument{ Collection: collection, Subset: subset, @@ -426,7 +426,7 @@ func (db *SQLDatabase) AddDocuments(ctx context.Context, collection, subset stri return nil } -func (db *SQLDatabase) SearchDocuments(ctx context.Context, collection, subset string, query []string, begin, end int) ([]Document, error) { +func (db *SQLDatabase) SearchScores(ctx context.Context, collection, subset string, 
query []string, begin, end int) ([]Score, error) { if len(query) == 0 { return nil, nil } @@ -451,7 +451,7 @@ func (db *SQLDatabase) SearchDocuments(ctx context.Context, collection, subset s if err != nil { return nil, errors.Trace(err) } - documents := make([]Document, 0, 10) + documents := make([]Score, 0, 10) for rows.Next() { switch db.driver { case Postgres: @@ -459,14 +459,14 @@ func (db *SQLDatabase) SearchDocuments(ctx context.Context, collection, subset s if err = rows.Scan(&document.Id, &document.Score, &document.Categories, &document.Timestamp); err != nil { return nil, errors.Trace(err) } - documents = append(documents, Document{ + documents = append(documents, Score{ Id: document.Id, Score: document.Score, Categories: document.Categories, Timestamp: document.Timestamp, }) case SQLite, MySQL: - var document Document + var document Score if err = db.gormDB.ScanRows(rows, &document); err != nil { return nil, errors.Trace(err) } @@ -477,7 +477,7 @@ func (db *SQLDatabase) SearchDocuments(ctx context.Context, collection, subset s return documents, nil } -func (db *SQLDatabase) UpdateDocuments(ctx context.Context, collections []string, id string, patch DocumentPatch) error { +func (db *SQLDatabase) UpdateScores(ctx context.Context, collections []string, id string, patch ScorePatch) error { if len(collections) == 0 { return nil } @@ -506,7 +506,7 @@ func (db *SQLDatabase) UpdateDocuments(ctx context.Context, collections []string return tx.Error } -func (db *SQLDatabase) DeleteDocuments(ctx context.Context, collections []string, condition DocumentCondition) error { +func (db *SQLDatabase) DeleteScores(ctx context.Context, collections []string, condition ScoreCondition) error { if err := condition.Check(); err != nil { return errors.Trace(err) } diff --git a/worker/worker.go b/worker/worker.go index 88855ab5d..f6c92f501 100644 --- a/worker/worker.go +++ b/worker/worker.go @@ -624,7 +624,7 @@ func (w *Worker) Recommend(users []data.User) { scores := 
make(map[string]float64) for _, itemId := range positiveItems { // load similar items - similarItems, err := w.CacheClient.SearchDocuments(ctx, cache.ItemNeighbors, itemId, []string{category}, 0, w.Config.Recommend.CacheSize) + similarItems, err := w.CacheClient.SearchScores(ctx, cache.ItemNeighbors, itemId, []string{category}, 0, w.Config.Recommend.CacheSize) if err != nil { log.Logger().Error("failed to load similar items", zap.Error(err)) return errors.Trace(err) @@ -662,7 +662,7 @@ func (w *Worker) Recommend(users []data.User) { localStartTime := time.Now() scores := make(map[string]float64) // load similar users - similarUsers, err := w.CacheClient.SearchDocuments(ctx, cache.UserNeighbors, userId, []string{""}, 0, w.Config.Recommend.CacheSize) + similarUsers, err := w.CacheClient.SearchScores(ctx, cache.UserNeighbors, userId, []string{""}, 0, w.Config.Recommend.CacheSize) if err != nil { log.Logger().Error("failed to load similar users", zap.Error(err)) return errors.Trace(err) @@ -715,7 +715,7 @@ func (w *Worker) Recommend(users []data.User) { if w.Config.Recommend.Offline.EnableLatestRecommend { localStartTime := time.Now() for _, category := range append([]string{""}, itemCategories...) { - latestItems, err := w.CacheClient.SearchDocuments(ctx, cache.LatestItems, "", []string{category}, 0, w.Config.Recommend.CacheSize) + latestItems, err := w.CacheClient.SearchScores(ctx, cache.LatestItems, "", []string{category}, 0, w.Config.Recommend.CacheSize) if err != nil { log.Logger().Error("failed to load latest items", zap.Error(err)) return errors.Trace(err) @@ -735,7 +735,7 @@ func (w *Worker) Recommend(users []data.User) { if w.Config.Recommend.Offline.EnablePopularRecommend { localStartTime := time.Now() for _, category := range append([]string{""}, itemCategories...) 
{ - popularItems, err := w.CacheClient.SearchDocuments(ctx, cache.PopularItems, "", []string{category}, 0, w.Config.Recommend.CacheSize) + popularItems, err := w.CacheClient.SearchScores(ctx, cache.PopularItems, "", []string{category}, 0, w.Config.Recommend.CacheSize) if err != nil { log.Logger().Error("failed to load popular items", zap.Error(err)) return errors.Trace(err) @@ -756,7 +756,7 @@ func (w *Worker) Recommend(users []data.User) { // 2. If collaborative filtering model is available, use it to rank items. // 3. Otherwise, merge all recommenders' results randomly. ctrUsed := false - results := make(map[string][]cache.Document) + results := make(map[string][]cache.Score) for category, catCandidates := range candidates { if w.Config.Recommend.Offline.EnableClickThroughPrediction && w.rankers[workerId] != nil && !w.rankers[workerId].Invalid() { results[category], err = w.rankByClickTroughRate(&user, catCandidates, itemCache, w.rankers[workerId]) @@ -794,13 +794,13 @@ func (w *Worker) Recommend(users []data.User) { log.Logger().Error("failed to explore latest and popular items", zap.Error(err)) return errors.Trace(err) } - aggregator.Add(category, lo.Map(scores, func(document cache.Document, _ int) string { + aggregator.Add(category, lo.Map(scores, func(document cache.Score, _ int) string { return document.Id - }), lo.Map(scores, func(document cache.Document, _ int) float64 { + }), lo.Map(scores, func(document cache.Score, _ int) float64 { return document.Score })) } - if err = w.CacheClient.AddDocuments(ctx, cache.OfflineRecommend, userId, aggregator.ToSlice()); err != nil { + if err = w.CacheClient.AddScores(ctx, cache.OfflineRecommend, userId, aggregator.ToSlice()); err != nil { log.Logger().Error("failed to cache recommendation", zap.Error(err)) return errors.Trace(err) } @@ -860,11 +860,11 @@ func (w *Worker) collaborativeRecommendBruteForce(userId string, itemCategories recommend[category] = recommendItems aggregator.Add(category, recommendItems, 
recommendScores) } - if err := w.CacheClient.AddDocuments(ctx, cache.CollaborativeRecommend, userId, aggregator.ToSlice()); err != nil { + if err := w.CacheClient.AddScores(ctx, cache.CollaborativeRecommend, userId, aggregator.ToSlice()); err != nil { log.Logger().Error("failed to cache collaborative filtering recommendation result", zap.String("user_id", userId), zap.Error(err)) return nil, 0, errors.Trace(err) } - if err := w.CacheClient.DeleteDocuments(ctx, []string{cache.CollaborativeRecommend}, cache.DocumentCondition{Before: &localStartTime}); err != nil { + if err := w.CacheClient.DeleteScores(ctx, []string{cache.CollaborativeRecommend}, cache.ScoreCondition{Before: &localStartTime}); err != nil { log.Logger().Error("failed to delete stale collaborative filtering recommendation result", zap.String("user_id", userId), zap.Error(err)) return nil, 0, errors.Trace(err) } @@ -893,18 +893,18 @@ func (w *Worker) collaborativeRecommendHNSW(rankingIndex *search.HNSW, userId st recommend[category] = recommendItems aggregator.Add(category, recommendItems, recommendScores) } - if err := w.CacheClient.AddDocuments(ctx, cache.CollaborativeRecommend, userId, aggregator.ToSlice()); err != nil { + if err := w.CacheClient.AddScores(ctx, cache.CollaborativeRecommend, userId, aggregator.ToSlice()); err != nil { log.Logger().Error("failed to cache collaborative filtering recommendation result", zap.String("user_id", userId), zap.Error(err)) return nil, 0, errors.Trace(err) } - if err := w.CacheClient.DeleteDocuments(ctx, []string{cache.CollaborativeRecommend}, cache.DocumentCondition{Before: &localStartTime}); err != nil { + if err := w.CacheClient.DeleteScores(ctx, []string{cache.CollaborativeRecommend}, cache.ScoreCondition{Before: &localStartTime}); err != nil { log.Logger().Error("failed to delete stale collaborative filtering recommendation result", zap.String("user_id", userId), zap.Error(err)) return nil, 0, errors.Trace(err) } return recommend, 
time.Since(localStartTime), nil } -func (w *Worker) rankByCollaborativeFiltering(userId string, candidates [][]string) ([]cache.Document, error) { +func (w *Worker) rankByCollaborativeFiltering(userId string, candidates [][]string) ([]cache.Score, error) { // concat candidates memo := mapset.NewSet[string]() var itemIds []string @@ -917,9 +917,9 @@ func (w *Worker) rankByCollaborativeFiltering(userId string, candidates [][]stri } } // rank by collaborative filtering - topItems := make([]cache.Document, 0, len(candidates)) + topItems := make([]cache.Score, 0, len(candidates)) for _, itemId := range itemIds { - topItems = append(topItems, cache.Document{ + topItems = append(topItems, cache.Score{ Id: itemId, Score: float64(w.RankingModel.Predict(userId, itemId)), }) @@ -929,7 +929,7 @@ func (w *Worker) rankByCollaborativeFiltering(userId string, candidates [][]stri } // rankByClickTroughRate ranks items by predicted click-through-rate. -func (w *Worker) rankByClickTroughRate(user *data.User, candidates [][]string, itemCache *ItemCache, predictor click.FactorizationMachine) ([]cache.Document, error) { +func (w *Worker) rankByClickTroughRate(user *data.User, candidates [][]string, itemCache *ItemCache, predictor click.FactorizationMachine) ([]cache.Score, error) { // concat candidates memo := mapset.NewSet[string]() var itemIds []string @@ -951,7 +951,7 @@ func (w *Worker) rankByClickTroughRate(user *data.User, candidates [][]string, i } } // rank by CTR - topItems := make([]cache.Document, 0, len(items)) + topItems := make([]cache.Score, 0, len(items)) if batchPredictor, ok := predictor.(click.BatchInference); ok { inputs := make([]lo.Tuple4[string, string, []click.Feature, []click.Feature], len(items)) for i, item := range items { @@ -962,14 +962,14 @@ func (w *Worker) rankByClickTroughRate(user *data.User, candidates [][]string, i } output := batchPredictor.BatchPredict(inputs) for i, score := range output { - topItems = append(topItems, cache.Document{ + topItems = 
append(topItems, cache.Score{ Id: items[i].ItemId, Score: float64(score), }) } } else { for _, item := range items { - topItems = append(topItems, cache.Document{ + topItems = append(topItems, cache.Score{ Id: item.ItemId, Score: float64(predictor.Predict(user.UserId, item.ItemId, click.ConvertLabelsToFeatures(user.Labels), click.ConvertLabelsToFeatures(item.Labels))), }) @@ -979,10 +979,10 @@ func (w *Worker) rankByClickTroughRate(user *data.User, candidates [][]string, i return topItems, nil } -func (w *Worker) mergeAndShuffle(candidates [][]string) []cache.Document { +func (w *Worker) mergeAndShuffle(candidates [][]string) []cache.Score { memo := mapset.NewSet[string]() pos := make([]int, len(candidates)) - var recommend []cache.Document + var recommend []cache.Score for { // filter out ended slice var src []int @@ -1000,13 +1000,13 @@ func (w *Worker) mergeAndShuffle(candidates [][]string) []cache.Document { pos[j]++ if !memo.Contains(candidateId) { memo.Add(candidateId) - recommend = append(recommend, cache.Document{Score: math.Exp(float64(-len(recommend))), Id: candidateId}) + recommend = append(recommend, cache.Score{Score: math.Exp(float64(-len(recommend))), Id: candidateId}) } } return recommend } -func (w *Worker) exploreRecommend(exploitRecommend []cache.Document, excludeSet mapset.Set[string], category string) ([]cache.Document, error) { +func (w *Worker) exploreRecommend(exploitRecommend []cache.Score, excludeSet mapset.Set[string], category string) ([]cache.Score, error) { var localExcludeSet mapset.Set[string] ctx := context.Background() if w.Config.Recommend.Replacement.EnableReplacement { @@ -1024,24 +1024,24 @@ func (w *Worker) exploreRecommend(exploitRecommend []cache.Document, excludeSet exploreLatestThreshold += threshold } // load popular items - popularItems, err := w.CacheClient.SearchDocuments(ctx, cache.PopularItems, "", []string{category}, 0, w.Config.Recommend.CacheSize) + popularItems, err := w.CacheClient.SearchScores(ctx, 
cache.PopularItems, "", []string{category}, 0, w.Config.Recommend.CacheSize) if err != nil { return nil, errors.Trace(err) } // load the latest items - latestItems, err := w.CacheClient.SearchDocuments(ctx, cache.LatestItems, "", []string{category}, 0, w.Config.Recommend.CacheSize) + latestItems, err := w.CacheClient.SearchScores(ctx, cache.LatestItems, "", []string{category}, 0, w.Config.Recommend.CacheSize) if err != nil { return nil, errors.Trace(err) } // explore recommendation - var exploreRecommend []cache.Document + var exploreRecommend []cache.Score score := 1.0 if len(exploitRecommend) > 0 { score += exploitRecommend[0].Score } for range exploitRecommend { dice := w.randGenerator.Float64() - var recommendItem cache.Document + var recommendItem cache.Score if dice < explorePopularThreshold && len(popularItems) > 0 { score -= 1e-5 recommendItem.Id = popularItems[0].Id @@ -1084,8 +1084,8 @@ func (w *Worker) checkUserActiveTime(ctx context.Context, userId string) bool { return true } // remove recommend cache for inactive users - if err := w.CacheClient.DeleteDocuments(ctx, []string{cache.OfflineRecommend, cache.CollaborativeRecommend}, - cache.DocumentCondition{Subset: proto.String(userId)}); err != nil { + if err := w.CacheClient.DeleteScores(ctx, []string{cache.OfflineRecommend, cache.CollaborativeRecommend}, + cache.ScoreCondition{Subset: proto.String(userId)}); err != nil { log.Logger().Error("failed to delete recommend cache", zap.String("user_id", userId), zap.Error(err)) } return false @@ -1104,7 +1104,7 @@ func (w *Worker) checkRecommendCacheTimeout(ctx context.Context, userId string, ) // check cache for _, category := range append([]string{""}, categories...) 
{ - items, err := w.CacheClient.SearchDocuments(ctx, cache.OfflineRecommend, userId, []string{category}, 0, -1) + items, err := w.CacheClient.SearchScores(ctx, cache.OfflineRecommend, userId, []string{category}, 0, -1) if err != nil { log.Logger().Error("failed to load offline recommendation", zap.String("user_id", userId), zap.Error(err)) return true @@ -1213,14 +1213,14 @@ func (w *Worker) pullUsers(peers []string, me string) ([]data.User, error) { } // replacement inserts historical items back to recommendation. -func (w *Worker) replacement(recommend map[string][]cache.Document, user *data.User, feedbacks []data.Feedback, itemCache *ItemCache) (map[string][]cache.Document, error) { +func (w *Worker) replacement(recommend map[string][]cache.Score, user *data.User, feedbacks []data.Feedback, itemCache *ItemCache) (map[string][]cache.Score, error) { upperBounds := make(map[string]float64) lowerBounds := make(map[string]float64) - newRecommend := make(map[string][]cache.Document) + newRecommend := make(map[string][]cache.Score) for category, scores := range recommend { // find minimal score if len(scores) > 0 { - s := lo.Map(scores, func(score cache.Document, _ int) float64 { + s := lo.Map(scores, func(score cache.Score, _ int) float64 { return score.Score }) upperBounds[category] = funk.MaxFloat64(s) @@ -1281,7 +1281,7 @@ func (w *Worker) replacement(recommend map[string][]cache.Document, user *data.U } score += lowerBound } - newRecommend[category] = append(newRecommend[category], cache.Document{Id: itemId, Score: score}) + newRecommend[category] = append(newRecommend[category], cache.Score{Id: itemId, Score: score}) } } else { log.Logger().Warn("item doesn't exists in database", zap.String("item_id", itemId)) diff --git a/worker/worker_test.go b/worker/worker_test.go index 0a9eae460..5a2babac0 100644 --- a/worker/worker_test.go +++ b/worker/worker_test.go @@ -121,7 +121,7 @@ func (suite *WorkerTestSuite) TestCheckRecommendCacheTimeout() { // empty cache 
suite.True(suite.checkRecommendCacheTimeout(ctx, "0", nil)) - err := suite.CacheClient.AddDocuments(ctx, cache.OfflineRecommend, "0", []cache.Document{{Id: "0", Score: 0, Categories: []string{""}}}) + err := suite.CacheClient.AddScores(ctx, cache.OfflineRecommend, "0", []cache.Score{{Id: "0", Score: 0, Categories: []string{""}}}) suite.NoError(err) // digest mismatch @@ -138,7 +138,7 @@ func (suite *WorkerTestSuite) TestCheckRecommendCacheTimeout() { err = suite.CacheClient.Set(ctx, cache.Time(cache.Key(cache.LastUpdateUserRecommendTime, "0"), time.Now().Add(time.Hour*100))) suite.NoError(err) suite.False(suite.checkRecommendCacheTimeout(ctx, "0", nil)) - err = suite.CacheClient.DeleteDocuments(ctx, []string{cache.OfflineRecommend}, cache.DocumentCondition{Subset: proto.String("0")}) + err = suite.CacheClient.DeleteScores(ctx, []string{cache.OfflineRecommend}, cache.ScoreCondition{Subset: proto.String("0")}) suite.NoError(err) suite.True(suite.checkRecommendCacheTimeout(ctx, "0", nil)) } @@ -239,17 +239,17 @@ func (suite *WorkerTestSuite) TestRecommendMatrixFactorizationBruteForce() { recommendTime, err := suite.CacheClient.Get(ctx, cache.Key(cache.LastUpdateUserRecommendTime, "0")).Time() suite.NoError(err) - recommends, err := suite.CacheClient.SearchDocuments(ctx, cache.OfflineRecommend, "0", []string{""}, 0, -1) + recommends, err := suite.CacheClient.SearchScores(ctx, cache.OfflineRecommend, "0", []string{""}, 0, -1) suite.NoError(err) - suite.Equal([]cache.Document{ + suite.Equal([]cache.Score{ {Id: "3", Score: 3, Categories: []string{"", "*"}, Timestamp: recommendTime}, {Id: "2", Score: 2, Categories: []string{""}, Timestamp: recommendTime}, {Id: "1", Score: 1, Categories: []string{"", "*"}, Timestamp: recommendTime}, {Id: "0", Score: 0, Categories: []string{""}, Timestamp: recommendTime}, }, recommends) - recommends, err = suite.CacheClient.SearchDocuments(ctx, cache.OfflineRecommend, "0", []string{"*"}, 0, -1) + recommends, err = 
suite.CacheClient.SearchScores(ctx, cache.OfflineRecommend, "0", []string{"*"}, 0, -1) suite.NoError(err) - suite.Equal([]cache.Document{ + suite.Equal([]cache.Score{ {Id: "3", Score: 3, Categories: []string{"", "*"}, Timestamp: recommendTime}, {Id: "1", Score: 1, Categories: []string{"", "*"}, Timestamp: recommendTime}, }, recommends) @@ -292,9 +292,9 @@ func (suite *WorkerTestSuite) TestRecommendMatrixFactorizationHNSW() { recommendTime, err := suite.CacheClient.Get(ctx, cache.Key(cache.LastUpdateUserRecommendTime, "0")).Time() suite.NoError(err) - recommends, err := suite.CacheClient.SearchDocuments(ctx, cache.OfflineRecommend, "0", []string{""}, 0, -1) + recommends, err := suite.CacheClient.SearchScores(ctx, cache.OfflineRecommend, "0", []string{""}, 0, -1) suite.NoError(err) - suite.Equal([]cache.Document{ + suite.Equal([]cache.Score{ {Id: "3", Score: 3, Categories: []string{"", "*"}, Timestamp: recommendTime}, {Id: "2", Score: 2, Categories: []string{""}, Timestamp: recommendTime}, {Id: "1", Score: 1, Categories: []string{"", "*"}, Timestamp: recommendTime}, @@ -316,20 +316,20 @@ func (suite *WorkerTestSuite) TestRecommendItemBased() { suite.NoError(err) // insert similar items - err = suite.CacheClient.AddDocuments(ctx, cache.ItemNeighbors, "21", []cache.Document{ + err = suite.CacheClient.AddScores(ctx, cache.ItemNeighbors, "21", []cache.Score{ {Id: "22", Score: 100000, Categories: []string{"", "*"}}, {Id: "25", Score: 1000000, Categories: []string{""}}, {Id: "29", Score: 1, Categories: []string{""}}, }) suite.NoError(err) - err = suite.CacheClient.AddDocuments(ctx, cache.ItemNeighbors, "22", []cache.Document{ + err = suite.CacheClient.AddScores(ctx, cache.ItemNeighbors, "22", []cache.Score{ {Id: "23", Score: 100000, Categories: []string{"", "*"}}, {Id: "25", Score: 1000000, Categories: []string{""}}, {Id: "28", Score: 1, Categories: []string{"", "*"}}, {Id: "29", Score: 1, Categories: []string{""}}, }) suite.NoError(err) - err = 
suite.CacheClient.AddDocuments(ctx, cache.ItemNeighbors, "23", []cache.Document{ + err = suite.CacheClient.AddScores(ctx, cache.ItemNeighbors, "23", []cache.Score{ {Id: "24", Score: 100000, Categories: []string{"", "*"}}, {Id: "25", Score: 1000000, Categories: []string{""}}, {Id: "27", Score: 1, Categories: []string{""}}, @@ -337,7 +337,7 @@ func (suite *WorkerTestSuite) TestRecommendItemBased() { {Id: "29", Score: 1, Categories: []string{""}}, }) suite.NoError(err) - err = suite.CacheClient.AddDocuments(ctx, cache.ItemNeighbors, "24", []cache.Document{ + err = suite.CacheClient.AddScores(ctx, cache.ItemNeighbors, "24", []cache.Score{ {Id: "21", Score: 100000, Categories: []string{""}}, {Id: "25", Score: 1000000, Categories: []string{""}}, {Id: "26", Score: 1, Categories: []string{"", "*"}}, @@ -363,16 +363,16 @@ func (suite *WorkerTestSuite) TestRecommendItemBased() { recommendTime, err := suite.CacheClient.Get(ctx, cache.Key(cache.LastUpdateUserRecommendTime, "0")).Time() suite.NoError(err) // read recommend result - recommends, err := suite.CacheClient.SearchDocuments(ctx, cache.OfflineRecommend, "0", []string{""}, 0, 3) + recommends, err := suite.CacheClient.SearchScores(ctx, cache.OfflineRecommend, "0", []string{""}, 0, 3) suite.NoError(err) - suite.Equal([]cache.Document{ + suite.Equal([]cache.Score{ {Id: "29", Score: 29, Categories: []string{""}, Timestamp: recommendTime}, {Id: "28", Score: 28, Categories: []string{"", "*"}, Timestamp: recommendTime}, {Id: "27", Score: 27, Categories: []string{""}, Timestamp: recommendTime}, }, recommends) - recommends, err = suite.CacheClient.SearchDocuments(ctx, cache.OfflineRecommend, "0", []string{"*"}, 0, 3) + recommends, err = suite.CacheClient.SearchScores(ctx, cache.OfflineRecommend, "0", []string{"*"}, 0, 3) suite.NoError(err) - suite.Equal([]cache.Document{ + suite.Equal([]cache.Score{ {Id: "28", Score: 28, Categories: []string{"", "*"}, Timestamp: recommendTime}, {Id: "26", Score: 26, Categories: []string{"", 
"*"}, Timestamp: recommendTime}, }, recommends) @@ -383,7 +383,7 @@ func (suite *WorkerTestSuite) TestRecommendUserBased() { suite.Config.Recommend.Offline.EnableColRecommend = false suite.Config.Recommend.Offline.EnableUserBasedRecommend = true // insert similar users - err := suite.CacheClient.AddDocuments(ctx, cache.UserNeighbors, "0", []cache.Document{ + err := suite.CacheClient.AddScores(ctx, cache.UserNeighbors, "0", []cache.Score{ {Id: "1", Score: 2, Categories: []string{""}}, {Id: "2", Score: 1.5, Categories: []string{""}}, {Id: "3", Score: 1, Categories: []string{""}}, @@ -422,16 +422,16 @@ func (suite *WorkerTestSuite) TestRecommendUserBased() { recommendTime, err := suite.CacheClient.Get(ctx, cache.Key(cache.LastUpdateUserRecommendTime, "0")).Time() suite.NoError(err) // read recommend result - recommends, err := suite.CacheClient.SearchDocuments(ctx, cache.OfflineRecommend, "0", []string{""}, 0, 3) + recommends, err := suite.CacheClient.SearchScores(ctx, cache.OfflineRecommend, "0", []string{""}, 0, 3) suite.NoError(err) - suite.Equal([]cache.Document{ + suite.Equal([]cache.Score{ {Id: "48", Score: 48, Categories: []string{"", "*"}, Timestamp: recommendTime}, {Id: "13", Score: 13, Categories: []string{""}, Timestamp: recommendTime}, {Id: "12", Score: 12, Categories: []string{"", "*"}, Timestamp: recommendTime}, }, recommends) - recommends, err = suite.CacheClient.SearchDocuments(ctx, cache.OfflineRecommend, "0", []string{"*"}, 0, 3) + recommends, err = suite.CacheClient.SearchScores(ctx, cache.OfflineRecommend, "0", []string{"*"}, 0, 3) suite.NoError(err) - suite.Equal([]cache.Document{ + suite.Equal([]cache.Score{ {Id: "48", Score: 48, Categories: []string{"", "*"}, Timestamp: recommendTime}, {Id: "12", Score: 12, Categories: []string{"", "*"}, Timestamp: recommendTime}, }, recommends) @@ -442,7 +442,7 @@ func (suite *WorkerTestSuite) TestRecommendPopular() { suite.Config.Recommend.Offline.EnableColRecommend = false 
suite.Config.Recommend.Offline.EnablePopularRecommend = true // insert popular items - err := suite.CacheClient.AddDocuments(ctx, cache.PopularItems, "", []cache.Document{ + err := suite.CacheClient.AddScores(ctx, cache.PopularItems, "", []cache.Score{ {Id: "11", Score: 11, Categories: []string{""}}, {Id: "10", Score: 10, Categories: []string{""}}, {Id: "9", Score: 9, Categories: []string{""}}, @@ -469,16 +469,16 @@ func (suite *WorkerTestSuite) TestRecommendPopular() { recommendTime, err := suite.CacheClient.Get(ctx, cache.Key(cache.LastUpdateUserRecommendTime, "0")).Time() suite.NoError(err) // read recommend result - recommends, err := suite.CacheClient.SearchDocuments(ctx, cache.OfflineRecommend, "0", []string{""}, 0, -1) + recommends, err := suite.CacheClient.SearchScores(ctx, cache.OfflineRecommend, "0", []string{""}, 0, -1) suite.NoError(err) - suite.Equal([]cache.Document{ + suite.Equal([]cache.Score{ {Id: "10", Score: 10, Categories: []string{""}, Timestamp: recommendTime}, {Id: "9", Score: 9, Categories: []string{""}, Timestamp: recommendTime}, {Id: "8", Score: 8, Categories: []string{""}, Timestamp: recommendTime}, }, recommends) - recommends, err = suite.CacheClient.SearchDocuments(ctx, cache.OfflineRecommend, "0", []string{"*"}, 0, -1) + recommends, err = suite.CacheClient.SearchScores(ctx, cache.OfflineRecommend, "0", []string{"*"}, 0, -1) suite.NoError(err) - suite.Equal([]cache.Document{ + suite.Equal([]cache.Score{ {Id: "20", Score: 20, Categories: []string{"*"}, Timestamp: recommendTime}, {Id: "19", Score: 19, Categories: []string{"*"}, Timestamp: recommendTime}, {Id: "18", Score: 18, Categories: []string{"*"}, Timestamp: recommendTime}, @@ -491,7 +491,7 @@ func (suite *WorkerTestSuite) TestRecommendLatest() { suite.Config.Recommend.Offline.EnableColRecommend = false suite.Config.Recommend.Offline.EnableLatestRecommend = true // insert latest items - err := suite.CacheClient.AddDocuments(ctx, cache.LatestItems, "", []cache.Document{ + err := 
suite.CacheClient.AddScores(ctx, cache.LatestItems, "", []cache.Score{ {Id: "11", Score: 11, Categories: []string{""}}, {Id: "10", Score: 10, Categories: []string{""}}, {Id: "9", Score: 9, Categories: []string{""}}, @@ -518,16 +518,16 @@ func (suite *WorkerTestSuite) TestRecommendLatest() { recommendTime, err := suite.CacheClient.Get(ctx, cache.Key(cache.LastUpdateUserRecommendTime, "0")).Time() suite.NoError(err) // read recommend result - recommends, err := suite.CacheClient.SearchDocuments(ctx, cache.OfflineRecommend, "0", []string{""}, 0, -1) + recommends, err := suite.CacheClient.SearchScores(ctx, cache.OfflineRecommend, "0", []string{""}, 0, -1) suite.NoError(err) - suite.Equal([]cache.Document{ + suite.Equal([]cache.Score{ {Id: "10", Score: 10, Categories: []string{""}, Timestamp: recommendTime}, {Id: "9", Score: 9, Categories: []string{""}, Timestamp: recommendTime}, {Id: "8", Score: 8, Categories: []string{""}, Timestamp: recommendTime}, }, recommends) - recommends, err = suite.CacheClient.SearchDocuments(ctx, cache.OfflineRecommend, "0", []string{"*"}, 0, -1) + recommends, err = suite.CacheClient.SearchScores(ctx, cache.OfflineRecommend, "0", []string{"*"}, 0, -1) suite.NoError(err) - suite.Equal([]cache.Document{ + suite.Equal([]cache.Score{ {Id: "20", Score: 20, Categories: []string{"*"}, Timestamp: recommendTime}, {Id: "19", Score: 19, Categories: []string{"*"}, Timestamp: recommendTime}, {Id: "18", Score: 18, Categories: []string{"*"}, Timestamp: recommendTime}, @@ -539,7 +539,7 @@ func (suite *WorkerTestSuite) TestRecommendColdStart() { suite.Config.Recommend.Offline.EnableColRecommend = true suite.Config.Recommend.Offline.EnableLatestRecommend = true // insert latest items - err := suite.CacheClient.AddDocuments(ctx, cache.LatestItems, "", []cache.Document{ + err := suite.CacheClient.AddScores(ctx, cache.LatestItems, "", []cache.Score{ {Id: "11", Score: 11, Categories: []string{""}}, {Id: "10", Score: 10, Categories: []string{""}}, {Id: "9", Score: 
9, Categories: []string{""}}, @@ -564,40 +564,40 @@ func (suite *WorkerTestSuite) TestRecommendColdStart() { // ranking model not exist m := newMockMatrixFactorizationForRecommend(10, 100) suite.Recommend([]data.User{{UserId: "0"}}) - recommends, err := suite.CacheClient.SearchDocuments(ctx, cache.OfflineRecommend, "0", []string{""}, 0, -1) + recommends, err := suite.CacheClient.SearchScores(ctx, cache.OfflineRecommend, "0", []string{""}, 0, -1) suite.NoError(err) - suite.Equal([]string{"10", "9", "8"}, lo.Map(recommends, func(d cache.Document, _ int) string { return d.Id })) - recommends, err = suite.CacheClient.SearchDocuments(ctx, cache.OfflineRecommend, "0", []string{"*"}, 0, -1) + suite.Equal([]string{"10", "9", "8"}, lo.Map(recommends, func(d cache.Score, _ int) string { return d.Id })) + recommends, err = suite.CacheClient.SearchScores(ctx, cache.OfflineRecommend, "0", []string{"*"}, 0, -1) suite.NoError(err) - suite.Equal([]string{"20", "19", "18"}, lo.Map(recommends, func(d cache.Document, _ int) string { return d.Id })) + suite.Equal([]string{"20", "19", "18"}, lo.Map(recommends, func(d cache.Score, _ int) string { return d.Id })) // user not predictable suite.RankingModel = m suite.Recommend([]data.User{{UserId: "100"}}) - recommends, err = suite.CacheClient.SearchDocuments(ctx, cache.OfflineRecommend, "100", []string{""}, 0, -1) + recommends, err = suite.CacheClient.SearchScores(ctx, cache.OfflineRecommend, "100", []string{""}, 0, -1) suite.NoError(err) - suite.Equal([]string{"10", "9", "8"}, lo.Map(recommends, func(d cache.Document, _ int) string { return d.Id })) - recommends, err = suite.CacheClient.SearchDocuments(ctx, cache.OfflineRecommend, "100", []string{"*"}, 0, -1) + suite.Equal([]string{"10", "9", "8"}, lo.Map(recommends, func(d cache.Score, _ int) string { return d.Id })) + recommends, err = suite.CacheClient.SearchScores(ctx, cache.OfflineRecommend, "100", []string{"*"}, 0, -1) suite.NoError(err) - suite.Equal([]string{"20", "19", "18"}, 
lo.Map(recommends, func(d cache.Document, _ int) string { return d.Id })) + suite.Equal([]string{"20", "19", "18"}, lo.Map(recommends, func(d cache.Score, _ int) string { return d.Id })) } func (suite *WorkerTestSuite) TestMergeAndShuffle() { scores := suite.mergeAndShuffle([][]string{{"1", "2", "3"}, {"1", "3", "5"}}) - suite.ElementsMatch([]string{"1", "2", "3", "5"}, lo.Map(scores, func(d cache.Document, _ int) string { return d.Id })) + suite.ElementsMatch([]string{"1", "2", "3", "5"}, lo.Map(scores, func(d cache.Score, _ int) string { return d.Id })) } func (suite *WorkerTestSuite) TestExploreRecommend() { ctx := context.Background() suite.Config.Recommend.Offline.ExploreRecommend = map[string]float64{"popular": 0.3, "latest": 0.3} // insert popular items - err := suite.CacheClient.AddDocuments(ctx, cache.PopularItems, "", []cache.Document{{Id: "popular", Score: 0, Categories: []string{""}, Timestamp: time.Now()}}) + err := suite.CacheClient.AddScores(ctx, cache.PopularItems, "", []cache.Score{{Id: "popular", Score: 0, Categories: []string{""}, Timestamp: time.Now()}}) suite.NoError(err) // insert latest items - err = suite.CacheClient.AddDocuments(ctx, cache.LatestItems, "", []cache.Document{{Id: "latest", Score: 0, Categories: []string{""}, Timestamp: time.Now()}}) + err = suite.CacheClient.AddScores(ctx, cache.LatestItems, "", []cache.Score{{Id: "latest", Score: 0, Categories: []string{""}, Timestamp: time.Now()}}) suite.NoError(err) - recommend, err := suite.exploreRecommend([]cache.Document{ + recommend, err := suite.exploreRecommend([]cache.Score{ {Id: "8", Score: 8}, {Id: "7", Score: 7}, {Id: "6", Score: 6}, @@ -608,14 +608,14 @@ func (suite *WorkerTestSuite) TestExploreRecommend() { {Id: "1", Score: 1}, }, mapset.NewSet[string](), "") suite.NoError(err) - items := lo.Map(recommend, func(d cache.Document, _ int) string { return d.Id }) + items := lo.Map(recommend, func(d cache.Score, _ int) string { return d.Id }) suite.Contains(items, "latest") 
suite.Contains(items, "popular") items = funk.FilterString(items, func(item string) bool { return item != "latest" && item != "popular" }) suite.IsDecreasing(items) - scores := lo.Map(recommend, func(d cache.Document, _ int) float64 { return d.Score }) + scores := lo.Map(recommend, func(d cache.Score, _ int) float64 { return d.Score }) suite.IsDecreasing(scores) suite.Equal(8, len(recommend)) } @@ -872,10 +872,10 @@ func (suite *WorkerTestSuite) TestRankByCollaborativeFiltering() { suite.RankingModel = newMockMatrixFactorizationForRecommend(10, 10) result, err := suite.rankByCollaborativeFiltering("1", [][]string{{"1", "2", "3", "4", "5"}}) suite.NoError(err) - suite.Equal([]string{"5", "4", "3", "2", "1"}, lo.Map(result, func(d cache.Document, _ int) string { + suite.Equal([]string{"5", "4", "3", "2", "1"}, lo.Map(result, func(d cache.Score, _ int) string { return d.Id })) - suite.IsDecreasing(lo.Map(result, func(d cache.Document, _ int) float64 { + suite.IsDecreasing(lo.Map(result, func(d cache.Score, _ int) float64 { return d.Score })) } @@ -893,10 +893,10 @@ func (suite *WorkerTestSuite) TestRankByClickTroughRate() { // rank items result, err := suite.rankByClickTroughRate(&data.User{UserId: "1"}, [][]string{{"1", "2", "3", "4", "5"}}, itemCache, new(mockFactorizationMachine)) suite.NoError(err) - suite.Equal([]string{"5", "4", "3", "2", "1"}, lo.Map(result, func(d cache.Document, _ int) string { + suite.Equal([]string{"5", "4", "3", "2", "1"}, lo.Map(result, func(d cache.Score, _ int) string { return d.Id })) - suite.IsDecreasing(lo.Map(result, func(d cache.Document, _ int) float64 { + suite.IsDecreasing(lo.Map(result, func(d cache.Score, _ int) float64 { return d.Score })) } @@ -929,9 +929,9 @@ func (suite *WorkerTestSuite) TestReplacement_ClickThroughRate() { recommendTime, err := suite.CacheClient.Get(ctx, cache.Key(cache.LastUpdateUserRecommendTime, "0")).Time() suite.NoError(err) // read recommend result - recommends, err := 
suite.CacheClient.SearchDocuments(ctx, cache.OfflineRecommend, "0", []string{""}, 0, 3) + recommends, err := suite.CacheClient.SearchScores(ctx, cache.OfflineRecommend, "0", []string{""}, 0, 3) suite.NoError(err) - suite.Equal([]cache.Document{ + suite.Equal([]cache.Score{ {Id: "10", Score: 10, Categories: []string{""}, Timestamp: recommendTime}, {Id: "9", Score: 9, Categories: []string{""}, Timestamp: recommendTime}, }, recommends) @@ -940,7 +940,7 @@ func (suite *WorkerTestSuite) TestReplacement_ClickThroughRate() { err = suite.CacheClient.Set(ctx, cache.Time(cache.Key(cache.LastUpdateUserRecommendTime, "0"), time.Now().AddDate(-1, 0, 0))) suite.NoError(err) // insert popular items - err = suite.CacheClient.AddDocuments(ctx, cache.PopularItems, "", []cache.Document{ + err = suite.CacheClient.AddScores(ctx, cache.PopularItems, "", []cache.Score{ {Id: "7", Score: 10, Categories: []string{""}}, {Id: "6", Score: 9, Categories: []string{""}}, {Id: "5", Score: 8, Categories: []string{""}}, @@ -958,9 +958,9 @@ func (suite *WorkerTestSuite) TestReplacement_ClickThroughRate() { recommendTime, err = suite.CacheClient.Get(ctx, cache.Key(cache.LastUpdateUserRecommendTime, "0")).Time() suite.NoError(err) // read recommend result - recommends, err = suite.CacheClient.SearchDocuments(ctx, cache.OfflineRecommend, "0", []string{""}, 0, 3) + recommends, err = suite.CacheClient.SearchScores(ctx, cache.OfflineRecommend, "0", []string{""}, 0, 3) suite.NoError(err) - suite.Equal([]cache.Document{ + suite.Equal([]cache.Score{ {Id: "10", Score: 9, Categories: []string{""}, Timestamp: recommendTime}, {Id: "9", Score: 7.4, Categories: []string{""}, Timestamp: recommendTime}, {Id: "7", Score: 7, Categories: []string{""}, Timestamp: recommendTime}, @@ -994,9 +994,9 @@ func (suite *WorkerTestSuite) TestReplacement_CollaborativeFiltering() { recommendTime, err := suite.CacheClient.Get(ctx, cache.Key(cache.LastUpdateUserRecommendTime, "0")).Time() suite.NoError(err) // read recommend result - 
recommends, err := suite.CacheClient.SearchDocuments(ctx, cache.OfflineRecommend, "0", []string{""}, 0, 3) + recommends, err := suite.CacheClient.SearchScores(ctx, cache.OfflineRecommend, "0", []string{""}, 0, 3) suite.NoError(err) - suite.Equal([]cache.Document{ + suite.Equal([]cache.Score{ {Id: "10", Score: 10, Categories: []string{""}, Timestamp: recommendTime}, {Id: "9", Score: 9, Categories: []string{""}, Timestamp: recommendTime}, }, recommends) @@ -1005,7 +1005,7 @@ func (suite *WorkerTestSuite) TestReplacement_CollaborativeFiltering() { err = suite.CacheClient.Set(ctx, cache.Time(cache.Key(cache.LastUpdateUserRecommendTime, "0"), time.Now().AddDate(-1, 0, 0))) suite.NoError(err) // insert popular items - err = suite.CacheClient.AddDocuments(ctx, cache.PopularItems, "", []cache.Document{ + err = suite.CacheClient.AddScores(ctx, cache.PopularItems, "", []cache.Score{ {Id: "7", Score: 10, Categories: []string{""}}, {Id: "6", Score: 9, Categories: []string{""}}, {Id: "5", Score: 8, Categories: []string{""}}}) @@ -1022,9 +1022,9 @@ func (suite *WorkerTestSuite) TestReplacement_CollaborativeFiltering() { recommendTime, err = suite.CacheClient.Get(ctx, cache.Key(cache.LastUpdateUserRecommendTime, "0")).Time() suite.NoError(err) // read recommend result - recommends, err = suite.CacheClient.SearchDocuments(ctx, cache.OfflineRecommend, "0", []string{""}, 0, 3) + recommends, err = suite.CacheClient.SearchScores(ctx, cache.OfflineRecommend, "0", []string{""}, 0, 3) suite.NoError(err) - suite.Equal([]cache.Document{ + suite.Equal([]cache.Score{ {Id: "10", Score: 9, Categories: []string{""}, Timestamp: recommendTime}, {Id: "9", Score: 7.4, Categories: []string{""}, Timestamp: recommendTime}, {Id: "7", Score: 7, Categories: []string{""}, Timestamp: recommendTime}, @@ -1037,23 +1037,23 @@ func (suite *WorkerTestSuite) TestUserActivity() { suite.NoError(err) err = suite.CacheClient.Set(ctx, cache.Time(cache.Key(cache.LastModifyUserTime, "1"), time.Now().AddDate(0, 0, 
-10))) suite.NoError(err) - err = suite.CacheClient.AddDocuments(ctx, cache.OfflineRecommend, "0", []cache.Document{{Id: "0", Score: 1, Categories: []string{""}}}) + err = suite.CacheClient.AddScores(ctx, cache.OfflineRecommend, "0", []cache.Score{{Id: "0", Score: 1, Categories: []string{""}}}) suite.NoError(err) - err = suite.CacheClient.AddDocuments(ctx, cache.OfflineRecommend, "1", []cache.Document{{Id: "1", Score: 1, Categories: []string{""}}}) + err = suite.CacheClient.AddScores(ctx, cache.OfflineRecommend, "1", []cache.Score{{Id: "1", Score: 1, Categories: []string{""}}}) suite.NoError(err) - err = suite.CacheClient.AddDocuments(ctx, cache.OfflineRecommend, "2", []cache.Document{{Id: "2", Score: 1, Categories: []string{""}}}) + err = suite.CacheClient.AddScores(ctx, cache.OfflineRecommend, "2", []cache.Score{{Id: "2", Score: 1, Categories: []string{""}}}) suite.NoError(err) suite.True(suite.checkUserActiveTime(ctx, "0")) suite.True(suite.checkUserActiveTime(ctx, "1")) suite.True(suite.checkUserActiveTime(ctx, "2")) - docs, err := suite.CacheClient.SearchDocuments(ctx, cache.OfflineRecommend, "0", []string{""}, 0, 1) + docs, err := suite.CacheClient.SearchScores(ctx, cache.OfflineRecommend, "0", []string{""}, 0, 1) suite.NoError(err) suite.NotEmpty(docs) - docs, err = suite.CacheClient.SearchDocuments(ctx, cache.OfflineRecommend, "1", []string{""}, 0, 1) + docs, err = suite.CacheClient.SearchScores(ctx, cache.OfflineRecommend, "1", []string{""}, 0, 1) suite.NoError(err) suite.NotEmpty(docs) - docs, err = suite.CacheClient.SearchDocuments(ctx, cache.OfflineRecommend, "2", []string{""}, 0, 1) + docs, err = suite.CacheClient.SearchScores(ctx, cache.OfflineRecommend, "2", []string{""}, 0, 1) suite.NoError(err) suite.NotEmpty(docs) @@ -1061,13 +1061,13 @@ func (suite *WorkerTestSuite) TestUserActivity() { suite.True(suite.checkUserActiveTime(ctx, "0")) suite.False(suite.checkUserActiveTime(ctx, "1")) suite.True(suite.checkUserActiveTime(ctx, "2")) - docs, err = 
suite.CacheClient.SearchDocuments(ctx, cache.OfflineRecommend, "0", []string{""}, 0, 1) + docs, err = suite.CacheClient.SearchScores(ctx, cache.OfflineRecommend, "0", []string{""}, 0, 1) suite.NoError(err) suite.NotEmpty(docs) - docs, err = suite.CacheClient.SearchDocuments(ctx, cache.OfflineRecommend, "1", []string{""}, 0, 1) + docs, err = suite.CacheClient.SearchScores(ctx, cache.OfflineRecommend, "1", []string{""}, 0, 1) suite.NoError(err) suite.Empty(docs) - docs, err = suite.CacheClient.SearchDocuments(ctx, cache.OfflineRecommend, "2", []string{""}, 0, 1) + docs, err = suite.CacheClient.SearchScores(ctx, cache.OfflineRecommend, "2", []string{""}, 0, 1) suite.NoError(err) suite.NotEmpty(docs) } From 76b87811ceca3478427d528a3a6885e57b7026a1 Mon Sep 17 00:00:00 2001 From: zhenghaoz Date: Mon, 11 Nov 2024 22:30:19 +0800 Subject: [PATCH 07/14] support dump and restore binary backup (#886) --- master/rest.go | 309 ++++++++++ master/rest_test.go | 73 +++ protocol/protocol.pb.go | 1086 ++++++++++++++++++++++++---------- protocol/protocol.proto | 27 + protocol/protocol_grpc.pb.go | 127 ++-- 5 files changed, 1222 insertions(+), 400 deletions(-) diff --git a/master/rest.go b/master/rest.go index 8cc351d40..72c641fad 100644 --- a/master/rest.go +++ b/master/rest.go @@ -17,6 +17,7 @@ package master import ( "bufio" "context" + "encoding/binary" "encoding/json" "fmt" "io" @@ -45,10 +46,13 @@ import ( "github.com/zhenghaoz/gorse/config" "github.com/zhenghaoz/gorse/model/click" "github.com/zhenghaoz/gorse/model/ranking" + "github.com/zhenghaoz/gorse/protocol" "github.com/zhenghaoz/gorse/server" "github.com/zhenghaoz/gorse/storage/cache" "github.com/zhenghaoz/gorse/storage/data" "go.uber.org/zap" + "google.golang.org/protobuf/proto" + "google.golang.org/protobuf/types/known/timestamppb" ) func (m *Master) CreateWebService() { @@ -225,6 +229,8 @@ func (m *Master) StartHttpServer() { container.Handle("/api/bulk/users", http.HandlerFunc(m.importExportUsers)) 
container.Handle("/api/bulk/items", http.HandlerFunc(m.importExportItems)) container.Handle("/api/bulk/feedback", http.HandlerFunc(m.importExportFeedback)) + container.Handle("/api/dump", http.HandlerFunc(m.dump)) + container.Handle("/api/restore", http.HandlerFunc(m.restore)) if m.workerScheduleHandler == nil { container.Handle("/api/admin/schedule", http.HandlerFunc(m.scheduleAPIHandler)) } else { @@ -1499,3 +1505,306 @@ func (s *Master) checkAdmin(request *http.Request) bool { } return false } + +const ( + EOF = int64(0) + UserStream = int64(-1) + ItemStream = int64(-2) + FeedbackStream = int64(-3) +) + +type DumpStats struct { + Users int + Items int + Feedback int + Duration time.Duration +} + +func writeDump[T proto.Message](w io.Writer, data T) error { + bytes, err := proto.Marshal(data) + if err != nil { + return err + } + if err = binary.Write(w, binary.LittleEndian, int64(len(bytes))); err != nil { + return err + } + if _, err = w.Write(bytes); err != nil { + return err + } + return nil +} + +func readDump[T proto.Message](r io.Reader, data T) (int64, error) { + var size int64 + if err := binary.Read(r, binary.LittleEndian, &size); err != nil { + return 0, err + } + if size <= 0 { + return size, nil + } + bytes := make([]byte, size) + if _, err := r.Read(bytes); err != nil { + return 0, err + } + return size, proto.Unmarshal(bytes, data) +} + +func (m *Master) dump(response http.ResponseWriter, request *http.Request) { + if !m.checkAdmin(request) { + writeError(response, http.StatusUnauthorized, "unauthorized") + return + } + if request.Method != http.MethodGet { + writeError(response, http.StatusMethodNotAllowed, "method not allowed") + return + } + response.Header().Set("Content-Type", "application/octet-stream") + var stats DumpStats + start := time.Now() + // dump users + if err := binary.Write(response, binary.LittleEndian, UserStream); err != nil { + writeError(response, http.StatusInternalServerError, err.Error()) + return + } + userStream, errChan 
:= m.DataClient.GetUserStream(context.Background(), batchSize) + for users := range userStream { + for _, user := range users { + labels, err := json.Marshal(user.Labels) + if err != nil { + writeError(response, http.StatusInternalServerError, err.Error()) + return + } + if err := writeDump(response, &protocol.User{ + UserId: user.UserId, + Labels: labels, + Comment: user.Comment, + }); err != nil { + writeError(response, http.StatusInternalServerError, err.Error()) + return + } + stats.Users++ + } + } + if err := <-errChan; err != nil { + writeError(response, http.StatusInternalServerError, err.Error()) + return + } + // dump items + if err := binary.Write(response, binary.LittleEndian, ItemStream); err != nil { + writeError(response, http.StatusInternalServerError, err.Error()) + return + } + itemStream, errChan := m.DataClient.GetItemStream(context.Background(), batchSize, nil) + for items := range itemStream { + for _, item := range items { + labels, err := json.Marshal(item.Labels) + if err != nil { + writeError(response, http.StatusInternalServerError, err.Error()) + return + } + if err := writeDump(response, &protocol.Item{ + ItemId: item.ItemId, + IsHidden: item.IsHidden, + Categories: item.Categories, + Timestamp: timestamppb.New(item.Timestamp), + Labels: labels, + Comment: item.Comment, + }); err != nil { + writeError(response, http.StatusInternalServerError, err.Error()) + return + } + stats.Items++ + } + } + if err := <-errChan; err != nil { + writeError(response, http.StatusInternalServerError, err.Error()) + return + } + // dump feedback + if err := binary.Write(response, binary.LittleEndian, FeedbackStream); err != nil { + writeError(response, http.StatusInternalServerError, err.Error()) + return + } + feedbackStream, errChan := m.DataClient.GetFeedbackStream(context.Background(), batchSize, data.WithEndTime(*m.Config.Now())) + for feedbacks := range feedbackStream { + for _, feedback := range feedbacks { + if err := writeDump(response, 
&protocol.Feedback{ + FeedbackType: feedback.FeedbackType, + UserId: feedback.UserId, + ItemId: feedback.ItemId, + Timestamp: timestamppb.New(feedback.Timestamp), + Comment: feedback.Comment, + }); err != nil { + writeError(response, http.StatusInternalServerError, err.Error()) + return + } + stats.Feedback++ + } + } + if err := <-errChan; err != nil { + writeError(response, http.StatusInternalServerError, err.Error()) + return + } + // dump EOF + if err := binary.Write(response, binary.LittleEndian, EOF); err != nil { + writeError(response, http.StatusInternalServerError, err.Error()) + return + } + stats.Duration = time.Since(start) + log.Logger().Info("complete dump", + zap.Int("users", stats.Users), + zap.Int("items", stats.Items), + zap.Int("feedback", stats.Feedback), + zap.Duration("duration", stats.Duration)) + server.Ok(restful.NewResponse(response), stats) +} + +func (m *Master) restore(response http.ResponseWriter, request *http.Request) { + if !m.checkAdmin(request) { + writeError(response, http.StatusUnauthorized, "unauthorized") + return + } + if request.Method != http.MethodPost { + writeError(response, http.StatusMethodNotAllowed, "method not allowed") + return + } + var ( + flag int64 + err error + stats DumpStats + start = time.Now() + ) + if err = binary.Read(request.Body, binary.LittleEndian, &flag); err != nil { + if errors.Is(err, io.EOF) { + server.Ok(restful.NewResponse(response), struct{}{}) + return + } else { + writeError(response, http.StatusInternalServerError, err.Error()) + return + } + } + for flag != EOF { + switch flag { + case UserStream: + users := make([]data.User, 0, batchSize) + for { + var user protocol.User + if flag, err = readDump(request.Body, &user); err != nil { + writeError(response, http.StatusInternalServerError, err.Error()) + return + } + if flag <= 0 { + break + } + labels := make(map[string]interface{}) + if err := json.Unmarshal(user.Labels, &labels); err != nil { + writeError(response, 
http.StatusInternalServerError, err.Error()) + return + } + users = append(users, data.User{ + UserId: user.UserId, + Labels: labels, + Comment: user.Comment, + }) + stats.Users++ + if len(users) == batchSize { + if err := m.DataClient.BatchInsertUsers(context.Background(), users); err != nil { + writeError(response, http.StatusInternalServerError, err.Error()) + return + } + users = users[:0] + } + } + if len(users) > 0 { + if err := m.DataClient.BatchInsertUsers(context.Background(), users); err != nil { + writeError(response, http.StatusInternalServerError, err.Error()) + return + } + } + case ItemStream: + items := make([]data.Item, 0, batchSize) + for { + var item protocol.Item + if flag, err = readDump(request.Body, &item); err != nil { + writeError(response, http.StatusInternalServerError, err.Error()) + return + } + if flag <= 0 { + break + } + labels := make(map[string]interface{}) + if err := json.Unmarshal(item.Labels, &labels); err != nil { + writeError(response, http.StatusInternalServerError, err.Error()) + return + } + items = append(items, data.Item{ + ItemId: item.ItemId, + IsHidden: item.IsHidden, + Categories: item.Categories, + Timestamp: item.Timestamp.AsTime(), + Labels: labels, + Comment: item.Comment, + }) + stats.Items++ + if len(items) == batchSize { + if err := m.DataClient.BatchInsertItems(context.Background(), items); err != nil { + writeError(response, http.StatusInternalServerError, err.Error()) + return + } + items = items[:0] + } + } + if len(items) > 0 { + if err := m.DataClient.BatchInsertItems(context.Background(), items); err != nil { + writeError(response, http.StatusInternalServerError, err.Error()) + return + } + } + case FeedbackStream: + feedbacks := make([]data.Feedback, 0, batchSize) + for { + var feedback protocol.Feedback + if flag, err = readDump(request.Body, &feedback); err != nil { + writeError(response, http.StatusInternalServerError, err.Error()) + return + } + if flag <= 0 { + break + } + feedbacks = 
append(feedbacks, data.Feedback{ + FeedbackKey: data.FeedbackKey{ + FeedbackType: feedback.FeedbackType, + UserId: feedback.UserId, + ItemId: feedback.ItemId, + }, + Timestamp: feedback.Timestamp.AsTime(), + Comment: feedback.Comment, + }) + stats.Feedback++ + if len(feedbacks) == batchSize { + if err := m.DataClient.BatchInsertFeedback(context.Background(), feedbacks, true, true, true); err != nil { + writeError(response, http.StatusInternalServerError, err.Error()) + return + } + feedbacks = feedbacks[:0] + } + } + if len(feedbacks) > 0 { + if err := m.DataClient.BatchInsertFeedback(context.Background(), feedbacks, true, true, true); err != nil { + writeError(response, http.StatusInternalServerError, err.Error()) + return + } + } + default: + writeError(response, http.StatusInternalServerError, fmt.Sprintf("unknown flag %v", flag)) + return + } + } + stats.Duration = time.Since(start) + log.Logger().Info("complete restore", + zap.Int("users", stats.Users), + zap.Int("items", stats.Items), + zap.Int("feedback", stats.Feedback), + zap.Duration("duration", stats.Duration)) + server.Ok(restful.NewResponse(response), stats) +} diff --git a/master/rest_test.go b/master/rest_test.go index bb5e4d843..692d4f239 100644 --- a/master/rest_test.go +++ b/master/rest_test.go @@ -991,3 +991,76 @@ func TestMaster_TokenLogin(t *testing.T) { Status(http.StatusOK). 
End() } + +func TestDumpAndRestore(t *testing.T) { + s, cookie := newMockServer(t) + defer s.Close(t) + ctx := context.Background() + // insert users + users := make([]data.User, batchSize+1) + for i := range users { + users[i] = data.User{ + UserId: fmt.Sprintf("%05d", i), + Labels: map[string]any{"a": fmt.Sprintf("%d", 2*i+1), "b": fmt.Sprintf("%d", 2*i+2)}, + } + } + err := s.DataClient.BatchInsertUsers(ctx, users) + assert.NoError(t, err) + // insert items + items := make([]data.Item, batchSize+1) + for i := range items { + items[i] = data.Item{ + ItemId: fmt.Sprintf("%05d", i), + Labels: map[string]any{"a": fmt.Sprintf("%d", 2*i+1), "b": fmt.Sprintf("%d", 2*i+2)}, + } + } + err = s.DataClient.BatchInsertItems(ctx, items) + assert.NoError(t, err) + // insert feedback + feedback := make([]data.Feedback, batchSize+1) + for i := range feedback { + feedback[i] = data.Feedback{ + FeedbackKey: data.FeedbackKey{ + FeedbackType: "click", + UserId: fmt.Sprintf("%05d", i), + ItemId: fmt.Sprintf("%05d", i), + }, + } + } + err = s.DataClient.BatchInsertFeedback(ctx, feedback, true, true, true) + assert.NoError(t, err) + + // dump data + req := httptest.NewRequest("GET", "https://example.com/", nil) + req.Header.Set("Cookie", cookie) + w := httptest.NewRecorder() + s.dump(w, req) + assert.Equal(t, http.StatusOK, w.Code) + + // restore data + err = s.DataClient.Purge() + assert.NoError(t, err) + req = httptest.NewRequest("POST", "https://example.com/", bytes.NewReader(w.Body.Bytes())) + req.Header.Set("Cookie", cookie) + req.Header.Set("Content-Type", "application/octet-stream") + w = httptest.NewRecorder() + s.restore(w, req) + assert.Equal(t, http.StatusOK, w.Code) + + // check data + _, returnUsers, err := s.DataClient.GetUsers(ctx, "", len(users)) + assert.NoError(t, err) + if assert.Equal(t, len(users), len(returnUsers)) { + assert.Equal(t, users, returnUsers) + } + _, returnItems, err := s.DataClient.GetItems(ctx, "", len(items), nil) + assert.NoError(t, err) + if 
assert.Equal(t, len(items), len(returnItems)) { + assert.Equal(t, items, returnItems) + } + _, returnFeedback, err := s.DataClient.GetFeedback(ctx, "", len(feedback), nil, lo.ToPtr(time.Now())) + assert.NoError(t, err) + if assert.Equal(t, len(feedback), len(returnFeedback)) { + assert.Equal(t, feedback, returnFeedback) + } +} diff --git a/protocol/protocol.pb.go b/protocol/protocol.pb.go index f14fd456e..dc447388f 100644 --- a/protocol/protocol.pb.go +++ b/protocol/protocol.pb.go @@ -1,24 +1,39 @@ +// Copyright 2020 gorse Project Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + // Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.35.1 +// protoc v5.28.3 // source: protocol.proto package protocol import ( - fmt "fmt" - proto "github.com/golang/protobuf/proto" - math "math" + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + timestamppb "google.golang.org/protobuf/types/known/timestamppb" + reflect "reflect" + sync "sync" ) -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. 
-const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) type NodeType int32 @@ -28,463 +43,894 @@ const ( NodeType_ClientNode NodeType = 2 ) -var NodeType_name = map[int32]string{ - 0: "ServerNode", - 1: "WorkerNode", - 2: "ClientNode", -} +// Enum value maps for NodeType. +var ( + NodeType_name = map[int32]string{ + 0: "ServerNode", + 1: "WorkerNode", + 2: "ClientNode", + } + NodeType_value = map[string]int32{ + "ServerNode": 0, + "WorkerNode": 1, + "ClientNode": 2, + } +) -var NodeType_value = map[string]int32{ - "ServerNode": 0, - "WorkerNode": 1, - "ClientNode": 2, +func (x NodeType) Enum() *NodeType { + p := new(NodeType) + *p = x + return p } func (x NodeType) String() string { - return proto.EnumName(NodeType_name, int32(x)) + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (NodeType) Descriptor() protoreflect.EnumDescriptor { + return file_protocol_proto_enumTypes[0].Descriptor() +} + +func (NodeType) Type() protoreflect.EnumType { + return &file_protocol_proto_enumTypes[0] } +func (x NodeType) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use NodeType.Descriptor instead. 
func (NodeType) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_2bc2336598a3f7e0, []int{0} + return file_protocol_proto_rawDescGZIP(), []int{0} } -type Meta struct { - Config string `protobuf:"bytes,1,opt,name=config,proto3" json:"config,omitempty"` - RankingModelVersion int64 `protobuf:"varint,3,opt,name=ranking_model_version,json=rankingModelVersion,proto3" json:"ranking_model_version,omitempty"` - ClickModelVersion int64 `protobuf:"varint,4,opt,name=click_model_version,json=clickModelVersion,proto3" json:"click_model_version,omitempty"` - Me string `protobuf:"bytes,5,opt,name=me,proto3" json:"me,omitempty"` - Servers []string `protobuf:"bytes,6,rep,name=servers,proto3" json:"servers,omitempty"` - Workers []string `protobuf:"bytes,7,rep,name=workers,proto3" json:"workers,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *Meta) Reset() { *m = Meta{} } -func (m *Meta) String() string { return proto.CompactTextString(m) } -func (*Meta) ProtoMessage() {} -func (*Meta) Descriptor() ([]byte, []int) { - return fileDescriptor_2bc2336598a3f7e0, []int{0} +type User struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + UserId string `protobuf:"bytes,1,opt,name=user_id,json=userId,proto3" json:"user_id,omitempty"` + Labels []byte `protobuf:"bytes,2,opt,name=labels,proto3" json:"labels,omitempty"` + Comment string `protobuf:"bytes,3,opt,name=comment,proto3" json:"comment,omitempty"` } -func (m *Meta) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_Meta.Unmarshal(m, b) +func (x *User) Reset() { + *x = User{} + mi := &file_protocol_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } -func (m *Meta) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_Meta.Marshal(b, m, deterministic) + +func (x *User) String() string { + return 
protoimpl.X.MessageStringOf(x) } -func (m *Meta) XXX_Merge(src proto.Message) { - xxx_messageInfo_Meta.Merge(m, src) + +func (*User) ProtoMessage() {} + +func (x *User) ProtoReflect() protoreflect.Message { + mi := &file_protocol_proto_msgTypes[0] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) } -func (m *Meta) XXX_Size() int { - return xxx_messageInfo_Meta.Size(m) + +// Deprecated: Use User.ProtoReflect.Descriptor instead. +func (*User) Descriptor() ([]byte, []int) { + return file_protocol_proto_rawDescGZIP(), []int{0} } -func (m *Meta) XXX_DiscardUnknown() { - xxx_messageInfo_Meta.DiscardUnknown(m) + +func (x *User) GetUserId() string { + if x != nil { + return x.UserId + } + return "" } -var xxx_messageInfo_Meta proto.InternalMessageInfo +func (x *User) GetLabels() []byte { + if x != nil { + return x.Labels + } + return nil +} -func (m *Meta) GetConfig() string { - if m != nil { - return m.Config +func (x *User) GetComment() string { + if x != nil { + return x.Comment } return "" } -func (m *Meta) GetRankingModelVersion() int64 { - if m != nil { - return m.RankingModelVersion +type Item struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Namespace string `protobuf:"bytes,1,opt,name=namespace,proto3" json:"namespace,omitempty"` + ItemId string `protobuf:"bytes,2,opt,name=item_id,json=itemId,proto3" json:"item_id,omitempty"` + IsHidden bool `protobuf:"varint,3,opt,name=is_hidden,json=isHidden,proto3" json:"is_hidden,omitempty"` + Categories []string `protobuf:"bytes,4,rep,name=categories,proto3" json:"categories,omitempty"` + Timestamp *timestamppb.Timestamp `protobuf:"bytes,5,opt,name=timestamp,proto3" json:"timestamp,omitempty"` + Labels []byte `protobuf:"bytes,6,opt,name=labels,proto3" json:"labels,omitempty"` + Comment string 
`protobuf:"bytes,7,opt,name=comment,proto3" json:"comment,omitempty"` +} + +func (x *Item) Reset() { + *x = Item{} + mi := &file_protocol_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *Item) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Item) ProtoMessage() {} + +func (x *Item) ProtoReflect() protoreflect.Message { + mi := &file_protocol_proto_msgTypes[1] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms } - return 0 + return mi.MessageOf(x) } -func (m *Meta) GetClickModelVersion() int64 { - if m != nil { - return m.ClickModelVersion +// Deprecated: Use Item.ProtoReflect.Descriptor instead. +func (*Item) Descriptor() ([]byte, []int) { + return file_protocol_proto_rawDescGZIP(), []int{1} +} + +func (x *Item) GetNamespace() string { + if x != nil { + return x.Namespace } - return 0 + return "" } -func (m *Meta) GetMe() string { - if m != nil { - return m.Me +func (x *Item) GetItemId() string { + if x != nil { + return x.ItemId } return "" } -func (m *Meta) GetServers() []string { - if m != nil { - return m.Servers +func (x *Item) GetIsHidden() bool { + if x != nil { + return x.IsHidden + } + return false +} + +func (x *Item) GetCategories() []string { + if x != nil { + return x.Categories } return nil } -func (m *Meta) GetWorkers() []string { - if m != nil { - return m.Workers +func (x *Item) GetTimestamp() *timestamppb.Timestamp { + if x != nil { + return x.Timestamp } return nil } -type Fragment struct { - Data []byte `protobuf:"bytes,1,opt,name=data,proto3" json:"data,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` +func (x *Item) GetLabels() []byte { + if x != nil { + return x.Labels + } + return nil } -func (m *Fragment) Reset() { *m = Fragment{} } -func (m *Fragment) String() string { return 
proto.CompactTextString(m) } -func (*Fragment) ProtoMessage() {} -func (*Fragment) Descriptor() ([]byte, []int) { - return fileDescriptor_2bc2336598a3f7e0, []int{1} +func (x *Item) GetComment() string { + if x != nil { + return x.Comment + } + return "" } -func (m *Fragment) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_Fragment.Unmarshal(m, b) +type Feedback struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Namespace string `protobuf:"bytes,1,opt,name=namespace,proto3" json:"namespace,omitempty"` + FeedbackType string `protobuf:"bytes,2,opt,name=feedback_type,json=feedbackType,proto3" json:"feedback_type,omitempty"` + UserId string `protobuf:"bytes,3,opt,name=user_id,json=userId,proto3" json:"user_id,omitempty"` + ItemId string `protobuf:"bytes,4,opt,name=item_id,json=itemId,proto3" json:"item_id,omitempty"` + Timestamp *timestamppb.Timestamp `protobuf:"bytes,5,opt,name=timestamp,proto3" json:"timestamp,omitempty"` + Comment string `protobuf:"bytes,6,opt,name=comment,proto3" json:"comment,omitempty"` } -func (m *Fragment) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_Fragment.Marshal(b, m, deterministic) + +func (x *Feedback) Reset() { + *x = Feedback{} + mi := &file_protocol_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } -func (m *Fragment) XXX_Merge(src proto.Message) { - xxx_messageInfo_Fragment.Merge(m, src) + +func (x *Feedback) String() string { + return protoimpl.X.MessageStringOf(x) } -func (m *Fragment) XXX_Size() int { - return xxx_messageInfo_Fragment.Size(m) + +func (*Feedback) ProtoMessage() {} + +func (x *Feedback) ProtoReflect() protoreflect.Message { + mi := &file_protocol_proto_msgTypes[2] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) } -func (m 
*Fragment) XXX_DiscardUnknown() { - xxx_messageInfo_Fragment.DiscardUnknown(m) + +// Deprecated: Use Feedback.ProtoReflect.Descriptor instead. +func (*Feedback) Descriptor() ([]byte, []int) { + return file_protocol_proto_rawDescGZIP(), []int{2} } -var xxx_messageInfo_Fragment proto.InternalMessageInfo +func (x *Feedback) GetNamespace() string { + if x != nil { + return x.Namespace + } + return "" +} + +func (x *Feedback) GetFeedbackType() string { + if x != nil { + return x.FeedbackType + } + return "" +} + +func (x *Feedback) GetUserId() string { + if x != nil { + return x.UserId + } + return "" +} -func (m *Fragment) GetData() []byte { - if m != nil { - return m.Data +func (x *Feedback) GetItemId() string { + if x != nil { + return x.ItemId + } + return "" +} + +func (x *Feedback) GetTimestamp() *timestamppb.Timestamp { + if x != nil { + return x.Timestamp } return nil } -type VersionInfo struct { - Version int64 `protobuf:"varint,1,opt,name=version,proto3" json:"version,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` +func (x *Feedback) GetComment() string { + if x != nil { + return x.Comment + } + return "" } -func (m *VersionInfo) Reset() { *m = VersionInfo{} } -func (m *VersionInfo) String() string { return proto.CompactTextString(m) } -func (*VersionInfo) ProtoMessage() {} -func (*VersionInfo) Descriptor() ([]byte, []int) { - return fileDescriptor_2bc2336598a3f7e0, []int{2} +type Meta struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Config string `protobuf:"bytes,1,opt,name=config,proto3" json:"config,omitempty"` + RankingModelVersion int64 `protobuf:"varint,3,opt,name=ranking_model_version,json=rankingModelVersion,proto3" json:"ranking_model_version,omitempty"` + ClickModelVersion int64 `protobuf:"varint,4,opt,name=click_model_version,json=clickModelVersion,proto3" json:"click_model_version,omitempty"` + Me string 
`protobuf:"bytes,5,opt,name=me,proto3" json:"me,omitempty"` + Servers []string `protobuf:"bytes,6,rep,name=servers,proto3" json:"servers,omitempty"` + Workers []string `protobuf:"bytes,7,rep,name=workers,proto3" json:"workers,omitempty"` } -func (m *VersionInfo) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_VersionInfo.Unmarshal(m, b) +func (x *Meta) Reset() { + *x = Meta{} + mi := &file_protocol_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } -func (m *VersionInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_VersionInfo.Marshal(b, m, deterministic) + +func (x *Meta) String() string { + return protoimpl.X.MessageStringOf(x) } -func (m *VersionInfo) XXX_Merge(src proto.Message) { - xxx_messageInfo_VersionInfo.Merge(m, src) + +func (*Meta) ProtoMessage() {} + +func (x *Meta) ProtoReflect() protoreflect.Message { + mi := &file_protocol_proto_msgTypes[3] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) } -func (m *VersionInfo) XXX_Size() int { - return xxx_messageInfo_VersionInfo.Size(m) + +// Deprecated: Use Meta.ProtoReflect.Descriptor instead. 
+func (*Meta) Descriptor() ([]byte, []int) { + return file_protocol_proto_rawDescGZIP(), []int{3} } -func (m *VersionInfo) XXX_DiscardUnknown() { - xxx_messageInfo_VersionInfo.DiscardUnknown(m) + +func (x *Meta) GetConfig() string { + if x != nil { + return x.Config + } + return "" } -var xxx_messageInfo_VersionInfo proto.InternalMessageInfo +func (x *Meta) GetRankingModelVersion() int64 { + if x != nil { + return x.RankingModelVersion + } + return 0 +} -func (m *VersionInfo) GetVersion() int64 { - if m != nil { - return m.Version +func (x *Meta) GetClickModelVersion() int64 { + if x != nil { + return x.ClickModelVersion } return 0 } -type NodeInfo struct { - NodeType NodeType `protobuf:"varint,1,opt,name=node_type,json=nodeType,proto3,enum=protocol.NodeType" json:"node_type,omitempty"` - NodeName string `protobuf:"bytes,2,opt,name=node_name,json=nodeName,proto3" json:"node_name,omitempty"` - HttpPort int64 `protobuf:"varint,3,opt,name=http_port,json=httpPort,proto3" json:"http_port,omitempty"` - BinaryVersion string `protobuf:"bytes,4,opt,name=binary_version,json=binaryVersion,proto3" json:"binary_version,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *NodeInfo) Reset() { *m = NodeInfo{} } -func (m *NodeInfo) String() string { return proto.CompactTextString(m) } -func (*NodeInfo) ProtoMessage() {} -func (*NodeInfo) Descriptor() ([]byte, []int) { - return fileDescriptor_2bc2336598a3f7e0, []int{3} +func (x *Meta) GetMe() string { + if x != nil { + return x.Me + } + return "" } -func (m *NodeInfo) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_NodeInfo.Unmarshal(m, b) +func (x *Meta) GetServers() []string { + if x != nil { + return x.Servers + } + return nil } -func (m *NodeInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_NodeInfo.Marshal(b, m, deterministic) + +func (x *Meta) GetWorkers() []string { + if x != nil { + return 
x.Workers + } + return nil } -func (m *NodeInfo) XXX_Merge(src proto.Message) { - xxx_messageInfo_NodeInfo.Merge(m, src) + +type Fragment struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Data []byte `protobuf:"bytes,1,opt,name=data,proto3" json:"data,omitempty"` } -func (m *NodeInfo) XXX_Size() int { - return xxx_messageInfo_NodeInfo.Size(m) + +func (x *Fragment) Reset() { + *x = Fragment{} + mi := &file_protocol_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } -func (m *NodeInfo) XXX_DiscardUnknown() { - xxx_messageInfo_NodeInfo.DiscardUnknown(m) + +func (x *Fragment) String() string { + return protoimpl.X.MessageStringOf(x) } -var xxx_messageInfo_NodeInfo proto.InternalMessageInfo +func (*Fragment) ProtoMessage() {} -func (m *NodeInfo) GetNodeType() NodeType { - if m != nil { - return m.NodeType +func (x *Fragment) ProtoReflect() protoreflect.Message { + mi := &file_protocol_proto_msgTypes[4] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Fragment.ProtoReflect.Descriptor instead. 
+func (*Fragment) Descriptor() ([]byte, []int) { + return file_protocol_proto_rawDescGZIP(), []int{4} +} + +func (x *Fragment) GetData() []byte { + if x != nil { + return x.Data + } + return nil +} + +type VersionInfo struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Version int64 `protobuf:"varint,1,opt,name=version,proto3" json:"version,omitempty"` +} + +func (x *VersionInfo) Reset() { + *x = VersionInfo{} + mi := &file_protocol_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *VersionInfo) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*VersionInfo) ProtoMessage() {} + +func (x *VersionInfo) ProtoReflect() protoreflect.Message { + mi := &file_protocol_proto_msgTypes[5] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use VersionInfo.ProtoReflect.Descriptor instead. 
+func (*VersionInfo) Descriptor() ([]byte, []int) { + return file_protocol_proto_rawDescGZIP(), []int{5} +} + +func (x *VersionInfo) GetVersion() int64 { + if x != nil { + return x.Version + } + return 0 +} + +type NodeInfo struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + NodeType NodeType `protobuf:"varint,1,opt,name=node_type,json=nodeType,proto3,enum=protocol.NodeType" json:"node_type,omitempty"` + NodeName string `protobuf:"bytes,2,opt,name=node_name,json=nodeName,proto3" json:"node_name,omitempty"` + HttpPort int64 `protobuf:"varint,3,opt,name=http_port,json=httpPort,proto3" json:"http_port,omitempty"` + BinaryVersion string `protobuf:"bytes,4,opt,name=binary_version,json=binaryVersion,proto3" json:"binary_version,omitempty"` +} + +func (x *NodeInfo) Reset() { + *x = NodeInfo{} + mi := &file_protocol_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *NodeInfo) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*NodeInfo) ProtoMessage() {} + +func (x *NodeInfo) ProtoReflect() protoreflect.Message { + mi := &file_protocol_proto_msgTypes[6] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use NodeInfo.ProtoReflect.Descriptor instead. 
+func (*NodeInfo) Descriptor() ([]byte, []int) { + return file_protocol_proto_rawDescGZIP(), []int{6} +} + +func (x *NodeInfo) GetNodeType() NodeType { + if x != nil { + return x.NodeType } return NodeType_ServerNode } -func (m *NodeInfo) GetNodeName() string { - if m != nil { - return m.NodeName +func (x *NodeInfo) GetNodeName() string { + if x != nil { + return x.NodeName } return "" } -func (m *NodeInfo) GetHttpPort() int64 { - if m != nil { - return m.HttpPort +func (x *NodeInfo) GetHttpPort() int64 { + if x != nil { + return x.HttpPort } return 0 } -func (m *NodeInfo) GetBinaryVersion() string { - if m != nil { - return m.BinaryVersion +func (x *NodeInfo) GetBinaryVersion() string { + if x != nil { + return x.BinaryVersion } return "" } type Progress struct { - Tracer string `protobuf:"bytes,1,opt,name=tracer,proto3" json:"tracer,omitempty"` - Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` - Status string `protobuf:"bytes,3,opt,name=status,proto3" json:"status,omitempty"` - Error string `protobuf:"bytes,4,opt,name=error,proto3" json:"error,omitempty"` - Count int64 `protobuf:"varint,5,opt,name=count,proto3" json:"count,omitempty"` - Total int64 `protobuf:"varint,6,opt,name=total,proto3" json:"total,omitempty"` - StartTime int64 `protobuf:"varint,7,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` - FinishTime int64 `protobuf:"varint,8,opt,name=finish_time,json=finishTime,proto3" json:"finish_time,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *Progress) Reset() { *m = Progress{} } -func (m *Progress) String() string { return proto.CompactTextString(m) } -func (*Progress) ProtoMessage() {} -func (*Progress) Descriptor() ([]byte, []int) { - return fileDescriptor_2bc2336598a3f7e0, []int{4} -} + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields -func (m *Progress) 
XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_Progress.Unmarshal(m, b) -} -func (m *Progress) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_Progress.Marshal(b, m, deterministic) + Tracer string `protobuf:"bytes,1,opt,name=tracer,proto3" json:"tracer,omitempty"` + Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + Status string `protobuf:"bytes,3,opt,name=status,proto3" json:"status,omitempty"` + Error string `protobuf:"bytes,4,opt,name=error,proto3" json:"error,omitempty"` + Count int64 `protobuf:"varint,5,opt,name=count,proto3" json:"count,omitempty"` + Total int64 `protobuf:"varint,6,opt,name=total,proto3" json:"total,omitempty"` + StartTime int64 `protobuf:"varint,7,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + FinishTime int64 `protobuf:"varint,8,opt,name=finish_time,json=finishTime,proto3" json:"finish_time,omitempty"` } -func (m *Progress) XXX_Merge(src proto.Message) { - xxx_messageInfo_Progress.Merge(m, src) + +func (x *Progress) Reset() { + *x = Progress{} + mi := &file_protocol_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } -func (m *Progress) XXX_Size() int { - return xxx_messageInfo_Progress.Size(m) + +func (x *Progress) String() string { + return protoimpl.X.MessageStringOf(x) } -func (m *Progress) XXX_DiscardUnknown() { - xxx_messageInfo_Progress.DiscardUnknown(m) + +func (*Progress) ProtoMessage() {} + +func (x *Progress) ProtoReflect() protoreflect.Message { + mi := &file_protocol_proto_msgTypes[7] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) } -var xxx_messageInfo_Progress proto.InternalMessageInfo +// Deprecated: Use Progress.ProtoReflect.Descriptor instead. 
+func (*Progress) Descriptor() ([]byte, []int) { + return file_protocol_proto_rawDescGZIP(), []int{7} +} -func (m *Progress) GetTracer() string { - if m != nil { - return m.Tracer +func (x *Progress) GetTracer() string { + if x != nil { + return x.Tracer } return "" } -func (m *Progress) GetName() string { - if m != nil { - return m.Name +func (x *Progress) GetName() string { + if x != nil { + return x.Name } return "" } -func (m *Progress) GetStatus() string { - if m != nil { - return m.Status +func (x *Progress) GetStatus() string { + if x != nil { + return x.Status } return "" } -func (m *Progress) GetError() string { - if m != nil { - return m.Error +func (x *Progress) GetError() string { + if x != nil { + return x.Error } return "" } -func (m *Progress) GetCount() int64 { - if m != nil { - return m.Count +func (x *Progress) GetCount() int64 { + if x != nil { + return x.Count } return 0 } -func (m *Progress) GetTotal() int64 { - if m != nil { - return m.Total +func (x *Progress) GetTotal() int64 { + if x != nil { + return x.Total } return 0 } -func (m *Progress) GetStartTime() int64 { - if m != nil { - return m.StartTime +func (x *Progress) GetStartTime() int64 { + if x != nil { + return x.StartTime } return 0 } -func (m *Progress) GetFinishTime() int64 { - if m != nil { - return m.FinishTime +func (x *Progress) GetFinishTime() int64 { + if x != nil { + return x.FinishTime } return 0 } type PushProgressRequest struct { - Progress []*Progress `protobuf:"bytes,1,rep,name=progress,proto3" json:"progress,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields -func (m *PushProgressRequest) Reset() { *m = PushProgressRequest{} } -func (m *PushProgressRequest) String() string { return proto.CompactTextString(m) } -func (*PushProgressRequest) ProtoMessage() {} -func (*PushProgressRequest) 
Descriptor() ([]byte, []int) { - return fileDescriptor_2bc2336598a3f7e0, []int{5} + Progress []*Progress `protobuf:"bytes,1,rep,name=progress,proto3" json:"progress,omitempty"` } -func (m *PushProgressRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_PushProgressRequest.Unmarshal(m, b) +func (x *PushProgressRequest) Reset() { + *x = PushProgressRequest{} + mi := &file_protocol_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } -func (m *PushProgressRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_PushProgressRequest.Marshal(b, m, deterministic) -} -func (m *PushProgressRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_PushProgressRequest.Merge(m, src) -} -func (m *PushProgressRequest) XXX_Size() int { - return xxx_messageInfo_PushProgressRequest.Size(m) + +func (x *PushProgressRequest) String() string { + return protoimpl.X.MessageStringOf(x) } -func (m *PushProgressRequest) XXX_DiscardUnknown() { - xxx_messageInfo_PushProgressRequest.DiscardUnknown(m) + +func (*PushProgressRequest) ProtoMessage() {} + +func (x *PushProgressRequest) ProtoReflect() protoreflect.Message { + mi := &file_protocol_proto_msgTypes[8] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) } -var xxx_messageInfo_PushProgressRequest proto.InternalMessageInfo +// Deprecated: Use PushProgressRequest.ProtoReflect.Descriptor instead. 
+func (*PushProgressRequest) Descriptor() ([]byte, []int) { + return file_protocol_proto_rawDescGZIP(), []int{8} +} -func (m *PushProgressRequest) GetProgress() []*Progress { - if m != nil { - return m.Progress +func (x *PushProgressRequest) GetProgress() []*Progress { + if x != nil { + return x.Progress } return nil } type PushProgressResponse struct { - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields } -func (m *PushProgressResponse) Reset() { *m = PushProgressResponse{} } -func (m *PushProgressResponse) String() string { return proto.CompactTextString(m) } -func (*PushProgressResponse) ProtoMessage() {} +func (x *PushProgressResponse) Reset() { + *x = PushProgressResponse{} + mi := &file_protocol_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *PushProgressResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PushProgressResponse) ProtoMessage() {} + +func (x *PushProgressResponse) ProtoReflect() protoreflect.Message { + mi := &file_protocol_proto_msgTypes[9] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PushProgressResponse.ProtoReflect.Descriptor instead. 
func (*PushProgressResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_2bc2336598a3f7e0, []int{6} -} - -func (m *PushProgressResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_PushProgressResponse.Unmarshal(m, b) -} -func (m *PushProgressResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_PushProgressResponse.Marshal(b, m, deterministic) -} -func (m *PushProgressResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_PushProgressResponse.Merge(m, src) -} -func (m *PushProgressResponse) XXX_Size() int { - return xxx_messageInfo_PushProgressResponse.Size(m) -} -func (m *PushProgressResponse) XXX_DiscardUnknown() { - xxx_messageInfo_PushProgressResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_PushProgressResponse proto.InternalMessageInfo - -func init() { - proto.RegisterEnum("protocol.NodeType", NodeType_name, NodeType_value) - proto.RegisterType((*Meta)(nil), "protocol.Meta") - proto.RegisterType((*Fragment)(nil), "protocol.Fragment") - proto.RegisterType((*VersionInfo)(nil), "protocol.VersionInfo") - proto.RegisterType((*NodeInfo)(nil), "protocol.NodeInfo") - proto.RegisterType((*Progress)(nil), "protocol.Progress") - proto.RegisterType((*PushProgressRequest)(nil), "protocol.PushProgressRequest") - proto.RegisterType((*PushProgressResponse)(nil), "protocol.PushProgressResponse") -} - -func init() { - proto.RegisterFile("protocol.proto", fileDescriptor_2bc2336598a3f7e0) -} - -var fileDescriptor_2bc2336598a3f7e0 = []byte{ - // 592 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x53, 0xcd, 0x6e, 0xd3, 0x4e, - 0x10, 0x8f, 0x93, 0x36, 0x71, 0xa6, 0x6d, 0xfe, 0x7f, 0xb6, 0x1f, 0xb2, 0x8a, 0x5a, 0x22, 0xa3, - 0x8a, 0x88, 0x43, 0x82, 0xca, 0x8d, 0x03, 0x42, 0x54, 0x50, 0x71, 0x68, 0xa9, 0x4c, 0x05, 0x12, - 0x97, 0x68, 0xeb, 0x4c, 0x6d, 0xab, 0xf1, 0xae, 0xd9, 0x9d, 0x80, 0xda, 0x67, 0xe0, 0x11, 0x78, - 0x1e, 0xce, 0x3c, 0x12, 0xda, 
0xb1, 0x9d, 0xba, 0x45, 0x1c, 0xb8, 0xed, 0xef, 0x63, 0xf6, 0xe3, - 0x37, 0xb3, 0x30, 0x28, 0x8c, 0x26, 0x1d, 0xeb, 0xf9, 0x98, 0x17, 0xc2, 0xaf, 0x71, 0xf8, 0xd3, - 0x83, 0x95, 0x13, 0x24, 0x29, 0x76, 0xa0, 0x1b, 0x6b, 0x75, 0x99, 0x25, 0x81, 0x37, 0xf4, 0x46, - 0xfd, 0xa8, 0x42, 0xe2, 0x10, 0xb6, 0x8d, 0x54, 0x57, 0x99, 0x4a, 0xa6, 0xb9, 0x9e, 0xe1, 0x7c, - 0xfa, 0x15, 0x8d, 0xcd, 0xb4, 0x0a, 0x3a, 0x43, 0x6f, 0xd4, 0x89, 0x36, 0x2b, 0xf1, 0xc4, 0x69, - 0x1f, 0x4b, 0x49, 0x8c, 0x61, 0x33, 0x9e, 0x67, 0xf1, 0xd5, 0xbd, 0x8a, 0x15, 0xae, 0x78, 0xc0, - 0xd2, 0x1d, 0xff, 0x00, 0xda, 0x39, 0x06, 0xab, 0x7c, 0x6e, 0x3b, 0x47, 0x11, 0x40, 0xcf, 0xa2, - 0x71, 0x65, 0x41, 0x77, 0xd8, 0x19, 0xf5, 0xa3, 0x1a, 0x3a, 0xe5, 0x9b, 0x36, 0x57, 0x4e, 0xe9, - 0x95, 0x4a, 0x05, 0xc3, 0x7d, 0xf0, 0xdf, 0x1a, 0x99, 0xe4, 0xa8, 0x48, 0x08, 0x58, 0x99, 0x49, - 0x92, 0xfc, 0x92, 0xf5, 0x88, 0xd7, 0xe1, 0x13, 0x58, 0xab, 0x8e, 0x7b, 0xa7, 0x2e, 0xb5, 0xdb, - 0xa8, 0xbe, 0x96, 0xc7, 0xd7, 0xaa, 0x61, 0xf8, 0xc3, 0x03, 0xff, 0x54, 0xcf, 0x90, 0x6d, 0x13, - 0xe8, 0x2b, 0x3d, 0xc3, 0x29, 0x5d, 0x17, 0xc8, 0xc6, 0xc1, 0xa1, 0x18, 0x2f, 0xc3, 0x74, 0xb6, - 0xf3, 0xeb, 0x02, 0x23, 0x5f, 0x55, 0x2b, 0xf1, 0xb0, 0x2a, 0x50, 0x32, 0xc7, 0xa0, 0xcd, 0x2f, - 0x62, 0xf1, 0x54, 0xe6, 0x2c, 0xa6, 0x44, 0xc5, 0xb4, 0xd0, 0x86, 0xaa, 0xfc, 0x7c, 0x47, 0x9c, - 0x69, 0x43, 0xe2, 0x00, 0x06, 0x17, 0x99, 0x92, 0xe6, 0xfa, 0x4e, 0x5e, 0xfd, 0x68, 0xa3, 0x64, - 0xab, 0xcb, 0x87, 0xbf, 0x3c, 0xf0, 0xcf, 0x8c, 0x4e, 0x0c, 0x5a, 0xeb, 0x9a, 0x46, 0x46, 0xc6, - 0x68, 0xea, 0xa6, 0x95, 0xc8, 0x05, 0xd0, 0xb8, 0x00, 0xaf, 0x9d, 0xd7, 0x92, 0xa4, 0x85, 0xe5, - 0x93, 0xfb, 0x51, 0x85, 0xc4, 0x16, 0xac, 0xa2, 0x31, 0xda, 0x54, 0xc7, 0x95, 0xc0, 0xb1, 0xb1, - 0x5e, 0x28, 0xe2, 0xae, 0x74, 0xa2, 0x12, 0x38, 0x96, 0x34, 0xc9, 0x79, 0xd0, 0x2d, 0x59, 0x06, - 0x62, 0x0f, 0xc0, 0x92, 0x34, 0x34, 0xa5, 0x2c, 0xc7, 0xa0, 0xc7, 0x52, 0x9f, 0x99, 0xf3, 0x2c, - 0x47, 0xf1, 0x08, 0xd6, 0x2e, 0x33, 0x95, 0xd9, 0xb4, 0xd4, 0x7d, 
0xd6, 0xa1, 0xa4, 0x9c, 0x21, - 0x7c, 0x03, 0x9b, 0x67, 0x0b, 0x9b, 0xd6, 0xaf, 0x8a, 0xf0, 0xcb, 0x02, 0x2d, 0x89, 0x31, 0xb8, - 0x31, 0x65, 0x2a, 0xf0, 0x86, 0x9d, 0xd1, 0x5a, 0x33, 0xfa, 0xa5, 0x79, 0xe9, 0x09, 0x77, 0x60, - 0xeb, 0xee, 0x36, 0xb6, 0xd0, 0xca, 0xe2, 0xd3, 0x17, 0x65, 0x3f, 0xb9, 0x3d, 0x03, 0x80, 0x0f, - 0x3c, 0x4a, 0x8e, 0xf9, 0xbf, 0xe5, 0xf0, 0x27, 0x1e, 0x20, 0xc6, 0x9e, 0xc3, 0x47, 0xf3, 0x0c, - 0x15, 0x31, 0x6e, 0x1f, 0x7e, 0x6f, 0x43, 0xf7, 0x44, 0x5a, 0x42, 0x23, 0x26, 0xd0, 0x3b, 0x46, - 0xe2, 0xbf, 0x72, 0x6f, 0x04, 0xdc, 0xa4, 0xec, 0x0e, 0x6e, 0x39, 0xe7, 0x09, 0x5b, 0xe2, 0x15, - 0xfc, 0x77, 0x8c, 0x14, 0x35, 0xfe, 0x87, 0xd8, 0xbe, 0x35, 0x35, 0x86, 0x71, 0xb7, 0xb1, 0x5f, - 0x3d, 0xc3, 0x61, 0xeb, 0x99, 0x27, 0x5e, 0xc2, 0xc6, 0x31, 0xd2, 0xd1, 0xf2, 0xbf, 0xfc, 0x6b, - 0xfd, 0x7b, 0x58, 0x6f, 0x26, 0x22, 0xf6, 0x1a, 0xf9, 0xfd, 0x19, 0xf8, 0xee, 0xfe, 0xdf, 0xe4, - 0x32, 0xc8, 0xb0, 0xf5, 0xfa, 0xe0, 0xf3, 0xe3, 0x24, 0xa3, 0x74, 0x71, 0x31, 0x8e, 0x75, 0x3e, - 0xb9, 0x49, 0x51, 0x25, 0xa9, 0xd4, 0x37, 0x93, 0x44, 0x1b, 0x8b, 0x93, 0xba, 0xfa, 0xa2, 0xcb, - 0xab, 0xe7, 0xbf, 0x03, 0x00, 0x00, 0xff, 0xff, 0x0e, 0x9c, 0xdb, 0xcd, 0x77, 0x04, 0x00, 0x00, + return file_protocol_proto_rawDescGZIP(), []int{9} +} + +var File_protocol_proto protoreflect.FileDescriptor + +var file_protocol_proto_rawDesc = []byte{ + 0x0a, 0x0e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x12, 0x08, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, + 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x69, 0x6d, 0x65, + 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x51, 0x0a, 0x04, 0x55, + 0x73, 0x65, 0x72, 0x12, 0x17, 0x0a, 0x07, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x75, 0x73, 0x65, 0x72, 0x49, 0x64, 0x12, 0x16, 0x0a, 0x06, + 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x18, 0x02, 
0x20, 0x01, 0x28, 0x0c, 0x52, 0x06, 0x6c, 0x61, + 0x62, 0x65, 0x6c, 0x73, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x22, 0xe6, + 0x01, 0x0a, 0x04, 0x49, 0x74, 0x65, 0x6d, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, + 0x70, 0x61, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, + 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x17, 0x0a, 0x07, 0x69, 0x74, 0x65, 0x6d, 0x5f, 0x69, 0x64, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x69, 0x74, 0x65, 0x6d, 0x49, 0x64, 0x12, 0x1b, + 0x0a, 0x09, 0x69, 0x73, 0x5f, 0x68, 0x69, 0x64, 0x64, 0x65, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, + 0x08, 0x52, 0x08, 0x69, 0x73, 0x48, 0x69, 0x64, 0x64, 0x65, 0x6e, 0x12, 0x1e, 0x0a, 0x0a, 0x63, + 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x69, 0x65, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x09, 0x52, + 0x0a, 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x69, 0x65, 0x73, 0x12, 0x38, 0x0a, 0x09, 0x74, + 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, + 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, + 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x74, 0x69, 0x6d, 0x65, + 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, 0x16, 0x0a, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x18, + 0x06, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x12, 0x18, 0x0a, + 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, + 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x22, 0xd3, 0x01, 0x0a, 0x08, 0x46, 0x65, 0x65, 0x64, + 0x62, 0x61, 0x63, 0x6b, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, + 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, + 0x63, 0x65, 0x12, 0x23, 0x0a, 0x0d, 0x66, 0x65, 0x65, 0x64, 0x62, 0x61, 0x63, 0x6b, 
0x5f, 0x74, + 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x66, 0x65, 0x65, 0x64, 0x62, + 0x61, 0x63, 0x6b, 0x54, 0x79, 0x70, 0x65, 0x12, 0x17, 0x0a, 0x07, 0x75, 0x73, 0x65, 0x72, 0x5f, + 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x75, 0x73, 0x65, 0x72, 0x49, 0x64, + 0x12, 0x17, 0x0a, 0x07, 0x69, 0x74, 0x65, 0x6d, 0x5f, 0x69, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x06, 0x69, 0x74, 0x65, 0x6d, 0x49, 0x64, 0x12, 0x38, 0x0a, 0x09, 0x74, 0x69, 0x6d, + 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, + 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, + 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, + 0x61, 0x6d, 0x70, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x18, 0x06, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x22, 0xc6, 0x01, + 0x0a, 0x04, 0x4d, 0x65, 0x74, 0x61, 0x12, 0x16, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x32, + 0x0a, 0x15, 0x72, 0x61, 0x6e, 0x6b, 0x69, 0x6e, 0x67, 0x5f, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x5f, + 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x13, 0x72, + 0x61, 0x6e, 0x6b, 0x69, 0x6e, 0x67, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x56, 0x65, 0x72, 0x73, 0x69, + 0x6f, 0x6e, 0x12, 0x2e, 0x0a, 0x13, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x5f, 0x6d, 0x6f, 0x64, 0x65, + 0x6c, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x03, 0x52, + 0x11, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x56, 0x65, 0x72, 0x73, 0x69, + 0x6f, 0x6e, 0x12, 0x0e, 0x0a, 0x02, 0x6d, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, + 0x6d, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x73, 0x18, 0x06, 0x20, + 0x03, 0x28, 0x09, 0x52, 
0x07, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x73, 0x12, 0x18, 0x0a, 0x07, + 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x09, 0x52, 0x07, 0x77, + 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x73, 0x22, 0x1e, 0x0a, 0x08, 0x46, 0x72, 0x61, 0x67, 0x6d, 0x65, + 0x6e, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, + 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x22, 0x27, 0x0a, 0x0b, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, + 0x6e, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x22, + 0x9c, 0x01, 0x0a, 0x08, 0x4e, 0x6f, 0x64, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x2f, 0x0a, 0x09, + 0x6e, 0x6f, 0x64, 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, + 0x12, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x4e, 0x6f, 0x64, 0x65, 0x54, + 0x79, 0x70, 0x65, 0x52, 0x08, 0x6e, 0x6f, 0x64, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1b, 0x0a, + 0x09, 0x6e, 0x6f, 0x64, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x08, 0x6e, 0x6f, 0x64, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x68, 0x74, + 0x74, 0x70, 0x5f, 0x70, 0x6f, 0x72, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x08, 0x68, + 0x74, 0x74, 0x70, 0x50, 0x6f, 0x72, 0x74, 0x12, 0x25, 0x0a, 0x0e, 0x62, 0x69, 0x6e, 0x61, 0x72, + 0x79, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x0d, 0x62, 0x69, 0x6e, 0x61, 0x72, 0x79, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0xd0, + 0x01, 0x0a, 0x08, 0x50, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x12, 0x16, 0x0a, 0x06, 0x74, + 0x72, 0x61, 0x63, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x74, 0x72, 0x61, + 0x63, 0x65, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x16, 0x0a, 
0x06, 0x73, 0x74, 0x61, 0x74, 0x75, + 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, + 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, + 0x65, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x14, 0x0a, 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x05, + 0x20, 0x01, 0x28, 0x03, 0x52, 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x74, + 0x6f, 0x74, 0x61, 0x6c, 0x18, 0x06, 0x20, 0x01, 0x28, 0x03, 0x52, 0x05, 0x74, 0x6f, 0x74, 0x61, + 0x6c, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, + 0x07, 0x20, 0x01, 0x28, 0x03, 0x52, 0x09, 0x73, 0x74, 0x61, 0x72, 0x74, 0x54, 0x69, 0x6d, 0x65, + 0x12, 0x1f, 0x0a, 0x0b, 0x66, 0x69, 0x6e, 0x69, 0x73, 0x68, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, + 0x08, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0a, 0x66, 0x69, 0x6e, 0x69, 0x73, 0x68, 0x54, 0x69, 0x6d, + 0x65, 0x22, 0x45, 0x0a, 0x13, 0x50, 0x75, 0x73, 0x68, 0x50, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, + 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2e, 0x0a, 0x08, 0x70, 0x72, 0x6f, 0x67, + 0x72, 0x65, 0x73, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x50, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x52, 0x08, + 0x70, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x22, 0x16, 0x0a, 0x14, 0x50, 0x75, 0x73, 0x68, + 0x50, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x2a, 0x3a, 0x0a, 0x08, 0x4e, 0x6f, 0x64, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x0e, 0x0a, 0x0a, + 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x4e, 0x6f, 0x64, 0x65, 0x10, 0x00, 0x12, 0x0e, 0x0a, 0x0a, + 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x4e, 0x6f, 0x64, 0x65, 0x10, 0x01, 0x12, 0x0e, 0x0a, 0x0a, + 0x43, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x4e, 0x6f, 0x64, 0x65, 0x10, 0x02, 0x32, 0x8c, 0x02, 0x0a, + 0x06, 0x4d, 0x61, 0x73, 0x74, 0x65, 0x72, 0x12, 0x2f, 0x0a, 0x07, 0x47, 0x65, 0x74, 0x4d, 0x65, + 
0x74, 0x61, 0x12, 0x12, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x4e, 0x6f, + 0x64, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x1a, 0x0e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, + 0x6c, 0x2e, 0x4d, 0x65, 0x74, 0x61, 0x22, 0x00, 0x12, 0x40, 0x0a, 0x0f, 0x47, 0x65, 0x74, 0x52, + 0x61, 0x6e, 0x6b, 0x69, 0x6e, 0x67, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x12, 0x15, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x6e, + 0x66, 0x6f, 0x1a, 0x12, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x46, 0x72, + 0x61, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x22, 0x00, 0x30, 0x01, 0x12, 0x3e, 0x0a, 0x0d, 0x47, 0x65, + 0x74, 0x43, 0x6c, 0x69, 0x63, 0x6b, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x12, 0x15, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x6e, + 0x66, 0x6f, 0x1a, 0x12, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x46, 0x72, + 0x61, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x22, 0x00, 0x30, 0x01, 0x12, 0x4f, 0x0a, 0x0c, 0x50, 0x75, + 0x73, 0x68, 0x50, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x12, 0x1d, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x50, 0x75, 0x73, 0x68, 0x50, 0x72, 0x6f, 0x67, 0x72, 0x65, + 0x73, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x50, 0x75, 0x73, 0x68, 0x50, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, + 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, 0x25, 0x5a, 0x23, 0x67, + 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x7a, 0x68, 0x65, 0x6e, 0x67, 0x68, + 0x61, 0x6f, 0x7a, 0x2f, 0x67, 0x6f, 0x72, 0x73, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, + 0x6f, 0x6c, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_protocol_proto_rawDescOnce sync.Once + file_protocol_proto_rawDescData = file_protocol_proto_rawDesc +) + +func file_protocol_proto_rawDescGZIP() []byte { + 
file_protocol_proto_rawDescOnce.Do(func() { + file_protocol_proto_rawDescData = protoimpl.X.CompressGZIP(file_protocol_proto_rawDescData) + }) + return file_protocol_proto_rawDescData +} + +var file_protocol_proto_enumTypes = make([]protoimpl.EnumInfo, 1) +var file_protocol_proto_msgTypes = make([]protoimpl.MessageInfo, 10) +var file_protocol_proto_goTypes = []any{ + (NodeType)(0), // 0: protocol.NodeType + (*User)(nil), // 1: protocol.User + (*Item)(nil), // 2: protocol.Item + (*Feedback)(nil), // 3: protocol.Feedback + (*Meta)(nil), // 4: protocol.Meta + (*Fragment)(nil), // 5: protocol.Fragment + (*VersionInfo)(nil), // 6: protocol.VersionInfo + (*NodeInfo)(nil), // 7: protocol.NodeInfo + (*Progress)(nil), // 8: protocol.Progress + (*PushProgressRequest)(nil), // 9: protocol.PushProgressRequest + (*PushProgressResponse)(nil), // 10: protocol.PushProgressResponse + (*timestamppb.Timestamp)(nil), // 11: google.protobuf.Timestamp +} +var file_protocol_proto_depIdxs = []int32{ + 11, // 0: protocol.Item.timestamp:type_name -> google.protobuf.Timestamp + 11, // 1: protocol.Feedback.timestamp:type_name -> google.protobuf.Timestamp + 0, // 2: protocol.NodeInfo.node_type:type_name -> protocol.NodeType + 8, // 3: protocol.PushProgressRequest.progress:type_name -> protocol.Progress + 7, // 4: protocol.Master.GetMeta:input_type -> protocol.NodeInfo + 6, // 5: protocol.Master.GetRankingModel:input_type -> protocol.VersionInfo + 6, // 6: protocol.Master.GetClickModel:input_type -> protocol.VersionInfo + 9, // 7: protocol.Master.PushProgress:input_type -> protocol.PushProgressRequest + 4, // 8: protocol.Master.GetMeta:output_type -> protocol.Meta + 5, // 9: protocol.Master.GetRankingModel:output_type -> protocol.Fragment + 5, // 10: protocol.Master.GetClickModel:output_type -> protocol.Fragment + 10, // 11: protocol.Master.PushProgress:output_type -> protocol.PushProgressResponse + 8, // [8:12] is the sub-list for method output_type + 4, // [4:8] is the sub-list for method 
input_type + 4, // [4:4] is the sub-list for extension type_name + 4, // [4:4] is the sub-list for extension extendee + 0, // [0:4] is the sub-list for field type_name +} + +func init() { file_protocol_proto_init() } +func file_protocol_proto_init() { + if File_protocol_proto != nil { + return + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_protocol_proto_rawDesc, + NumEnums: 1, + NumMessages: 10, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: file_protocol_proto_goTypes, + DependencyIndexes: file_protocol_proto_depIdxs, + EnumInfos: file_protocol_proto_enumTypes, + MessageInfos: file_protocol_proto_msgTypes, + }.Build() + File_protocol_proto = out.File + file_protocol_proto_rawDesc = nil + file_protocol_proto_goTypes = nil + file_protocol_proto_depIdxs = nil } diff --git a/protocol/protocol.proto b/protocol/protocol.proto index 54c1c43ec..cd1779e56 100644 --- a/protocol/protocol.proto +++ b/protocol/protocol.proto @@ -17,6 +17,33 @@ option go_package = "github.com/zhenghaoz/gorse/protocol"; package protocol; +import "google/protobuf/timestamp.proto"; + +message User { + string user_id = 1; + bytes labels = 2; + string comment = 3; +} + +message Item { + string namespace = 1; + string item_id = 2; + bool is_hidden = 3; + repeated string categories = 4; + google.protobuf.Timestamp timestamp = 5; + bytes labels = 6; + string comment = 7; +} + +message Feedback { + string namespace = 1; + string feedback_type = 2; + string user_id = 3; + string item_id = 4; + google.protobuf.Timestamp timestamp = 5; + string comment = 6; +} + enum NodeType { ServerNode = 0; WorkerNode = 1; diff --git a/protocol/protocol_grpc.pb.go b/protocol/protocol_grpc.pb.go index c76123352..7a720c69d 100644 --- a/protocol/protocol_grpc.pb.go +++ b/protocol/protocol_grpc.pb.go @@ -14,8 +14,8 @@ // Code generated by protoc-gen-go-grpc. DO NOT EDIT. 
// versions: -// - protoc-gen-go-grpc v1.3.0 -// - protoc v3.12.4 +// - protoc-gen-go-grpc v1.5.1 +// - protoc v5.28.3 // source: protocol.proto package protocol @@ -29,8 +29,8 @@ import ( // This is a compile-time assertion to ensure that this generated file // is compatible with the grpc package it is being compiled against. -// Requires gRPC-Go v1.32.0 or later. -const _ = grpc.SupportPackageIsVersion7 +// Requires gRPC-Go v1.64.0 or later. +const _ = grpc.SupportPackageIsVersion9 const ( Master_GetMeta_FullMethodName = "/protocol.Master/GetMeta" @@ -46,8 +46,8 @@ type MasterClient interface { // meta distribute GetMeta(ctx context.Context, in *NodeInfo, opts ...grpc.CallOption) (*Meta, error) // data distribute - GetRankingModel(ctx context.Context, in *VersionInfo, opts ...grpc.CallOption) (Master_GetRankingModelClient, error) - GetClickModel(ctx context.Context, in *VersionInfo, opts ...grpc.CallOption) (Master_GetClickModelClient, error) + GetRankingModel(ctx context.Context, in *VersionInfo, opts ...grpc.CallOption) (grpc.ServerStreamingClient[Fragment], error) + GetClickModel(ctx context.Context, in *VersionInfo, opts ...grpc.CallOption) (grpc.ServerStreamingClient[Fragment], error) PushProgress(ctx context.Context, in *PushProgressRequest, opts ...grpc.CallOption) (*PushProgressResponse, error) } @@ -60,20 +60,22 @@ func NewMasterClient(cc grpc.ClientConnInterface) MasterClient { } func (c *masterClient) GetMeta(ctx context.Context, in *NodeInfo, opts ...grpc.CallOption) (*Meta, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) out := new(Meta) - err := c.cc.Invoke(ctx, Master_GetMeta_FullMethodName, in, out, opts...) + err := c.cc.Invoke(ctx, Master_GetMeta_FullMethodName, in, out, cOpts...) 
if err != nil { return nil, err } return out, nil } -func (c *masterClient) GetRankingModel(ctx context.Context, in *VersionInfo, opts ...grpc.CallOption) (Master_GetRankingModelClient, error) { - stream, err := c.cc.NewStream(ctx, &Master_ServiceDesc.Streams[0], Master_GetRankingModel_FullMethodName, opts...) +func (c *masterClient) GetRankingModel(ctx context.Context, in *VersionInfo, opts ...grpc.CallOption) (grpc.ServerStreamingClient[Fragment], error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + stream, err := c.cc.NewStream(ctx, &Master_ServiceDesc.Streams[0], Master_GetRankingModel_FullMethodName, cOpts...) if err != nil { return nil, err } - x := &masterGetRankingModelClient{stream} + x := &grpc.GenericClientStream[VersionInfo, Fragment]{ClientStream: stream} if err := x.ClientStream.SendMsg(in); err != nil { return nil, err } @@ -83,29 +85,16 @@ func (c *masterClient) GetRankingModel(ctx context.Context, in *VersionInfo, opt return x, nil } -type Master_GetRankingModelClient interface { - Recv() (*Fragment, error) - grpc.ClientStream -} - -type masterGetRankingModelClient struct { - grpc.ClientStream -} - -func (x *masterGetRankingModelClient) Recv() (*Fragment, error) { - m := new(Fragment) - if err := x.ClientStream.RecvMsg(m); err != nil { - return nil, err - } - return m, nil -} +// This type alias is provided for backwards compatibility with existing code that references the prior non-generic stream type by name. +type Master_GetRankingModelClient = grpc.ServerStreamingClient[Fragment] -func (c *masterClient) GetClickModel(ctx context.Context, in *VersionInfo, opts ...grpc.CallOption) (Master_GetClickModelClient, error) { - stream, err := c.cc.NewStream(ctx, &Master_ServiceDesc.Streams[1], Master_GetClickModel_FullMethodName, opts...) 
+func (c *masterClient) GetClickModel(ctx context.Context, in *VersionInfo, opts ...grpc.CallOption) (grpc.ServerStreamingClient[Fragment], error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + stream, err := c.cc.NewStream(ctx, &Master_ServiceDesc.Streams[1], Master_GetClickModel_FullMethodName, cOpts...) if err != nil { return nil, err } - x := &masterGetClickModelClient{stream} + x := &grpc.GenericClientStream[VersionInfo, Fragment]{ClientStream: stream} if err := x.ClientStream.SendMsg(in); err != nil { return nil, err } @@ -115,26 +104,13 @@ func (c *masterClient) GetClickModel(ctx context.Context, in *VersionInfo, opts return x, nil } -type Master_GetClickModelClient interface { - Recv() (*Fragment, error) - grpc.ClientStream -} - -type masterGetClickModelClient struct { - grpc.ClientStream -} - -func (x *masterGetClickModelClient) Recv() (*Fragment, error) { - m := new(Fragment) - if err := x.ClientStream.RecvMsg(m); err != nil { - return nil, err - } - return m, nil -} +// This type alias is provided for backwards compatibility with existing code that references the prior non-generic stream type by name. +type Master_GetClickModelClient = grpc.ServerStreamingClient[Fragment] func (c *masterClient) PushProgress(ctx context.Context, in *PushProgressRequest, opts ...grpc.CallOption) (*PushProgressResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) out := new(PushProgressResponse) - err := c.cc.Invoke(ctx, Master_PushProgress_FullMethodName, in, out, opts...) + err := c.cc.Invoke(ctx, Master_PushProgress_FullMethodName, in, out, cOpts...) if err != nil { return nil, err } @@ -143,34 +119,38 @@ func (c *masterClient) PushProgress(ctx context.Context, in *PushProgressRequest // MasterServer is the server API for Master service. // All implementations must embed UnimplementedMasterServer -// for forward compatibility +// for forward compatibility. 
type MasterServer interface { // meta distribute GetMeta(context.Context, *NodeInfo) (*Meta, error) // data distribute - GetRankingModel(*VersionInfo, Master_GetRankingModelServer) error - GetClickModel(*VersionInfo, Master_GetClickModelServer) error + GetRankingModel(*VersionInfo, grpc.ServerStreamingServer[Fragment]) error + GetClickModel(*VersionInfo, grpc.ServerStreamingServer[Fragment]) error PushProgress(context.Context, *PushProgressRequest) (*PushProgressResponse, error) mustEmbedUnimplementedMasterServer() } -// UnimplementedMasterServer must be embedded to have forward compatible implementations. -type UnimplementedMasterServer struct { -} +// UnimplementedMasterServer must be embedded to have +// forward compatible implementations. +// +// NOTE: this should be embedded by value instead of pointer to avoid a nil +// pointer dereference when methods are called. +type UnimplementedMasterServer struct{} func (UnimplementedMasterServer) GetMeta(context.Context, *NodeInfo) (*Meta, error) { return nil, status.Errorf(codes.Unimplemented, "method GetMeta not implemented") } -func (UnimplementedMasterServer) GetRankingModel(*VersionInfo, Master_GetRankingModelServer) error { +func (UnimplementedMasterServer) GetRankingModel(*VersionInfo, grpc.ServerStreamingServer[Fragment]) error { return status.Errorf(codes.Unimplemented, "method GetRankingModel not implemented") } -func (UnimplementedMasterServer) GetClickModel(*VersionInfo, Master_GetClickModelServer) error { +func (UnimplementedMasterServer) GetClickModel(*VersionInfo, grpc.ServerStreamingServer[Fragment]) error { return status.Errorf(codes.Unimplemented, "method GetClickModel not implemented") } func (UnimplementedMasterServer) PushProgress(context.Context, *PushProgressRequest) (*PushProgressResponse, error) { return nil, status.Errorf(codes.Unimplemented, "method PushProgress not implemented") } func (UnimplementedMasterServer) mustEmbedUnimplementedMasterServer() {} +func (UnimplementedMasterServer) 
testEmbeddedByValue() {} // UnsafeMasterServer may be embedded to opt out of forward compatibility for this service. // Use of this interface is not recommended, as added methods to MasterServer will @@ -180,6 +160,13 @@ type UnsafeMasterServer interface { } func RegisterMasterServer(s grpc.ServiceRegistrar, srv MasterServer) { + // If the following call pancis, it indicates UnimplementedMasterServer was + // embedded by pointer and is nil. This will cause panics if an + // unimplemented method is ever invoked, so we test this at initialization + // time to prevent it from happening at runtime later due to I/O. + if t, ok := srv.(interface{ testEmbeddedByValue() }); ok { + t.testEmbeddedByValue() + } s.RegisterService(&Master_ServiceDesc, srv) } @@ -206,42 +193,22 @@ func _Master_GetRankingModel_Handler(srv interface{}, stream grpc.ServerStream) if err := stream.RecvMsg(m); err != nil { return err } - return srv.(MasterServer).GetRankingModel(m, &masterGetRankingModelServer{stream}) + return srv.(MasterServer).GetRankingModel(m, &grpc.GenericServerStream[VersionInfo, Fragment]{ServerStream: stream}) } -type Master_GetRankingModelServer interface { - Send(*Fragment) error - grpc.ServerStream -} - -type masterGetRankingModelServer struct { - grpc.ServerStream -} - -func (x *masterGetRankingModelServer) Send(m *Fragment) error { - return x.ServerStream.SendMsg(m) -} +// This type alias is provided for backwards compatibility with existing code that references the prior non-generic stream type by name. 
+type Master_GetRankingModelServer = grpc.ServerStreamingServer[Fragment] func _Master_GetClickModel_Handler(srv interface{}, stream grpc.ServerStream) error { m := new(VersionInfo) if err := stream.RecvMsg(m); err != nil { return err } - return srv.(MasterServer).GetClickModel(m, &masterGetClickModelServer{stream}) -} - -type Master_GetClickModelServer interface { - Send(*Fragment) error - grpc.ServerStream + return srv.(MasterServer).GetClickModel(m, &grpc.GenericServerStream[VersionInfo, Fragment]{ServerStream: stream}) } -type masterGetClickModelServer struct { - grpc.ServerStream -} - -func (x *masterGetClickModelServer) Send(m *Fragment) error { - return x.ServerStream.SendMsg(m) -} +// This type alias is provided for backwards compatibility with existing code that references the prior non-generic stream type by name. +type Master_GetClickModelServer = grpc.ServerStreamingServer[Fragment] func _Master_PushProgress_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { in := new(PushProgressRequest) From f224de5080c88a6000c8c592fd6941d085085f66 Mon Sep 17 00:00:00 2001 From: zhenghaoz Date: Wed, 13 Nov 2024 00:00:40 +0800 Subject: [PATCH 08/14] replace import/export format with JSON (#885) --- go.mod | 6 +- go.sum | 4 +- master/rest.go | 524 +++++++++++++++++--------------------------- master/rest_test.go | 335 +++++++++++++--------------- 4 files changed, 358 insertions(+), 511 deletions(-) diff --git a/go.mod b/go.mod index ad9b3ee3d..c903d281f 100644 --- a/go.mod +++ b/go.mod @@ -1,8 +1,8 @@ module github.com/zhenghaoz/gorse -go 1.22 +go 1.23.2 -toolchain go1.23.1 +toolchain go1.23.3 require ( github.com/ReneKroon/ttlcache/v2 v2.11.0 @@ -23,7 +23,7 @@ require ( github.com/golang/protobuf v1.5.2 github.com/google/uuid v1.6.0 github.com/gorilla/securecookie v1.1.1 - github.com/gorse-io/dashboard v0.0.0-20230729051855-6c53a42d2bd4 + github.com/gorse-io/dashboard 
v0.0.0-20241112140226-19a1b322242c github.com/haxii/go-swagger-ui v0.0.0-20210203093335-a63a6bbde946 github.com/jaswdr/faker v1.16.0 github.com/json-iterator/go v1.1.12 diff --git a/go.sum b/go.sum index ab7e3b29f..45a4eb7cd 100644 --- a/go.sum +++ b/go.sum @@ -301,8 +301,8 @@ github.com/gorilla/securecookie v1.1.1 h1:miw7JPhV+b/lAHSXz4qd/nN9jRiAFV5FwjeKyC github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4= github.com/gorse-io/clickhouse v0.3.3-0.20220715124633-688011a495bb h1:z/oOWE+Vy0PLcwIulZmIug4FtmvE3dJ1YOGprLeHwwY= github.com/gorse-io/clickhouse v0.3.3-0.20220715124633-688011a495bb/go.mod h1:iILWzbul8U+gsf4kqbheF2QzBmdvVp63mloGGK8emDI= -github.com/gorse-io/dashboard v0.0.0-20230729051855-6c53a42d2bd4 h1:x0bLXsLkjEZdztd0Tw+Hx38vIjzabyj2Fk0EDitKcLk= -github.com/gorse-io/dashboard v0.0.0-20230729051855-6c53a42d2bd4/go.mod h1:bv2Yg9Pn4Dca4xPJbvibpF6LH6BjoxcjsEdIuojNano= +github.com/gorse-io/dashboard v0.0.0-20241112140226-19a1b322242c h1:OtOi5F+9Kou/ji0WwiJqVB82sB83279CpzfZcBdnJrU= +github.com/gorse-io/dashboard v0.0.0-20241112140226-19a1b322242c/go.mod h1:iWSDK04UCelym9Uy4YY/tDa6cMGTLpN49Najyhuv35A= github.com/gorse-io/gorgonia v0.0.0-20230817132253-6dd1dbf95849 h1:Hwywr6NxzYeZYn35KwOsw7j8ZiMT60TBzpbn1MbEido= github.com/gorse-io/gorgonia v0.0.0-20230817132253-6dd1dbf95849/go.mod h1:TtVGAt7ENNmgBnC0JA68CAjIDCEtcqaRHvnkAWJ/Fu0= github.com/gorse-io/sqlite v1.3.3-0.20220713123255-c322aec4e59e h1:uPQtYQzG1QcC3Qbv+tuEe8Q2l++V4KEcqYSSwB9qobg= diff --git a/master/rest.go b/master/rest.go index 72c641fad..e5ea54de1 100644 --- a/master/rest.go +++ b/master/rest.go @@ -15,7 +15,6 @@ package master import ( - "bufio" "context" "encoding/binary" "encoding/json" @@ -968,24 +967,13 @@ func (m *Master) importExportUsers(response http.ResponseWriter, request *http.R switch request.Method { case http.MethodGet: var err error - response.Header().Set("Content-Type", "text/csv") - response.Header().Set("Content-Disposition", 
"attachment;filename=users.csv") - // write header - if _, err = response.Write([]byte("user_id,labels\r\n")); err != nil { - server.InternalServerError(restful.NewResponse(response), err) - return - } - // write rows - userChan, errChan := m.DataClient.GetUserStream(ctx, batchSize) - for users := range userChan { + response.Header().Set("Content-Type", "application/jsonl") + response.Header().Set("Content-Disposition", "attachment;filename=users.jsonl") + encoder := json.NewEncoder(response) + userStream, errChan := m.DataClient.GetUserStream(ctx, batchSize) + for users := range userStream { for _, user := range users { - labels, err := json.Marshal(user.Labels) - if err != nil { - server.InternalServerError(restful.NewResponse(response), err) - return - } - if _, err = response.Write([]byte(fmt.Sprintf("%s,%s\r\n", - base.Escape(user.UserId), base.Escape(string(labels))))); err != nil { + if err = encoder.Encode(user); err != nil { server.InternalServerError(restful.NewResponse(response), err) return } @@ -996,89 +984,62 @@ func (m *Master) importExportUsers(response http.ResponseWriter, request *http.R return } case http.MethodPost: - hasHeader := formValue(request, "has-header", "true") == "true" - sep := formValue(request, "sep", ",") - // field separator must be a single character - if len(sep) != 1 { - server.BadRequest(restful.NewResponse(response), fmt.Errorf("field separator must be a single character")) - return - } - labelSep := formValue(request, "label-sep", "|") - fmtString := formValue(request, "format", "ul") + // open file file, _, err := request.FormFile("file") if err != nil { server.BadRequest(restful.NewResponse(response), err) return } defer file.Close() - m.importUsers(ctx, response, file, hasHeader, sep, labelSep, fmtString) - } -} - -func (m *Master) importUsers(ctx context.Context, response http.ResponseWriter, file io.Reader, hasHeader bool, sep, labelSep, fmtString string) { - - lineCount := 0 - timeStart := time.Now() - users := 
make([]data.User, 0) - err := base.ReadLines(bufio.NewScanner(file), sep, func(lineNumber int, splits []string) bool { - var err error - // skip header - if hasHeader { - hasHeader = false - return true - } - splits, err = format(fmtString, "ul", splits, lineNumber) - if err != nil { - server.BadRequest(restful.NewResponse(response), err) - return false - } - // 1. user id - if err = base.ValidateId(splits[0]); err != nil { - server.BadRequest(restful.NewResponse(response), - fmt.Errorf("invalid user id `%v` at line %d (%s)", splits[0], lineNumber, err.Error())) - return false - } - user := data.User{UserId: splits[0]} - // 2. labels - if splits[1] != "" { - var labels any - if err = json.Unmarshal([]byte(splits[1]), &labels); err != nil { + // parse and import users + decoder := json.NewDecoder(file) + lineCount := 0 + timeStart := time.Now() + users := make([]data.User, 0, batchSize) + for { + // parse line + var user data.User + if err = decoder.Decode(&user); err != nil { + if errors.Is(err, io.EOF) { + break + } + server.BadRequest(restful.NewResponse(response), err) + return + } + // validate user id + if err = base.ValidateId(user.UserId); err != nil { server.BadRequest(restful.NewResponse(response), - fmt.Errorf("invalid labels `%v` at line %d (%s)", splits[1], lineNumber, err.Error())) - return false + fmt.Errorf("invalid user id `%v` at line %d (%s)", user.UserId, lineCount, err.Error())) + return + } + users = append(users, user) + // batch insert + if len(users) == batchSize { + err = m.DataClient.BatchInsertUsers(ctx, users) + if err != nil { + server.InternalServerError(restful.NewResponse(response), err) + return + } + users = make([]data.User, 0, batchSize) } - user.Labels = labels + lineCount++ } - users = append(users, user) - // batch insert - if len(users) == batchSize { + if len(users) > 0 { err = m.DataClient.BatchInsertUsers(ctx, users) if err != nil { server.InternalServerError(restful.NewResponse(response), err) - return false + return } - 
users = nil - } - lineCount++ - return true - }) - if err != nil { - server.BadRequest(restful.NewResponse(response), err) - return - } - if len(users) > 0 { - err = m.DataClient.BatchInsertUsers(ctx, users) - if err != nil { - server.InternalServerError(restful.NewResponse(response), err) - return } + m.notifyDataImported() + timeUsed := time.Since(timeStart) + log.Logger().Info("complete import users", + zap.Duration("time_used", timeUsed), + zap.Int("num_users", lineCount)) + server.Ok(restful.NewResponse(response), server.Success{RowAffected: lineCount}) + default: + writeError(response, http.StatusMethodNotAllowed, "method not allowed") } - m.notifyDataImported() - timeUsed := time.Since(timeStart) - log.Logger().Info("complete import users", - zap.Duration("time_used", timeUsed), - zap.Int("num_users", lineCount)) - server.Ok(restful.NewResponse(response), server.Success{RowAffected: lineCount}) } func (m *Master) importExportItems(response http.ResponseWriter, request *http.Request) { @@ -1098,25 +1059,13 @@ func (m *Master) importExportItems(response http.ResponseWriter, request *http.R switch request.Method { case http.MethodGet: var err error - response.Header().Set("Content-Type", "text/csv") - response.Header().Set("Content-Disposition", "attachment;filename=items.csv") - // write header - if _, err = response.Write([]byte("item_id,is_hidden,categories,time_stamp,labels,description\r\n")); err != nil { - server.InternalServerError(restful.NewResponse(response), err) - return - } - // write rows - itemChan, errChan := m.DataClient.GetItemStream(ctx, batchSize, nil) - for items := range itemChan { + response.Header().Set("Content-Type", "application/jsonl") + response.Header().Set("Content-Disposition", "attachment;filename=items.jsonl") + encoder := json.NewEncoder(response) + itemStream, errChan := m.DataClient.GetItemStream(ctx, batchSize, nil) + for items := range itemStream { for _, item := range items { - labels, err := json.Marshal(item.Labels) - 
if err != nil { - server.InternalServerError(restful.NewResponse(response), err) - return - } - if _, err = response.Write([]byte(fmt.Sprintf("%s,%t,%s,%v,%s,%s\r\n", - base.Escape(item.ItemId), item.IsHidden, base.Escape(strings.Join(item.Categories, "|")), - item.Timestamp, base.Escape(string(labels)), base.Escape(item.Comment)))); err != nil { + if err = encoder.Encode(item); err != nil { server.InternalServerError(restful.NewResponse(response), err) return } @@ -1127,150 +1076,87 @@ func (m *Master) importExportItems(response http.ResponseWriter, request *http.R return } case http.MethodPost: - hasHeader := formValue(request, "has-header", "true") == "true" - sep := formValue(request, "sep", ",") - // field separator must be a single character - if len(sep) != 1 { - server.BadRequest(restful.NewResponse(response), fmt.Errorf("field separator must be a single character")) - return - } - labelSep := formValue(request, "label-sep", "|") - fmtString := formValue(request, "format", "ihctld") + // open file file, _, err := request.FormFile("file") if err != nil { server.BadRequest(restful.NewResponse(response), err) return } defer file.Close() - m.importItems(ctx, response, file, hasHeader, sep, labelSep, fmtString) - default: - writeError(response, http.StatusMethodNotAllowed, "method not allowed") - } -} - -func (m *Master) importItems(ctx context.Context, response http.ResponseWriter, file io.Reader, hasHeader bool, sep, labelSep, fmtString string) { - lineCount := 0 - timeStart := time.Now() - items := make([]data.Item, 0) - err := base.ReadLines(bufio.NewScanner(file), sep, func(lineNumber int, splits []string) bool { - var err error - // skip header - if hasHeader { - hasHeader = false - return true - } - splits, err = format(fmtString, "ihctld", splits, lineNumber) - if err != nil { - server.BadRequest(restful.NewResponse(response), err) - return false - } - // 1. 
item id - if err = base.ValidateId(splits[0]); err != nil { - server.BadRequest(restful.NewResponse(response), - fmt.Errorf("invalid item id `%v` at line %d (%s)", splits[0], lineNumber, err.Error())) - return false - } - item := data.Item{ItemId: splits[0]} - // 2. hidden - if splits[1] != "" { - item.IsHidden, err = strconv.ParseBool(splits[1]) - if err != nil { + // parse and import items + decoder := json.NewDecoder(file) + lineCount := 0 + timeStart := time.Now() + items := make([]data.Item, 0, batchSize) + for { + // parse line + var item server.Item + if err = decoder.Decode(&item); err != nil { + if errors.Is(err, io.EOF) { + break + } + server.BadRequest(restful.NewResponse(response), err) + return + } + // validate item id + if err = base.ValidateId(item.ItemId); err != nil { server.BadRequest(restful.NewResponse(response), - fmt.Errorf("invalid hidden value `%v` at line %d (%s)", splits[1], lineNumber, err.Error())) - return false + fmt.Errorf("invalid item id `%v` at line %d (%s)", item.ItemId, lineCount, err.Error())) + return } - } - // 3. categories - if splits[2] != "" { - item.Categories = strings.Split(splits[2], labelSep) + // validate categories for _, category := range item.Categories { if err = base.ValidateId(category); err != nil { server.BadRequest(restful.NewResponse(response), - fmt.Errorf("invalid category `%v` at line %d (%s)", category, lineNumber, err.Error())) - return false + fmt.Errorf("invalid category `%v` at line %d (%s)", category, lineCount, err.Error())) + return } } - } - // 4. 
timestamp - if splits[3] != "" { - item.Timestamp, err = dateparse.ParseAny(splits[3]) - if err != nil { - server.BadRequest(restful.NewResponse(response), - fmt.Errorf("failed to parse datetime `%v` at line %v", splits[1], lineNumber)) - return false + // parse timestamp + var timestamp time.Time + if item.Timestamp != "" { + timestamp, err = dateparse.ParseAny(item.Timestamp) + if err != nil { + server.BadRequest(restful.NewResponse(response), + fmt.Errorf("failed to parse datetime `%v` at line %v", item.Timestamp, lineCount)) + return + } } - } - // 5. labels - if splits[4] != "" { - var labels any - if err = json.Unmarshal([]byte(splits[4]), &labels); err != nil { - server.BadRequest(restful.NewResponse(response), - fmt.Errorf("failed to parse labels `%v` at line %v", splits[4], lineNumber)) - return false + items = append(items, data.Item{ + ItemId: item.ItemId, + IsHidden: item.IsHidden, + Categories: item.Categories, + Timestamp: timestamp, + Labels: item.Labels, + Comment: item.Comment, + }) + // batch insert + if len(items) == batchSize { + err = m.DataClient.BatchInsertItems(ctx, items) + if err != nil { + server.InternalServerError(restful.NewResponse(response), err) + return + } + items = make([]data.Item, 0, batchSize) } - item.Labels = labels + lineCount++ } - // 6. 
comment - item.Comment = splits[5] - items = append(items, item) - // batch insert - if len(items) == batchSize { + if len(items) > 0 { err = m.DataClient.BatchInsertItems(ctx, items) if err != nil { server.InternalServerError(restful.NewResponse(response), err) - return false + return } - items = nil - } - lineCount++ - return true - }) - if err != nil { - server.BadRequest(restful.NewResponse(response), err) - return - } - if len(items) > 0 { - err = m.DataClient.BatchInsertItems(ctx, items) - if err != nil { - server.InternalServerError(restful.NewResponse(response), err) - return } + m.notifyDataImported() + timeUsed := time.Since(timeStart) + log.Logger().Info("complete import items", + zap.Duration("time_used", timeUsed), + zap.Int("num_items", lineCount)) + server.Ok(restful.NewResponse(response), server.Success{RowAffected: lineCount}) + default: + writeError(response, http.StatusMethodNotAllowed, "method not allowed") } - m.notifyDataImported() - timeUsed := time.Since(timeStart) - log.Logger().Info("complete import items", - zap.Duration("time_used", timeUsed), - zap.Int("num_items", lineCount)) - server.Ok(restful.NewResponse(response), server.Success{RowAffected: lineCount}) -} - -func format(inFmt, outFmt string, s []string, lineCount int) ([]string, error) { - if len(s) < len(inFmt) { - log.Logger().Error("number of fields mismatch", - zap.Int("expect", len(inFmt)), - zap.Int("actual", len(s))) - return nil, fmt.Errorf("number of fields mismatch at line %v", lineCount) - } - if inFmt == outFmt { - return s, nil - } - pool := make(map[uint8]string) - for i := range inFmt { - pool[inFmt[i]] = s[i] - } - out := make([]string, len(outFmt)) - for i, c := range outFmt { - out[i] = pool[uint8(c)] - } - return out, nil -} - -func formValue(request *http.Request, fieldName, defaultValue string) string { - value := request.FormValue(fieldName) - if value == "" { - return defaultValue - } - return value } func (m *Master) importExportFeedback(response 
http.ResponseWriter, request *http.Request) { @@ -1285,19 +1171,13 @@ func (m *Master) importExportFeedback(response http.ResponseWriter, request *htt switch request.Method { case http.MethodGet: var err error - response.Header().Set("Content-Type", "text/csv") - response.Header().Set("Content-Disposition", "attachment;filename=feedback.csv") - // write header - if _, err = response.Write([]byte("feedback_type,user_id,item_id,time_stamp\r\n")); err != nil { - server.InternalServerError(restful.NewResponse(response), err) - return - } - // write rows - feedbackChan, errChan := m.DataClient.GetFeedbackStream(ctx, batchSize, data.WithEndTime(*m.Config.Now())) - for feedback := range feedbackChan { + response.Header().Set("Content-Type", "application/jsonl") + response.Header().Set("Content-Disposition", "attachment;filename=feedback.jsonl") + encoder := json.NewEncoder(response) + feedbackStream, errChan := m.DataClient.GetFeedbackStream(ctx, batchSize, data.WithEndTime(*m.Config.Now())) + for feedback := range feedbackStream { for _, v := range feedback { - if _, err = response.Write([]byte(fmt.Sprintf("%s,%s,%s,%v\r\n", - base.Escape(v.FeedbackType), base.Escape(v.UserId), base.Escape(v.ItemId), v.Timestamp))); err != nil { + if err = encoder.Encode(v); err != nil { server.InternalServerError(restful.NewResponse(response), err) return } @@ -1308,109 +1188,95 @@ func (m *Master) importExportFeedback(response http.ResponseWriter, request *htt return } case http.MethodPost: - hasHeader := formValue(request, "has-header", "true") == "true" - sep := formValue(request, "sep", ",") - // field separator must be a single character - if len(sep) != 1 { - server.BadRequest(restful.NewResponse(response), fmt.Errorf("field separator must be a single character")) - return - } - fmtString := formValue(request, "format", "fuit") - // import items + // open file file, _, err := request.FormFile("file") if err != nil { server.BadRequest(restful.NewResponse(response), err) return } 
defer file.Close() - m.importFeedback(ctx, response, file, hasHeader, sep, fmtString) - default: - writeError(response, http.StatusMethodNotAllowed, "method not allowed") - } -} - -func (m *Master) importFeedback(ctx context.Context, response http.ResponseWriter, file io.Reader, hasHeader bool, sep, fmtString string) { - var err error - scanner := bufio.NewScanner(file) - lineCount := 0 - timeStart := time.Now() - feedbacks := make([]data.Feedback, 0) - err = base.ReadLines(scanner, sep, func(lineNumber int, splits []string) bool { - if hasHeader { - hasHeader = false - return true - } - // reorder fields - splits, err = format(fmtString, "fuit", splits, lineNumber) - if err != nil { - server.BadRequest(restful.NewResponse(response), err) - return false - } - feedback := data.Feedback{} - // 1. feedback type - feedback.FeedbackType = splits[0] - if err = base.ValidateId(splits[0]); err != nil { - server.BadRequest(restful.NewResponse(response), - fmt.Errorf("invalid feedback type `%v` at line %d (%s)", splits[0], lineNumber, err.Error())) - return false - } - // 2. user id - if err = base.ValidateId(splits[1]); err != nil { - server.BadRequest(restful.NewResponse(response), - fmt.Errorf("invalid user id `%v` at line %d (%s)", splits[1], lineNumber, err.Error())) - return false - } - feedback.UserId = splits[1] - // 3. 
item id - if err = base.ValidateId(splits[2]); err != nil { - server.BadRequest(restful.NewResponse(response), - fmt.Errorf("invalid item id `%v` at line %d (%s)", splits[2], lineNumber, err.Error())) - return false - } - feedback.ItemId = splits[2] - feedback.Timestamp, err = dateparse.ParseAny(splits[3]) - if err != nil { - server.BadRequest(restful.NewResponse(response), - fmt.Errorf("failed to parse datetime `%v` at line %d", splits[3], lineNumber)) - return false + // parse and import feedback + decoder := json.NewDecoder(file) + lineCount := 0 + timeStart := time.Now() + feedbacks := make([]data.Feedback, 0, batchSize) + for { + // parse line + var feedback server.Feedback + if err = decoder.Decode(&feedback); err != nil { + if errors.Is(err, io.EOF) { + break + } + server.BadRequest(restful.NewResponse(response), err) + return + } + // validate feedback type + if err = base.ValidateId(feedback.FeedbackType); err != nil { + server.BadRequest(restful.NewResponse(response), + fmt.Errorf("invalid feedback type `%v` at line %d (%s)", feedback.FeedbackType, lineCount, err.Error())) + return + } + // validate user id + if err = base.ValidateId(feedback.UserId); err != nil { + server.BadRequest(restful.NewResponse(response), + fmt.Errorf("invalid user id `%v` at line %d (%s)", feedback.UserId, lineCount, err.Error())) + return + } + // validate item id + if err = base.ValidateId(feedback.ItemId); err != nil { + server.BadRequest(restful.NewResponse(response), + fmt.Errorf("invalid item id `%v` at line %d (%s)", feedback.ItemId, lineCount, err.Error())) + return + } + // parse timestamp + var timestamp time.Time + if feedback.Timestamp != "" { + timestamp, err = dateparse.ParseAny(feedback.Timestamp) + if err != nil { + server.BadRequest(restful.NewResponse(response), + fmt.Errorf("failed to parse datetime `%v` at line %d", feedback.Timestamp, lineCount)) + return + } + } + feedbacks = append(feedbacks, data.Feedback{ + FeedbackKey: feedback.FeedbackKey, + Timestamp: 
timestamp, + Comment: feedback.Comment, + }) + // batch insert + if len(feedbacks) == batchSize { + // batch insert to data store + err = m.DataClient.BatchInsertFeedback(ctx, feedbacks, + m.Config.Server.AutoInsertUser, + m.Config.Server.AutoInsertItem, true) + if err != nil { + server.InternalServerError(restful.NewResponse(response), err) + return + } + feedbacks = make([]data.Feedback, 0, batchSize) + } + lineCount++ } - feedbacks = append(feedbacks, feedback) - // batch insert - if len(feedbacks) == batchSize { - // batch insert to data store + // insert to cache store + if len(feedbacks) > 0 { + // insert to data store err = m.DataClient.BatchInsertFeedback(ctx, feedbacks, m.Config.Server.AutoInsertUser, m.Config.Server.AutoInsertItem, true) if err != nil { server.InternalServerError(restful.NewResponse(response), err) - return false + return } - feedbacks = nil - } - lineCount++ - return true - }) - if err != nil { - server.BadRequest(restful.NewResponse(response), err) - return - } - // insert to cache store - if len(feedbacks) > 0 { - // insert to data store - err = m.DataClient.BatchInsertFeedback(ctx, feedbacks, - m.Config.Server.AutoInsertUser, - m.Config.Server.AutoInsertItem, true) - if err != nil { - server.InternalServerError(restful.NewResponse(response), err) - return } + m.notifyDataImported() + timeUsed := time.Since(timeStart) + log.Logger().Info("complete import feedback", + zap.Duration("time_used", timeUsed), + zap.Int("num_items", lineCount)) + server.Ok(restful.NewResponse(response), server.Success{RowAffected: lineCount}) + default: + writeError(response, http.StatusMethodNotAllowed, "method not allowed") } - m.notifyDataImported() - timeUsed := time.Since(timeStart) - log.Logger().Info("complete import feedback", - zap.Duration("time_used", timeUsed), - zap.Int("num_items", lineCount)) - server.Ok(restful.NewResponse(response), server.Success{RowAffected: lineCount}) } var checkList = mapset.NewSet("delete_users", "delete_items", 
"delete_feedback", "delete_cache") @@ -1543,7 +1409,7 @@ func readDump[T proto.Message](r io.Reader, data T) (int64, error) { return size, nil } bytes := make([]byte, size) - if _, err := r.Read(bytes); err != nil { + if _, err := io.ReadFull(r, bytes); err != nil { return 0, err } return size, proto.Unmarshal(bytes, data) @@ -1696,7 +1562,7 @@ func (m *Master) restore(response http.ResponseWriter, request *http.Request) { if flag <= 0 { break } - labels := make(map[string]interface{}) + var labels any if err := json.Unmarshal(user.Labels, &labels); err != nil { writeError(response, http.StatusInternalServerError, err.Error()) return @@ -1732,7 +1598,7 @@ func (m *Master) restore(response http.ResponseWriter, request *http.Request) { if flag <= 0 { break } - labels := make(map[string]interface{}) + var labels any if err := json.Unmarshal(item.Labels, &labels); err != nil { writeError(response, http.StatusInternalServerError, err.Error()) return diff --git a/master/rest_test.go b/master/rest_test.go index 692d4f239..2f7321be1 100644 --- a/master/rest_test.go +++ b/master/rest_test.go @@ -101,6 +101,16 @@ func marshal(t *testing.T, v interface{}) string { return string(s) } +func marshalJSONLines[T any](t *testing.T, v []T) string { + var buf bytes.Buffer + encoder := json.NewEncoder(&buf) + for _, item := range v { + err := encoder.Encode(item) + assert.NoError(t, err) + } + return buf.String() +} + func convertToMapStructure(t *testing.T, v interface{}) map[string]interface{} { var m map[string]interface{} err := mapstructure.Decode(v, &m) @@ -126,12 +136,9 @@ func TestMaster_ExportUsers(t *testing.T) { w := httptest.NewRecorder() s.importExportUsers(w, req) assert.Equal(t, http.StatusOK, w.Result().StatusCode) - assert.Equal(t, "text/csv", w.Header().Get("Content-Type")) - assert.Equal(t, "attachment;filename=users.csv", w.Header().Get("Content-Disposition")) - assert.Equal(t, "user_id,labels\r\n"+ - 
"1,\"{\"\"gender\"\":\"\"male\"\",\"\"job\"\":\"\"engineer\"\"}\"\r\n"+ - "2,\"{\"\"gender\"\":\"\"male\"\",\"\"job\"\":\"\"lawyer\"\"}\"\r\n"+ - "3,\"{\"\"gender\"\":\"\"female\"\",\"\"job\"\":\"\"teacher\"\"}\"\r\n", w.Body.String()) + assert.Equal(t, "application/jsonl", w.Header().Get("Content-Type")) + assert.Equal(t, "attachment;filename=users.jsonl", w.Header().Get("Content-Disposition")) + assert.Equal(t, marshalJSONLines(t, users), w.Body.String()) } func TestMaster_ExportItems(t *testing.T) { @@ -173,12 +180,9 @@ func TestMaster_ExportItems(t *testing.T) { w := httptest.NewRecorder() s.importExportItems(w, req) assert.Equal(t, http.StatusOK, w.Result().StatusCode) - assert.Equal(t, "text/csv", w.Header().Get("Content-Type")) - assert.Equal(t, "attachment;filename=items.csv", w.Header().Get("Content-Disposition")) - assert.Equal(t, "item_id,is_hidden,categories,time_stamp,labels,description\r\n"+ - "1,false,x,2020-01-01 01:01:01.000000001 +0000 UTC,\"{\"\"genre\"\":[\"\"comedy\"\",\"\"sci-fi\"\"]}\",\"o,n,e\"\r\n"+ - "2,false,x|y,2021-01-01 01:01:01.000000001 +0000 UTC,\"{\"\"genre\"\":[\"\"documentary\"\",\"\"sci-fi\"\"]}\",\"t\r\nw\r\no\"\r\n"+ - "3,true,,2022-01-01 01:01:01.000000001 +0000 UTC,null,\"\"\"three\"\"\"\r\n", w.Body.String()) + assert.Equal(t, "application/jsonl", w.Header().Get("Content-Type")) + assert.Equal(t, "attachment;filename=items.jsonl", w.Header().Get("Content-Disposition")) + assert.Equal(t, marshalJSONLines(t, items), w.Body.String()) } func TestMaster_ExportFeedback(t *testing.T) { @@ -189,8 +193,8 @@ func TestMaster_ExportFeedback(t *testing.T) { // insert feedback feedbacks := []data.Feedback{ {FeedbackKey: data.FeedbackKey{FeedbackType: "click", UserId: "0", ItemId: "2"}}, - {FeedbackKey: data.FeedbackKey{FeedbackType: "share", UserId: "1", ItemId: "4"}}, {FeedbackKey: data.FeedbackKey{FeedbackType: "read", UserId: "2", ItemId: "6"}}, + {FeedbackKey: data.FeedbackKey{FeedbackType: "share", UserId: "1", ItemId: "4"}}, } err 
:= s.DataClient.BatchInsertFeedback(ctx, feedbacks, true, true, true) assert.NoError(t, err) @@ -200,68 +204,23 @@ func TestMaster_ExportFeedback(t *testing.T) { w := httptest.NewRecorder() s.importExportFeedback(w, req) assert.Equal(t, http.StatusOK, w.Result().StatusCode) - assert.Equal(t, "text/csv", w.Header().Get("Content-Type")) - assert.Equal(t, "attachment;filename=feedback.csv", w.Header().Get("Content-Disposition")) - assert.Equal(t, "feedback_type,user_id,item_id,time_stamp\r\n"+ - "click,0,2,0001-01-01 00:00:00 +0000 UTC\r\n"+ - "read,2,6,0001-01-01 00:00:00 +0000 UTC\r\n"+ - "share,1,4,0001-01-01 00:00:00 +0000 UTC\r\n", w.Body.String()) + assert.Equal(t, "application/jsonl", w.Header().Get("Content-Type")) + assert.Equal(t, "attachment;filename=feedback.jsonl", w.Header().Get("Content-Disposition")) + assert.Equal(t, marshalJSONLines(t, feedbacks), w.Body.String()) } func TestMaster_ImportUsers(t *testing.T) { s, cookie := newMockServer(t) defer s.Close(t) - - ctx := context.Background() - // send request - buf := bytes.NewBuffer(nil) - writer := multipart.NewWriter(buf) - err := writer.WriteField("has-header", "false") - assert.NoError(t, err) - err = writer.WriteField("sep", "\t") - assert.NoError(t, err) - err = writer.WriteField("label-sep", "::") - assert.NoError(t, err) - err = writer.WriteField("format", "lu") - assert.NoError(t, err) - file, err := writer.CreateFormFile("file", "users.csv") - assert.NoError(t, err) - _, err = file.Write([]byte("\"{\"\"gender\"\":\"\"male\"\",\"\"job\"\":\"\"engineer\"\"}\"\t1\n" + - "\"{\"\"gender\"\":\"\"male\"\",\"\"job\"\":\"\"lawyer\"\"}\"\t2\n" + - "\"{\"\"gender\"\":\"\"female\"\",\"\"job\"\":\"\"teacher\"\"}\"\t\"3\"\n")) - assert.NoError(t, err) - err = writer.Close() - assert.NoError(t, err) - req := httptest.NewRequest("POST", "https://example.com/", buf) - req.Header.Set("Cookie", cookie) - req.Header.Set("Content-Type", writer.FormDataContentType()) - w := httptest.NewRecorder() - 
s.importExportUsers(w, req) - // check - assert.Equal(t, http.StatusOK, w.Result().StatusCode) - assert.JSONEq(t, marshal(t, server.Success{RowAffected: 3}), w.Body.String()) - _, items, err := s.DataClient.GetUsers(ctx, "", 100) - assert.NoError(t, err) - assert.Equal(t, []data.User{ - {UserId: "1", Labels: map[string]any{"gender": "male", "job": "engineer"}}, - {UserId: "2", Labels: map[string]any{"gender": "male", "job": "lawyer"}}, - {UserId: "3", Labels: map[string]any{"gender": "female", "job": "teacher"}}, - }, items) -} - -func TestMaster_ImportUsers_DefaultFormat(t *testing.T) { - s, cookie := newMockServer(t) - defer s.Close(t) ctx := context.Background() // send request buf := bytes.NewBuffer(nil) writer := multipart.NewWriter(buf) - file, err := writer.CreateFormFile("file", "users.csv") + file, err := writer.CreateFormFile("file", "users.jsonl") assert.NoError(t, err) - _, err = file.Write([]byte("user_id,labels\r\n" + - "1,\"{\"\"性别\"\":\"\"男\"\",\"\"职业\"\":\"\"工程师\"\"}\"\r\n" + - "2,\"{\"\"性别\"\":\"\"男\"\",\"\"职业\"\":\"\"律师\"\"}\"\r\n" + - "\"3\",\"{\"\"性别\"\":\"\"女\"\",\"\"职业\"\":\"\"教师\"\"}\"\r\n")) + _, err = file.Write([]byte(`{"UserId":"1","Labels":{"性别":"男","职业":"工程师"}} +{"UserId":"2","Labels":{"性别":"男","职业":"律师"}} +{"UserId":"3","Labels":{"性别":"女","职业":"教师"}}`)) assert.NoError(t, err) err = writer.Close() assert.NoError(t, err) @@ -285,79 +244,15 @@ func TestMaster_ImportUsers_DefaultFormat(t *testing.T) { func TestMaster_ImportItems(t *testing.T) { s, cookie := newMockServer(t) defer s.Close(t) - ctx := context.Background() // send request buf := bytes.NewBuffer(nil) writer := multipart.NewWriter(buf) - err := writer.WriteField("has-header", "false") - assert.NoError(t, err) - err = writer.WriteField("sep", "\t") - assert.NoError(t, err) - err = writer.WriteField("label-sep", "::") - assert.NoError(t, err) - err = writer.WriteField("format", "ildtch") + file, err := writer.CreateFormFile("file", "items.jsonl") assert.NoError(t, err) - file, 
err := writer.CreateFormFile("file", "items.csv") - assert.NoError(t, err) - _, err = file.Write([]byte("1\t\"{\"\"genre\"\":[\"\"comedy\"\",\"\"sci-fi\"\"]}\"\t\"o,n,e\"\t2020-01-01 01:01:01.000000001 +0000 UTC\tx\t0\n" + - "2\t\"{\"\"genre\"\":[\"\"documentary\"\",\"\"sci-fi\"\"]}\"\t\"t\r\nw\r\no\"\t2021-01-01 01:01:01.000000001 +0000 UTC\tx::y\t0\n" + - "\"3\"\t\"\"\t\"\"\"three\"\"\"\t\"2022-01-01 01:01:01.000000001 +0000 UTC\"\t\t\"1\"\n")) - assert.NoError(t, err) - err = writer.Close() - assert.NoError(t, err) - req := httptest.NewRequest("POST", "https://example.com/", buf) - req.Header.Set("Cookie", cookie) - req.Header.Set("Content-Type", writer.FormDataContentType()) - w := httptest.NewRecorder() - s.importExportItems(w, req) - // check - assert.Equal(t, http.StatusOK, w.Result().StatusCode) - assert.JSONEq(t, marshal(t, server.Success{RowAffected: 3}), w.Body.String()) - _, items, err := s.DataClient.GetItems(ctx, "", 100, nil) - assert.NoError(t, err) - assert.Equal(t, []data.Item{ - { - ItemId: "1", - IsHidden: false, - Categories: []string{"x"}, - Timestamp: time.Date(2020, 1, 1, 1, 1, 1, 1, time.UTC), - Labels: map[string]any{"genre": []any{"comedy", "sci-fi"}}, - Comment: "o,n,e", - }, - { - ItemId: "2", - IsHidden: false, - Categories: []string{"x", "y"}, - Timestamp: time.Date(2021, 1, 1, 1, 1, 1, 1, time.UTC), - Labels: map[string]any{"genre": []any{"documentary", "sci-fi"}}, - Comment: "t\r\nw\r\no", - }, - { - ItemId: "3", - IsHidden: true, - Categories: nil, - Timestamp: time.Date(2022, 1, 1, 1, 1, 1, 1, time.UTC), - Labels: nil, - Comment: "\"three\"", - }, - }, items) -} - -func TestMaster_ImportItems_DefaultFormat(t *testing.T) { - s, cookie := newMockServer(t) - defer s.Close(t) - - ctx := context.Background() - // send request - buf := bytes.NewBuffer(nil) - writer := multipart.NewWriter(buf) - file, err := writer.CreateFormFile("file", "items.csv") - assert.NoError(t, err) - _, err = 
file.Write([]byte("item_id,is_hidden,categories,time_stamp,labels,description\r\n" + - "1,false,x,2020-01-01 01:01:01.000000001 +0000 UTC,\"{\"\"类型\"\":[\"\"喜剧\"\",\"\"科幻\"\"]}\",one\r\n" + - "2,false,x|y,2021-01-01 01:01:01.000000001 +0000 UTC,\"{\"\"类型\"\":[\"\"卡通\"\",\"\"科幻\"\"]}\",two\r\n" + - "\"3\",\"true\",,\"2022-01-01 01:01:01.000000001 +0000 UTC\",,\"three\"\r\n")) + _, err = file.Write([]byte(`{"ItemId":"1","IsHidden":false,"Categories":["x"],"Timestamp":"2020-01-01 01:01:01.000000001 +0000 UTC","Labels":{"类型":["喜剧","科幻"]},"Comment":"one"} +{"ItemId":"2","IsHidden":false,"Categories":["x","y"],"Timestamp":"2021-01-01 01:01:01.000000001 +0000 UTC","Labels":{"类型":["卡通","科幻"]},"Comment":"two"} +{"ItemId":"3","IsHidden":true,"Timestamp":"2022-01-01 01:01:01.000000001 +0000 UTC","Comment":"three"}`)) assert.NoError(t, err) err = writer.Close() assert.NoError(t, err) @@ -401,55 +296,15 @@ func TestMaster_ImportItems_DefaultFormat(t *testing.T) { func TestMaster_ImportFeedback(t *testing.T) { s, cookie := newMockServer(t) defer s.Close(t) - - ctx := context.Background() - // send request - buf := bytes.NewBuffer(nil) - writer := multipart.NewWriter(buf) - err := writer.WriteField("format", "uift") - assert.NoError(t, err) - err = writer.WriteField("sep", "\t") - assert.NoError(t, err) - err = writer.WriteField("has-header", "false") - assert.NoError(t, err) - file, err := writer.CreateFormFile("file", "feedback.csv") - assert.NoError(t, err) - _, err = file.Write([]byte("0\t2\tclick\t0001-01-01 00:00:00 +0000 UTC\n" + - "2\t6\tread\t0001-01-01 00:00:00 +0000 UTC\n" + - "\"1\"\t\"4\"\t\"share\"\t\"0001-01-01 00:00:00 +0000 UTC\"\n")) - assert.NoError(t, err) - err = writer.Close() - assert.NoError(t, err) - req := httptest.NewRequest("POST", "https://example.com/", buf) - req.Header.Set("Cookie", cookie) - req.Header.Set("Content-Type", writer.FormDataContentType()) - w := httptest.NewRecorder() - s.importExportFeedback(w, req) - // check - assert.Equal(t, 
http.StatusOK, w.Result().StatusCode) - assert.JSONEq(t, marshal(t, server.Success{RowAffected: 3}), w.Body.String()) - _, feedback, err := s.DataClient.GetFeedback(ctx, "", 100, nil, lo.ToPtr(time.Now())) - assert.NoError(t, err) - assert.Equal(t, []data.Feedback{ - {FeedbackKey: data.FeedbackKey{FeedbackType: "click", UserId: "0", ItemId: "2"}}, - {FeedbackKey: data.FeedbackKey{FeedbackType: "read", UserId: "2", ItemId: "6"}}, - {FeedbackKey: data.FeedbackKey{FeedbackType: "share", UserId: "1", ItemId: "4"}}, - }, feedback) -} - -func TestMaster_ImportFeedback_Default(t *testing.T) { - s, cookie := newMockServer(t) - defer s.Close(t) // send request ctx := context.Background() buf := bytes.NewBuffer(nil) writer := multipart.NewWriter(buf) - file, err := writer.CreateFormFile("file", "feedback.csv") + file, err := writer.CreateFormFile("file", "feedback.jsonl") assert.NoError(t, err) - _, err = file.Write([]byte("feedback_type,user_id,item_id,time_stamp\r\n" + - "click,0,2,0001-01-01 00:00:00 +0000 UTC\r\n" + - "read,2,6,0001-01-01 00:00:00 +0000 UTC\r\n" + - "\"share\",\"1\",\"4\",\"0001-01-01 00:00:00 +0000 UTC\"\r\n")) + _, err = file.Write([]byte(`{"FeedbackType":"click","UserId":"0","ItemId":"2","Timestamp":"0001-01-01 00:00:00 +0000 UTC"} +{"FeedbackType":"read","UserId":"2","ItemId":"6","Timestamp":"0001-01-01 00:00:00 +0000 UTC"} +{"FeedbackType":"share","UserId":"1","ItemId":"4","Timestamp":"0001-01-01 00:00:00 +0000 UTC"}`)) assert.NoError(t, err) err = writer.Close() assert.NoError(t, err) @@ -1064,3 +919,129 @@ func TestDumpAndRestore(t *testing.T) { assert.Equal(t, feedback, returnFeedback) } } + +func TestExportAndImport(t *testing.T) { + s, cookie := newMockServer(t) + defer s.Close(t) + ctx := context.Background() + // insert users + users := make([]data.User, batchSize+1) + for i := range users { + users[i] = data.User{ + UserId: fmt.Sprintf("%05d", i), + Labels: map[string]any{"a": fmt.Sprintf("%d", 2*i+1), "b": fmt.Sprintf("%d", 2*i+2)}, + } + } 
+ err := s.DataClient.BatchInsertUsers(ctx, users) + assert.NoError(t, err) + // insert items + items := make([]data.Item, batchSize+1) + for i := range items { + items[i] = data.Item{ + ItemId: fmt.Sprintf("%05d", i), + Labels: map[string]any{"a": fmt.Sprintf("%d", 2*i+1), "b": fmt.Sprintf("%d", 2*i+2)}, + } + } + err = s.DataClient.BatchInsertItems(ctx, items) + assert.NoError(t, err) + // insert feedback + feedback := make([]data.Feedback, batchSize+1) + for i := range feedback { + feedback[i] = data.Feedback{ + FeedbackKey: data.FeedbackKey{ + FeedbackType: "click", + UserId: fmt.Sprintf("%05d", i), + ItemId: fmt.Sprintf("%05d", i), + }, + } + } + err = s.DataClient.BatchInsertFeedback(ctx, feedback, true, true, true) + assert.NoError(t, err) + + // export users + req := httptest.NewRequest("GET", "https://example.com/", nil) + req.Header.Set("Cookie", cookie) + w := httptest.NewRecorder() + s.importExportUsers(w, req) + assert.Equal(t, http.StatusOK, w.Code) + usersData := w.Body.Bytes() + // export items + req = httptest.NewRequest("GET", "https://example.com/", nil) + req.Header.Set("Cookie", cookie) + w = httptest.NewRecorder() + s.importExportItems(w, req) + assert.Equal(t, http.StatusOK, w.Code) + itemsData := w.Body.Bytes() + // export feedback + req = httptest.NewRequest("GET", "https://example.com/", nil) + req.Header.Set("Cookie", cookie) + w = httptest.NewRecorder() + s.importExportFeedback(w, req) + assert.Equal(t, http.StatusOK, w.Code) + feedbackData := w.Body.Bytes() + + err = s.DataClient.Purge() + assert.NoError(t, err) + // import users + buf := bytes.NewBuffer(nil) + writer := multipart.NewWriter(buf) + file, err := writer.CreateFormFile("file", "users.jsonl") + assert.NoError(t, err) + _, err = file.Write(usersData) + assert.NoError(t, err) + err = writer.Close() + assert.NoError(t, err) + req = httptest.NewRequest("POST", "https://example.com/", buf) + req.Header.Set("Cookie", cookie) + req.Header.Set("Content-Type", 
writer.FormDataContentType()) + w = httptest.NewRecorder() + s.importExportUsers(w, req) + assert.Equal(t, http.StatusOK, w.Code) + // import items + buf = bytes.NewBuffer(nil) + writer = multipart.NewWriter(buf) + file, err = writer.CreateFormFile("file", "items.jsonl") + assert.NoError(t, err) + _, err = file.Write(itemsData) + assert.NoError(t, err) + err = writer.Close() + assert.NoError(t, err) + req = httptest.NewRequest("POST", "https://example.com/", buf) + req.Header.Set("Cookie", cookie) + req.Header.Set("Content-Type", writer.FormDataContentType()) + w = httptest.NewRecorder() + s.importExportItems(w, req) + assert.Equal(t, http.StatusOK, w.Code) + // import feedback + buf = bytes.NewBuffer(nil) + writer = multipart.NewWriter(buf) + file, err = writer.CreateFormFile("file", "feedback.jsonl") + assert.NoError(t, err) + _, err = file.Write(feedbackData) + assert.NoError(t, err) + err = writer.Close() + assert.NoError(t, err) + req = httptest.NewRequest("POST", "https://example.com/", buf) + req.Header.Set("Cookie", cookie) + req.Header.Set("Content-Type", writer.FormDataContentType()) + w = httptest.NewRecorder() + s.importExportFeedback(w, req) + assert.Equal(t, http.StatusOK, w.Code) + + // check data + _, returnUsers, err := s.DataClient.GetUsers(ctx, "", len(users)) + assert.NoError(t, err) + if assert.Equal(t, len(users), len(returnUsers)) { + assert.Equal(t, users, returnUsers) + } + _, returnItems, err := s.DataClient.GetItems(ctx, "", len(items), nil) + assert.NoError(t, err) + if assert.Equal(t, len(items), len(returnItems)) { + assert.Equal(t, items, returnItems) + } + _, returnFeedback, err := s.DataClient.GetFeedback(ctx, "", len(feedback), nil, lo.ToPtr(time.Now())) + assert.NoError(t, err) + if assert.Equal(t, len(feedback), len(returnFeedback)) { + assert.Equal(t, feedback, returnFeedback) + } +} From a323b179f5e413de51b693fb3fb05c0bd56fc3f7 Mon Sep 17 00:00:00 2001 From: zhenghaoz Date: Sat, 16 Nov 2024 22:02:54 +0800 Subject: [PATCH 09/14] 
support authenticate via OIDC (#888) --- config/config.go | 39 +++++++---- config/config.toml | 20 ++++++ config/config_test.go | 20 +++++- go.mod | 13 ++-- go.sum | 17 +++-- master/master.go | 39 ++++++++--- master/rest.go | 148 +++++++++++++++++++++++++++--------------- master/rest_test.go | 83 ----------------------- master/rpc.go | 29 +++------ master/rpc_test.go | 10 ++- 10 files changed, 221 insertions(+), 197 deletions(-) diff --git a/config/config.go b/config/config.go index f13d92f0b..31a2e7474 100644 --- a/config/config.go +++ b/config/config.go @@ -60,6 +60,7 @@ type Config struct { Recommend RecommendConfig `mapstructure:"recommend"` Tracing TracingConfig `mapstructure:"tracing"` Experimental ExperimentalConfig `mapstructure:"experimental"` + OIDC OIDCConfig `mapstructure:"oidc"` } // DatabaseConfig is the configuration for the database. @@ -73,19 +74,18 @@ type DatabaseConfig struct { // MasterConfig is the configuration for the master. type MasterConfig struct { - Port int `mapstructure:"port" validate:"gte=0"` // master port - Host string `mapstructure:"host"` // master host - HttpPort int `mapstructure:"http_port" validate:"gte=0"` // HTTP port - HttpHost string `mapstructure:"http_host"` // HTTP host - HttpCorsDomains []string `mapstructure:"http_cors_domains"` // add allowed cors domains - HttpCorsMethods []string `mapstructure:"http_cors_methods"` // add allowed cors methods - NumJobs int `mapstructure:"n_jobs" validate:"gt=0"` // number of working jobs - MetaTimeout time.Duration `mapstructure:"meta_timeout" validate:"gt=0"` // cluster meta timeout (second) - DashboardUserName string `mapstructure:"dashboard_user_name"` // dashboard user name - DashboardPassword string `mapstructure:"dashboard_password"` // dashboard password - DashboardAuthServer string `mapstructure:"dashboard_auth_server"` // dashboard auth server - DashboardRedacted bool `mapstructure:"dashboard_redacted"` - AdminAPIKey string `mapstructure:"admin_api_key"` + Port int 
`mapstructure:"port" validate:"gte=0"` // master port + Host string `mapstructure:"host"` // master host + HttpPort int `mapstructure:"http_port" validate:"gte=0"` // HTTP port + HttpHost string `mapstructure:"http_host"` // HTTP host + HttpCorsDomains []string `mapstructure:"http_cors_domains"` // add allowed cors domains + HttpCorsMethods []string `mapstructure:"http_cors_methods"` // add allowed cors methods + NumJobs int `mapstructure:"n_jobs" validate:"gt=0"` // number of working jobs + MetaTimeout time.Duration `mapstructure:"meta_timeout" validate:"gt=0"` // cluster meta timeout (second) + DashboardUserName string `mapstructure:"dashboard_user_name"` // dashboard user name + DashboardPassword string `mapstructure:"dashboard_password"` // dashboard password + DashboardRedacted bool `mapstructure:"dashboard_redacted"` + AdminAPIKey string `mapstructure:"admin_api_key"` } // ServerConfig is the configuration for the server. @@ -179,6 +179,14 @@ type ExperimentalConfig struct { DeepLearningBatchSize int `mapstructure:"deep_learning_batch_size"` } +type OIDCConfig struct { + Enable bool `mapstructure:"enable"` + Issuer string `mapstructure:"issuer"` + ClientID string `mapstructure:"client_id"` + ClientSecret string `mapstructure:"client_secret"` + RedirectURL string `mapstructure:"redirect_url" validate:"omitempty,endswith=/callback/oauth2"` +} + func GetDefaultConfig() *Config { return &Config{ Master: MasterConfig{ @@ -558,6 +566,11 @@ func LoadConfig(path string, oneModel bool) (*Config, error) { {"master.dashboard_redacted", "GORSE_DASHBOARD_REDACTED"}, {"master.admin_api_key", "GORSE_ADMIN_API_KEY"}, {"server.api_key", "GORSE_SERVER_API_KEY"}, + {"oidc.enable", "GORSE_OIDC_ENABLE"}, + {"oidc.issuer", "GORSE_OIDC_ISSUER"}, + {"oidc.client_id", "GORSE_OIDC_CLIENT_ID"}, + {"oidc.client_secret", "GORSE_OIDC_CLIENT_SECRET"}, + {"oidc.redirect_url", "GORSE_OIDC_REDIRECT_URL"}, } for _, binding := range bindings { err := viper.BindEnv(binding.key, binding.env) diff 
--git a/config/config.toml b/config/config.toml index e6fb194a5..0e2f0cef3 100644 --- a/config/config.toml +++ b/config/config.toml @@ -260,3 +260,23 @@ enable_deep_learning = false # Batch size for deep learning recommenders. The default value is 128. deep_learning_batch_size = 128 + +[oidc] + +# Enable OpenID Connect (OIDC) authentication. The default value is false. +enable = false + +# The issuer of the OAuth provider. +issuer = "" + +# Public identifier of the OAuth application. +client_id = "" + +# Token access to the OAuth application. +client_secret = "" + +# URL used by the OAuth provider to redirect users after they are successfully authenticated +# (also referred to as the callback URL). You should set this to the concatenation of the +# Gorse dashboard URL and "/callback/oauth2". For example, if the Gorse dashboard URL is +# http://localhost:8088, the redirect URL should be: http://localhost:8088/callback/oauth2 +redirect_url = "" diff --git a/config/config_test.go b/config/config_test.go index 63c222c4e..4535a4707 100644 --- a/config/config_test.go +++ b/config/config_test.go @@ -41,6 +41,10 @@ func TestUnmarshal(t *testing.T) { text = strings.Replace(text, "data_table_prefix = \"gorse_\"", "data_table_prefix = \"gorse_data_\"", -1) text = strings.Replace(text, "http_cors_domains = []", "http_cors_domains = [\".*\"]", -1) text = strings.Replace(text, "http_cors_methods = []", "http_cors_methods = [\"GET\",\"PATCH\",\"POST\"]", -1) + text = strings.Replace(text, "issuer = \"\"", "issuer = \"https://accounts.google.com\"", -1) + text = strings.Replace(text, "client_id = \"\"", "client_id = \"client_id\"", -1) + text = strings.Replace(text, "client_secret = \"\"", "client_secret = \"client_secret\"", -1) + text = strings.Replace(text, "redirect_url = \"\"", "redirect_url = \"http://localhost:8088/callback/oauth2\"", -1) r, err := convert.TOML{}.Decode(bytes.NewBufferString(text)) assert.NoError(t, err) @@ -142,6 +146,11 @@ func TestUnmarshal(t *testing.T) 
{ assert.Equal(t, 1.0, config.Tracing.Ratio) // [experimental] assert.Equal(t, 128, config.Experimental.DeepLearningBatchSize) + // [oauth2] + assert.Equal(t, "https://accounts.google.com", config.OIDC.Issuer) + assert.Equal(t, "client_id", config.OIDC.ClientID) + assert.Equal(t, "client_secret", config.OIDC.ClientSecret) + assert.Equal(t, "http://localhost:8088/callback/oauth2", config.OIDC.RedirectURL) }) } } @@ -180,6 +189,11 @@ func TestBindEnv(t *testing.T) { {"GORSE_DASHBOARD_REDACTED", "true"}, {"GORSE_ADMIN_API_KEY", ""}, {"GORSE_SERVER_API_KEY", ""}, + {"GORSE_OIDC_ENABLE", "true"}, + {"GORSE_OIDC_ISSUER", "https://accounts.google.com"}, + {"GORSE_OIDC_CLIENT_ID", "client_id"}, + {"GORSE_OIDC_CLIENT_SECRET", "client_secret"}, + {"GORSE_OIDC_REDIRECT_URL", "http://localhost:8088/callback/oauth2"}, } for _, variable := range variables { t.Setenv(variable.key, variable.value) @@ -199,10 +213,14 @@ func TestBindEnv(t *testing.T) { assert.Equal(t, 789, config.Master.NumJobs) assert.Equal(t, "user_name", config.Master.DashboardUserName) assert.Equal(t, "password", config.Master.DashboardPassword) - assert.Equal(t, "http://127.0.0.1:8888", config.Master.DashboardAuthServer) assert.Equal(t, true, config.Master.DashboardRedacted) assert.Equal(t, "", config.Master.AdminAPIKey) assert.Equal(t, "", config.Server.APIKey) + assert.Equal(t, true, config.OIDC.Enable) + assert.Equal(t, "https://accounts.google.com", config.OIDC.Issuer) + assert.Equal(t, "client_id", config.OIDC.ClientID) + assert.Equal(t, "client_secret", config.OIDC.ClientSecret) + assert.Equal(t, "http://localhost:8088/callback/oauth2", config.OIDC.RedirectURL) // check default values assert.Equal(t, 100, config.Recommend.CacheSize) diff --git a/go.mod b/go.mod index c903d281f..b08691f50 100644 --- a/go.mod +++ b/go.mod @@ -1,16 +1,14 @@ module github.com/zhenghaoz/gorse -go 1.23.2 - -toolchain go1.23.3 +go 1.23.3 require ( - github.com/ReneKroon/ttlcache/v2 v2.11.0 github.com/XSAM/otelsql v0.35.0 
github.com/araddon/dateparse v0.0.0-20210429162001-6b43995a97de github.com/benhoyt/goawk v1.20.0 github.com/bits-and-blooms/bitset v1.2.1 github.com/chewxy/math32 v1.10.1 + github.com/coreos/go-oidc/v3 v3.11.0 github.com/deckarep/golang-set/v2 v2.3.1 github.com/emicklei/go-restful-openapi/v2 v2.9.0 github.com/emicklei/go-restful/v3 v3.9.0 @@ -20,12 +18,12 @@ require ( github.com/go-playground/validator/v10 v10.11.0 github.com/go-resty/resty/v2 v2.7.0 github.com/go-sql-driver/mysql v1.6.0 - github.com/golang/protobuf v1.5.2 github.com/google/uuid v1.6.0 github.com/gorilla/securecookie v1.1.1 - github.com/gorse-io/dashboard v0.0.0-20241112140226-19a1b322242c + github.com/gorse-io/dashboard v0.0.0-20241115145254-4def1c814899 github.com/haxii/go-swagger-ui v0.0.0-20210203093335-a63a6bbde946 github.com/jaswdr/faker v1.16.0 + github.com/jellydator/ttlcache/v3 v3.3.0 github.com/json-iterator/go v1.1.12 github.com/juju/errors v1.0.0 github.com/klauspost/asmfmt v1.3.2 @@ -63,6 +61,7 @@ require ( go.uber.org/atomic v1.10.0 go.uber.org/zap v1.24.0 golang.org/x/exp v0.0.0-20220722155223-a9213eeb770e + golang.org/x/oauth2 v0.22.0 google.golang.org/grpc v1.67.1 google.golang.org/protobuf v1.35.1 gopkg.in/natefinch/lumberjack.v2 v2.2.1 @@ -93,6 +92,7 @@ require ( github.com/dustin/go-humanize v1.0.0 // indirect github.com/felixge/httpsnoop v1.0.4 // indirect github.com/fsnotify/fsnotify v1.5.4 // indirect + github.com/go-jose/go-jose/v4 v4.0.2 // indirect github.com/go-logr/logr v1.4.2 // indirect github.com/go-logr/stdr v1.2.2 // indirect github.com/go-openapi/jsonpointer v0.19.5 // indirect @@ -100,6 +100,7 @@ require ( github.com/go-openapi/spec v0.20.7 // indirect github.com/go-openapi/swag v0.22.3 // indirect github.com/gogo/protobuf v1.3.2 // indirect + github.com/golang/protobuf v1.5.2 // indirect github.com/golang/snappy v0.0.4 // indirect github.com/google/flatbuffers v2.0.6+incompatible // indirect github.com/grpc-ecosystem/grpc-gateway/v2 v2.22.0 // indirect diff --git 
a/go.sum b/go.sum index 45a4eb7cd..29b72e9e8 100644 --- a/go.sum +++ b/go.sum @@ -44,8 +44,6 @@ github.com/Masterminds/semver/v3 v3.1.1 h1:hLg3sBzpNErnxhQtUy/mmLR2I9foDujNK030I github.com/Masterminds/semver/v3 v3.1.1/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs= github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= -github.com/ReneKroon/ttlcache/v2 v2.11.0 h1:OvlcYFYi941SBN3v9dsDcC2N8vRxyHcCmJb3Vl4QMoM= -github.com/ReneKroon/ttlcache/v2 v2.11.0/go.mod h1:mBxvsNY+BT8qLLd6CuAJubbKo6r0jh3nb5et22bbfGY= github.com/XSAM/otelsql v0.35.0 h1:nMdbU/XLmBIB6qZF61uDqy46E0LVA4ZgF/FCNw8Had4= github.com/XSAM/otelsql v0.35.0/go.mod h1:wO028mnLzmBpstK8XPsoeRLl/kgt417yjAwOGDIptTc= github.com/ajstarks/deck v0.0.0-20200831202436-30c9fc6549a9/go.mod h1:JynElWSGnm/4RlzPXRlREEwqTHAN3T56Bv2ITsFT3gY= @@ -109,6 +107,8 @@ github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnht github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cockroachdb/apd v1.1.0 h1:3LFP3629v+1aKXU5Q37mxmRxX/pIu1nijXydLShEq5I= github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ= +github.com/coreos/go-oidc/v3 v3.11.0 h1:Ia3MxdwpSw702YW0xgfmP1GVCMA9aEFWu12XUZ3/OtI= +github.com/coreos/go-oidc/v3 v3.11.0/go.mod h1:gE3LgjOgFoHi9a4ce4/tJczr0Ai2/BoDhf0r5lltWI0= github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= github.com/coreos/go-systemd v0.0.0-20190719114852-fd7a80b32e1f/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= @@ -162,6 +162,8 @@ github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9 github.com/go-gl/glfw/v3.3/glfw 
v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gota/gota v0.12.0/go.mod h1:UT+NsWpZC/FhaOyWb9Hui0jXg0Iq8e/YugZHTbyW/34= +github.com/go-jose/go-jose/v4 v4.0.2 h1:R3l3kkBds16bO7ZFAEEcofK0MkrAJt3jlJznWZG0nvk= +github.com/go-jose/go-jose/v4 v4.0.2/go.mod h1:WVf9LFMHh/QVrmqrOfqun0C45tMe3RoiKJMPvgWwLfY= github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= @@ -301,8 +303,8 @@ github.com/gorilla/securecookie v1.1.1 h1:miw7JPhV+b/lAHSXz4qd/nN9jRiAFV5FwjeKyC github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4= github.com/gorse-io/clickhouse v0.3.3-0.20220715124633-688011a495bb h1:z/oOWE+Vy0PLcwIulZmIug4FtmvE3dJ1YOGprLeHwwY= github.com/gorse-io/clickhouse v0.3.3-0.20220715124633-688011a495bb/go.mod h1:iILWzbul8U+gsf4kqbheF2QzBmdvVp63mloGGK8emDI= -github.com/gorse-io/dashboard v0.0.0-20241112140226-19a1b322242c h1:OtOi5F+9Kou/ji0WwiJqVB82sB83279CpzfZcBdnJrU= -github.com/gorse-io/dashboard v0.0.0-20241112140226-19a1b322242c/go.mod h1:iWSDK04UCelym9Uy4YY/tDa6cMGTLpN49Najyhuv35A= +github.com/gorse-io/dashboard v0.0.0-20241115145254-4def1c814899 h1:1BQ8+NLDKMYp7BcBhjJgEska+Gt8t2JTj6Rj0afYwG8= +github.com/gorse-io/dashboard v0.0.0-20241115145254-4def1c814899/go.mod h1:LBLzsMv3XVLmpaM/1q8/sGvv2Avj1YxmHBZfXcdqRjU= github.com/gorse-io/gorgonia v0.0.0-20230817132253-6dd1dbf95849 h1:Hwywr6NxzYeZYn35KwOsw7j8ZiMT60TBzpbn1MbEido= github.com/gorse-io/gorgonia v0.0.0-20230817132253-6dd1dbf95849/go.mod h1:TtVGAt7ENNmgBnC0JA68CAjIDCEtcqaRHvnkAWJ/Fu0= github.com/gorse-io/sqlite v1.3.3-0.20220713123255-c322aec4e59e h1:uPQtYQzG1QcC3Qbv+tuEe8Q2l++V4KEcqYSSwB9qobg= @@ -377,6 +379,8 @@ 
github.com/jackc/puddle v1.1.3/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dv github.com/jackc/puddle v1.2.1/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= github.com/jaswdr/faker v1.16.0 h1:5ZjusQbqIZwJnUymPirNKJI1yFCuozdSR9oeYPgD5Uk= github.com/jaswdr/faker v1.16.0/go.mod h1:x7ZlyB1AZqwqKZgyQlnqEG8FDptmHlncA5u2zY/yi6w= +github.com/jellydator/ttlcache/v3 v3.3.0 h1:BdoC9cE81qXfrxeb9eoJi9dWrdhSuwXMAnHTbnBm4Wc= +github.com/jellydator/ttlcache/v3 v3.3.0/go.mod h1:bj2/e0l4jRnQdrnSTaGTsh4GSXvMjQcy41i7th0GVGw= github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E= github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc= github.com/jinzhu/now v1.1.4/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8= @@ -678,7 +682,6 @@ go.uber.org/atomic v1.6.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= go.uber.org/atomic v1.10.0 h1:9qC72Qh0+3MqyJbAn8YU5xVq1frD8bn3JtD2oXtafVQ= go.uber.org/atomic v1.10.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0= -go.uber.org/goleak v1.1.10/go.mod h1:8a7PlsEVH3e/a/GLqe5IIrQx6GzcnRmZEufDUTk4A7A= go.uber.org/goleak v1.1.11/go.mod h1:cwTWslyiVhfpKIDGSZEM2HlOvcqm+tG4zioyIeLoqMQ= go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= @@ -837,6 +840,8 @@ golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= +golang.org/x/oauth2 v0.22.0 h1:BzDx2FehcG7jJwgWLELCdmLuxk2i+x9UDpSiss2u0ZA= +golang.org/x/oauth2 v0.22.0/go.mod 
h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -965,7 +970,6 @@ golang.org/x/tools v0.0.0-20190927191325-030b2cf1153e/go.mod h1:b+2E5dAYhXwXZwtn golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191108193012-7d206e10da11/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= @@ -1001,7 +1005,6 @@ golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4f golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20210112230658-8b4aab62c064/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.5/go.mod 
h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= diff --git a/master/master.go b/master/master.go index d22e8764a..1b9c8e6af 100644 --- a/master/master.go +++ b/master/master.go @@ -24,8 +24,9 @@ import ( "sync" "time" - "github.com/ReneKroon/ttlcache/v2" + "github.com/coreos/go-oidc/v3/oidc" "github.com/emicklei/go-restful/v3" + "github.com/jellydator/ttlcache/v3" "github.com/juju/errors" "github.com/zhenghaoz/gorse/base" "github.com/zhenghaoz/gorse/base/encoding" @@ -45,6 +46,7 @@ import ( "go.opentelemetry.io/otel" "go.opentelemetry.io/otel/propagation" "go.uber.org/zap" + "golang.org/x/oauth2" "google.golang.org/grpc" ) @@ -67,7 +69,7 @@ type Master struct { managedMode bool // cluster meta cache - ttlCache *ttlcache.Cache + ttlCache *ttlcache.Cache[string, *Node] nodesInfo map[string]*Node nodesInfoMutex sync.RWMutex @@ -92,6 +94,11 @@ type Master struct { clickModelMutex sync.RWMutex clickModelSearcher *click.ModelSearcher + // oauth2 + oauth2Config oauth2.Config + verifier *oidc.IDTokenVerifier + tokenCache *ttlcache.Cache[string, UserInfo] + localCache *LocalCache // events @@ -210,12 +217,10 @@ func (m *Master) Serve() { } // create cluster meta cache - m.ttlCache = ttlcache.NewCache() - m.ttlCache.SetExpirationCallback(m.nodeDown) - m.ttlCache.SetNewItemCallback(m.nodeUp) - if err = m.ttlCache.SetTTL(m.Config.Master.MetaTimeout + 10*time.Second); err != nil { - log.Logger().Fatal("failed to set TTL", zap.Error(err)) - } + m.ttlCache = ttlcache.New[string, *Node]( + ttlcache.WithTTL[string, *Node](m.Config.Master.MetaTimeout + 10*time.Second)) + m.ttlCache.OnEviction(m.nodeDown) + go m.ttlCache.Start() // connect data database m.DataClient, err = data.Open(m.Config.Database.DataStore, m.Config.Database.DataTablePrefix) @@ -262,6 +267,24 @@ func (m *Master) Serve() { } }() + if m.Config.OIDC.Enable { + provider, err := oidc.NewProvider(context.Background(), m.Config.OIDC.Issuer) + if err != nil { + log.Logger().Error("failed to create oidc provider", 
zap.Error(err)) + } else { + m.verifier = provider.Verifier(&oidc.Config{ClientID: m.Config.OIDC.ClientID}) + m.oauth2Config = oauth2.Config{ + ClientID: m.Config.OIDC.ClientID, + ClientSecret: m.Config.OIDC.ClientSecret, + RedirectURL: m.Config.OIDC.RedirectURL, + Endpoint: provider.Endpoint(), + Scopes: []string{oidc.ScopeOpenID, "profile", "email"}, + } + m.tokenCache = ttlcache.New(ttlcache.WithTTL[string, UserInfo](time.Hour)) + go m.tokenCache.Start() + } + } + // start http server m.StartHttpServer() } diff --git a/master/rest.go b/master/rest.go index e5ea54de1..892a2d9d7 100644 --- a/master/rest.go +++ b/master/rest.go @@ -54,12 +54,30 @@ import ( "google.golang.org/protobuf/types/known/timestamppb" ) +type UserInfo struct { + Name string `json:"name"` + FamilyName string `json:"family_name"` + GivenName string `json:"given_name"` + MiddleName string `json:"middle_name"` + NickName string `json:"nickname"` + Picture string `json:"picture"` + UpdatedAt string `json:"updated_at"` + Email string `json:"email"` + Verified bool `json:"email_verified"` + AuthType string `json:"auth_type"` +} + func (m *Master) CreateWebService() { ws := m.WebService ws.Consumes(restful.MIME_JSON).Produces(restful.MIME_JSON) ws.Path("/api/") ws.Filter(m.LoginFilter) + ws.Route(ws.GET("/dashboard/userinfo").To(m.handleUserInfo). + Doc("Get login user information."). + Metadata(restfulspec.KeyOpenAPITags, []string{"dashboard"}). + Returns(http.StatusOK, "OK", UserInfo{}). + Writes(UserInfo{})) ws.Route(ws.GET("/dashboard/cluster").To(m.getCluster). Doc("Get nodes in the cluster."). Metadata(restfulspec.KeyOpenAPITags, []string{"dashboard"}). 
@@ -224,6 +242,7 @@ func (m *Master) StartHttpServer() { container.Handle("/", http.HandlerFunc(m.dashboard)) container.Handle("/login", http.HandlerFunc(m.login)) container.Handle("/logout", http.HandlerFunc(m.logout)) + container.Handle("/callback/oauth2", http.HandlerFunc(m.handleOAuth2Callback)) container.Handle("/api/purge", http.HandlerFunc(m.purge)) container.Handle("/api/bulk/users", http.HandlerFunc(m.importExportUsers)) container.Handle("/api/bulk/items", http.HandlerFunc(m.importExportItems)) @@ -309,8 +328,13 @@ func (m *Master) dashboard(response http.ResponseWriter, request *http.Request) _, err := staticFileSystem.Open(request.RequestURI) if request.RequestURI == "/" || os.IsNotExist(err) { if !m.checkLogin(request) { - http.Redirect(response, request, "/login", http.StatusFound) - log.Logger().Info(fmt.Sprintf("%s %s", request.Method, request.URL), zap.Int("status_code", http.StatusFound)) + if m.Config.OIDC.Enable { + // Redirect to OIDC login + http.Redirect(response, request, m.oauth2Config.AuthCodeURL(""), http.StatusFound) + } else { + http.Redirect(response, request, "/login", http.StatusFound) + log.Logger().Info(fmt.Sprintf("%s %s", request.Method, request.URL), zap.Int("status_code", http.StatusFound)) + } return } noCache(staticFileServer).ServeHTTP(response, request) @@ -319,59 +343,15 @@ func (m *Master) dashboard(response http.ResponseWriter, request *http.Request) staticFileServer.ServeHTTP(response, request) } -func (m *Master) checkToken(token string) (bool, error) { - resp, err := http.Get(fmt.Sprintf("%s/auth/dashboard/%s", m.Config.Master.DashboardAuthServer, token)) - if err != nil { - return false, errors.Trace(err) - } - if resp.StatusCode == http.StatusOK { - return true, nil - } else if resp.StatusCode == http.StatusUnauthorized { - return false, nil - } else { - if message, err := io.ReadAll(resp.Body); err != nil { - return false, errors.Trace(err) - } else { - return false, errors.New(string(message)) - } - } -} - func (m 
*Master) login(response http.ResponseWriter, request *http.Request) { switch request.Method { case http.MethodGet: log.Logger().Info("GET /login", zap.Int("status_code", http.StatusOK)) staticFileServer.ServeHTTP(response, request) case http.MethodPost: - token := request.FormValue("token") name := request.FormValue("user_name") pass := request.FormValue("password") - if m.Config.Master.DashboardAuthServer != "" { - // check access token - if isValid, err := m.checkToken(token); err != nil { - server.InternalServerError(restful.NewResponse(response), err) - return - } else if !isValid { - http.Redirect(response, request, "login?msg=incorrect", http.StatusFound) - log.Logger().Info("POST /login", zap.Int("status_code", http.StatusUnauthorized)) - return - } - // save token to cache - if encoded, err := cookieHandler.Encode("token", token); err != nil { - server.InternalServerError(restful.NewResponse(response), err) - return - } else { - cookie := &http.Cookie{ - Name: "token", - Value: encoded, - Path: "/", - } - http.SetCookie(response, cookie) - http.Redirect(response, request, "/", http.StatusFound) - log.Logger().Info("POST /login", zap.Int("status_code", http.StatusUnauthorized)) - return - } - } else if m.Config.Master.DashboardUserName != "" || m.Config.Master.DashboardPassword != "" { + if m.Config.Master.DashboardUserName != "" || m.Config.Master.DashboardPassword != "" { if name != m.Config.Master.DashboardUserName || pass != m.Config.Master.DashboardPassword { http.Redirect(response, request, "login?msg=incorrect", http.StatusFound) log.Logger().Info("POST /login", zap.Int("status_code", http.StatusUnauthorized)) @@ -433,13 +413,11 @@ func (m *Master) checkLogin(request *http.Request) bool { if m.Config.Master.AdminAPIKey != "" && m.Config.Master.AdminAPIKey == request.Header.Get("X-Api-Key") { return true } - if m.Config.Master.DashboardAuthServer != "" { - if tokenCookie, err := request.Cookie("token"); err == nil { + if m.Config.OIDC.Enable { + if 
tokenCookie, err := request.Cookie("id_token"); err == nil { var token string - if err = cookieHandler.Decode("token", tokenCookie.Value, &token); err == nil { - if isValid, err := m.checkToken(token); err != nil { - log.Logger().Error("failed to check access token", zap.Error(err)) - } else if isValid { + if err = cookieHandler.Decode("id_token", tokenCookie.Value, &token); err == nil { + if m.tokenCache.Get(token) != nil { return true } } @@ -461,6 +439,26 @@ func (m *Master) checkLogin(request *http.Request) bool { return true } +func (m *Master) handleUserInfo(request *restful.Request, response *restful.Response) { + if m.Config.OIDC.Enable { + if tokenCookie, err := request.Request.Cookie("id_token"); err == nil { + var token string + if err = cookieHandler.Decode("id_token", tokenCookie.Value, &token); err == nil { + if item := m.tokenCache.Get(token); item != nil { + userInfo := item.Value() + userInfo.AuthType = "OIDC" + server.Ok(response, userInfo) + return + } + } + } + } else if m.Config.Master.DashboardUserName != "" { + server.Ok(response, UserInfo{ + Name: m.Config.Master.DashboardUserName, + }) + } +} + func (m *Master) getCategories(request *restful.Request, response *restful.Response) { ctx := context.Background() if request != nil && request.Request != nil { @@ -1674,3 +1672,47 @@ func (m *Master) restore(response http.ResponseWriter, request *http.Request) { zap.Duration("duration", stats.Duration)) server.Ok(restful.NewResponse(response), stats) } + +func (m *Master) handleOAuth2Callback(w http.ResponseWriter, r *http.Request) { + // Verify state and errors. + oauth2Token, err := m.oauth2Config.Exchange(r.Context(), r.URL.Query().Get("code")) + if err != nil { + server.InternalServerError(restful.NewResponse(w), err) + return + } + // Extract the ID Token from OAuth2 token. 
+ rawIDToken, ok := oauth2Token.Extra("id_token").(string) + if !ok { + server.InternalServerError(restful.NewResponse(w), errors.New("missing id_token")) + return + } + // Parse and verify ID Token payload. + idToken, err := m.verifier.Verify(r.Context(), rawIDToken) + if err != nil { + server.InternalServerError(restful.NewResponse(w), err) + return + } + // Extract custom claims + var claims UserInfo + if err := idToken.Claims(&claims); err != nil { + server.InternalServerError(restful.NewResponse(w), err) + return + } + // Set token cache and cookie + m.tokenCache.Set(rawIDToken, claims, time.Until(idToken.Expiry)) + if encoded, err := cookieHandler.Encode("id_token", rawIDToken); err != nil { + server.InternalServerError(restful.NewResponse(w), err) + return + } else { + http.SetCookie(w, &http.Cookie{ + Name: "id_token", + Value: encoded, + Path: "/", + Expires: idToken.Expiry, + }) + http.Redirect(w, r, "/", http.StatusFound) + log.Logger().Info("login success via OIDC", + zap.String("name", claims.Name), + zap.String("email", claims.Email)) + } +} diff --git a/master/rest_test.go b/master/rest_test.go index 2f7321be1..2bfcab71b 100644 --- a/master/rest_test.go +++ b/master/rest_test.go @@ -20,7 +20,6 @@ import ( "encoding/json" "fmt" "mime/multipart" - "net" "net/http" "net/http/httptest" "strconv" @@ -765,88 +764,6 @@ func TestMaster_GetConfig(t *testing.T) { End() } -type mockAuthServer struct { - token string - srv *http.Server - ln net.Listener -} - -func NewMockAuthServer(token string) *mockAuthServer { - return &mockAuthServer{token: token} -} - -func (m *mockAuthServer) auth(request *restful.Request, response *restful.Response) { - token := request.PathParameter("token") - if token == m.token { - response.WriteHeader(http.StatusOK) - } else { - response.WriteHeader(http.StatusUnauthorized) - } -} - -func (m *mockAuthServer) Start(t *testing.T) { - ws := new(restful.WebService) - ws.Route(ws.GET("/auth/dashboard/{token}").To(m.auth)) - ct := 
restful.NewContainer() - ct.Add(ws) - m.srv = &http.Server{Handler: ct} - var err error - m.ln, err = net.Listen("tcp", "") - assert.NoError(t, err) - go func() { - if err = m.srv.Serve(m.ln); err != http.ErrServerClosed { - assert.NoError(t, err) - } - }() -} - -func (m *mockAuthServer) Close(t *testing.T) { - err := m.srv.Close() - assert.NoError(t, err) -} - -func (m *mockAuthServer) Addr() string { - return m.ln.Addr().String() -} - -func TestMaster_TokenLogin(t *testing.T) { - s, _ := newMockServer(t) - defer s.Close(t) - - // start auth server - authServer := NewMockAuthServer("abc") - authServer.Start(t) - defer authServer.Close(t) - s.Config.Master.DashboardAuthServer = fmt.Sprintf("http://%s", authServer.Addr()) - - // login fail - req := httptest.NewRequest("POST", "https://example.com/", - strings.NewReader("token=123")) - req.Header.Set("Content-Type", "application/x-www-form-urlencoded") - w := httptest.NewRecorder() - s.login(w, req) - assert.Equal(t, http.StatusFound, w.Code) - assert.Empty(t, w.Result().Cookies()) - - // login success - req = httptest.NewRequest("POST", "https://example.com/", - strings.NewReader("token=abc")) - req.Header.Set("Content-Type", "application/x-www-form-urlencoded") - w = httptest.NewRecorder() - s.login(w, req) - assert.Equal(t, http.StatusFound, w.Code) - assert.NotEmpty(t, w.Header().Get("Set-Cookie")) - - // validate cookie - apitest.New(). - Handler(s.handler). - Get("/api/dashboard/config"). - Header("Cookie", w.Header().Get("Set-Cookie")). - Expect(t). - Status(http.StatusOK). 
- End() -} - func TestDumpAndRestore(t *testing.T) { s, cookie := newMockServer(t) defer s.Close(t) diff --git a/master/rpc.go b/master/rpc.go index 34ece0d72..04bd0baec 100644 --- a/master/rpc.go +++ b/master/rpc.go @@ -20,6 +20,7 @@ import ( "io" "strings" + "github.com/jellydator/ttlcache/v3" "github.com/juju/errors" "github.com/zhenghaoz/gorse/base/log" "github.com/zhenghaoz/gorse/model/click" @@ -68,10 +69,10 @@ func (m *Master) GetMeta(ctx context.Context, nodeInfo *protocol.NodeInfo) (*pro // register node node := NewNode(ctx, nodeInfo) if node.Type != "" { - if err := m.ttlCache.Set(nodeInfo.NodeName, node); err != nil { - log.Logger().Error("failed to set ttl cache", zap.Error(err)) - return nil, err - } + m.ttlCache.Set(nodeInfo.NodeName, node, ttlcache.DefaultTTL) + m.nodesInfoMutex.Lock() + m.nodesInfo[nodeInfo.NodeName] = node + m.nodesInfoMutex.Unlock() } // marshall config s, err := json.Marshal(m.Config) @@ -209,28 +210,16 @@ func (m *Master) GetClickModel(version *protocol.VersionInfo, sender protocol.Ma return encoderError } -// nodeUp handles node information inserted events. -func (m *Master) nodeUp(key string, value interface{}) { - node := value.(*Node) - log.Logger().Info("node up", - zap.String("node_name", key), - zap.String("node_ip", node.IP), - zap.String("node_type", node.Type)) - m.nodesInfoMutex.Lock() - defer m.nodesInfoMutex.Unlock() - m.nodesInfo[key] = node -} - // nodeDown handles node information timeout events. 
-func (m *Master) nodeDown(key string, value interface{}) { - node := value.(*Node) +func (m *Master) nodeDown(ctx context.Context, reason ttlcache.EvictionReason, item *ttlcache.Item[string, *Node]) { + node := item.Value() log.Logger().Info("node down", - zap.String("node_name", key), + zap.String("node_name", item.Key()), zap.String("node_ip", node.IP), zap.String("node_type", node.Type)) m.nodesInfoMutex.Lock() defer m.nodesInfoMutex.Unlock() - delete(m.nodesInfo, key) + delete(m.nodesInfo, item.Key()) } func (m *Master) PushProgress( diff --git a/master/rpc_test.go b/master/rpc_test.go index 9c537b6a2..22954ce4f 100644 --- a/master/rpc_test.go +++ b/master/rpc_test.go @@ -21,7 +21,7 @@ import ( "testing" "time" - "github.com/ReneKroon/ttlcache/v2" + "github.com/jellydator/ttlcache/v3" "github.com/stretchr/testify/assert" "github.com/zhenghaoz/gorse/base/progress" "github.com/zhenghaoz/gorse/config" @@ -72,11 +72,9 @@ func newMockMasterRPC(_ *testing.T) *mockMasterRPC { } func (m *mockMasterRPC) Start(t *testing.T) { - m.ttlCache = ttlcache.NewCache() - m.ttlCache.SetExpirationCallback(m.nodeDown) - m.ttlCache.SetNewItemCallback(m.nodeUp) - err := m.ttlCache.SetTTL(time.Second) - assert.NoError(t, err) + m.ttlCache = ttlcache.New(ttlcache.WithTTL[string, *Node](time.Second)) + m.ttlCache.OnEviction(m.nodeDown) + go m.ttlCache.Start() listen, err := net.Listen("tcp", ":0") assert.NoError(t, err) From 40c703c301e2f6c6f0796443c522a46fce5af3ba Mon Sep 17 00:00:00 2001 From: zhenghaoz Date: Mon, 18 Nov 2024 13:43:17 +0800 Subject: [PATCH 10/14] config: support set isolation_level (#889) --- .github/workflows/build_test.yml | 2 +- config/config.go | 22 +++++++++++++++++----- config/config.toml | 5 +++++ config/config_test.go | 1 + master/master.go | 7 +++++-- storage/cache/database.go | 6 ++++-- storage/data/database.go | 6 ++++-- storage/options.go | 23 +++++++++++++++++++++++ storage/scheme.go | 2 +- 9 files changed, 61 insertions(+), 13 deletions(-) create mode 
100644 storage/options.go diff --git a/.github/workflows/build_test.yml b/.github/workflows/build_test.yml index 4cc97fff6..e51acae1e 100644 --- a/.github/workflows/build_test.yml +++ b/.github/workflows/build_test.yml @@ -238,7 +238,7 @@ jobs: uses: actions/checkout@v2 - name: Test MariaDB - run: go test ./storage/data -run ^TestMySQL_ + run: go test ./storage/data -run TestMySQL env: MYSQL_URI: mysql://root:password@tcp(localhost:${{ job.services.mariadb.ports[3306] }})/ diff --git a/config/config.go b/config/config.go index 31a2e7474..8ae0b4305 100644 --- a/config/config.go +++ b/config/config.go @@ -65,11 +65,16 @@ type Config struct { // DatabaseConfig is the configuration for the database. type DatabaseConfig struct { - DataStore string `mapstructure:"data_store" validate:"required,data_store"` // database for data store - CacheStore string `mapstructure:"cache_store" validate:"required,cache_store"` // database for cache store - TablePrefix string `mapstructure:"table_prefix"` - DataTablePrefix string `mapstructure:"data_table_prefix"` - CacheTablePrefix string `mapstructure:"cache_table_prefix"` + DataStore string `mapstructure:"data_store" validate:"required,data_store"` // database for data store + CacheStore string `mapstructure:"cache_store" validate:"required,cache_store"` // database for cache store + TablePrefix string `mapstructure:"table_prefix"` + DataTablePrefix string `mapstructure:"data_table_prefix"` + CacheTablePrefix string `mapstructure:"cache_table_prefix"` + MySQL MySQLConfig `mapstructure:"mysql"` +} + +type MySQLConfig struct { + IsolationLevel string `mapstructure:"isolation_level" validate:"oneof=READ-UNCOMMITTED READ-COMMITTED REPEATABLE-READ SERIALIZABLE"` } // MasterConfig is the configuration for the master. 
@@ -189,6 +194,11 @@ type OIDCConfig struct { func GetDefaultConfig() *Config { return &Config{ + Database: DatabaseConfig{ + MySQL: MySQLConfig{ + IsolationLevel: "READ-UNCOMMITTED", + }, + }, Master: MasterConfig{ Port: 8086, Host: "0.0.0.0", @@ -476,6 +486,8 @@ func (config *TracingConfig) Equal(other TracingConfig) bool { func setDefault() { defaultConfig := GetDefaultConfig() + // [database.mysql] + viper.SetDefault("database.mysql.isolation_level", defaultConfig.Database.MySQL.IsolationLevel) // [master] viper.SetDefault("master.port", defaultConfig.Master.Port) viper.SetDefault("master.host", defaultConfig.Master.Host) diff --git a/config/config.toml b/config/config.toml index 0e2f0cef3..d5a6e78a7 100644 --- a/config/config.toml +++ b/config/config.toml @@ -29,6 +29,11 @@ cache_table_prefix = "" # The naming prefix for tables (collections, keys) in data storage databases. The default value is `table_prefix`. data_table_prefix = "" +[database.mysql] + +# Transaction isolation level. The default value is "READ-UNCOMMITTED". +isolation_level = "READ-UNCOMMITTED" + [master] # GRPC port of the master node. The default value is 8086. 
diff --git a/config/config_test.go b/config/config_test.go index 4535a4707..372e8ee88 100644 --- a/config/config_test.go +++ b/config/config_test.go @@ -65,6 +65,7 @@ func TestUnmarshal(t *testing.T) { assert.Equal(t, "gorse_", config.Database.TablePrefix) assert.Equal(t, "gorse_cache_", config.Database.CacheTablePrefix) assert.Equal(t, "gorse_data_", config.Database.DataTablePrefix) + assert.Equal(t, "READ-UNCOMMITTED", config.Database.MySQL.IsolationLevel) // [master] assert.Equal(t, 8086, config.Master.Port) assert.Equal(t, "0.0.0.0", config.Master.Host) diff --git a/master/master.go b/master/master.go index 1b9c8e6af..f7f385a8f 100644 --- a/master/master.go +++ b/master/master.go @@ -41,6 +41,7 @@ import ( "github.com/zhenghaoz/gorse/model/ranking" "github.com/zhenghaoz/gorse/protocol" "github.com/zhenghaoz/gorse/server" + "github.com/zhenghaoz/gorse/storage" "github.com/zhenghaoz/gorse/storage/cache" "github.com/zhenghaoz/gorse/storage/data" "go.opentelemetry.io/otel" @@ -223,7 +224,8 @@ func (m *Master) Serve() { go m.ttlCache.Start() // connect data database - m.DataClient, err = data.Open(m.Config.Database.DataStore, m.Config.Database.DataTablePrefix) + m.DataClient, err = data.Open(m.Config.Database.DataStore, m.Config.Database.DataTablePrefix, + storage.WithIsolationLevel(m.Config.Database.MySQL.IsolationLevel)) if err != nil { log.Logger().Fatal("failed to connect data database", zap.Error(err), zap.String("database", log.RedactDBURL(m.Config.Database.DataStore))) @@ -233,7 +235,8 @@ func (m *Master) Serve() { } // connect cache database - m.CacheClient, err = cache.Open(m.Config.Database.CacheStore, m.Config.Database.CacheTablePrefix) + m.CacheClient, err = cache.Open(m.Config.Database.CacheStore, m.Config.Database.CacheTablePrefix, + storage.WithIsolationLevel(m.Config.Database.MySQL.IsolationLevel)) if err != nil { log.Logger().Fatal("failed to connect cache database", zap.Error(err), zap.String("database", 
log.RedactDBURL(m.Config.Database.CacheStore))) diff --git a/storage/cache/database.go b/storage/cache/database.go index 60b58d636..a5b6f98bf 100644 --- a/storage/cache/database.go +++ b/storage/cache/database.go @@ -16,6 +16,7 @@ package cache import ( "context" + "fmt" "math" "sort" "strconv" @@ -300,7 +301,7 @@ type Database interface { } // Open a connection to a database. -func Open(path, tablePrefix string) (Database, error) { +func Open(path, tablePrefix string, opts ...storage.Option) (Database, error) { var err error if strings.HasPrefix(path, storage.RedisPrefix) || strings.HasPrefix(path, storage.RedissPrefix) { opt, err := redis.ParseURL(path) @@ -350,6 +351,7 @@ func Open(path, tablePrefix string) (Database, error) { return database, nil } else if strings.HasPrefix(path, storage.MySQLPrefix) { name := path[len(storage.MySQLPrefix):] + option := storage.NewOptions(opts...) // probe isolation variable name isolationVarName, err := storage.ProbeMySQLIsolationVariableName(name) if err != nil { @@ -357,7 +359,7 @@ func Open(path, tablePrefix string) (Database, error) { } // append parameters if name, err = storage.AppendMySQLParams(name, map[string]string{ - isolationVarName: "'READ-UNCOMMITTED'", + isolationVarName: fmt.Sprintf("'%s'", option.IsolationLevel), "parseTime": "true", }); err != nil { return nil, errors.Trace(err) diff --git a/storage/data/database.go b/storage/data/database.go index 3dfaeee5c..911ef3bfd 100644 --- a/storage/data/database.go +++ b/storage/data/database.go @@ -17,6 +17,7 @@ package data import ( "context" "encoding/json" + "fmt" "net/url" "reflect" "sort" @@ -254,10 +255,11 @@ type Database interface { } // Open a connection to a database. -func Open(path, tablePrefix string) (Database, error) { +func Open(path, tablePrefix string, opts ...storage.Option) (Database, error) { var err error if strings.HasPrefix(path, storage.MySQLPrefix) { name := path[len(storage.MySQLPrefix):] + option := storage.NewOptions(opts...) 
// probe isolation variable name isolationVarName, err := storage.ProbeMySQLIsolationVariableName(name) if err != nil { @@ -266,7 +268,7 @@ func Open(path, tablePrefix string) (Database, error) { // append parameters if name, err = storage.AppendMySQLParams(name, map[string]string{ "sql_mode": "'ONLY_FULL_GROUP_BY,STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_ENGINE_SUBSTITUTION'", - isolationVarName: "'READ-UNCOMMITTED'", + isolationVarName: fmt.Sprintf("'%s'", option.IsolationLevel), "parseTime": "true", }); err != nil { return nil, errors.Trace(err) diff --git a/storage/options.go b/storage/options.go new file mode 100644 index 000000000..9da303b94 --- /dev/null +++ b/storage/options.go @@ -0,0 +1,23 @@ +package storage + +type Options struct { + IsolationLevel string +} + +type Option func(*Options) + +func WithIsolationLevel(isolationLevel string) Option { + return func(o *Options) { + o.IsolationLevel = isolationLevel + } +} + +func NewOptions(opts ...Option) Options { + opt := Options{ + IsolationLevel: "READ-UNCOMMITTED", + } + for _, o := range opts { + o(&opt) + } + return opt +} diff --git a/storage/scheme.go b/storage/scheme.go index ae60fdc9d..4bce902e0 100644 --- a/storage/scheme.go +++ b/storage/scheme.go @@ -77,7 +77,7 @@ func ProbeMySQLIsolationVariableName(dsn string) (string, error) { return "", errors.Trace(err) } defer connection.Close() - rows, err := connection.Query("SHOW VARIABLES LIKE '%isolation%'") + rows, err := connection.Query("SHOW VARIABLES WHERE variable_name = 'transaction_isolation' OR variable_name = 'tx_isolation'") if err != nil { return "", errors.Trace(err) } From 2a4227f149eba060a293930ce90a654b1e775533 Mon Sep 17 00:00:00 2001 From: zhenghaoz Date: Mon, 18 Nov 2024 20:21:23 +0800 Subject: [PATCH 11/14] remove Intel macOS support (#891) --- .circleci/config.yml | 82 ----------------------------- .github/workflows/build_release.yml | 3 +- .github/workflows/build_test.yml | 55 +++++++++++-------- 3 files changed, 34 
insertions(+), 106 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 342b85159..ebf25f8b0 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -61,89 +61,7 @@ jobs: no_output_timeout: 20m command: go test -timeout 20m -v ./... - unit-test-macos: - macos: - xcode: 14.2.0 - working_directory: ~/repo - steps: - - checkout - - run: - name: Set up dataset - command: | - mkdir -p ~/.gorse/dataset - mkdir -p ~/.gorse/download - wget https://cdn.gorse.io/datasets/ml-100k.zip -P ~/.gorse/download - wget https://cdn.gorse.io/datasets/ml-1m.zip -P ~/.gorse/download - wget https://cdn.gorse.io/datasets/pinterest-20.zip -P ~/.gorse/download - wget https://cdn.gorse.io/datasets/frappe.zip -P ~/.gorse/download - wget https://cdn.gorse.io/datasets/ml-tag.zip -P ~/.gorse/download - wget https://cdn.gorse.io/datasets/criteo.zip -P ~/.gorse/download - unzip ~/.gorse/download/ml-100k.zip -d ~/.gorse/dataset - unzip ~/.gorse/download/ml-1m.zip -d ~/.gorse/dataset - unzip ~/.gorse/download/pinterest-20.zip -d ~/.gorse/dataset - unzip ~/.gorse/download/frappe.zip -d ~/.gorse/dataset - unzip ~/.gorse/download/ml-tag.zip -d ~/.gorse/dataset - unzip ~/.gorse/download/criteo.zip -d ~/.gorse/dataset - - run: - name: Install Go - command: brew install go - - restore_cache: - keys: - - go-mod-v4-{{ checksum "go.sum" }} - - run: - name: Install Dependencies - command: go mod download - - save_cache: - key: go-mod-v4-{{ checksum "go.sum" }} - paths: - - "/go/pkg/mod" - - run: - name: Run tests - no_output_timeout: 20m - command: go test -timeout 20m -v ./... 
-skip "TestPostgres|TestMySQL|TestMongo|TestRedis|TestClickHouse" - - unit-test-windows: - executor: win/server-2022 - steps: - - checkout - - run: - name: Set up dataset - command: | - New-Item -Type Directory -Path ~/.gorse/dataset - New-Item -Type Directory -Path ~/.gorse/download - Invoke-WebRequest https://cdn.gorse.io/datasets/ml-100k.zip -OutFile ~/.gorse/download/ml-100k.zip - Invoke-WebRequest https://cdn.gorse.io/datasets/ml-1m.zip -OutFile ~/.gorse/download/ml-1m.zip - Invoke-WebRequest https://cdn.gorse.io/datasets/pinterest-20.zip -OutFile ~/.gorse/download/pinterest-20.zip - Invoke-WebRequest https://cdn.gorse.io/datasets/frappe.zip -OutFile ~/.gorse/download/frappe.zip - Invoke-WebRequest https://cdn.gorse.io/datasets/ml-tag.zip -OutFile ~/.gorse/download/ml-tag.zip - Invoke-WebRequest https://cdn.gorse.io/datasets/criteo.zip -OutFile ~/.gorse/download/criteo.zip - Expand-Archive ~/.gorse/download/ml-100k.zip -DestinationPath ~/.gorse/dataset - Expand-Archive ~/.gorse/download/ml-1m.zip -DestinationPath ~/.gorse/dataset - Expand-Archive ~/.gorse/download/pinterest-20.zip -DestinationPath ~/.gorse/dataset - Expand-Archive ~/.gorse/download/frappe.zip -DestinationPath ~/.gorse/dataset - Expand-Archive ~/.gorse/download/ml-tag.zip -DestinationPath ~/.gorse/dataset - Expand-Archive ~/.gorse/download/criteo.zip -DestinationPath ~/.gorse/dataset - - run: - name: Upgrade Go - command: choco upgrade golang -y - - restore_cache: - keys: - - go-mod-v4-{{ checksum "go.sum" }} - - run: - name: Install Dependencies - command: go mod download - - save_cache: - key: go-mod-v4-{{ checksum "go.sum" }} - paths: - - "/go/pkg/mod" - - run: - name: Run tests - no_output_timeout: 20m - command: go test -timeout 20m -v ./... 
-skip "TestPostgres|TestMySQL|TestMongo|TestRedis|TestClickHouse" - workflows: unit-test: jobs: - unit-test-arm64 - - unit-test-macos - - unit-test-windows diff --git a/.github/workflows/build_release.yml b/.github/workflows/build_release.yml index e16f1c8ea..6c94d9905 100644 --- a/.github/workflows/build_release.yml +++ b/.github/workflows/build_release.yml @@ -24,7 +24,7 @@ jobs: - name: Build release run: > gox -output="{{.OS}}/{{.Arch}}/{{.Dir}}" \ - -osarch='darwin/arm64 darwin/amd64 windows/arm64 windows/amd64 linux/arm64 linux/amd64' -ldflags=" + -osarch='darwin/arm64 windows/arm64 windows/amd64 linux/arm64 linux/amd64' -ldflags=" -X 'github.com/zhenghaoz/gorse/cmd/version.Version=$(git describe --tags $(git rev-parse HEAD))' -X 'github.com/zhenghaoz/gorse/cmd/version.GitCommit=$(git rev-parse HEAD)' -X 'github.com/zhenghaoz/gorse/cmd/version.BuildTime=$(date)'" ./... @@ -40,7 +40,6 @@ jobs: zip -j gorse_linux_arm64.zip linux/arm64/gorse-* zip -j gorse_windows_amd64.zip windows/amd64/gorse-* zip -j gorse_windows_arm64.zip windows/arm64/gorse-* - zip -j gorse_darwin_amd64.zip darwin/amd64/gorse-* zip -j gorse_darwin_arm64.zip darwin/arm64/gorse-* - name: Upload release diff --git a/.github/workflows/build_test.yml b/.github/workflows/build_test.yml index e51acae1e..89deb6cad 100644 --- a/.github/workflows/build_test.yml +++ b/.github/workflows/build_test.yml @@ -92,17 +92,6 @@ jobs: - name: Check out code into the Go module directory uses: actions/checkout@v2 - - name: Get dependencies - run: | - go get -v -t -d ./... - if [ -f Gopkg.toml ]; then - curl https://raw.githubusercontent.com/golang/dep/master/install.sh | sh - dep ensure - fi - - - name: Build - run: go build -v ./... - - name: Test run: go test -timeout 20m -v ./... 
-coverprofile=coverage.txt -covermode=atomic env: @@ -120,8 +109,8 @@ jobs: - name: Upload run: bash <(curl -s https://codecov.io/bash) - unit_test_m1: - name: unit tests (M1) + unit_test_macos: + name: unit tests (macOS) runs-on: macos-latest steps: - name: Set up dataset @@ -150,16 +139,38 @@ jobs: - name: Check out code into the Go module directory uses: actions/checkout@v2 - - name: Get dependencies + - name: Test + run: go test -timeout 20m -v ./... -skip "TestPostgres|TestMySQL|TestMongo|TestRedis|TestClickHouse" + + unit_test_windows: + name: unit tests (Windows) + runs-on: windows-latest + steps: + - name: Set up dataset run: | - go get -v -t -d ./... - if [ -f Gopkg.toml ]; then - curl https://raw.githubusercontent.com/golang/dep/master/install.sh | sh - dep ensure - fi - - - name: Build - run: go build -v ./... + New-Item -Type Directory -Path ~/.gorse/dataset + New-Item -Type Directory -Path ~/.gorse/download + Invoke-WebRequest https://cdn.gorse.io/datasets/ml-100k.zip -OutFile ~/.gorse/download/ml-100k.zip + Invoke-WebRequest https://cdn.gorse.io/datasets/ml-1m.zip -OutFile ~/.gorse/download/ml-1m.zip + Invoke-WebRequest https://cdn.gorse.io/datasets/pinterest-20.zip -OutFile ~/.gorse/download/pinterest-20.zip + Invoke-WebRequest https://cdn.gorse.io/datasets/frappe.zip -OutFile ~/.gorse/download/frappe.zip + Invoke-WebRequest https://cdn.gorse.io/datasets/ml-tag.zip -OutFile ~/.gorse/download/ml-tag.zip + Invoke-WebRequest https://cdn.gorse.io/datasets/criteo.zip -OutFile ~/.gorse/download/criteo.zip + Expand-Archive ~/.gorse/download/ml-100k.zip -DestinationPath ~/.gorse/dataset + Expand-Archive ~/.gorse/download/ml-1m.zip -DestinationPath ~/.gorse/dataset + Expand-Archive ~/.gorse/download/pinterest-20.zip -DestinationPath ~/.gorse/dataset + Expand-Archive ~/.gorse/download/frappe.zip -DestinationPath ~/.gorse/dataset + Expand-Archive ~/.gorse/download/ml-tag.zip -DestinationPath ~/.gorse/dataset + Expand-Archive ~/.gorse/download/criteo.zip 
-DestinationPath ~/.gorse/dataset + + - name: Set up Go 1.23.x + uses: actions/setup-go@v4 + with: + go-version: 1.23.x + id: go + + - name: Check out code into the Go module directory + uses: actions/checkout@v2 - name: Test run: go test -timeout 20m -v ./... -skip "TestPostgres|TestMySQL|TestMongo|TestRedis|TestClickHouse" From cca6da7d4bea4973e3e294066d7b931e5aefde7e Mon Sep 17 00:00:00 2001 From: zhenghaoz Date: Thu, 28 Nov 2024 21:59:08 +0800 Subject: [PATCH 12/14] support mTLS between nodes (#893) --- cmd/gorse-in-one/main.go | 2 +- cmd/gorse-server/main.go | 24 +++++++++++- cmd/gorse-worker/main.go | 24 +++++++++++- config/config.go | 8 ++++ config/config.toml | 12 ++++++ config/config_test.go | 16 ++++++++ go.mod | 2 + go.sum | 6 +++ master/master.go | 20 +++++++++- master/rpc_test.go | 84 +++++++++++++++++++++++++++++++++++++++ protocol/tls.go | 85 ++++++++++++++++++++++++++++++++++++++++ server/server.go | 23 ++++++++++- worker/worker.go | 25 +++++++++++- 13 files changed, 322 insertions(+), 9 deletions(-) create mode 100644 protocol/tls.go diff --git a/cmd/gorse-in-one/main.go b/cmd/gorse-in-one/main.go index ea9cd9297..f9876bd99 100644 --- a/cmd/gorse-in-one/main.go +++ b/cmd/gorse-in-one/main.go @@ -103,7 +103,7 @@ var oneCommand = &cobra.Command{ // Start worker workerJobs, _ := cmd.PersistentFlags().GetInt("recommend-jobs") w := worker.NewWorker(conf.Master.Host, conf.Master.Port, conf.Master.Host, - 0, workerJobs, "", managedMode) + 0, workerJobs, "", managedMode, nil) go func() { w.SetOneMode(m.Settings) w.Serve() diff --git a/cmd/gorse-server/main.go b/cmd/gorse-server/main.go index 4d2efc35d..b87abfda6 100644 --- a/cmd/gorse-server/main.go +++ b/cmd/gorse-server/main.go @@ -21,6 +21,7 @@ import ( "github.com/spf13/cobra" "github.com/zhenghaoz/gorse/base/log" "github.com/zhenghaoz/gorse/cmd/version" + "github.com/zhenghaoz/gorse/protocol" "github.com/zhenghaoz/gorse/server" "go.uber.org/zap" ) @@ -46,7 +47,25 @@ var serverCommand = &cobra.Command{ 
httpPort, _ := cmd.PersistentFlags().GetInt("http-port") httpHost, _ := cmd.PersistentFlags().GetString("http-host") cachePath, _ := cmd.PersistentFlags().GetString("cache-path") - s := server.NewServer(masterHost, masterPort, httpHost, httpPort, cachePath) + caFile, _ := cmd.PersistentFlags().GetString("ssl-ca") + certFile, _ := cmd.PersistentFlags().GetString("ssl-cert") + keyFile, _ := cmd.PersistentFlags().GetString("ssl-key") + var tlsConfig *protocol.TLSConfig + if caFile != "" && certFile != "" && keyFile != "" { + tlsConfig = &protocol.TLSConfig{ + SSLCA: caFile, + SSLCert: certFile, + SSLKey: keyFile, + } + } else if caFile == "" && certFile == "" && keyFile == "" { + tlsConfig = nil + } else { + log.Logger().Fatal("incomplete SSL configuration", + zap.String("ssl_ca", caFile), + zap.String("ssl_cert", certFile), + zap.String("ssl_key", keyFile)) + } + s := server.NewServer(masterHost, masterPort, httpHost, httpPort, cachePath, tlsConfig) // stop server done := make(chan struct{}) @@ -74,6 +93,9 @@ func init() { serverCommand.PersistentFlags().String("http-host", "127.0.0.1", "port for RESTful APIs and Prometheus metrics export") serverCommand.PersistentFlags().Bool("debug", false, "use debug log mode") serverCommand.PersistentFlags().String("cache-path", "server_cache.data", "path of cache file") + serverCommand.PersistentFlags().String("ssl-ca", "", "path of SSL CA") + serverCommand.PersistentFlags().String("ssl-cert", "", "path of SSL certificate") + serverCommand.PersistentFlags().String("ssl-key", "", "path of SSL key") } func main() { diff --git a/cmd/gorse-worker/main.go b/cmd/gorse-worker/main.go index 9769138e6..4cf9927ee 100644 --- a/cmd/gorse-worker/main.go +++ b/cmd/gorse-worker/main.go @@ -20,6 +20,7 @@ import ( "github.com/spf13/cobra" "github.com/zhenghaoz/gorse/base/log" "github.com/zhenghaoz/gorse/cmd/version" + "github.com/zhenghaoz/gorse/protocol" "github.com/zhenghaoz/gorse/worker" "go.uber.org/zap" ) @@ -45,7 +46,25 @@ var 
workerCommand = &cobra.Command{ log.SetLogger(cmd.PersistentFlags(), debug) // create worker cachePath, _ := cmd.PersistentFlags().GetString("cache-path") - w := worker.NewWorker(masterHost, masterPort, httpHost, httpPort, workingJobs, cachePath, managedModel) + caFile, _ := cmd.PersistentFlags().GetString("ssl-ca") + certFile, _ := cmd.PersistentFlags().GetString("ssl-cert") + keyFile, _ := cmd.PersistentFlags().GetString("ssl-key") + var tlsConfig *protocol.TLSConfig + if caFile != "" && certFile != "" && keyFile != "" { + tlsConfig = &protocol.TLSConfig{ + SSLCA: caFile, + SSLCert: certFile, + SSLKey: keyFile, + } + } else if caFile == "" && certFile == "" && keyFile == "" { + tlsConfig = nil + } else { + log.Logger().Fatal("incomplete SSL configuration", + zap.String("ssl_ca", caFile), + zap.String("ssl_cert", certFile), + zap.String("ssl_key", keyFile)) + } + w := worker.NewWorker(masterHost, masterPort, httpHost, httpPort, workingJobs, cachePath, managedModel, tlsConfig) w.Serve() }, } @@ -61,6 +80,9 @@ func init() { workerCommand.PersistentFlags().Bool("managed", false, "enable managed mode") workerCommand.PersistentFlags().IntP("jobs", "j", 1, "number of working jobs.") workerCommand.PersistentFlags().String("cache-path", "worker_cache.data", "path of cache file") + workerCommand.PersistentFlags().String("ssl-ca", "", "path of SSL CA") + workerCommand.PersistentFlags().String("ssl-cert", "", "path to SSL certificate") + workerCommand.PersistentFlags().String("ssl-key", "", "path to SSL key") } func main() { diff --git a/config/config.go b/config/config.go index 8ae0b4305..9f6f0ddcf 100644 --- a/config/config.go +++ b/config/config.go @@ -81,6 +81,10 @@ type MySQLConfig struct { type MasterConfig struct { Port int `mapstructure:"port" validate:"gte=0"` // master port Host string `mapstructure:"host"` // master host + SSLMode bool `mapstructure:"ssl_mode"` // enable SSL mode + SSLCA string `mapstructure:"ssl_ca"` // SSL CA file + SSLCert string 
`mapstructure:"ssl_cert"` // SSL certificate file + SSLKey string `mapstructure:"ssl_key"` // SSL key file HttpPort int `mapstructure:"http_port" validate:"gte=0"` // HTTP port HttpHost string `mapstructure:"http_host"` // HTTP host HttpCorsDomains []string `mapstructure:"http_cors_domains"` // add allowed cors domains @@ -569,6 +573,10 @@ func LoadConfig(path string, oneModel bool) (*Config, error) { {"database.data_table_prefix", "GORSE_DATA_TABLE_PREFIX"}, {"master.port", "GORSE_MASTER_PORT"}, {"master.host", "GORSE_MASTER_HOST"}, + {"master.ssl_mode", "GORSE_MASTER_SSL_MODE"}, + {"master.ssl_ca", "GORSE_MASTER_SSL_CA"}, + {"master.ssl_cert", "GORSE_MASTER_SSL_CERT"}, + {"master.ssl_key", "GORSE_MASTER_SSL_KEY"}, {"master.http_port", "GORSE_MASTER_HTTP_PORT"}, {"master.http_host", "GORSE_MASTER_HTTP_HOST"}, {"master.n_jobs", "GORSE_MASTER_JOBS"}, diff --git a/config/config.toml b/config/config.toml index d5a6e78a7..54c43cc89 100644 --- a/config/config.toml +++ b/config/config.toml @@ -42,6 +42,18 @@ port = 8086 # gRPC host of the master node. The default values is "0.0.0.0". host = "0.0.0.0" +# Enable SSL for the gRPC communication. The default value is false. +ssl_mode = false + +# SSL certification authority for the gRPC communication. +ssl_ca = "" + +# SSL certification for the gRPC communication. +ssl_cert = "" + +# SSL certification key for the gRPC communication. +ssl_key = "" + # HTTP port of the master node. The default values is 8088. 
http_port = 8088 diff --git a/config/config_test.go b/config/config_test.go index 372e8ee88..8f3c80eb2 100644 --- a/config/config_test.go +++ b/config/config_test.go @@ -32,6 +32,10 @@ func TestUnmarshal(t *testing.T) { data, err := os.ReadFile("config.toml") assert.NoError(t, err) text := string(data) + text = strings.Replace(text, "ssl_mode = false", "ssl_mode = true", -1) + text = strings.Replace(text, "ssl_ca = \"\"", "ssl_ca = \"ca.pem\"", -1) + text = strings.Replace(text, "ssl_cert = \"\"", "ssl_cert = \"cert.pem\"", -1) + text = strings.Replace(text, "ssl_key = \"\"", "ssl_key = \"key.pem\"", -1) text = strings.Replace(text, "dashboard_user_name = \"\"", "dashboard_user_name = \"admin\"", -1) text = strings.Replace(text, "dashboard_password = \"\"", "dashboard_password = \"password\"", -1) text = strings.Replace(text, "admin_api_key = \"\"", "admin_api_key = \"super_api_key\"", -1) @@ -69,6 +73,10 @@ func TestUnmarshal(t *testing.T) { // [master] assert.Equal(t, 8086, config.Master.Port) assert.Equal(t, "0.0.0.0", config.Master.Host) + assert.Equal(t, true, config.Master.SSLMode) + assert.Equal(t, "ca.pem", config.Master.SSLCA) + assert.Equal(t, "cert.pem", config.Master.SSLCert) + assert.Equal(t, "key.pem", config.Master.SSLKey) assert.Equal(t, 8088, config.Master.HttpPort) assert.Equal(t, "0.0.0.0", config.Master.HttpHost) assert.Equal(t, []string{".*"}, config.Master.HttpCorsDomains) @@ -181,6 +189,10 @@ func TestBindEnv(t *testing.T) { {"GORSE_CACHE_TABLE_PREFIX", "gorse_cache_"}, {"GORSE_MASTER_PORT", "123"}, {"GORSE_MASTER_HOST", ""}, + {"GORSE_MASTER_SSL_MODE", "true"}, + {"GORSE_MASTER_SSL_CA", "ca.pem"}, + {"GORSE_MASTER_SSL_CERT", "cert.pem"}, + {"GORSE_MASTER_SSL_KEY", "key.pem"}, {"GORSE_MASTER_HTTP_PORT", "456"}, {"GORSE_MASTER_HTTP_HOST", ""}, {"GORSE_MASTER_JOBS", "789"}, @@ -209,6 +221,10 @@ func TestBindEnv(t *testing.T) { assert.Equal(t, "gorse_data_", config.Database.DataTablePrefix) assert.Equal(t, 123, config.Master.Port) 
assert.Equal(t, "", config.Master.Host) + assert.Equal(t, true, config.Master.SSLMode) + assert.Equal(t, "ca.pem", config.Master.SSLCA) + assert.Equal(t, "cert.pem", config.Master.SSLCert) + assert.Equal(t, "key.pem", config.Master.SSLKey) assert.Equal(t, 456, config.Master.HttpPort) assert.Equal(t, "", config.Master.HttpHost) assert.Equal(t, 789, config.Master.NumJobs) diff --git a/go.mod b/go.mod index b08691f50..0a46704b0 100644 --- a/go.mod +++ b/go.mod @@ -30,6 +30,7 @@ require ( github.com/klauspost/cpuid/v2 v2.2.3 github.com/lafikl/consistent v0.0.0-20220512074542-bdd3606bfc3e github.com/lib/pq v1.10.6 + github.com/madflojo/testcerts v1.3.0 github.com/mailru/go-clickhouse/v2 v2.0.1-0.20221121001540-b259988ad8e5 github.com/mitchellh/mapstructure v1.5.0 github.com/orcaman/concurrent-map v1.0.0 @@ -63,6 +64,7 @@ require ( golang.org/x/exp v0.0.0-20220722155223-a9213eeb770e golang.org/x/oauth2 v0.22.0 google.golang.org/grpc v1.67.1 + google.golang.org/grpc/security/advancedtls v1.0.0 google.golang.org/protobuf v1.35.1 gopkg.in/natefinch/lumberjack.v2 v2.2.1 gopkg.in/yaml.v2 v2.4.0 diff --git a/go.sum b/go.sum index 29b72e9e8..d96f843f8 100644 --- a/go.sum +++ b/go.sum @@ -445,6 +445,8 @@ github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= github.com/lib/pq v1.10.2/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/lib/pq v1.10.6 h1:jbk+ZieJ0D7EVGJYpL9QTz7/YW6UHbmdnZWYyK5cdBs= github.com/lib/pq v1.10.6/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/madflojo/testcerts v1.3.0 h1:H6r7WlzfeLqzcuOglfAlnj5Rkt5iQoH1ctTi7FsLOdE= +github.com/madflojo/testcerts v1.3.0/go.mod h1:MW8sh39gLnkKh4K0Nc55AyHEDl9l/FBLDUsQhpmkuo0= github.com/magiconair/properties v1.8.6 h1:5ibWZ6iY0NctNGWo87LalDlEZ6R41TqbbDamhfG/Qzo= github.com/magiconair/properties v1.8.6/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod 
h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= @@ -1128,6 +1130,10 @@ google.golang.org/grpc v1.39.0/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnD google.golang.org/grpc v1.67.1 h1:zWnc1Vrcno+lHZCOofnIMvycFcc0QRGIzm9dhnDX68E= google.golang.org/grpc v1.67.1/go.mod h1:1gLDyUQU7CTLJI90u3nXZ9ekeghjeM7pTDZlqFNg2AA= google.golang.org/grpc/cmd/protoc-gen-go-grpc v0.0.0-20200910201057-6591123024b3/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= +google.golang.org/grpc/examples v0.0.0-20201112215255-90f1b3ee835b h1:NuxyvVZoDfHZwYW9LD4GJiF5/nhiSyP4/InTrvw9Ibk= +google.golang.org/grpc/examples v0.0.0-20201112215255-90f1b3ee835b/go.mod h1:IBqQ7wSUJ2Ep09a8rMWFsg4fmI2r38zwsq8a0GgxXpM= +google.golang.org/grpc/security/advancedtls v1.0.0 h1:/KQ7VP/1bs53/aopk9QhuPyFAp9Dm9Ejix3lzYkCrDA= +google.golang.org/grpc/security/advancedtls v1.0.0/go.mod h1:o+s4go+e1PJ2AjuQMY5hU82W7lDlefjJA6FqEHRVHWk= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= diff --git a/master/master.go b/master/master.go index f7f385a8f..4ea0e2326 100644 --- a/master/master.go +++ b/master/master.go @@ -258,12 +258,28 @@ func (m *Master) Serve() { go func() { log.Logger().Info("start rpc server", zap.String("host", m.Config.Master.Host), - zap.Int("port", m.Config.Master.Port)) + zap.Int("port", m.Config.Master.Port), + zap.Bool("ssl_mode", m.Config.Master.SSLMode), + zap.String("ssl_ca", m.Config.Master.SSLCA), + zap.String("ssl_cert", m.Config.Master.SSLCert), + zap.String("ssl_key", m.Config.Master.SSLKey)) + opts := []grpc.ServerOption{grpc.MaxSendMsgSize(math.MaxInt)} + if m.Config.Master.SSLMode { + c, err := protocol.NewServerCreds(&protocol.TLSConfig{ + SSLCA: m.Config.Master.SSLCA, + SSLCert: 
m.Config.Master.SSLCert, + SSLKey: m.Config.Master.SSLKey, + }) + if err != nil { + log.Logger().Fatal("failed to load server TLS", zap.Error(err)) + } + opts = append(opts, grpc.Creds(c)) + } lis, err := net.Listen("tcp", fmt.Sprintf("%s:%d", m.Config.Master.Host, m.Config.Master.Port)) if err != nil { log.Logger().Fatal("failed to listen", zap.Error(err)) } - m.grpcServer = grpc.NewServer(grpc.MaxSendMsgSize(math.MaxInt)) + m.grpcServer = grpc.NewServer(opts...) protocol.RegisterMasterServer(m.grpcServer, m) if err = m.grpcServer.Serve(lis); err != nil { log.Logger().Fatal("failed to start rpc server", zap.Error(err)) diff --git a/master/rpc_test.go b/master/rpc_test.go index 22954ce4f..278d68778 100644 --- a/master/rpc_test.go +++ b/master/rpc_test.go @@ -18,10 +18,13 @@ import ( "context" "encoding/json" "net" + "os" + "path/filepath" "testing" "time" "github.com/jellydator/ttlcache/v3" + "github.com/madflojo/testcerts" "github.com/stretchr/testify/assert" "github.com/zhenghaoz/gorse/base/progress" "github.com/zhenghaoz/gorse/config" @@ -86,6 +89,26 @@ func (m *mockMasterRPC) Start(t *testing.T) { assert.NoError(t, err) } +func (m *mockMasterRPC) StartTLS(t *testing.T, o *protocol.TLSConfig) { + m.ttlCache = ttlcache.New(ttlcache.WithTTL[string, *Node](time.Second)) + m.ttlCache.OnEviction(m.nodeDown) + go m.ttlCache.Start() + + listen, err := net.Listen("tcp", ":0") + assert.NoError(t, err) + m.addr <- listen.Addr().String() + creds, err := protocol.NewServerCreds(&protocol.TLSConfig{ + SSLCA: o.SSLCA, + SSLCert: o.SSLCert, + SSLKey: o.SSLKey, + }) + assert.NoError(t, err) + m.grpcServer = grpc.NewServer(grpc.Creds(creds)) + protocol.RegisterMasterServer(m.grpcServer, m) + err = m.grpcServer.Serve(listen) + assert.NoError(t, err) +} + func (m *mockMasterRPC) Stop() { m.grpcServer.Stop() } @@ -155,3 +178,64 @@ func TestRPC(t *testing.T) { rpcServer.Stop() } + +func generateToTempFile(t *testing.T) (string, string, string) { + // Generate Certificate Authority + 
ca := testcerts.NewCA() + // Create a signed Certificate and Key + certs, err := ca.NewKeyPair() + assert.NoError(t, err) + // Write certificates to a file + caFile := filepath.Join(t.TempDir(), "ca.pem") + certFile := filepath.Join(t.TempDir(), "cert.pem") + keyFile := filepath.Join(t.TempDir(), "key.pem") + pem := ca.PublicKey() + err = os.WriteFile(caFile, pem, 0640) + assert.NoError(t, err) + err = certs.ToFile(certFile, keyFile) + assert.NoError(t, err) + return caFile, certFile, keyFile +} + +func TestSSL(t *testing.T) { + caFile, certFile, keyFile := generateToTempFile(t) + o := &protocol.TLSConfig{ + SSLCA: caFile, + SSLCert: certFile, + SSLKey: keyFile, + } + rpcServer := newMockMasterRPC(t) + go rpcServer.StartTLS(t, o) + address := <-rpcServer.addr + + // success + c, err := protocol.NewClientCreds(o) + assert.NoError(t, err) + conn, err := grpc.Dial(address, grpc.WithTransportCredentials(c)) + assert.NoError(t, err) + client := protocol.NewMasterClient(conn) + _, err = client.GetMeta(context.Background(), &protocol.NodeInfo{NodeType: protocol.NodeType_ServerNode, NodeName: "server1", HttpPort: 1234}) + assert.NoError(t, err) + + // insecure + conn, err = grpc.Dial(address, grpc.WithInsecure()) + assert.NoError(t, err) + client = protocol.NewMasterClient(conn) + _, err = client.GetMeta(context.Background(), &protocol.NodeInfo{NodeType: protocol.NodeType_ServerNode, NodeName: "server1", HttpPort: 1234}) + assert.Error(t, err) + + // certificate mismatch + caFile2, certFile2, keyFile2 := generateToTempFile(t) + o2 := &protocol.TLSConfig{ + SSLCA: caFile2, + SSLCert: certFile2, + SSLKey: keyFile2, + } + c, err = protocol.NewClientCreds(o2) + assert.NoError(t, err) + conn, err = grpc.Dial(address, grpc.WithTransportCredentials(c)) + assert.NoError(t, err) + client = protocol.NewMasterClient(conn) + _, err = client.GetMeta(context.Background(), &protocol.NodeInfo{NodeType: protocol.NodeType_ServerNode, NodeName: "server1", HttpPort: 1234}) + assert.Error(t, 
err) +} diff --git a/protocol/tls.go b/protocol/tls.go new file mode 100644 index 000000000..e71170326 --- /dev/null +++ b/protocol/tls.go @@ -0,0 +1,85 @@ +// Copyright 2024 gorse Project Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package protocol + +import ( + "crypto/tls" + "crypto/x509" + "github.com/juju/errors" + "google.golang.org/grpc/credentials" + "google.golang.org/grpc/security/advancedtls" + "os" +) + +type TLSConfig struct { + SSLCA string + SSLCert string + SSLKey string +} + +func NewServerCreds(o *TLSConfig) (credentials.TransportCredentials, error) { + // Load certification authority + ca := x509.NewCertPool() + pem, err := os.ReadFile(o.SSLCA) + if err != nil { + return nil, errors.Trace(err) + } + if !ca.AppendCertsFromPEM(pem) { + return nil, errors.New("failed to append certificate") + } + // Load certification + certificate, err := tls.LoadX509KeyPair(o.SSLCert, o.SSLKey) + if err != nil { + return nil, errors.Trace(err) + } + // Create server credentials + return advancedtls.NewServerCreds(&advancedtls.Options{ + IdentityOptions: advancedtls.IdentityCertificateOptions{ + Certificates: []tls.Certificate{certificate}, + }, + RootOptions: advancedtls.RootCertificateOptions{ + RootCertificates: ca, + }, + RequireClientCert: true, + VerificationType: advancedtls.CertVerification, + }) +} + +func NewClientCreds(o *TLSConfig) (credentials.TransportCredentials, error) { + // Load certification authority + ca := 
x509.NewCertPool() + pem, err := os.ReadFile(o.SSLCA) + if err != nil { + return nil, errors.Trace(err) + } + if !ca.AppendCertsFromPEM(pem) { + return nil, errors.New("failed to append certificate") + } + // Load certification + certificate, err := tls.LoadX509KeyPair(o.SSLCert, o.SSLKey) + if err != nil { + return nil, errors.Trace(err) + } + // Create client credentials + return advancedtls.NewClientCreds(&advancedtls.Options{ + IdentityOptions: advancedtls.IdentityCertificateOptions{ + Certificates: []tls.Certificate{certificate}, + }, + RootOptions: advancedtls.RootCertificateOptions{ + RootCertificates: ca, + }, + VerificationType: advancedtls.CertVerification, + }) +} diff --git a/server/server.go b/server/server.go index 53ff83050..8f44baba3 100644 --- a/server/server.go +++ b/server/server.go @@ -49,15 +49,24 @@ type Server struct { serverName string masterHost string masterPort int + tlsConfig *protocol.TLSConfig testMode bool cacheFile string } // NewServer creates a server node. 
-func NewServer(masterHost string, masterPort int, serverHost string, serverPort int, cacheFile string) *Server { +func NewServer( + masterHost string, + masterPort int, + serverHost string, + serverPort int, + cacheFile string, + tlsConfig *protocol.TLSConfig, +) *Server { s := &Server{ masterHost: masterHost, masterPort: masterPort, + tlsConfig: tlsConfig, cacheFile: cacheFile, RestServer: RestServer{ Settings: config.NewSettings(), @@ -98,7 +107,17 @@ func (s *Server) Serve() { zap.Int("master_port", s.masterPort)) // connect to master - conn, err := grpc.Dial(fmt.Sprintf("%v:%v", s.masterHost, s.masterPort), grpc.WithTransportCredentials(insecure.NewCredentials())) + var opts []grpc.DialOption + if s.tlsConfig != nil { + c, err := protocol.NewClientCreds(s.tlsConfig) + if err != nil { + log.Logger().Fatal("failed to create credentials", zap.Error(err)) + } + opts = append(opts, grpc.WithTransportCredentials(c)) + } else { + opts = append(opts, grpc.WithTransportCredentials(insecure.NewCredentials())) + } + conn, err := grpc.Dial(fmt.Sprintf("%v:%v", s.masterHost, s.masterPort), opts...) if err != nil { log.Logger().Fatal("failed to connect master", zap.Error(err)) } diff --git a/worker/worker.go b/worker/worker.go index f6c92f501..899336896 100644 --- a/worker/worker.go +++ b/worker/worker.go @@ -82,6 +82,7 @@ type Worker struct { httpPort int masterHost string masterPort int + tlsConfig *protocol.TLSConfig cacheFile string // database connection path @@ -114,7 +115,16 @@ type Worker struct { } // NewWorker creates a new worker node. 
-func NewWorker(masterHost string, masterPort int, httpHost string, httpPort, jobs int, cacheFile string, managedMode bool) *Worker { +func NewWorker( + masterHost string, + masterPort int, + httpHost string, + httpPort int, + jobs int, + cacheFile string, + managedMode bool, + tlsConfig *protocol.TLSConfig, +) *Worker { return &Worker{ rankers: make([]click.FactorizationMachine, jobs), managedMode: managedMode, @@ -124,6 +134,7 @@ func NewWorker(masterHost string, masterPort int, httpHost string, httpPort, job cacheFile: cacheFile, masterHost: masterHost, masterPort: masterPort, + tlsConfig: tlsConfig, httpHost: httpHost, httpPort: httpPort, jobs: jobs, @@ -389,7 +400,17 @@ func (w *Worker) Serve() { w.tracer = progress.NewTracer(w.workerName) // connect to master - conn, err := grpc.Dial(fmt.Sprintf("%v:%v", w.masterHost, w.masterPort), grpc.WithTransportCredentials(insecure.NewCredentials())) + var opts []grpc.DialOption + if w.tlsConfig != nil { + c, err := protocol.NewClientCreds(w.tlsConfig) + if err != nil { + log.Logger().Fatal("failed to create credentials", zap.Error(err)) + } + opts = append(opts, grpc.WithTransportCredentials(c)) + } else { + opts = append(opts, grpc.WithTransportCredentials(insecure.NewCredentials())) + } + conn, err := grpc.Dial(fmt.Sprintf("%v:%v", w.masterHost, w.masterPort), opts...) 
if err != nil { log.Logger().Fatal("failed to connect master", zap.Error(err)) } From 057ee55c7a2ed33795451a3859a38f34219f0837 Mon Sep 17 00:00:00 2001 From: zhenghaoz Date: Wed, 4 Dec 2024 20:44:54 +0800 Subject: [PATCH 13/14] implement database proxy (#895) --- protocol/cache_store.pb.go | 2193 ++++++++++++++++++++++++++ protocol/cache_store.proto | 196 +++ protocol/cache_store_grpc.pb.go | 743 +++++++++ protocol/data_store.pb.go | 2546 +++++++++++++++++++++++++++++++ protocol/data_store.proto | 226 +++ protocol/data_store_grpc.pb.go | 905 +++++++++++ protocol/protocol.pb.go | 296 ++-- protocol/protocol.proto | 6 + protocol/task.go | 2 + storage/cache/proxy.go | 442 ++++++ storage/cache/proxy_test.go | 84 + storage/data/proxy.go | 982 ++++++++++++ storage/data/proxy_test.go | 76 + 13 files changed, 8592 insertions(+), 105 deletions(-) create mode 100644 protocol/cache_store.pb.go create mode 100644 protocol/cache_store.proto create mode 100644 protocol/cache_store_grpc.pb.go create mode 100644 protocol/data_store.pb.go create mode 100644 protocol/data_store.proto create mode 100644 protocol/data_store_grpc.pb.go create mode 100644 storage/cache/proxy.go create mode 100644 storage/cache/proxy_test.go create mode 100644 storage/data/proxy.go create mode 100644 storage/data/proxy_test.go diff --git a/protocol/cache_store.pb.go b/protocol/cache_store.pb.go new file mode 100644 index 000000000..8501c881d --- /dev/null +++ b/protocol/cache_store.pb.go @@ -0,0 +1,2193 @@ +// Copyright 2024 gorse Project Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.35.1 +// protoc v5.28.3 +// source: cache_store.proto + +package protocol + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + timestamppb "google.golang.org/protobuf/types/known/timestamppb" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type Value struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` +} + +func (x *Value) Reset() { + *x = Value{} + mi := &file_cache_store_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *Value) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Value) ProtoMessage() {} + +func (x *Value) ProtoReflect() protoreflect.Message { + mi := &file_cache_store_proto_msgTypes[0] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Value.ProtoReflect.Descriptor instead. 
+func (*Value) Descriptor() ([]byte, []int) { + return file_cache_store_proto_rawDescGZIP(), []int{0} +} + +func (x *Value) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *Value) GetValue() string { + if x != nil { + return x.Value + } + return "" +} + +type Score struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + Score float64 `protobuf:"fixed64,2,opt,name=score,proto3" json:"score,omitempty"` + IsHidden bool `protobuf:"varint,3,opt,name=is_hidden,json=isHidden,proto3" json:"is_hidden,omitempty"` + Categories []string `protobuf:"bytes,4,rep,name=categories,proto3" json:"categories,omitempty"` + Timestamp *timestamppb.Timestamp `protobuf:"bytes,5,opt,name=timestamp,proto3" json:"timestamp,omitempty"` +} + +func (x *Score) Reset() { + *x = Score{} + mi := &file_cache_store_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *Score) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Score) ProtoMessage() {} + +func (x *Score) ProtoReflect() protoreflect.Message { + mi := &file_cache_store_proto_msgTypes[1] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Score.ProtoReflect.Descriptor instead. 
+func (*Score) Descriptor() ([]byte, []int) { + return file_cache_store_proto_rawDescGZIP(), []int{1} +} + +func (x *Score) GetId() string { + if x != nil { + return x.Id + } + return "" +} + +func (x *Score) GetScore() float64 { + if x != nil { + return x.Score + } + return 0 +} + +func (x *Score) GetIsHidden() bool { + if x != nil { + return x.IsHidden + } + return false +} + +func (x *Score) GetCategories() []string { + if x != nil { + return x.Categories + } + return nil +} + +func (x *Score) GetTimestamp() *timestamppb.Timestamp { + if x != nil { + return x.Timestamp + } + return nil +} + +type ScoreCondition struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Subset *string `protobuf:"bytes,1,opt,name=subset,proto3,oneof" json:"subset,omitempty"` + Id *string `protobuf:"bytes,2,opt,name=id,proto3,oneof" json:"id,omitempty"` + Before *timestamppb.Timestamp `protobuf:"bytes,3,opt,name=before,proto3,oneof" json:"before,omitempty"` +} + +func (x *ScoreCondition) Reset() { + *x = ScoreCondition{} + mi := &file_cache_store_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ScoreCondition) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ScoreCondition) ProtoMessage() {} + +func (x *ScoreCondition) ProtoReflect() protoreflect.Message { + mi := &file_cache_store_proto_msgTypes[2] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ScoreCondition.ProtoReflect.Descriptor instead. 
+func (*ScoreCondition) Descriptor() ([]byte, []int) { + return file_cache_store_proto_rawDescGZIP(), []int{2} +} + +func (x *ScoreCondition) GetSubset() string { + if x != nil && x.Subset != nil { + return *x.Subset + } + return "" +} + +func (x *ScoreCondition) GetId() string { + if x != nil && x.Id != nil { + return *x.Id + } + return "" +} + +func (x *ScoreCondition) GetBefore() *timestamppb.Timestamp { + if x != nil { + return x.Before + } + return nil +} + +type ScorePatch struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + IsHidden *bool `protobuf:"varint,1,opt,name=is_hidden,json=isHidden,proto3,oneof" json:"is_hidden,omitempty"` + Categories []string `protobuf:"bytes,2,rep,name=categories,proto3" json:"categories,omitempty"` + Score *float64 `protobuf:"fixed64,3,opt,name=score,proto3,oneof" json:"score,omitempty"` +} + +func (x *ScorePatch) Reset() { + *x = ScorePatch{} + mi := &file_cache_store_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ScorePatch) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ScorePatch) ProtoMessage() {} + +func (x *ScorePatch) ProtoReflect() protoreflect.Message { + mi := &file_cache_store_proto_msgTypes[3] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ScorePatch.ProtoReflect.Descriptor instead. 
+func (*ScorePatch) Descriptor() ([]byte, []int) { + return file_cache_store_proto_rawDescGZIP(), []int{3} +} + +func (x *ScorePatch) GetIsHidden() bool { + if x != nil && x.IsHidden != nil { + return *x.IsHidden + } + return false +} + +func (x *ScorePatch) GetCategories() []string { + if x != nil { + return x.Categories + } + return nil +} + +func (x *ScorePatch) GetScore() float64 { + if x != nil && x.Score != nil { + return *x.Score + } + return 0 +} + +type TimeSeriesPoint struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Timestamp *timestamppb.Timestamp `protobuf:"bytes,2,opt,name=timestamp,proto3" json:"timestamp,omitempty"` + Value float64 `protobuf:"fixed64,3,opt,name=value,proto3" json:"value,omitempty"` +} + +func (x *TimeSeriesPoint) Reset() { + *x = TimeSeriesPoint{} + mi := &file_cache_store_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *TimeSeriesPoint) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*TimeSeriesPoint) ProtoMessage() {} + +func (x *TimeSeriesPoint) ProtoReflect() protoreflect.Message { + mi := &file_cache_store_proto_msgTypes[4] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use TimeSeriesPoint.ProtoReflect.Descriptor instead. 
+func (*TimeSeriesPoint) Descriptor() ([]byte, []int) { + return file_cache_store_proto_rawDescGZIP(), []int{4} +} + +func (x *TimeSeriesPoint) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *TimeSeriesPoint) GetTimestamp() *timestamppb.Timestamp { + if x != nil { + return x.Timestamp + } + return nil +} + +func (x *TimeSeriesPoint) GetValue() float64 { + if x != nil { + return x.Value + } + return 0 +} + +type GetRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` +} + +func (x *GetRequest) Reset() { + *x = GetRequest{} + mi := &file_cache_store_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GetRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetRequest) ProtoMessage() {} + +func (x *GetRequest) ProtoReflect() protoreflect.Message { + mi := &file_cache_store_proto_msgTypes[5] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetRequest.ProtoReflect.Descriptor instead. 
+func (*GetRequest) Descriptor() ([]byte, []int) { + return file_cache_store_proto_rawDescGZIP(), []int{5} +} + +func (x *GetRequest) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +type GetResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Value *string `protobuf:"bytes,1,opt,name=value,proto3,oneof" json:"value,omitempty"` +} + +func (x *GetResponse) Reset() { + *x = GetResponse{} + mi := &file_cache_store_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GetResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetResponse) ProtoMessage() {} + +func (x *GetResponse) ProtoReflect() protoreflect.Message { + mi := &file_cache_store_proto_msgTypes[6] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetResponse.ProtoReflect.Descriptor instead. 
+func (*GetResponse) Descriptor() ([]byte, []int) { + return file_cache_store_proto_rawDescGZIP(), []int{6} +} + +func (x *GetResponse) GetValue() string { + if x != nil && x.Value != nil { + return *x.Value + } + return "" +} + +type SetRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Values []*Value `protobuf:"bytes,1,rep,name=values,proto3" json:"values,omitempty"` +} + +func (x *SetRequest) Reset() { + *x = SetRequest{} + mi := &file_cache_store_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *SetRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SetRequest) ProtoMessage() {} + +func (x *SetRequest) ProtoReflect() protoreflect.Message { + mi := &file_cache_store_proto_msgTypes[7] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SetRequest.ProtoReflect.Descriptor instead. 
+func (*SetRequest) Descriptor() ([]byte, []int) { + return file_cache_store_proto_rawDescGZIP(), []int{7} +} + +func (x *SetRequest) GetValues() []*Value { + if x != nil { + return x.Values + } + return nil +} + +type SetResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *SetResponse) Reset() { + *x = SetResponse{} + mi := &file_cache_store_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *SetResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SetResponse) ProtoMessage() {} + +func (x *SetResponse) ProtoReflect() protoreflect.Message { + mi := &file_cache_store_proto_msgTypes[8] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SetResponse.ProtoReflect.Descriptor instead. 
+func (*SetResponse) Descriptor() ([]byte, []int) { + return file_cache_store_proto_rawDescGZIP(), []int{8} +} + +type DeleteRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` +} + +func (x *DeleteRequest) Reset() { + *x = DeleteRequest{} + mi := &file_cache_store_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *DeleteRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DeleteRequest) ProtoMessage() {} + +func (x *DeleteRequest) ProtoReflect() protoreflect.Message { + mi := &file_cache_store_proto_msgTypes[9] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DeleteRequest.ProtoReflect.Descriptor instead. 
+func (*DeleteRequest) Descriptor() ([]byte, []int) { + return file_cache_store_proto_rawDescGZIP(), []int{9} +} + +func (x *DeleteRequest) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +type DeleteResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *DeleteResponse) Reset() { + *x = DeleteResponse{} + mi := &file_cache_store_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *DeleteResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DeleteResponse) ProtoMessage() {} + +func (x *DeleteResponse) ProtoReflect() protoreflect.Message { + mi := &file_cache_store_proto_msgTypes[10] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DeleteResponse.ProtoReflect.Descriptor instead. 
+func (*DeleteResponse) Descriptor() ([]byte, []int) { + return file_cache_store_proto_rawDescGZIP(), []int{10} +} + +type GetSetRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Key string `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` +} + +func (x *GetSetRequest) Reset() { + *x = GetSetRequest{} + mi := &file_cache_store_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GetSetRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetSetRequest) ProtoMessage() {} + +func (x *GetSetRequest) ProtoReflect() protoreflect.Message { + mi := &file_cache_store_proto_msgTypes[11] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetSetRequest.ProtoReflect.Descriptor instead. 
+func (*GetSetRequest) Descriptor() ([]byte, []int) { + return file_cache_store_proto_rawDescGZIP(), []int{11} +} + +func (x *GetSetRequest) GetKey() string { + if x != nil { + return x.Key + } + return "" +} + +type GetSetResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Members []string `protobuf:"bytes,1,rep,name=members,proto3" json:"members,omitempty"` +} + +func (x *GetSetResponse) Reset() { + *x = GetSetResponse{} + mi := &file_cache_store_proto_msgTypes[12] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GetSetResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetSetResponse) ProtoMessage() {} + +func (x *GetSetResponse) ProtoReflect() protoreflect.Message { + mi := &file_cache_store_proto_msgTypes[12] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetSetResponse.ProtoReflect.Descriptor instead. 
+func (*GetSetResponse) Descriptor() ([]byte, []int) { + return file_cache_store_proto_rawDescGZIP(), []int{12} +} + +func (x *GetSetResponse) GetMembers() []string { + if x != nil { + return x.Members + } + return nil +} + +type SetSetRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Key string `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` + Members []string `protobuf:"bytes,2,rep,name=members,proto3" json:"members,omitempty"` +} + +func (x *SetSetRequest) Reset() { + *x = SetSetRequest{} + mi := &file_cache_store_proto_msgTypes[13] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *SetSetRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SetSetRequest) ProtoMessage() {} + +func (x *SetSetRequest) ProtoReflect() protoreflect.Message { + mi := &file_cache_store_proto_msgTypes[13] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SetSetRequest.ProtoReflect.Descriptor instead. 
+func (*SetSetRequest) Descriptor() ([]byte, []int) { + return file_cache_store_proto_rawDescGZIP(), []int{13} +} + +func (x *SetSetRequest) GetKey() string { + if x != nil { + return x.Key + } + return "" +} + +func (x *SetSetRequest) GetMembers() []string { + if x != nil { + return x.Members + } + return nil +} + +type SetSetResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *SetSetResponse) Reset() { + *x = SetSetResponse{} + mi := &file_cache_store_proto_msgTypes[14] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *SetSetResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SetSetResponse) ProtoMessage() {} + +func (x *SetSetResponse) ProtoReflect() protoreflect.Message { + mi := &file_cache_store_proto_msgTypes[14] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SetSetResponse.ProtoReflect.Descriptor instead. 
+func (*SetSetResponse) Descriptor() ([]byte, []int) { + return file_cache_store_proto_rawDescGZIP(), []int{14} +} + +type AddSetRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Key string `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` + Members []string `protobuf:"bytes,2,rep,name=members,proto3" json:"members,omitempty"` +} + +func (x *AddSetRequest) Reset() { + *x = AddSetRequest{} + mi := &file_cache_store_proto_msgTypes[15] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *AddSetRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*AddSetRequest) ProtoMessage() {} + +func (x *AddSetRequest) ProtoReflect() protoreflect.Message { + mi := &file_cache_store_proto_msgTypes[15] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use AddSetRequest.ProtoReflect.Descriptor instead. 
+func (*AddSetRequest) Descriptor() ([]byte, []int) { + return file_cache_store_proto_rawDescGZIP(), []int{15} +} + +func (x *AddSetRequest) GetKey() string { + if x != nil { + return x.Key + } + return "" +} + +func (x *AddSetRequest) GetMembers() []string { + if x != nil { + return x.Members + } + return nil +} + +type AddSetResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *AddSetResponse) Reset() { + *x = AddSetResponse{} + mi := &file_cache_store_proto_msgTypes[16] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *AddSetResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*AddSetResponse) ProtoMessage() {} + +func (x *AddSetResponse) ProtoReflect() protoreflect.Message { + mi := &file_cache_store_proto_msgTypes[16] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use AddSetResponse.ProtoReflect.Descriptor instead. 
+func (*AddSetResponse) Descriptor() ([]byte, []int) { + return file_cache_store_proto_rawDescGZIP(), []int{16} +} + +type RemSetRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Key string `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` + Members []string `protobuf:"bytes,2,rep,name=members,proto3" json:"members,omitempty"` +} + +func (x *RemSetRequest) Reset() { + *x = RemSetRequest{} + mi := &file_cache_store_proto_msgTypes[17] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *RemSetRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RemSetRequest) ProtoMessage() {} + +func (x *RemSetRequest) ProtoReflect() protoreflect.Message { + mi := &file_cache_store_proto_msgTypes[17] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RemSetRequest.ProtoReflect.Descriptor instead. 
+func (*RemSetRequest) Descriptor() ([]byte, []int) { + return file_cache_store_proto_rawDescGZIP(), []int{17} +} + +func (x *RemSetRequest) GetKey() string { + if x != nil { + return x.Key + } + return "" +} + +func (x *RemSetRequest) GetMembers() []string { + if x != nil { + return x.Members + } + return nil +} + +type RemSetResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *RemSetResponse) Reset() { + *x = RemSetResponse{} + mi := &file_cache_store_proto_msgTypes[18] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *RemSetResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RemSetResponse) ProtoMessage() {} + +func (x *RemSetResponse) ProtoReflect() protoreflect.Message { + mi := &file_cache_store_proto_msgTypes[18] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RemSetResponse.ProtoReflect.Descriptor instead. 
+func (*RemSetResponse) Descriptor() ([]byte, []int) { + return file_cache_store_proto_rawDescGZIP(), []int{18} +} + +type PushRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` +} + +func (x *PushRequest) Reset() { + *x = PushRequest{} + mi := &file_cache_store_proto_msgTypes[19] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *PushRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PushRequest) ProtoMessage() {} + +func (x *PushRequest) ProtoReflect() protoreflect.Message { + mi := &file_cache_store_proto_msgTypes[19] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PushRequest.ProtoReflect.Descriptor instead. 
+func (*PushRequest) Descriptor() ([]byte, []int) { + return file_cache_store_proto_rawDescGZIP(), []int{19} +} + +func (x *PushRequest) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *PushRequest) GetValue() string { + if x != nil { + return x.Value + } + return "" +} + +type PushResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *PushResponse) Reset() { + *x = PushResponse{} + mi := &file_cache_store_proto_msgTypes[20] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *PushResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PushResponse) ProtoMessage() {} + +func (x *PushResponse) ProtoReflect() protoreflect.Message { + mi := &file_cache_store_proto_msgTypes[20] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PushResponse.ProtoReflect.Descriptor instead. 
+func (*PushResponse) Descriptor() ([]byte, []int) { + return file_cache_store_proto_rawDescGZIP(), []int{20} +} + +type PopRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` +} + +func (x *PopRequest) Reset() { + *x = PopRequest{} + mi := &file_cache_store_proto_msgTypes[21] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *PopRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PopRequest) ProtoMessage() {} + +func (x *PopRequest) ProtoReflect() protoreflect.Message { + mi := &file_cache_store_proto_msgTypes[21] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PopRequest.ProtoReflect.Descriptor instead. +func (*PopRequest) Descriptor() ([]byte, []int) { + return file_cache_store_proto_rawDescGZIP(), []int{21} +} + +func (x *PopRequest) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +type PopResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Value *string `protobuf:"bytes,1,opt,name=value,proto3,oneof" json:"value,omitempty"` +} + +func (x *PopResponse) Reset() { + *x = PopResponse{} + mi := &file_cache_store_proto_msgTypes[22] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *PopResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PopResponse) ProtoMessage() {} + +func (x *PopResponse) ProtoReflect() protoreflect.Message { + mi := &file_cache_store_proto_msgTypes[22] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + 
return mi.MessageOf(x) +} + +// Deprecated: Use PopResponse.ProtoReflect.Descriptor instead. +func (*PopResponse) Descriptor() ([]byte, []int) { + return file_cache_store_proto_rawDescGZIP(), []int{22} +} + +func (x *PopResponse) GetValue() string { + if x != nil && x.Value != nil { + return *x.Value + } + return "" +} + +type RemainRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` +} + +func (x *RemainRequest) Reset() { + *x = RemainRequest{} + mi := &file_cache_store_proto_msgTypes[23] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *RemainRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RemainRequest) ProtoMessage() {} + +func (x *RemainRequest) ProtoReflect() protoreflect.Message { + mi := &file_cache_store_proto_msgTypes[23] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RemainRequest.ProtoReflect.Descriptor instead. 
+func (*RemainRequest) Descriptor() ([]byte, []int) { + return file_cache_store_proto_rawDescGZIP(), []int{23} +} + +func (x *RemainRequest) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +type RemainResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Count int64 `protobuf:"varint,1,opt,name=count,proto3" json:"count,omitempty"` +} + +func (x *RemainResponse) Reset() { + *x = RemainResponse{} + mi := &file_cache_store_proto_msgTypes[24] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *RemainResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RemainResponse) ProtoMessage() {} + +func (x *RemainResponse) ProtoReflect() protoreflect.Message { + mi := &file_cache_store_proto_msgTypes[24] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RemainResponse.ProtoReflect.Descriptor instead. 
+func (*RemainResponse) Descriptor() ([]byte, []int) { + return file_cache_store_proto_rawDescGZIP(), []int{24} +} + +func (x *RemainResponse) GetCount() int64 { + if x != nil { + return x.Count + } + return 0 +} + +type AddScoresRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Collection string `protobuf:"bytes,1,opt,name=collection,proto3" json:"collection,omitempty"` + Subset string `protobuf:"bytes,2,opt,name=subset,proto3" json:"subset,omitempty"` + Documents []*Score `protobuf:"bytes,3,rep,name=documents,proto3" json:"documents,omitempty"` +} + +func (x *AddScoresRequest) Reset() { + *x = AddScoresRequest{} + mi := &file_cache_store_proto_msgTypes[25] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *AddScoresRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*AddScoresRequest) ProtoMessage() {} + +func (x *AddScoresRequest) ProtoReflect() protoreflect.Message { + mi := &file_cache_store_proto_msgTypes[25] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use AddScoresRequest.ProtoReflect.Descriptor instead. 
+func (*AddScoresRequest) Descriptor() ([]byte, []int) { + return file_cache_store_proto_rawDescGZIP(), []int{25} +} + +func (x *AddScoresRequest) GetCollection() string { + if x != nil { + return x.Collection + } + return "" +} + +func (x *AddScoresRequest) GetSubset() string { + if x != nil { + return x.Subset + } + return "" +} + +func (x *AddScoresRequest) GetDocuments() []*Score { + if x != nil { + return x.Documents + } + return nil +} + +type AddScoresResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *AddScoresResponse) Reset() { + *x = AddScoresResponse{} + mi := &file_cache_store_proto_msgTypes[26] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *AddScoresResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*AddScoresResponse) ProtoMessage() {} + +func (x *AddScoresResponse) ProtoReflect() protoreflect.Message { + mi := &file_cache_store_proto_msgTypes[26] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use AddScoresResponse.ProtoReflect.Descriptor instead. 
+func (*AddScoresResponse) Descriptor() ([]byte, []int) { + return file_cache_store_proto_rawDescGZIP(), []int{26} +} + +type SearchScoresRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Collection string `protobuf:"bytes,1,opt,name=collection,proto3" json:"collection,omitempty"` + Subset string `protobuf:"bytes,2,opt,name=subset,proto3" json:"subset,omitempty"` + Query []string `protobuf:"bytes,3,rep,name=query,proto3" json:"query,omitempty"` + Begin int32 `protobuf:"varint,4,opt,name=begin,proto3" json:"begin,omitempty"` + End int32 `protobuf:"varint,5,opt,name=end,proto3" json:"end,omitempty"` +} + +func (x *SearchScoresRequest) Reset() { + *x = SearchScoresRequest{} + mi := &file_cache_store_proto_msgTypes[27] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *SearchScoresRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SearchScoresRequest) ProtoMessage() {} + +func (x *SearchScoresRequest) ProtoReflect() protoreflect.Message { + mi := &file_cache_store_proto_msgTypes[27] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SearchScoresRequest.ProtoReflect.Descriptor instead. 
+func (*SearchScoresRequest) Descriptor() ([]byte, []int) { + return file_cache_store_proto_rawDescGZIP(), []int{27} +} + +func (x *SearchScoresRequest) GetCollection() string { + if x != nil { + return x.Collection + } + return "" +} + +func (x *SearchScoresRequest) GetSubset() string { + if x != nil { + return x.Subset + } + return "" +} + +func (x *SearchScoresRequest) GetQuery() []string { + if x != nil { + return x.Query + } + return nil +} + +func (x *SearchScoresRequest) GetBegin() int32 { + if x != nil { + return x.Begin + } + return 0 +} + +func (x *SearchScoresRequest) GetEnd() int32 { + if x != nil { + return x.End + } + return 0 +} + +type SearchScoresResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Documents []*Score `protobuf:"bytes,1,rep,name=documents,proto3" json:"documents,omitempty"` +} + +func (x *SearchScoresResponse) Reset() { + *x = SearchScoresResponse{} + mi := &file_cache_store_proto_msgTypes[28] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *SearchScoresResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SearchScoresResponse) ProtoMessage() {} + +func (x *SearchScoresResponse) ProtoReflect() protoreflect.Message { + mi := &file_cache_store_proto_msgTypes[28] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SearchScoresResponse.ProtoReflect.Descriptor instead. 
+func (*SearchScoresResponse) Descriptor() ([]byte, []int) { + return file_cache_store_proto_rawDescGZIP(), []int{28} +} + +func (x *SearchScoresResponse) GetDocuments() []*Score { + if x != nil { + return x.Documents + } + return nil +} + +type DeleteScoresRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Collection []string `protobuf:"bytes,1,rep,name=collection,proto3" json:"collection,omitempty"` + Condition *ScoreCondition `protobuf:"bytes,2,opt,name=condition,proto3" json:"condition,omitempty"` +} + +func (x *DeleteScoresRequest) Reset() { + *x = DeleteScoresRequest{} + mi := &file_cache_store_proto_msgTypes[29] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *DeleteScoresRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DeleteScoresRequest) ProtoMessage() {} + +func (x *DeleteScoresRequest) ProtoReflect() protoreflect.Message { + mi := &file_cache_store_proto_msgTypes[29] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DeleteScoresRequest.ProtoReflect.Descriptor instead. 
+func (*DeleteScoresRequest) Descriptor() ([]byte, []int) { + return file_cache_store_proto_rawDescGZIP(), []int{29} +} + +func (x *DeleteScoresRequest) GetCollection() []string { + if x != nil { + return x.Collection + } + return nil +} + +func (x *DeleteScoresRequest) GetCondition() *ScoreCondition { + if x != nil { + return x.Condition + } + return nil +} + +type DeleteScoresResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *DeleteScoresResponse) Reset() { + *x = DeleteScoresResponse{} + mi := &file_cache_store_proto_msgTypes[30] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *DeleteScoresResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DeleteScoresResponse) ProtoMessage() {} + +func (x *DeleteScoresResponse) ProtoReflect() protoreflect.Message { + mi := &file_cache_store_proto_msgTypes[30] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DeleteScoresResponse.ProtoReflect.Descriptor instead. 
+func (*DeleteScoresResponse) Descriptor() ([]byte, []int) { + return file_cache_store_proto_rawDescGZIP(), []int{30} +} + +type UpdateScoresRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Collection []string `protobuf:"bytes,1,rep,name=collection,proto3" json:"collection,omitempty"` + Id string `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"` + Patch *ScorePatch `protobuf:"bytes,3,opt,name=patch,proto3" json:"patch,omitempty"` +} + +func (x *UpdateScoresRequest) Reset() { + *x = UpdateScoresRequest{} + mi := &file_cache_store_proto_msgTypes[31] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *UpdateScoresRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*UpdateScoresRequest) ProtoMessage() {} + +func (x *UpdateScoresRequest) ProtoReflect() protoreflect.Message { + mi := &file_cache_store_proto_msgTypes[31] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use UpdateScoresRequest.ProtoReflect.Descriptor instead. 
+func (*UpdateScoresRequest) Descriptor() ([]byte, []int) { + return file_cache_store_proto_rawDescGZIP(), []int{31} +} + +func (x *UpdateScoresRequest) GetCollection() []string { + if x != nil { + return x.Collection + } + return nil +} + +func (x *UpdateScoresRequest) GetId() string { + if x != nil { + return x.Id + } + return "" +} + +func (x *UpdateScoresRequest) GetPatch() *ScorePatch { + if x != nil { + return x.Patch + } + return nil +} + +type UpdateScoresResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *UpdateScoresResponse) Reset() { + *x = UpdateScoresResponse{} + mi := &file_cache_store_proto_msgTypes[32] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *UpdateScoresResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*UpdateScoresResponse) ProtoMessage() {} + +func (x *UpdateScoresResponse) ProtoReflect() protoreflect.Message { + mi := &file_cache_store_proto_msgTypes[32] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use UpdateScoresResponse.ProtoReflect.Descriptor instead. 
+func (*UpdateScoresResponse) Descriptor() ([]byte, []int) { + return file_cache_store_proto_rawDescGZIP(), []int{32} +} + +type AddTimeSeriesPointsRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Points []*TimeSeriesPoint `protobuf:"bytes,1,rep,name=points,proto3" json:"points,omitempty"` +} + +func (x *AddTimeSeriesPointsRequest) Reset() { + *x = AddTimeSeriesPointsRequest{} + mi := &file_cache_store_proto_msgTypes[33] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *AddTimeSeriesPointsRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*AddTimeSeriesPointsRequest) ProtoMessage() {} + +func (x *AddTimeSeriesPointsRequest) ProtoReflect() protoreflect.Message { + mi := &file_cache_store_proto_msgTypes[33] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use AddTimeSeriesPointsRequest.ProtoReflect.Descriptor instead. 
+func (*AddTimeSeriesPointsRequest) Descriptor() ([]byte, []int) { + return file_cache_store_proto_rawDescGZIP(), []int{33} +} + +func (x *AddTimeSeriesPointsRequest) GetPoints() []*TimeSeriesPoint { + if x != nil { + return x.Points + } + return nil +} + +type AddTimeSeriesPointsResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *AddTimeSeriesPointsResponse) Reset() { + *x = AddTimeSeriesPointsResponse{} + mi := &file_cache_store_proto_msgTypes[34] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *AddTimeSeriesPointsResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*AddTimeSeriesPointsResponse) ProtoMessage() {} + +func (x *AddTimeSeriesPointsResponse) ProtoReflect() protoreflect.Message { + mi := &file_cache_store_proto_msgTypes[34] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use AddTimeSeriesPointsResponse.ProtoReflect.Descriptor instead. 
+func (*AddTimeSeriesPointsResponse) Descriptor() ([]byte, []int) { + return file_cache_store_proto_rawDescGZIP(), []int{34} +} + +type GetTimeSeriesPointsRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Begin *timestamppb.Timestamp `protobuf:"bytes,2,opt,name=begin,proto3" json:"begin,omitempty"` + End *timestamppb.Timestamp `protobuf:"bytes,3,opt,name=end,proto3" json:"end,omitempty"` +} + +func (x *GetTimeSeriesPointsRequest) Reset() { + *x = GetTimeSeriesPointsRequest{} + mi := &file_cache_store_proto_msgTypes[35] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GetTimeSeriesPointsRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetTimeSeriesPointsRequest) ProtoMessage() {} + +func (x *GetTimeSeriesPointsRequest) ProtoReflect() protoreflect.Message { + mi := &file_cache_store_proto_msgTypes[35] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetTimeSeriesPointsRequest.ProtoReflect.Descriptor instead. 
+func (*GetTimeSeriesPointsRequest) Descriptor() ([]byte, []int) { + return file_cache_store_proto_rawDescGZIP(), []int{35} +} + +func (x *GetTimeSeriesPointsRequest) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *GetTimeSeriesPointsRequest) GetBegin() *timestamppb.Timestamp { + if x != nil { + return x.Begin + } + return nil +} + +func (x *GetTimeSeriesPointsRequest) GetEnd() *timestamppb.Timestamp { + if x != nil { + return x.End + } + return nil +} + +type GetTimeSeriesPointsResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Points []*TimeSeriesPoint `protobuf:"bytes,1,rep,name=points,proto3" json:"points,omitempty"` +} + +func (x *GetTimeSeriesPointsResponse) Reset() { + *x = GetTimeSeriesPointsResponse{} + mi := &file_cache_store_proto_msgTypes[36] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GetTimeSeriesPointsResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetTimeSeriesPointsResponse) ProtoMessage() {} + +func (x *GetTimeSeriesPointsResponse) ProtoReflect() protoreflect.Message { + mi := &file_cache_store_proto_msgTypes[36] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetTimeSeriesPointsResponse.ProtoReflect.Descriptor instead. 
+func (*GetTimeSeriesPointsResponse) Descriptor() ([]byte, []int) { + return file_cache_store_proto_rawDescGZIP(), []int{36} +} + +func (x *GetTimeSeriesPointsResponse) GetPoints() []*TimeSeriesPoint { + if x != nil { + return x.Points + } + return nil +} + +var File_cache_store_proto protoreflect.FileDescriptor + +var file_cache_store_proto_rawDesc = []byte{ + 0x0a, 0x11, 0x63, 0x61, 0x63, 0x68, 0x65, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x12, 0x08, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x1a, 0x1f, 0x67, + 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, + 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x0e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x31, + 0x0a, 0x05, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x22, 0xa4, 0x01, 0x0a, 0x05, 0x53, 0x63, 0x6f, 0x72, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69, + 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x14, 0x0a, 0x05, 0x73, + 0x63, 0x6f, 0x72, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x01, 0x52, 0x05, 0x73, 0x63, 0x6f, 0x72, + 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x69, 0x73, 0x5f, 0x68, 0x69, 0x64, 0x64, 0x65, 0x6e, 0x18, 0x03, + 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, 0x69, 0x73, 0x48, 0x69, 0x64, 0x64, 0x65, 0x6e, 0x12, 0x1e, + 0x0a, 0x0a, 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x69, 0x65, 0x73, 0x18, 0x04, 0x20, 0x03, + 0x28, 0x09, 0x52, 0x0a, 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x69, 0x65, 0x73, 0x12, 0x38, + 0x0a, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x05, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 
0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x74, + 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x22, 0x98, 0x01, 0x0a, 0x0e, 0x53, 0x63, 0x6f, + 0x72, 0x65, 0x43, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1b, 0x0a, 0x06, 0x73, + 0x75, 0x62, 0x73, 0x65, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x06, 0x73, + 0x75, 0x62, 0x73, 0x65, 0x74, 0x88, 0x01, 0x01, 0x12, 0x13, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x02, 0x69, 0x64, 0x88, 0x01, 0x01, 0x12, 0x37, 0x0a, + 0x06, 0x62, 0x65, 0x66, 0x6f, 0x72, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, + 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, + 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x48, 0x02, 0x52, 0x06, 0x62, 0x65, 0x66, + 0x6f, 0x72, 0x65, 0x88, 0x01, 0x01, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x73, 0x75, 0x62, 0x73, 0x65, + 0x74, 0x42, 0x05, 0x0a, 0x03, 0x5f, 0x69, 0x64, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x62, 0x65, 0x66, + 0x6f, 0x72, 0x65, 0x22, 0x81, 0x01, 0x0a, 0x0a, 0x53, 0x63, 0x6f, 0x72, 0x65, 0x50, 0x61, 0x74, + 0x63, 0x68, 0x12, 0x20, 0x0a, 0x09, 0x69, 0x73, 0x5f, 0x68, 0x69, 0x64, 0x64, 0x65, 0x6e, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x08, 0x69, 0x73, 0x48, 0x69, 0x64, 0x64, 0x65, + 0x6e, 0x88, 0x01, 0x01, 0x12, 0x1e, 0x0a, 0x0a, 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x69, + 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0a, 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, + 0x72, 0x69, 0x65, 0x73, 0x12, 0x19, 0x0a, 0x05, 0x73, 0x63, 0x6f, 0x72, 0x65, 0x18, 0x03, 0x20, + 0x01, 0x28, 0x01, 0x48, 0x01, 0x52, 0x05, 0x73, 0x63, 0x6f, 0x72, 0x65, 0x88, 0x01, 0x01, 0x42, + 0x0c, 0x0a, 0x0a, 0x5f, 0x69, 0x73, 0x5f, 0x68, 0x69, 0x64, 0x64, 0x65, 0x6e, 0x42, 0x08, 0x0a, + 0x06, 0x5f, 0x73, 0x63, 0x6f, 0x72, 0x65, 0x22, 0x75, 0x0a, 0x0f, 0x54, 0x69, 0x6d, 0x65, 0x53, + 0x65, 
0x72, 0x69, 0x65, 0x73, 0x50, 0x6f, 0x69, 0x6e, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, + 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x38, + 0x0a, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x74, + 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x01, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x20, + 0x0a, 0x0a, 0x47, 0x65, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, + 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, + 0x22, 0x32, 0x0a, 0x0b, 0x47, 0x65, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, + 0x19, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, + 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x88, 0x01, 0x01, 0x42, 0x08, 0x0a, 0x06, 0x5f, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x22, 0x35, 0x0a, 0x0a, 0x53, 0x65, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x12, 0x27, 0x0a, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x56, 0x61, + 0x6c, 0x75, 0x65, 0x52, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x22, 0x0d, 0x0a, 0x0b, 0x53, + 0x65, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x23, 0x0a, 0x0d, 0x44, 0x65, + 0x6c, 0x65, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, + 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, + 0x10, 0x0a, 0x0e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x22, 0x21, 0x0a, 0x0d, 0x47, 0x65, 
0x74, 0x53, 0x65, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x03, 0x6b, 0x65, 0x79, 0x22, 0x2a, 0x0a, 0x0e, 0x47, 0x65, 0x74, 0x53, 0x65, 0x74, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x6d, 0x62, 0x65, 0x72, + 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x07, 0x6d, 0x65, 0x6d, 0x62, 0x65, 0x72, 0x73, + 0x22, 0x3b, 0x0a, 0x0d, 0x53, 0x65, 0x74, 0x53, 0x65, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, + 0x6b, 0x65, 0x79, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x6d, 0x62, 0x65, 0x72, 0x73, 0x18, 0x02, + 0x20, 0x03, 0x28, 0x09, 0x52, 0x07, 0x6d, 0x65, 0x6d, 0x62, 0x65, 0x72, 0x73, 0x22, 0x10, 0x0a, + 0x0e, 0x53, 0x65, 0x74, 0x53, 0x65, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, + 0x3b, 0x0a, 0x0d, 0x41, 0x64, 0x64, 0x53, 0x65, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, + 0x65, 0x79, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x6d, 0x62, 0x65, 0x72, 0x73, 0x18, 0x02, 0x20, + 0x03, 0x28, 0x09, 0x52, 0x07, 0x6d, 0x65, 0x6d, 0x62, 0x65, 0x72, 0x73, 0x22, 0x10, 0x0a, 0x0e, + 0x41, 0x64, 0x64, 0x53, 0x65, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x3b, + 0x0a, 0x0d, 0x52, 0x65, 0x6d, 0x53, 0x65, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, + 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, + 0x79, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x6d, 0x62, 0x65, 0x72, 0x73, 0x18, 0x02, 0x20, 0x03, + 0x28, 0x09, 0x52, 0x07, 0x6d, 0x65, 0x6d, 0x62, 0x65, 0x72, 0x73, 0x22, 0x10, 0x0a, 0x0e, 0x52, + 0x65, 0x6d, 0x53, 0x65, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x37, 0x0a, + 0x0b, 0x50, 0x75, 0x73, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 
0x12, 0x0a, 0x04, + 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, + 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x0e, 0x0a, 0x0c, 0x50, 0x75, 0x73, 0x68, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x20, 0x0a, 0x0a, 0x50, 0x6f, 0x70, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x32, 0x0a, 0x0b, 0x50, 0x6f, 0x70, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x19, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x88, + 0x01, 0x01, 0x42, 0x08, 0x0a, 0x06, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x23, 0x0a, 0x0d, + 0x52, 0x65, 0x6d, 0x61, 0x69, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x12, 0x0a, + 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, + 0x65, 0x22, 0x26, 0x0a, 0x0e, 0x52, 0x65, 0x6d, 0x61, 0x69, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x03, 0x52, 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x22, 0x79, 0x0a, 0x10, 0x41, 0x64, 0x64, + 0x53, 0x63, 0x6f, 0x72, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1e, 0x0a, + 0x0a, 0x63, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x0a, 0x63, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x16, 0x0a, + 0x06, 0x73, 0x75, 0x62, 0x73, 0x65, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, + 0x75, 0x62, 0x73, 0x65, 0x74, 0x12, 0x2d, 0x0a, 0x09, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, + 0x74, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x63, 0x6f, 0x6c, 
0x2e, 0x53, 0x63, 0x6f, 0x72, 0x65, 0x52, 0x09, 0x64, 0x6f, 0x63, 0x75, 0x6d, + 0x65, 0x6e, 0x74, 0x73, 0x22, 0x13, 0x0a, 0x11, 0x41, 0x64, 0x64, 0x53, 0x63, 0x6f, 0x72, 0x65, + 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x8b, 0x01, 0x0a, 0x13, 0x53, 0x65, + 0x61, 0x72, 0x63, 0x68, 0x53, 0x63, 0x6f, 0x72, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x12, 0x1e, 0x0a, 0x0a, 0x63, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x63, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x75, 0x62, 0x73, 0x65, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x06, 0x73, 0x75, 0x62, 0x73, 0x65, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x71, 0x75, 0x65, + 0x72, 0x79, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x05, 0x71, 0x75, 0x65, 0x72, 0x79, 0x12, + 0x14, 0x0a, 0x05, 0x62, 0x65, 0x67, 0x69, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x05, 0x52, 0x05, + 0x62, 0x65, 0x67, 0x69, 0x6e, 0x12, 0x10, 0x0a, 0x03, 0x65, 0x6e, 0x64, 0x18, 0x05, 0x20, 0x01, + 0x28, 0x05, 0x52, 0x03, 0x65, 0x6e, 0x64, 0x22, 0x45, 0x0a, 0x14, 0x53, 0x65, 0x61, 0x72, 0x63, + 0x68, 0x53, 0x63, 0x6f, 0x72, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, + 0x2d, 0x0a, 0x09, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x53, 0x63, + 0x6f, 0x72, 0x65, 0x52, 0x09, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x22, 0x6d, + 0x0a, 0x13, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x53, 0x63, 0x6f, 0x72, 0x65, 0x73, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1e, 0x0a, 0x0a, 0x63, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0a, 0x63, 0x6f, 0x6c, 0x6c, 0x65, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x36, 0x0a, 0x09, 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, + 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 
0x18, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x63, 0x6f, 0x6c, 0x2e, 0x53, 0x63, 0x6f, 0x72, 0x65, 0x43, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, + 0x6f, 0x6e, 0x52, 0x09, 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x16, 0x0a, + 0x14, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x53, 0x63, 0x6f, 0x72, 0x65, 0x73, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x71, 0x0a, 0x13, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, + 0x63, 0x6f, 0x72, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1e, 0x0a, 0x0a, + 0x63, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, + 0x52, 0x0a, 0x63, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x0e, 0x0a, 0x02, + 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x2a, 0x0a, 0x05, + 0x70, 0x61, 0x74, 0x63, 0x68, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x53, 0x63, 0x6f, 0x72, 0x65, 0x50, 0x61, 0x74, 0x63, + 0x68, 0x52, 0x05, 0x70, 0x61, 0x74, 0x63, 0x68, 0x22, 0x16, 0x0a, 0x14, 0x55, 0x70, 0x64, 0x61, + 0x74, 0x65, 0x53, 0x63, 0x6f, 0x72, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x22, 0x4f, 0x0a, 0x1a, 0x41, 0x64, 0x64, 0x54, 0x69, 0x6d, 0x65, 0x53, 0x65, 0x72, 0x69, 0x65, + 0x73, 0x50, 0x6f, 0x69, 0x6e, 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x31, + 0x0a, 0x06, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x53, 0x65, + 0x72, 0x69, 0x65, 0x73, 0x50, 0x6f, 0x69, 0x6e, 0x74, 0x52, 0x06, 0x70, 0x6f, 0x69, 0x6e, 0x74, + 0x73, 0x22, 0x1d, 0x0a, 0x1b, 0x41, 0x64, 0x64, 0x54, 0x69, 0x6d, 0x65, 0x53, 0x65, 0x72, 0x69, + 0x65, 0x73, 0x50, 0x6f, 0x69, 0x6e, 0x74, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x22, 0x90, 0x01, 0x0a, 0x1a, 0x47, 0x65, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x53, 0x65, 0x72, 
0x69, + 0x65, 0x73, 0x50, 0x6f, 0x69, 0x6e, 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, + 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, + 0x61, 0x6d, 0x65, 0x12, 0x30, 0x0a, 0x05, 0x62, 0x65, 0x67, 0x69, 0x6e, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x05, + 0x62, 0x65, 0x67, 0x69, 0x6e, 0x12, 0x2c, 0x0a, 0x03, 0x65, 0x6e, 0x64, 0x18, 0x03, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x03, + 0x65, 0x6e, 0x64, 0x22, 0x50, 0x0a, 0x1b, 0x47, 0x65, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x53, 0x65, + 0x72, 0x69, 0x65, 0x73, 0x50, 0x6f, 0x69, 0x6e, 0x74, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x12, 0x31, 0x0a, 0x06, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x54, 0x69, + 0x6d, 0x65, 0x53, 0x65, 0x72, 0x69, 0x65, 0x73, 0x50, 0x6f, 0x69, 0x6e, 0x74, 0x52, 0x06, 0x70, + 0x6f, 0x69, 0x6e, 0x74, 0x73, 0x32, 0xa1, 0x09, 0x0a, 0x0a, 0x43, 0x61, 0x63, 0x68, 0x65, 0x53, + 0x74, 0x6f, 0x72, 0x65, 0x12, 0x37, 0x0a, 0x04, 0x50, 0x69, 0x6e, 0x67, 0x12, 0x15, 0x2e, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x50, 0x69, 0x6e, 0x67, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x50, + 0x69, 0x6e, 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x34, 0x0a, + 0x03, 0x47, 0x65, 0x74, 0x12, 0x14, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, + 0x47, 0x65, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x15, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x63, 0x6f, 0x6c, 
0x2e, 0x47, 0x65, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x22, 0x00, 0x12, 0x34, 0x0a, 0x03, 0x53, 0x65, 0x74, 0x12, 0x14, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x53, 0x65, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x1a, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x53, 0x65, 0x74, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x3d, 0x0a, 0x06, 0x44, 0x65, 0x6c, + 0x65, 0x74, 0x65, 0x12, 0x17, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x44, + 0x65, 0x6c, 0x65, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x18, 0x2e, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x3d, 0x0a, 0x06, 0x47, 0x65, 0x74, 0x53, + 0x65, 0x74, 0x12, 0x17, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x47, 0x65, + 0x74, 0x53, 0x65, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x18, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x47, 0x65, 0x74, 0x53, 0x65, 0x74, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x3d, 0x0a, 0x06, 0x53, 0x65, 0x74, 0x53, 0x65, + 0x74, 0x12, 0x17, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x53, 0x65, 0x74, + 0x53, 0x65, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x18, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x53, 0x65, 0x74, 0x53, 0x65, 0x74, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x3d, 0x0a, 0x06, 0x41, 0x64, 0x64, 0x53, 0x65, 0x74, + 0x12, 0x17, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x41, 0x64, 0x64, 0x53, + 0x65, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x18, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x41, 0x64, 0x64, 0x53, 0x65, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x3d, 0x0a, 0x06, 0x52, 0x65, 
0x6d, 0x53, 0x65, 0x74, 0x12, + 0x17, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x52, 0x65, 0x6d, 0x53, 0x65, + 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x18, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x63, 0x6f, 0x6c, 0x2e, 0x52, 0x65, 0x6d, 0x53, 0x65, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x22, 0x00, 0x12, 0x37, 0x0a, 0x04, 0x50, 0x75, 0x73, 0x68, 0x12, 0x15, 0x2e, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x50, 0x75, 0x73, 0x68, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x50, + 0x75, 0x73, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x34, 0x0a, + 0x03, 0x50, 0x6f, 0x70, 0x12, 0x14, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, + 0x50, 0x6f, 0x70, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x15, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x50, 0x6f, 0x70, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x22, 0x00, 0x12, 0x3d, 0x0a, 0x06, 0x52, 0x65, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x17, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x52, 0x65, 0x6d, 0x61, 0x69, 0x6e, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x18, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, + 0x6c, 0x2e, 0x52, 0x65, 0x6d, 0x61, 0x69, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x22, 0x00, 0x12, 0x46, 0x0a, 0x09, 0x41, 0x64, 0x64, 0x53, 0x63, 0x6f, 0x72, 0x65, 0x73, 0x12, + 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x41, 0x64, 0x64, 0x53, 0x63, + 0x6f, 0x72, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1b, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x41, 0x64, 0x64, 0x53, 0x63, 0x6f, 0x72, 0x65, 0x73, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x4f, 0x0a, 0x0c, 0x53, 0x65, + 0x61, 0x72, 0x63, 0x68, 0x53, 0x63, 0x6f, 0x72, 0x65, 0x73, 0x12, 0x1d, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 
0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x53, 0x65, 0x61, 0x72, 0x63, 0x68, 0x53, 0x63, 0x6f, 0x72, + 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x53, 0x65, 0x61, 0x72, 0x63, 0x68, 0x53, 0x63, 0x6f, 0x72, 0x65, + 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x4f, 0x0a, 0x0c, 0x44, + 0x65, 0x6c, 0x65, 0x74, 0x65, 0x53, 0x63, 0x6f, 0x72, 0x65, 0x73, 0x12, 0x1d, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x53, 0x63, 0x6f, + 0x72, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x53, 0x63, 0x6f, 0x72, + 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x4f, 0x0a, 0x0c, + 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x63, 0x6f, 0x72, 0x65, 0x73, 0x12, 0x1d, 0x2e, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x63, + 0x6f, 0x72, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x63, 0x6f, + 0x72, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x64, 0x0a, + 0x13, 0x41, 0x64, 0x64, 0x54, 0x69, 0x6d, 0x65, 0x53, 0x65, 0x72, 0x69, 0x65, 0x73, 0x50, 0x6f, + 0x69, 0x6e, 0x74, 0x73, 0x12, 0x24, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, + 0x41, 0x64, 0x64, 0x54, 0x69, 0x6d, 0x65, 0x53, 0x65, 0x72, 0x69, 0x65, 0x73, 0x50, 0x6f, 0x69, + 0x6e, 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x25, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x41, 0x64, 0x64, 0x54, 0x69, 0x6d, 0x65, 0x53, 0x65, 0x72, + 0x69, 0x65, 0x73, 0x50, 0x6f, 0x69, 0x6e, 0x74, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x22, 0x00, 0x12, 0x64, 0x0a, 0x13, 
0x47, 0x65, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x53, 0x65, + 0x72, 0x69, 0x65, 0x73, 0x50, 0x6f, 0x69, 0x6e, 0x74, 0x73, 0x12, 0x24, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x47, 0x65, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x53, 0x65, 0x72, + 0x69, 0x65, 0x73, 0x50, 0x6f, 0x69, 0x6e, 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x1a, 0x25, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x47, 0x65, 0x74, 0x54, + 0x69, 0x6d, 0x65, 0x53, 0x65, 0x72, 0x69, 0x65, 0x73, 0x50, 0x6f, 0x69, 0x6e, 0x74, 0x73, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, 0x25, 0x5a, 0x23, 0x67, 0x69, 0x74, + 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x7a, 0x68, 0x65, 0x6e, 0x67, 0x68, 0x61, 0x6f, + 0x7a, 0x2f, 0x67, 0x6f, 0x72, 0x73, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, + 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_cache_store_proto_rawDescOnce sync.Once + file_cache_store_proto_rawDescData = file_cache_store_proto_rawDesc +) + +func file_cache_store_proto_rawDescGZIP() []byte { + file_cache_store_proto_rawDescOnce.Do(func() { + file_cache_store_proto_rawDescData = protoimpl.X.CompressGZIP(file_cache_store_proto_rawDescData) + }) + return file_cache_store_proto_rawDescData +} + +var file_cache_store_proto_msgTypes = make([]protoimpl.MessageInfo, 37) +var file_cache_store_proto_goTypes = []any{ + (*Value)(nil), // 0: protocol.Value + (*Score)(nil), // 1: protocol.Score + (*ScoreCondition)(nil), // 2: protocol.ScoreCondition + (*ScorePatch)(nil), // 3: protocol.ScorePatch + (*TimeSeriesPoint)(nil), // 4: protocol.TimeSeriesPoint + (*GetRequest)(nil), // 5: protocol.GetRequest + (*GetResponse)(nil), // 6: protocol.GetResponse + (*SetRequest)(nil), // 7: protocol.SetRequest + (*SetResponse)(nil), // 8: protocol.SetResponse + (*DeleteRequest)(nil), // 9: protocol.DeleteRequest + (*DeleteResponse)(nil), // 10: protocol.DeleteResponse + (*GetSetRequest)(nil), // 11: protocol.GetSetRequest + 
(*GetSetResponse)(nil), // 12: protocol.GetSetResponse + (*SetSetRequest)(nil), // 13: protocol.SetSetRequest + (*SetSetResponse)(nil), // 14: protocol.SetSetResponse + (*AddSetRequest)(nil), // 15: protocol.AddSetRequest + (*AddSetResponse)(nil), // 16: protocol.AddSetResponse + (*RemSetRequest)(nil), // 17: protocol.RemSetRequest + (*RemSetResponse)(nil), // 18: protocol.RemSetResponse + (*PushRequest)(nil), // 19: protocol.PushRequest + (*PushResponse)(nil), // 20: protocol.PushResponse + (*PopRequest)(nil), // 21: protocol.PopRequest + (*PopResponse)(nil), // 22: protocol.PopResponse + (*RemainRequest)(nil), // 23: protocol.RemainRequest + (*RemainResponse)(nil), // 24: protocol.RemainResponse + (*AddScoresRequest)(nil), // 25: protocol.AddScoresRequest + (*AddScoresResponse)(nil), // 26: protocol.AddScoresResponse + (*SearchScoresRequest)(nil), // 27: protocol.SearchScoresRequest + (*SearchScoresResponse)(nil), // 28: protocol.SearchScoresResponse + (*DeleteScoresRequest)(nil), // 29: protocol.DeleteScoresRequest + (*DeleteScoresResponse)(nil), // 30: protocol.DeleteScoresResponse + (*UpdateScoresRequest)(nil), // 31: protocol.UpdateScoresRequest + (*UpdateScoresResponse)(nil), // 32: protocol.UpdateScoresResponse + (*AddTimeSeriesPointsRequest)(nil), // 33: protocol.AddTimeSeriesPointsRequest + (*AddTimeSeriesPointsResponse)(nil), // 34: protocol.AddTimeSeriesPointsResponse + (*GetTimeSeriesPointsRequest)(nil), // 35: protocol.GetTimeSeriesPointsRequest + (*GetTimeSeriesPointsResponse)(nil), // 36: protocol.GetTimeSeriesPointsResponse + (*timestamppb.Timestamp)(nil), // 37: google.protobuf.Timestamp + (*PingRequest)(nil), // 38: protocol.PingRequest + (*PingResponse)(nil), // 39: protocol.PingResponse +} +var file_cache_store_proto_depIdxs = []int32{ + 37, // 0: protocol.Score.timestamp:type_name -> google.protobuf.Timestamp + 37, // 1: protocol.ScoreCondition.before:type_name -> google.protobuf.Timestamp + 37, // 2: 
protocol.TimeSeriesPoint.timestamp:type_name -> google.protobuf.Timestamp + 0, // 3: protocol.SetRequest.values:type_name -> protocol.Value + 1, // 4: protocol.AddScoresRequest.documents:type_name -> protocol.Score + 1, // 5: protocol.SearchScoresResponse.documents:type_name -> protocol.Score + 2, // 6: protocol.DeleteScoresRequest.condition:type_name -> protocol.ScoreCondition + 3, // 7: protocol.UpdateScoresRequest.patch:type_name -> protocol.ScorePatch + 4, // 8: protocol.AddTimeSeriesPointsRequest.points:type_name -> protocol.TimeSeriesPoint + 37, // 9: protocol.GetTimeSeriesPointsRequest.begin:type_name -> google.protobuf.Timestamp + 37, // 10: protocol.GetTimeSeriesPointsRequest.end:type_name -> google.protobuf.Timestamp + 4, // 11: protocol.GetTimeSeriesPointsResponse.points:type_name -> protocol.TimeSeriesPoint + 38, // 12: protocol.CacheStore.Ping:input_type -> protocol.PingRequest + 5, // 13: protocol.CacheStore.Get:input_type -> protocol.GetRequest + 7, // 14: protocol.CacheStore.Set:input_type -> protocol.SetRequest + 9, // 15: protocol.CacheStore.Delete:input_type -> protocol.DeleteRequest + 11, // 16: protocol.CacheStore.GetSet:input_type -> protocol.GetSetRequest + 13, // 17: protocol.CacheStore.SetSet:input_type -> protocol.SetSetRequest + 15, // 18: protocol.CacheStore.AddSet:input_type -> protocol.AddSetRequest + 17, // 19: protocol.CacheStore.RemSet:input_type -> protocol.RemSetRequest + 19, // 20: protocol.CacheStore.Push:input_type -> protocol.PushRequest + 21, // 21: protocol.CacheStore.Pop:input_type -> protocol.PopRequest + 23, // 22: protocol.CacheStore.Remain:input_type -> protocol.RemainRequest + 25, // 23: protocol.CacheStore.AddScores:input_type -> protocol.AddScoresRequest + 27, // 24: protocol.CacheStore.SearchScores:input_type -> protocol.SearchScoresRequest + 29, // 25: protocol.CacheStore.DeleteScores:input_type -> protocol.DeleteScoresRequest + 31, // 26: protocol.CacheStore.UpdateScores:input_type -> protocol.UpdateScoresRequest 
+ 33, // 27: protocol.CacheStore.AddTimeSeriesPoints:input_type -> protocol.AddTimeSeriesPointsRequest + 35, // 28: protocol.CacheStore.GetTimeSeriesPoints:input_type -> protocol.GetTimeSeriesPointsRequest + 39, // 29: protocol.CacheStore.Ping:output_type -> protocol.PingResponse + 6, // 30: protocol.CacheStore.Get:output_type -> protocol.GetResponse + 8, // 31: protocol.CacheStore.Set:output_type -> protocol.SetResponse + 10, // 32: protocol.CacheStore.Delete:output_type -> protocol.DeleteResponse + 12, // 33: protocol.CacheStore.GetSet:output_type -> protocol.GetSetResponse + 14, // 34: protocol.CacheStore.SetSet:output_type -> protocol.SetSetResponse + 16, // 35: protocol.CacheStore.AddSet:output_type -> protocol.AddSetResponse + 18, // 36: protocol.CacheStore.RemSet:output_type -> protocol.RemSetResponse + 20, // 37: protocol.CacheStore.Push:output_type -> protocol.PushResponse + 22, // 38: protocol.CacheStore.Pop:output_type -> protocol.PopResponse + 24, // 39: protocol.CacheStore.Remain:output_type -> protocol.RemainResponse + 26, // 40: protocol.CacheStore.AddScores:output_type -> protocol.AddScoresResponse + 28, // 41: protocol.CacheStore.SearchScores:output_type -> protocol.SearchScoresResponse + 30, // 42: protocol.CacheStore.DeleteScores:output_type -> protocol.DeleteScoresResponse + 32, // 43: protocol.CacheStore.UpdateScores:output_type -> protocol.UpdateScoresResponse + 34, // 44: protocol.CacheStore.AddTimeSeriesPoints:output_type -> protocol.AddTimeSeriesPointsResponse + 36, // 45: protocol.CacheStore.GetTimeSeriesPoints:output_type -> protocol.GetTimeSeriesPointsResponse + 29, // [29:46] is the sub-list for method output_type + 12, // [12:29] is the sub-list for method input_type + 12, // [12:12] is the sub-list for extension type_name + 12, // [12:12] is the sub-list for extension extendee + 0, // [0:12] is the sub-list for field type_name +} + +func init() { file_cache_store_proto_init() } +func file_cache_store_proto_init() { + if 
File_cache_store_proto != nil { + return + } + file_protocol_proto_init() + file_cache_store_proto_msgTypes[2].OneofWrappers = []any{} + file_cache_store_proto_msgTypes[3].OneofWrappers = []any{} + file_cache_store_proto_msgTypes[6].OneofWrappers = []any{} + file_cache_store_proto_msgTypes[22].OneofWrappers = []any{} + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_cache_store_proto_rawDesc, + NumEnums: 0, + NumMessages: 37, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: file_cache_store_proto_goTypes, + DependencyIndexes: file_cache_store_proto_depIdxs, + MessageInfos: file_cache_store_proto_msgTypes, + }.Build() + File_cache_store_proto = out.File + file_cache_store_proto_rawDesc = nil + file_cache_store_proto_goTypes = nil + file_cache_store_proto_depIdxs = nil +} diff --git a/protocol/cache_store.proto b/protocol/cache_store.proto new file mode 100644 index 000000000..ddd5cf610 --- /dev/null +++ b/protocol/cache_store.proto @@ -0,0 +1,196 @@ +// Copyright 2024 gorse Project Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +option go_package = "github.com/zhenghaoz/gorse/protocol"; + +package protocol; + +import "google/protobuf/timestamp.proto"; +import "protocol.proto"; + +message Value { + string name = 1; + string value = 2; +} + +message Score { + string id = 1; + double score = 2; + bool is_hidden = 3; + repeated string categories = 4; + google.protobuf.Timestamp timestamp = 5; +} + +message ScoreCondition { + optional string subset = 1; + optional string id = 2; + optional google.protobuf.Timestamp before = 3; +} + +message ScorePatch { + optional bool is_hidden = 1; + repeated string categories = 2; + optional double score = 3; +} + +message TimeSeriesPoint { + string name = 1; + google.protobuf.Timestamp timestamp = 2; + double value = 3; +} + +message GetRequest { + string name = 1; +} + +message GetResponse { + optional string value = 1; +} + +message SetRequest { + repeated Value values = 1; +} + +message SetResponse {} + +message DeleteRequest { + string name = 1; +} + +message DeleteResponse {} + +message GetSetRequest { + string key = 1; +} + +message GetSetResponse { + repeated string members = 1; +} + +message SetSetRequest { + string key = 1; + repeated string members = 2; +} + +message SetSetResponse {} + +message AddSetRequest { + string key = 1; + repeated string members = 2; +} + +message AddSetResponse {} + +message RemSetRequest { + string key = 1; + repeated string members = 2; +} + +message RemSetResponse {} + +message PushRequest { + string name = 1; + string value = 2; +} + +message PushResponse {} + +message PopRequest { + string name = 1; +} + +message PopResponse { + optional string value = 1; +} + +message RemainRequest { + string name = 1; +} + +message RemainResponse { + int64 count = 1; +} + +message AddScoresRequest { + string collection = 1; + string subset = 2; + repeated Score documents = 3; +} + +message AddScoresResponse {} + +message SearchScoresRequest { + string collection = 1; + string subset = 2; + repeated string 
query = 3; + int32 begin = 4; + int32 end = 5; +} + +message SearchScoresResponse { + repeated Score documents = 1; +} + +message DeleteScoresRequest { + repeated string collection = 1; + ScoreCondition condition = 2; +} + +message DeleteScoresResponse {} + +message UpdateScoresRequest { + repeated string collection = 1; + string id = 2; + ScorePatch patch = 3; +} + +message UpdateScoresResponse {} + +message AddTimeSeriesPointsRequest { + repeated TimeSeriesPoint points = 1; +} + +message AddTimeSeriesPointsResponse {} + +message GetTimeSeriesPointsRequest { + string name = 1; + google.protobuf.Timestamp begin = 2; + google.protobuf.Timestamp end = 3; +} + +message GetTimeSeriesPointsResponse { + repeated TimeSeriesPoint points = 1; +} + +service CacheStore { + rpc Ping(PingRequest) returns (PingResponse) {} + rpc Get(GetRequest) returns (GetResponse) {} + rpc Set(SetRequest) returns (SetResponse) {} + rpc Delete(DeleteRequest) returns (DeleteResponse) {} + rpc GetSet(GetSetRequest) returns (GetSetResponse) {} + rpc SetSet(SetSetRequest) returns (SetSetResponse) {} + rpc AddSet(AddSetRequest) returns (AddSetResponse) {} + rpc RemSet(RemSetRequest) returns (RemSetResponse) {} + rpc Push(PushRequest) returns (PushResponse) {} + rpc Pop(PopRequest) returns (PopResponse) {} + rpc Remain(RemainRequest) returns (RemainResponse) {} + rpc AddScores(AddScoresRequest) returns (AddScoresResponse) {} + rpc SearchScores(SearchScoresRequest) returns (SearchScoresResponse) {} + rpc DeleteScores(DeleteScoresRequest) returns (DeleteScoresResponse) {} + rpc UpdateScores(UpdateScoresRequest) returns (UpdateScoresResponse) {} + rpc AddTimeSeriesPoints(AddTimeSeriesPointsRequest) returns (AddTimeSeriesPointsResponse) {} + rpc GetTimeSeriesPoints(GetTimeSeriesPointsRequest) returns (GetTimeSeriesPointsResponse) {} +} diff --git a/protocol/cache_store_grpc.pb.go b/protocol/cache_store_grpc.pb.go new file mode 100644 index 000000000..090f70e1c --- /dev/null +++ 
b/protocol/cache_store_grpc.pb.go @@ -0,0 +1,743 @@ +// Copyright 2024 gorse Project Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. +// versions: +// - protoc-gen-go-grpc v1.5.1 +// - protoc v5.28.3 +// source: cache_store.proto + +package protocol + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.64.0 or later. 
+const _ = grpc.SupportPackageIsVersion9 + +const ( + CacheStore_Ping_FullMethodName = "/protocol.CacheStore/Ping" + CacheStore_Get_FullMethodName = "/protocol.CacheStore/Get" + CacheStore_Set_FullMethodName = "/protocol.CacheStore/Set" + CacheStore_Delete_FullMethodName = "/protocol.CacheStore/Delete" + CacheStore_GetSet_FullMethodName = "/protocol.CacheStore/GetSet" + CacheStore_SetSet_FullMethodName = "/protocol.CacheStore/SetSet" + CacheStore_AddSet_FullMethodName = "/protocol.CacheStore/AddSet" + CacheStore_RemSet_FullMethodName = "/protocol.CacheStore/RemSet" + CacheStore_Push_FullMethodName = "/protocol.CacheStore/Push" + CacheStore_Pop_FullMethodName = "/protocol.CacheStore/Pop" + CacheStore_Remain_FullMethodName = "/protocol.CacheStore/Remain" + CacheStore_AddScores_FullMethodName = "/protocol.CacheStore/AddScores" + CacheStore_SearchScores_FullMethodName = "/protocol.CacheStore/SearchScores" + CacheStore_DeleteScores_FullMethodName = "/protocol.CacheStore/DeleteScores" + CacheStore_UpdateScores_FullMethodName = "/protocol.CacheStore/UpdateScores" + CacheStore_AddTimeSeriesPoints_FullMethodName = "/protocol.CacheStore/AddTimeSeriesPoints" + CacheStore_GetTimeSeriesPoints_FullMethodName = "/protocol.CacheStore/GetTimeSeriesPoints" +) + +// CacheStoreClient is the client API for CacheStore service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. 
+type CacheStoreClient interface { + Ping(ctx context.Context, in *PingRequest, opts ...grpc.CallOption) (*PingResponse, error) + Get(ctx context.Context, in *GetRequest, opts ...grpc.CallOption) (*GetResponse, error) + Set(ctx context.Context, in *SetRequest, opts ...grpc.CallOption) (*SetResponse, error) + Delete(ctx context.Context, in *DeleteRequest, opts ...grpc.CallOption) (*DeleteResponse, error) + GetSet(ctx context.Context, in *GetSetRequest, opts ...grpc.CallOption) (*GetSetResponse, error) + SetSet(ctx context.Context, in *SetSetRequest, opts ...grpc.CallOption) (*SetSetResponse, error) + AddSet(ctx context.Context, in *AddSetRequest, opts ...grpc.CallOption) (*AddSetResponse, error) + RemSet(ctx context.Context, in *RemSetRequest, opts ...grpc.CallOption) (*RemSetResponse, error) + Push(ctx context.Context, in *PushRequest, opts ...grpc.CallOption) (*PushResponse, error) + Pop(ctx context.Context, in *PopRequest, opts ...grpc.CallOption) (*PopResponse, error) + Remain(ctx context.Context, in *RemainRequest, opts ...grpc.CallOption) (*RemainResponse, error) + AddScores(ctx context.Context, in *AddScoresRequest, opts ...grpc.CallOption) (*AddScoresResponse, error) + SearchScores(ctx context.Context, in *SearchScoresRequest, opts ...grpc.CallOption) (*SearchScoresResponse, error) + DeleteScores(ctx context.Context, in *DeleteScoresRequest, opts ...grpc.CallOption) (*DeleteScoresResponse, error) + UpdateScores(ctx context.Context, in *UpdateScoresRequest, opts ...grpc.CallOption) (*UpdateScoresResponse, error) + AddTimeSeriesPoints(ctx context.Context, in *AddTimeSeriesPointsRequest, opts ...grpc.CallOption) (*AddTimeSeriesPointsResponse, error) + GetTimeSeriesPoints(ctx context.Context, in *GetTimeSeriesPointsRequest, opts ...grpc.CallOption) (*GetTimeSeriesPointsResponse, error) +} + +type cacheStoreClient struct { + cc grpc.ClientConnInterface +} + +func NewCacheStoreClient(cc grpc.ClientConnInterface) CacheStoreClient { + return &cacheStoreClient{cc} +} 
+ +func (c *cacheStoreClient) Ping(ctx context.Context, in *PingRequest, opts ...grpc.CallOption) (*PingResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(PingResponse) + err := c.cc.Invoke(ctx, CacheStore_Ping_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cacheStoreClient) Get(ctx context.Context, in *GetRequest, opts ...grpc.CallOption) (*GetResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(GetResponse) + err := c.cc.Invoke(ctx, CacheStore_Get_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cacheStoreClient) Set(ctx context.Context, in *SetRequest, opts ...grpc.CallOption) (*SetResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(SetResponse) + err := c.cc.Invoke(ctx, CacheStore_Set_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cacheStoreClient) Delete(ctx context.Context, in *DeleteRequest, opts ...grpc.CallOption) (*DeleteResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(DeleteResponse) + err := c.cc.Invoke(ctx, CacheStore_Delete_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cacheStoreClient) GetSet(ctx context.Context, in *GetSetRequest, opts ...grpc.CallOption) (*GetSetResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(GetSetResponse) + err := c.cc.Invoke(ctx, CacheStore_GetSet_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cacheStoreClient) SetSet(ctx context.Context, in *SetSetRequest, opts ...grpc.CallOption) (*SetSetResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) 
+ out := new(SetSetResponse) + err := c.cc.Invoke(ctx, CacheStore_SetSet_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cacheStoreClient) AddSet(ctx context.Context, in *AddSetRequest, opts ...grpc.CallOption) (*AddSetResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(AddSetResponse) + err := c.cc.Invoke(ctx, CacheStore_AddSet_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cacheStoreClient) RemSet(ctx context.Context, in *RemSetRequest, opts ...grpc.CallOption) (*RemSetResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(RemSetResponse) + err := c.cc.Invoke(ctx, CacheStore_RemSet_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cacheStoreClient) Push(ctx context.Context, in *PushRequest, opts ...grpc.CallOption) (*PushResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(PushResponse) + err := c.cc.Invoke(ctx, CacheStore_Push_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cacheStoreClient) Pop(ctx context.Context, in *PopRequest, opts ...grpc.CallOption) (*PopResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(PopResponse) + err := c.cc.Invoke(ctx, CacheStore_Pop_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cacheStoreClient) Remain(ctx context.Context, in *RemainRequest, opts ...grpc.CallOption) (*RemainResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(RemainResponse) + err := c.cc.Invoke(ctx, CacheStore_Remain_FullMethodName, in, out, cOpts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *cacheStoreClient) AddScores(ctx context.Context, in *AddScoresRequest, opts ...grpc.CallOption) (*AddScoresResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(AddScoresResponse) + err := c.cc.Invoke(ctx, CacheStore_AddScores_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cacheStoreClient) SearchScores(ctx context.Context, in *SearchScoresRequest, opts ...grpc.CallOption) (*SearchScoresResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(SearchScoresResponse) + err := c.cc.Invoke(ctx, CacheStore_SearchScores_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cacheStoreClient) DeleteScores(ctx context.Context, in *DeleteScoresRequest, opts ...grpc.CallOption) (*DeleteScoresResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(DeleteScoresResponse) + err := c.cc.Invoke(ctx, CacheStore_DeleteScores_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cacheStoreClient) UpdateScores(ctx context.Context, in *UpdateScoresRequest, opts ...grpc.CallOption) (*UpdateScoresResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(UpdateScoresResponse) + err := c.cc.Invoke(ctx, CacheStore_UpdateScores_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cacheStoreClient) AddTimeSeriesPoints(ctx context.Context, in *AddTimeSeriesPointsRequest, opts ...grpc.CallOption) (*AddTimeSeriesPointsResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(AddTimeSeriesPointsResponse) + err := c.cc.Invoke(ctx, CacheStore_AddTimeSeriesPoints_FullMethodName, in, out, cOpts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *cacheStoreClient) GetTimeSeriesPoints(ctx context.Context, in *GetTimeSeriesPointsRequest, opts ...grpc.CallOption) (*GetTimeSeriesPointsResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(GetTimeSeriesPointsResponse) + err := c.cc.Invoke(ctx, CacheStore_GetTimeSeriesPoints_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +// CacheStoreServer is the server API for CacheStore service. +// All implementations must embed UnimplementedCacheStoreServer +// for forward compatibility. +type CacheStoreServer interface { + Ping(context.Context, *PingRequest) (*PingResponse, error) + Get(context.Context, *GetRequest) (*GetResponse, error) + Set(context.Context, *SetRequest) (*SetResponse, error) + Delete(context.Context, *DeleteRequest) (*DeleteResponse, error) + GetSet(context.Context, *GetSetRequest) (*GetSetResponse, error) + SetSet(context.Context, *SetSetRequest) (*SetSetResponse, error) + AddSet(context.Context, *AddSetRequest) (*AddSetResponse, error) + RemSet(context.Context, *RemSetRequest) (*RemSetResponse, error) + Push(context.Context, *PushRequest) (*PushResponse, error) + Pop(context.Context, *PopRequest) (*PopResponse, error) + Remain(context.Context, *RemainRequest) (*RemainResponse, error) + AddScores(context.Context, *AddScoresRequest) (*AddScoresResponse, error) + SearchScores(context.Context, *SearchScoresRequest) (*SearchScoresResponse, error) + DeleteScores(context.Context, *DeleteScoresRequest) (*DeleteScoresResponse, error) + UpdateScores(context.Context, *UpdateScoresRequest) (*UpdateScoresResponse, error) + AddTimeSeriesPoints(context.Context, *AddTimeSeriesPointsRequest) (*AddTimeSeriesPointsResponse, error) + GetTimeSeriesPoints(context.Context, *GetTimeSeriesPointsRequest) (*GetTimeSeriesPointsResponse, error) + mustEmbedUnimplementedCacheStoreServer() +} + +// 
UnimplementedCacheStoreServer must be embedded to have +// forward compatible implementations. +// +// NOTE: this should be embedded by value instead of pointer to avoid a nil +// pointer dereference when methods are called. +type UnimplementedCacheStoreServer struct{} + +func (UnimplementedCacheStoreServer) Ping(context.Context, *PingRequest) (*PingResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method Ping not implemented") +} +func (UnimplementedCacheStoreServer) Get(context.Context, *GetRequest) (*GetResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method Get not implemented") +} +func (UnimplementedCacheStoreServer) Set(context.Context, *SetRequest) (*SetResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method Set not implemented") +} +func (UnimplementedCacheStoreServer) Delete(context.Context, *DeleteRequest) (*DeleteResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method Delete not implemented") +} +func (UnimplementedCacheStoreServer) GetSet(context.Context, *GetSetRequest) (*GetSetResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetSet not implemented") +} +func (UnimplementedCacheStoreServer) SetSet(context.Context, *SetSetRequest) (*SetSetResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method SetSet not implemented") +} +func (UnimplementedCacheStoreServer) AddSet(context.Context, *AddSetRequest) (*AddSetResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method AddSet not implemented") +} +func (UnimplementedCacheStoreServer) RemSet(context.Context, *RemSetRequest) (*RemSetResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method RemSet not implemented") +} +func (UnimplementedCacheStoreServer) Push(context.Context, *PushRequest) (*PushResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method Push not implemented") +} +func (UnimplementedCacheStoreServer) 
Pop(context.Context, *PopRequest) (*PopResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method Pop not implemented") +} +func (UnimplementedCacheStoreServer) Remain(context.Context, *RemainRequest) (*RemainResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method Remain not implemented") +} +func (UnimplementedCacheStoreServer) AddScores(context.Context, *AddScoresRequest) (*AddScoresResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method AddScores not implemented") +} +func (UnimplementedCacheStoreServer) SearchScores(context.Context, *SearchScoresRequest) (*SearchScoresResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method SearchScores not implemented") +} +func (UnimplementedCacheStoreServer) DeleteScores(context.Context, *DeleteScoresRequest) (*DeleteScoresResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method DeleteScores not implemented") +} +func (UnimplementedCacheStoreServer) UpdateScores(context.Context, *UpdateScoresRequest) (*UpdateScoresResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method UpdateScores not implemented") +} +func (UnimplementedCacheStoreServer) AddTimeSeriesPoints(context.Context, *AddTimeSeriesPointsRequest) (*AddTimeSeriesPointsResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method AddTimeSeriesPoints not implemented") +} +func (UnimplementedCacheStoreServer) GetTimeSeriesPoints(context.Context, *GetTimeSeriesPointsRequest) (*GetTimeSeriesPointsResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetTimeSeriesPoints not implemented") +} +func (UnimplementedCacheStoreServer) mustEmbedUnimplementedCacheStoreServer() {} +func (UnimplementedCacheStoreServer) testEmbeddedByValue() {} + +// UnsafeCacheStoreServer may be embedded to opt out of forward compatibility for this service. 
+// Use of this interface is not recommended, as added methods to CacheStoreServer will +// result in compilation errors. +type UnsafeCacheStoreServer interface { + mustEmbedUnimplementedCacheStoreServer() +} + +func RegisterCacheStoreServer(s grpc.ServiceRegistrar, srv CacheStoreServer) { + // If the following call pancis, it indicates UnimplementedCacheStoreServer was + // embedded by pointer and is nil. This will cause panics if an + // unimplemented method is ever invoked, so we test this at initialization + // time to prevent it from happening at runtime later due to I/O. + if t, ok := srv.(interface{ testEmbeddedByValue() }); ok { + t.testEmbeddedByValue() + } + s.RegisterService(&CacheStore_ServiceDesc, srv) +} + +func _CacheStore_Ping_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PingRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CacheStoreServer).Ping(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: CacheStore_Ping_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CacheStoreServer).Ping(ctx, req.(*PingRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CacheStore_Get_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CacheStoreServer).Get(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: CacheStore_Get_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CacheStoreServer).Get(ctx, req.(*GetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CacheStore_Set_Handler(srv interface{}, ctx 
context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CacheStoreServer).Set(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: CacheStore_Set_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CacheStoreServer).Set(ctx, req.(*SetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CacheStore_Delete_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CacheStoreServer).Delete(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: CacheStore_Delete_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CacheStoreServer).Delete(ctx, req.(*DeleteRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CacheStore_GetSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CacheStoreServer).GetSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: CacheStore_GetSet_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CacheStoreServer).GetSet(ctx, req.(*GetSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CacheStore_SetSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SetSetRequest) + if 
err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CacheStoreServer).SetSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: CacheStore_SetSet_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CacheStoreServer).SetSet(ctx, req.(*SetSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CacheStore_AddSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AddSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CacheStoreServer).AddSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: CacheStore_AddSet_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CacheStoreServer).AddSet(ctx, req.(*AddSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CacheStore_RemSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RemSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CacheStoreServer).RemSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: CacheStore_RemSet_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CacheStoreServer).RemSet(ctx, req.(*RemSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CacheStore_Push_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PushRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CacheStoreServer).Push(ctx, in) + } + 
info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: CacheStore_Push_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CacheStoreServer).Push(ctx, req.(*PushRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CacheStore_Pop_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PopRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CacheStoreServer).Pop(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: CacheStore_Pop_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CacheStoreServer).Pop(ctx, req.(*PopRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CacheStore_Remain_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RemainRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CacheStoreServer).Remain(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: CacheStore_Remain_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CacheStoreServer).Remain(ctx, req.(*RemainRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CacheStore_AddScores_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AddScoresRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CacheStoreServer).AddScores(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: CacheStore_AddScores_FullMethodName, + } + handler := func(ctx context.Context, 
req interface{}) (interface{}, error) { + return srv.(CacheStoreServer).AddScores(ctx, req.(*AddScoresRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CacheStore_SearchScores_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SearchScoresRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CacheStoreServer).SearchScores(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: CacheStore_SearchScores_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CacheStoreServer).SearchScores(ctx, req.(*SearchScoresRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CacheStore_DeleteScores_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteScoresRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CacheStoreServer).DeleteScores(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: CacheStore_DeleteScores_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CacheStoreServer).DeleteScores(ctx, req.(*DeleteScoresRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CacheStore_UpdateScores_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateScoresRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CacheStoreServer).UpdateScores(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: CacheStore_UpdateScores_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) 
(interface{}, error) { + return srv.(CacheStoreServer).UpdateScores(ctx, req.(*UpdateScoresRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CacheStore_AddTimeSeriesPoints_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AddTimeSeriesPointsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CacheStoreServer).AddTimeSeriesPoints(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: CacheStore_AddTimeSeriesPoints_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CacheStoreServer).AddTimeSeriesPoints(ctx, req.(*AddTimeSeriesPointsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CacheStore_GetTimeSeriesPoints_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetTimeSeriesPointsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CacheStoreServer).GetTimeSeriesPoints(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: CacheStore_GetTimeSeriesPoints_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CacheStoreServer).GetTimeSeriesPoints(ctx, req.(*GetTimeSeriesPointsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +// CacheStore_ServiceDesc is the grpc.ServiceDesc for CacheStore service. 
+// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var CacheStore_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "protocol.CacheStore", + HandlerType: (*CacheStoreServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "Ping", + Handler: _CacheStore_Ping_Handler, + }, + { + MethodName: "Get", + Handler: _CacheStore_Get_Handler, + }, + { + MethodName: "Set", + Handler: _CacheStore_Set_Handler, + }, + { + MethodName: "Delete", + Handler: _CacheStore_Delete_Handler, + }, + { + MethodName: "GetSet", + Handler: _CacheStore_GetSet_Handler, + }, + { + MethodName: "SetSet", + Handler: _CacheStore_SetSet_Handler, + }, + { + MethodName: "AddSet", + Handler: _CacheStore_AddSet_Handler, + }, + { + MethodName: "RemSet", + Handler: _CacheStore_RemSet_Handler, + }, + { + MethodName: "Push", + Handler: _CacheStore_Push_Handler, + }, + { + MethodName: "Pop", + Handler: _CacheStore_Pop_Handler, + }, + { + MethodName: "Remain", + Handler: _CacheStore_Remain_Handler, + }, + { + MethodName: "AddScores", + Handler: _CacheStore_AddScores_Handler, + }, + { + MethodName: "SearchScores", + Handler: _CacheStore_SearchScores_Handler, + }, + { + MethodName: "DeleteScores", + Handler: _CacheStore_DeleteScores_Handler, + }, + { + MethodName: "UpdateScores", + Handler: _CacheStore_UpdateScores_Handler, + }, + { + MethodName: "AddTimeSeriesPoints", + Handler: _CacheStore_AddTimeSeriesPoints_Handler, + }, + { + MethodName: "GetTimeSeriesPoints", + Handler: _CacheStore_GetTimeSeriesPoints_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "cache_store.proto", +} diff --git a/protocol/data_store.pb.go b/protocol/data_store.pb.go new file mode 100644 index 000000000..98a96ec57 --- /dev/null +++ b/protocol/data_store.pb.go @@ -0,0 +1,2546 @@ +// Copyright 2024 gorse Project Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the 
License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.35.1 +// protoc v5.28.3 +// source: data_store.proto + +package protocol + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + timestamppb "google.golang.org/protobuf/types/known/timestamppb" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. 
+ _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type UserPatch struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Labels []byte `protobuf:"bytes,1,opt,name=labels,proto3" json:"labels,omitempty"` + Comment *string `protobuf:"bytes,2,opt,name=comment,proto3,oneof" json:"comment,omitempty"` + Subscribe []string `protobuf:"bytes,3,rep,name=subscribe,proto3" json:"subscribe,omitempty"` +} + +func (x *UserPatch) Reset() { + *x = UserPatch{} + mi := &file_data_store_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *UserPatch) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*UserPatch) ProtoMessage() {} + +func (x *UserPatch) ProtoReflect() protoreflect.Message { + mi := &file_data_store_proto_msgTypes[0] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use UserPatch.ProtoReflect.Descriptor instead. 
+func (*UserPatch) Descriptor() ([]byte, []int) { + return file_data_store_proto_rawDescGZIP(), []int{0} +} + +func (x *UserPatch) GetLabels() []byte { + if x != nil { + return x.Labels + } + return nil +} + +func (x *UserPatch) GetComment() string { + if x != nil && x.Comment != nil { + return *x.Comment + } + return "" +} + +func (x *UserPatch) GetSubscribe() []string { + if x != nil { + return x.Subscribe + } + return nil +} + +type ItemPatch struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + IsHidden *bool `protobuf:"varint,1,opt,name=is_hidden,json=isHidden,proto3,oneof" json:"is_hidden,omitempty"` + Categories []string `protobuf:"bytes,2,rep,name=categories,proto3" json:"categories,omitempty"` + Timestamp *timestamppb.Timestamp `protobuf:"bytes,3,opt,name=timestamp,proto3,oneof" json:"timestamp,omitempty"` + Labels []byte `protobuf:"bytes,4,opt,name=labels,proto3" json:"labels,omitempty"` + Comment *string `protobuf:"bytes,5,opt,name=comment,proto3,oneof" json:"comment,omitempty"` +} + +func (x *ItemPatch) Reset() { + *x = ItemPatch{} + mi := &file_data_store_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ItemPatch) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ItemPatch) ProtoMessage() {} + +func (x *ItemPatch) ProtoReflect() protoreflect.Message { + mi := &file_data_store_proto_msgTypes[1] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ItemPatch.ProtoReflect.Descriptor instead. 
+func (*ItemPatch) Descriptor() ([]byte, []int) { + return file_data_store_proto_rawDescGZIP(), []int{1} +} + +func (x *ItemPatch) GetIsHidden() bool { + if x != nil && x.IsHidden != nil { + return *x.IsHidden + } + return false +} + +func (x *ItemPatch) GetCategories() []string { + if x != nil { + return x.Categories + } + return nil +} + +func (x *ItemPatch) GetTimestamp() *timestamppb.Timestamp { + if x != nil { + return x.Timestamp + } + return nil +} + +func (x *ItemPatch) GetLabels() []byte { + if x != nil { + return x.Labels + } + return nil +} + +func (x *ItemPatch) GetComment() string { + if x != nil && x.Comment != nil { + return *x.Comment + } + return "" +} + +type ScanOptions struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + BeginUserId *string `protobuf:"bytes,1,opt,name=begin_user_id,json=beginUserId,proto3,oneof" json:"begin_user_id,omitempty"` + EndUserId *string `protobuf:"bytes,2,opt,name=end_user_id,json=endUserId,proto3,oneof" json:"end_user_id,omitempty"` + BeginTime *timestamppb.Timestamp `protobuf:"bytes,3,opt,name=begin_time,json=beginTime,proto3,oneof" json:"begin_time,omitempty"` + EndTime *timestamppb.Timestamp `protobuf:"bytes,4,opt,name=end_time,json=endTime,proto3,oneof" json:"end_time,omitempty"` + FeedbackTypes []string `protobuf:"bytes,5,rep,name=feedback_types,json=feedbackTypes,proto3" json:"feedback_types,omitempty"` +} + +func (x *ScanOptions) Reset() { + *x = ScanOptions{} + mi := &file_data_store_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ScanOptions) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ScanOptions) ProtoMessage() {} + +func (x *ScanOptions) ProtoReflect() protoreflect.Message { + mi := &file_data_store_proto_msgTypes[2] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + 
} + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ScanOptions.ProtoReflect.Descriptor instead. +func (*ScanOptions) Descriptor() ([]byte, []int) { + return file_data_store_proto_rawDescGZIP(), []int{2} +} + +func (x *ScanOptions) GetBeginUserId() string { + if x != nil && x.BeginUserId != nil { + return *x.BeginUserId + } + return "" +} + +func (x *ScanOptions) GetEndUserId() string { + if x != nil && x.EndUserId != nil { + return *x.EndUserId + } + return "" +} + +func (x *ScanOptions) GetBeginTime() *timestamppb.Timestamp { + if x != nil { + return x.BeginTime + } + return nil +} + +func (x *ScanOptions) GetEndTime() *timestamppb.Timestamp { + if x != nil { + return x.EndTime + } + return nil +} + +func (x *ScanOptions) GetFeedbackTypes() []string { + if x != nil { + return x.FeedbackTypes + } + return nil +} + +type BatchInsertItemsRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Items []*Item `protobuf:"bytes,1,rep,name=items,proto3" json:"items,omitempty"` +} + +func (x *BatchInsertItemsRequest) Reset() { + *x = BatchInsertItemsRequest{} + mi := &file_data_store_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *BatchInsertItemsRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BatchInsertItemsRequest) ProtoMessage() {} + +func (x *BatchInsertItemsRequest) ProtoReflect() protoreflect.Message { + mi := &file_data_store_proto_msgTypes[3] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BatchInsertItemsRequest.ProtoReflect.Descriptor instead. 
+func (*BatchInsertItemsRequest) Descriptor() ([]byte, []int) { + return file_data_store_proto_rawDescGZIP(), []int{3} +} + +func (x *BatchInsertItemsRequest) GetItems() []*Item { + if x != nil { + return x.Items + } + return nil +} + +type BatchInsertItemsResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *BatchInsertItemsResponse) Reset() { + *x = BatchInsertItemsResponse{} + mi := &file_data_store_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *BatchInsertItemsResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BatchInsertItemsResponse) ProtoMessage() {} + +func (x *BatchInsertItemsResponse) ProtoReflect() protoreflect.Message { + mi := &file_data_store_proto_msgTypes[4] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BatchInsertItemsResponse.ProtoReflect.Descriptor instead. 
+func (*BatchInsertItemsResponse) Descriptor() ([]byte, []int) { + return file_data_store_proto_rawDescGZIP(), []int{4} +} + +type BatchGetItemsRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + ItemIds []string `protobuf:"bytes,1,rep,name=item_ids,json=itemIds,proto3" json:"item_ids,omitempty"` +} + +func (x *BatchGetItemsRequest) Reset() { + *x = BatchGetItemsRequest{} + mi := &file_data_store_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *BatchGetItemsRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BatchGetItemsRequest) ProtoMessage() {} + +func (x *BatchGetItemsRequest) ProtoReflect() protoreflect.Message { + mi := &file_data_store_proto_msgTypes[5] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BatchGetItemsRequest.ProtoReflect.Descriptor instead. 
+func (*BatchGetItemsRequest) Descriptor() ([]byte, []int) { + return file_data_store_proto_rawDescGZIP(), []int{5} +} + +func (x *BatchGetItemsRequest) GetItemIds() []string { + if x != nil { + return x.ItemIds + } + return nil +} + +type BatchGetItemsResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Items []*Item `protobuf:"bytes,1,rep,name=items,proto3" json:"items,omitempty"` +} + +func (x *BatchGetItemsResponse) Reset() { + *x = BatchGetItemsResponse{} + mi := &file_data_store_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *BatchGetItemsResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BatchGetItemsResponse) ProtoMessage() {} + +func (x *BatchGetItemsResponse) ProtoReflect() protoreflect.Message { + mi := &file_data_store_proto_msgTypes[6] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BatchGetItemsResponse.ProtoReflect.Descriptor instead. 
+func (*BatchGetItemsResponse) Descriptor() ([]byte, []int) { + return file_data_store_proto_rawDescGZIP(), []int{6} +} + +func (x *BatchGetItemsResponse) GetItems() []*Item { + if x != nil { + return x.Items + } + return nil +} + +type DeleteItemRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + ItemId string `protobuf:"bytes,1,opt,name=item_id,json=itemId,proto3" json:"item_id,omitempty"` +} + +func (x *DeleteItemRequest) Reset() { + *x = DeleteItemRequest{} + mi := &file_data_store_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *DeleteItemRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DeleteItemRequest) ProtoMessage() {} + +func (x *DeleteItemRequest) ProtoReflect() protoreflect.Message { + mi := &file_data_store_proto_msgTypes[7] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DeleteItemRequest.ProtoReflect.Descriptor instead. 
+func (*DeleteItemRequest) Descriptor() ([]byte, []int) { + return file_data_store_proto_rawDescGZIP(), []int{7} +} + +func (x *DeleteItemRequest) GetItemId() string { + if x != nil { + return x.ItemId + } + return "" +} + +type DeleteItemResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *DeleteItemResponse) Reset() { + *x = DeleteItemResponse{} + mi := &file_data_store_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *DeleteItemResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DeleteItemResponse) ProtoMessage() {} + +func (x *DeleteItemResponse) ProtoReflect() protoreflect.Message { + mi := &file_data_store_proto_msgTypes[8] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DeleteItemResponse.ProtoReflect.Descriptor instead. 
+func (*DeleteItemResponse) Descriptor() ([]byte, []int) { + return file_data_store_proto_rawDescGZIP(), []int{8} +} + +type GetItemRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + ItemId string `protobuf:"bytes,1,opt,name=item_id,json=itemId,proto3" json:"item_id,omitempty"` +} + +func (x *GetItemRequest) Reset() { + *x = GetItemRequest{} + mi := &file_data_store_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GetItemRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetItemRequest) ProtoMessage() {} + +func (x *GetItemRequest) ProtoReflect() protoreflect.Message { + mi := &file_data_store_proto_msgTypes[9] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetItemRequest.ProtoReflect.Descriptor instead. 
+func (*GetItemRequest) Descriptor() ([]byte, []int) { + return file_data_store_proto_rawDescGZIP(), []int{9} +} + +func (x *GetItemRequest) GetItemId() string { + if x != nil { + return x.ItemId + } + return "" +} + +type GetItemResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Item *Item `protobuf:"bytes,1,opt,name=item,proto3,oneof" json:"item,omitempty"` +} + +func (x *GetItemResponse) Reset() { + *x = GetItemResponse{} + mi := &file_data_store_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GetItemResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetItemResponse) ProtoMessage() {} + +func (x *GetItemResponse) ProtoReflect() protoreflect.Message { + mi := &file_data_store_proto_msgTypes[10] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetItemResponse.ProtoReflect.Descriptor instead. 
+func (*GetItemResponse) Descriptor() ([]byte, []int) { + return file_data_store_proto_rawDescGZIP(), []int{10} +} + +func (x *GetItemResponse) GetItem() *Item { + if x != nil { + return x.Item + } + return nil +} + +type ModifyItemRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + ItemId string `protobuf:"bytes,1,opt,name=item_id,json=itemId,proto3" json:"item_id,omitempty"` + Patch *ItemPatch `protobuf:"bytes,2,opt,name=patch,proto3" json:"patch,omitempty"` +} + +func (x *ModifyItemRequest) Reset() { + *x = ModifyItemRequest{} + mi := &file_data_store_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ModifyItemRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModifyItemRequest) ProtoMessage() {} + +func (x *ModifyItemRequest) ProtoReflect() protoreflect.Message { + mi := &file_data_store_proto_msgTypes[11] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModifyItemRequest.ProtoReflect.Descriptor instead. 
+func (*ModifyItemRequest) Descriptor() ([]byte, []int) { + return file_data_store_proto_rawDescGZIP(), []int{11} +} + +func (x *ModifyItemRequest) GetItemId() string { + if x != nil { + return x.ItemId + } + return "" +} + +func (x *ModifyItemRequest) GetPatch() *ItemPatch { + if x != nil { + return x.Patch + } + return nil +} + +type ModifyItemResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *ModifyItemResponse) Reset() { + *x = ModifyItemResponse{} + mi := &file_data_store_proto_msgTypes[12] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ModifyItemResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModifyItemResponse) ProtoMessage() {} + +func (x *ModifyItemResponse) ProtoReflect() protoreflect.Message { + mi := &file_data_store_proto_msgTypes[12] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModifyItemResponse.ProtoReflect.Descriptor instead. 
+func (*ModifyItemResponse) Descriptor() ([]byte, []int) { + return file_data_store_proto_rawDescGZIP(), []int{12} +} + +type GetItemsRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Cursor string `protobuf:"bytes,1,opt,name=cursor,proto3" json:"cursor,omitempty"` + N int32 `protobuf:"varint,2,opt,name=n,proto3" json:"n,omitempty"` + BeginTime *timestamppb.Timestamp `protobuf:"bytes,3,opt,name=begin_time,json=beginTime,proto3" json:"begin_time,omitempty"` +} + +func (x *GetItemsRequest) Reset() { + *x = GetItemsRequest{} + mi := &file_data_store_proto_msgTypes[13] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GetItemsRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetItemsRequest) ProtoMessage() {} + +func (x *GetItemsRequest) ProtoReflect() protoreflect.Message { + mi := &file_data_store_proto_msgTypes[13] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetItemsRequest.ProtoReflect.Descriptor instead. 
+func (*GetItemsRequest) Descriptor() ([]byte, []int) { + return file_data_store_proto_rawDescGZIP(), []int{13} +} + +func (x *GetItemsRequest) GetCursor() string { + if x != nil { + return x.Cursor + } + return "" +} + +func (x *GetItemsRequest) GetN() int32 { + if x != nil { + return x.N + } + return 0 +} + +func (x *GetItemsRequest) GetBeginTime() *timestamppb.Timestamp { + if x != nil { + return x.BeginTime + } + return nil +} + +type GetItemsResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Cursor string `protobuf:"bytes,1,opt,name=cursor,proto3" json:"cursor,omitempty"` + Items []*Item `protobuf:"bytes,2,rep,name=items,proto3" json:"items,omitempty"` +} + +func (x *GetItemsResponse) Reset() { + *x = GetItemsResponse{} + mi := &file_data_store_proto_msgTypes[14] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GetItemsResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetItemsResponse) ProtoMessage() {} + +func (x *GetItemsResponse) ProtoReflect() protoreflect.Message { + mi := &file_data_store_proto_msgTypes[14] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetItemsResponse.ProtoReflect.Descriptor instead. 
+func (*GetItemsResponse) Descriptor() ([]byte, []int) { + return file_data_store_proto_rawDescGZIP(), []int{14} +} + +func (x *GetItemsResponse) GetCursor() string { + if x != nil { + return x.Cursor + } + return "" +} + +func (x *GetItemsResponse) GetItems() []*Item { + if x != nil { + return x.Items + } + return nil +} + +type GetItemFeedbackRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + ItemId string `protobuf:"bytes,1,opt,name=item_id,json=itemId,proto3" json:"item_id,omitempty"` + FeedbackTypes []string `protobuf:"bytes,2,rep,name=feedback_types,json=feedbackTypes,proto3" json:"feedback_types,omitempty"` +} + +func (x *GetItemFeedbackRequest) Reset() { + *x = GetItemFeedbackRequest{} + mi := &file_data_store_proto_msgTypes[15] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GetItemFeedbackRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetItemFeedbackRequest) ProtoMessage() {} + +func (x *GetItemFeedbackRequest) ProtoReflect() protoreflect.Message { + mi := &file_data_store_proto_msgTypes[15] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetItemFeedbackRequest.ProtoReflect.Descriptor instead. 
+func (*GetItemFeedbackRequest) Descriptor() ([]byte, []int) { + return file_data_store_proto_rawDescGZIP(), []int{15} +} + +func (x *GetItemFeedbackRequest) GetItemId() string { + if x != nil { + return x.ItemId + } + return "" +} + +func (x *GetItemFeedbackRequest) GetFeedbackTypes() []string { + if x != nil { + return x.FeedbackTypes + } + return nil +} + +type BatchInsertUsersRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Users []*User `protobuf:"bytes,1,rep,name=users,proto3" json:"users,omitempty"` +} + +func (x *BatchInsertUsersRequest) Reset() { + *x = BatchInsertUsersRequest{} + mi := &file_data_store_proto_msgTypes[16] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *BatchInsertUsersRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BatchInsertUsersRequest) ProtoMessage() {} + +func (x *BatchInsertUsersRequest) ProtoReflect() protoreflect.Message { + mi := &file_data_store_proto_msgTypes[16] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BatchInsertUsersRequest.ProtoReflect.Descriptor instead. 
+func (*BatchInsertUsersRequest) Descriptor() ([]byte, []int) { + return file_data_store_proto_rawDescGZIP(), []int{16} +} + +func (x *BatchInsertUsersRequest) GetUsers() []*User { + if x != nil { + return x.Users + } + return nil +} + +type BatchInsertUsersResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *BatchInsertUsersResponse) Reset() { + *x = BatchInsertUsersResponse{} + mi := &file_data_store_proto_msgTypes[17] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *BatchInsertUsersResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BatchInsertUsersResponse) ProtoMessage() {} + +func (x *BatchInsertUsersResponse) ProtoReflect() protoreflect.Message { + mi := &file_data_store_proto_msgTypes[17] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BatchInsertUsersResponse.ProtoReflect.Descriptor instead. 
+func (*BatchInsertUsersResponse) Descriptor() ([]byte, []int) { + return file_data_store_proto_rawDescGZIP(), []int{17} +} + +type DeleteUserRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + UserId string `protobuf:"bytes,1,opt,name=user_id,json=userId,proto3" json:"user_id,omitempty"` +} + +func (x *DeleteUserRequest) Reset() { + *x = DeleteUserRequest{} + mi := &file_data_store_proto_msgTypes[18] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *DeleteUserRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DeleteUserRequest) ProtoMessage() {} + +func (x *DeleteUserRequest) ProtoReflect() protoreflect.Message { + mi := &file_data_store_proto_msgTypes[18] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DeleteUserRequest.ProtoReflect.Descriptor instead. 
+func (*DeleteUserRequest) Descriptor() ([]byte, []int) { + return file_data_store_proto_rawDescGZIP(), []int{18} +} + +func (x *DeleteUserRequest) GetUserId() string { + if x != nil { + return x.UserId + } + return "" +} + +type DeleteUserResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *DeleteUserResponse) Reset() { + *x = DeleteUserResponse{} + mi := &file_data_store_proto_msgTypes[19] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *DeleteUserResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DeleteUserResponse) ProtoMessage() {} + +func (x *DeleteUserResponse) ProtoReflect() protoreflect.Message { + mi := &file_data_store_proto_msgTypes[19] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DeleteUserResponse.ProtoReflect.Descriptor instead. 
+func (*DeleteUserResponse) Descriptor() ([]byte, []int) { + return file_data_store_proto_rawDescGZIP(), []int{19} +} + +type GetUserRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + UserId string `protobuf:"bytes,1,opt,name=user_id,json=userId,proto3" json:"user_id,omitempty"` +} + +func (x *GetUserRequest) Reset() { + *x = GetUserRequest{} + mi := &file_data_store_proto_msgTypes[20] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GetUserRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetUserRequest) ProtoMessage() {} + +func (x *GetUserRequest) ProtoReflect() protoreflect.Message { + mi := &file_data_store_proto_msgTypes[20] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetUserRequest.ProtoReflect.Descriptor instead. 
+func (*GetUserRequest) Descriptor() ([]byte, []int) { + return file_data_store_proto_rawDescGZIP(), []int{20} +} + +func (x *GetUserRequest) GetUserId() string { + if x != nil { + return x.UserId + } + return "" +} + +type GetUserResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + User *User `protobuf:"bytes,1,opt,name=user,proto3,oneof" json:"user,omitempty"` +} + +func (x *GetUserResponse) Reset() { + *x = GetUserResponse{} + mi := &file_data_store_proto_msgTypes[21] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GetUserResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetUserResponse) ProtoMessage() {} + +func (x *GetUserResponse) ProtoReflect() protoreflect.Message { + mi := &file_data_store_proto_msgTypes[21] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetUserResponse.ProtoReflect.Descriptor instead. 
+func (*GetUserResponse) Descriptor() ([]byte, []int) { + return file_data_store_proto_rawDescGZIP(), []int{21} +} + +func (x *GetUserResponse) GetUser() *User { + if x != nil { + return x.User + } + return nil +} + +type ModifyUserRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + UserId string `protobuf:"bytes,1,opt,name=user_id,json=userId,proto3" json:"user_id,omitempty"` + Patch *UserPatch `protobuf:"bytes,2,opt,name=patch,proto3" json:"patch,omitempty"` +} + +func (x *ModifyUserRequest) Reset() { + *x = ModifyUserRequest{} + mi := &file_data_store_proto_msgTypes[22] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ModifyUserRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModifyUserRequest) ProtoMessage() {} + +func (x *ModifyUserRequest) ProtoReflect() protoreflect.Message { + mi := &file_data_store_proto_msgTypes[22] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModifyUserRequest.ProtoReflect.Descriptor instead. 
+func (*ModifyUserRequest) Descriptor() ([]byte, []int) { + return file_data_store_proto_rawDescGZIP(), []int{22} +} + +func (x *ModifyUserRequest) GetUserId() string { + if x != nil { + return x.UserId + } + return "" +} + +func (x *ModifyUserRequest) GetPatch() *UserPatch { + if x != nil { + return x.Patch + } + return nil +} + +type ModifyUserResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *ModifyUserResponse) Reset() { + *x = ModifyUserResponse{} + mi := &file_data_store_proto_msgTypes[23] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ModifyUserResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ModifyUserResponse) ProtoMessage() {} + +func (x *ModifyUserResponse) ProtoReflect() protoreflect.Message { + mi := &file_data_store_proto_msgTypes[23] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ModifyUserResponse.ProtoReflect.Descriptor instead. 
+func (*ModifyUserResponse) Descriptor() ([]byte, []int) { + return file_data_store_proto_rawDescGZIP(), []int{23} +} + +type GetUsersRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Cursor string `protobuf:"bytes,1,opt,name=cursor,proto3" json:"cursor,omitempty"` + N int32 `protobuf:"varint,2,opt,name=n,proto3" json:"n,omitempty"` +} + +func (x *GetUsersRequest) Reset() { + *x = GetUsersRequest{} + mi := &file_data_store_proto_msgTypes[24] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GetUsersRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetUsersRequest) ProtoMessage() {} + +func (x *GetUsersRequest) ProtoReflect() protoreflect.Message { + mi := &file_data_store_proto_msgTypes[24] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetUsersRequest.ProtoReflect.Descriptor instead. 
+func (*GetUsersRequest) Descriptor() ([]byte, []int) { + return file_data_store_proto_rawDescGZIP(), []int{24} +} + +func (x *GetUsersRequest) GetCursor() string { + if x != nil { + return x.Cursor + } + return "" +} + +func (x *GetUsersRequest) GetN() int32 { + if x != nil { + return x.N + } + return 0 +} + +type GetUsersResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Cursor string `protobuf:"bytes,1,opt,name=cursor,proto3" json:"cursor,omitempty"` + Users []*User `protobuf:"bytes,2,rep,name=users,proto3" json:"users,omitempty"` +} + +func (x *GetUsersResponse) Reset() { + *x = GetUsersResponse{} + mi := &file_data_store_proto_msgTypes[25] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GetUsersResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetUsersResponse) ProtoMessage() {} + +func (x *GetUsersResponse) ProtoReflect() protoreflect.Message { + mi := &file_data_store_proto_msgTypes[25] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetUsersResponse.ProtoReflect.Descriptor instead. 
+func (*GetUsersResponse) Descriptor() ([]byte, []int) { + return file_data_store_proto_rawDescGZIP(), []int{25} +} + +func (x *GetUsersResponse) GetCursor() string { + if x != nil { + return x.Cursor + } + return "" +} + +func (x *GetUsersResponse) GetUsers() []*User { + if x != nil { + return x.Users + } + return nil +} + +type GetUserFeedbackRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + UserId string `protobuf:"bytes,1,opt,name=user_id,json=userId,proto3" json:"user_id,omitempty"` + EndTime *timestamppb.Timestamp `protobuf:"bytes,2,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + FeedbackTypes []string `protobuf:"bytes,3,rep,name=feedback_types,json=feedbackTypes,proto3" json:"feedback_types,omitempty"` +} + +func (x *GetUserFeedbackRequest) Reset() { + *x = GetUserFeedbackRequest{} + mi := &file_data_store_proto_msgTypes[26] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GetUserFeedbackRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetUserFeedbackRequest) ProtoMessage() {} + +func (x *GetUserFeedbackRequest) ProtoReflect() protoreflect.Message { + mi := &file_data_store_proto_msgTypes[26] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetUserFeedbackRequest.ProtoReflect.Descriptor instead. 
+func (*GetUserFeedbackRequest) Descriptor() ([]byte, []int) { + return file_data_store_proto_rawDescGZIP(), []int{26} +} + +func (x *GetUserFeedbackRequest) GetUserId() string { + if x != nil { + return x.UserId + } + return "" +} + +func (x *GetUserFeedbackRequest) GetEndTime() *timestamppb.Timestamp { + if x != nil { + return x.EndTime + } + return nil +} + +func (x *GetUserFeedbackRequest) GetFeedbackTypes() []string { + if x != nil { + return x.FeedbackTypes + } + return nil +} + +type GetUserItemFeedbackRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + UserId string `protobuf:"bytes,1,opt,name=user_id,json=userId,proto3" json:"user_id,omitempty"` + ItemId string `protobuf:"bytes,2,opt,name=item_id,json=itemId,proto3" json:"item_id,omitempty"` + FeedbackTypes []string `protobuf:"bytes,3,rep,name=feedback_types,json=feedbackTypes,proto3" json:"feedback_types,omitempty"` +} + +func (x *GetUserItemFeedbackRequest) Reset() { + *x = GetUserItemFeedbackRequest{} + mi := &file_data_store_proto_msgTypes[27] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GetUserItemFeedbackRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetUserItemFeedbackRequest) ProtoMessage() {} + +func (x *GetUserItemFeedbackRequest) ProtoReflect() protoreflect.Message { + mi := &file_data_store_proto_msgTypes[27] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetUserItemFeedbackRequest.ProtoReflect.Descriptor instead. 
+func (*GetUserItemFeedbackRequest) Descriptor() ([]byte, []int) { + return file_data_store_proto_rawDescGZIP(), []int{27} +} + +func (x *GetUserItemFeedbackRequest) GetUserId() string { + if x != nil { + return x.UserId + } + return "" +} + +func (x *GetUserItemFeedbackRequest) GetItemId() string { + if x != nil { + return x.ItemId + } + return "" +} + +func (x *GetUserItemFeedbackRequest) GetFeedbackTypes() []string { + if x != nil { + return x.FeedbackTypes + } + return nil +} + +type DeleteUserItemFeedbackRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + UserId string `protobuf:"bytes,1,opt,name=user_id,json=userId,proto3" json:"user_id,omitempty"` + ItemId string `protobuf:"bytes,2,opt,name=item_id,json=itemId,proto3" json:"item_id,omitempty"` + FeedbackTypes []string `protobuf:"bytes,3,rep,name=feedback_types,json=feedbackTypes,proto3" json:"feedback_types,omitempty"` +} + +func (x *DeleteUserItemFeedbackRequest) Reset() { + *x = DeleteUserItemFeedbackRequest{} + mi := &file_data_store_proto_msgTypes[28] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *DeleteUserItemFeedbackRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DeleteUserItemFeedbackRequest) ProtoMessage() {} + +func (x *DeleteUserItemFeedbackRequest) ProtoReflect() protoreflect.Message { + mi := &file_data_store_proto_msgTypes[28] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DeleteUserItemFeedbackRequest.ProtoReflect.Descriptor instead. 
+func (*DeleteUserItemFeedbackRequest) Descriptor() ([]byte, []int) { + return file_data_store_proto_rawDescGZIP(), []int{28} +} + +func (x *DeleteUserItemFeedbackRequest) GetUserId() string { + if x != nil { + return x.UserId + } + return "" +} + +func (x *DeleteUserItemFeedbackRequest) GetItemId() string { + if x != nil { + return x.ItemId + } + return "" +} + +func (x *DeleteUserItemFeedbackRequest) GetFeedbackTypes() []string { + if x != nil { + return x.FeedbackTypes + } + return nil +} + +type DeleteUserItemFeedbackResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Count int32 `protobuf:"varint,1,opt,name=count,proto3" json:"count,omitempty"` +} + +func (x *DeleteUserItemFeedbackResponse) Reset() { + *x = DeleteUserItemFeedbackResponse{} + mi := &file_data_store_proto_msgTypes[29] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *DeleteUserItemFeedbackResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DeleteUserItemFeedbackResponse) ProtoMessage() {} + +func (x *DeleteUserItemFeedbackResponse) ProtoReflect() protoreflect.Message { + mi := &file_data_store_proto_msgTypes[29] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DeleteUserItemFeedbackResponse.ProtoReflect.Descriptor instead. 
+func (*DeleteUserItemFeedbackResponse) Descriptor() ([]byte, []int) { + return file_data_store_proto_rawDescGZIP(), []int{29} +} + +func (x *DeleteUserItemFeedbackResponse) GetCount() int32 { + if x != nil { + return x.Count + } + return 0 +} + +type BatchInsertFeedbackRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Feedback []*Feedback `protobuf:"bytes,1,rep,name=feedback,proto3" json:"feedback,omitempty"` + InsertUser bool `protobuf:"varint,2,opt,name=insert_user,json=insertUser,proto3" json:"insert_user,omitempty"` + InsertItem bool `protobuf:"varint,3,opt,name=insert_item,json=insertItem,proto3" json:"insert_item,omitempty"` + Overwrite bool `protobuf:"varint,4,opt,name=overwrite,proto3" json:"overwrite,omitempty"` +} + +func (x *BatchInsertFeedbackRequest) Reset() { + *x = BatchInsertFeedbackRequest{} + mi := &file_data_store_proto_msgTypes[30] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *BatchInsertFeedbackRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BatchInsertFeedbackRequest) ProtoMessage() {} + +func (x *BatchInsertFeedbackRequest) ProtoReflect() protoreflect.Message { + mi := &file_data_store_proto_msgTypes[30] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BatchInsertFeedbackRequest.ProtoReflect.Descriptor instead. 
+func (*BatchInsertFeedbackRequest) Descriptor() ([]byte, []int) { + return file_data_store_proto_rawDescGZIP(), []int{30} +} + +func (x *BatchInsertFeedbackRequest) GetFeedback() []*Feedback { + if x != nil { + return x.Feedback + } + return nil +} + +func (x *BatchInsertFeedbackRequest) GetInsertUser() bool { + if x != nil { + return x.InsertUser + } + return false +} + +func (x *BatchInsertFeedbackRequest) GetInsertItem() bool { + if x != nil { + return x.InsertItem + } + return false +} + +func (x *BatchInsertFeedbackRequest) GetOverwrite() bool { + if x != nil { + return x.Overwrite + } + return false +} + +type BatchInsertFeedbackResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *BatchInsertFeedbackResponse) Reset() { + *x = BatchInsertFeedbackResponse{} + mi := &file_data_store_proto_msgTypes[31] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *BatchInsertFeedbackResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BatchInsertFeedbackResponse) ProtoMessage() {} + +func (x *BatchInsertFeedbackResponse) ProtoReflect() protoreflect.Message { + mi := &file_data_store_proto_msgTypes[31] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BatchInsertFeedbackResponse.ProtoReflect.Descriptor instead. 
+func (*BatchInsertFeedbackResponse) Descriptor() ([]byte, []int) { + return file_data_store_proto_rawDescGZIP(), []int{31} +} + +type GetFeedbackRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Cursor string `protobuf:"bytes,1,opt,name=cursor,proto3" json:"cursor,omitempty"` + N int32 `protobuf:"varint,2,opt,name=n,proto3" json:"n,omitempty"` + BeginTime *timestamppb.Timestamp `protobuf:"bytes,3,opt,name=begin_time,json=beginTime,proto3" json:"begin_time,omitempty"` + EndTime *timestamppb.Timestamp `protobuf:"bytes,4,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + FeedbackTypes []string `protobuf:"bytes,5,rep,name=feedback_types,json=feedbackTypes,proto3" json:"feedback_types,omitempty"` +} + +func (x *GetFeedbackRequest) Reset() { + *x = GetFeedbackRequest{} + mi := &file_data_store_proto_msgTypes[32] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GetFeedbackRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetFeedbackRequest) ProtoMessage() {} + +func (x *GetFeedbackRequest) ProtoReflect() protoreflect.Message { + mi := &file_data_store_proto_msgTypes[32] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetFeedbackRequest.ProtoReflect.Descriptor instead. 
+func (*GetFeedbackRequest) Descriptor() ([]byte, []int) { + return file_data_store_proto_rawDescGZIP(), []int{32} +} + +func (x *GetFeedbackRequest) GetCursor() string { + if x != nil { + return x.Cursor + } + return "" +} + +func (x *GetFeedbackRequest) GetN() int32 { + if x != nil { + return x.N + } + return 0 +} + +func (x *GetFeedbackRequest) GetBeginTime() *timestamppb.Timestamp { + if x != nil { + return x.BeginTime + } + return nil +} + +func (x *GetFeedbackRequest) GetEndTime() *timestamppb.Timestamp { + if x != nil { + return x.EndTime + } + return nil +} + +func (x *GetFeedbackRequest) GetFeedbackTypes() []string { + if x != nil { + return x.FeedbackTypes + } + return nil +} + +type GetFeedbackResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Cursor string `protobuf:"bytes,1,opt,name=cursor,proto3" json:"cursor,omitempty"` + Feedback []*Feedback `protobuf:"bytes,2,rep,name=feedback,proto3" json:"feedback,omitempty"` +} + +func (x *GetFeedbackResponse) Reset() { + *x = GetFeedbackResponse{} + mi := &file_data_store_proto_msgTypes[33] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GetFeedbackResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetFeedbackResponse) ProtoMessage() {} + +func (x *GetFeedbackResponse) ProtoReflect() protoreflect.Message { + mi := &file_data_store_proto_msgTypes[33] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetFeedbackResponse.ProtoReflect.Descriptor instead. 
+func (*GetFeedbackResponse) Descriptor() ([]byte, []int) { + return file_data_store_proto_rawDescGZIP(), []int{33} +} + +func (x *GetFeedbackResponse) GetCursor() string { + if x != nil { + return x.Cursor + } + return "" +} + +func (x *GetFeedbackResponse) GetFeedback() []*Feedback { + if x != nil { + return x.Feedback + } + return nil +} + +type GetUserStreamRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + BatchSize int32 `protobuf:"varint,1,opt,name=batch_size,json=batchSize,proto3" json:"batch_size,omitempty"` +} + +func (x *GetUserStreamRequest) Reset() { + *x = GetUserStreamRequest{} + mi := &file_data_store_proto_msgTypes[34] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GetUserStreamRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetUserStreamRequest) ProtoMessage() {} + +func (x *GetUserStreamRequest) ProtoReflect() protoreflect.Message { + mi := &file_data_store_proto_msgTypes[34] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetUserStreamRequest.ProtoReflect.Descriptor instead. 
+func (*GetUserStreamRequest) Descriptor() ([]byte, []int) { + return file_data_store_proto_rawDescGZIP(), []int{34} +} + +func (x *GetUserStreamRequest) GetBatchSize() int32 { + if x != nil { + return x.BatchSize + } + return 0 +} + +type GetUserStreamResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Users []*User `protobuf:"bytes,1,rep,name=users,proto3" json:"users,omitempty"` +} + +func (x *GetUserStreamResponse) Reset() { + *x = GetUserStreamResponse{} + mi := &file_data_store_proto_msgTypes[35] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GetUserStreamResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetUserStreamResponse) ProtoMessage() {} + +func (x *GetUserStreamResponse) ProtoReflect() protoreflect.Message { + mi := &file_data_store_proto_msgTypes[35] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetUserStreamResponse.ProtoReflect.Descriptor instead. 
+func (*GetUserStreamResponse) Descriptor() ([]byte, []int) { + return file_data_store_proto_rawDescGZIP(), []int{35} +} + +func (x *GetUserStreamResponse) GetUsers() []*User { + if x != nil { + return x.Users + } + return nil +} + +type GetItemStreamRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + BatchSize int32 `protobuf:"varint,1,opt,name=batch_size,json=batchSize,proto3" json:"batch_size,omitempty"` + TimeLimit *timestamppb.Timestamp `protobuf:"bytes,2,opt,name=time_limit,json=timeLimit,proto3" json:"time_limit,omitempty"` +} + +func (x *GetItemStreamRequest) Reset() { + *x = GetItemStreamRequest{} + mi := &file_data_store_proto_msgTypes[36] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GetItemStreamRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetItemStreamRequest) ProtoMessage() {} + +func (x *GetItemStreamRequest) ProtoReflect() protoreflect.Message { + mi := &file_data_store_proto_msgTypes[36] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetItemStreamRequest.ProtoReflect.Descriptor instead. 
+func (*GetItemStreamRequest) Descriptor() ([]byte, []int) { + return file_data_store_proto_rawDescGZIP(), []int{36} +} + +func (x *GetItemStreamRequest) GetBatchSize() int32 { + if x != nil { + return x.BatchSize + } + return 0 +} + +func (x *GetItemStreamRequest) GetTimeLimit() *timestamppb.Timestamp { + if x != nil { + return x.TimeLimit + } + return nil +} + +type GetItemStreamResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Items []*Item `protobuf:"bytes,1,rep,name=items,proto3" json:"items,omitempty"` +} + +func (x *GetItemStreamResponse) Reset() { + *x = GetItemStreamResponse{} + mi := &file_data_store_proto_msgTypes[37] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GetItemStreamResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetItemStreamResponse) ProtoMessage() {} + +func (x *GetItemStreamResponse) ProtoReflect() protoreflect.Message { + mi := &file_data_store_proto_msgTypes[37] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetItemStreamResponse.ProtoReflect.Descriptor instead. 
+func (*GetItemStreamResponse) Descriptor() ([]byte, []int) { + return file_data_store_proto_rawDescGZIP(), []int{37} +} + +func (x *GetItemStreamResponse) GetItems() []*Item { + if x != nil { + return x.Items + } + return nil +} + +type GetFeedbackStreamRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + BatchSize int32 `protobuf:"varint,1,opt,name=batch_size,json=batchSize,proto3" json:"batch_size,omitempty"` + ScanOptions *ScanOptions `protobuf:"bytes,2,opt,name=scan_options,json=scanOptions,proto3" json:"scan_options,omitempty"` +} + +func (x *GetFeedbackStreamRequest) Reset() { + *x = GetFeedbackStreamRequest{} + mi := &file_data_store_proto_msgTypes[38] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GetFeedbackStreamRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetFeedbackStreamRequest) ProtoMessage() {} + +func (x *GetFeedbackStreamRequest) ProtoReflect() protoreflect.Message { + mi := &file_data_store_proto_msgTypes[38] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetFeedbackStreamRequest.ProtoReflect.Descriptor instead. 
+func (*GetFeedbackStreamRequest) Descriptor() ([]byte, []int) { + return file_data_store_proto_rawDescGZIP(), []int{38} +} + +func (x *GetFeedbackStreamRequest) GetBatchSize() int32 { + if x != nil { + return x.BatchSize + } + return 0 +} + +func (x *GetFeedbackStreamRequest) GetScanOptions() *ScanOptions { + if x != nil { + return x.ScanOptions + } + return nil +} + +type GetFeedbackStreamResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Feedback []*Feedback `protobuf:"bytes,1,rep,name=feedback,proto3" json:"feedback,omitempty"` +} + +func (x *GetFeedbackStreamResponse) Reset() { + *x = GetFeedbackStreamResponse{} + mi := &file_data_store_proto_msgTypes[39] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GetFeedbackStreamResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetFeedbackStreamResponse) ProtoMessage() {} + +func (x *GetFeedbackStreamResponse) ProtoReflect() protoreflect.Message { + mi := &file_data_store_proto_msgTypes[39] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetFeedbackStreamResponse.ProtoReflect.Descriptor instead. 
+func (*GetFeedbackStreamResponse) Descriptor() ([]byte, []int) { + return file_data_store_proto_rawDescGZIP(), []int{39} +} + +func (x *GetFeedbackStreamResponse) GetFeedback() []*Feedback { + if x != nil { + return x.Feedback + } + return nil +} + +var File_data_store_proto protoreflect.FileDescriptor + +var file_data_store_proto_rawDesc = []byte{ + 0x0a, 0x10, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x12, 0x08, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x1a, 0x1f, 0x67, 0x6f, + 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x69, + 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x0e, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x6c, 0x0a, + 0x09, 0x55, 0x73, 0x65, 0x72, 0x50, 0x61, 0x74, 0x63, 0x68, 0x12, 0x16, 0x0a, 0x06, 0x6c, 0x61, + 0x62, 0x65, 0x6c, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x06, 0x6c, 0x61, 0x62, 0x65, + 0x6c, 0x73, 0x12, 0x1d, 0x0a, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x88, 0x01, + 0x01, 0x12, 0x1c, 0x0a, 0x09, 0x73, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x18, 0x03, + 0x20, 0x03, 0x28, 0x09, 0x52, 0x09, 0x73, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x42, + 0x0a, 0x0a, 0x08, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x22, 0xeb, 0x01, 0x0a, 0x09, + 0x49, 0x74, 0x65, 0x6d, 0x50, 0x61, 0x74, 0x63, 0x68, 0x12, 0x20, 0x0a, 0x09, 0x69, 0x73, 0x5f, + 0x68, 0x69, 0x64, 0x64, 0x65, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x08, + 0x69, 0x73, 0x48, 0x69, 0x64, 0x64, 0x65, 0x6e, 0x88, 0x01, 0x01, 0x12, 0x1e, 0x0a, 0x0a, 0x63, + 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x69, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, + 0x0a, 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x69, 0x65, 0x73, 0x12, 0x3d, 
0x0a, 0x09, 0x74, + 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, + 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, + 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x48, 0x01, 0x52, 0x09, 0x74, 0x69, + 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x88, 0x01, 0x01, 0x12, 0x16, 0x0a, 0x06, 0x6c, 0x61, + 0x62, 0x65, 0x6c, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x06, 0x6c, 0x61, 0x62, 0x65, + 0x6c, 0x73, 0x12, 0x1d, 0x0a, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x18, 0x05, 0x20, + 0x01, 0x28, 0x09, 0x48, 0x02, 0x52, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x88, 0x01, + 0x01, 0x42, 0x0c, 0x0a, 0x0a, 0x5f, 0x69, 0x73, 0x5f, 0x68, 0x69, 0x64, 0x64, 0x65, 0x6e, 0x42, + 0x0c, 0x0a, 0x0a, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x42, 0x0a, 0x0a, + 0x08, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x22, 0xbc, 0x02, 0x0a, 0x0b, 0x53, 0x63, + 0x61, 0x6e, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x27, 0x0a, 0x0d, 0x62, 0x65, 0x67, + 0x69, 0x6e, 0x5f, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x48, 0x00, 0x52, 0x0b, 0x62, 0x65, 0x67, 0x69, 0x6e, 0x55, 0x73, 0x65, 0x72, 0x49, 0x64, 0x88, + 0x01, 0x01, 0x12, 0x23, 0x0a, 0x0b, 0x65, 0x6e, 0x64, 0x5f, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x69, + 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x09, 0x65, 0x6e, 0x64, 0x55, 0x73, + 0x65, 0x72, 0x49, 0x64, 0x88, 0x01, 0x01, 0x12, 0x3e, 0x0a, 0x0a, 0x62, 0x65, 0x67, 0x69, 0x6e, + 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, + 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, + 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x48, 0x02, 0x52, 0x09, 0x62, 0x65, 0x67, 0x69, 0x6e, + 0x54, 0x69, 0x6d, 0x65, 0x88, 0x01, 0x01, 0x12, 0x3a, 0x0a, 0x08, 0x65, 0x6e, 0x64, 0x5f, 0x74, + 0x69, 0x6d, 0x65, 
0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, + 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, + 0x73, 0x74, 0x61, 0x6d, 0x70, 0x48, 0x03, 0x52, 0x07, 0x65, 0x6e, 0x64, 0x54, 0x69, 0x6d, 0x65, + 0x88, 0x01, 0x01, 0x12, 0x25, 0x0a, 0x0e, 0x66, 0x65, 0x65, 0x64, 0x62, 0x61, 0x63, 0x6b, 0x5f, + 0x74, 0x79, 0x70, 0x65, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0d, 0x66, 0x65, 0x65, + 0x64, 0x62, 0x61, 0x63, 0x6b, 0x54, 0x79, 0x70, 0x65, 0x73, 0x42, 0x10, 0x0a, 0x0e, 0x5f, 0x62, + 0x65, 0x67, 0x69, 0x6e, 0x5f, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x42, 0x0e, 0x0a, 0x0c, + 0x5f, 0x65, 0x6e, 0x64, 0x5f, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x42, 0x0d, 0x0a, 0x0b, + 0x5f, 0x62, 0x65, 0x67, 0x69, 0x6e, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x42, 0x0b, 0x0a, 0x09, 0x5f, + 0x65, 0x6e, 0x64, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x22, 0x3f, 0x0a, 0x17, 0x42, 0x61, 0x74, 0x63, + 0x68, 0x49, 0x6e, 0x73, 0x65, 0x72, 0x74, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x12, 0x24, 0x0a, 0x05, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x18, 0x01, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x49, 0x74, + 0x65, 0x6d, 0x52, 0x05, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x22, 0x1a, 0x0a, 0x18, 0x42, 0x61, 0x74, + 0x63, 0x68, 0x49, 0x6e, 0x73, 0x65, 0x72, 0x74, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x31, 0x0a, 0x14, 0x42, 0x61, 0x74, 0x63, 0x68, 0x47, 0x65, + 0x74, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x19, 0x0a, + 0x08, 0x69, 0x74, 0x65, 0x6d, 0x5f, 0x69, 0x64, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, + 0x07, 0x69, 0x74, 0x65, 0x6d, 0x49, 0x64, 0x73, 0x22, 0x3d, 0x0a, 0x15, 0x42, 0x61, 0x74, 0x63, + 0x68, 0x47, 0x65, 0x74, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x12, 0x24, 0x0a, 0x05, 0x69, 0x74, 0x65, 0x6d, 
0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x0e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x49, 0x74, 0x65, 0x6d, + 0x52, 0x05, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x22, 0x2c, 0x0a, 0x11, 0x44, 0x65, 0x6c, 0x65, 0x74, + 0x65, 0x49, 0x74, 0x65, 0x6d, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x17, 0x0a, 0x07, + 0x69, 0x74, 0x65, 0x6d, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x69, + 0x74, 0x65, 0x6d, 0x49, 0x64, 0x22, 0x14, 0x0a, 0x12, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x49, + 0x74, 0x65, 0x6d, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x29, 0x0a, 0x0e, 0x47, + 0x65, 0x74, 0x49, 0x74, 0x65, 0x6d, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x17, 0x0a, + 0x07, 0x69, 0x74, 0x65, 0x6d, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, + 0x69, 0x74, 0x65, 0x6d, 0x49, 0x64, 0x22, 0x43, 0x0a, 0x0f, 0x47, 0x65, 0x74, 0x49, 0x74, 0x65, + 0x6d, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x27, 0x0a, 0x04, 0x69, 0x74, 0x65, + 0x6d, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, + 0x6f, 0x6c, 0x2e, 0x49, 0x74, 0x65, 0x6d, 0x48, 0x00, 0x52, 0x04, 0x69, 0x74, 0x65, 0x6d, 0x88, + 0x01, 0x01, 0x42, 0x07, 0x0a, 0x05, 0x5f, 0x69, 0x74, 0x65, 0x6d, 0x22, 0x57, 0x0a, 0x11, 0x4d, + 0x6f, 0x64, 0x69, 0x66, 0x79, 0x49, 0x74, 0x65, 0x6d, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x12, 0x17, 0x0a, 0x07, 0x69, 0x74, 0x65, 0x6d, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x06, 0x69, 0x74, 0x65, 0x6d, 0x49, 0x64, 0x12, 0x29, 0x0a, 0x05, 0x70, 0x61, 0x74, + 0x63, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x63, 0x6f, 0x6c, 0x2e, 0x49, 0x74, 0x65, 0x6d, 0x50, 0x61, 0x74, 0x63, 0x68, 0x52, 0x05, 0x70, + 0x61, 0x74, 0x63, 0x68, 0x22, 0x14, 0x0a, 0x12, 0x4d, 0x6f, 0x64, 0x69, 0x66, 0x79, 0x49, 0x74, + 0x65, 0x6d, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x72, 0x0a, 0x0f, 0x47, 
0x65, + 0x74, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x16, 0x0a, + 0x06, 0x63, 0x75, 0x72, 0x73, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x63, + 0x75, 0x72, 0x73, 0x6f, 0x72, 0x12, 0x0c, 0x0a, 0x01, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, + 0x52, 0x01, 0x6e, 0x12, 0x39, 0x0a, 0x0a, 0x62, 0x65, 0x67, 0x69, 0x6e, 0x5f, 0x74, 0x69, 0x6d, + 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, + 0x61, 0x6d, 0x70, 0x52, 0x09, 0x62, 0x65, 0x67, 0x69, 0x6e, 0x54, 0x69, 0x6d, 0x65, 0x22, 0x50, + 0x0a, 0x10, 0x47, 0x65, 0x74, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x63, 0x75, 0x72, 0x73, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x06, 0x63, 0x75, 0x72, 0x73, 0x6f, 0x72, 0x12, 0x24, 0x0a, 0x05, 0x69, 0x74, + 0x65, 0x6d, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x49, 0x74, 0x65, 0x6d, 0x52, 0x05, 0x69, 0x74, 0x65, 0x6d, 0x73, + 0x22, 0x58, 0x0a, 0x16, 0x47, 0x65, 0x74, 0x49, 0x74, 0x65, 0x6d, 0x46, 0x65, 0x65, 0x64, 0x62, + 0x61, 0x63, 0x6b, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x17, 0x0a, 0x07, 0x69, 0x74, + 0x65, 0x6d, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x69, 0x74, 0x65, + 0x6d, 0x49, 0x64, 0x12, 0x25, 0x0a, 0x0e, 0x66, 0x65, 0x65, 0x64, 0x62, 0x61, 0x63, 0x6b, 0x5f, + 0x74, 0x79, 0x70, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0d, 0x66, 0x65, 0x65, + 0x64, 0x62, 0x61, 0x63, 0x6b, 0x54, 0x79, 0x70, 0x65, 0x73, 0x22, 0x3f, 0x0a, 0x17, 0x42, 0x61, + 0x74, 0x63, 0x68, 0x49, 0x6e, 0x73, 0x65, 0x72, 0x74, 0x55, 0x73, 0x65, 0x72, 0x73, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x24, 0x0a, 0x05, 0x75, 0x73, 0x65, 0x72, 0x73, 0x18, 0x01, + 0x20, 0x03, 0x28, 0x0b, 0x32, 
0x0e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, + 0x55, 0x73, 0x65, 0x72, 0x52, 0x05, 0x75, 0x73, 0x65, 0x72, 0x73, 0x22, 0x1a, 0x0a, 0x18, 0x42, + 0x61, 0x74, 0x63, 0x68, 0x49, 0x6e, 0x73, 0x65, 0x72, 0x74, 0x55, 0x73, 0x65, 0x72, 0x73, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x2c, 0x0a, 0x11, 0x44, 0x65, 0x6c, 0x65, 0x74, + 0x65, 0x55, 0x73, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x17, 0x0a, 0x07, + 0x75, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x75, + 0x73, 0x65, 0x72, 0x49, 0x64, 0x22, 0x14, 0x0a, 0x12, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x55, + 0x73, 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x29, 0x0a, 0x0e, 0x47, + 0x65, 0x74, 0x55, 0x73, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x17, 0x0a, + 0x07, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, + 0x75, 0x73, 0x65, 0x72, 0x49, 0x64, 0x22, 0x43, 0x0a, 0x0f, 0x47, 0x65, 0x74, 0x55, 0x73, 0x65, + 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x27, 0x0a, 0x04, 0x75, 0x73, 0x65, + 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, + 0x6f, 0x6c, 0x2e, 0x55, 0x73, 0x65, 0x72, 0x48, 0x00, 0x52, 0x04, 0x75, 0x73, 0x65, 0x72, 0x88, + 0x01, 0x01, 0x42, 0x07, 0x0a, 0x05, 0x5f, 0x75, 0x73, 0x65, 0x72, 0x22, 0x57, 0x0a, 0x11, 0x4d, + 0x6f, 0x64, 0x69, 0x66, 0x79, 0x55, 0x73, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x12, 0x17, 0x0a, 0x07, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x06, 0x75, 0x73, 0x65, 0x72, 0x49, 0x64, 0x12, 0x29, 0x0a, 0x05, 0x70, 0x61, 0x74, + 0x63, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x63, 0x6f, 0x6c, 0x2e, 0x55, 0x73, 0x65, 0x72, 0x50, 0x61, 0x74, 0x63, 0x68, 0x52, 0x05, 0x70, + 0x61, 0x74, 0x63, 0x68, 0x22, 0x14, 0x0a, 0x12, 0x4d, 0x6f, 0x64, 
0x69, 0x66, 0x79, 0x55, 0x73, + 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x37, 0x0a, 0x0f, 0x47, 0x65, + 0x74, 0x55, 0x73, 0x65, 0x72, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x16, 0x0a, + 0x06, 0x63, 0x75, 0x72, 0x73, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x63, + 0x75, 0x72, 0x73, 0x6f, 0x72, 0x12, 0x0c, 0x0a, 0x01, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, + 0x52, 0x01, 0x6e, 0x22, 0x50, 0x0a, 0x10, 0x47, 0x65, 0x74, 0x55, 0x73, 0x65, 0x72, 0x73, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x63, 0x75, 0x72, 0x73, 0x6f, + 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x63, 0x75, 0x72, 0x73, 0x6f, 0x72, 0x12, + 0x24, 0x0a, 0x05, 0x75, 0x73, 0x65, 0x72, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0e, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x55, 0x73, 0x65, 0x72, 0x52, 0x05, + 0x75, 0x73, 0x65, 0x72, 0x73, 0x22, 0x8f, 0x01, 0x0a, 0x16, 0x47, 0x65, 0x74, 0x55, 0x73, 0x65, + 0x72, 0x46, 0x65, 0x65, 0x64, 0x62, 0x61, 0x63, 0x6b, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x12, 0x17, 0x0a, 0x07, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x06, 0x75, 0x73, 0x65, 0x72, 0x49, 0x64, 0x12, 0x35, 0x0a, 0x08, 0x65, 0x6e, 0x64, + 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, + 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, + 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x07, 0x65, 0x6e, 0x64, 0x54, 0x69, 0x6d, 0x65, + 0x12, 0x25, 0x0a, 0x0e, 0x66, 0x65, 0x65, 0x64, 0x62, 0x61, 0x63, 0x6b, 0x5f, 0x74, 0x79, 0x70, + 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0d, 0x66, 0x65, 0x65, 0x64, 0x62, 0x61, + 0x63, 0x6b, 0x54, 0x79, 0x70, 0x65, 0x73, 0x22, 0x75, 0x0a, 0x1a, 0x47, 0x65, 0x74, 0x55, 0x73, + 0x65, 0x72, 0x49, 0x74, 0x65, 0x6d, 0x46, 0x65, 0x65, 0x64, 0x62, 0x61, 0x63, 0x6b, 0x52, 0x65, + 0x71, 
0x75, 0x65, 0x73, 0x74, 0x12, 0x17, 0x0a, 0x07, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x64, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x75, 0x73, 0x65, 0x72, 0x49, 0x64, 0x12, 0x17, + 0x0a, 0x07, 0x69, 0x74, 0x65, 0x6d, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x06, 0x69, 0x74, 0x65, 0x6d, 0x49, 0x64, 0x12, 0x25, 0x0a, 0x0e, 0x66, 0x65, 0x65, 0x64, 0x62, + 0x61, 0x63, 0x6b, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, + 0x0d, 0x66, 0x65, 0x65, 0x64, 0x62, 0x61, 0x63, 0x6b, 0x54, 0x79, 0x70, 0x65, 0x73, 0x22, 0x78, + 0x0a, 0x1d, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x55, 0x73, 0x65, 0x72, 0x49, 0x74, 0x65, 0x6d, + 0x46, 0x65, 0x65, 0x64, 0x62, 0x61, 0x63, 0x6b, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, + 0x17, 0x0a, 0x07, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x06, 0x75, 0x73, 0x65, 0x72, 0x49, 0x64, 0x12, 0x17, 0x0a, 0x07, 0x69, 0x74, 0x65, 0x6d, + 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x69, 0x74, 0x65, 0x6d, 0x49, + 0x64, 0x12, 0x25, 0x0a, 0x0e, 0x66, 0x65, 0x65, 0x64, 0x62, 0x61, 0x63, 0x6b, 0x5f, 0x74, 0x79, + 0x70, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0d, 0x66, 0x65, 0x65, 0x64, 0x62, + 0x61, 0x63, 0x6b, 0x54, 0x79, 0x70, 0x65, 0x73, 0x22, 0x36, 0x0a, 0x1e, 0x44, 0x65, 0x6c, 0x65, + 0x74, 0x65, 0x55, 0x73, 0x65, 0x72, 0x49, 0x74, 0x65, 0x6d, 0x46, 0x65, 0x65, 0x64, 0x62, 0x61, + 0x63, 0x6b, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x63, 0x6f, + 0x75, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, + 0x22, 0xac, 0x01, 0x0a, 0x1a, 0x42, 0x61, 0x74, 0x63, 0x68, 0x49, 0x6e, 0x73, 0x65, 0x72, 0x74, + 0x46, 0x65, 0x65, 0x64, 0x62, 0x61, 0x63, 0x6b, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, + 0x2e, 0x0a, 0x08, 0x66, 0x65, 0x65, 0x64, 0x62, 0x61, 0x63, 0x6b, 0x18, 0x01, 0x20, 0x03, 0x28, + 0x0b, 0x32, 0x12, 0x2e, 0x70, 0x72, 0x6f, 
0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x46, 0x65, 0x65, + 0x64, 0x62, 0x61, 0x63, 0x6b, 0x52, 0x08, 0x66, 0x65, 0x65, 0x64, 0x62, 0x61, 0x63, 0x6b, 0x12, + 0x1f, 0x0a, 0x0b, 0x69, 0x6e, 0x73, 0x65, 0x72, 0x74, 0x5f, 0x75, 0x73, 0x65, 0x72, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, 0x69, 0x6e, 0x73, 0x65, 0x72, 0x74, 0x55, 0x73, 0x65, 0x72, + 0x12, 0x1f, 0x0a, 0x0b, 0x69, 0x6e, 0x73, 0x65, 0x72, 0x74, 0x5f, 0x69, 0x74, 0x65, 0x6d, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, 0x69, 0x6e, 0x73, 0x65, 0x72, 0x74, 0x49, 0x74, 0x65, + 0x6d, 0x12, 0x1c, 0x0a, 0x09, 0x6f, 0x76, 0x65, 0x72, 0x77, 0x72, 0x69, 0x74, 0x65, 0x18, 0x04, + 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x6f, 0x76, 0x65, 0x72, 0x77, 0x72, 0x69, 0x74, 0x65, 0x22, + 0x1d, 0x0a, 0x1b, 0x42, 0x61, 0x74, 0x63, 0x68, 0x49, 0x6e, 0x73, 0x65, 0x72, 0x74, 0x46, 0x65, + 0x65, 0x64, 0x62, 0x61, 0x63, 0x6b, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0xd3, + 0x01, 0x0a, 0x12, 0x47, 0x65, 0x74, 0x46, 0x65, 0x65, 0x64, 0x62, 0x61, 0x63, 0x6b, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x16, 0x0a, 0x06, 0x63, 0x75, 0x72, 0x73, 0x6f, 0x72, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x63, 0x75, 0x72, 0x73, 0x6f, 0x72, 0x12, 0x0c, 0x0a, + 0x01, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x01, 0x6e, 0x12, 0x39, 0x0a, 0x0a, 0x62, + 0x65, 0x67, 0x69, 0x6e, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, + 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x62, 0x65, 0x67, + 0x69, 0x6e, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x35, 0x0a, 0x08, 0x65, 0x6e, 0x64, 0x5f, 0x74, 0x69, + 0x6d, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, + 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, + 0x74, 0x61, 0x6d, 0x70, 0x52, 0x07, 0x65, 0x6e, 0x64, 0x54, 0x69, 0x6d, 0x65, 
0x12, 0x25, 0x0a, + 0x0e, 0x66, 0x65, 0x65, 0x64, 0x62, 0x61, 0x63, 0x6b, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x73, 0x18, + 0x05, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0d, 0x66, 0x65, 0x65, 0x64, 0x62, 0x61, 0x63, 0x6b, 0x54, + 0x79, 0x70, 0x65, 0x73, 0x22, 0x5d, 0x0a, 0x13, 0x47, 0x65, 0x74, 0x46, 0x65, 0x65, 0x64, 0x62, + 0x61, 0x63, 0x6b, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x63, + 0x75, 0x72, 0x73, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x63, 0x75, 0x72, + 0x73, 0x6f, 0x72, 0x12, 0x2e, 0x0a, 0x08, 0x66, 0x65, 0x65, 0x64, 0x62, 0x61, 0x63, 0x6b, 0x18, + 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, + 0x2e, 0x46, 0x65, 0x65, 0x64, 0x62, 0x61, 0x63, 0x6b, 0x52, 0x08, 0x66, 0x65, 0x65, 0x64, 0x62, + 0x61, 0x63, 0x6b, 0x22, 0x35, 0x0a, 0x14, 0x47, 0x65, 0x74, 0x55, 0x73, 0x65, 0x72, 0x53, 0x74, + 0x72, 0x65, 0x61, 0x6d, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x62, + 0x61, 0x74, 0x63, 0x68, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, + 0x09, 0x62, 0x61, 0x74, 0x63, 0x68, 0x53, 0x69, 0x7a, 0x65, 0x22, 0x3d, 0x0a, 0x15, 0x47, 0x65, + 0x74, 0x55, 0x73, 0x65, 0x72, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x12, 0x24, 0x0a, 0x05, 0x75, 0x73, 0x65, 0x72, 0x73, 0x18, 0x01, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x55, 0x73, + 0x65, 0x72, 0x52, 0x05, 0x75, 0x73, 0x65, 0x72, 0x73, 0x22, 0x70, 0x0a, 0x14, 0x47, 0x65, 0x74, + 0x49, 0x74, 0x65, 0x6d, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x62, 0x61, 0x74, 0x63, 0x68, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x62, 0x61, 0x74, 0x63, 0x68, 0x53, 0x69, 0x7a, 0x65, + 0x12, 0x39, 0x0a, 0x0a, 0x74, 0x69, 0x6d, 0x65, 0x5f, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, 0x02, + 0x20, 0x01, 0x28, 
0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, + 0x52, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x22, 0x3d, 0x0a, 0x15, 0x47, + 0x65, 0x74, 0x49, 0x74, 0x65, 0x6d, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x24, 0x0a, 0x05, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x18, 0x01, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x49, + 0x74, 0x65, 0x6d, 0x52, 0x05, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x22, 0x73, 0x0a, 0x18, 0x47, 0x65, + 0x74, 0x46, 0x65, 0x65, 0x64, 0x62, 0x61, 0x63, 0x6b, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x62, 0x61, 0x74, 0x63, 0x68, 0x5f, + 0x73, 0x69, 0x7a, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x62, 0x61, 0x74, 0x63, + 0x68, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x38, 0x0a, 0x0c, 0x73, 0x63, 0x61, 0x6e, 0x5f, 0x6f, 0x70, + 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x53, 0x63, 0x61, 0x6e, 0x4f, 0x70, 0x74, 0x69, 0x6f, + 0x6e, 0x73, 0x52, 0x0b, 0x73, 0x63, 0x61, 0x6e, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x22, + 0x4b, 0x0a, 0x19, 0x47, 0x65, 0x74, 0x46, 0x65, 0x65, 0x64, 0x62, 0x61, 0x63, 0x6b, 0x53, 0x74, + 0x72, 0x65, 0x61, 0x6d, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2e, 0x0a, 0x08, + 0x66, 0x65, 0x65, 0x64, 0x62, 0x61, 0x63, 0x6b, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x12, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x46, 0x65, 0x65, 0x64, 0x62, 0x61, + 0x63, 0x6b, 0x52, 0x08, 0x66, 0x65, 0x65, 0x64, 0x62, 0x61, 0x63, 0x6b, 0x32, 0xc7, 0x0d, 0x0a, + 0x09, 0x44, 0x61, 0x74, 0x61, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x12, 0x37, 0x0a, 0x04, 0x50, 0x69, + 0x6e, 0x67, 0x12, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x74, 
0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x50, 0x69, + 0x6e, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x50, 0x69, 0x6e, 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x22, 0x00, 0x12, 0x5b, 0x0a, 0x10, 0x42, 0x61, 0x74, 0x63, 0x68, 0x49, 0x6e, 0x73, 0x65, + 0x72, 0x74, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x12, 0x21, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, + 0x6f, 0x6c, 0x2e, 0x42, 0x61, 0x74, 0x63, 0x68, 0x49, 0x6e, 0x73, 0x65, 0x72, 0x74, 0x49, 0x74, + 0x65, 0x6d, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x22, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x42, 0x61, 0x74, 0x63, 0x68, 0x49, 0x6e, 0x73, 0x65, 0x72, + 0x74, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, + 0x12, 0x52, 0x0a, 0x0d, 0x42, 0x61, 0x74, 0x63, 0x68, 0x47, 0x65, 0x74, 0x49, 0x74, 0x65, 0x6d, + 0x73, 0x12, 0x1e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x42, 0x61, 0x74, + 0x63, 0x68, 0x47, 0x65, 0x74, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x1a, 0x1f, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x42, 0x61, 0x74, + 0x63, 0x68, 0x47, 0x65, 0x74, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x22, 0x00, 0x12, 0x49, 0x0a, 0x0a, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x49, 0x74, + 0x65, 0x6d, 0x12, 0x1b, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x44, 0x65, + 0x6c, 0x65, 0x74, 0x65, 0x49, 0x74, 0x65, 0x6d, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, + 0x1c, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, + 0x65, 0x49, 0x74, 0x65, 0x6d, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, + 0x40, 0x0a, 0x07, 0x47, 0x65, 0x74, 0x49, 0x74, 0x65, 0x6d, 0x12, 0x18, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x47, 0x65, 0x74, 0x49, 0x74, 0x65, 0x6d, 0x52, 0x65, 
0x71, + 0x75, 0x65, 0x73, 0x74, 0x1a, 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, + 0x47, 0x65, 0x74, 0x49, 0x74, 0x65, 0x6d, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, + 0x00, 0x12, 0x49, 0x0a, 0x0a, 0x4d, 0x6f, 0x64, 0x69, 0x66, 0x79, 0x49, 0x74, 0x65, 0x6d, 0x12, + 0x1b, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x4d, 0x6f, 0x64, 0x69, 0x66, + 0x79, 0x49, 0x74, 0x65, 0x6d, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1c, 0x2e, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x4d, 0x6f, 0x64, 0x69, 0x66, 0x79, 0x49, 0x74, + 0x65, 0x6d, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x43, 0x0a, 0x08, + 0x47, 0x65, 0x74, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x12, 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x63, 0x6f, 0x6c, 0x2e, 0x47, 0x65, 0x74, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x1a, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x47, + 0x65, 0x74, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, + 0x00, 0x12, 0x54, 0x0a, 0x0f, 0x47, 0x65, 0x74, 0x49, 0x74, 0x65, 0x6d, 0x46, 0x65, 0x65, 0x64, + 0x62, 0x61, 0x63, 0x6b, 0x12, 0x20, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, + 0x47, 0x65, 0x74, 0x49, 0x74, 0x65, 0x6d, 0x46, 0x65, 0x65, 0x64, 0x62, 0x61, 0x63, 0x6b, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1d, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, + 0x6c, 0x2e, 0x47, 0x65, 0x74, 0x46, 0x65, 0x65, 0x64, 0x62, 0x61, 0x63, 0x6b, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x5b, 0x0a, 0x10, 0x42, 0x61, 0x74, 0x63, 0x68, + 0x49, 0x6e, 0x73, 0x65, 0x72, 0x74, 0x55, 0x73, 0x65, 0x72, 0x73, 0x12, 0x21, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x42, 0x61, 0x74, 0x63, 0x68, 0x49, 0x6e, 0x73, 0x65, + 0x72, 0x74, 0x55, 0x73, 0x65, 0x72, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x22, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 
0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x42, 0x61, 0x74, 0x63, 0x68, 0x49, + 0x6e, 0x73, 0x65, 0x72, 0x74, 0x55, 0x73, 0x65, 0x72, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x22, 0x00, 0x12, 0x49, 0x0a, 0x0a, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x55, 0x73, + 0x65, 0x72, 0x12, 0x1b, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x44, 0x65, + 0x6c, 0x65, 0x74, 0x65, 0x55, 0x73, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, + 0x1c, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, + 0x65, 0x55, 0x73, 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, + 0x40, 0x0a, 0x07, 0x47, 0x65, 0x74, 0x55, 0x73, 0x65, 0x72, 0x12, 0x18, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x47, 0x65, 0x74, 0x55, 0x73, 0x65, 0x72, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x1a, 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, + 0x47, 0x65, 0x74, 0x55, 0x73, 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, + 0x00, 0x12, 0x49, 0x0a, 0x0a, 0x4d, 0x6f, 0x64, 0x69, 0x66, 0x79, 0x55, 0x73, 0x65, 0x72, 0x12, + 0x1b, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x4d, 0x6f, 0x64, 0x69, 0x66, + 0x79, 0x55, 0x73, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1c, 0x2e, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x4d, 0x6f, 0x64, 0x69, 0x66, 0x79, 0x55, 0x73, + 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x43, 0x0a, 0x08, + 0x47, 0x65, 0x74, 0x55, 0x73, 0x65, 0x72, 0x73, 0x12, 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x63, 0x6f, 0x6c, 0x2e, 0x47, 0x65, 0x74, 0x55, 0x73, 0x65, 0x72, 0x73, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x1a, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x47, + 0x65, 0x74, 0x55, 0x73, 0x65, 0x72, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, + 0x00, 0x12, 0x54, 0x0a, 0x0f, 0x47, 0x65, 0x74, 0x55, 0x73, 0x65, 
0x72, 0x46, 0x65, 0x65, 0x64, + 0x62, 0x61, 0x63, 0x6b, 0x12, 0x20, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, + 0x47, 0x65, 0x74, 0x55, 0x73, 0x65, 0x72, 0x46, 0x65, 0x65, 0x64, 0x62, 0x61, 0x63, 0x6b, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1d, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, + 0x6c, 0x2e, 0x47, 0x65, 0x74, 0x46, 0x65, 0x65, 0x64, 0x62, 0x61, 0x63, 0x6b, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x5c, 0x0a, 0x13, 0x47, 0x65, 0x74, 0x55, 0x73, + 0x65, 0x72, 0x49, 0x74, 0x65, 0x6d, 0x46, 0x65, 0x65, 0x64, 0x62, 0x61, 0x63, 0x6b, 0x12, 0x24, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x47, 0x65, 0x74, 0x55, 0x73, 0x65, + 0x72, 0x49, 0x74, 0x65, 0x6d, 0x46, 0x65, 0x65, 0x64, 0x62, 0x61, 0x63, 0x6b, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1d, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, + 0x47, 0x65, 0x74, 0x46, 0x65, 0x65, 0x64, 0x62, 0x61, 0x63, 0x6b, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x6d, 0x0a, 0x16, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x55, + 0x73, 0x65, 0x72, 0x49, 0x74, 0x65, 0x6d, 0x46, 0x65, 0x65, 0x64, 0x62, 0x61, 0x63, 0x6b, 0x12, + 0x27, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, + 0x65, 0x55, 0x73, 0x65, 0x72, 0x49, 0x74, 0x65, 0x6d, 0x46, 0x65, 0x65, 0x64, 0x62, 0x61, 0x63, + 0x6b, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x28, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x63, 0x6f, 0x6c, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x55, 0x73, 0x65, 0x72, 0x49, 0x74, + 0x65, 0x6d, 0x46, 0x65, 0x65, 0x64, 0x62, 0x61, 0x63, 0x6b, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x22, 0x00, 0x12, 0x64, 0x0a, 0x13, 0x42, 0x61, 0x74, 0x63, 0x68, 0x49, 0x6e, 0x73, + 0x65, 0x72, 0x74, 0x46, 0x65, 0x65, 0x64, 0x62, 0x61, 0x63, 0x6b, 0x12, 0x24, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x42, 0x61, 0x74, 0x63, 0x68, 0x49, 0x6e, 0x73, 0x65, + 0x72, 
0x74, 0x46, 0x65, 0x65, 0x64, 0x62, 0x61, 0x63, 0x6b, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x1a, 0x25, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x42, 0x61, 0x74, + 0x63, 0x68, 0x49, 0x6e, 0x73, 0x65, 0x72, 0x74, 0x46, 0x65, 0x65, 0x64, 0x62, 0x61, 0x63, 0x6b, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x4c, 0x0a, 0x0b, 0x47, 0x65, + 0x74, 0x46, 0x65, 0x65, 0x64, 0x62, 0x61, 0x63, 0x6b, 0x12, 0x1c, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x47, 0x65, 0x74, 0x46, 0x65, 0x65, 0x64, 0x62, 0x61, 0x63, 0x6b, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1d, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, + 0x6f, 0x6c, 0x2e, 0x47, 0x65, 0x74, 0x46, 0x65, 0x65, 0x64, 0x62, 0x61, 0x63, 0x6b, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x54, 0x0a, 0x0d, 0x47, 0x65, 0x74, 0x55, + 0x73, 0x65, 0x72, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x12, 0x1e, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x47, 0x65, 0x74, 0x55, 0x73, 0x65, 0x72, 0x53, 0x74, 0x72, 0x65, + 0x61, 0x6d, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1f, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x47, 0x65, 0x74, 0x55, 0x73, 0x65, 0x72, 0x53, 0x74, 0x72, 0x65, + 0x61, 0x6d, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x30, 0x01, 0x12, 0x54, + 0x0a, 0x0d, 0x47, 0x65, 0x74, 0x49, 0x74, 0x65, 0x6d, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x12, + 0x1e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x47, 0x65, 0x74, 0x49, 0x74, + 0x65, 0x6d, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, + 0x1f, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x47, 0x65, 0x74, 0x49, 0x74, + 0x65, 0x6d, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x22, 0x00, 0x30, 0x01, 0x12, 0x60, 0x0a, 0x11, 0x47, 0x65, 0x74, 0x46, 0x65, 0x65, 0x64, 0x62, + 0x61, 0x63, 0x6b, 0x53, 0x74, 0x72, 0x65, 
0x61, 0x6d, 0x12, 0x22, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x47, 0x65, 0x74, 0x46, 0x65, 0x65, 0x64, 0x62, 0x61, 0x63, 0x6b, + 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x23, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x47, 0x65, 0x74, 0x46, 0x65, 0x65, 0x64, + 0x62, 0x61, 0x63, 0x6b, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x22, 0x00, 0x30, 0x01, 0x42, 0x25, 0x5a, 0x23, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, + 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x7a, 0x68, 0x65, 0x6e, 0x67, 0x68, 0x61, 0x6f, 0x7a, 0x2f, 0x67, + 0x6f, 0x72, 0x73, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x62, 0x06, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_data_store_proto_rawDescOnce sync.Once + file_data_store_proto_rawDescData = file_data_store_proto_rawDesc +) + +func file_data_store_proto_rawDescGZIP() []byte { + file_data_store_proto_rawDescOnce.Do(func() { + file_data_store_proto_rawDescData = protoimpl.X.CompressGZIP(file_data_store_proto_rawDescData) + }) + return file_data_store_proto_rawDescData +} + +var file_data_store_proto_msgTypes = make([]protoimpl.MessageInfo, 40) +var file_data_store_proto_goTypes = []any{ + (*UserPatch)(nil), // 0: protocol.UserPatch + (*ItemPatch)(nil), // 1: protocol.ItemPatch + (*ScanOptions)(nil), // 2: protocol.ScanOptions + (*BatchInsertItemsRequest)(nil), // 3: protocol.BatchInsertItemsRequest + (*BatchInsertItemsResponse)(nil), // 4: protocol.BatchInsertItemsResponse + (*BatchGetItemsRequest)(nil), // 5: protocol.BatchGetItemsRequest + (*BatchGetItemsResponse)(nil), // 6: protocol.BatchGetItemsResponse + (*DeleteItemRequest)(nil), // 7: protocol.DeleteItemRequest + (*DeleteItemResponse)(nil), // 8: protocol.DeleteItemResponse + (*GetItemRequest)(nil), // 9: protocol.GetItemRequest + (*GetItemResponse)(nil), // 10: protocol.GetItemResponse + (*ModifyItemRequest)(nil), // 11: 
protocol.ModifyItemRequest + (*ModifyItemResponse)(nil), // 12: protocol.ModifyItemResponse + (*GetItemsRequest)(nil), // 13: protocol.GetItemsRequest + (*GetItemsResponse)(nil), // 14: protocol.GetItemsResponse + (*GetItemFeedbackRequest)(nil), // 15: protocol.GetItemFeedbackRequest + (*BatchInsertUsersRequest)(nil), // 16: protocol.BatchInsertUsersRequest + (*BatchInsertUsersResponse)(nil), // 17: protocol.BatchInsertUsersResponse + (*DeleteUserRequest)(nil), // 18: protocol.DeleteUserRequest + (*DeleteUserResponse)(nil), // 19: protocol.DeleteUserResponse + (*GetUserRequest)(nil), // 20: protocol.GetUserRequest + (*GetUserResponse)(nil), // 21: protocol.GetUserResponse + (*ModifyUserRequest)(nil), // 22: protocol.ModifyUserRequest + (*ModifyUserResponse)(nil), // 23: protocol.ModifyUserResponse + (*GetUsersRequest)(nil), // 24: protocol.GetUsersRequest + (*GetUsersResponse)(nil), // 25: protocol.GetUsersResponse + (*GetUserFeedbackRequest)(nil), // 26: protocol.GetUserFeedbackRequest + (*GetUserItemFeedbackRequest)(nil), // 27: protocol.GetUserItemFeedbackRequest + (*DeleteUserItemFeedbackRequest)(nil), // 28: protocol.DeleteUserItemFeedbackRequest + (*DeleteUserItemFeedbackResponse)(nil), // 29: protocol.DeleteUserItemFeedbackResponse + (*BatchInsertFeedbackRequest)(nil), // 30: protocol.BatchInsertFeedbackRequest + (*BatchInsertFeedbackResponse)(nil), // 31: protocol.BatchInsertFeedbackResponse + (*GetFeedbackRequest)(nil), // 32: protocol.GetFeedbackRequest + (*GetFeedbackResponse)(nil), // 33: protocol.GetFeedbackResponse + (*GetUserStreamRequest)(nil), // 34: protocol.GetUserStreamRequest + (*GetUserStreamResponse)(nil), // 35: protocol.GetUserStreamResponse + (*GetItemStreamRequest)(nil), // 36: protocol.GetItemStreamRequest + (*GetItemStreamResponse)(nil), // 37: protocol.GetItemStreamResponse + (*GetFeedbackStreamRequest)(nil), // 38: protocol.GetFeedbackStreamRequest + (*GetFeedbackStreamResponse)(nil), // 39: protocol.GetFeedbackStreamResponse + 
(*timestamppb.Timestamp)(nil), // 40: google.protobuf.Timestamp + (*Item)(nil), // 41: protocol.Item + (*User)(nil), // 42: protocol.User + (*Feedback)(nil), // 43: protocol.Feedback + (*PingRequest)(nil), // 44: protocol.PingRequest + (*PingResponse)(nil), // 45: protocol.PingResponse +} +var file_data_store_proto_depIdxs = []int32{ + 40, // 0: protocol.ItemPatch.timestamp:type_name -> google.protobuf.Timestamp + 40, // 1: protocol.ScanOptions.begin_time:type_name -> google.protobuf.Timestamp + 40, // 2: protocol.ScanOptions.end_time:type_name -> google.protobuf.Timestamp + 41, // 3: protocol.BatchInsertItemsRequest.items:type_name -> protocol.Item + 41, // 4: protocol.BatchGetItemsResponse.items:type_name -> protocol.Item + 41, // 5: protocol.GetItemResponse.item:type_name -> protocol.Item + 1, // 6: protocol.ModifyItemRequest.patch:type_name -> protocol.ItemPatch + 40, // 7: protocol.GetItemsRequest.begin_time:type_name -> google.protobuf.Timestamp + 41, // 8: protocol.GetItemsResponse.items:type_name -> protocol.Item + 42, // 9: protocol.BatchInsertUsersRequest.users:type_name -> protocol.User + 42, // 10: protocol.GetUserResponse.user:type_name -> protocol.User + 0, // 11: protocol.ModifyUserRequest.patch:type_name -> protocol.UserPatch + 42, // 12: protocol.GetUsersResponse.users:type_name -> protocol.User + 40, // 13: protocol.GetUserFeedbackRequest.end_time:type_name -> google.protobuf.Timestamp + 43, // 14: protocol.BatchInsertFeedbackRequest.feedback:type_name -> protocol.Feedback + 40, // 15: protocol.GetFeedbackRequest.begin_time:type_name -> google.protobuf.Timestamp + 40, // 16: protocol.GetFeedbackRequest.end_time:type_name -> google.protobuf.Timestamp + 43, // 17: protocol.GetFeedbackResponse.feedback:type_name -> protocol.Feedback + 42, // 18: protocol.GetUserStreamResponse.users:type_name -> protocol.User + 40, // 19: protocol.GetItemStreamRequest.time_limit:type_name -> google.protobuf.Timestamp + 41, // 20: 
protocol.GetItemStreamResponse.items:type_name -> protocol.Item + 2, // 21: protocol.GetFeedbackStreamRequest.scan_options:type_name -> protocol.ScanOptions + 43, // 22: protocol.GetFeedbackStreamResponse.feedback:type_name -> protocol.Feedback + 44, // 23: protocol.DataStore.Ping:input_type -> protocol.PingRequest + 3, // 24: protocol.DataStore.BatchInsertItems:input_type -> protocol.BatchInsertItemsRequest + 5, // 25: protocol.DataStore.BatchGetItems:input_type -> protocol.BatchGetItemsRequest + 7, // 26: protocol.DataStore.DeleteItem:input_type -> protocol.DeleteItemRequest + 9, // 27: protocol.DataStore.GetItem:input_type -> protocol.GetItemRequest + 11, // 28: protocol.DataStore.ModifyItem:input_type -> protocol.ModifyItemRequest + 13, // 29: protocol.DataStore.GetItems:input_type -> protocol.GetItemsRequest + 15, // 30: protocol.DataStore.GetItemFeedback:input_type -> protocol.GetItemFeedbackRequest + 16, // 31: protocol.DataStore.BatchInsertUsers:input_type -> protocol.BatchInsertUsersRequest + 18, // 32: protocol.DataStore.DeleteUser:input_type -> protocol.DeleteUserRequest + 20, // 33: protocol.DataStore.GetUser:input_type -> protocol.GetUserRequest + 22, // 34: protocol.DataStore.ModifyUser:input_type -> protocol.ModifyUserRequest + 24, // 35: protocol.DataStore.GetUsers:input_type -> protocol.GetUsersRequest + 26, // 36: protocol.DataStore.GetUserFeedback:input_type -> protocol.GetUserFeedbackRequest + 27, // 37: protocol.DataStore.GetUserItemFeedback:input_type -> protocol.GetUserItemFeedbackRequest + 28, // 38: protocol.DataStore.DeleteUserItemFeedback:input_type -> protocol.DeleteUserItemFeedbackRequest + 30, // 39: protocol.DataStore.BatchInsertFeedback:input_type -> protocol.BatchInsertFeedbackRequest + 32, // 40: protocol.DataStore.GetFeedback:input_type -> protocol.GetFeedbackRequest + 34, // 41: protocol.DataStore.GetUserStream:input_type -> protocol.GetUserStreamRequest + 36, // 42: protocol.DataStore.GetItemStream:input_type -> 
protocol.GetItemStreamRequest + 38, // 43: protocol.DataStore.GetFeedbackStream:input_type -> protocol.GetFeedbackStreamRequest + 45, // 44: protocol.DataStore.Ping:output_type -> protocol.PingResponse + 4, // 45: protocol.DataStore.BatchInsertItems:output_type -> protocol.BatchInsertItemsResponse + 6, // 46: protocol.DataStore.BatchGetItems:output_type -> protocol.BatchGetItemsResponse + 8, // 47: protocol.DataStore.DeleteItem:output_type -> protocol.DeleteItemResponse + 10, // 48: protocol.DataStore.GetItem:output_type -> protocol.GetItemResponse + 12, // 49: protocol.DataStore.ModifyItem:output_type -> protocol.ModifyItemResponse + 14, // 50: protocol.DataStore.GetItems:output_type -> protocol.GetItemsResponse + 33, // 51: protocol.DataStore.GetItemFeedback:output_type -> protocol.GetFeedbackResponse + 17, // 52: protocol.DataStore.BatchInsertUsers:output_type -> protocol.BatchInsertUsersResponse + 19, // 53: protocol.DataStore.DeleteUser:output_type -> protocol.DeleteUserResponse + 21, // 54: protocol.DataStore.GetUser:output_type -> protocol.GetUserResponse + 23, // 55: protocol.DataStore.ModifyUser:output_type -> protocol.ModifyUserResponse + 25, // 56: protocol.DataStore.GetUsers:output_type -> protocol.GetUsersResponse + 33, // 57: protocol.DataStore.GetUserFeedback:output_type -> protocol.GetFeedbackResponse + 33, // 58: protocol.DataStore.GetUserItemFeedback:output_type -> protocol.GetFeedbackResponse + 29, // 59: protocol.DataStore.DeleteUserItemFeedback:output_type -> protocol.DeleteUserItemFeedbackResponse + 31, // 60: protocol.DataStore.BatchInsertFeedback:output_type -> protocol.BatchInsertFeedbackResponse + 33, // 61: protocol.DataStore.GetFeedback:output_type -> protocol.GetFeedbackResponse + 35, // 62: protocol.DataStore.GetUserStream:output_type -> protocol.GetUserStreamResponse + 37, // 63: protocol.DataStore.GetItemStream:output_type -> protocol.GetItemStreamResponse + 39, // 64: protocol.DataStore.GetFeedbackStream:output_type -> 
protocol.GetFeedbackStreamResponse + 44, // [44:65] is the sub-list for method output_type + 23, // [23:44] is the sub-list for method input_type + 23, // [23:23] is the sub-list for extension type_name + 23, // [23:23] is the sub-list for extension extendee + 0, // [0:23] is the sub-list for field type_name +} + +func init() { file_data_store_proto_init() } +func file_data_store_proto_init() { + if File_data_store_proto != nil { + return + } + file_protocol_proto_init() + file_data_store_proto_msgTypes[0].OneofWrappers = []any{} + file_data_store_proto_msgTypes[1].OneofWrappers = []any{} + file_data_store_proto_msgTypes[2].OneofWrappers = []any{} + file_data_store_proto_msgTypes[10].OneofWrappers = []any{} + file_data_store_proto_msgTypes[21].OneofWrappers = []any{} + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_data_store_proto_rawDesc, + NumEnums: 0, + NumMessages: 40, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: file_data_store_proto_goTypes, + DependencyIndexes: file_data_store_proto_depIdxs, + MessageInfos: file_data_store_proto_msgTypes, + }.Build() + File_data_store_proto = out.File + file_data_store_proto_rawDesc = nil + file_data_store_proto_goTypes = nil + file_data_store_proto_depIdxs = nil +} diff --git a/protocol/data_store.proto b/protocol/data_store.proto new file mode 100644 index 000000000..22a73d8af --- /dev/null +++ b/protocol/data_store.proto @@ -0,0 +1,226 @@ +// Copyright 2024 gorse Project Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +option go_package = "github.com/zhenghaoz/gorse/protocol"; + +package protocol; + +import "google/protobuf/timestamp.proto"; +import "protocol.proto"; + +message UserPatch { + bytes labels = 1; + optional string comment = 2; + repeated string subscribe = 3; +} + +message ItemPatch { + optional bool is_hidden = 1; + repeated string categories = 2; + optional google.protobuf.Timestamp timestamp = 3; + bytes labels = 4; + optional string comment = 5; +} + +message ScanOptions { + optional string begin_user_id = 1; + optional string end_user_id = 2; + optional google.protobuf.Timestamp begin_time = 3; + optional google.protobuf.Timestamp end_time = 4; + repeated string feedback_types = 5; +} + +message BatchInsertItemsRequest { + repeated Item items = 1; +} + +message BatchInsertItemsResponse {} + +message BatchGetItemsRequest { + repeated string item_ids = 1; +} + +message BatchGetItemsResponse { + repeated Item items = 1; +} + +message DeleteItemRequest { + string item_id = 1; +} + +message DeleteItemResponse {} + +message GetItemRequest { + string item_id = 1; +} + +message GetItemResponse { + optional Item item = 1; +} + +message ModifyItemRequest { + string item_id = 1; + ItemPatch patch = 2; +} + +message ModifyItemResponse {} + +message GetItemsRequest { + string cursor = 1; + int32 n = 2; + google.protobuf.Timestamp begin_time = 3; +} + +message GetItemsResponse { + string cursor = 1; + repeated Item items = 2; +} + +message GetItemFeedbackRequest { + string item_id = 1; + repeated string feedback_types = 2; +} + 
+message BatchInsertUsersRequest { + repeated User users = 1; +} + +message BatchInsertUsersResponse {} + +message DeleteUserRequest { + string user_id = 1; +} + +message DeleteUserResponse {} + +message GetUserRequest { + string user_id = 1; +} + +message GetUserResponse { + optional User user = 1; +} + +message ModifyUserRequest { + string user_id = 1; + UserPatch patch = 2; +} + +message ModifyUserResponse {} + +message GetUsersRequest { + string cursor = 1; + int32 n = 2; +} + +message GetUsersResponse { + string cursor = 1; + repeated User users = 2; +} + +message GetUserFeedbackRequest { + string user_id = 1; + google.protobuf.Timestamp end_time = 2; + repeated string feedback_types = 3; +} + +message GetUserItemFeedbackRequest { + string user_id = 1; + string item_id = 2; + repeated string feedback_types = 3; +} + +message DeleteUserItemFeedbackRequest { + string user_id = 1; + string item_id = 2; + repeated string feedback_types = 3; +} + +message DeleteUserItemFeedbackResponse { + int32 count = 1; +} + +message BatchInsertFeedbackRequest { + repeated Feedback feedback = 1; + bool insert_user = 2; + bool insert_item = 3; + bool overwrite = 4; +} + +message BatchInsertFeedbackResponse {} + +message GetFeedbackRequest { + string cursor = 1; + int32 n = 2; + google.protobuf.Timestamp begin_time = 3; + google.protobuf.Timestamp end_time = 4; + repeated string feedback_types = 5; +} + +message GetFeedbackResponse { + string cursor = 1; + repeated Feedback feedback = 2; +} + +message GetUserStreamRequest { + int32 batch_size = 1; +} + +message GetUserStreamResponse { + repeated User users = 1; +} + +message GetItemStreamRequest { + int32 batch_size = 1; + google.protobuf.Timestamp time_limit = 2; +} + +message GetItemStreamResponse { + repeated Item items = 1; +} + +message GetFeedbackStreamRequest { + int32 batch_size = 1; + ScanOptions scan_options = 2; +} + +message GetFeedbackStreamResponse { + repeated Feedback feedback = 1; +} + +service DataStore { + rpc 
Ping(PingRequest) returns (PingResponse) {} + rpc BatchInsertItems(BatchInsertItemsRequest) returns (BatchInsertItemsResponse) {} + rpc BatchGetItems(BatchGetItemsRequest) returns (BatchGetItemsResponse) {} + rpc DeleteItem(DeleteItemRequest) returns (DeleteItemResponse) {} + rpc GetItem(GetItemRequest) returns (GetItemResponse) {} + rpc ModifyItem(ModifyItemRequest) returns (ModifyItemResponse) {} + rpc GetItems(GetItemsRequest) returns (GetItemsResponse) {} + rpc GetItemFeedback(GetItemFeedbackRequest) returns (GetFeedbackResponse) {} + rpc BatchInsertUsers(BatchInsertUsersRequest) returns (BatchInsertUsersResponse) {} + rpc DeleteUser(DeleteUserRequest) returns (DeleteUserResponse) {} + rpc GetUser(GetUserRequest) returns (GetUserResponse) {} + rpc ModifyUser(ModifyUserRequest) returns (ModifyUserResponse) {} + rpc GetUsers(GetUsersRequest) returns (GetUsersResponse) {} + rpc GetUserFeedback(GetUserFeedbackRequest) returns (GetFeedbackResponse) {} + rpc GetUserItemFeedback(GetUserItemFeedbackRequest) returns (GetFeedbackResponse) {} + rpc DeleteUserItemFeedback(DeleteUserItemFeedbackRequest) returns (DeleteUserItemFeedbackResponse) {} + rpc BatchInsertFeedback(BatchInsertFeedbackRequest) returns (BatchInsertFeedbackResponse) {} + rpc GetFeedback(GetFeedbackRequest) returns (GetFeedbackResponse) {} + rpc GetUserStream(GetUserStreamRequest) returns (stream GetUserStreamResponse) {} + rpc GetItemStream(GetItemStreamRequest) returns (stream GetItemStreamResponse) {} + rpc GetFeedbackStream(GetFeedbackStreamRequest) returns (stream GetFeedbackStreamResponse) {} +} diff --git a/protocol/data_store_grpc.pb.go b/protocol/data_store_grpc.pb.go new file mode 100644 index 000000000..2d814a9a7 --- /dev/null +++ b/protocol/data_store_grpc.pb.go @@ -0,0 +1,905 @@ +// Copyright 2024 gorse Project Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. +// versions: +// - protoc-gen-go-grpc v1.5.1 +// - protoc v5.28.3 +// source: data_store.proto + +package protocol + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.64.0 or later. +const _ = grpc.SupportPackageIsVersion9 + +const ( + DataStore_Ping_FullMethodName = "/protocol.DataStore/Ping" + DataStore_BatchInsertItems_FullMethodName = "/protocol.DataStore/BatchInsertItems" + DataStore_BatchGetItems_FullMethodName = "/protocol.DataStore/BatchGetItems" + DataStore_DeleteItem_FullMethodName = "/protocol.DataStore/DeleteItem" + DataStore_GetItem_FullMethodName = "/protocol.DataStore/GetItem" + DataStore_ModifyItem_FullMethodName = "/protocol.DataStore/ModifyItem" + DataStore_GetItems_FullMethodName = "/protocol.DataStore/GetItems" + DataStore_GetItemFeedback_FullMethodName = "/protocol.DataStore/GetItemFeedback" + DataStore_BatchInsertUsers_FullMethodName = "/protocol.DataStore/BatchInsertUsers" + DataStore_DeleteUser_FullMethodName = "/protocol.DataStore/DeleteUser" + DataStore_GetUser_FullMethodName = "/protocol.DataStore/GetUser" + DataStore_ModifyUser_FullMethodName = "/protocol.DataStore/ModifyUser" + DataStore_GetUsers_FullMethodName = "/protocol.DataStore/GetUsers" + DataStore_GetUserFeedback_FullMethodName = 
"/protocol.DataStore/GetUserFeedback" + DataStore_GetUserItemFeedback_FullMethodName = "/protocol.DataStore/GetUserItemFeedback" + DataStore_DeleteUserItemFeedback_FullMethodName = "/protocol.DataStore/DeleteUserItemFeedback" + DataStore_BatchInsertFeedback_FullMethodName = "/protocol.DataStore/BatchInsertFeedback" + DataStore_GetFeedback_FullMethodName = "/protocol.DataStore/GetFeedback" + DataStore_GetUserStream_FullMethodName = "/protocol.DataStore/GetUserStream" + DataStore_GetItemStream_FullMethodName = "/protocol.DataStore/GetItemStream" + DataStore_GetFeedbackStream_FullMethodName = "/protocol.DataStore/GetFeedbackStream" +) + +// DataStoreClient is the client API for DataStore service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type DataStoreClient interface { + Ping(ctx context.Context, in *PingRequest, opts ...grpc.CallOption) (*PingResponse, error) + BatchInsertItems(ctx context.Context, in *BatchInsertItemsRequest, opts ...grpc.CallOption) (*BatchInsertItemsResponse, error) + BatchGetItems(ctx context.Context, in *BatchGetItemsRequest, opts ...grpc.CallOption) (*BatchGetItemsResponse, error) + DeleteItem(ctx context.Context, in *DeleteItemRequest, opts ...grpc.CallOption) (*DeleteItemResponse, error) + GetItem(ctx context.Context, in *GetItemRequest, opts ...grpc.CallOption) (*GetItemResponse, error) + ModifyItem(ctx context.Context, in *ModifyItemRequest, opts ...grpc.CallOption) (*ModifyItemResponse, error) + GetItems(ctx context.Context, in *GetItemsRequest, opts ...grpc.CallOption) (*GetItemsResponse, error) + GetItemFeedback(ctx context.Context, in *GetItemFeedbackRequest, opts ...grpc.CallOption) (*GetFeedbackResponse, error) + BatchInsertUsers(ctx context.Context, in *BatchInsertUsersRequest, opts ...grpc.CallOption) (*BatchInsertUsersResponse, error) + DeleteUser(ctx context.Context, in *DeleteUserRequest, opts 
...grpc.CallOption) (*DeleteUserResponse, error) + GetUser(ctx context.Context, in *GetUserRequest, opts ...grpc.CallOption) (*GetUserResponse, error) + ModifyUser(ctx context.Context, in *ModifyUserRequest, opts ...grpc.CallOption) (*ModifyUserResponse, error) + GetUsers(ctx context.Context, in *GetUsersRequest, opts ...grpc.CallOption) (*GetUsersResponse, error) + GetUserFeedback(ctx context.Context, in *GetUserFeedbackRequest, opts ...grpc.CallOption) (*GetFeedbackResponse, error) + GetUserItemFeedback(ctx context.Context, in *GetUserItemFeedbackRequest, opts ...grpc.CallOption) (*GetFeedbackResponse, error) + DeleteUserItemFeedback(ctx context.Context, in *DeleteUserItemFeedbackRequest, opts ...grpc.CallOption) (*DeleteUserItemFeedbackResponse, error) + BatchInsertFeedback(ctx context.Context, in *BatchInsertFeedbackRequest, opts ...grpc.CallOption) (*BatchInsertFeedbackResponse, error) + GetFeedback(ctx context.Context, in *GetFeedbackRequest, opts ...grpc.CallOption) (*GetFeedbackResponse, error) + GetUserStream(ctx context.Context, in *GetUserStreamRequest, opts ...grpc.CallOption) (grpc.ServerStreamingClient[GetUserStreamResponse], error) + GetItemStream(ctx context.Context, in *GetItemStreamRequest, opts ...grpc.CallOption) (grpc.ServerStreamingClient[GetItemStreamResponse], error) + GetFeedbackStream(ctx context.Context, in *GetFeedbackStreamRequest, opts ...grpc.CallOption) (grpc.ServerStreamingClient[GetFeedbackStreamResponse], error) +} + +type dataStoreClient struct { + cc grpc.ClientConnInterface +} + +func NewDataStoreClient(cc grpc.ClientConnInterface) DataStoreClient { + return &dataStoreClient{cc} +} + +func (c *dataStoreClient) Ping(ctx context.Context, in *PingRequest, opts ...grpc.CallOption) (*PingResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(PingResponse) + err := c.cc.Invoke(ctx, DataStore_Ping_FullMethodName, in, out, cOpts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataStoreClient) BatchInsertItems(ctx context.Context, in *BatchInsertItemsRequest, opts ...grpc.CallOption) (*BatchInsertItemsResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(BatchInsertItemsResponse) + err := c.cc.Invoke(ctx, DataStore_BatchInsertItems_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataStoreClient) BatchGetItems(ctx context.Context, in *BatchGetItemsRequest, opts ...grpc.CallOption) (*BatchGetItemsResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(BatchGetItemsResponse) + err := c.cc.Invoke(ctx, DataStore_BatchGetItems_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataStoreClient) DeleteItem(ctx context.Context, in *DeleteItemRequest, opts ...grpc.CallOption) (*DeleteItemResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(DeleteItemResponse) + err := c.cc.Invoke(ctx, DataStore_DeleteItem_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataStoreClient) GetItem(ctx context.Context, in *GetItemRequest, opts ...grpc.CallOption) (*GetItemResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(GetItemResponse) + err := c.cc.Invoke(ctx, DataStore_GetItem_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataStoreClient) ModifyItem(ctx context.Context, in *ModifyItemRequest, opts ...grpc.CallOption) (*ModifyItemResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(ModifyItemResponse) + err := c.cc.Invoke(ctx, DataStore_ModifyItem_FullMethodName, in, out, cOpts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataStoreClient) GetItems(ctx context.Context, in *GetItemsRequest, opts ...grpc.CallOption) (*GetItemsResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(GetItemsResponse) + err := c.cc.Invoke(ctx, DataStore_GetItems_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataStoreClient) GetItemFeedback(ctx context.Context, in *GetItemFeedbackRequest, opts ...grpc.CallOption) (*GetFeedbackResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(GetFeedbackResponse) + err := c.cc.Invoke(ctx, DataStore_GetItemFeedback_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataStoreClient) BatchInsertUsers(ctx context.Context, in *BatchInsertUsersRequest, opts ...grpc.CallOption) (*BatchInsertUsersResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(BatchInsertUsersResponse) + err := c.cc.Invoke(ctx, DataStore_BatchInsertUsers_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataStoreClient) DeleteUser(ctx context.Context, in *DeleteUserRequest, opts ...grpc.CallOption) (*DeleteUserResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(DeleteUserResponse) + err := c.cc.Invoke(ctx, DataStore_DeleteUser_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataStoreClient) GetUser(ctx context.Context, in *GetUserRequest, opts ...grpc.CallOption) (*GetUserResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(GetUserResponse) + err := c.cc.Invoke(ctx, DataStore_GetUser_FullMethodName, in, out, cOpts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataStoreClient) ModifyUser(ctx context.Context, in *ModifyUserRequest, opts ...grpc.CallOption) (*ModifyUserResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(ModifyUserResponse) + err := c.cc.Invoke(ctx, DataStore_ModifyUser_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataStoreClient) GetUsers(ctx context.Context, in *GetUsersRequest, opts ...grpc.CallOption) (*GetUsersResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(GetUsersResponse) + err := c.cc.Invoke(ctx, DataStore_GetUsers_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataStoreClient) GetUserFeedback(ctx context.Context, in *GetUserFeedbackRequest, opts ...grpc.CallOption) (*GetFeedbackResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(GetFeedbackResponse) + err := c.cc.Invoke(ctx, DataStore_GetUserFeedback_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataStoreClient) GetUserItemFeedback(ctx context.Context, in *GetUserItemFeedbackRequest, opts ...grpc.CallOption) (*GetFeedbackResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(GetFeedbackResponse) + err := c.cc.Invoke(ctx, DataStore_GetUserItemFeedback_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataStoreClient) DeleteUserItemFeedback(ctx context.Context, in *DeleteUserItemFeedbackRequest, opts ...grpc.CallOption) (*DeleteUserItemFeedbackResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) 
+ out := new(DeleteUserItemFeedbackResponse) + err := c.cc.Invoke(ctx, DataStore_DeleteUserItemFeedback_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataStoreClient) BatchInsertFeedback(ctx context.Context, in *BatchInsertFeedbackRequest, opts ...grpc.CallOption) (*BatchInsertFeedbackResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(BatchInsertFeedbackResponse) + err := c.cc.Invoke(ctx, DataStore_BatchInsertFeedback_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataStoreClient) GetFeedback(ctx context.Context, in *GetFeedbackRequest, opts ...grpc.CallOption) (*GetFeedbackResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(GetFeedbackResponse) + err := c.cc.Invoke(ctx, DataStore_GetFeedback_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataStoreClient) GetUserStream(ctx context.Context, in *GetUserStreamRequest, opts ...grpc.CallOption) (grpc.ServerStreamingClient[GetUserStreamResponse], error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + stream, err := c.cc.NewStream(ctx, &DataStore_ServiceDesc.Streams[0], DataStore_GetUserStream_FullMethodName, cOpts...) + if err != nil { + return nil, err + } + x := &grpc.GenericClientStream[GetUserStreamRequest, GetUserStreamResponse]{ClientStream: stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +// This type alias is provided for backwards compatibility with existing code that references the prior non-generic stream type by name. 
+type DataStore_GetUserStreamClient = grpc.ServerStreamingClient[GetUserStreamResponse] + +func (c *dataStoreClient) GetItemStream(ctx context.Context, in *GetItemStreamRequest, opts ...grpc.CallOption) (grpc.ServerStreamingClient[GetItemStreamResponse], error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + stream, err := c.cc.NewStream(ctx, &DataStore_ServiceDesc.Streams[1], DataStore_GetItemStream_FullMethodName, cOpts...) + if err != nil { + return nil, err + } + x := &grpc.GenericClientStream[GetItemStreamRequest, GetItemStreamResponse]{ClientStream: stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +// This type alias is provided for backwards compatibility with existing code that references the prior non-generic stream type by name. +type DataStore_GetItemStreamClient = grpc.ServerStreamingClient[GetItemStreamResponse] + +func (c *dataStoreClient) GetFeedbackStream(ctx context.Context, in *GetFeedbackStreamRequest, opts ...grpc.CallOption) (grpc.ServerStreamingClient[GetFeedbackStreamResponse], error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + stream, err := c.cc.NewStream(ctx, &DataStore_ServiceDesc.Streams[2], DataStore_GetFeedbackStream_FullMethodName, cOpts...) + if err != nil { + return nil, err + } + x := &grpc.GenericClientStream[GetFeedbackStreamRequest, GetFeedbackStreamResponse]{ClientStream: stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +// This type alias is provided for backwards compatibility with existing code that references the prior non-generic stream type by name. +type DataStore_GetFeedbackStreamClient = grpc.ServerStreamingClient[GetFeedbackStreamResponse] + +// DataStoreServer is the server API for DataStore service. 
+// All implementations must embed UnimplementedDataStoreServer +// for forward compatibility. +type DataStoreServer interface { + Ping(context.Context, *PingRequest) (*PingResponse, error) + BatchInsertItems(context.Context, *BatchInsertItemsRequest) (*BatchInsertItemsResponse, error) + BatchGetItems(context.Context, *BatchGetItemsRequest) (*BatchGetItemsResponse, error) + DeleteItem(context.Context, *DeleteItemRequest) (*DeleteItemResponse, error) + GetItem(context.Context, *GetItemRequest) (*GetItemResponse, error) + ModifyItem(context.Context, *ModifyItemRequest) (*ModifyItemResponse, error) + GetItems(context.Context, *GetItemsRequest) (*GetItemsResponse, error) + GetItemFeedback(context.Context, *GetItemFeedbackRequest) (*GetFeedbackResponse, error) + BatchInsertUsers(context.Context, *BatchInsertUsersRequest) (*BatchInsertUsersResponse, error) + DeleteUser(context.Context, *DeleteUserRequest) (*DeleteUserResponse, error) + GetUser(context.Context, *GetUserRequest) (*GetUserResponse, error) + ModifyUser(context.Context, *ModifyUserRequest) (*ModifyUserResponse, error) + GetUsers(context.Context, *GetUsersRequest) (*GetUsersResponse, error) + GetUserFeedback(context.Context, *GetUserFeedbackRequest) (*GetFeedbackResponse, error) + GetUserItemFeedback(context.Context, *GetUserItemFeedbackRequest) (*GetFeedbackResponse, error) + DeleteUserItemFeedback(context.Context, *DeleteUserItemFeedbackRequest) (*DeleteUserItemFeedbackResponse, error) + BatchInsertFeedback(context.Context, *BatchInsertFeedbackRequest) (*BatchInsertFeedbackResponse, error) + GetFeedback(context.Context, *GetFeedbackRequest) (*GetFeedbackResponse, error) + GetUserStream(*GetUserStreamRequest, grpc.ServerStreamingServer[GetUserStreamResponse]) error + GetItemStream(*GetItemStreamRequest, grpc.ServerStreamingServer[GetItemStreamResponse]) error + GetFeedbackStream(*GetFeedbackStreamRequest, grpc.ServerStreamingServer[GetFeedbackStreamResponse]) error + mustEmbedUnimplementedDataStoreServer() +} 
+ +// UnimplementedDataStoreServer must be embedded to have +// forward compatible implementations. +// +// NOTE: this should be embedded by value instead of pointer to avoid a nil +// pointer dereference when methods are called. +type UnimplementedDataStoreServer struct{} + +func (UnimplementedDataStoreServer) Ping(context.Context, *PingRequest) (*PingResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method Ping not implemented") +} +func (UnimplementedDataStoreServer) BatchInsertItems(context.Context, *BatchInsertItemsRequest) (*BatchInsertItemsResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method BatchInsertItems not implemented") +} +func (UnimplementedDataStoreServer) BatchGetItems(context.Context, *BatchGetItemsRequest) (*BatchGetItemsResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method BatchGetItems not implemented") +} +func (UnimplementedDataStoreServer) DeleteItem(context.Context, *DeleteItemRequest) (*DeleteItemResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method DeleteItem not implemented") +} +func (UnimplementedDataStoreServer) GetItem(context.Context, *GetItemRequest) (*GetItemResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetItem not implemented") +} +func (UnimplementedDataStoreServer) ModifyItem(context.Context, *ModifyItemRequest) (*ModifyItemResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ModifyItem not implemented") +} +func (UnimplementedDataStoreServer) GetItems(context.Context, *GetItemsRequest) (*GetItemsResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetItems not implemented") +} +func (UnimplementedDataStoreServer) GetItemFeedback(context.Context, *GetItemFeedbackRequest) (*GetFeedbackResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetItemFeedback not implemented") +} +func (UnimplementedDataStoreServer) BatchInsertUsers(context.Context, 
*BatchInsertUsersRequest) (*BatchInsertUsersResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method BatchInsertUsers not implemented") +} +func (UnimplementedDataStoreServer) DeleteUser(context.Context, *DeleteUserRequest) (*DeleteUserResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method DeleteUser not implemented") +} +func (UnimplementedDataStoreServer) GetUser(context.Context, *GetUserRequest) (*GetUserResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetUser not implemented") +} +func (UnimplementedDataStoreServer) ModifyUser(context.Context, *ModifyUserRequest) (*ModifyUserResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ModifyUser not implemented") +} +func (UnimplementedDataStoreServer) GetUsers(context.Context, *GetUsersRequest) (*GetUsersResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetUsers not implemented") +} +func (UnimplementedDataStoreServer) GetUserFeedback(context.Context, *GetUserFeedbackRequest) (*GetFeedbackResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetUserFeedback not implemented") +} +func (UnimplementedDataStoreServer) GetUserItemFeedback(context.Context, *GetUserItemFeedbackRequest) (*GetFeedbackResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetUserItemFeedback not implemented") +} +func (UnimplementedDataStoreServer) DeleteUserItemFeedback(context.Context, *DeleteUserItemFeedbackRequest) (*DeleteUserItemFeedbackResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method DeleteUserItemFeedback not implemented") +} +func (UnimplementedDataStoreServer) BatchInsertFeedback(context.Context, *BatchInsertFeedbackRequest) (*BatchInsertFeedbackResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method BatchInsertFeedback not implemented") +} +func (UnimplementedDataStoreServer) GetFeedback(context.Context, *GetFeedbackRequest) 
(*GetFeedbackResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetFeedback not implemented") +} +func (UnimplementedDataStoreServer) GetUserStream(*GetUserStreamRequest, grpc.ServerStreamingServer[GetUserStreamResponse]) error { + return status.Errorf(codes.Unimplemented, "method GetUserStream not implemented") +} +func (UnimplementedDataStoreServer) GetItemStream(*GetItemStreamRequest, grpc.ServerStreamingServer[GetItemStreamResponse]) error { + return status.Errorf(codes.Unimplemented, "method GetItemStream not implemented") +} +func (UnimplementedDataStoreServer) GetFeedbackStream(*GetFeedbackStreamRequest, grpc.ServerStreamingServer[GetFeedbackStreamResponse]) error { + return status.Errorf(codes.Unimplemented, "method GetFeedbackStream not implemented") +} +func (UnimplementedDataStoreServer) mustEmbedUnimplementedDataStoreServer() {} +func (UnimplementedDataStoreServer) testEmbeddedByValue() {} + +// UnsafeDataStoreServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to DataStoreServer will +// result in compilation errors. +type UnsafeDataStoreServer interface { + mustEmbedUnimplementedDataStoreServer() +} + +func RegisterDataStoreServer(s grpc.ServiceRegistrar, srv DataStoreServer) { + // If the following call panics, it indicates UnimplementedDataStoreServer was + // embedded by pointer and is nil. This will cause panics if an + // unimplemented method is ever invoked, so we test this at initialization + // time to prevent it from happening at runtime later due to I/O. 
+ if t, ok := srv.(interface{ testEmbeddedByValue() }); ok { + t.testEmbeddedByValue() + } + s.RegisterService(&DataStore_ServiceDesc, srv) +} + +func _DataStore_Ping_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PingRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataStoreServer).Ping(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: DataStore_Ping_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataStoreServer).Ping(ctx, req.(*PingRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataStore_BatchInsertItems_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BatchInsertItemsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataStoreServer).BatchInsertItems(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: DataStore_BatchInsertItems_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataStoreServer).BatchInsertItems(ctx, req.(*BatchInsertItemsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataStore_BatchGetItems_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BatchGetItemsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataStoreServer).BatchGetItems(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: DataStore_BatchGetItems_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return 
srv.(DataStoreServer).BatchGetItems(ctx, req.(*BatchGetItemsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataStore_DeleteItem_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteItemRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataStoreServer).DeleteItem(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: DataStore_DeleteItem_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataStoreServer).DeleteItem(ctx, req.(*DeleteItemRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataStore_GetItem_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetItemRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataStoreServer).GetItem(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: DataStore_GetItem_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataStoreServer).GetItem(ctx, req.(*GetItemRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataStore_ModifyItem_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ModifyItemRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataStoreServer).ModifyItem(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: DataStore_ModifyItem_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataStoreServer).ModifyItem(ctx, req.(*ModifyItemRequest)) + } + return 
interceptor(ctx, in, info, handler) +} + +func _DataStore_GetItems_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetItemsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataStoreServer).GetItems(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: DataStore_GetItems_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataStoreServer).GetItems(ctx, req.(*GetItemsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataStore_GetItemFeedback_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetItemFeedbackRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataStoreServer).GetItemFeedback(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: DataStore_GetItemFeedback_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataStoreServer).GetItemFeedback(ctx, req.(*GetItemFeedbackRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataStore_BatchInsertUsers_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BatchInsertUsersRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataStoreServer).BatchInsertUsers(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: DataStore_BatchInsertUsers_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataStoreServer).BatchInsertUsers(ctx, req.(*BatchInsertUsersRequest)) + } + return 
interceptor(ctx, in, info, handler) +} + +func _DataStore_DeleteUser_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteUserRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataStoreServer).DeleteUser(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: DataStore_DeleteUser_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataStoreServer).DeleteUser(ctx, req.(*DeleteUserRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataStore_GetUser_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetUserRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataStoreServer).GetUser(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: DataStore_GetUser_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataStoreServer).GetUser(ctx, req.(*GetUserRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataStore_ModifyUser_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ModifyUserRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataStoreServer).ModifyUser(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: DataStore_ModifyUser_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataStoreServer).ModifyUser(ctx, req.(*ModifyUserRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataStore_GetUsers_Handler(srv 
interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetUsersRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataStoreServer).GetUsers(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: DataStore_GetUsers_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataStoreServer).GetUsers(ctx, req.(*GetUsersRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataStore_GetUserFeedback_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetUserFeedbackRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataStoreServer).GetUserFeedback(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: DataStore_GetUserFeedback_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataStoreServer).GetUserFeedback(ctx, req.(*GetUserFeedbackRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataStore_GetUserItemFeedback_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetUserItemFeedbackRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataStoreServer).GetUserItemFeedback(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: DataStore_GetUserItemFeedback_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataStoreServer).GetUserItemFeedback(ctx, req.(*GetUserItemFeedbackRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func 
_DataStore_DeleteUserItemFeedback_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteUserItemFeedbackRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataStoreServer).DeleteUserItemFeedback(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: DataStore_DeleteUserItemFeedback_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataStoreServer).DeleteUserItemFeedback(ctx, req.(*DeleteUserItemFeedbackRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataStore_BatchInsertFeedback_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BatchInsertFeedbackRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataStoreServer).BatchInsertFeedback(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: DataStore_BatchInsertFeedback_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataStoreServer).BatchInsertFeedback(ctx, req.(*BatchInsertFeedbackRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataStore_GetFeedback_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetFeedbackRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataStoreServer).GetFeedback(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: DataStore_GetFeedback_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataStoreServer).GetFeedback(ctx, 
req.(*GetFeedbackRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataStore_GetUserStream_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(GetUserStreamRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(DataStoreServer).GetUserStream(m, &grpc.GenericServerStream[GetUserStreamRequest, GetUserStreamResponse]{ServerStream: stream}) +} + +// This type alias is provided for backwards compatibility with existing code that references the prior non-generic stream type by name. +type DataStore_GetUserStreamServer = grpc.ServerStreamingServer[GetUserStreamResponse] + +func _DataStore_GetItemStream_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(GetItemStreamRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(DataStoreServer).GetItemStream(m, &grpc.GenericServerStream[GetItemStreamRequest, GetItemStreamResponse]{ServerStream: stream}) +} + +// This type alias is provided for backwards compatibility with existing code that references the prior non-generic stream type by name. +type DataStore_GetItemStreamServer = grpc.ServerStreamingServer[GetItemStreamResponse] + +func _DataStore_GetFeedbackStream_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(GetFeedbackStreamRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(DataStoreServer).GetFeedbackStream(m, &grpc.GenericServerStream[GetFeedbackStreamRequest, GetFeedbackStreamResponse]{ServerStream: stream}) +} + +// This type alias is provided for backwards compatibility with existing code that references the prior non-generic stream type by name. +type DataStore_GetFeedbackStreamServer = grpc.ServerStreamingServer[GetFeedbackStreamResponse] + +// DataStore_ServiceDesc is the grpc.ServiceDesc for DataStore service. 
+// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var DataStore_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "protocol.DataStore", + HandlerType: (*DataStoreServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "Ping", + Handler: _DataStore_Ping_Handler, + }, + { + MethodName: "BatchInsertItems", + Handler: _DataStore_BatchInsertItems_Handler, + }, + { + MethodName: "BatchGetItems", + Handler: _DataStore_BatchGetItems_Handler, + }, + { + MethodName: "DeleteItem", + Handler: _DataStore_DeleteItem_Handler, + }, + { + MethodName: "GetItem", + Handler: _DataStore_GetItem_Handler, + }, + { + MethodName: "ModifyItem", + Handler: _DataStore_ModifyItem_Handler, + }, + { + MethodName: "GetItems", + Handler: _DataStore_GetItems_Handler, + }, + { + MethodName: "GetItemFeedback", + Handler: _DataStore_GetItemFeedback_Handler, + }, + { + MethodName: "BatchInsertUsers", + Handler: _DataStore_BatchInsertUsers_Handler, + }, + { + MethodName: "DeleteUser", + Handler: _DataStore_DeleteUser_Handler, + }, + { + MethodName: "GetUser", + Handler: _DataStore_GetUser_Handler, + }, + { + MethodName: "ModifyUser", + Handler: _DataStore_ModifyUser_Handler, + }, + { + MethodName: "GetUsers", + Handler: _DataStore_GetUsers_Handler, + }, + { + MethodName: "GetUserFeedback", + Handler: _DataStore_GetUserFeedback_Handler, + }, + { + MethodName: "GetUserItemFeedback", + Handler: _DataStore_GetUserItemFeedback_Handler, + }, + { + MethodName: "DeleteUserItemFeedback", + Handler: _DataStore_DeleteUserItemFeedback_Handler, + }, + { + MethodName: "BatchInsertFeedback", + Handler: _DataStore_BatchInsertFeedback_Handler, + }, + { + MethodName: "GetFeedback", + Handler: _DataStore_GetFeedback_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "GetUserStream", + Handler: _DataStore_GetUserStream_Handler, + ServerStreams: true, + }, + { + StreamName: "GetItemStream", + Handler: 
_DataStore_GetItemStream_Handler, + ServerStreams: true, + }, + { + StreamName: "GetFeedbackStream", + Handler: _DataStore_GetFeedbackStream_Handler, + ServerStreams: true, + }, + }, + Metadata: "data_store.proto", +} diff --git a/protocol/protocol.pb.go b/protocol/protocol.pb.go index dc447388f..5c087c4e9 100644 --- a/protocol/protocol.pb.go +++ b/protocol/protocol.pb.go @@ -89,9 +89,10 @@ type User struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - UserId string `protobuf:"bytes,1,opt,name=user_id,json=userId,proto3" json:"user_id,omitempty"` - Labels []byte `protobuf:"bytes,2,opt,name=labels,proto3" json:"labels,omitempty"` - Comment string `protobuf:"bytes,3,opt,name=comment,proto3" json:"comment,omitempty"` + UserId string `protobuf:"bytes,1,opt,name=user_id,json=userId,proto3" json:"user_id,omitempty"` + Labels []byte `protobuf:"bytes,2,opt,name=labels,proto3" json:"labels,omitempty"` + Comment string `protobuf:"bytes,3,opt,name=comment,proto3" json:"comment,omitempty"` + Subscribe []string `protobuf:"bytes,4,rep,name=subscribe,proto3" json:"subscribe,omitempty"` } func (x *User) Reset() { @@ -145,6 +146,13 @@ func (x *User) GetComment() string { return "" } +func (x *User) GetSubscribe() []string { + if x != nil { + return x.Subscribe + } + return nil +} + type Item struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -749,116 +757,192 @@ func (*PushProgressResponse) Descriptor() ([]byte, []int) { return file_protocol_proto_rawDescGZIP(), []int{9} } +type PingRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *PingRequest) Reset() { + *x = PingRequest{} + mi := &file_protocol_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *PingRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PingRequest) ProtoMessage() {} + +func (x *PingRequest) 
ProtoReflect() protoreflect.Message { + mi := &file_protocol_proto_msgTypes[10] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PingRequest.ProtoReflect.Descriptor instead. +func (*PingRequest) Descriptor() ([]byte, []int) { + return file_protocol_proto_rawDescGZIP(), []int{10} +} + +type PingResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *PingResponse) Reset() { + *x = PingResponse{} + mi := &file_protocol_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *PingResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PingResponse) ProtoMessage() {} + +func (x *PingResponse) ProtoReflect() protoreflect.Message { + mi := &file_protocol_proto_msgTypes[11] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PingResponse.ProtoReflect.Descriptor instead. 
+func (*PingResponse) Descriptor() ([]byte, []int) { + return file_protocol_proto_rawDescGZIP(), []int{11} +} + var File_protocol_proto protoreflect.FileDescriptor var file_protocol_proto_rawDesc = []byte{ 0x0a, 0x0e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x08, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x69, 0x6d, 0x65, - 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x51, 0x0a, 0x04, 0x55, + 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x6f, 0x0a, 0x04, 0x55, 0x73, 0x65, 0x72, 0x12, 0x17, 0x0a, 0x07, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x75, 0x73, 0x65, 0x72, 0x49, 0x64, 0x12, 0x16, 0x0a, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x18, - 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x22, 0xe6, - 0x01, 0x0a, 0x04, 0x49, 0x74, 0x65, 0x6d, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, - 0x70, 0x61, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, - 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x17, 0x0a, 0x07, 0x69, 0x74, 0x65, 0x6d, 0x5f, 0x69, 0x64, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x69, 0x74, 0x65, 0x6d, 0x49, 0x64, 0x12, 0x1b, - 0x0a, 0x09, 0x69, 0x73, 0x5f, 0x68, 0x69, 0x64, 0x64, 0x65, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, - 0x08, 0x52, 0x08, 0x69, 0x73, 0x48, 0x69, 0x64, 0x64, 0x65, 0x6e, 0x12, 0x1e, 0x0a, 0x0a, 0x63, - 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x69, 0x65, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x09, 0x52, - 0x0a, 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x69, 0x65, 0x73, 0x12, 0x38, 0x0a, 0x09, 0x74, - 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 
0x70, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, - 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, - 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x74, 0x69, 0x6d, 0x65, - 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, 0x16, 0x0a, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x18, - 0x06, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x12, 0x18, 0x0a, - 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, - 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x22, 0xd3, 0x01, 0x0a, 0x08, 0x46, 0x65, 0x65, 0x64, - 0x62, 0x61, 0x63, 0x6b, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, - 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, - 0x63, 0x65, 0x12, 0x23, 0x0a, 0x0d, 0x66, 0x65, 0x65, 0x64, 0x62, 0x61, 0x63, 0x6b, 0x5f, 0x74, - 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x66, 0x65, 0x65, 0x64, 0x62, - 0x61, 0x63, 0x6b, 0x54, 0x79, 0x70, 0x65, 0x12, 0x17, 0x0a, 0x07, 0x75, 0x73, 0x65, 0x72, 0x5f, - 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x75, 0x73, 0x65, 0x72, 0x49, 0x64, - 0x12, 0x17, 0x0a, 0x07, 0x69, 0x74, 0x65, 0x6d, 0x5f, 0x69, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x06, 0x69, 0x74, 0x65, 0x6d, 0x49, 0x64, 0x12, 0x38, 0x0a, 0x09, 0x74, 0x69, 0x6d, + 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x1c, + 0x0a, 0x09, 0x73, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x18, 0x04, 0x20, 0x03, 0x28, + 0x09, 0x52, 0x09, 0x73, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x22, 0xe6, 0x01, 0x0a, + 0x04, 0x49, 0x74, 0x65, 0x6d, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, + 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, + 0x61, 0x63, 0x65, 0x12, 0x17, 0x0a, 0x07, 0x69, 0x74, 0x65, 0x6d, 0x5f, 0x69, 
0x64, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x69, 0x74, 0x65, 0x6d, 0x49, 0x64, 0x12, 0x1b, 0x0a, 0x09, + 0x69, 0x73, 0x5f, 0x68, 0x69, 0x64, 0x64, 0x65, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, + 0x08, 0x69, 0x73, 0x48, 0x69, 0x64, 0x64, 0x65, 0x6e, 0x12, 0x1e, 0x0a, 0x0a, 0x63, 0x61, 0x74, + 0x65, 0x67, 0x6f, 0x72, 0x69, 0x65, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0a, 0x63, + 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x69, 0x65, 0x73, 0x12, 0x38, 0x0a, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, - 0x61, 0x6d, 0x70, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x18, 0x06, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x22, 0xc6, 0x01, - 0x0a, 0x04, 0x4d, 0x65, 0x74, 0x61, 0x12, 0x16, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x32, - 0x0a, 0x15, 0x72, 0x61, 0x6e, 0x6b, 0x69, 0x6e, 0x67, 0x5f, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x5f, - 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x13, 0x72, - 0x61, 0x6e, 0x6b, 0x69, 0x6e, 0x67, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x56, 0x65, 0x72, 0x73, 0x69, - 0x6f, 0x6e, 0x12, 0x2e, 0x0a, 0x13, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x5f, 0x6d, 0x6f, 0x64, 0x65, - 0x6c, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x03, 0x52, - 0x11, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x56, 0x65, 0x72, 0x73, 0x69, - 0x6f, 0x6e, 0x12, 0x0e, 0x0a, 0x02, 0x6d, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, - 0x6d, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x73, 0x18, 0x06, 0x20, - 0x03, 0x28, 0x09, 0x52, 
0x07, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x73, 0x12, 0x18, 0x0a, 0x07, - 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x09, 0x52, 0x07, 0x77, - 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x73, 0x22, 0x1e, 0x0a, 0x08, 0x46, 0x72, 0x61, 0x67, 0x6d, 0x65, - 0x6e, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, - 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x22, 0x27, 0x0a, 0x0b, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, - 0x6e, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x22, - 0x9c, 0x01, 0x0a, 0x08, 0x4e, 0x6f, 0x64, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x2f, 0x0a, 0x09, - 0x6e, 0x6f, 0x64, 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, - 0x12, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x4e, 0x6f, 0x64, 0x65, 0x54, - 0x79, 0x70, 0x65, 0x52, 0x08, 0x6e, 0x6f, 0x64, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1b, 0x0a, - 0x09, 0x6e, 0x6f, 0x64, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x08, 0x6e, 0x6f, 0x64, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x68, 0x74, - 0x74, 0x70, 0x5f, 0x70, 0x6f, 0x72, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x08, 0x68, - 0x74, 0x74, 0x70, 0x50, 0x6f, 0x72, 0x74, 0x12, 0x25, 0x0a, 0x0e, 0x62, 0x69, 0x6e, 0x61, 0x72, - 0x79, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x0d, 0x62, 0x69, 0x6e, 0x61, 0x72, 0x79, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0xd0, - 0x01, 0x0a, 0x08, 0x50, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x12, 0x16, 0x0a, 0x06, 0x74, - 0x72, 0x61, 0x63, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x74, 0x72, 0x61, - 0x63, 0x65, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x16, 0x0a, 
0x06, 0x73, 0x74, 0x61, 0x74, 0x75, - 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, - 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, - 0x65, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x14, 0x0a, 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x05, - 0x20, 0x01, 0x28, 0x03, 0x52, 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x74, - 0x6f, 0x74, 0x61, 0x6c, 0x18, 0x06, 0x20, 0x01, 0x28, 0x03, 0x52, 0x05, 0x74, 0x6f, 0x74, 0x61, - 0x6c, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, - 0x07, 0x20, 0x01, 0x28, 0x03, 0x52, 0x09, 0x73, 0x74, 0x61, 0x72, 0x74, 0x54, 0x69, 0x6d, 0x65, - 0x12, 0x1f, 0x0a, 0x0b, 0x66, 0x69, 0x6e, 0x69, 0x73, 0x68, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, - 0x08, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0a, 0x66, 0x69, 0x6e, 0x69, 0x73, 0x68, 0x54, 0x69, 0x6d, - 0x65, 0x22, 0x45, 0x0a, 0x13, 0x50, 0x75, 0x73, 0x68, 0x50, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, - 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2e, 0x0a, 0x08, 0x70, 0x72, 0x6f, 0x67, - 0x72, 0x65, 0x73, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x50, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x52, 0x08, - 0x70, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x22, 0x16, 0x0a, 0x14, 0x50, 0x75, 0x73, 0x68, - 0x50, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, - 0x2a, 0x3a, 0x0a, 0x08, 0x4e, 0x6f, 0x64, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x0e, 0x0a, 0x0a, - 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x4e, 0x6f, 0x64, 0x65, 0x10, 0x00, 0x12, 0x0e, 0x0a, 0x0a, - 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x4e, 0x6f, 0x64, 0x65, 0x10, 0x01, 0x12, 0x0e, 0x0a, 0x0a, - 0x43, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x4e, 0x6f, 0x64, 0x65, 0x10, 0x02, 0x32, 0x8c, 0x02, 0x0a, - 0x06, 0x4d, 0x61, 0x73, 0x74, 0x65, 0x72, 0x12, 0x2f, 0x0a, 0x07, 0x47, 0x65, 0x74, 0x4d, 0x65, - 
0x74, 0x61, 0x12, 0x12, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x4e, 0x6f, - 0x64, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x1a, 0x0e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, - 0x6c, 0x2e, 0x4d, 0x65, 0x74, 0x61, 0x22, 0x00, 0x12, 0x40, 0x0a, 0x0f, 0x47, 0x65, 0x74, 0x52, - 0x61, 0x6e, 0x6b, 0x69, 0x6e, 0x67, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x12, 0x15, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x6e, - 0x66, 0x6f, 0x1a, 0x12, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x46, 0x72, - 0x61, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x22, 0x00, 0x30, 0x01, 0x12, 0x3e, 0x0a, 0x0d, 0x47, 0x65, - 0x74, 0x43, 0x6c, 0x69, 0x63, 0x6b, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x12, 0x15, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x6e, - 0x66, 0x6f, 0x1a, 0x12, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x46, 0x72, - 0x61, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x22, 0x00, 0x30, 0x01, 0x12, 0x4f, 0x0a, 0x0c, 0x50, 0x75, - 0x73, 0x68, 0x50, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x12, 0x1d, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x50, 0x75, 0x73, 0x68, 0x50, 0x72, 0x6f, 0x67, 0x72, 0x65, - 0x73, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x50, 0x75, 0x73, 0x68, 0x50, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, - 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, 0x25, 0x5a, 0x23, 0x67, - 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x7a, 0x68, 0x65, 0x6e, 0x67, 0x68, - 0x61, 0x6f, 0x7a, 0x2f, 0x67, 0x6f, 0x72, 0x73, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, - 0x6f, 0x6c, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x61, 0x6d, 0x70, 0x12, 0x16, 0x0a, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x18, 0x06, 0x20, + 0x01, 0x28, 0x0c, 0x52, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x12, 
0x18, 0x0a, 0x07, 0x63, + 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x63, 0x6f, + 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x22, 0xd3, 0x01, 0x0a, 0x08, 0x46, 0x65, 0x65, 0x64, 0x62, 0x61, + 0x63, 0x6b, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, + 0x12, 0x23, 0x0a, 0x0d, 0x66, 0x65, 0x65, 0x64, 0x62, 0x61, 0x63, 0x6b, 0x5f, 0x74, 0x79, 0x70, + 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x66, 0x65, 0x65, 0x64, 0x62, 0x61, 0x63, + 0x6b, 0x54, 0x79, 0x70, 0x65, 0x12, 0x17, 0x0a, 0x07, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x64, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x75, 0x73, 0x65, 0x72, 0x49, 0x64, 0x12, 0x17, + 0x0a, 0x07, 0x69, 0x74, 0x65, 0x6d, 0x5f, 0x69, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x06, 0x69, 0x74, 0x65, 0x6d, 0x49, 0x64, 0x12, 0x38, 0x0a, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, + 0x74, 0x61, 0x6d, 0x70, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, + 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, + 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, + 0x70, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x18, 0x06, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x22, 0xc6, 0x01, 0x0a, 0x04, + 0x4d, 0x65, 0x74, 0x61, 0x12, 0x16, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x32, 0x0a, 0x15, + 0x72, 0x61, 0x6e, 0x6b, 0x69, 0x6e, 0x67, 0x5f, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x5f, 0x76, 0x65, + 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x13, 0x72, 0x61, 0x6e, + 0x6b, 0x69, 0x6e, 0x67, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, + 0x12, 0x2e, 
0x0a, 0x13, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x5f, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x5f, + 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x03, 0x52, 0x11, 0x63, + 0x6c, 0x69, 0x63, 0x6b, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, + 0x12, 0x0e, 0x0a, 0x02, 0x6d, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x6d, 0x65, + 0x12, 0x18, 0x0a, 0x07, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, + 0x09, 0x52, 0x07, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x73, 0x12, 0x18, 0x0a, 0x07, 0x77, 0x6f, + 0x72, 0x6b, 0x65, 0x72, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x09, 0x52, 0x07, 0x77, 0x6f, 0x72, + 0x6b, 0x65, 0x72, 0x73, 0x22, 0x1e, 0x0a, 0x08, 0x46, 0x72, 0x61, 0x67, 0x6d, 0x65, 0x6e, 0x74, + 0x12, 0x12, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, + 0x64, 0x61, 0x74, 0x61, 0x22, 0x27, 0x0a, 0x0b, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x49, + 0x6e, 0x66, 0x6f, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x03, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0x9c, 0x01, + 0x0a, 0x08, 0x4e, 0x6f, 0x64, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x2f, 0x0a, 0x09, 0x6e, 0x6f, + 0x64, 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x12, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x4e, 0x6f, 0x64, 0x65, 0x54, 0x79, 0x70, + 0x65, 0x52, 0x08, 0x6e, 0x6f, 0x64, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x6e, + 0x6f, 0x64, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, + 0x6e, 0x6f, 0x64, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x68, 0x74, 0x74, 0x70, + 0x5f, 0x70, 0x6f, 0x72, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x08, 0x68, 0x74, 0x74, + 0x70, 0x50, 0x6f, 0x72, 0x74, 0x12, 0x25, 0x0a, 0x0e, 0x62, 0x69, 0x6e, 0x61, 0x72, 0x79, 0x5f, + 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 
0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x62, + 0x69, 0x6e, 0x61, 0x72, 0x79, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0xd0, 0x01, 0x0a, + 0x08, 0x50, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x12, 0x16, 0x0a, 0x06, 0x74, 0x72, 0x61, + 0x63, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x74, 0x72, 0x61, 0x63, 0x65, + 0x72, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x14, 0x0a, + 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, + 0x72, 0x6f, 0x72, 0x12, 0x14, 0x0a, 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x05, 0x20, 0x01, + 0x28, 0x03, 0x52, 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x74, 0x6f, 0x74, + 0x61, 0x6c, 0x18, 0x06, 0x20, 0x01, 0x28, 0x03, 0x52, 0x05, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x12, + 0x1d, 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x07, 0x20, + 0x01, 0x28, 0x03, 0x52, 0x09, 0x73, 0x74, 0x61, 0x72, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x1f, + 0x0a, 0x0b, 0x66, 0x69, 0x6e, 0x69, 0x73, 0x68, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x08, 0x20, + 0x01, 0x28, 0x03, 0x52, 0x0a, 0x66, 0x69, 0x6e, 0x69, 0x73, 0x68, 0x54, 0x69, 0x6d, 0x65, 0x22, + 0x45, 0x0a, 0x13, 0x50, 0x75, 0x73, 0x68, 0x50, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2e, 0x0a, 0x08, 0x70, 0x72, 0x6f, 0x67, 0x72, 0x65, + 0x73, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x63, 0x6f, 0x6c, 0x2e, 0x50, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x52, 0x08, 0x70, 0x72, + 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x22, 0x16, 0x0a, 0x14, 0x50, 0x75, 0x73, 0x68, 0x50, 0x72, + 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 
0x22, 0x0d, + 0x0a, 0x0b, 0x50, 0x69, 0x6e, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x0e, 0x0a, + 0x0c, 0x50, 0x69, 0x6e, 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2a, 0x3a, 0x0a, + 0x08, 0x4e, 0x6f, 0x64, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x0e, 0x0a, 0x0a, 0x53, 0x65, 0x72, + 0x76, 0x65, 0x72, 0x4e, 0x6f, 0x64, 0x65, 0x10, 0x00, 0x12, 0x0e, 0x0a, 0x0a, 0x57, 0x6f, 0x72, + 0x6b, 0x65, 0x72, 0x4e, 0x6f, 0x64, 0x65, 0x10, 0x01, 0x12, 0x0e, 0x0a, 0x0a, 0x43, 0x6c, 0x69, + 0x65, 0x6e, 0x74, 0x4e, 0x6f, 0x64, 0x65, 0x10, 0x02, 0x32, 0x8c, 0x02, 0x0a, 0x06, 0x4d, 0x61, + 0x73, 0x74, 0x65, 0x72, 0x12, 0x2f, 0x0a, 0x07, 0x47, 0x65, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x12, + 0x12, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x4e, 0x6f, 0x64, 0x65, 0x49, + 0x6e, 0x66, 0x6f, 0x1a, 0x0e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x4d, + 0x65, 0x74, 0x61, 0x22, 0x00, 0x12, 0x40, 0x0a, 0x0f, 0x47, 0x65, 0x74, 0x52, 0x61, 0x6e, 0x6b, + 0x69, 0x6e, 0x67, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x12, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x63, 0x6f, 0x6c, 0x2e, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x6e, 0x66, 0x6f, 0x1a, + 0x12, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x46, 0x72, 0x61, 0x67, 0x6d, + 0x65, 0x6e, 0x74, 0x22, 0x00, 0x30, 0x01, 0x12, 0x3e, 0x0a, 0x0d, 0x47, 0x65, 0x74, 0x43, 0x6c, + 0x69, 0x63, 0x6b, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x12, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x63, 0x6f, 0x6c, 0x2e, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x6e, 0x66, 0x6f, 0x1a, + 0x12, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x46, 0x72, 0x61, 0x67, 0x6d, + 0x65, 0x6e, 0x74, 0x22, 0x00, 0x30, 0x01, 0x12, 0x4f, 0x0a, 0x0c, 0x50, 0x75, 0x73, 0x68, 0x50, + 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x12, 0x1d, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, + 0x6f, 0x6c, 0x2e, 0x50, 0x75, 0x73, 0x68, 0x50, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x52, + 0x65, 0x71, 0x75, 0x65, 
0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, + 0x6c, 0x2e, 0x50, 0x75, 0x73, 0x68, 0x50, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, 0x25, 0x5a, 0x23, 0x67, 0x69, 0x74, 0x68, + 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x7a, 0x68, 0x65, 0x6e, 0x67, 0x68, 0x61, 0x6f, 0x7a, + 0x2f, 0x67, 0x6f, 0x72, 0x73, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x62, + 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -874,7 +958,7 @@ func file_protocol_proto_rawDescGZIP() []byte { } var file_protocol_proto_enumTypes = make([]protoimpl.EnumInfo, 1) -var file_protocol_proto_msgTypes = make([]protoimpl.MessageInfo, 10) +var file_protocol_proto_msgTypes = make([]protoimpl.MessageInfo, 12) var file_protocol_proto_goTypes = []any{ (NodeType)(0), // 0: protocol.NodeType (*User)(nil), // 1: protocol.User @@ -887,11 +971,13 @@ var file_protocol_proto_goTypes = []any{ (*Progress)(nil), // 8: protocol.Progress (*PushProgressRequest)(nil), // 9: protocol.PushProgressRequest (*PushProgressResponse)(nil), // 10: protocol.PushProgressResponse - (*timestamppb.Timestamp)(nil), // 11: google.protobuf.Timestamp + (*PingRequest)(nil), // 11: protocol.PingRequest + (*PingResponse)(nil), // 12: protocol.PingResponse + (*timestamppb.Timestamp)(nil), // 13: google.protobuf.Timestamp } var file_protocol_proto_depIdxs = []int32{ - 11, // 0: protocol.Item.timestamp:type_name -> google.protobuf.Timestamp - 11, // 1: protocol.Feedback.timestamp:type_name -> google.protobuf.Timestamp + 13, // 0: protocol.Item.timestamp:type_name -> google.protobuf.Timestamp + 13, // 1: protocol.Feedback.timestamp:type_name -> google.protobuf.Timestamp 0, // 2: protocol.NodeInfo.node_type:type_name -> protocol.NodeType 8, // 3: protocol.PushProgressRequest.progress:type_name -> protocol.Progress 7, // 4: protocol.Master.GetMeta:input_type -> protocol.NodeInfo @@ -920,7 +1006,7 @@ func file_protocol_proto_init() { 
GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_protocol_proto_rawDesc, NumEnums: 1, - NumMessages: 10, + NumMessages: 12, NumExtensions: 0, NumServices: 1, }, diff --git a/protocol/protocol.proto b/protocol/protocol.proto index cd1779e56..486e1c27b 100644 --- a/protocol/protocol.proto +++ b/protocol/protocol.proto @@ -11,6 +11,7 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. + syntax = "proto3"; option go_package = "github.com/zhenghaoz/gorse/protocol"; @@ -23,6 +24,7 @@ message User { string user_id = 1; bytes labels = 2; string comment = 3; + repeated string subscribe = 4; } message Item { @@ -103,3 +105,7 @@ message PushProgressRequest { } message PushProgressResponse {} + +message PingRequest {} + +message PingResponse {} diff --git a/protocol/task.go b/protocol/task.go index b35c563e7..0d4f37f82 100644 --- a/protocol/task.go +++ b/protocol/task.go @@ -20,6 +20,8 @@ import ( "github.com/zhenghaoz/gorse/base/progress" ) +//go:generate protoc --go_out=. --go_opt=paths=source_relative --go-grpc_out=. --go-grpc_opt=paths=source_relative cache_store.proto +//go:generate protoc --go_out=. --go_opt=paths=source_relative --go-grpc_out=. --go-grpc_opt=paths=source_relative data_store.proto //go:generate protoc --go_out=. --go_opt=paths=source_relative --go-grpc_out=. --go-grpc_opt=paths=source_relative protocol.proto func DecodeProgress(in *PushProgressRequest) []progress.Progress { diff --git a/storage/cache/proxy.go b/storage/cache/proxy.go new file mode 100644 index 000000000..d79b3aad4 --- /dev/null +++ b/storage/cache/proxy.go @@ -0,0 +1,442 @@ +// Copyright 2024 gorse Project Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package cache + +import ( + "context" + "github.com/juju/errors" + "github.com/samber/lo" + "github.com/zhenghaoz/gorse/protocol" + "google.golang.org/grpc" + "google.golang.org/protobuf/proto" + "google.golang.org/protobuf/types/known/timestamppb" + "io" + "net" + "time" +) + +type ProxyServer struct { + protocol.UnimplementedCacheStoreServer + database Database + server *grpc.Server +} + +func NewProxyServer(database Database) *ProxyServer { + return &ProxyServer{database: database} +} + +func (p *ProxyServer) Serve(lis net.Listener) error { + p.server = grpc.NewServer() + protocol.RegisterCacheStoreServer(p.server, p) + return p.server.Serve(lis) +} + +func (p *ProxyServer) Stop() { + p.server.Stop() +} + +func (p *ProxyServer) Ping(context.Context, *protocol.PingRequest) (*protocol.PingResponse, error) { + return &protocol.PingResponse{}, p.database.Ping() +} + +func (p *ProxyServer) Get(ctx context.Context, request *protocol.GetRequest) (*protocol.GetResponse, error) { + value := p.database.Get(ctx, request.GetName()) + if errors.Is(value.err, errors.NotFound) { + return &protocol.GetResponse{}, nil + } + return &protocol.GetResponse{Value: proto.String(value.value)}, value.err +} + +func (p *ProxyServer) Set(ctx context.Context, request *protocol.SetRequest) (*protocol.SetResponse, error) { + values := make([]Value, len(request.Values)) + for i, value := range request.Values { + values[i] = Value{ + name: value.GetName(), + value: value.GetValue(), + } + } + return &protocol.SetResponse{}, p.database.Set(ctx, values...) 
+} + +func (p *ProxyServer) Delete(ctx context.Context, request *protocol.DeleteRequest) (*protocol.DeleteResponse, error) { + return &protocol.DeleteResponse{}, p.database.Delete(ctx, request.GetName()) +} + +func (p *ProxyServer) GetSet(ctx context.Context, request *protocol.GetSetRequest) (*protocol.GetSetResponse, error) { + members, err := p.database.GetSet(ctx, request.GetKey()) + if err != nil { + return nil, err + } + return &protocol.GetSetResponse{Members: members}, nil +} + +func (p *ProxyServer) SetSet(ctx context.Context, request *protocol.SetSetRequest) (*protocol.SetSetResponse, error) { + return &protocol.SetSetResponse{}, p.database.SetSet(ctx, request.GetKey(), request.GetMembers()...) +} + +func (p *ProxyServer) AddSet(ctx context.Context, request *protocol.AddSetRequest) (*protocol.AddSetResponse, error) { + return &protocol.AddSetResponse{}, p.database.AddSet(ctx, request.GetKey(), request.GetMembers()...) +} + +func (p *ProxyServer) RemSet(ctx context.Context, request *protocol.RemSetRequest) (*protocol.RemSetResponse, error) { + return &protocol.RemSetResponse{}, p.database.RemSet(ctx, request.GetKey(), request.GetMembers()...) 
+} + +func (p *ProxyServer) Push(ctx context.Context, request *protocol.PushRequest) (*protocol.PushResponse, error) { + return &protocol.PushResponse{}, p.database.Push(ctx, request.GetName(), request.GetValue()) +} + +func (p *ProxyServer) Pop(ctx context.Context, request *protocol.PopRequest) (*protocol.PopResponse, error) { + value, err := p.database.Pop(ctx, request.GetName()) + if err != nil { + if errors.Is(err, io.EOF) { + return &protocol.PopResponse{}, nil + } + return nil, err + } + return &protocol.PopResponse{Value: proto.String(value)}, nil +} + +func (p *ProxyServer) Remain(ctx context.Context, request *protocol.RemainRequest) (*protocol.RemainResponse, error) { + count, err := p.database.Remain(ctx, request.GetName()) + if err != nil { + return nil, err + } + return &protocol.RemainResponse{Count: count}, nil +} + +func (p *ProxyServer) AddScores(ctx context.Context, request *protocol.AddScoresRequest) (*protocol.AddScoresResponse, error) { + scores := make([]Score, len(request.Documents)) + for i, doc := range request.Documents { + scores[i] = Score{ + Id: doc.GetId(), + Score: doc.GetScore(), + IsHidden: doc.GetIsHidden(), + Categories: doc.GetCategories(), + Timestamp: doc.GetTimestamp().AsTime(), + } + } + return &protocol.AddScoresResponse{}, p.database.AddScores(ctx, request.GetCollection(), request.GetSubset(), scores) +} + +func (p *ProxyServer) SearchScores(ctx context.Context, request *protocol.SearchScoresRequest) (*protocol.SearchScoresResponse, error) { + resp, err := p.database.SearchScores(ctx, request.GetCollection(), request.GetSubset(), request.GetQuery(), int(request.GetBegin()), int(request.GetEnd())) + if err != nil { + return nil, err + } + scores := make([]*protocol.Score, len(resp)) + for i, score := range resp { + scores[i] = &protocol.Score{ + Id: score.Id, + Score: score.Score, + IsHidden: score.IsHidden, + Categories: score.Categories, + Timestamp: timestamppb.New(score.Timestamp), + } + } + return 
&protocol.SearchScoresResponse{Documents: scores}, nil +} + +func (p *ProxyServer) DeleteScores(ctx context.Context, request *protocol.DeleteScoresRequest) (*protocol.DeleteScoresResponse, error) { + var before *time.Time + if request.Condition.Before != nil { + before = lo.ToPtr(request.Condition.Before.AsTime()) + } + return &protocol.DeleteScoresResponse{}, p.database.DeleteScores(ctx, request.GetCollection(), ScoreCondition{ + Subset: request.Condition.Subset, + Id: request.Condition.Id, + Before: before, + }) +} + +func (p *ProxyServer) UpdateScores(ctx context.Context, request *protocol.UpdateScoresRequest) (*protocol.UpdateScoresResponse, error) { + return &protocol.UpdateScoresResponse{}, p.database.UpdateScores(ctx, request.GetCollection(), request.GetId(), ScorePatch{ + IsHidden: request.GetPatch().IsHidden, + Categories: request.GetPatch().Categories, + Score: request.GetPatch().Score, + }) +} + +func (p *ProxyServer) AddTimeSeriesPoints(ctx context.Context, request *protocol.AddTimeSeriesPointsRequest) (*protocol.AddTimeSeriesPointsResponse, error) { + points := make([]TimeSeriesPoint, len(request.Points)) + for i, point := range request.Points { + points[i] = TimeSeriesPoint{ + Name: point.Name, + Timestamp: point.Timestamp.AsTime(), + Value: point.Value, + } + } + return &protocol.AddTimeSeriesPointsResponse{}, p.database.AddTimeSeriesPoints(ctx, points) +} + +func (p *ProxyServer) GetTimeSeriesPoints(ctx context.Context, request *protocol.GetTimeSeriesPointsRequest) (*protocol.GetTimeSeriesPointsResponse, error) { + resp, err := p.database.GetTimeSeriesPoints(ctx, request.GetName(), request.GetBegin().AsTime(), request.GetEnd().AsTime()) + if err != nil { + return nil, err + } + points := make([]*protocol.TimeSeriesPoint, len(resp)) + for i, point := range resp { + points[i] = &protocol.TimeSeriesPoint{ + Name: point.Name, + Timestamp: timestamppb.New(point.Timestamp), + Value: point.Value, + } + } + return 
&protocol.GetTimeSeriesPointsResponse{Points: points}, nil +} + +type ProxyClient struct { + *grpc.ClientConn + protocol.CacheStoreClient +} + +func (p ProxyClient) Ping() error { + _, err := p.CacheStoreClient.Ping(context.Background(), &protocol.PingRequest{}) + return err +} + +func (p ProxyClient) Init() error { + return errors.MethodNotAllowedf("init is not allowed in proxy client") +} + +func (p ProxyClient) Scan(_ func(string) error) error { + return errors.MethodNotAllowedf("scan is not allowed in proxy client") +} + +func (p ProxyClient) Purge() error { + return errors.MethodNotAllowedf("purge is not allowed in proxy client") +} + +func (p ProxyClient) Set(ctx context.Context, values ...Value) error { + pbValues := make([]*protocol.Value, len(values)) + for i, value := range values { + pbValues[i] = &protocol.Value{ + Name: value.name, + Value: value.value, + } + } + _, err := p.CacheStoreClient.Set(ctx, &protocol.SetRequest{ + Values: pbValues, + }) + return err +} + +func (p ProxyClient) Get(ctx context.Context, name string) *ReturnValue { + resp, err := p.CacheStoreClient.Get(ctx, &protocol.GetRequest{ + Name: name, + }) + if err != nil { + return &ReturnValue{err: err} + } + if resp.Value == nil { + return &ReturnValue{err: errors.NotFound} + } + return &ReturnValue{value: resp.GetValue(), err: err} +} + +func (p ProxyClient) Delete(ctx context.Context, name string) error { + _, err := p.CacheStoreClient.Delete(ctx, &protocol.DeleteRequest{ + Name: name, + }) + return err +} + +func (p ProxyClient) GetSet(ctx context.Context, key string) ([]string, error) { + resp, err := p.CacheStoreClient.GetSet(ctx, &protocol.GetSetRequest{ + Key: key, + }) + if err != nil { + return nil, err + } + return resp.Members, nil +} + +func (p ProxyClient) SetSet(ctx context.Context, key string, members ...string) error { + _, err := p.CacheStoreClient.SetSet(ctx, &protocol.SetSetRequest{ + Key: key, + Members: members, + }) + return err +} + +func (p ProxyClient) 
AddSet(ctx context.Context, key string, members ...string) error { + _, err := p.CacheStoreClient.AddSet(ctx, &protocol.AddSetRequest{ + Key: key, + Members: members, + }) + return err +} + +func (p ProxyClient) RemSet(ctx context.Context, key string, members ...string) error { + _, err := p.CacheStoreClient.RemSet(ctx, &protocol.RemSetRequest{ + Key: key, + Members: members, + }) + return err +} + +func (p ProxyClient) Push(ctx context.Context, name, value string) error { + _, err := p.CacheStoreClient.Push(ctx, &protocol.PushRequest{ + Name: name, + Value: value, + }) + return err +} + +func (p ProxyClient) Pop(ctx context.Context, name string) (string, error) { + resp, err := p.CacheStoreClient.Pop(ctx, &protocol.PopRequest{ + Name: name, + }) + if err != nil { + return "", err + } + if resp.Value == nil { + return "", io.EOF + } + return resp.GetValue(), nil +} + +func (p ProxyClient) Remain(ctx context.Context, name string) (int64, error) { + resp, err := p.CacheStoreClient.Remain(ctx, &protocol.RemainRequest{ + Name: name, + }) + if err != nil { + return 0, err + } + return resp.Count, nil +} + +func (p ProxyClient) AddScores(ctx context.Context, collection, subset string, documents []Score) error { + scores := make([]*protocol.Score, len(documents)) + for i, doc := range documents { + scores[i] = &protocol.Score{ + Id: doc.Id, + Score: doc.Score, + IsHidden: doc.IsHidden, + Categories: doc.Categories, + Timestamp: timestamppb.New(doc.Timestamp), + } + } + _, err := p.CacheStoreClient.AddScores(ctx, &protocol.AddScoresRequest{ + Collection: collection, + Subset: subset, + Documents: scores, + }) + return err +} + +func (p ProxyClient) SearchScores(ctx context.Context, collection, subset string, query []string, begin, end int) ([]Score, error) { + resp, err := p.CacheStoreClient.SearchScores(ctx, &protocol.SearchScoresRequest{ + Collection: collection, + Subset: subset, + Query: query, + Begin: int32(begin), + End: int32(end), + }) + if err != nil { + return 
nil, err + } + scores := make([]Score, len(resp.Documents)) + for i, score := range resp.Documents { + scores[i] = Score{ + Id: score.Id, + Score: score.Score, + IsHidden: score.IsHidden, + Categories: score.Categories, + Timestamp: score.Timestamp.AsTime(), + } + } + return scores, nil +} + +func (p ProxyClient) DeleteScores(ctx context.Context, collection []string, condition ScoreCondition) error { + if err := condition.Check(); err != nil { + return errors.Trace(err) + } + var before *timestamppb.Timestamp + if condition.Before != nil { + before = timestamppb.New(*condition.Before) + } + _, err := p.CacheStoreClient.DeleteScores(ctx, &protocol.DeleteScoresRequest{ + Collection: collection, + Condition: &protocol.ScoreCondition{ + Subset: condition.Subset, + Id: condition.Id, + Before: before, + }, + }) + return err +} + +func (p ProxyClient) UpdateScores(ctx context.Context, collection []string, id string, patch ScorePatch) error { + _, err := p.CacheStoreClient.UpdateScores(ctx, &protocol.UpdateScoresRequest{ + Collection: collection, + Id: id, + Patch: &protocol.ScorePatch{ + Score: patch.Score, + IsHidden: patch.IsHidden, + Categories: patch.Categories, + }, + }) + return err +} + +func (p ProxyClient) AddTimeSeriesPoints(ctx context.Context, points []TimeSeriesPoint) error { + pbPoints := make([]*protocol.TimeSeriesPoint, len(points)) + for i, point := range points { + pbPoints[i] = &protocol.TimeSeriesPoint{ + Name: point.Name, + Timestamp: timestamppb.New(point.Timestamp), + Value: point.Value, + } + } + _, err := p.CacheStoreClient.AddTimeSeriesPoints(ctx, &protocol.AddTimeSeriesPointsRequest{ + Points: pbPoints, + }) + return err +} + +func (p ProxyClient) GetTimeSeriesPoints(ctx context.Context, name string, begin, end time.Time) ([]TimeSeriesPoint, error) { + resp, err := p.CacheStoreClient.GetTimeSeriesPoints(ctx, &protocol.GetTimeSeriesPointsRequest{ + Name: name, + Begin: timestamppb.New(begin), + End: timestamppb.New(end), + }) + if err != nil { + 
return nil, err + } + points := make([]TimeSeriesPoint, len(resp.Points)) + for i, point := range resp.Points { + points[i] = TimeSeriesPoint{ + Name: point.Name, + Timestamp: point.Timestamp.AsTime(), + Value: point.Value, + } + } + return points, nil +} + +func OpenProxyClient(address string) (*ProxyClient, error) { + // Create gRPC connection + conn, err := grpc.Dial(address, grpc.WithInsecure()) + if err != nil { + return nil, err + } + // Create client + return &ProxyClient{ + ClientConn: conn, + CacheStoreClient: protocol.NewCacheStoreClient(conn), + }, nil +} diff --git a/storage/cache/proxy_test.go b/storage/cache/proxy_test.go new file mode 100644 index 000000000..01af74d73 --- /dev/null +++ b/storage/cache/proxy_test.go @@ -0,0 +1,84 @@ +// Copyright 2024 gorse Project Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package cache + +import ( + "fmt" + "github.com/stretchr/testify/suite" + "net" + "testing" +) + +type ProxyTestSuite struct { + baseTestSuite + SQLite Database + Server *ProxyServer +} + +func (suite *ProxyTestSuite) SetupSuite() { + // create database + var err error + path := fmt.Sprintf("sqlite://%s/sqlite.db", suite.T().TempDir()) + suite.SQLite, err = Open(path, "gorse_") + suite.NoError(err) + // create schema + err = suite.SQLite.Init() + suite.NoError(err) + // start server + lis, err := net.Listen("tcp", "localhost:0") + suite.NoError(err) + suite.Server = NewProxyServer(suite.SQLite) + go func() { + err = suite.Server.Serve(lis) + suite.NoError(err) + }() + // create proxy + suite.Database, err = OpenProxyClient(lis.Addr().String()) + suite.NoError(err) +} + +func (suite *ProxyTestSuite) TearDownSuite() { + suite.Server.Stop() + suite.NoError(suite.Database.Close()) + suite.NoError(suite.SQLite.Close()) +} + +func (suite *ProxyTestSuite) SetupTest() { + err := suite.SQLite.Ping() + suite.NoError(err) + err = suite.SQLite.Purge() + suite.NoError(err) +} + +func (suite *ProxyTestSuite) TearDownTest() { + err := suite.SQLite.Purge() + suite.NoError(err) +} + +func (suite *ProxyTestSuite) TestInit() { + suite.T().Skip() +} + +func (suite *ProxyTestSuite) TestPurge() { + suite.T().Skip() +} + +func (suite *ProxyTestSuite) TestScan() { + suite.T().Skip() +} + +func TestProxy(t *testing.T) { + suite.Run(t, new(ProxyTestSuite)) +} diff --git a/storage/data/proxy.go b/storage/data/proxy.go new file mode 100644 index 000000000..12eaf29c0 --- /dev/null +++ b/storage/data/proxy.go @@ -0,0 +1,982 @@ +// Copyright 2024 gorse Project Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package data + +import ( + "context" + "encoding/json" + "github.com/juju/errors" + "github.com/samber/lo" + "github.com/zhenghaoz/gorse/protocol" + "google.golang.org/grpc" + "google.golang.org/protobuf/types/known/timestamppb" + "io" + "net" + "time" +) + +type ProxyServer struct { + protocol.UnimplementedDataStoreServer + database Database + server *grpc.Server +} + +func NewProxyServer(database Database) *ProxyServer { + return &ProxyServer{database: database} +} + +func (p *ProxyServer) Serve(lis net.Listener) error { + p.server = grpc.NewServer() + protocol.RegisterDataStoreServer(p.server, p) + return p.server.Serve(lis) +} + +func (p *ProxyServer) Stop() { + p.server.Stop() +} + +func (p *ProxyServer) Ping(_ context.Context, _ *protocol.PingRequest) (*protocol.PingResponse, error) { + return &protocol.PingResponse{}, p.database.Ping() +} + +func (p *ProxyServer) BatchInsertItems(ctx context.Context, in *protocol.BatchInsertItemsRequest) (*protocol.BatchInsertItemsResponse, error) { + items := make([]Item, len(in.Items)) + for i, item := range in.Items { + var labels any + err := json.Unmarshal(item.Labels, &labels) + if err != nil { + return nil, err + } + items[i] = Item{ + ItemId: item.ItemId, + IsHidden: item.IsHidden, + Categories: item.Categories, + Timestamp: item.Timestamp.AsTime(), + Labels: labels, + Comment: item.Comment, + } + } + err := p.database.BatchInsertItems(ctx, items) + return &protocol.BatchInsertItemsResponse{}, err +} + +func (p *ProxyServer) BatchGetItems(ctx context.Context, in *protocol.BatchGetItemsRequest) 
(*protocol.BatchGetItemsResponse, error) { + items, err := p.database.BatchGetItems(ctx, in.ItemIds) + if err != nil { + return nil, err + } + pbItems := make([]*protocol.Item, len(items)) + for i, item := range items { + labels, err := json.Marshal(item.Labels) + if err != nil { + return nil, err + } + pbItems[i] = &protocol.Item{ + ItemId: item.ItemId, + IsHidden: item.IsHidden, + Categories: item.Categories, + Timestamp: timestamppb.New(item.Timestamp), + Labels: labels, + Comment: item.Comment, + } + } + return &protocol.BatchGetItemsResponse{Items: pbItems}, nil +} + +func (p *ProxyServer) DeleteItem(ctx context.Context, in *protocol.DeleteItemRequest) (*protocol.DeleteItemResponse, error) { + err := p.database.DeleteItem(ctx, in.ItemId) + return &protocol.DeleteItemResponse{}, err +} + +func (p *ProxyServer) GetItem(ctx context.Context, in *protocol.GetItemRequest) (*protocol.GetItemResponse, error) { + item, err := p.database.GetItem(ctx, in.ItemId) + if err != nil { + if errors.Is(err, errors.NotFound) { + return &protocol.GetItemResponse{}, nil + } + return nil, err + } + labels, err := json.Marshal(item.Labels) + if err != nil { + return nil, err + } + return &protocol.GetItemResponse{ + Item: &protocol.Item{ + ItemId: item.ItemId, + IsHidden: item.IsHidden, + Categories: item.Categories, + Timestamp: timestamppb.New(item.Timestamp), + Labels: labels, + Comment: item.Comment, + }, + }, nil +} + +func (p *ProxyServer) ModifyItem(ctx context.Context, in *protocol.ModifyItemRequest) (*protocol.ModifyItemResponse, error) { + var labels any + if in.Patch.Labels != nil { + err := json.Unmarshal(in.Patch.Labels, &labels) + if err != nil { + return nil, err + } + } + var timestamp *time.Time + if in.Patch.Timestamp != nil { + timestamp = lo.ToPtr(in.Patch.Timestamp.AsTime()) + } + err := p.database.ModifyItem(ctx, in.ItemId, ItemPatch{ + IsHidden: in.Patch.IsHidden, + Categories: in.Patch.Categories, + Labels: labels, + Comment: in.Patch.Comment, + Timestamp: 
timestamp, + }) + return &protocol.ModifyItemResponse{}, err +} + +func (p *ProxyServer) GetItems(ctx context.Context, in *protocol.GetItemsRequest) (*protocol.GetItemsResponse, error) { + var beginTime *time.Time + if in.BeginTime != nil { + beginTime = lo.ToPtr(in.BeginTime.AsTime()) + } + cursor, items, err := p.database.GetItems(ctx, in.Cursor, int(in.N), beginTime) + if err != nil { + return nil, err + } + pbItems := make([]*protocol.Item, len(items)) + for i, item := range items { + labels, err := json.Marshal(item.Labels) + if err != nil { + return nil, err + } + pbItems[i] = &protocol.Item{ + ItemId: item.ItemId, + IsHidden: item.IsHidden, + Categories: item.Categories, + Timestamp: timestamppb.New(item.Timestamp), + Labels: labels, + Comment: item.Comment, + } + } + return &protocol.GetItemsResponse{Cursor: cursor, Items: pbItems}, nil +} + +func (p *ProxyServer) GetItemFeedback(ctx context.Context, in *protocol.GetItemFeedbackRequest) (*protocol.GetFeedbackResponse, error) { + feedback, err := p.database.GetItemFeedback(ctx, in.ItemId, in.FeedbackTypes...) 
+ if err != nil { + return nil, err + } + pbFeedback := make([]*protocol.Feedback, len(feedback)) + for i, f := range feedback { + pbFeedback[i] = &protocol.Feedback{ + FeedbackType: f.FeedbackType, + UserId: f.UserId, + ItemId: f.ItemId, + Timestamp: timestamppb.New(f.Timestamp), + Comment: f.Comment, + } + } + return &protocol.GetFeedbackResponse{Feedback: pbFeedback}, nil +} + +func (p *ProxyServer) BatchInsertUsers(ctx context.Context, in *protocol.BatchInsertUsersRequest) (*protocol.BatchInsertUsersResponse, error) { + users := make([]User, len(in.Users)) + for i, user := range in.Users { + var labels any + err := json.Unmarshal(user.Labels, &labels) + if err != nil { + return nil, err + } + users[i] = User{ + UserId: user.UserId, + Labels: labels, + Comment: user.Comment, + Subscribe: user.Subscribe, + } + } + err := p.database.BatchInsertUsers(ctx, users) + return &protocol.BatchInsertUsersResponse{}, err +} + +func (p *ProxyServer) DeleteUser(ctx context.Context, in *protocol.DeleteUserRequest) (*protocol.DeleteUserResponse, error) { + err := p.database.DeleteUser(ctx, in.UserId) + return &protocol.DeleteUserResponse{}, err +} + +func (p *ProxyServer) GetUser(ctx context.Context, in *protocol.GetUserRequest) (*protocol.GetUserResponse, error) { + user, err := p.database.GetUser(ctx, in.UserId) + if err != nil { + if errors.Is(err, errors.NotFound) { + return &protocol.GetUserResponse{}, nil + } + return nil, err + } + labels, err := json.Marshal(user.Labels) + if err != nil { + return nil, err + } + return &protocol.GetUserResponse{ + User: &protocol.User{ + UserId: user.UserId, + Labels: labels, + Comment: user.Comment, + Subscribe: user.Subscribe, + }, + }, nil +} + +func (p *ProxyServer) ModifyUser(ctx context.Context, in *protocol.ModifyUserRequest) (*protocol.ModifyUserResponse, error) { + var labels any + if in.Patch.Labels != nil { + err := json.Unmarshal(in.Patch.Labels, &labels) + if err != nil { + return nil, err + } + } + err := 
p.database.ModifyUser(ctx, in.UserId, UserPatch{ + Labels: labels, + Comment: in.Patch.Comment, + Subscribe: in.Patch.Subscribe, + }) + return &protocol.ModifyUserResponse{}, err +} + +func (p *ProxyServer) GetUsers(ctx context.Context, in *protocol.GetUsersRequest) (*protocol.GetUsersResponse, error) { + cursor, users, err := p.database.GetUsers(ctx, in.Cursor, int(in.N)) + if err != nil { + return nil, err + } + pbUsers := make([]*protocol.User, len(users)) + for i, user := range users { + labels, err := json.Marshal(user.Labels) + if err != nil { + return nil, err + } + pbUsers[i] = &protocol.User{ + UserId: user.UserId, + Labels: labels, + Comment: user.Comment, + Subscribe: user.Subscribe, + } + } + return &protocol.GetUsersResponse{Cursor: cursor, Users: pbUsers}, nil +} + +func (p *ProxyServer) GetUserFeedback(ctx context.Context, in *protocol.GetUserFeedbackRequest) (*protocol.GetFeedbackResponse, error) { + var endTime *time.Time + if in.EndTime != nil { + endTime = lo.ToPtr(in.EndTime.AsTime()) + } + feedback, err := p.database.GetUserFeedback(ctx, in.UserId, endTime, in.FeedbackTypes...) + if err != nil { + return nil, err + } + pbFeedback := make([]*protocol.Feedback, len(feedback)) + for i, f := range feedback { + pbFeedback[i] = &protocol.Feedback{ + FeedbackType: f.FeedbackType, + UserId: f.UserId, + ItemId: f.ItemId, + Timestamp: timestamppb.New(f.Timestamp), + Comment: f.Comment, + } + } + return &protocol.GetFeedbackResponse{Feedback: pbFeedback}, nil +} + +func (p *ProxyServer) GetUserItemFeedback(ctx context.Context, in *protocol.GetUserItemFeedbackRequest) (*protocol.GetFeedbackResponse, error) { + feedback, err := p.database.GetUserItemFeedback(ctx, in.UserId, in.ItemId, in.FeedbackTypes...) 
+ if err != nil { + return nil, err + } + pbFeedback := make([]*protocol.Feedback, len(feedback)) + for i, f := range feedback { + pbFeedback[i] = &protocol.Feedback{ + FeedbackType: f.FeedbackType, + UserId: f.UserId, + ItemId: f.ItemId, + Timestamp: timestamppb.New(f.Timestamp), + Comment: f.Comment, + } + } + return &protocol.GetFeedbackResponse{Feedback: pbFeedback}, nil +} + +func (p *ProxyServer) DeleteUserItemFeedback(ctx context.Context, in *protocol.DeleteUserItemFeedbackRequest) (*protocol.DeleteUserItemFeedbackResponse, error) { + count, err := p.database.DeleteUserItemFeedback(ctx, in.UserId, in.ItemId, in.FeedbackTypes...) + return &protocol.DeleteUserItemFeedbackResponse{Count: int32(count)}, err +} + +func (p *ProxyServer) BatchInsertFeedback(ctx context.Context, in *protocol.BatchInsertFeedbackRequest) (*protocol.BatchInsertFeedbackResponse, error) { + feedback := make([]Feedback, len(in.Feedback)) + for i, f := range in.Feedback { + feedback[i] = Feedback{ + FeedbackKey: FeedbackKey{ + FeedbackType: f.FeedbackType, + UserId: f.UserId, + ItemId: f.ItemId, + }, + Timestamp: f.Timestamp.AsTime(), + Comment: f.Comment, + } + } + err := p.database.BatchInsertFeedback(ctx, feedback, in.InsertUser, in.InsertItem, in.Overwrite) + return &protocol.BatchInsertFeedbackResponse{}, err +} + +func (p *ProxyServer) GetFeedback(ctx context.Context, in *protocol.GetFeedbackRequest) (*protocol.GetFeedbackResponse, error) { + var beginTime, endTime *time.Time + if in.BeginTime != nil { + beginTime = lo.ToPtr(in.BeginTime.AsTime()) + } + if in.EndTime != nil { + endTime = lo.ToPtr(in.EndTime.AsTime()) + } + cursor, feedback, err := p.database.GetFeedback(ctx, in.Cursor, int(in.N), beginTime, endTime, in.FeedbackTypes...) 
+ if err != nil { + return nil, err + } + pbFeedback := make([]*protocol.Feedback, len(feedback)) + for i, f := range feedback { + pbFeedback[i] = &protocol.Feedback{ + FeedbackType: f.FeedbackType, + UserId: f.UserId, + ItemId: f.ItemId, + Timestamp: timestamppb.New(f.Timestamp), + Comment: f.Comment, + } + } + return &protocol.GetFeedbackResponse{Cursor: cursor, Feedback: pbFeedback}, nil +} + +func (p *ProxyServer) GetUserStream(in *protocol.GetUserStreamRequest, stream grpc.ServerStreamingServer[protocol.GetUserStreamResponse]) error { + usersChan, errChan := p.database.GetUserStream(stream.Context(), int(in.BatchSize)) + for users := range usersChan { + pbUsers := make([]*protocol.User, len(users)) + for i, user := range users { + labels, err := json.Marshal(user.Labels) + if err != nil { + return err + } + pbUsers[i] = &protocol.User{ + UserId: user.UserId, + Labels: labels, + Comment: user.Comment, + Subscribe: user.Subscribe, + } + } + err := stream.Send(&protocol.GetUserStreamResponse{Users: pbUsers}) + if err != nil { + return err + } + } + return <-errChan +} + +func (p *ProxyServer) GetItemStream(in *protocol.GetItemStreamRequest, stream grpc.ServerStreamingServer[protocol.GetItemStreamResponse]) error { + var timeLimit *time.Time + if in.TimeLimit != nil { + timeLimit = lo.ToPtr(in.TimeLimit.AsTime()) + } + itemsChan, errChan := p.database.GetItemStream(stream.Context(), int(in.BatchSize), timeLimit) + for items := range itemsChan { + pbItems := make([]*protocol.Item, len(items)) + for i, item := range items { + labels, err := json.Marshal(item.Labels) + if err != nil { + return err + } + pbItems[i] = &protocol.Item{ + ItemId: item.ItemId, + IsHidden: item.IsHidden, + Categories: item.Categories, + Timestamp: timestamppb.New(item.Timestamp), + Labels: labels, + Comment: item.Comment, + } + } + err := stream.Send(&protocol.GetItemStreamResponse{Items: pbItems}) + if err != nil { + return err + } + } + return <-errChan +} + +func (p *ProxyServer) 
GetFeedbackStream(in *protocol.GetFeedbackStreamRequest, stream grpc.ServerStreamingServer[protocol.GetFeedbackStreamResponse]) error { + var opts []ScanOption + if in.ScanOptions.BeginTime != nil { + opts = append(opts, WithBeginTime(in.ScanOptions.BeginTime.AsTime())) + } + if in.ScanOptions.EndTime != nil { + opts = append(opts, WithEndTime(in.ScanOptions.EndTime.AsTime())) + } + if in.ScanOptions.FeedbackTypes != nil { + opts = append(opts, WithFeedbackTypes(in.ScanOptions.FeedbackTypes...)) + } + if in.ScanOptions.BeginUserId != nil { + opts = append(opts, WithBeginUserId(*in.ScanOptions.BeginUserId)) + } + if in.ScanOptions.EndUserId != nil { + opts = append(opts, WithEndUserId(*in.ScanOptions.EndUserId)) + } + feedbackChan, errChan := p.database.GetFeedbackStream(stream.Context(), int(in.BatchSize), opts...) + for feedback := range feedbackChan { + pbFeedback := make([]*protocol.Feedback, len(feedback)) + for i, f := range feedback { + pbFeedback[i] = &protocol.Feedback{ + FeedbackType: f.FeedbackType, + UserId: f.UserId, + ItemId: f.ItemId, + Timestamp: timestamppb.New(f.Timestamp), + Comment: f.Comment, + } + } + err := stream.Send(&protocol.GetFeedbackStreamResponse{Feedback: pbFeedback}) + if err != nil { + return err + } + } + return <-errChan +} + +type ProxyClient struct { + *grpc.ClientConn + protocol.DataStoreClient +} + +func OpenProxyClient(address string) (*ProxyClient, error) { + // Create gRPC connection + conn, err := grpc.Dial(address, grpc.WithInsecure()) + if err != nil { + return nil, err + } + // Create client + return &ProxyClient{ + ClientConn: conn, + DataStoreClient: protocol.NewDataStoreClient(conn), + }, nil +} + +func (p ProxyClient) Init() error { + return errors.MethodNotAllowedf("method Init is not allowed in ProxyClient") +} + +func (p ProxyClient) Ping() error { + _, err := p.DataStoreClient.Ping(context.Background(), &protocol.PingRequest{}) + return err +} + +func (p ProxyClient) Close() error { + return p.ClientConn.Close() 
+} + +func (p ProxyClient) Optimize() error { + return nil +} + +func (p ProxyClient) Purge() error { + return errors.MethodNotAllowedf("method Purge is not allowed in ProxyClient") +} + +func (p ProxyClient) BatchInsertItems(ctx context.Context, items []Item) error { + pbItems := make([]*protocol.Item, len(items)) + for i, item := range items { + labels, err := json.Marshal(item.Labels) + if err != nil { + return err + } + pbItems[i] = &protocol.Item{ + ItemId: item.ItemId, + IsHidden: item.IsHidden, + Categories: item.Categories, + Timestamp: timestamppb.New(item.Timestamp), + Labels: labels, + Comment: item.Comment, + } + } + _, err := p.DataStoreClient.BatchInsertItems(ctx, &protocol.BatchInsertItemsRequest{Items: pbItems}) + return err +} + +func (p ProxyClient) BatchGetItems(ctx context.Context, itemIds []string) ([]Item, error) { + resp, err := p.DataStoreClient.BatchGetItems(ctx, &protocol.BatchGetItemsRequest{ItemIds: itemIds}) + if err != nil { + return nil, err + } + items := make([]Item, len(resp.Items)) + for i, item := range resp.Items { + var labels any + err = json.Unmarshal(item.Labels, &labels) + if err != nil { + return nil, err + } + items[i] = Item{ + ItemId: item.ItemId, + IsHidden: item.IsHidden, + Categories: item.Categories, + Timestamp: item.Timestamp.AsTime(), + Labels: labels, + Comment: item.Comment, + } + } + return items, nil +} + +func (p ProxyClient) DeleteItem(ctx context.Context, itemId string) error { + _, err := p.DataStoreClient.DeleteItem(ctx, &protocol.DeleteItemRequest{ItemId: itemId}) + return err +} + +func (p ProxyClient) GetItem(ctx context.Context, itemId string) (Item, error) { + resp, err := p.DataStoreClient.GetItem(ctx, &protocol.GetItemRequest{ItemId: itemId}) + if err != nil { + return Item{}, err + } + if resp.Item == nil { + return Item{}, errors.Annotate(ErrItemNotExist, itemId) + } + var labels any + if err = json.Unmarshal(resp.Item.Labels, &labels); err != nil { + return Item{}, err + } + return Item{ + 
ItemId: resp.Item.ItemId, + IsHidden: resp.Item.IsHidden, + Categories: resp.Item.Categories, + Timestamp: resp.Item.Timestamp.AsTime(), + Labels: labels, + Comment: resp.Item.Comment, + }, nil +} + +func (p ProxyClient) ModifyItem(ctx context.Context, itemId string, patch ItemPatch) error { + var labels []byte + if patch.Labels != nil { + var err error + labels, err = json.Marshal(patch.Labels) + if err != nil { + return err + } + } + var timestamp *timestamppb.Timestamp + if patch.Timestamp != nil { + timestamp = timestamppb.New(*patch.Timestamp) + } + _, err := p.DataStoreClient.ModifyItem(ctx, &protocol.ModifyItemRequest{ + ItemId: itemId, + Patch: &protocol.ItemPatch{ + IsHidden: patch.IsHidden, + Categories: patch.Categories, + Labels: labels, + Comment: patch.Comment, + Timestamp: timestamp, + }, + }) + return err +} + +func (p ProxyClient) GetItems(ctx context.Context, cursor string, n int, beginTime *time.Time) (string, []Item, error) { + var beginTimeProto *timestamppb.Timestamp + if beginTime != nil { + beginTimeProto = timestamppb.New(*beginTime) + } + resp, err := p.DataStoreClient.GetItems(ctx, &protocol.GetItemsRequest{Cursor: cursor, N: int32(n), BeginTime: beginTimeProto}) + if err != nil { + return "", nil, err + } + items := make([]Item, len(resp.Items)) + for i, item := range resp.Items { + var labels any + err = json.Unmarshal(item.Labels, &labels) + if err != nil { + return "", nil, err + } + items[i] = Item{ + ItemId: item.ItemId, + IsHidden: item.IsHidden, + Categories: item.Categories, + Timestamp: item.Timestamp.AsTime(), + Labels: labels, + Comment: item.Comment, + } + } + return resp.Cursor, items, nil +} + +func (p ProxyClient) GetItemFeedback(ctx context.Context, itemId string, feedbackTypes ...string) ([]Feedback, error) { + resp, err := p.DataStoreClient.GetItemFeedback(ctx, &protocol.GetItemFeedbackRequest{ + ItemId: itemId, + FeedbackTypes: feedbackTypes, + }) + if err != nil { + return nil, err + } + feedback := make([]Feedback, 
len(resp.Feedback)) + for i, f := range resp.Feedback { + feedback[i] = Feedback{ + FeedbackKey: FeedbackKey{ + FeedbackType: f.FeedbackType, + UserId: f.UserId, + ItemId: f.ItemId, + }, + Timestamp: f.Timestamp.AsTime(), + Comment: f.Comment, + } + } + return feedback, nil +} + +func (p ProxyClient) BatchInsertUsers(ctx context.Context, users []User) error { + pbUsers := make([]*protocol.User, len(users)) + for i, user := range users { + labels, err := json.Marshal(user.Labels) + if err != nil { + return err + } + pbUsers[i] = &protocol.User{ + UserId: user.UserId, + Labels: labels, + Comment: user.Comment, + Subscribe: user.Subscribe, + } + } + _, err := p.DataStoreClient.BatchInsertUsers(ctx, &protocol.BatchInsertUsersRequest{Users: pbUsers}) + return err +} + +func (p ProxyClient) DeleteUser(ctx context.Context, userId string) error { + _, err := p.DataStoreClient.DeleteUser(ctx, &protocol.DeleteUserRequest{UserId: userId}) + return err +} + +func (p ProxyClient) GetUser(ctx context.Context, userId string) (User, error) { + resp, err := p.DataStoreClient.GetUser(ctx, &protocol.GetUserRequest{UserId: userId}) + if err != nil { + return User{}, err + } + if resp.User == nil { + return User{}, errors.Annotate(ErrUserNotExist, userId) + } + var labels any + if err = json.Unmarshal(resp.User.Labels, &labels); err != nil { + return User{}, err + } + return User{ + UserId: resp.User.UserId, + Labels: labels, + Comment: resp.User.Comment, + Subscribe: resp.User.Subscribe, + }, nil +} + +func (p ProxyClient) ModifyUser(ctx context.Context, userId string, patch UserPatch) error { + var labels []byte + if patch.Labels != nil { + var err error + labels, err = json.Marshal(patch.Labels) + if err != nil { + return err + } + } + _, err := p.DataStoreClient.ModifyUser(ctx, &protocol.ModifyUserRequest{ + UserId: userId, + Patch: &protocol.UserPatch{ + Labels: labels, + Comment: patch.Comment, + Subscribe: patch.Subscribe, + }, + }) + return err +} + +func (p ProxyClient) 
GetUsers(ctx context.Context, cursor string, n int) (string, []User, error) { + resp, err := p.DataStoreClient.GetUsers(ctx, &protocol.GetUsersRequest{Cursor: cursor, N: int32(n)}) + if err != nil { + return "", nil, err + } + users := make([]User, len(resp.Users)) + for i, user := range resp.Users { + var labels any + err = json.Unmarshal(user.Labels, &labels) + if err != nil { + return "", nil, err + } + users[i] = User{ + UserId: user.UserId, + Labels: labels, + Comment: user.Comment, + Subscribe: user.Subscribe, + } + } + return resp.Cursor, users, nil +} + +func (p ProxyClient) GetUserFeedback(ctx context.Context, userId string, endTime *time.Time, feedbackTypes ...string) ([]Feedback, error) { + req := &protocol.GetUserFeedbackRequest{UserId: userId} + if endTime != nil { + req.EndTime = timestamppb.New(*endTime) + } + if len(feedbackTypes) > 0 { + req.FeedbackTypes = feedbackTypes + } + resp, err := p.DataStoreClient.GetUserFeedback(ctx, req) + if err != nil { + return nil, err + } + feedback := make([]Feedback, len(resp.Feedback)) + for i, f := range resp.Feedback { + feedback[i] = Feedback{ + FeedbackKey: FeedbackKey{ + FeedbackType: f.FeedbackType, + UserId: f.UserId, + ItemId: f.ItemId, + }, + Timestamp: f.Timestamp.AsTime(), + Comment: f.Comment, + } + } + return feedback, nil +} + +func (p ProxyClient) GetUserItemFeedback(ctx context.Context, userId, itemId string, feedbackTypes ...string) ([]Feedback, error) { + resp, err := p.DataStoreClient.GetUserItemFeedback(ctx, &protocol.GetUserItemFeedbackRequest{ + UserId: userId, + ItemId: itemId, + FeedbackTypes: feedbackTypes, + }) + if err != nil { + return nil, err + } + feedback := make([]Feedback, len(resp.Feedback)) + for i, f := range resp.Feedback { + feedback[i] = Feedback{ + FeedbackKey: FeedbackKey{ + FeedbackType: f.FeedbackType, + UserId: f.UserId, + ItemId: f.ItemId, + }, + Timestamp: f.Timestamp.AsTime(), + Comment: f.Comment, + } + } + return feedback, nil +} + +func (p ProxyClient) 
DeleteUserItemFeedback(ctx context.Context, userId, itemId string, feedbackTypes ...string) (int, error) { + resp, err := p.DataStoreClient.DeleteUserItemFeedback(ctx, &protocol.DeleteUserItemFeedbackRequest{ + UserId: userId, + ItemId: itemId, + FeedbackTypes: feedbackTypes, + }) + if err != nil { + return 0, err + } + return int(resp.Count), nil +} + +func (p ProxyClient) BatchInsertFeedback(ctx context.Context, feedback []Feedback, insertUser, insertItem, overwrite bool) error { + reqFeedback := make([]*protocol.Feedback, len(feedback)) + for i, f := range feedback { + reqFeedback[i] = &protocol.Feedback{ + FeedbackType: f.FeedbackType, + UserId: f.UserId, + ItemId: f.ItemId, + Timestamp: timestamppb.New(f.Timestamp), + Comment: f.Comment, + } + } + _, err := p.DataStoreClient.BatchInsertFeedback(ctx, &protocol.BatchInsertFeedbackRequest{ + Feedback: reqFeedback, + InsertUser: insertUser, + InsertItem: insertItem, + Overwrite: overwrite, + }) + return err +} + +func (p ProxyClient) GetFeedback(ctx context.Context, cursor string, n int, beginTime, endTime *time.Time, feedbackTypes ...string) (string, []Feedback, error) { + req := &protocol.GetFeedbackRequest{ + Cursor: cursor, + N: int32(n), + } + if beginTime != nil { + req.BeginTime = timestamppb.New(*beginTime) + } + if endTime != nil { + req.EndTime = timestamppb.New(*endTime) + } + if len(feedbackTypes) > 0 { + req.FeedbackTypes = feedbackTypes + } + resp, err := p.DataStoreClient.GetFeedback(ctx, req) + if err != nil { + return "", nil, err + } + feedback := make([]Feedback, len(resp.Feedback)) + for i, f := range resp.Feedback { + feedback[i] = Feedback{ + FeedbackKey: FeedbackKey{ + FeedbackType: f.FeedbackType, + UserId: f.UserId, + ItemId: f.ItemId, + }, + Timestamp: f.Timestamp.AsTime(), + Comment: f.Comment, + } + } + return resp.Cursor, feedback, nil +} + +func (p ProxyClient) GetUserStream(ctx context.Context, batchSize int) (chan []User, chan error) { + usersChan := make(chan []User, bufSize) + 
errChan := make(chan error, 1) + go func() { + defer close(usersChan) + defer close(errChan) + stream, err := p.DataStoreClient.GetUserStream(ctx, &protocol.GetUserStreamRequest{BatchSize: int32(batchSize)}) + if err != nil { + errChan <- err + return + } + for { + resp, err := stream.Recv() + if err != nil { + if err == io.EOF { + break + } + errChan <- err + return + } + users := make([]User, len(resp.Users)) + for i, user := range resp.Users { + var labels any + if err = json.Unmarshal(user.Labels, &labels); err != nil { + errChan <- err + return + } + users[i] = User{ + UserId: user.UserId, + Labels: labels, + Comment: user.Comment, + Subscribe: user.Subscribe, + } + } + usersChan <- users + } + }() + return usersChan, errChan +} + +func (p ProxyClient) GetItemStream(ctx context.Context, batchSize int, timeLimit *time.Time) (chan []Item, chan error) { + itemsChan := make(chan []Item, bufSize) + errChan := make(chan error, 1) + go func() { + defer close(itemsChan) + defer close(errChan) + stream, err := p.DataStoreClient.GetItemStream(ctx, &protocol.GetItemStreamRequest{BatchSize: int32(batchSize)}) + if err != nil { + errChan <- err + return + } + for { + resp, err := stream.Recv() + if err != nil { + if err == io.EOF { + break + } + errChan <- err + return + } + items := make([]Item, len(resp.Items)) + for i, item := range resp.Items { + var labels any + if err = json.Unmarshal(item.Labels, &labels); err != nil { + errChan <- err + return + } + items[i] = Item{ + ItemId: item.ItemId, + IsHidden: item.IsHidden, + Categories: item.Categories, + Timestamp: item.Timestamp.AsTime(), + Labels: labels, + Comment: item.Comment, + } + } + itemsChan <- items + } + }() + return itemsChan, errChan +} + +func (p ProxyClient) GetFeedbackStream(ctx context.Context, batchSize int, options ...ScanOption) (chan []Feedback, chan error) { + var o ScanOptions + for _, opt := range options { + opt(&o) + } + pbOptions := &protocol.ScanOptions{ + BeginUserId: o.BeginUserId, + 
EndUserId: o.EndUserId, + FeedbackTypes: o.FeedbackTypes, + } + if o.BeginTime != nil { + pbOptions.BeginTime = timestamppb.New(*o.BeginTime) + } + if o.EndTime != nil { + pbOptions.EndTime = timestamppb.New(*o.EndTime) + } + + feedbackChan := make(chan []Feedback, bufSize) + errChan := make(chan error, 1) + go func() { + defer close(feedbackChan) + defer close(errChan) + req := &protocol.GetFeedbackStreamRequest{ + BatchSize: int32(batchSize), + ScanOptions: pbOptions, + } + + stream, err := p.DataStoreClient.GetFeedbackStream(ctx, req) + if err != nil { + errChan <- err + return + } + for { + resp, err := stream.Recv() + if err != nil { + if err == io.EOF { + break + } + errChan <- err + return + } + feedback := make([]Feedback, len(resp.Feedback)) + for i, f := range resp.Feedback { + feedback[i] = Feedback{ + FeedbackKey: FeedbackKey{ + FeedbackType: f.FeedbackType, + UserId: f.UserId, + ItemId: f.ItemId, + }, + Timestamp: f.Timestamp.AsTime(), + Comment: f.Comment, + } + } + feedbackChan <- feedback + } + }() + return feedbackChan, errChan +} diff --git a/storage/data/proxy_test.go b/storage/data/proxy_test.go new file mode 100644 index 000000000..4ba215b6d --- /dev/null +++ b/storage/data/proxy_test.go @@ -0,0 +1,76 @@ +// Copyright 2024 gorse Project Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package data + +import ( + "fmt" + "github.com/stretchr/testify/suite" + "net" + "testing" +) + +type ProxyTestSuite struct { + baseTestSuite + SQLite Database + Server *ProxyServer +} + +func (suite *ProxyTestSuite) SetupSuite() { + // create database + var err error + path := fmt.Sprintf("sqlite://%s/sqlite.db", suite.T().TempDir()) + suite.SQLite, err = Open(path, "gorse_") + suite.NoError(err) + // create schema + err = suite.SQLite.Init() + suite.NoError(err) + // start server + lis, err := net.Listen("tcp", "localhost:0") + suite.NoError(err) + suite.Server = NewProxyServer(suite.SQLite) + go func() { + err = suite.Server.Serve(lis) + suite.NoError(err) + }() + // create proxy + suite.Database, err = OpenProxyClient(lis.Addr().String()) + suite.NoError(err) +} + +func (suite *ProxyTestSuite) TearDownSuite() { + suite.Server.Stop() + suite.NoError(suite.Database.Close()) + suite.NoError(suite.SQLite.Close()) +} + +func (suite *ProxyTestSuite) SetupTest() { + err := suite.SQLite.Ping() + suite.NoError(err) + err = suite.SQLite.Purge() + suite.NoError(err) +} + +func (suite *ProxyTestSuite) TearDownTest() { + err := suite.SQLite.Purge() + suite.NoError(err) +} + +func (suite *ProxyTestSuite) TestPurge() { + suite.T().Skip() +} + +func TestProxy(t *testing.T) { + suite.Run(t, new(ProxyTestSuite)) +} From e7fe64a26b5490fcd1c46cccd5ea07753ef93c88 Mon Sep 17 00:00:00 2001 From: Zhenghao Zhang Date: Sat, 7 Dec 2024 16:54:25 +0800 Subject: [PATCH 14/14] add dataset --- common/dataset/dataset.go | 184 +++++++++++++++++++++++++++++++++ common/dataset/dataset_test.go | 26 +++++ common/nn/layers.go | 14 +++ 3 files changed, 224 insertions(+) create mode 100644 common/dataset/dataset.go create mode 100644 common/dataset/dataset_test.go diff --git a/common/dataset/dataset.go b/common/dataset/dataset.go new file mode 100644 index 000000000..bd6484033 --- /dev/null +++ b/common/dataset/dataset.go @@ -0,0 +1,184 @@ +// Copyright 2024 gorse Project Authors +// +// Licensed 
under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package dataset + +import ( + "archive/zip" + "encoding/csv" + "fmt" + "github.com/zhenghaoz/gorse/base/log" + "go.uber.org/zap" + "io" + "net/http" + "os" + "os/user" + "path/filepath" + "strconv" + "strings" +) + +var ( + tempDir string + datasetDir string +) + +func init() { + usr, err := user.Current() + if err != nil { + log.Logger().Fatal("failed to get user directory", zap.Error(err)) + } + datasetDir = filepath.Join(usr.HomeDir, ".gorse", "dataset") + tempDir = filepath.Join(usr.HomeDir, ".gorse", "temp") +} + +func LoadIris() ([][]float32, []int, error) { + // Download dataset + path, err := downloadAndUnzip("iris") + if err != nil { + return nil, nil, err + } + dataFile := filepath.Join(path, "iris.data") + // Load data + f, err := os.Open(dataFile) + if err != nil { + return nil, nil, err + } + reader := csv.NewReader(f) + rows, err := reader.ReadAll() + if err != nil { + return nil, nil, err + } + // Parse data + data := make([][]float32, len(rows)) + target := make([]int, len(rows)) + types := make(map[string]int) + for i, row := range rows { + data[i] = make([]float32, 4) + for j, cell := range row[:4] { + data[i][j], err = strconv.ParseFloat(cell, 64) + if err != nil { + return nil, nil, err + } + } + if _, exist := types[row[4]]; !exist { + types[row[4]] = len(types) + } + target[i] = types[row[4]] + } + return data, target, nil +} + +func downloadAndUnzip(name string) (string, error) { + url := 
fmt.Sprintf("https://pub-64226d9f34c64d6f829f5b63a5540d27.r2.dev/datasets/%s.zip", name) + path := filepath.Join(datasetDir, name) + if _, err := os.Stat(path); os.IsNotExist(err) { + zipFileName, _ := downloadFromUrl(url, tempDir) + if _, err := unzip(zipFileName, path); err != nil { + return "", err + } + } + return path, nil +} + +// downloadFromUrl downloads file from URL. +func downloadFromUrl(src, dst string) (string, error) { + log.Logger().Info("Download dataset", zap.String("source", src), zap.String("destination", dst)) + // Extract file name + tokens := strings.Split(src, "/") + fileName := filepath.Join(dst, tokens[len(tokens)-1]) + // Create file + if err := os.MkdirAll(filepath.Dir(fileName), os.ModePerm); err != nil { + return fileName, err + } + output, err := os.Create(fileName) + if err != nil { + log.Logger().Error("failed to create file", zap.Error(err), zap.String("filename", fileName)) + return fileName, err + } + defer output.Close() + // Download file + response, err := http.Get(src) + if err != nil { + log.Logger().Error("failed to download", zap.Error(err), zap.String("source", src)) + return fileName, err + } + defer response.Body.Close() + // Save file + _, err = io.Copy(output, response.Body) + if err != nil { + log.Logger().Error("failed to download", zap.Error(err), zap.String("source", src)) + return fileName, err + } + return fileName, nil +} + +// unzip zip file. +func unzip(src, dst string) ([]string, error) { + var fileNames []string + // Open zip file + r, err := zip.OpenReader(src) + if err != nil { + return fileNames, err + } + defer r.Close() + // Extract files + for _, f := range r.File { + // Open file + rc, err := f.Open() + if err != nil { + return fileNames, err + } + // Store filename/path for returning and using later on + filePath := filepath.Join(dst, f.Name) + // Check for ZipSlip. 
More Info: http://bit.ly/2MsjAWE + if !strings.HasPrefix(filePath, filepath.Clean(dst)+string(os.PathSeparator)) { + return fileNames, fmt.Errorf("%s: illegal file path", filePath) + } + // Add filename + fileNames = append(fileNames, filePath) + if f.FileInfo().IsDir() { + // Create folder + if err = os.MkdirAll(filePath, os.ModePerm); err != nil { + return fileNames, err + } + } else { + // Create all folders + if err = os.MkdirAll(filepath.Dir(filePath), os.ModePerm); err != nil { + return fileNames, err + } + // Create file + outFile, err := os.OpenFile(filePath, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, f.Mode()) + if err != nil { + return fileNames, err + } + // Save file + _, err = io.Copy(outFile, rc) + if err != nil { + return nil, err + } + // Close the file without defer to close before next iteration of loop + err = outFile.Close() + if err != nil { + return nil, err + } + } + // Close file + err = rc.Close() + if err != nil { + return nil, err + } + } + return fileNames, nil +} diff --git a/common/dataset/dataset_test.go b/common/dataset/dataset_test.go new file mode 100644 index 000000000..6a09b2ea3 --- /dev/null +++ b/common/dataset/dataset_test.go @@ -0,0 +1,26 @@ +package dataset + +import ( + "github.com/samber/lo" + "github.com/stretchr/testify/assert" + "github.com/zhenghaoz/gorse/common/nn" + "testing" +) + +func TestIris(t *testing.T) { + data, target, err := LoadIris() + assert.NoError(t, err) + _ = data + _ = target + + x := nn.NewTensor(lo.Flatten(data), len(data), 4) + + model := nn.NewSequential( + nn.NewLinear(4, 100), + nn.NewReLU(), + nn.NewLinear(100, 100), + nn.NewLinear(100, 3), + nn.NewFlatten(), + ) + _ = model +} diff --git a/common/nn/layers.go b/common/nn/layers.go index 00a8b6cee..ae6fba718 100644 --- a/common/nn/layers.go +++ b/common/nn/layers.go @@ -74,6 +74,20 @@ func (e *embeddingLayer) Forward(x *Tensor) *Tensor { return Embedding(e.w, x) } +type reluLayer struct{} + +func NewReLU() Layer { + return &reluLayer{} +} + +func (r 
*reluLayer) Parameters() []*Tensor { + return nil +} + +func (r *reluLayer) Forward(x *Tensor) *Tensor { + return ReLu(x) +} + type Sequential struct { layers []Layer }