Skip to content

Commit

Permalink
fix(ai-plugins): add missing kong.ctx.shared usage to llm.state (#13467)
Browse files Browse the repository at this point in the history
Missing from #13155

Co-authored-by: Xumin <100666470+StarlightIbuki@users.noreply.github.com>
  • Loading branch information
fffonion and StarlightIbuki committed Aug 9, 2024
1 parent 87d908f commit b631573
Show file tree
Hide file tree
Showing 4 changed files with 17 additions and 5 deletions.
3 changes: 2 additions & 1 deletion kong/llm/drivers/bedrock.lua
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ local string_gsub = string.gsub
local table_insert = table.insert
local string_lower = string.lower
local signer = require("resty.aws.request.sign")
local llm_state = require("kong.llm.state")
--

-- globals
Expand Down Expand Up @@ -381,7 +382,7 @@ end

-- returns err or nil
function _M.configure_request(conf, aws_sdk)
local operation = kong.ctx.shared.ai_proxy_streaming_mode and "converse-stream"
local operation = llm_state.is_streaming_mode() and "converse-stream"
or "converse"

local f_url = conf.model.options and conf.model.options.upstream_url
Expand Down
3 changes: 2 additions & 1 deletion kong/llm/drivers/gemini.lua
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ local string_gsub = string.gsub
local buffer = require("string.buffer")
local table_insert = table.insert
local string_lower = string.lower
local llm_state = require("kong.llm.state")
--

-- globals
Expand Down Expand Up @@ -338,7 +339,7 @@ end
-- returns err or nil
function _M.configure_request(conf, identity_interface)
local parsed_url
local operation = kong.ctx.shared.ai_proxy_streaming_mode and "streamGenerateContent"
local operation = llm_state.is_streaming_mode() and "streamGenerateContent"
or "generateContent"
local f_url = conf.model.options and conf.model.options.upstream_url

Expand Down
4 changes: 2 additions & 2 deletions kong/llm/drivers/shared.lua
Original file line number Diff line number Diff line change
Expand Up @@ -612,12 +612,12 @@ function _M.post_request(conf, response_object)
if kong.ctx.plugin[start_time_key] then
local llm_latency = math.floor((ngx.now() - kong.ctx.plugin[start_time_key]) * 1000)
request_analytics_plugin[log_entry_keys.META_CONTAINER][log_entry_keys.LLM_LATENCY] = llm_latency
kong.ctx.shared.ai_request_latency = llm_latency
llm_state.set_metrics("e2e_latency", llm_latency)

if response_object.usage and response_object.usage.completion_tokens then
local time_per_token = math.floor(llm_latency / response_object.usage.completion_tokens)
request_analytics_plugin[log_entry_keys.USAGE_CONTAINER][log_entry_keys.TIME_PER_TOKEN] = time_per_token
kong.ctx.shared.ai_request_time_per_token = time_per_token
llm_state.set_metrics("tpot_latency", time_per_token)
end
end

Expand Down
12 changes: 11 additions & 1 deletion kong/llm/state.lua
Original file line number Diff line number Diff line change
Expand Up @@ -94,4 +94,14 @@ function _M.get_response_tokens_count()
return kong.ctx.shared.llm_response_tokens_count
end

return _M
-- Record a per-request LLM metric under the given key.
-- Metrics live in kong.ctx.shared.llm_metrics, which is created lazily
-- on first write and persists for the lifetime of the request context.
function _M.set_metrics(key, value)
  local metrics = kong.ctx.shared.llm_metrics
  if not metrics then
    metrics = {}
    kong.ctx.shared.llm_metrics = metrics
  end
  metrics[key] = value
end

-- Fetch a previously recorded LLM metric by key.
-- Returns nil when no metrics table exists yet or the key was never set.
function _M.get_metrics(key)
  local metrics = kong.ctx.shared.llm_metrics
  if metrics ~= nil then
    return metrics[key]
  end
  return nil
end

return _M

1 comment on commit b631573

@github-actions
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Bazel Build

Docker image available kong/kong:b6315737104d65c670f9c48001b2ee4b7d470ee4
Artifacts available https://github.com/Kong/kong/actions/runs/10316830136

Please sign in to comment.