From 933db57bba562feae5e01a89db3e59bfa0eb2246 Mon Sep 17 00:00:00 2001 From: Naoki Takezoe Date: Fri, 9 Jul 2021 15:47:51 +0900 Subject: [PATCH] Migration to Trino (#70) --- Gemfile | 1 + README.md | 38 +- Rakefile | 26 +- lib/presto-client.rb | 1 - lib/trino-client.rb | 1 + lib/{presto => trino}/client.rb | 8 +- lib/{presto => trino}/client/client.rb | 10 +- lib/{presto => trino}/client/errors.rb | 14 +- .../client/faraday_client.rb | 86 +- .../client/model_versions/0.149.rb | 6 +- .../client/model_versions/0.153.rb | 6 +- .../client/model_versions/0.173.rb | 6 +- .../client/model_versions/0.178.rb | 6 +- .../client/model_versions/0.205.rb | 6 +- .../client/model_versions/303.rb | 10 +- .../client/model_versions/316.rb | 6 +- lib/trino/client/model_versions/351.rb | 2726 +++++++++++++++++ lib/{presto => trino}/client/models.rb | 23 +- lib/{presto => trino}/client/query.rb | 20 +- .../client/statement_client.rb | 26 +- lib/{presto => trino}/client/version.rb | 4 +- modelgen/model_versions.rb | 9 +- modelgen/modelgen.rb | 32 +- modelgen/models.rb | 8 +- .../{presto_models.rb => trino_models.rb} | 4 +- release.rb | 2 +- spec/basic_query_spec.rb | 8 +- spec/client_spec.rb | 4 +- spec/gzip_spec.rb | 10 +- spec/model_spec.rb | 2 +- spec/spec_helper.rb | 6 +- spec/statement_client_spec.rb | 190 +- spec/tpch_query_spec.rb | 10 +- presto-client.gemspec => trino-client.gemspec | 12 +- 34 files changed, 3055 insertions(+), 272 deletions(-) delete mode 100644 lib/presto-client.rb create mode 100644 lib/trino-client.rb rename lib/{presto => trino}/client.rb (84%) rename lib/{presto => trino}/client/client.rb (90%) rename lib/{presto => trino}/client/errors.rb (80%) rename lib/{presto => trino}/client/faraday_client.rb (69%) rename lib/{presto => trino}/client/model_versions/0.149.rb (99%) rename lib/{presto => trino}/client/model_versions/0.153.rb (99%) rename lib/{presto => trino}/client/model_versions/0.173.rb (99%) rename lib/{presto => trino}/client/model_versions/0.178.rb 
(99%) rename lib/{presto => trino}/client/model_versions/0.205.rb (99%) rename lib/{presto => trino}/client/model_versions/303.rb (99%) rename lib/{presto => trino}/client/model_versions/316.rb (99%) create mode 100644 lib/trino/client/model_versions/351.rb rename lib/{presto => trino}/client/models.rb (57%) rename lib/{presto => trino}/client/query.rb (83%) rename lib/{presto => trino}/client/statement_client.rb (86%) rename lib/{presto => trino}/client/version.rb (94%) rename modelgen/{presto_models.rb => trino_models.rb} (99%) rename presto-client.gemspec => trino-client.gemspec (73%) diff --git a/Gemfile b/Gemfile index 79e72077..f02cbb70 100644 --- a/Gemfile +++ b/Gemfile @@ -3,4 +3,5 @@ gemspec group :development, :test do gem 'tiny-presto', '~> 0.0.7' + gem 'presto-client' end \ No newline at end of file diff --git a/README.md b/README.md index f6d53087..9dfd4388 100644 --- a/README.md +++ b/README.md @@ -1,19 +1,19 @@ -# Presto client library for Ruby +# Trino client library for Ruby -[![Build Status](https://travis-ci.org/treasure-data/presto-client-ruby.svg?branch=master)](https://travis-ci.org/treasure-data/presto-client-ruby) [![Gem](https://img.shields.io/gem/v/presto-client)](https://rubygems.org/gems/presto-client) [![Gem](https://img.shields.io/gem/dt/presto-client)](https://rubygems.org/gems/presto-client) [![GitHub](https://img.shields.io/github/license/treasure-data/presto-client-ruby)]() +[![Build Status](https://travis-ci.org/treasure-data/trino-client-ruby.svg?branch=master)](https://travis-ci.org/treasure-data/trino-client-ruby) [![Gem](https://img.shields.io/gem/v/trino-client)](https://rubygems.org/gems/trino-client) [![Gem](https://img.shields.io/gem/dt/trino-client)](https://rubygems.org/gems/trino-client) [![GitHub](https://img.shields.io/github/license/treasure-data/trino-client-ruby)]() -Presto is a distributed SQL query engine for big data: -https://prestosql.io/ +Trino is a distributed SQL query engine for big data: 
+https://trino.io/ -This is a client library for Ruby to run queries on Presto. +This is a client library for Ruby to run queries on Trino. ## Example ```ruby -require 'presto-client' +require 'trino-client' # create a client object: -client = Presto::Client.new( +client = Trino::Client.new( server: "localhost:8880", # required option ssl: {verify: false}, catalog: "native", @@ -74,7 +74,7 @@ $ bundle exec rake modelgen:latest ## Options -* **server** sets address (and port) of a Presto coordinator server. +* **server** sets address (and port) of a Trino coordinator server. * **ssl** enables https. * Setting `true` enables SSL and verifies server certificate using system's built-in certificates. * Setting `{verify: false}` enables SSL but doesn't verify server certificate. @@ -84,18 +84,18 @@ $ bundle exec rake modelgen:latest * **cert_store**: a `OpenSSL::X509::Store` object used for verification * **client_cert**: a `OpenSSL::X509::Certificate` object as client certificate * **client_key**: a `OpenSSL::PKey::RSA` or `OpenSSL::PKey::DSA` object used for client certificate -* **catalog** sets catalog (connector) name of Presto such as `hive-cdh4`, `hive-hadoop1`, etc. -* **schema** sets default schema name of Presto. You need to use qualified name like `FROM myschema.table1` to use non-default schemas. -* **source** sets source name to connect to a Presto. This name is shown on Presto web interface. +* **catalog** sets catalog (connector) name of Trino such as `hive-cdh4`, `hive-hadoop1`, etc. +* **schema** sets default schema name of Trino. You need to use qualified name like `FROM myschema.table1` to use non-default schemas. +* **source** sets source name to connect to a Trino. This name is shown on Trino web interface. * **client_info** sets client info to queries. It can be a string to pass a raw string, or an object that can be encoded to JSON. * **client_tags** sets client tags to queries. It needs to be an array of strings. 
The tags are shown on web interface. -* **user** sets user name to connect to a Presto. -* **password** sets a password to connect to Presto using basic auth. +* **user** sets user name to connect to a Trino. +* **password** sets a password to connect to Trino using basic auth. * **time_zone** sets time zone of queries. Time zone affects some functions such as `format_datetime`. * **language** sets language of queries. Language affects some functions such as `format_datetime`. * **properties** set session properties. Session properties affect internal behavior such as `hive.force_local_scheduling: true`, `raptor.reader_stream_buffer_size: "32MB"`, etc. -* **query_timeout** sets timeout in seconds for the entire query execution (from the first API call until there're no more output data). If timeout happens, client raises PrestoQueryTimeoutError. Default is nil (disabled). -* **plan_timeout** sets timeout in seconds for query planning execution (from the first API call until result columns become available). If timeout happens, client raises PrestoQueryTimeoutError. Default is nil (disabled). +* **query_timeout** sets timeout in seconds for the entire query execution (from the first API call until there're no more output data). If timeout happens, client raises TrinoQueryTimeoutError. Default is nil (disabled). +* **plan_timeout** sets timeout in seconds for query planning execution (from the first API call until result columns become available). If timeout happens, client raises TrinoQueryTimeoutError. Default is nil (disabled). * **http_headers** sets custom HTTP headers. It must be a Hash of string to string. * **http_proxy** sets host:port of a HTTP proxy server. * **http_debug** enables debug message to STDOUT for each HTTP requests. @@ -103,7 +103,7 @@ $ bundle exec rake modelgen:latest * **http_timeout** sets timeout in seconds to read data from a server. * **gzip** enables gzip compression. * **follow_redirect** enables HTTP redirection support. 
-* **model_version** set the presto version to which a job is submitted. Supported versions are 316, 303, 0.205, 0.178, 0.173, 0.153 and 0.149. Default is 316. +* **model_version** set the Trino version to which a job is submitted. Supported versions are 351, 316, 303, 0.205, 0.178, 0.173, 0.153 and 0.149. Default is 351. See [RDoc](http://www.rubydoc.info/gems/presto-client/) for the full documentation. @@ -111,7 +111,7 @@ See [RDoc](http://www.rubydoc.info/gems/presto-client/) for the full documentati ### Releasing a new version -1. First update `lib/presto/client/version.rb` to the next version. +1. First update `lib/trino/client/version.rb` to the next version. 2. Run the following command which will update `ChangeLog.md` file automatically. ``` $ ruby release.rb @@ -126,6 +126,6 @@ $ git tag "vX.Y.Z" 4. Push package ``` -$ gem build presto-client.gemspec -$ gem push presto-client-X.Y.Z.gem +$ gem build trino-client.gemspec +$ gem push trino-client-X.Y.Z.gem ``` diff --git a/Rakefile b/Rakefile index 7ea6eac3..3b3e0bee 100644 --- a/Rakefile +++ b/Rakefile @@ -11,13 +11,7 @@ RSpec::Core::RakeTask.new(:spec) task :default => [:spec, :build] GEN_MODEL_VERSIONS = %w[ - 0.149 - 0.153 - 0.173 - 0.178 - 0.205 - 303 - 316 + 351 ] namespace "modelgen" do @@ -27,25 +21,25 @@ namespace "modelgen" do @versions = GEN_MODEL_VERSIONS @latest_version = GEN_MODEL_VERSIONS.last data = erb.result - File.write("lib/presto/client/models.rb", data) + File.write("lib/trino/client/models.rb", data) end task :all => GEN_MODEL_VERSIONS GEN_MODEL_VERSIONS.each do |ver| - file "build/presto-#{ver}.tar.gz" do + file "build/trino-#{ver}.tar.gz" do mkdir_p "build" - sh "curl -L -o build/presto-#{ver}.tar.gz https://github.com/prestosql/presto/archive/#{ver}.tar.gz" + sh "curl -L -o build/trino-#{ver}.tar.gz https://github.com/trinodb/trino/archive/#{ver}.tar.gz" end - file "lib/presto/client/model_versions/#{ver}.rb" => "build/presto-#{ver}.tar.gz" do - sh "tar zxf build/presto-#{ver}.tar.gz 
-C build" - mkdir_p "lib/presto/client/model_versions" - sh "#{RbConfig.ruby} modelgen/modelgen.rb #{ver} build/presto-#{ver} modelgen/model_versions.rb lib/presto/client/model_versions/#{ver}.rb" - puts "Generated lib/presto/client/model_versions/#{ver}.rb." + file "lib/trino/client/model_versions/#{ver}.rb" => "build/trino-#{ver}.tar.gz" do + sh "tar zxf build/trino-#{ver}.tar.gz -C build" + mkdir_p "lib/trino/client/model_versions" + sh "#{RbConfig.ruby} modelgen/modelgen.rb #{ver} build/trino-#{ver} modelgen/model_versions.rb lib/trino/client/model_versions/#{ver}.rb" + puts "Generated lib/trino/client/model_versions/#{ver}.rb." end - task ver => "lib/presto/client/model_versions/#{ver}.rb" + task ver => "lib/trino/client/model_versions/#{ver}.rb" end end diff --git a/lib/presto-client.rb b/lib/presto-client.rb deleted file mode 100644 index 035153b0..00000000 --- a/lib/presto-client.rb +++ /dev/null @@ -1 +0,0 @@ -require 'presto/client' diff --git a/lib/trino-client.rb b/lib/trino-client.rb new file mode 100644 index 00000000..d668fbfb --- /dev/null +++ b/lib/trino-client.rb @@ -0,0 +1 @@ +require 'trino/client' diff --git a/lib/presto/client.rb b/lib/trino/client.rb similarity index 84% rename from lib/presto/client.rb rename to lib/trino/client.rb index 3af7e36f..dfcb56b6 100644 --- a/lib/presto/client.rb +++ b/lib/trino/client.rb @@ -1,5 +1,5 @@ # -# Presto client for Ruby +# Trino client for Ruby # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,11 +13,11 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -module Presto +module Trino module Client - require 'presto/client/version' - require 'presto/client/client' + require 'trino/client/version' + require 'trino/client/client' end end diff --git a/lib/presto/client/client.rb b/lib/trino/client/client.rb similarity index 90% rename from lib/presto/client/client.rb rename to lib/trino/client/client.rb index aa562216..f0dbaec1 100644 --- a/lib/presto/client/client.rb +++ b/lib/trino/client/client.rb @@ -1,5 +1,5 @@ # -# Presto client for Ruby +# Trino client for Ruby # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,10 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # -module Presto::Client +module Trino::Client - require 'presto/client/models' - require 'presto/client/query' + require 'trino/client/models' + require 'trino/client/query' class Client def initialize(options) @@ -57,7 +57,7 @@ def run(query) end end - # Accepts the raw response from the Presto Client and returns an + # Accepts the raw response from the Trino Client and returns an # array of hashes where you can access the data in each row using the # output name specified in the query with AS: # SELECT expression AS output_name diff --git a/lib/presto/client/errors.rb b/lib/trino/client/errors.rb similarity index 80% rename from lib/presto/client/errors.rb rename to lib/trino/client/errors.rb index ddc0ddf2..e4bd4721 100644 --- a/lib/presto/client/errors.rb +++ b/lib/trino/client/errors.rb @@ -1,5 +1,5 @@ # -# Presto client for Ruby +# Trino client for Ruby # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,11 +13,11 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -module Presto::Client - class PrestoError < StandardError +module Trino::Client + class TrinoError < StandardError end - class PrestoHttpError < PrestoError + class TrinoHttpError < TrinoError def initialize(status, message) super(message) @status = status @@ -26,10 +26,10 @@ def initialize(status, message) attr_reader :status end - class PrestoClientError < PrestoError + class TrinoClientError < TrinoError end - class PrestoQueryError < PrestoError + class TrinoQueryError < TrinoError def initialize(message, query_id, error_code, error_name, failure_info) super(message) @query_id = query_id @@ -41,6 +41,6 @@ def initialize(message, query_id, error_code, error_name, failure_info) attr_reader :error_code, :error_name, :failure_info end - class PrestoQueryTimeoutError < PrestoError + class TrinoQueryTimeoutError < TrinoError end end diff --git a/lib/presto/client/faraday_client.rb b/lib/trino/client/faraday_client.rb similarity index 69% rename from lib/presto/client/faraday_client.rb rename to lib/trino/client/faraday_client.rb index b17aae9c..603b4cb3 100644 --- a/lib/presto/client/faraday_client.rb +++ b/lib/trino/client/faraday_client.rb @@ -1,5 +1,5 @@ # -# Presto client for Ruby +# Trino client for Ruby # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,10 +13,27 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -module Presto::Client +module Trino::Client require 'cgi' + module TrinoHeaders + TRINO_USER = "X-Trino-User" + TRINO_SOURCE = "X-Trino-Source" + TRINO_CATALOG = "X-Trino-Catalog" + TRINO_SCHEMA = "X-Trino-Schema" + TRINO_TIME_ZONE = "X-Trino-Time-Zone" + TRINO_LANGUAGE = "X-Trino-Language" + TRINO_SESSION = "X-Trino-Session" + TRINO_CLIENT_INFO = "X-Trino-Client-Info" + TRINO_CLIENT_TAGS = "X-Trino-Client-Tags" + + TRINO_CURRENT_STATE = "X-Trino-Current-State" + TRINO_MAX_WAIT = "X-Trino-Max-Wait" + TRINO_MAX_SIZE = "X-Trino-Max-Size" + TRINO_PAGE_SEQUENCE_ID = "X-Trino-Page-Sequence-Id" + end + module PrestoHeaders PRESTO_USER = "X-Presto-User" PRESTO_SOURCE = "X-Presto-Source" @@ -35,7 +52,7 @@ module PrestoHeaders end HEADERS = { - "User-Agent" => "presto-ruby/#{VERSION}", + "User-Agent" => "trino-ruby/#{VERSION}", } def self.faraday_client(options) @@ -105,38 +122,79 @@ def self.faraday_ssl_options(options) end def self.optional_headers(options) + usePrestoHeader = false + if (v = options[:model_version]) && v.to_f < 351 + usePrestoHeader = true + end + headers = {} if v = options[:user] - headers[PrestoHeaders::PRESTO_USER] = v + if usePrestoHeader + headers[PrestoHeaders::PRESTO_USER] = v + else + headers[TrinoHeaders::TRINO_USER] = v + end end if v = options[:source] - headers[PrestoHeaders::PRESTO_SOURCE] = v + if usePrestoHeader + headers[PrestoHeaders::PRESTO_SOURCE] = v + else + headers[TrinoHeaders::TRINO_SOURCE] = v + end end if v = options[:catalog] - headers[PrestoHeaders::PRESTO_CATALOG] = v + if usePrestoHeader + headers[PrestoHeaders::PRESTO_CATALOG] = v + else + headers[TrinoHeaders::TRINO_CATALOG] = v + end end if v = options[:schema] - headers[PrestoHeaders::PRESTO_SCHEMA] = v + if usePrestoHeader + headers[PrestoHeaders::PRESTO_SCHEMA] = v + else + headers[TrinoHeaders::TRINO_SCHEMA] = v + end end if v = options[:time_zone] - headers[PrestoHeaders::PRESTO_TIME_ZONE] = v + if usePrestoHeader + headers[PrestoHeaders::PRESTO_TIME_ZONE] = v + else
+ headers[TrinoHeaders::TRINO_TIME_ZONE] = v + end end if v = options[:language] - headers[PrestoHeaders::PRESTO_LANGUAGE] = v + if usePrestoHeader + headers[PrestoHeaders::PRESTO_LANGUAGE] = v + else + headers[TrinoHeaders::TRINO_LANGUAGE] = v + end end if v = options[:properties] - headers[PrestoHeaders::PRESTO_SESSION] = encode_properties(v) + if usePrestoHeader + headers[PrestoHeaders::PRESTO_SESSION] = encode_properties(v) + else + headers[TrinoHeaders::TRINO_SESSION] = encode_properties(v) + end end if v = options[:client_info] - headers[PrestoHeaders::PRESTO_CLIENT_INFO] = encode_client_info(v) + if usePrestoHeader + headers[PrestoHeaders::PRESTO_CLIENT_INFO] = encode_client_info(v) + else + headers[TrinoHeaders::TRINO_CLIENT_INFO] = encode_client_info(v) + end end if v = options[:client_tags] - headers[PrestoHeaders::PRESTO_CLIENT_TAGS] = encode_client_tags(v) + if usePrestoHeader + headers[PrestoHeaders::PRESTO_CLIENT_TAGS] = encode_client_tags(v) + else + headers[TrinoHeaders::TRINO_CLIENT_TAGS] = encode_client_tags(v) + end end if options[:enable_x_msgpack] # option name is enable_"x"_msgpack because "Accept: application/x-msgpack" header is - # not officially supported by Presto. We can use this option only if a proxy server - # decodes & encodes response body. Once this option is supported by Presto, option + # not officially supported by Trino. We can use this option only if a proxy server + # decodes & encodes response body. Once this option is supported by Trino, option # name should be enable_msgpack, which might be slightly different behavior. 
headers['Accept'] = 'application/x-msgpack,application/json' end diff --git a/lib/presto/client/model_versions/0.149.rb b/lib/trino/client/model_versions/0.149.rb similarity index 99% rename from lib/presto/client/model_versions/0.149.rb rename to lib/trino/client/model_versions/0.149.rb index 82d45fec..52ce4507 100644 --- a/lib/presto/client/model_versions/0.149.rb +++ b/lib/trino/client/model_versions/0.149.rb @@ -1,5 +1,5 @@ # -# Presto client for Ruby +# Trino client for Ruby # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,10 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # -module Presto::Client::ModelVersions +module Trino::Client::ModelVersions #### - ## lib/presto/client/model_versions/*.rb is automatically generated using "rake modelgen:all" command. + ## lib/trino/client/model_versions/*.rb is automatically generated using "rake modelgen:all" command. ## You should not edit this file directly. To modify the class definitions, edit ## modelgen/model_versions.rb file and run "rake modelgen:all". ## diff --git a/lib/presto/client/model_versions/0.153.rb b/lib/trino/client/model_versions/0.153.rb similarity index 99% rename from lib/presto/client/model_versions/0.153.rb rename to lib/trino/client/model_versions/0.153.rb index 3c51599d..92f34f2a 100644 --- a/lib/presto/client/model_versions/0.153.rb +++ b/lib/trino/client/model_versions/0.153.rb @@ -1,5 +1,5 @@ # -# Presto client for Ruby +# Trino client for Ruby # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,10 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -module Presto::Client::ModelVersions +module Trino::Client::ModelVersions #### - ## lib/presto/client/model_versions/*.rb is automatically generated using "rake modelgen:all" command. + ## lib/trino/client/model_versions/*.rb is automatically generated using "rake modelgen:all" command. ## You should not edit this file directly. To modify the class definitions, edit ## modelgen/model_versions.rb file and run "rake modelgen:all". ## diff --git a/lib/presto/client/model_versions/0.173.rb b/lib/trino/client/model_versions/0.173.rb similarity index 99% rename from lib/presto/client/model_versions/0.173.rb rename to lib/trino/client/model_versions/0.173.rb index f8d8fa92..3d75aa9b 100644 --- a/lib/presto/client/model_versions/0.173.rb +++ b/lib/trino/client/model_versions/0.173.rb @@ -1,5 +1,5 @@ # -# Presto client for Ruby +# Trino client for Ruby # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,10 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # -module Presto::Client::ModelVersions +module Trino::Client::ModelVersions #### - ## lib/presto/client/model_versions/*.rb is automatically generated using "rake modelgen:all" command. + ## lib/trino/client/model_versions/*.rb is automatically generated using "rake modelgen:all" command. ## You should not edit this file directly. To modify the class definitions, edit ## modelgen/model_versions.rb file and run "rake modelgen:all". 
## diff --git a/lib/presto/client/model_versions/0.178.rb b/lib/trino/client/model_versions/0.178.rb similarity index 99% rename from lib/presto/client/model_versions/0.178.rb rename to lib/trino/client/model_versions/0.178.rb index 43f207ef..dc2f424c 100644 --- a/lib/presto/client/model_versions/0.178.rb +++ b/lib/trino/client/model_versions/0.178.rb @@ -1,5 +1,5 @@ # -# Presto client for Ruby +# Trino client for Ruby # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,10 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # -module Presto::Client::ModelVersions +module Trino::Client::ModelVersions #### - ## lib/presto/client/model_versions/*.rb is automatically generated using "rake modelgen:all" command. + ## lib/trino/client/model_versions/*.rb is automatically generated using "rake modelgen:all" command. ## You should not edit this file directly. To modify the class definitions, edit ## modelgen/model_versions.rb file and run "rake modelgen:all". ## diff --git a/lib/presto/client/model_versions/0.205.rb b/lib/trino/client/model_versions/0.205.rb similarity index 99% rename from lib/presto/client/model_versions/0.205.rb rename to lib/trino/client/model_versions/0.205.rb index 489fadd3..2ddd73ac 100644 --- a/lib/presto/client/model_versions/0.205.rb +++ b/lib/trino/client/model_versions/0.205.rb @@ -1,5 +1,5 @@ # -# Presto client for Ruby +# Trino client for Ruby # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,10 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # -module Presto::Client::ModelVersions +module Trino::Client::ModelVersions #### - ## lib/presto/client/model_versions/*.rb is automatically generated using "rake modelgen:all" command. 
+ ## lib/trino/client/model_versions/*.rb is automatically generated using "rake modelgen:all" command. ## You should not edit this file directly. To modify the class definitions, edit ## modelgen/model_versions.rb file and run "rake modelgen:all". ## diff --git a/lib/presto/client/model_versions/303.rb b/lib/trino/client/model_versions/303.rb similarity index 99% rename from lib/presto/client/model_versions/303.rb rename to lib/trino/client/model_versions/303.rb index cf6359ab..010a39cb 100644 --- a/lib/presto/client/model_versions/303.rb +++ b/lib/trino/client/model_versions/303.rb @@ -1,5 +1,5 @@ # -# Presto client for Ruby +# Trino client for Ruby # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,10 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # -module Presto::Client::ModelVersions +module Trino::Client::ModelVersions #### - ## lib/presto/client/model_versions/*.rb is automatically generated using "rake modelgen:all" command. + ## lib/trino/client/model_versions/*.rb is automatically generated using "rake modelgen:all" command. ## You should not edit this file directly. To modify the class definitions, edit ## modelgen/model_versions.rb file and run "rake modelgen:all". 
## @@ -1533,7 +1533,7 @@ def decode(hash) end end - class << PrestoWarning = + class << TrinoWarning = Base.new(:warning_code, :message) def decode(hash) unless hash.is_a?(Hash) @@ -1615,7 +1615,7 @@ def decode(hash) hash["outputStage"] && StageInfo.decode(hash["outputStage"]), hash["failureInfo"] && ExecutionFailureInfo.decode(hash["failureInfo"]), hash["errorCode"] && ErrorCode.decode(hash["errorCode"]), - hash["warnings"] && hash["warnings"].map {|h| PrestoWarning.decode(h) }, + hash["warnings"] && hash["warnings"].map {|h| TrinoWarning.decode(h) }, hash["inputs"] && hash["inputs"].map {|h| Input.decode(h) }, hash["output"] && Output.decode(hash["output"]), hash["completeInfo"], diff --git a/lib/presto/client/model_versions/316.rb b/lib/trino/client/model_versions/316.rb similarity index 99% rename from lib/presto/client/model_versions/316.rb rename to lib/trino/client/model_versions/316.rb index f9cfa405..8156ff91 100644 --- a/lib/presto/client/model_versions/316.rb +++ b/lib/trino/client/model_versions/316.rb @@ -1,5 +1,5 @@ # -# Presto client for Ruby +# Trino client for Ruby # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,10 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # -module Presto::Client::ModelVersions +module Trino::Client::ModelVersions #### - ## lib/presto/client/model_versions/*.rb is automatically generated using "rake modelgen:all" command. + ## lib/trino/client/model_versions/*.rb is automatically generated using "rake modelgen:all" command. ## You should not edit this file directly. To modify the class definitions, edit ## modelgen/model_versions.rb file and run "rake modelgen:all". 
## diff --git a/lib/trino/client/model_versions/351.rb b/lib/trino/client/model_versions/351.rb new file mode 100644 index 00000000..d016de9c --- /dev/null +++ b/lib/trino/client/model_versions/351.rb @@ -0,0 +1,2726 @@ +# +# Trino client for Ruby +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +module Trino::Client::ModelVersions + + #### + ## lib/trino/client/model_versions/*.rb is automatically generated using "rake modelgen:all" command. + ## You should not edit this file directly. To modify the class definitions, edit + ## modelgen/model_versions.rb file and run "rake modelgen:all". 
+ ## + + module V351 + class Base < Struct + class << self + alias_method :new_struct, :new + + def new(*args) + new_struct(*args) do + # make it immutable + undef_method :"[]=" + members.each do |m| + undef_method :"#{m}=" + end + + # replace constructor to receive hash instead of array + alias_method :initialize_struct, :initialize + + def initialize(params={}) + initialize_struct(*members.map {|m| params[m] }) + end + end + end + end + end + + class StageId < String + def initialize(str) + super + splitted = split('.', 2) + @query_id = splitted[0] + @id = splitted[1] + end + + attr_reader :query_id, :id + end + + class TaskId < String + def initialize(str) + super + splitted = split('.', 3) + @stage_id = StageId.new("#{splitted[0]}.#{splitted[1]}") + @query_id = @stage_id.query_id + @id = splitted[2] + end + + attr_reader :query_id, :stage_id, :id + end + + class Lifespan < String + def initialize(str) + super + if str == "TaskWide" + @grouped = false + @group_id = 0 + else + # Group1 + @grouped = true + @group_id = str[5..-1].to_i + end + end + + attr_reader :grouped, :group_id + end + + class ConnectorSession < Hash + def initialize(hash) + super() + merge!(hash) + end + end + + module PlanNode + def self.decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + model_class = case hash["@type"] + when "output" then OutputNode + when "project" then ProjectNode + when "tablescan" then TableScanNode + when "values" then ValuesNode + when "aggregation" then AggregationNode + when "markDistinct" then MarkDistinctNode + when "filter" then FilterNode + when "window" then WindowNode + when "rowNumber" then RowNumberNode + when "topnRowNumber" then TopNRowNumberNode + when "limit" then LimitNode + when "distinctlimit" then DistinctLimitNode + when "topn" then TopNNode + when "sample" then SampleNode + when "sort" then SortNode + when "remoteSource" then RemoteSourceNode + when "join" then JoinNode + when "semijoin" then 
SemiJoinNode + when "spatialjoin" then SpatialJoinNode + when "indexjoin" then IndexJoinNode + when "indexsource" then IndexSourceNode + when "tablewriter" then TableWriterNode + when "delete" then DeleteNode + when "metadatadelete" then MetadataDeleteNode + when "tablecommit" then TableFinishNode + when "unnest" then UnnestNode + when "exchange" then ExchangeNode + when "union" then UnionNode + when "intersect" then IntersectNode + when "scalar" then EnforceSingleRowNode + when "groupid" then GroupIdNode + when "explainAnalyze" then ExplainAnalyzeNode + when "apply" then ApplyNode + when "assignUniqueId" then AssignUniqueId + when "correlatedJoin" then CorrelatedJoinNode + when "statisticsWriterNode" then StatisticsWriterNode + end + if model_class + node = model_class.decode(hash) + class << node + attr_accessor :plan_node_type + end + node.plan_node_type = hash['@type'] + node + end + end + end + + # io.airlift.stats.Distribution.DistributionSnapshot + class << DistributionSnapshot = + Base.new(:max_error, :count, :total, :p01, :p05, :p10, :p25, :p50, :p75, :p90, :p95, :p99, :min, :max) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["maxError"], + hash["count"], + hash["total"], + hash["p01"], + hash["p05"], + hash["p10"], + hash["p25"], + hash["p50"], + hash["p75"], + hash["p90"], + hash["p95"], + hash["p99"], + hash["min"], + hash["max"], + ) + obj + end + end + + # This is a hybrid of JoinNode.EquiJoinClause and IndexJoinNode.EquiJoinClause + class << EquiJoinClause = + Base.new(:left, :right, :probe, :index) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["left"], + hash["right"], + hash["probe"], + hash["index"], + ) + obj + end + end + + class << WriterTarget = + Base.new(:type, :handle) + def decode(hash) + unless 
hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + model_class = case hash["@type"] + when "CreateTarget" then CreateTarget + when "InsertTarget" then InsertTarget + when "DeleteTarget" then DeleteTarget + end + if model_class + model_class.decode(hash) + end + end + end + + class << WriteStatisticsTarget = + Base.new(:type, :handle) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + model_class = case hash["@type"] + when "WriteStatisticsHandle" then WriteStatisticsHandle + end + if model_class + model_class.decode(hash) + end + end + end + + # Inner classes + module OperatorInfo + def self.decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + model_class = case hash["@type"] + when "exchangeClientStatus" then ExchangeClientStatus + when "localExchangeBuffer" then LocalExchangeBufferInfo + when "tableFinish" then TableFinishInfo + when "splitOperator" then SplitOperatorInfo + when "hashCollisionsInfo" then HashCollisionsInfo + when "partitionedOutput" then PartitionedOutputInfo + when "joinOperatorInfo" then JoinOperatorInfo + when "windowInfo" then WindowInfo + when "tableWriter" then TableWriterInfo + end + if model_class + model_class.decode(hash) + end + end + end + + class << HashCollisionsInfo = + Base.new(:weighted_hash_collisions, :weighted_sum_squared_hash_collisions, :weighted_expectedHash_collisions) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["weighted_hash_collisions"], + hash["weighted_sum_squared_hash_collisions"], + hash["weighted_expectedHash_collisions"] + ) + obj + end + end + + class ResourceGroupId < Array + def initialize(array) + super() + concat(array) + end + end + + ## + # Those model classes are automatically generated + # + + class << Aggregation = 
+ Base.new(:resolved_function, :arguments, :distinct, :filter, :ordering_scheme, :mask) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["resolvedFunction"] && ResolvedFunction.decode(hash["resolvedFunction"]), + hash["arguments"], + hash["distinct"], + hash["filter"], + hash["orderingScheme"] && OrderingScheme.decode(hash["orderingScheme"]), + hash["mask"], + ) + obj + end + end + + class << AggregationNode = + Base.new(:id, :source, :aggregations, :grouping_sets, :pre_grouped_symbols, :step, :hash_symbol, :group_id_symbol) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["id"], + hash["source"] && PlanNode.decode(hash["source"]), + hash["aggregations"] && Hash[hash["aggregations"].to_a.map! {|k,v| [k, Aggregation.decode(v)] }], + hash["groupingSets"] && GroupingSetDescriptor.decode(hash["groupingSets"]), + hash["preGroupedSymbols"], + hash["step"] && hash["step"].downcase.to_sym, + hash["hashSymbol"], + hash["groupIdSymbol"], + ) + obj + end + end + + class << AnalyzeTableHandle = + Base.new(:catalog_name, :transaction_handle, :connector_handle) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["catalogName"], + hash["transactionHandle"], + hash["connectorHandle"], + ) + obj + end + end + + class << ApplyNode = + Base.new(:id, :input, :subquery, :subquery_assignments, :correlation, :origin_subquery) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["id"], + hash["input"] && PlanNode.decode(hash["input"]), + hash["subquery"] && PlanNode.decode(hash["subquery"]), + hash["subqueryAssignments"] && 
Assignments.decode(hash["subqueryAssignments"]), + hash["correlation"], + hash["originSubquery"], + ) + obj + end + end + + class << ArgumentBinding = + Base.new(:expression, :constant) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["expression"], + hash["constant"], + ) + obj + end + end + + class << AssignUniqueId = + Base.new(:id, :source, :id_column) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["id"], + hash["source"] && PlanNode.decode(hash["source"]), + hash["idColumn"], + ) + obj + end + end + + class << Assignments = + Base.new(:assignments) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["assignments"], + ) + obj + end + end + + class << BasicQueryInfo = + Base.new(:query_id, :session, :resource_group_id, :state, :memory_pool, :scheduled, :self, :query, :update_type, :prepared_query, :query_stats, :error_type, :error_code, :query_type) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["queryId"], + hash["session"] && SessionRepresentation.decode(hash["session"]), + hash["resourceGroupId"] && ResourceGroupId.new(hash["resourceGroupId"]), + hash["state"] && hash["state"].downcase.to_sym, + hash["memoryPool"], + hash["scheduled"], + hash["self"], + hash["query"], + hash["updateType"], + hash["preparedQuery"], + hash["queryStats"] && BasicQueryStats.decode(hash["queryStats"]), + hash["errorType"] && hash["errorType"].downcase.to_sym, + hash["errorCode"] && ErrorCode.decode(hash["errorCode"]), + hash["queryType"] && hash["queryType"].downcase.to_sym, + ) + obj + end + end + + class << BasicQueryStats = + 
Base.new(:create_time, :end_time, :queued_time, :elapsed_time, :execution_time, :total_drivers, :queued_drivers, :running_drivers, :completed_drivers, :raw_input_data_size, :raw_input_positions, :physical_input_data_size, :cumulative_user_memory, :user_memory_reservation, :total_memory_reservation, :peak_user_memory_reservation, :peak_total_memory_reservation, :total_cpu_time, :total_scheduled_time, :fully_blocked, :blocked_reasons, :progress_percentage) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["createTime"], + hash["endTime"], + hash["queuedTime"], + hash["elapsedTime"], + hash["executionTime"], + hash["totalDrivers"], + hash["queuedDrivers"], + hash["runningDrivers"], + hash["completedDrivers"], + hash["rawInputDataSize"], + hash["rawInputPositions"], + hash["physicalInputDataSize"], + hash["cumulativeUserMemory"], + hash["userMemoryReservation"], + hash["totalMemoryReservation"], + hash["peakUserMemoryReservation"], + hash["peakTotalMemoryReservation"], + hash["totalCpuTime"], + hash["totalScheduledTime"], + hash["fullyBlocked"], + hash["blockedReasons"] && hash["blockedReasons"].map {|h| h.downcase.to_sym }, + hash["progressPercentage"], + ) + obj + end + end + + class << BoundSignature = + Base.new(:name, :return_type, :argument_types) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["name"], + hash["returnType"], + hash["argumentTypes"], + ) + obj + end + end + + class << BufferInfo = + Base.new(:buffer_id, :finished, :buffered_pages, :pages_sent, :page_buffer_info) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["bufferId"], + hash["finished"], + hash["bufferedPages"], + hash["pagesSent"], + hash["pageBufferInfo"] 
&& PageBufferInfo.decode(hash["pageBufferInfo"]), + ) + obj + end + end + + class << ClientColumn = + Base.new(:name, :type, :type_signature) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["name"], + hash["type"], + hash["typeSignature"] && ClientTypeSignature.decode(hash["typeSignature"]), + ) + obj + end + end + + class << ClientStageStats = + Base.new(:stage_id, :state, :done, :nodes, :total_splits, :queued_splits, :running_splits, :completed_splits, :cpu_time_millis, :wall_time_millis, :processed_rows, :processed_bytes, :physical_input_bytes, :sub_stages) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["stageId"], + hash["state"], + hash["done"], + hash["nodes"], + hash["totalSplits"], + hash["queuedSplits"], + hash["runningSplits"], + hash["completedSplits"], + hash["cpuTimeMillis"], + hash["wallTimeMillis"], + hash["processedRows"], + hash["processedBytes"], + hash["physicalInputBytes"], + hash["subStages"] && hash["subStages"].map {|h| ClientStageStats.decode(h) }, + ) + obj + end + end + + class << ClientTypeSignature = + Base.new(:raw_type, :arguments) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["rawType"], + hash["arguments"] && hash["arguments"].map {|h| ClientTypeSignatureParameter.decode(h) }, + ) + obj + end + end + + class << ClientTypeSignatureParameter = + Base.new(:kind, :value) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["kind"] && hash["kind"].downcase.to_sym, + hash["value"], + ) + obj + end + end + + class << Code = + Base.new(:code, :name) + def decode(hash) + unless 
hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["code"], + hash["name"], + ) + obj + end + end + + class << Column = + Base.new(:name, :type) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["name"], + hash["type"], + ) + obj + end + end + + class << ColumnStatisticMetadata = + Base.new(:column_name, :statistic_type) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["columnName"], + hash["statisticType"] && hash["statisticType"].downcase.to_sym, + ) + obj + end + end + + class << CorrelatedJoinNode = + Base.new(:id, :input, :subquery, :correlation, :type, :filter, :origin_subquery) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["id"], + hash["input"] && PlanNode.decode(hash["input"]), + hash["subquery"] && PlanNode.decode(hash["subquery"]), + hash["correlation"], + hash["type"], + hash["filter"], + hash["originSubquery"], + ) + obj + end + end + + class << CreateTarget = + Base.new(:handle, :schema_table_name) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["handle"] && OutputTableHandle.decode(hash["handle"]), + hash["schemaTableName"] && SchemaTableName.decode(hash["schemaTableName"]), + ) + obj + end + end + + class << DeleteNode = + Base.new(:id, :source, :target, :row_id, :outputs) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["id"], + hash["source"] && PlanNode.decode(hash["source"]), + hash["target"] && 
DeleteTarget.decode(hash["target"]), + hash["rowId"], + hash["outputs"], + ) + obj + end + end + + class << DeleteTarget = + Base.new(:handle, :schema_table_name) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["handle"] && TableHandle.decode(hash["handle"]), + hash["schemaTableName"] && SchemaTableName.decode(hash["schemaTableName"]), + ) + obj + end + end + + class << DistinctLimitNode = + Base.new(:id, :source, :limit, :partial, :distinct_symbols, :hash_symbol) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["id"], + hash["source"] && PlanNode.decode(hash["source"]), + hash["limit"], + hash["partial"], + hash["distinctSymbols"], + hash["hashSymbol"], + ) + obj + end + end + + class << DriverStats = + Base.new(:lifespan, :create_time, :start_time, :end_time, :queued_time, :elapsed_time, :user_memory_reservation, :revocable_memory_reservation, :system_memory_reservation, :total_scheduled_time, :total_cpu_time, :total_blocked_time, :fully_blocked, :blocked_reasons, :physical_input_data_size, :physical_input_positions, :physical_input_read_time, :internal_network_input_data_size, :internal_network_input_positions, :internal_network_input_read_time, :raw_input_data_size, :raw_input_positions, :raw_input_read_time, :processed_input_data_size, :processed_input_positions, :output_data_size, :output_positions, :physical_written_data_size, :operator_stats) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["lifespan"] && Lifespan.new(hash["lifespan"]), + hash["createTime"], + hash["startTime"], + hash["endTime"], + hash["queuedTime"], + hash["elapsedTime"], + hash["userMemoryReservation"], + hash["revocableMemoryReservation"], + 
hash["systemMemoryReservation"], + hash["totalScheduledTime"], + hash["totalCpuTime"], + hash["totalBlockedTime"], + hash["fullyBlocked"], + hash["blockedReasons"] && hash["blockedReasons"].map {|h| h.downcase.to_sym }, + hash["physicalInputDataSize"], + hash["physicalInputPositions"], + hash["physicalInputReadTime"], + hash["internalNetworkInputDataSize"], + hash["internalNetworkInputPositions"], + hash["internalNetworkInputReadTime"], + hash["rawInputDataSize"], + hash["rawInputPositions"], + hash["rawInputReadTime"], + hash["processedInputDataSize"], + hash["processedInputPositions"], + hash["outputDataSize"], + hash["outputPositions"], + hash["physicalWrittenDataSize"], + hash["operatorStats"] && hash["operatorStats"].map {|h| OperatorStats.decode(h) }, + ) + obj + end + end + + class << DriverWindowInfo = + Base.new(:sum_squared_differences_positions_of_index, :sum_squared_differences_size_of_index, :sum_squared_differences_size_in_partition, :total_partitions_count, :total_rows_count, :number_of_indexes) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["sumSquaredDifferencesPositionsOfIndex"], + hash["sumSquaredDifferencesSizeOfIndex"], + hash["sumSquaredDifferencesSizeInPartition"], + hash["totalPartitionsCount"], + hash["totalRowsCount"], + hash["numberOfIndexes"], + ) + obj + end + end + + class << DynamicFilterDomainStats = + Base.new(:dynamic_filter_id, :simplified_domain, :range_count, :discrete_values_count, :collection_duration) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["dynamicFilterId"], + hash["simplifiedDomain"], + hash["rangeCount"], + hash["discreteValuesCount"], + hash["collectionDuration"], + ) + obj + end + end + + class << DynamicFiltersStats = + Base.new(:dynamic_filter_domain_stats, :lazy_dynamic_filters, 
:replicated_dynamic_filters, :total_dynamic_filters, :dynamic_filters_completed) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["dynamicFilterDomainStats"] && hash["dynamicFilterDomainStats"].map {|h| DynamicFilterDomainStats.decode(h) }, + hash["lazyDynamicFilters"], + hash["replicatedDynamicFilters"], + hash["totalDynamicFilters"], + hash["dynamicFiltersCompleted"], + ) + obj + end + end + + class << EnforceSingleRowNode = + Base.new(:id, :source) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["id"], + hash["source"] && PlanNode.decode(hash["source"]), + ) + obj + end + end + + class << ErrorCode = + Base.new(:code, :name, :type) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["code"], + hash["name"], + hash["type"] && hash["type"].downcase.to_sym, + ) + obj + end + end + + class << ErrorLocation = + Base.new(:line_number, :column_number) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["lineNumber"], + hash["columnNumber"], + ) + obj + end + end + + class << ExchangeClientStatus = + Base.new(:buffered_bytes, :max_buffered_bytes, :average_bytes_per_request, :successful_requests_count, :buffered_pages, :no_more_locations, :page_buffer_client_statuses) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["bufferedBytes"], + hash["maxBufferedBytes"], + hash["averageBytesPerRequest"], + hash["successfulRequestsCount"], + hash["bufferedPages"], + hash["noMoreLocations"], + hash["pageBufferClientStatuses"] && 
hash["pageBufferClientStatuses"].map {|h| PageBufferClientStatus.decode(h) }, + ) + obj + end + end + + class << ExchangeNode = + Base.new(:id, :type, :scope, :partitioning_scheme, :sources, :inputs, :ordering_scheme) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["id"], + hash["type"], + hash["scope"] && hash["scope"].downcase.to_sym, + hash["partitioningScheme"] && PartitioningScheme.decode(hash["partitioningScheme"]), + hash["sources"] && hash["sources"].map {|h| PlanNode.decode(h) }, + hash["inputs"], + hash["orderingScheme"] && OrderingScheme.decode(hash["orderingScheme"]), + ) + obj + end + end + + class << ExecutionFailureInfo = + Base.new(:type, :message, :cause, :suppressed, :stack, :error_location, :error_code, :remote_host) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["type"], + hash["message"], + hash["cause"] && ExecutionFailureInfo.decode(hash["cause"]), + hash["suppressed"] && hash["suppressed"].map {|h| ExecutionFailureInfo.decode(h) }, + hash["stack"], + hash["errorLocation"] && ErrorLocation.decode(hash["errorLocation"]), + hash["errorCode"] && ErrorCode.decode(hash["errorCode"]), + hash["remoteHost"], + ) + obj + end + end + + class << ExplainAnalyzeNode = + Base.new(:id, :source, :output_symbol, :actual_outputs, :verbose) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["id"], + hash["source"] && PlanNode.decode(hash["source"]), + hash["outputSymbol"], + hash["actualOutputs"], + hash["verbose"], + ) + obj + end + end + + class << FailureInfo = + Base.new(:type, :message, :cause, :suppressed, :stack, :error_location) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert 
#{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["type"], + hash["message"], + hash["cause"] && FailureInfo.decode(hash["cause"]), + hash["suppressed"] && hash["suppressed"].map {|h| FailureInfo.decode(h) }, + hash["stack"], + hash["errorLocation"] && ErrorLocation.decode(hash["errorLocation"]), + ) + obj + end + end + + class << FilterNode = + Base.new(:id, :source, :predicate) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["id"], + hash["source"] && PlanNode.decode(hash["source"]), + hash["predicate"], + ) + obj + end + end + + class << Function = + Base.new(:resolved_function, :arguments, :frame, :ignore_nulls) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["resolvedFunction"] && ResolvedFunction.decode(hash["resolvedFunction"]), + hash["arguments"], + hash["frame"], + hash["ignoreNulls"], + ) + obj + end + end + + class << GroupIdNode = + Base.new(:id, :source, :grouping_sets, :grouping_columns, :aggregation_arguments, :group_id_symbol) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["id"], + hash["source"] && PlanNode.decode(hash["source"]), + hash["groupingSets"], + hash["groupingColumns"], + hash["aggregationArguments"], + hash["groupIdSymbol"], + ) + obj + end + end + + class << GroupingSetDescriptor = + Base.new(:grouping_keys, :grouping_set_count, :global_grouping_sets) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["groupingKeys"], + hash["groupingSetCount"], + hash["globalGroupingSets"], + ) + obj + end + end + + class << IndexHandle = + Base.new(:catalog_name, 
:transaction_handle, :connector_handle) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["catalogName"], + hash["transactionHandle"], + hash["connectorHandle"], + ) + obj + end + end + + class << IndexJoinNode = + Base.new(:id, :type, :probe_source, :index_source, :criteria, :probe_hash_symbol, :index_hash_symbol) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["id"], + hash["type"], + hash["probeSource"] && PlanNode.decode(hash["probeSource"]), + hash["indexSource"] && PlanNode.decode(hash["indexSource"]), + hash["criteria"] && hash["criteria"].map {|h| EquiJoinClause.decode(h) }, + hash["probeHashSymbol"], + hash["indexHashSymbol"], + ) + obj + end + end + + class << IndexSourceNode = + Base.new(:id, :index_handle, :table_handle, :lookup_symbols, :output_symbols, :assignments) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["id"], + hash["indexHandle"] && IndexHandle.decode(hash["indexHandle"]), + hash["tableHandle"] && TableHandle.decode(hash["tableHandle"]), + hash["lookupSymbols"], + hash["outputSymbols"], + hash["assignments"], + ) + obj + end + end + + class << Input = + Base.new(:catalog_name, :schema, :table, :connector_info, :columns, :fragment_id, :plan_node_id) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["catalogName"], + hash["schema"], + hash["table"], + hash["connectorInfo"], + hash["columns"] && hash["columns"].map {|h| Column.decode(h) }, + hash["fragmentId"], + hash["planNodeId"], + ) + obj + end + end + + class << InsertTableHandle = + Base.new(:catalog_name, :transaction_handle, 
:connector_handle) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["catalogName"], + hash["transactionHandle"], + hash["connectorHandle"], + ) + obj + end + end + + class << InsertTarget = + Base.new(:handle, :schema_table_name) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["handle"] && InsertTableHandle.decode(hash["handle"]), + hash["schemaTableName"] && SchemaTableName.decode(hash["schemaTableName"]), + ) + obj + end + end + + class << IntersectNode = + Base.new(:id, :sources, :output_to_inputs, :outputs, :distinct) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["id"], + hash["sources"] && hash["sources"].map {|h| PlanNode.decode(h) }, + hash["outputToInputs"], + hash["outputs"], + hash["distinct"], + ) + obj + end + end + + class << JoinNode = + Base.new(:id, :type, :left, :right, :criteria, :left_output_symbols, :right_output_symbols, :filter, :left_hash_symbol, :right_hash_symbol, :distribution_type, :spillable, :dynamic_filters, :reorder_join_stats_and_cost) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["id"], + hash["type"], + hash["left"] && PlanNode.decode(hash["left"]), + hash["right"] && PlanNode.decode(hash["right"]), + hash["criteria"] && hash["criteria"].map {|h| EquiJoinClause.decode(h) }, + hash["leftOutputSymbols"], + hash["rightOutputSymbols"], + hash["filter"], + hash["leftHashSymbol"], + hash["rightHashSymbol"], + hash["distributionType"] && hash["distributionType"].downcase.to_sym, + hash["spillable"], + hash["dynamicFilters"], + hash["reorderJoinStatsAndCost"] && 
PlanNodeStatsAndCostSummary.decode(hash["reorderJoinStatsAndCost"]), + ) + obj + end + end + + class << JoinOperatorInfo = + Base.new(:join_type, :log_histogram_probes, :log_histogram_output, :lookup_source_positions) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["joinType"] && hash["joinType"].downcase.to_sym, + hash["logHistogramProbes"], + hash["logHistogramOutput"], + hash["lookupSourcePositions"], + ) + obj + end + end + + class << LimitNode = + Base.new(:id, :source, :count, :ties_resolving_scheme, :partial) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["id"], + hash["source"] && PlanNode.decode(hash["source"]), + hash["count"], + hash["tiesResolvingScheme"] && OrderingScheme.decode(hash["tiesResolvingScheme"]), + hash["partial"], + ) + obj + end + end + + class << LocalCostEstimate = + Base.new(:cpu_cost, :max_memory, :network_cost) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["cpuCost"], + hash["maxMemory"], + hash["networkCost"], + ) + obj + end + end + + class << LocalExchangeBufferInfo = + Base.new(:buffered_bytes, :buffered_pages) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["bufferedBytes"], + hash["bufferedPages"], + ) + obj + end + end + + class << Mapping = + Base.new(:input, :outputs) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["input"], + hash["outputs"], + ) + obj + end + end + + class << MarkDistinctNode = + Base.new(:id, :source, :marker_symbol, :distinct_symbols, 
:hash_symbol) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["id"], + hash["source"] && PlanNode.decode(hash["source"]), + hash["markerSymbol"], + hash["distinctSymbols"], + hash["hashSymbol"], + ) + obj + end + end + + class << OperatorStats = + Base.new(:stage_id, :pipeline_id, :operator_id, :plan_node_id, :operator_type, :total_drivers, :add_input_calls, :add_input_wall, :add_input_cpu, :physical_input_data_size, :physical_input_positions, :internal_network_input_data_size, :internal_network_input_positions, :raw_input_data_size, :input_data_size, :input_positions, :sum_squared_input_positions, :get_output_calls, :get_output_wall, :get_output_cpu, :output_data_size, :output_positions, :dynamic_filter_splits_processed, :physical_written_data_size, :blocked_wall, :finish_calls, :finish_wall, :finish_cpu, :user_memory_reservation, :revocable_memory_reservation, :system_memory_reservation, :peak_user_memory_reservation, :peak_system_memory_reservation, :peak_revocable_memory_reservation, :peak_total_memory_reservation, :spilled_data_size, :blocked_reason, :info) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["stageId"], + hash["pipelineId"], + hash["operatorId"], + hash["planNodeId"], + hash["operatorType"], + hash["totalDrivers"], + hash["addInputCalls"], + hash["addInputWall"], + hash["addInputCpu"], + hash["physicalInputDataSize"], + hash["physicalInputPositions"], + hash["internalNetworkInputDataSize"], + hash["internalNetworkInputPositions"], + hash["rawInputDataSize"], + hash["inputDataSize"], + hash["inputPositions"], + hash["sumSquaredInputPositions"], + hash["getOutputCalls"], + hash["getOutputWall"], + hash["getOutputCpu"], + hash["outputDataSize"], + hash["outputPositions"], + hash["dynamicFilterSplitsProcessed"], + 
hash["physicalWrittenDataSize"], + hash["blockedWall"], + hash["finishCalls"], + hash["finishWall"], + hash["finishCpu"], + hash["userMemoryReservation"], + hash["revocableMemoryReservation"], + hash["systemMemoryReservation"], + hash["peakUserMemoryReservation"], + hash["peakSystemMemoryReservation"], + hash["peakRevocableMemoryReservation"], + hash["peakTotalMemoryReservation"], + hash["spilledDataSize"], + hash["blockedReason"] && hash["blockedReason"].downcase.to_sym, + hash["info"] && OperatorInfo.decode(hash["info"]), + ) + obj + end + end + + class << OrderingScheme = + Base.new(:order_by, :orderings) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["orderBy"], + hash["orderings"] && Hash[hash["orderings"].to_a.map! {|k,v| [k, v.downcase.to_sym] }], + ) + obj + end + end + + class << Output = + Base.new(:catalog_name, :schema, :table) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["catalogName"], + hash["schema"], + hash["table"], + ) + obj + end + end + + class << OutputBufferInfo = + Base.new(:type, :state, :can_add_buffers, :can_add_pages, :total_buffered_bytes, :total_buffered_pages, :total_rows_sent, :total_pages_sent, :buffers) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["type"], + hash["state"] && hash["state"].downcase.to_sym, + hash["canAddBuffers"], + hash["canAddPages"], + hash["totalBufferedBytes"], + hash["totalBufferedPages"], + hash["totalRowsSent"], + hash["totalPagesSent"], + hash["buffers"] && hash["buffers"].map {|h| BufferInfo.decode(h) }, + ) + obj + end + end + + class << OutputNode = + Base.new(:id, :source, :columns, :outputs) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, 
"Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["id"], + hash["source"] && PlanNode.decode(hash["source"]), + hash["columns"], + hash["outputs"], + ) + obj + end + end + + class << OutputTableHandle = + Base.new(:catalog_name, :transaction_handle, :connector_handle) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["catalogName"], + hash["transactionHandle"], + hash["connectorHandle"], + ) + obj + end + end + + class << PageBufferClientStatus = + Base.new(:uri, :state, :last_update, :rows_received, :pages_received, :rows_rejected, :pages_rejected, :requests_scheduled, :requests_completed, :requests_failed, :http_request_state) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["uri"], + hash["state"], + hash["lastUpdate"], + hash["rowsReceived"], + hash["pagesReceived"], + hash["rowsRejected"], + hash["pagesRejected"], + hash["requestsScheduled"], + hash["requestsCompleted"], + hash["requestsFailed"], + hash["httpRequestState"], + ) + obj + end + end + + class << PageBufferInfo = + Base.new(:partition, :buffered_pages, :buffered_bytes, :rows_added, :pages_added) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["partition"], + hash["bufferedPages"], + hash["bufferedBytes"], + hash["rowsAdded"], + hash["pagesAdded"], + ) + obj + end + end + + class << PartitionedOutputInfo = + Base.new(:rows_added, :pages_added, :output_buffer_peak_memory_usage) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["rowsAdded"], + hash["pagesAdded"], + hash["outputBufferPeakMemoryUsage"], + ) + 
obj + end + end + + class << Partitioning = + Base.new(:handle, :arguments) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["handle"] && PartitioningHandle.decode(hash["handle"]), + hash["arguments"] && hash["arguments"].map {|h| ArgumentBinding.decode(h) }, + ) + obj + end + end + + class << PartitioningHandle = + Base.new(:connector_id, :transaction_handle, :connector_handle) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["connectorId"], + hash["transactionHandle"], + hash["connectorHandle"], + ) + obj + end + end + + class << PartitioningScheme = + Base.new(:partitioning, :output_layout, :hash_column, :replicate_nulls_and_any, :bucket_to_partition) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["partitioning"] && Partitioning.decode(hash["partitioning"]), + hash["outputLayout"], + hash["hashColumn"], + hash["replicateNullsAndAny"], + hash["bucketToPartition"], + ) + obj + end + end + + class << PipelineStats = + Base.new(:pipeline_id, :first_start_time, :last_start_time, :last_end_time, :input_pipeline, :output_pipeline, :total_drivers, :queued_drivers, :queued_partitioned_drivers, :running_drivers, :running_partitioned_drivers, :blocked_drivers, :completed_drivers, :user_memory_reservation, :revocable_memory_reservation, :system_memory_reservation, :queued_time, :elapsed_time, :total_scheduled_time, :total_cpu_time, :total_blocked_time, :fully_blocked, :blocked_reasons, :physical_input_data_size, :physical_input_positions, :physical_input_read_time, :internal_network_input_data_size, :internal_network_input_positions, :raw_input_data_size, :raw_input_positions, :processed_input_data_size, :processed_input_positions, 
:output_data_size, :output_positions, :physical_written_data_size, :operator_summaries, :drivers) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["pipelineId"], + hash["firstStartTime"], + hash["lastStartTime"], + hash["lastEndTime"], + hash["inputPipeline"], + hash["outputPipeline"], + hash["totalDrivers"], + hash["queuedDrivers"], + hash["queuedPartitionedDrivers"], + hash["runningDrivers"], + hash["runningPartitionedDrivers"], + hash["blockedDrivers"], + hash["completedDrivers"], + hash["userMemoryReservation"], + hash["revocableMemoryReservation"], + hash["systemMemoryReservation"], + hash["queuedTime"] && DistributionSnapshot.decode(hash["queuedTime"]), + hash["elapsedTime"] && DistributionSnapshot.decode(hash["elapsedTime"]), + hash["totalScheduledTime"], + hash["totalCpuTime"], + hash["totalBlockedTime"], + hash["fullyBlocked"], + hash["blockedReasons"] && hash["blockedReasons"].map {|h| h.downcase.to_sym }, + hash["physicalInputDataSize"], + hash["physicalInputPositions"], + hash["physicalInputReadTime"], + hash["internalNetworkInputDataSize"], + hash["internalNetworkInputPositions"], + hash["rawInputDataSize"], + hash["rawInputPositions"], + hash["processedInputDataSize"], + hash["processedInputPositions"], + hash["outputDataSize"], + hash["outputPositions"], + hash["physicalWrittenDataSize"], + hash["operatorSummaries"] && hash["operatorSummaries"].map {|h| OperatorStats.decode(h) }, + hash["drivers"] && hash["drivers"].map {|h| DriverStats.decode(h) }, + ) + obj + end + end + + class << PlanCostEstimate = + Base.new(:cpu_cost, :max_memory, :max_memory_when_outputting, :network_cost, :root_node_local_cost_estimate) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["cpuCost"], + hash["maxMemory"], + hash["maxMemoryWhenOutputting"], 
+ hash["networkCost"], + hash["rootNodeLocalCostEstimate"] && LocalCostEstimate.decode(hash["rootNodeLocalCostEstimate"]), + ) + obj + end + end + + class << PlanFragment = + Base.new(:id, :root, :symbols, :partitioning, :partitioned_sources, :partitioning_scheme, :stage_execution_descriptor, :stats_and_costs, :json_representation) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["id"], + hash["root"] && PlanNode.decode(hash["root"]), + hash["symbols"], + hash["partitioning"] && PartitioningHandle.decode(hash["partitioning"]), + hash["partitionedSources"], + hash["partitioningScheme"] && PartitioningScheme.decode(hash["partitioningScheme"]), + hash["stageExecutionDescriptor"] && StageExecutionDescriptor.decode(hash["stageExecutionDescriptor"]), + hash["statsAndCosts"] && StatsAndCosts.decode(hash["statsAndCosts"]), + hash["jsonRepresentation"], + ) + obj + end + end + + class << PlanNodeStatsAndCostSummary = + Base.new(:output_row_count, :output_size_in_bytes, :cpu_cost, :memory_cost, :network_cost) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["outputRowCount"], + hash["outputSizeInBytes"], + hash["cpuCost"], + hash["memoryCost"], + hash["networkCost"], + ) + obj + end + end + + class << PlanNodeStatsEstimate = + Base.new(:output_row_count, :symbol_statistics) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["outputRowCount"], + hash["symbolStatistics"] && Hash[hash["symbolStatistics"].to_a.map! 
{|k,v| [k, SymbolStatsEstimate.decode(v)] }], + ) + obj + end + end + + class << ProjectNode = + Base.new(:id, :source, :assignments) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["id"], + hash["source"] && PlanNode.decode(hash["source"]), + hash["assignments"] && Assignments.decode(hash["assignments"]), + ) + obj + end + end + + class << QueryError = + Base.new(:message, :sql_state, :error_code, :error_name, :error_type, :error_location, :failure_info) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["message"], + hash["sqlState"], + hash["errorCode"], + hash["errorName"], + hash["errorType"], + hash["errorLocation"] && ErrorLocation.decode(hash["errorLocation"]), + hash["failureInfo"] && FailureInfo.decode(hash["failureInfo"]), + ) + obj + end + end + + class << QueryInfo = + Base.new(:query_id, :session, :state, :memory_pool, :scheduled, :self, :field_names, :query, :prepared_query, :query_stats, :set_catalog, :set_schema, :set_path, :set_session_properties, :reset_session_properties, :set_roles, :added_prepared_statements, :deallocated_prepared_statements, :started_transaction_id, :clear_transaction_id, :update_type, :output_stage, :failure_info, :error_code, :warnings, :inputs, :output, :referenced_tables, :routines, :complete_info, :resource_group_id, :query_type, :final_query_info) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["queryId"], + hash["session"] && SessionRepresentation.decode(hash["session"]), + hash["state"] && hash["state"].downcase.to_sym, + hash["memoryPool"], + hash["scheduled"], + hash["self"], + hash["fieldNames"], + hash["query"], + hash["preparedQuery"], + hash["queryStats"] && 
QueryStats.decode(hash["queryStats"]), + hash["setCatalog"], + hash["setSchema"], + hash["setPath"], + hash["setSessionProperties"], + hash["resetSessionProperties"], + hash["setRoles"] && Hash[hash["setRoles"].to_a.map! {|k,v| [k, SelectedRole.decode(v)] }], + hash["addedPreparedStatements"], + hash["deallocatedPreparedStatements"], + hash["startedTransactionId"], + hash["clearTransactionId"], + hash["updateType"], + hash["outputStage"] && StageInfo.decode(hash["outputStage"]), + hash["failureInfo"] && ExecutionFailureInfo.decode(hash["failureInfo"]), + hash["errorCode"] && ErrorCode.decode(hash["errorCode"]), + hash["warnings"] && hash["warnings"].map {|h| TrinoWarning.decode(h) }, + hash["inputs"] && hash["inputs"].map {|h| Input.decode(h) }, + hash["output"] && Output.decode(hash["output"]), + hash["referencedTables"] && hash["referencedTables"].map {|h| TableInfo.decode(h) }, + hash["routines"] && hash["routines"].map {|h| RoutineInfo.decode(h) }, + hash["completeInfo"], + hash["resourceGroupId"] && ResourceGroupId.new(hash["resourceGroupId"]), + hash["queryType"] && hash["queryType"].downcase.to_sym, + hash["finalQueryInfo"], + ) + obj + end + end + + class << QueryResults = + Base.new(:id, :info_uri, :partial_cancel_uri, :next_uri, :columns, :data, :stats, :error, :warnings, :update_type, :update_count) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["id"], + hash["infoUri"], + hash["partialCancelUri"], + hash["nextUri"], + hash["columns"] && hash["columns"].map {|h| ClientColumn.decode(h) }, + hash["data"], + hash["stats"] && StatementStats.decode(hash["stats"]), + hash["error"] && QueryError.decode(hash["error"]), + hash["warnings"] && hash["warnings"].map {|h| Warning.decode(h) }, + hash["updateType"], + hash["updateCount"], + ) + obj + end + end + + class << QueryStats = + Base.new(:create_time, :execution_start_time, :last_heartbeat, 
:end_time, :elapsed_time, :queued_time, :resource_waiting_time, :dispatching_time, :execution_time, :analysis_time, :planning_time, :finishing_time, :total_tasks, :running_tasks, :completed_tasks, :total_drivers, :queued_drivers, :running_drivers, :blocked_drivers, :completed_drivers, :cumulative_user_memory, :user_memory_reservation, :revocable_memory_reservation, :total_memory_reservation, :peak_user_memory_reservation, :peak_revocable_memory_reservation, :peak_non_revocable_memory_reservation, :peak_total_memory_reservation, :peak_task_user_memory, :peak_task_revocable_memory, :peak_task_total_memory, :scheduled, :total_scheduled_time, :total_cpu_time, :total_blocked_time, :fully_blocked, :blocked_reasons, :physical_input_data_size, :physical_input_positions, :physical_input_read_time, :internal_network_input_data_size, :internal_network_input_positions, :raw_input_data_size, :raw_input_positions, :processed_input_data_size, :processed_input_positions, :output_data_size, :output_positions, :physical_written_data_size, :stage_gc_statistics, :dynamic_filters_stats, :operator_summaries) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["createTime"], + hash["executionStartTime"], + hash["lastHeartbeat"], + hash["endTime"], + hash["elapsedTime"], + hash["queuedTime"], + hash["resourceWaitingTime"], + hash["dispatchingTime"], + hash["executionTime"], + hash["analysisTime"], + hash["planningTime"], + hash["finishingTime"], + hash["totalTasks"], + hash["runningTasks"], + hash["completedTasks"], + hash["totalDrivers"], + hash["queuedDrivers"], + hash["runningDrivers"], + hash["blockedDrivers"], + hash["completedDrivers"], + hash["cumulativeUserMemory"], + hash["userMemoryReservation"], + hash["revocableMemoryReservation"], + hash["totalMemoryReservation"], + hash["peakUserMemoryReservation"], + hash["peakRevocableMemoryReservation"], + 
hash["peakNonRevocableMemoryReservation"], + hash["peakTotalMemoryReservation"], + hash["peakTaskUserMemory"], + hash["peakTaskRevocableMemory"], + hash["peakTaskTotalMemory"], + hash["scheduled"], + hash["totalScheduledTime"], + hash["totalCpuTime"], + hash["totalBlockedTime"], + hash["fullyBlocked"], + hash["blockedReasons"] && hash["blockedReasons"].map {|h| h.downcase.to_sym }, + hash["physicalInputDataSize"], + hash["physicalInputPositions"], + hash["physicalInputReadTime"], + hash["internalNetworkInputDataSize"], + hash["internalNetworkInputPositions"], + hash["rawInputDataSize"], + hash["rawInputPositions"], + hash["processedInputDataSize"], + hash["processedInputPositions"], + hash["outputDataSize"], + hash["outputPositions"], + hash["physicalWrittenDataSize"], + hash["stageGcStatistics"] && hash["stageGcStatistics"].map {|h| StageGcStatistics.decode(h) }, + hash["dynamicFiltersStats"] && DynamicFiltersStats.decode(hash["dynamicFiltersStats"]), + hash["operatorSummaries"] && hash["operatorSummaries"].map {|h| OperatorStats.decode(h) }, + ) + obj + end + end + + class << RefreshMaterializedViewTarget = + Base.new(:table_handle, :insert_handle, :schema_table_name, :source_table_handles) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["tableHandle"] && TableHandle.decode(hash["tableHandle"]), + hash["insertHandle"] && InsertTableHandle.decode(hash["insertHandle"]), + hash["schemaTableName"] && SchemaTableName.decode(hash["schemaTableName"]), + hash["sourceTableHandles"] && hash["sourceTableHandles"].map {|h| TableHandle.decode(h) }, + ) + obj + end + end + + class << RemoteSourceNode = + Base.new(:id, :source_fragment_ids, :outputs, :ordering_scheme, :exchange_type) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["id"], + 
hash["sourceFragmentIds"], + hash["outputs"], + hash["orderingScheme"] && OrderingScheme.decode(hash["orderingScheme"]), + hash["exchangeType"] && hash["exchangeType"].downcase.to_sym, + ) + obj + end + end + + class << ResolvedFunction = + Base.new(:signature, :id, :type_dependencies, :function_dependencies) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["signature"] && BoundSignature.decode(hash["signature"]), + hash["id"], + hash["typeDependencies"], + hash["functionDependencies"] && hash["functionDependencies"].map {|h| ResolvedFunction.decode(h) }, + ) + obj + end + end + + class << ResourceEstimates = + Base.new(:execution_time, :cpu_time, :peak_memory_bytes) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["executionTime"], + hash["cpuTime"], + hash["peakMemoryBytes"], + ) + obj + end + end + + class << RoutineInfo = + Base.new(:routine, :authorization) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["routine"], + hash["authorization"], + ) + obj + end + end + + class << RowNumberNode = + Base.new(:id, :source, :partition_by, :order_sensitive, :row_number_symbol, :max_row_count_per_partition, :hash_symbol) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["id"], + hash["source"] && PlanNode.decode(hash["source"]), + hash["partitionBy"], + hash["orderSensitive"], + hash["rowNumberSymbol"], + hash["maxRowCountPerPartition"], + hash["hashSymbol"], + ) + obj + end + end + + class << SampleNode = + Base.new(:id, :source, :sample_ratio, :sample_type) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, 
"Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["id"], + hash["source"] && PlanNode.decode(hash["source"]), + hash["sampleRatio"], + hash["sampleType"], + ) + obj + end + end + + class << SchemaTableName = + Base.new(:schema, :table) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["schema"], + hash["table"], + ) + obj + end + end + + class << SelectedRole = + Base.new(:type, :role) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["type"], + hash["role"], + ) + obj + end + end + + class << SemiJoinNode = + Base.new(:id, :source, :filtering_source, :source_join_symbol, :filtering_source_join_symbol, :semi_join_output, :source_hash_symbol, :filtering_source_hash_symbol, :distribution_type, :dynamic_filter_id) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["id"], + hash["source"] && PlanNode.decode(hash["source"]), + hash["filteringSource"] && PlanNode.decode(hash["filteringSource"]), + hash["sourceJoinSymbol"], + hash["filteringSourceJoinSymbol"], + hash["semiJoinOutput"], + hash["sourceHashSymbol"], + hash["filteringSourceHashSymbol"], + hash["distributionType"] && hash["distributionType"].downcase.to_sym, + hash["dynamicFilterId"], + ) + obj + end + end + + class << SessionRepresentation = + Base.new(:query_id, :transaction_id, :client_transaction_support, :user, :groups, :principal, :source, :catalog, :schema, :path, :trace_token, :time_zone_key, :locale, :remote_user_address, :user_agent, :client_info, :client_tags, :client_capabilities, :resource_estimates, :start, :system_properties, :catalog_properties, :unprocessed_catalog_properties, :roles, :prepared_statements, 
:protocol_name) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["queryId"], + hash["transactionId"], + hash["clientTransactionSupport"], + hash["user"], + hash["groups"], + hash["principal"], + hash["source"], + hash["catalog"], + hash["schema"], + hash["path"] && SqlPath.decode(hash["path"]), + hash["traceToken"], + hash["timeZoneKey"], + hash["locale"], + hash["remoteUserAddress"], + hash["userAgent"], + hash["clientInfo"], + hash["clientTags"], + hash["clientCapabilities"], + hash["resourceEstimates"] && ResourceEstimates.decode(hash["resourceEstimates"]), + hash["start"], + hash["systemProperties"], + hash["catalogProperties"], + hash["unprocessedCatalogProperties"], + hash["roles"] && Hash[hash["roles"].to_a.map! {|k,v| [k, SelectedRole.decode(v)] }], + hash["preparedStatements"], + hash["protocolName"], + ) + obj + end + end + + class << SortNode = + Base.new(:id, :source, :ordering_scheme, :partial) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["id"], + hash["source"] && PlanNode.decode(hash["source"]), + hash["orderingScheme"] && OrderingScheme.decode(hash["orderingScheme"]), + hash["partial"], + ) + obj + end + end + + class << SpatialJoinNode = + Base.new(:id, :type, :left, :right, :output_symbols, :filter, :left_partition_symbol, :right_partition_symbol, :kdb_tree) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["id"], + hash["type"], + hash["left"] && PlanNode.decode(hash["left"]), + hash["right"] && PlanNode.decode(hash["right"]), + hash["outputSymbols"], + hash["filter"], + hash["leftPartitionSymbol"], + hash["rightPartitionSymbol"], + hash["kdbTree"], + ) + obj + end + end + + class << Specification = + 
Base.new(:partition_by, :ordering_scheme) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["partitionBy"], + hash["orderingScheme"] && OrderingScheme.decode(hash["orderingScheme"]), + ) + obj + end + end + + class << SplitOperatorInfo = + Base.new(:catalog_name, :split_info) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["catalogName"], + hash["splitInfo"], + ) + obj + end + end + + class << SqlPath = + Base.new(:raw_path) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["rawPath"], + ) + obj + end + end + + class << StageExecutionDescriptor = + Base.new(:strategy, :grouped_execution_scan_nodes) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["strategy"] && hash["strategy"].downcase.to_sym, + hash["groupedExecutionScanNodes"], + ) + obj + end + end + + class << StageGcStatistics = + Base.new(:stage_id, :tasks, :full_gc_tasks, :min_full_gc_sec, :max_full_gc_sec, :total_full_gc_sec, :average_full_gc_sec) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["stageId"], + hash["tasks"], + hash["fullGcTasks"], + hash["minFullGcSec"], + hash["maxFullGcSec"], + hash["totalFullGcSec"], + hash["averageFullGcSec"], + ) + obj + end + end + + class << StageInfo = + Base.new(:stage_id, :state, :plan, :types, :stage_stats, :tasks, :sub_stages, :tables, :failure_cause) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, 
+ hash["stageId"] && StageId.new(hash["stageId"]), + hash["state"] && hash["state"].downcase.to_sym, + hash["plan"] && PlanFragment.decode(hash["plan"]), + hash["types"], + hash["stageStats"] && StageStats.decode(hash["stageStats"]), + hash["tasks"] && hash["tasks"].map {|h| TaskInfo.decode(h) }, + hash["subStages"] && hash["subStages"].map {|h| StageInfo.decode(h) }, + hash["tables"] && Hash[hash["tables"].to_a.map! {|k,v| [k, TableInfo.decode(v)] }], + hash["failureCause"] && ExecutionFailureInfo.decode(hash["failureCause"]), + ) + obj + end + end + + class << StageStats = + Base.new(:scheduling_complete, :get_split_distribution, :total_tasks, :running_tasks, :completed_tasks, :total_drivers, :queued_drivers, :running_drivers, :blocked_drivers, :completed_drivers, :cumulative_user_memory, :user_memory_reservation, :revocable_memory_reservation, :total_memory_reservation, :peak_user_memory_reservation, :peak_revocable_memory_reservation, :total_scheduled_time, :total_cpu_time, :total_blocked_time, :fully_blocked, :blocked_reasons, :physical_input_data_size, :physical_input_positions, :physical_input_read_time, :internal_network_input_data_size, :internal_network_input_positions, :raw_input_data_size, :raw_input_positions, :processed_input_data_size, :processed_input_positions, :buffered_data_size, :output_data_size, :output_positions, :physical_written_data_size, :gc_info, :operator_summaries) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["schedulingComplete"], + hash["getSplitDistribution"] && DistributionSnapshot.decode(hash["getSplitDistribution"]), + hash["totalTasks"], + hash["runningTasks"], + hash["completedTasks"], + hash["totalDrivers"], + hash["queuedDrivers"], + hash["runningDrivers"], + hash["blockedDrivers"], + hash["completedDrivers"], + hash["cumulativeUserMemory"], + hash["userMemoryReservation"], + 
hash["revocableMemoryReservation"], + hash["totalMemoryReservation"], + hash["peakUserMemoryReservation"], + hash["peakRevocableMemoryReservation"], + hash["totalScheduledTime"], + hash["totalCpuTime"], + hash["totalBlockedTime"], + hash["fullyBlocked"], + hash["blockedReasons"] && hash["blockedReasons"].map {|h| h.downcase.to_sym }, + hash["physicalInputDataSize"], + hash["physicalInputPositions"], + hash["physicalInputReadTime"], + hash["internalNetworkInputDataSize"], + hash["internalNetworkInputPositions"], + hash["rawInputDataSize"], + hash["rawInputPositions"], + hash["processedInputDataSize"], + hash["processedInputPositions"], + hash["bufferedDataSize"], + hash["outputDataSize"], + hash["outputPositions"], + hash["physicalWrittenDataSize"], + hash["gcInfo"] && StageGcStatistics.decode(hash["gcInfo"]), + hash["operatorSummaries"] && hash["operatorSummaries"].map {|h| OperatorStats.decode(h) }, + ) + obj + end + end + + class << StatementStats = + Base.new(:state, :queued, :scheduled, :nodes, :total_splits, :queued_splits, :running_splits, :completed_splits, :cpu_time_millis, :wall_time_millis, :queued_time_millis, :elapsed_time_millis, :processed_rows, :processed_bytes, :physical_input_bytes, :peak_memory_bytes, :spilled_bytes, :root_stage) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["state"], + hash["queued"], + hash["scheduled"], + hash["nodes"], + hash["totalSplits"], + hash["queuedSplits"], + hash["runningSplits"], + hash["completedSplits"], + hash["cpuTimeMillis"], + hash["wallTimeMillis"], + hash["queuedTimeMillis"], + hash["elapsedTimeMillis"], + hash["processedRows"], + hash["processedBytes"], + hash["physicalInputBytes"], + hash["peakMemoryBytes"], + hash["spilledBytes"], + hash["rootStage"] && ClientStageStats.decode(hash["rootStage"]), + ) + obj + end + end + + class << StatisticAggregations = + Base.new(:aggregations, 
:grouping_symbols) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["aggregations"] && Hash[hash["aggregations"].to_a.map! {|k,v| [k, Aggregation.decode(v)] }], + hash["groupingSymbols"], + ) + obj + end + end + + class << StatisticAggregationsDescriptor_Symbol = + Base.new(:grouping, :table_statistics, :column_statistics) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["grouping"], + hash["tableStatistics"] && Hash[hash["tableStatistics"].to_a.map! {|k,v| [k.downcase.to_sym, v] }], + hash["columnStatistics"] && Hash[hash["columnStatistics"].to_a.map! {|k,v| [ColumnStatisticMetadata.decode(k), v] }], + ) + obj + end + end + + class << StatisticsWriterNode = + Base.new(:id, :source, :target, :row_count_symbol, :row_count_enabled, :descriptor) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["id"], + hash["source"] && PlanNode.decode(hash["source"]), + hash["target"] && WriteStatisticsTarget.decode(hash["target"]), + hash["rowCountSymbol"], + hash["rowCountEnabled"], + hash["descriptor"] && StatisticAggregationsDescriptor_Symbol.decode(hash["descriptor"]), + ) + obj + end + end + + class << StatsAndCosts = + Base.new(:stats, :costs) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["stats"] && Hash[hash["stats"].to_a.map! {|k,v| [k, PlanNodeStatsEstimate.decode(v)] }], + hash["costs"] && Hash[hash["costs"].to_a.map! 
{|k,v| [k, PlanCostEstimate.decode(v)] }], + ) + obj + end + end + + class << SymbolStatsEstimate = + Base.new(:low_value, :high_value, :nulls_fraction, :average_row_size, :distinct_values_count) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["lowValue"], + hash["highValue"], + hash["nullsFraction"], + hash["averageRowSize"], + hash["distinctValuesCount"], + ) + obj + end + end + + class << TableFinishInfo = + Base.new(:connector_output_metadata, :json_length_limit_exceeded, :statistics_wall_time, :statistics_cpu_time) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["connectorOutputMetadata"], + hash["jsonLengthLimitExceeded"], + hash["statisticsWallTime"], + hash["statisticsCpuTime"], + ) + obj + end + end + + class << TableFinishNode = + Base.new(:id, :source, :target, :row_count_symbol, :statistics_aggregation, :statistics_aggregation_descriptor) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["id"], + hash["source"] && PlanNode.decode(hash["source"]), + hash["target"] && WriterTarget.decode(hash["target"]), + hash["rowCountSymbol"], + hash["statisticsAggregation"] && StatisticAggregations.decode(hash["statisticsAggregation"]), + hash["statisticsAggregationDescriptor"] && StatisticAggregationsDescriptor_Symbol.decode(hash["statisticsAggregationDescriptor"]), + ) + obj + end + end + + class << TableHandle = + Base.new(:catalog_name, :connector_handle, :transaction, :layout) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["catalogName"], + hash["connectorHandle"], + hash["transaction"], + hash["layout"], + ) + 
obj + end + end + + class << TableInfo = + Base.new(:table_name, :predicate) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["tableName"], + hash["predicate"], + ) + obj + end + end + + class << TableScanNode = + Base.new(:id, :table, :output_symbols, :assignments, :for_delete) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["id"], + hash["table"] && TableHandle.decode(hash["table"]), + hash["outputSymbols"], + hash["assignments"], + hash["forDelete"], + ) + obj + end + end + + class << TableWriterInfo = + Base.new(:page_sink_peak_memory_usage, :statistics_wall_time, :statistics_cpu_time, :validation_cpu_time) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["pageSinkPeakMemoryUsage"], + hash["statisticsWallTime"], + hash["statisticsCpuTime"], + hash["validationCpuTime"], + ) + obj + end + end + + class << TableWriterNode = + Base.new(:id, :source, :target, :row_count_symbol, :fragment_symbol, :columns, :column_names, :not_null_column_symbols, :partitioning_scheme, :statistics_aggregation, :statistics_aggregation_descriptor) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["id"], + hash["source"] && PlanNode.decode(hash["source"]), + hash["target"] && WriterTarget.decode(hash["target"]), + hash["rowCountSymbol"], + hash["fragmentSymbol"], + hash["columns"], + hash["columnNames"], + hash["notNullColumnSymbols"], + hash["partitioningScheme"] && PartitioningScheme.decode(hash["partitioningScheme"]), + hash["statisticsAggregation"] && StatisticAggregations.decode(hash["statisticsAggregation"]), + 
hash["statisticsAggregationDescriptor"] && StatisticAggregationsDescriptor_Symbol.decode(hash["statisticsAggregationDescriptor"]), + ) + obj + end + end + + class << TaskInfo = + Base.new(:task_status, :last_heartbeat, :output_buffers, :no_more_splits, :stats, :needs_plan) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["taskStatus"] && TaskStatus.decode(hash["taskStatus"]), + hash["lastHeartbeat"], + hash["outputBuffers"] && OutputBufferInfo.decode(hash["outputBuffers"]), + hash["noMoreSplits"], + hash["stats"] && TaskStats.decode(hash["stats"]), + hash["needsPlan"], + ) + obj + end + end + + class << TaskStats = + Base.new(:create_time, :first_start_time, :last_start_time, :last_end_time, :end_time, :elapsed_time, :queued_time, :total_drivers, :queued_drivers, :queued_partitioned_drivers, :running_drivers, :running_partitioned_drivers, :blocked_drivers, :completed_drivers, :cumulative_user_memory, :user_memory_reservation, :revocable_memory_reservation, :system_memory_reservation, :total_scheduled_time, :total_cpu_time, :total_blocked_time, :fully_blocked, :blocked_reasons, :physical_input_data_size, :physical_input_positions, :physical_input_read_time, :internal_network_input_data_size, :internal_network_input_positions, :raw_input_data_size, :raw_input_positions, :processed_input_data_size, :processed_input_positions, :output_data_size, :output_positions, :physical_written_data_size, :full_gc_count, :full_gc_time, :pipelines) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["createTime"], + hash["firstStartTime"], + hash["lastStartTime"], + hash["lastEndTime"], + hash["endTime"], + hash["elapsedTime"], + hash["queuedTime"], + hash["totalDrivers"], + hash["queuedDrivers"], + hash["queuedPartitionedDrivers"], + hash["runningDrivers"], + 
hash["runningPartitionedDrivers"], + hash["blockedDrivers"], + hash["completedDrivers"], + hash["cumulativeUserMemory"], + hash["userMemoryReservation"], + hash["revocableMemoryReservation"], + hash["systemMemoryReservation"], + hash["totalScheduledTime"], + hash["totalCpuTime"], + hash["totalBlockedTime"], + hash["fullyBlocked"], + hash["blockedReasons"] && hash["blockedReasons"].map {|h| h.downcase.to_sym }, + hash["physicalInputDataSize"], + hash["physicalInputPositions"], + hash["physicalInputReadTime"], + hash["internalNetworkInputDataSize"], + hash["internalNetworkInputPositions"], + hash["rawInputDataSize"], + hash["rawInputPositions"], + hash["processedInputDataSize"], + hash["processedInputPositions"], + hash["outputDataSize"], + hash["outputPositions"], + hash["physicalWrittenDataSize"], + hash["fullGcCount"], + hash["fullGcTime"], + hash["pipelines"] && hash["pipelines"].map {|h| PipelineStats.decode(h) }, + ) + obj + end + end + + class << TaskStatus = + Base.new(:task_id, :task_instance_id, :version, :state, :self, :node_id, :completed_driver_groups, :failures, :queued_partitioned_drivers, :running_partitioned_drivers, :output_buffer_overutilized, :physical_written_data_size, :memory_reservation, :system_memory_reservation, :revocable_memory_reservation, :full_gc_count, :full_gc_time, :dynamic_filters_version) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["taskId"] && TaskId.new(hash["taskId"]), + hash["taskInstanceId"], + hash["version"], + hash["state"] && hash["state"].downcase.to_sym, + hash["self"], + hash["nodeId"], + hash["completedDriverGroups"] && hash["completedDriverGroups"].map {|h| Lifespan.new(h) }, + hash["failures"] && hash["failures"].map {|h| ExecutionFailureInfo.decode(h) }, + hash["queuedPartitionedDrivers"], + hash["runningPartitionedDrivers"], + hash["outputBufferOverutilized"], + hash["physicalWrittenDataSize"], + 
hash["memoryReservation"], + hash["systemMemoryReservation"], + hash["revocableMemoryReservation"], + hash["fullGcCount"], + hash["fullGcTime"], + hash["dynamicFiltersVersion"], + ) + obj + end + end + + class << TopNNode = + Base.new(:id, :source, :count, :ordering_scheme, :step) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["id"], + hash["source"] && PlanNode.decode(hash["source"]), + hash["count"], + hash["orderingScheme"] && OrderingScheme.decode(hash["orderingScheme"]), + hash["step"] && hash["step"].downcase.to_sym, + ) + obj + end + end + + class << TopNRowNumberNode = + Base.new(:id, :source, :specification, :row_number_symbol, :max_row_count_per_partition, :partial, :hash_symbol) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["id"], + hash["source"] && PlanNode.decode(hash["source"]), + hash["specification"] && Specification.decode(hash["specification"]), + hash["rowNumberSymbol"], + hash["maxRowCountPerPartition"], + hash["partial"], + hash["hashSymbol"], + ) + obj + end + end + + class << TrinoWarning = + Base.new(:warning_code, :message) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["warningCode"] && WarningCode.decode(hash["warningCode"]), + hash["message"], + ) + obj + end + end + + class << UnionNode = + Base.new(:id, :sources, :output_to_inputs, :outputs) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["id"], + hash["sources"] && hash["sources"].map {|h| PlanNode.decode(h) }, + hash["outputToInputs"], + hash["outputs"], + ) + obj + end + end + + class << UnnestNode = + Base.new(:id, :source, 
:replicate_symbols, :mappings, :ordinality_symbol, :join_type, :filter) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["id"], + hash["source"] && PlanNode.decode(hash["source"]), + hash["replicateSymbols"], + hash["mappings"] && hash["mappings"].map {|h| Mapping.decode(h) }, + hash["ordinalitySymbol"], + hash["joinType"], + hash["filter"], + ) + obj + end + end + + class << ValuesNode = + Base.new(:id, :output_symbols, :row_count, :rows) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["id"], + hash["outputSymbols"], + hash["rowCount"], + hash["rows"], + ) + obj + end + end + + class << Warning = + Base.new(:warning_code, :message) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["warningCode"] && Code.decode(hash["warningCode"]), + hash["message"], + ) + obj + end + end + + class << WarningCode = + Base.new(:code, :name) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["code"], + hash["name"], + ) + obj + end + end + + class << WindowInfo = + Base.new(:window_infos) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["windowInfos"] && hash["windowInfos"].map {|h| DriverWindowInfo.decode(h) }, + ) + obj + end + end + + class << WindowNode = + Base.new(:id, :source, :specification, :window_functions, :hash_symbol, :pre_partitioned_inputs, :pre_sorted_order_prefix) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + 
obj.send(:initialize_struct, + hash["id"], + hash["source"] && PlanNode.decode(hash["source"]), + hash["specification"] && Specification.decode(hash["specification"]), + hash["windowFunctions"] && Hash[hash["windowFunctions"].to_a.map! {|k,v| [k, Function.decode(v)] }], + hash["hashSymbol"], + hash["prePartitionedInputs"], + hash["preSortedOrderPrefix"], + ) + obj + end + end + + class << WriteStatisticsHandle = + Base.new(:handle) + def decode(hash) + unless hash.is_a?(Hash) + raise TypeError, "Can't convert #{hash.class} to Hash" + end + obj = allocate + obj.send(:initialize_struct, + hash["handle"] && AnalyzeTableHandle.decode(hash["handle"]), + ) + obj + end + end + + + end +end diff --git a/lib/presto/client/models.rb b/lib/trino/client/models.rb similarity index 57% rename from lib/presto/client/models.rb rename to lib/trino/client/models.rb index e67040a0..4c47e20e 100644 --- a/lib/presto/client/models.rb +++ b/lib/trino/client/models.rb @@ -1,5 +1,5 @@ # -# Presto client for Ruby +# Trino client for Ruby # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,10 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # -module Presto::Client +module Trino::Client #### - ## lib/presto/client/models.rb is automatically generated using "rake modelgen:latest" command. + ## lib/trino/client/models.rb is automatically generated using "rake modelgen:latest" command. ## You should not edit this file directly. To modify the class definitions, edit ## modelgen/models.rb file and run "rake modelgen:latest". 
## @@ -24,14 +24,15 @@ module Presto::Client module ModelVersions end - require 'presto/client/model_versions/0.149.rb' - require 'presto/client/model_versions/0.153.rb' - require 'presto/client/model_versions/0.173.rb' - require 'presto/client/model_versions/0.178.rb' - require 'presto/client/model_versions/0.205.rb' - require 'presto/client/model_versions/303.rb' - require 'presto/client/model_versions/316.rb' + require 'trino/client/model_versions/0.149.rb' + require 'trino/client/model_versions/0.153.rb' + require 'trino/client/model_versions/0.173.rb' + require 'trino/client/model_versions/0.178.rb' + require 'trino/client/model_versions/0.205.rb' + require 'trino/client/model_versions/303.rb' + require 'trino/client/model_versions/316.rb' + require 'trino/client/model_versions/351.rb' - Models = ModelVersions::V316 + Models = ModelVersions::V351 end diff --git a/lib/presto/client/query.rb b/lib/trino/client/query.rb similarity index 83% rename from lib/presto/client/query.rb rename to lib/trino/client/query.rb index a01d2e54..8ee3835c 100644 --- a/lib/presto/client/query.rb +++ b/lib/trino/client/query.rb @@ -1,5 +1,5 @@ # -# Presto client for Ruby +# Trino client for Ruby # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,14 +13,14 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -module Presto::Client +module Trino::Client require 'faraday' require 'faraday_middleware' - require 'presto/client/models' - require 'presto/client/errors' - require 'presto/client/faraday_client' - require 'presto/client/statement_client' + require 'trino/client/models' + require 'trino/client/errors' + require 'trino/client/faraday_client' + require 'trino/client/statement_client' class Query def self.start(query, options) @@ -40,7 +40,7 @@ def self.kill(query_id, options) end def self.faraday_client(options) - Presto::Client.faraday_client(options) + Trino::Client.faraday_client(options) end def initialize(api) @@ -103,7 +103,7 @@ def each_row_chunk(&block) wait_for_data if self.columns == nil - raise PrestoError, "Query #{@api.current_results.id} has no columns" + raise TrinoError, "Query #{@api.current_results.id} has no columns" end begin @@ -132,11 +132,11 @@ def close def raise_if_failed if @api.client_aborted? - raise PrestoClientError, "Query aborted by user" + raise TrinoClientError, "Query aborted by user" elsif @api.query_failed? results = @api.current_results error = results.error - raise PrestoQueryError.new("Query #{results.id} failed: #{error.message}", results.id, error.error_code, error.error_name, error.failure_info) + raise TrinoQueryError.new("Query #{results.id} failed: #{error.message}", results.id, error.error_code, error.error_name, error.failure_info) end end end diff --git a/lib/presto/client/statement_client.rb b/lib/trino/client/statement_client.rb similarity index 86% rename from lib/presto/client/statement_client.rb rename to lib/trino/client/statement_client.rb index 554fa884..d79c3ead 100644 --- a/lib/presto/client/statement_client.rb +++ b/lib/trino/client/statement_client.rb @@ -1,5 +1,5 @@ # -# Presto client for Ruby +# Trino client for Ruby # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -13,15 +13,15 @@ # See the License for the specific language governing permissions and # limitations under the License. # -module Presto::Client +module Trino::Client require 'json' require 'msgpack' - require 'presto/client/models' - require 'presto/client/errors' + require 'trino/client/models' + require 'trino/client/errors' class StatementClient - # Presto can return too deep nested JSON + # Trino can return too deep nested JSON JSON_OPTIONS = { :max_nesting => false } @@ -75,7 +75,7 @@ def post_query_request! # TODO error handling if response.status != 200 - exception! PrestoHttpError.new(response.status, "Failed to start query: #{response.body} (#{response.status})") + exception! TrinoHttpError.new(response.status, "Failed to start query: #{response.body} (#{response.status})") end @results_headers = response.headers @@ -168,7 +168,7 @@ def decode_model(uri, hash, body_class) if body.size > 1024 + 3 body = "#{body[0, 1024]}..." end - exception! PrestoHttpError.new(500, "Presto API returned unexpected structure at #{uri}. Expected #{body_class} but got #{body}: #{e}") + exception! TrinoHttpError.new(500, "Trino API returned unexpected structure at #{uri}. Expected #{body_class} but got #{body}: #{e}") end end @@ -183,7 +183,7 @@ def parse_body(response) JSON.parse(response.body, opts = JSON_OPTIONS) end rescue => e - exception! PrestoHttpError.new(500, "Presto API returned unexpected data format. #{e}") + exception! TrinoHttpError.new(500, "Trino API returned unexpected data format. #{e}") end end @@ -210,7 +210,7 @@ def faraday_get_with_retry(uri, &block) if response.status != 503 # retry only if 503 Service Unavailable # deterministic error - exception! PrestoHttpError.new(response.status, "Presto API error at #{uri} returned #{response.status}: #{response.body}") + exception! 
TrinoHttpError.new(response.status, "Trino API error at #{uri} returned #{response.status}: #{response.body}") end end @@ -220,7 +220,7 @@ def faraday_get_with_retry(uri, &block) sleep attempts * 0.1 end while (Process.clock_gettime(Process::CLOCK_MONOTONIC) - start) < @retry_timeout && !client_aborted? - exception! PrestoHttpError.new(408, "Presto API error due to timeout") + exception! TrinoHttpError.new(408, "Trino API error due to timeout") end def raise_if_timeout! @@ -236,7 +236,7 @@ def raise_if_timeout! if @plan_timeout && (@results == nil || @results.columns == nil) && elapsed > @plan_timeout # @results is not set (even first faraday_get_with_retry isn't called yet) or - # result from Presto doesn't include result schema. Query planning isn't done yet. + # result from Trino doesn't include result schema. Query planning isn't done yet. raise_timeout_error! end end @@ -244,9 +244,9 @@ def raise_if_timeout! def raise_timeout_error! if query_id = @results && @results.id - exception! PrestoQueryTimeoutError.new("Query #{query_id} timed out") + exception! TrinoQueryTimeoutError.new("Query #{query_id} timed out") else - exception! PrestoQueryTimeoutError.new("Query timed out") + exception! TrinoQueryTimeoutError.new("Query timed out") end end diff --git a/lib/presto/client/version.rb b/lib/trino/client/version.rb similarity index 94% rename from lib/presto/client/version.rb rename to lib/trino/client/version.rb index fcf6f8d1..6e195d29 100644 --- a/lib/presto/client/version.rb +++ b/lib/trino/client/version.rb @@ -1,5 +1,5 @@ # -# Presto client for Ruby +# Trino client for Ruby # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -module Presto +module Trino module Client VERSION = "0.6.5" end diff --git a/modelgen/model_versions.rb b/modelgen/model_versions.rb index db1d20da..6ce29734 100644 --- a/modelgen/model_versions.rb +++ b/modelgen/model_versions.rb @@ -1,5 +1,5 @@ # -# Presto client for Ruby +# Trino client for Ruby # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,10 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # -module Presto::Client::ModelVersions +module Trino::Client::ModelVersions #### - ## lib/presto/client/model_versions/*.rb is automatically generated using "rake modelgen:all" command. + ## lib/trino/client/model_versions/*.rb is automatically generated using "rake modelgen:all" command. ## You should not edit this file directly. To modify the class definitions, edit ## modelgen/model_versions.rb file and run "rake modelgen:all". ## @@ -115,6 +115,7 @@ def self.decode(hash) when "remoteSource" then RemoteSourceNode when "join" then JoinNode when "semijoin" then SemiJoinNode + when "spatialjoin" then SpatialJoinNode when "indexjoin" then IndexJoinNode when "indexsource" then IndexSourceNode when "tablewriter" then TableWriterNode @@ -130,7 +131,7 @@ def self.decode(hash) when "explainAnalyze" then ExplainAnalyzeNode when "apply" then ApplyNode when "assignUniqueId" then AssignUniqueId - when "lateralJoin" then LateralJoinNode + when "correlatedJoin" then CorrelatedJoinNode when "statisticsWriterNode" then StatisticsWriterNode end if model_class diff --git a/modelgen/modelgen.rb b/modelgen/modelgen.rb index f26ca4ba..ece2f669 100644 --- a/modelgen/modelgen.rb +++ b/modelgen/modelgen.rb @@ -1,12 +1,12 @@ if ARGV.length != 4 - puts "usage: " + puts "usage: " exit 1 end model_version, source_dir, template_path, output_path = *ARGV -require_relative 'presto_models' +require_relative 'trino_models' require 'erb' erb = 
ERB.new(File.read(template_path)) @@ -16,8 +16,8 @@ predefined_simple_classes = %w[StageId TaskId Lifespan ConnectorSession ResourceGroupId] predefined_models = %w[DistributionSnapshot PlanNode EquiJoinClause WriterTarget WriteStatisticsTarget OperatorInfo HashCollisionsInfo] -assume_primitive = %w[Object Type Long Symbol QueryId PlanNodeId PlanFragmentId MemoryPoolId TransactionId URI Duration DataSize DateTime ColumnHandle ConnectorTableHandle ConnectorOutputTableHandle ConnectorIndexHandle ConnectorColumnHandle ConnectorInsertTableHandle ConnectorTableLayoutHandle Expression FunctionCall TimeZoneKey Locale TypeSignature Frame TupleDomain SerializableNativeValue ConnectorTransactionHandle OutputBufferId ConnectorPartitioningHandle NullableValue ConnectorId HostAddress JsonNode Node CatalogName QualifiedObjectName] -enum_types = %w[QueryState StageState TaskState QueueState PlanDistribution OutputPartitioning Step SortOrder BufferState NullPartitioning BlockedReason ParameterKind FunctionKind PartitionFunctionHandle Scope ErrorType DistributionType PipelineExecutionStrategy JoinType ExchangeNode.Type ColumnStatisticType TableStatisticType StageExecutionStrategy SemanticErrorCode] +assume_primitive = %w[Object Type Long Symbol QueryId PlanNodeId PlanFragmentId MemoryPoolId TransactionId URI Duration DataSize DateTime ColumnHandle ConnectorTableHandle ConnectorOutputTableHandle ConnectorIndexHandle ConnectorColumnHandle ConnectorInsertTableHandle ConnectorTableLayoutHandle Expression FunctionCall TimeZoneKey Locale TypeSignature Frame TupleDomain SerializableNativeValue ConnectorTransactionHandle OutputBufferId ConnectorPartitioningHandle NullableValue ConnectorId HostAddress JsonNode Node CatalogName QualifiedObjectName FunctionId DynamicFilterId Instant] +enum_types = %w[QueryState StageState TaskState QueueState PlanDistribution OutputPartitioning Step SortOrder BufferState NullPartitioning BlockedReason ParameterKind FunctionKind PartitionFunctionHandle Scope 
ErrorType DistributionType PipelineExecutionStrategy JoinType ExchangeNode.Type ColumnStatisticType TableStatisticType StageExecutionStrategy SemanticErrorCode QueryType] root_models = %w[QueryResults QueryInfo BasicQueryInfo] + %w[ OutputNode @@ -38,6 +38,7 @@ RemoteSourceNode JoinNode SemiJoinNode +SpatialJoinNode IndexJoinNode IndexSourceNode TableWriterNode @@ -52,7 +53,7 @@ ExplainAnalyzeNode ApplyNode AssignUniqueId -LateralJoinNode +CorrelatedJoinNode StatisticsWriterNode ] + %w[ ExchangeClientStatus @@ -72,14 +73,15 @@ ].each_slice(3).map { |x, y, z| [[x,y], z] }.flatten(1)] path_mapping = Hash[*%w[ -ClientColumn presto-client/src/main/java/io/prestosql/client/Column.java -ClientStageStats presto-client/src/main/java/io/prestosql/client/StageStats.java -Column presto-main/src/main/java/io/prestosql/execution/Column.java -QueryStats presto-main/src/main/java/io/prestosql/execution/QueryStats.java -StageStats presto-main/src/main/java/io/prestosql/execution/StageStats.java -PartitionedOutputInfo presto-main/src/main/java/io/prestosql/operator/PartitionedOutputOperator.java -TableWriterInfo presto-main/src/main/java/io/prestosql/operator/TableWriterOperator.java -TableInfo presto-main/src/main/java/io/prestosql/execution/TableInfo.java +ClientColumn client/trino-client/src/main/java/io/trino/client/Column.java +ClientStageStats client/trino-client/src/main/java/io/trino/client/StageStats.java +Column core/trino-main/src/main/java/io/trino/execution/Column.java +QueryStats core/trino-main/src/main/java/io/trino/execution/QueryStats.java +StageStats core/trino-main/src/main/java/io/trino/execution/StageStats.java +PartitionedOutputInfo core/trino-main/src/main/java/io/trino/operator/PartitionedOutputOperator.java +TableWriterInfo core/trino-main/src/main/java/io/trino/operator/TableWriterOperator.java +TableInfo core/trino-main/src/main/java/io/trino/execution/TableInfo.java +DynamicFiltersStats 
core/trino-main/src/main/java/io/trino/server/DynamicFilterService.java ].map.with_index { |v,i| i % 2 == 0 ? v : (source_path + "/" + v) }] # model => [ [key,nullable,type], ... ] @@ -87,7 +89,7 @@ 'QueryInfo' => [['finalQueryInfo', nil, 'boolean']] } -analyzer = PrestoModels::ModelAnalyzer.new( +analyzer = TrinoModels::ModelAnalyzer.new( source_path, skip_models: predefined_models + predefined_simple_classes + assume_primitive + enum_types, path_mapping: path_mapping, @@ -98,7 +100,7 @@ models = analyzer.models skipped_models = analyzer.skipped_models -formatter = PrestoModels::ModelFormatter.new( +formatter = TrinoModels::ModelFormatter.new( base_indent_count: 2, struct_class: "Base", special_struct_initialize_method: "initialize_struct", diff --git a/modelgen/models.rb b/modelgen/models.rb index ce667dfc..69bb8672 100644 --- a/modelgen/models.rb +++ b/modelgen/models.rb @@ -1,5 +1,5 @@ # -# Presto client for Ruby +# Trino client for Ruby # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,10 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # -module Presto::Client +module Trino::Client #### - ## lib/presto/client/models.rb is automatically generated using "rake modelgen:latest" command. + ## lib/trino/client/models.rb is automatically generated using "rake modelgen:latest" command. ## You should not edit this file directly. To modify the class definitions, edit ## modelgen/models.rb file and run "rake modelgen:latest". 
## @@ -24,7 +24,7 @@ module Presto::Client module ModelVersions end <% @versions.each do |ver| %> - require 'presto/client/model_versions/<%= ver %>.rb'<% end %> + require 'trino/client/model_versions/<%= ver %>.rb'<% end %> Models = ModelVersions::V<%= @latest_version.gsub(".", "_") %> diff --git a/modelgen/presto_models.rb b/modelgen/trino_models.rb similarity index 99% rename from modelgen/presto_models.rb rename to modelgen/trino_models.rb index a6f7bdd6..df07f0c8 100644 --- a/modelgen/presto_models.rb +++ b/modelgen/trino_models.rb @@ -1,5 +1,5 @@ -module PrestoModels +module TrinoModels require 'find' require 'stringio' @@ -146,7 +146,7 @@ def find_class_file(model_name, parent_model) @source_files ||= Find.find(@source_path).to_a pattern = /\/#{model_name}.java$/ - matched = @source_files.find_all {|path| path =~ pattern && !path.include?('/test/')} + matched = @source_files.find_all {|path| path =~ pattern && !path.include?('/test/') && !path.include?('/verifier/')} if matched.empty? raise ModelAnalysisError, "Model class #{model_name} is not found" end diff --git a/release.rb b/release.rb index 113e6f8b..0f74e4a6 100644 --- a/release.rb +++ b/release.rb @@ -2,7 +2,7 @@ require 'fileutils' -PREFIX = 'https://github.com/treasure-data/presto-client-ruby' +PREFIX = 'https://github.com/treasure-data/trino-client-ruby' RELEASE_NOTES_FILE = "ChangeLog.md" last_tag = `git describe --tags --abbrev=0`.chomp diff --git a/spec/basic_query_spec.rb b/spec/basic_query_spec.rb index 7b6bd07e..5319e42f 100644 --- a/spec/basic_query_spec.rb +++ b/spec/basic_query_spec.rb @@ -1,11 +1,11 @@ require 'spec_helper' -describe Presto::Client::Client do +describe Trino::Client::Client do before(:all) do WebMock.disable! 
- @cluster = TinyPresto::Cluster.new('ghcr.io/trinodb/presto', '316') + @cluster = TinyPresto::Cluster.new() @container = @cluster.run - @client = Presto::Client.new(server: 'localhost:8080', catalog: 'memory', user: 'test-user', schema: 'default') + @client = Trino::Client.new(server: 'localhost:8080', catalog: 'memory', user: 'test-user', schema: 'default') loop do begin @client.run('show schemas') @@ -67,7 +67,7 @@ it 'partial cancel' do @client.query('show schemas') do |q| q.cancel - expect { q.query_info }.to raise_error(Presto::Client::PrestoHttpError, /Error 410 Gone/) + expect { q.query_info }.to raise_error(Trino::Client::TrinoHttpError, /Error 410 Gone/) end end diff --git a/spec/client_spec.rb b/spec/client_spec.rb index 448e9cad..4621c373 100644 --- a/spec/client_spec.rb +++ b/spec/client_spec.rb @@ -1,7 +1,7 @@ require 'spec_helper' -describe Presto::Client::Client do - let(:client) { Presto::Client.new({}) } +describe Trino::Client::Client do + let(:client) { Trino::Client.new({}) } describe 'rehashes' do let(:columns) do diff --git a/spec/gzip_spec.rb b/spec/gzip_spec.rb index 0d18db16..b6e7ed10 100644 --- a/spec/gzip_spec.rb +++ b/spec/gzip_spec.rb @@ -1,20 +1,20 @@ require 'spec_helper' -describe Presto::Client::Client do +describe Trino::Client::Client do before(:all) do @spec_path = File.dirname(__FILE__) WebMock.disable! - @cluster = TinyPresto::Cluster.new('ghcr.io/trinodb/presto', '316') + @cluster = TinyPresto::Cluster.new() @container = @cluster.run - @client = Presto::Client.new(server: 'localhost:8080', catalog: 'tpch', user: 'test-user', schema: 'tiny', gzip: true, http_debug: true) + @client = Trino::Client.new(server: 'localhost:8080', catalog: 'tpch', user: 'test-user', schema: 'tiny', gzip: true, http_debug: true) loop do begin # Make sure to all workers are available. - @client.run('select 1234') + @client.run('show schemas') break rescue StandardError => exception puts "Waiting for cluster ready... 
#{exception}" - sleep(5) + sleep(3) end end puts 'Cluster is ready' diff --git a/spec/model_spec.rb b/spec/model_spec.rb index 1c512823..1cfd6073 100644 --- a/spec/model_spec.rb +++ b/spec/model_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -describe Presto::Client::Models do +describe Trino::Client::Models do describe 'rehash of BlockedReason' do h = { "operatorId" => 0, diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index 0f141040..5897191f 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -14,8 +14,8 @@ require 'json' require 'webmock/rspec' -require 'presto-client' -include Presto::Client +require 'trino-client' +include Trino::Client require 'tiny-presto' @@ -30,7 +30,7 @@ def run_with_retry(client, sql) begin columns, rows = @client.run(sql) return columns, rows - rescue Presto::Client::PrestoQueryError => e + rescue Trino::Client::TrinoQueryError => e if RETRYABLE_ERRORS.any? { |error| e.message =~ error } sleep(i) i += 1 diff --git a/spec/statement_client_spec.rb b/spec/statement_client_spec.rb index 9e63742a..b5a7461f 100644 --- a/spec/statement_client_spec.rb +++ b/spec/statement_client_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -describe Presto::Client::StatementClient do +describe Trino::Client::StatementClient do let :options do { server: "localhost", @@ -26,19 +26,19 @@ end let :faraday do - Presto::Client.faraday_client(options) + Trino::Client.faraday_client(options) end it "sets headers" do stub_request(:post, "localhost/v1/statement"). 
with(body: query, headers: { - "User-Agent" => "presto-ruby/#{VERSION}", - "X-Presto-Catalog" => options[:catalog], - "X-Presto-Schema" => options[:schema], - "X-Presto-User" => options[:user], - "X-Presto-Language" => options[:language], - "X-Presto-Time-Zone" => options[:time_zone], + "User-Agent" => "trino-ruby/#{VERSION}", + "X-Trino-Catalog" => options[:catalog], + "X-Trino-Schema" => options[:schema], + "X-Trino-User" => options[:user], + "X-Trino-Language" => options[:language], + "X-Trino-Time-Zone" => options[:time_zone], }).to_return(body: response_json.to_json) StatementClient.new(faraday, query, options) @@ -57,22 +57,22 @@ stub_request(:post, "localhost/v1/statement"). with(body: query, headers: { - "User-Agent" => "presto-ruby/#{VERSION}", - "X-Presto-Catalog" => options[:catalog], - "X-Presto-Schema" => options[:schema], - "X-Presto-User" => options[:user], - "X-Presto-Language" => options[:language], - "X-Presto-Time-Zone" => options[:time_zone], + "User-Agent" => "trino-ruby/#{VERSION}", + "X-Trino-Catalog" => options[:catalog], + "X-Trino-Schema" => options[:schema], + "X-Trino-User" => options[:user], + "X-Trino-Language" => options[:language], + "X-Trino-Time-Zone" => options[:time_zone], }).to_return(body: response_json2.to_json) stub_request(:get, "localhost/v1/next_uri"). 
with(headers: { - "User-Agent" => "presto-ruby/#{VERSION}", - "X-Presto-Catalog" => options[:catalog], - "X-Presto-Schema" => options[:schema], - "X-Presto-User" => options[:user], - "X-Presto-Language" => options[:language], - "X-Presto-Time-Zone" => options[:time_zone], + "User-Agent" => "trino-ruby/#{VERSION}", + "X-Trino-Catalog" => options[:catalog], + "X-Trino-Schema" => options[:schema], + "X-Trino-User" => options[:user], + "X-Trino-Language" => options[:language], + "X-Trino-Time-Zone" => options[:time_zone], }).to_return(body: lambda{|req|if retry_p; response_json.to_json; else; retry_p=true; raise Timeout::Error.new("execution expired"); end }) sc = StatementClient.new(faraday, query, options.merge(http_open_timeout: 1)) @@ -86,23 +86,23 @@ stub_request(:post, "localhost/v1/statement"). with(body: query, headers: { - "User-Agent" => "presto-ruby/#{VERSION}", - "X-Presto-Catalog" => options[:catalog], - "X-Presto-Schema" => options[:schema], - "X-Presto-User" => options[:user], - "X-Presto-Language" => options[:language], - "X-Presto-Time-Zone" => options[:time_zone], + "User-Agent" => "trino-ruby/#{VERSION}", + "X-Trino-Catalog" => options[:catalog], + "X-Trino-Schema" => options[:schema], + "X-Trino-User" => options[:user], + "X-Trino-Language" => options[:language], + "X-Trino-Time-Zone" => options[:time_zone], "Accept" => "application/x-msgpack,application/json" }).to_return(body: MessagePack.dump(response_json2), headers: {"Content-Type" => "application/x-msgpack"}) stub_request(:get, "localhost/v1/next_uri"). 
with(headers: { - "User-Agent" => "presto-ruby/#{VERSION}", - "X-Presto-Catalog" => options[:catalog], - "X-Presto-Schema" => options[:schema], - "X-Presto-User" => options[:user], - "X-Presto-Language" => options[:language], - "X-Presto-Time-Zone" => options[:time_zone], + "User-Agent" => "trino-ruby/#{VERSION}", + "X-Trino-Catalog" => options[:catalog], + "X-Trino-Schema" => options[:schema], + "X-Trino-User" => options[:user], + "X-Trino-Language" => options[:language], + "X-Trino-Time-Zone" => options[:time_zone], "Accept" => "application/x-msgpack,application/json" }).to_return(body: lambda{|req|if retry_p; MessagePack.dump(response_json); else; retry_p=true; raise Timeout::Error.new("execution expired"); end }, headers: {"Content-Type" => "application/x-msgpack"}) @@ -113,11 +113,11 @@ retry_p.should be_true end - # presto version could be "V0_ddd" or "Vddd" - /\APresto::Client::ModelVersions::V(\w+)/ =~ Presto::Client::Models.to_s + # trino version could be "V0_ddd" or "Vddd" + /\ATrino::Client::ModelVersions::V(\w+)/ =~ Trino::Client::Models.to_s # https://github.com/prestosql/presto/commit/80a2c5113d47e3390bf6dc041486a1c9dfc04592 - # renamed DeleteHandle to DeleteTarget, then DeleteHandle exists when presto version + # renamed DeleteHandle to DeleteTarget, then DeleteHandle exists when trino version # is less than 313. if $1[0, 2] == "0_" || $1.to_i < 314 it "decodes DeleteHandle" do @@ -174,7 +174,7 @@ stub_request(:post, "localhost/v1/statement"). 
with(body: query).to_return(body: response_json2.to_json, headers: {"X-Test-Header" => "123"}) - q = Presto::Client.new(options).query(query) + q = Trino::Client.new(options).query(query) q.current_results_headers["X-Test-Header"].should == "123" end @@ -197,12 +197,12 @@ describe '#query_info' do let :headers do { - "User-Agent" => "presto-ruby/#{VERSION}", - "X-Presto-Catalog" => options[:catalog], - "X-Presto-Schema" => options[:schema], - "X-Presto-User" => options[:user], - "X-Presto-Language" => options[:language], - "X-Presto-Time-Zone" => options[:time_zone], + "User-Agent" => "trino-ruby/#{VERSION}", + "X-Trino-Catalog" => options[:catalog], + "X-Trino-Schema" => options[:schema], + "X-Trino-User" => options[:user], + "X-Trino-Language" => options[:language], + "X-Trino-Time-Zone" => options[:time_zone], } end @@ -219,7 +219,7 @@ with(headers: headers). to_return(body: {"session" => "invalid session structure"}.to_json) statement_client.query_info - end.should raise_error(PrestoHttpError, /Presto API returned unexpected structure at \/v1\/query\/queryid\. Expected Presto::Client::ModelVersions::.*::QueryInfo but got {"session":"invalid session structure"}/) + end.should raise_error(TrinoHttpError, /Trino API returned unexpected structure at \/v1\/query\/queryid\. Expected Trino::Client::ModelVersions::.*::QueryInfo but got {"session":"invalid session structure"}/) end it "raises an exception if response format is unexpected" do @@ -228,14 +228,14 @@ with(headers: headers). to_return(body: "unexpected data structure (not JSON)") statement_client.query_info - end.should raise_error(PrestoHttpError, /Presto API returned unexpected data format./) + end.should raise_error(TrinoHttpError, /Trino API returned unexpected data format./) end it "is redirected if server returned 301" do stub_request(:get, "http://localhost/v1/query/#{response_json2[:id]}"). with(headers: headers). 
to_return(status: 301, headers: {"Location" => "http://localhost/v1/query/redirected"}) - + stub_request(:get, "http://localhost/v1/query/redirected"). with(headers: headers). to_return(body: {"queryId" => "queryid"}.to_json) @@ -252,71 +252,71 @@ stub_request(:delete, "http://localhost/v1/query/#{query_id}"). with(body: "", headers: { - "User-Agent" => "presto-ruby/#{VERSION}", - "X-Presto-Catalog" => options[:catalog], - "X-Presto-Schema" => options[:schema], - "X-Presto-User" => options[:user], - "X-Presto-Language" => options[:language], - "X-Presto-Time-Zone" => options[:time_zone], + "User-Agent" => "trino-ruby/#{VERSION}", + "X-Trino-Catalog" => options[:catalog], + "X-Trino-Schema" => options[:schema], + "X-Trino-User" => options[:user], + "X-Trino-Language" => options[:language], + "X-Trino-Time-Zone" => options[:time_zone], }).to_return(body: {}.to_json) - Presto::Client.new(options).kill(query_id) + Trino::Client.new(options).kill(query_id) end end describe 'advanced HTTP headers' do let(:headers) do { - "User-Agent" => "presto-ruby/#{VERSION}", - "X-Presto-Catalog" => options[:catalog], - "X-Presto-Schema" => options[:schema], - "X-Presto-User" => options[:user], - "X-Presto-Language" => options[:language], - "X-Presto-Time-Zone" => options[:time_zone], + "User-Agent" => "trino-ruby/#{VERSION}", + "X-Trino-Catalog" => options[:catalog], + "X-Trino-Schema" => options[:schema], + "X-Trino-User" => options[:user], + "X-Trino-Language" => options[:language], + "X-Trino-Time-Zone" => options[:time_zone], } end - it "sets X-Presto-Session from properties" do + it "sets X-Trino-Session from properties" do options[:properties] = {"hello" => "world", "name"=>"value"} stub_request(:post, "localhost/v1/statement"). with(body: query, headers: headers.merge({ - "X-Presto-Session" => options[:properties].map {|k,v| "#{k}=#{v}"}.join(", ") + "X-Trino-Session" => options[:properties].map {|k,v| "#{k}=#{v}"}.join(", ") })). 
to_return(body: response_json.to_json) StatementClient.new(faraday, query, options) end - it "sets X-Presto-Client-Info from client_info" do + it "sets X-Trino-Client-Info from client_info" do options[:client_info] = "raw" stub_request(:post, "localhost/v1/statement"). with(body: query, - headers: headers.merge("X-Presto-Client-Info" => "raw")). + headers: headers.merge("X-Trino-Client-Info" => "raw")). to_return(body: response_json.to_json) StatementClient.new(faraday, query, options) end - it "sets X-Presto-Client-Info in JSON from client_info" do + it "sets X-Trino-Client-Info in JSON from client_info" do options[:client_info] = {"k1" => "v1", "k2" => "v2"} stub_request(:post, "localhost/v1/statement"). with(body: query, - headers: headers.merge("X-Presto-Client-Info" => '{"k1":"v1","k2":"v2"}')). + headers: headers.merge("X-Trino-Client-Info" => '{"k1":"v1","k2":"v2"}')). to_return(body: response_json.to_json) StatementClient.new(faraday, query, options) end - it "sets X-Presto-Client-Tags" do + it "sets X-Trino-Client-Tags" do options[:client_tags] = ["k1:v1", "k2:v2"] stub_request(:post, "localhost/v1/statement"). with(body: query, - headers: headers.merge("X-Presto-Client-Tags" => "k1:v1,k2:v2")). + headers: headers.merge("X-Trino-Client-Tags" => "k1:v1,k2:v2")). to_return(body: response_json.to_json) StatementClient.new(faraday, query, options) @@ -330,12 +330,12 @@ stub_request(:post, "https://localhost/v1/statement"). 
with(body: query, headers: { - "User-Agent" => "presto-ruby/#{VERSION}", - "X-Presto-Catalog" => options[:catalog], - "X-Presto-Schema" => options[:schema], - "X-Presto-User" => options[:user], - "X-Presto-Language" => options[:language], - "X-Presto-Time-Zone" => options[:time_zone], + "User-Agent" => "trino-ruby/#{VERSION}", + "X-Trino-Catalog" => options[:catalog], + "X-Trino-Schema" => options[:schema], + "X-Trino-User" => options[:user], + "X-Trino-Language" => options[:language], + "X-Trino-Time-Zone" => options[:time_zone], }, basic_auth: [options[:user], password] ).to_return(body: response_json.to_json) @@ -433,14 +433,14 @@ end end - it "supports multiple model versions" do + it "supports Presto" do stub_request(:post, "localhost/v1/statement"). with({body: query}). to_return(body: response_json.to_json) faraday = Faraday.new(url: "http://localhost") - client = StatementClient.new(faraday, query, options.merge(model_version: "0.149")) - client.current_results.should be_a_kind_of(ModelVersions::V0_149::QueryResults) + client = StatementClient.new(faraday, query, options.merge(model_version: "316")) + client.current_results.should be_a_kind_of(ModelVersions::V316::QueryResults) end it "rejects unsupported model version" do @@ -466,12 +466,12 @@ stub_request(:post, "localhost/v1/statement"). 
with(body: query, headers: { - "User-Agent" => "presto-ruby/#{VERSION}", - "X-Presto-Catalog" => options[:catalog], - "X-Presto-Schema" => options[:schema], - "X-Presto-User" => options[:user], - "X-Presto-Language" => options[:language], - "X-Presto-Time-Zone" => options[:time_zone], + "User-Agent" => "trino-ruby/#{VERSION}", + "X-Trino-Catalog" => options[:catalog], + "X-Trino-Schema" => options[:schema], + "X-Trino-User" => options[:user], + "X-Trino-Language" => options[:language], + "X-Trino-Time-Zone" => options[:time_zone], }).to_return(body: nested_json.to_json(:max_nesting => false)) StatementClient.new(faraday, query, options) @@ -480,12 +480,12 @@ describe "query timeout" do let :headers do { - "User-Agent" => "presto-ruby/#{VERSION}", - "X-Presto-Catalog" => options[:catalog], - "X-Presto-Schema" => options[:schema], - "X-Presto-User" => options[:user], - "X-Presto-Language" => options[:language], - "X-Presto-Time-Zone" => options[:time_zone], + "User-Agent" => "trino-ruby/#{VERSION}", + "X-Trino-Catalog" => options[:catalog], + "X-Trino-Schema" => options[:schema], + "X-Trino-User" => options[:user], + "X-Trino-Language" => options[:language], + "X-Trino-Time-Zone" => options[:time_zone], } end @@ -528,7 +528,7 @@ end [:plan_timeout, :query_timeout].each do |timeout_type| - it "raises PrestoQueryTimeoutError if timeout during planning" do + it "raises TrinoQueryTimeoutError if timeout during planning" do stub_request(:post, "localhost/v1/statement"). with(body: query, headers: headers). 
to_return(body: planning_response.to_json) @@ -546,27 +546,27 @@ to_return(body: planning_response.to_json) lambda do client.advance - end.should raise_error(Presto::Client::PrestoQueryTimeoutError, "Query queryid timed out") + end.should raise_error(Trino::Client::TrinoQueryTimeoutError, "Query queryid timed out") end - it "raises PrestoQueryTimeoutError if timeout during initial resuming" do + it "raises TrinoQueryTimeoutError if timeout during initial resuming" do stub_request(:get, "localhost/v1/next_uri"). with(headers: headers). to_return(body: lambda{|req| raise Timeout::Error.new("execution expired")}) lambda do StatementClient.new(faraday, query, options.merge(timeout_type => 1), "/v1/next_uri") - end.should raise_error(Presto::Client::PrestoQueryTimeoutError, "Query timed out") + end.should raise_error(Trino::Client::TrinoQueryTimeoutError, "Query timed out") end - it "raises PrestoHttpError if timeout during initial resuming and #{timeout_type} < retry_timeout" do + it "raises TrinoHttpError if timeout during initial resuming and #{timeout_type} < retry_timeout" do stub_request(:get, "localhost/v1/next_uri"). with(headers: headers). to_return(body: lambda{|req| raise Timeout::Error.new("execution expired")}) lambda do StatementClient.new(faraday, query, options.merge(timeout_type => 2, retry_timeout: 1), "/v1/next_uri") - end.should raise_error(Presto::Client::PrestoHttpError, "Presto API error due to timeout") + end.should raise_error(Trino::Client::TrinoHttpError, "Trino API error due to timeout") end end @@ -590,7 +590,7 @@ client.advance end - it "raises PrestoQueryTimeoutError if timeout during execution" do + it "raises TrinoQueryTimeoutError if timeout during execution" do stub_request(:post, "localhost/v1/statement"). with(body: query, headers: headers). 
to_return(body: planning_response.to_json) @@ -608,7 +608,7 @@ to_return(body: late_running_response.to_json) lambda do client.advance - end.should raise_error(Presto::Client::PrestoQueryTimeoutError, "Query queryid timed out") + end.should raise_error(Trino::Client::TrinoQueryTimeoutError, "Query queryid timed out") end it "doesn't raise errors if query is done" do diff --git a/spec/tpch_query_spec.rb b/spec/tpch_query_spec.rb index 837af72f..abad9204 100644 --- a/spec/tpch_query_spec.rb +++ b/spec/tpch_query_spec.rb @@ -1,20 +1,20 @@ require 'spec_helper' -describe Presto::Client::Client do +describe Trino::Client::Client do before(:all) do @spec_path = File.dirname(__FILE__) WebMock.disable! - @cluster = TinyPresto::Cluster.new('ghcr.io/trinodb/presto', '316') + @cluster = TinyPresto::Cluster.new() @container = @cluster.run - @client = Presto::Client.new(server: 'localhost:8080', catalog: 'tpch', user: 'test-user', schema: 'tiny') + @client = Trino::Client.new(server: 'localhost:8080', catalog: 'tpch', user: 'test-user', schema: 'tiny') loop do begin # Make sure to all workers are available. - @client.run('select 1234') + @client.run('show schemas') break rescue StandardError => exception puts "Waiting for cluster ready... 
#{exception}" - sleep(5) + sleep(3) end end puts 'Cluster is ready' diff --git a/presto-client.gemspec b/trino-client.gemspec similarity index 73% rename from presto-client.gemspec rename to trino-client.gemspec index 53fe7c8e..b16faf73 100644 --- a/presto-client.gemspec +++ b/trino-client.gemspec @@ -1,14 +1,14 @@ -require File.expand_path 'lib/presto/client/version', File.dirname(__FILE__) +require File.expand_path 'lib/trino/client/version', File.dirname(__FILE__) Gem::Specification.new do |gem| - gem.name = "presto-client" - gem.version = Presto::Client::VERSION + gem.name = "trino-client" + gem.version = Trino::Client::VERSION gem.authors = ["Sadayuki Furuhashi"] gem.email = ["sf@treasure-data.com"] - gem.description = %q{Presto client library} - gem.summary = %q{Presto client library} - gem.homepage = "https://github.com/treasure-data/presto-client-ruby" + gem.description = %q{Trino client library} + gem.summary = %q{Trino client library} + gem.homepage = "https://github.com/treasure-data/trino-client-ruby" gem.license = "Apache-2.0" gem.files = `git ls-files`.split($\)