Class: Google::Cloud::AIPlatform::V1::PredictionService::Client
- Inherits: Object
- Hierarchy: Object → Google::Cloud::AIPlatform::V1::PredictionService::Client
- Includes:
- Paths
- Defined in:
- lib/google/cloud/ai_platform/v1/prediction_service/client.rb
Overview
Client for the PredictionService service.
A service for online predictions and explanations.
Defined Under Namespace
Classes: Configuration
Instance Attribute Summary collapse
-
#iam_policy_client ⇒ Google::Iam::V1::IAMPolicy::Client
readonly
Get the associated client for mix-in of the IAMPolicy.
-
#location_client ⇒ Google::Cloud::Location::Locations::Client
readonly
Get the associated client for mix-in of the Locations.
Class Method Summary collapse
-
.configure {|config| ... } ⇒ Client::Configuration
Configure the PredictionService Client class.
Instance Method Summary collapse
-
#configure {|config| ... } ⇒ Client::Configuration
Configure the PredictionService Client instance.
-
#direct_predict(request, options = nil) {|response, operation| ... } ⇒ ::Google::Cloud::AIPlatform::V1::DirectPredictResponse
Perform a unary online prediction request to a gRPC model server for Vertex first-party products and frameworks.
-
#direct_raw_predict(request, options = nil) {|response, operation| ... } ⇒ ::Google::Cloud::AIPlatform::V1::DirectRawPredictResponse
Perform a unary online prediction request to a gRPC model server for custom containers.
-
#explain(request, options = nil) {|response, operation| ... } ⇒ ::Google::Cloud::AIPlatform::V1::ExplainResponse
Perform an online explanation.
-
#generate_content(request, options = nil) {|response, operation| ... } ⇒ ::Google::Cloud::AIPlatform::V1::GenerateContentResponse
Generate content with multimodal inputs.
-
#initialize {|config| ... } ⇒ Client
constructor
Create a new PredictionService client object.
-
#predict(request, options = nil) {|response, operation| ... } ⇒ ::Google::Cloud::AIPlatform::V1::PredictResponse
Perform an online prediction.
-
#raw_predict(request, options = nil) {|response, operation| ... } ⇒ ::Google::Api::HttpBody
Perform an online prediction with an arbitrary HTTP payload.
-
#server_streaming_predict(request, options = nil) {|response, operation| ... } ⇒ ::Enumerable<::Google::Cloud::AIPlatform::V1::StreamingPredictResponse>
Perform a server-side streaming online prediction request for Vertex LLM streaming.
-
#stream_direct_predict(request, options = nil) {|response, operation| ... } ⇒ ::Enumerable<::Google::Cloud::AIPlatform::V1::StreamDirectPredictResponse>
Perform a streaming online prediction request to a gRPC model server for Vertex first-party products and frameworks.
-
#stream_direct_raw_predict(request, options = nil) {|response, operation| ... } ⇒ ::Enumerable<::Google::Cloud::AIPlatform::V1::StreamDirectRawPredictResponse>
Perform a streaming online prediction request to a gRPC model server for custom containers.
-
#stream_generate_content(request, options = nil) {|response, operation| ... } ⇒ ::Enumerable<::Google::Cloud::AIPlatform::V1::GenerateContentResponse>
Generate content with multimodal inputs with streaming support.
-
#stream_raw_predict(request, options = nil) {|response, operation| ... } ⇒ ::Enumerable<::Google::Api::HttpBody>
Perform a streaming online prediction with an arbitrary HTTP payload.
-
#streaming_predict(request, options = nil) {|response, operation| ... } ⇒ ::Enumerable<::Google::Cloud::AIPlatform::V1::StreamingPredictResponse>
Perform a streaming online prediction request for Vertex first-party products and frameworks.
-
#streaming_raw_predict(request, options = nil) {|response, operation| ... } ⇒ ::Enumerable<::Google::Cloud::AIPlatform::V1::StreamingRawPredictResponse>
Perform a streaming online prediction request through gRPC.
-
#universe_domain ⇒ String
The effective universe domain.
Methods included from Paths
Constructor Details
#initialize {|config| ... } ⇒ Client
Create a new PredictionService client object.
126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 |
# File 'lib/google/cloud/ai_platform/v1/prediction_service/client.rb', line 126 def initialize # These require statements are intentionally placed here to initialize # the gRPC module only when it's required. # See https://github.com/googleapis/toolkit/issues/446 require "gapic/grpc" require "google/cloud/aiplatform/v1/prediction_service_services_pb" # Create the configuration object @config = Configuration.new Client.configure # Yield the configuration if needed yield @config if block_given? # Create credentials credentials = @config.credentials # Use self-signed JWT if the endpoint is unchanged from default, # but only if the default endpoint does not have a region prefix. enable_self_signed_jwt = @config.endpoint.nil? || (@config.endpoint == Configuration::DEFAULT_ENDPOINT && !@config.endpoint.split(".").first.include?("-")) credentials ||= Credentials.default scope: @config.scope, enable_self_signed_jwt: enable_self_signed_jwt if credentials.is_a?(::String) || credentials.is_a?(::Hash) credentials = Credentials.new credentials, scope: @config.scope end @quota_project_id = @config.quota_project @quota_project_id ||= credentials.quota_project_id if credentials.respond_to? 
:quota_project_id @prediction_service_stub = ::Gapic::ServiceStub.new( ::Google::Cloud::AIPlatform::V1::PredictionService::Stub, credentials: credentials, endpoint: @config.endpoint, endpoint_template: DEFAULT_ENDPOINT_TEMPLATE, universe_domain: @config.universe_domain, channel_args: @config.channel_args, interceptors: @config.interceptors, channel_pool_config: @config.channel_pool ) @location_client = Google::Cloud::Location::Locations::Client.new do |config| config.credentials = credentials config.quota_project = @quota_project_id config.endpoint = @prediction_service_stub.endpoint config.universe_domain = @prediction_service_stub.universe_domain end @iam_policy_client = Google::Iam::V1::IAMPolicy::Client.new do |config| config.credentials = credentials config.quota_project = @quota_project_id config.endpoint = @prediction_service_stub.endpoint config.universe_domain = @prediction_service_stub.universe_domain end end |
Instance Attribute Details
#iam_policy_client ⇒ Google::Iam::V1::IAMPolicy::Client (readonly)
Get the associated client for mix-in of the IAMPolicy.
192 193 194 |
# File 'lib/google/cloud/ai_platform/v1/prediction_service/client.rb', line 192 def iam_policy_client @iam_policy_client end |
#location_client ⇒ Google::Cloud::Location::Locations::Client (readonly)
Get the associated client for mix-in of the Locations.
185 186 187 |
# File 'lib/google/cloud/ai_platform/v1/prediction_service/client.rb', line 185 def location_client @location_client end |
Class Method Details
.configure {|config| ... } ⇒ Client::Configuration
Configure the PredictionService Client class.
See Configuration for a description of the configuration fields.
64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 |
# File 'lib/google/cloud/ai_platform/v1/prediction_service/client.rb', line 64 def self.configure @configure ||= begin namespace = ["Google", "Cloud", "AIPlatform", "V1"] parent_config = while namespace.any? parent_name = namespace.join "::" parent_const = const_get parent_name break parent_const.configure if parent_const.respond_to? :configure namespace.pop end default_config = Client::Configuration.new parent_config default_config end yield @configure if block_given? @configure end |
Instance Method Details
#configure {|config| ... } ⇒ Client::Configuration
Configure the PredictionService Client instance.
The configuration is set to the derived mode, meaning that values can be changed, but structural changes (adding new fields, etc.) are not allowed. Structural changes should be made on configure.
See Configuration for a description of the configuration fields.
96 97 98 99 |
# File 'lib/google/cloud/ai_platform/v1/prediction_service/client.rb', line 96 def configure yield @config if block_given? @config end |
#direct_predict(request, options = nil) ⇒ ::Google::Cloud::AIPlatform::V1::DirectPredictResponse #direct_predict(endpoint: nil, inputs: nil, parameters: nil) ⇒ ::Google::Cloud::AIPlatform::V1::DirectPredictResponse
Perform a unary online prediction request to a gRPC model server for Vertex first-party products and frameworks.
558 559 560 561 562 563 564 565 566 567 568 569 570 571 572 573 574 575 576 577 578 579 580 581 582 583 584 585 586 587 588 589 590 591 592 593 594 595 596 597 598 |
# File 'lib/google/cloud/ai_platform/v1/prediction_service/client.rb', line 558 def direct_predict request, = nil raise ::ArgumentError, "request must be provided" if request.nil? request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::AIPlatform::V1::DirectPredictRequest # Converts hash and nil to an options object = ::Gapic::CallOptions.new(**.to_h) if .respond_to? :to_h # Customize the options with defaults = @config.rpcs.direct_predict..to_h # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers [:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ lib_name: @config.lib_name, lib_version: @config.lib_version, gapic_version: ::Google::Cloud::AIPlatform::V1::VERSION [:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? [:"x-goog-user-project"] = @quota_project_id if @quota_project_id header_params = {} if request.endpoint header_params["endpoint"] = request.endpoint end request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&") [:"x-goog-request-params"] ||= request_params_header .apply_defaults timeout: @config.rpcs.direct_predict.timeout, metadata: , retry_policy: @config.rpcs.direct_predict.retry_policy .apply_defaults timeout: @config.timeout, metadata: @config., retry_policy: @config.retry_policy @prediction_service_stub.call_rpc :direct_predict, request, options: do |response, operation| yield response, operation if block_given? return response end rescue ::GRPC::BadStatus => e raise ::Google::Cloud::Error.from_error(e) end |
#direct_raw_predict(request, options = nil) ⇒ ::Google::Cloud::AIPlatform::V1::DirectRawPredictResponse #direct_raw_predict(endpoint: nil, method_name: nil, input: nil) ⇒ ::Google::Cloud::AIPlatform::V1::DirectRawPredictResponse
Perform a unary online prediction request to a gRPC model server for custom containers.
657 658 659 660 661 662 663 664 665 666 667 668 669 670 671 672 673 674 675 676 677 678 679 680 681 682 683 684 685 686 687 688 689 690 691 692 693 694 695 696 697 |
# File 'lib/google/cloud/ai_platform/v1/prediction_service/client.rb', line 657 def direct_raw_predict request, = nil raise ::ArgumentError, "request must be provided" if request.nil? request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::AIPlatform::V1::DirectRawPredictRequest # Converts hash and nil to an options object = ::Gapic::CallOptions.new(**.to_h) if .respond_to? :to_h # Customize the options with defaults = @config.rpcs.direct_raw_predict..to_h # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers [:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ lib_name: @config.lib_name, lib_version: @config.lib_version, gapic_version: ::Google::Cloud::AIPlatform::V1::VERSION [:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? [:"x-goog-user-project"] = @quota_project_id if @quota_project_id header_params = {} if request.endpoint header_params["endpoint"] = request.endpoint end request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&") [:"x-goog-request-params"] ||= request_params_header .apply_defaults timeout: @config.rpcs.direct_raw_predict.timeout, metadata: , retry_policy: @config.rpcs.direct_raw_predict.retry_policy .apply_defaults timeout: @config.timeout, metadata: @config., retry_policy: @config.retry_policy @prediction_service_stub.call_rpc :direct_raw_predict, request, options: do |response, operation| yield response, operation if block_given? return response end rescue ::GRPC::BadStatus => e raise ::Google::Cloud::Error.from_error(e) end |
#explain(request, options = nil) ⇒ ::Google::Cloud::AIPlatform::V1::ExplainResponse #explain(endpoint: nil, instances: nil, parameters: nil, explanation_spec_override: nil, deployed_model_id: nil) ⇒ ::Google::Cloud::AIPlatform::V1::ExplainResponse
Perform an online explanation.
If deployed_model_id is specified, the corresponding DeployedModel must have explanation_spec populated. If deployed_model_id is not specified, all DeployedModels must have explanation_spec populated.
1199 1200 1201 1202 1203 1204 1205 1206 1207 1208 1209 1210 1211 1212 1213 1214 1215 1216 1217 1218 1219 1220 1221 1222 1223 1224 1225 1226 1227 1228 1229 1230 1231 1232 1233 1234 1235 1236 1237 1238 1239 |
# File 'lib/google/cloud/ai_platform/v1/prediction_service/client.rb', line 1199 def explain request, = nil raise ::ArgumentError, "request must be provided" if request.nil? request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::AIPlatform::V1::ExplainRequest # Converts hash and nil to an options object = ::Gapic::CallOptions.new(**.to_h) if .respond_to? :to_h # Customize the options with defaults = @config.rpcs.explain..to_h # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers [:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ lib_name: @config.lib_name, lib_version: @config.lib_version, gapic_version: ::Google::Cloud::AIPlatform::V1::VERSION [:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? [:"x-goog-user-project"] = @quota_project_id if @quota_project_id header_params = {} if request.endpoint header_params["endpoint"] = request.endpoint end request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&") [:"x-goog-request-params"] ||= request_params_header .apply_defaults timeout: @config.rpcs.explain.timeout, metadata: , retry_policy: @config.rpcs.explain.retry_policy .apply_defaults timeout: @config.timeout, metadata: @config., retry_policy: @config.retry_policy @prediction_service_stub.call_rpc :explain, request, options: do |response, operation| yield response, operation if block_given? return response end rescue ::GRPC::BadStatus => e raise ::Google::Cloud::Error.from_error(e) end |
#generate_content(request, options = nil) ⇒ ::Google::Cloud::AIPlatform::V1::GenerateContentResponse #generate_content(model: nil, contents: nil, system_instruction: nil, tools: nil, tool_config: nil, labels: nil, safety_settings: nil, generation_config: nil) ⇒ ::Google::Cloud::AIPlatform::V1::GenerateContentResponse
Generate content with multimodal inputs.
1325 1326 1327 1328 1329 1330 1331 1332 1333 1334 1335 1336 1337 1338 1339 1340 1341 1342 1343 1344 1345 1346 1347 1348 1349 1350 1351 1352 1353 1354 1355 1356 1357 1358 1359 1360 1361 1362 1363 1364 1365 |
# File 'lib/google/cloud/ai_platform/v1/prediction_service/client.rb', line 1325 def generate_content request, = nil raise ::ArgumentError, "request must be provided" if request.nil? request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::AIPlatform::V1::GenerateContentRequest # Converts hash and nil to an options object = ::Gapic::CallOptions.new(**.to_h) if .respond_to? :to_h # Customize the options with defaults = @config.rpcs.generate_content..to_h # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers [:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ lib_name: @config.lib_name, lib_version: @config.lib_version, gapic_version: ::Google::Cloud::AIPlatform::V1::VERSION [:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? [:"x-goog-user-project"] = @quota_project_id if @quota_project_id header_params = {} if request.model header_params["model"] = request.model end request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&") [:"x-goog-request-params"] ||= request_params_header .apply_defaults timeout: @config.rpcs.generate_content.timeout, metadata: , retry_policy: @config.rpcs.generate_content.retry_policy .apply_defaults timeout: @config.timeout, metadata: @config., retry_policy: @config.retry_policy @prediction_service_stub.call_rpc :generate_content, request, options: do |response, operation| yield response, operation if block_given? return response end rescue ::GRPC::BadStatus => e raise ::Google::Cloud::Error.from_error(e) end |
#predict(request, options = nil) ⇒ ::Google::Cloud::AIPlatform::V1::PredictResponse #predict(endpoint: nil, instances: nil, parameters: nil) ⇒ ::Google::Cloud::AIPlatform::V1::PredictResponse
Perform an online prediction.
258 259 260 261 262 263 264 265 266 267 268 269 270 271 272 273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 290 291 292 293 294 295 296 297 298 |
# File 'lib/google/cloud/ai_platform/v1/prediction_service/client.rb', line 258 def predict request, = nil raise ::ArgumentError, "request must be provided" if request.nil? request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::AIPlatform::V1::PredictRequest # Converts hash and nil to an options object = ::Gapic::CallOptions.new(**.to_h) if .respond_to? :to_h # Customize the options with defaults = @config.rpcs.predict..to_h # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers [:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ lib_name: @config.lib_name, lib_version: @config.lib_version, gapic_version: ::Google::Cloud::AIPlatform::V1::VERSION [:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? [:"x-goog-user-project"] = @quota_project_id if @quota_project_id header_params = {} if request.endpoint header_params["endpoint"] = request.endpoint end request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&") [:"x-goog-request-params"] ||= request_params_header .apply_defaults timeout: @config.rpcs.predict.timeout, metadata: , retry_policy: @config.rpcs.predict.retry_policy .apply_defaults timeout: @config.timeout, metadata: @config., retry_policy: @config.retry_policy @prediction_service_stub.call_rpc :predict, request, options: do |response, operation| yield response, operation if block_given? return response end rescue ::GRPC::BadStatus => e raise ::Google::Cloud::Error.from_error(e) end |
#raw_predict(request, options = nil) ⇒ ::Google::Api::HttpBody #raw_predict(endpoint: nil, http_body: nil) ⇒ ::Google::Api::HttpBody
Perform an online prediction with an arbitrary HTTP payload.
The response includes the following HTTP headers:
X-Vertex-AI-Endpoint-Id
: ID of the Endpoint that served this prediction.X-Vertex-AI-Deployed-Model-Id
: ID of the Endpoint's DeployedModel that served this prediction.
372 373 374 375 376 377 378 379 380 381 382 383 384 385 386 387 388 389 390 391 392 393 394 395 396 397 398 399 400 401 402 403 404 405 406 407 408 409 410 411 412 |
# File 'lib/google/cloud/ai_platform/v1/prediction_service/client.rb', line 372 def raw_predict request, = nil raise ::ArgumentError, "request must be provided" if request.nil? request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::AIPlatform::V1::RawPredictRequest # Converts hash and nil to an options object = ::Gapic::CallOptions.new(**.to_h) if .respond_to? :to_h # Customize the options with defaults = @config.rpcs.raw_predict..to_h # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers [:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ lib_name: @config.lib_name, lib_version: @config.lib_version, gapic_version: ::Google::Cloud::AIPlatform::V1::VERSION [:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? [:"x-goog-user-project"] = @quota_project_id if @quota_project_id header_params = {} if request.endpoint header_params["endpoint"] = request.endpoint end request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&") [:"x-goog-request-params"] ||= request_params_header .apply_defaults timeout: @config.rpcs.raw_predict.timeout, metadata: , retry_policy: @config.rpcs.raw_predict.retry_policy .apply_defaults timeout: @config.timeout, metadata: @config., retry_policy: @config.retry_policy @prediction_service_stub.call_rpc :raw_predict, request, options: do |response, operation| yield response, operation if block_given? return response end rescue ::GRPC::BadStatus => e raise ::Google::Cloud::Error.from_error(e) end |
#server_streaming_predict(request, options = nil) ⇒ ::Enumerable<::Google::Cloud::AIPlatform::V1::StreamingPredictResponse> #server_streaming_predict(endpoint: nil, inputs: nil, parameters: nil) ⇒ ::Enumerable<::Google::Cloud::AIPlatform::V1::StreamingPredictResponse>
Perform a server-side streaming online prediction request for Vertex LLM streaming.
993 994 995 996 997 998 999 1000 1001 1002 1003 1004 1005 1006 1007 1008 1009 1010 1011 1012 1013 1014 1015 1016 1017 1018 1019 1020 1021 1022 1023 1024 1025 1026 1027 1028 1029 1030 1031 1032 1033 |
# File 'lib/google/cloud/ai_platform/v1/prediction_service/client.rb', line 993 def server_streaming_predict request, = nil raise ::ArgumentError, "request must be provided" if request.nil? request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::AIPlatform::V1::StreamingPredictRequest # Converts hash and nil to an options object = ::Gapic::CallOptions.new(**.to_h) if .respond_to? :to_h # Customize the options with defaults = @config.rpcs.server_streaming_predict..to_h # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers [:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ lib_name: @config.lib_name, lib_version: @config.lib_version, gapic_version: ::Google::Cloud::AIPlatform::V1::VERSION [:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? [:"x-goog-user-project"] = @quota_project_id if @quota_project_id header_params = {} if request.endpoint header_params["endpoint"] = request.endpoint end request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&") [:"x-goog-request-params"] ||= request_params_header .apply_defaults timeout: @config.rpcs.server_streaming_predict.timeout, metadata: , retry_policy: @config.rpcs.server_streaming_predict.retry_policy .apply_defaults timeout: @config.timeout, metadata: @config., retry_policy: @config.retry_policy @prediction_service_stub.call_rpc :server_streaming_predict, request, options: do |response, operation| yield response, operation if block_given? return response end rescue ::GRPC::BadStatus => e raise ::Google::Cloud::Error.from_error(e) end |
#stream_direct_predict(request, options = nil) {|response, operation| ... } ⇒ ::Enumerable<::Google::Cloud::AIPlatform::V1::StreamDirectPredictResponse>
Perform a streaming online prediction request to a gRPC model server for Vertex first-party products and frameworks.
740 741 742 743 744 745 746 747 748 749 750 751 752 753 754 755 756 757 758 759 760 761 762 763 764 765 766 767 768 769 770 771 772 773 774 775 776 777 |
# File 'lib/google/cloud/ai_platform/v1/prediction_service/client.rb', line 740 def stream_direct_predict request, = nil unless request.is_a? ::Enumerable raise ::ArgumentError, "request must be an Enumerable" unless request.respond_to? :to_enum request = request.to_enum end request = request.lazy.map do |req| ::Gapic::Protobuf.coerce req, to: ::Google::Cloud::AIPlatform::V1::StreamDirectPredictRequest end # Converts hash and nil to an options object = ::Gapic::CallOptions.new(**.to_h) if .respond_to? :to_h # Customize the options with defaults = @config.rpcs.stream_direct_predict..to_h # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers [:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ lib_name: @config.lib_name, lib_version: @config.lib_version, gapic_version: ::Google::Cloud::AIPlatform::V1::VERSION [:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? [:"x-goog-user-project"] = @quota_project_id if @quota_project_id .apply_defaults timeout: @config.rpcs.stream_direct_predict.timeout, metadata: , retry_policy: @config.rpcs.stream_direct_predict.retry_policy .apply_defaults timeout: @config.timeout, metadata: @config., retry_policy: @config.retry_policy @prediction_service_stub.call_rpc :stream_direct_predict, request, options: do |response, operation| yield response, operation if block_given? return response end rescue ::GRPC::BadStatus => e raise ::Google::Cloud::Error.from_error(e) end |
#stream_direct_raw_predict(request, options = nil) {|response, operation| ... } ⇒ ::Enumerable<::Google::Cloud::AIPlatform::V1::StreamDirectRawPredictResponse>
Perform a streaming online prediction request to a gRPC model server for custom containers.
820 821 822 823 824 825 826 827 828 829 830 831 832 833 834 835 836 837 838 839 840 841 842 843 844 845 846 847 848 849 850 851 852 853 854 855 856 857 |
# File 'lib/google/cloud/ai_platform/v1/prediction_service/client.rb', line 820 def stream_direct_raw_predict request, = nil unless request.is_a? ::Enumerable raise ::ArgumentError, "request must be an Enumerable" unless request.respond_to? :to_enum request = request.to_enum end request = request.lazy.map do |req| ::Gapic::Protobuf.coerce req, to: ::Google::Cloud::AIPlatform::V1::StreamDirectRawPredictRequest end # Converts hash and nil to an options object = ::Gapic::CallOptions.new(**.to_h) if .respond_to? :to_h # Customize the options with defaults = @config.rpcs.stream_direct_raw_predict..to_h # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers [:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ lib_name: @config.lib_name, lib_version: @config.lib_version, gapic_version: ::Google::Cloud::AIPlatform::V1::VERSION [:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? [:"x-goog-user-project"] = @quota_project_id if @quota_project_id .apply_defaults timeout: @config.rpcs.stream_direct_raw_predict.timeout, metadata: , retry_policy: @config.rpcs.stream_direct_raw_predict.retry_policy .apply_defaults timeout: @config.timeout, metadata: @config., retry_policy: @config.retry_policy @prediction_service_stub.call_rpc :stream_direct_raw_predict, request, options: do |response, operation| yield response, operation if block_given? return response end rescue ::GRPC::BadStatus => e raise ::Google::Cloud::Error.from_error(e) end |
#stream_generate_content(request, options = nil) ⇒ ::Enumerable<::Google::Cloud::AIPlatform::V1::GenerateContentResponse> #stream_generate_content(model: nil, contents: nil, system_instruction: nil, tools: nil, tool_config: nil, labels: nil, safety_settings: nil, generation_config: nil) ⇒ ::Enumerable<::Google::Cloud::AIPlatform::V1::GenerateContentResponse>
Generate content with multimodal inputs with streaming support.
1454 1455 1456 1457 1458 1459 1460 1461 1462 1463 1464 1465 1466 1467 1468 1469 1470 1471 1472 1473 1474 1475 1476 1477 1478 1479 1480 1481 1482 1483 1484 1485 1486 1487 1488 1489 1490 1491 1492 1493 1494 |
# File 'lib/google/cloud/ai_platform/v1/prediction_service/client.rb', line 1454 def stream_generate_content request, = nil raise ::ArgumentError, "request must be provided" if request.nil? request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::AIPlatform::V1::GenerateContentRequest # Converts hash and nil to an options object = ::Gapic::CallOptions.new(**.to_h) if .respond_to? :to_h # Customize the options with defaults = @config.rpcs.stream_generate_content..to_h # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers [:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ lib_name: @config.lib_name, lib_version: @config.lib_version, gapic_version: ::Google::Cloud::AIPlatform::V1::VERSION [:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? [:"x-goog-user-project"] = @quota_project_id if @quota_project_id header_params = {} if request.model header_params["model"] = request.model end request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&") [:"x-goog-request-params"] ||= request_params_header .apply_defaults timeout: @config.rpcs.stream_generate_content.timeout, metadata: , retry_policy: @config.rpcs.stream_generate_content.retry_policy .apply_defaults timeout: @config.timeout, metadata: @config., retry_policy: @config.retry_policy @prediction_service_stub.call_rpc :stream_generate_content, request, options: do |response, operation| yield response, operation if block_given? return response end rescue ::GRPC::BadStatus => e raise ::Google::Cloud::Error.from_error(e) end |
#stream_raw_predict(request, options = nil) ⇒ ::Enumerable<::Google::Api::HttpBody> #stream_raw_predict(endpoint: nil, http_body: nil) ⇒ ::Enumerable<::Google::Api::HttpBody>
Perform a streaming online prediction with an arbitrary HTTP payload.
465 466 467 468 469 470 471 472 473 474 475 476 477 478 479 480 481 482 483 484 485 486 487 488 489 490 491 492 493 494 495 496 497 498 499 500 501 502 503 504 505 |
# File 'lib/google/cloud/ai_platform/v1/prediction_service/client.rb', line 465 def stream_raw_predict request, = nil raise ::ArgumentError, "request must be provided" if request.nil? request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::AIPlatform::V1::StreamRawPredictRequest # Converts hash and nil to an options object = ::Gapic::CallOptions.new(**.to_h) if .respond_to? :to_h # Customize the options with defaults = @config.rpcs.stream_raw_predict..to_h # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers [:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ lib_name: @config.lib_name, lib_version: @config.lib_version, gapic_version: ::Google::Cloud::AIPlatform::V1::VERSION [:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? [:"x-goog-user-project"] = @quota_project_id if @quota_project_id header_params = {} if request.endpoint header_params["endpoint"] = request.endpoint end request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&") [:"x-goog-request-params"] ||= request_params_header .apply_defaults timeout: @config.rpcs.stream_raw_predict.timeout, metadata: , retry_policy: @config.rpcs.stream_raw_predict.retry_policy .apply_defaults timeout: @config.timeout, metadata: @config., retry_policy: @config.retry_policy @prediction_service_stub.call_rpc :stream_raw_predict, request, options: do |response, operation| yield response, operation if block_given? return response end rescue ::GRPC::BadStatus => e raise ::Google::Cloud::Error.from_error(e) end |
#streaming_predict(request, options = nil) {|response, operation| ... } ⇒ ::Enumerable<::Google::Cloud::AIPlatform::V1::StreamingPredictResponse>
Perform a streaming online prediction request for Vertex first-party products and frameworks.
900 901 902 903 904 905 906 907 908 909 910 911 912 913 914 915 916 917 918 919 920 921 922 923 924 925 926 927 928 929 930 931 932 933 934 935 936 937 |
# File 'lib/google/cloud/ai_platform/v1/prediction_service/client.rb', line 900 def streaming_predict request, = nil unless request.is_a? ::Enumerable raise ::ArgumentError, "request must be an Enumerable" unless request.respond_to? :to_enum request = request.to_enum end request = request.lazy.map do |req| ::Gapic::Protobuf.coerce req, to: ::Google::Cloud::AIPlatform::V1::StreamingPredictRequest end # Converts hash and nil to an options object = ::Gapic::CallOptions.new(**.to_h) if .respond_to? :to_h # Customize the options with defaults = @config.rpcs.streaming_predict..to_h # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers [:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ lib_name: @config.lib_name, lib_version: @config.lib_version, gapic_version: ::Google::Cloud::AIPlatform::V1::VERSION [:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? [:"x-goog-user-project"] = @quota_project_id if @quota_project_id .apply_defaults timeout: @config.rpcs.streaming_predict.timeout, metadata: , retry_policy: @config.rpcs.streaming_predict.retry_policy .apply_defaults timeout: @config.timeout, metadata: @config., retry_policy: @config.retry_policy @prediction_service_stub.call_rpc :streaming_predict, request, options: do |response, operation| yield response, operation if block_given? return response end rescue ::GRPC::BadStatus => e raise ::Google::Cloud::Error.from_error(e) end |
#streaming_raw_predict(request, options = nil) {|response, operation| ... } ⇒ ::Enumerable<::Google::Cloud::AIPlatform::V1::StreamingRawPredictResponse>
Perform a streaming online prediction request through gRPC.
1075 1076 1077 1078 1079 1080 1081 1082 1083 1084 1085 1086 1087 1088 1089 1090 1091 1092 1093 1094 1095 1096 1097 1098 1099 1100 1101 1102 1103 1104 1105 1106 1107 1108 1109 1110 1111 1112 |
# File 'lib/google/cloud/ai_platform/v1/prediction_service/client.rb', line 1075 def streaming_raw_predict request, = nil unless request.is_a? ::Enumerable raise ::ArgumentError, "request must be an Enumerable" unless request.respond_to? :to_enum request = request.to_enum end request = request.lazy.map do |req| ::Gapic::Protobuf.coerce req, to: ::Google::Cloud::AIPlatform::V1::StreamingRawPredictRequest end # Converts hash and nil to an options object = ::Gapic::CallOptions.new(**.to_h) if .respond_to? :to_h # Customize the options with defaults = @config.rpcs.streaming_raw_predict..to_h # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers [:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ lib_name: @config.lib_name, lib_version: @config.lib_version, gapic_version: ::Google::Cloud::AIPlatform::V1::VERSION [:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? [:"x-goog-user-project"] = @quota_project_id if @quota_project_id .apply_defaults timeout: @config.rpcs.streaming_raw_predict.timeout, metadata: , retry_policy: @config.rpcs.streaming_raw_predict.retry_policy .apply_defaults timeout: @config.timeout, metadata: @config., retry_policy: @config.retry_policy @prediction_service_stub.call_rpc :streaming_raw_predict, request, options: do |response, operation| yield response, operation if block_given? return response end rescue ::GRPC::BadStatus => e raise ::Google::Cloud::Error.from_error(e) end |
#universe_domain ⇒ String
The effective universe domain.
106 107 108 |
# File 'lib/google/cloud/ai_platform/v1/prediction_service/client.rb', line 106 def universe_domain @prediction_service_stub.universe_domain end |