Source code for google.ai.generativelanguage_v1beta.services.generative_service.client
# -*- coding: utf-8 -*-
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import os
import re
from typing import (
Callable,
Dict,
Iterable,
Mapping,
MutableMapping,
MutableSequence,
Optional,
Sequence,
Tuple,
Type,
Union,
cast,
)
import warnings
from google.api_core import client_options as client_options_lib
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.auth.transport import mtls # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
from google.oauth2 import service_account # type: ignore
from google.ai.generativelanguage_v1beta import gapic_version as package_version
try:
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
except AttributeError: # pragma: NO COVER
OptionalRetry = Union[retries.Retry, object, None] # type: ignore
from google.longrunning import operations_pb2 # type: ignore
from google.ai.generativelanguage_v1beta.types import generative_service, safety
from google.ai.generativelanguage_v1beta.types import content
from google.ai.generativelanguage_v1beta.types import content as gag_content
from .transports.base import DEFAULT_CLIENT_INFO, GenerativeServiceTransport
from .transports.grpc import GenerativeServiceGrpcTransport
from .transports.grpc_asyncio import GenerativeServiceGrpcAsyncIOTransport
from .transports.rest import GenerativeServiceRestTransport
class GenerativeServiceClientMeta(type):
"""Metaclass for the GenerativeService client.
This provides class-level methods for building and retrieving
support objects (e.g. transport) without polluting the client instance
objects.
"""
_transport_registry = (
OrderedDict()
) # type: Dict[str, Type[GenerativeServiceTransport]]
_transport_registry["grpc"] = GenerativeServiceGrpcTransport
_transport_registry["grpc_asyncio"] = GenerativeServiceGrpcAsyncIOTransport
_transport_registry["rest"] = GenerativeServiceRestTransport
def get_transport_class(
cls,
label: Optional[str] = None,
) -> Type[GenerativeServiceTransport]:
"""Returns an appropriate transport class.
Args:
label: The name of the desired transport. If none is
provided, then the first transport in the registry is used.
Returns:
The transport class to use.
"""
# If a specific transport is requested, return that one.
if label:
return cls._transport_registry[label]
# No transport is requested; return the default (that is, the first one
# in the dictionary).
return next(iter(cls._transport_registry.values()))
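# Illustrative sketch (not part of the generated source): looking up a transport
# class through the metaclass. The label must be one of the registered keys
# ("grpc", "grpc_asyncio", "rest"); with no label, the first registered transport
# (gRPC) is returned.
#
#   GenerativeServiceClient.get_transport_class("rest")   # -> GenerativeServiceRestTransport
#   GenerativeServiceClient.get_transport_class()         # -> GenerativeServiceGrpcTransport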
class GenerativeServiceClient(metaclass=GenerativeServiceClientMeta):
"""API for using Large Models that generate multimodal content
and have additional capabilities beyond text generation.
"""
@staticmethod
def _get_default_mtls_endpoint(api_endpoint):
"""Converts api endpoint to mTLS endpoint.
Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
"*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
Args:
api_endpoint (Optional[str]): the api endpoint to convert.
Returns:
str: converted mTLS api endpoint.
"""
if not api_endpoint:
return api_endpoint
mtls_endpoint_re = re.compile(
r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
)
m = mtls_endpoint_re.match(api_endpoint)
name, mtls, sandbox, googledomain = m.groups()
if mtls or not googledomain:
return api_endpoint
if sandbox:
return api_endpoint.replace(
"sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
)
return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
# Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
DEFAULT_ENDPOINT = "generativelanguage.googleapis.com"
DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
DEFAULT_ENDPOINT
)
_DEFAULT_ENDPOINT_TEMPLATE = "generativelanguage.{UNIVERSE_DOMAIN}"
_DEFAULT_UNIVERSE = "googleapis.com"
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
GenerativeServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_info(info)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
GenerativeServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(filename)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
from_service_account_json = from_service_account_file
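# Illustrative sketch (not part of the generated source): building a client from a
# service account key; the file path and the key dict below are placeholders.
#
#   client = GenerativeServiceClient.from_service_account_file(
#       "path/to/service-account-key.json"
#   )
#   # from_service_account_json is an alias of from_service_account_file:
#   client = GenerativeServiceClient.from_service_account_json(
#       "path/to/service-account-key.json"
#   )
#   # Or, from an already-loaded key dict:
#   # client = GenerativeServiceClient.from_service_account_info(key_info_dict)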
@property
def transport(self) -> GenerativeServiceTransport:
"""Returns the transport used by the client instance.
Returns:
GenerativeServiceTransport: The transport used by the client
instance.
"""
return self._transport
@staticmethod
def cached_content_path(
id: str,
) -> str:
"""Returns a fully-qualified cached_content string."""
return "cachedContents/{id}".format(
id=id,
)
@staticmethod
def parse_cached_content_path(path: str) -> Dict[str, str]:
"""Parses a cached_content path into its component segments."""
m = re.match(r"^cachedContents/(?P<id>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def model_path(
model: str,
) -> str:
"""Returns a fully-qualified model string."""
return "models/{model}".format(
model=model,
)
@staticmethod
def parse_model_path(path: str) -> Dict[str, str]:
"""Parses a model path into its component segments."""
m = re.match(r"^models/(?P<model>.+?)$", path)
return m.groupdict() if m else {}
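# Illustrative sketch (not part of the generated source): the resource-path helpers
# build and parse fully qualified resource names; the values are placeholders.
#
#   GenerativeServiceClient.model_path("gemini-1.5-flash")
#   # -> "models/gemini-1.5-flash"
#   GenerativeServiceClient.parse_model_path("models/gemini-1.5-flash")
#   # -> {"model": "gemini-1.5-flash"}
#   GenerativeServiceClient.cached_content_path("123")
#   # -> "cachedContents/123"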
@staticmethod
def common_billing_account_path(
billing_account: str,
) -> str:
"""Returns a fully-qualified billing_account string."""
return "billingAccounts/{billing_account}".format(
billing_account=billing_account,
)
@staticmethod
def parse_common_billing_account_path(path: str) -> Dict[str, str]:
"""Parse a billing_account path into its component segments."""
m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_folder_path(
folder: str,
) -> str:
"""Returns a fully-qualified folder string."""
return "folders/{folder}".format(
folder=folder,
)
@staticmethod
def parse_common_folder_path(path: str) -> Dict[str, str]:
"""Parse a folder path into its component segments."""
m = re.match(r"^folders/(?P<folder>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_organization_path(
organization: str,
) -> str:
"""Returns a fully-qualified organization string."""
return "organizations/{organization}".format(
organization=organization,
)
@staticmethod
def parse_common_organization_path(path: str) -> Dict[str, str]:
"""Parse a organization path into its component segments."""
m = re.match(r"^organizations/(?P<organization>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_project_path(
project: str,
) -> str:
"""Returns a fully-qualified project string."""
return "projects/{project}".format(
project=project,
)
@staticmethod
def parse_common_project_path(path: str) -> Dict[str, str]:
"""Parse a project path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_location_path(
project: str,
location: str,
) -> str:
"""Returns a fully-qualified location string."""
return "projects/{project}/locations/{location}".format(
project=project,
location=location,
)
@staticmethod
def parse_common_location_path(path: str) -> Dict[str, str]:
"""Parse a location path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
return m.groupdict() if m else {}
@classmethod
def get_mtls_endpoint_and_cert_source(
cls, client_options: Optional[client_options_lib.ClientOptions] = None
):
"""Deprecated. Return the API endpoint and client cert source for mutual TLS.
The client cert source is determined in the following order:
(1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
client cert source is None.
(2) if `client_options.client_cert_source` is provided, use the provided one; if the
default client cert source exists, use the default one; otherwise the client cert
source is None.
The API endpoint is determined in the following order:
(1) if `client_options.api_endpoint` is provided, use the provided one.
(2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
default mTLS endpoint; if the environment variable is "never", use the default API
endpoint; otherwise, if a client cert source exists, use the default mTLS endpoint;
otherwise use the default API endpoint.
More details can be found at https://google.aip.dev/auth/4114.
Args:
client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. Only the `api_endpoint` and `client_cert_source` properties may be used
in this method.
Returns:
Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
client cert source to use.
Raises:
google.auth.exceptions.MutualTLSChannelError: If any errors happen.
"""
warnings.warn(
"get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
DeprecationWarning,
)
if client_options is None:
client_options = client_options_lib.ClientOptions()
use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
if use_client_cert not in ("true", "false"):
raise ValueError(
"Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
)
if use_mtls_endpoint not in ("auto", "never", "always"):
raise MutualTLSChannelError(
"Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
)
# Figure out the client cert source to use.
client_cert_source = None
if use_client_cert == "true":
if client_options.client_cert_source:
client_cert_source = client_options.client_cert_source
elif mtls.has_default_client_cert_source():
client_cert_source = mtls.default_client_cert_source()
# Figure out which api endpoint to use.
if client_options.api_endpoint is not None:
api_endpoint = client_options.api_endpoint
elif use_mtls_endpoint == "always" or (
use_mtls_endpoint == "auto" and client_cert_source
):
api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
else:
api_endpoint = cls.DEFAULT_ENDPOINT
return api_endpoint, client_cert_source
@staticmethod
def _read_environment_variables():
"""Returns the environment variables used by the client.
Returns:
Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE,
GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables.
Raises:
ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not
any of ["true", "false"].
google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT
is not any of ["auto", "never", "always"].
"""
use_client_cert = os.getenv(
"GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"
).lower()
use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower()
universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN")
if use_client_cert not in ("true", "false"):
raise ValueError(
"Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
)
if use_mtls_endpoint not in ("auto", "never", "always"):
raise MutualTLSChannelError(
"Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
)
return use_client_cert == "true", use_mtls_endpoint, universe_domain_env
@staticmethod
def _get_client_cert_source(provided_cert_source, use_cert_flag):
"""Return the client cert source to be used by the client.
Args:
provided_cert_source (Callable[[], Tuple[bytes, bytes]]): The client certificate source provided.
use_cert_flag (bool): A flag indicating whether to use the client certificate.
Returns:
Callable[[], Tuple[bytes, bytes]] or None: The client cert source to be used by the client.
"""
client_cert_source = None
if use_cert_flag:
if provided_cert_source:
client_cert_source = provided_cert_source
elif mtls.has_default_client_cert_source():
client_cert_source = mtls.default_client_cert_source()
return client_cert_source
@staticmethod
def _get_api_endpoint(
api_override, client_cert_source, universe_domain, use_mtls_endpoint
):
"""Return the API endpoint used by the client.
Args:
api_override (str): The API endpoint override. If specified, this is always
the return value of this function and the other arguments are not used.
client_cert_source (Callable[[], Tuple[bytes, bytes]]): The client certificate source used by the client.
universe_domain (str): The universe domain used by the client.
use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters.
Possible values are "always", "auto", or "never".
Returns:
str: The API endpoint to be used by the client.
"""
if api_override is not None:
api_endpoint = api_override
elif use_mtls_endpoint == "always" or (
use_mtls_endpoint == "auto" and client_cert_source
):
_default_universe = GenerativeServiceClient._DEFAULT_UNIVERSE
if universe_domain != _default_universe:
raise MutualTLSChannelError(
f"mTLS is not supported in any universe other than {_default_universe}."
)
api_endpoint = GenerativeServiceClient.DEFAULT_MTLS_ENDPOINT
else:
api_endpoint = GenerativeServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(
UNIVERSE_DOMAIN=universe_domain
)
return api_endpoint
@staticmethod
def _get_universe_domain(
client_universe_domain: Optional[str], universe_domain_env: Optional[str]
) -> str:
"""Return the universe domain used by the client.
Args:
client_universe_domain (Optional[str]): The universe domain configured via the client options.
universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable.
Returns:
str: The universe domain to be used by the client.
Raises:
ValueError: If the universe domain is an empty string.
"""
universe_domain = GenerativeServiceClient._DEFAULT_UNIVERSE
if client_universe_domain is not None:
universe_domain = client_universe_domain
elif universe_domain_env is not None:
universe_domain = universe_domain_env
if len(universe_domain.strip()) == 0:
raise ValueError("Universe Domain cannot be an empty string.")
return universe_domain
def _validate_universe_domain(self):
"""Validates client's and credentials' universe domains are consistent.
Returns:
bool: True iff the configured universe domain is valid.
Raises:
ValueError: If the configured universe domain is not valid.
"""
# NOTE (b/349488459): universe validation is disabled until further notice.
return True
@property
def api_endpoint(self):
"""Return the API endpoint used by the client instance.
Returns:
str: The API endpoint used by the client instance.
"""
return self._api_endpoint
@property
def universe_domain(self) -> str:
"""Return the universe domain used by the client instance.
Returns:
str: The universe domain used by the client instance.
"""
return self._universe_domain
def __init__(
self,
*,
credentials: Optional[ga_credentials.Credentials] = None,
transport: Optional[
Union[
str,
GenerativeServiceTransport,
Callable[..., GenerativeServiceTransport],
]
] = None,
client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiates the generative service client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Optional[Union[str,GenerativeServiceTransport,Callable[..., GenerativeServiceTransport]]]):
The transport to use, or a Callable that constructs and returns a new transport.
If a Callable is given, it will be called with the same set of initialization
arguments as used in the GenerativeServiceTransport constructor.
If set to None, a transport is chosen automatically.
client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
Custom options for the client.
1. The ``api_endpoint`` property can be used to override the
default endpoint provided by the client when ``transport`` is
not explicitly provided. Only if this property is not set and
``transport`` was not explicitly provided, the endpoint is
determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
variable, which can have one of the following values:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto-switch to the
default mTLS endpoint if client certificate is present; this is
the default value).
2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
to provide a client certificate for mTLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
3. The ``universe_domain`` property can be used to override the
default "googleapis.com" universe. Note that the ``api_endpoint``
property still takes precedence; and ``universe_domain`` is
currently not supported for mTLS.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
"""
self._client_options = client_options
if isinstance(self._client_options, dict):
self._client_options = client_options_lib.from_dict(self._client_options)
if self._client_options is None:
self._client_options = client_options_lib.ClientOptions()
self._client_options = cast(
client_options_lib.ClientOptions, self._client_options
)
universe_domain_opt = getattr(self._client_options, "universe_domain", None)
(
self._use_client_cert,
self._use_mtls_endpoint,
self._universe_domain_env,
) = GenerativeServiceClient._read_environment_variables()
self._client_cert_source = GenerativeServiceClient._get_client_cert_source(
self._client_options.client_cert_source, self._use_client_cert
)
self._universe_domain = GenerativeServiceClient._get_universe_domain(
universe_domain_opt, self._universe_domain_env
)
self._api_endpoint = None # updated below, depending on `transport`
# Initialize the universe domain validation.
self._is_universe_domain_valid = False
api_key_value = getattr(self._client_options, "api_key", None)
if api_key_value and credentials:
raise ValueError(
"client_options.api_key and credentials are mutually exclusive"
)
# Save or instantiate the transport.
# Ordinarily, we provide the transport, but allowing a custom transport
# instance provides an extensibility point for unusual situations.
transport_provided = isinstance(transport, GenerativeServiceTransport)
if transport_provided:
# transport is a GenerativeServiceTransport instance.
if credentials or self._client_options.credentials_file or api_key_value:
raise ValueError(
"When providing a transport instance, "
"provide its credentials directly."
)
if self._client_options.scopes:
raise ValueError(
"When providing a transport instance, provide its scopes "
"directly."
)
self._transport = cast(GenerativeServiceTransport, transport)
self._api_endpoint = self._transport.host
self._api_endpoint = (
self._api_endpoint
or GenerativeServiceClient._get_api_endpoint(
self._client_options.api_endpoint,
self._client_cert_source,
self._universe_domain,
self._use_mtls_endpoint,
)
)
if not transport_provided:
import google.auth._default # type: ignore
if api_key_value and hasattr(
google.auth._default, "get_api_key_credentials"
):
credentials = google.auth._default.get_api_key_credentials(
api_key_value
)
transport_init: Union[
Type[GenerativeServiceTransport],
Callable[..., GenerativeServiceTransport],
] = (
GenerativeServiceClient.get_transport_class(transport)
if isinstance(transport, str) or transport is None
else cast(Callable[..., GenerativeServiceTransport], transport)
)
# initialize with the provided callable or the passed in class
self._transport = transport_init(
credentials=credentials,
credentials_file=self._client_options.credentials_file,
host=self._api_endpoint,
scopes=self._client_options.scopes,
client_cert_source_for_mtls=self._client_cert_source,
quota_project_id=self._client_options.quota_project_id,
client_info=client_info,
always_use_jwt_access=True,
api_audience=self._client_options.api_audience,
)
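# Illustrative sketch (not part of the generated source): common ways to construct
# the client; the API key value is a placeholder.
#
#   from google.api_core.client_options import ClientOptions
#
#   # Credentials resolved from the environment, default gRPC transport:
#   client = GenerativeServiceClient()
#
#   # Use the REST transport instead:
#   client = GenerativeServiceClient(transport="rest")
#
#   # API-key auth via client options (mutually exclusive with `credentials`):
#   client = GenerativeServiceClient(
#       client_options=ClientOptions(api_key="YOUR_API_KEY")
#   )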
def generate_content(
self,
request: Optional[
Union[generative_service.GenerateContentRequest, dict]
] = None,
*,
model: Optional[str] = None,
contents: Optional[MutableSequence[content.Content]] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> generative_service.GenerateContentResponse:
r"""Generates a model response given an input
``GenerateContentRequest``. Refer to the `text generation
guide <https://ai.google.dev/gemini-api/docs/text-generation>`__
for detailed usage information. Input capabilities differ
between models, including tuned models. Refer to the `model
guide <https://ai.google.dev/gemini-api/docs/models/gemini>`__
and `tuning
guide <https://ai.google.dev/gemini-api/docs/model-tuning>`__
for details.
.. code-block:: python
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.ai import generativelanguage_v1beta
def sample_generate_content():
# Create a client
client = generativelanguage_v1beta.GenerativeServiceClient()
# Initialize request argument(s)
request = generativelanguage_v1beta.GenerateContentRequest(
model="model_value",
)
# Make the request
response = client.generate_content(request=request)
# Handle the response
print(response)
Args:
request (Union[google.ai.generativelanguage_v1beta.types.GenerateContentRequest, dict]):
The request object. Request to generate a completion from
the model.
model (str):
Required. The name of the ``Model`` to use for
generating the completion.
Format: ``name=models/{model}``.
This corresponds to the ``model`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
contents (MutableSequence[google.ai.generativelanguage_v1beta.types.Content]):
Required. The content of the current conversation with
the model.
For single-turn queries, this is a single instance. For
multi-turn queries like
`chat <https://ai.google.dev/gemini-api/docs/text-generation#chat>`__,
this is a repeated field that contains the conversation
history and the latest request.
This corresponds to the ``contents`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.ai.generativelanguage_v1beta.types.GenerateContentResponse:
Response from the model supporting multiple candidate
responses.
Safety ratings and content filtering are reported for
both the prompt (in GenerateContentResponse.prompt_feedback)
and for each candidate (in finish_reason and safety_ratings).
The API:
- Returns either all requested candidates or none of them
- Returns no candidates at all only if there was something
  wrong with the prompt (check prompt_feedback)
- Reports feedback on each candidate in finish_reason
  and safety_ratings.
"""
# Create or coerce a protobuf request object.
# - Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([model, contents])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# - Use the request object if provided (there's no risk of modifying the input as
# there are no flattened fields), or create one.
if not isinstance(request, generative_service.GenerateContentRequest):
request = generative_service.GenerateContentRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if model is not None:
request.model = model
if contents is not None:
request.contents = contents
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.generate_content]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)),
)
# Validate the universe domain.
self._validate_universe_domain()
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
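# Illustrative sketch (not part of the generated source): calling generate_content
# with the flattened `model` and `contents` arguments instead of a request object;
# the model name is a placeholder.
#
#   from google.ai import generativelanguage_v1beta
#
#   client = generativelanguage_v1beta.GenerativeServiceClient()
#   response = client.generate_content(
#       model="models/your-model",
#       contents=[
#           generativelanguage_v1beta.Content(
#               role="user",
#               parts=[generativelanguage_v1beta.Part(text="Hello")],
#           )
#       ],
#   )
#   print(response.candidates[0].content.parts[0].text)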
def generate_answer(
self,
request: Optional[Union[generative_service.GenerateAnswerRequest, dict]] = None,
*,
model: Optional[str] = None,
contents: Optional[MutableSequence[content.Content]] = None,
safety_settings: Optional[MutableSequence[safety.SafetySetting]] = None,
answer_style: Optional[
generative_service.GenerateAnswerRequest.AnswerStyle
] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> generative_service.GenerateAnswerResponse:
r"""Generates a grounded answer from the model given an input
``GenerateAnswerRequest``.
.. code-block:: python
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.ai import generativelanguage_v1beta
def sample_generate_answer():
# Create a client
client = generativelanguage_v1beta.GenerativeServiceClient()
# Initialize request argument(s)
request = generativelanguage_v1beta.GenerateAnswerRequest(
model="model_value",
answer_style="VERBOSE",
)
# Make the request
response = client.generate_answer(request=request)
# Handle the response
print(response)
Args:
request (Union[google.ai.generativelanguage_v1beta.types.GenerateAnswerRequest, dict]):
The request object. Request to generate a grounded answer from the
``Model``.
model (str):
Required. The name of the ``Model`` to use for
generating the grounded response.
Format: ``model=models/{model}``.
This corresponds to the ``model`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
contents (MutableSequence[google.ai.generativelanguage_v1beta.types.Content]):
Required. The content of the current conversation with
the ``Model``. For single-turn queries, this is a single
question to answer. For multi-turn queries, this is a
repeated field that contains conversation history and
the last ``Content`` in the list containing the
question.
Note: ``GenerateAnswer`` only supports queries in
English.
This corresponds to the ``contents`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
safety_settings (MutableSequence[google.ai.generativelanguage_v1beta.types.SafetySetting]):
Optional. A list of unique ``SafetySetting`` instances
for blocking unsafe content.
This will be enforced on the
``GenerateAnswerRequest.contents`` and
``GenerateAnswerResponse.candidate``. There should not
be more than one setting for each ``SafetyCategory``
type. The API will block any contents and responses that
fail to meet the thresholds set by these settings. This
list overrides the default settings for each
``SafetyCategory`` specified in the safety_settings. If
there is no ``SafetySetting`` for a given
``SafetyCategory`` provided in the list, the API will
use the default safety setting for that category. Harm
categories HARM_CATEGORY_HATE_SPEECH,
HARM_CATEGORY_SEXUALLY_EXPLICIT,
HARM_CATEGORY_DANGEROUS_CONTENT,
HARM_CATEGORY_HARASSMENT are supported. Refer to the
`guide <https://ai.google.dev/gemini-api/docs/safety-settings>`__
for detailed information on available safety settings.
Also refer to the `Safety
guidance <https://ai.google.dev/gemini-api/docs/safety-guidance>`__
to learn how to incorporate safety considerations in
your AI applications.
This corresponds to the ``safety_settings`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
answer_style (google.ai.generativelanguage_v1beta.types.GenerateAnswerRequest.AnswerStyle):
Required. Style in which answers
should be returned.
This corresponds to the ``answer_style`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.ai.generativelanguage_v1beta.types.GenerateAnswerResponse:
Response from the model for a
grounded answer.
"""
# Create or coerce a protobuf request object.
# - Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([model, contents, safety_settings, answer_style])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# - Use the request object if provided (there's no risk of modifying the input as
# there are no flattened fields), or create one.
if not isinstance(request, generative_service.GenerateAnswerRequest):
request = generative_service.GenerateAnswerRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if model is not None:
request.model = model
if contents is not None:
request.contents = contents
if safety_settings is not None:
request.safety_settings = safety_settings
if answer_style is not None:
request.answer_style = answer_style
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.generate_answer]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)),
)
# Validate the universe domain.
self._validate_universe_domain()
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
def stream_generate_content(
self,
request: Optional[
Union[generative_service.GenerateContentRequest, dict]
] = None,
*,
model: Optional[str] = None,
contents: Optional[MutableSequence[content.Content]] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> Iterable[generative_service.GenerateContentResponse]:
r"""Generates a `streamed
response <https://ai.google.dev/gemini-api/docs/text-generation?lang=python#generate-a-text-stream>`__
from the model given an input ``GenerateContentRequest``.
.. code-block:: python
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.ai import generativelanguage_v1beta
def sample_stream_generate_content():
# Create a client
client = generativelanguage_v1beta.GenerativeServiceClient()
# Initialize request argument(s)
request = generativelanguage_v1beta.GenerateContentRequest(
model="model_value",
)
# Make the request
stream = client.stream_generate_content(request=request)
# Handle the response
for response in stream:
print(response)
Args:
request (Union[google.ai.generativelanguage_v1beta.types.GenerateContentRequest, dict]):
The request object. Request to generate a completion from
the model.
model (str):
Required. The name of the ``Model`` to use for
generating the completion.
Format: ``name=models/{model}``.
This corresponds to the ``model`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
contents (MutableSequence[google.ai.generativelanguage_v1beta.types.Content]):
Required. The content of the current conversation with
the model.
For single-turn queries, this is a single instance. For
multi-turn queries like
`chat <https://ai.google.dev/gemini-api/docs/text-generation#chat>`__,
this is a repeated field that contains the conversation
history and the latest request.
This corresponds to the ``contents`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
Iterable[google.ai.generativelanguage_v1beta.types.GenerateContentResponse]:
Response from the model supporting multiple candidate
responses.
Safety ratings and content filtering are reported for
both the prompt (in GenerateContentResponse.prompt_feedback)
and for each candidate (in finish_reason and safety_ratings).
The API:
- Returns either all requested candidates or none of them
- Returns no candidates at all only if there was something
  wrong with the prompt (check prompt_feedback)
- Reports feedback on each candidate in finish_reason
  and safety_ratings.
"""
# Create or coerce a protobuf request object.
# - Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([model, contents])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# - Use the request object if provided (there's no risk of modifying the input as
# there are no flattened fields), or create one.
if not isinstance(request, generative_service.GenerateContentRequest):
request = generative_service.GenerateContentRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if model is not None:
request.model = model
if contents is not None:
request.contents = contents
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.stream_generate_content]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)),
)
# Validate the universe domain.
self._validate_universe_domain()
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
def embed_content(
self,
request: Optional[Union[generative_service.EmbedContentRequest, dict]] = None,
*,
model: Optional[str] = None,
content: Optional[gag_content.Content] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> generative_service.EmbedContentResponse:
r"""Generates a text embedding vector from the input ``Content``
using the specified `Gemini Embedding
model <https://ai.google.dev/gemini-api/docs/models/gemini#text-embedding>`__.
.. code-block:: python
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.ai import generativelanguage_v1beta
def sample_embed_content():
# Create a client
client = generativelanguage_v1beta.GenerativeServiceClient()
# Initialize request argument(s)
request = generativelanguage_v1beta.EmbedContentRequest(
model="model_value",
)
# Make the request
response = client.embed_content(request=request)
# Handle the response
print(response)
Args:
request (Union[google.ai.generativelanguage_v1beta.types.EmbedContentRequest, dict]):
The request object. Request containing the ``Content`` for the model to
embed.
model (str):
Required. The model's resource name. This serves as an
ID for the Model to use.
This name should match a model name returned by the
``ListModels`` method.
Format: ``models/{model}``
This corresponds to the ``model`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
content (google.ai.generativelanguage_v1beta.types.Content):
Required. The content to embed. Only the ``parts.text``
fields will be counted.
This corresponds to the ``content`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.ai.generativelanguage_v1beta.types.EmbedContentResponse:
The response to an EmbedContentRequest.
"""
# Create or coerce a protobuf request object.
# - Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([model, content])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# - Use the request object if provided (there's no risk of modifying the input as
# there are no flattened fields), or create one.
if not isinstance(request, generative_service.EmbedContentRequest):
request = generative_service.EmbedContentRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if model is not None:
request.model = model
if content is not None:
request.content = content
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.embed_content]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)),
)
# Validate the universe domain.
self._validate_universe_domain()
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
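# Illustrative sketch (not part of the generated source): calling embed_content with
# flattened arguments; the model name is a placeholder, and the response fields shown
# are assumed from the EmbedContentResponse type.
#
#   from google.ai import generativelanguage_v1beta
#
#   client = generativelanguage_v1beta.GenerativeServiceClient()
#   response = client.embed_content(
#       model="models/your-embedding-model",
#       content=generativelanguage_v1beta.Content(
#           parts=[generativelanguage_v1beta.Part(text="Text to embed")]
#       ),
#   )
#   print(response.embedding.values)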
def batch_embed_contents(
self,
request: Optional[
Union[generative_service.BatchEmbedContentsRequest, dict]
] = None,
*,
model: Optional[str] = None,
requests: Optional[
MutableSequence[generative_service.EmbedContentRequest]
] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> generative_service.BatchEmbedContentsResponse:
r"""Generates multiple embedding vectors from the input ``Content``
which consists of a batch of strings represented as
``EmbedContentRequest`` objects.
.. code-block:: python
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.ai import generativelanguage_v1beta
def sample_batch_embed_contents():
# Create a client
client = generativelanguage_v1beta.GenerativeServiceClient()
# Initialize request argument(s)
requests = generativelanguage_v1beta.EmbedContentRequest()
requests.model = "model_value"
request = generativelanguage_v1beta.BatchEmbedContentsRequest(
model="model_value",
requests=requests,
)
# Make the request
response = client.batch_embed_contents(request=request)
# Handle the response
print(response)
Args:
request (Union[google.ai.generativelanguage_v1beta.types.BatchEmbedContentsRequest, dict]):
The request object. Batch request to get embeddings from
the model for a list of prompts.
model (str):
Required. The model's resource name. This serves as an
ID for the Model to use.
This name should match a model name returned by the
``ListModels`` method.
Format: ``models/{model}``
This corresponds to the ``model`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
requests (MutableSequence[google.ai.generativelanguage_v1beta.types.EmbedContentRequest]):
Required. Embed requests for the batch. The model in
each of these requests must match the model specified
``BatchEmbedContentsRequest.model``.
This corresponds to the ``requests`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.ai.generativelanguage_v1beta.types.BatchEmbedContentsResponse:
The response to a BatchEmbedContentsRequest.
"""
# Create or coerce a protobuf request object.
# - Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([model, requests])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# - Use the request object if provided (there's no risk of modifying the input as
# there are no flattened fields), or create one.
if not isinstance(request, generative_service.BatchEmbedContentsRequest):
request = generative_service.BatchEmbedContentsRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if model is not None:
request.model = model
if requests is not None:
request.requests = requests
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.batch_embed_contents]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)),
)
# Validate the universe domain.
self._validate_universe_domain()
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
def count_tokens(
self,
request: Optional[Union[generative_service.CountTokensRequest, dict]] = None,
*,
model: Optional[str] = None,
contents: Optional[MutableSequence[content.Content]] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> generative_service.CountTokensResponse:
r"""Runs a model's tokenizer on input ``Content`` and returns the
token count. Refer to the `tokens
guide <https://ai.google.dev/gemini-api/docs/tokens>`__ to learn
more about tokens.
.. code-block:: python
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.ai import generativelanguage_v1beta
def sample_count_tokens():
# Create a client
client = generativelanguage_v1beta.GenerativeServiceClient()
# Initialize request argument(s)
request = generativelanguage_v1beta.CountTokensRequest(
model="model_value",
)
# Make the request
response = client.count_tokens(request=request)
# Handle the response
print(response)
Args:
request (Union[google.ai.generativelanguage_v1beta.types.CountTokensRequest, dict]):
The request object. Counts the number of tokens in the ``prompt`` sent to a
model.
Models may tokenize text differently, so each model may
return a different ``token_count``.
model (str):
Required. The model's resource name. This serves as an
ID for the Model to use.
This name should match a model name returned by the
``ListModels`` method.
Format: ``models/{model}``
This corresponds to the ``model`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
contents (MutableSequence[google.ai.generativelanguage_v1beta.types.Content]):
Optional. The input given to the model as a prompt. This
field is ignored when ``generate_content_request`` is
set.
This corresponds to the ``contents`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.ai.generativelanguage_v1beta.types.CountTokensResponse:
A response from CountTokens.
It returns the model's token_count for the prompt.
"""
# Create or coerce a protobuf request object.
# - Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([model, contents])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# - Use the request object if provided (there's no risk of modifying the input as
# there are no flattened fields), or create one.
if not isinstance(request, generative_service.CountTokensRequest):
request = generative_service.CountTokensRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if model is not None:
request.model = model
if contents is not None:
request.contents = contents
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.count_tokens]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)),
)
# Validate the universe domain.
self._validate_universe_domain()
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
def __enter__(self) -> "GenerativeServiceClient":
return self
def __exit__(self, type, value, traceback):
"""Releases underlying transport's resources.
.. warning::
ONLY use as a context manager if the transport is NOT shared
with other clients! Exiting the with block will CLOSE the transport
and may cause errors in other clients!
"""
self.transport.close()
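# Illustrative sketch (not part of the generated source): using the client as a
# context manager so the underlying transport is closed automatically on exit;
# the model name is a placeholder.
#
#   with GenerativeServiceClient() as client:
#       response = client.count_tokens(
#           request={"model": "models/your-model"}
#       )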
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=package_version.__version__
)
__all__ = ("GenerativeServiceClient",)