| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| import inspect |
| import json |
| import types |
| import typing |
| from typing import Any, Callable, Dict, Mapping, Optional, Sequence, Union |
|
|
| import proto |
|
|
| from google.cloud.aiplatform import base |
| from google.api import httpbody_pb2 |
| from google.protobuf import struct_pb2 |
| from google.protobuf import json_format |
|
|
try:
    # langchain is an optional dependency; it is only needed here to give
    # RunnableConfig its real type for annotations.
    import langchain_core.runnables.config

    RunnableConfig = langchain_core.runnables.config.RunnableConfig
except ImportError:
    # Degrade gracefully to `Any` when langchain is not installed so this
    # module can still be imported.
    RunnableConfig = Any

# Alias for a JSON-serializable dictionary, used throughout this module.
JsonDict = Dict[str, Any]

# Module-level logger (google.cloud.aiplatform wrapper around stdlib logging).
_LOGGER = base.Logger(__name__)
|
|
|
|
def to_proto(
    obj: Union[JsonDict, proto.Message],
    message: Optional[proto.Message] = None,
) -> proto.Message:
    """Parses a JSON-like object into a message.

    If the object is already a message, this will return the object as-is. If
    the object is a JSON Dict, this will parse and merge the object into the
    message.

    Args:
        obj (Union[dict[str, Any], proto.Message]):
            Required. The object to convert to a proto message.
        message (proto.Message):
            Optional. A protocol buffer message to merge the obj into. It
            defaults to Struct() if unspecified.

    Returns:
        proto.Message: `obj` itself when it is already a message; otherwise
        `message` (newly created Struct() if unspecified) with `obj` merged
        into it.
    """
    # Short-circuit before allocating a default Struct: if obj is already a
    # message there is nothing to parse or merge.
    if isinstance(obj, (proto.Message, struct_pb2.Struct)):
        return obj
    if message is None:
        message = struct_pb2.Struct()
    try:
        # proto-plus messages wrap the raw protobuf message in `._pb`.
        json_format.ParseDict(obj, message._pb)
    except AttributeError:
        # Plain protobuf messages (e.g. struct_pb2.Struct) have no `._pb`.
        json_format.ParseDict(obj, message)
    return message
|
|
|
|
def to_dict(message: proto.Message) -> JsonDict:
    """Converts the contents of the protobuf message to JSON format.

    Args:
        message (proto.Message):
            Required. The proto message to be converted to a JSON dictionary.

    Returns:
        dict[str, Any]: A dictionary containing the contents of the proto.
    """
    # proto-plus messages wrap the raw protobuf message in `._pb`; plain
    # protobuf messages are serialized directly.
    try:
        raw_message = message._pb
    except AttributeError:
        raw_message = message
    result: JsonDict = json.loads(json_format.MessageToJson(raw_message))
    return result
|
|
|
|
def to_parsed_json(body: httpbody_pb2.HttpBody) -> Any:
    """Converts the contents of the httpbody message to JSON format.

    Args:
        body (httpbody_pb2.HttpBody):
            Required. The httpbody body to be converted to a JSON.

    Returns:
        Any: The parsed JSON object, or `body` unchanged when the content is
        not JSON or when decoding/parsing fails (a warning is logged).
    """
    content_type = getattr(body, "content_type", None)
    data = getattr(body, "data", None)

    # Only attempt parsing for payloads that are present and declared JSON.
    has_json_payload = (
        content_type is not None
        and data is not None
        and "application/json" in content_type
    )
    if not has_json_payload:
        return body

    try:
        utf8_data = data.decode("utf-8")
    except Exception as e:  # best-effort: fall back to the raw body
        _LOGGER.warning(f"Failed to decode data: {data}. Exception: {e}")
        return body

    try:
        return json.loads(utf8_data)
    except Exception as e:  # best-effort: fall back to the raw body
        _LOGGER.warning(f"Failed to parse JSON: {utf8_data}. Exception: {e}")
        return body
|
|
|
|
def generate_schema(
    f: Callable[..., Any],
    *,
    schema_name: Optional[str] = None,
    descriptions: Mapping[str, str] = {},
    required: Sequence[str] = [],
) -> JsonDict:
    """Generates the OpenAPI Schema for a callable object.

    Only positional and keyword arguments of the function `f` will be supported
    in the OpenAPI Schema that is generated. I.e. `*args` and `**kwargs` will
    not be present in the OpenAPI schema returned from this function. For those
    cases, you can either include it in the docstring for `f`, or modify the
    OpenAPI schema returned from this function to include additional arguments.

    Args:
        f (Callable):
            Required. The function to generate an OpenAPI Schema for.
        schema_name (str):
            Optional. The name for the OpenAPI schema. If unspecified, the name
            of the Callable will be used.
        descriptions (Mapping[str, str]):
            Optional. A `{name: description}` mapping for annotating input
            arguments of the function with user-provided descriptions. It
            defaults to an empty dictionary (i.e. there will not be any
            description for any of the inputs).
        required (Sequence[str]):
            Optional. For the user to specify the set of required arguments in
            function calls to `f`. If unspecified, it will be automatically
            inferred from `f`.

    Returns:
        dict[str, Any]: The OpenAPI Schema for the function `f` in JSON format.
    """
    pydantic = _import_pydantic_or_raise()
    defaults = dict(inspect.signature(f).parameters)
    fields_dict = {
        name: (
            # Fall back to typing.Any for parameters without an annotation.
            (param.annotation if param.annotation != inspect.Parameter.empty else Any),
            pydantic.Field(
                # Attach the user-provided description (if any) for this
                # argument; None means no description in the schema.
                description=descriptions.get(name, None),
            ),
        )
        for name, param in defaults.items()
        # `*args` / `**kwargs` cannot be represented in the schema; skip them.
        if param.kind
        in (
            inspect.Parameter.POSITIONAL_OR_KEYWORD,
            inspect.Parameter.KEYWORD_ONLY,
            inspect.Parameter.POSITIONAL_ONLY,
        )
    }
    parameters = pydantic.create_model(f.__name__, **fields_dict).schema()
    # Pydantic emits "title" entries that duplicate the function/argument
    # names; drop them to keep the schema minimal.
    parameters.pop("title", "")
    for name, function_arg in parameters.get("properties", {}).items():
        function_arg.pop("title", "")
        annotation = defaults[name].annotation
        # Optional[X] is rendered by pydantic as anyOf [X, null]; flatten it
        # into a single "type" plus "nullable": True (OpenAPI 3.0 style).
        if typing.get_origin(annotation) is typing.Union and type(
            None
        ) in typing.get_args(annotation):
            # Take the first non-null member type as the argument's type.
            for schema in function_arg.pop("anyOf", []):
                schema_type = schema.get("type")
                if schema_type and schema_type != "null":
                    function_arg["type"] = schema_type
                    break
            function_arg["nullable"] = True
    if required:
        # Trust the caller's explicit list of required arguments.
        parameters["required"] = required
    else:
        # Otherwise, arguments without default values are required.
        parameters["required"] = [
            k
            for k in defaults
            if (
                defaults[k].default == inspect.Parameter.empty
                and defaults[k].kind
                in (
                    inspect.Parameter.POSITIONAL_OR_KEYWORD,
                    inspect.Parameter.KEYWORD_ONLY,
                    inspect.Parameter.POSITIONAL_ONLY,
                )
            )
        ]
    schema = dict(name=f.__name__, description=f.__doc__, parameters=parameters)
    # An explicit schema_name overrides the function's own name.
    if schema_name:
        schema["name"] = schema_name
    return schema
|
|
|
|
def is_noop_or_proxy_tracer_provider(tracer_provider) -> bool:
    """Returns True if the tracer_provider is Proxy or NoOp.

    Args:
        tracer_provider:
            Required. The tracer provider instance to inspect.

    Returns:
        bool: True when the provider is an opentelemetry NoOpTracerProvider
        or ProxyTracerProvider; False otherwise, including when the
        opentelemetry package is not installed.
    """
    opentelemetry = _import_opentelemetry_or_warn()
    # _import_opentelemetry_or_warn returns None (after logging a warning)
    # when opentelemetry is not installed; treat that as "not NoOp/Proxy"
    # instead of raising AttributeError on None.
    if opentelemetry is None:
        return False
    ProxyTracerProvider = opentelemetry.trace.ProxyTracerProvider
    NoOpTracerProvider = opentelemetry.trace.NoOpTracerProvider
    return isinstance(tracer_provider, (NoOpTracerProvider, ProxyTracerProvider))
|
|
|
|
| def _import_cloud_storage_or_raise() -> types.ModuleType: |
| """Tries to import the Cloud Storage module.""" |
| try: |
| from google.cloud import storage |
| except ImportError as e: |
| raise ImportError( |
| "Cloud Storage is not installed. Please call " |
| "'pip install google-cloud-aiplatform[reasoningengine]'." |
| ) from e |
| return storage |
|
|
|
|
| def _import_cloudpickle_or_raise() -> types.ModuleType: |
| """Tries to import the cloudpickle module.""" |
| try: |
| import cloudpickle |
| except ImportError as e: |
| raise ImportError( |
| "cloudpickle is not installed. Please call " |
| "'pip install google-cloud-aiplatform[reasoningengine]'." |
| ) from e |
| return cloudpickle |
|
|
|
|
| def _import_pydantic_or_raise() -> types.ModuleType: |
| """Tries to import the pydantic module.""" |
| try: |
| import pydantic |
|
|
| _ = pydantic.Field |
| except AttributeError: |
| from pydantic import v1 as pydantic |
| except ImportError as e: |
| raise ImportError( |
| "pydantic is not installed. Please call " |
| "'pip install google-cloud-aiplatform[reasoningengine]'." |
| ) from e |
| return pydantic |
|
|
|
|
def _import_opentelemetry_or_warn() -> Optional[types.ModuleType]:
    """Returns the opentelemetry module, or None (with a warning) if missing."""
    module = None
    try:
        import opentelemetry as module
    except ImportError:
        _LOGGER.warning(
            "opentelemetry-sdk is not installed. Please call "
            "'pip install google-cloud-aiplatform[reasoningengine]'."
        )
    return module
|
|
|
|
def _import_opentelemetry_sdk_trace_or_warn() -> Optional[types.ModuleType]:
    """Returns opentelemetry.sdk.trace, or None (with a warning) if missing."""
    module = None
    try:
        import opentelemetry.sdk.trace as module
    except ImportError:
        _LOGGER.warning(
            "opentelemetry-sdk is not installed. Please call "
            "'pip install google-cloud-aiplatform[reasoningengine]'."
        )
    return module
|
|
|
|
def _import_cloud_trace_v2_or_warn() -> Optional[types.ModuleType]:
    """Returns google.cloud.trace_v2, or None (with a warning) if missing."""
    module = None
    try:
        import google.cloud.trace_v2 as module
    except ImportError:
        _LOGGER.warning(
            "google-cloud-trace is not installed. Please call "
            "'pip install google-cloud-aiplatform[reasoningengine]'."
        )
    return module
|
|
|
|
def _import_cloud_trace_exporter_or_warn() -> Optional[types.ModuleType]:
    """Returns opentelemetry.exporter.cloud_trace, or None (with a warning) if missing."""
    module = None
    try:
        import opentelemetry.exporter.cloud_trace as module
    except ImportError:
        _LOGGER.warning(
            "opentelemetry-exporter-gcp-trace is not installed. Please "
            "call 'pip install google-cloud-aiplatform[langchain]'."
        )
    return module
|
|
|
|
def _import_openinference_langchain_or_warn() -> Optional[types.ModuleType]:
    """Returns openinference.instrumentation.langchain, or None (with a warning) if missing."""
    module = None
    try:
        import openinference.instrumentation.langchain as module
    except ImportError:
        _LOGGER.warning(
            "openinference-instrumentation-langchain is not installed. Please "
            "call 'pip install google-cloud-aiplatform[langchain]'."
        )
    return module
|
|