diff --git a/codegen/opentelemetry-codegen-json/README.rst b/codegen/opentelemetry-codegen-json/README.rst
new file mode 100644
index 00000000000..96bfa639567
--- /dev/null
+++ b/codegen/opentelemetry-codegen-json/README.rst
@@ -0,0 +1,22 @@
+OpenTelemetry JSON Code Generator
+=================================
+
+|pypi|
+
+.. |pypi| image:: https://badge.fury.io/py/opentelemetry-codegen-json.svg
+ :target: https://pypi.org/project/opentelemetry-codegen-json/
+
+This library is a protocol buffer compiler (``protoc``) plugin that generates Python code for serializing and deserializing OpenTelemetry protocol (OTLP) messages in JSON format.
+
+Installation
+------------
+
+::
+
+ pip install opentelemetry-codegen-json
+
+
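+Usage
+-----
+
+Once installed, ``protoc`` discovers the plugin through the ``protoc-gen-otlp_json``
+executable on your ``PATH`` and exposes it as an ``--otlp_json_out`` flag. A minimal
+invocation (paths are illustrative) might look like::
+
+    protoc --otlp_json_out=./generated \
+        --proto_path=./opentelemetry-proto \
+        opentelemetry/proto/trace/v1/trace.proto
+
+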
+References
+----------
+
+* `OpenTelemetry <https://opentelemetry.io/>`_
diff --git a/codegen/opentelemetry-codegen-json/pyproject.toml b/codegen/opentelemetry-codegen-json/pyproject.toml
new file mode 100644
index 00000000000..1f59509ae69
--- /dev/null
+++ b/codegen/opentelemetry-codegen-json/pyproject.toml
@@ -0,0 +1,49 @@
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[project]
+name = "opentelemetry-codegen-json"
+dynamic = ["version"]
+description = "Protobuf plugin to generate JSON serializers and deserializers for OpenTelemetry protobuf messages"
+readme = "README.rst"
+license = "Apache-2.0"
+requires-python = ">=3.9"
+authors = [
+ { name = "OpenTelemetry Authors", email = "cncf-opentelemetry-contributors@lists.cncf.io" },
+]
+classifiers = [
+ "Development Status :: 4 - Beta",
+ "Framework :: OpenTelemetry",
+ "Intended Audience :: Developers",
+ "Programming Language :: Python",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.9",
+ "Programming Language :: Python :: 3.10",
+ "Programming Language :: Python :: 3.11",
+ "Programming Language :: Python :: 3.12",
+ "Programming Language :: Python :: 3.13",
+ "Programming Language :: Python :: 3.14",
+]
+dependencies = [
+ "protobuf>=4.25.3",
+ "types-protobuf>=4.24",
+]
+
+[project.scripts]
+protoc-gen-otlp_json = "opentelemetry.codegen.json.plugin:main"
+
+[project.urls]
+Homepage = "https://github.com/open-telemetry/opentelemetry-python/codegen/opentelemetry-codegen-proto-json"
+Repository = "https://github.com/open-telemetry/opentelemetry-python"
+
+[tool.hatch.version]
+path = "src/opentelemetry/codegen/json/version/__init__.py"
+
+[tool.hatch.build.targets.sdist]
+include = [
+ "/src",
+]
+
+[tool.hatch.build.targets.wheel]
+packages = ["src/opentelemetry"]
diff --git a/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/__init__.py b/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/__init__.py b/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/analyzer.py b/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/analyzer.py
new file mode 100644
index 00000000000..264776b8be2
--- /dev/null
+++ b/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/analyzer.py
@@ -0,0 +1,336 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+
+import logging
+from collections import defaultdict
+from dataclasses import dataclass
+from typing import Optional
+
+from google.protobuf import descriptor_pb2 as descriptor
+from google.protobuf.compiler import plugin_pb2 as plugin
+
+from opentelemetry.codegen.json.types import (
+ to_json_field_name,
+)
+
+_logger = logging.getLogger(__name__)
+
+
+@dataclass(frozen=True)  # slots=True requires Python 3.10+; this package supports 3.9
+class ProtoType:
+ """Represents a field type with its Python equivalent."""
+
+ proto_type: int
+ is_repeated: bool = False
+ is_optional: bool = False
+ is_message: bool = False
+ is_enum: bool = False
+ type_name: Optional[str] = None # Fully qualified name for messages/enums
+
+
+@dataclass(frozen=True)
+class FieldInfo:
+ """Contains info about a message field."""
+
+ name: str
+ number: int
+ field_type: ProtoType
+ json_name: str
+ default_value: Optional[str] = None
+ oneof_index: Optional[int] = None
+ is_oneof_member: bool = False
+
+
+@dataclass(frozen=True)
+class EnumInfo:
+ """Contains info about an enum."""
+
+ name: str
+ package: str
+ file_name: str
+    values: tuple[tuple[str, int], ...]  # Tuple of (name, number) pairs
+ parent_path: Optional[str] = (
+ None # Full parent class path (e.g. "Span.Event")
+ )
+
+ @property
+ def python_class_path(self) -> str:
+ """Get Python class path (e.g. 'Span.Event.EventType' for nested)."""
+ if self.parent_path:
+ return f"{self.parent_path}.{self.name}"
+ return self.name
+
+ @property
+ def fully_qualified_name(self) -> str:
+ """Get fully qualified proto name."""
+ return f"{self.package}.{self.python_class_path}"
+
+
+@dataclass(frozen=True)
+class MessageInfo:
+ """Contains all info about a protobuf message."""
+
+ name: str
+ package: str
+ file_name: str
+ fields: tuple[FieldInfo, ...]
+ nested_messages: tuple[MessageInfo, ...]
+ nested_enums: tuple[EnumInfo, ...]
+ parent_path: Optional[str] = (
+ None # Full parent class path (e.g. "Span.Event")
+ )
+
+ @property
+ def fully_qualified_name(self) -> str:
+ """Full proto package path."""
+ return f"{self.package}.{self.python_class_path}"
+
+ @property
+ def python_class_path(self) -> str:
+ """Path for nested classes in Python (e.g. 'Span.Event.SubEvent')."""
+ if self.parent_path:
+ return f"{self.parent_path}.{self.name}"
+ return self.name
+
+
+class DescriptorAnalyzer:
+ """Analyzes protobuf descriptors and builds structured representation."""
+
+ def __init__(self, request: plugin.CodeGeneratorRequest) -> None:
+ self._request = request
+ self._messages: dict[
+ str, MessageInfo
+ ] = {} # Maps fully_qualified_name -> MessageInfo
+ self._enums: dict[
+ str, EnumInfo
+ ] = {} # Maps fully_qualified_name -> EnumInfo
+ self._file_to_messages: dict[str, list[MessageInfo]] = defaultdict(
+ list
+ ) # Maps proto file -> list of top-level MessageInfo
+ self._file_to_enums: dict[str, list[EnumInfo]] = defaultdict(
+ list
+ ) # Maps proto file -> list of top-level EnumInfo
+ self._file_dependencies: dict[str, list[str]] = defaultdict(
+ list
+ ) # Maps file -> list of imported files
+
+ @property
+ def messages(self) -> dict[str, MessageInfo]:
+ """Get all messages indexed by fully qualified name."""
+ return self._messages
+
+ @property
+ def enums(self) -> dict[str, EnumInfo]:
+ """Get all enums indexed by fully qualified name."""
+ return self._enums
+
+ @property
+ def file_to_messages(self) -> dict[str, list[MessageInfo]]:
+ """Get top level messages for each file."""
+ return self._file_to_messages
+
+ @property
+ def file_to_enums(self) -> dict[str, list[EnumInfo]]:
+ """Get top level enums for each file."""
+ return self._file_to_enums
+
+ @property
+ def file_dependencies(self) -> dict[str, list[str]]:
+ """Get file dependencies."""
+ return self._file_dependencies
+
+ def analyze(self) -> None:
+ """Process all files in the request."""
+ for proto_file in self._request.proto_file:
+ self._analyze_file(proto_file)
+
+ def _analyze_file(
+ self, file_descriptor: descriptor.FileDescriptorProto
+ ) -> None:
+ """Analyze a single proto file."""
+ package = file_descriptor.package
+ file_name = file_descriptor.name
+
+ _logger.debug("Processing file: %s (package: %s)", file_name, package)
+
+ self._file_dependencies[file_name] = list(file_descriptor.dependency)
+
+ self._file_to_enums[file_name].extend(
+ self._analyze_enum(enum_type, package, file_name, parent_path=None)
+ for enum_type in file_descriptor.enum_type
+ )
+ self._file_to_messages[file_name].extend(
+ self._analyze_message(
+ message_type, package, file_name, parent_path=None
+ )
+ for message_type in file_descriptor.message_type
+ )
+
+ def _analyze_message(
+ self,
+ message_desc: descriptor.DescriptorProto,
+ package: str,
+ file_name: str,
+ parent_path: Optional[str] = None,
+ ) -> MessageInfo:
+ """
+ Recursively analyze message and nested types.
+
+ Args:
+ message_desc: The message descriptor
+ package: The proto package name
+ file_name: The proto file name
+ parent_path: Full parent class path for nested messages (e.g. "Span.Event")
+
+ Returns:
+ MessageInfo for this message
+ """
+ # Determine the class path for nested types
+ current_path = (
+ f"{parent_path}.{message_desc.name}"
+ if parent_path
+ else message_desc.name
+ )
+
+ nested_enums = tuple(
+ self._analyze_enum(enum_type, package, file_name, current_path)
+ for enum_type in message_desc.enum_type
+ )
+
+ nested_messages = tuple(
+ self._analyze_message(
+ nested_type, package, file_name, current_path
+ )
+ for nested_type in message_desc.nested_type
+ if not nested_type.options.map_entry # Skip map entry types
+ )
+
+ fields = tuple(
+ self._analyze_field(field_desc)
+ for field_desc in message_desc.field
+ )
+
+ msg_info = MessageInfo(
+ name=message_desc.name,
+ package=package,
+ file_name=file_name,
+ fields=fields,
+ nested_messages=nested_messages,
+ nested_enums=nested_enums,
+ parent_path=parent_path,
+ )
+
+ self._messages[msg_info.fully_qualified_name] = msg_info
+ return msg_info
+
+ def _analyze_field(
+ self,
+ field_desc: descriptor.FieldDescriptorProto,
+ ) -> FieldInfo:
+ """Analyze a single field."""
+ is_repeated = (
+ field_desc.label == descriptor.FieldDescriptorProto.LABEL_REPEATED
+ )
+ is_optional = field_desc.proto3_optional
+ oneof_index = (
+ field_desc.oneof_index
+ if field_desc.HasField("oneof_index")
+ else None
+ )
+
+ # Get JSON name
+ json_name = (
+ field_desc.json_name
+ if field_desc.json_name
+ else to_json_field_name(field_desc.name)
+ )
+
+ is_message = (
+ field_desc.type == descriptor.FieldDescriptorProto.TYPE_MESSAGE
+ )
+ is_enum = field_desc.type == descriptor.FieldDescriptorProto.TYPE_ENUM
+ type_name = (
+ field_desc.type_name.lstrip(".")
+ if field_desc.HasField("type_name")
+ else None
+ )
+
+ proto_type = ProtoType(
+ proto_type=field_desc.type,
+ is_repeated=is_repeated,
+ is_optional=is_optional,
+ is_message=is_message,
+ is_enum=is_enum,
+ type_name=type_name,
+ )
+
+ return FieldInfo(
+ name=field_desc.name,
+ number=field_desc.number,
+ field_type=proto_type,
+ json_name=json_name,
+ oneof_index=oneof_index,
+ is_oneof_member=oneof_index is not None and not is_optional,
+ )
+
+ def _analyze_enum(
+ self,
+ enum_desc: descriptor.EnumDescriptorProto,
+ package: str,
+ file_name: str,
+ parent_path: Optional[str] = None,
+ ) -> EnumInfo:
+ """
+ Analyze an enum.
+
+ Args:
+ enum_desc: The enum descriptor
+ package: The proto package name
+ file_name: The proto file name
+ parent_path: Full parent class path for nested enums (e.g. "Span.Event")
+
+ Returns:
+ EnumInfo for this enum
+ """
+ enum_info = EnumInfo(
+ name=enum_desc.name,
+ package=package,
+ file_name=file_name,
+ values=tuple(
+ (value_desc.name, value_desc.number)
+ for value_desc in enum_desc.value
+ ),
+ parent_path=parent_path,
+ )
+
+ self._enums[enum_info.fully_qualified_name] = enum_info
+ return enum_info
+
+ def get_message_by_name(
+ self, fully_qualified_name: str
+ ) -> Optional[MessageInfo]:
+ """Get message by fully qualified name."""
+ return self._messages.get(fully_qualified_name)
+
+ def get_enum_by_name(
+ self, fully_qualified_name: str
+ ) -> Optional[EnumInfo]:
+ """Get enum by fully qualified name."""
+ return self._enums.get(fully_qualified_name)
+
+ def get_messages_for_file(self, file_name: str) -> list[MessageInfo]:
+ """Get top-level messages for a specific file."""
+ return self._file_to_messages.get(file_name, [])
diff --git a/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/generator.py b/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/generator.py
new file mode 100644
index 00000000000..89098b72805
--- /dev/null
+++ b/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/generator.py
@@ -0,0 +1,951 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+
+import logging
+from collections import defaultdict
+from pathlib import Path
+from typing import Callable, Final, Optional
+
+from google.protobuf import descriptor_pb2 as descriptor
+from google.protobuf.compiler import plugin_pb2 as plugin
+
+from opentelemetry.codegen.json.analyzer import (
+ DescriptorAnalyzer,
+ EnumInfo,
+ FieldInfo,
+ MessageInfo,
+ ProtoType,
+)
+from opentelemetry.codegen.json.types import (
+ get_default_value,
+ get_json_allowed_types,
+ get_python_type,
+ is_bytes_type,
+ is_hex_encoded_field,
+ is_int64_type,
+)
+from opentelemetry.codegen.json.version import __version__ as GENERATOR_VERSION
+from opentelemetry.codegen.json.writer import CodeWriter
+
+_logger = logging.getLogger(__name__)
+
+UTILS_MODULE_NAME: Final[str] = "_otlp_json_utils"
+
+
+class OtlpJsonGenerator:
+ """
+ Generates Python dataclasses and JSON serialization/deserialization code
+ from protobuf descriptors.
+ """
+
+ def __init__(
+ self,
+ analyzer: DescriptorAnalyzer,
+ package_transform: Callable[[str], str],
+ version: str,
+ ) -> None:
+ """
+ Initialize the generator.
+
+ Args:
+ analyzer: Analyzed descriptor information
+ package_transform: A callable that transforms the proto file path.
+ version: Version string for the generated code.
+ """
+ self._analyzer = analyzer
+ self._package_transform = package_transform
+ self._version = version
+ self._generated_files: dict[str, str] = {}
+ self._common_root: str = ""
+
+ def generate_all(self) -> dict[str, str]:
+ """
+ Generate Python code for all proto files and support modules.
+
+ Returns:
+ Dictionary mapping output file paths to generated code
+ """
+ all_proto_files = set(self._analyzer.file_to_messages.keys()) | set(
+ self._analyzer.file_to_enums.keys()
+ )
+
+ file_to_output = {
+ proto_file: self._transform_proto_path(proto_file)
+ for proto_file in all_proto_files
+ if self._analyzer.file_to_messages.get(proto_file)
+ or self._analyzer.file_to_enums.get(proto_file)
+ }
+
+ if not file_to_output:
+ return {}
+
+ self._common_root = self._find_common_root(
+ list(file_to_output.values())
+ )
+
+ for proto_file, output_path in file_to_output.items():
+ messages = self._analyzer.file_to_messages.get(proto_file, [])
+ enums = self._analyzer.file_to_enums.get(proto_file, [])
+ code = self._generate_file(proto_file, messages, enums)
+ self._generated_files[output_path] = code
+
+ utils_path = f"{self._common_root}/{UTILS_MODULE_NAME}.py"
+ self._generated_files[utils_path] = self._load_utils_source()
+
+ version_init_path = f"{self._common_root}/version/__init__.py"
+ version_writer = CodeWriter(indent_size=4)
+ self._generate_header(version_writer)
+ version_writer.writemany(f'__version__ = "{self._version}"', "")
+ self._generated_files[version_init_path] = version_writer.to_string()
+
+ self._ensure_init_files()
+
+ return self._generated_files
+
+ def _load_utils_source(self) -> str:
+ """Load the source code for the utility module from its source file."""
+ utils_src_path = (
+ Path(__file__).parent / "runtime" / "otlp_json_utils.py"
+ )
+ try:
+ return utils_src_path.read_text(encoding="utf-8")
+ except Exception as e:
+ _logger.error(
+ "Failed to load utility module source from %s: %s",
+ utils_src_path,
+ e,
+ )
+ raise RuntimeError(
+ f"Failed to load utility module source from {utils_src_path}"
+ ) from e
+
+ def _find_common_root(self, paths: list[str]) -> str:
+ """Find the longest common directory prefix."""
+ if not paths:
+ return ""
+
+ # Split paths into components
+ split_paths = [p.split("/")[:-1] for p in paths]
+
+ # Find common prefix among components
+ common = []
+ for parts in zip(*split_paths):
+ if all(p == parts[0] for p in parts):
+ common.append(parts[0])
+ else:
+ break
+
+ return "/".join(common)
+
+ def _ensure_init_files(self) -> None:
+ """Ensure every directory in the output contains an __init__.py file."""
+ dirs = set()
+ for path in list(self._generated_files.keys()):
+ p = Path(path)
+ for parent in p.parents:
+ parent_str = str(parent)
+ # Skip '.', root, and the 'opentelemetry' namespace directory
+ if parent_str in (".", "/", "opentelemetry"):
+ continue
+ dirs.add(parent_str)
+
+ for d in dirs:
+ init_path = f"{d}/__init__.py"
+ if init_path not in self._generated_files:
+ self._generated_files[init_path] = ""
+
+ def _get_utils_module_path(self) -> str:
+ """Get the absolute module path for the utility module."""
+ if not self._common_root:
+ return UTILS_MODULE_NAME
+ return f"{self._common_root.replace('/', '.')}.{UTILS_MODULE_NAME}"
+
+ def _transform_proto_path(self, proto_path: str) -> str:
+ """
+ Transform proto file path to output Python file path.
+
+ Example: 'opentelemetry/proto/trace/v1/trace.proto'
+ -> 'opentelemetry/proto_json/trace/v1/trace.py'
+
+ Args:
+ proto_path: Original .proto file path
+
+ Returns:
+ Transformed .py file path
+ """
+ transformed = self._package_transform(proto_path)
+ if transformed.endswith(".proto"):
+ transformed = transformed[:-6] + ".py"
+ return transformed
+
+ def _generate_file(
+ self,
+ proto_file: str,
+ messages: list[MessageInfo],
+ enums: list[EnumInfo],
+ ) -> str:
+ """
+ Generate complete Python file for a proto file.
+
+ Args:
+ proto_file: Original proto file path
+ messages: List of top-level messages in this file
+ enums: List of top-level enums in this file
+
+ Returns:
+ Generated Python code as string
+ """
+ writer = CodeWriter(indent_size=4)
+
+ self._generate_header(writer, proto_file)
+ self._generate_imports(
+ writer, proto_file, self._has_enums(messages, enums)
+ )
+ self._generate_enums_for_file(writer, enums)
+ self._generate_messages_for_file(writer, messages)
+ writer.blank_line()
+
+ return writer.to_string()
+
+ def _generate_header(
+ self, writer: CodeWriter, proto_file: str = ""
+ ) -> None:
+ """Generate file header with license and metadata."""
+ writer.writemany(
+ "# Copyright The OpenTelemetry Authors",
+ "#",
+ '# Licensed under the Apache License, Version 2.0 (the "License");',
+ "# you may not use this file except in compliance with the License.",
+ "# You may obtain a copy of the License at",
+ "#",
+ "# http://www.apache.org/licenses/LICENSE-2.0",
+ "#",
+ "# Unless required by applicable law or agreed to in writing, software",
+ '# distributed under the License is distributed on an "AS IS" BASIS,',
+ "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.",
+ "# See the License for the specific language governing permissions and",
+ "# limitations under the License.",
+ )
+ writer.blank_line()
+ if proto_file:
+ writer.comment(f'AUTO-GENERATED from "{proto_file}"')
+ writer.comment("DO NOT EDIT MANUALLY")
+ writer.blank_line()
+
+ def _generate_imports(
+ self,
+ writer: CodeWriter,
+ proto_file: str,
+ include_enum: bool,
+ ) -> None:
+ """
+ Generate all necessary import statements.
+
+ Args:
+ writer: Code writer instance
+ proto_file: Original proto file path
+ include_enum: Whether to include the enum module import
+ """
+ # Standard library imports
+ writer.writeln("from __future__ import annotations")
+ writer.blank_line()
+
+ std_imports = [
+ "builtins",
+ "dataclasses",
+ "functools",
+ "json",
+ "sys",
+ "typing",
+ ]
+ if include_enum:
+ std_imports.append("enum")
+
+ for module in sorted(std_imports):
+ writer.import_(module)
+
+ writer.blank_line()
+
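+        # dataclasses accept slots=True only on Python 3.10+, so the generated
+        # code falls back to a plain dataclass on older interpreters.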
+ writer.writeln("if sys.version_info >= (3, 10):")
+ with writer.indent():
+ writer.writeln(
+ "_dataclass = functools.partial(dataclasses.dataclass, slots=True)"
+ )
+ writer.writeln("else:")
+ with writer.indent():
+ writer.writeln("_dataclass = dataclasses.dataclass")
+ writer.blank_line()
+
+ # Collect all imports needed
+ imports = self._collect_imports(proto_file)
+
+ # Import the generated utility module
+ utils_module = self._get_utils_module_path()
+ imports.add(f"import {utils_module} as _utils")
+
+ # Generate cross file imports
+ if imports:
+ for import_info in sorted(imports):
+ writer.writeln(import_info)
+ writer.blank_line()
+ writer.blank_line()
+
+ def _get_module_path(self, proto_file: str) -> str:
+ """
+ Convert a proto file path to its transformed Python module path.
+
+ Example: 'opentelemetry/proto/common/v1/common.proto'
+ -> 'opentelemetry.proto_json.common.v1.common'
+
+ Args:
+ proto_file: Original .proto file path
+
+ Returns:
+ Python module path (dot-separated)
+ """
+ transformed = self._transform_proto_path(proto_file)
+ if transformed.endswith(".py"):
+ transformed = transformed[:-3]
+ return transformed.replace("/", ".")
+
+    def _collect_imports(self, proto_file: str) -> set[str]:
+ """
+ Collect all import statements needed for cross file references.
+
+ Args:
+ proto_file: Current proto file path
+
+ Returns:
+ Set of import statement strings
+ """
+ return set(
+ "import " + self._get_module_path(dep_file)
+ for dep_file in self._analyzer.file_dependencies.get(
+ proto_file, []
+ )
+ )
+
+ def _generate_enums_for_file(
+ self,
+ writer: CodeWriter,
+ enums: list[EnumInfo],
+ ) -> None:
+ """
+ Generate all enums for a file (top level and nested).
+
+ Args:
+ writer: Code writer instance
+ enums: List of top level enums
+ """
+ for enum_info in enums:
+ self._generate_enum_class(writer, enum_info)
+ writer.blank_line()
+
+ def _generate_messages_for_file(
+ self, writer: CodeWriter, messages: list[MessageInfo]
+ ) -> None:
+ """
+ Generate all message classes for a file.
+
+ Args:
+ writer: Code writer instance
+ messages: List of top level messages
+ """
+ for i, message in enumerate(messages):
+ if i:
+ writer.blank_line(2)
+
+ self._generate_message_class(writer, message)
+
+ def _generate_message_class(
+ self, writer: CodeWriter, message: MessageInfo
+ ) -> None:
+ """
+ Generate a complete dataclass for a protobuf message.
+
+ Args:
+ writer: Code writer instance
+ message: Message information
+ """
+ with writer.dataclass(
+ message.name,
+ frozen=False,
+ slots=False,
+ decorators=("typing.final",),
+ decorator_name="_dataclass",
+ ):
+ if (
+ message.fields
+ or message.nested_messages
+ or message.nested_enums
+ ):
+ writer.docstring(
+ [f"Generated from protobuf message {message.name}"]
+ )
+ writer.blank_line()
+
+ for enum_info in message.nested_enums:
+ self._generate_enum_class(writer, enum_info)
+ writer.blank_line()
+
+ for nested_msg in message.nested_messages:
+ self._generate_message_class(writer, nested_msg)
+ writer.blank_line()
+
+ if message.fields:
+ for field_info in message.fields:
+ self._generate_field(writer, field_info, message)
+ else:
+ writer.pass_()
+
+ writer.blank_line()
+ self._generate_to_dict(writer, message)
+ writer.blank_line()
+ self._generate_to_json(writer, message)
+ writer.blank_line()
+ self._generate_from_dict(writer, message)
+ writer.blank_line()
+ self._generate_from_json(writer, message)
+
+ def _generate_to_dict(
+ self, writer: CodeWriter, message: MessageInfo
+ ) -> None:
+ """Generate to_dict() method."""
+ with writer.method(
+ "to_dict",
+ ["self"],
+ return_type="builtins.dict[builtins.str, typing.Any]",
+ ):
+ writer.docstring(
+ [
+ "Convert this message to a dictionary with lowerCamelCase keys.",
+ "",
+ "Returns:",
+ " Dictionary representation following OTLP JSON encoding",
+ ]
+ )
+ writer.writeln("_result = {}")
+
+ # Separate fields into oneof groups and standalone fields
+ oneof_groups: dict[int, list[FieldInfo]] = defaultdict(list)
+ standalone_fields: list[FieldInfo] = []
+
+ for field in message.fields:
+ if field.is_oneof_member and field.oneof_index is not None:
+ oneof_groups[field.oneof_index].append(field)
+ else:
+ standalone_fields.append(field)
+
+ # Handle standalone fields
+ for field in standalone_fields:
+ field_type = field.field_type
+ if field_type.is_repeated:
+ item_expr = self._get_serialization_expr(
+ field_type, field.name, "_v"
+ )
+ with writer.if_(f"self.{field.name}"):
+ if item_expr == "_v":
+ writer.writeln(
+ f'_result["{field.json_name}"] = self.{field.name}'
+ )
+ else:
+ writer.writeln(
+ f'_result["{field.json_name}"] = _utils.serialize_repeated('
+ f"self.{field.name}, lambda _v: {item_expr})"
+ )
+ else:
+ val_expr = self._get_serialization_expr(
+ field_type, field.name, f"self.{field.name}"
+ )
+ check = f"self.{field.name}"
+
+ with writer.if_(check):
+ writer.writeln(
+ f'_result["{field.json_name}"] = {val_expr}'
+ )
+
+ # Handle oneof groups
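+            # At most one member of a oneof group is set at a time, so the
+            # members are serialized as a single if/elif chain.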
+ for group_index in sorted(oneof_groups.keys()):
+ group_fields = oneof_groups[group_index]
+ for i, field in enumerate(reversed(group_fields)):
+ field_type = field.field_type
+ condition = f"self.{field.name} is not None"
+ context = (
+ writer.elif_(condition) if i else writer.if_(condition)
+ )
+
+ with context:
+ val_expr = self._get_serialization_expr(
+ field_type, field.name, f"self.{field.name}"
+ )
+ writer.writeln(
+ f'_result["{field.json_name}"] = {val_expr}'
+ )
+
+ writer.return_("_result")
+
+ def _generate_to_json(
+ self, writer: CodeWriter, message: MessageInfo
+ ) -> None:
+ """Generate to_json() method."""
+ with writer.method("to_json", ["self"], return_type="builtins.str"):
+ writer.docstring(
+ [
+ "Serialize this message to a JSON string.",
+ "",
+ "Returns:",
+ " JSON string",
+ ]
+ )
+ writer.return_("json.dumps(self.to_dict())")
+
+ def _get_serialization_expr(
+ self, field_type: ProtoType, field_name: str, var_name: str
+ ) -> str:
+ """Get the Python expression to serialize a value of a given type."""
+ if field_type.is_message:
+ return f"{var_name}.to_dict()"
+ if field_type.is_enum:
+ return f"builtins.int({var_name})"
+ if is_hex_encoded_field(field_name):
+ return f"_utils.encode_hex({var_name})"
+ if is_int64_type(field_type.proto_type):
+ return f"_utils.encode_int64({var_name})"
+ if is_bytes_type(field_type.proto_type):
+ return f"_utils.encode_base64({var_name})"
+ if field_type.proto_type in (
+ descriptor.FieldDescriptorProto.TYPE_FLOAT,
+ descriptor.FieldDescriptorProto.TYPE_DOUBLE,
+ ):
+ return f"_utils.encode_float({var_name})"
+
+ return var_name
+
+ def _generate_from_dict(
+ self, writer: CodeWriter, message: MessageInfo
+ ) -> None:
+ """Generate from_dict() class method."""
+ with writer.method(
+ "from_dict",
+ ["cls", "data: builtins.dict[builtins.str, typing.Any]"],
+ decorators=["builtins.classmethod"],
+ return_type=f'"{message.python_class_path}"',
+ ):
+ writer.docstring(
+ [
+ "Create from a dictionary with lowerCamelCase keys.",
+ "",
+ "Args:",
+ " data: Dictionary representation following OTLP JSON encoding",
+ "",
+ "Returns:",
+ f" {message.name} instance",
+ ]
+ )
+ writer.writeln('_utils.validate_type(data, builtins.dict, "data")')
+ writer.writeln("_args = {}")
+ writer.blank_line()
+
+ # Separate fields into oneof groups and standalone fields
+ oneof_groups: dict[int, list[FieldInfo]] = defaultdict(list)
+ standalone_fields: list[FieldInfo] = []
+
+ for field in message.fields:
+ if field.is_oneof_member and field.oneof_index is not None:
+ oneof_groups[field.oneof_index].append(field)
+ else:
+ standalone_fields.append(field)
+
+ # Handle standalone fields
+ for field in standalone_fields:
+ field_type = field.field_type
+ with writer.if_(
+ f'(_value := data.get("{field.json_name}")) is not None'
+ ):
+ if field_type.is_repeated:
+ item_expr = self._get_deserialization_expr(
+ field_type, field.name, "_v", message
+ )
+ writer.writeln(
+ f'_args["{field.name}"] = _utils.deserialize_repeated('
+ f'_value, lambda _v: {item_expr}, "{field.name}")'
+ )
+ else:
+ self._generate_deserialization_statements(
+ writer, field, "_value", message, "_args"
+ )
+
+ # Handle oneof groups
+ for group_index in sorted(oneof_groups.keys()):
+ group_fields = oneof_groups[group_index]
+ for i, field in enumerate(reversed(group_fields)):
+ condition = f'(_value := data.get("{field.json_name}")) is not None'
+ context = (
+ writer.elif_(condition) if i else writer.if_(condition)
+ )
+
+ with context:
+ self._generate_deserialization_statements(
+ writer, field, "_value", message, "_args"
+ )
+
+ writer.blank_line()
+ writer.return_("cls(**_args)")
+
+ def _generate_from_json(
+ self, writer: CodeWriter, message: MessageInfo
+ ) -> None:
+ """Generate from_json() class method."""
+ with writer.method(
+ "from_json",
+ ["cls", "data: typing.Union[builtins.str, builtins.bytes]"],
+ decorators=["builtins.classmethod"],
+ return_type=f'"{message.python_class_path}"',
+ ):
+ writer.docstring(
+ [
+ "Deserialize from a JSON string or bytes.",
+ "",
+ "Args:",
+ " data: JSON string or bytes",
+ "",
+ "Returns:",
+ " Instance of the class",
+ ]
+ )
+ writer.return_("cls.from_dict(json.loads(data))")
+
+ def _generate_deserialization_statements(
+ self,
+ writer: CodeWriter,
+ field: FieldInfo,
+ var_name: str,
+ message: MessageInfo,
+ target_dict: str,
+ ) -> None:
+ """Generate validation and assignment statements for a field."""
+ field_type = field.field_type
+ if field_type.is_message and (type_name := field_type.type_name):
+ msg_type = self._resolve_message_type(type_name, message)
+ writer.writeln(
+ f'{target_dict}["{field.name}"] = {msg_type}.from_dict({var_name})'
+ )
+ elif field_type.is_enum and (type_name := field_type.type_name):
+ enum_type = self._resolve_enum_type(type_name, message)
+ writer.writeln(
+ f'_utils.validate_type({var_name}, builtins.int, "{field.name}")'
+ )
+ writer.writeln(
+ f'{target_dict}["{field.name}"] = {enum_type}({var_name})'
+ )
+ elif is_hex_encoded_field(field.name):
+ writer.writeln(
+ f'{target_dict}["{field.name}"] = _utils.decode_hex({var_name}, "{field.name}")'
+ )
+ elif is_int64_type(field_type.proto_type):
+ writer.writeln(
+ f'{target_dict}["{field.name}"] = _utils.parse_int64({var_name}, "{field.name}")'
+ )
+ elif is_bytes_type(field_type.proto_type):
+ writer.writeln(
+ f'{target_dict}["{field.name}"] = _utils.decode_base64({var_name}, "{field.name}")'
+ )
+ elif field_type.proto_type in (
+ descriptor.FieldDescriptorProto.TYPE_FLOAT,
+ descriptor.FieldDescriptorProto.TYPE_DOUBLE,
+ ):
+ writer.writeln(
+ f'{target_dict}["{field.name}"] = _utils.parse_float({var_name}, "{field.name}")'
+ )
+ else:
+ allowed_types = get_json_allowed_types(
+ field_type.proto_type, field.name
+ )
+ writer.writeln(
+ f'_utils.validate_type({var_name}, {allowed_types}, "{field.name}")'
+ )
+ writer.writeln(f'{target_dict}["{field.name}"] = {var_name}')
+
+ def _get_deserialization_expr(
+ self,
+ field_type: ProtoType,
+ field_name: str,
+ var_name: str,
+ context: MessageInfo,
+ ) -> str:
+ """Get the Python expression to deserialize a value of a given type."""
+ if field_type.is_message and (type_name := field_type.type_name):
+ msg_type = self._resolve_message_type(type_name, context)
+ return f"{msg_type}.from_dict({var_name})"
+ if field_type.is_enum and (type_name := field_type.type_name):
+ enum_type = self._resolve_enum_type(type_name, context)
+ return f"{enum_type}({var_name})"
+ if is_hex_encoded_field(field_name):
+ return f'_utils.decode_hex({var_name}, "{field_name}")'
+ if is_int64_type(field_type.proto_type):
+ return f'_utils.parse_int64({var_name}, "{field_name}")'
+ if is_bytes_type(field_type.proto_type):
+ return f'_utils.decode_base64({var_name}, "{field_name}")'
+ if field_type.proto_type in (
+ descriptor.FieldDescriptorProto.TYPE_FLOAT,
+ descriptor.FieldDescriptorProto.TYPE_DOUBLE,
+ ):
+ return f'_utils.parse_float({var_name}, "{field_name}")'
+
+ return var_name
+
+ def _generate_enum_class(
+ self, writer: CodeWriter, enum_info: EnumInfo
+ ) -> None:
+ """
+ Generate an IntEnum class for a protobuf enum.
+
+ Args:
+ writer: Code writer instance
+ enum_info: Enum information
+ """
+        with writer.enum(
+            enum_info.name,
+            enum_type="enum.IntEnum",
+            decorators=("typing.final",),
+        ):
+ writer.docstring(
+ [f"Generated from protobuf enum {enum_info.name}"]
+ )
+ writer.blank_line()
+
+ if enum_info.values:
+ for name, number in enum_info.values:
+ writer.enum_member(name, number)
+ else:
+ writer.pass_()
+
+ def _generate_field(
+ self,
+ writer: CodeWriter,
+ field_info: FieldInfo,
+ parent_message: MessageInfo,
+ ) -> None:
+ """
+ Generate a dataclass field.
+
+ Args:
+ writer: Code writer instance
+ field_info: Field information
+ parent_message: Parent message (for context)
+ """
+ type_hint = self._get_field_type_hint(field_info, parent_message)
+ writer.field(
+ field_info.name,
+ type_hint,
+ default=self._get_field_default(field_info),
+ )
+
+ def _get_field_type_hint(
+ self, field_info: FieldInfo, parent_message: MessageInfo
+ ) -> str:
+ """
+ Get the Python type hint for a field.
+
+ Args:
+ field_info: Field information
+ parent_message: Parent message (for resolving nested types)
+
+ Returns:
+ Python type hint string
+ """
+ field_type = field_info.field_type
+
+ if field_type.is_message and (type_name := field_type.type_name):
+ base_type = self._resolve_message_type(type_name, parent_message)
+ elif field_type.is_enum and (type_name := field_type.type_name):
+ base_type = self._resolve_enum_type(type_name, parent_message)
+ else:
+ base_type = get_python_type(field_type.proto_type)
+
+ if field_type.is_repeated:
+ return f"builtins.list[{base_type}]"
+ if field_type.is_enum:
+ return f"typing.Union[{base_type}, builtins.int, None]"
+ return f"typing.Optional[{base_type}]"
+
+ def _resolve_message_type(
+ self, fully_qualified_name: str, context_message: MessageInfo
+ ) -> str:
+ """
+ Resolve a message type name to its Python class path.
+
+ Args:
+ fully_qualified_name: Fully qualified proto name (e.g. 'package.Message')
+ context_message: Current message (for resolving nested types)
+
+ Returns:
+ Python class reference (e.g. 'Message' or 'ParentMessage.NestedMessage')
+ """
+ # Look up the message in the analyzer
+ message_info = self._analyzer.get_message_by_name(fully_qualified_name)
+
+ if message_info is None:
+ _logger.warning(
+ "Could not resolve message type: %s", fully_qualified_name
+ )
+ return "typing.Any"
+
+ # If in same file, use relative class path
+ if message_info.file_name == context_message.file_name:
+ return message_info.python_class_path
+ # Cross file reference - use fully qualified module + class path
+ module_path = self._get_module_path(message_info.file_name)
+ return f"{module_path}.{message_info.python_class_path}"
+
+ def _resolve_enum_type(
+ self, fully_qualified_name: str, context_message: MessageInfo
+ ) -> str:
+ """
+ Resolve an enum type name to its Python class path.
+
+ Args:
+ fully_qualified_name: Fully qualified proto name
+ context_message: Current message (for resolving nested types)
+
+ Returns:
+ Python class reference
+ """
+ enum_info = self._analyzer.get_enum_by_name(fully_qualified_name)
+
+ if enum_info is None:
+ _logger.warning(
+ "Could not resolve enum type: %s", fully_qualified_name
+ )
+ return "builtins.int"
+
+ # If in same file, use relative class path
+ if enum_info.file_name == context_message.file_name:
+ return enum_info.python_class_path
+ # Cross file reference - use fully qualified module + class path
+ module_path = self._get_module_path(enum_info.file_name)
+ return f"{module_path}.{enum_info.python_class_path}"
+
+ def _get_field_default(self, field_info: FieldInfo) -> Optional[str]:
+ """
+ Get the default value for a field.
+
+ Args:
+ field_info: Field information
+
+ Returns:
+ Default value string or None
+ """
+ field_type = field_info.field_type
+
+ # Repeated fields default to empty list
+ if field_type.is_repeated:
+ return "dataclasses.field(default_factory=builtins.list)"
+
+ # Optional fields, Message types, and oneof members default to None
+ if (
+ field_type.is_optional
+ or field_type.is_message
+ or field_info.is_oneof_member
+ ):
+ return "None"
+
+ # Enum types default to 0
+ if field_type.is_enum:
+ return "0"
+
+ # Primitive types use proto defaults
+ return get_default_value(field_type.proto_type)
+
+ def _has_enums(
+ self, messages: list[MessageInfo], enums: list[EnumInfo]
+ ) -> bool:
+ """
+ Recursively check if there are any enums defined in the file.
+ """
+ if enums:
+ return True
+ for message in messages:
+ if message.nested_enums:
+ return True
+ if self._has_enums(list(message.nested_messages), []):
+ return True
+ return False
+
+
+def generate_code(
+ request: plugin.CodeGeneratorRequest,
+ package_transform: Callable[[str], str] = lambda p: p.replace(
+ "opentelemetry/proto/", "opentelemetry/proto_json/"
+ ),
+) -> dict[str, str]:
+ """
+ Main entry point for code generation.
+
+ Args:
+ request: Protobuf compiler plugin request
+        package_transform: Callable that transforms proto file paths
+
+ Returns:
+ Dictionary mapping output file paths to generated code
+ """
+ analyzer = DescriptorAnalyzer(request)
+ analyzer.analyze()
+
+ generator = OtlpJsonGenerator(
+ analyzer, package_transform, version=GENERATOR_VERSION
+ )
+ return generator.generate_all()
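+
+
+# Illustrative sketch (not emitted verbatim): for a proto message
+# `Foo { string name = 1; }` the generator produces approximately:
+#
+#     @typing.final
+#     @_dataclass
+#     class Foo:
+#         name: typing.Optional[builtins.str] = ""
+#
+#         def to_dict(self) -> builtins.dict[builtins.str, typing.Any]: ...
+#
+#         @builtins.classmethod
+#         def from_dict(cls, data): ...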
+
+
+def generate_plugin_response(
+ request: plugin.CodeGeneratorRequest,
+ package_transform: Callable[[str], str] = lambda p: p.replace(
+ "opentelemetry/proto/", "opentelemetry/proto_json/"
+ ),
+) -> plugin.CodeGeneratorResponse:
+ """
+ Generate plugin response with all generated files.
+
+ Args:
+ request: Protobuf compiler plugin request
+        package_transform: Callable that transforms proto file paths
+
+ Returns:
+ Plugin response with generated files
+ """
+ response = plugin.CodeGeneratorResponse()
+
+ # Declare support for optional proto3 fields
+ response.supported_features |= (
+ plugin.CodeGeneratorResponse.FEATURE_PROTO3_OPTIONAL
+ )
+ response.supported_features |= (
+ plugin.CodeGeneratorResponse.FEATURE_SUPPORTS_EDITIONS
+ )
+
+ response.minimum_edition = descriptor.EDITION_LEGACY
+ response.maximum_edition = descriptor.EDITION_2024
+
+ # Generate code
+ generated_files = generate_code(request, package_transform)
+
+ # Create response files
+ for output_path, code in generated_files.items():
+ file_response = response.file.add()
+ file_response.name = output_path
+ file_response.content = code
+
+ return response
diff --git a/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/plugin.py b/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/plugin.py
new file mode 100644
index 00000000000..b53ecccaf84
--- /dev/null
+++ b/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/plugin.py
@@ -0,0 +1,65 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import sys
+from collections.abc import Iterator
+from contextlib import contextmanager
+from typing import Tuple
+
+from google.protobuf.compiler import plugin_pb2 as plugin
+
+from opentelemetry.codegen.json.generator import generate_plugin_response
+from opentelemetry.codegen.json.version import __version__
+
+_logger = logging.getLogger(__name__)
+
+
+@contextmanager
+def code_generation() -> Iterator[
+ Tuple[plugin.CodeGeneratorRequest, plugin.CodeGeneratorResponse],
+]:
+ if len(sys.argv) > 1 and sys.argv[1] in ("-V", "--version"):
+ print("opentelemetry-codegen-json " + __version__)
+ sys.exit(0)
+
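+    # protoc hands the plugin a serialized CodeGeneratorRequest on stdin and
+    # expects a serialized CodeGeneratorResponse back on stdout.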
+ data = sys.stdin.buffer.read()
+
+ request = plugin.CodeGeneratorRequest()
+ request.ParseFromString(data)
+
+ response = plugin.CodeGeneratorResponse()
+
+ yield request, response
+
+ output = response.SerializeToString()
+ sys.stdout.buffer.write(output)
+
+
+def main() -> None:
+ with code_generation() as (request, response):
+        generated_response = generate_plugin_response(request)
+
+        # Copy the full response, including supported features, edition
+        # bounds, and generated files, into the response that will be
+        # serialized to stdout.
+        response.CopyFrom(generated_response)
+
+
+if __name__ == "__main__":
+ logging.basicConfig(
+ level=logging.DEBUG,
+ stream=sys.stderr,
+ format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
+ )
+ main()
diff --git a/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/runtime/__init__.py b/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/runtime/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/runtime/otlp_json_utils.py b/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/runtime/otlp_json_utils.py
new file mode 100644
index 00000000000..833245a3453
--- /dev/null
+++ b/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/runtime/otlp_json_utils.py
@@ -0,0 +1,147 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import base64
+import math
+from typing import Any, Callable, List, Optional, TypeVar, Union
+
+T = TypeVar("T")
+
+
+def encode_hex(value: bytes) -> str:
+ """
+ Encode bytes as hex string.
+ Used for trace_id and span_id per OTLP spec.
+ """
+ return value.hex() if value else ""
+
+
+def encode_base64(value: bytes) -> str:
+ """
+ Encode bytes as base64 string.
+ Standard Proto3 JSON mapping for bytes.
+ """
+ return base64.b64encode(value).decode("utf-8") if value else ""
+
+
+def encode_int64(value: int) -> str:
+ """
+ Encode 64 bit integers as strings.
+ Required for int64, uint64, fixed64, sfixed64 and sint64 per Proto3 JSON spec.
+ """
+ return str(value)
+
+
+def encode_float(value: float) -> Union[float, str]:
+ """
+ Encode float/double values.
+ """
+ if math.isnan(value):
+ return "NaN"
+ if math.isinf(value):
+ return "Infinity" if value > 0 else "-Infinity"
+ return value
+
+
+def serialize_repeated(
+ values: List[Any], map_fn: Callable[[Any], Any]
+) -> List[Any]:
+ """Helper to serialize repeated fields."""
+ return [map_fn(v) for v in values] if values else []
+
+
+def validate_type(
+ value: Any, expected_types: Union[type, tuple[type, ...]], field_name: str
+) -> None:
+ """
+ Validate that a value is of the expected type(s).
+ Raises TypeError if validation fails.
+ """
+ if not isinstance(value, expected_types):
+ raise TypeError(
+ f"Field '{field_name}' expected {expected_types}, "
+ f"got {type(value).__name__}"
+ )
+
+
+def decode_hex(value: Optional[str], field_name: str) -> bytes:
+ """Decode hex string to bytes."""
+ if not value:
+ return b""
+ validate_type(value, str, field_name)
+ try:
+ return bytes.fromhex(value)
+ except ValueError as e:
+ raise ValueError(
+ f"Invalid hex string for field '{field_name}': {e}"
+ ) from None
+
+
+def decode_base64(value: Optional[str], field_name: str) -> bytes:
+ """Decode base64 string to bytes."""
+ if not value:
+ return b""
+ validate_type(value, str, field_name)
+ try:
+ return base64.b64decode(value)
+ except Exception as e:
+ raise ValueError(
+ f"Invalid base64 string for field '{field_name}': {e}"
+ ) from None
+
+
+def parse_int64(value: Optional[Union[int, str]], field_name: str) -> int:
+ """Parse 64-bit integer from string or number."""
+ if value is None:
+ return 0
+ validate_type(value, (int, str), field_name)
+ try:
+ return int(value)
+ except (ValueError, TypeError):
+ raise ValueError(
+ f"Invalid int64 value for field '{field_name}': {value}"
+ ) from None
+
+
+def parse_float(
+ value: Optional[Union[float, int, str]], field_name: str
+) -> float:
+ """Parse float/double from number or special string."""
+ if value is None:
+ return 0.0
+ validate_type(value, (float, int, str), field_name)
+ if value == "NaN":
+ return math.nan
+ if value == "Infinity":
+ return math.inf
+ if value == "-Infinity":
+ return -math.inf
+ try:
+ return float(value)
+ except (ValueError, TypeError):
+ raise ValueError(
+ f"Invalid float value for field '{field_name}': {value}"
+ ) from None
+
+
+def deserialize_repeated(
+ values: Optional[List[Any]],
+ item_parser: Callable[[Any], T],
+ field_name: str,
+) -> List[T]:
+ """Helper to deserialize repeated fields."""
+ if values is None:
+ return []
+ validate_type(values, list, field_name)
+ return [item_parser(v) for v in values]
diff --git a/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/types.py b/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/types.py
new file mode 100644
index 00000000000..0e5902d1951
--- /dev/null
+++ b/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/types.py
@@ -0,0 +1,134 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import Final
+
+from google.protobuf import descriptor_pb2 as descriptor
+
+PROTO_TO_PYTHON: Final[dict[int, str]] = {
+ descriptor.FieldDescriptorProto.TYPE_DOUBLE: "builtins.float",
+ descriptor.FieldDescriptorProto.TYPE_FLOAT: "builtins.float",
+ descriptor.FieldDescriptorProto.TYPE_INT64: "builtins.int",
+ descriptor.FieldDescriptorProto.TYPE_UINT64: "builtins.int",
+ descriptor.FieldDescriptorProto.TYPE_INT32: "builtins.int",
+ descriptor.FieldDescriptorProto.TYPE_FIXED64: "builtins.int",
+ descriptor.FieldDescriptorProto.TYPE_FIXED32: "builtins.int",
+ descriptor.FieldDescriptorProto.TYPE_BOOL: "builtins.bool",
+ descriptor.FieldDescriptorProto.TYPE_STRING: "builtins.str",
+ descriptor.FieldDescriptorProto.TYPE_BYTES: "builtins.bytes",
+ descriptor.FieldDescriptorProto.TYPE_UINT32: "builtins.int",
+ descriptor.FieldDescriptorProto.TYPE_SFIXED32: "builtins.int",
+ descriptor.FieldDescriptorProto.TYPE_SFIXED64: "builtins.int",
+ descriptor.FieldDescriptorProto.TYPE_SINT32: "builtins.int",
+ descriptor.FieldDescriptorProto.TYPE_SINT64: "builtins.int",
+}
+
+PROTO_DEFAULTS: Final[dict[int, str]] = {
+ descriptor.FieldDescriptorProto.TYPE_DOUBLE: "0.0",
+ descriptor.FieldDescriptorProto.TYPE_FLOAT: "0.0",
+ descriptor.FieldDescriptorProto.TYPE_INT64: "0",
+ descriptor.FieldDescriptorProto.TYPE_UINT64: "0",
+ descriptor.FieldDescriptorProto.TYPE_INT32: "0",
+ descriptor.FieldDescriptorProto.TYPE_FIXED64: "0",
+ descriptor.FieldDescriptorProto.TYPE_FIXED32: "0",
+ descriptor.FieldDescriptorProto.TYPE_BOOL: "False",
+ descriptor.FieldDescriptorProto.TYPE_STRING: '""',
+ descriptor.FieldDescriptorProto.TYPE_BYTES: 'b""',
+ descriptor.FieldDescriptorProto.TYPE_UINT32: "0",
+ descriptor.FieldDescriptorProto.TYPE_SFIXED32: "0",
+ descriptor.FieldDescriptorProto.TYPE_SFIXED64: "0",
+ descriptor.FieldDescriptorProto.TYPE_SINT32: "0",
+ descriptor.FieldDescriptorProto.TYPE_SINT64: "0",
+}
+
+INT64_TYPES: Final[set[int]] = {
+ descriptor.FieldDescriptorProto.TYPE_INT64,
+ descriptor.FieldDescriptorProto.TYPE_UINT64,
+ descriptor.FieldDescriptorProto.TYPE_FIXED64,
+ descriptor.FieldDescriptorProto.TYPE_SFIXED64,
+ descriptor.FieldDescriptorProto.TYPE_SINT64,
+}
+
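+# The OTLP JSON encoding represents trace and span identifiers as hex strings,
+# deviating from the standard proto3 JSON mapping of bytes to base64.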
+HEX_ENCODED_FIELDS: Final[set[str]] = {
+ "trace_id",
+ "span_id",
+ "parent_span_id",
+}
+
+
+def get_python_type(proto_type: int) -> str:
+ """Get Python type for a protobuf field type."""
+ return PROTO_TO_PYTHON.get(proto_type, "typing.Any")
+
+
+def get_default_value(proto_type: int) -> str:
+ """Get default value for a protobuf field type."""
+ return PROTO_DEFAULTS.get(proto_type, "None")
+
+
+def is_int64_type(proto_type: int) -> bool:
+ """Check if type is a 64-bit integer requiring string serialization."""
+ return proto_type in INT64_TYPES
+
+
+def is_bytes_type(proto_type: int) -> bool:
+ """Check if type is bytes."""
+ return proto_type == descriptor.FieldDescriptorProto.TYPE_BYTES
+
+
+def is_hex_encoded_field(field_name: str) -> bool:
+ """Check if this is a trace/span ID field requiring hex encoding."""
+ return field_name in HEX_ENCODED_FIELDS
+
+
+def to_json_field_name(snake_name: str) -> str:
+ """Convert snake_case field name to lowerCamelCase JSON name."""
+ components = snake_name.split("_")
+    return components[0] + "".join(x.capitalize() for x in components[1:])
+
+
+def is_numeric_type(proto_type: int) -> bool:
+ """Check if type is numeric (int or float)."""
+ if proto_type in INT64_TYPES:
+ return True
+ return proto_type in {
+ descriptor.FieldDescriptorProto.TYPE_DOUBLE,
+ descriptor.FieldDescriptorProto.TYPE_FLOAT,
+ descriptor.FieldDescriptorProto.TYPE_INT32,
+ descriptor.FieldDescriptorProto.TYPE_FIXED32,
+ descriptor.FieldDescriptorProto.TYPE_UINT32,
+ descriptor.FieldDescriptorProto.TYPE_SFIXED32,
+ descriptor.FieldDescriptorProto.TYPE_SINT32,
+ }
+
+
+def get_json_allowed_types(proto_type: int, field_name: str = "") -> str:
+ """
+ Get the Python type(s) allowed for the JSON representation of a field.
+ Returns a string representation of the type or tuple of types.
+ """
+ if is_hex_encoded_field(field_name):
+ return "builtins.str"
+ if is_int64_type(proto_type):
+ return "(builtins.int, builtins.str)"
+ if is_bytes_type(proto_type):
+ return "builtins.str"
+ if proto_type in (
+ descriptor.FieldDescriptorProto.TYPE_FLOAT,
+ descriptor.FieldDescriptorProto.TYPE_DOUBLE,
+ ):
+ return "(builtins.float, builtins.int, builtins.str)"
+
+    return get_python_type(proto_type)
diff --git a/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/version/__init__.py b/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/version/__init__.py
new file mode 100644
index 00000000000..e8f18d28813
--- /dev/null
+++ b/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/version/__init__.py
@@ -0,0 +1 @@
+__version__ = "0.61b0.dev"
diff --git a/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/writer.py b/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/writer.py
new file mode 100644
index 00000000000..a3535a4eb76
--- /dev/null
+++ b/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/writer.py
@@ -0,0 +1,489 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+
+from collections.abc import Iterable
+from contextlib import contextmanager
+from typing import Any, Generator, Mapping, Optional, Union
+
+
+class CodeWriter:
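+    """Line-oriented helper for emitting indented Python source.
+
+    A hypothetical usage sketch::
+
+        writer = CodeWriter()
+        with writer.class_("Greeter"):
+            writer.write_function("greet", ["self"], 'return "hi"')
+        source = writer.to_string()
+    """
+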
+ def __init__(self, indent_size: int = 4) -> None:
+ self._lines: list[str] = []
+ self._indent_level: int = 0
+ self._indent_size: int = indent_size
+
+ @contextmanager
+ def indent(self) -> Generator[CodeWriter, None, None]:
+ self._indent_level += 1
+ try:
+ yield self
+ finally:
+ self._indent_level -= 1
+
+ def writeln(self, line: str = "") -> CodeWriter:
+ if not line:
+ self._lines.append("")
+ return self
+ indent = " " * (self._indent_level * self._indent_size)
+ self._lines.append(f"{indent}{line}")
+ return self
+
+ def writemany(self, *lines: str) -> CodeWriter:
+ for line in lines:
+ self.writeln(line)
+ return self
+
+ def comment(self, content: Union[str, Iterable[str]]) -> CodeWriter:
+ if isinstance(content, str):
+ self.writeln(f"# {content}")
+ return self
+ for line in content:
+ self.writeln(f"# {line}")
+ return self
+
+ def docstring(self, content: Union[str, Iterable[str]]) -> CodeWriter:
+ if isinstance(content, str):
+ self.writeln(f'"""{content}"""')
+ return self
+ self.writeln('"""')
+ for line in content:
+ self.writeln(line)
+ self.writeln('"""')
+ return self
+
+ def import_(self, module: str, *items: str) -> CodeWriter:
+ if items:
+ self.writeln(f"from {module} import {', '.join(items)}")
+ else:
+ self.writeln(f"import {module}")
+ return self
+
+ @contextmanager
+ def suite(self, header: str) -> Generator[CodeWriter, None, None]:
+ """Write header then indent"""
+ self.writeln(header)
+ with self.indent():
+ yield self
+
+ @contextmanager
+ def class_(
+ self,
+ name: str,
+ bases: Optional[Iterable[str]] = None,
+ decorators: Optional[Iterable[str]] = None,
+ ) -> Generator[CodeWriter, None, None]:
+ """Create a regular class with optional bases and decorators"""
+ if decorators is not None:
+ for dec in decorators:
+ self.writeln(f"@{dec}")
+
+ bases_str = f"({', '.join(bases)})" if bases else ""
+ self.writeln(f"class {name}{bases_str}:")
+
+ with self.indent():
+ yield self
+
+ @contextmanager
+ def dataclass(
+ self,
+ name: str,
+ bases: Optional[Iterable[str]] = None,
+ decorators: Optional[Iterable[str]] = None,
+ frozen: bool = False,
+ slots: bool = False,
+ decorator_name: str = "dataclasses.dataclass",
+ ) -> Generator[CodeWriter, None, None]:
+ """Create a dataclass with optional configuration"""
+ dc_params = []
+ if frozen:
+ dc_params.append("frozen=True")
+ if slots:
+ dc_params.append("slots=True")
+
+ dc_decorator = (
+ f"{decorator_name}({', '.join(dc_params)})"
+ if dc_params
+ else decorator_name
+ )
+
+ all_decorators = []
+ if decorators is not None:
+ all_decorators.extend(decorators)
+ all_decorators.append(dc_decorator)
+
+ for dec in all_decorators:
+ self.writeln(f"@{dec}")
+
+ bases_str = f"({', '.join(bases)})" if bases else ""
+ self.writeln(f"class {name}{bases_str}:")
+
+ with self.indent():
+ yield self
+
+ @contextmanager
+ def enum(
+ self,
+ name: str,
+ enum_type: str = "enum.Enum",
+ bases: Optional[Iterable[str]] = None,
+ decorators: Optional[Iterable[str]] = None,
+ ) -> Generator[CodeWriter, None, None]:
+ """Create an enum"""
+ if decorators is not None:
+ for dec in decorators:
+ self.writeln(f"@{dec}")
+
+ all_bases = [enum_type]
+ if bases is not None:
+ all_bases.extend(bases)
+
+ bases_str = ", ".join(all_bases)
+ self.writeln(f"class {name}({bases_str}):")
+
+ with self.indent():
+ yield self
+
+ def field(
+ self,
+ name: str,
+ type_hint: str,
+ default: Any = None,
+ default_factory: Optional[str] = None,
+ ) -> CodeWriter:
+ """Write a dataclass field"""
+ if default_factory:
+ self.writeln(
+ f"{name}: {type_hint} = dataclasses.field(default_factory={default_factory})"
+ )
+ elif default is not None:
+ self.writeln(f"{name}: {type_hint} = {default}")
+ else:
+ self.writeln(f"{name}: {type_hint}")
+ return self
+
+ def enum_member(self, name: str, value: Any) -> CodeWriter:
+ """Write an enum member"""
+ self.writeln(f"{name} = {value}")
+ return self
+
+ def auto_enum_member(self, name: str) -> CodeWriter:
+ """Write an auto() enum member"""
+ self.writeln(f"{name} = enum.auto()")
+ return self
+
+ @contextmanager
+ def function(
+ self,
+ name: str,
+ params: Union[Iterable[str], str],
+ decorators: Optional[Iterable[str]] = None,
+ return_type: Optional[str] = None,
+ ) -> Generator[CodeWriter, None, None]:
+ """Create a function as a context manager for building the body"""
+ if decorators is not None:
+ for dec in decorators:
+ self.writeln(f"@{dec}")
+
+ params_str = params if isinstance(params, str) else ", ".join(params)
+ return_annotation = f" -> {return_type}" if return_type else ""
+ self.writeln(f"def {name}({params_str}){return_annotation}:")
+
+ with self.indent():
+ yield self
+
+ def write_function(
+ self,
+ name: str,
+ params: Union[Iterable[str], str],
+ body_lines: Union[Iterable[str], str],
+ decorators: Optional[Iterable[str]] = None,
+ return_type: Optional[str] = None,
+ ) -> CodeWriter:
+ """Write a complete function"""
+ with self.function(
+ name, params, decorators=decorators, return_type=return_type
+ ):
+ if isinstance(body_lines, str):
+ self.writeln(body_lines)
+ else:
+ for line in body_lines:
+ self.writeln(line)
+ return self
+
+ @contextmanager
+ def method(
+ self,
+ name: str,
+ params: Union[Iterable[str], str],
+ decorators: Optional[Iterable[str]] = None,
+ return_type: Optional[str] = None,
+ ) -> Generator[CodeWriter, None, None]:
+ """Alias for function() - more semantic for methods in classes"""
+ with self.function(
+ name, params, decorators=decorators, return_type=return_type
+ ):
+ yield self
+
+ def staticmethod_(
+ self,
+ name: str,
+ params: Union[Iterable[str], str],
+ body_lines: Union[Iterable[str], str],
+ return_type: Optional[str] = None,
+ ) -> CodeWriter:
+ return self.write_function(
+ name,
+ params,
+ body_lines,
+ decorators=["builtins.staticmethod"],
+ return_type=return_type,
+ )
+
+ def classmethod_(
+ self,
+ name: str,
+ params: Union[Iterable[str], str],
+ body_lines: Union[Iterable[str], str],
+ return_type: Optional[str] = None,
+ ) -> CodeWriter:
+ return self.write_function(
+ name,
+ params,
+ body_lines,
+ decorators=["builtins.classmethod"],
+ return_type=return_type,
+ )
+
+ @contextmanager
+ def if_(self, condition: str) -> Generator[CodeWriter, None, None]:
+ """Create an if block"""
+ self.writeln(f"if {condition}:")
+ with self.indent():
+ yield self
+
+ @contextmanager
+ def elif_(self, condition: str) -> Generator[CodeWriter, None, None]:
+ """Create an elif block"""
+ self.writeln(f"elif {condition}:")
+ with self.indent():
+ yield self
+
+ @contextmanager
+ def else_(self) -> Generator[CodeWriter, None, None]:
+ """Create an else block"""
+ self.writeln("else:")
+ with self.indent():
+ yield self
+
+ @contextmanager
+ def for_(
+ self, var: str, iterable: str
+ ) -> Generator[CodeWriter, None, None]:
+ """Create a for loop"""
+ self.writeln(f"for {var} in {iterable}:")
+ with self.indent():
+ yield self
+
+ @contextmanager
+ def while_(self, condition: str) -> Generator[CodeWriter, None, None]:
+ """Create a while loop"""
+ self.writeln(f"while {condition}:")
+ with self.indent():
+ yield self
+
+ @contextmanager
+ def try_(self) -> Generator[CodeWriter, None, None]:
+ """Create a try block"""
+ self.writeln("try:")
+ with self.indent():
+ yield self
+
+ @contextmanager
+ def except_(
+ self, exception: Optional[str] = None, as_var: Optional[str] = None
+ ) -> Generator[CodeWriter, None, None]:
+ """Create an except block"""
+ if exception and as_var:
+ self.writeln(f"except {exception} as {as_var}:")
+ elif exception:
+ self.writeln(f"except {exception}:")
+ else:
+ self.writeln("except:")
+ with self.indent():
+ yield self
+
+ @contextmanager
+ def finally_(self) -> Generator[CodeWriter, None, None]:
+ """Create a finally block"""
+ self.writeln("finally:")
+ with self.indent():
+ yield self
+
+ @contextmanager
+ def with_(self, *contexts: str) -> Generator[CodeWriter, None, None]:
+ """Create a with statement"""
+ context_str = ", ".join(contexts)
+ self.writeln(f"with {context_str}:")
+ with self.indent():
+ yield self
+
+ def section(
+ self, title: str, char: str = "=", width: int = 70
+ ) -> CodeWriter:
+ """Create a commented section divider"""
+ self.blank_line()
+ self.comment(char * width)
+ self.comment(f" {title}")
+ self.comment(char * width)
+ self.blank_line()
+ return self
+
+ def module_docstring(self, text: str) -> CodeWriter:
+ """Write a module-level docstring"""
+ self.writeln(f'"""{text}"""')
+ self.blank_line()
+ return self
+
+ def assignment(
+ self, var: str, value: str, type_hint: Optional[str] = None
+ ) -> CodeWriter:
+ """Write a variable assignment"""
+ if type_hint:
+ self.writeln(f"{var}: {type_hint} = {value}")
+ else:
+ self.writeln(f"{var} = {value}")
+ return self
+
+ def return_(self, value: Optional[str] = None) -> CodeWriter:
+ """Write a return statement"""
+ if value:
+ self.writeln(f"return {value}")
+ else:
+ self.writeln("return")
+ return self
+
+ def raise_(
+ self, exception: str, message: Optional[str] = None
+ ) -> CodeWriter:
+ """Write a raise statement"""
+ if message:
+ self.writeln(f"raise {exception}({message!r})")
+ else:
+ self.writeln(f"raise {exception}")
+ return self
+
+ def yield_(self, value: str) -> CodeWriter:
+ """Write a yield statement"""
+ self.writeln(f"yield {value}")
+ return self
+
+ def assert_(
+ self, condition: str, message: Optional[str] = None
+ ) -> CodeWriter:
+ """Write an assert statement"""
+ if message:
+ self.writeln(f"assert {condition}, {message!r}")
+ else:
+ self.writeln(f"assert {condition}")
+ return self
+
+ def pass_(self) -> CodeWriter:
+ """Write a pass statement"""
+ self.writeln("pass")
+ return self
+
+ def break_(self) -> CodeWriter:
+ """Write a break statement"""
+ self.writeln("break")
+ return self
+
+ def continue_(self) -> CodeWriter:
+ """Write a continue statement"""
+ self.writeln("continue")
+ return self
+
+ def generate_init(
+ self, params_with_types: Mapping[str, str]
+ ) -> CodeWriter:
+ """Generate __init__ with automatic assignment"""
+ params = ["self"] + [
+ f"{name}: {type_}" for name, type_ in params_with_types.items()
+ ]
+ body = [f"self.{name} = {name}" for name in params_with_types.keys()]
+ self.write_function("__init__", params, body)
+ return self
+
+ def generate_repr(
+ self, class_name: str, fields: Iterable[str]
+ ) -> CodeWriter:
+ """Generate __repr__ method"""
+ field_strs = ", ".join([f"{f}={{self.{f}!r}}" for f in fields])
+ body = f"return f'{class_name}({field_strs})'"
+ self.write_function(
+ "__repr__", ["self"], body, return_type="builtins.str"
+ )
+ return self
+
+ def generate_eq(self, fields: Iterable[str]) -> CodeWriter:
+ """Generate __eq__ method"""
+ comparisons = " and ".join([f"self.{f} == other.{f}" for f in fields])
+ body = [
+ "if not isinstance(other, self.__class__):",
+ " return False",
+ f"return {comparisons}",
+ ]
+ self.write_function(
+ "__eq__", ["self", "other"], body, return_type="builtins.bool"
+ )
+ return self
+
+ def generate_str(
+ self, class_name: str, fields: Iterable[str]
+ ) -> CodeWriter:
+ """Generate __str__ method"""
+ field_strs = ", ".join([f"{f}={{self.{f}}}" for f in fields])
+ body = f"return f'{class_name}({field_strs})'"
+ self.write_function(
+ "__str__", ["self"], body, return_type="builtins.str"
+ )
+ return self
+
+ def generate_hash(self, fields: Iterable[str]) -> CodeWriter:
+ """Generate __hash__ method"""
+ # Materialize first: a lazy iterable (e.g. a generator) would be
+ # truthy even when it yields nothing, and would also be consumed
+ # by the truthiness check.
+ field_list = list(fields)
+ if not field_list:
+ body = "return builtins.hash(builtins.id(self))"
+ else:
+ field_tuple = ", ".join([f"self.{f}" for f in field_list])
+ body = f"return builtins.hash(({field_tuple}))"
+ self.write_function(
+ "__hash__", ["self"], body, return_type="builtins.int"
+ )
+ return self
+
+ def write_block(self, lines: Iterable[str]) -> CodeWriter:
+ for line in lines:
+ self.writeln(line)
+ return self
+
+ def blank_line(self, count: int = 1) -> CodeWriter:
+ self._lines.extend([""] * count)
+ return self
+
+ def to_string(self) -> str:
+ return "\n".join(self._lines)
+
+ def to_lines(self) -> list[str]:
+ # Return a copy so callers cannot mutate the writer's internal buffer.
+ return list(self._lines)
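
For orientation before the next file, here is a minimal usage sketch of the fluent ``CodeWriter`` API above. The import path is an assumption (this hunk shows the class body but not its module name); every method call matches the definitions in the class.

```python
# Hedged usage sketch for CodeWriter; the module path is an assumption,
# since the hunk does not name the file that holds the class.
from opentelemetry.codegen.json.writer import CodeWriter  # hypothetical path

writer = CodeWriter()
writer.import_("dataclasses")
writer.blank_line()
with writer.dataclass("Point", frozen=True):
    writer.docstring("A 2D point.")
    writer.field("x", "float", default="0.0")
    writer.field("y", "float", default="0.0")
writer.blank_line()
with writer.function("origin", [], return_type="Point"):
    writer.return_("Point()")
print(writer.to_string())
```

Running this prints a frozen ``Point`` dataclass followed by an ``origin()`` factory, with every line indented by the context managers rather than by hand-assembled strings.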
diff --git a/codegen/opentelemetry-codegen-json/tests/__init__.py b/codegen/opentelemetry-codegen-json/tests/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/codegen/opentelemetry-codegen-json/tests/test_writer.py b/codegen/opentelemetry-codegen-json/tests/test_writer.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/exporter/opentelemetry-exporter-otlp-json-common/LICENSE b/exporter/opentelemetry-exporter-otlp-json-common/LICENSE
new file mode 100644
index 00000000000..261eeb9e9f8
--- /dev/null
+++ b/exporter/opentelemetry-exporter-otlp-json-common/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/exporter/opentelemetry-exporter-otlp-json-common/README.rst b/exporter/opentelemetry-exporter-otlp-json-common/README.rst
new file mode 100644
index 00000000000..c3a9edc6c92
--- /dev/null
+++ b/exporter/opentelemetry-exporter-otlp-json-common/README.rst
@@ -0,0 +1,26 @@
+OpenTelemetry JSON Encoding
+===============================
+
+|pypi|
+
+.. |pypi| image:: https://badge.fury.io/py/opentelemetry-exporter-otlp-json-common.svg
+ :target: https://pypi.org/project/opentelemetry-exporter-otlp-json-common/
+
+This library is provided as a convenience to encode telemetry to OTLP JSON. It is currently used by:
+
+* opentelemetry-exporter-otlp-json-http
+
+
+Installation
+------------
+
+::
+
+ pip install opentelemetry-exporter-otlp-json-common
+
+
+References
+----------
+
+* `OpenTelemetry `_
+* `OpenTelemetry Protocol Specification `_
diff --git a/exporter/opentelemetry-exporter-otlp-json-common/pyproject.toml b/exporter/opentelemetry-exporter-otlp-json-common/pyproject.toml
new file mode 100644
index 00000000000..09c710e9006
--- /dev/null
+++ b/exporter/opentelemetry-exporter-otlp-json-common/pyproject.toml
@@ -0,0 +1,48 @@
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[project]
+name = "opentelemetry-exporter-otlp-json-common"
+dynamic = ["version"]
+description = "OpenTelemetry Json encoding"
+readme = "README.rst"
+license = "Apache-2.0"
+requires-python = ">=3.9"
+authors = [
+ { name = "OpenTelemetry Authors", email = "cncf-opentelemetry-contributors@lists.cncf.io" },
+]
+classifiers = [
+ "Development Status :: 5 - Production/Stable",
+ "Framework :: OpenTelemetry",
+ "Framework :: OpenTelemetry :: Exporters",
+ "Intended Audience :: Developers",
+ "Programming Language :: Python",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.9",
+ "Programming Language :: Python :: 3.10",
+ "Programming Language :: Python :: 3.11",
+ "Programming Language :: Python :: 3.12",
+ "Programming Language :: Python :: 3.13",
+ "Programming Language :: Python :: 3.14",
+]
+dependencies = [
+ "opentelemetry-proto-json == 0.61b0.dev",
+ "opentelemetry-sdk ~= 1.40.0.dev",
+]
+
+[project.urls]
+Homepage = "https://github.com/open-telemetry/opentelemetry-python/tree/main/exporter/opentelemetry-exporter-otlp-json-common"
+Repository = "https://github.com/open-telemetry/opentelemetry-python"
+
+[tool.hatch.version]
+path = "src/opentelemetry/exporter/otlp/json/common/version/__init__.py"
+
+[tool.hatch.build.targets.sdist]
+include = [
+ "/src",
+ "/tests",
+]
+
+[tool.hatch.build.targets.wheel]
+packages = ["src/opentelemetry"]
diff --git a/exporter/opentelemetry-exporter-otlp-json-common/src/opentelemetry/exporter/otlp/json/common/__init__.py b/exporter/opentelemetry-exporter-otlp-json-common/src/opentelemetry/exporter/otlp/json/common/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/exporter/opentelemetry-exporter-otlp-json-common/src/opentelemetry/exporter/otlp/json/common/_internal/__init__.py b/exporter/opentelemetry-exporter-otlp-json-common/src/opentelemetry/exporter/otlp/json/common/_internal/__init__.py
new file mode 100644
index 00000000000..fb9dc0ecdbe
--- /dev/null
+++ b/exporter/opentelemetry-exporter-otlp-json-common/src/opentelemetry/exporter/otlp/json/common/_internal/__init__.py
@@ -0,0 +1,176 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from __future__ import annotations
+
+import logging
+from collections.abc import Mapping, Sequence
+from typing import (
+ Any,
+ Callable,
+ Optional,
+ TypeVar,
+)
+
+from opentelemetry.proto_json.common.v1.common import AnyValue as JSONAnyValue
+from opentelemetry.proto_json.common.v1.common import (
+ ArrayValue as JSONArrayValue,
+)
+from opentelemetry.proto_json.common.v1.common import (
+ InstrumentationScope as JSONInstrumentationScope,
+)
+from opentelemetry.proto_json.common.v1.common import KeyValue as JSONKeyValue
+from opentelemetry.proto_json.common.v1.common import (
+ KeyValueList as JSONKeyValueList,
+)
+from opentelemetry.proto_json.resource.v1.resource import (
+ Resource as JSONResource,
+)
+from opentelemetry.sdk.resources import Resource
+from opentelemetry.sdk.util.instrumentation import InstrumentationScope
+from opentelemetry.util.types import _ExtendedAttributes
+
+_logger = logging.getLogger(__name__)
+
+_TypingResourceT = TypeVar("_TypingResourceT")
+_ResourceDataT = TypeVar("_ResourceDataT")
+
+
+def _encode_instrumentation_scope(
+ instrumentation_scope: Optional[InstrumentationScope],
+) -> JSONInstrumentationScope:
+ return (
+ JSONInstrumentationScope(
+ name=instrumentation_scope.name,
+ version=instrumentation_scope.version,
+ attributes=_encode_attributes(instrumentation_scope.attributes),
+ )
+ if instrumentation_scope is not None
+ else JSONInstrumentationScope()
+ )
+
+
+def _encode_resource(resource: Resource) -> JSONResource:
+ return JSONResource(attributes=_encode_attributes(resource.attributes))
+
+
+# pylint: disable-next=too-many-return-statements
+def _encode_value(
+ value: Any, allow_null: bool = False
+) -> Optional[JSONAnyValue]:
+ if allow_null is True and value is None:
+ return None
+ if isinstance(value, bool):
+ return JSONAnyValue(bool_value=value)
+ if isinstance(value, str):
+ return JSONAnyValue(string_value=value)
+ if isinstance(value, int):
+ return JSONAnyValue(int_value=value)
+ if isinstance(value, float):
+ return JSONAnyValue(double_value=value)
+ if isinstance(value, bytes):
+ return JSONAnyValue(bytes_value=value)
+ if isinstance(value, Sequence):
+ return JSONAnyValue(
+ array_value=JSONArrayValue(
+ values=_encode_array(value, allow_null=allow_null)
+ )
+ )
+ if isinstance(value, Mapping):
+ return JSONAnyValue(
+ kvlist_value=JSONKeyValueList(
+ values=[
+ _encode_key_value(str(k), v, allow_null=allow_null)
+ for k, v in value.items()
+ ]
+ )
+ )
+ raise TypeError(f"Invalid type {type(value)} of value {value}")
+
+
+def _encode_key_value(
+ key: str, value: Any, allow_null: bool = False
+) -> JSONKeyValue:
+ return JSONKeyValue(
+ key=key, value=_encode_value(value, allow_null=allow_null)
+ )
+
+
+def _encode_array(
+ array: Sequence[Any], allow_null: bool = False
+) -> list[JSONAnyValue]:
+ if not allow_null:
+ # Let the exception get raised by _encode_value()
+ return [_encode_value(v, allow_null=allow_null) for v in array]
+
+ return [
+ _encode_value(v, allow_null=allow_null)
+ if v is not None
+ # Use an empty AnyValue to represent None in an array. Behavior may change pending
+ # https://github.com/open-telemetry/opentelemetry-specification/issues/4392
+ else JSONAnyValue()
+ for v in array
+ ]
+
+
+def _encode_span_id(span_id: int) -> bytes:
+ return span_id.to_bytes(length=8, byteorder="big", signed=False)
+
+
+def _encode_trace_id(trace_id: int) -> bytes:
+ return trace_id.to_bytes(length=16, byteorder="big", signed=False)
+
+
+def _encode_attributes(
+ attributes: _ExtendedAttributes,
+ allow_null: bool = False,
+) -> Optional[list[JSONKeyValue]]:
+ if not attributes:
+ return None
+ json_attributes = []
+ for key, value in attributes.items():
+ # pylint: disable=broad-exception-caught
+ try:
+ json_attributes.append(
+ _encode_key_value(key, value, allow_null=allow_null)
+ )
+ except Exception as error:
+ _logger.exception("Failed to encode key %s: %s", key, error)
+ return json_attributes
+
+
+def _get_resource_data(
+ sdk_resource_scope_data: dict[Resource, _ResourceDataT],
+ resource_class: Callable[..., _TypingResourceT],
+ name: str,
+) -> list[_TypingResourceT]:
+ resource_data = []
+
+ for (
+ sdk_resource,
+ scope_data,
+ ) in sdk_resource_scope_data.items():
+ json_resource = JSONResource(
+ attributes=_encode_attributes(sdk_resource.attributes)
+ )
+ resource_data.append(
+ resource_class(
+ **{
+ "resource": json_resource,
+ f"scope_{name}": scope_data.values(),
+ }
+ )
+ )
+ return resource_data
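
A short behavior sketch for the helpers above: ``bool`` is tested before ``int`` because ``bool`` is a subclass of ``int`` in Python, ``allow_null=True`` short-circuits ``None`` instead of raising ``TypeError``, and the ID helpers pin OTLP's fixed widths (8-byte span IDs, 16-byte trace IDs). The attribute access on the returned values assumes the generated JSON model exposes plain fields, as the keyword constructors above suggest.

```python
# Behavior sketch for the helpers defined above; assumes this package is
# installed and that the generated model exposes plain attributes.
from opentelemetry.exporter.otlp.json.common._internal import (
    _encode_span_id,
    _encode_trace_id,
    _encode_value,
)

# bool is checked before int, so True becomes bool_value, not int_value
assert _encode_value(True).bool_value is True
assert _encode_value(42).int_value == 42

# allow_null=True maps None to None instead of raising TypeError
assert _encode_value(None, allow_null=True) is None

# fixed OTLP wire widths: 8-byte span IDs, 16-byte trace IDs
assert _encode_span_id(0x00F067AA0BA902B7) == bytes.fromhex("00f067aa0ba902b7")
assert len(_encode_trace_id(1)) == 16
```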
diff --git a/exporter/opentelemetry-exporter-otlp-json-common/src/opentelemetry/exporter/otlp/json/common/_internal/_log_encoder/__init__.py b/exporter/opentelemetry-exporter-otlp-json-common/src/opentelemetry/exporter/otlp/json/common/_internal/_log_encoder/__init__.py
new file mode 100644
index 00000000000..1f133398bfc
--- /dev/null
+++ b/exporter/opentelemetry-exporter-otlp-json-common/src/opentelemetry/exporter/otlp/json/common/_internal/_log_encoder/__init__.py
@@ -0,0 +1,110 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from collections import defaultdict
+from typing import Sequence
+
+from opentelemetry.exporter.otlp.json.common._internal import (
+ _encode_attributes,
+ _encode_instrumentation_scope,
+ _encode_resource,
+ _encode_span_id,
+ _encode_trace_id,
+ _encode_value,
+)
+from opentelemetry.proto_json.collector.logs.v1.logs_service import (
+ ExportLogsServiceRequest as JSONExportLogsServiceRequest,
+)
+from opentelemetry.proto_json.logs.v1.logs import LogRecord as JSONLogRecord
+from opentelemetry.proto_json.logs.v1.logs import (
+ ResourceLogs as JSONResourceLogs,
+)
+from opentelemetry.proto_json.logs.v1.logs import (
+ ScopeLogs as JSONScopeLogs,
+)
+from opentelemetry.sdk._logs import ReadableLogRecord
+
+
+def encode_logs(
+ batch: Sequence[ReadableLogRecord],
+) -> JSONExportLogsServiceRequest:
+ return JSONExportLogsServiceRequest(
+ resource_logs=_encode_resource_logs(batch)
+ )
+
+
+def _encode_log(readable_log_record: ReadableLogRecord) -> JSONLogRecord:
+ span_id = (
+ None
+ if readable_log_record.log_record.span_id == 0
+ else _encode_span_id(readable_log_record.log_record.span_id)
+ )
+ trace_id = (
+ None
+ if readable_log_record.log_record.trace_id == 0
+ else _encode_trace_id(readable_log_record.log_record.trace_id)
+ )
+ body = readable_log_record.log_record.body
+ return JSONLogRecord(
+ time_unix_nano=readable_log_record.log_record.timestamp,
+ observed_time_unix_nano=readable_log_record.log_record.observed_timestamp,
+ span_id=span_id,
+ trace_id=trace_id,
+ flags=int(readable_log_record.log_record.trace_flags),
+ body=_encode_value(body, allow_null=True),
+ severity_text=readable_log_record.log_record.severity_text,
+ attributes=_encode_attributes(
+ readable_log_record.log_record.attributes, allow_null=True
+ ),
+ dropped_attributes_count=readable_log_record.dropped_attributes,
+ severity_number=getattr(
+ readable_log_record.log_record.severity_number, "value", None
+ ),
+ event_name=readable_log_record.log_record.event_name,
+ )
+
+
+def _encode_resource_logs(
+ batch: Sequence[ReadableLogRecord],
+) -> list[JSONResourceLogs]:
+ sdk_resource_logs = defaultdict(lambda: defaultdict(list))
+
+ for readable_log in batch:
+ sdk_resource = readable_log.resource
+ sdk_instrumentation = readable_log.instrumentation_scope or None
+ json_log = _encode_log(readable_log)
+ sdk_resource_logs[sdk_resource][sdk_instrumentation].append(json_log)
+
+ json_resource_logs = []
+
+ for sdk_resource, sdk_instrumentations in sdk_resource_logs.items():
+ scope_logs = []
+ for sdk_instrumentation, json_logs in sdk_instrumentations.items():
+ scope_logs.append(
+ JSONScopeLogs(
+ scope=(_encode_instrumentation_scope(sdk_instrumentation)),
+ log_records=json_logs,
+ schema_url=sdk_instrumentation.schema_url
+ if sdk_instrumentation
+ else None,
+ )
+ )
+ json_resource_logs.append(
+ JSONResourceLogs(
+ resource=_encode_resource(sdk_resource),
+ scope_logs=scope_logs,
+ schema_url=sdk_resource.schema_url,
+ )
+ )
+
+ return json_resource_logs
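
The two-level ``defaultdict`` in ``_encode_resource_logs`` is the batching core: records are grouped first by SDK resource, then by instrumentation scope, so each resource/scope pair is emitted exactly once regardless of how the batch is ordered. The same pattern in isolation, with plain strings standing in for SDK objects:

```python
# Standalone sketch of the grouping in _encode_resource_logs, with strings
# standing in for Resource / InstrumentationScope / LogRecord objects.
from collections import defaultdict

batch = [
    ("resource-a", "scope-1", "log-1"),
    ("resource-a", "scope-1", "log-2"),
    ("resource-a", "scope-2", "log-3"),
]

groups = defaultdict(lambda: defaultdict(list))
for resource, scope, record in batch:
    groups[resource][scope].append(record)

assert groups["resource-a"]["scope-1"] == ["log-1", "log-2"]
assert list(groups["resource-a"]) == ["scope-1", "scope-2"]
```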
diff --git a/exporter/opentelemetry-exporter-otlp-json-common/src/opentelemetry/exporter/otlp/json/common/_internal/metrics_encoder/__init__.py b/exporter/opentelemetry-exporter-otlp-json-common/src/opentelemetry/exporter/otlp/json/common/_internal/metrics_encoder/__init__.py
new file mode 100644
index 00000000000..4293c57c1ce
--- /dev/null
+++ b/exporter/opentelemetry-exporter-otlp-json-common/src/opentelemetry/exporter/otlp/json/common/_internal/metrics_encoder/__init__.py
@@ -0,0 +1,257 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import logging
+from collections.abc import Iterable
+
+from opentelemetry.exporter.otlp.json.common._internal import (
+ _encode_attributes,
+ _encode_instrumentation_scope,
+ _encode_span_id,
+ _encode_trace_id,
+)
+from opentelemetry.proto_json.collector.metrics.v1.metrics_service import (
+ ExportMetricsServiceRequest as JSONExportMetricsServiceRequest,
+)
+from opentelemetry.proto_json.metrics.v1 import metrics as json_metrics
+from opentelemetry.proto_json.resource.v1.resource import (
+ Resource as JSONResource,
+)
+from opentelemetry.sdk.metrics import (
+ Exemplar,
+)
+from opentelemetry.sdk.metrics._internal.point import (
+ ExponentialHistogramDataPoint,
+ HistogramDataPoint,
+ Metric,
+ NumberDataPoint,
+ ResourceMetrics,
+ ScopeMetrics,
+)
+from opentelemetry.sdk.metrics.export import (
+ ExponentialHistogram as ExponentialHistogramType,
+)
+from opentelemetry.sdk.metrics.export import (
+ Gauge,
+ MetricsData,
+ Sum,
+)
+from opentelemetry.sdk.metrics.export import (
+ Histogram as HistogramType,
+)
+
+_logger = logging.getLogger(__name__)
+
+
+def encode_metrics(data: MetricsData) -> JSONExportMetricsServiceRequest:
+ return JSONExportMetricsServiceRequest(
+ resource_metrics=[
+ _encode_resource_metrics(rm) for rm in data.resource_metrics
+ ]
+ )
+
+
+def _encode_resource_metrics(
+ rm: ResourceMetrics,
+) -> json_metrics.ResourceMetrics:
+ return json_metrics.ResourceMetrics(
+ resource=JSONResource(
+ attributes=_encode_attributes(rm.resource.attributes)
+ ),
+ scope_metrics=[_encode_scope_metrics(sm) for sm in rm.scope_metrics],
+ schema_url=rm.resource.schema_url,
+ )
+
+
+def _encode_scope_metrics(
+ sm: ScopeMetrics,
+) -> json_metrics.ScopeMetrics:
+ return json_metrics.ScopeMetrics(
+ scope=_encode_instrumentation_scope(sm.scope),
+ schema_url=sm.scope.schema_url,
+ metrics=[_encode_metric(m) for m in sm.metrics],
+ )
+
+
+def _encode_metric(metric: Metric) -> json_metrics.Metric:
+ json_metric = json_metrics.Metric(
+ name=metric.name,
+ description=metric.description,
+ unit=metric.unit,
+ )
+ if isinstance(metric.data, Gauge):
+ json_metric.gauge = json_metrics.Gauge(
+ data_points=[
+ _encode_gauge_data_point(pt) for pt in metric.data.data_points
+ ]
+ )
+ elif isinstance(metric.data, HistogramType):
+ json_metric.histogram = json_metrics.Histogram(
+ data_points=[
+ _encode_histogram_data_point(pt)
+ for pt in metric.data.data_points
+ ],
+ aggregation_temporality=metric.data.aggregation_temporality,
+ )
+ elif isinstance(metric.data, Sum):
+ json_metric.sum = json_metrics.Sum(
+ data_points=[
+ _encode_sum_data_point(pt) for pt in metric.data.data_points
+ ],
+ aggregation_temporality=metric.data.aggregation_temporality,
+ is_monotonic=metric.data.is_monotonic,
+ )
+ elif isinstance(metric.data, ExponentialHistogramType):
+ json_metric.exponential_histogram = json_metrics.ExponentialHistogram(
+ data_points=[
+ _encode_exponential_histogram_data_point(pt)
+ for pt in metric.data.data_points
+ ],
+ aggregation_temporality=metric.data.aggregation_temporality,
+ )
+ else:
+ _logger.warning(
+ "unsupported data type %s",
+ metric.data.__class__.__name__,
+ )
+ return json_metric
+
+
+def _encode_gauge_data_point(
+ data_point: NumberDataPoint,
+) -> json_metrics.NumberDataPoint:
+ pt = json_metrics.NumberDataPoint(
+ attributes=_encode_attributes(data_point.attributes),
+ time_unix_nano=data_point.time_unix_nano,
+ exemplars=_encode_exemplars(data_point.exemplars),
+ )
+ if isinstance(data_point.value, int):
+ pt.as_int = data_point.value
+ else:
+ pt.as_double = data_point.value
+ return pt
+
+
+def _encode_sum_data_point(
+ data_point: NumberDataPoint,
+) -> json_metrics.NumberDataPoint:
+ pt = json_metrics.NumberDataPoint(
+ attributes=_encode_attributes(data_point.attributes),
+ start_time_unix_nano=data_point.start_time_unix_nano,
+ time_unix_nano=data_point.time_unix_nano,
+ exemplars=_encode_exemplars(data_point.exemplars),
+ )
+ if isinstance(data_point.value, int):
+ pt.as_int = data_point.value
+ else:
+ pt.as_double = data_point.value
+ return pt
+
+
+def _encode_histogram_data_point(
+ data_point: HistogramDataPoint,
+) -> json_metrics.HistogramDataPoint:
+ return json_metrics.HistogramDataPoint(
+ attributes=_encode_attributes(data_point.attributes),
+ time_unix_nano=data_point.time_unix_nano,
+ start_time_unix_nano=data_point.start_time_unix_nano,
+ exemplars=_encode_exemplars(data_point.exemplars),
+ count=data_point.count,
+ sum=data_point.sum,
+ bucket_counts=data_point.bucket_counts,
+ explicit_bounds=data_point.explicit_bounds,
+ max=data_point.max,
+ min=data_point.min,
+ )
+
+
+def _encode_exponential_histogram_data_point(
+ data_point: ExponentialHistogramDataPoint,
+) -> json_metrics.ExponentialHistogramDataPoint:
+ return json_metrics.ExponentialHistogramDataPoint(
+ attributes=_encode_attributes(data_point.attributes),
+ time_unix_nano=data_point.time_unix_nano,
+ start_time_unix_nano=data_point.start_time_unix_nano,
+ exemplars=_encode_exemplars(data_point.exemplars),
+ count=data_point.count,
+ sum=data_point.sum,
+ scale=data_point.scale,
+ zero_count=data_point.zero_count,
+ positive=(
+ json_metrics.ExponentialHistogramDataPoint.Buckets(
+ offset=data_point.positive.offset,
+ bucket_counts=data_point.positive.bucket_counts,
+ )
+ if data_point.positive.bucket_counts
+ else None
+ ),
+ negative=(
+ json_metrics.ExponentialHistogramDataPoint.Buckets(
+ offset=data_point.negative.offset,
+ bucket_counts=data_point.negative.bucket_counts,
+ )
+ if data_point.negative.bucket_counts
+ else None
+ ),
+ flags=data_point.flags,
+ max=data_point.max,
+ min=data_point.min,
+ )
+
+
+def _encode_exemplars(
+ sdk_exemplars: Iterable[Exemplar],
+) -> list[json_metrics.Exemplar]:
+ """
+ Converts an iterable of SDK exemplars into a list of JSON-model exemplars.
+
+ Args:
+ sdk_exemplars: An iterable of exemplars from the OpenTelemetry SDK.
+
+ Returns:
+ list: A list of JSON-model exemplars.
+ """
+ json_exemplars = []
+ for sdk_exemplar in sdk_exemplars:
+ if (
+ sdk_exemplar.span_id is not None
+ and sdk_exemplar.trace_id is not None
+ ):
+ json_exemplar = json_metrics.Exemplar(
+ time_unix_nano=sdk_exemplar.time_unix_nano,
+ span_id=_encode_span_id(sdk_exemplar.span_id),
+ trace_id=_encode_trace_id(sdk_exemplar.trace_id),
+ filtered_attributes=_encode_attributes(
+ sdk_exemplar.filtered_attributes
+ ),
+ )
+ else:
+ json_exemplar = json_metrics.Exemplar(
+ time_unix_nano=sdk_exemplar.time_unix_nano,
+ filtered_attributes=_encode_attributes(
+ sdk_exemplar.filtered_attributes
+ ),
+ )
+
+ # Assign the value based on its type in the SDK exemplar
+ if isinstance(sdk_exemplar.value, float):
+ json_exemplar.as_double = sdk_exemplar.value
+ elif isinstance(sdk_exemplar.value, int):
+ json_exemplar.as_int = sdk_exemplar.value
+ else:
+ raise ValueError("Exemplar value must be an int or float")
+ json_exemplars.append(json_exemplar)
+
+ return json_exemplars
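
OTLP represents a numeric point as either ``as_int`` or ``as_double``, and the encoders above pick the field from the Python type of the value. A sketch of that dispatch, using a hypothetical stand-in for the generated ``NumberDataPoint``:

```python
# Dispatch sketch for NumberDataPoint / Exemplar values; StandInPoint is a
# hypothetical stand-in for the generated JSON model class.
import dataclasses
from typing import Optional, Union


@dataclasses.dataclass
class StandInPoint:
    as_int: Optional[int] = None
    as_double: Optional[float] = None


def set_value(point: StandInPoint, value: Union[int, float]) -> StandInPoint:
    # mirror the encoders: ints fill as_int, floats fill as_double
    if isinstance(value, int):
        point.as_int = value
    else:
        point.as_double = value
    return point


assert set_value(StandInPoint(), 7).as_int == 7
assert set_value(StandInPoint(), 0.5).as_double == 0.5
```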
diff --git a/exporter/opentelemetry-exporter-otlp-json-common/src/opentelemetry/exporter/otlp/json/common/_internal/trace_encoder/__init__.py b/exporter/opentelemetry-exporter-otlp-json-common/src/opentelemetry/exporter/otlp/json/common/_internal/trace_encoder/__init__.py
new file mode 100644
index 00000000000..1de3351ffad
--- /dev/null
+++ b/exporter/opentelemetry-exporter-otlp-json-common/src/opentelemetry/exporter/otlp/json/common/_internal/trace_encoder/__init__.py
@@ -0,0 +1,185 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+from collections import defaultdict
+from typing import Optional, Sequence
+
+from opentelemetry.exporter.otlp.json.common._internal import (
+ _encode_attributes,
+ _encode_instrumentation_scope,
+ _encode_resource,
+ _encode_span_id,
+ _encode_trace_id,
+)
+from opentelemetry.proto_json.collector.trace.v1.trace_service import (
+ ExportTraceServiceRequest as JSONExportTraceServiceRequest,
+)
+from opentelemetry.proto_json.trace.v1.trace import (
+ ResourceSpans as JSONResourceSpans,
+)
+from opentelemetry.proto_json.trace.v1.trace import (
+ ScopeSpans as JSONScopeSpans,
+)
+from opentelemetry.proto_json.trace.v1.trace import Span as JSONSpan
+from opentelemetry.proto_json.trace.v1.trace import (
+ SpanFlags as JSONSpanFlags,
+)
+from opentelemetry.proto_json.trace.v1.trace import Status as JSONStatus
+from opentelemetry.sdk.trace import Event, ReadableSpan
+from opentelemetry.trace import Link, SpanKind
+from opentelemetry.trace.span import SpanContext, Status, TraceState
+
+# pylint: disable=E1101
+_SPAN_KIND_MAP = {
+ SpanKind.INTERNAL: JSONSpan.SpanKind.SPAN_KIND_INTERNAL,
+ SpanKind.SERVER: JSONSpan.SpanKind.SPAN_KIND_SERVER,
+ SpanKind.CLIENT: JSONSpan.SpanKind.SPAN_KIND_CLIENT,
+ SpanKind.PRODUCER: JSONSpan.SpanKind.SPAN_KIND_PRODUCER,
+ SpanKind.CONSUMER: JSONSpan.SpanKind.SPAN_KIND_CONSUMER,
+}
+
+_logger = logging.getLogger(__name__)
+
+
+def encode_spans(
+ sdk_spans: Sequence[ReadableSpan],
+) -> JSONExportTraceServiceRequest:
+ return JSONExportTraceServiceRequest(
+ resource_spans=_encode_resource_spans(sdk_spans)
+ )
+
+
+def _encode_resource_spans(
+ sdk_spans: Sequence[ReadableSpan],
+) -> list[JSONResourceSpans]:
+ sdk_resource_spans = defaultdict(lambda: defaultdict(list))
+
+ for sdk_span in sdk_spans:
+ sdk_resource = sdk_span.resource
+ sdk_instrumentation = sdk_span.instrumentation_scope or None
+ json_span = _encode_span(sdk_span)
+
+ sdk_resource_spans[sdk_resource][sdk_instrumentation].append(json_span)
+
+ json_resource_spans = []
+
+ for sdk_resource, sdk_instrumentations in sdk_resource_spans.items():
+ scope_spans = []
+ for sdk_instrumentation, json_spans in sdk_instrumentations.items():
+ scope_spans.append(
+ JSONScopeSpans(
+ scope=(_encode_instrumentation_scope(sdk_instrumentation)),
+ spans=json_spans,
+ schema_url=sdk_instrumentation.schema_url
+ if sdk_instrumentation
+ else None,
+ )
+ )
+ json_resource_spans.append(
+ JSONResourceSpans(
+ resource=_encode_resource(sdk_resource),
+ scope_spans=scope_spans,
+ schema_url=sdk_resource.schema_url,
+ )
+ )
+
+ return json_resource_spans
+
+
+def _span_flags(parent_span_context: Optional[SpanContext]) -> int:
+ flags = JSONSpanFlags.SPAN_FLAGS_CONTEXT_HAS_IS_REMOTE_MASK
+ if parent_span_context and parent_span_context.is_remote:
+ flags |= JSONSpanFlags.SPAN_FLAGS_CONTEXT_IS_REMOTE_MASK
+ return int(flags)
+
+
+def _encode_span(sdk_span: ReadableSpan) -> JSONSpan:
+ span_context = sdk_span.get_span_context()
+ return JSONSpan(
+ trace_id=_encode_trace_id(span_context.trace_id),
+ span_id=_encode_span_id(span_context.span_id),
+ trace_state=_encode_trace_state(span_context.trace_state),
+ parent_span_id=_encode_parent_id(sdk_span.parent),
+ name=sdk_span.name,
+ kind=_SPAN_KIND_MAP[sdk_span.kind],
+ start_time_unix_nano=sdk_span.start_time,
+ end_time_unix_nano=sdk_span.end_time,
+ attributes=_encode_attributes(sdk_span.attributes),
+ events=_encode_events(sdk_span.events),
+ links=_encode_links(sdk_span.links),
+ status=_encode_status(sdk_span.status),
+ dropped_attributes_count=sdk_span.dropped_attributes,
+ dropped_events_count=sdk_span.dropped_events,
+ dropped_links_count=sdk_span.dropped_links,
+ flags=_span_flags(sdk_span.parent),
+ )
+
+
+def _encode_events(
+ events: Sequence[Event],
+) -> Optional[list[JSONSpan.Event]]:
+ return (
+ [
+ JSONSpan.Event(
+ name=event.name,
+ time_unix_nano=event.timestamp,
+ attributes=_encode_attributes(event.attributes),
+ dropped_attributes_count=event.dropped_attributes,
+ )
+ for event in events
+ ]
+ if events
+ else None
+ )
+
+
+def _encode_links(links: Sequence[Link]) -> list[JSONSpan.Link]:
+ return (
+ [
+ JSONSpan.Link(
+ trace_id=_encode_trace_id(link.context.trace_id),
+ span_id=_encode_span_id(link.context.span_id),
+ attributes=_encode_attributes(link.attributes),
+ dropped_attributes_count=link.dropped_attributes,
+ flags=_span_flags(link.context),
+ )
+ for link in links
+ ]
+ if links
+ else None
+ )
+
+
+def _encode_status(status: Status) -> Optional[JSONStatus]:
+ return (
+ JSONStatus(
+ code=JSONStatus.StatusCode(status.status_code.value),
+ message=status.description,
+ )
+ if status is not None
+ else None
+ )
+
+
+def _encode_trace_state(trace_state: TraceState) -> Optional[str]:
+ return (
+ ",".join([f"{key}={value}" for key, value in (trace_state.items())])
+ if trace_state is not None
+ else None
+ )
+
+
+def _encode_parent_id(context: Optional[SpanContext]) -> Optional[bytes]:
+ return _encode_span_id(context.span_id) if context else None
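
``_span_flags`` packs parent-context remoteness into the OTLP flags integer: the HAS_IS_REMOTE bit is always set (the SDK always knows whether the parent is remote), and IS_REMOTE is OR-ed in only for remote parents. A sketch of the bit arithmetic, with the mask values restated as an assumption to match the OTLP ``SpanFlags`` enum (0x100 and 0x200 in trace.proto):

```python
# Bit arithmetic behind _span_flags; the mask constants are assumed to match
# the OTLP SpanFlags enum rather than imported from the generated model.
HAS_IS_REMOTE_MASK = 0x00000100
IS_REMOTE_MASK = 0x00000200


def span_flags(parent_is_remote: bool) -> int:
    flags = HAS_IS_REMOTE_MASK  # remoteness of the parent is always known
    if parent_is_remote:
        flags |= IS_REMOTE_MASK
    return flags


assert span_flags(False) == 0x100
assert span_flags(True) == 0x300
```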
diff --git a/exporter/opentelemetry-exporter-otlp-json-common/src/opentelemetry/exporter/otlp/json/common/_log_encoder.py b/exporter/opentelemetry-exporter-otlp-json-common/src/opentelemetry/exporter/otlp/json/common/_log_encoder.py
new file mode 100644
index 00000000000..b21b8e8ba91
--- /dev/null
+++ b/exporter/opentelemetry-exporter-otlp-json-common/src/opentelemetry/exporter/otlp/json/common/_log_encoder.py
@@ -0,0 +1,20 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from opentelemetry.exporter.otlp.json.common._internal._log_encoder import (
+ encode_logs,
+)
+
+__all__ = ["encode_logs"]
diff --git a/exporter/opentelemetry-exporter-otlp-json-common/src/opentelemetry/exporter/otlp/json/common/metrics_encoder.py b/exporter/opentelemetry-exporter-otlp-json-common/src/opentelemetry/exporter/otlp/json/common/metrics_encoder.py
new file mode 100644
index 00000000000..a4c621ef60f
--- /dev/null
+++ b/exporter/opentelemetry-exporter-otlp-json-common/src/opentelemetry/exporter/otlp/json/common/metrics_encoder.py
@@ -0,0 +1,20 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from opentelemetry.exporter.otlp.json.common._internal.metrics_encoder import (
+ encode_metrics,
+)
+
+__all__ = ["encode_metrics"]
diff --git a/exporter/opentelemetry-exporter-otlp-json-common/src/opentelemetry/exporter/otlp/json/common/py.typed b/exporter/opentelemetry-exporter-otlp-json-common/src/opentelemetry/exporter/otlp/json/common/py.typed
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/exporter/opentelemetry-exporter-otlp-json-common/src/opentelemetry/exporter/otlp/json/common/trace_encoder.py b/exporter/opentelemetry-exporter-otlp-json-common/src/opentelemetry/exporter/otlp/json/common/trace_encoder.py
new file mode 100644
index 00000000000..71f2b321576
--- /dev/null
+++ b/exporter/opentelemetry-exporter-otlp-json-common/src/opentelemetry/exporter/otlp/json/common/trace_encoder.py
@@ -0,0 +1,20 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from opentelemetry.exporter.otlp.json.common._internal.trace_encoder import (
+ encode_spans,
+)
+
+__all__ = ["encode_spans"]
diff --git a/exporter/opentelemetry-exporter-otlp-json-common/src/opentelemetry/exporter/otlp/json/common/version/__init__.py b/exporter/opentelemetry-exporter-otlp-json-common/src/opentelemetry/exporter/otlp/json/common/version/__init__.py
new file mode 100644
index 00000000000..c099e9440e9
--- /dev/null
+++ b/exporter/opentelemetry-exporter-otlp-json-common/src/opentelemetry/exporter/otlp/json/common/version/__init__.py
@@ -0,0 +1,15 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+__version__ = "0.61b0.dev"
diff --git a/exporter/opentelemetry-exporter-otlp-json-common/test-requirements.txt b/exporter/opentelemetry-exporter-otlp-json-common/test-requirements.txt
new file mode 100644
index 00000000000..8da844175a2
--- /dev/null
+++ b/exporter/opentelemetry-exporter-otlp-json-common/test-requirements.txt
@@ -0,0 +1,7 @@
+pytest==7.4.4
+-e opentelemetry-api
+-e opentelemetry-sdk
+-e opentelemetry-semantic-conventions
+-e tests/opentelemetry-test-utils
+-e opentelemetry-proto-json
+-e exporter/opentelemetry-exporter-otlp-json-common
\ No newline at end of file
diff --git a/exporter/opentelemetry-exporter-otlp-json-common/tests/__init__.py b/exporter/opentelemetry-exporter-otlp-json-common/tests/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/exporter/opentelemetry-exporter-otlp-json-http/LICENSE b/exporter/opentelemetry-exporter-otlp-json-http/LICENSE
new file mode 100644
index 00000000000..261eeb9e9f8
--- /dev/null
+++ b/exporter/opentelemetry-exporter-otlp-json-http/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/exporter/opentelemetry-exporter-otlp-json-http/README.rst b/exporter/opentelemetry-exporter-otlp-json-http/README.rst
new file mode 100644
index 00000000000..da4b9f2c925
--- /dev/null
+++ b/exporter/opentelemetry-exporter-otlp-json-http/README.rst
@@ -0,0 +1,25 @@
+OpenTelemetry Collector JSON over HTTP Exporter
+===================================================
+
+|pypi|
+
+.. |pypi| image:: https://badge.fury.io/py/opentelemetry-exporter-otlp-json-http.svg
+ :target: https://pypi.org/project/opentelemetry-exporter-otlp-json-http/
+
+This library allows exporting data to the OpenTelemetry Collector using the OpenTelemetry Protocol with JSON encoding over HTTP.
+
+Installation
+------------
+
+::
+
+ pip install opentelemetry-exporter-otlp-json-http
+
+
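+Usage
+-----
+
+A minimal sketch of wiring the trace exporter into the SDK. The exporter
+class and module path come from this package; ``TracerProvider`` and
+``BatchSpanProcessor`` are standard OpenTelemetry SDK APIs, and the
+endpoint defaults to ``http://localhost:4318/v1/traces``::
+
+    from opentelemetry import trace
+    from opentelemetry.exporter.otlp.json.http.trace_exporter import (
+        OTLPJSONTraceExporter,
+    )
+    from opentelemetry.sdk.trace import TracerProvider
+    from opentelemetry.sdk.trace.export import BatchSpanProcessor
+
+    # Send finished spans to the collector as OTLP JSON over HTTP.
+    provider = TracerProvider()
+    provider.add_span_processor(
+        BatchSpanProcessor(OTLPJSONTraceExporter())
+    )
+    trace.set_tracer_provider(provider)
+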
+References
+----------
+
+* `OpenTelemetry Collector Exporter `_
+* `OpenTelemetry Collector `_
+* `OpenTelemetry `_
+* `OpenTelemetry Protocol Specification `_
diff --git a/exporter/opentelemetry-exporter-otlp-json-http/pyproject.toml b/exporter/opentelemetry-exporter-otlp-json-http/pyproject.toml
new file mode 100644
index 00000000000..516ae788292
--- /dev/null
+++ b/exporter/opentelemetry-exporter-otlp-json-http/pyproject.toml
@@ -0,0 +1,62 @@
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[project]
+name = "opentelemetry-exporter-otlp-json-http"
+dynamic = ["version"]
+description = "OpenTelemetry Collector Json over HTTP Exporter"
+readme = "README.rst"
+license = "Apache-2.0"
+requires-python = ">=3.9"
+authors = [
+ { name = "OpenTelemetry Authors", email = "cncf-opentelemetry-contributors@lists.cncf.io" },
+]
+classifiers = [
+ "Development Status :: 5 - Production/Stable",
+ "Framework :: OpenTelemetry",
+ "Framework :: OpenTelemetry :: Exporters",
+ "Intended Audience :: Developers",
+ "Programming Language :: Python",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.9",
+ "Programming Language :: Python :: 3.10",
+ "Programming Language :: Python :: 3.11",
+ "Programming Language :: Python :: 3.12",
+ "Programming Language :: Python :: 3.13",
+ "Programming Language :: Python :: 3.14",
+]
+dependencies = [
+ "opentelemetry-api ~= 1.15",
+ "opentelemetry-sdk ~= 1.40.0.dev",
+ "opentelemetry-proto-json == 0.61b0.dev",
+ "opentelemetry-exporter-otlp-json-common == 0.61b0.dev",
+ "urllib3 ~= 2.6",
+]
+
+[project.entry-points.opentelemetry_traces_exporter]
+otlp_json_http = "opentelemetry.exporter.otlp.json.http.trace_exporter:OTLPSpanExporter"
+
+[project.entry-points.opentelemetry_metrics_exporter]
+otlp_json_http = "opentelemetry.exporter.otlp.json.http.metric_exporter:OTLPMetricExporter"
+
+[project.entry-points.opentelemetry_logs_exporter]
+otlp_json_http = "opentelemetry.exporter.otlp.json.http._log_exporter:OTLPLogExporter"
+
+[project.urls]
+Homepage = "https://github.com/open-telemetry/opentelemetry-python/tree/main/exporter/opentelemetry-exporter-otlp-json-http"
+Repository = "https://github.com/open-telemetry/opentelemetry-python"
+
+[project.optional-dependencies]
+
+[tool.hatch.version]
+path = "src/opentelemetry/exporter/otlp/json/http/version/__init__.py"
+
+[tool.hatch.build.targets.sdist]
+include = [
+ "/src",
+ "/tests",
+]
+
+[tool.hatch.build.targets.wheel]
+packages = ["src/opentelemetry"]
diff --git a/exporter/opentelemetry-exporter-otlp-json-http/src/opentelemetry/exporter/otlp/json/http/__init__.py b/exporter/opentelemetry-exporter-otlp-json-http/src/opentelemetry/exporter/otlp/json/http/__init__.py
new file mode 100644
index 00000000000..36bf86385fe
--- /dev/null
+++ b/exporter/opentelemetry-exporter-otlp-json-http/src/opentelemetry/exporter/otlp/json/http/__init__.py
@@ -0,0 +1,24 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import enum
+
+from opentelemetry.exporter.otlp.json.http.version import __version__
+
+
+class Compression(enum.Enum):
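+    """Supported request-body compressions; the values match those
+    accepted by the OTEL_EXPORTER_OTLP_COMPRESSION environment variable."""
+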
+ NoCompression = "none"
+ Deflate = "deflate"
+ Gzip = "gzip"
+
diff --git a/exporter/opentelemetry-exporter-otlp-json-http/src/opentelemetry/exporter/otlp/json/http/_internal/__init__.py b/exporter/opentelemetry-exporter-otlp-json-http/src/opentelemetry/exporter/otlp/json/http/_internal/__init__.py
new file mode 100644
index 00000000000..8205c7426c7
--- /dev/null
+++ b/exporter/opentelemetry-exporter-otlp-json-http/src/opentelemetry/exporter/otlp/json/http/_internal/__init__.py
@@ -0,0 +1,219 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import gzip
+import logging
+import os
+import random
+import threading
+import time
+import zlib
+from io import BytesIO
+from typing import Dict, Final, Optional, Union
+
+import urllib3
+from opentelemetry.exporter.otlp.json.http import Compression
+from opentelemetry.exporter.otlp.json.http.version import __version__
+from opentelemetry.sdk.environment_variables import (
+ OTEL_EXPORTER_OTLP_COMPRESSION,
+ OTEL_EXPORTER_OTLP_ENDPOINT,
+ OTEL_EXPORTER_OTLP_HEADERS,
+ OTEL_EXPORTER_OTLP_TIMEOUT,
+)
+from opentelemetry.util.re import parse_env_headers
+
+_logger = logging.getLogger(__name__)
+
+_MAX_RETRYS: Final[int] = 6
+_DEFAULT_ENDPOINT: Final[str] = "http://localhost:4318/"
+_DEFAULT_TIMEOUT: Final[int] = 10
+_DEFAULT_JITTER: Final[float] = 0.2
+
+
+class _OTLPHttpClient:
+ """A signal-agnostic OTLP HTTP client using urllib3."""
+
+ def __init__(
+ self,
+ endpoint: str,
+ headers: Dict[str, str],
+ timeout: float,
+ compression: Compression,
+ certificate_file: Optional[Union[str, bool]] = None,
+ client_key_file: Optional[Union[str, bool]] = None,
+ client_certificate_file: Optional[Union[str, bool]] = None,
+ jitter: float = _DEFAULT_JITTER,
+ ):
+ self._endpoint = endpoint
+ self._headers = headers
+ self._timeout = timeout
+ self._compression = compression
+ self._jitter = jitter
+ self._shutdown = False
+ self._shutdown_in_progress = threading.Event()
+
+ self._http = urllib3.PoolManager(
+ retries=False,
+ ca_certs=certificate_file,
+ cert_file=client_certificate_file,
+ key_file=client_key_file,
+ )
+
+ @staticmethod
+ def _is_retryable(status_code: int) -> bool:
+ return status_code in (408, 429) or (500 <= status_code < 600)
+
+ def _get_backoff_with_jitter(self, retry_num: int) -> float:
+ """Calculate jittered exponential backoff."""
+ base_backoff = 2**retry_num
+ if self._jitter == 0:
+ return float(base_backoff)
+ return base_backoff * random.uniform(
+ 1 - self._jitter, 1 + self._jitter
+ )
+
+ def export(self, body: bytes, timeout_sec: Optional[float] = None) -> bool:
+ """Exports opaque bytes to the configured endpoint."""
+ if self._shutdown:
+ return False
+
+ if self._compression == Compression.Gzip:
+ gzip_data = BytesIO()
+ with gzip.GzipFile(fileobj=gzip_data, mode="wb") as gzip_stream:
+ gzip_stream.write(body)
+ body = gzip_data.getvalue()
+ elif self._compression == Compression.Deflate:
+ body = zlib.compress(body)
+
+ timeout = timeout_sec if timeout_sec is not None else self._timeout
+ deadline_sec = time.time() + timeout
+ for retry_num in range(_MAX_RETRYS):
+ backoff_seconds = self._get_backoff_with_jitter(retry_num)
+ try:
+ response = self._http.request(
+ "POST",
+ self._endpoint,
+ body=body,
+ headers=self._headers,
+ timeout=deadline_sec - time.time(),
+ retries=False,
+ )
+ if 200 <= response.status < 300:
+ return True
+
+ retryable = self._is_retryable(response.status)
+ reason = response.status
+ except Exception as error:
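+                # Connection errors, timeouts, etc. are treated as retryable.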
+ retryable = True
+ reason = error
+
+ if (
+ not retryable
+ or (retry_num + 1 == _MAX_RETRYS)
+ or (time.time() + backoff_seconds > deadline_sec)
+ or self._shutdown
+ ):
+ _logger.error("Failed to export batch. Code: %s", reason)
+ return False
+
+ _logger.warning(
+ "Transient error %s encountered while exporting, retrying in %.2fs.",
+ reason,
+ backoff_seconds,
+ )
+ if self._shutdown_in_progress.wait(backoff_seconds):
+ break
+
+ return False
+
+ def shutdown(self):
+ self._shutdown = True
+ self._shutdown_in_progress.set()
+ self._http.clear()
+
+
+def _resolve_endpoint(default_path: str, signal_env: str) -> str:
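+    """Resolve the endpoint per the OTLP/HTTP spec: a signal-specific
+    endpoint variable is used verbatim, while the base endpoint from
+    OTEL_EXPORTER_OTLP_ENDPOINT has the signal path (e.g. "v1/traces")
+    appended."""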
+ if endpoint := os.environ.get(signal_env):
+ return endpoint
+
+ base_endpoint = os.environ.get(
+ OTEL_EXPORTER_OTLP_ENDPOINT, _DEFAULT_ENDPOINT
+ )
+
+ return f"{base_endpoint.removesuffix('/')}/{default_path}"
+
+
+def _resolve_headers(
+ signal_headers_env: str, custom_headers: Optional[Dict[str, str]]
+) -> Dict[str, str]:
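+    """Build request headers: defaults first, then environment headers
+    (the signal-specific variable takes precedence over the generic one),
+    then any explicitly passed headers."""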
+ headers = {
+ "Content-Type": "application/json",
+ "User-Agent": "OTel-OTLP-JSON-Exporter-Python/" + __version__,
+ }
+ env_headers = parse_env_headers(
+ os.environ.get(
+ signal_headers_env, os.environ.get(OTEL_EXPORTER_OTLP_HEADERS, "")
+ ),
+ liberal=True,
+ )
+ headers.update(env_headers)
+ if custom_headers:
+ headers.update(custom_headers)
+ return headers
+
+
+def _resolve_timeout(
+ signal_timeout_env: str, custom_timeout: Optional[float]
+) -> float:
+ if custom_timeout is not None:
+ return custom_timeout
+ return float(
+ os.environ.get(
+ signal_timeout_env,
+ os.environ.get(OTEL_EXPORTER_OTLP_TIMEOUT, _DEFAULT_TIMEOUT),
+ )
+ )
+
+
+def _resolve_compression(
+ signal_compression_env: str, custom_compression: Optional[Compression]
+) -> Compression:
+ if custom_compression is not None:
+ return custom_compression
+
+ val = (
+ os.environ.get(
+ signal_compression_env,
+ os.environ.get(OTEL_EXPORTER_OTLP_COMPRESSION, "none"),
+ )
+ .lower()
+ .strip()
+ )
+
+ try:
+ return Compression(val)
+ except ValueError:
+ _logger.warning("Unsupported compression type: %s", val)
+ return Compression.NoCompression
+
+
+def _resolve_tls_file(
+ custom_file: Optional[str],
+ signal_env: str,
+ global_env: str,
+ default: Optional[Union[str, bool]] = None,
+) -> Optional[Union[str, bool]]:
+ if custom_file is not None:
+ return custom_file
+ return os.environ.get(signal_env, os.environ.get(global_env, default))
diff --git a/exporter/opentelemetry-exporter-otlp-json-http/src/opentelemetry/exporter/otlp/json/http/_log_exporter/__init__.py b/exporter/opentelemetry-exporter-otlp-json-http/src/opentelemetry/exporter/otlp/json/http/_log_exporter/__init__.py
new file mode 100644
index 00000000000..bea951f9d9e
--- /dev/null
+++ b/exporter/opentelemetry-exporter-otlp-json-http/src/opentelemetry/exporter/otlp/json/http/_log_exporter/__init__.py
@@ -0,0 +1,123 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+from typing import Optional, Sequence
+
+from opentelemetry.exporter.otlp.json.common._internal._log_encoder import (
+ encode_logs,
+)
+from opentelemetry.exporter.otlp.json.http import Compression
+from opentelemetry.exporter.otlp.json.http._internal import (
+ _OTLPHttpClient,
+ _resolve_compression,
+ _resolve_endpoint,
+ _resolve_headers,
+ _resolve_timeout,
+ _resolve_tls_file,
+ _DEFAULT_JITTER,
+)
+from opentelemetry.sdk._logs import ReadableLogRecord
+from opentelemetry.sdk._logs.export import (
+ LogRecordExporter,
+ LogRecordExportResult,
+)
+from opentelemetry.sdk._shared_internal import DuplicateFilter
+from opentelemetry.sdk.environment_variables import (
+ OTEL_EXPORTER_OTLP_CERTIFICATE,
+ OTEL_EXPORTER_OTLP_CLIENT_CERTIFICATE,
+ OTEL_EXPORTER_OTLP_CLIENT_KEY,
+ OTEL_EXPORTER_OTLP_LOGS_CERTIFICATE,
+ OTEL_EXPORTER_OTLP_LOGS_CLIENT_CERTIFICATE,
+ OTEL_EXPORTER_OTLP_LOGS_CLIENT_KEY,
+ OTEL_EXPORTER_OTLP_LOGS_COMPRESSION,
+ OTEL_EXPORTER_OTLP_LOGS_ENDPOINT,
+ OTEL_EXPORTER_OTLP_LOGS_HEADERS,
+ OTEL_EXPORTER_OTLP_LOGS_TIMEOUT,
+)
+
+_logger = logging.getLogger(__name__)
+_logger.addFilter(DuplicateFilter())
+
+
+class OTLPLogExporter(LogRecordExporter):
+ """OTLP JSON exporter for logs."""
+
+ def __init__(
+ self,
+ endpoint: Optional[str] = None,
+ certificate_file: Optional[str] = None,
+ client_key_file: Optional[str] = None,
+ client_certificate_file: Optional[str] = None,
+ headers: Optional[dict[str, str]] = None,
+ timeout: Optional[float] = None,
+ compression: Optional[Compression] = None,
+ jitter: float = _DEFAULT_JITTER,
+ ):
+ self._endpoint = endpoint or _resolve_endpoint(
+ "v1/logs", OTEL_EXPORTER_OTLP_LOGS_ENDPOINT
+ )
+ self._certificate_file = _resolve_tls_file(
+ certificate_file,
+ OTEL_EXPORTER_OTLP_LOGS_CERTIFICATE,
+ OTEL_EXPORTER_OTLP_CERTIFICATE,
+ )
+ self._client_key_file = _resolve_tls_file(
+ client_key_file,
+ OTEL_EXPORTER_OTLP_LOGS_CLIENT_KEY,
+ OTEL_EXPORTER_OTLP_CLIENT_KEY,
+ )
+ self._client_certificate_file = _resolve_tls_file(
+ client_certificate_file,
+ OTEL_EXPORTER_OTLP_LOGS_CLIENT_CERTIFICATE,
+ OTEL_EXPORTER_OTLP_CLIENT_CERTIFICATE,
+ )
+
+ self._headers = _resolve_headers(
+ OTEL_EXPORTER_OTLP_LOGS_HEADERS, headers
+ )
+
+ self._timeout = _resolve_timeout(
+ OTEL_EXPORTER_OTLP_LOGS_TIMEOUT, timeout
+ )
+ self._compression = _resolve_compression(
+ OTEL_EXPORTER_OTLP_LOGS_COMPRESSION, compression
+ )
+
+ self._client = _OTLPHttpClient(
+ endpoint=self._endpoint,
+ headers=self._headers,
+ timeout=self._timeout,
+ compression=self._compression,
+ certificate_file=self._certificate_file,
+ client_key_file=self._client_key_file,
+ client_certificate_file=self._client_certificate_file,
+ jitter=jitter,
+ )
+
+ def export(
+ self,
+ batch: Sequence[ReadableLogRecord],
+ ) -> LogRecordExportResult:
+ encoded_request = encode_logs(batch)
+ body = encoded_request.to_json().encode("utf-8")
+ if self._client.export(body):
+ return LogRecordExportResult.SUCCESS
+ return LogRecordExportResult.FAILURE
+
+ def shutdown(self) -> None:
+ self._client.shutdown()
+
+ def force_flush(self, timeout_millis: int = 30000) -> bool:
+ return True
diff --git a/exporter/opentelemetry-exporter-otlp-json-http/src/opentelemetry/exporter/otlp/json/http/metric_exporter/__init__.py b/exporter/opentelemetry-exporter-otlp-json-http/src/opentelemetry/exporter/otlp/json/http/metric_exporter/__init__.py
new file mode 100644
index 00000000000..9698db3520f
--- /dev/null
+++ b/exporter/opentelemetry-exporter-otlp-json-http/src/opentelemetry/exporter/otlp/json/http/metric_exporter/__init__.py
@@ -0,0 +1,248 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import os
+from typing import Optional
+
+from opentelemetry.exporter.otlp.json.common._internal.metrics_encoder import (
+ encode_metrics,
+)
+from opentelemetry.exporter.otlp.json.http import Compression
+from opentelemetry.exporter.otlp.json.http._internal import (
+ _OTLPHttpClient,
+ _resolve_compression,
+ _resolve_endpoint,
+ _resolve_headers,
+ _resolve_timeout,
+ _resolve_tls_file,
+ _DEFAULT_JITTER,
+)
+from opentelemetry.sdk.environment_variables import (
+ OTEL_EXPORTER_OTLP_CERTIFICATE,
+ OTEL_EXPORTER_OTLP_CLIENT_CERTIFICATE,
+ OTEL_EXPORTER_OTLP_CLIENT_KEY,
+ OTEL_EXPORTER_OTLP_METRICS_CERTIFICATE,
+ OTEL_EXPORTER_OTLP_METRICS_CLIENT_CERTIFICATE,
+ OTEL_EXPORTER_OTLP_METRICS_CLIENT_KEY,
+ OTEL_EXPORTER_OTLP_METRICS_COMPRESSION,
+ OTEL_EXPORTER_OTLP_METRICS_DEFAULT_HISTOGRAM_AGGREGATION,
+ OTEL_EXPORTER_OTLP_METRICS_ENDPOINT,
+ OTEL_EXPORTER_OTLP_METRICS_HEADERS,
+ OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE,
+ OTEL_EXPORTER_OTLP_METRICS_TIMEOUT,
+)
+from opentelemetry.sdk.metrics import (
+ Counter,
+ Histogram,
+ ObservableCounter,
+ ObservableGauge,
+ ObservableUpDownCounter,
+ UpDownCounter,
+)
+from opentelemetry.sdk.metrics.export import (
+ AggregationTemporality,
+ MetricExporter,
+ MetricExportResult,
+ MetricsData,
+)
+from opentelemetry.sdk.metrics.view import (
+ Aggregation,
+ ExplicitBucketHistogramAggregation,
+ ExponentialBucketHistogramAggregation,
+)
+
+_logger = logging.getLogger(__name__)
+
+
+class OTLPJSONMetricExporter(MetricExporter):
+ """OTLP JSON exporter for metrics using urllib3."""
+
+ def __init__(
+ self,
+ endpoint: Optional[str] = None,
+ certificate_file: Optional[str] = None,
+ client_key_file: Optional[str] = None,
+ client_certificate_file: Optional[str] = None,
+ headers: Optional[dict[str, str]] = None,
+ timeout: Optional[float] = None,
+ compression: Optional[Compression] = None,
+ preferred_temporality: Optional[
+ dict[type, AggregationTemporality]
+ ] = None,
+ preferred_aggregation: Optional[dict[type, Aggregation]] = None,
+ jitter: float = _DEFAULT_JITTER,
+ ):
+ self._endpoint = endpoint or _resolve_endpoint(
+ "v1/metrics", OTEL_EXPORTER_OTLP_METRICS_ENDPOINT
+ )
+
+ self._certificate_file = _resolve_tls_file(
+ certificate_file,
+ OTEL_EXPORTER_OTLP_METRICS_CERTIFICATE,
+ OTEL_EXPORTER_OTLP_CERTIFICATE,
+ )
+ self._client_key_file = _resolve_tls_file(
+ client_key_file,
+ OTEL_EXPORTER_OTLP_METRICS_CLIENT_KEY,
+ OTEL_EXPORTER_OTLP_CLIENT_KEY,
+ )
+ self._client_certificate_file = _resolve_tls_file(
+ client_certificate_file,
+ OTEL_EXPORTER_OTLP_METRICS_CLIENT_CERTIFICATE,
+ OTEL_EXPORTER_OTLP_CLIENT_CERTIFICATE,
+ )
+
+ self._headers = _resolve_headers(
+ OTEL_EXPORTER_OTLP_METRICS_HEADERS, headers
+ )
+
+ self._timeout = _resolve_timeout(
+ OTEL_EXPORTER_OTLP_METRICS_TIMEOUT, timeout
+ )
+ self._compression = _resolve_compression(
+ OTEL_EXPORTER_OTLP_METRICS_COMPRESSION, compression
+ )
+
+ self._client = _OTLPHttpClient(
+ endpoint=self._endpoint,
+ headers=self._headers,
+ timeout=self._timeout,
+ compression=self._compression,
+ certificate_file=self._certificate_file,
+ client_key_file=self._client_key_file,
+ client_certificate_file=self._client_certificate_file,
+ jitter=jitter,
+ )
+
+ super().__init__(
+ preferred_temporality=self._get_temporality(preferred_temporality),
+ preferred_aggregation=self._get_aggregation(preferred_aggregation),
+ )
+
+ def _get_temporality(
+ self,
+ preferred_temporality: Optional[dict[type, AggregationTemporality]],
+ ) -> dict[type, AggregationTemporality]:
+ otel_exporter_otlp_metrics_temporality_preference = (
+ os.environ.get(
+ OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE,
+ "CUMULATIVE",
+ )
+ .upper()
+ .strip()
+ )
+
+ if otel_exporter_otlp_metrics_temporality_preference == "DELTA":
+ instrument_class_temporality = {
+ Counter: AggregationTemporality.DELTA,
+ UpDownCounter: AggregationTemporality.CUMULATIVE,
+ Histogram: AggregationTemporality.DELTA,
+ ObservableCounter: AggregationTemporality.DELTA,
+ ObservableUpDownCounter: AggregationTemporality.CUMULATIVE,
+ ObservableGauge: AggregationTemporality.CUMULATIVE,
+ }
+
+ elif otel_exporter_otlp_metrics_temporality_preference == "LOWMEMORY":
+ instrument_class_temporality = {
+ Counter: AggregationTemporality.DELTA,
+ UpDownCounter: AggregationTemporality.CUMULATIVE,
+ Histogram: AggregationTemporality.DELTA,
+ ObservableCounter: AggregationTemporality.CUMULATIVE,
+ ObservableUpDownCounter: AggregationTemporality.CUMULATIVE,
+ ObservableGauge: AggregationTemporality.CUMULATIVE,
+ }
+
+ else:
+ if (
+ otel_exporter_otlp_metrics_temporality_preference
+ != "CUMULATIVE"
+ ):
+ _logger.warning(
+ "Unrecognized OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE"
+ " value found: %s, using CUMULATIVE",
+ otel_exporter_otlp_metrics_temporality_preference,
+ )
+ instrument_class_temporality = {
+ Counter: AggregationTemporality.CUMULATIVE,
+ UpDownCounter: AggregationTemporality.CUMULATIVE,
+ Histogram: AggregationTemporality.CUMULATIVE,
+ ObservableCounter: AggregationTemporality.CUMULATIVE,
+ ObservableUpDownCounter: AggregationTemporality.CUMULATIVE,
+ ObservableGauge: AggregationTemporality.CUMULATIVE,
+ }
+
+ instrument_class_temporality.update(preferred_temporality or {})
+ return instrument_class_temporality
+
+ def _get_aggregation(
+ self,
+ preferred_aggregation: Optional[dict[type, Aggregation]],
+ ) -> dict[type, Aggregation]:
+ otel_exporter_otlp_metrics_default_histogram_aggregation = (
+ os.environ.get(
+ OTEL_EXPORTER_OTLP_METRICS_DEFAULT_HISTOGRAM_AGGREGATION,
+ "explicit_bucket_histogram",
+ )
+ )
+
+ if otel_exporter_otlp_metrics_default_histogram_aggregation == (
+ "base2_exponential_bucket_histogram"
+ ):
+ instrument_class_aggregation = {
+ Histogram: ExponentialBucketHistogramAggregation(),
+ }
+
+ else:
+ if (
+ otel_exporter_otlp_metrics_default_histogram_aggregation
+ != "explicit_bucket_histogram"
+ ):
+ _logger.warning(
+ "Invalid value for %s: %s, using explicit bucket "
+ "histogram aggregation",
+ OTEL_EXPORTER_OTLP_METRICS_DEFAULT_HISTOGRAM_AGGREGATION,
+ otel_exporter_otlp_metrics_default_histogram_aggregation,
+ )
+
+ instrument_class_aggregation = {
+ Histogram: ExplicitBucketHistogramAggregation(),
+ }
+
+ instrument_class_aggregation.update(preferred_aggregation or {})
+ return instrument_class_aggregation
+
+ def export(
+ self,
+ metrics_data: MetricsData,
+ timeout_millis: Optional[float] = None,
+ **kwargs,
+ ) -> MetricExportResult:
+ encoded_request = encode_metrics(metrics_data)
+ body = encoded_request.to_json().encode("utf-8")
+
+ timeout_sec = (
+ timeout_millis / 1000.0 if timeout_millis is not None else None
+ )
+
+ if self._client.export(body, timeout_sec=timeout_sec):
+ return MetricExportResult.SUCCESS
+ return MetricExportResult.FAILURE
+
+ def shutdown(self, timeout_millis: float = 30_000, **kwargs) -> None:
+ self._client.shutdown()
+
+ def force_flush(self, timeout_millis: float = 10_000) -> bool:
+ return True
diff --git a/exporter/opentelemetry-exporter-otlp-json-http/src/opentelemetry/exporter/otlp/json/http/py.typed b/exporter/opentelemetry-exporter-otlp-json-http/src/opentelemetry/exporter/otlp/json/http/py.typed
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/exporter/opentelemetry-exporter-otlp-json-http/src/opentelemetry/exporter/otlp/json/http/trace_exporter/__init__.py b/exporter/opentelemetry-exporter-otlp-json-http/src/opentelemetry/exporter/otlp/json/http/trace_exporter/__init__.py
new file mode 100644
index 00000000000..90af894d35c
--- /dev/null
+++ b/exporter/opentelemetry-exporter-otlp-json-http/src/opentelemetry/exporter/otlp/json/http/trace_exporter/__init__.py
@@ -0,0 +1,122 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+from typing import Optional, Sequence
+
+from opentelemetry.exporter.otlp.json.common._internal.trace_encoder import (
+ encode_spans,
+)
+from opentelemetry.exporter.otlp.json.http import Compression
+from opentelemetry.exporter.otlp.json.http._internal import (
+ _OTLPHttpClient,
+ _resolve_compression,
+ _resolve_endpoint,
+ _resolve_headers,
+ _resolve_timeout,
+ _resolve_tls_file,
+ _DEFAULT_JITTER,
+)
+from opentelemetry.sdk.environment_variables import (
+ OTEL_EXPORTER_OTLP_CERTIFICATE,
+ OTEL_EXPORTER_OTLP_CLIENT_CERTIFICATE,
+ OTEL_EXPORTER_OTLP_CLIENT_KEY,
+ OTEL_EXPORTER_OTLP_TRACES_CERTIFICATE,
+ OTEL_EXPORTER_OTLP_TRACES_CLIENT_CERTIFICATE,
+ OTEL_EXPORTER_OTLP_TRACES_CLIENT_KEY,
+ OTEL_EXPORTER_OTLP_TRACES_COMPRESSION,
+ OTEL_EXPORTER_OTLP_TRACES_ENDPOINT,
+ OTEL_EXPORTER_OTLP_TRACES_HEADERS,
+ OTEL_EXPORTER_OTLP_TRACES_TIMEOUT,
+)
+from opentelemetry.sdk.trace.export import (
+ ReadableSpan,
+ SpanExporter,
+ SpanExportResult,
+)
+
+_logger = logging.getLogger(__name__)
+
+
+class OTLPJSONTraceExporter(SpanExporter):
+ """OTLP JSON exporter for traces using urllib3."""
+
+ def __init__(
+ self,
+ endpoint: Optional[str] = None,
+ certificate_file: Optional[str] = None,
+ client_key_file: Optional[str] = None,
+ client_certificate_file: Optional[str] = None,
+ headers: Optional[dict[str, str]] = None,
+ timeout: Optional[float] = None,
+ compression: Optional[Compression] = None,
+ jitter: float = _DEFAULT_JITTER,
+ ):
+ self._endpoint = endpoint or _resolve_endpoint(
+ "v1/traces", OTEL_EXPORTER_OTLP_TRACES_ENDPOINT
+ )
+
+ self._certificate_file = _resolve_tls_file(
+ certificate_file,
+ OTEL_EXPORTER_OTLP_TRACES_CERTIFICATE,
+ OTEL_EXPORTER_OTLP_CERTIFICATE,
+ )
+ self._client_key_file = _resolve_tls_file(
+ client_key_file,
+ OTEL_EXPORTER_OTLP_TRACES_CLIENT_KEY,
+ OTEL_EXPORTER_OTLP_CLIENT_KEY,
+ )
+ self._client_certificate_file = _resolve_tls_file(
+ client_certificate_file,
+ OTEL_EXPORTER_OTLP_TRACES_CLIENT_CERTIFICATE,
+ OTEL_EXPORTER_OTLP_CLIENT_CERTIFICATE,
+ )
+
+ self._headers = _resolve_headers(
+ OTEL_EXPORTER_OTLP_TRACES_HEADERS, headers
+ )
+
+ self._timeout = _resolve_timeout(
+ OTEL_EXPORTER_OTLP_TRACES_TIMEOUT, timeout
+ )
+ self._compression = _resolve_compression(
+ OTEL_EXPORTER_OTLP_TRACES_COMPRESSION, compression
+ )
+
+ self._client = _OTLPHttpClient(
+ endpoint=self._endpoint,
+ headers=self._headers,
+ timeout=self._timeout,
+ compression=self._compression,
+ certificate_file=self._certificate_file,
+ client_key_file=self._client_key_file,
+ client_certificate_file=self._client_certificate_file,
+ jitter=jitter,
+ )
+
+ def export(
+ self,
+ spans: Sequence[ReadableSpan],
+ ) -> SpanExportResult:
+ encoded_request = encode_spans(spans)
+ body = encoded_request.to_json().encode("utf-8")
+ if self._client.export(body):
+ return SpanExportResult.SUCCESS
+ return SpanExportResult.FAILURE
+
+ def shutdown(self) -> None:
+ self._client.shutdown()
+
+ def force_flush(self, timeout_millis: int = 30000) -> bool:
+ return True
diff --git a/exporter/opentelemetry-exporter-otlp-json-http/src/opentelemetry/exporter/otlp/json/http/version/__init__.py b/exporter/opentelemetry-exporter-otlp-json-http/src/opentelemetry/exporter/otlp/json/http/version/__init__.py
new file mode 100644
index 00000000000..c099e9440e9
--- /dev/null
+++ b/exporter/opentelemetry-exporter-otlp-json-http/src/opentelemetry/exporter/otlp/json/http/version/__init__.py
@@ -0,0 +1,15 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+__version__ = "0.61b0.dev"
diff --git a/opentelemetry-proto-json/LICENSE b/opentelemetry-proto-json/LICENSE
new file mode 100644
index 00000000000..261eeb9e9f8
--- /dev/null
+++ b/opentelemetry-proto-json/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/opentelemetry-proto-json/README.rst b/opentelemetry-proto-json/README.rst
new file mode 100644
index 00000000000..d1d294ccd3e
--- /dev/null
+++ b/opentelemetry-proto-json/README.rst
@@ -0,0 +1,39 @@
+OpenTelemetry Python Proto JSON
+================================
+
+|pypi|
+
+.. |pypi| image:: https://badge.fury.io/py/opentelemetry-proto-json.svg
+ :target: https://pypi.org/project/opentelemetry-proto-json/
+
+This library contains the generated code for the OpenTelemetry protobuf data model with JSON encoding support. The code in this package was generated from the v1.9.0 release_ of opentelemetry-proto and includes definitions for the OpenTelemetry JSON Protobuf encoding specification.
+
+.. _release: https://github.com/open-telemetry/opentelemetry-proto/releases/tag/v1.9.0
+
+Installation
+------------
+
+::
+
+ pip install opentelemetry-proto-json
+
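+Usage
+-----
+
+A minimal round-trip sketch using one of the generated messages; the
+class and its ``to_json``/``from_json`` helpers are part of this
+package, and the empty request is only for illustration::
+
+    from opentelemetry.proto_json.collector.logs.v1.logs_service import (
+        ExportLogsServiceRequest,
+    )
+
+    request = ExportLogsServiceRequest()
+    payload = request.to_json()  # "{}" for an empty request
+    restored = ExportLogsServiceRequest.from_json(payload)
+    assert restored == request
+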
+Code Generation
+---------------
+
+These files were generated automatically from code in opentelemetry-proto_.
+To regenerate the code, run ``../scripts/proto_codegen_json.sh``.
+
+To build against a new release or specific commit of opentelemetry-proto_,
+update the ``PROTO_REPO_BRANCH_OR_COMMIT`` variable in
+``../scripts/proto_codegen_json.sh``. Then run the script and commit the changes
+as well as any fixes needed in the OTLP exporter.
+
+.. _opentelemetry-proto: https://github.com/open-telemetry/opentelemetry-proto
+
+
+References
+----------
+
+* `OpenTelemetry Project `_
+* `OpenTelemetry Proto `_
+* `OTLP JSON Encoding Specification `_
diff --git a/opentelemetry-proto-json/pyproject.toml b/opentelemetry-proto-json/pyproject.toml
new file mode 100644
index 00000000000..d8555888d76
--- /dev/null
+++ b/opentelemetry-proto-json/pyproject.toml
@@ -0,0 +1,44 @@
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[project]
+name = "opentelemetry-proto-json"
+dynamic = ["version"]
+description = "OpenTelemetry Python Json Proto"
+readme = "README.rst"
+license = "Apache-2.0"
+requires-python = ">=3.9"
+authors = [
+ { name = "OpenTelemetry Authors", email = "cncf-opentelemetry-contributors@lists.cncf.io" },
+]
+classifiers = [
+ "Development Status :: 5 - Production/Stable",
+ "Framework :: OpenTelemetry",
+ "Intended Audience :: Developers",
+ "Programming Language :: Python",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.9",
+ "Programming Language :: Python :: 3.10",
+ "Programming Language :: Python :: 3.11",
+ "Programming Language :: Python :: 3.12",
+ "Programming Language :: Python :: 3.13",
+ "Programming Language :: Python :: 3.14",
+]
+dependencies = []
+
+[project.urls]
+Homepage = "https://github.com/open-telemetry/opentelemetry-python/tree/main/opentelemetry-proto-json"
+Repository = "https://github.com/open-telemetry/opentelemetry-python"
+
+[tool.hatch.version]
+path = "src/opentelemetry/proto_json/version/__init__.py"
+
+[tool.hatch.build.targets.sdist]
+include = [
+ "/src",
+ "/tests",
+]
+
+[tool.hatch.build.targets.wheel]
+packages = ["src/opentelemetry"]
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/__init__.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/_otlp_json_utils.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/_otlp_json_utils.py
new file mode 100644
index 00000000000..833245a3453
--- /dev/null
+++ b/opentelemetry-proto-json/src/opentelemetry/proto_json/_otlp_json_utils.py
@@ -0,0 +1,147 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import base64
+import math
+from typing import Any, Callable, List, Optional, TypeVar, Union
+
+T = TypeVar("T")
+
+
+def encode_hex(value: bytes) -> str:
+ """
+ Encode bytes as hex string.
+ Used for trace_id and span_id per OTLP spec.
+ """
+ return value.hex() if value else ""
+
+
+def encode_base64(value: bytes) -> str:
+ """
+ Encode bytes as base64 string.
+ Standard Proto3 JSON mapping for bytes.
+ """
+ return base64.b64encode(value).decode("utf-8") if value else ""
+
+
+def encode_int64(value: int) -> str:
+ """
+ Encode 64 bit integers as strings.
+ Required for int64, uint64, fixed64, sfixed64 and sint64 per Proto3 JSON spec.
+ """
+ return str(value)
+
+
+def encode_float(value: float) -> Union[float, str]:
+ """
+ Encode float/double values.
+ """
+ if math.isnan(value):
+ return "NaN"
+ if math.isinf(value):
+ return "Infinity" if value > 0 else "-Infinity"
+ return value
+
+
+def serialize_repeated(
+ values: List[Any], map_fn: Callable[[Any], Any]
+) -> List[Any]:
+ """Helper to serialize repeated fields."""
+ return [map_fn(v) for v in values] if values else []
+
+
+def validate_type(
+ value: Any, expected_types: Union[type, tuple[type, ...]], field_name: str
+) -> None:
+ """
+ Validate that a value is of the expected type(s).
+ Raises TypeError if validation fails.
+ """
+ if not isinstance(value, expected_types):
+ raise TypeError(
+ f"Field '{field_name}' expected {expected_types}, "
+ f"got {type(value).__name__}"
+ )
+
+
+def decode_hex(value: Optional[str], field_name: str) -> bytes:
+ """Decode hex string to bytes."""
+ if not value:
+ return b""
+ validate_type(value, str, field_name)
+ try:
+ return bytes.fromhex(value)
+ except ValueError as e:
+ raise ValueError(
+ f"Invalid hex string for field '{field_name}': {e}"
+ ) from None
+
+
+def decode_base64(value: Optional[str], field_name: str) -> bytes:
+ """Decode base64 string to bytes."""
+ if not value:
+ return b""
+ validate_type(value, str, field_name)
+ try:
+ return base64.b64decode(value)
+ except Exception as e:
+ raise ValueError(
+ f"Invalid base64 string for field '{field_name}': {e}"
+ ) from None
+
+
+def parse_int64(value: Optional[Union[int, str]], field_name: str) -> int:
+ """Parse 64-bit integer from string or number."""
+ if value is None:
+ return 0
+ validate_type(value, (int, str), field_name)
+ try:
+ return int(value)
+ except (ValueError, TypeError):
+ raise ValueError(
+ f"Invalid int64 value for field '{field_name}': {value}"
+ ) from None
+
+
+def parse_float(
+ value: Optional[Union[float, int, str]], field_name: str
+) -> float:
+ """Parse float/double from number or special string."""
+ if value is None:
+ return 0.0
+ validate_type(value, (float, int, str), field_name)
+ if value == "NaN":
+ return math.nan
+ if value == "Infinity":
+ return math.inf
+ if value == "-Infinity":
+ return -math.inf
+ try:
+ return float(value)
+ except (ValueError, TypeError):
+ raise ValueError(
+ f"Invalid float value for field '{field_name}': {value}"
+ ) from None
+
+
+def deserialize_repeated(
+ values: Optional[List[Any]],
+ item_parser: Callable[[Any], T],
+ field_name: str,
+) -> List[T]:
+ """Helper to deserialize repeated fields."""
+ if values is None:
+ return []
+ validate_type(values, list, field_name)
+ return [item_parser(v) for v in values]
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/__init__.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/logs/__init__.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/logs/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/logs/v1/__init__.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/logs/v1/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/logs/v1/logs_service.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/logs/v1/logs_service.py
new file mode 100644
index 00000000000..3ce7651fbbd
--- /dev/null
+++ b/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/logs/v1/logs_service.py
@@ -0,0 +1,228 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# AUTO-GENERATED from "opentelemetry/proto/collector/logs/v1/logs_service.proto"
+# DO NOT EDIT MANUALLY
+
+from __future__ import annotations
+
+import builtins
+import dataclasses
+import functools
+import json
+import sys
+import typing
+
+if sys.version_info >= (3, 10):
+ _dataclass = functools.partial(dataclasses.dataclass, slots=True)
+else:
+ _dataclass = dataclasses.dataclass
+
+import opentelemetry.proto_json._otlp_json_utils as _utils
+import opentelemetry.proto_json.logs.v1.logs
+
+
+@typing.final
+@_dataclass
+class ExportLogsServiceRequest:
+ """
+ Generated from protobuf message ExportLogsServiceRequest
+ """
+
+ resource_logs: builtins.list[opentelemetry.proto_json.logs.v1.logs.ResourceLogs] = dataclasses.field(default_factory=builtins.list)
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.resource_logs:
+ _result["resourceLogs"] = _utils.serialize_repeated(self.resource_logs, lambda _v: _v.to_dict())
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ExportLogsServiceRequest":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ExportLogsServiceRequest instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("resourceLogs")) is not None:
+ _args["resource_logs"] = _utils.deserialize_repeated(_value, lambda _v: opentelemetry.proto_json.logs.v1.logs.ResourceLogs.from_dict(_v), "resource_logs")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ExportLogsServiceRequest":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class ExportLogsServiceResponse:
+ """
+ Generated from protobuf message ExportLogsServiceResponse
+ """
+
+ partial_success: typing.Optional[ExportLogsPartialSuccess] = None
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.partial_success:
+ _result["partialSuccess"] = self.partial_success.to_dict()
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ExportLogsServiceResponse":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ExportLogsServiceResponse instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("partialSuccess")) is not None:
+ _args["partial_success"] = ExportLogsPartialSuccess.from_dict(_value)
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ExportLogsServiceResponse":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class ExportLogsPartialSuccess:
+ """
+ Generated from protobuf message ExportLogsPartialSuccess
+ """
+
+ rejected_log_records: typing.Optional[builtins.int] = 0
+ error_message: typing.Optional[builtins.str] = ""
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.rejected_log_records:
+ _result["rejectedLogRecords"] = _utils.encode_int64(self.rejected_log_records)
+ if self.error_message:
+ _result["errorMessage"] = self.error_message
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ExportLogsPartialSuccess":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ExportLogsPartialSuccess instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("rejectedLogRecords")) is not None:
+ _args["rejected_log_records"] = _utils.parse_int64(_value, "rejected_log_records")
+ if (_value := data.get("errorMessage")) is not None:
+ _utils.validate_type(_value, builtins.str, "error_message")
+ _args["error_message"] = _value
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ExportLogsPartialSuccess":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
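
A minimal usage sketch for the generated logs-service types (illustrative only, not part of the generated file; it assumes the module is importable as opentelemetry.proto_json.collector.logs.v1.logs_service and that the _utils int64 helpers round-trip 64-bit values as decimal strings, per the OTLP JSON mapping):

    from opentelemetry.proto_json.collector.logs.v1.logs_service import (
        ExportLogsPartialSuccess,
        ExportLogsServiceResponse,
    )

    # Build a response reporting three rejected log records.
    response = ExportLogsServiceResponse(
        partial_success=ExportLogsPartialSuccess(
            rejected_log_records=3,
            error_message="attribute limit exceeded",
        )
    )

    payload = response.to_json()
    # payload looks like (int64 values rendered as decimal strings):
    # {"partialSuccess": {"rejectedLogRecords": "3", "errorMessage": "attribute limit exceeded"}}

    # Dataclass equality makes the round trip easy to verify.
    assert ExportLogsServiceResponse.from_json(payload) == response
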
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/metrics/__init__.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/metrics/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/metrics/v1/__init__.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/metrics/v1/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/metrics/v1/metrics_service.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/metrics/v1/metrics_service.py
new file mode 100644
index 00000000000..c46ff5963a8
--- /dev/null
+++ b/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/metrics/v1/metrics_service.py
@@ -0,0 +1,228 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# AUTO-GENERATED from "opentelemetry/proto/collector/metrics/v1/metrics_service.proto"
+# DO NOT EDIT MANUALLY
+
+from __future__ import annotations
+
+import builtins
+import dataclasses
+import functools
+import json
+import sys
+import typing
+
+if sys.version_info >= (3, 10):
+ _dataclass = functools.partial(dataclasses.dataclass, slots=True)
+else:
+ _dataclass = dataclasses.dataclass
+
+import opentelemetry.proto_json._otlp_json_utils as _utils
+import opentelemetry.proto_json.metrics.v1.metrics
+
+
+@typing.final
+@_dataclass
+class ExportMetricsServiceRequest:
+ """
+ Generated from protobuf message ExportMetricsServiceRequest
+ """
+
+ resource_metrics: builtins.list[opentelemetry.proto_json.metrics.v1.metrics.ResourceMetrics] = dataclasses.field(default_factory=builtins.list)
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.resource_metrics:
+ _result["resourceMetrics"] = _utils.serialize_repeated(self.resource_metrics, lambda _v: _v.to_dict())
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ExportMetricsServiceRequest":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ExportMetricsServiceRequest instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("resourceMetrics")) is not None:
+ _args["resource_metrics"] = _utils.deserialize_repeated(_value, lambda _v: opentelemetry.proto_json.metrics.v1.metrics.ResourceMetrics.from_dict(_v), "resource_metrics")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ExportMetricsServiceRequest":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class ExportMetricsServiceResponse:
+ """
+ Generated from protobuf message ExportMetricsServiceResponse
+ """
+
+ partial_success: typing.Optional[ExportMetricsPartialSuccess] = None
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.partial_success:
+ _result["partialSuccess"] = self.partial_success.to_dict()
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ExportMetricsServiceResponse":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ExportMetricsServiceResponse instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("partialSuccess")) is not None:
+ _args["partial_success"] = ExportMetricsPartialSuccess.from_dict(_value)
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ExportMetricsServiceResponse":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class ExportMetricsPartialSuccess:
+ """
+ Generated from protobuf message ExportMetricsPartialSuccess
+ """
+
+ rejected_data_points: typing.Optional[builtins.int] = 0
+ error_message: typing.Optional[builtins.str] = ""
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.rejected_data_points:
+ _result["rejectedDataPoints"] = _utils.encode_int64(self.rejected_data_points)
+ if self.error_message:
+ _result["errorMessage"] = self.error_message
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ExportMetricsPartialSuccess":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ExportMetricsPartialSuccess instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("rejectedDataPoints")) is not None:
+ _args["rejected_data_points"] = _utils.parse_int64(_value, "rejected_data_points")
+ if (_value := data.get("errorMessage")) is not None:
+ _utils.validate_type(_value, builtins.str, "error_message")
+ _args["error_message"] = _value
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ExportMetricsPartialSuccess":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
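
The metrics service mirrors the logs service; one convention worth highlighting is that proto3 defaults are treated as unset, so an empty request serializes to the empty JSON object. A quick sketch (module path assumed from the diff header above):

    from opentelemetry.proto_json.collector.metrics.v1.metrics_service import (
        ExportMetricsServiceRequest,
    )

    request = ExportMetricsServiceRequest()   # resource_metrics defaults to []
    assert request.to_json() == "{}"          # unset fields are omitted entirely
    assert ExportMetricsServiceRequest.from_json("{}") == request
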
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/profiles/__init__.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/profiles/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/profiles/v1development/__init__.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/profiles/v1development/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/profiles/v1development/profiles_service.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/profiles/v1development/profiles_service.py
new file mode 100644
index 00000000000..0d70ba65563
--- /dev/null
+++ b/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/profiles/v1development/profiles_service.py
@@ -0,0 +1,233 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# AUTO-GENERATED from "opentelemetry/proto/collector/profiles/v1development/profiles_service.proto"
+# DO NOT EDIT MANUALLY
+
+from __future__ import annotations
+
+import builtins
+import dataclasses
+import functools
+import json
+import sys
+import typing
+
+if sys.version_info >= (3, 10):
+ _dataclass = functools.partial(dataclasses.dataclass, slots=True)
+else:
+ _dataclass = dataclasses.dataclass
+
+import opentelemetry.proto_json._otlp_json_utils as _utils
+import opentelemetry.proto_json.profiles.v1development.profiles
+
+
+@typing.final
+@_dataclass
+class ExportProfilesServiceRequest:
+ """
+ Generated from protobuf message ExportProfilesServiceRequest
+ """
+
+ resource_profiles: builtins.list[opentelemetry.proto_json.profiles.v1development.profiles.ResourceProfiles] = dataclasses.field(default_factory=builtins.list)
+ dictionary: typing.Optional[opentelemetry.proto_json.profiles.v1development.profiles.ProfilesDictionary] = None
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.resource_profiles:
+ _result["resourceProfiles"] = _utils.serialize_repeated(self.resource_profiles, lambda _v: _v.to_dict())
+ if self.dictionary:
+ _result["dictionary"] = self.dictionary.to_dict()
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ExportProfilesServiceRequest":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ExportProfilesServiceRequest instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("resourceProfiles")) is not None:
+ _args["resource_profiles"] = _utils.deserialize_repeated(_value, lambda _v: opentelemetry.proto_json.profiles.v1development.profiles.ResourceProfiles.from_dict(_v), "resource_profiles")
+ if (_value := data.get("dictionary")) is not None:
+ _args["dictionary"] = opentelemetry.proto_json.profiles.v1development.profiles.ProfilesDictionary.from_dict(_value)
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ExportProfilesServiceRequest":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class ExportProfilesServiceResponse:
+ """
+ Generated from protobuf message ExportProfilesServiceResponse
+ """
+
+ partial_success: typing.Optional[ExportProfilesPartialSuccess] = None
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.partial_success:
+ _result["partialSuccess"] = self.partial_success.to_dict()
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ExportProfilesServiceResponse":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ExportProfilesServiceResponse instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("partialSuccess")) is not None:
+ _args["partial_success"] = ExportProfilesPartialSuccess.from_dict(_value)
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ExportProfilesServiceResponse":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class ExportProfilesPartialSuccess:
+ """
+ Generated from protobuf message ExportProfilesPartialSuccess
+ """
+
+ rejected_profiles: typing.Optional[builtins.int] = 0
+ error_message: typing.Optional[builtins.str] = ""
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.rejected_profiles:
+ _result["rejectedProfiles"] = _utils.encode_int64(self.rejected_profiles)
+ if self.error_message:
+ _result["errorMessage"] = self.error_message
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ExportProfilesPartialSuccess":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ExportProfilesPartialSuccess instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("rejectedProfiles")) is not None:
+ _args["rejected_profiles"] = _utils.parse_int64(_value, "rejected_profiles")
+ if (_value := data.get("errorMessage")) is not None:
+ _utils.validate_type(_value, builtins.str, "error_message")
+ _args["error_message"] = _value
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ExportProfilesPartialSuccess":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
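
Unlike the other Export*ServiceRequest messages, the profiles request carries an optional ProfilesDictionary alongside the repeated resource_profiles. Both follow the same omit-when-unset rule, as this sketch shows (illustrative only; module path assumed from the diff header):

    from opentelemetry.proto_json.collector.profiles.v1development.profiles_service import (
        ExportProfilesServiceRequest,
    )

    request = ExportProfilesServiceRequest()  # resource_profiles=[], dictionary=None
    assert request.to_dict() == {}            # neither field is emitted when unset
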
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/trace/__init__.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/trace/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/trace/v1/__init__.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/trace/v1/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/trace/v1/trace_service.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/trace/v1/trace_service.py
new file mode 100644
index 00000000000..cc71f42da73
--- /dev/null
+++ b/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/trace/v1/trace_service.py
@@ -0,0 +1,228 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# AUTO-GENERATED from "opentelemetry/proto/collector/trace/v1/trace_service.proto"
+# DO NOT EDIT MANUALLY
+
+from __future__ import annotations
+
+import builtins
+import dataclasses
+import functools
+import json
+import sys
+import typing
+
+if sys.version_info >= (3, 10):
+ _dataclass = functools.partial(dataclasses.dataclass, slots=True)
+else:
+ _dataclass = dataclasses.dataclass
+
+import opentelemetry.proto_json._otlp_json_utils as _utils
+import opentelemetry.proto_json.trace.v1.trace
+
+
+@typing.final
+@_dataclass
+class ExportTraceServiceRequest:
+ """
+ Generated from protobuf message ExportTraceServiceRequest
+ """
+
+ resource_spans: builtins.list[opentelemetry.proto_json.trace.v1.trace.ResourceSpans] = dataclasses.field(default_factory=builtins.list)
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.resource_spans:
+ _result["resourceSpans"] = _utils.serialize_repeated(self.resource_spans, lambda _v: _v.to_dict())
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ExportTraceServiceRequest":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ExportTraceServiceRequest instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("resourceSpans")) is not None:
+ _args["resource_spans"] = _utils.deserialize_repeated(_value, lambda _v: opentelemetry.proto_json.trace.v1.trace.ResourceSpans.from_dict(_v), "resource_spans")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ExportTraceServiceRequest":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class ExportTraceServiceResponse:
+ """
+ Generated from protobuf message ExportTraceServiceResponse
+ """
+
+ partial_success: typing.Optional[ExportTracePartialSuccess] = None
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.partial_success:
+ _result["partialSuccess"] = self.partial_success.to_dict()
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ExportTraceServiceResponse":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ExportTraceServiceResponse instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("partialSuccess")) is not None:
+ _args["partial_success"] = ExportTracePartialSuccess.from_dict(_value)
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ExportTraceServiceResponse":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class ExportTracePartialSuccess:
+ """
+ Generated from protobuf message ExportTracePartialSuccess
+ """
+
+ rejected_spans: typing.Optional[builtins.int] = 0
+ error_message: typing.Optional[builtins.str] = ""
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.rejected_spans:
+ _result["rejectedSpans"] = _utils.encode_int64(self.rejected_spans)
+ if self.error_message:
+ _result["errorMessage"] = self.error_message
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ExportTracePartialSuccess":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ExportTracePartialSuccess instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("rejectedSpans")) is not None:
+ _args["rejected_spans"] = _utils.parse_int64(_value, "rejected_spans")
+ if (_value := data.get("errorMessage")) is not None:
+ _utils.validate_type(_value, builtins.str, "error_message")
+ _args["error_message"] = _value
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ExportTracePartialSuccess":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
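
The same default-omission applies to scalar fields: a zero rejected_spans count disappears from the output, while a non-zero count passes through _utils.encode_int64 (assumed here to produce a decimal string, per the OTLP JSON mapping):

    from opentelemetry.proto_json.collector.trace.v1.trace_service import (
        ExportTracePartialSuccess,
    )

    # 0 is the proto3 default, so it is dropped from the output.
    assert ExportTracePartialSuccess(rejected_spans=0).to_json() == "{}"

    print(ExportTracePartialSuccess(rejected_spans=7).to_dict())
    # expected: {'rejectedSpans': '7'}
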
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/common/__init__.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/common/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/common/v1/__init__.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/common/v1/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/common/v1/common.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/common/v1/common.py
new file mode 100644
index 00000000000..adadf576822
--- /dev/null
+++ b/opentelemetry-proto-json/src/opentelemetry/proto_json/common/v1/common.py
@@ -0,0 +1,483 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# AUTO-GENERATED from "opentelemetry/proto/common/v1/common.proto"
+# DO NOT EDIT MANUALLY
+
+from __future__ import annotations
+
+import builtins
+import dataclasses
+import functools
+import json
+import sys
+import typing
+
+if sys.version_info >= (3, 10):
+ _dataclass = functools.partial(dataclasses.dataclass, slots=True)
+else:
+ _dataclass = dataclasses.dataclass
+
+import opentelemetry.proto_json._otlp_json_utils as _utils
+
+
+@typing.final
+@_dataclass
+class AnyValue:
+ """
+ Generated from protobuf message AnyValue
+ """
+
+ string_value: typing.Optional[builtins.str] = None
+ bool_value: typing.Optional[builtins.bool] = None
+ int_value: typing.Optional[builtins.int] = None
+ double_value: typing.Optional[builtins.float] = None
+ array_value: typing.Optional[ArrayValue] = None
+ kvlist_value: typing.Optional[KeyValueList] = None
+ bytes_value: typing.Optional[builtins.bytes] = None
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.bytes_value is not None:
+ _result["bytesValue"] = _utils.encode_base64(self.bytes_value)
+ elif self.kvlist_value is not None:
+ _result["kvlistValue"] = self.kvlist_value.to_dict()
+ elif self.array_value is not None:
+ _result["arrayValue"] = self.array_value.to_dict()
+ elif self.double_value is not None:
+ _result["doubleValue"] = _utils.encode_float(self.double_value)
+ elif self.int_value is not None:
+ _result["intValue"] = _utils.encode_int64(self.int_value)
+ elif self.bool_value is not None:
+ _result["boolValue"] = self.bool_value
+ elif self.string_value is not None:
+ _result["stringValue"] = self.string_value
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "AnyValue":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ AnyValue instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("bytesValue")) is not None:
+ _args["bytes_value"] = _utils.decode_base64(_value, "bytes_value")
+ elif (_value := data.get("kvlistValue")) is not None:
+ _args["kvlist_value"] = KeyValueList.from_dict(_value)
+ elif (_value := data.get("arrayValue")) is not None:
+ _args["array_value"] = ArrayValue.from_dict(_value)
+ elif (_value := data.get("doubleValue")) is not None:
+ _args["double_value"] = _utils.parse_float(_value, "double_value")
+ elif (_value := data.get("intValue")) is not None:
+ _args["int_value"] = _utils.parse_int64(_value, "int_value")
+ elif (_value := data.get("boolValue")) is not None:
+ _utils.validate_type(_value, builtins.bool, "bool_value")
+ _args["bool_value"] = _value
+ elif (_value := data.get("stringValue")) is not None:
+ _utils.validate_type(_value, builtins.str, "string_value")
+ _args["string_value"] = _value
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "AnyValue":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class ArrayValue:
+ """
+ Generated from protobuf message ArrayValue
+ """
+
+ values: builtins.list[AnyValue] = dataclasses.field(default_factory=builtins.list)
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.values:
+ _result["values"] = _utils.serialize_repeated(self.values, lambda _v: _v.to_dict())
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ArrayValue":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ArrayValue instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("values")) is not None:
+ _args["values"] = _utils.deserialize_repeated(_value, lambda _v: AnyValue.from_dict(_v), "values")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ArrayValue":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class KeyValueList:
+ """
+ Generated from protobuf message KeyValueList
+ """
+
+ values: builtins.list[KeyValue] = dataclasses.field(default_factory=builtins.list)
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.values:
+ _result["values"] = _utils.serialize_repeated(self.values, lambda _v: _v.to_dict())
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "KeyValueList":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ KeyValueList instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("values")) is not None:
+ _args["values"] = _utils.deserialize_repeated(_value, lambda _v: KeyValue.from_dict(_v), "values")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "KeyValueList":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class KeyValue:
+ """
+ Generated from protobuf message KeyValue
+ """
+
+ key: typing.Optional[builtins.str] = ""
+ value: typing.Optional[AnyValue] = None
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.key:
+ _result["key"] = self.key
+ if self.value:
+ _result["value"] = self.value.to_dict()
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "KeyValue":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ KeyValue instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("key")) is not None:
+ _utils.validate_type(_value, builtins.str, "key")
+ _args["key"] = _value
+ if (_value := data.get("value")) is not None:
+ _args["value"] = AnyValue.from_dict(_value)
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "KeyValue":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class InstrumentationScope:
+ """
+ Generated from protobuf message InstrumentationScope
+ """
+
+ name: typing.Optional[builtins.str] = ""
+ version: typing.Optional[builtins.str] = ""
+ attributes: builtins.list[KeyValue] = dataclasses.field(default_factory=builtins.list)
+ dropped_attributes_count: typing.Optional[builtins.int] = 0
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.name:
+ _result["name"] = self.name
+ if self.version:
+ _result["version"] = self.version
+ if self.attributes:
+ _result["attributes"] = _utils.serialize_repeated(self.attributes, lambda _v: _v.to_dict())
+ if self.dropped_attributes_count:
+ _result["droppedAttributesCount"] = self.dropped_attributes_count
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "InstrumentationScope":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ InstrumentationScope instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("name")) is not None:
+ _utils.validate_type(_value, builtins.str, "name")
+ _args["name"] = _value
+ if (_value := data.get("version")) is not None:
+ _utils.validate_type(_value, builtins.str, "version")
+ _args["version"] = _value
+ if (_value := data.get("attributes")) is not None:
+ _args["attributes"] = _utils.deserialize_repeated(_value, lambda _v: KeyValue.from_dict(_v), "attributes")
+ if (_value := data.get("droppedAttributesCount")) is not None:
+ _utils.validate_type(_value, builtins.int, "dropped_attributes_count")
+ _args["dropped_attributes_count"] = _value
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "InstrumentationScope":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class EntityRef:
+ """
+ Generated from protobuf message EntityRef
+ """
+
+ schema_url: typing.Optional[builtins.str] = ""
+ type: typing.Optional[builtins.str] = ""
+ id_keys: builtins.list[builtins.str] = dataclasses.field(default_factory=builtins.list)
+ description_keys: builtins.list[builtins.str] = dataclasses.field(default_factory=builtins.list)
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.schema_url:
+ _result["schemaUrl"] = self.schema_url
+ if self.type:
+ _result["type"] = self.type
+ if self.id_keys:
+ _result["idKeys"] = self.id_keys
+ if self.description_keys:
+ _result["descriptionKeys"] = self.description_keys
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "EntityRef":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ EntityRef instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("schemaUrl")) is not None:
+ _utils.validate_type(_value, builtins.str, "schema_url")
+ _args["schema_url"] = _value
+ if (_value := data.get("type")) is not None:
+ _utils.validate_type(_value, builtins.str, "type")
+ _args["type"] = _value
+ if (_value := data.get("idKeys")) is not None:
+ _args["id_keys"] = _utils.deserialize_repeated(_value, lambda _v: _v, "id_keys")
+ if (_value := data.get("descriptionKeys")) is not None:
+ _args["description_keys"] = _utils.deserialize_repeated(_value, lambda _v: _v, "description_keys")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "EntityRef":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
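
AnyValue models the protobuf oneof: exactly one of its *_value fields should be set, and to_dict()/from_dict() read and write a single corresponding key. A sketch of the common attribute pattern (illustrative only; the base64 output assumes _utils.encode_base64 uses standard base64, per the proto3 JSON mapping):

    from opentelemetry.proto_json.common.v1.common import AnyValue, KeyValue

    attr = KeyValue(key="http.method", value=AnyValue(string_value="GET"))
    assert attr.to_dict() == {"key": "http.method", "value": {"stringValue": "GET"}}
    assert KeyValue.from_dict(attr.to_dict()) == attr

    print(AnyValue(bytes_value=b"\x01\x02").to_dict())
    # expected: {'bytesValue': 'AQI='}
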
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/logs/__init__.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/logs/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/logs/v1/__init__.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/logs/v1/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/logs/v1/logs.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/logs/v1/logs.py
new file mode 100644
index 00000000000..09ab05e6ce9
--- /dev/null
+++ b/opentelemetry-proto-json/src/opentelemetry/proto_json/logs/v1/logs.py
@@ -0,0 +1,405 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# AUTO-GENERATED from "opentelemetry/proto/logs/v1/logs.proto"
+# DO NOT EDIT MANUALLY
+
+from __future__ import annotations
+
+import builtins
+import dataclasses
+import enum
+import functools
+import json
+import sys
+import typing
+
+if sys.version_info >= (3, 10):
+ _dataclass = functools.partial(dataclasses.dataclass, slots=True)
+else:
+ _dataclass = dataclasses.dataclass
+
+import opentelemetry.proto_json._otlp_json_utils as _utils
+import opentelemetry.proto_json.common.v1.common
+import opentelemetry.proto_json.resource.v1.resource
+
+
+@typing.final
+class SeverityNumber(enum.IntEnum):
+ """
+ Generated from protobuf enum SeverityNumber
+ """
+
+ SEVERITY_NUMBER_UNSPECIFIED = 0
+ SEVERITY_NUMBER_TRACE = 1
+ SEVERITY_NUMBER_TRACE2 = 2
+ SEVERITY_NUMBER_TRACE3 = 3
+ SEVERITY_NUMBER_TRACE4 = 4
+ SEVERITY_NUMBER_DEBUG = 5
+ SEVERITY_NUMBER_DEBUG2 = 6
+ SEVERITY_NUMBER_DEBUG3 = 7
+ SEVERITY_NUMBER_DEBUG4 = 8
+ SEVERITY_NUMBER_INFO = 9
+ SEVERITY_NUMBER_INFO2 = 10
+ SEVERITY_NUMBER_INFO3 = 11
+ SEVERITY_NUMBER_INFO4 = 12
+ SEVERITY_NUMBER_WARN = 13
+ SEVERITY_NUMBER_WARN2 = 14
+ SEVERITY_NUMBER_WARN3 = 15
+ SEVERITY_NUMBER_WARN4 = 16
+ SEVERITY_NUMBER_ERROR = 17
+ SEVERITY_NUMBER_ERROR2 = 18
+ SEVERITY_NUMBER_ERROR3 = 19
+ SEVERITY_NUMBER_ERROR4 = 20
+ SEVERITY_NUMBER_FATAL = 21
+ SEVERITY_NUMBER_FATAL2 = 22
+ SEVERITY_NUMBER_FATAL3 = 23
+ SEVERITY_NUMBER_FATAL4 = 24
+
+@typing.final
+class LogRecordFlags(enum.IntEnum):
+ """
+ Generated from protobuf enum LogRecordFlags
+ """
+
+ LOG_RECORD_FLAGS_DO_NOT_USE = 0
+ LOG_RECORD_FLAGS_TRACE_FLAGS_MASK = 255
+
+@typing.final
+@_dataclass
+class LogsData:
+ """
+ Generated from protobuf message LogsData
+ """
+
+ resource_logs: builtins.list[ResourceLogs] = dataclasses.field(default_factory=builtins.list)
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.resource_logs:
+ _result["resourceLogs"] = _utils.serialize_repeated(self.resource_logs, lambda _v: _v.to_dict())
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "LogsData":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ LogsData instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("resourceLogs")) is not None:
+ _args["resource_logs"] = _utils.deserialize_repeated(_value, lambda _v: ResourceLogs.from_dict(_v), "resource_logs")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "LogsData":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class ResourceLogs:
+ """
+ Generated from protobuf message ResourceLogs
+ """
+
+ resource: typing.Optional[opentelemetry.proto_json.resource.v1.resource.Resource] = None
+ scope_logs: builtins.list[ScopeLogs] = dataclasses.field(default_factory=builtins.list)
+ schema_url: typing.Optional[builtins.str] = ""
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.resource:
+ _result["resource"] = self.resource.to_dict()
+ if self.scope_logs:
+ _result["scopeLogs"] = _utils.serialize_repeated(self.scope_logs, lambda _v: _v.to_dict())
+ if self.schema_url:
+ _result["schemaUrl"] = self.schema_url
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ResourceLogs":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ResourceLogs instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("resource")) is not None:
+ _args["resource"] = opentelemetry.proto_json.resource.v1.resource.Resource.from_dict(_value)
+ if (_value := data.get("scopeLogs")) is not None:
+ _args["scope_logs"] = _utils.deserialize_repeated(_value, lambda _v: ScopeLogs.from_dict(_v), "scope_logs")
+ if (_value := data.get("schemaUrl")) is not None:
+ _utils.validate_type(_value, builtins.str, "schema_url")
+ _args["schema_url"] = _value
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ResourceLogs":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class ScopeLogs:
+ """
+ Generated from protobuf message ScopeLogs
+ """
+
+ scope: typing.Optional[opentelemetry.proto_json.common.v1.common.InstrumentationScope] = None
+ log_records: builtins.list[LogRecord] = dataclasses.field(default_factory=builtins.list)
+ schema_url: typing.Optional[builtins.str] = ""
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.scope:
+ _result["scope"] = self.scope.to_dict()
+ if self.log_records:
+ _result["logRecords"] = _utils.serialize_repeated(self.log_records, lambda _v: _v.to_dict())
+ if self.schema_url:
+ _result["schemaUrl"] = self.schema_url
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ScopeLogs":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ScopeLogs instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("scope")) is not None:
+ _args["scope"] = opentelemetry.proto_json.common.v1.common.InstrumentationScope.from_dict(_value)
+ if (_value := data.get("logRecords")) is not None:
+ _args["log_records"] = _utils.deserialize_repeated(_value, lambda _v: LogRecord.from_dict(_v), "log_records")
+ if (_value := data.get("schemaUrl")) is not None:
+ _utils.validate_type(_value, builtins.str, "schema_url")
+ _args["schema_url"] = _value
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ScopeLogs":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class LogRecord:
+ """
+ Generated from protobuf message LogRecord
+ """
+
+ time_unix_nano: typing.Optional[builtins.int] = 0
+ observed_time_unix_nano: typing.Optional[builtins.int] = 0
+ severity_number: typing.Union[SeverityNumber, builtins.int, None] = 0
+ severity_text: typing.Optional[builtins.str] = ""
+ body: typing.Optional[opentelemetry.proto_json.common.v1.common.AnyValue] = None
+ attributes: builtins.list[opentelemetry.proto_json.common.v1.common.KeyValue] = dataclasses.field(default_factory=builtins.list)
+ dropped_attributes_count: typing.Optional[builtins.int] = 0
+ flags: typing.Optional[builtins.int] = 0
+ trace_id: typing.Optional[builtins.bytes] = b""
+ span_id: typing.Optional[builtins.bytes] = b""
+ event_name: typing.Optional[builtins.str] = ""
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.time_unix_nano:
+ _result["timeUnixNano"] = _utils.encode_int64(self.time_unix_nano)
+ if self.observed_time_unix_nano:
+ _result["observedTimeUnixNano"] = _utils.encode_int64(self.observed_time_unix_nano)
+ if self.severity_number:
+ _result["severityNumber"] = builtins.int(self.severity_number)
+ if self.severity_text:
+ _result["severityText"] = self.severity_text
+ if self.body:
+ _result["body"] = self.body.to_dict()
+ if self.attributes:
+ _result["attributes"] = _utils.serialize_repeated(self.attributes, lambda _v: _v.to_dict())
+ if self.dropped_attributes_count:
+ _result["droppedAttributesCount"] = self.dropped_attributes_count
+ if self.flags:
+ _result["flags"] = self.flags
+ if self.trace_id:
+ _result["traceId"] = _utils.encode_hex(self.trace_id)
+ if self.span_id:
+ _result["spanId"] = _utils.encode_hex(self.span_id)
+ if self.event_name:
+ _result["eventName"] = self.event_name
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "LogRecord":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ LogRecord instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("timeUnixNano")) is not None:
+ _args["time_unix_nano"] = _utils.parse_int64(_value, "time_unix_nano")
+ if (_value := data.get("observedTimeUnixNano")) is not None:
+ _args["observed_time_unix_nano"] = _utils.parse_int64(_value, "observed_time_unix_nano")
+ if (_value := data.get("severityNumber")) is not None:
+ _utils.validate_type(_value, builtins.int, "severity_number")
+ _args["severity_number"] = SeverityNumber(_value)
+ if (_value := data.get("severityText")) is not None:
+ _utils.validate_type(_value, builtins.str, "severity_text")
+ _args["severity_text"] = _value
+ if (_value := data.get("body")) is not None:
+ _args["body"] = opentelemetry.proto_json.common.v1.common.AnyValue.from_dict(_value)
+ if (_value := data.get("attributes")) is not None:
+ _args["attributes"] = _utils.deserialize_repeated(_value, lambda _v: opentelemetry.proto_json.common.v1.common.KeyValue.from_dict(_v), "attributes")
+ if (_value := data.get("droppedAttributesCount")) is not None:
+ _utils.validate_type(_value, builtins.int, "dropped_attributes_count")
+ _args["dropped_attributes_count"] = _value
+ if (_value := data.get("flags")) is not None:
+ _utils.validate_type(_value, builtins.int, "flags")
+ _args["flags"] = _value
+ if (_value := data.get("traceId")) is not None:
+ _args["trace_id"] = _utils.decode_hex(_value, "trace_id")
+ if (_value := data.get("spanId")) is not None:
+ _args["span_id"] = _utils.decode_hex(_value, "span_id")
+ if (_value := data.get("eventName")) is not None:
+ _utils.validate_type(_value, builtins.str, "event_name")
+ _args["event_name"] = _value
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "LogRecord":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
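
LogRecord combines several of the encodings above: enums are emitted as plain integers, timestamps pass through encode_int64, and trace/span ids through encode_hex (assumed to produce decimal strings and lowercase hex, respectively). A round-trip sketch:

    from opentelemetry.proto_json.logs.v1.logs import LogRecord, SeverityNumber

    record = LogRecord(
        time_unix_nano=1_700_000_000_000_000_000,
        severity_number=SeverityNumber.SEVERITY_NUMBER_INFO,
        severity_text="INFO",
        trace_id=bytes.fromhex("5b8efff798038103d269b633813fc60c"),
    )
    doc = record.to_dict()
    assert doc["severityNumber"] == 9          # enum serialized as a plain int
    assert doc["severityText"] == "INFO"

    restored = LogRecord.from_dict(doc)
    assert restored.severity_number is SeverityNumber.SEVERITY_NUMBER_INFO
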
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/metrics/__init__.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/metrics/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/metrics/v1/__init__.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/metrics/v1/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/metrics/v1/metrics.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/metrics/v1/metrics.py
new file mode 100644
index 00000000000..187786c2d30
--- /dev/null
+++ b/opentelemetry-proto-json/src/opentelemetry/proto_json/metrics/v1/metrics.py
@@ -0,0 +1,1365 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# AUTO-GENERATED from "opentelemetry/proto/metrics/v1/metrics.proto"
+# DO NOT EDIT MANUALLY
+
+from __future__ import annotations
+
+import builtins
+import dataclasses
+import enum
+import functools
+import json
+import sys
+import typing
+
+if sys.version_info >= (3, 10):
+ _dataclass = functools.partial(dataclasses.dataclass, slots=True)
+else:
+ _dataclass = dataclasses.dataclass
+
+import opentelemetry.proto_json._otlp_json_utils as _utils
+import opentelemetry.proto_json.common.v1.common
+import opentelemetry.proto_json.resource.v1.resource
+
+
+@typing.final
+class AggregationTemporality(enum.IntEnum):
+ """
+ Generated from protobuf enum AggregationTemporality
+ """
+
+ AGGREGATION_TEMPORALITY_UNSPECIFIED = 0
+ AGGREGATION_TEMPORALITY_DELTA = 1
+ AGGREGATION_TEMPORALITY_CUMULATIVE = 2
+
+@typing.final
+class DataPointFlags(enum.IntEnum):
+ """
+ Generated from protobuf enum DataPointFlags
+ """
+
+ DATA_POINT_FLAGS_DO_NOT_USE = 0
+ DATA_POINT_FLAGS_NO_RECORDED_VALUE_MASK = 1
+
+@typing.final
+@_dataclass
+class MetricsData:
+ """
+ Generated from protobuf message MetricsData
+ """
+
+ resource_metrics: builtins.list[ResourceMetrics] = dataclasses.field(default_factory=builtins.list)
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.resource_metrics:
+ _result["resourceMetrics"] = _utils.serialize_repeated(self.resource_metrics, lambda _v: _v.to_dict())
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "MetricsData":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ MetricsData instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("resourceMetrics")) is not None:
+ _args["resource_metrics"] = _utils.deserialize_repeated(_value, lambda _v: ResourceMetrics.from_dict(_v), "resource_metrics")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "MetricsData":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class ResourceMetrics:
+ """
+ Generated from protobuf message ResourceMetrics
+ """
+
+ resource: typing.Optional[opentelemetry.proto_json.resource.v1.resource.Resource] = None
+ scope_metrics: builtins.list[ScopeMetrics] = dataclasses.field(default_factory=builtins.list)
+ schema_url: typing.Optional[builtins.str] = ""
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.resource:
+ _result["resource"] = self.resource.to_dict()
+ if self.scope_metrics:
+ _result["scopeMetrics"] = _utils.serialize_repeated(self.scope_metrics, lambda _v: _v.to_dict())
+ if self.schema_url:
+ _result["schemaUrl"] = self.schema_url
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ResourceMetrics":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ResourceMetrics instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("resource")) is not None:
+ _args["resource"] = opentelemetry.proto_json.resource.v1.resource.Resource.from_dict(_value)
+ if (_value := data.get("scopeMetrics")) is not None:
+ _args["scope_metrics"] = _utils.deserialize_repeated(_value, lambda _v: ScopeMetrics.from_dict(_v), "scope_metrics")
+ if (_value := data.get("schemaUrl")) is not None:
+ _utils.validate_type(_value, builtins.str, "schema_url")
+ _args["schema_url"] = _value
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ResourceMetrics":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class ScopeMetrics:
+ """
+ Generated from protobuf message ScopeMetrics
+ """
+
+ scope: typing.Optional[opentelemetry.proto_json.common.v1.common.InstrumentationScope] = None
+ metrics: builtins.list[Metric] = dataclasses.field(default_factory=builtins.list)
+ schema_url: typing.Optional[builtins.str] = ""
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.scope:
+ _result["scope"] = self.scope.to_dict()
+ if self.metrics:
+ _result["metrics"] = _utils.serialize_repeated(self.metrics, lambda _v: _v.to_dict())
+ if self.schema_url:
+ _result["schemaUrl"] = self.schema_url
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ScopeMetrics":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ScopeMetrics instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("scope")) is not None:
+ _args["scope"] = opentelemetry.proto_json.common.v1.common.InstrumentationScope.from_dict(_value)
+ if (_value := data.get("metrics")) is not None:
+ _args["metrics"] = _utils.deserialize_repeated(_value, lambda _v: Metric.from_dict(_v), "metrics")
+ if (_value := data.get("schemaUrl")) is not None:
+ _utils.validate_type(_value, builtins.str, "schema_url")
+ _args["schema_url"] = _value
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ScopeMetrics":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class Metric:
+ """
+ Generated from protobuf message Metric
+ """
+
+ name: typing.Optional[builtins.str] = ""
+ description: typing.Optional[builtins.str] = ""
+ unit: typing.Optional[builtins.str] = ""
+ gauge: typing.Optional[Gauge] = None
+ sum: typing.Optional[Sum] = None
+ histogram: typing.Optional[Histogram] = None
+ exponential_histogram: typing.Optional[ExponentialHistogram] = None
+ summary: typing.Optional[Summary] = None
+ metadata: builtins.list[opentelemetry.proto_json.common.v1.common.KeyValue] = dataclasses.field(default_factory=builtins.list)
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.name:
+ _result["name"] = self.name
+ if self.description:
+ _result["description"] = self.description
+ if self.unit:
+ _result["unit"] = self.unit
+ if self.metadata:
+ _result["metadata"] = _utils.serialize_repeated(self.metadata, lambda _v: _v.to_dict())
+ if self.summary is not None:
+ _result["summary"] = self.summary.to_dict()
+ elif self.exponential_histogram is not None:
+ _result["exponentialHistogram"] = self.exponential_histogram.to_dict()
+ elif self.histogram is not None:
+ _result["histogram"] = self.histogram.to_dict()
+ elif self.sum is not None:
+ _result["sum"] = self.sum.to_dict()
+ elif self.gauge is not None:
+ _result["gauge"] = self.gauge.to_dict()
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "Metric":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ Metric instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("name")) is not None:
+ _utils.validate_type(_value, builtins.str, "name")
+ _args["name"] = _value
+ if (_value := data.get("description")) is not None:
+ _utils.validate_type(_value, builtins.str, "description")
+ _args["description"] = _value
+ if (_value := data.get("unit")) is not None:
+ _utils.validate_type(_value, builtins.str, "unit")
+ _args["unit"] = _value
+ if (_value := data.get("metadata")) is not None:
+ _args["metadata"] = _utils.deserialize_repeated(_value, lambda _v: opentelemetry.proto_json.common.v1.common.KeyValue.from_dict(_v), "metadata")
+ if (_value := data.get("summary")) is not None:
+ _args["summary"] = Summary.from_dict(_value)
+ elif (_value := data.get("exponentialHistogram")) is not None:
+ _args["exponential_histogram"] = ExponentialHistogram.from_dict(_value)
+ elif (_value := data.get("histogram")) is not None:
+ _args["histogram"] = Histogram.from_dict(_value)
+ elif (_value := data.get("sum")) is not None:
+ _args["sum"] = Sum.from_dict(_value)
+ elif (_value := data.get("gauge")) is not None:
+ _args["gauge"] = Gauge.from_dict(_value)
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "Metric":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
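+# Editor's note (illustrative sketch, not generator output): the metric "data"
+# oneof means at most one of gauge/sum/histogram/exponential_histogram/summary
+# appears in the JSON; the if/elif chains above enforce this, preferring the
+# last-declared field if several were (incorrectly) set at once:
+#
+#     _metric = Metric(name="http.requests", unit="1", sum=Sum(is_monotonic=True))
+#     _d = _metric.to_dict()
+#     assert "sum" in _d and "gauge" not in _d
+#     assert Metric.from_dict(_d) == _metric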
+
+@typing.final
+@_dataclass
+class Gauge:
+ """
+ Generated from protobuf message Gauge
+ """
+
+ data_points: builtins.list[NumberDataPoint] = dataclasses.field(default_factory=builtins.list)
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.data_points:
+ _result["dataPoints"] = _utils.serialize_repeated(self.data_points, lambda _v: _v.to_dict())
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "Gauge":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ Gauge instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("dataPoints")) is not None:
+ _args["data_points"] = _utils.deserialize_repeated(_value, lambda _v: NumberDataPoint.from_dict(_v), "data_points")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "Gauge":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class Sum:
+ """
+ Generated from protobuf message Sum
+ """
+
+ data_points: builtins.list[NumberDataPoint] = dataclasses.field(default_factory=builtins.list)
+ aggregation_temporality: typing.Union[AggregationTemporality, builtins.int, None] = 0
+ is_monotonic: typing.Optional[builtins.bool] = False
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.data_points:
+ _result["dataPoints"] = _utils.serialize_repeated(self.data_points, lambda _v: _v.to_dict())
+ if self.aggregation_temporality:
+ _result["aggregationTemporality"] = builtins.int(self.aggregation_temporality)
+ if self.is_monotonic:
+ _result["isMonotonic"] = self.is_monotonic
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "Sum":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ Sum instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("dataPoints")) is not None:
+ _args["data_points"] = _utils.deserialize_repeated(_value, lambda _v: NumberDataPoint.from_dict(_v), "data_points")
+ if (_value := data.get("aggregationTemporality")) is not None:
+ _utils.validate_type(_value, builtins.int, "aggregation_temporality")
+ _args["aggregation_temporality"] = AggregationTemporality(_value)
+ if (_value := data.get("isMonotonic")) is not None:
+ _utils.validate_type(_value, builtins.bool, "is_monotonic")
+ _args["is_monotonic"] = _value
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "Sum":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
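+# Editor's note (illustrative only): enum fields are encoded as JSON numbers
+# and rehydrated into the IntEnum on deserialization; UNSPECIFIED (0) is
+# omitted like any other proto3 default:
+#
+#     _sum = Sum(aggregation_temporality=AggregationTemporality.AGGREGATION_TEMPORALITY_DELTA)
+#     assert _sum.to_dict() == {"aggregationTemporality": 1}
+#     assert Sum.from_dict(_sum.to_dict()).aggregation_temporality is AggregationTemporality.AGGREGATION_TEMPORALITY_DELTA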
+
+@typing.final
+@_dataclass
+class Histogram:
+ """
+ Generated from protobuf message Histogram
+ """
+
+ data_points: builtins.list[HistogramDataPoint] = dataclasses.field(default_factory=builtins.list)
+ aggregation_temporality: typing.Union[AggregationTemporality, builtins.int, None] = 0
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.data_points:
+ _result["dataPoints"] = _utils.serialize_repeated(self.data_points, lambda _v: _v.to_dict())
+ if self.aggregation_temporality:
+ _result["aggregationTemporality"] = builtins.int(self.aggregation_temporality)
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "Histogram":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ Histogram instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("dataPoints")) is not None:
+ _args["data_points"] = _utils.deserialize_repeated(_value, lambda _v: HistogramDataPoint.from_dict(_v), "data_points")
+ if (_value := data.get("aggregationTemporality")) is not None:
+ _utils.validate_type(_value, builtins.int, "aggregation_temporality")
+ _args["aggregation_temporality"] = AggregationTemporality(_value)
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "Histogram":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class ExponentialHistogram:
+ """
+ Generated from protobuf message ExponentialHistogram
+ """
+
+ data_points: builtins.list[ExponentialHistogramDataPoint] = dataclasses.field(default_factory=builtins.list)
+ aggregation_temporality: typing.Union[AggregationTemporality, builtins.int, None] = 0
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.data_points:
+ _result["dataPoints"] = _utils.serialize_repeated(self.data_points, lambda _v: _v.to_dict())
+ if self.aggregation_temporality:
+ _result["aggregationTemporality"] = builtins.int(self.aggregation_temporality)
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ExponentialHistogram":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ExponentialHistogram instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("dataPoints")) is not None:
+ _args["data_points"] = _utils.deserialize_repeated(_value, lambda _v: ExponentialHistogramDataPoint.from_dict(_v), "data_points")
+ if (_value := data.get("aggregationTemporality")) is not None:
+ _utils.validate_type(_value, builtins.int, "aggregation_temporality")
+ _args["aggregation_temporality"] = AggregationTemporality(_value)
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ExponentialHistogram":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class Summary:
+ """
+ Generated from protobuf message Summary
+ """
+
+ data_points: builtins.list[SummaryDataPoint] = dataclasses.field(default_factory=builtins.list)
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.data_points:
+ _result["dataPoints"] = _utils.serialize_repeated(self.data_points, lambda _v: _v.to_dict())
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "Summary":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ Summary instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("dataPoints")) is not None:
+ _args["data_points"] = _utils.deserialize_repeated(_value, lambda _v: SummaryDataPoint.from_dict(_v), "data_points")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "Summary":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class NumberDataPoint:
+ """
+ Generated from protobuf message NumberDataPoint
+ """
+
+ attributes: builtins.list[opentelemetry.proto_json.common.v1.common.KeyValue] = dataclasses.field(default_factory=builtins.list)
+ start_time_unix_nano: typing.Optional[builtins.int] = 0
+ time_unix_nano: typing.Optional[builtins.int] = 0
+ as_double: typing.Optional[builtins.float] = None
+ as_int: typing.Optional[builtins.int] = None
+ exemplars: builtins.list[Exemplar] = dataclasses.field(default_factory=builtins.list)
+ flags: typing.Optional[builtins.int] = 0
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.attributes:
+ _result["attributes"] = _utils.serialize_repeated(self.attributes, lambda _v: _v.to_dict())
+ if self.start_time_unix_nano:
+ _result["startTimeUnixNano"] = _utils.encode_int64(self.start_time_unix_nano)
+ if self.time_unix_nano:
+ _result["timeUnixNano"] = _utils.encode_int64(self.time_unix_nano)
+ if self.exemplars:
+ _result["exemplars"] = _utils.serialize_repeated(self.exemplars, lambda _v: _v.to_dict())
+ if self.flags:
+ _result["flags"] = self.flags
+ if self.as_int is not None:
+ _result["asInt"] = _utils.encode_int64(self.as_int)
+ elif self.as_double is not None:
+ _result["asDouble"] = _utils.encode_float(self.as_double)
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "NumberDataPoint":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ NumberDataPoint instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("attributes")) is not None:
+ _args["attributes"] = _utils.deserialize_repeated(_value, lambda _v: opentelemetry.proto_json.common.v1.common.KeyValue.from_dict(_v), "attributes")
+ if (_value := data.get("startTimeUnixNano")) is not None:
+ _args["start_time_unix_nano"] = _utils.parse_int64(_value, "start_time_unix_nano")
+ if (_value := data.get("timeUnixNano")) is not None:
+ _args["time_unix_nano"] = _utils.parse_int64(_value, "time_unix_nano")
+ if (_value := data.get("exemplars")) is not None:
+ _args["exemplars"] = _utils.deserialize_repeated(_value, lambda _v: Exemplar.from_dict(_v), "exemplars")
+ if (_value := data.get("flags")) is not None:
+ _utils.validate_type(_value, builtins.int, "flags")
+ _args["flags"] = _value
+ if (_value := data.get("asInt")) is not None:
+ _args["as_int"] = _utils.parse_int64(_value, "as_int")
+ elif (_value := data.get("asDouble")) is not None:
+ _args["as_double"] = _utils.parse_float(_value, "as_double")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "NumberDataPoint":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
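+# Editor's note (illustrative sketch): per the OTLP/JSON mapping, 64-bit
+# integer fields such as timeUnixNano and asInt are expected to serialize as
+# decimal strings (via _utils.encode_int64), while asDouble stays a JSON
+# number; exactly one of asInt/asDouble is emitted for the value oneof:
+#
+#     _dp = NumberDataPoint(time_unix_nano=1544712660300000000, as_int=42)
+#     # expected: {"timeUnixNano": "1544712660300000000", "asInt": "42"}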
+
+@typing.final
+@_dataclass
+class HistogramDataPoint:
+ """
+ Generated from protobuf message HistogramDataPoint
+ """
+
+ attributes: builtins.list[opentelemetry.proto_json.common.v1.common.KeyValue] = dataclasses.field(default_factory=builtins.list)
+ start_time_unix_nano: typing.Optional[builtins.int] = 0
+ time_unix_nano: typing.Optional[builtins.int] = 0
+ count: typing.Optional[builtins.int] = 0
+ sum: typing.Optional[builtins.float] = None
+ bucket_counts: builtins.list[builtins.int] = dataclasses.field(default_factory=builtins.list)
+ explicit_bounds: builtins.list[builtins.float] = dataclasses.field(default_factory=builtins.list)
+ exemplars: builtins.list[Exemplar] = dataclasses.field(default_factory=builtins.list)
+ flags: typing.Optional[builtins.int] = 0
+ min: typing.Optional[builtins.float] = None
+ max: typing.Optional[builtins.float] = None
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.attributes:
+ _result["attributes"] = _utils.serialize_repeated(self.attributes, lambda _v: _v.to_dict())
+ if self.start_time_unix_nano:
+ _result["startTimeUnixNano"] = _utils.encode_int64(self.start_time_unix_nano)
+ if self.time_unix_nano:
+ _result["timeUnixNano"] = _utils.encode_int64(self.time_unix_nano)
+ if self.count:
+ _result["count"] = _utils.encode_int64(self.count)
+ # sum is an explicit-presence (proto3 optional) field, so 0.0 must still be emitted
+ if self.sum is not None:
+ _result["sum"] = _utils.encode_float(self.sum)
+ if self.bucket_counts:
+ _result["bucketCounts"] = _utils.serialize_repeated(self.bucket_counts, lambda _v: _utils.encode_int64(_v))
+ if self.explicit_bounds:
+ _result["explicitBounds"] = _utils.serialize_repeated(self.explicit_bounds, lambda _v: _utils.encode_float(_v))
+ if self.exemplars:
+ _result["exemplars"] = _utils.serialize_repeated(self.exemplars, lambda _v: _v.to_dict())
+ if self.flags:
+ _result["flags"] = self.flags
+ # min/max are explicit-presence (proto3 optional) fields, so 0.0 must still be emitted
+ if self.min is not None:
+ _result["min"] = _utils.encode_float(self.min)
+ if self.max is not None:
+ _result["max"] = _utils.encode_float(self.max)
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "HistogramDataPoint":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ HistogramDataPoint instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("attributes")) is not None:
+ _args["attributes"] = _utils.deserialize_repeated(_value, lambda _v: opentelemetry.proto_json.common.v1.common.KeyValue.from_dict(_v), "attributes")
+ if (_value := data.get("startTimeUnixNano")) is not None:
+ _args["start_time_unix_nano"] = _utils.parse_int64(_value, "start_time_unix_nano")
+ if (_value := data.get("timeUnixNano")) is not None:
+ _args["time_unix_nano"] = _utils.parse_int64(_value, "time_unix_nano")
+ if (_value := data.get("count")) is not None:
+ _args["count"] = _utils.parse_int64(_value, "count")
+ if (_value := data.get("sum")) is not None:
+ _args["sum"] = _utils.parse_float(_value, "sum")
+ if (_value := data.get("bucketCounts")) is not None:
+ _args["bucket_counts"] = _utils.deserialize_repeated(_value, lambda _v: _utils.parse_int64(_v, "bucket_counts"), "bucket_counts")
+ if (_value := data.get("explicitBounds")) is not None:
+ _args["explicit_bounds"] = _utils.deserialize_repeated(_value, lambda _v: _utils.parse_float(_v, "explicit_bounds"), "explicit_bounds")
+ if (_value := data.get("exemplars")) is not None:
+ _args["exemplars"] = _utils.deserialize_repeated(_value, lambda _v: Exemplar.from_dict(_v), "exemplars")
+ if (_value := data.get("flags")) is not None:
+ _utils.validate_type(_value, builtins.int, "flags")
+ _args["flags"] = _value
+ if (_value := data.get("min")) is not None:
+ _args["min"] = _utils.parse_float(_value, "min")
+ if (_value := data.get("max")) is not None:
+ _args["max"] = _utils.parse_float(_value, "max")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "HistogramDataPoint":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
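+# Editor's note (illustrative only): bucket_counts is a repeated uint64, so
+# each element goes through _utils.encode_int64, while explicit_bounds stay
+# plain doubles; with N bounds there are N+1 buckets:
+#
+#     _dp = HistogramDataPoint(count=3, bucket_counts=[1, 2], explicit_bounds=[10.0])
+#     # expected: {"count": "3", "bucketCounts": ["1", "2"], "explicitBounds": [10.0]}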
+
+@typing.final
+@_dataclass
+class ExponentialHistogramDataPoint:
+ """
+ Generated from protobuf message ExponentialHistogramDataPoint
+ """
+
+ @typing.final
+ @_dataclass
+ class Buckets:
+ """
+ Generated from protobuf message Buckets
+ """
+
+ offset: typing.Optional[builtins.int] = 0
+ bucket_counts: builtins.list[builtins.int] = dataclasses.field(default_factory=builtins.list)
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.offset:
+ _result["offset"] = self.offset
+ if self.bucket_counts:
+ _result["bucketCounts"] = _utils.serialize_repeated(self.bucket_counts, lambda _v: _utils.encode_int64(_v))
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ExponentialHistogramDataPoint.Buckets":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ Buckets instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("offset")) is not None:
+ _utils.validate_type(_value, builtins.int, "offset")
+ _args["offset"] = _value
+ if (_value := data.get("bucketCounts")) is not None:
+ _args["bucket_counts"] = _utils.deserialize_repeated(_value, lambda _v: _utils.parse_int64(_v, "bucket_counts"), "bucket_counts")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ExponentialHistogramDataPoint.Buckets":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+ attributes: builtins.list[opentelemetry.proto_json.common.v1.common.KeyValue] = dataclasses.field(default_factory=builtins.list)
+ start_time_unix_nano: typing.Optional[builtins.int] = 0
+ time_unix_nano: typing.Optional[builtins.int] = 0
+ count: typing.Optional[builtins.int] = 0
+ sum: typing.Optional[builtins.float] = None
+ scale: typing.Optional[builtins.int] = 0
+ zero_count: typing.Optional[builtins.int] = 0
+ positive: typing.Optional[ExponentialHistogramDataPoint.Buckets] = None
+ negative: typing.Optional[ExponentialHistogramDataPoint.Buckets] = None
+ flags: typing.Optional[builtins.int] = 0
+ exemplars: builtins.list[Exemplar] = dataclasses.field(default_factory=builtins.list)
+ min: typing.Optional[builtins.float] = None
+ max: typing.Optional[builtins.float] = None
+ zero_threshold: typing.Optional[builtins.float] = 0.0
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.attributes:
+ _result["attributes"] = _utils.serialize_repeated(self.attributes, lambda _v: _v.to_dict())
+ if self.start_time_unix_nano:
+ _result["startTimeUnixNano"] = _utils.encode_int64(self.start_time_unix_nano)
+ if self.time_unix_nano:
+ _result["timeUnixNano"] = _utils.encode_int64(self.time_unix_nano)
+ if self.count:
+ _result["count"] = _utils.encode_int64(self.count)
+ # sum is an explicit-presence (proto3 optional) field, so 0.0 must still be emitted
+ if self.sum is not None:
+ _result["sum"] = _utils.encode_float(self.sum)
+ if self.scale:
+ _result["scale"] = self.scale
+ if self.zero_count:
+ _result["zeroCount"] = _utils.encode_int64(self.zero_count)
+ if self.positive:
+ _result["positive"] = self.positive.to_dict()
+ if self.negative:
+ _result["negative"] = self.negative.to_dict()
+ if self.flags:
+ _result["flags"] = self.flags
+ if self.exemplars:
+ _result["exemplars"] = _utils.serialize_repeated(self.exemplars, lambda _v: _v.to_dict())
+ # min/max are explicit-presence (proto3 optional) fields, so 0.0 must still be emitted
+ if self.min is not None:
+ _result["min"] = _utils.encode_float(self.min)
+ if self.max is not None:
+ _result["max"] = _utils.encode_float(self.max)
+ if self.zero_threshold:
+ _result["zeroThreshold"] = _utils.encode_float(self.zero_threshold)
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ExponentialHistogramDataPoint":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ExponentialHistogramDataPoint instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("attributes")) is not None:
+ _args["attributes"] = _utils.deserialize_repeated(_value, lambda _v: opentelemetry.proto_json.common.v1.common.KeyValue.from_dict(_v), "attributes")
+ if (_value := data.get("startTimeUnixNano")) is not None:
+ _args["start_time_unix_nano"] = _utils.parse_int64(_value, "start_time_unix_nano")
+ if (_value := data.get("timeUnixNano")) is not None:
+ _args["time_unix_nano"] = _utils.parse_int64(_value, "time_unix_nano")
+ if (_value := data.get("count")) is not None:
+ _args["count"] = _utils.parse_int64(_value, "count")
+ if (_value := data.get("sum")) is not None:
+ _args["sum"] = _utils.parse_float(_value, "sum")
+ if (_value := data.get("scale")) is not None:
+ _utils.validate_type(_value, builtins.int, "scale")
+ _args["scale"] = _value
+ if (_value := data.get("zeroCount")) is not None:
+ _args["zero_count"] = _utils.parse_int64(_value, "zero_count")
+ if (_value := data.get("positive")) is not None:
+ _args["positive"] = ExponentialHistogramDataPoint.Buckets.from_dict(_value)
+ if (_value := data.get("negative")) is not None:
+ _args["negative"] = ExponentialHistogramDataPoint.Buckets.from_dict(_value)
+ if (_value := data.get("flags")) is not None:
+ _utils.validate_type(_value, builtins.int, "flags")
+ _args["flags"] = _value
+ if (_value := data.get("exemplars")) is not None:
+ _args["exemplars"] = _utils.deserialize_repeated(_value, lambda _v: Exemplar.from_dict(_v), "exemplars")
+ if (_value := data.get("min")) is not None:
+ _args["min"] = _utils.parse_float(_value, "min")
+ if (_value := data.get("max")) is not None:
+ _args["max"] = _utils.parse_float(_value, "max")
+ if (_value := data.get("zeroThreshold")) is not None:
+ _args["zero_threshold"] = _utils.parse_float(_value, "zero_threshold")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ExponentialHistogramDataPoint":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
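+# Editor's note (illustrative sketch): the nested Buckets message is addressed
+# as ExponentialHistogramDataPoint.Buckets; its offset is a signed 32-bit
+# field, so it stays a plain JSON number (only 64-bit values are
+# string-encoded):
+#
+#     _buckets = ExponentialHistogramDataPoint.Buckets(offset=-2, bucket_counts=[5])
+#     _dp = ExponentialHistogramDataPoint(scale=3, positive=_buckets)
+#     # expected: {"scale": 3, "positive": {"offset": -2, "bucketCounts": ["5"]}}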
+
+@typing.final
+@_dataclass
+class SummaryDataPoint:
+ """
+ Generated from protobuf message SummaryDataPoint
+ """
+
+ @typing.final
+ @_dataclass
+ class ValueAtQuantile:
+ """
+ Generated from protobuf message ValueAtQuantile
+ """
+
+ quantile: typing.Optional[builtins.float] = 0.0
+ value: typing.Optional[builtins.float] = 0.0
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.quantile:
+ _result["quantile"] = _utils.encode_float(self.quantile)
+ if self.value:
+ _result["value"] = _utils.encode_float(self.value)
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "SummaryDataPoint.ValueAtQuantile":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ValueAtQuantile instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("quantile")) is not None:
+ _args["quantile"] = _utils.parse_float(_value, "quantile")
+ if (_value := data.get("value")) is not None:
+ _args["value"] = _utils.parse_float(_value, "value")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "SummaryDataPoint.ValueAtQuantile":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+ attributes: builtins.list[opentelemetry.proto_json.common.v1.common.KeyValue] = dataclasses.field(default_factory=builtins.list)
+ start_time_unix_nano: typing.Optional[builtins.int] = 0
+ time_unix_nano: typing.Optional[builtins.int] = 0
+ count: typing.Optional[builtins.int] = 0
+ sum: typing.Optional[builtins.float] = 0.0
+ quantile_values: builtins.list[SummaryDataPoint.ValueAtQuantile] = dataclasses.field(default_factory=builtins.list)
+ flags: typing.Optional[builtins.int] = 0
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.attributes:
+ _result["attributes"] = _utils.serialize_repeated(self.attributes, lambda _v: _v.to_dict())
+ if self.start_time_unix_nano:
+ _result["startTimeUnixNano"] = _utils.encode_int64(self.start_time_unix_nano)
+ if self.time_unix_nano:
+ _result["timeUnixNano"] = _utils.encode_int64(self.time_unix_nano)
+ if self.count:
+ _result["count"] = _utils.encode_int64(self.count)
+ if self.sum:
+ _result["sum"] = _utils.encode_float(self.sum)
+ if self.quantile_values:
+ _result["quantileValues"] = _utils.serialize_repeated(self.quantile_values, lambda _v: _v.to_dict())
+ if self.flags:
+ _result["flags"] = self.flags
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "SummaryDataPoint":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ SummaryDataPoint instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("attributes")) is not None:
+ _args["attributes"] = _utils.deserialize_repeated(_value, lambda _v: opentelemetry.proto_json.common.v1.common.KeyValue.from_dict(_v), "attributes")
+ if (_value := data.get("startTimeUnixNano")) is not None:
+ _args["start_time_unix_nano"] = _utils.parse_int64(_value, "start_time_unix_nano")
+ if (_value := data.get("timeUnixNano")) is not None:
+ _args["time_unix_nano"] = _utils.parse_int64(_value, "time_unix_nano")
+ if (_value := data.get("count")) is not None:
+ _args["count"] = _utils.parse_int64(_value, "count")
+ if (_value := data.get("sum")) is not None:
+ _args["sum"] = _utils.parse_float(_value, "sum")
+ if (_value := data.get("quantileValues")) is not None:
+ _args["quantile_values"] = _utils.deserialize_repeated(_value, lambda _v: SummaryDataPoint.ValueAtQuantile.from_dict(_v), "quantile_values")
+ if (_value := data.get("flags")) is not None:
+ _utils.validate_type(_value, builtins.int, "flags")
+ _args["flags"] = _value
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "SummaryDataPoint":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
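+# Editor's note (illustrative only): quantile/value are plain doubles, so a
+# 0.0 quantile (the minimum) drops out of the JSON under proto3 default
+# omission and comes back as the 0.0 default on deserialization:
+#
+#     _q = SummaryDataPoint.ValueAtQuantile(quantile=0.0, value=1.5)
+#     # expected: _q.to_dict() == {"value": 1.5}
+#     assert SummaryDataPoint.ValueAtQuantile.from_dict({"value": 1.5}).quantile == 0.0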
+
+@typing.final
+@_dataclass
+class Exemplar:
+ """
+ Generated from protobuf message Exemplar
+ """
+
+ filtered_attributes: builtins.list[opentelemetry.proto_json.common.v1.common.KeyValue] = dataclasses.field(default_factory=builtins.list)
+ time_unix_nano: typing.Optional[builtins.int] = 0
+ as_double: typing.Optional[builtins.float] = None
+ as_int: typing.Optional[builtins.int] = None
+ span_id: typing.Optional[builtins.bytes] = b""
+ trace_id: typing.Optional[builtins.bytes] = b""
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.filtered_attributes:
+ _result["filteredAttributes"] = _utils.serialize_repeated(self.filtered_attributes, lambda _v: _v.to_dict())
+ if self.time_unix_nano:
+ _result["timeUnixNano"] = _utils.encode_int64(self.time_unix_nano)
+ if self.span_id:
+ _result["spanId"] = _utils.encode_hex(self.span_id)
+ if self.trace_id:
+ _result["traceId"] = _utils.encode_hex(self.trace_id)
+ if self.as_int is not None:
+ _result["asInt"] = _utils.encode_int64(self.as_int)
+ elif self.as_double is not None:
+ _result["asDouble"] = _utils.encode_float(self.as_double)
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "Exemplar":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ Exemplar instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("filteredAttributes")) is not None:
+ _args["filtered_attributes"] = _utils.deserialize_repeated(_value, lambda _v: opentelemetry.proto_json.common.v1.common.KeyValue.from_dict(_v), "filtered_attributes")
+ if (_value := data.get("timeUnixNano")) is not None:
+ _args["time_unix_nano"] = _utils.parse_int64(_value, "time_unix_nano")
+ if (_value := data.get("spanId")) is not None:
+ _args["span_id"] = _utils.decode_hex(_value, "span_id")
+ if (_value := data.get("traceId")) is not None:
+ _args["trace_id"] = _utils.decode_hex(_value, "trace_id")
+ if (_value := data.get("asInt")) is not None:
+ _args["as_int"] = _utils.parse_int64(_value, "as_int")
+ elif (_value := data.get("asDouble")) is not None:
+ _args["as_double"] = _utils.parse_float(_value, "as_double")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "Exemplar":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
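+
+
+# Editor's note (illustrative sketch): span_id/trace_id are raw bytes on the
+# dataclass but hex strings on the wire, matching the OTLP/JSON encoding of
+# trace identifiers (unlike other bytes fields, which are base64):
+#
+#     _ex = Exemplar(span_id=bytes.fromhex("eee19b7ec3c1b174"), as_double=0.5)
+#     # expected: {"spanId": "eee19b7ec3c1b174", "asDouble": 0.5}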
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/profiles/__init__.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/profiles/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/profiles/v1development/__init__.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/profiles/v1development/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/profiles/v1development/profiles.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/profiles/v1development/profiles.py
new file mode 100644
index 00000000000..ac852ababf3
--- /dev/null
+++ b/opentelemetry-proto-json/src/opentelemetry/proto_json/profiles/v1development/profiles.py
@@ -0,0 +1,1137 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# AUTO-GENERATED from "opentelemetry/proto/profiles/v1development/profiles.proto"
+# DO NOT EDIT MANUALLY
+
+from __future__ import annotations
+
+import builtins
+import dataclasses
+import functools
+import json
+import sys
+import typing
+
+if sys.version_info >= (3, 10):
+ _dataclass = functools.partial(dataclasses.dataclass, slots=True)
+else:
+ _dataclass = dataclasses.dataclass
+
+import opentelemetry.proto_json._otlp_json_utils as _utils
+import opentelemetry.proto_json.common.v1.common
+import opentelemetry.proto_json.resource.v1.resource
+
+
+@typing.final
+@_dataclass
+class ProfilesDictionary:
+ """
+ Generated from protobuf message ProfilesDictionary
+ """
+
+ mapping_table: builtins.list[Mapping] = dataclasses.field(default_factory=builtins.list)
+ location_table: builtins.list[Location] = dataclasses.field(default_factory=builtins.list)
+ function_table: builtins.list[Function] = dataclasses.field(default_factory=builtins.list)
+ link_table: builtins.list[Link] = dataclasses.field(default_factory=builtins.list)
+ string_table: builtins.list[builtins.str] = dataclasses.field(default_factory=builtins.list)
+ attribute_table: builtins.list[KeyValueAndUnit] = dataclasses.field(default_factory=builtins.list)
+ stack_table: builtins.list[Stack] = dataclasses.field(default_factory=builtins.list)
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.mapping_table:
+ _result["mappingTable"] = _utils.serialize_repeated(self.mapping_table, lambda _v: _v.to_dict())
+ if self.location_table:
+ _result["locationTable"] = _utils.serialize_repeated(self.location_table, lambda _v: _v.to_dict())
+ if self.function_table:
+ _result["functionTable"] = _utils.serialize_repeated(self.function_table, lambda _v: _v.to_dict())
+ if self.link_table:
+ _result["linkTable"] = _utils.serialize_repeated(self.link_table, lambda _v: _v.to_dict())
+ if self.string_table:
+ _result["stringTable"] = self.string_table
+ if self.attribute_table:
+ _result["attributeTable"] = _utils.serialize_repeated(self.attribute_table, lambda _v: _v.to_dict())
+ if self.stack_table:
+ _result["stackTable"] = _utils.serialize_repeated(self.stack_table, lambda _v: _v.to_dict())
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ProfilesDictionary":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ProfilesDictionary instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("mappingTable")) is not None:
+ _args["mapping_table"] = _utils.deserialize_repeated(_value, lambda _v: Mapping.from_dict(_v), "mapping_table")
+ if (_value := data.get("locationTable")) is not None:
+ _args["location_table"] = _utils.deserialize_repeated(_value, lambda _v: Location.from_dict(_v), "location_table")
+ if (_value := data.get("functionTable")) is not None:
+ _args["function_table"] = _utils.deserialize_repeated(_value, lambda _v: Function.from_dict(_v), "function_table")
+ if (_value := data.get("linkTable")) is not None:
+ _args["link_table"] = _utils.deserialize_repeated(_value, lambda _v: Link.from_dict(_v), "link_table")
+ if (_value := data.get("stringTable")) is not None:
+ _args["string_table"] = _utils.deserialize_repeated(_value, lambda _v: _v, "string_table")
+ if (_value := data.get("attributeTable")) is not None:
+ _args["attribute_table"] = _utils.deserialize_repeated(_value, lambda _v: KeyValueAndUnit.from_dict(_v), "attribute_table")
+ if (_value := data.get("stackTable")) is not None:
+ _args["stack_table"] = _utils.deserialize_repeated(_value, lambda _v: Stack.from_dict(_v), "stack_table")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ProfilesDictionary":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
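+# Editor's note (illustrative only): the dictionary tables deduplicate data
+# that samples reference by index; mirroring the pprof convention, index 0 of
+# string_table is conventionally the empty string, so a ValueType with
+# type_strindex=1 would name the string "samples" below:
+#
+#     _d = ProfilesDictionary(string_table=["", "samples", "count"])
+#     assert _d.to_dict() == {"stringTable": ["", "samples", "count"]}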
+
+@typing.final
+@_dataclass
+class ProfilesData:
+ """
+ Generated from protobuf message ProfilesData
+ """
+
+ resource_profiles: builtins.list[ResourceProfiles] = dataclasses.field(default_factory=builtins.list)
+ dictionary: typing.Optional[ProfilesDictionary] = None
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.resource_profiles:
+ _result["resourceProfiles"] = _utils.serialize_repeated(self.resource_profiles, lambda _v: _v.to_dict())
+ if self.dictionary:
+ _result["dictionary"] = self.dictionary.to_dict()
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ProfilesData":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ProfilesData instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("resourceProfiles")) is not None:
+ _args["resource_profiles"] = _utils.deserialize_repeated(_value, lambda _v: ResourceProfiles.from_dict(_v), "resource_profiles")
+ if (_value := data.get("dictionary")) is not None:
+ _args["dictionary"] = ProfilesDictionary.from_dict(_value)
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ProfilesData":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class ResourceProfiles:
+ """
+ Generated from protobuf message ResourceProfiles
+ """
+
+ resource: typing.Optional[opentelemetry.proto_json.resource.v1.resource.Resource] = None
+ scope_profiles: builtins.list[ScopeProfiles] = dataclasses.field(default_factory=builtins.list)
+ schema_url: typing.Optional[builtins.str] = ""
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.resource:
+ _result["resource"] = self.resource.to_dict()
+ if self.scope_profiles:
+ _result["scopeProfiles"] = _utils.serialize_repeated(self.scope_profiles, lambda _v: _v.to_dict())
+ if self.schema_url:
+ _result["schemaUrl"] = self.schema_url
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ResourceProfiles":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ResourceProfiles instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("resource")) is not None:
+ _args["resource"] = opentelemetry.proto_json.resource.v1.resource.Resource.from_dict(_value)
+ if (_value := data.get("scopeProfiles")) is not None:
+ _args["scope_profiles"] = _utils.deserialize_repeated(_value, lambda _v: ScopeProfiles.from_dict(_v), "scope_profiles")
+ if (_value := data.get("schemaUrl")) is not None:
+ _utils.validate_type(_value, builtins.str, "schema_url")
+ _args["schema_url"] = _value
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ResourceProfiles":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class ScopeProfiles:
+ """
+ Generated from protobuf message ScopeProfiles
+ """
+
+ scope: typing.Optional[opentelemetry.proto_json.common.v1.common.InstrumentationScope] = None
+ profiles: builtins.list[Profile] = dataclasses.field(default_factory=builtins.list)
+ schema_url: typing.Optional[builtins.str] = ""
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.scope:
+ _result["scope"] = self.scope.to_dict()
+ if self.profiles:
+ _result["profiles"] = _utils.serialize_repeated(self.profiles, lambda _v: _v.to_dict())
+ if self.schema_url:
+ _result["schemaUrl"] = self.schema_url
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ScopeProfiles":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ScopeProfiles instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("scope")) is not None:
+ _args["scope"] = opentelemetry.proto_json.common.v1.common.InstrumentationScope.from_dict(_value)
+ if (_value := data.get("profiles")) is not None:
+ _args["profiles"] = _utils.deserialize_repeated(_value, lambda _v: Profile.from_dict(_v), "profiles")
+ if (_value := data.get("schemaUrl")) is not None:
+ _utils.validate_type(_value, builtins.str, "schema_url")
+ _args["schema_url"] = _value
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ScopeProfiles":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class Profile:
+ """
+ Generated from protobuf message Profile
+ """
+
+ sample_type: typing.Optional[ValueType] = None
+ samples: builtins.list[Sample] = dataclasses.field(default_factory=builtins.list)
+ time_unix_nano: typing.Optional[builtins.int] = 0
+ duration_nano: typing.Optional[builtins.int] = 0
+ period_type: typing.Optional[ValueType] = None
+ period: typing.Optional[builtins.int] = 0
+ profile_id: typing.Optional[builtins.bytes] = b""
+ dropped_attributes_count: typing.Optional[builtins.int] = 0
+ original_payload_format: typing.Optional[builtins.str] = ""
+ original_payload: typing.Optional[builtins.bytes] = b""
+ attribute_indices: builtins.list[builtins.int] = dataclasses.field(default_factory=builtins.list)
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.sample_type:
+ _result["sampleType"] = self.sample_type.to_dict()
+ if self.samples:
+ _result["samples"] = _utils.serialize_repeated(self.samples, lambda _v: _v.to_dict())
+ if self.time_unix_nano:
+ _result["timeUnixNano"] = _utils.encode_int64(self.time_unix_nano)
+ if self.duration_nano:
+ _result["durationNano"] = _utils.encode_int64(self.duration_nano)
+ if self.period_type:
+ _result["periodType"] = self.period_type.to_dict()
+ if self.period:
+ _result["period"] = _utils.encode_int64(self.period)
+ if self.profile_id:
+ _result["profileId"] = _utils.encode_base64(self.profile_id)
+ if self.dropped_attributes_count:
+ _result["droppedAttributesCount"] = self.dropped_attributes_count
+ if self.original_payload_format:
+ _result["originalPayloadFormat"] = self.original_payload_format
+ if self.original_payload:
+ _result["originalPayload"] = _utils.encode_base64(self.original_payload)
+ if self.attribute_indices:
+ _result["attributeIndices"] = self.attribute_indices
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "Profile":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ Profile instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("sampleType")) is not None:
+ _args["sample_type"] = ValueType.from_dict(_value)
+ if (_value := data.get("samples")) is not None:
+ _args["samples"] = _utils.deserialize_repeated(_value, lambda _v: Sample.from_dict(_v), "samples")
+ if (_value := data.get("timeUnixNano")) is not None:
+ _args["time_unix_nano"] = _utils.parse_int64(_value, "time_unix_nano")
+ if (_value := data.get("durationNano")) is not None:
+ _args["duration_nano"] = _utils.parse_int64(_value, "duration_nano")
+ if (_value := data.get("periodType")) is not None:
+ _args["period_type"] = ValueType.from_dict(_value)
+ if (_value := data.get("period")) is not None:
+ _args["period"] = _utils.parse_int64(_value, "period")
+ if (_value := data.get("profileId")) is not None:
+ _args["profile_id"] = _utils.decode_base64(_value, "profile_id")
+ if (_value := data.get("droppedAttributesCount")) is not None:
+ _utils.validate_type(_value, builtins.int, "dropped_attributes_count")
+ _args["dropped_attributes_count"] = _value
+ if (_value := data.get("originalPayloadFormat")) is not None:
+ _utils.validate_type(_value, builtins.str, "original_payload_format")
+ _args["original_payload_format"] = _value
+ if (_value := data.get("originalPayload")) is not None:
+ _args["original_payload"] = _utils.decode_base64(_value, "original_payload")
+ if (_value := data.get("attributeIndices")) is not None:
+ _args["attribute_indices"] = _utils.deserialize_repeated(_value, lambda _v: _v, "attribute_indices")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "Profile":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
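+# Editor's note (illustrative sketch): profile_id and original_payload are
+# bytes fields encoded as base64 on the wire (via _utils.encode_base64),
+# unlike trace/span ids, which are hex:
+#
+#     _p = Profile(original_payload_format="pprof", original_payload=b"\x01\x02")
+#     # expected: {"originalPayloadFormat": "pprof", "originalPayload": "AQI="}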
+
+@typing.final
+@_dataclass
+class Link:
+ """
+ Generated from protobuf message Link
+ """
+
+ trace_id: typing.Optional[builtins.bytes] = b""
+ span_id: typing.Optional[builtins.bytes] = b""
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.trace_id:
+ _result["traceId"] = _utils.encode_hex(self.trace_id)
+ if self.span_id:
+ _result["spanId"] = _utils.encode_hex(self.span_id)
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "Link":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ Link instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("traceId")) is not None:
+ _args["trace_id"] = _utils.decode_hex(_value, "trace_id")
+ if (_value := data.get("spanId")) is not None:
+ _args["span_id"] = _utils.decode_hex(_value, "span_id")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "Link":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class ValueType:
+ """
+ Generated from protobuf message ValueType
+ """
+
+ type_strindex: typing.Optional[builtins.int] = 0
+ unit_strindex: typing.Optional[builtins.int] = 0
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.type_strindex:
+ _result["typeStrindex"] = self.type_strindex
+ if self.unit_strindex:
+ _result["unitStrindex"] = self.unit_strindex
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ValueType":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ValueType instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("typeStrindex")) is not None:
+ _utils.validate_type(_value, builtins.int, "type_strindex")
+ _args["type_strindex"] = _value
+ if (_value := data.get("unitStrindex")) is not None:
+ _utils.validate_type(_value, builtins.int, "unit_strindex")
+ _args["unit_strindex"] = _value
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ValueType":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class Sample:
+ """
+ Generated from protobuf message Sample
+ """
+
+ stack_index: typing.Optional[builtins.int] = 0
+ values: builtins.list[builtins.int] = dataclasses.field(default_factory=builtins.list)
+ attribute_indices: builtins.list[builtins.int] = dataclasses.field(default_factory=builtins.list)
+ link_index: typing.Optional[builtins.int] = 0
+ timestamps_unix_nano: builtins.list[builtins.int] = dataclasses.field(default_factory=builtins.list)
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.stack_index:
+ _result["stackIndex"] = self.stack_index
+ if self.values:
+ _result["values"] = _utils.serialize_repeated(self.values, lambda _v: _utils.encode_int64(_v))
+ if self.attribute_indices:
+ _result["attributeIndices"] = self.attribute_indices
+ if self.link_index:
+ _result["linkIndex"] = self.link_index
+ if self.timestamps_unix_nano:
+ _result["timestampsUnixNano"] = _utils.serialize_repeated(self.timestamps_unix_nano, lambda _v: _utils.encode_int64(_v))
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "Sample":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ Sample instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("stackIndex")) is not None:
+ _utils.validate_type(_value, builtins.int, "stack_index")
+ _args["stack_index"] = _value
+ if (_value := data.get("values")) is not None:
+ _args["values"] = _utils.deserialize_repeated(_value, lambda _v: _utils.parse_int64(_v, "values"), "values")
+ if (_value := data.get("attributeIndices")) is not None:
+ _args["attribute_indices"] = _utils.deserialize_repeated(_value, lambda _v: _v, "attribute_indices")
+ if (_value := data.get("linkIndex")) is not None:
+ _utils.validate_type(_value, builtins.int, "link_index")
+ _args["link_index"] = _value
+ if (_value := data.get("timestampsUnixNano")) is not None:
+ _args["timestamps_unix_nano"] = _utils.deserialize_repeated(_value, lambda _v: _utils.parse_int64(_v, "timestamps_unix_nano"), "timestamps_unix_nano")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "Sample":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
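+
+# 64-bit integers cross the wire as decimal strings so that values beyond
+# 2**53 survive JavaScript JSON parsers; the _otlp_json_utils helpers convert
+# in both directions. E.g. (illustrative):
+#   Sample(timestamps_unix_nano=[1544712660300000000]).to_dict()
+#   == {"timestampsUnixNano": ["1544712660300000000"]}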
+
+
+@typing.final
+@_dataclass
+class Mapping:
+ """
+ Generated from protobuf message Mapping
+ """
+
+ memory_start: typing.Optional[builtins.int] = 0
+ memory_limit: typing.Optional[builtins.int] = 0
+ file_offset: typing.Optional[builtins.int] = 0
+ filename_strindex: typing.Optional[builtins.int] = 0
+ attribute_indices: builtins.list[builtins.int] = dataclasses.field(default_factory=builtins.list)
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.memory_start:
+ _result["memoryStart"] = _utils.encode_int64(self.memory_start)
+ if self.memory_limit:
+ _result["memoryLimit"] = _utils.encode_int64(self.memory_limit)
+ if self.file_offset:
+ _result["fileOffset"] = _utils.encode_int64(self.file_offset)
+ if self.filename_strindex:
+ _result["filenameStrindex"] = self.filename_strindex
+ if self.attribute_indices:
+ _result["attributeIndices"] = self.attribute_indices
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "Mapping":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ Mapping instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("memoryStart")) is not None:
+ _args["memory_start"] = _utils.parse_int64(_value, "memory_start")
+ if (_value := data.get("memoryLimit")) is not None:
+ _args["memory_limit"] = _utils.parse_int64(_value, "memory_limit")
+ if (_value := data.get("fileOffset")) is not None:
+ _args["file_offset"] = _utils.parse_int64(_value, "file_offset")
+ if (_value := data.get("filenameStrindex")) is not None:
+ _utils.validate_type(_value, builtins.int, "filename_strindex")
+ _args["filename_strindex"] = _value
+ if (_value := data.get("attributeIndices")) is not None:
+ _args["attribute_indices"] = _utils.deserialize_repeated(_value, lambda _v: _v, "attribute_indices")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "Mapping":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class Stack:
+ """
+ Generated from protobuf message Stack
+ """
+
+ location_indices: builtins.list[builtins.int] = dataclasses.field(default_factory=builtins.list)
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.location_indices:
+ _result["locationIndices"] = self.location_indices
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "Stack":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ Stack instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("locationIndices")) is not None:
+ _args["location_indices"] = _utils.deserialize_repeated(_value, lambda _v: _v, "location_indices")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "Stack":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class Location:
+ """
+ Generated from protobuf message Location
+ """
+
+ mapping_index: typing.Optional[builtins.int] = 0
+ address: typing.Optional[builtins.int] = 0
+ lines: builtins.list[Line] = dataclasses.field(default_factory=builtins.list)
+ attribute_indices: builtins.list[builtins.int] = dataclasses.field(default_factory=builtins.list)
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.mapping_index:
+ _result["mappingIndex"] = self.mapping_index
+ if self.address:
+ _result["address"] = _utils.encode_int64(self.address)
+ if self.lines:
+ _result["lines"] = _utils.serialize_repeated(self.lines, lambda _v: _v.to_dict())
+ if self.attribute_indices:
+ _result["attributeIndices"] = self.attribute_indices
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "Location":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ Location instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("mappingIndex")) is not None:
+ _utils.validate_type(_value, builtins.int, "mapping_index")
+ _args["mapping_index"] = _value
+ if (_value := data.get("address")) is not None:
+ _args["address"] = _utils.parse_int64(_value, "address")
+ if (_value := data.get("lines")) is not None:
+ _args["lines"] = _utils.deserialize_repeated(_value, lambda _v: Line.from_dict(_v), "lines")
+ if (_value := data.get("attributeIndices")) is not None:
+ _args["attribute_indices"] = _utils.deserialize_repeated(_value, lambda _v: _v, "attribute_indices")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "Location":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class Line:
+ """
+ Generated from protobuf message Line
+ """
+
+ function_index: typing.Optional[builtins.int] = 0
+ line: typing.Optional[builtins.int] = 0
+ column: typing.Optional[builtins.int] = 0
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.function_index:
+ _result["functionIndex"] = self.function_index
+ if self.line:
+ _result["line"] = _utils.encode_int64(self.line)
+ if self.column:
+ _result["column"] = _utils.encode_int64(self.column)
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "Line":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ Line instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("functionIndex")) is not None:
+ _utils.validate_type(_value, builtins.int, "function_index")
+ _args["function_index"] = _value
+ if (_value := data.get("line")) is not None:
+ _args["line"] = _utils.parse_int64(_value, "line")
+ if (_value := data.get("column")) is not None:
+ _args["column"] = _utils.parse_int64(_value, "column")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "Line":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class Function:
+ """
+ Generated from protobuf message Function
+ """
+
+ name_strindex: typing.Optional[builtins.int] = 0
+ system_name_strindex: typing.Optional[builtins.int] = 0
+ filename_strindex: typing.Optional[builtins.int] = 0
+ start_line: typing.Optional[builtins.int] = 0
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.name_strindex:
+ _result["nameStrindex"] = self.name_strindex
+ if self.system_name_strindex:
+ _result["systemNameStrindex"] = self.system_name_strindex
+ if self.filename_strindex:
+ _result["filenameStrindex"] = self.filename_strindex
+ if self.start_line:
+ _result["startLine"] = _utils.encode_int64(self.start_line)
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "Function":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ Function instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("nameStrindex")) is not None:
+ _utils.validate_type(_value, builtins.int, "name_strindex")
+ _args["name_strindex"] = _value
+ if (_value := data.get("systemNameStrindex")) is not None:
+ _utils.validate_type(_value, builtins.int, "system_name_strindex")
+ _args["system_name_strindex"] = _value
+ if (_value := data.get("filenameStrindex")) is not None:
+ _utils.validate_type(_value, builtins.int, "filename_strindex")
+ _args["filename_strindex"] = _value
+ if (_value := data.get("startLine")) is not None:
+ _args["start_line"] = _utils.parse_int64(_value, "start_line")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "Function":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class KeyValueAndUnit:
+ """
+ Generated from protobuf message KeyValueAndUnit
+ """
+
+ key_strindex: typing.Optional[builtins.int] = 0
+ value: typing.Optional[opentelemetry.proto_json.common.v1.common.AnyValue] = None
+ unit_strindex: typing.Optional[builtins.int] = 0
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.key_strindex:
+ _result["keyStrindex"] = self.key_strindex
+ if self.value:
+ _result["value"] = self.value.to_dict()
+ if self.unit_strindex:
+ _result["unitStrindex"] = self.unit_strindex
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "KeyValueAndUnit":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ KeyValueAndUnit instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("keyStrindex")) is not None:
+ _utils.validate_type(_value, builtins.int, "key_strindex")
+ _args["key_strindex"] = _value
+ if (_value := data.get("value")) is not None:
+ _args["value"] = opentelemetry.proto_json.common.v1.common.AnyValue.from_dict(_value)
+ if (_value := data.get("unitStrindex")) is not None:
+ _utils.validate_type(_value, builtins.int, "unit_strindex")
+ _args["unit_strindex"] = _value
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "KeyValueAndUnit":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/py.typed b/opentelemetry-proto-json/src/opentelemetry/proto_json/py.typed
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/resource/__init__.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/resource/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/resource/v1/__init__.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/resource/v1/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/resource/v1/resource.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/resource/v1/resource.py
new file mode 100644
index 00000000000..e481673dfca
--- /dev/null
+++ b/opentelemetry-proto-json/src/opentelemetry/proto_json/resource/v1/resource.py
@@ -0,0 +1,107 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# AUTO-GENERATED from "opentelemetry/proto/resource/v1/resource.proto"
+# DO NOT EDIT MANUALLY
+
+from __future__ import annotations
+
+import builtins
+import dataclasses
+import functools
+import json
+import sys
+import typing
+
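+# dataclasses.dataclass only accepts slots= on Python 3.10+, so older
+# interpreters fall back to the plain decorator.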
+if sys.version_info >= (3, 10):
+ _dataclass = functools.partial(dataclasses.dataclass, slots=True)
+else:
+ _dataclass = dataclasses.dataclass
+
+import opentelemetry.proto_json._otlp_json_utils as _utils
+import opentelemetry.proto_json.common.v1.common
+
+
+@typing.final
+@_dataclass
+class Resource:
+ """
+ Generated from protobuf message Resource
+ """
+
+ attributes: builtins.list[opentelemetry.proto_json.common.v1.common.KeyValue] = dataclasses.field(default_factory=builtins.list)
+ dropped_attributes_count: typing.Optional[builtins.int] = 0
+ entity_refs: builtins.list[opentelemetry.proto_json.common.v1.common.EntityRef] = dataclasses.field(default_factory=builtins.list)
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.attributes:
+ _result["attributes"] = _utils.serialize_repeated(self.attributes, lambda _v: _v.to_dict())
+ if self.dropped_attributes_count:
+ _result["droppedAttributesCount"] = self.dropped_attributes_count
+ if self.entity_refs:
+ _result["entityRefs"] = _utils.serialize_repeated(self.entity_refs, lambda _v: _v.to_dict())
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "Resource":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ Resource instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("attributes")) is not None:
+ _args["attributes"] = _utils.deserialize_repeated(_value, lambda _v: opentelemetry.proto_json.common.v1.common.KeyValue.from_dict(_v), "attributes")
+ if (_value := data.get("droppedAttributesCount")) is not None:
+ _utils.validate_type(_value, builtins.int, "dropped_attributes_count")
+ _args["dropped_attributes_count"] = _value
+ if (_value := data.get("entityRefs")) is not None:
+ _args["entity_refs"] = _utils.deserialize_repeated(_value, lambda _v: opentelemetry.proto_json.common.v1.common.EntityRef.from_dict(_v), "entity_refs")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "Resource":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
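+
+
+# Editor's sketch (assumes the package is importable): running this module
+# directly checks a round trip and shows that unset proto3 defaults are
+# omitted from the OTLP JSON output.
+if __name__ == "__main__":
+ assert Resource().to_dict() == {}  # defaults are omitted entirely
+ _resource = Resource(dropped_attributes_count=3)
+ assert Resource.from_json(_resource.to_json()) == _resource
+ print(_resource.to_json())  # {"droppedAttributesCount": 3}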
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/trace/__init__.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/trace/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/trace/v1/__init__.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/trace/v1/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/trace/v1/trace.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/trace/v1/trace.py
new file mode 100644
index 00000000000..e780105f2cb
--- /dev/null
+++ b/opentelemetry-proto-json/src/opentelemetry/proto_json/trace/v1/trace.py
@@ -0,0 +1,664 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# AUTO-GENERATED from "opentelemetry/proto/trace/v1/trace.proto"
+# DO NOT EDIT MANUALLY
+
+from __future__ import annotations
+
+import builtins
+import dataclasses
+import enum
+import functools
+import json
+import sys
+import typing
+
+if sys.version_info >= (3, 10):
+ _dataclass = functools.partial(dataclasses.dataclass, slots=True)
+else:
+ _dataclass = dataclasses.dataclass
+
+import opentelemetry.proto_json._otlp_json_utils as _utils
+import opentelemetry.proto_json.common.v1.common
+import opentelemetry.proto_json.resource.v1.resource
+
+
+@typing.final
+class SpanFlags(enum.IntEnum):
+ """
+ Generated from protobuf enum SpanFlags
+ """
+
+ SPAN_FLAGS_DO_NOT_USE = 0
+ SPAN_FLAGS_TRACE_FLAGS_MASK = 255
+ SPAN_FLAGS_CONTEXT_HAS_IS_REMOTE_MASK = 256
+ SPAN_FLAGS_CONTEXT_IS_REMOTE_MASK = 512
+
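+# Span/Link flags travel as a plain int. The low byte holds the W3C trace
+# flags; bits 8-9 describe whether the parent span context was remote, e.g.
+# (illustrative):
+#   flags & SpanFlags.SPAN_FLAGS_TRACE_FLAGS_MASK  # W3C trace flags byte
+#   if flags & SpanFlags.SPAN_FLAGS_CONTEXT_HAS_IS_REMOTE_MASK:
+#       is_remote = bool(flags & SpanFlags.SPAN_FLAGS_CONTEXT_IS_REMOTE_MASK)
+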
+@typing.final
+@_dataclass
+class TracesData:
+ """
+ Generated from protobuf message TracesData
+ """
+
+ resource_spans: builtins.list[ResourceSpans] = dataclasses.field(default_factory=builtins.list)
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.resource_spans:
+ _result["resourceSpans"] = _utils.serialize_repeated(self.resource_spans, lambda _v: _v.to_dict())
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "TracesData":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ TracesData instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("resourceSpans")) is not None:
+ _args["resource_spans"] = _utils.deserialize_repeated(_value, lambda _v: ResourceSpans.from_dict(_v), "resource_spans")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "TracesData":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class ResourceSpans:
+ """
+ Generated from protobuf message ResourceSpans
+ """
+
+ resource: typing.Optional[opentelemetry.proto_json.resource.v1.resource.Resource] = None
+ scope_spans: builtins.list[ScopeSpans] = dataclasses.field(default_factory=builtins.list)
+ schema_url: typing.Optional[builtins.str] = ""
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.resource:
+ _result["resource"] = self.resource.to_dict()
+ if self.scope_spans:
+ _result["scopeSpans"] = _utils.serialize_repeated(self.scope_spans, lambda _v: _v.to_dict())
+ if self.schema_url:
+ _result["schemaUrl"] = self.schema_url
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ResourceSpans":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ResourceSpans instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("resource")) is not None:
+ _args["resource"] = opentelemetry.proto_json.resource.v1.resource.Resource.from_dict(_value)
+ if (_value := data.get("scopeSpans")) is not None:
+ _args["scope_spans"] = _utils.deserialize_repeated(_value, lambda _v: ScopeSpans.from_dict(_v), "scope_spans")
+ if (_value := data.get("schemaUrl")) is not None:
+ _utils.validate_type(_value, builtins.str, "schema_url")
+ _args["schema_url"] = _value
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ResourceSpans":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class ScopeSpans:
+ """
+ Generated from protobuf message ScopeSpans
+ """
+
+ scope: typing.Optional[opentelemetry.proto_json.common.v1.common.InstrumentationScope] = None
+ spans: builtins.list[Span] = dataclasses.field(default_factory=builtins.list)
+ schema_url: typing.Optional[builtins.str] = ""
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.scope:
+ _result["scope"] = self.scope.to_dict()
+ if self.spans:
+ _result["spans"] = _utils.serialize_repeated(self.spans, lambda _v: _v.to_dict())
+ if self.schema_url:
+ _result["schemaUrl"] = self.schema_url
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ScopeSpans":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ScopeSpans instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("scope")) is not None:
+ _args["scope"] = opentelemetry.proto_json.common.v1.common.InstrumentationScope.from_dict(_value)
+ if (_value := data.get("spans")) is not None:
+ _args["spans"] = _utils.deserialize_repeated(_value, lambda _v: Span.from_dict(_v), "spans")
+ if (_value := data.get("schemaUrl")) is not None:
+ _utils.validate_type(_value, builtins.str, "schema_url")
+ _args["schema_url"] = _value
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ScopeSpans":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class Span:
+ """
+ Generated from protobuf message Span
+ """
+
+ @typing.final
+ class SpanKind(enum.IntEnum):
+ """
+ Generated from protobuf enum SpanKind
+ """
+
+ SPAN_KIND_UNSPECIFIED = 0
+ SPAN_KIND_INTERNAL = 1
+ SPAN_KIND_SERVER = 2
+ SPAN_KIND_CLIENT = 3
+ SPAN_KIND_PRODUCER = 4
+ SPAN_KIND_CONSUMER = 5
+
+ @typing.final
+ @_dataclass
+ class Event:
+ """
+ Generated from protobuf message Event
+ """
+
+ time_unix_nano: typing.Optional[builtins.int] = 0
+ name: typing.Optional[builtins.str] = ""
+ attributes: builtins.list[opentelemetry.proto_json.common.v1.common.KeyValue] = dataclasses.field(default_factory=builtins.list)
+ dropped_attributes_count: typing.Optional[builtins.int] = 0
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.time_unix_nano:
+ _result["timeUnixNano"] = _utils.encode_int64(self.time_unix_nano)
+ if self.name:
+ _result["name"] = self.name
+ if self.attributes:
+ _result["attributes"] = _utils.serialize_repeated(self.attributes, lambda _v: _v.to_dict())
+ if self.dropped_attributes_count:
+ _result["droppedAttributesCount"] = self.dropped_attributes_count
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "Span.Event":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ Event instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("timeUnixNano")) is not None:
+ _args["time_unix_nano"] = _utils.parse_int64(_value, "time_unix_nano")
+ if (_value := data.get("name")) is not None:
+ _utils.validate_type(_value, builtins.str, "name")
+ _args["name"] = _value
+ if (_value := data.get("attributes")) is not None:
+ _args["attributes"] = _utils.deserialize_repeated(_value, lambda _v: opentelemetry.proto_json.common.v1.common.KeyValue.from_dict(_v), "attributes")
+ if (_value := data.get("droppedAttributesCount")) is not None:
+ _utils.validate_type(_value, builtins.int, "dropped_attributes_count")
+ _args["dropped_attributes_count"] = _value
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "Span.Event":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+ @typing.final
+ @_dataclass
+ class Link:
+ """
+ Generated from protobuf message Link
+ """
+
+ trace_id: typing.Optional[builtins.bytes] = b""
+ span_id: typing.Optional[builtins.bytes] = b""
+ trace_state: typing.Optional[builtins.str] = ""
+ attributes: builtins.list[opentelemetry.proto_json.common.v1.common.KeyValue] = dataclasses.field(default_factory=builtins.list)
+ dropped_attributes_count: typing.Optional[builtins.int] = 0
+ flags: typing.Optional[builtins.int] = 0
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.trace_id:
+ _result["traceId"] = _utils.encode_hex(self.trace_id)
+ if self.span_id:
+ _result["spanId"] = _utils.encode_hex(self.span_id)
+ if self.trace_state:
+ _result["traceState"] = self.trace_state
+ if self.attributes:
+ _result["attributes"] = _utils.serialize_repeated(self.attributes, lambda _v: _v.to_dict())
+ if self.dropped_attributes_count:
+ _result["droppedAttributesCount"] = self.dropped_attributes_count
+ if self.flags:
+ _result["flags"] = self.flags
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "Span.Link":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ Link instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("traceId")) is not None:
+ _args["trace_id"] = _utils.decode_hex(_value, "trace_id")
+ if (_value := data.get("spanId")) is not None:
+ _args["span_id"] = _utils.decode_hex(_value, "span_id")
+ if (_value := data.get("traceState")) is not None:
+ _utils.validate_type(_value, builtins.str, "trace_state")
+ _args["trace_state"] = _value
+ if (_value := data.get("attributes")) is not None:
+ _args["attributes"] = _utils.deserialize_repeated(_value, lambda _v: opentelemetry.proto_json.common.v1.common.KeyValue.from_dict(_v), "attributes")
+ if (_value := data.get("droppedAttributesCount")) is not None:
+ _utils.validate_type(_value, builtins.int, "dropped_attributes_count")
+ _args["dropped_attributes_count"] = _value
+ if (_value := data.get("flags")) is not None:
+ _utils.validate_type(_value, builtins.int, "flags")
+ _args["flags"] = _value
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "Span.Link":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+ trace_id: typing.Optional[builtins.bytes] = b""
+ span_id: typing.Optional[builtins.bytes] = b""
+ trace_state: typing.Optional[builtins.str] = ""
+ parent_span_id: typing.Optional[builtins.bytes] = b""
+ flags: typing.Optional[builtins.int] = 0
+ name: typing.Optional[builtins.str] = ""
+ kind: typing.Union[Span.SpanKind, builtins.int, None] = 0
+ start_time_unix_nano: typing.Optional[builtins.int] = 0
+ end_time_unix_nano: typing.Optional[builtins.int] = 0
+ attributes: builtins.list[opentelemetry.proto_json.common.v1.common.KeyValue] = dataclasses.field(default_factory=builtins.list)
+ dropped_attributes_count: typing.Optional[builtins.int] = 0
+ events: builtins.list[Span.Event] = dataclasses.field(default_factory=builtins.list)
+ dropped_events_count: typing.Optional[builtins.int] = 0
+ links: builtins.list[Span.Link] = dataclasses.field(default_factory=builtins.list)
+ dropped_links_count: typing.Optional[builtins.int] = 0
+ status: typing.Optional[Status] = None
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.trace_id:
+ _result["traceId"] = _utils.encode_hex(self.trace_id)
+ if self.span_id:
+ _result["spanId"] = _utils.encode_hex(self.span_id)
+ if self.trace_state:
+ _result["traceState"] = self.trace_state
+ if self.parent_span_id:
+ _result["parentSpanId"] = _utils.encode_hex(self.parent_span_id)
+ if self.flags:
+ _result["flags"] = self.flags
+ if self.name:
+ _result["name"] = self.name
+ if self.kind:
+ _result["kind"] = builtins.int(self.kind)
+ if self.start_time_unix_nano:
+ _result["startTimeUnixNano"] = _utils.encode_int64(self.start_time_unix_nano)
+ if self.end_time_unix_nano:
+ _result["endTimeUnixNano"] = _utils.encode_int64(self.end_time_unix_nano)
+ if self.attributes:
+ _result["attributes"] = _utils.serialize_repeated(self.attributes, lambda _v: _v.to_dict())
+ if self.dropped_attributes_count:
+ _result["droppedAttributesCount"] = self.dropped_attributes_count
+ if self.events:
+ _result["events"] = _utils.serialize_repeated(self.events, lambda _v: _v.to_dict())
+ if self.dropped_events_count:
+ _result["droppedEventsCount"] = self.dropped_events_count
+ if self.links:
+ _result["links"] = _utils.serialize_repeated(self.links, lambda _v: _v.to_dict())
+ if self.dropped_links_count:
+ _result["droppedLinksCount"] = self.dropped_links_count
+ if self.status:
+ _result["status"] = self.status.to_dict()
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "Span":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ Span instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("traceId")) is not None:
+ _args["trace_id"] = _utils.decode_hex(_value, "trace_id")
+ if (_value := data.get("spanId")) is not None:
+ _args["span_id"] = _utils.decode_hex(_value, "span_id")
+ if (_value := data.get("traceState")) is not None:
+ _utils.validate_type(_value, builtins.str, "trace_state")
+ _args["trace_state"] = _value
+ if (_value := data.get("parentSpanId")) is not None:
+ _args["parent_span_id"] = _utils.decode_hex(_value, "parent_span_id")
+ if (_value := data.get("flags")) is not None:
+ _utils.validate_type(_value, builtins.int, "flags")
+ _args["flags"] = _value
+ if (_value := data.get("name")) is not None:
+ _utils.validate_type(_value, builtins.str, "name")
+ _args["name"] = _value
+ if (_value := data.get("kind")) is not None:
+ _utils.validate_type(_value, builtins.int, "kind")
+ _args["kind"] = Span.SpanKind(_value)
+ if (_value := data.get("startTimeUnixNano")) is not None:
+ _args["start_time_unix_nano"] = _utils.parse_int64(_value, "start_time_unix_nano")
+ if (_value := data.get("endTimeUnixNano")) is not None:
+ _args["end_time_unix_nano"] = _utils.parse_int64(_value, "end_time_unix_nano")
+ if (_value := data.get("attributes")) is not None:
+ _args["attributes"] = _utils.deserialize_repeated(_value, lambda _v: opentelemetry.proto_json.common.v1.common.KeyValue.from_dict(_v), "attributes")
+ if (_value := data.get("droppedAttributesCount")) is not None:
+ _utils.validate_type(_value, builtins.int, "dropped_attributes_count")
+ _args["dropped_attributes_count"] = _value
+ if (_value := data.get("events")) is not None:
+ _args["events"] = _utils.deserialize_repeated(_value, lambda _v: Span.Event.from_dict(_v), "events")
+ if (_value := data.get("droppedEventsCount")) is not None:
+ _utils.validate_type(_value, builtins.int, "dropped_events_count")
+ _args["dropped_events_count"] = _value
+ if (_value := data.get("links")) is not None:
+ _args["links"] = _utils.deserialize_repeated(_value, lambda _v: Span.Link.from_dict(_v), "links")
+ if (_value := data.get("droppedLinksCount")) is not None:
+ _utils.validate_type(_value, builtins.int, "dropped_links_count")
+ _args["dropped_links_count"] = _value
+ if (_value := data.get("status")) is not None:
+ _args["status"] = Status.from_dict(_value)
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "Span":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class Status:
+ """
+ Generated from protobuf message Status
+ """
+
+ @typing.final
+ class StatusCode(enum.IntEnum):
+ """
+ Generated from protobuf enum StatusCode
+ """
+
+ STATUS_CODE_UNSET = 0
+ STATUS_CODE_OK = 1
+ STATUS_CODE_ERROR = 2
+
+ message: typing.Optional[builtins.str] = ""
+ code: typing.Union[Status.StatusCode, builtins.int, None] = 0
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.message:
+ _result["message"] = self.message
+ if self.code:
+ _result["code"] = builtins.int(self.code)
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "Status":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ Status instance
+ """
+ _utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("message")) is not None:
+ _utils.validate_type(_value, builtins.str, "message")
+ _args["message"] = _value
+ if (_value := data.get("code")) is not None:
+ _utils.validate_type(_value, builtins.int, "code")
+ _args["code"] = Status.StatusCode(_value)
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "Status":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
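+
+
+# Editor's sketch (assumes the package is importable): running this module
+# directly builds a minimal trace and round-trips it through the OTLP JSON
+# encoding; IDs come out as hex, timestamps as decimal strings, enums as ints.
+if __name__ == "__main__":
+ _span = Span(
+ trace_id=bytes.fromhex("5b8efff798038103d269b633813fc60c"),
+ span_id=bytes.fromhex("eee19b7ec3c1b174"),
+ name="hello",
+ kind=Span.SpanKind.SPAN_KIND_SERVER,
+ start_time_unix_nano=1544712660000000000,
+ end_time_unix_nano=1544712661000000000,
+ status=Status(code=Status.StatusCode.STATUS_CODE_OK),
+ )
+ _data = TracesData(resource_spans=[ResourceSpans(scope_spans=[ScopeSpans(spans=[_span])])])
+ assert TracesData.from_json(_data.to_json()) == _data
+ print(_data.to_json())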
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/version/__init__.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/version/__init__.py
new file mode 100644
index 00000000000..c099e9440e9
--- /dev/null
+++ b/opentelemetry-proto-json/src/opentelemetry/proto_json/version/__init__.py
@@ -0,0 +1,15 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+__version__ = "0.61b0.dev"
diff --git a/pyproject.toml b/pyproject.toml
index b6970c666d8..4166fbfea92 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -7,14 +7,18 @@ dependencies = [
"opentelemetry-sdk",
"opentelemetry-semantic-conventions",
"opentelemetry-proto",
+ "opentelemetry-proto-json",
"opentelemetry-test-utils",
"opentelemetry-exporter-otlp-proto-grpc",
"opentelemetry-exporter-otlp-proto-http",
"opentelemetry-exporter-otlp-proto-common",
+ "opentelemetry-exporter-otlp-json-http",
+ "opentelemetry-exporter-otlp-json-common",
"opentelemetry-exporter-zipkin-json",
"opentelemetry-exporter-prometheus",
"opentelemetry-propagator-jaeger",
"opentelemetry-propagator-b3",
+ "opentelemetry-codegen-json",
]
# https://docs.astral.sh/uv/reference/settings/
@@ -26,15 +30,19 @@ required-version = ">=0.6.0"
opentelemetry-api = { workspace = true}
opentelemetry-sdk = { workspace = true }
opentelemetry-proto = { workspace = true }
+opentelemetry-proto-json = { workspace = true }
opentelemetry-semantic-conventions = { workspace = true }
opentelemetry-test-utils = { workspace = true }
opentelemetry-exporter-otlp-proto-grpc = { workspace = true }
opentelemetry-exporter-otlp-proto-http = { workspace = true }
opentelemetry-exporter-otlp-proto-common = { workspace = true }
+opentelemetry-exporter-otlp-json-http = { workspace = true }
+opentelemetry-exporter-otlp-json-common = { workspace = true }
opentelemetry-exporter-zipkin-json = { workspace = true }
opentelemetry-exporter-prometheus = {workspace = true }
opentelemetry-propagator-jaeger = { workspace = true }
opentelemetry-propagator-b3 = { workspace = true }
+opentelemetry-codegen-json = { workspace = true }
[tool.uv.workspace]
members = [
@@ -42,8 +50,10 @@ members = [
"opentelemetry-sdk",
"opentelemetry-semantic-conventions",
"opentelemetry-proto",
+ "opentelemetry-proto-json",
"exporter/*",
"propagator/*",
+ "codegen/*",
"tests/opentelemetry-test-utils",
]
diff --git a/scripts/proto_codegen_json.sh b/scripts/proto_codegen_json.sh
new file mode 100755
index 00000000000..4e02566f378
--- /dev/null
+++ b/scripts/proto_codegen_json.sh
@@ -0,0 +1,58 @@
+#!/bin/bash
+#
+# Regenerate python code from OTLP protos in
+# https://github.com/open-telemetry/opentelemetry-proto
+#
+# To use, update the PROTO_REPO_BRANCH_OR_COMMIT variable below to the commit
+# hash or tag in the opentelemetry-proto repo that you want to build from.
+# Then run this script to regenerate the proto-json files. Commit the changes
+# as well as any fixes needed in the OTLP exporter.
+#
+# Optional envars:
+# PROTO_REPO_DIR - the path to an existing checkout of the opentelemetry-proto repo
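+#
+# Example:
+#   PROTO_REPO_DIR=$HOME/src/opentelemetry-proto ./scripts/proto_codegen_json.sh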
+
+# Pinned commit/branch/tag of opentelemetry-proto used to generate the
+# opentelemetry-proto-json python package.
+PROTO_REPO_BRANCH_OR_COMMIT="v1.9.0"
+
+set -e
+
+PROTO_REPO_DIR=${PROTO_REPO_DIR:-"/tmp/opentelemetry-proto"}
+# root of opentelemetry-python repo
+repo_root="$(git rev-parse --show-toplevel)"
+
+protoc() {
+ uvx -c $repo_root/gen-requirements.txt \
+ --python 3.12 \
+ --from grpcio-tools \
+ python -m grpc_tools.protoc "$@"
+}
+
+protoc --version
+
+# Clone the proto repo if it doesn't exist
+if [ ! -d "$PROTO_REPO_DIR" ]; then
+ git clone https://github.com/open-telemetry/opentelemetry-proto.git $PROTO_REPO_DIR
+fi
+
+# Pull in changes and switch to requested branch
+(
+ cd $PROTO_REPO_DIR
+ git fetch --all
+ git checkout $PROTO_REPO_BRANCH_OR_COMMIT
+ # pull only when the checkout is on a branch (not a detached HEAD)
+ git symbolic-ref -q HEAD && git pull --ff-only || true
+)
+
+cd $repo_root/opentelemetry-proto-json/src
+
+# clean up old generated code
+rm -rf opentelemetry/proto_json/*
+
+# generate proto code for all protos
+all_protos=$(find $PROTO_REPO_DIR/ -iname "*.proto")
+protoc \
+ -I $PROTO_REPO_DIR \
+ --otlp_json_out=. \
+ $all_protos
+
+echo "Please update ./opentelemetry-proto-json/README.rst to include the updated version."
diff --git a/tox.ini b/tox.ini
index 52e0c1612a9..5cc3472c7a6 100644
--- a/tox.ini
+++ b/tox.ini
@@ -44,6 +44,10 @@ envlist =
pypy3-test-opentelemetry-exporter-otlp-proto-common
lint-opentelemetry-exporter-otlp-proto-common
+ py3{9,10,11,12,13,14}-test-opentelemetry-exporter-otlp-json-common
+ pypy3-test-opentelemetry-exporter-otlp-json-common
+ lint-opentelemetry-exporter-otlp-json-common
+
; opentelemetry-exporter-otlp
py3{9,10,11,12,13,14}-test-opentelemetry-exporter-otlp-combined
; intentionally excluded from pypy3
@@ -120,6 +124,8 @@ deps =
exporter-otlp-proto-common: -r {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-common/test-requirements.txt
+ exporter-otlp-json-common: -r {toxinidir}/exporter/opentelemetry-exporter-otlp-json-common/test-requirements.txt
+
exporter-otlp-combined: -r {toxinidir}/exporter/opentelemetry-exporter-otlp/test-requirements.txt
opentelemetry-exporter-otlp-proto-grpc-oldest: -r {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-grpc/test-requirements.oldest.txt
@@ -195,6 +201,9 @@ commands =
test-opentelemetry-exporter-otlp-proto-common: pytest {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-common/tests {posargs}
lint-opentelemetry-exporter-otlp-proto-common: sh -c "cd exporter && pylint --prefer-stubs yes --rcfile ../.pylintrc {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-common"
+ test-opentelemetry-exporter-otlp-json-common: pytest {toxinidir}/exporter/opentelemetry-exporter-otlp-json-common/tests {posargs}
+ lint-opentelemetry-exporter-otlp-json-common: sh -c "cd exporter && pylint --prefer-stubs yes --rcfile ../.pylintrc {toxinidir}/exporter/opentelemetry-exporter-otlp-json-common"
+
test-opentelemetry-exporter-otlp-combined: pytest {toxinidir}/exporter/opentelemetry-exporter-otlp/tests {posargs}
lint-opentelemetry-exporter-otlp-combined: sh -c "cd exporter && pylint --rcfile ../.pylintrc {toxinidir}/exporter/opentelemetry-exporter-otlp"
@@ -293,6 +302,7 @@ deps =
; OTLP packages
otlpexporter: -e {toxinidir}/opentelemetry-proto
otlpexporter: -e {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-common
+ otlpexporter: -e {toxinidir}/exporter/opentelemetry-exporter-otlp-json-common
otlpexporter: -e {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-grpc
otlpexporter: -e {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-http
otlpexporter: -e {toxinidir}/exporter/opentelemetry-exporter-otlp
@@ -347,6 +357,7 @@ deps =
-e {toxinidir}/opentelemetry-sdk
-e {toxinidir}/tests/opentelemetry-test-utils
-e {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-common
+ -e {toxinidir}/exporter/opentelemetry-exporter-otlp-json-common
-e {toxinidir}/exporter/opentelemetry-exporter-otlp
-e {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-grpc
-e {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-http
diff --git a/uv.lock b/uv.lock
index 30f2debcda2..061b9b5faa7 100644
--- a/uv.lock
+++ b/uv.lock
@@ -11,7 +11,10 @@ resolution-markers = [
[manifest]
members = [
"opentelemetry-api",
+ "opentelemetry-codegen-json",
"opentelemetry-exporter-otlp",
+ "opentelemetry-exporter-otlp-json-common",
+ "opentelemetry-exporter-otlp-json-http",
"opentelemetry-exporter-otlp-proto-common",
"opentelemetry-exporter-otlp-proto-grpc",
"opentelemetry-exporter-otlp-proto-http",
@@ -20,6 +23,7 @@ members = [
"opentelemetry-propagator-b3",
"opentelemetry-propagator-jaeger",
"opentelemetry-proto",
+ "opentelemetry-proto-json",
"opentelemetry-python",
"opentelemetry-sdk",
"opentelemetry-semantic-conventions",
@@ -390,6 +394,20 @@ requires-dist = [
{ name = "typing-extensions", specifier = ">=4.5.0" },
]
+[[package]]
+name = "opentelemetry-codegen-json"
+source = { editable = "codegen/opentelemetry-codegen-json" }
+dependencies = [
+ { name = "protobuf" },
+ { name = "types-protobuf" },
+]
+
+[package.metadata]
+requires-dist = [
+ { name = "protobuf", specifier = ">=4.25.3" },
+ { name = "types-protobuf", specifier = ">=4.24" },
+]
+
[[package]]
name = "opentelemetry-exporter-credential-provider-gcp"
version = "0.60b0"
@@ -418,6 +436,40 @@ requires-dist = [
{ name = "opentelemetry-exporter-otlp-proto-http", editable = "exporter/opentelemetry-exporter-otlp-proto-http" },
]
+[[package]]
+name = "opentelemetry-exporter-otlp-json-common"
+source = { editable = "exporter/opentelemetry-exporter-otlp-json-common" }
+dependencies = [
+ { name = "opentelemetry-proto-json" },
+ { name = "opentelemetry-sdk" },
+]
+
+[package.metadata]
+requires-dist = [
+ { name = "opentelemetry-proto-json", editable = "opentelemetry-proto-json" },
+ { name = "opentelemetry-sdk", editable = "opentelemetry-sdk" },
+]
+
+[[package]]
+name = "opentelemetry-exporter-otlp-json-http"
+source = { editable = "exporter/opentelemetry-exporter-otlp-json-http" }
+dependencies = [
+ { name = "opentelemetry-api" },
+ { name = "opentelemetry-exporter-otlp-json-common" },
+ { name = "opentelemetry-proto-json" },
+ { name = "opentelemetry-sdk" },
+ { name = "urllib3" },
+]
+
+[package.metadata]
+requires-dist = [
+ { name = "opentelemetry-api", editable = "opentelemetry-api" },
+ { name = "opentelemetry-exporter-otlp-json-common", editable = "exporter/opentelemetry-exporter-otlp-json-common" },
+ { name = "opentelemetry-proto-json", editable = "opentelemetry-proto-json" },
+ { name = "opentelemetry-sdk", editable = "opentelemetry-sdk" },
+ { name = "urllib3", specifier = "~=2.6" },
+]
+
[[package]]
name = "opentelemetry-exporter-otlp-proto-common"
source = { editable = "exporter/opentelemetry-exporter-otlp-proto-common" }
@@ -558,12 +610,19 @@ dependencies = [
[package.metadata]
requires-dist = [{ name = "protobuf", specifier = ">=5.0,<7.0" }]
+[[package]]
+name = "opentelemetry-proto-json"
+source = { editable = "opentelemetry-proto-json" }
+
[[package]]
name = "opentelemetry-python"
version = "0.0.0"
source = { virtual = "." }
dependencies = [
{ name = "opentelemetry-api" },
+ { name = "opentelemetry-codegen-json" },
+ { name = "opentelemetry-exporter-otlp-json-common" },
+ { name = "opentelemetry-exporter-otlp-json-http" },
{ name = "opentelemetry-exporter-otlp-proto-common" },
{ name = "opentelemetry-exporter-otlp-proto-grpc" },
{ name = "opentelemetry-exporter-otlp-proto-http" },
@@ -572,6 +631,7 @@ dependencies = [
{ name = "opentelemetry-propagator-b3" },
{ name = "opentelemetry-propagator-jaeger" },
{ name = "opentelemetry-proto" },
+ { name = "opentelemetry-proto-json" },
{ name = "opentelemetry-sdk" },
{ name = "opentelemetry-semantic-conventions" },
{ name = "opentelemetry-test-utils" },
@@ -590,6 +650,9 @@ dev = [
[package.metadata]
requires-dist = [
{ name = "opentelemetry-api", editable = "opentelemetry-api" },
+ { name = "opentelemetry-codegen-json", editable = "codegen/opentelemetry-codegen-json" },
+ { name = "opentelemetry-exporter-otlp-json-common", editable = "exporter/opentelemetry-exporter-otlp-json-common" },
+ { name = "opentelemetry-exporter-otlp-json-http", editable = "exporter/opentelemetry-exporter-otlp-json-http" },
{ name = "opentelemetry-exporter-otlp-proto-common", editable = "exporter/opentelemetry-exporter-otlp-proto-common" },
{ name = "opentelemetry-exporter-otlp-proto-grpc", editable = "exporter/opentelemetry-exporter-otlp-proto-grpc" },
{ name = "opentelemetry-exporter-otlp-proto-http", editable = "exporter/opentelemetry-exporter-otlp-proto-http" },
@@ -598,6 +661,7 @@ requires-dist = [
{ name = "opentelemetry-propagator-b3", editable = "propagator/opentelemetry-propagator-b3" },
{ name = "opentelemetry-propagator-jaeger", editable = "propagator/opentelemetry-propagator-jaeger" },
{ name = "opentelemetry-proto", editable = "opentelemetry-proto" },
+ { name = "opentelemetry-proto-json", editable = "opentelemetry-proto-json" },
{ name = "opentelemetry-sdk", editable = "opentelemetry-sdk" },
{ name = "opentelemetry-semantic-conventions", editable = "opentelemetry-semantic-conventions" },
{ name = "opentelemetry-test-utils", editable = "tests/opentelemetry-test-utils" },
@@ -1061,6 +1125,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/5c/17/221d62937c4130b044bb437caac4181e7e13d5536bbede65264db1f0ac9f/tox_uv-1.29.0-py3-none-any.whl", hash = "sha256:b1d251286edeeb4bc4af1e24c8acfdd9404700143c2199ccdbb4ea195f7de6cc", size = 17254, upload-time = "2025-10-09T20:40:25.885Z" },
]
+[[package]]
+name = "types-protobuf"
+version = "6.32.1.20251210"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/c2/59/c743a842911887cd96d56aa8936522b0cd5f7a7f228c96e81b59fced45be/types_protobuf-6.32.1.20251210.tar.gz", hash = "sha256:c698bb3f020274b1a2798ae09dc773728ce3f75209a35187bd11916ebfde6763", size = 63900, upload-time = "2025-12-10T03:14:25.451Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/aa/43/58e75bac4219cbafee83179505ff44cae3153ec279be0e30583a73b8f108/types_protobuf-6.32.1.20251210-py3-none-any.whl", hash = "sha256:2641f78f3696822a048cfb8d0ff42ccd85c25f12f871fbebe86da63793692140", size = 77921, upload-time = "2025-12-10T03:14:24.477Z" },
+]
+
[[package]]
name = "typing-extensions"
version = "4.15.0"