diff --git a/.github/workflows/test_0.yml b/.github/workflows/test_0.yml
index 2fd057ab59..626e423c22 100644
--- a/.github/workflows/test_0.yml
+++ b/.github/workflows/test_0.yml
@@ -488,6 +488,310 @@ jobs:
- name: Run tests
run: tox -e pypy3-test-opentelemetry-proto-gen-latest -- -ra
+ py39-test-opentelemetry-protojson-gen-oldest_ubuntu-latest:
+ name: opentelemetry-protojson-gen-oldest 3.9 Ubuntu
+ runs-on: ubuntu-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.9
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.9"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Run tests
+ run: tox -e py39-test-opentelemetry-protojson-gen-oldest -- -ra
+
+ py39-test-opentelemetry-protojson-gen-latest_ubuntu-latest:
+ name: opentelemetry-protojson-gen-latest 3.9 Ubuntu
+ runs-on: ubuntu-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.9
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.9"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Run tests
+ run: tox -e py39-test-opentelemetry-protojson-gen-latest -- -ra
+
+ py310-test-opentelemetry-protojson-gen-oldest_ubuntu-latest:
+ name: opentelemetry-protojson-gen-oldest 3.10 Ubuntu
+ runs-on: ubuntu-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.10
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.10"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Run tests
+ run: tox -e py310-test-opentelemetry-protojson-gen-oldest -- -ra
+
+ py310-test-opentelemetry-protojson-gen-latest_ubuntu-latest:
+ name: opentelemetry-protojson-gen-latest 3.10 Ubuntu
+ runs-on: ubuntu-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.10
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.10"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Run tests
+ run: tox -e py310-test-opentelemetry-protojson-gen-latest -- -ra
+
+ py311-test-opentelemetry-protojson-gen-oldest_ubuntu-latest:
+ name: opentelemetry-protojson-gen-oldest 3.11 Ubuntu
+ runs-on: ubuntu-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.11
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.11"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Run tests
+ run: tox -e py311-test-opentelemetry-protojson-gen-oldest -- -ra
+
+ py311-test-opentelemetry-protojson-gen-latest_ubuntu-latest:
+ name: opentelemetry-protojson-gen-latest 3.11 Ubuntu
+ runs-on: ubuntu-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.11
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.11"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Run tests
+ run: tox -e py311-test-opentelemetry-protojson-gen-latest -- -ra
+
+ py312-test-opentelemetry-protojson-gen-oldest_ubuntu-latest:
+ name: opentelemetry-protojson-gen-oldest 3.12 Ubuntu
+ runs-on: ubuntu-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.12
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.12"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Run tests
+ run: tox -e py312-test-opentelemetry-protojson-gen-oldest -- -ra
+
+ py312-test-opentelemetry-protojson-gen-latest_ubuntu-latest:
+ name: opentelemetry-protojson-gen-latest 3.12 Ubuntu
+ runs-on: ubuntu-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.12
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.12"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Run tests
+ run: tox -e py312-test-opentelemetry-protojson-gen-latest -- -ra
+
+ py313-test-opentelemetry-protojson-gen-oldest_ubuntu-latest:
+ name: opentelemetry-protojson-gen-oldest 3.13 Ubuntu
+ runs-on: ubuntu-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.13
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.13"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Run tests
+ run: tox -e py313-test-opentelemetry-protojson-gen-oldest -- -ra
+
+ py313-test-opentelemetry-protojson-gen-latest_ubuntu-latest:
+ name: opentelemetry-protojson-gen-latest 3.13 Ubuntu
+ runs-on: ubuntu-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.13
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.13"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Run tests
+ run: tox -e py313-test-opentelemetry-protojson-gen-latest -- -ra
+
+ py314-test-opentelemetry-protojson-gen-oldest_ubuntu-latest:
+ name: opentelemetry-protojson-gen-oldest 3.14 Ubuntu
+ runs-on: ubuntu-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.14
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.14"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Run tests
+ run: tox -e py314-test-opentelemetry-protojson-gen-oldest -- -ra
+
+ py314-test-opentelemetry-protojson-gen-latest_ubuntu-latest:
+ name: opentelemetry-protojson-gen-latest 3.14 Ubuntu
+ runs-on: ubuntu-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.14
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.14"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Run tests
+ run: tox -e py314-test-opentelemetry-protojson-gen-latest -- -ra
+
+ py314t-test-opentelemetry-protojson-gen-oldest_ubuntu-latest:
+ name: opentelemetry-protojson-gen-oldest 3.14t Ubuntu
+ runs-on: ubuntu-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.14t
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.14t"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Run tests
+ run: tox -e py314t-test-opentelemetry-protojson-gen-oldest -- -ra
+
+ py314t-test-opentelemetry-protojson-gen-latest_ubuntu-latest:
+ name: opentelemetry-protojson-gen-latest 3.14t Ubuntu
+ runs-on: ubuntu-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.14t
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.14t"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Run tests
+ run: tox -e py314t-test-opentelemetry-protojson-gen-latest -- -ra
+
+ pypy3-test-opentelemetry-protojson-gen-oldest_ubuntu-latest:
+ name: opentelemetry-protojson-gen-oldest pypy-3.9 Ubuntu
+ runs-on: ubuntu-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python pypy-3.9
+ uses: actions/setup-python@v5
+ with:
+ python-version: "pypy-3.9"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Run tests
+ run: tox -e pypy3-test-opentelemetry-protojson-gen-oldest -- -ra
+
+ pypy3-test-opentelemetry-protojson-gen-latest_ubuntu-latest:
+ name: opentelemetry-protojson-gen-latest pypy-3.9 Ubuntu
+ runs-on: ubuntu-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python pypy-3.9
+ uses: actions/setup-python@v5
+ with:
+ python-version: "pypy-3.9"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Run tests
+ run: tox -e pypy3-test-opentelemetry-protojson-gen-latest -- -ra
+
py39-test-opentelemetry-sdk_ubuntu-latest:
name: opentelemetry-sdk 3.9 Ubuntu
runs-on: ubuntu-latest
@@ -3524,8 +3828,8 @@ jobs:
- name: Run tests
run: tox -e pypy3-test-opentelemetry-proto-gen-latest -- -ra
- py39-test-opentelemetry-sdk_windows-latest:
- name: opentelemetry-sdk 3.9 Windows
+ py39-test-opentelemetry-protojson-gen-oldest_windows-latest:
+ name: opentelemetry-protojson-gen-oldest 3.9 Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
@@ -3544,164 +3848,10 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py39-test-opentelemetry-sdk -- -ra
-
- py310-test-opentelemetry-sdk_windows-latest:
- name: opentelemetry-sdk 3.10 Windows
- runs-on: windows-latest
- timeout-minutes: 30
- steps:
- - name: Checkout repo @ SHA - ${{ github.sha }}
- uses: actions/checkout@v4
-
- - name: Set up Python 3.10
- uses: actions/setup-python@v5
- with:
- python-version: "3.10"
-
- - name: Install tox
- run: pip install tox-uv
-
- - name: Configure git to support long filenames
- run: git config --system core.longpaths true
-
- - name: Run tests
- run: tox -e py310-test-opentelemetry-sdk -- -ra
-
- py311-test-opentelemetry-sdk_windows-latest:
- name: opentelemetry-sdk 3.11 Windows
- runs-on: windows-latest
- timeout-minutes: 30
- steps:
- - name: Checkout repo @ SHA - ${{ github.sha }}
- uses: actions/checkout@v4
-
- - name: Set up Python 3.11
- uses: actions/setup-python@v5
- with:
- python-version: "3.11"
-
- - name: Install tox
- run: pip install tox-uv
-
- - name: Configure git to support long filenames
- run: git config --system core.longpaths true
-
- - name: Run tests
- run: tox -e py311-test-opentelemetry-sdk -- -ra
-
- py312-test-opentelemetry-sdk_windows-latest:
- name: opentelemetry-sdk 3.12 Windows
- runs-on: windows-latest
- timeout-minutes: 30
- steps:
- - name: Checkout repo @ SHA - ${{ github.sha }}
- uses: actions/checkout@v4
-
- - name: Set up Python 3.12
- uses: actions/setup-python@v5
- with:
- python-version: "3.12"
-
- - name: Install tox
- run: pip install tox-uv
-
- - name: Configure git to support long filenames
- run: git config --system core.longpaths true
-
- - name: Run tests
- run: tox -e py312-test-opentelemetry-sdk -- -ra
-
- py313-test-opentelemetry-sdk_windows-latest:
- name: opentelemetry-sdk 3.13 Windows
- runs-on: windows-latest
- timeout-minutes: 30
- steps:
- - name: Checkout repo @ SHA - ${{ github.sha }}
- uses: actions/checkout@v4
-
- - name: Set up Python 3.13
- uses: actions/setup-python@v5
- with:
- python-version: "3.13"
-
- - name: Install tox
- run: pip install tox-uv
-
- - name: Configure git to support long filenames
- run: git config --system core.longpaths true
-
- - name: Run tests
- run: tox -e py313-test-opentelemetry-sdk -- -ra
-
- py314-test-opentelemetry-sdk_windows-latest:
- name: opentelemetry-sdk 3.14 Windows
- runs-on: windows-latest
- timeout-minutes: 30
- steps:
- - name: Checkout repo @ SHA - ${{ github.sha }}
- uses: actions/checkout@v4
-
- - name: Set up Python 3.14
- uses: actions/setup-python@v5
- with:
- python-version: "3.14"
-
- - name: Install tox
- run: pip install tox-uv
-
- - name: Configure git to support long filenames
- run: git config --system core.longpaths true
-
- - name: Run tests
- run: tox -e py314-test-opentelemetry-sdk -- -ra
-
- py314t-test-opentelemetry-sdk_windows-latest:
- name: opentelemetry-sdk 3.14t Windows
- runs-on: windows-latest
- timeout-minutes: 30
- steps:
- - name: Checkout repo @ SHA - ${{ github.sha }}
- uses: actions/checkout@v4
-
- - name: Set up Python 3.14t
- uses: actions/setup-python@v5
- with:
- python-version: "3.14t"
-
- - name: Install tox
- run: pip install tox-uv
-
- - name: Configure git to support long filenames
- run: git config --system core.longpaths true
-
- - name: Run tests
- run: tox -e py314t-test-opentelemetry-sdk -- -ra
-
- pypy3-test-opentelemetry-sdk_windows-latest:
- name: opentelemetry-sdk pypy-3.9 Windows
- runs-on: windows-latest
- timeout-minutes: 30
- steps:
- - name: Checkout repo @ SHA - ${{ github.sha }}
- uses: actions/checkout@v4
-
- - name: Set up Python pypy-3.9
- uses: actions/setup-python@v5
- with:
- python-version: "pypy-3.9"
-
- - name: Install tox
- run: pip install tox-uv
-
- - name: Configure git to support long filenames
- run: git config --system core.longpaths true
-
- - name: Run tests
- run: tox -e pypy3-test-opentelemetry-sdk -- -ra
+ run: tox -e py39-test-opentelemetry-protojson-gen-oldest -- -ra
- py39-test-opentelemetry-semantic-conventions_windows-latest:
- name: opentelemetry-semantic-conventions 3.9 Windows
+ py39-test-opentelemetry-protojson-gen-latest_windows-latest:
+ name: opentelemetry-protojson-gen-latest 3.9 Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
@@ -3720,10 +3870,10 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py39-test-opentelemetry-semantic-conventions -- -ra
+ run: tox -e py39-test-opentelemetry-protojson-gen-latest -- -ra
- py310-test-opentelemetry-semantic-conventions_windows-latest:
- name: opentelemetry-semantic-conventions 3.10 Windows
+ py310-test-opentelemetry-protojson-gen-oldest_windows-latest:
+ name: opentelemetry-protojson-gen-oldest 3.10 Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
@@ -3742,164 +3892,10 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py310-test-opentelemetry-semantic-conventions -- -ra
-
- py311-test-opentelemetry-semantic-conventions_windows-latest:
- name: opentelemetry-semantic-conventions 3.11 Windows
- runs-on: windows-latest
- timeout-minutes: 30
- steps:
- - name: Checkout repo @ SHA - ${{ github.sha }}
- uses: actions/checkout@v4
-
- - name: Set up Python 3.11
- uses: actions/setup-python@v5
- with:
- python-version: "3.11"
-
- - name: Install tox
- run: pip install tox-uv
-
- - name: Configure git to support long filenames
- run: git config --system core.longpaths true
-
- - name: Run tests
- run: tox -e py311-test-opentelemetry-semantic-conventions -- -ra
-
- py312-test-opentelemetry-semantic-conventions_windows-latest:
- name: opentelemetry-semantic-conventions 3.12 Windows
- runs-on: windows-latest
- timeout-minutes: 30
- steps:
- - name: Checkout repo @ SHA - ${{ github.sha }}
- uses: actions/checkout@v4
-
- - name: Set up Python 3.12
- uses: actions/setup-python@v5
- with:
- python-version: "3.12"
-
- - name: Install tox
- run: pip install tox-uv
-
- - name: Configure git to support long filenames
- run: git config --system core.longpaths true
-
- - name: Run tests
- run: tox -e py312-test-opentelemetry-semantic-conventions -- -ra
-
- py313-test-opentelemetry-semantic-conventions_windows-latest:
- name: opentelemetry-semantic-conventions 3.13 Windows
- runs-on: windows-latest
- timeout-minutes: 30
- steps:
- - name: Checkout repo @ SHA - ${{ github.sha }}
- uses: actions/checkout@v4
-
- - name: Set up Python 3.13
- uses: actions/setup-python@v5
- with:
- python-version: "3.13"
-
- - name: Install tox
- run: pip install tox-uv
-
- - name: Configure git to support long filenames
- run: git config --system core.longpaths true
-
- - name: Run tests
- run: tox -e py313-test-opentelemetry-semantic-conventions -- -ra
-
- py314-test-opentelemetry-semantic-conventions_windows-latest:
- name: opentelemetry-semantic-conventions 3.14 Windows
- runs-on: windows-latest
- timeout-minutes: 30
- steps:
- - name: Checkout repo @ SHA - ${{ github.sha }}
- uses: actions/checkout@v4
-
- - name: Set up Python 3.14
- uses: actions/setup-python@v5
- with:
- python-version: "3.14"
-
- - name: Install tox
- run: pip install tox-uv
-
- - name: Configure git to support long filenames
- run: git config --system core.longpaths true
-
- - name: Run tests
- run: tox -e py314-test-opentelemetry-semantic-conventions -- -ra
-
- py314t-test-opentelemetry-semantic-conventions_windows-latest:
- name: opentelemetry-semantic-conventions 3.14t Windows
- runs-on: windows-latest
- timeout-minutes: 30
- steps:
- - name: Checkout repo @ SHA - ${{ github.sha }}
- uses: actions/checkout@v4
-
- - name: Set up Python 3.14t
- uses: actions/setup-python@v5
- with:
- python-version: "3.14t"
-
- - name: Install tox
- run: pip install tox-uv
-
- - name: Configure git to support long filenames
- run: git config --system core.longpaths true
-
- - name: Run tests
- run: tox -e py314t-test-opentelemetry-semantic-conventions -- -ra
-
- pypy3-test-opentelemetry-semantic-conventions_windows-latest:
- name: opentelemetry-semantic-conventions pypy-3.9 Windows
- runs-on: windows-latest
- timeout-minutes: 30
- steps:
- - name: Checkout repo @ SHA - ${{ github.sha }}
- uses: actions/checkout@v4
-
- - name: Set up Python pypy-3.9
- uses: actions/setup-python@v5
- with:
- python-version: "pypy-3.9"
-
- - name: Install tox
- run: pip install tox-uv
-
- - name: Configure git to support long filenames
- run: git config --system core.longpaths true
-
- - name: Run tests
- run: tox -e pypy3-test-opentelemetry-semantic-conventions -- -ra
+ run: tox -e py310-test-opentelemetry-protojson-gen-oldest -- -ra
- py39-test-opentelemetry-getting-started_windows-latest:
- name: opentelemetry-getting-started 3.9 Windows
- runs-on: windows-latest
- timeout-minutes: 30
- steps:
- - name: Checkout repo @ SHA - ${{ github.sha }}
- uses: actions/checkout@v4
-
- - name: Set up Python 3.9
- uses: actions/setup-python@v5
- with:
- python-version: "3.9"
-
- - name: Install tox
- run: pip install tox-uv
-
- - name: Configure git to support long filenames
- run: git config --system core.longpaths true
-
- - name: Run tests
- run: tox -e py39-test-opentelemetry-getting-started -- -ra
-
- py310-test-opentelemetry-getting-started_windows-latest:
- name: opentelemetry-getting-started 3.10 Windows
+ py310-test-opentelemetry-protojson-gen-latest_windows-latest:
+ name: opentelemetry-protojson-gen-latest 3.10 Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
@@ -3918,10 +3914,10 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py310-test-opentelemetry-getting-started -- -ra
+ run: tox -e py310-test-opentelemetry-protojson-gen-latest -- -ra
- py311-test-opentelemetry-getting-started_windows-latest:
- name: opentelemetry-getting-started 3.11 Windows
+ py311-test-opentelemetry-protojson-gen-oldest_windows-latest:
+ name: opentelemetry-protojson-gen-oldest 3.11 Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
@@ -3940,108 +3936,20 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py311-test-opentelemetry-getting-started -- -ra
-
- py312-test-opentelemetry-getting-started_windows-latest:
- name: opentelemetry-getting-started 3.12 Windows
- runs-on: windows-latest
- timeout-minutes: 30
- steps:
- - name: Checkout repo @ SHA - ${{ github.sha }}
- uses: actions/checkout@v4
-
- - name: Set up Python 3.12
- uses: actions/setup-python@v5
- with:
- python-version: "3.12"
-
- - name: Install tox
- run: pip install tox-uv
-
- - name: Configure git to support long filenames
- run: git config --system core.longpaths true
-
- - name: Run tests
- run: tox -e py312-test-opentelemetry-getting-started -- -ra
-
- py313-test-opentelemetry-getting-started_windows-latest:
- name: opentelemetry-getting-started 3.13 Windows
- runs-on: windows-latest
- timeout-minutes: 30
- steps:
- - name: Checkout repo @ SHA - ${{ github.sha }}
- uses: actions/checkout@v4
-
- - name: Set up Python 3.13
- uses: actions/setup-python@v5
- with:
- python-version: "3.13"
-
- - name: Install tox
- run: pip install tox-uv
-
- - name: Configure git to support long filenames
- run: git config --system core.longpaths true
-
- - name: Run tests
- run: tox -e py313-test-opentelemetry-getting-started -- -ra
-
- py314-test-opentelemetry-getting-started_windows-latest:
- name: opentelemetry-getting-started 3.14 Windows
- runs-on: windows-latest
- timeout-minutes: 30
- steps:
- - name: Checkout repo @ SHA - ${{ github.sha }}
- uses: actions/checkout@v4
-
- - name: Set up Python 3.14
- uses: actions/setup-python@v5
- with:
- python-version: "3.14"
-
- - name: Install tox
- run: pip install tox-uv
-
- - name: Configure git to support long filenames
- run: git config --system core.longpaths true
-
- - name: Run tests
- run: tox -e py314-test-opentelemetry-getting-started -- -ra
-
- py39-test-opentelemetry-opentracing-shim_windows-latest:
- name: opentelemetry-opentracing-shim 3.9 Windows
- runs-on: windows-latest
- timeout-minutes: 30
- steps:
- - name: Checkout repo @ SHA - ${{ github.sha }}
- uses: actions/checkout@v4
-
- - name: Set up Python 3.9
- uses: actions/setup-python@v5
- with:
- python-version: "3.9"
-
- - name: Install tox
- run: pip install tox-uv
-
- - name: Configure git to support long filenames
- run: git config --system core.longpaths true
-
- - name: Run tests
- run: tox -e py39-test-opentelemetry-opentracing-shim -- -ra
+ run: tox -e py311-test-opentelemetry-protojson-gen-oldest -- -ra
- py310-test-opentelemetry-opentracing-shim_windows-latest:
- name: opentelemetry-opentracing-shim 3.10 Windows
+ py311-test-opentelemetry-protojson-gen-latest_windows-latest:
+ name: opentelemetry-protojson-gen-latest 3.11 Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
- name: Checkout repo @ SHA - ${{ github.sha }}
uses: actions/checkout@v4
- - name: Set up Python 3.10
+ - name: Set up Python 3.11
uses: actions/setup-python@v5
with:
- python-version: "3.10"
+ python-version: "3.11"
- name: Install tox
run: pip install tox-uv
@@ -4050,20 +3958,20 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py310-test-opentelemetry-opentracing-shim -- -ra
+ run: tox -e py311-test-opentelemetry-protojson-gen-latest -- -ra
- py311-test-opentelemetry-opentracing-shim_windows-latest:
- name: opentelemetry-opentracing-shim 3.11 Windows
+ py312-test-opentelemetry-protojson-gen-oldest_windows-latest:
+ name: opentelemetry-protojson-gen-oldest 3.12 Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
- name: Checkout repo @ SHA - ${{ github.sha }}
uses: actions/checkout@v4
- - name: Set up Python 3.11
+ - name: Set up Python 3.12
uses: actions/setup-python@v5
with:
- python-version: "3.11"
+ python-version: "3.12"
- name: Install tox
run: pip install tox-uv
@@ -4072,10 +3980,10 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py311-test-opentelemetry-opentracing-shim -- -ra
+ run: tox -e py312-test-opentelemetry-protojson-gen-oldest -- -ra
- py312-test-opentelemetry-opentracing-shim_windows-latest:
- name: opentelemetry-opentracing-shim 3.12 Windows
+ py312-test-opentelemetry-protojson-gen-latest_windows-latest:
+ name: opentelemetry-protojson-gen-latest 3.12 Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
@@ -4094,10 +4002,10 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py312-test-opentelemetry-opentracing-shim -- -ra
+ run: tox -e py312-test-opentelemetry-protojson-gen-latest -- -ra
- py313-test-opentelemetry-opentracing-shim_windows-latest:
- name: opentelemetry-opentracing-shim 3.13 Windows
+ py313-test-opentelemetry-protojson-gen-oldest_windows-latest:
+ name: opentelemetry-protojson-gen-oldest 3.13 Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
@@ -4116,20 +4024,20 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py313-test-opentelemetry-opentracing-shim -- -ra
+ run: tox -e py313-test-opentelemetry-protojson-gen-oldest -- -ra
- py314-test-opentelemetry-opentracing-shim_windows-latest:
- name: opentelemetry-opentracing-shim 3.14 Windows
+ py313-test-opentelemetry-protojson-gen-latest_windows-latest:
+ name: opentelemetry-protojson-gen-latest 3.13 Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
- name: Checkout repo @ SHA - ${{ github.sha }}
uses: actions/checkout@v4
- - name: Set up Python 3.14
+ - name: Set up Python 3.13
uses: actions/setup-python@v5
with:
- python-version: "3.14"
+ python-version: "3.13"
- name: Install tox
run: pip install tox-uv
@@ -4138,20 +4046,20 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py314-test-opentelemetry-opentracing-shim -- -ra
+ run: tox -e py313-test-opentelemetry-protojson-gen-latest -- -ra
- py314t-test-opentelemetry-opentracing-shim_windows-latest:
- name: opentelemetry-opentracing-shim 3.14t Windows
+ py314-test-opentelemetry-protojson-gen-oldest_windows-latest:
+ name: opentelemetry-protojson-gen-oldest 3.14 Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
- name: Checkout repo @ SHA - ${{ github.sha }}
uses: actions/checkout@v4
- - name: Set up Python 3.14t
+ - name: Set up Python 3.14
uses: actions/setup-python@v5
with:
- python-version: "3.14t"
+ python-version: "3.14"
- name: Install tox
run: pip install tox-uv
@@ -4160,20 +4068,20 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py314t-test-opentelemetry-opentracing-shim -- -ra
+ run: tox -e py314-test-opentelemetry-protojson-gen-oldest -- -ra
- pypy3-test-opentelemetry-opentracing-shim_windows-latest:
- name: opentelemetry-opentracing-shim pypy-3.9 Windows
+ py314-test-opentelemetry-protojson-gen-latest_windows-latest:
+ name: opentelemetry-protojson-gen-latest 3.14 Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
- name: Checkout repo @ SHA - ${{ github.sha }}
uses: actions/checkout@v4
- - name: Set up Python pypy-3.9
+ - name: Set up Python 3.14
uses: actions/setup-python@v5
with:
- python-version: "pypy-3.9"
+ python-version: "3.14"
- name: Install tox
run: pip install tox-uv
@@ -4182,20 +4090,20 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e pypy3-test-opentelemetry-opentracing-shim -- -ra
+ run: tox -e py314-test-opentelemetry-protojson-gen-latest -- -ra
- py39-test-opentelemetry-opencensus-shim_windows-latest:
- name: opentelemetry-opencensus-shim 3.9 Windows
+ py314t-test-opentelemetry-protojson-gen-oldest_windows-latest:
+ name: opentelemetry-protojson-gen-oldest 3.14t Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
- name: Checkout repo @ SHA - ${{ github.sha }}
uses: actions/checkout@v4
- - name: Set up Python 3.9
+ - name: Set up Python 3.14t
uses: actions/setup-python@v5
with:
- python-version: "3.9"
+ python-version: "3.14t"
- name: Install tox
run: pip install tox-uv
@@ -4204,20 +4112,20 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py39-test-opentelemetry-opencensus-shim -- -ra
+ run: tox -e py314t-test-opentelemetry-protojson-gen-oldest -- -ra
- py310-test-opentelemetry-opencensus-shim_windows-latest:
- name: opentelemetry-opencensus-shim 3.10 Windows
+ py314t-test-opentelemetry-protojson-gen-latest_windows-latest:
+ name: opentelemetry-protojson-gen-latest 3.14t Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
- name: Checkout repo @ SHA - ${{ github.sha }}
uses: actions/checkout@v4
- - name: Set up Python 3.10
+ - name: Set up Python 3.14t
uses: actions/setup-python@v5
with:
- python-version: "3.10"
+ python-version: "3.14t"
- name: Install tox
run: pip install tox-uv
@@ -4226,20 +4134,20 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py310-test-opentelemetry-opencensus-shim -- -ra
+ run: tox -e py314t-test-opentelemetry-protojson-gen-latest -- -ra
- py311-test-opentelemetry-opencensus-shim_windows-latest:
- name: opentelemetry-opencensus-shim 3.11 Windows
+ pypy3-test-opentelemetry-protojson-gen-oldest_windows-latest:
+ name: opentelemetry-protojson-gen-oldest pypy-3.9 Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
- name: Checkout repo @ SHA - ${{ github.sha }}
uses: actions/checkout@v4
- - name: Set up Python 3.11
+ - name: Set up Python pypy-3.9
uses: actions/setup-python@v5
with:
- python-version: "3.11"
+ python-version: "pypy-3.9"
- name: Install tox
run: pip install tox-uv
@@ -4248,20 +4156,20 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py311-test-opentelemetry-opencensus-shim -- -ra
+ run: tox -e pypy3-test-opentelemetry-protojson-gen-oldest -- -ra
- py312-test-opentelemetry-opencensus-shim_windows-latest:
- name: opentelemetry-opencensus-shim 3.12 Windows
+ pypy3-test-opentelemetry-protojson-gen-latest_windows-latest:
+ name: opentelemetry-protojson-gen-latest pypy-3.9 Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
- name: Checkout repo @ SHA - ${{ github.sha }}
uses: actions/checkout@v4
- - name: Set up Python 3.12
+ - name: Set up Python pypy-3.9
uses: actions/setup-python@v5
with:
- python-version: "3.12"
+ python-version: "pypy-3.9"
- name: Install tox
run: pip install tox-uv
@@ -4270,20 +4178,20 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py312-test-opentelemetry-opencensus-shim -- -ra
+ run: tox -e pypy3-test-opentelemetry-protojson-gen-latest -- -ra
- py313-test-opentelemetry-opencensus-shim_windows-latest:
- name: opentelemetry-opencensus-shim 3.13 Windows
+ py39-test-opentelemetry-sdk_windows-latest:
+ name: opentelemetry-sdk 3.9 Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
- name: Checkout repo @ SHA - ${{ github.sha }}
uses: actions/checkout@v4
- - name: Set up Python 3.13
+ - name: Set up Python 3.9
uses: actions/setup-python@v5
with:
- python-version: "3.13"
+ python-version: "3.9"
- name: Install tox
run: pip install tox-uv
@@ -4292,20 +4200,20 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py313-test-opentelemetry-opencensus-shim -- -ra
+ run: tox -e py39-test-opentelemetry-sdk -- -ra
- py314-test-opentelemetry-opencensus-shim_windows-latest:
- name: opentelemetry-opencensus-shim 3.14 Windows
+ py310-test-opentelemetry-sdk_windows-latest:
+ name: opentelemetry-sdk 3.10 Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
- name: Checkout repo @ SHA - ${{ github.sha }}
uses: actions/checkout@v4
- - name: Set up Python 3.14
+ - name: Set up Python 3.10
uses: actions/setup-python@v5
with:
- python-version: "3.14"
+ python-version: "3.10"
- name: Install tox
run: pip install tox-uv
@@ -4314,20 +4222,20 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py314-test-opentelemetry-opencensus-shim -- -ra
+ run: tox -e py310-test-opentelemetry-sdk -- -ra
- py39-test-opentelemetry-exporter-opencensus_windows-latest:
- name: opentelemetry-exporter-opencensus 3.9 Windows
+ py311-test-opentelemetry-sdk_windows-latest:
+ name: opentelemetry-sdk 3.11 Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
- name: Checkout repo @ SHA - ${{ github.sha }}
uses: actions/checkout@v4
- - name: Set up Python 3.9
+ - name: Set up Python 3.11
uses: actions/setup-python@v5
with:
- python-version: "3.9"
+ python-version: "3.11"
- name: Install tox
run: pip install tox-uv
@@ -4336,20 +4244,20 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py39-test-opentelemetry-exporter-opencensus -- -ra
+ run: tox -e py311-test-opentelemetry-sdk -- -ra
- py310-test-opentelemetry-exporter-opencensus_windows-latest:
- name: opentelemetry-exporter-opencensus 3.10 Windows
+ py312-test-opentelemetry-sdk_windows-latest:
+ name: opentelemetry-sdk 3.12 Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
- name: Checkout repo @ SHA - ${{ github.sha }}
uses: actions/checkout@v4
- - name: Set up Python 3.10
+ - name: Set up Python 3.12
uses: actions/setup-python@v5
with:
- python-version: "3.10"
+ python-version: "3.12"
- name: Install tox
run: pip install tox-uv
@@ -4358,20 +4266,20 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py310-test-opentelemetry-exporter-opencensus -- -ra
+ run: tox -e py312-test-opentelemetry-sdk -- -ra
- py311-test-opentelemetry-exporter-opencensus_windows-latest:
- name: opentelemetry-exporter-opencensus 3.11 Windows
+ py313-test-opentelemetry-sdk_windows-latest:
+ name: opentelemetry-sdk 3.13 Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
- name: Checkout repo @ SHA - ${{ github.sha }}
uses: actions/checkout@v4
- - name: Set up Python 3.11
+ - name: Set up Python 3.13
uses: actions/setup-python@v5
with:
- python-version: "3.11"
+ python-version: "3.13"
- name: Install tox
run: pip install tox-uv
@@ -4380,20 +4288,20 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py311-test-opentelemetry-exporter-opencensus -- -ra
+ run: tox -e py313-test-opentelemetry-sdk -- -ra
- py312-test-opentelemetry-exporter-opencensus_windows-latest:
- name: opentelemetry-exporter-opencensus 3.12 Windows
+ py314-test-opentelemetry-sdk_windows-latest:
+ name: opentelemetry-sdk 3.14 Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
- name: Checkout repo @ SHA - ${{ github.sha }}
uses: actions/checkout@v4
- - name: Set up Python 3.12
+ - name: Set up Python 3.14
uses: actions/setup-python@v5
with:
- python-version: "3.12"
+ python-version: "3.14"
- name: Install tox
run: pip install tox-uv
@@ -4402,20 +4310,20 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py312-test-opentelemetry-exporter-opencensus -- -ra
+ run: tox -e py314-test-opentelemetry-sdk -- -ra
- py313-test-opentelemetry-exporter-opencensus_windows-latest:
- name: opentelemetry-exporter-opencensus 3.13 Windows
+ py314t-test-opentelemetry-sdk_windows-latest:
+ name: opentelemetry-sdk 3.14t Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
- name: Checkout repo @ SHA - ${{ github.sha }}
uses: actions/checkout@v4
- - name: Set up Python 3.13
+ - name: Set up Python 3.14t
uses: actions/setup-python@v5
with:
- python-version: "3.13"
+ python-version: "3.14t"
- name: Install tox
run: pip install tox-uv
@@ -4424,20 +4332,20 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py313-test-opentelemetry-exporter-opencensus -- -ra
+ run: tox -e py314t-test-opentelemetry-sdk -- -ra
- py314-test-opentelemetry-exporter-opencensus_windows-latest:
- name: opentelemetry-exporter-opencensus 3.14 Windows
+ pypy3-test-opentelemetry-sdk_windows-latest:
+ name: opentelemetry-sdk pypy-3.9 Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
- name: Checkout repo @ SHA - ${{ github.sha }}
uses: actions/checkout@v4
- - name: Set up Python 3.14
+ - name: Set up Python pypy-3.9
uses: actions/setup-python@v5
with:
- python-version: "3.14"
+ python-version: "pypy-3.9"
- name: Install tox
run: pip install tox-uv
@@ -4446,10 +4354,10 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py314-test-opentelemetry-exporter-opencensus -- -ra
+ run: tox -e pypy3-test-opentelemetry-sdk -- -ra
- py39-test-opentelemetry-exporter-otlp-proto-common_windows-latest:
- name: opentelemetry-exporter-otlp-proto-common 3.9 Windows
+ py39-test-opentelemetry-semantic-conventions_windows-latest:
+ name: opentelemetry-semantic-conventions 3.9 Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
@@ -4468,10 +4376,10 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py39-test-opentelemetry-exporter-otlp-proto-common -- -ra
+ run: tox -e py39-test-opentelemetry-semantic-conventions -- -ra
- py310-test-opentelemetry-exporter-otlp-proto-common_windows-latest:
- name: opentelemetry-exporter-otlp-proto-common 3.10 Windows
+ py310-test-opentelemetry-semantic-conventions_windows-latest:
+ name: opentelemetry-semantic-conventions 3.10 Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
@@ -4490,10 +4398,10 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py310-test-opentelemetry-exporter-otlp-proto-common -- -ra
+ run: tox -e py310-test-opentelemetry-semantic-conventions -- -ra
- py311-test-opentelemetry-exporter-otlp-proto-common_windows-latest:
- name: opentelemetry-exporter-otlp-proto-common 3.11 Windows
+ py311-test-opentelemetry-semantic-conventions_windows-latest:
+ name: opentelemetry-semantic-conventions 3.11 Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
@@ -4512,10 +4420,10 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py311-test-opentelemetry-exporter-otlp-proto-common -- -ra
+ run: tox -e py311-test-opentelemetry-semantic-conventions -- -ra
- py312-test-opentelemetry-exporter-otlp-proto-common_windows-latest:
- name: opentelemetry-exporter-otlp-proto-common 3.12 Windows
+ py312-test-opentelemetry-semantic-conventions_windows-latest:
+ name: opentelemetry-semantic-conventions 3.12 Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
@@ -4534,10 +4442,10 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py312-test-opentelemetry-exporter-otlp-proto-common -- -ra
+ run: tox -e py312-test-opentelemetry-semantic-conventions -- -ra
- py313-test-opentelemetry-exporter-otlp-proto-common_windows-latest:
- name: opentelemetry-exporter-otlp-proto-common 3.13 Windows
+ py313-test-opentelemetry-semantic-conventions_windows-latest:
+ name: opentelemetry-semantic-conventions 3.13 Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
@@ -4556,10 +4464,10 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py313-test-opentelemetry-exporter-otlp-proto-common -- -ra
+ run: tox -e py313-test-opentelemetry-semantic-conventions -- -ra
- py314-test-opentelemetry-exporter-otlp-proto-common_windows-latest:
- name: opentelemetry-exporter-otlp-proto-common 3.14 Windows
+ py314-test-opentelemetry-semantic-conventions_windows-latest:
+ name: opentelemetry-semantic-conventions 3.14 Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
@@ -4578,10 +4486,10 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py314-test-opentelemetry-exporter-otlp-proto-common -- -ra
+ run: tox -e py314-test-opentelemetry-semantic-conventions -- -ra
- py314t-test-opentelemetry-exporter-otlp-proto-common_windows-latest:
- name: opentelemetry-exporter-otlp-proto-common 3.14t Windows
+ py314t-test-opentelemetry-semantic-conventions_windows-latest:
+ name: opentelemetry-semantic-conventions 3.14t Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
@@ -4600,10 +4508,10 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py314t-test-opentelemetry-exporter-otlp-proto-common -- -ra
+ run: tox -e py314t-test-opentelemetry-semantic-conventions -- -ra
- pypy3-test-opentelemetry-exporter-otlp-proto-common_windows-latest:
- name: opentelemetry-exporter-otlp-proto-common pypy-3.9 Windows
+ pypy3-test-opentelemetry-semantic-conventions_windows-latest:
+ name: opentelemetry-semantic-conventions pypy-3.9 Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
@@ -4622,10 +4530,10 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e pypy3-test-opentelemetry-exporter-otlp-proto-common -- -ra
+ run: tox -e pypy3-test-opentelemetry-semantic-conventions -- -ra
- py39-test-opentelemetry-exporter-otlp-combined_windows-latest:
- name: opentelemetry-exporter-otlp-combined 3.9 Windows
+ py39-test-opentelemetry-getting-started_windows-latest:
+ name: opentelemetry-getting-started 3.9 Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
@@ -4644,10 +4552,10 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py39-test-opentelemetry-exporter-otlp-combined -- -ra
+ run: tox -e py39-test-opentelemetry-getting-started -- -ra
- py310-test-opentelemetry-exporter-otlp-combined_windows-latest:
- name: opentelemetry-exporter-otlp-combined 3.10 Windows
+ py310-test-opentelemetry-getting-started_windows-latest:
+ name: opentelemetry-getting-started 3.10 Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
@@ -4666,10 +4574,10 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py310-test-opentelemetry-exporter-otlp-combined -- -ra
+ run: tox -e py310-test-opentelemetry-getting-started -- -ra
- py311-test-opentelemetry-exporter-otlp-combined_windows-latest:
- name: opentelemetry-exporter-otlp-combined 3.11 Windows
+ py311-test-opentelemetry-getting-started_windows-latest:
+ name: opentelemetry-getting-started 3.11 Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
@@ -4688,10 +4596,10 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py311-test-opentelemetry-exporter-otlp-combined -- -ra
+ run: tox -e py311-test-opentelemetry-getting-started -- -ra
- py312-test-opentelemetry-exporter-otlp-combined_windows-latest:
- name: opentelemetry-exporter-otlp-combined 3.12 Windows
+ py312-test-opentelemetry-getting-started_windows-latest:
+ name: opentelemetry-getting-started 3.12 Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
@@ -4710,10 +4618,10 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py312-test-opentelemetry-exporter-otlp-combined -- -ra
+ run: tox -e py312-test-opentelemetry-getting-started -- -ra
- py313-test-opentelemetry-exporter-otlp-combined_windows-latest:
- name: opentelemetry-exporter-otlp-combined 3.13 Windows
+ py313-test-opentelemetry-getting-started_windows-latest:
+ name: opentelemetry-getting-started 3.13 Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
@@ -4732,10 +4640,10 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py313-test-opentelemetry-exporter-otlp-combined -- -ra
+ run: tox -e py313-test-opentelemetry-getting-started -- -ra
- py314-test-opentelemetry-exporter-otlp-combined_windows-latest:
- name: opentelemetry-exporter-otlp-combined 3.14 Windows
+ py314-test-opentelemetry-getting-started_windows-latest:
+ name: opentelemetry-getting-started 3.14 Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
@@ -4754,10 +4662,10 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py314-test-opentelemetry-exporter-otlp-combined -- -ra
+ run: tox -e py314-test-opentelemetry-getting-started -- -ra
- py39-test-opentelemetry-exporter-otlp-proto-grpc-oldest_windows-latest:
- name: opentelemetry-exporter-otlp-proto-grpc-oldest 3.9 Windows
+ py39-test-opentelemetry-opentracing-shim_windows-latest:
+ name: opentelemetry-opentracing-shim 3.9 Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
@@ -4776,20 +4684,20 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py39-test-opentelemetry-exporter-otlp-proto-grpc-oldest -- -ra
+ run: tox -e py39-test-opentelemetry-opentracing-shim -- -ra
- py39-test-opentelemetry-exporter-otlp-proto-grpc-latest_windows-latest:
- name: opentelemetry-exporter-otlp-proto-grpc-latest 3.9 Windows
+ py310-test-opentelemetry-opentracing-shim_windows-latest:
+ name: opentelemetry-opentracing-shim 3.10 Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
- name: Checkout repo @ SHA - ${{ github.sha }}
uses: actions/checkout@v4
- - name: Set up Python 3.9
+ - name: Set up Python 3.10
uses: actions/setup-python@v5
with:
- python-version: "3.9"
+ python-version: "3.10"
- name: Install tox
run: pip install tox-uv
@@ -4798,20 +4706,20 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py39-test-opentelemetry-exporter-otlp-proto-grpc-latest -- -ra
+ run: tox -e py310-test-opentelemetry-opentracing-shim -- -ra
- py310-test-opentelemetry-exporter-otlp-proto-grpc-oldest_windows-latest:
- name: opentelemetry-exporter-otlp-proto-grpc-oldest 3.10 Windows
+ py311-test-opentelemetry-opentracing-shim_windows-latest:
+ name: opentelemetry-opentracing-shim 3.11 Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
- name: Checkout repo @ SHA - ${{ github.sha }}
uses: actions/checkout@v4
- - name: Set up Python 3.10
+ - name: Set up Python 3.11
uses: actions/setup-python@v5
with:
- python-version: "3.10"
+ python-version: "3.11"
- name: Install tox
run: pip install tox-uv
@@ -4820,20 +4728,20 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py310-test-opentelemetry-exporter-otlp-proto-grpc-oldest -- -ra
+ run: tox -e py311-test-opentelemetry-opentracing-shim -- -ra
- py310-test-opentelemetry-exporter-otlp-proto-grpc-latest_windows-latest:
- name: opentelemetry-exporter-otlp-proto-grpc-latest 3.10 Windows
+ py312-test-opentelemetry-opentracing-shim_windows-latest:
+ name: opentelemetry-opentracing-shim 3.12 Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
- name: Checkout repo @ SHA - ${{ github.sha }}
uses: actions/checkout@v4
- - name: Set up Python 3.10
+ - name: Set up Python 3.12
uses: actions/setup-python@v5
with:
- python-version: "3.10"
+ python-version: "3.12"
- name: Install tox
run: pip install tox-uv
@@ -4842,20 +4750,20 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py310-test-opentelemetry-exporter-otlp-proto-grpc-latest -- -ra
+ run: tox -e py312-test-opentelemetry-opentracing-shim -- -ra
- py311-test-opentelemetry-exporter-otlp-proto-grpc-oldest_windows-latest:
- name: opentelemetry-exporter-otlp-proto-grpc-oldest 3.11 Windows
+ py313-test-opentelemetry-opentracing-shim_windows-latest:
+ name: opentelemetry-opentracing-shim 3.13 Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
- name: Checkout repo @ SHA - ${{ github.sha }}
uses: actions/checkout@v4
- - name: Set up Python 3.11
+ - name: Set up Python 3.13
uses: actions/setup-python@v5
with:
- python-version: "3.11"
+ python-version: "3.13"
- name: Install tox
run: pip install tox-uv
@@ -4864,20 +4772,20 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py311-test-opentelemetry-exporter-otlp-proto-grpc-oldest -- -ra
+ run: tox -e py313-test-opentelemetry-opentracing-shim -- -ra
- py311-test-opentelemetry-exporter-otlp-proto-grpc-latest_windows-latest:
- name: opentelemetry-exporter-otlp-proto-grpc-latest 3.11 Windows
+ py314-test-opentelemetry-opentracing-shim_windows-latest:
+ name: opentelemetry-opentracing-shim 3.14 Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
- name: Checkout repo @ SHA - ${{ github.sha }}
uses: actions/checkout@v4
- - name: Set up Python 3.11
+ - name: Set up Python 3.14
uses: actions/setup-python@v5
with:
- python-version: "3.11"
+ python-version: "3.14"
- name: Install tox
run: pip install tox-uv
@@ -4886,20 +4794,20 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py311-test-opentelemetry-exporter-otlp-proto-grpc-latest -- -ra
+ run: tox -e py314-test-opentelemetry-opentracing-shim -- -ra
- py312-test-opentelemetry-exporter-otlp-proto-grpc-oldest_windows-latest:
- name: opentelemetry-exporter-otlp-proto-grpc-oldest 3.12 Windows
+ py314t-test-opentelemetry-opentracing-shim_windows-latest:
+ name: opentelemetry-opentracing-shim 3.14t Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
- name: Checkout repo @ SHA - ${{ github.sha }}
uses: actions/checkout@v4
- - name: Set up Python 3.12
+ - name: Set up Python 3.14t
uses: actions/setup-python@v5
with:
- python-version: "3.12"
+ python-version: "3.14t"
- name: Install tox
run: pip install tox-uv
@@ -4908,20 +4816,20 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py312-test-opentelemetry-exporter-otlp-proto-grpc-oldest -- -ra
+ run: tox -e py314t-test-opentelemetry-opentracing-shim -- -ra
- py312-test-opentelemetry-exporter-otlp-proto-grpc-latest_windows-latest:
- name: opentelemetry-exporter-otlp-proto-grpc-latest 3.12 Windows
+ pypy3-test-opentelemetry-opentracing-shim_windows-latest:
+ name: opentelemetry-opentracing-shim pypy-3.9 Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
- name: Checkout repo @ SHA - ${{ github.sha }}
uses: actions/checkout@v4
- - name: Set up Python 3.12
+ - name: Set up Python pypy-3.9
uses: actions/setup-python@v5
with:
- python-version: "3.12"
+ python-version: "pypy-3.9"
- name: Install tox
run: pip install tox-uv
@@ -4930,20 +4838,20 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py312-test-opentelemetry-exporter-otlp-proto-grpc-latest -- -ra
+ run: tox -e pypy3-test-opentelemetry-opentracing-shim -- -ra
- py313-test-opentelemetry-exporter-otlp-proto-grpc-oldest_windows-latest:
- name: opentelemetry-exporter-otlp-proto-grpc-oldest 3.13 Windows
+ py39-test-opentelemetry-opencensus-shim_windows-latest:
+ name: opentelemetry-opencensus-shim 3.9 Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
- name: Checkout repo @ SHA - ${{ github.sha }}
uses: actions/checkout@v4
- - name: Set up Python 3.13
+ - name: Set up Python 3.9
uses: actions/setup-python@v5
with:
- python-version: "3.13"
+ python-version: "3.9"
- name: Install tox
run: pip install tox-uv
@@ -4952,20 +4860,20 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py313-test-opentelemetry-exporter-otlp-proto-grpc-oldest -- -ra
+ run: tox -e py39-test-opentelemetry-opencensus-shim -- -ra
- py313-test-opentelemetry-exporter-otlp-proto-grpc-latest_windows-latest:
- name: opentelemetry-exporter-otlp-proto-grpc-latest 3.13 Windows
+ py310-test-opentelemetry-opencensus-shim_windows-latest:
+ name: opentelemetry-opencensus-shim 3.10 Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
- name: Checkout repo @ SHA - ${{ github.sha }}
uses: actions/checkout@v4
- - name: Set up Python 3.13
+ - name: Set up Python 3.10
uses: actions/setup-python@v5
with:
- python-version: "3.13"
+ python-version: "3.10"
- name: Install tox
run: pip install tox-uv
@@ -4974,20 +4882,20 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py313-test-opentelemetry-exporter-otlp-proto-grpc-latest -- -ra
+ run: tox -e py310-test-opentelemetry-opencensus-shim -- -ra
- py314-test-opentelemetry-exporter-otlp-proto-grpc-oldest_windows-latest:
- name: opentelemetry-exporter-otlp-proto-grpc-oldest 3.14 Windows
+ py311-test-opentelemetry-opencensus-shim_windows-latest:
+ name: opentelemetry-opencensus-shim 3.11 Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
- name: Checkout repo @ SHA - ${{ github.sha }}
uses: actions/checkout@v4
- - name: Set up Python 3.14
+ - name: Set up Python 3.11
uses: actions/setup-python@v5
with:
- python-version: "3.14"
+ python-version: "3.11"
- name: Install tox
run: pip install tox-uv
@@ -4996,10 +4904,54 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py314-test-opentelemetry-exporter-otlp-proto-grpc-oldest -- -ra
+ run: tox -e py311-test-opentelemetry-opencensus-shim -- -ra
+
+ py312-test-opentelemetry-opencensus-shim_windows-latest:
+ name: opentelemetry-opencensus-shim 3.12 Windows
+ runs-on: windows-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.12
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.12"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Configure git to support long filenames
+ run: git config --system core.longpaths true
+
+ - name: Run tests
+ run: tox -e py312-test-opentelemetry-opencensus-shim -- -ra
+
+ py313-test-opentelemetry-opencensus-shim_windows-latest:
+ name: opentelemetry-opencensus-shim 3.13 Windows
+ runs-on: windows-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.13
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.13"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Configure git to support long filenames
+ run: git config --system core.longpaths true
+
+ - name: Run tests
+ run: tox -e py313-test-opentelemetry-opencensus-shim -- -ra
- py314-test-opentelemetry-exporter-otlp-proto-grpc-latest_windows-latest:
- name: opentelemetry-exporter-otlp-proto-grpc-latest 3.14 Windows
+ py314-test-opentelemetry-opencensus-shim_windows-latest:
+ name: opentelemetry-opencensus-shim 3.14 Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
@@ -5018,10 +4970,10 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py314-test-opentelemetry-exporter-otlp-proto-grpc-latest -- -ra
+ run: tox -e py314-test-opentelemetry-opencensus-shim -- -ra
- py39-test-opentelemetry-exporter-otlp-proto-http_windows-latest:
- name: opentelemetry-exporter-otlp-proto-http 3.9 Windows
+ py39-test-opentelemetry-exporter-opencensus_windows-latest:
+ name: opentelemetry-exporter-opencensus 3.9 Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
@@ -5040,10 +4992,10 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py39-test-opentelemetry-exporter-otlp-proto-http -- -ra
+ run: tox -e py39-test-opentelemetry-exporter-opencensus -- -ra
- py310-test-opentelemetry-exporter-otlp-proto-http_windows-latest:
- name: opentelemetry-exporter-otlp-proto-http 3.10 Windows
+ py310-test-opentelemetry-exporter-opencensus_windows-latest:
+ name: opentelemetry-exporter-opencensus 3.10 Windows
runs-on: windows-latest
timeout-minutes: 30
steps:
@@ -5062,4 +5014,4 @@ jobs:
run: git config --system core.longpaths true
- name: Run tests
- run: tox -e py310-test-opentelemetry-exporter-otlp-proto-http -- -ra
+ run: tox -e py310-test-opentelemetry-exporter-opencensus -- -ra
diff --git a/.github/workflows/test_1.yml b/.github/workflows/test_1.yml
index bd55fa3d33..f72f96669e 100644
--- a/.github/workflows/test_1.yml
+++ b/.github/workflows/test_1.yml
@@ -32,6 +32,710 @@ env:
jobs:
+ py311-test-opentelemetry-exporter-opencensus_windows-latest:
+ name: opentelemetry-exporter-opencensus 3.11 Windows
+ runs-on: windows-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.11
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.11"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Configure git to support long filenames
+ run: git config --system core.longpaths true
+
+ - name: Run tests
+ run: tox -e py311-test-opentelemetry-exporter-opencensus -- -ra
+
+ py312-test-opentelemetry-exporter-opencensus_windows-latest:
+ name: opentelemetry-exporter-opencensus 3.12 Windows
+ runs-on: windows-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.12
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.12"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Configure git to support long filenames
+ run: git config --system core.longpaths true
+
+ - name: Run tests
+ run: tox -e py312-test-opentelemetry-exporter-opencensus -- -ra
+
+ py313-test-opentelemetry-exporter-opencensus_windows-latest:
+ name: opentelemetry-exporter-opencensus 3.13 Windows
+ runs-on: windows-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.13
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.13"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Configure git to support long filenames
+ run: git config --system core.longpaths true
+
+ - name: Run tests
+ run: tox -e py313-test-opentelemetry-exporter-opencensus -- -ra
+
+ py314-test-opentelemetry-exporter-opencensus_windows-latest:
+ name: opentelemetry-exporter-opencensus 3.14 Windows
+ runs-on: windows-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.14
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.14"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Configure git to support long filenames
+ run: git config --system core.longpaths true
+
+ - name: Run tests
+ run: tox -e py314-test-opentelemetry-exporter-opencensus -- -ra
+
+ py39-test-opentelemetry-exporter-otlp-proto-common_windows-latest:
+ name: opentelemetry-exporter-otlp-proto-common 3.9 Windows
+ runs-on: windows-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.9
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.9"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Configure git to support long filenames
+ run: git config --system core.longpaths true
+
+ - name: Run tests
+ run: tox -e py39-test-opentelemetry-exporter-otlp-proto-common -- -ra
+
+ py310-test-opentelemetry-exporter-otlp-proto-common_windows-latest:
+ name: opentelemetry-exporter-otlp-proto-common 3.10 Windows
+ runs-on: windows-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.10
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.10"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Configure git to support long filenames
+ run: git config --system core.longpaths true
+
+ - name: Run tests
+ run: tox -e py310-test-opentelemetry-exporter-otlp-proto-common -- -ra
+
+ py311-test-opentelemetry-exporter-otlp-proto-common_windows-latest:
+ name: opentelemetry-exporter-otlp-proto-common 3.11 Windows
+ runs-on: windows-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.11
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.11"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Configure git to support long filenames
+ run: git config --system core.longpaths true
+
+ - name: Run tests
+ run: tox -e py311-test-opentelemetry-exporter-otlp-proto-common -- -ra
+
+ py312-test-opentelemetry-exporter-otlp-proto-common_windows-latest:
+ name: opentelemetry-exporter-otlp-proto-common 3.12 Windows
+ runs-on: windows-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.12
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.12"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Configure git to support long filenames
+ run: git config --system core.longpaths true
+
+ - name: Run tests
+ run: tox -e py312-test-opentelemetry-exporter-otlp-proto-common -- -ra
+
+ py313-test-opentelemetry-exporter-otlp-proto-common_windows-latest:
+ name: opentelemetry-exporter-otlp-proto-common 3.13 Windows
+ runs-on: windows-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.13
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.13"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Configure git to support long filenames
+ run: git config --system core.longpaths true
+
+ - name: Run tests
+ run: tox -e py313-test-opentelemetry-exporter-otlp-proto-common -- -ra
+
+ py314-test-opentelemetry-exporter-otlp-proto-common_windows-latest:
+ name: opentelemetry-exporter-otlp-proto-common 3.14 Windows
+ runs-on: windows-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.14
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.14"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Configure git to support long filenames
+ run: git config --system core.longpaths true
+
+ - name: Run tests
+ run: tox -e py314-test-opentelemetry-exporter-otlp-proto-common -- -ra
+
+ py314t-test-opentelemetry-exporter-otlp-proto-common_windows-latest:
+ name: opentelemetry-exporter-otlp-proto-common 3.14t Windows
+ runs-on: windows-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.14t
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.14t"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Configure git to support long filenames
+ run: git config --system core.longpaths true
+
+ - name: Run tests
+ run: tox -e py314t-test-opentelemetry-exporter-otlp-proto-common -- -ra
+
+ pypy3-test-opentelemetry-exporter-otlp-proto-common_windows-latest:
+ name: opentelemetry-exporter-otlp-proto-common pypy-3.9 Windows
+ runs-on: windows-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python pypy-3.9
+ uses: actions/setup-python@v5
+ with:
+ python-version: "pypy-3.9"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Configure git to support long filenames
+ run: git config --system core.longpaths true
+
+ - name: Run tests
+ run: tox -e pypy3-test-opentelemetry-exporter-otlp-proto-common -- -ra
+
+ py39-test-opentelemetry-exporter-otlp-combined_windows-latest:
+ name: opentelemetry-exporter-otlp-combined 3.9 Windows
+ runs-on: windows-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.9
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.9"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Configure git to support long filenames
+ run: git config --system core.longpaths true
+
+ - name: Run tests
+ run: tox -e py39-test-opentelemetry-exporter-otlp-combined -- -ra
+
+ py310-test-opentelemetry-exporter-otlp-combined_windows-latest:
+ name: opentelemetry-exporter-otlp-combined 3.10 Windows
+ runs-on: windows-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.10
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.10"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Configure git to support long filenames
+ run: git config --system core.longpaths true
+
+ - name: Run tests
+ run: tox -e py310-test-opentelemetry-exporter-otlp-combined -- -ra
+
+ py311-test-opentelemetry-exporter-otlp-combined_windows-latest:
+ name: opentelemetry-exporter-otlp-combined 3.11 Windows
+ runs-on: windows-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.11
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.11"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Configure git to support long filenames
+ run: git config --system core.longpaths true
+
+ - name: Run tests
+ run: tox -e py311-test-opentelemetry-exporter-otlp-combined -- -ra
+
+ py312-test-opentelemetry-exporter-otlp-combined_windows-latest:
+ name: opentelemetry-exporter-otlp-combined 3.12 Windows
+ runs-on: windows-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.12
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.12"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Configure git to support long filenames
+ run: git config --system core.longpaths true
+
+ - name: Run tests
+ run: tox -e py312-test-opentelemetry-exporter-otlp-combined -- -ra
+
+ py313-test-opentelemetry-exporter-otlp-combined_windows-latest:
+ name: opentelemetry-exporter-otlp-combined 3.13 Windows
+ runs-on: windows-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.13
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.13"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Configure git to support long filenames
+ run: git config --system core.longpaths true
+
+ - name: Run tests
+ run: tox -e py313-test-opentelemetry-exporter-otlp-combined -- -ra
+
+ py314-test-opentelemetry-exporter-otlp-combined_windows-latest:
+ name: opentelemetry-exporter-otlp-combined 3.14 Windows
+ runs-on: windows-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.14
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.14"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Configure git to support long filenames
+ run: git config --system core.longpaths true
+
+ - name: Run tests
+ run: tox -e py314-test-opentelemetry-exporter-otlp-combined -- -ra
+
+ py39-test-opentelemetry-exporter-otlp-proto-grpc-oldest_windows-latest:
+ name: opentelemetry-exporter-otlp-proto-grpc-oldest 3.9 Windows
+ runs-on: windows-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.9
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.9"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Configure git to support long filenames
+ run: git config --system core.longpaths true
+
+ - name: Run tests
+ run: tox -e py39-test-opentelemetry-exporter-otlp-proto-grpc-oldest -- -ra
+
+ py39-test-opentelemetry-exporter-otlp-proto-grpc-latest_windows-latest:
+ name: opentelemetry-exporter-otlp-proto-grpc-latest 3.9 Windows
+ runs-on: windows-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.9
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.9"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Configure git to support long filenames
+ run: git config --system core.longpaths true
+
+ - name: Run tests
+ run: tox -e py39-test-opentelemetry-exporter-otlp-proto-grpc-latest -- -ra
+
+ py310-test-opentelemetry-exporter-otlp-proto-grpc-oldest_windows-latest:
+ name: opentelemetry-exporter-otlp-proto-grpc-oldest 3.10 Windows
+ runs-on: windows-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.10
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.10"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Configure git to support long filenames
+ run: git config --system core.longpaths true
+
+ - name: Run tests
+ run: tox -e py310-test-opentelemetry-exporter-otlp-proto-grpc-oldest -- -ra
+
+ py310-test-opentelemetry-exporter-otlp-proto-grpc-latest_windows-latest:
+ name: opentelemetry-exporter-otlp-proto-grpc-latest 3.10 Windows
+ runs-on: windows-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.10
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.10"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Configure git to support long filenames
+ run: git config --system core.longpaths true
+
+ - name: Run tests
+ run: tox -e py310-test-opentelemetry-exporter-otlp-proto-grpc-latest -- -ra
+
+ py311-test-opentelemetry-exporter-otlp-proto-grpc-oldest_windows-latest:
+ name: opentelemetry-exporter-otlp-proto-grpc-oldest 3.11 Windows
+ runs-on: windows-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.11
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.11"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Configure git to support long filenames
+ run: git config --system core.longpaths true
+
+ - name: Run tests
+ run: tox -e py311-test-opentelemetry-exporter-otlp-proto-grpc-oldest -- -ra
+
+ py311-test-opentelemetry-exporter-otlp-proto-grpc-latest_windows-latest:
+ name: opentelemetry-exporter-otlp-proto-grpc-latest 3.11 Windows
+ runs-on: windows-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.11
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.11"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Configure git to support long filenames
+ run: git config --system core.longpaths true
+
+ - name: Run tests
+ run: tox -e py311-test-opentelemetry-exporter-otlp-proto-grpc-latest -- -ra
+
+ py312-test-opentelemetry-exporter-otlp-proto-grpc-oldest_windows-latest:
+ name: opentelemetry-exporter-otlp-proto-grpc-oldest 3.12 Windows
+ runs-on: windows-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.12
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.12"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Configure git to support long filenames
+ run: git config --system core.longpaths true
+
+ - name: Run tests
+ run: tox -e py312-test-opentelemetry-exporter-otlp-proto-grpc-oldest -- -ra
+
+ py312-test-opentelemetry-exporter-otlp-proto-grpc-latest_windows-latest:
+ name: opentelemetry-exporter-otlp-proto-grpc-latest 3.12 Windows
+ runs-on: windows-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.12
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.12"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Configure git to support long filenames
+ run: git config --system core.longpaths true
+
+ - name: Run tests
+ run: tox -e py312-test-opentelemetry-exporter-otlp-proto-grpc-latest -- -ra
+
+ py313-test-opentelemetry-exporter-otlp-proto-grpc-oldest_windows-latest:
+ name: opentelemetry-exporter-otlp-proto-grpc-oldest 3.13 Windows
+ runs-on: windows-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.13
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.13"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Configure git to support long filenames
+ run: git config --system core.longpaths true
+
+ - name: Run tests
+ run: tox -e py313-test-opentelemetry-exporter-otlp-proto-grpc-oldest -- -ra
+
+ py313-test-opentelemetry-exporter-otlp-proto-grpc-latest_windows-latest:
+ name: opentelemetry-exporter-otlp-proto-grpc-latest 3.13 Windows
+ runs-on: windows-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.13
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.13"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Configure git to support long filenames
+ run: git config --system core.longpaths true
+
+ - name: Run tests
+ run: tox -e py313-test-opentelemetry-exporter-otlp-proto-grpc-latest -- -ra
+
+ py314-test-opentelemetry-exporter-otlp-proto-grpc-oldest_windows-latest:
+ name: opentelemetry-exporter-otlp-proto-grpc-oldest 3.14 Windows
+ runs-on: windows-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.14
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.14"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Configure git to support long filenames
+ run: git config --system core.longpaths true
+
+ - name: Run tests
+ run: tox -e py314-test-opentelemetry-exporter-otlp-proto-grpc-oldest -- -ra
+
+ py314-test-opentelemetry-exporter-otlp-proto-grpc-latest_windows-latest:
+ name: opentelemetry-exporter-otlp-proto-grpc-latest 3.14 Windows
+ runs-on: windows-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.14
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.14"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Configure git to support long filenames
+ run: git config --system core.longpaths true
+
+ - name: Run tests
+ run: tox -e py314-test-opentelemetry-exporter-otlp-proto-grpc-latest -- -ra
+
+ py39-test-opentelemetry-exporter-otlp-proto-http_windows-latest:
+ name: opentelemetry-exporter-otlp-proto-http 3.9 Windows
+ runs-on: windows-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.9
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.9"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Configure git to support long filenames
+ run: git config --system core.longpaths true
+
+ - name: Run tests
+ run: tox -e py39-test-opentelemetry-exporter-otlp-proto-http -- -ra
+
+ py310-test-opentelemetry-exporter-otlp-proto-http_windows-latest:
+ name: opentelemetry-exporter-otlp-proto-http 3.10 Windows
+ runs-on: windows-latest
+ timeout-minutes: 30
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.10
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.10"
+
+ - name: Install tox
+ run: pip install tox-uv
+
+ - name: Configure git to support long filenames
+ run: git config --system core.longpaths true
+
+ - name: Run tests
+ run: tox -e py310-test-opentelemetry-exporter-otlp-proto-http -- -ra
+
py311-test-opentelemetry-exporter-otlp-proto-http_windows-latest:
name: opentelemetry-exporter-otlp-proto-http 3.11 Windows
runs-on: windows-latest
diff --git a/CHANGELOG.md b/CHANGELOG.md
index f125b47e99..1b43610d32 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -38,6 +38,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
([#4862](https://github.com/open-telemetry/opentelemetry-python/pull/4862))
- `opentelemetry-exporter-otlp-proto-http`: fix retry logic and error handling for connection failures in trace, metric, and log exporters
([#4709](https://github.com/open-telemetry/opentelemetry-python/pull/4709))
+- Implement custom protoc plugin to generate OTLP JSON class definitions
+ ([#4910](https://github.com/open-telemetry/opentelemetry-python/pull/4910))
## Version 1.39.0/0.60b0 (2025-12-03)
diff --git a/codegen/opentelemetry-codegen-json/.gitignore b/codegen/opentelemetry-codegen-json/.gitignore
new file mode 100644
index 0000000000..b31e04e000
--- /dev/null
+++ b/codegen/opentelemetry-codegen-json/.gitignore
@@ -0,0 +1 @@
+tests/generated/otel_test_json
\ No newline at end of file
diff --git a/codegen/opentelemetry-codegen-json/README.rst b/codegen/opentelemetry-codegen-json/README.rst
new file mode 100644
index 0000000000..5d94521452
--- /dev/null
+++ b/codegen/opentelemetry-codegen-json/README.rst
@@ -0,0 +1,22 @@
+OpenTelemetry JSON Code Generator
+=================================
+
+|pypi|
+
+.. |pypi| image:: https://badge.fury.io/py/opentelemetry-codegen-json.svg
+ :target: https://pypi.org/project/opentelemetry-codegen-json/
+
+This library is a protobuf compiler (``protoc``) plugin that generates Python classes for serializing and deserializing OpenTelemetry protocol (OTLP) messages in JSON format.
+
+Installation
+------------
+
+::
+
+ pip install opentelemetry-codegen-json
+
+
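+Usage
+-----
+
+The package installs a ``protoc-gen-otlp_json`` plugin executable (see
+``[project.scripts]`` in ``pyproject.toml``). As a sketch, assuming ``protoc``
+is available, the plugin is on ``PATH``, and the OTLP ``.proto`` files are
+checked out locally (the paths below are illustrative), code can be generated
+with::
+
+ protoc \
+ --otlp_json_out=./generated \
+ --proto_path=./opentelemetry-proto \
+ ./opentelemetry-proto/opentelemetry/proto/trace/v1/trace.proto
+
+Each generated class exposes ``to_dict``/``to_json`` and
+``from_dict``/``from_json`` methods implementing the OTLP JSON encoding.
+
+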
+References
+----------
+
+* `OpenTelemetry <https://opentelemetry.io/>`_
diff --git a/codegen/opentelemetry-codegen-json/pyproject.toml b/codegen/opentelemetry-codegen-json/pyproject.toml
new file mode 100644
index 0000000000..1f59509ae6
--- /dev/null
+++ b/codegen/opentelemetry-codegen-json/pyproject.toml
@@ -0,0 +1,49 @@
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[project]
+name = "opentelemetry-codegen-json"
+dynamic = ["version"]
+description = "Protobuf plugin to generate JSON serializers and deserializers for OpenTelemetry protobuf messages"
+readme = "README.rst"
+license = "Apache-2.0"
+requires-python = ">=3.9"
+authors = [
+ { name = "OpenTelemetry Authors", email = "cncf-opentelemetry-contributors@lists.cncf.io" },
+]
+classifiers = [
+ "Development Status :: 4 - Beta",
+ "Framework :: OpenTelemetry",
+ "Intended Audience :: Developers",
+ "Programming Language :: Python",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.9",
+ "Programming Language :: Python :: 3.10",
+ "Programming Language :: Python :: 3.11",
+ "Programming Language :: Python :: 3.12",
+ "Programming Language :: Python :: 3.13",
+ "Programming Language :: Python :: 3.14",
+]
+dependencies = [
+ "protobuf>=4.25.3",
+ "types-protobuf>=4.24",
+]
+
+[project.scripts]
+protoc-gen-otlp_json = "opentelemetry.codegen.json.plugin:main"
+
+[project.urls]
+Homepage = "https://github.com/open-telemetry/opentelemetry-python/tree/main/codegen/opentelemetry-codegen-json"
+Repository = "https://github.com/open-telemetry/opentelemetry-python"
+
+[tool.hatch.version]
+path = "src/opentelemetry/codegen/json/version/__init__.py"
+
+[tool.hatch.build.targets.sdist]
+include = [
+ "/src",
+]
+
+[tool.hatch.build.targets.wheel]
+packages = ["src/opentelemetry"]
diff --git a/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/__init__.py b/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/__init__.py b/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/analyzer.py b/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/analyzer.py
new file mode 100644
index 0000000000..6a423c5a85
--- /dev/null
+++ b/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/analyzer.py
@@ -0,0 +1,336 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+
+import logging
+from collections import defaultdict
+from dataclasses import dataclass
+from typing import Optional
+
+from google.protobuf import descriptor_pb2 as descriptor
+from google.protobuf.compiler import plugin_pb2 as plugin
+
+from opentelemetry.codegen.json.types import (
+ to_json_field_name,
+)
+
+_logger = logging.getLogger(__name__)
+
+
+@dataclass(frozen=True, slots=True)
+class ProtoType:
+ """Represents a field type with its Python equivalent."""
+
+ proto_type: int
+ is_repeated: bool = False
+ is_optional: bool = False
+ is_message: bool = False
+ is_enum: bool = False
+ type_name: Optional[str] = None # Fully qualified name for messages/enums
+
+
+@dataclass(frozen=True, slots=True)
+class FieldInfo:
+ """Contains info about a message field."""
+
+ name: str
+ number: int
+ field_type: ProtoType
+ json_name: str
+ default_value: Optional[str] = None
+ oneof_index: Optional[int] = None
+ is_oneof_member: bool = False
+
+
+@dataclass(frozen=True, slots=True)
+class EnumInfo:
+ """Contains info about an enum."""
+
+ name: str
+ package: str
+ file_name: str
+ values: tuple[tuple[str, int], ...] # Tuple of (name, number) pairs
+ parent_path: Optional[str] = (
+ None # Full parent class path (e.g. "Span.Event")
+ )
+
+ @property
+ def python_class_path(self) -> str:
+ """Get Python class path (e.g. 'Span.Event.EventType' for nested)."""
+ if self.parent_path:
+ return f"{self.parent_path}.{self.name}"
+ return self.name
+
+ @property
+ def fully_qualified_name(self) -> str:
+ """Get fully qualified proto name."""
+ return f"{self.package}.{self.python_class_path}"
+
+
+@dataclass(frozen=True, slots=True)
+class MessageInfo:
+ """Contains all info about a protobuf message."""
+
+ name: str
+ package: str
+ file_name: str
+ fields: tuple[FieldInfo, ...]
+ nested_messages: tuple[MessageInfo, ...]
+ nested_enums: tuple[EnumInfo, ...]
+ parent_path: Optional[str] = (
+ None # Full parent class path (e.g. "Span.Event")
+ )
+
+ @property
+ def fully_qualified_name(self) -> str:
+ """Full proto package path."""
+ return f"{self.package}.{self.python_class_path}"
+
+ @property
+ def python_class_path(self) -> str:
+ """Path for nested classes in Python (e.g. 'Span.Event.SubEvent')."""
+ if self.parent_path:
+ return f"{self.parent_path}.{self.name}"
+ return self.name
+
+
+class DescriptorAnalyzer:
+ """Analyzes protobuf descriptors and builds a structured representation."""
+
+ def __init__(self, request: plugin.CodeGeneratorRequest) -> None:
+ self._request = request
+ self._messages: dict[
+ str, MessageInfo
+ ] = {} # Maps fully_qualified_name -> MessageInfo
+ self._enums: dict[
+ str, EnumInfo
+ ] = {} # Maps fully_qualified_name -> EnumInfo
+ self._file_to_messages: dict[str, list[MessageInfo]] = defaultdict(
+ list
+ ) # Maps proto file -> list of top level MessageInfo
+ self._file_to_enums: dict[str, list[EnumInfo]] = defaultdict(
+ list
+ ) # Maps proto file -> list of top level EnumInfo
+ self._file_dependencies: dict[str, list[str]] = defaultdict(
+ list
+ ) # Maps file -> list of imported files
+
+ @property
+ def messages(self) -> dict[str, MessageInfo]:
+ """Get all messages indexed by fully qualified name."""
+ return self._messages
+
+ @property
+ def enums(self) -> dict[str, EnumInfo]:
+ """Get all enums indexed by fully qualified name."""
+ return self._enums
+
+ @property
+ def file_to_messages(self) -> dict[str, list[MessageInfo]]:
+ """Get top level messages for each file."""
+ return self._file_to_messages
+
+ @property
+ def file_to_enums(self) -> dict[str, list[EnumInfo]]:
+ """Get top level enums for each file."""
+ return self._file_to_enums
+
+ @property
+ def file_dependencies(self) -> dict[str, list[str]]:
+ """Get file dependencies."""
+ return self._file_dependencies
+
+ def analyze(self) -> None:
+ """Process all files in the request."""
+ for proto_file in self._request.proto_file:
+ self._analyze_file(proto_file)
+
+ def _analyze_file(
+ self, file_descriptor: descriptor.FileDescriptorProto
+ ) -> None:
+ """Analyze a single proto file."""
+ package = file_descriptor.package
+ file_name = file_descriptor.name
+
+ _logger.debug("Processing file: %s (package: %s)", file_name, package)
+
+ self._file_dependencies[file_name] = list(file_descriptor.dependency)
+
+ self._file_to_enums[file_name].extend(
+ self._analyze_enum(enum_type, package, file_name, parent_path=None)
+ for enum_type in file_descriptor.enum_type
+ )
+ self._file_to_messages[file_name].extend(
+ self._analyze_message(
+ message_type, package, file_name, parent_path=None
+ )
+ for message_type in file_descriptor.message_type
+ )
+
+ def _analyze_message(
+ self,
+ message_desc: descriptor.DescriptorProto,
+ package: str,
+ file_name: str,
+ parent_path: Optional[str] = None,
+ ) -> MessageInfo:
+ """
+ Recursively analyze message and nested types.
+
+ Args:
+ message_desc: The message descriptor
+ package: The proto package name
+ file_name: The proto file name
+ parent_path: Full parent class path for nested messages (e.g. "Span.Event")
+
+ Returns:
+ MessageInfo for this message
+ """
+ # Determine the class path for nested types
+ current_path = (
+ f"{parent_path}.{message_desc.name}"
+ if parent_path
+ else message_desc.name
+ )
+
+ nested_enums = tuple(
+ self._analyze_enum(enum_type, package, file_name, current_path)
+ for enum_type in message_desc.enum_type
+ )
+
+ nested_messages = tuple(
+ self._analyze_message(
+ nested_type, package, file_name, current_path
+ )
+ for nested_type in message_desc.nested_type
+ if not nested_type.options.map_entry # Skip map entry types
+ )
+
+ fields = tuple(
+ self._analyze_field(field_desc)
+ for field_desc in message_desc.field
+ )
+
+ msg_info = MessageInfo(
+ name=message_desc.name,
+ package=package,
+ file_name=file_name,
+ fields=fields,
+ nested_messages=nested_messages,
+ nested_enums=nested_enums,
+ parent_path=parent_path,
+ )
+
+ self._messages[msg_info.fully_qualified_name] = msg_info
+ return msg_info
+
+ def _analyze_field(
+ self,
+ field_desc: descriptor.FieldDescriptorProto,
+ ) -> FieldInfo:
+ """Analyze a single field."""
+ is_repeated = (
+ field_desc.label == descriptor.FieldDescriptorProto.LABEL_REPEATED
+ )
+ is_optional = field_desc.proto3_optional
+ oneof_index = (
+ field_desc.oneof_index
+ if field_desc.HasField("oneof_index")
+ else None
+ )
+
+ # Get JSON name
+ json_name = (
+ field_desc.json_name
+ if field_desc.json_name
+ else to_json_field_name(field_desc.name)
+ )
+
+ is_message = (
+ field_desc.type == descriptor.FieldDescriptorProto.TYPE_MESSAGE
+ )
+ is_enum = field_desc.type == descriptor.FieldDescriptorProto.TYPE_ENUM
+ type_name = (
+ field_desc.type_name.lstrip(".")
+ if field_desc.HasField("type_name")
+ else None
+ )
+
+ proto_type = ProtoType(
+ proto_type=field_desc.type,
+ is_repeated=is_repeated,
+ is_optional=is_optional,
+ is_message=is_message,
+ is_enum=is_enum,
+ type_name=type_name,
+ )
+
+ return FieldInfo(
+ name=field_desc.name,
+ number=field_desc.number,
+ field_type=proto_type,
+ json_name=json_name,
+ oneof_index=oneof_index,
+ is_oneof_member=oneof_index is not None and not is_optional,
+ )
+
+ def _analyze_enum(
+ self,
+ enum_desc: descriptor.EnumDescriptorProto,
+ package: str,
+ file_name: str,
+ parent_path: Optional[str] = None,
+ ) -> EnumInfo:
+ """
+ Analyze an enum.
+
+ Args:
+ enum_desc: The enum descriptor
+ package: The proto package name
+ file_name: The proto file name
+ parent_path: Full parent class path for nested enums (e.g. "Span.Event")
+
+ Returns:
+ EnumInfo for this enum
+ """
+ enum_info = EnumInfo(
+ name=enum_desc.name,
+ package=package,
+ file_name=file_name,
+ values=tuple(
+ (value_desc.name, value_desc.number)
+ for value_desc in enum_desc.value
+ ),
+ parent_path=parent_path,
+ )
+
+ self._enums[enum_info.fully_qualified_name] = enum_info
+ return enum_info
+
+ def get_message_by_name(
+ self, fully_qualified_name: str
+ ) -> Optional[MessageInfo]:
+ """Get message by fully qualified name."""
+ return self._messages.get(fully_qualified_name)
+
+ def get_enum_by_name(
+ self, fully_qualified_name: str
+ ) -> Optional[EnumInfo]:
+ """Get enum by fully qualified name."""
+ return self._enums.get(fully_qualified_name)
+
+ def get_messages_for_file(self, file_name: str) -> list[MessageInfo]:
+ """Get top-level messages for a specific file."""
+ return self._file_to_messages.get(file_name, [])
diff --git a/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/generator.py b/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/generator.py
new file mode 100644
index 0000000000..6b4b257dcc
--- /dev/null
+++ b/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/generator.py
@@ -0,0 +1,954 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+
+import logging
+from collections import defaultdict
+from pathlib import Path
+from typing import Callable, Final, Optional, Set
+
+from google.protobuf import descriptor_pb2 as descriptor
+from google.protobuf.compiler import plugin_pb2 as plugin
+
+from opentelemetry.codegen.json.analyzer import (
+ DescriptorAnalyzer,
+ EnumInfo,
+ FieldInfo,
+ MessageInfo,
+ ProtoType,
+)
+from opentelemetry.codegen.json.types import (
+ get_default_value,
+ get_json_allowed_types,
+ get_python_type,
+ is_bytes_type,
+ is_hex_encoded_field,
+ is_int64_type,
+)
+from opentelemetry.codegen.json.version import __version__ as GENERATOR_VERSION
+from opentelemetry.codegen.json.writer import CodeWriter
+
+_logger = logging.getLogger(__name__)
+
+UTILS_MODULE_NAME: Final[str] = "_otlp_json_utils"
+
+
+class OtlpJsonGenerator:
+ """
+ Generates Python dataclasses and JSON serialization/deserialization code
+ from protobuf descriptors.
+ """
+
+ def __init__(
+ self,
+ analyzer: DescriptorAnalyzer,
+ package_transform: Callable[[str], str],
+ version: str,
+ ) -> None:
+ """
+ Initialize the generator.
+
+ Args:
+ analyzer: Analyzed descriptor information
+ package_transform: A callable that transforms the proto file path.
+ version: Version string for the generated code.
+ """
+ self._analyzer = analyzer
+ self._package_transform = package_transform
+ self._version = version
+ self._generated_files: dict[str, str] = {}
+ self._common_root: str = ""
+
+ def generate_all(self) -> dict[str, str]:
+ """
+ Generate Python code for all proto files and support modules.
+
+ Returns:
+ Dictionary mapping output file paths to generated code
+ """
+ all_proto_files = set(self._analyzer.file_to_messages.keys()) | set(
+ self._analyzer.file_to_enums.keys()
+ )
+
+ file_to_output = {
+ proto_file: self._transform_proto_path(proto_file)
+ for proto_file in all_proto_files
+ if self._analyzer.file_to_messages.get(proto_file)
+ or self._analyzer.file_to_enums.get(proto_file)
+ }
+
+ if not file_to_output:
+ return {}
+
+ self._common_root = self._find_common_root(
+ list(file_to_output.values())
+ )
+
+ for proto_file, output_path in file_to_output.items():
+ messages = self._analyzer.file_to_messages.get(proto_file, [])
+ enums = self._analyzer.file_to_enums.get(proto_file, [])
+ code = self._generate_file(proto_file, messages, enums)
+ self._generated_files[output_path] = code
+
+ utils_path = f"{self._common_root}/{UTILS_MODULE_NAME}.py"
+ self._generated_files[utils_path] = self._load_utils_source()
+
+ version_init_path = f"{self._common_root}/version/__init__.py"
+ version_writer = CodeWriter(indent_size=4)
+ self._generate_header(version_writer)
+ version_writer.writemany(f'__version__ = "{self._version}"', "")
+ self._generated_files[version_init_path] = version_writer.to_string()
+
+ self._ensure_init_files()
+
+ return self._generated_files
+
+ def _load_utils_source(self) -> str:
+ """Load the source code for the utility module from its source file."""
+ utils_src_path = (
+ Path(__file__).parent / "runtime" / "otlp_json_utils.py"
+ )
+ try:
+ return utils_src_path.read_text(encoding="utf-8")
+ except Exception as e:
+ _logger.error(
+ "Failed to load utility module source from %s: %s",
+ utils_src_path,
+ e,
+ )
+ raise RuntimeError(
+ f"Failed to load utility module source from {utils_src_path}"
+ ) from e
+
+ def _find_common_root(self, paths: list[str]) -> str:
+ """Find the longest common directory prefix."""
+ if not paths:
+ return ""
+
+ # Split paths into components
+ split_paths = [p.split("/")[:-1] for p in paths]
+ if not split_paths:
+ return ""
+
+ # Find common prefix among components
+ common = []
+ for parts in zip(*split_paths):
+ if all(p == parts[0] for p in parts):
+ common.append(parts[0])
+ else:
+ break
+
+ return "/".join(common)
+
+ def _ensure_init_files(self) -> None:
+ """Ensure every directory in the output contains an __init__.py file."""
+ dirs = set()
+ for path in list(self._generated_files.keys()):
+ p = Path(path)
+ for parent in p.parents:
+ parent_str = str(parent)
+ # Skip '.', root, and the 'opentelemetry' namespace directory
+ if parent_str in (".", "/", "opentelemetry"):
+ continue
+ dirs.add(parent_str)
+
+ for d in dirs:
+ init_path = f"{d}/__init__.py"
+ if init_path not in self._generated_files:
+ self._generated_files[init_path] = ""
+
+ def _get_utils_module_path(self) -> str:
+ """Get the absolute module path for the utility module."""
+ if not self._common_root:
+ return UTILS_MODULE_NAME
+ return f"{self._common_root.replace('/', '.')}.{UTILS_MODULE_NAME}"
+
+ def _transform_proto_path(self, proto_path: str) -> str:
+ """
+ Transform proto file path to output Python file path.
+
+ Example: 'opentelemetry/proto/trace/v1/trace.proto'
+ -> 'opentelemetry/proto_json/trace/v1/trace.py'
+
+ Args:
+ proto_path: Original .proto file path
+
+ Returns:
+ Transformed .py file path
+ """
+ transformed = self._package_transform(proto_path)
+ if transformed.endswith(".proto"):
+ transformed = transformed[:-6] + ".py"
+ return transformed
+
+ def _generate_file(
+ self,
+ proto_file: str,
+ messages: list[MessageInfo],
+ enums: list[EnumInfo],
+ ) -> str:
+ """
+ Generate complete Python file for a proto file.
+
+ Args:
+ proto_file: Original proto file path
+ messages: List of top-level messages in this file
+ enums: List of top-level enums in this file
+
+ Returns:
+ Generated Python code as string
+ """
+ writer = CodeWriter(indent_size=4)
+
+ self._generate_header(writer, proto_file)
+ self._generate_imports(
+ writer, proto_file, self._has_enums(messages, enums)
+ )
+ self._generate_enums_for_file(writer, enums)
+ self._generate_messages_for_file(writer, messages)
+ writer.blank_line()
+
+ return writer.to_string()
+
+ def _generate_header(
+ self, writer: CodeWriter, proto_file: str = ""
+ ) -> None:
+ """Generate file header with license and metadata."""
+ writer.writemany(
+ "# Copyright The OpenTelemetry Authors",
+ "#",
+ '# Licensed under the Apache License, Version 2.0 (the "License");',
+ "# you may not use this file except in compliance with the License.",
+ "# You may obtain a copy of the License at",
+ "#",
+ "# http://www.apache.org/licenses/LICENSE-2.0",
+ "#",
+ "# Unless required by applicable law or agreed to in writing, software",
+ '# distributed under the License is distributed on an "AS IS" BASIS,',
+ "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.",
+ "# See the License for the specific language governing permissions and",
+ "# limitations under the License.",
+ )
+ writer.blank_line()
+ if proto_file:
+ writer.comment(f'AUTO-GENERATED from "{proto_file}"')
+ writer.comment("DO NOT EDIT MANUALLY")
+ writer.blank_line()
+
+ def _generate_imports(
+ self,
+ writer: CodeWriter,
+ proto_file: str,
+ include_enum: bool,
+ ) -> None:
+ """
+ Generate all necessary import statements.
+
+ Args:
+ writer: Code writer instance
+ proto_file: Original proto file path
+ include_enum: Whether to include the enum module import
+ """
+ # Standard library imports
+ writer.writeln("from __future__ import annotations")
+ writer.blank_line()
+
+ std_imports = [
+ "builtins",
+ "dataclasses",
+ "functools",
+ "json",
+ "sys",
+ "typing",
+ ]
+ if include_enum:
+ std_imports.append("enum")
+
+ for module in sorted(std_imports):
+ writer.import_(module)
+
+ writer.blank_line()
+
+ writer.writeln("if sys.version_info >= (3, 10):")
+ with writer.indent():
+ writer.writeln(
+ "_dataclass = functools.partial(dataclasses.dataclass, slots=True)"
+ )
+ writer.writeln("else:")
+ with writer.indent():
+ writer.writeln("_dataclass = dataclasses.dataclass")
+ writer.blank_line()
+
+ # Collect all imports needed
+ imports = self._collect_imports(proto_file)
+ imports.add(f"import {self._get_utils_module_path()}")
+
+ # Generate cross file imports
+ if imports:
+ for import_info in sorted(imports):
+ writer.writeln(import_info)
+ writer.blank_line()
+ writer.blank_line()
+
+ def _get_module_path(self, proto_file: str) -> str:
+ """
+ Convert a proto file path to its transformed Python module path.
+
+ Example: 'opentelemetry/proto/common/v1/common.proto'
+ -> 'opentelemetry.proto_json.common.v1.common'
+
+ Args:
+ proto_file: Original .proto file path
+
+ Returns:
+ Python module path (dot-separated)
+ """
+ transformed = self._transform_proto_path(proto_file)
+ if transformed.endswith(".py"):
+ transformed = transformed[:-3]
+ return transformed.replace("/", ".")
+
+ def _collect_imports(self, proto_file: str) -> Set[str]:
+ """
+ Collect all import statements needed for cross file references.
+
+ Args:
+ proto_file: Current proto file path
+
+ Returns:
+ Set of import statement strings
+ """
+ return set(
+ "import " + self._get_module_path(dep_file)
+ for dep_file in self._analyzer.file_dependencies.get(
+ proto_file, []
+ )
+ )
+
+ def _generate_enums_for_file(
+ self,
+ writer: CodeWriter,
+ enums: list[EnumInfo],
+ ) -> None:
+ """
+ Generate all enums for a file (top level and nested).
+
+ Args:
+ writer: Code writer instance
+ enums: List of top level enums
+ """
+ for enum_info in enums:
+ self._generate_enum_class(writer, enum_info)
+ writer.blank_line()
+
+ def _generate_messages_for_file(
+ self, writer: CodeWriter, messages: list[MessageInfo]
+ ) -> None:
+ """
+ Generate all message classes for a file.
+
+ Args:
+ writer: Code writer instance
+ messages: List of top level messages
+ """
+ for i, message in enumerate(messages):
+ if i:
+ writer.blank_line(2)
+
+ self._generate_message_class(writer, message)
+
+ def _generate_message_class(
+ self, writer: CodeWriter, message: MessageInfo
+ ) -> None:
+ """
+ Generate a complete dataclass for a protobuf message.
+
+ Args:
+ writer: Code writer instance
+ message: Message information
+ """
+ with writer.dataclass(
+ message.name,
+ frozen=False,
+ slots=False,
+ decorators=("typing.final",),
+ decorator_name="_dataclass",
+ ):
+ if (
+ message.fields
+ or message.nested_messages
+ or message.nested_enums
+ ):
+ writer.docstring(
+ [f"Generated from protobuf message {message.name}"]
+ )
+ writer.blank_line()
+
+ for enum_info in message.nested_enums:
+ self._generate_enum_class(writer, enum_info)
+ writer.blank_line()
+
+ for nested_msg in message.nested_messages:
+ self._generate_message_class(writer, nested_msg)
+ writer.blank_line()
+
+ if message.fields:
+ for field_info in message.fields:
+ self._generate_field(writer, field_info, message)
+ else:
+ writer.pass_()
+
+ writer.blank_line()
+ self._generate_to_dict(writer, message)
+ writer.blank_line()
+ self._generate_to_json(writer, message)
+ writer.blank_line()
+ self._generate_from_dict(writer, message)
+ writer.blank_line()
+ self._generate_from_json(writer, message)
+
+ def _generate_to_dict(
+ self, writer: CodeWriter, message: MessageInfo
+ ) -> None:
+ """Generate to_dict() method."""
+ with writer.method(
+ "to_dict",
+ ["self"],
+ return_type="builtins.dict[builtins.str, typing.Any]",
+ ):
+ writer.docstring(
+ [
+ "Convert this message to a dictionary with lowerCamelCase keys.",
+ "",
+ "Returns:",
+ " Dictionary representation following OTLP JSON encoding",
+ ]
+ )
+ writer.writeln("_result = {}")
+
+ # Separate fields into oneof groups and standalone fields
+ oneof_groups: dict[int, list[FieldInfo]] = defaultdict(list)
+ standalone_fields: list[FieldInfo] = []
+
+ for field in message.fields:
+ if field.is_oneof_member and field.oneof_index is not None:
+ oneof_groups[field.oneof_index].append(field)
+ else:
+ standalone_fields.append(field)
+
+ # Handle standalone fields
+ for field in standalone_fields:
+ field_type = field.field_type
+ if field_type.is_repeated:
+ item_expr = self._get_serialization_expr(
+ field_type, field.name, "_v"
+ )
+ with writer.if_(f"self.{field.name}"):
+ if item_expr == "_v":
+ writer.writeln(
+ f'_result["{field.json_name}"] = self.{field.name}'
+ )
+ else:
+ utils = self._get_utils_module_path()
+ writer.writeln(
+ f'_result["{field.json_name}"] = {utils}.encode_repeated('
+ f"self.{field.name}, lambda _v: {item_expr})"
+ )
+ else:
+ val_expr = self._get_serialization_expr(
+ field_type, field.name, f"self.{field.name}"
+ )
+ check = f"self.{field.name}"
+
+ with writer.if_(check):
+ writer.writeln(
+ f'_result["{field.json_name}"] = {val_expr}'
+ )
+
+ # Handle oneof groups
+ for group_index in sorted(oneof_groups.keys()):
+ group_fields = oneof_groups[group_index]
+ for i, field in enumerate(reversed(group_fields)):
+ field_type = field.field_type
+ condition = f"self.{field.name} is not None"
+ context = (
+ writer.elif_(condition) if i else writer.if_(condition)
+ )
+
+ with context:
+ val_expr = self._get_serialization_expr(
+ field_type, field.name, f"self.{field.name}"
+ )
+ writer.writeln(
+ f'_result["{field.json_name}"] = {val_expr}'
+ )
+
+ writer.return_("_result")
+
+ def _generate_to_json(
+ self, writer: CodeWriter, message: MessageInfo
+ ) -> None:
+ """Generate to_json() method."""
+ with writer.method("to_json", ["self"], return_type="builtins.str"):
+ writer.docstring(
+ [
+ "Serialize this message to a JSON string.",
+ "",
+ "Returns:",
+ " JSON string",
+ ]
+ )
+ writer.return_("json.dumps(self.to_dict())")
+
+ def _get_serialization_expr(
+ self, field_type: ProtoType, field_name: str, var_name: str
+ ) -> str:
+ """Get the Python expression to serialize a value of a given type."""
+ utils = self._get_utils_module_path()
+ if field_type.is_message:
+ return f"{var_name}.to_dict()"
+ if field_type.is_enum:
+ return f"builtins.int({var_name})"
+ if is_hex_encoded_field(field_name):
+ return f"{utils}.encode_hex({var_name})"
+ if is_int64_type(field_type.proto_type):
+ return f"{utils}.encode_int64({var_name})"
+ if is_bytes_type(field_type.proto_type):
+ return f"{utils}.encode_base64({var_name})"
+ if field_type.proto_type in (
+ descriptor.FieldDescriptorProto.TYPE_FLOAT,
+ descriptor.FieldDescriptorProto.TYPE_DOUBLE,
+ ):
+ return f"{utils}.encode_float({var_name})"
+
+ return var_name
+
+ def _generate_from_dict(
+ self, writer: CodeWriter, message: MessageInfo
+ ) -> None:
+ """Generate from_dict() class method."""
+ with writer.method(
+ "from_dict",
+ ["cls", "data: builtins.dict[builtins.str, typing.Any]"],
+ decorators=["builtins.classmethod"],
+ return_type=f'"{message.python_class_path}"',
+ ):
+ writer.docstring(
+ [
+ "Create from a dictionary with lowerCamelCase keys.",
+ "",
+ "Args:",
+ " data: Dictionary representation following OTLP JSON encoding",
+ "",
+ "Returns:",
+ f" {message.name} instance",
+ ]
+ )
+ utils = self._get_utils_module_path()
+ writer.writeln(
+ f'{utils}.validate_type(data, builtins.dict, "data")'
+ )
+ writer.writeln("_args = {}")
+ writer.blank_line()
+
+ # Separate fields into oneof groups and standalone fields
+ oneof_groups: dict[int, list[FieldInfo]] = defaultdict(list)
+ standalone_fields: list[FieldInfo] = []
+
+ for field in message.fields:
+ if field.is_oneof_member and field.oneof_index is not None:
+ oneof_groups[field.oneof_index].append(field)
+ else:
+ standalone_fields.append(field)
+
+ # Handle standalone fields
+ for field in standalone_fields:
+ field_type = field.field_type
+ with writer.if_(
+ f'(_value := data.get("{field.json_name}")) is not None'
+ ):
+ if field_type.is_repeated:
+ item_expr = self._get_deserialization_expr(
+ field_type, field.name, "_v", message
+ )
+ writer.writeln(
+ f'_args["{field.name}"] = {utils}.decode_repeated('
+ f'_value, lambda _v: {item_expr}, "{field.name}")'
+ )
+ else:
+ self._generate_deserialization_statements(
+ writer, field, "_value", message, "_args"
+ )
+
+ # Handle oneof groups
+ for group_index in sorted(oneof_groups.keys()):
+ group_fields = oneof_groups[group_index]
+ for i, field in enumerate(reversed(group_fields)):
+ condition = f'(_value := data.get("{field.json_name}")) is not None'
+ context = (
+ writer.elif_(condition) if i else writer.if_(condition)
+ )
+
+ with context:
+ self._generate_deserialization_statements(
+ writer, field, "_value", message, "_args"
+ )
+
+ writer.blank_line()
+ writer.return_("cls(**_args)")
+
+ def _generate_from_json(
+ self, writer: CodeWriter, message: MessageInfo
+ ) -> None:
+ """Generate from_json() class method."""
+ with writer.method(
+ "from_json",
+ ["cls", "data: typing.Union[builtins.str, builtins.bytes]"],
+ decorators=["builtins.classmethod"],
+ return_type=f'"{message.python_class_path}"',
+ ):
+ writer.docstring(
+ [
+ "Deserialize from a JSON string or bytes.",
+ "",
+ "Args:",
+ " data: JSON string or bytes",
+ "",
+ "Returns:",
+ " Instance of the class",
+ ]
+ )
+ writer.return_("cls.from_dict(json.loads(data))")
+
+ def _generate_deserialization_statements(
+ self,
+ writer: CodeWriter,
+ field: FieldInfo,
+ var_name: str,
+ message: MessageInfo,
+ target_dict: str,
+ ) -> None:
+ """Generate validation and assignment statements for a field."""
+ field_type = field.field_type
+ utils = self._get_utils_module_path()
+ if field_type.is_message and (type_name := field_type.type_name):
+ msg_type = self._resolve_message_type(type_name, message)
+ writer.writeln(
+ f'{target_dict}["{field.name}"] = {msg_type}.from_dict({var_name})'
+ )
+ elif field_type.is_enum and (type_name := field_type.type_name):
+ enum_type = self._resolve_enum_type(type_name, message)
+ writer.writeln(
+ f'{utils}.validate_type({var_name}, builtins.int, "{field.name}")'
+ )
+ writer.writeln(
+ f'{target_dict}["{field.name}"] = {enum_type}({var_name})'
+ )
+ elif is_hex_encoded_field(field.name):
+ writer.writeln(
+ f'{target_dict}["{field.name}"] = {utils}.decode_hex({var_name}, "{field.name}")'
+ )
+ elif is_int64_type(field_type.proto_type):
+ writer.writeln(
+ f'{target_dict}["{field.name}"] = {utils}.decode_int64({var_name}, "{field.name}")'
+ )
+ elif is_bytes_type(field_type.proto_type):
+ writer.writeln(
+ f'{target_dict}["{field.name}"] = {utils}.decode_base64({var_name}, "{field.name}")'
+ )
+ elif field_type.proto_type in (
+ descriptor.FieldDescriptorProto.TYPE_FLOAT,
+ descriptor.FieldDescriptorProto.TYPE_DOUBLE,
+ ):
+ writer.writeln(
+ f'{target_dict}["{field.name}"] = {utils}.decode_float({var_name}, "{field.name}")'
+ )
+ else:
+ allowed_types = get_json_allowed_types(
+ field_type.proto_type, field.name
+ )
+ writer.writeln(
+ f'{utils}.validate_type({var_name}, {allowed_types}, "{field.name}")'
+ )
+ writer.writeln(f'{target_dict}["{field.name}"] = {var_name}')
+
+ def _get_deserialization_expr(
+ self,
+ field_type: ProtoType,
+ field_name: str,
+ var_name: str,
+ context: MessageInfo,
+ ) -> str:
+ """Get the Python expression to deserialize a value of a given type."""
+ utils = self._get_utils_module_path()
+ if field_type.is_message and (type_name := field_type.type_name):
+ msg_type = self._resolve_message_type(type_name, context)
+ return f"{msg_type}.from_dict({var_name})"
+ if field_type.is_enum and (type_name := field_type.type_name):
+ enum_type = self._resolve_enum_type(type_name, context)
+ return f"{enum_type}({var_name})"
+ if is_hex_encoded_field(field_name):
+ return f'{utils}.decode_hex({var_name}, "{field_name}")'
+ if is_int64_type(field_type.proto_type):
+ return f'{utils}.decode_int64({var_name}, "{field_name}")'
+ if is_bytes_type(field_type.proto_type):
+ return f'{utils}.decode_base64({var_name}, "{field_name}")'
+ if field_type.proto_type in (
+ descriptor.FieldDescriptorProto.TYPE_FLOAT,
+ descriptor.FieldDescriptorProto.TYPE_DOUBLE,
+ ):
+ return f'{utils}.decode_float({var_name}, "{field_name}")'
+
+ return var_name
+
+ def _generate_enum_class(
+ self, writer: CodeWriter, enum_info: EnumInfo
+ ) -> None:
+ """
+ Generate an IntEnum class for a protobuf enum.
+
+ Args:
+ writer: Code writer instance
+ enum_info: Enum information
+ """
+ with writer.enum(
+ enum_info.name,
+ enum_type="enum.IntEnum",
+ decorators=("typing.final",),
+ ):
+ writer.docstring(
+ [f"Generated from protobuf enum {enum_info.name}"]
+ )
+ writer.blank_line()
+
+ if enum_info.values:
+ for name, number in enum_info.values:
+ writer.enum_member(name, number)
+ else:
+ writer.pass_()
+
+ def _generate_field(
+ self,
+ writer: CodeWriter,
+ field_info: FieldInfo,
+ parent_message: MessageInfo,
+ ) -> None:
+ """
+ Generate a dataclass field.
+
+ Args:
+ writer: Code writer instance
+ field_info: Field information
+ parent_message: Parent message (for context)
+ """
+ type_hint = self._get_field_type_hint(field_info, parent_message)
+ writer.field(
+ field_info.name,
+ type_hint,
+ default=self._get_field_default(field_info),
+ )
+
+ def _get_field_type_hint(
+ self, field_info: FieldInfo, parent_message: MessageInfo
+ ) -> str:
+ """
+ Get the Python type hint for a field.
+
+ Args:
+ field_info: Field information
+ parent_message: Parent message (for resolving nested types)
+
+ Returns:
+ Python type hint string
+ """
+ field_type = field_info.field_type
+
+ if field_type.is_message and (type_name := field_type.type_name):
+ base_type = self._resolve_message_type(type_name, parent_message)
+ elif field_type.is_enum and (type_name := field_type.type_name):
+ base_type = self._resolve_enum_type(type_name, parent_message)
+ else:
+ base_type = get_python_type(field_type.proto_type)
+
+ if field_type.is_repeated:
+ return f"builtins.list[{base_type}]"
+ if field_type.is_enum:
+ return f"typing.Union[{base_type}, builtins.int, None]"
+ return f"typing.Optional[{base_type}]"
+
+ def _resolve_message_type(
+ self, fully_qualified_name: str, context_message: MessageInfo
+ ) -> str:
+ """
+ Resolve a message type name to its Python class path.
+
+ Args:
+ fully_qualified_name: Fully qualified proto name (e.g. 'package.Message')
+ context_message: Current message (for resolving nested types)
+
+ Returns:
+ Python class reference (e.g. 'Message' or 'ParentMessage.NestedMessage')
+ """
+ # Look up the message in the analyzer
+ message_info = self._analyzer.get_message_by_name(fully_qualified_name)
+
+ if message_info is None:
+ _logger.warning(
+ "Could not resolve message type: %s", fully_qualified_name
+ )
+ return "typing.Any"
+
+ # If in same file, use relative class path
+ if message_info.file_name == context_message.file_name:
+ return message_info.python_class_path
+ # Cross file reference - use fully qualified module + class path
+ module_path = self._get_module_path(message_info.file_name)
+ return f"{module_path}.{message_info.python_class_path}"
+
+ def _resolve_enum_type(
+ self, fully_qualified_name: str, context_message: MessageInfo
+ ) -> str:
+ """
+ Resolve an enum type name to its Python class path.
+
+ Args:
+ fully_qualified_name: Fully qualified proto name
+ context_message: Current message (for resolving nested types)
+
+ Returns:
+ Python class reference
+ """
+ enum_info = self._analyzer.get_enum_by_name(fully_qualified_name)
+
+ if enum_info is None:
+ _logger.warning(
+ "Could not resolve enum type: %s", fully_qualified_name
+ )
+ return "builtins.int"
+
+ # If in same file, use relative class path
+ if enum_info.file_name == context_message.file_name:
+ return enum_info.python_class_path
+ # Cross file reference - use fully qualified module + class path
+ module_path = self._get_module_path(enum_info.file_name)
+ return f"{module_path}.{enum_info.python_class_path}"
+
+ def _get_field_default(self, field_info: FieldInfo) -> Optional[str]:
+ """
+ Get the default value for a field.
+
+ Args:
+ field_info: Field information
+
+ Returns:
+ Default value string or None
+ """
+ field_type = field_info.field_type
+
+ # Repeated fields default to empty list
+ if field_type.is_repeated:
+ return "dataclasses.field(default_factory=builtins.list)"
+
+ # Optional fields, Message types, and oneof members default to None
+ if field_type.is_message or field_info.is_oneof_member:
+ return "None"
+
+ # Enum types default to 0
+ if field_type.is_enum:
+ return "0"
+
+ # Primitive types use proto defaults
+ return get_default_value(field_type.proto_type)
+
+ def _has_enums(
+ self, messages: list[MessageInfo], enums: list[EnumInfo]
+ ) -> bool:
+ """
+ Recursively check if there are any enums defined in the file.
+ """
+ if enums:
+ return True
+ for message in messages:
+ if message.nested_enums:
+ return True
+ if self._has_enums(list(message.nested_messages), []):
+ return True
+ return False
+
+
+def generate_code(
+ request: plugin.CodeGeneratorRequest,
+ package_transform: Callable[[str], str] = lambda p: p.replace(
+ "opentelemetry/proto/", "opentelemetry/proto_json/"
+ ),
+) -> dict[str, str]:
+ """
+ Main entry point for code generation.
+
+ Args:
+ request: Protobuf compiler plugin request
+ package_transform: Callable that transforms proto file paths into output file paths
+
+ Returns:
+ Dictionary mapping output file paths to generated code
+ """
+ analyzer = DescriptorAnalyzer(request)
+ analyzer.analyze()
+
+ generator = OtlpJsonGenerator(
+ analyzer, package_transform, version=GENERATOR_VERSION
+ )
+ return generator.generate_all()
+
+
+def generate_plugin_response(
+ request: plugin.CodeGeneratorRequest,
+ package_transform: Callable[[str], str] = lambda p: p.replace(
+ "opentelemetry/proto/", "opentelemetry/proto_json/"
+ ),
+) -> plugin.CodeGeneratorResponse:
+ """
+ Generate plugin response with all generated files.
+
+ Args:
+ request: Protobuf compiler plugin request
+ package_transform: Callable that transforms proto file paths into output file paths
+
+ Returns:
+ Plugin response with generated files
+ """
+ response = plugin.CodeGeneratorResponse()
+
+ # Declare support for optional proto3 fields
+ response.supported_features |= (
+ plugin.CodeGeneratorResponse.FEATURE_PROTO3_OPTIONAL
+ )
+ response.supported_features |= (
+ plugin.CodeGeneratorResponse.FEATURE_SUPPORTS_EDITIONS
+ )
+
+ response.minimum_edition = descriptor.EDITION_LEGACY
+ response.maximum_edition = descriptor.EDITION_2024
+
+ # Generate code
+ generated_files = generate_code(request, package_transform)
+
+ # Create response files
+ for output_path, code in generated_files.items():
+ file_response = response.file.add()
+ file_response.name = output_path
+ file_response.content = code
+
+ return response
diff --git a/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/plugin.py b/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/plugin.py
new file mode 100644
index 0000000000..b53ecccaf8
--- /dev/null
+++ b/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/plugin.py
@@ -0,0 +1,65 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import sys
+from collections.abc import Iterator
+from contextlib import contextmanager
+from typing import Tuple
+
+from google.protobuf.compiler import plugin_pb2 as plugin
+
+from opentelemetry.codegen.json.generator import generate_plugin_response
+from opentelemetry.codegen.json.version import __version__
+
+_logger = logging.getLogger(__name__)
+
+
+@contextmanager
+def code_generation() -> Iterator[
+ Tuple[plugin.CodeGeneratorRequest, plugin.CodeGeneratorResponse],
+]:
+ if len(sys.argv) > 1 and sys.argv[1] in ("-V", "--version"):
+ print("opentelemetry-codegen-json " + __version__)
+ sys.exit(0)
+
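+    # protoc passes a serialized CodeGeneratorRequest on stdin and expects a
+    # serialized CodeGeneratorResponse on stdout.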
+ data = sys.stdin.buffer.read()
+
+ request = plugin.CodeGeneratorRequest()
+ request.ParseFromString(data)
+
+ response = plugin.CodeGeneratorResponse()
+
+ yield request, response
+
+ output = response.SerializeToString()
+ sys.stdout.buffer.write(output)
+
+
+def main() -> None:
+ with code_generation() as (request, response):
+ generated_response = generate_plugin_response(request)
+
+        response.supported_features |= generated_response.supported_features
+        # FEATURE_SUPPORTS_EDITIONS also requires forwarding the supported edition range.
+        response.minimum_edition = generated_response.minimum_edition
+        response.maximum_edition = generated_response.maximum_edition
+        for file in generated_response.file:
+            response.file.add().CopyFrom(file)
+
+
+if __name__ == "__main__":
+ logging.basicConfig(
+ level=logging.DEBUG,
+ stream=sys.stderr,
+ format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
+ )
+ main()
diff --git a/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/runtime/__init__.py b/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/runtime/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/runtime/otlp_json_utils.py b/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/runtime/otlp_json_utils.py
new file mode 100644
index 0000000000..e88e9e3533
--- /dev/null
+++ b/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/runtime/otlp_json_utils.py
@@ -0,0 +1,153 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
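+# Runtime helpers implementing the Proto3 JSON mapping used by OTLP JSON:
+# hex/base64 encoding for bytes, 64-bit integers as strings, and
+# "NaN"/"Infinity" strings for non-finite floats.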
+from __future__ import annotations
+
+import base64
+import math
+import typing
+
+T = typing.TypeVar("T")
+
+
+def encode_hex(value: bytes) -> str:
+ """
+ Encode bytes as hex string.
+ Used for trace_id and span_id per OTLP spec.
+ """
+ return value.hex() if value else ""
+
+
+def encode_base64(value: bytes) -> str:
+ """
+ Encode bytes as base64 string.
+ Standard Proto3 JSON mapping for bytes.
+ """
+ return base64.b64encode(value).decode("utf-8") if value else ""
+
+
+def encode_int64(value: int) -> str:
+ """
+ Encode 64 bit integers as strings.
+ Required for int64, uint64, fixed64, sfixed64 and sint64 per Proto3 JSON spec.
+ """
+ return str(value)
+
+
+def encode_float(value: float) -> typing.Union[float, str]:
+ """
+ Encode float/double values.
+ """
+ if math.isnan(value):
+ return "NaN"
+ if math.isinf(value):
+ return "Infinity" if value > 0 else "-Infinity"
+ return value
+
+
+def encode_repeated(
+ values: list[typing.Any], map_fn: typing.Callable[[typing.Any], typing.Any]
+) -> list[typing.Any]:
+ """Helper to serialize repeated fields."""
+ return [map_fn(v) for v in values] if values else []
+
+
+def decode_hex(value: typing.Optional[str], field_name: str) -> bytes:
+ """Decode hex string to bytes."""
+ if not value:
+ return b""
+ validate_type(value, str, field_name)
+ try:
+ return bytes.fromhex(value)
+ except ValueError as e:
+ raise ValueError(
+ f"Invalid hex string for field '{field_name}': {e}"
+ ) from None
+
+
+def decode_base64(value: typing.Optional[str], field_name: str) -> bytes:
+ """Decode base64 string to bytes."""
+ if not value:
+ return b""
+ validate_type(value, str, field_name)
+ try:
+ return base64.b64decode(value)
+ except Exception as e:
+ raise ValueError(
+ f"Invalid base64 string for field '{field_name}': {e}"
+ ) from None
+
+
+def decode_int64(
+ value: typing.Optional[typing.Union[int, str]], field_name: str
+) -> int:
+ """Parse 64-bit integer from string or number."""
+ if value is None:
+ return 0
+ validate_type(value, (int, str), field_name)
+ try:
+ return int(value)
+ except (ValueError, TypeError):
+ raise ValueError(
+ f"Invalid int64 value for field '{field_name}': {value}"
+ ) from None
+
+
+def decode_float(
+ value: typing.Optional[typing.Union[float, int, str]], field_name: str
+) -> float:
+ """Parse float/double from number or special string."""
+ if value is None:
+ return 0.0
+ validate_type(value, (float, int, str), field_name)
+ if value == "NaN":
+ return math.nan
+ if value == "Infinity":
+ return math.inf
+ if value == "-Infinity":
+ return -math.inf
+ try:
+ return float(value)
+ except (ValueError, TypeError):
+ raise ValueError(
+ f"Invalid float value for field '{field_name}': {value}"
+ ) from None
+
+
+def decode_repeated(
+ values: typing.Optional[list[typing.Any]],
+ item_parser: typing.Callable[[typing.Any], T],
+ field_name: str,
+) -> list[T]:
+ """Helper to deserialize repeated fields."""
+ if values is None:
+ return []
+ validate_type(values, list, field_name)
+ return [item_parser(v) for v in values]
+
+
+def validate_type(
+ value: typing.Any,
+ expected_types: typing.Union[type, tuple[type, ...]],
+ field_name: str,
+) -> None:
+ """
+ Validate that a value is of the expected type(s).
+ Raises TypeError if validation fails.
+ """
+ if not isinstance(value, expected_types):
+ raise TypeError(
+ f"Field '{field_name}' expected {expected_types}, "
+ f"got {type(value).__name__}"
+ )
diff --git a/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/types.py b/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/types.py
new file mode 100644
index 0000000000..0e5902d195
--- /dev/null
+++ b/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/types.py
@@ -0,0 +1,134 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import Final
+
+from google.protobuf import descriptor_pb2 as descriptor
+
+PROTO_TO_PYTHON: Final[dict[int, str]] = {
+ descriptor.FieldDescriptorProto.TYPE_DOUBLE: "builtins.float",
+ descriptor.FieldDescriptorProto.TYPE_FLOAT: "builtins.float",
+ descriptor.FieldDescriptorProto.TYPE_INT64: "builtins.int",
+ descriptor.FieldDescriptorProto.TYPE_UINT64: "builtins.int",
+ descriptor.FieldDescriptorProto.TYPE_INT32: "builtins.int",
+ descriptor.FieldDescriptorProto.TYPE_FIXED64: "builtins.int",
+ descriptor.FieldDescriptorProto.TYPE_FIXED32: "builtins.int",
+ descriptor.FieldDescriptorProto.TYPE_BOOL: "builtins.bool",
+ descriptor.FieldDescriptorProto.TYPE_STRING: "builtins.str",
+ descriptor.FieldDescriptorProto.TYPE_BYTES: "builtins.bytes",
+ descriptor.FieldDescriptorProto.TYPE_UINT32: "builtins.int",
+ descriptor.FieldDescriptorProto.TYPE_SFIXED32: "builtins.int",
+ descriptor.FieldDescriptorProto.TYPE_SFIXED64: "builtins.int",
+ descriptor.FieldDescriptorProto.TYPE_SINT32: "builtins.int",
+ descriptor.FieldDescriptorProto.TYPE_SINT64: "builtins.int",
+}
+
+PROTO_DEFAULTS: Final[dict[int, str]] = {
+ descriptor.FieldDescriptorProto.TYPE_DOUBLE: "0.0",
+ descriptor.FieldDescriptorProto.TYPE_FLOAT: "0.0",
+ descriptor.FieldDescriptorProto.TYPE_INT64: "0",
+ descriptor.FieldDescriptorProto.TYPE_UINT64: "0",
+ descriptor.FieldDescriptorProto.TYPE_INT32: "0",
+ descriptor.FieldDescriptorProto.TYPE_FIXED64: "0",
+ descriptor.FieldDescriptorProto.TYPE_FIXED32: "0",
+ descriptor.FieldDescriptorProto.TYPE_BOOL: "False",
+ descriptor.FieldDescriptorProto.TYPE_STRING: '""',
+ descriptor.FieldDescriptorProto.TYPE_BYTES: 'b""',
+ descriptor.FieldDescriptorProto.TYPE_UINT32: "0",
+ descriptor.FieldDescriptorProto.TYPE_SFIXED32: "0",
+ descriptor.FieldDescriptorProto.TYPE_SFIXED64: "0",
+ descriptor.FieldDescriptorProto.TYPE_SINT32: "0",
+ descriptor.FieldDescriptorProto.TYPE_SINT64: "0",
+}
+
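+# 64-bit integer types are serialized as JSON strings per the Proto3 JSON mapping.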
+INT64_TYPES: Final[set[int]] = {
+ descriptor.FieldDescriptorProto.TYPE_INT64,
+ descriptor.FieldDescriptorProto.TYPE_UINT64,
+ descriptor.FieldDescriptorProto.TYPE_FIXED64,
+ descriptor.FieldDescriptorProto.TYPE_SFIXED64,
+ descriptor.FieldDescriptorProto.TYPE_SINT64,
+}
+
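+# Trace and span identifiers are hex-encoded strings in OTLP JSON, unlike
+# ordinary bytes fields, which use base64.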
+HEX_ENCODED_FIELDS: Final[set[str]] = {
+ "trace_id",
+ "span_id",
+ "parent_span_id",
+}
+
+
+def get_python_type(proto_type: int) -> str:
+ """Get Python type for a protobuf field type."""
+ return PROTO_TO_PYTHON.get(proto_type, "typing.Any")
+
+
+def get_default_value(proto_type: int) -> str:
+ """Get default value for a protobuf field type."""
+ return PROTO_DEFAULTS.get(proto_type, "None")
+
+
+def is_int64_type(proto_type: int) -> bool:
+ """Check if type is a 64-bit integer requiring string serialization."""
+ return proto_type in INT64_TYPES
+
+
+def is_bytes_type(proto_type: int) -> bool:
+ """Check if type is bytes."""
+ return proto_type == descriptor.FieldDescriptorProto.TYPE_BYTES
+
+
+def is_hex_encoded_field(field_name: str) -> bool:
+ """Check if this is a trace/span ID field requiring hex encoding."""
+ return field_name in HEX_ENCODED_FIELDS
+
+
+def to_json_field_name(snake_name: str) -> str:
+ """Convert snake_case field name to lowerCamelCase JSON name."""
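+    # e.g. "start_time_unix_nano" -> "startTimeUnixNano"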
+ components = snake_name.split("_")
+ return components[0] + "".join(x.title() for x in components[1:])
+
+
+def is_numeric_type(proto_type: int) -> bool:
+ """Check if type is numeric (int or float)."""
+ if proto_type in INT64_TYPES:
+ return True
+ return proto_type in {
+ descriptor.FieldDescriptorProto.TYPE_DOUBLE,
+ descriptor.FieldDescriptorProto.TYPE_FLOAT,
+ descriptor.FieldDescriptorProto.TYPE_INT32,
+ descriptor.FieldDescriptorProto.TYPE_FIXED32,
+ descriptor.FieldDescriptorProto.TYPE_UINT32,
+ descriptor.FieldDescriptorProto.TYPE_SFIXED32,
+ descriptor.FieldDescriptorProto.TYPE_SINT32,
+ }
+
+
+def get_json_allowed_types(proto_type: int, field_name: str = "") -> str:
+ """
+ Get the Python type(s) allowed for the JSON representation of a field.
+ Returns a string representation of the type or tuple of types.
+ """
+ if is_hex_encoded_field(field_name):
+ return "builtins.str"
+ if is_int64_type(proto_type):
+ return "(builtins.int, builtins.str)"
+ if is_bytes_type(proto_type):
+ return "builtins.str"
+ if proto_type in (
+ descriptor.FieldDescriptorProto.TYPE_FLOAT,
+ descriptor.FieldDescriptorProto.TYPE_DOUBLE,
+ ):
+ return "(builtins.float, builtins.int, builtins.str)"
+
+ py_type = get_python_type(proto_type)
+ return py_type
diff --git a/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/version/__init__.py b/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/version/__init__.py
new file mode 100644
index 0000000000..c099e9440e
--- /dev/null
+++ b/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/version/__init__.py
@@ -0,0 +1,15 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+__version__ = "0.61b0.dev"
diff --git a/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/writer.py b/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/writer.py
new file mode 100644
index 0000000000..a3535a4eb7
--- /dev/null
+++ b/codegen/opentelemetry-codegen-json/src/opentelemetry/codegen/json/writer.py
@@ -0,0 +1,489 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+
+from collections.abc import Iterable
+from contextlib import contextmanager
+from typing import Any, Generator, Mapping, Optional, Union
+
+
+class CodeWriter:
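+    """Small helper for emitting indented Python source code line by line."""
+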
+ def __init__(self, indent_size: int = 4) -> None:
+ self._lines: list[str] = []
+ self._indent_level: int = 0
+ self._indent_size: int = indent_size
+
+ @contextmanager
+ def indent(self) -> Generator[CodeWriter, None, None]:
+ self._indent_level += 1
+ try:
+ yield self
+ finally:
+ self._indent_level -= 1
+
+ def writeln(self, line: str = "") -> CodeWriter:
+ if not line:
+ self._lines.append("")
+ return self
+ indent = " " * (self._indent_level * self._indent_size)
+ self._lines.append(f"{indent}{line}")
+ return self
+
+ def writemany(self, *lines: str) -> CodeWriter:
+ for line in lines:
+ self.writeln(line)
+ return self
+
+ def comment(self, content: Union[str, Iterable[str]]) -> CodeWriter:
+ if isinstance(content, str):
+ self.writeln(f"# {content}")
+ return self
+ for line in content:
+ self.writeln(f"# {line}")
+ return self
+
+ def docstring(self, content: Union[str, Iterable[str]]) -> CodeWriter:
+ if isinstance(content, str):
+ self.writeln(f'"""{content}"""')
+ return self
+ self.writeln('"""')
+ for line in content:
+ self.writeln(line)
+ self.writeln('"""')
+ return self
+
+ def import_(self, module: str, *items: str) -> CodeWriter:
+ if items:
+ self.writeln(f"from {module} import {', '.join(items)}")
+ else:
+ self.writeln(f"import {module}")
+ return self
+
+ @contextmanager
+ def suite(self, header: str) -> Generator[CodeWriter, None, None]:
+ """Write header then indent"""
+ self.writeln(header)
+ with self.indent():
+ yield self
+
+ @contextmanager
+ def class_(
+ self,
+ name: str,
+ bases: Optional[Iterable[str]] = None,
+ decorators: Optional[Iterable[str]] = None,
+ ) -> Generator[CodeWriter, None, None]:
+ """Create a regular class with optional bases and decorators"""
+ if decorators is not None:
+ for dec in decorators:
+ self.writeln(f"@{dec}")
+
+ bases_str = f"({', '.join(bases)})" if bases else ""
+ self.writeln(f"class {name}{bases_str}:")
+
+ with self.indent():
+ yield self
+
+ @contextmanager
+ def dataclass(
+ self,
+ name: str,
+ bases: Optional[Iterable[str]] = None,
+ decorators: Optional[Iterable[str]] = None,
+ frozen: bool = False,
+ slots: bool = False,
+ decorator_name: str = "dataclasses.dataclass",
+ ) -> Generator[CodeWriter, None, None]:
+ """Create a dataclass with optional configuration"""
+ dc_params = []
+ if frozen:
+ dc_params.append("frozen=True")
+ if slots:
+ dc_params.append("slots=True")
+
+ dc_decorator = (
+ f"{decorator_name}({', '.join(dc_params)})"
+ if dc_params
+ else decorator_name
+ )
+
+ all_decorators = []
+ if decorators is not None:
+ all_decorators.extend(decorators)
+ all_decorators.append(dc_decorator)
+
+ for dec in all_decorators:
+ self.writeln(f"@{dec}")
+
+ bases_str = f"({', '.join(bases)})" if bases else ""
+ self.writeln(f"class {name}{bases_str}:")
+
+ with self.indent():
+ yield self
+
+ @contextmanager
+ def enum(
+ self,
+ name: str,
+ enum_type: str = "enum.Enum",
+ bases: Optional[Iterable[str]] = None,
+ decorators: Optional[Iterable[str]] = None,
+ ) -> Generator[CodeWriter, None, None]:
+ """Create an enum"""
+ if decorators is not None:
+ for dec in decorators:
+ self.writeln(f"@{dec}")
+
+ all_bases = [enum_type]
+ if bases is not None:
+ all_bases.extend(bases)
+
+ bases_str = ", ".join(all_bases)
+ self.writeln(f"class {name}({bases_str}):")
+
+ with self.indent():
+ yield self
+
+ def field(
+ self,
+ name: str,
+ type_hint: str,
+ default: Any = None,
+ default_factory: Optional[str] = None,
+ ) -> CodeWriter:
+ """Write a dataclass field"""
+ if default_factory:
+ self.writeln(
+ f"{name}: {type_hint} = dataclasses.field(default_factory={default_factory})"
+ )
+ elif default is not None:
+ self.writeln(f"{name}: {type_hint} = {default}")
+ else:
+ self.writeln(f"{name}: {type_hint}")
+ return self
+
+ def enum_member(self, name: str, value: Any) -> CodeWriter:
+ """Write an enum member"""
+ self.writeln(f"{name} = {value}")
+ return self
+
+ def auto_enum_member(self, name: str) -> CodeWriter:
+ """Write an auto() enum member"""
+ self.writeln(f"{name} = enum.auto()")
+ return self
+
+ @contextmanager
+ def function(
+ self,
+ name: str,
+ params: Union[Iterable[str], str],
+ decorators: Optional[Iterable[str]] = None,
+ return_type: Optional[str] = None,
+ ) -> Generator[CodeWriter, None, None]:
+ """Create a function as a context manager for building the body"""
+ if decorators is not None:
+ for dec in decorators:
+ self.writeln(f"@{dec}")
+
+ params_str = params if isinstance(params, str) else ", ".join(params)
+ return_annotation = f" -> {return_type}" if return_type else ""
+ self.writeln(f"def {name}({params_str}){return_annotation}:")
+
+ with self.indent():
+ yield self
+
+ def write_function(
+ self,
+ name: str,
+ params: Union[Iterable[str], str],
+ body_lines: Union[Iterable[str], str],
+ decorators: Optional[Iterable[str]] = None,
+ return_type: Optional[str] = None,
+ ) -> CodeWriter:
+ """Write a complete function"""
+ with self.function(
+ name, params, decorators=decorators, return_type=return_type
+ ):
+ if isinstance(body_lines, str):
+ self.writeln(body_lines)
+ else:
+ for line in body_lines:
+ self.writeln(line)
+ return self
+
+ @contextmanager
+ def method(
+ self,
+ name: str,
+ params: Union[Iterable[str], str],
+ decorators: Optional[Iterable[str]] = None,
+ return_type: Optional[str] = None,
+ ) -> Generator[CodeWriter, None, None]:
+ """Alias for function() - more semantic for methods in classes"""
+ with self.function(
+ name, params, decorators=decorators, return_type=return_type
+ ):
+ yield self
+
+ def staticmethod_(
+ self,
+ name: str,
+ params: Union[Iterable[str], str],
+ body_lines: Union[Iterable[str], str],
+ return_type: Optional[str] = None,
+ ) -> CodeWriter:
+ return self.write_function(
+ name,
+ params,
+ body_lines,
+ decorators=["builtins.staticmethod"],
+ return_type=return_type,
+ )
+
+ def classmethod_(
+ self,
+ name: str,
+ params: Union[Iterable[str], str],
+ body_lines: Union[Iterable[str], str],
+ return_type: Optional[str] = None,
+ ) -> CodeWriter:
+ return self.write_function(
+ name,
+ params,
+ body_lines,
+ decorators=["builtins.classmethod"],
+ return_type=return_type,
+ )
+
+ @contextmanager
+ def if_(self, condition: str) -> Generator[CodeWriter, None, None]:
+ """Create an if block"""
+ self.writeln(f"if {condition}:")
+ with self.indent():
+ yield self
+
+ @contextmanager
+ def elif_(self, condition: str) -> Generator[CodeWriter, None, None]:
+ """Create an elif block"""
+ self.writeln(f"elif {condition}:")
+ with self.indent():
+ yield self
+
+ @contextmanager
+ def else_(self) -> Generator[CodeWriter, None, None]:
+ """Create an else block"""
+ self.writeln("else:")
+ with self.indent():
+ yield self
+
+ @contextmanager
+ def for_(
+ self, var: str, iterable: str
+ ) -> Generator[CodeWriter, None, None]:
+ """Create a for loop"""
+ self.writeln(f"for {var} in {iterable}:")
+ with self.indent():
+ yield self
+
+ @contextmanager
+ def while_(self, condition: str) -> Generator[CodeWriter, None, None]:
+ """Create a while loop"""
+ self.writeln(f"while {condition}:")
+ with self.indent():
+ yield self
+
+ @contextmanager
+ def try_(self) -> Generator[CodeWriter, None, None]:
+ """Create a try block"""
+ self.writeln("try:")
+ with self.indent():
+ yield self
+
+ @contextmanager
+ def except_(
+ self, exception: Optional[str] = None, as_var: Optional[str] = None
+ ) -> Generator[CodeWriter, None, None]:
+ """Create an except block"""
+ if exception and as_var:
+ self.writeln(f"except {exception} as {as_var}:")
+ elif exception:
+ self.writeln(f"except {exception}:")
+ else:
+ self.writeln("except:")
+ with self.indent():
+ yield self
+
+ @contextmanager
+ def finally_(self) -> Generator[CodeWriter, None, None]:
+ """Create a finally block"""
+ self.writeln("finally:")
+ with self.indent():
+ yield self
+
+ @contextmanager
+ def with_(self, *contexts: str) -> Generator[CodeWriter, None, None]:
+ """Create a with statement"""
+ context_str = ", ".join(contexts)
+ self.writeln(f"with {context_str}:")
+ with self.indent():
+ yield self
+
+ def section(
+ self, title: str, char: str = "=", width: int = 70
+ ) -> CodeWriter:
+ """Create a commented section divider"""
+ self.blank_line()
+ self.comment(char * width)
+ self.comment(f" {title}")
+ self.comment(char * width)
+ self.blank_line()
+ return self
+
+ def module_docstring(self, text: str) -> CodeWriter:
+ """Write a module-level docstring"""
+ self.writeln(f'"""{text}"""')
+ self.blank_line()
+ return self
+
+ def assignment(
+ self, var: str, value: str, type_hint: Optional[str] = None
+ ) -> CodeWriter:
+ """Write a variable assignment"""
+ if type_hint:
+ self.writeln(f"{var}: {type_hint} = {value}")
+ else:
+ self.writeln(f"{var} = {value}")
+ return self
+
+ def return_(self, value: Optional[str] = None) -> CodeWriter:
+ """Write a return statement"""
+ if value:
+ self.writeln(f"return {value}")
+ else:
+ self.writeln("return")
+ return self
+
+ def raise_(
+ self, exception: str, message: Optional[str] = None
+ ) -> CodeWriter:
+ """Write a raise statement"""
+ if message:
+ self.writeln(f"raise {exception}({message!r})")
+ else:
+ self.writeln(f"raise {exception}")
+ return self
+
+ def yield_(self, value: str) -> CodeWriter:
+ """Write a yield statement"""
+ self.writeln(f"yield {value}")
+ return self
+
+ def assert_(
+ self, condition: str, message: Optional[str] = None
+ ) -> CodeWriter:
+ """Write an assert statement"""
+ if message:
+ self.writeln(f"assert {condition}, {message!r}")
+ else:
+ self.writeln(f"assert {condition}")
+ return self
+
+ def pass_(self) -> CodeWriter:
+ """Write a pass statement"""
+ self.writeln("pass")
+ return self
+
+ def break_(self) -> CodeWriter:
+ """Write a break statement"""
+ self.writeln("break")
+ return self
+
+ def continue_(self) -> CodeWriter:
+ """Write a continue statement"""
+ self.writeln("continue")
+ return self
+
+ def generate_init(
+ self, params_with_types: Mapping[str, str]
+ ) -> CodeWriter:
+ """Generate __init__ with automatic assignment"""
+ params = ["self"] + [
+ f"{name}: {type_}" for name, type_ in params_with_types.items()
+ ]
+ body = [f"self.{name} = {name}" for name in params_with_types.keys()]
+ self.write_function("__init__", params, body)
+ return self
+
+ def generate_repr(
+ self, class_name: str, fields: Iterable[str]
+ ) -> CodeWriter:
+ """Generate __repr__ method"""
+ field_strs = ", ".join([f"{f}={{self.{f}!r}}" for f in fields])
+ body = f"return f'{class_name}({field_strs})'"
+ self.write_function(
+ "__repr__", ["self"], body, return_type="builtins.str"
+ )
+ return self
+
+ def generate_eq(self, fields: Iterable[str]) -> CodeWriter:
+ """Generate __eq__ method"""
+ comparisons = " and ".join([f"self.{f} == other.{f}" for f in fields])
+ body = [
+ "if not isinstance(other, self.__class__):",
+ " return False",
+ f"return {comparisons}",
+ ]
+ self.write_function(
+ "__eq__", ["self", "other"], body, return_type="builtins.bool"
+ )
+ return self
+
+ def generate_str(
+ self, class_name: str, fields: Iterable[str]
+ ) -> CodeWriter:
+ """Generate __str__ method"""
+ field_strs = ", ".join([f"{f}={{self.{f}}}" for f in fields])
+ body = f"return f'{class_name}({field_strs})'"
+ self.write_function(
+ "__str__", ["self"], body, return_type="builtins.str"
+ )
+ return self
+
+ def generate_hash(self, fields: Iterable[str]) -> CodeWriter:
+ """Generate __hash__ method"""
+ if not fields:
+ body = "return builtins.hash(builtins.id(self))"
+ else:
+ field_tuple = ", ".join([f"self.{f}" for f in fields])
+ body = f"return builtins.hash(({field_tuple}))"
+ self.write_function(
+ "__hash__", ["self"], body, return_type="builtins.int"
+ )
+ return self
+
+ def write_block(self, lines: Iterable[str]) -> CodeWriter:
+ for line in lines:
+ self.writeln(line)
+ return self
+
+ def blank_line(self, count: int = 1) -> CodeWriter:
+ self._lines.extend([""] * count)
+ return self
+
+ def to_string(self) -> str:
+ return "\n".join(self._lines)
+
+ def to_lines(self) -> list[str]:
+ return self._lines
diff --git a/codegen/opentelemetry-codegen-json/test-requirements.txt b/codegen/opentelemetry-codegen-json/test-requirements.txt
new file mode 100644
index 0000000000..4353bc55b5
--- /dev/null
+++ b/codegen/opentelemetry-codegen-json/test-requirements.txt
@@ -0,0 +1,12 @@
+importlib-metadata==6.11.0
+iniconfig==2.0.0
+packaging==24.0
+pluggy==1.5.0
+protobuf==6.31.1
+pytest==7.4.4
+tomli==2.0.1
+typing_extensions==4.12.0
+zipp==3.19.2
+grpcio==1.78.0
+grpcio-tools==1.78.0
+-e codegen/opentelemetry-codegen-json
diff --git a/codegen/opentelemetry-codegen-json/tests/__init__.py b/codegen/opentelemetry-codegen-json/tests/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/codegen/opentelemetry-codegen-json/tests/proto/otel_test_json/common/v1/common.proto b/codegen/opentelemetry-codegen-json/tests/proto/otel_test_json/common/v1/common.proto
new file mode 100644
index 0000000000..2799bcbad9
--- /dev/null
+++ b/codegen/opentelemetry-codegen-json/tests/proto/otel_test_json/common/v1/common.proto
@@ -0,0 +1,22 @@
+// Copyright The OpenTelemetry Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package otel_test_json.common.v1;
+
+message InstrumentationScope {
+ string name = 1;
+ string version = 2;
+}
diff --git a/codegen/opentelemetry-codegen-json/tests/proto/otel_test_json/test/v1/complex.proto b/codegen/opentelemetry-codegen-json/tests/proto/otel_test_json/test/v1/complex.proto
new file mode 100644
index 0000000000..7ef705aab6
--- /dev/null
+++ b/codegen/opentelemetry-codegen-json/tests/proto/otel_test_json/test/v1/complex.proto
@@ -0,0 +1,70 @@
+// Copyright The OpenTelemetry Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package otel_test_json.test.v1;
+
+import "otel_test_json/common/v1/common.proto";
+
+message NumericTest {
+ double d_val = 1;
+ float f_val = 2;
+ int64 i64_val = 3;
+ uint64 u64_val = 4;
+ int32 i32_val = 5;
+ uint32 u32_val = 6;
+ sint32 si32_val = 7;
+ sint64 si64_val = 8;
+ fixed32 f32_val = 9;
+ fixed64 f64_val = 10;
+ sfixed32 sf32_val = 11;
+ sfixed64 sf64_val = 12;
+}
+
+message OneofSuite {
+ oneof group1 {
+ string g1_string = 1;
+ int32 g1_int = 2;
+ }
+ oneof group2 {
+ otel_test_json.common.v1.InstrumentationScope g2_message = 3;
+ NestedMessage g2_nested = 4;
+ }
+
+ message NestedMessage {
+ string hint = 1;
+ }
+}
+
+message OptionalScalar {
+ optional string opt_string = 1;
+ optional int32 opt_int = 2;
+ optional bool opt_bool = 3;
+}
+
+message NestedEnumSuite {
+ enum NestedEnum {
+ NESTED_UNSPECIFIED = 0;
+ NESTED_FOO = 1;
+ NESTED_BAR = 2;
+ }
+ NestedEnum nested = 1;
+ repeated NestedEnum repeated_nested = 2;
+}
+
+message DeeplyNested {
+ string value = 1;
+ DeeplyNested next = 2;
+}
diff --git a/codegen/opentelemetry-codegen-json/tests/proto/otel_test_json/test/v1/test.proto b/codegen/opentelemetry-codegen-json/tests/proto/otel_test_json/test/v1/test.proto
new file mode 100644
index 0000000000..34968a7ae3
--- /dev/null
+++ b/codegen/opentelemetry-codegen-json/tests/proto/otel_test_json/test/v1/test.proto
@@ -0,0 +1,59 @@
+// Copyright The OpenTelemetry Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package otel_test_json.test.v1;
+
+message TestMessage {
+ enum TestEnum {
+ UNSPECIFIED = 0;
+ SUCCESS = 1;
+ FAILURE = 2;
+ }
+
+ // Scalar types
+ string name = 1;
+ int32 int_value = 2;
+ bool bool_value = 3;
+ double double_value = 4;
+
+ // Special OTLP types
+ int64 int64_value = 5;
+ uint64 uint64_value = 6;
+ bytes bytes_value = 7;
+
+ // Hex encoded fields (per OTLP spec)
+ bytes trace_id = 8;
+ bytes span_id = 9;
+
+ // Repeated fields
+ repeated string list_strings = 10;
+ repeated int32 list_ints = 11;
+
+ // Nested message and enum
+ TestEnum enum_value = 12;
+ SubMessage sub_message = 13;
+ repeated SubMessage list_messages = 14;
+
+ // Oneof group
+ oneof test_oneof {
+ string oneof_string = 15;
+ int32 oneof_int = 16;
+ }
+}
+
+message SubMessage {
+ string content = 1;
+}
diff --git a/codegen/opentelemetry-codegen-json/tests/proto/otel_test_json/trace/v1/trace.proto b/codegen/opentelemetry-codegen-json/tests/proto/otel_test_json/trace/v1/trace.proto
new file mode 100644
index 0000000000..ce98ecb776
--- /dev/null
+++ b/codegen/opentelemetry-codegen-json/tests/proto/otel_test_json/trace/v1/trace.proto
@@ -0,0 +1,24 @@
+// Copyright The OpenTelemetry Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package otel_test_json.trace.v1;
+
+import "otel_test_json/common/v1/common.proto";
+
+message Span {
+ string name = 1;
+ otel_test_json.common.v1.InstrumentationScope scope = 2;
+}
diff --git a/codegen/opentelemetry-codegen-json/tests/test_end_to_end.py b/codegen/opentelemetry-codegen-json/tests/test_end_to_end.py
new file mode 100644
index 0000000000..a47734713c
--- /dev/null
+++ b/codegen/opentelemetry-codegen-json/tests/test_end_to_end.py
@@ -0,0 +1,337 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# ruff: noqa: PLC0415
+
+import json
+import math
+import shutil
+import subprocess
+import sys
+from pathlib import Path
+from typing import Any, Generator
+
+import pytest
+
+PROJECT_ROOT = Path(__file__).parent.parent
+SRC_PATH = PROJECT_ROOT / "src"
+PROTO_PATH = Path(__file__).parent / "proto"
+GEN_PATH = Path(__file__).parent / "generated"
+
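+# Executable wrapper so grpc_tools.protoc can invoke the plugin straight from the source tree.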
+WRAPPER_CONTENT = f"""#!/usr/bin/env python3
+import sys
+sys.path.insert(0, "{SRC_PATH.absolute()}")
+from opentelemetry.codegen.json.plugin import main
+if __name__ == "__main__":
+ main()
+"""
+
+
+@pytest.fixture(scope="module", autouse=True)
+def generate_code() -> Generator[None, None, None]:
+ if GEN_PATH.exists():
+ shutil.rmtree(GEN_PATH)
+ GEN_PATH.mkdir(parents=True)
+
+ wrapper_path = PROJECT_ROOT / "protoc-gen-otlp_json_wrapper.py"
+ wrapper_path.write_text(WRAPPER_CONTENT)
+ wrapper_path.chmod(0o755)
+
+ try:
+ protos = list(PROTO_PATH.glob("**/*.proto"))
+ proto_files = [str(p.relative_to(PROTO_PATH)) for p in protos]
+
+ subprocess.check_call(
+ [
+ sys.executable,
+ "-m",
+ "grpc_tools.protoc",
+ f"-I{PROTO_PATH}",
+ f"--plugin=protoc-gen-otlp_json={wrapper_path}",
+ f"--otlp_json_out={GEN_PATH}",
+ *proto_files,
+ ]
+ )
+
+ sys.path.insert(0, str(GEN_PATH.absolute()))
+ yield
+ finally:
+ if wrapper_path.exists():
+ wrapper_path.unlink()
+
+
+def test_generated_message_roundtrip() -> None:
+ from otel_test_json.test.v1.test import ( # type: ignore
+ SubMessage,
+ TestMessage,
+ )
+
+ msg = TestMessage(
+ name="test",
+ int_value=123,
+ bool_value=True,
+ double_value=1.5,
+ int64_value=9223372036854775807,
+ uint64_value=18446744073709551615,
+ bytes_value=b"hello",
+ trace_id=b"\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\x0d\x0e\x0f",
+ span_id=b"\x00\x01\x02\x03\x04\x05\x06",
+ list_strings=["a", "b"],
+ list_ints=[1, 2],
+ enum_value=TestMessage.TestEnum.SUCCESS,
+ sub_message=SubMessage(content="sub"),
+ list_messages=[SubMessage(content="m1"), SubMessage(content="m2")],
+ oneof_string="oneof",
+ )
+
+ json_str = msg.to_json()
+ data = json.loads(json_str)
+
+ assert data["name"] == "test"
+ assert data["intValue"] == 123
+ assert data["boolValue"] is True
+ assert data["doubleValue"] == 1.5
+ assert data["int64Value"] == "9223372036854775807"
+ assert data["uint64Value"] == "18446744073709551615"
+ assert data["bytesValue"] == "aGVsbG8="
+ assert data["traceId"] == "000102030405060708090a0b0c0d0e0f"
+ assert data["spanId"] == "00010203040506"
+ assert data["listStrings"] == ["a", "b"]
+ assert data["listInts"] == [1, 2]
+ assert data["enumValue"] == 1
+ assert data["subMessage"]["content"] == "sub"
+ assert len(data["listMessages"]) == 2
+ assert data["oneofString"] == "oneof"
+ assert "oneofInt" not in data
+
+ new_msg = TestMessage.from_json(json_str)
+ assert new_msg == msg
+
+
+def test_cross_reference() -> None:
+ from otel_test_json.common.v1.common import (
+ InstrumentationScope, # type: ignore
+ )
+ from otel_test_json.trace.v1.trace import Span # type: ignore
+
+ span = Span(
+ name="my-span",
+ scope=InstrumentationScope(name="my-scope", version="1.0.0"),
+ )
+
+ json_str = span.to_json()
+ data = json.loads(json_str)
+
+ assert data["name"] == "my-span"
+ assert data["scope"]["name"] == "my-scope"
+ assert data["scope"]["version"] == "1.0.0"
+
+ new_span = Span.from_json(json_str)
+ assert new_span == span
+
+
+@pytest.mark.parametrize(
+ "field, value, expected_json_val",
+ [
+ ("d_val", float("nan"), "NaN"),
+ ("d_val", float("inf"), "Infinity"),
+ ("d_val", float("-inf"), "-Infinity"),
+ ("d_val", 0.0, None), # Default values are omitted
+ ("d_val", -0.0, None),
+ ("i64_val", 9223372036854775807, "9223372036854775807"),
+ ("i64_val", -9223372036854775808, "-9223372036854775808"),
+ ("u64_val", 18446744073709551615, "18446744073709551615"),
+ ("i32_val", 2147483647, 2147483647),
+ ("i32_val", -2147483648, -2147483648),
+ ("u32_val", 4294967295, 4294967295),
+ ("si32_val", -123, -123),
+ ("si64_val", -456, "-456"),
+ ("f32_val", 789, 789),
+ ("f64_val", 101112, "101112"),
+ ("sf32_val", -131415, -131415),
+ ("sf64_val", -161718, "-161718"),
+ ],
+)
+def test_numeric_types(field: str, value: Any, expected_json_val: Any) -> None:
+ from otel_test_json.test.v1.complex import NumericTest # type: ignore
+
+ msg = NumericTest(**{field: value})
+ data = msg.to_dict()
+
+ # Convert snake_case to lowerCamelCase for lookup
+ components = field.split("_")
+ json_field = components[0] + "".join(x.title() for x in components[1:])
+
+ if expected_json_val is None:
+ assert json_field not in data
+ else:
+ assert data[json_field] == expected_json_val
+
+ new_msg = NumericTest.from_dict(data)
+ if isinstance(value, float) and math.isnan(value):
+ assert math.isnan(getattr(new_msg, field))
+ else:
+ assert getattr(new_msg, field) == value
+
+
+@pytest.mark.parametrize(
+ "kwargs, expected_data",
+ [
+ ({"g1_string": "hello"}, {"g1String": "hello"}),
+ ({"g1_int": 42}, {"g1Int": 42}),
+ ({"g2_message": {"name": "scope"}}, {"g2Message": {"name": "scope"}}),
+ ({"g2_nested": {"hint": "test"}}, {"g2Nested": {"hint": "test"}}),
+ ],
+)
+def test_oneof_suite_variants(
+ kwargs: dict[str, Any], expected_data: dict[str, Any]
+) -> None:
+ from otel_test_json.common.v1.common import (
+ InstrumentationScope, # type: ignore
+ )
+ from otel_test_json.test.v1.complex import OneofSuite # type: ignore
+
+ processed_kwargs = {}
+ for k, v in kwargs.items():
+ if k == "g2_message":
+ processed_kwargs[k] = InstrumentationScope(**v)
+ elif k == "g2_nested":
+ processed_kwargs[k] = OneofSuite.NestedMessage(**v)
+ else:
+ processed_kwargs[k] = v
+
+ msg = OneofSuite(**processed_kwargs)
+ data = msg.to_dict()
+ assert data == expected_data
+
+ new_msg = OneofSuite.from_dict(data)
+ for k, v in processed_kwargs.items():
+ assert getattr(new_msg, k) == v
+
+
+@pytest.mark.parametrize(
+ "kwargs, expected_dict",
+ [
+ ({}, {}),
+ ({"opt_string": ""}, {}),
+ ({"opt_string": "foo"}, {"optString": "foo"}),
+ ({"opt_int": 0}, {}),
+ ({"opt_int": 42}, {"optInt": 42}),
+ ({"opt_bool": False}, {}),
+ ({"opt_bool": True}, {"optBool": True}),
+ ],
+)
+def test_optional_scalars(
+ kwargs: dict[str, Any], expected_dict: dict[str, Any]
+) -> None:
+ from otel_test_json.test.v1.complex import OptionalScalar # type: ignore
+
+ msg = OptionalScalar(**kwargs)
+ assert msg.to_dict() == expected_dict
+ assert OptionalScalar.from_dict(expected_dict) == msg
+
+
+def test_nested_enum_suite() -> None:
+ from otel_test_json.test.v1.complex import NestedEnumSuite # type: ignore
+
+ msg = NestedEnumSuite(
+ nested=NestedEnumSuite.NestedEnum.NESTED_FOO,
+ repeated_nested=[
+ NestedEnumSuite.NestedEnum.NESTED_FOO,
+ NestedEnumSuite.NestedEnum.NESTED_BAR,
+ ],
+ )
+
+ data = msg.to_dict()
+ assert data["nested"] == 1
+ assert data["repeatedNested"] == [1, 2]
+
+ new_msg = NestedEnumSuite.from_dict(data)
+ assert new_msg.nested == NestedEnumSuite.NestedEnum.NESTED_FOO
+ assert new_msg.repeated_nested == msg.repeated_nested
+
+
+def test_deeply_nested() -> None:
+ from otel_test_json.test.v1.complex import DeeplyNested # type: ignore
+
+ msg = DeeplyNested(
+ value="1",
+ next=DeeplyNested(value="2", next=DeeplyNested(value="3")),
+ )
+
+ data = msg.to_dict()
+ assert data["value"] == "1"
+ assert data["next"]["value"] == "2"
+ assert data["next"]["next"]["value"] == "3"
+
+ new_msg = DeeplyNested.from_dict(data)
+ assert new_msg.value == "1"
+ assert new_msg.next.value == "2"
+ assert new_msg.next.next.value == "3"
+
+
+@pytest.mark.parametrize(
+ "data, expected_name, expected_int",
+ [
+ ({"name": None, "intValue": None}, "", 0),
+ ({"name": "test"}, "test", 0),
+ ({"intValue": 42}, "", 42),
+ ],
+)
+def test_defaults_and_none(
+ data: dict[str, Any], expected_name: str, expected_int: int
+) -> None:
+ from otel_test_json.test.v1.test import TestMessage # type: ignore
+
+ msg = TestMessage.from_dict(data)
+ assert msg.name == expected_name
+ assert msg.int_value == expected_int
+
+
+@pytest.mark.parametrize(
+ "data, expected_error, match",
+ [
+ ({"intValue": "not an int"}, TypeError, "expected "),
+ ({"traceId": "invalid hex"}, ValueError, "Invalid hex string"),
+ ({"listStrings": "not a list"}, TypeError, "expected "),
+ ({"name": 123}, TypeError, "expected "),
+ ({"subMessage": "not a dict"}, TypeError, "expected "),
+ ({"enumValue": "SUCCESS"}, TypeError, "expected "),
+ ({"listMessages": [None]}, TypeError, "expected "),
+ ],
+)
+def test_validation_errors(
+ data: dict[str, Any], expected_error: type, match: str
+) -> None:
+ from otel_test_json.test.v1.test import TestMessage # type: ignore
+
+ with pytest.raises(
+ expected_error,
+        match=match if issubclass(expected_error, TypeError) else None,
+ ):
+ TestMessage.from_dict(data)
+
+
+def test_unknown_fields_ignored() -> None:
+ from otel_test_json.test.v1.test import TestMessage # type: ignore
+
+ # Unknown fields should be ignored for forward compatibility
+ data = {
+ "name": "test",
+ "unknownField": "should be ignored",
+ "intValue": 10,
+ }
+ msg = TestMessage.from_dict(data)
+ assert msg.name == "test"
+ assert msg.int_value == 10
diff --git a/codegen/opentelemetry-codegen-json/tests/test_otlp_json_utils.py b/codegen/opentelemetry-codegen-json/tests/test_otlp_json_utils.py
new file mode 100644
index 0000000000..51c1aee6d8
--- /dev/null
+++ b/codegen/opentelemetry-codegen-json/tests/test_otlp_json_utils.py
@@ -0,0 +1,203 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import math
+from typing import Optional, Union
+
+import pytest
+
+from opentelemetry.codegen.json.runtime.otlp_json_utils import (
+ decode_base64,
+ decode_float,
+ decode_hex,
+ decode_int64,
+ decode_repeated,
+ encode_base64,
+ encode_float,
+ encode_hex,
+ encode_int64,
+ encode_repeated,
+ validate_type,
+)
+
+
+@pytest.mark.parametrize(
+ "value, expected",
+ [
+ (b"\x01\x02\x03", "010203"),
+ (b"", ""),
+ (None, ""),
+ ],
+)
+def test_encode_hex(value: Optional[bytes], expected: str) -> None:
+ assert encode_hex(value) == expected
+
+
+@pytest.mark.parametrize(
+ "value, expected",
+ [
+ ("010203", b"\x01\x02\x03"),
+ ("", b""),
+ (None, b""),
+ ],
+)
+def test_decode_hex(value: Optional[str], expected: bytes) -> None:
+ assert decode_hex(value, "field") == expected
+
+
+def test_decode_hex_errors() -> None:
+ with pytest.raises(TypeError):
+ decode_hex(123, "field") # type: ignore
+ with pytest.raises(ValueError, match="Invalid hex string"):
+ decode_hex("not hex", "field")
+
+
+@pytest.mark.parametrize(
+ "value, expected",
+ [
+ (b"hello", "aGVsbG8="),
+ (b"", ""),
+ (None, ""),
+ ],
+)
+def test_encode_base64(value: Optional[bytes], expected: str) -> None:
+ assert encode_base64(value) == expected
+
+
+@pytest.mark.parametrize(
+ "value, expected",
+ [
+ ("aGVsbG8=", b"hello"),
+ ("", b""),
+ (None, b""),
+ ],
+)
+def test_decode_base64(value: Optional[str], expected: bytes) -> None:
+ assert decode_base64(value, "field") == expected
+
+
+def test_decode_base64_errors() -> None:
+ with pytest.raises(TypeError):
+ decode_base64(123, "field") # type: ignore
+
+
+@pytest.mark.parametrize(
+ "value, expected",
+ [
+ (123, "123"),
+ (0, "0"),
+ (-1, "-1"),
+ ],
+)
+def test_encode_int64(value: int, expected: str) -> None:
+ assert encode_int64(value) == expected
+
+
+@pytest.mark.parametrize(
+ "value, expected",
+ [
+ ("123", 123),
+ (123, 123),
+ (None, 0),
+ ],
+)
+def test_decode_int64(value: Optional[Union[int, str]], expected: int) -> None:
+ assert decode_int64(value, "field") == expected
+
+
+def test_decode_int64_errors() -> None:
+ with pytest.raises(TypeError):
+ decode_int64([], "field") # type: ignore
+ with pytest.raises(ValueError, match="Invalid int64 value"):
+ decode_int64("abc", "field")
+
+
+@pytest.mark.parametrize(
+ "value, expected",
+ [
+ (1.5, 1.5),
+ (float("nan"), "NaN"),
+ (float("inf"), "Infinity"),
+ (float("-inf"), "-Infinity"),
+ ],
+)
+def test_encode_float(value: float, expected: Union[float, str]) -> None:
+ result = encode_float(value)
+ if isinstance(expected, float) and math.isnan(expected):
+ assert math.isnan(result) # type: ignore
+ else:
+ assert result == expected
+
+
+@pytest.mark.parametrize(
+ "value, expected",
+ [
+ (1.5, 1.5),
+ ("1.5", 1.5),
+ (1, 1.0),
+ ("NaN", math.nan),
+ ("Infinity", math.inf),
+ ("-Infinity", -math.inf),
+ (None, 0.0),
+ ],
+)
+def test_decode_float(
+ value: Optional[Union[float, int, str]], expected: float
+) -> None:
+ result = decode_float(value, "field")
+ if math.isnan(expected):
+ assert math.isnan(result)
+ else:
+ assert result == expected
+
+
+def test_decode_float_errors() -> None:
+ with pytest.raises(TypeError):
+ decode_float([], "field") # type: ignore
+ with pytest.raises(ValueError, match="Invalid float value"):
+ decode_float("abc", "field")
+
+
+def test_repeated_fields() -> None:
+ values = [1, 2, 3]
+ assert encode_repeated(values, str) == ["1", "2", "3"]
+ assert encode_repeated([], str) == []
+ assert encode_repeated(None, str) == [] # type: ignore
+
+ assert decode_repeated(["1", "2"], int, "field") == [1, 2]
+ assert decode_repeated([], int, "field") == []
+ assert decode_repeated(None, int, "field") == []
+
+
+def test_decode_repeated_errors() -> None:
+ with pytest.raises(TypeError):
+ decode_repeated("not a list", lambda x: x, "field") # type: ignore
+
+
+def test_validate_type() -> None:
+ validate_type("s", str, "field")
+ validate_type(1, int, "field")
+ validate_type(1, (int, str), "field")
+ validate_type("s", (int, str), "field")
+
+ with pytest.raises(
+        TypeError, match="Field 'field' expected <class 'int'>, got str"
+ ):
+ validate_type("s", int, "field")
+
+ with pytest.raises(
+ TypeError,
+        match=r"Field 'field' expected \(<class 'int'>, <class 'float'>\), got str",
+ ):
+ validate_type("s", (int, float), "field")
diff --git a/codegen/opentelemetry-codegen-json/tests/test_types.py b/codegen/opentelemetry-codegen-json/tests/test_types.py
new file mode 100644
index 0000000000..bed82b0f59
--- /dev/null
+++ b/codegen/opentelemetry-codegen-json/tests/test_types.py
@@ -0,0 +1,154 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+from google.protobuf import descriptor_pb2 as descriptor
+
+from opentelemetry.codegen.json.types import (
+ get_default_value,
+ get_json_allowed_types,
+ get_python_type,
+ is_bytes_type,
+ is_hex_encoded_field,
+ is_int64_type,
+ is_numeric_type,
+ to_json_field_name,
+)
+
+
+@pytest.mark.parametrize(
+ "proto_type, expected",
+ [
+ (descriptor.FieldDescriptorProto.TYPE_DOUBLE, "builtins.float"),
+ (descriptor.FieldDescriptorProto.TYPE_INT64, "builtins.int"),
+ (descriptor.FieldDescriptorProto.TYPE_BOOL, "builtins.bool"),
+ (descriptor.FieldDescriptorProto.TYPE_STRING, "builtins.str"),
+ (descriptor.FieldDescriptorProto.TYPE_BYTES, "builtins.bytes"),
+ (999, "typing.Any"),
+ ],
+)
+def test_get_python_type(proto_type: int, expected: str) -> None:
+ assert get_python_type(proto_type) == expected
+
+
+@pytest.mark.parametrize(
+ "proto_type, expected",
+ [
+ (descriptor.FieldDescriptorProto.TYPE_DOUBLE, "0.0"),
+ (descriptor.FieldDescriptorProto.TYPE_INT64, "0"),
+ (descriptor.FieldDescriptorProto.TYPE_BOOL, "False"),
+ (descriptor.FieldDescriptorProto.TYPE_STRING, '""'),
+ (descriptor.FieldDescriptorProto.TYPE_BYTES, 'b""'),
+ (999, "None"),
+ ],
+)
+def test_get_default_value(proto_type: int, expected: str) -> None:
+ assert get_default_value(proto_type) == expected
+
+
+@pytest.mark.parametrize(
+ "proto_type, expected",
+ [
+ (descriptor.FieldDescriptorProto.TYPE_INT64, True),
+ (descriptor.FieldDescriptorProto.TYPE_UINT64, True),
+ (descriptor.FieldDescriptorProto.TYPE_FIXED64, True),
+ (descriptor.FieldDescriptorProto.TYPE_SFIXED64, True),
+ (descriptor.FieldDescriptorProto.TYPE_SINT64, True),
+ (descriptor.FieldDescriptorProto.TYPE_INT32, False),
+ (descriptor.FieldDescriptorProto.TYPE_STRING, False),
+ ],
+)
+def test_is_int64_type(proto_type: int, expected: bool) -> None:
+ assert is_int64_type(proto_type) == expected
+
+
+@pytest.mark.parametrize(
+ "proto_type, expected",
+ [
+ (descriptor.FieldDescriptorProto.TYPE_BYTES, True),
+ (descriptor.FieldDescriptorProto.TYPE_STRING, False),
+ ],
+)
+def test_is_bytes_type(proto_type: int, expected: bool) -> None:
+ assert is_bytes_type(proto_type) == expected
+
+
+@pytest.mark.parametrize(
+ "field_name, expected",
+ [
+ ("trace_id", True),
+ ("span_id", True),
+ ("parent_span_id", True),
+ ("name", False),
+ ("time_unix_nano", False),
+ ],
+)
+def test_is_hex_encoded_field(field_name: str, expected: bool) -> None:
+ assert is_hex_encoded_field(field_name) == expected
+
+
+@pytest.mark.parametrize(
+ "snake_name, expected",
+ [
+ ("name", "name"),
+ ("start_time_unix_nano", "startTimeUnixNano"),
+ ("trace_id", "traceId"),
+ ("multiple___underscores", "multipleUnderscores"),
+ ],
+)
+def test_to_json_field_name(snake_name: str, expected: str) -> None:
+ assert to_json_field_name(snake_name) == expected
+
+
+@pytest.mark.parametrize(
+ "proto_type, expected",
+ [
+ (descriptor.FieldDescriptorProto.TYPE_DOUBLE, True),
+ (descriptor.FieldDescriptorProto.TYPE_INT64, True),
+ (descriptor.FieldDescriptorProto.TYPE_INT32, True),
+ (descriptor.FieldDescriptorProto.TYPE_BOOL, False),
+ (descriptor.FieldDescriptorProto.TYPE_STRING, False),
+ ],
+)
+def test_is_numeric_type(proto_type: int, expected: bool) -> None:
+ assert is_numeric_type(proto_type) == expected
+
+
+@pytest.mark.parametrize(
+ "proto_type, field_name, expected",
+ [
+ (descriptor.FieldDescriptorProto.TYPE_BYTES, "data", "builtins.str"),
+ (
+ descriptor.FieldDescriptorProto.TYPE_STRING,
+ "trace_id",
+ "builtins.str",
+ ),
+ (
+ descriptor.FieldDescriptorProto.TYPE_INT64,
+ "count",
+ "(builtins.int, builtins.str)",
+ ),
+ (
+ descriptor.FieldDescriptorProto.TYPE_DOUBLE,
+ "value",
+ "(builtins.float, builtins.int, builtins.str)",
+ ),
+ (descriptor.FieldDescriptorProto.TYPE_BOOL, "flag", "builtins.bool"),
+ (descriptor.FieldDescriptorProto.TYPE_INT32, "id", "builtins.int"),
+ ],
+)
+def test_get_json_allowed_types(
+ proto_type: int, field_name: str, expected: str
+) -> None:
+ assert get_json_allowed_types(proto_type, field_name) == expected
diff --git a/codegen/opentelemetry-codegen-json/tests/test_writer.py b/codegen/opentelemetry-codegen-json/tests/test_writer.py
new file mode 100644
index 0000000000..4c53a5f063
--- /dev/null
+++ b/codegen/opentelemetry-codegen-json/tests/test_writer.py
@@ -0,0 +1,373 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from typing import Any, Optional
+
+import pytest
+
+from opentelemetry.codegen.json.writer import CodeWriter
+
+
+def test_initialization() -> None:
+ writer = CodeWriter(indent_size=2)
+ assert writer._indent_size == 2
+ assert writer.to_lines() == []
+ assert writer.to_string() == ""
+
+
+def test_writeln_indentation() -> None:
+ writer = CodeWriter(indent_size=4)
+ writer.writeln("line1")
+ with writer.indent():
+ writer.writeln("line2")
+ with writer.indent():
+ writer.writeln("line3")
+ writer.writeln("line4")
+
+ expected = ["line1", " line2", " line3", "line4"]
+ assert writer.to_lines() == expected
+
+
+def test_writemany() -> None:
+ writer = CodeWriter()
+ writer.writemany("a", "b", "c")
+ assert writer.to_lines() == ["a", "b", "c"]
+
+
+@pytest.mark.parametrize(
+ "content, expected",
+ [
+ ("single line", ["# single line"]),
+ (["line1", "line2"], ["# line1", "# line2"]),
+ ],
+)
+def test_comment(content: str, expected: list[str]) -> None:
+ writer = CodeWriter()
+ writer.comment(content)
+ assert writer.to_lines() == expected
+
+
+@pytest.mark.parametrize(
+ "content, expected",
+ [
+ ("single line", ['"""single line"""']),
+ (["line1", "line2"], ['"""', "line1", "line2", '"""']),
+ ],
+)
+def test_docstring(content: str, expected: list[str]) -> None:
+ writer = CodeWriter()
+ writer.docstring(content)
+ assert writer.to_lines() == expected
+
+
+@pytest.mark.parametrize(
+ "module, items, expected",
+ [
+ ("os", [], ["import os"]),
+ ("typing", ["Any", "Optional"], ["from typing import Any, Optional"]),
+ ],
+)
+def test_import(module: str, items: list[str], expected: list[str]) -> None:
+ writer = CodeWriter()
+ writer.import_(module, *items)
+ assert writer.to_lines() == expected
+
+
+def test_suite() -> None:
+ writer = CodeWriter()
+ with writer.suite("def foo():"):
+ writer.writeln("pass")
+ assert writer.to_lines() == ["def foo():", " pass"]
+
+
+@pytest.mark.parametrize(
+ "name, bases, decorators, expected",
+ [
+ ("MyClass", None, None, ["class MyClass:"]),
+ ("MyClass", ["Base"], ["deco"], ["@deco", "class MyClass(Base):"]),
+ (
+ "MyClass",
+ ["B1", "B2"],
+ ["d1", "d2"],
+ ["@d1", "@d2", "class MyClass(B1, B2):"],
+ ),
+ ],
+)
+def test_class(
+ name: str,
+    bases: Optional[list[str]],
+ decorators: Optional[list[str]],
+ expected: list[str],
+) -> None:
+ writer = CodeWriter()
+ with writer.class_(name, bases=bases, decorators=decorators):
+ pass
+ assert writer.to_lines() == expected
+
+
+@pytest.mark.parametrize(
+ "kwargs, expected",
+ [
+ ({"name": "DC"}, ["@dataclasses.dataclass", "class DC:"]),
+ (
+ {"name": "DC", "frozen": True},
+ ["@dataclasses.dataclass(frozen=True)", "class DC:"],
+ ),
+ (
+ {"name": "DC", "slots": True},
+ ["@dataclasses.dataclass(slots=True)", "class DC:"],
+ ),
+ (
+ {"name": "DC", "frozen": True, "slots": True},
+ ["@dataclasses.dataclass(frozen=True, slots=True)", "class DC:"],
+ ),
+ (
+ {"name": "DC", "decorator_name": "custom.dc"},
+ ["@custom.dc", "class DC:"],
+ ),
+ ],
+)
+def test_dataclass(kwargs: dict[str, Any], expected: list[str]) -> None:
+ writer = CodeWriter()
+ with writer.dataclass(**kwargs):
+ pass
+ assert writer.to_lines() == expected
+
+
+def test_enum() -> None:
+ writer = CodeWriter()
+ with writer.enum("MyEnum", bases=["IntEnum"]):
+ writer.enum_member("A", 1)
+ writer.auto_enum_member("B")
+ expected = [
+ "class MyEnum(enum.Enum, IntEnum):",
+ " A = 1",
+ " B = enum.auto()",
+ ]
+ assert writer.to_lines() == expected
+
+
+@pytest.mark.parametrize(
+ "name, type_hint, default, default_factory, expected",
+ [
+ ("x", "int", None, None, ["x: int"]),
+ ("x", "int", 10, None, ["x: int = 10"]),
+ (
+ "x",
+ "list",
+ None,
+ "list",
+ ["x: list = dataclasses.field(default_factory=list)"],
+ ),
+ ],
+)
+def test_field(
+ name: str,
+ type_hint: str,
+ default: Optional[Any],
+ default_factory: Optional[Any],
+ expected: list[str],
+) -> None:
+ writer = CodeWriter()
+ writer.field(
+ name, type_hint, default=default, default_factory=default_factory
+ )
+ assert writer.to_lines() == expected
+
+
+def test_function() -> None:
+ writer = CodeWriter()
+ with writer.function("foo", ["a: int", "b: str"], return_type="bool"):
+ writer.return_("True")
+ expected = ["def foo(a: int, b: str) -> bool:", " return True"]
+ assert writer.to_lines() == expected
+
+
+def test_write_function() -> None:
+ writer = CodeWriter()
+ writer.write_function("bar", "x", ["return x * 2"], decorators=["deco"])
+ expected = ["@deco", "def bar(x):", " return x * 2"]
+ assert writer.to_lines() == expected
+
+
+def test_special_methods() -> None:
+ writer = CodeWriter()
+ writer.staticmethod_("s", "x", "pass")
+ writer.classmethod_("c", "cls", "pass")
+ assert "@builtins.staticmethod" in writer.to_string()
+ assert "@builtins.classmethod" in writer.to_string()
+
+
+def test_control_flow() -> None:
+ writer = CodeWriter()
+ with writer.if_("a > b"):
+ writer.pass_()
+ with writer.elif_("a == b"):
+ writer.break_()
+ with writer.else_():
+ writer.continue_()
+
+ expected = [
+ "if a > b:",
+ " pass",
+ "elif a == b:",
+ " break",
+ "else:",
+ " continue",
+ ]
+ assert writer.to_lines() == expected
+
+
+def test_loops() -> None:
+ writer = CodeWriter()
+ with writer.for_("i", "range(10)"):
+ writer.writeln("print(i)")
+ with writer.while_("True"):
+ writer.writeln("break")
+
+ expected = [
+ "for i in range(10):",
+ " print(i)",
+ "while True:",
+ " break",
+ ]
+ assert writer.to_lines() == expected
+
+
+def test_try_except_finally() -> None:
+ writer = CodeWriter()
+ with writer.try_():
+ writer.raise_("ValueError", "oops")
+ with writer.except_("ValueError", as_var="e"):
+ writer.writeln("print(e)")
+ with writer.finally_():
+ writer.pass_()
+
+ expected = [
+ "try:",
+ " raise ValueError('oops')",
+ "except ValueError as e:",
+ " print(e)",
+ "finally:",
+ " pass",
+ ]
+ assert writer.to_lines() == expected
+
+
+def test_with() -> None:
+ writer = CodeWriter()
+ with writer.with_("open('f') as f", "open('g') as g"):
+ writer.pass_()
+ assert writer.to_lines() == [
+ "with open('f') as f, open('g') as g:",
+ " pass",
+ ]
+
+
+def test_assignment_and_assertions() -> None:
+ writer = CodeWriter()
+ writer.assignment("x", "1", type_hint="int")
+ writer.assert_("x == 1", "must be 1")
+ writer.yield_("x")
+
+ expected = ["x: int = 1", "assert x == 1, 'must be 1'", "yield x"]
+ assert writer.to_lines() == expected
+
+
+def test_generate_init() -> None:
+ writer = CodeWriter()
+ writer.generate_init({"a": "int", "b": "str"})
+ expected = [
+ "def __init__(self, a: int, b: str):",
+ " self.a = a",
+ " self.b = b",
+ ]
+ assert writer.to_lines() == expected
+
+
+def test_generate_repr() -> None:
+ writer = CodeWriter()
+ writer.generate_repr("Point", ["x", "y"])
+ expected = [
+ "def __repr__(self) -> builtins.str:",
+ " return f'Point(x={self.x!r}, y={self.y!r})'",
+ ]
+ assert writer.to_lines() == expected
+
+
+def test_generate_eq() -> None:
+ writer = CodeWriter()
+ writer.generate_eq(["x", "y"])
+ expected = [
+ "def __eq__(self, other) -> builtins.bool:",
+ " if not isinstance(other, self.__class__):",
+ " return False",
+ " return self.x == other.x and self.y == other.y",
+ ]
+ assert writer.to_lines() == expected
+
+
+def test_generate_hash() -> None:
+ writer = CodeWriter()
+ writer.generate_hash(["x", "y"])
+ expected = [
+ "def __hash__(self) -> builtins.int:",
+ " return builtins.hash((self.x, self.y))",
+ ]
+ assert writer.to_lines() == expected
+
+
+def test_generate_hash_empty() -> None:
+ writer = CodeWriter()
+ writer.generate_hash([])
+ expected = [
+ "def __hash__(self) -> builtins.int:",
+ " return builtins.hash(builtins.id(self))",
+ ]
+ assert writer.to_lines() == expected
+
+
+def test_write_block() -> None:
+ writer = CodeWriter()
+ writer.write_block(["line1", "line2"])
+ assert writer.to_lines() == ["line1", "line2"]
+
+
+def test_method_alias() -> None:
+ writer = CodeWriter()
+ with writer.method("m", "self"):
+ writer.pass_()
+ assert "def m(self):" in writer.to_string()
+
+
+def test_generate_str() -> None:
+ writer = CodeWriter()
+ writer.generate_str("Point", ["x", "y"])
+ expected = [
+ "def __str__(self) -> builtins.str:",
+ " return f'Point(x={self.x}, y={self.y})'",
+ ]
+ assert writer.to_lines() == expected
+
+
+def test_formatting_utilities() -> None:
+ writer = CodeWriter()
+ writer.module_docstring("Module doc")
+ writer.section("Title", char="-", width=10)
+ writer.blank_line(2)
+
+ output = writer.to_string()
+ assert '"""Module doc"""' in output
+ assert "# ----------" in output
+ assert "# Title" in output
+ assert output.count("\n\n") >= 2
diff --git a/opentelemetry-proto-json/LICENSE b/opentelemetry-proto-json/LICENSE
new file mode 100644
index 0000000000..261eeb9e9f
--- /dev/null
+++ b/opentelemetry-proto-json/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/opentelemetry-proto-json/README.rst b/opentelemetry-proto-json/README.rst
new file mode 100644
index 0000000000..748fed3bc5
--- /dev/null
+++ b/opentelemetry-proto-json/README.rst
@@ -0,0 +1,39 @@
+OpenTelemetry Python Proto JSON
+================================
+
+|pypi|
+
+.. |pypi| image:: https://badge.fury.io/py/opentelemetry-proto-json.svg
+ :target: https://pypi.org/project/opentelemetry-proto-json/
+
+This library contains the generated code for the OpenTelemetry protobuf data model with JSON encoding support. The code in the current package was generated using the v1.9.0 release_ of opentelemetry-proto and includes definitions for the OpenTelemetry JSON Protobuf encoding specification.
+
+.. _release: https://github.com/open-telemetry/opentelemetry-proto/releases/tag/v1.9.0
+
+Installation
+------------
+
+::
+
+ pip install opentelemetry-proto-json
+
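+Usage
+-----
+
+A minimal, illustrative sketch of round-tripping an (empty) OTLP trace request
+through the generated dataclasses; a real request would carry populated
+``resource_spans``::
+
+ from opentelemetry.proto_json.collector.trace.v1.trace_service import ExportTraceServiceRequest
+
+ # Serialize to the OTLP JSON encoding (lowerCamelCase keys, empty fields omitted).
+ request = ExportTraceServiceRequest()
+ payload = request.to_json()  # an empty request serializes to "{}"
+
+ # Parse the JSON string back into a dataclass instance.
+ parsed = ExportTraceServiceRequest.from_json(payload)
+ assert parsed == request
+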
+Code Generation
+---------------
+
+These files were generated automatically using the custom protoc plugin opentelemetry-codegen-json_ from code in opentelemetry-proto_.
+To regenerate the code, run ``../scripts/proto_codegen_json.sh``.
+
+To build against a new release or specific commit of opentelemetry-proto_,
+update the ``PROTO_REPO_BRANCH_OR_COMMIT`` variable in
+``../scripts/proto_codegen_json.sh``. Then run the script and commit the changes
+as well as any fixes needed in the OTLP exporter.
+
+.. _opentelemetry-codegen-json: https://github.com/open-telemetry/codegen/opentelemetry-codegen-json
+.. _opentelemetry-proto: https://github.com/open-telemetry/opentelemetry-proto
+
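+Encoding Notes
+--------------
+
+The generated ``to_dict``/``from_dict`` methods follow the Proto3 JSON mapping used
+by OTLP: keys are lowerCamelCase, 64-bit integers are emitted as strings, and bytes
+fields use base64 (hex for trace and span ids). A minimal sketch using ``AnyValue``
+from ``opentelemetry.proto_json.common.v1.common``::
+
+ from opentelemetry.proto_json.common.v1.common import AnyValue
+
+ # 64-bit integers are serialized as JSON strings per the Proto3 JSON mapping.
+ value = AnyValue(int_value=2**40)
+ assert value.to_dict() == {"intValue": "1099511627776"}
+
+ # Round-trip through the OTLP JSON dictionary form.
+ assert AnyValue.from_dict(value.to_dict()) == value
+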
+References
+----------
+
+* `OpenTelemetry Project <https://opentelemetry.io/>`_
+* `OpenTelemetry Proto <https://github.com/open-telemetry/opentelemetry-proto>`_
+* `OTLP JSON Encoding Specification <https://opentelemetry.io/docs/specs/otlp/#json-protobuf-encoding>`_
diff --git a/opentelemetry-proto-json/pyproject.toml b/opentelemetry-proto-json/pyproject.toml
new file mode 100644
index 0000000000..b161fc45e7
--- /dev/null
+++ b/opentelemetry-proto-json/pyproject.toml
@@ -0,0 +1,44 @@
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[project]
+name = "opentelemetry-proto-json"
+dynamic = ["version"]
+description = "OpenTelemetry Python Proto JSON"
+readme = "README.rst"
+license = "Apache-2.0"
+requires-python = ">=3.9"
+authors = [
+ { name = "OpenTelemetry Authors", email = "cncf-opentelemetry-contributors@lists.cncf.io" },
+]
+classifiers = [
+ "Development Status :: 4 - Beta",
+ "Framework :: OpenTelemetry",
+ "Intended Audience :: Developers",
+ "Programming Language :: Python",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.9",
+ "Programming Language :: Python :: 3.10",
+ "Programming Language :: Python :: 3.11",
+ "Programming Language :: Python :: 3.12",
+ "Programming Language :: Python :: 3.13",
+ "Programming Language :: Python :: 3.14",
+]
+dependencies = []
+
+[project.urls]
+Homepage = "https://github.com/open-telemetry/opentelemetry-python/tree/main/opentelemetry-proto-json"
+Repository = "https://github.com/open-telemetry/opentelemetry-python"
+
+[tool.hatch.version]
+path = "src/opentelemetry/proto_json/version/__init__.py"
+
+[tool.hatch.build.targets.sdist]
+include = [
+ "/src",
+ "/tests",
+]
+
+[tool.hatch.build.targets.wheel]
+packages = ["src/opentelemetry"]
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/__init__.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/_otlp_json_utils.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/_otlp_json_utils.py
new file mode 100644
index 0000000000..e88e9e3533
--- /dev/null
+++ b/opentelemetry-proto-json/src/opentelemetry/proto_json/_otlp_json_utils.py
@@ -0,0 +1,153 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+
+import base64
+import math
+import typing
+
+T = typing.TypeVar("T")
+
+
+def encode_hex(value: bytes) -> str:
+ """
+ Encode bytes as hex string.
+ Used for trace_id and span_id per OTLP spec.
+ """
+ return value.hex() if value else ""
+
+
+def encode_base64(value: bytes) -> str:
+ """
+ Encode bytes as base64 string.
+ Standard Proto3 JSON mapping for bytes.
+ """
+ return base64.b64encode(value).decode("utf-8") if value else ""
+
+
+def encode_int64(value: int) -> str:
+ """
+ Encode 64 bit integers as strings.
+ Required for int64, uint64, fixed64, sfixed64 and sint64 per Proto3 JSON spec.
+ """
+ return str(value)
+
+
+def encode_float(value: float) -> typing.Union[float, str]:
+ """
+ Encode float/double values.
+ """
+ if math.isnan(value):
+ return "NaN"
+ if math.isinf(value):
+ return "Infinity" if value > 0 else "-Infinity"
+ return value
+
+
+def encode_repeated(
+ values: list[typing.Any], map_fn: typing.Callable[[typing.Any], typing.Any]
+) -> list[typing.Any]:
+ """Helper to serialize repeated fields."""
+ return [map_fn(v) for v in values] if values else []
+
+
+def decode_hex(value: typing.Optional[str], field_name: str) -> bytes:
+ """Decode hex string to bytes."""
+ if not value:
+ return b""
+ validate_type(value, str, field_name)
+ try:
+ return bytes.fromhex(value)
+ except ValueError as e:
+ raise ValueError(
+ f"Invalid hex string for field '{field_name}': {e}"
+ ) from None
+
+
+def decode_base64(value: typing.Optional[str], field_name: str) -> bytes:
+ """Decode base64 string to bytes."""
+ if not value:
+ return b""
+ validate_type(value, str, field_name)
+ try:
+ return base64.b64decode(value)
+ except Exception as e:
+ raise ValueError(
+ f"Invalid base64 string for field '{field_name}': {e}"
+ ) from None
+
+
+def decode_int64(
+ value: typing.Optional[typing.Union[int, str]], field_name: str
+) -> int:
+ """Parse 64-bit integer from string or number."""
+ if value is None:
+ return 0
+ validate_type(value, (int, str), field_name)
+ try:
+ return int(value)
+ except (ValueError, TypeError):
+ raise ValueError(
+ f"Invalid int64 value for field '{field_name}': {value}"
+ ) from None
+
+
+def decode_float(
+ value: typing.Optional[typing.Union[float, int, str]], field_name: str
+) -> float:
+ """Parse float/double from number or special string."""
+ if value is None:
+ return 0.0
+ validate_type(value, (float, int, str), field_name)
+ if value == "NaN":
+ return math.nan
+ if value == "Infinity":
+ return math.inf
+ if value == "-Infinity":
+ return -math.inf
+ try:
+ return float(value)
+ except (ValueError, TypeError):
+ raise ValueError(
+ f"Invalid float value for field '{field_name}': {value}"
+ ) from None
+
+
+def decode_repeated(
+ values: typing.Optional[list[typing.Any]],
+ item_parser: typing.Callable[[typing.Any], T],
+ field_name: str,
+) -> list[T]:
+ """Helper to deserialize repeated fields."""
+ if values is None:
+ return []
+ validate_type(values, list, field_name)
+ return [item_parser(v) for v in values]
+
+
+def validate_type(
+ value: typing.Any,
+ expected_types: typing.Union[type, tuple[type, ...]],
+ field_name: str,
+) -> None:
+ """
+ Validate that a value is of the expected type(s).
+ Raises TypeError if validation fails.
+ """
+ if not isinstance(value, expected_types):
+ raise TypeError(
+ f"Field '{field_name}' expected {expected_types}, "
+ f"got {type(value).__name__}"
+ )
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/__init__.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/logs/__init__.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/logs/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/logs/v1/__init__.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/logs/v1/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/logs/v1/logs_service.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/logs/v1/logs_service.py
new file mode 100644
index 0000000000..8002a77ba5
--- /dev/null
+++ b/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/logs/v1/logs_service.py
@@ -0,0 +1,228 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# AUTO-GENERATED from "opentelemetry/proto/collector/logs/v1/logs_service.proto"
+# DO NOT EDIT MANUALLY
+
+from __future__ import annotations
+
+import builtins
+import dataclasses
+import functools
+import json
+import sys
+import typing
+
+if sys.version_info >= (3, 10):
+ _dataclass = functools.partial(dataclasses.dataclass, slots=True)
+else:
+ _dataclass = dataclasses.dataclass
+
+import opentelemetry.proto_json._otlp_json_utils
+import opentelemetry.proto_json.logs.v1.logs
+
+
+@typing.final
+@_dataclass
+class ExportLogsServiceRequest:
+ """
+ Generated from protobuf message ExportLogsServiceRequest
+ """
+
+ resource_logs: builtins.list[opentelemetry.proto_json.logs.v1.logs.ResourceLogs] = dataclasses.field(default_factory=builtins.list)
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.resource_logs:
+ _result["resourceLogs"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.resource_logs, lambda _v: _v.to_dict())
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ExportLogsServiceRequest":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ExportLogsServiceRequest instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("resourceLogs")) is not None:
+ _args["resource_logs"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: opentelemetry.proto_json.logs.v1.logs.ResourceLogs.from_dict(_v), "resource_logs")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ExportLogsServiceRequest":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class ExportLogsServiceResponse:
+ """
+ Generated from protobuf message ExportLogsServiceResponse
+ """
+
+ partial_success: typing.Optional[ExportLogsPartialSuccess] = None
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.partial_success:
+ _result["partialSuccess"] = self.partial_success.to_dict()
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ExportLogsServiceResponse":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ExportLogsServiceResponse instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("partialSuccess")) is not None:
+ _args["partial_success"] = ExportLogsPartialSuccess.from_dict(_value)
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ExportLogsServiceResponse":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class ExportLogsPartialSuccess:
+ """
+ Generated from protobuf message ExportLogsPartialSuccess
+ """
+
+ rejected_log_records: typing.Optional[builtins.int] = 0
+ error_message: typing.Optional[builtins.str] = ""
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.rejected_log_records:
+ _result["rejectedLogRecords"] = opentelemetry.proto_json._otlp_json_utils.encode_int64(self.rejected_log_records)
+ if self.error_message:
+ _result["errorMessage"] = self.error_message
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ExportLogsPartialSuccess":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ExportLogsPartialSuccess instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("rejectedLogRecords")) is not None:
+ _args["rejected_log_records"] = opentelemetry.proto_json._otlp_json_utils.decode_int64(_value, "rejected_log_records")
+ if (_value := data.get("errorMessage")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.str, "error_message")
+ _args["error_message"] = _value
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ExportLogsPartialSuccess":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/metrics/__init__.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/metrics/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/metrics/v1/__init__.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/metrics/v1/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/metrics/v1/metrics_service.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/metrics/v1/metrics_service.py
new file mode 100644
index 0000000000..150d28b8d3
--- /dev/null
+++ b/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/metrics/v1/metrics_service.py
@@ -0,0 +1,228 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# AUTO-GENERATED from "opentelemetry/proto/collector/metrics/v1/metrics_service.proto"
+# DO NOT EDIT MANUALLY
+
+from __future__ import annotations
+
+import builtins
+import dataclasses
+import functools
+import json
+import sys
+import typing
+
+if sys.version_info >= (3, 10):
+ _dataclass = functools.partial(dataclasses.dataclass, slots=True)
+else:
+ _dataclass = dataclasses.dataclass
+
+import opentelemetry.proto_json._otlp_json_utils
+import opentelemetry.proto_json.metrics.v1.metrics
+
+
+@typing.final
+@_dataclass
+class ExportMetricsServiceRequest:
+ """
+ Generated from protobuf message ExportMetricsServiceRequest
+ """
+
+ resource_metrics: builtins.list[opentelemetry.proto_json.metrics.v1.metrics.ResourceMetrics] = dataclasses.field(default_factory=builtins.list)
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.resource_metrics:
+ _result["resourceMetrics"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.resource_metrics, lambda _v: _v.to_dict())
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ExportMetricsServiceRequest":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ExportMetricsServiceRequest instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("resourceMetrics")) is not None:
+ _args["resource_metrics"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: opentelemetry.proto_json.metrics.v1.metrics.ResourceMetrics.from_dict(_v), "resource_metrics")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ExportMetricsServiceRequest":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class ExportMetricsServiceResponse:
+ """
+ Generated from protobuf message ExportMetricsServiceResponse
+ """
+
+ partial_success: typing.Optional[ExportMetricsPartialSuccess] = None
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.partial_success:
+ _result["partialSuccess"] = self.partial_success.to_dict()
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ExportMetricsServiceResponse":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ExportMetricsServiceResponse instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("partialSuccess")) is not None:
+ _args["partial_success"] = ExportMetricsPartialSuccess.from_dict(_value)
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ExportMetricsServiceResponse":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class ExportMetricsPartialSuccess:
+ """
+ Generated from protobuf message ExportMetricsPartialSuccess
+ """
+
+ rejected_data_points: typing.Optional[builtins.int] = 0
+ error_message: typing.Optional[builtins.str] = ""
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.rejected_data_points:
+ _result["rejectedDataPoints"] = opentelemetry.proto_json._otlp_json_utils.encode_int64(self.rejected_data_points)
+ if self.error_message:
+ _result["errorMessage"] = self.error_message
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ExportMetricsPartialSuccess":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ExportMetricsPartialSuccess instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("rejectedDataPoints")) is not None:
+ _args["rejected_data_points"] = opentelemetry.proto_json._otlp_json_utils.decode_int64(_value, "rejected_data_points")
+ if (_value := data.get("errorMessage")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.str, "error_message")
+ _args["error_message"] = _value
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ExportMetricsPartialSuccess":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/profiles/__init__.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/profiles/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/profiles/v1development/__init__.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/profiles/v1development/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/profiles/v1development/profiles_service.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/profiles/v1development/profiles_service.py
new file mode 100644
index 0000000000..4c6cf11924
--- /dev/null
+++ b/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/profiles/v1development/profiles_service.py
@@ -0,0 +1,233 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# AUTO-GENERATED from "opentelemetry/proto/collector/profiles/v1development/profiles_service.proto"
+# DO NOT EDIT MANUALLY
+
+from __future__ import annotations
+
+import builtins
+import dataclasses
+import functools
+import json
+import sys
+import typing
+
+if sys.version_info >= (3, 10):
+ _dataclass = functools.partial(dataclasses.dataclass, slots=True)
+else:
+ _dataclass = dataclasses.dataclass
+
+import opentelemetry.proto_json._otlp_json_utils
+import opentelemetry.proto_json.profiles.v1development.profiles
+
+
+@typing.final
+@_dataclass
+class ExportProfilesServiceRequest:
+ """
+ Generated from protobuf message ExportProfilesServiceRequest
+ """
+
+ resource_profiles: builtins.list[opentelemetry.proto_json.profiles.v1development.profiles.ResourceProfiles] = dataclasses.field(default_factory=builtins.list)
+ dictionary: typing.Optional[opentelemetry.proto_json.profiles.v1development.profiles.ProfilesDictionary] = None
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.resource_profiles:
+ _result["resourceProfiles"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.resource_profiles, lambda _v: _v.to_dict())
+ if self.dictionary:
+ _result["dictionary"] = self.dictionary.to_dict()
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ExportProfilesServiceRequest":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ExportProfilesServiceRequest instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("resourceProfiles")) is not None:
+ _args["resource_profiles"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: opentelemetry.proto_json.profiles.v1development.profiles.ResourceProfiles.from_dict(_v), "resource_profiles")
+ if (_value := data.get("dictionary")) is not None:
+ _args["dictionary"] = opentelemetry.proto_json.profiles.v1development.profiles.ProfilesDictionary.from_dict(_value)
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ExportProfilesServiceRequest":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class ExportProfilesServiceResponse:
+ """
+ Generated from protobuf message ExportProfilesServiceResponse
+ """
+
+ partial_success: typing.Optional[ExportProfilesPartialSuccess] = None
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.partial_success:
+ _result["partialSuccess"] = self.partial_success.to_dict()
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ExportProfilesServiceResponse":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ExportProfilesServiceResponse instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("partialSuccess")) is not None:
+ _args["partial_success"] = ExportProfilesPartialSuccess.from_dict(_value)
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ExportProfilesServiceResponse":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class ExportProfilesPartialSuccess:
+ """
+ Generated from protobuf message ExportProfilesPartialSuccess
+ """
+
+ rejected_profiles: typing.Optional[builtins.int] = 0
+ error_message: typing.Optional[builtins.str] = ""
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.rejected_profiles:
+ _result["rejectedProfiles"] = opentelemetry.proto_json._otlp_json_utils.encode_int64(self.rejected_profiles)
+ if self.error_message:
+ _result["errorMessage"] = self.error_message
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ExportProfilesPartialSuccess":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ExportProfilesPartialSuccess instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("rejectedProfiles")) is not None:
+ _args["rejected_profiles"] = opentelemetry.proto_json._otlp_json_utils.decode_int64(_value, "rejected_profiles")
+ if (_value := data.get("errorMessage")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.str, "error_message")
+ _args["error_message"] = _value
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ExportProfilesPartialSuccess":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/trace/__init__.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/trace/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/trace/v1/__init__.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/trace/v1/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/trace/v1/trace_service.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/trace/v1/trace_service.py
new file mode 100644
index 0000000000..75b2b83f25
--- /dev/null
+++ b/opentelemetry-proto-json/src/opentelemetry/proto_json/collector/trace/v1/trace_service.py
@@ -0,0 +1,228 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# AUTO-GENERATED from "opentelemetry/proto/collector/trace/v1/trace_service.proto"
+# DO NOT EDIT MANUALLY
+
+from __future__ import annotations
+
+import builtins
+import dataclasses
+import functools
+import json
+import sys
+import typing
+
+if sys.version_info >= (3, 10):
+ _dataclass = functools.partial(dataclasses.dataclass, slots=True)
+else:
+ _dataclass = dataclasses.dataclass
+
+import opentelemetry.proto_json._otlp_json_utils
+import opentelemetry.proto_json.trace.v1.trace
+
+
+@typing.final
+@_dataclass
+class ExportTraceServiceRequest:
+ """
+ Generated from protobuf message ExportTraceServiceRequest
+ """
+
+ resource_spans: builtins.list[opentelemetry.proto_json.trace.v1.trace.ResourceSpans] = dataclasses.field(default_factory=builtins.list)
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.resource_spans:
+ _result["resourceSpans"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.resource_spans, lambda _v: _v.to_dict())
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ExportTraceServiceRequest":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ExportTraceServiceRequest instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("resourceSpans")) is not None:
+ _args["resource_spans"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: opentelemetry.proto_json.trace.v1.trace.ResourceSpans.from_dict(_v), "resource_spans")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ExportTraceServiceRequest":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class ExportTraceServiceResponse:
+ """
+ Generated from protobuf message ExportTraceServiceResponse
+ """
+
+ partial_success: typing.Optional[ExportTracePartialSuccess] = None
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.partial_success:
+ _result["partialSuccess"] = self.partial_success.to_dict()
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ExportTraceServiceResponse":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ExportTraceServiceResponse instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("partialSuccess")) is not None:
+ _args["partial_success"] = ExportTracePartialSuccess.from_dict(_value)
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ExportTraceServiceResponse":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class ExportTracePartialSuccess:
+ """
+ Generated from protobuf message ExportTracePartialSuccess
+ """
+
+ rejected_spans: typing.Optional[builtins.int] = 0
+ error_message: typing.Optional[builtins.str] = ""
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.rejected_spans:
+ _result["rejectedSpans"] = opentelemetry.proto_json._otlp_json_utils.encode_int64(self.rejected_spans)
+ if self.error_message:
+ _result["errorMessage"] = self.error_message
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ExportTracePartialSuccess":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ExportTracePartialSuccess instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("rejectedSpans")) is not None:
+ _args["rejected_spans"] = opentelemetry.proto_json._otlp_json_utils.decode_int64(_value, "rejected_spans")
+ if (_value := data.get("errorMessage")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.str, "error_message")
+ _args["error_message"] = _value
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ExportTracePartialSuccess":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/common/__init__.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/common/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/common/v1/__init__.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/common/v1/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/common/v1/common.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/common/v1/common.py
new file mode 100644
index 0000000000..f0c482f407
--- /dev/null
+++ b/opentelemetry-proto-json/src/opentelemetry/proto_json/common/v1/common.py
@@ -0,0 +1,483 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# AUTO-GENERATED from "opentelemetry/proto/common/v1/common.proto"
+# DO NOT EDIT MANUALLY
+
+from __future__ import annotations
+
+import builtins
+import dataclasses
+import functools
+import json
+import sys
+import typing
+
+if sys.version_info >= (3, 10):
+ _dataclass = functools.partial(dataclasses.dataclass, slots=True)
+else:
+ _dataclass = dataclasses.dataclass
+
+import opentelemetry.proto_json._otlp_json_utils
+
+
+@typing.final
+@_dataclass
+class AnyValue:
+ """
+ Generated from protobuf message AnyValue
+ """
+
+ string_value: typing.Optional[builtins.str] = None
+ bool_value: typing.Optional[builtins.bool] = None
+ int_value: typing.Optional[builtins.int] = None
+ double_value: typing.Optional[builtins.float] = None
+ array_value: typing.Optional[ArrayValue] = None
+ kvlist_value: typing.Optional[KeyValueList] = None
+ bytes_value: typing.Optional[builtins.bytes] = None
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.bytes_value is not None:
+ _result["bytesValue"] = opentelemetry.proto_json._otlp_json_utils.encode_base64(self.bytes_value)
+ elif self.kvlist_value is not None:
+ _result["kvlistValue"] = self.kvlist_value.to_dict()
+ elif self.array_value is not None:
+ _result["arrayValue"] = self.array_value.to_dict()
+ elif self.double_value is not None:
+ _result["doubleValue"] = opentelemetry.proto_json._otlp_json_utils.encode_float(self.double_value)
+ elif self.int_value is not None:
+ _result["intValue"] = opentelemetry.proto_json._otlp_json_utils.encode_int64(self.int_value)
+ elif self.bool_value is not None:
+ _result["boolValue"] = self.bool_value
+ elif self.string_value is not None:
+ _result["stringValue"] = self.string_value
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "AnyValue":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ AnyValue instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("bytesValue")) is not None:
+ _args["bytes_value"] = opentelemetry.proto_json._otlp_json_utils.decode_base64(_value, "bytes_value")
+ elif (_value := data.get("kvlistValue")) is not None:
+ _args["kvlist_value"] = KeyValueList.from_dict(_value)
+ elif (_value := data.get("arrayValue")) is not None:
+ _args["array_value"] = ArrayValue.from_dict(_value)
+ elif (_value := data.get("doubleValue")) is not None:
+ _args["double_value"] = opentelemetry.proto_json._otlp_json_utils.decode_float(_value, "double_value")
+ elif (_value := data.get("intValue")) is not None:
+ _args["int_value"] = opentelemetry.proto_json._otlp_json_utils.decode_int64(_value, "int_value")
+ elif (_value := data.get("boolValue")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.bool, "bool_value")
+ _args["bool_value"] = _value
+ elif (_value := data.get("stringValue")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.str, "string_value")
+ _args["string_value"] = _value
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "AnyValue":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class ArrayValue:
+ """
+ Generated from protobuf message ArrayValue
+ """
+
+ values: builtins.list[AnyValue] = dataclasses.field(default_factory=builtins.list)
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.values:
+ _result["values"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.values, lambda _v: _v.to_dict())
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ArrayValue":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ArrayValue instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("values")) is not None:
+ _args["values"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: AnyValue.from_dict(_v), "values")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ArrayValue":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class KeyValueList:
+ """
+ Generated from protobuf message KeyValueList
+ """
+
+ values: builtins.list[KeyValue] = dataclasses.field(default_factory=builtins.list)
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.values:
+ _result["values"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.values, lambda _v: _v.to_dict())
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "KeyValueList":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ KeyValueList instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("values")) is not None:
+ _args["values"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: KeyValue.from_dict(_v), "values")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "KeyValueList":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class KeyValue:
+ """
+ Generated from protobuf message KeyValue
+ """
+
+ key: typing.Optional[builtins.str] = ""
+ value: typing.Optional[AnyValue] = None
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.key:
+ _result["key"] = self.key
+ if self.value:
+ _result["value"] = self.value.to_dict()
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "KeyValue":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ KeyValue instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("key")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.str, "key")
+ _args["key"] = _value
+ if (_value := data.get("value")) is not None:
+ _args["value"] = AnyValue.from_dict(_value)
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "KeyValue":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class InstrumentationScope:
+ """
+ Generated from protobuf message InstrumentationScope
+ """
+
+ name: typing.Optional[builtins.str] = ""
+ version: typing.Optional[builtins.str] = ""
+ attributes: builtins.list[KeyValue] = dataclasses.field(default_factory=builtins.list)
+ dropped_attributes_count: typing.Optional[builtins.int] = 0
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.name:
+ _result["name"] = self.name
+ if self.version:
+ _result["version"] = self.version
+ if self.attributes:
+ _result["attributes"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.attributes, lambda _v: _v.to_dict())
+ if self.dropped_attributes_count:
+ _result["droppedAttributesCount"] = self.dropped_attributes_count
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "InstrumentationScope":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ InstrumentationScope instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("name")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.str, "name")
+ _args["name"] = _value
+ if (_value := data.get("version")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.str, "version")
+ _args["version"] = _value
+ if (_value := data.get("attributes")) is not None:
+ _args["attributes"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: KeyValue.from_dict(_v), "attributes")
+ if (_value := data.get("droppedAttributesCount")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.int, "dropped_attributes_count")
+ _args["dropped_attributes_count"] = _value
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "InstrumentationScope":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class EntityRef:
+ """
+ Generated from protobuf message EntityRef
+ """
+
+ schema_url: typing.Optional[builtins.str] = ""
+ type: typing.Optional[builtins.str] = ""
+ id_keys: builtins.list[builtins.str] = dataclasses.field(default_factory=builtins.list)
+ description_keys: builtins.list[builtins.str] = dataclasses.field(default_factory=builtins.list)
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.schema_url:
+ _result["schemaUrl"] = self.schema_url
+ if self.type:
+ _result["type"] = self.type
+ if self.id_keys:
+ _result["idKeys"] = self.id_keys
+ if self.description_keys:
+ _result["descriptionKeys"] = self.description_keys
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "EntityRef":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ EntityRef instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("schemaUrl")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.str, "schema_url")
+ _args["schema_url"] = _value
+ if (_value := data.get("type")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.str, "type")
+ _args["type"] = _value
+ if (_value := data.get("idKeys")) is not None:
+ _args["id_keys"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: _v, "id_keys")
+ if (_value := data.get("descriptionKeys")) is not None:
+ _args["description_keys"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: _v, "description_keys")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "EntityRef":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/logs/__init__.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/logs/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/logs/v1/__init__.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/logs/v1/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/logs/v1/logs.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/logs/v1/logs.py
new file mode 100644
index 0000000000..855c280328
--- /dev/null
+++ b/opentelemetry-proto-json/src/opentelemetry/proto_json/logs/v1/logs.py
@@ -0,0 +1,405 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# AUTO-GENERATED from "opentelemetry/proto/logs/v1/logs.proto"
+# DO NOT EDIT MANUALLY
+
+from __future__ import annotations
+
+import builtins
+import dataclasses
+import enum
+import functools
+import json
+import sys
+import typing
+
+if sys.version_info >= (3, 10):
+ _dataclass = functools.partial(dataclasses.dataclass, slots=True)
+else:
+ _dataclass = dataclasses.dataclass
+
+import opentelemetry.proto_json._otlp_json_utils
+import opentelemetry.proto_json.common.v1.common
+import opentelemetry.proto_json.resource.v1.resource
+
+
+@typing.final
+class SeverityNumber(enum.IntEnum):
+ """
+ Generated from protobuf enum SeverityNumber
+ """
+
+ SEVERITY_NUMBER_UNSPECIFIED = 0
+ SEVERITY_NUMBER_TRACE = 1
+ SEVERITY_NUMBER_TRACE2 = 2
+ SEVERITY_NUMBER_TRACE3 = 3
+ SEVERITY_NUMBER_TRACE4 = 4
+ SEVERITY_NUMBER_DEBUG = 5
+ SEVERITY_NUMBER_DEBUG2 = 6
+ SEVERITY_NUMBER_DEBUG3 = 7
+ SEVERITY_NUMBER_DEBUG4 = 8
+ SEVERITY_NUMBER_INFO = 9
+ SEVERITY_NUMBER_INFO2 = 10
+ SEVERITY_NUMBER_INFO3 = 11
+ SEVERITY_NUMBER_INFO4 = 12
+ SEVERITY_NUMBER_WARN = 13
+ SEVERITY_NUMBER_WARN2 = 14
+ SEVERITY_NUMBER_WARN3 = 15
+ SEVERITY_NUMBER_WARN4 = 16
+ SEVERITY_NUMBER_ERROR = 17
+ SEVERITY_NUMBER_ERROR2 = 18
+ SEVERITY_NUMBER_ERROR3 = 19
+ SEVERITY_NUMBER_ERROR4 = 20
+ SEVERITY_NUMBER_FATAL = 21
+ SEVERITY_NUMBER_FATAL2 = 22
+ SEVERITY_NUMBER_FATAL3 = 23
+ SEVERITY_NUMBER_FATAL4 = 24
+
+@typing.final
+class LogRecordFlags(enum.IntEnum):
+ """
+ Generated from protobuf enum LogRecordFlags
+ """
+
+ LOG_RECORD_FLAGS_DO_NOT_USE = 0
+ LOG_RECORD_FLAGS_TRACE_FLAGS_MASK = 255
+
+@typing.final
+@_dataclass
+class LogsData:
+ """
+ Generated from protobuf message LogsData
+ """
+
+ resource_logs: builtins.list[ResourceLogs] = dataclasses.field(default_factory=builtins.list)
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.resource_logs:
+ _result["resourceLogs"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.resource_logs, lambda _v: _v.to_dict())
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "LogsData":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ LogsData instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("resourceLogs")) is not None:
+ _args["resource_logs"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: ResourceLogs.from_dict(_v), "resource_logs")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "LogsData":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class ResourceLogs:
+ """
+ Generated from protobuf message ResourceLogs
+ """
+
+ resource: typing.Optional[opentelemetry.proto_json.resource.v1.resource.Resource] = None
+ scope_logs: builtins.list[ScopeLogs] = dataclasses.field(default_factory=builtins.list)
+ schema_url: typing.Optional[builtins.str] = ""
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.resource:
+ _result["resource"] = self.resource.to_dict()
+ if self.scope_logs:
+ _result["scopeLogs"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.scope_logs, lambda _v: _v.to_dict())
+ if self.schema_url:
+ _result["schemaUrl"] = self.schema_url
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ResourceLogs":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ResourceLogs instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("resource")) is not None:
+ _args["resource"] = opentelemetry.proto_json.resource.v1.resource.Resource.from_dict(_value)
+ if (_value := data.get("scopeLogs")) is not None:
+ _args["scope_logs"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: ScopeLogs.from_dict(_v), "scope_logs")
+ if (_value := data.get("schemaUrl")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.str, "schema_url")
+ _args["schema_url"] = _value
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ResourceLogs":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class ScopeLogs:
+ """
+ Generated from protobuf message ScopeLogs
+ """
+
+ scope: typing.Optional[opentelemetry.proto_json.common.v1.common.InstrumentationScope] = None
+ log_records: builtins.list[LogRecord] = dataclasses.field(default_factory=builtins.list)
+ schema_url: typing.Optional[builtins.str] = ""
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.scope:
+ _result["scope"] = self.scope.to_dict()
+ if self.log_records:
+ _result["logRecords"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.log_records, lambda _v: _v.to_dict())
+ if self.schema_url:
+ _result["schemaUrl"] = self.schema_url
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ScopeLogs":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ScopeLogs instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("scope")) is not None:
+ _args["scope"] = opentelemetry.proto_json.common.v1.common.InstrumentationScope.from_dict(_value)
+ if (_value := data.get("logRecords")) is not None:
+ _args["log_records"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: LogRecord.from_dict(_v), "log_records")
+ if (_value := data.get("schemaUrl")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.str, "schema_url")
+ _args["schema_url"] = _value
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ScopeLogs":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class LogRecord:
+ """
+ Generated from protobuf message LogRecord
+ """
+
+ time_unix_nano: typing.Optional[builtins.int] = 0
+ observed_time_unix_nano: typing.Optional[builtins.int] = 0
+ severity_number: typing.Union[SeverityNumber, builtins.int, None] = 0
+ severity_text: typing.Optional[builtins.str] = ""
+ body: typing.Optional[opentelemetry.proto_json.common.v1.common.AnyValue] = None
+ attributes: builtins.list[opentelemetry.proto_json.common.v1.common.KeyValue] = dataclasses.field(default_factory=builtins.list)
+ dropped_attributes_count: typing.Optional[builtins.int] = 0
+ flags: typing.Optional[builtins.int] = 0
+ trace_id: typing.Optional[builtins.bytes] = b""
+ span_id: typing.Optional[builtins.bytes] = b""
+ event_name: typing.Optional[builtins.str] = ""
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.time_unix_nano:
+ _result["timeUnixNano"] = opentelemetry.proto_json._otlp_json_utils.encode_int64(self.time_unix_nano)
+ if self.observed_time_unix_nano:
+ _result["observedTimeUnixNano"] = opentelemetry.proto_json._otlp_json_utils.encode_int64(self.observed_time_unix_nano)
+ if self.severity_number:
+ _result["severityNumber"] = builtins.int(self.severity_number)
+ if self.severity_text:
+ _result["severityText"] = self.severity_text
+ if self.body:
+ _result["body"] = self.body.to_dict()
+ if self.attributes:
+ _result["attributes"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.attributes, lambda _v: _v.to_dict())
+ if self.dropped_attributes_count:
+ _result["droppedAttributesCount"] = self.dropped_attributes_count
+ if self.flags:
+ _result["flags"] = self.flags
+ if self.trace_id:
+ _result["traceId"] = opentelemetry.proto_json._otlp_json_utils.encode_hex(self.trace_id)
+ if self.span_id:
+ _result["spanId"] = opentelemetry.proto_json._otlp_json_utils.encode_hex(self.span_id)
+ if self.event_name:
+ _result["eventName"] = self.event_name
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "LogRecord":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ LogRecord instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("timeUnixNano")) is not None:
+ _args["time_unix_nano"] = opentelemetry.proto_json._otlp_json_utils.decode_int64(_value, "time_unix_nano")
+ if (_value := data.get("observedTimeUnixNano")) is not None:
+ _args["observed_time_unix_nano"] = opentelemetry.proto_json._otlp_json_utils.decode_int64(_value, "observed_time_unix_nano")
+ if (_value := data.get("severityNumber")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.int, "severity_number")
+ _args["severity_number"] = SeverityNumber(_value)
+ if (_value := data.get("severityText")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.str, "severity_text")
+ _args["severity_text"] = _value
+ if (_value := data.get("body")) is not None:
+ _args["body"] = opentelemetry.proto_json.common.v1.common.AnyValue.from_dict(_value)
+ if (_value := data.get("attributes")) is not None:
+ _args["attributes"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: opentelemetry.proto_json.common.v1.common.KeyValue.from_dict(_v), "attributes")
+ if (_value := data.get("droppedAttributesCount")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.int, "dropped_attributes_count")
+ _args["dropped_attributes_count"] = _value
+ if (_value := data.get("flags")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.int, "flags")
+ _args["flags"] = _value
+ if (_value := data.get("traceId")) is not None:
+ _args["trace_id"] = opentelemetry.proto_json._otlp_json_utils.decode_hex(_value, "trace_id")
+ if (_value := data.get("spanId")) is not None:
+ _args["span_id"] = opentelemetry.proto_json._otlp_json_utils.decode_hex(_value, "span_id")
+ if (_value := data.get("eventName")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.str, "event_name")
+ _args["event_name"] = _value
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "LogRecord":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/metrics/__init__.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/metrics/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/metrics/v1/__init__.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/metrics/v1/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/metrics/v1/metrics.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/metrics/v1/metrics.py
new file mode 100644
index 0000000000..81f8489907
--- /dev/null
+++ b/opentelemetry-proto-json/src/opentelemetry/proto_json/metrics/v1/metrics.py
@@ -0,0 +1,1365 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# AUTO-GENERATED from "opentelemetry/proto/metrics/v1/metrics.proto"
+# DO NOT EDIT MANUALLY
+
+from __future__ import annotations
+
+import builtins
+import dataclasses
+import enum
+import functools
+import json
+import sys
+import typing
+
+if sys.version_info >= (3, 10):
+ _dataclass = functools.partial(dataclasses.dataclass, slots=True)
+else:
+ _dataclass = dataclasses.dataclass
+
+import opentelemetry.proto_json._otlp_json_utils
+import opentelemetry.proto_json.common.v1.common
+import opentelemetry.proto_json.resource.v1.resource
+
+
+@typing.final
+class AggregationTemporality(enum.IntEnum):
+ """
+ Generated from protobuf enum AggregationTemporality
+ """
+
+ AGGREGATION_TEMPORALITY_UNSPECIFIED = 0
+ AGGREGATION_TEMPORALITY_DELTA = 1
+ AGGREGATION_TEMPORALITY_CUMULATIVE = 2
+
+@typing.final
+class DataPointFlags(enum.IntEnum):
+ """
+ Generated from protobuf enum DataPointFlags
+ """
+
+ DATA_POINT_FLAGS_DO_NOT_USE = 0
+ DATA_POINT_FLAGS_NO_RECORDED_VALUE_MASK = 1
+
+@typing.final
+@_dataclass
+class MetricsData:
+ """
+ Generated from protobuf message MetricsData
+ """
+
+ resource_metrics: builtins.list[ResourceMetrics] = dataclasses.field(default_factory=builtins.list)
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.resource_metrics:
+ _result["resourceMetrics"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.resource_metrics, lambda _v: _v.to_dict())
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "MetricsData":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ MetricsData instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("resourceMetrics")) is not None:
+ _args["resource_metrics"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: ResourceMetrics.from_dict(_v), "resource_metrics")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "MetricsData":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class ResourceMetrics:
+ """
+ Generated from protobuf message ResourceMetrics
+ """
+
+ resource: typing.Optional[opentelemetry.proto_json.resource.v1.resource.Resource] = None
+ scope_metrics: builtins.list[ScopeMetrics] = dataclasses.field(default_factory=builtins.list)
+ schema_url: typing.Optional[builtins.str] = ""
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.resource:
+ _result["resource"] = self.resource.to_dict()
+ if self.scope_metrics:
+ _result["scopeMetrics"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.scope_metrics, lambda _v: _v.to_dict())
+ if self.schema_url:
+ _result["schemaUrl"] = self.schema_url
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ResourceMetrics":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ResourceMetrics instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("resource")) is not None:
+ _args["resource"] = opentelemetry.proto_json.resource.v1.resource.Resource.from_dict(_value)
+ if (_value := data.get("scopeMetrics")) is not None:
+ _args["scope_metrics"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: ScopeMetrics.from_dict(_v), "scope_metrics")
+ if (_value := data.get("schemaUrl")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.str, "schema_url")
+ _args["schema_url"] = _value
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ResourceMetrics":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class ScopeMetrics:
+ """
+ Generated from protobuf message ScopeMetrics
+ """
+
+ scope: typing.Optional[opentelemetry.proto_json.common.v1.common.InstrumentationScope] = None
+ metrics: builtins.list[Metric] = dataclasses.field(default_factory=builtins.list)
+ schema_url: typing.Optional[builtins.str] = ""
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.scope:
+ _result["scope"] = self.scope.to_dict()
+ if self.metrics:
+ _result["metrics"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.metrics, lambda _v: _v.to_dict())
+ if self.schema_url:
+ _result["schemaUrl"] = self.schema_url
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ScopeMetrics":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ScopeMetrics instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("scope")) is not None:
+ _args["scope"] = opentelemetry.proto_json.common.v1.common.InstrumentationScope.from_dict(_value)
+ if (_value := data.get("metrics")) is not None:
+ _args["metrics"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: Metric.from_dict(_v), "metrics")
+ if (_value := data.get("schemaUrl")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.str, "schema_url")
+ _args["schema_url"] = _value
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ScopeMetrics":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class Metric:
+ """
+ Generated from protobuf message Metric
+ """
+
+ name: typing.Optional[builtins.str] = ""
+ description: typing.Optional[builtins.str] = ""
+ unit: typing.Optional[builtins.str] = ""
+ gauge: typing.Optional[Gauge] = None
+ sum: typing.Optional[Sum] = None
+ histogram: typing.Optional[Histogram] = None
+ exponential_histogram: typing.Optional[ExponentialHistogram] = None
+ summary: typing.Optional[Summary] = None
+ metadata: builtins.list[opentelemetry.proto_json.common.v1.common.KeyValue] = dataclasses.field(default_factory=builtins.list)
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.name:
+ _result["name"] = self.name
+ if self.description:
+ _result["description"] = self.description
+ if self.unit:
+ _result["unit"] = self.unit
+ if self.metadata:
+ _result["metadata"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.metadata, lambda _v: _v.to_dict())
+ if self.summary is not None:
+ _result["summary"] = self.summary.to_dict()
+ elif self.exponential_histogram is not None:
+ _result["exponentialHistogram"] = self.exponential_histogram.to_dict()
+ elif self.histogram is not None:
+ _result["histogram"] = self.histogram.to_dict()
+ elif self.sum is not None:
+ _result["sum"] = self.sum.to_dict()
+ elif self.gauge is not None:
+ _result["gauge"] = self.gauge.to_dict()
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "Metric":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ Metric instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("name")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.str, "name")
+ _args["name"] = _value
+ if (_value := data.get("description")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.str, "description")
+ _args["description"] = _value
+ if (_value := data.get("unit")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.str, "unit")
+ _args["unit"] = _value
+ if (_value := data.get("metadata")) is not None:
+ _args["metadata"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: opentelemetry.proto_json.common.v1.common.KeyValue.from_dict(_v), "metadata")
+ if (_value := data.get("summary")) is not None:
+ _args["summary"] = Summary.from_dict(_value)
+ elif (_value := data.get("exponentialHistogram")) is not None:
+ _args["exponential_histogram"] = ExponentialHistogram.from_dict(_value)
+ elif (_value := data.get("histogram")) is not None:
+ _args["histogram"] = Histogram.from_dict(_value)
+ elif (_value := data.get("sum")) is not None:
+ _args["sum"] = Sum.from_dict(_value)
+ elif (_value := data.get("gauge")) is not None:
+ _args["gauge"] = Gauge.from_dict(_value)
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "Metric":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class Gauge:
+ """
+ Generated from protobuf message Gauge
+ """
+
+ data_points: builtins.list[NumberDataPoint] = dataclasses.field(default_factory=builtins.list)
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.data_points:
+ _result["dataPoints"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.data_points, lambda _v: _v.to_dict())
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "Gauge":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ Gauge instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("dataPoints")) is not None:
+ _args["data_points"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: NumberDataPoint.from_dict(_v), "data_points")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "Gauge":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class Sum:
+ """
+ Generated from protobuf message Sum
+ """
+
+ data_points: builtins.list[NumberDataPoint] = dataclasses.field(default_factory=builtins.list)
+ aggregation_temporality: typing.Union[AggregationTemporality, builtins.int, None] = 0
+ is_monotonic: typing.Optional[builtins.bool] = False
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.data_points:
+ _result["dataPoints"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.data_points, lambda _v: _v.to_dict())
+ if self.aggregation_temporality:
+ _result["aggregationTemporality"] = builtins.int(self.aggregation_temporality)
+ if self.is_monotonic:
+ _result["isMonotonic"] = self.is_monotonic
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "Sum":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ Sum instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("dataPoints")) is not None:
+ _args["data_points"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: NumberDataPoint.from_dict(_v), "data_points")
+ if (_value := data.get("aggregationTemporality")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.int, "aggregation_temporality")
+ _args["aggregation_temporality"] = AggregationTemporality(_value)
+ if (_value := data.get("isMonotonic")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.bool, "is_monotonic")
+ _args["is_monotonic"] = _value
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "Sum":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class Histogram:
+ """
+ Generated from protobuf message Histogram
+ """
+
+ data_points: builtins.list[HistogramDataPoint] = dataclasses.field(default_factory=builtins.list)
+ aggregation_temporality: typing.Union[AggregationTemporality, builtins.int, None] = 0
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.data_points:
+ _result["dataPoints"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.data_points, lambda _v: _v.to_dict())
+ if self.aggregation_temporality:
+ _result["aggregationTemporality"] = builtins.int(self.aggregation_temporality)
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "Histogram":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ Histogram instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("dataPoints")) is not None:
+ _args["data_points"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: HistogramDataPoint.from_dict(_v), "data_points")
+ if (_value := data.get("aggregationTemporality")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.int, "aggregation_temporality")
+ _args["aggregation_temporality"] = AggregationTemporality(_value)
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "Histogram":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class ExponentialHistogram:
+ """
+ Generated from protobuf message ExponentialHistogram
+ """
+
+ data_points: builtins.list[ExponentialHistogramDataPoint] = dataclasses.field(default_factory=builtins.list)
+ aggregation_temporality: typing.Union[AggregationTemporality, builtins.int, None] = 0
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.data_points:
+ _result["dataPoints"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.data_points, lambda _v: _v.to_dict())
+ if self.aggregation_temporality:
+ _result["aggregationTemporality"] = builtins.int(self.aggregation_temporality)
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ExponentialHistogram":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ExponentialHistogram instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("dataPoints")) is not None:
+ _args["data_points"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: ExponentialHistogramDataPoint.from_dict(_v), "data_points")
+ if (_value := data.get("aggregationTemporality")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.int, "aggregation_temporality")
+ _args["aggregation_temporality"] = AggregationTemporality(_value)
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ExponentialHistogram":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class Summary:
+ """
+ Generated from protobuf message Summary
+ """
+
+ data_points: builtins.list[SummaryDataPoint] = dataclasses.field(default_factory=builtins.list)
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.data_points:
+ _result["dataPoints"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.data_points, lambda _v: _v.to_dict())
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "Summary":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ Summary instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("dataPoints")) is not None:
+ _args["data_points"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: SummaryDataPoint.from_dict(_v), "data_points")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "Summary":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class NumberDataPoint:
+ """
+ Generated from protobuf message NumberDataPoint
+ """
+
+ attributes: builtins.list[opentelemetry.proto_json.common.v1.common.KeyValue] = dataclasses.field(default_factory=builtins.list)
+ start_time_unix_nano: typing.Optional[builtins.int] = 0
+ time_unix_nano: typing.Optional[builtins.int] = 0
+ as_double: typing.Optional[builtins.float] = None
+ as_int: typing.Optional[builtins.int] = None
+ exemplars: builtins.list[Exemplar] = dataclasses.field(default_factory=builtins.list)
+ flags: typing.Optional[builtins.int] = 0
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.attributes:
+ _result["attributes"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.attributes, lambda _v: _v.to_dict())
+ if self.start_time_unix_nano:
+ _result["startTimeUnixNano"] = opentelemetry.proto_json._otlp_json_utils.encode_int64(self.start_time_unix_nano)
+ if self.time_unix_nano:
+ _result["timeUnixNano"] = opentelemetry.proto_json._otlp_json_utils.encode_int64(self.time_unix_nano)
+ if self.exemplars:
+ _result["exemplars"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.exemplars, lambda _v: _v.to_dict())
+ if self.flags:
+ _result["flags"] = self.flags
+ if self.as_int is not None:
+ _result["asInt"] = opentelemetry.proto_json._otlp_json_utils.encode_int64(self.as_int)
+ elif self.as_double is not None:
+ _result["asDouble"] = opentelemetry.proto_json._otlp_json_utils.encode_float(self.as_double)
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "NumberDataPoint":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ NumberDataPoint instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("attributes")) is not None:
+ _args["attributes"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: opentelemetry.proto_json.common.v1.common.KeyValue.from_dict(_v), "attributes")
+ if (_value := data.get("startTimeUnixNano")) is not None:
+ _args["start_time_unix_nano"] = opentelemetry.proto_json._otlp_json_utils.decode_int64(_value, "start_time_unix_nano")
+ if (_value := data.get("timeUnixNano")) is not None:
+ _args["time_unix_nano"] = opentelemetry.proto_json._otlp_json_utils.decode_int64(_value, "time_unix_nano")
+ if (_value := data.get("exemplars")) is not None:
+ _args["exemplars"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: Exemplar.from_dict(_v), "exemplars")
+ if (_value := data.get("flags")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.int, "flags")
+ _args["flags"] = _value
+ if (_value := data.get("asInt")) is not None:
+ _args["as_int"] = opentelemetry.proto_json._otlp_json_utils.decode_int64(_value, "as_int")
+ elif (_value := data.get("asDouble")) is not None:
+ _args["as_double"] = opentelemetry.proto_json._otlp_json_utils.decode_float(_value, "as_double")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "NumberDataPoint":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class HistogramDataPoint:
+ """
+ Generated from protobuf message HistogramDataPoint
+ """
+
+ attributes: builtins.list[opentelemetry.proto_json.common.v1.common.KeyValue] = dataclasses.field(default_factory=builtins.list)
+ start_time_unix_nano: typing.Optional[builtins.int] = 0
+ time_unix_nano: typing.Optional[builtins.int] = 0
+ count: typing.Optional[builtins.int] = 0
+ sum: typing.Optional[builtins.float] = 0.0
+ bucket_counts: builtins.list[builtins.int] = dataclasses.field(default_factory=builtins.list)
+ explicit_bounds: builtins.list[builtins.float] = dataclasses.field(default_factory=builtins.list)
+ exemplars: builtins.list[Exemplar] = dataclasses.field(default_factory=builtins.list)
+ flags: typing.Optional[builtins.int] = 0
+ min: typing.Optional[builtins.float] = 0.0
+ max: typing.Optional[builtins.float] = 0.0
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.attributes:
+ _result["attributes"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.attributes, lambda _v: _v.to_dict())
+ if self.start_time_unix_nano:
+ _result["startTimeUnixNano"] = opentelemetry.proto_json._otlp_json_utils.encode_int64(self.start_time_unix_nano)
+ if self.time_unix_nano:
+ _result["timeUnixNano"] = opentelemetry.proto_json._otlp_json_utils.encode_int64(self.time_unix_nano)
+ if self.count:
+ _result["count"] = opentelemetry.proto_json._otlp_json_utils.encode_int64(self.count)
+ if self.sum:
+ _result["sum"] = opentelemetry.proto_json._otlp_json_utils.encode_float(self.sum)
+ if self.bucket_counts:
+ _result["bucketCounts"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.bucket_counts, lambda _v: opentelemetry.proto_json._otlp_json_utils.encode_int64(_v))
+ if self.explicit_bounds:
+ _result["explicitBounds"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.explicit_bounds, lambda _v: opentelemetry.proto_json._otlp_json_utils.encode_float(_v))
+ if self.exemplars:
+ _result["exemplars"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.exemplars, lambda _v: _v.to_dict())
+ if self.flags:
+ _result["flags"] = self.flags
+ if self.min:
+ _result["min"] = opentelemetry.proto_json._otlp_json_utils.encode_float(self.min)
+ if self.max:
+ _result["max"] = opentelemetry.proto_json._otlp_json_utils.encode_float(self.max)
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "HistogramDataPoint":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ HistogramDataPoint instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("attributes")) is not None:
+ _args["attributes"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: opentelemetry.proto_json.common.v1.common.KeyValue.from_dict(_v), "attributes")
+ if (_value := data.get("startTimeUnixNano")) is not None:
+ _args["start_time_unix_nano"] = opentelemetry.proto_json._otlp_json_utils.decode_int64(_value, "start_time_unix_nano")
+ if (_value := data.get("timeUnixNano")) is not None:
+ _args["time_unix_nano"] = opentelemetry.proto_json._otlp_json_utils.decode_int64(_value, "time_unix_nano")
+ if (_value := data.get("count")) is not None:
+ _args["count"] = opentelemetry.proto_json._otlp_json_utils.decode_int64(_value, "count")
+ if (_value := data.get("sum")) is not None:
+ _args["sum"] = opentelemetry.proto_json._otlp_json_utils.decode_float(_value, "sum")
+ if (_value := data.get("bucketCounts")) is not None:
+ _args["bucket_counts"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: opentelemetry.proto_json._otlp_json_utils.decode_int64(_v, "bucket_counts"), "bucket_counts")
+ if (_value := data.get("explicitBounds")) is not None:
+ _args["explicit_bounds"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: opentelemetry.proto_json._otlp_json_utils.decode_float(_v, "explicit_bounds"), "explicit_bounds")
+ if (_value := data.get("exemplars")) is not None:
+ _args["exemplars"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: Exemplar.from_dict(_v), "exemplars")
+ if (_value := data.get("flags")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.int, "flags")
+ _args["flags"] = _value
+ if (_value := data.get("min")) is not None:
+ _args["min"] = opentelemetry.proto_json._otlp_json_utils.decode_float(_value, "min")
+ if (_value := data.get("max")) is not None:
+ _args["max"] = opentelemetry.proto_json._otlp_json_utils.decode_float(_value, "max")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "HistogramDataPoint":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class ExponentialHistogramDataPoint:
+ """
+ Generated from protobuf message ExponentialHistogramDataPoint
+ """
+
+ @typing.final
+ @_dataclass
+ class Buckets:
+ """
+ Generated from protobuf message Buckets
+ """
+
+ offset: typing.Optional[builtins.int] = 0
+ bucket_counts: builtins.list[builtins.int] = dataclasses.field(default_factory=builtins.list)
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.offset:
+ _result["offset"] = self.offset
+ if self.bucket_counts:
+ _result["bucketCounts"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.bucket_counts, lambda _v: opentelemetry.proto_json._otlp_json_utils.encode_int64(_v))
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ExponentialHistogramDataPoint.Buckets":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ Buckets instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("offset")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.int, "offset")
+ _args["offset"] = _value
+ if (_value := data.get("bucketCounts")) is not None:
+ _args["bucket_counts"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: opentelemetry.proto_json._otlp_json_utils.decode_int64(_v, "bucket_counts"), "bucket_counts")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ExponentialHistogramDataPoint.Buckets":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+ attributes: builtins.list[opentelemetry.proto_json.common.v1.common.KeyValue] = dataclasses.field(default_factory=builtins.list)
+ start_time_unix_nano: typing.Optional[builtins.int] = 0
+ time_unix_nano: typing.Optional[builtins.int] = 0
+ count: typing.Optional[builtins.int] = 0
+ sum: typing.Optional[builtins.float] = 0.0
+ scale: typing.Optional[builtins.int] = 0
+ zero_count: typing.Optional[builtins.int] = 0
+ positive: typing.Optional[ExponentialHistogramDataPoint.Buckets] = None
+ negative: typing.Optional[ExponentialHistogramDataPoint.Buckets] = None
+ flags: typing.Optional[builtins.int] = 0
+ exemplars: builtins.list[Exemplar] = dataclasses.field(default_factory=builtins.list)
+ min: typing.Optional[builtins.float] = 0.0
+ max: typing.Optional[builtins.float] = 0.0
+ zero_threshold: typing.Optional[builtins.float] = 0.0
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.attributes:
+ _result["attributes"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.attributes, lambda _v: _v.to_dict())
+ if self.start_time_unix_nano:
+ _result["startTimeUnixNano"] = opentelemetry.proto_json._otlp_json_utils.encode_int64(self.start_time_unix_nano)
+ if self.time_unix_nano:
+ _result["timeUnixNano"] = opentelemetry.proto_json._otlp_json_utils.encode_int64(self.time_unix_nano)
+ if self.count:
+ _result["count"] = opentelemetry.proto_json._otlp_json_utils.encode_int64(self.count)
+ if self.sum:
+ _result["sum"] = opentelemetry.proto_json._otlp_json_utils.encode_float(self.sum)
+ if self.scale:
+ _result["scale"] = self.scale
+ if self.zero_count:
+ _result["zeroCount"] = opentelemetry.proto_json._otlp_json_utils.encode_int64(self.zero_count)
+ if self.positive:
+ _result["positive"] = self.positive.to_dict()
+ if self.negative:
+ _result["negative"] = self.negative.to_dict()
+ if self.flags:
+ _result["flags"] = self.flags
+ if self.exemplars:
+ _result["exemplars"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.exemplars, lambda _v: _v.to_dict())
+ if self.min:
+ _result["min"] = opentelemetry.proto_json._otlp_json_utils.encode_float(self.min)
+ if self.max:
+ _result["max"] = opentelemetry.proto_json._otlp_json_utils.encode_float(self.max)
+ if self.zero_threshold:
+ _result["zeroThreshold"] = opentelemetry.proto_json._otlp_json_utils.encode_float(self.zero_threshold)
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ExponentialHistogramDataPoint":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ExponentialHistogramDataPoint instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("attributes")) is not None:
+ _args["attributes"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: opentelemetry.proto_json.common.v1.common.KeyValue.from_dict(_v), "attributes")
+ if (_value := data.get("startTimeUnixNano")) is not None:
+ _args["start_time_unix_nano"] = opentelemetry.proto_json._otlp_json_utils.decode_int64(_value, "start_time_unix_nano")
+ if (_value := data.get("timeUnixNano")) is not None:
+ _args["time_unix_nano"] = opentelemetry.proto_json._otlp_json_utils.decode_int64(_value, "time_unix_nano")
+ if (_value := data.get("count")) is not None:
+ _args["count"] = opentelemetry.proto_json._otlp_json_utils.decode_int64(_value, "count")
+ if (_value := data.get("sum")) is not None:
+ _args["sum"] = opentelemetry.proto_json._otlp_json_utils.decode_float(_value, "sum")
+ if (_value := data.get("scale")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.int, "scale")
+ _args["scale"] = _value
+ if (_value := data.get("zeroCount")) is not None:
+ _args["zero_count"] = opentelemetry.proto_json._otlp_json_utils.decode_int64(_value, "zero_count")
+ if (_value := data.get("positive")) is not None:
+ _args["positive"] = ExponentialHistogramDataPoint.Buckets.from_dict(_value)
+ if (_value := data.get("negative")) is not None:
+ _args["negative"] = ExponentialHistogramDataPoint.Buckets.from_dict(_value)
+ if (_value := data.get("flags")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.int, "flags")
+ _args["flags"] = _value
+ if (_value := data.get("exemplars")) is not None:
+ _args["exemplars"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: Exemplar.from_dict(_v), "exemplars")
+ if (_value := data.get("min")) is not None:
+ _args["min"] = opentelemetry.proto_json._otlp_json_utils.decode_float(_value, "min")
+ if (_value := data.get("max")) is not None:
+ _args["max"] = opentelemetry.proto_json._otlp_json_utils.decode_float(_value, "max")
+ if (_value := data.get("zeroThreshold")) is not None:
+ _args["zero_threshold"] = opentelemetry.proto_json._otlp_json_utils.decode_float(_value, "zero_threshold")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ExponentialHistogramDataPoint":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class SummaryDataPoint:
+ """
+ Generated from protobuf message SummaryDataPoint
+ """
+
+ @typing.final
+ @_dataclass
+ class ValueAtQuantile:
+ """
+ Generated from protobuf message ValueAtQuantile
+ """
+
+ quantile: typing.Optional[builtins.float] = 0.0
+ value: typing.Optional[builtins.float] = 0.0
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.quantile:
+ _result["quantile"] = opentelemetry.proto_json._otlp_json_utils.encode_float(self.quantile)
+ if self.value:
+ _result["value"] = opentelemetry.proto_json._otlp_json_utils.encode_float(self.value)
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "SummaryDataPoint.ValueAtQuantile":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ValueAtQuantile instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("quantile")) is not None:
+ _args["quantile"] = opentelemetry.proto_json._otlp_json_utils.decode_float(_value, "quantile")
+ if (_value := data.get("value")) is not None:
+ _args["value"] = opentelemetry.proto_json._otlp_json_utils.decode_float(_value, "value")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "SummaryDataPoint.ValueAtQuantile":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+ attributes: builtins.list[opentelemetry.proto_json.common.v1.common.KeyValue] = dataclasses.field(default_factory=builtins.list)
+ start_time_unix_nano: typing.Optional[builtins.int] = 0
+ time_unix_nano: typing.Optional[builtins.int] = 0
+ count: typing.Optional[builtins.int] = 0
+ sum: typing.Optional[builtins.float] = 0.0
+ quantile_values: builtins.list[SummaryDataPoint.ValueAtQuantile] = dataclasses.field(default_factory=builtins.list)
+ flags: typing.Optional[builtins.int] = 0
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.attributes:
+ _result["attributes"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.attributes, lambda _v: _v.to_dict())
+ if self.start_time_unix_nano:
+ _result["startTimeUnixNano"] = opentelemetry.proto_json._otlp_json_utils.encode_int64(self.start_time_unix_nano)
+ if self.time_unix_nano:
+ _result["timeUnixNano"] = opentelemetry.proto_json._otlp_json_utils.encode_int64(self.time_unix_nano)
+ if self.count:
+ _result["count"] = opentelemetry.proto_json._otlp_json_utils.encode_int64(self.count)
+ if self.sum:
+ _result["sum"] = opentelemetry.proto_json._otlp_json_utils.encode_float(self.sum)
+ if self.quantile_values:
+ _result["quantileValues"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.quantile_values, lambda _v: _v.to_dict())
+ if self.flags:
+ _result["flags"] = self.flags
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "SummaryDataPoint":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ SummaryDataPoint instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("attributes")) is not None:
+ _args["attributes"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: opentelemetry.proto_json.common.v1.common.KeyValue.from_dict(_v), "attributes")
+ if (_value := data.get("startTimeUnixNano")) is not None:
+ _args["start_time_unix_nano"] = opentelemetry.proto_json._otlp_json_utils.decode_int64(_value, "start_time_unix_nano")
+ if (_value := data.get("timeUnixNano")) is not None:
+ _args["time_unix_nano"] = opentelemetry.proto_json._otlp_json_utils.decode_int64(_value, "time_unix_nano")
+ if (_value := data.get("count")) is not None:
+ _args["count"] = opentelemetry.proto_json._otlp_json_utils.decode_int64(_value, "count")
+ if (_value := data.get("sum")) is not None:
+ _args["sum"] = opentelemetry.proto_json._otlp_json_utils.decode_float(_value, "sum")
+ if (_value := data.get("quantileValues")) is not None:
+ _args["quantile_values"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: SummaryDataPoint.ValueAtQuantile.from_dict(_v), "quantile_values")
+ if (_value := data.get("flags")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.int, "flags")
+ _args["flags"] = _value
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "SummaryDataPoint":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class Exemplar:
+ """
+ Generated from protobuf message Exemplar
+ """
+
+ filtered_attributes: builtins.list[opentelemetry.proto_json.common.v1.common.KeyValue] = dataclasses.field(default_factory=builtins.list)
+ time_unix_nano: typing.Optional[builtins.int] = 0
+ as_double: typing.Optional[builtins.float] = None
+ as_int: typing.Optional[builtins.int] = None
+ span_id: typing.Optional[builtins.bytes] = b""
+ trace_id: typing.Optional[builtins.bytes] = b""
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.filtered_attributes:
+ _result["filteredAttributes"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.filtered_attributes, lambda _v: _v.to_dict())
+ if self.time_unix_nano:
+ _result["timeUnixNano"] = opentelemetry.proto_json._otlp_json_utils.encode_int64(self.time_unix_nano)
+ if self.span_id:
+ _result["spanId"] = opentelemetry.proto_json._otlp_json_utils.encode_hex(self.span_id)
+ if self.trace_id:
+ _result["traceId"] = opentelemetry.proto_json._otlp_json_utils.encode_hex(self.trace_id)
+ if self.as_int is not None:
+ _result["asInt"] = opentelemetry.proto_json._otlp_json_utils.encode_int64(self.as_int)
+ elif self.as_double is not None:
+ _result["asDouble"] = opentelemetry.proto_json._otlp_json_utils.encode_float(self.as_double)
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "Exemplar":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ Exemplar instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("filteredAttributes")) is not None:
+ _args["filtered_attributes"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: opentelemetry.proto_json.common.v1.common.KeyValue.from_dict(_v), "filtered_attributes")
+ if (_value := data.get("timeUnixNano")) is not None:
+ _args["time_unix_nano"] = opentelemetry.proto_json._otlp_json_utils.decode_int64(_value, "time_unix_nano")
+ if (_value := data.get("spanId")) is not None:
+ _args["span_id"] = opentelemetry.proto_json._otlp_json_utils.decode_hex(_value, "span_id")
+ if (_value := data.get("traceId")) is not None:
+ _args["trace_id"] = opentelemetry.proto_json._otlp_json_utils.decode_hex(_value, "trace_id")
+ if (_value := data.get("asInt")) is not None:
+ _args["as_int"] = opentelemetry.proto_json._otlp_json_utils.decode_int64(_value, "as_int")
+ elif (_value := data.get("asDouble")) is not None:
+ _args["as_double"] = opentelemetry.proto_json._otlp_json_utils.decode_float(_value, "as_double")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "Exemplar":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
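
The generated metric data-point classes above all follow the same pattern: to_dict()/to_json() emit only non-default fields with lowerCamelCase keys, 64-bit integers as decimal strings, span/trace IDs as hex, and oneof members (asInt/asDouble on Exemplar) mutually exclusively; from_dict()/from_json() reverse that mapping. A minimal round-trip sketch, assuming these classes live in opentelemetry.proto_json.metrics.v1.metrics (the module path is inferred from the resource and profiles layouts added in this patch):

# Illustrative only; the metrics module path is assumed from the package layout.
from opentelemetry.proto_json.metrics.v1.metrics import Exemplar, HistogramDataPoint

point = HistogramDataPoint(
    time_unix_nano=1_700_000_000_000_000_000,
    count=3,
    sum=6.0,
    bucket_counts=[1, 2],
    explicit_bounds=[5.0],
    exemplars=[Exemplar(time_unix_nano=1_700_000_000_000_000_000, as_int=4)],
)
encoded = point.to_json()  # 64-bit fields such as timeUnixNano/count serialize as decimal strings
assert HistogramDataPoint.from_json(encoded) == point
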
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/profiles/__init__.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/profiles/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/profiles/v1development/__init__.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/profiles/v1development/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/profiles/v1development/profiles.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/profiles/v1development/profiles.py
new file mode 100644
index 0000000000..02d8efa1d8
--- /dev/null
+++ b/opentelemetry-proto-json/src/opentelemetry/proto_json/profiles/v1development/profiles.py
@@ -0,0 +1,1137 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# AUTO-GENERATED from "opentelemetry/proto/profiles/v1development/profiles.proto"
+# DO NOT EDIT MANUALLY
+
+from __future__ import annotations
+
+import builtins
+import dataclasses
+import functools
+import json
+import sys
+import typing
+
+if sys.version_info >= (3, 10):
+ _dataclass = functools.partial(dataclasses.dataclass, slots=True)
+else:
+ _dataclass = dataclasses.dataclass
+
+import opentelemetry.proto_json._otlp_json_utils
+import opentelemetry.proto_json.common.v1.common
+import opentelemetry.proto_json.resource.v1.resource
+
+
+@typing.final
+@_dataclass
+class ProfilesDictionary:
+ """
+ Generated from protobuf message ProfilesDictionary
+ """
+
+ mapping_table: builtins.list[Mapping] = dataclasses.field(default_factory=builtins.list)
+ location_table: builtins.list[Location] = dataclasses.field(default_factory=builtins.list)
+ function_table: builtins.list[Function] = dataclasses.field(default_factory=builtins.list)
+ link_table: builtins.list[Link] = dataclasses.field(default_factory=builtins.list)
+ string_table: builtins.list[builtins.str] = dataclasses.field(default_factory=builtins.list)
+ attribute_table: builtins.list[KeyValueAndUnit] = dataclasses.field(default_factory=builtins.list)
+ stack_table: builtins.list[Stack] = dataclasses.field(default_factory=builtins.list)
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.mapping_table:
+ _result["mappingTable"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.mapping_table, lambda _v: _v.to_dict())
+ if self.location_table:
+ _result["locationTable"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.location_table, lambda _v: _v.to_dict())
+ if self.function_table:
+ _result["functionTable"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.function_table, lambda _v: _v.to_dict())
+ if self.link_table:
+ _result["linkTable"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.link_table, lambda _v: _v.to_dict())
+ if self.string_table:
+ _result["stringTable"] = self.string_table
+ if self.attribute_table:
+ _result["attributeTable"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.attribute_table, lambda _v: _v.to_dict())
+ if self.stack_table:
+ _result["stackTable"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.stack_table, lambda _v: _v.to_dict())
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ProfilesDictionary":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ProfilesDictionary instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("mappingTable")) is not None:
+ _args["mapping_table"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: Mapping.from_dict(_v), "mapping_table")
+ if (_value := data.get("locationTable")) is not None:
+ _args["location_table"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: Location.from_dict(_v), "location_table")
+ if (_value := data.get("functionTable")) is not None:
+ _args["function_table"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: Function.from_dict(_v), "function_table")
+ if (_value := data.get("linkTable")) is not None:
+ _args["link_table"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: Link.from_dict(_v), "link_table")
+ if (_value := data.get("stringTable")) is not None:
+ _args["string_table"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: _v, "string_table")
+ if (_value := data.get("attributeTable")) is not None:
+ _args["attribute_table"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: KeyValueAndUnit.from_dict(_v), "attribute_table")
+ if (_value := data.get("stackTable")) is not None:
+ _args["stack_table"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: Stack.from_dict(_v), "stack_table")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ProfilesDictionary":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class ProfilesData:
+ """
+ Generated from protobuf message ProfilesData
+ """
+
+ resource_profiles: builtins.list[ResourceProfiles] = dataclasses.field(default_factory=builtins.list)
+ dictionary: typing.Optional[ProfilesDictionary] = None
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.resource_profiles:
+ _result["resourceProfiles"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.resource_profiles, lambda _v: _v.to_dict())
+ if self.dictionary:
+ _result["dictionary"] = self.dictionary.to_dict()
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ProfilesData":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ProfilesData instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("resourceProfiles")) is not None:
+ _args["resource_profiles"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: ResourceProfiles.from_dict(_v), "resource_profiles")
+ if (_value := data.get("dictionary")) is not None:
+ _args["dictionary"] = ProfilesDictionary.from_dict(_value)
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ProfilesData":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class ResourceProfiles:
+ """
+ Generated from protobuf message ResourceProfiles
+ """
+
+ resource: typing.Optional[opentelemetry.proto_json.resource.v1.resource.Resource] = None
+ scope_profiles: builtins.list[ScopeProfiles] = dataclasses.field(default_factory=builtins.list)
+ schema_url: typing.Optional[builtins.str] = ""
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.resource:
+ _result["resource"] = self.resource.to_dict()
+ if self.scope_profiles:
+ _result["scopeProfiles"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.scope_profiles, lambda _v: _v.to_dict())
+ if self.schema_url:
+ _result["schemaUrl"] = self.schema_url
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ResourceProfiles":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ResourceProfiles instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("resource")) is not None:
+ _args["resource"] = opentelemetry.proto_json.resource.v1.resource.Resource.from_dict(_value)
+ if (_value := data.get("scopeProfiles")) is not None:
+ _args["scope_profiles"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: ScopeProfiles.from_dict(_v), "scope_profiles")
+ if (_value := data.get("schemaUrl")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.str, "schema_url")
+ _args["schema_url"] = _value
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ResourceProfiles":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class ScopeProfiles:
+ """
+ Generated from protobuf message ScopeProfiles
+ """
+
+ scope: typing.Optional[opentelemetry.proto_json.common.v1.common.InstrumentationScope] = None
+ profiles: builtins.list[Profile] = dataclasses.field(default_factory=builtins.list)
+ schema_url: typing.Optional[builtins.str] = ""
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.scope:
+ _result["scope"] = self.scope.to_dict()
+ if self.profiles:
+ _result["profiles"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.profiles, lambda _v: _v.to_dict())
+ if self.schema_url:
+ _result["schemaUrl"] = self.schema_url
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ScopeProfiles":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ScopeProfiles instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("scope")) is not None:
+ _args["scope"] = opentelemetry.proto_json.common.v1.common.InstrumentationScope.from_dict(_value)
+ if (_value := data.get("profiles")) is not None:
+ _args["profiles"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: Profile.from_dict(_v), "profiles")
+ if (_value := data.get("schemaUrl")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.str, "schema_url")
+ _args["schema_url"] = _value
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ScopeProfiles":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class Profile:
+ """
+ Generated from protobuf message Profile
+ """
+
+ sample_type: typing.Optional[ValueType] = None
+ samples: builtins.list[Sample] = dataclasses.field(default_factory=builtins.list)
+ time_unix_nano: typing.Optional[builtins.int] = 0
+ duration_nano: typing.Optional[builtins.int] = 0
+ period_type: typing.Optional[ValueType] = None
+ period: typing.Optional[builtins.int] = 0
+ profile_id: typing.Optional[builtins.bytes] = b""
+ dropped_attributes_count: typing.Optional[builtins.int] = 0
+ original_payload_format: typing.Optional[builtins.str] = ""
+ original_payload: typing.Optional[builtins.bytes] = b""
+ attribute_indices: builtins.list[builtins.int] = dataclasses.field(default_factory=builtins.list)
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.sample_type:
+ _result["sampleType"] = self.sample_type.to_dict()
+ if self.samples:
+ _result["samples"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.samples, lambda _v: _v.to_dict())
+ if self.time_unix_nano:
+ _result["timeUnixNano"] = opentelemetry.proto_json._otlp_json_utils.encode_int64(self.time_unix_nano)
+ if self.duration_nano:
+ _result["durationNano"] = opentelemetry.proto_json._otlp_json_utils.encode_int64(self.duration_nano)
+ if self.period_type:
+ _result["periodType"] = self.period_type.to_dict()
+ if self.period:
+ _result["period"] = opentelemetry.proto_json._otlp_json_utils.encode_int64(self.period)
+ if self.profile_id:
+ _result["profileId"] = opentelemetry.proto_json._otlp_json_utils.encode_base64(self.profile_id)
+ if self.dropped_attributes_count:
+ _result["droppedAttributesCount"] = self.dropped_attributes_count
+ if self.original_payload_format:
+ _result["originalPayloadFormat"] = self.original_payload_format
+ if self.original_payload:
+ _result["originalPayload"] = opentelemetry.proto_json._otlp_json_utils.encode_base64(self.original_payload)
+ if self.attribute_indices:
+ _result["attributeIndices"] = self.attribute_indices
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "Profile":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ Profile instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("sampleType")) is not None:
+ _args["sample_type"] = ValueType.from_dict(_value)
+ if (_value := data.get("samples")) is not None:
+ _args["samples"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: Sample.from_dict(_v), "samples")
+ if (_value := data.get("timeUnixNano")) is not None:
+ _args["time_unix_nano"] = opentelemetry.proto_json._otlp_json_utils.decode_int64(_value, "time_unix_nano")
+ if (_value := data.get("durationNano")) is not None:
+ _args["duration_nano"] = opentelemetry.proto_json._otlp_json_utils.decode_int64(_value, "duration_nano")
+ if (_value := data.get("periodType")) is not None:
+ _args["period_type"] = ValueType.from_dict(_value)
+ if (_value := data.get("period")) is not None:
+ _args["period"] = opentelemetry.proto_json._otlp_json_utils.decode_int64(_value, "period")
+ if (_value := data.get("profileId")) is not None:
+ _args["profile_id"] = opentelemetry.proto_json._otlp_json_utils.decode_base64(_value, "profile_id")
+ if (_value := data.get("droppedAttributesCount")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.int, "dropped_attributes_count")
+ _args["dropped_attributes_count"] = _value
+ if (_value := data.get("originalPayloadFormat")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.str, "original_payload_format")
+ _args["original_payload_format"] = _value
+ if (_value := data.get("originalPayload")) is not None:
+ _args["original_payload"] = opentelemetry.proto_json._otlp_json_utils.decode_base64(_value, "original_payload")
+ if (_value := data.get("attributeIndices")) is not None:
+ _args["attribute_indices"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: _v, "attribute_indices")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "Profile":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class Link:
+ """
+ Generated from protobuf message Link
+ """
+
+ trace_id: typing.Optional[builtins.bytes] = b""
+ span_id: typing.Optional[builtins.bytes] = b""
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.trace_id:
+ _result["traceId"] = opentelemetry.proto_json._otlp_json_utils.encode_hex(self.trace_id)
+ if self.span_id:
+ _result["spanId"] = opentelemetry.proto_json._otlp_json_utils.encode_hex(self.span_id)
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "Link":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ Link instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("traceId")) is not None:
+ _args["trace_id"] = opentelemetry.proto_json._otlp_json_utils.decode_hex(_value, "trace_id")
+ if (_value := data.get("spanId")) is not None:
+ _args["span_id"] = opentelemetry.proto_json._otlp_json_utils.decode_hex(_value, "span_id")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "Link":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class ValueType:
+ """
+ Generated from protobuf message ValueType
+ """
+
+ type_strindex: typing.Optional[builtins.int] = 0
+ unit_strindex: typing.Optional[builtins.int] = 0
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.type_strindex:
+ _result["typeStrindex"] = self.type_strindex
+ if self.unit_strindex:
+ _result["unitStrindex"] = self.unit_strindex
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ValueType":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ValueType instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("typeStrindex")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.int, "type_strindex")
+ _args["type_strindex"] = _value
+ if (_value := data.get("unitStrindex")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.int, "unit_strindex")
+ _args["unit_strindex"] = _value
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ValueType":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class Sample:
+ """
+ Generated from protobuf message Sample
+ """
+
+ stack_index: typing.Optional[builtins.int] = 0
+ values: builtins.list[builtins.int] = dataclasses.field(default_factory=builtins.list)
+ attribute_indices: builtins.list[builtins.int] = dataclasses.field(default_factory=builtins.list)
+ link_index: typing.Optional[builtins.int] = 0
+ timestamps_unix_nano: builtins.list[builtins.int] = dataclasses.field(default_factory=builtins.list)
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.stack_index:
+ _result["stackIndex"] = self.stack_index
+ if self.values:
+ _result["values"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.values, lambda _v: opentelemetry.proto_json._otlp_json_utils.encode_int64(_v))
+ if self.attribute_indices:
+ _result["attributeIndices"] = self.attribute_indices
+ if self.link_index:
+ _result["linkIndex"] = self.link_index
+ if self.timestamps_unix_nano:
+ _result["timestampsUnixNano"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.timestamps_unix_nano, lambda _v: opentelemetry.proto_json._otlp_json_utils.encode_int64(_v))
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "Sample":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ Sample instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("stackIndex")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.int, "stack_index")
+ _args["stack_index"] = _value
+ if (_value := data.get("values")) is not None:
+ _args["values"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: opentelemetry.proto_json._otlp_json_utils.decode_int64(_v, "values"), "values")
+ if (_value := data.get("attributeIndices")) is not None:
+ _args["attribute_indices"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: _v, "attribute_indices")
+ if (_value := data.get("linkIndex")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.int, "link_index")
+ _args["link_index"] = _value
+ if (_value := data.get("timestampsUnixNano")) is not None:
+ _args["timestamps_unix_nano"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: opentelemetry.proto_json._otlp_json_utils.decode_int64(_v, "timestamps_unix_nano"), "timestamps_unix_nano")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "Sample":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class Mapping:
+ """
+ Generated from protobuf message Mapping
+ """
+
+ memory_start: typing.Optional[builtins.int] = 0
+ memory_limit: typing.Optional[builtins.int] = 0
+ file_offset: typing.Optional[builtins.int] = 0
+ filename_strindex: typing.Optional[builtins.int] = 0
+ attribute_indices: builtins.list[builtins.int] = dataclasses.field(default_factory=builtins.list)
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.memory_start:
+ _result["memoryStart"] = opentelemetry.proto_json._otlp_json_utils.encode_int64(self.memory_start)
+ if self.memory_limit:
+ _result["memoryLimit"] = opentelemetry.proto_json._otlp_json_utils.encode_int64(self.memory_limit)
+ if self.file_offset:
+ _result["fileOffset"] = opentelemetry.proto_json._otlp_json_utils.encode_int64(self.file_offset)
+ if self.filename_strindex:
+ _result["filenameStrindex"] = self.filename_strindex
+ if self.attribute_indices:
+ _result["attributeIndices"] = self.attribute_indices
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "Mapping":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ Mapping instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("memoryStart")) is not None:
+ _args["memory_start"] = opentelemetry.proto_json._otlp_json_utils.decode_int64(_value, "memory_start")
+ if (_value := data.get("memoryLimit")) is not None:
+ _args["memory_limit"] = opentelemetry.proto_json._otlp_json_utils.decode_int64(_value, "memory_limit")
+ if (_value := data.get("fileOffset")) is not None:
+ _args["file_offset"] = opentelemetry.proto_json._otlp_json_utils.decode_int64(_value, "file_offset")
+ if (_value := data.get("filenameStrindex")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.int, "filename_strindex")
+ _args["filename_strindex"] = _value
+ if (_value := data.get("attributeIndices")) is not None:
+ _args["attribute_indices"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: _v, "attribute_indices")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "Mapping":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class Stack:
+ """
+ Generated from protobuf message Stack
+ """
+
+ location_indices: builtins.list[builtins.int] = dataclasses.field(default_factory=builtins.list)
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.location_indices:
+ _result["locationIndices"] = self.location_indices
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "Stack":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ Stack instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("locationIndices")) is not None:
+ _args["location_indices"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: _v, "location_indices")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "Stack":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class Location:
+ """
+ Generated from protobuf message Location
+ """
+
+ mapping_index: typing.Optional[builtins.int] = 0
+ address: typing.Optional[builtins.int] = 0
+ lines: builtins.list[Line] = dataclasses.field(default_factory=builtins.list)
+ attribute_indices: builtins.list[builtins.int] = dataclasses.field(default_factory=builtins.list)
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.mapping_index:
+ _result["mappingIndex"] = self.mapping_index
+ if self.address:
+ _result["address"] = opentelemetry.proto_json._otlp_json_utils.encode_int64(self.address)
+ if self.lines:
+ _result["lines"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.lines, lambda _v: _v.to_dict())
+ if self.attribute_indices:
+ _result["attributeIndices"] = self.attribute_indices
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "Location":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ Location instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("mappingIndex")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.int, "mapping_index")
+ _args["mapping_index"] = _value
+ if (_value := data.get("address")) is not None:
+ _args["address"] = opentelemetry.proto_json._otlp_json_utils.decode_int64(_value, "address")
+ if (_value := data.get("lines")) is not None:
+ _args["lines"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: Line.from_dict(_v), "lines")
+ if (_value := data.get("attributeIndices")) is not None:
+ _args["attribute_indices"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: _v, "attribute_indices")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "Location":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class Line:
+ """
+ Generated from protobuf message Line
+ """
+
+ function_index: typing.Optional[builtins.int] = 0
+ line: typing.Optional[builtins.int] = 0
+ column: typing.Optional[builtins.int] = 0
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.function_index:
+ _result["functionIndex"] = self.function_index
+ if self.line:
+ _result["line"] = opentelemetry.proto_json._otlp_json_utils.encode_int64(self.line)
+ if self.column:
+ _result["column"] = opentelemetry.proto_json._otlp_json_utils.encode_int64(self.column)
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "Line":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ Line instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("functionIndex")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.int, "function_index")
+ _args["function_index"] = _value
+ if (_value := data.get("line")) is not None:
+ _args["line"] = opentelemetry.proto_json._otlp_json_utils.decode_int64(_value, "line")
+ if (_value := data.get("column")) is not None:
+ _args["column"] = opentelemetry.proto_json._otlp_json_utils.decode_int64(_value, "column")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "Line":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class Function:
+ """
+ Generated from protobuf message Function
+ """
+
+ name_strindex: typing.Optional[builtins.int] = 0
+ system_name_strindex: typing.Optional[builtins.int] = 0
+ filename_strindex: typing.Optional[builtins.int] = 0
+ start_line: typing.Optional[builtins.int] = 0
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.name_strindex:
+ _result["nameStrindex"] = self.name_strindex
+ if self.system_name_strindex:
+ _result["systemNameStrindex"] = self.system_name_strindex
+ if self.filename_strindex:
+ _result["filenameStrindex"] = self.filename_strindex
+ if self.start_line:
+ _result["startLine"] = opentelemetry.proto_json._otlp_json_utils.encode_int64(self.start_line)
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "Function":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ Function instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("nameStrindex")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.int, "name_strindex")
+ _args["name_strindex"] = _value
+ if (_value := data.get("systemNameStrindex")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.int, "system_name_strindex")
+ _args["system_name_strindex"] = _value
+ if (_value := data.get("filenameStrindex")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.int, "filename_strindex")
+ _args["filename_strindex"] = _value
+ if (_value := data.get("startLine")) is not None:
+ _args["start_line"] = opentelemetry.proto_json._otlp_json_utils.decode_int64(_value, "start_line")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "Function":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class KeyValueAndUnit:
+ """
+ Generated from protobuf message KeyValueAndUnit
+ """
+
+ key_strindex: typing.Optional[builtins.int] = 0
+ value: typing.Optional[opentelemetry.proto_json.common.v1.common.AnyValue] = None
+ unit_strindex: typing.Optional[builtins.int] = 0
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.key_strindex:
+ _result["keyStrindex"] = self.key_strindex
+ if self.value:
+ _result["value"] = self.value.to_dict()
+ if self.unit_strindex:
+ _result["unitStrindex"] = self.unit_strindex
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "KeyValueAndUnit":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ KeyValueAndUnit instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("keyStrindex")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.int, "key_strindex")
+ _args["key_strindex"] = _value
+ if (_value := data.get("value")) is not None:
+ _args["value"] = opentelemetry.proto_json.common.v1.common.AnyValue.from_dict(_value)
+ if (_value := data.get("unitStrindex")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.int, "unit_strindex")
+ _args["unit_strindex"] = _value
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "KeyValueAndUnit":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
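
The profiles model above is index-based: strings, mappings, functions, locations, and stacks are stored once in ProfilesDictionary and referenced everywhere else by list position (the *_strindex, *_index, and *_indices fields). A minimal sketch of wiring a single sample together and round-tripping it through the OTLP JSON encoding; the concrete values are purely illustrative:

from opentelemetry.proto_json.profiles.v1development.profiles import (
    Function, Line, Location, Profile, ProfilesData, ProfilesDictionary,
    ResourceProfiles, Sample, ScopeProfiles, Stack, ValueType,
)

dictionary = ProfilesDictionary(
    string_table=["", "samples", "count", "main"],  # by convention index 0 is the empty string
    function_table=[Function(name_strindex=3)],     # -> "main"
    location_table=[Location(lines=[Line(function_index=0, line=42)])],
    stack_table=[Stack(location_indices=[0])],
)
profile = Profile(
    sample_type=ValueType(type_strindex=1, unit_strindex=2),  # -> "samples" / "count"
    samples=[Sample(stack_index=0, values=[1])],
)
data = ProfilesData(
    resource_profiles=[ResourceProfiles(scope_profiles=[ScopeProfiles(profiles=[profile])])],
    dictionary=dictionary,
)
assert ProfilesData.from_json(data.to_json()) == data
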
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/py.typed b/opentelemetry-proto-json/src/opentelemetry/proto_json/py.typed
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/resource/__init__.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/resource/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/resource/v1/__init__.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/resource/v1/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/resource/v1/resource.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/resource/v1/resource.py
new file mode 100644
index 0000000000..bc80bf49aa
--- /dev/null
+++ b/opentelemetry-proto-json/src/opentelemetry/proto_json/resource/v1/resource.py
@@ -0,0 +1,107 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# AUTO-GENERATED from "opentelemetry/proto/resource/v1/resource.proto"
+# DO NOT EDIT MANUALLY
+
+from __future__ import annotations
+
+import builtins
+import dataclasses
+import functools
+import json
+import sys
+import typing
+
+if sys.version_info >= (3, 10):
+ _dataclass = functools.partial(dataclasses.dataclass, slots=True)
+else:
+ _dataclass = dataclasses.dataclass
+
+import opentelemetry.proto_json._otlp_json_utils
+import opentelemetry.proto_json.common.v1.common
+
+
+@typing.final
+@_dataclass
+class Resource:
+ """
+ Generated from protobuf message Resource
+ """
+
+ attributes: builtins.list[opentelemetry.proto_json.common.v1.common.KeyValue] = dataclasses.field(default_factory=builtins.list)
+ dropped_attributes_count: typing.Optional[builtins.int] = 0
+ entity_refs: builtins.list[opentelemetry.proto_json.common.v1.common.EntityRef] = dataclasses.field(default_factory=builtins.list)
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.attributes:
+ _result["attributes"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.attributes, lambda _v: _v.to_dict())
+ if self.dropped_attributes_count:
+ _result["droppedAttributesCount"] = self.dropped_attributes_count
+ if self.entity_refs:
+ _result["entityRefs"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.entity_refs, lambda _v: _v.to_dict())
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "Resource":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ Resource instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("attributes")) is not None:
+ _args["attributes"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: opentelemetry.proto_json.common.v1.common.KeyValue.from_dict(_v), "attributes")
+ if (_value := data.get("droppedAttributesCount")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.int, "dropped_attributes_count")
+ _args["dropped_attributes_count"] = _value
+ if (_value := data.get("entityRefs")) is not None:
+ _args["entity_refs"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: opentelemetry.proto_json.common.v1.common.EntityRef.from_dict(_v), "entity_refs")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "Resource":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/trace/__init__.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/trace/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/trace/v1/__init__.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/trace/v1/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/trace/v1/trace.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/trace/v1/trace.py
new file mode 100644
index 0000000000..a1fa28f18e
--- /dev/null
+++ b/opentelemetry-proto-json/src/opentelemetry/proto_json/trace/v1/trace.py
@@ -0,0 +1,664 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# AUTO-GENERATED from "opentelemetry/proto/trace/v1/trace.proto"
+# DO NOT EDIT MANUALLY
+
+from __future__ import annotations
+
+import builtins
+import dataclasses
+import enum
+import functools
+import json
+import sys
+import typing
+
+if sys.version_info >= (3, 10):
+ _dataclass = functools.partial(dataclasses.dataclass, slots=True)
+else:
+ _dataclass = dataclasses.dataclass
+
+import opentelemetry.proto_json._otlp_json_utils
+import opentelemetry.proto_json.common.v1.common
+import opentelemetry.proto_json.resource.v1.resource
+
+
+@typing.final
+class SpanFlags(enum.IntEnum):
+ """
+ Generated from protobuf enum SpanFlags
+ """
+
+ SPAN_FLAGS_DO_NOT_USE = 0
+ SPAN_FLAGS_TRACE_FLAGS_MASK = 255
+ SPAN_FLAGS_CONTEXT_HAS_IS_REMOTE_MASK = 256
+ SPAN_FLAGS_CONTEXT_IS_REMOTE_MASK = 512
+
+@typing.final
+@_dataclass
+class TracesData:
+ """
+ Generated from protobuf message TracesData
+ """
+
+ resource_spans: builtins.list[ResourceSpans] = dataclasses.field(default_factory=builtins.list)
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.resource_spans:
+ _result["resourceSpans"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.resource_spans, lambda _v: _v.to_dict())
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "TracesData":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ TracesData instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("resourceSpans")) is not None:
+ _args["resource_spans"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: ResourceSpans.from_dict(_v), "resource_spans")
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "TracesData":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class ResourceSpans:
+ """
+ Generated from protobuf message ResourceSpans
+ """
+
+ resource: typing.Optional[opentelemetry.proto_json.resource.v1.resource.Resource] = None
+ scope_spans: builtins.list[ScopeSpans] = dataclasses.field(default_factory=builtins.list)
+ schema_url: typing.Optional[builtins.str] = ""
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.resource:
+ _result["resource"] = self.resource.to_dict()
+ if self.scope_spans:
+ _result["scopeSpans"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.scope_spans, lambda _v: _v.to_dict())
+ if self.schema_url:
+ _result["schemaUrl"] = self.schema_url
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ResourceSpans":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ResourceSpans instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("resource")) is not None:
+ _args["resource"] = opentelemetry.proto_json.resource.v1.resource.Resource.from_dict(_value)
+ if (_value := data.get("scopeSpans")) is not None:
+ _args["scope_spans"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: ScopeSpans.from_dict(_v), "scope_spans")
+ if (_value := data.get("schemaUrl")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.str, "schema_url")
+ _args["schema_url"] = _value
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ResourceSpans":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class ScopeSpans:
+ """
+ Generated from protobuf message ScopeSpans
+ """
+
+ scope: typing.Optional[opentelemetry.proto_json.common.v1.common.InstrumentationScope] = None
+ spans: builtins.list[Span] = dataclasses.field(default_factory=builtins.list)
+ schema_url: typing.Optional[builtins.str] = ""
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.scope:
+ _result["scope"] = self.scope.to_dict()
+ if self.spans:
+ _result["spans"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.spans, lambda _v: _v.to_dict())
+ if self.schema_url:
+ _result["schemaUrl"] = self.schema_url
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "ScopeSpans":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ ScopeSpans instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("scope")) is not None:
+ _args["scope"] = opentelemetry.proto_json.common.v1.common.InstrumentationScope.from_dict(_value)
+ if (_value := data.get("spans")) is not None:
+ _args["spans"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: Span.from_dict(_v), "spans")
+ if (_value := data.get("schemaUrl")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.str, "schema_url")
+ _args["schema_url"] = _value
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "ScopeSpans":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class Span:
+ """
+ Generated from protobuf message Span
+ """
+
+ @typing.final
+ class SpanKind(enum.IntEnum):
+ """
+ Generated from protobuf enum SpanKind
+ """
+
+ SPAN_KIND_UNSPECIFIED = 0
+ SPAN_KIND_INTERNAL = 1
+ SPAN_KIND_SERVER = 2
+ SPAN_KIND_CLIENT = 3
+ SPAN_KIND_PRODUCER = 4
+ SPAN_KIND_CONSUMER = 5
+
+ @typing.final
+ @_dataclass
+ class Event:
+ """
+ Generated from protobuf message Event
+ """
+
+ time_unix_nano: typing.Optional[builtins.int] = 0
+ name: typing.Optional[builtins.str] = ""
+ attributes: builtins.list[opentelemetry.proto_json.common.v1.common.KeyValue] = dataclasses.field(default_factory=builtins.list)
+ dropped_attributes_count: typing.Optional[builtins.int] = 0
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.time_unix_nano:
+ _result["timeUnixNano"] = opentelemetry.proto_json._otlp_json_utils.encode_int64(self.time_unix_nano)
+ if self.name:
+ _result["name"] = self.name
+ if self.attributes:
+ _result["attributes"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.attributes, lambda _v: _v.to_dict())
+ if self.dropped_attributes_count:
+ _result["droppedAttributesCount"] = self.dropped_attributes_count
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "Span.Event":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ Event instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("timeUnixNano")) is not None:
+ _args["time_unix_nano"] = opentelemetry.proto_json._otlp_json_utils.decode_int64(_value, "time_unix_nano")
+ if (_value := data.get("name")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.str, "name")
+ _args["name"] = _value
+ if (_value := data.get("attributes")) is not None:
+ _args["attributes"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: opentelemetry.proto_json.common.v1.common.KeyValue.from_dict(_v), "attributes")
+ if (_value := data.get("droppedAttributesCount")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.int, "dropped_attributes_count")
+ _args["dropped_attributes_count"] = _value
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "Span.Event":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+ @typing.final
+ @_dataclass
+ class Link:
+ """
+ Generated from protobuf message Link
+ """
+
+ trace_id: typing.Optional[builtins.bytes] = b""
+ span_id: typing.Optional[builtins.bytes] = b""
+ trace_state: typing.Optional[builtins.str] = ""
+ attributes: builtins.list[opentelemetry.proto_json.common.v1.common.KeyValue] = dataclasses.field(default_factory=builtins.list)
+ dropped_attributes_count: typing.Optional[builtins.int] = 0
+ flags: typing.Optional[builtins.int] = 0
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.trace_id:
+ _result["traceId"] = opentelemetry.proto_json._otlp_json_utils.encode_hex(self.trace_id)
+ if self.span_id:
+ _result["spanId"] = opentelemetry.proto_json._otlp_json_utils.encode_hex(self.span_id)
+ if self.trace_state:
+ _result["traceState"] = self.trace_state
+ if self.attributes:
+ _result["attributes"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.attributes, lambda _v: _v.to_dict())
+ if self.dropped_attributes_count:
+ _result["droppedAttributesCount"] = self.dropped_attributes_count
+ if self.flags:
+ _result["flags"] = self.flags
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "Span.Link":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ Link instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("traceId")) is not None:
+ _args["trace_id"] = opentelemetry.proto_json._otlp_json_utils.decode_hex(_value, "trace_id")
+ if (_value := data.get("spanId")) is not None:
+ _args["span_id"] = opentelemetry.proto_json._otlp_json_utils.decode_hex(_value, "span_id")
+ if (_value := data.get("traceState")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.str, "trace_state")
+ _args["trace_state"] = _value
+ if (_value := data.get("attributes")) is not None:
+ _args["attributes"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: opentelemetry.proto_json.common.v1.common.KeyValue.from_dict(_v), "attributes")
+ if (_value := data.get("droppedAttributesCount")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.int, "dropped_attributes_count")
+ _args["dropped_attributes_count"] = _value
+ if (_value := data.get("flags")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.int, "flags")
+ _args["flags"] = _value
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "Span.Link":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+ trace_id: typing.Optional[builtins.bytes] = b""
+ span_id: typing.Optional[builtins.bytes] = b""
+ trace_state: typing.Optional[builtins.str] = ""
+ parent_span_id: typing.Optional[builtins.bytes] = b""
+ flags: typing.Optional[builtins.int] = 0
+ name: typing.Optional[builtins.str] = ""
+ kind: typing.Union[Span.SpanKind, builtins.int, None] = 0
+ start_time_unix_nano: typing.Optional[builtins.int] = 0
+ end_time_unix_nano: typing.Optional[builtins.int] = 0
+ attributes: builtins.list[opentelemetry.proto_json.common.v1.common.KeyValue] = dataclasses.field(default_factory=builtins.list)
+ dropped_attributes_count: typing.Optional[builtins.int] = 0
+ events: builtins.list[Span.Event] = dataclasses.field(default_factory=builtins.list)
+ dropped_events_count: typing.Optional[builtins.int] = 0
+ links: builtins.list[Span.Link] = dataclasses.field(default_factory=builtins.list)
+ dropped_links_count: typing.Optional[builtins.int] = 0
+ status: typing.Optional[Status] = None
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.trace_id:
+ _result["traceId"] = opentelemetry.proto_json._otlp_json_utils.encode_hex(self.trace_id)
+ if self.span_id:
+ _result["spanId"] = opentelemetry.proto_json._otlp_json_utils.encode_hex(self.span_id)
+ if self.trace_state:
+ _result["traceState"] = self.trace_state
+ if self.parent_span_id:
+ _result["parentSpanId"] = opentelemetry.proto_json._otlp_json_utils.encode_hex(self.parent_span_id)
+ if self.flags:
+ _result["flags"] = self.flags
+ if self.name:
+ _result["name"] = self.name
+ if self.kind:
+ _result["kind"] = builtins.int(self.kind)
+ if self.start_time_unix_nano:
+ _result["startTimeUnixNano"] = opentelemetry.proto_json._otlp_json_utils.encode_int64(self.start_time_unix_nano)
+ if self.end_time_unix_nano:
+ _result["endTimeUnixNano"] = opentelemetry.proto_json._otlp_json_utils.encode_int64(self.end_time_unix_nano)
+ if self.attributes:
+ _result["attributes"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.attributes, lambda _v: _v.to_dict())
+ if self.dropped_attributes_count:
+ _result["droppedAttributesCount"] = self.dropped_attributes_count
+ if self.events:
+ _result["events"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.events, lambda _v: _v.to_dict())
+ if self.dropped_events_count:
+ _result["droppedEventsCount"] = self.dropped_events_count
+ if self.links:
+ _result["links"] = opentelemetry.proto_json._otlp_json_utils.encode_repeated(self.links, lambda _v: _v.to_dict())
+ if self.dropped_links_count:
+ _result["droppedLinksCount"] = self.dropped_links_count
+ if self.status:
+ _result["status"] = self.status.to_dict()
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "Span":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ Span instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("traceId")) is not None:
+ _args["trace_id"] = opentelemetry.proto_json._otlp_json_utils.decode_hex(_value, "trace_id")
+ if (_value := data.get("spanId")) is not None:
+ _args["span_id"] = opentelemetry.proto_json._otlp_json_utils.decode_hex(_value, "span_id")
+ if (_value := data.get("traceState")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.str, "trace_state")
+ _args["trace_state"] = _value
+ if (_value := data.get("parentSpanId")) is not None:
+ _args["parent_span_id"] = opentelemetry.proto_json._otlp_json_utils.decode_hex(_value, "parent_span_id")
+ if (_value := data.get("flags")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.int, "flags")
+ _args["flags"] = _value
+ if (_value := data.get("name")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.str, "name")
+ _args["name"] = _value
+ if (_value := data.get("kind")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.int, "kind")
+ _args["kind"] = Span.SpanKind(_value)
+ if (_value := data.get("startTimeUnixNano")) is not None:
+ _args["start_time_unix_nano"] = opentelemetry.proto_json._otlp_json_utils.decode_int64(_value, "start_time_unix_nano")
+ if (_value := data.get("endTimeUnixNano")) is not None:
+ _args["end_time_unix_nano"] = opentelemetry.proto_json._otlp_json_utils.decode_int64(_value, "end_time_unix_nano")
+ if (_value := data.get("attributes")) is not None:
+ _args["attributes"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: opentelemetry.proto_json.common.v1.common.KeyValue.from_dict(_v), "attributes")
+ if (_value := data.get("droppedAttributesCount")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.int, "dropped_attributes_count")
+ _args["dropped_attributes_count"] = _value
+ if (_value := data.get("events")) is not None:
+ _args["events"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: Span.Event.from_dict(_v), "events")
+ if (_value := data.get("droppedEventsCount")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.int, "dropped_events_count")
+ _args["dropped_events_count"] = _value
+ if (_value := data.get("links")) is not None:
+ _args["links"] = opentelemetry.proto_json._otlp_json_utils.decode_repeated(_value, lambda _v: Span.Link.from_dict(_v), "links")
+ if (_value := data.get("droppedLinksCount")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.int, "dropped_links_count")
+ _args["dropped_links_count"] = _value
+ if (_value := data.get("status")) is not None:
+ _args["status"] = Status.from_dict(_value)
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "Span":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
+
+
+@typing.final
+@_dataclass
+class Status:
+ """
+ Generated from protobuf message Status
+ """
+
+ @typing.final
+ class StatusCode(enum.IntEnum):
+ """
+ Generated from protobuf enum StatusCode
+ """
+
+ STATUS_CODE_UNSET = 0
+ STATUS_CODE_OK = 1
+ STATUS_CODE_ERROR = 2
+
+ message: typing.Optional[builtins.str] = ""
+ code: typing.Union[Status.StatusCode, builtins.int, None] = 0
+
+ def to_dict(self) -> builtins.dict[builtins.str, typing.Any]:
+ """
+ Convert this message to a dictionary with lowerCamelCase keys.
+
+ Returns:
+ Dictionary representation following OTLP JSON encoding
+ """
+ _result = {}
+ if self.message:
+ _result["message"] = self.message
+ if self.code:
+ _result["code"] = builtins.int(self.code)
+ return _result
+
+ def to_json(self) -> builtins.str:
+ """
+ Serialize this message to a JSON string.
+
+ Returns:
+ JSON string
+ """
+ return json.dumps(self.to_dict())
+
+ @builtins.classmethod
+ def from_dict(cls, data: builtins.dict[builtins.str, typing.Any]) -> "Status":
+ """
+ Create from a dictionary with lowerCamelCase keys.
+
+ Args:
+ data: Dictionary representation following OTLP JSON encoding
+
+ Returns:
+ Status instance
+ """
+ opentelemetry.proto_json._otlp_json_utils.validate_type(data, builtins.dict, "data")
+ _args = {}
+
+ if (_value := data.get("message")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.str, "message")
+ _args["message"] = _value
+ if (_value := data.get("code")) is not None:
+ opentelemetry.proto_json._otlp_json_utils.validate_type(_value, builtins.int, "code")
+ _args["code"] = Status.StatusCode(_value)
+
+ return cls(**_args)
+
+ @builtins.classmethod
+ def from_json(cls, data: typing.Union[builtins.str, builtins.bytes]) -> "Status":
+ """
+ Deserialize from a JSON string or bytes.
+
+ Args:
+ data: JSON string or bytes
+
+ Returns:
+ Instance of the class
+ """
+ return cls.from_dict(json.loads(data))
diff --git a/opentelemetry-proto-json/src/opentelemetry/proto_json/version/__init__.py b/opentelemetry-proto-json/src/opentelemetry/proto_json/version/__init__.py
new file mode 100644
index 0000000000..c099e9440e
--- /dev/null
+++ b/opentelemetry-proto-json/src/opentelemetry/proto_json/version/__init__.py
@@ -0,0 +1,15 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+__version__ = "0.61b0.dev"
diff --git a/opentelemetry-proto-json/test-requirements.in b/opentelemetry-proto-json/test-requirements.in
new file mode 100644
index 0000000000..60cf5e47db
--- /dev/null
+++ b/opentelemetry-proto-json/test-requirements.in
@@ -0,0 +1,6 @@
+colorama>=0.4.6
+iniconfig>=2.0.0
+packaging>=24.0
+pytest>=7.4.4
+pluggy>=1.0.0
+-e opentelemetry-proto-json
diff --git a/opentelemetry-proto-json/test-requirements.latest.txt b/opentelemetry-proto-json/test-requirements.latest.txt
new file mode 100644
index 0000000000..403b955c79
--- /dev/null
+++ b/opentelemetry-proto-json/test-requirements.latest.txt
@@ -0,0 +1,28 @@
+# This file was autogenerated by uv via the following command:
+# uv pip compile --python 3.9 --universal -c dev-requirements.txt opentelemetry-proto-json/test-requirements.in -o opentelemetry-proto-json/test-requirements.latest.txt
+-e opentelemetry-proto-json
+ # via -r opentelemetry-proto-json/test-requirements.in
+colorama==0.4.6
+ # via
+ # -r opentelemetry-proto-json/test-requirements.in
+ # pytest
+exceptiongroup==1.3.1 ; python_full_version < '3.11'
+ # via pytest
+iniconfig==2.1.0
+ # via
+ # -r opentelemetry-proto-json/test-requirements.in
+ # pytest
+packaging==26.0
+ # via
+ # -r opentelemetry-proto-json/test-requirements.in
+ # pytest
+pluggy==1.6.0
+ # via pytest
+pytest==7.4.4
+ # via
+ # -c dev-requirements.txt
+ # -r opentelemetry-proto-json/test-requirements.in
+tomli==2.4.0 ; python_full_version < '3.11'
+ # via pytest
+typing-extensions==4.15.0 ; python_full_version < '3.11'
+ # via exceptiongroup
diff --git a/opentelemetry-proto-json/test-requirements.oldest.txt b/opentelemetry-proto-json/test-requirements.oldest.txt
new file mode 100644
index 0000000000..730c9c0370
--- /dev/null
+++ b/opentelemetry-proto-json/test-requirements.oldest.txt
@@ -0,0 +1,28 @@
+# This file was autogenerated by uv via the following command:
+# uv pip compile --python 3.9 --universal --resolution lowest -c dev-requirements.txt opentelemetry-proto-json/test-requirements.in -o opentelemetry-proto-json/test-requirements.oldest.txt
+-e opentelemetry-proto-json
+ # via -r opentelemetry-proto-json/test-requirements.in
+colorama==0.4.6
+ # via
+ # -r opentelemetry-proto-json/test-requirements.in
+ # pytest
+exceptiongroup==1.0.0 ; python_full_version < '3.11'
+ # via pytest
+iniconfig==2.0.0
+ # via
+ # -r opentelemetry-proto-json/test-requirements.in
+ # pytest
+packaging==24.0
+ # via
+ # -r opentelemetry-proto-json/test-requirements.in
+ # pytest
+pluggy==1.0.0
+ # via
+ # -r opentelemetry-proto-json/test-requirements.in
+ # pytest
+pytest==7.4.4
+ # via
+ # -c dev-requirements.txt
+ # -r opentelemetry-proto-json/test-requirements.in
+tomli==1.0.0 ; python_full_version < '3.11'
+ # via pytest
diff --git a/opentelemetry-proto-json/tests/__init__.py b/opentelemetry-proto-json/tests/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/opentelemetry-proto-json/tests/test_proto.py b/opentelemetry-proto-json/tests/test_proto.py
new file mode 100644
index 0000000000..f99d4abfb0
--- /dev/null
+++ b/opentelemetry-proto-json/tests/test_proto.py
@@ -0,0 +1,23 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# type: ignore
+
+from importlib.util import find_spec
+from unittest import TestCase
+
+
+class TestInstrumentor(TestCase):
+ def test_proto(self):
+ if find_spec("opentelemetry.proto_json") is None:
+ self.fail("opentelemetry-proto-json not installed")
diff --git a/pyproject.toml b/pyproject.toml
index b6970c666d..d464a47a96 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -7,6 +7,7 @@ dependencies = [
"opentelemetry-sdk",
"opentelemetry-semantic-conventions",
"opentelemetry-proto",
+ "opentelemetry-proto-json",
"opentelemetry-test-utils",
"opentelemetry-exporter-otlp-proto-grpc",
"opentelemetry-exporter-otlp-proto-http",
@@ -15,6 +16,7 @@ dependencies = [
"opentelemetry-exporter-prometheus",
"opentelemetry-propagator-jaeger",
"opentelemetry-propagator-b3",
+ "opentelemetry-codegen-json",
]
# https://docs.astral.sh/uv/reference/settings/
@@ -26,6 +28,7 @@ required-version = ">=0.6.0"
opentelemetry-api = { workspace = true}
opentelemetry-sdk = { workspace = true }
opentelemetry-proto = { workspace = true }
+opentelemetry-proto-json = { workspace = true }
opentelemetry-semantic-conventions = { workspace = true }
opentelemetry-test-utils = { workspace = true }
opentelemetry-exporter-otlp-proto-grpc = { workspace = true }
@@ -35,6 +38,7 @@ opentelemetry-exporter-zipkin-json = { workspace = true }
opentelemetry-exporter-prometheus = {workspace = true }
opentelemetry-propagator-jaeger = { workspace = true }
opentelemetry-propagator-b3 = { workspace = true }
+opentelemetry-codegen-json = { workspace = true }
[tool.uv.workspace]
members = [
@@ -42,8 +46,10 @@ members = [
"opentelemetry-sdk",
"opentelemetry-semantic-conventions",
"opentelemetry-proto",
+ "opentelemetry-proto-json",
"exporter/*",
"propagator/*",
+ "codegen/*",
"tests/opentelemetry-test-utils",
]
@@ -63,6 +69,7 @@ target-version = "py38"
line-length = 79
extend-exclude = [
"*_pb2*.py*",
+ "opentelemetry-proto-json/src/*",
]
output-format = "concise"
diff --git a/scripts/proto_codegen_json.sh b/scripts/proto_codegen_json.sh
new file mode 100755
index 0000000000..b37806809a
--- /dev/null
+++ b/scripts/proto_codegen_json.sh
@@ -0,0 +1,63 @@
+#!/bin/bash
+#
+# Regenerate python code from OTLP protos in
+# https://github.com/open-telemetry/opentelemetry-proto
+#
+# To use, update the PROTO_REPO_BRANCH_OR_COMMIT variable below to a commit hash or
+# tag in the opentelemetry-proto repo that you want to build off of. Then run
+# this script to update the proto files. Commit the changes as well as any
+# fixes needed in the OTLP exporter.
+#
+# Optional envars:
+# PROTO_REPO_DIR - the path to an existing checkout of the opentelemetry-proto repo
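+#
+# A minimal usage sketch (run from the repo root; the PROTO_REPO_DIR override is
+# optional and the checkout path shown is only illustrative):
+#
+#   PROTO_REPO_DIR=~/src/opentelemetry-proto ./scripts/proto_codegen_json.sh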
+
+# Pinned commit/branch/tag of opentelemetry-proto used to generate the opentelemetry-proto-json python package.
+PROTO_REPO_BRANCH_OR_COMMIT="v1.9.0"
+
+set -e
+
+PROTO_REPO_DIR=${PROTO_REPO_DIR:-"/tmp/opentelemetry-proto"}
+# root of opentelemetry-python repo
+repo_root="$(git rev-parse --show-toplevel)"
+
+protoc() {
+ uvx -c $repo_root/gen-requirements.txt \
+ --python 3.12 \
+ --from grpcio-tools \
+ python -m grpc_tools.protoc "$@"
+}
+
+protoc --version
+
+# Clone the proto repo if it doesn't exist
+if [ ! -d "$PROTO_REPO_DIR" ]; then
+ git clone https://github.com/open-telemetry/opentelemetry-proto.git $PROTO_REPO_DIR
+fi
+
+# Pull in changes and switch to requested branch
+(
+ cd $PROTO_REPO_DIR
+ git fetch --all
+ git checkout $PROTO_REPO_BRANCH_OR_COMMIT
+ # pull if PROTO_REPO_BRANCH_OR_COMMIT is not a detached head
+ git symbolic-ref -q HEAD && git pull --ff-only || true
+)
+
+cd $repo_root/opentelemetry-proto-json/src
+
+# clean up old generated code
+find opentelemetry/proto_json/ -name "*.py" -delete
+
+# generate proto code for all protos
+all_protos=$(find $PROTO_REPO_DIR/ -iname "*.proto")
+protoc \
+ -I $PROTO_REPO_DIR \
+ --otlp_json_out=. \
+ $all_protos
+
+echo "Please update ./opentelemetry-proto-json/README.rst to include the updated version."
diff --git a/tox.ini b/tox.ini
index de125edce7..3db9bc6704 100644
--- a/tox.ini
+++ b/tox.ini
@@ -16,6 +16,9 @@ envlist =
pypy3-test-opentelemetry-proto-gen-{oldest,latest}
lint-opentelemetry-proto-gen-latest
+ py3{9,10,11,12,13,14,14t}-test-opentelemetry-protojson-gen-{oldest,latest}
+ pypy3-test-opentelemetry-protojson-gen-{oldest,latest}
+
py3{9,10,11,12,13,14,14t}-test-opentelemetry-sdk
pypy3-test-opentelemetry-sdk
lint-opentelemetry-sdk
@@ -116,6 +119,9 @@ deps =
opentelemetry-proto-gen-oldest: -r {toxinidir}/opentelemetry-proto/test-requirements.oldest.txt
opentelemetry-proto-gen-latest: -r {toxinidir}/opentelemetry-proto/test-requirements.latest.txt
+ opentelemetry-protojson-gen-oldest: -r {toxinidir}/opentelemetry-proto-json/test-requirements.oldest.txt
+ opentelemetry-protojson-gen-latest: -r {toxinidir}/opentelemetry-proto-json/test-requirements.latest.txt
+
exporter-opencensus: -r {toxinidir}/exporter/opentelemetry-exporter-opencensus/test-requirements.txt
exporter-otlp-proto-common: -r {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-common/test-requirements.txt
@@ -177,6 +183,9 @@ commands =
test-opentelemetry-proto-gen: pytest {toxinidir}/opentelemetry-proto/tests {posargs}
lint-opentelemetry-proto-gen: pylint {toxinidir}/opentelemetry-proto
+ test-opentelemetry-protojson-gen: pytest {toxinidir}/opentelemetry-proto-json/tests {posargs}
+ lint-opentelemetry-protojson-gen: pylint {toxinidir}/opentelemetry-proto-json
+
test-opentelemetry-semantic-conventions: pytest {toxinidir}/opentelemetry-semantic-conventions/tests {posargs}
lint-opentelemetry-semantic-conventions: pylint --rcfile {toxinidir}/opentelemetry-semantic-conventions/.pylintrc {toxinidir}/opentelemetry-semantic-conventions
@@ -351,6 +360,7 @@ deps =
-e {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-grpc
-e {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-http
-e {toxinidir}/opentelemetry-proto
+ -e {toxinidir}/opentelemetry-proto-json
commands =
pyright --version
diff --git a/uv.lock b/uv.lock
index 30f2debcda..97c013681a 100644
--- a/uv.lock
+++ b/uv.lock
@@ -11,6 +11,7 @@ resolution-markers = [
[manifest]
members = [
"opentelemetry-api",
+ "opentelemetry-codegen-json",
"opentelemetry-exporter-otlp",
"opentelemetry-exporter-otlp-proto-common",
"opentelemetry-exporter-otlp-proto-grpc",
@@ -20,6 +21,7 @@ members = [
"opentelemetry-propagator-b3",
"opentelemetry-propagator-jaeger",
"opentelemetry-proto",
+ "opentelemetry-proto-json",
"opentelemetry-python",
"opentelemetry-sdk",
"opentelemetry-semantic-conventions",
@@ -390,6 +392,20 @@ requires-dist = [
{ name = "typing-extensions", specifier = ">=4.5.0" },
]
+[[package]]
+name = "opentelemetry-codegen-json"
+source = { editable = "codegen/opentelemetry-codegen-json" }
+dependencies = [
+ { name = "protobuf" },
+ { name = "types-protobuf" },
+]
+
+[package.metadata]
+requires-dist = [
+ { name = "protobuf", specifier = ">=4.25.3" },
+ { name = "types-protobuf", specifier = ">=4.24" },
+]
+
[[package]]
name = "opentelemetry-exporter-credential-provider-gcp"
version = "0.60b0"
@@ -558,12 +574,17 @@ dependencies = [
[package.metadata]
requires-dist = [{ name = "protobuf", specifier = ">=5.0,<7.0" }]
+[[package]]
+name = "opentelemetry-proto-json"
+source = { editable = "opentelemetry-proto-json" }
+
[[package]]
name = "opentelemetry-python"
version = "0.0.0"
source = { virtual = "." }
dependencies = [
{ name = "opentelemetry-api" },
+ { name = "opentelemetry-codegen-json" },
{ name = "opentelemetry-exporter-otlp-proto-common" },
{ name = "opentelemetry-exporter-otlp-proto-grpc" },
{ name = "opentelemetry-exporter-otlp-proto-http" },
@@ -572,6 +593,7 @@ dependencies = [
{ name = "opentelemetry-propagator-b3" },
{ name = "opentelemetry-propagator-jaeger" },
{ name = "opentelemetry-proto" },
+ { name = "opentelemetry-proto-json" },
{ name = "opentelemetry-sdk" },
{ name = "opentelemetry-semantic-conventions" },
{ name = "opentelemetry-test-utils" },
@@ -590,6 +612,7 @@ dev = [
[package.metadata]
requires-dist = [
{ name = "opentelemetry-api", editable = "opentelemetry-api" },
+ { name = "opentelemetry-codegen-json", editable = "codegen/opentelemetry-codegen-json" },
{ name = "opentelemetry-exporter-otlp-proto-common", editable = "exporter/opentelemetry-exporter-otlp-proto-common" },
{ name = "opentelemetry-exporter-otlp-proto-grpc", editable = "exporter/opentelemetry-exporter-otlp-proto-grpc" },
{ name = "opentelemetry-exporter-otlp-proto-http", editable = "exporter/opentelemetry-exporter-otlp-proto-http" },
@@ -598,6 +621,7 @@ requires-dist = [
{ name = "opentelemetry-propagator-b3", editable = "propagator/opentelemetry-propagator-b3" },
{ name = "opentelemetry-propagator-jaeger", editable = "propagator/opentelemetry-propagator-jaeger" },
{ name = "opentelemetry-proto", editable = "opentelemetry-proto" },
+ { name = "opentelemetry-proto-json", editable = "opentelemetry-proto-json" },
{ name = "opentelemetry-sdk", editable = "opentelemetry-sdk" },
{ name = "opentelemetry-semantic-conventions", editable = "opentelemetry-semantic-conventions" },
{ name = "opentelemetry-test-utils", editable = "tests/opentelemetry-test-utils" },
@@ -1061,6 +1085,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/5c/17/221d62937c4130b044bb437caac4181e7e13d5536bbede65264db1f0ac9f/tox_uv-1.29.0-py3-none-any.whl", hash = "sha256:b1d251286edeeb4bc4af1e24c8acfdd9404700143c2199ccdbb4ea195f7de6cc", size = 17254, upload-time = "2025-10-09T20:40:25.885Z" },
]
+[[package]]
+name = "types-protobuf"
+version = "6.32.1.20251210"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/c2/59/c743a842911887cd96d56aa8936522b0cd5f7a7f228c96e81b59fced45be/types_protobuf-6.32.1.20251210.tar.gz", hash = "sha256:c698bb3f020274b1a2798ae09dc773728ce3f75209a35187bd11916ebfde6763", size = 63900, upload-time = "2025-12-10T03:14:25.451Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/aa/43/58e75bac4219cbafee83179505ff44cae3153ec279be0e30583a73b8f108/types_protobuf-6.32.1.20251210-py3-none-any.whl", hash = "sha256:2641f78f3696822a048cfb8d0ff42ccd85c25f12f871fbebe86da63793692140", size = 77921, upload-time = "2025-12-10T03:14:24.477Z" },
+]
+
[[package]]
name = "typing-extensions"
version = "4.15.0"