Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/opentelemetry/sdk/metrics/_internal/point.py: 88% (75 statements)
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# pylint: disable=unused-import

from dataclasses import asdict, dataclass
from json import dumps, loads
from typing import Optional, Sequence, Union

# This kind of import is needed to avoid Sphinx errors.
import opentelemetry.sdk.metrics._internal
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.util.instrumentation import InstrumentationScope
from opentelemetry.util.types import Attributes


@dataclass(frozen=True)
class NumberDataPoint:
    """Single data point in a timeseries that describes the time-varying scalar
    value of a metric.
    """

    attributes: Attributes
    start_time_unix_nano: int
    time_unix_nano: int
    value: Union[int, float]

    def to_json(self, indent=4) -> str:
        return dumps(asdict(self), indent=indent)
@dataclass(frozen=True)
class HistogramDataPoint:
    """Single data point in a timeseries that describes the time-varying
    histogram value of a metric.
    """

    attributes: Attributes
    start_time_unix_nano: int
    time_unix_nano: int
    count: int
    sum: Union[int, float]
    bucket_counts: Sequence[int]
    explicit_bounds: Sequence[float]
    min: float
    max: float

    def to_json(self, indent=4) -> str:
        return dumps(asdict(self), indent=indent)
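

# Illustrative sketch (not part of the upstream module): a HistogramDataPoint
# whose bucket_counts has one more entry than explicit_bounds, because the
# final bucket is unbounded above: (-inf, 5.0], (5.0, 10.0], (10.0, 25.0],
# (25.0, +inf). All values below are made-up examples.
_example_histogram_point = HistogramDataPoint(
    attributes={"http.route": "/items"},
    start_time_unix_nano=1_000_000_000,
    time_unix_nano=2_000_000_000,
    count=8,  # equals sum(bucket_counts)
    sum=57.0,
    bucket_counts=[1, 3, 3, 1],
    explicit_bounds=[5.0, 10.0, 25.0],
    min=2.0,
    max=30.0,
)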
@dataclass(frozen=True)
class Sum:
    """Represents the type of a scalar metric that is calculated as a sum of
    all reported measurements over a time interval."""

    data_points: Sequence[NumberDataPoint]
    aggregation_temporality: (
        "opentelemetry.sdk.metrics.export.AggregationTemporality"
    )
    is_monotonic: bool

    def to_json(self, indent=4) -> str:
        return dumps(
            {
                "data_points": [
                    loads(data_point.to_json(indent=indent))
                    for data_point in self.data_points
                ],
                "aggregation_temporality": self.aggregation_temporality,
                "is_monotonic": self.is_monotonic,
            },
            indent=indent,
        )


@dataclass(frozen=True)
class Gauge:
    """Represents the type of a scalar metric that always exports the current
    value for every data point. It should be used for an unknown
    aggregation."""

    data_points: Sequence[NumberDataPoint]

    def to_json(self, indent=4) -> str:
        return dumps(
            {
                "data_points": [
                    loads(data_point.to_json(indent=indent))
                    for data_point in self.data_points
                ],
            },
            indent=indent,
        )


@dataclass(frozen=True)
class Histogram:
    """Represents the type of a metric that is calculated by aggregating as a
    histogram of all reported measurements over a time interval."""

    data_points: Sequence[HistogramDataPoint]
    aggregation_temporality: (
        "opentelemetry.sdk.metrics.export.AggregationTemporality"
    )

    def to_json(self, indent=4) -> str:
        return dumps(
            {
                "data_points": [
                    loads(data_point.to_json(indent=indent))
                    for data_point in self.data_points
                ],
                "aggregation_temporality": self.aggregation_temporality,
            },
            indent=indent,
        )


DataT = Union[Sum, Gauge, Histogram]
DataPointT = Union[NumberDataPoint, HistogramDataPoint]
@dataclass(frozen=True)
class Metric:
    """Represents a metric point in the OpenTelemetry data model to be
    exported."""

    name: str
    description: Optional[str]
    unit: Optional[str]
    data: DataT

    def to_json(self, indent=4) -> str:
        return dumps(
            {
                "name": self.name,
                "description": self.description or "",
                "unit": self.unit or "",
                "data": loads(self.data.to_json(indent=indent)),
            },
            indent=indent,
        )
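

# Illustrative sketch (not part of the upstream module): a hypothetical helper
# showing how a consumer such as an exporter might branch on the DataT union
# carried in Metric.data.
def _describe_metric(metric: Metric) -> str:
    if isinstance(metric.data, Sum):
        return (
            f"Sum(temporality={metric.data.aggregation_temporality}, "
            f"monotonic={metric.data.is_monotonic})"
        )
    if isinstance(metric.data, Gauge):
        return "Gauge"
    if isinstance(metric.data, Histogram):
        return f"Histogram(temporality={metric.data.aggregation_temporality})"
    raise TypeError(f"unexpected metric data type: {type(metric.data).__name__}")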
@dataclass(frozen=True)
class ScopeMetrics:
    """A collection of Metrics produced by a scope"""

    scope: InstrumentationScope
    metrics: Sequence[Metric]
    schema_url: str

    def to_json(self, indent=4) -> str:
        return dumps(
            {
                "scope": loads(self.scope.to_json(indent=indent)),
                "metrics": [
                    loads(metric.to_json(indent=indent))
                    for metric in self.metrics
                ],
                "schema_url": self.schema_url,
            },
            indent=indent,
        )


@dataclass(frozen=True)
class ResourceMetrics:
    """A collection of ScopeMetrics from a Resource"""

    resource: Resource
    scope_metrics: Sequence[ScopeMetrics]
    schema_url: str

    def to_json(self, indent=4) -> str:
        return dumps(
            {
                "resource": loads(self.resource.to_json(indent=indent)),
                "scope_metrics": [
                    loads(scope_metrics.to_json(indent=indent))
                    for scope_metrics in self.scope_metrics
                ],
                "schema_url": self.schema_url,
            },
            indent=indent,
        )
@dataclass(frozen=True)
class MetricsData:
    """An array of ResourceMetrics"""

    resource_metrics: Sequence[ResourceMetrics]

    def to_json(self, indent=4) -> str:
        return dumps(
            {
                "resource_metrics": [
                    loads(resource_metrics.to_json(indent=indent))
                    for resource_metrics in self.resource_metrics
                ]
            },
            indent=indent,
        )
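
The sketch below is not part of point.py; it is a minimal usage example showing how the dataclasses above compose bottom-up and how to_json() serializes the whole tree. The metric name "http.server.request_count", the "demo-service" resource, the instrumentation scope name, and the timestamps are made-up illustrations; AggregationTemporality, Resource, and InstrumentationScope are the SDK classes referenced in the module.

from time import time_ns

from opentelemetry.sdk.metrics.export import (
    AggregationTemporality,
    Metric,
    MetricsData,
    NumberDataPoint,
    ResourceMetrics,
    ScopeMetrics,
    Sum,
)
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.util.instrumentation import InstrumentationScope

now = time_ns()

# A single cumulative, monotonic counter value with one attribute set.
counter_point = NumberDataPoint(
    attributes={"http.method": "GET"},
    start_time_unix_nano=now - 1_000_000_000,
    time_unix_nano=now,
    value=42,
)

request_count = Metric(
    name="http.server.request_count",
    description="Number of requests served",
    unit="1",
    data=Sum(
        data_points=[counter_point],
        aggregation_temporality=AggregationTemporality.CUMULATIVE,
        is_monotonic=True,
    ),
)

metrics_data = MetricsData(
    resource_metrics=[
        ResourceMetrics(
            resource=Resource.create({"service.name": "demo-service"}),
            scope_metrics=[
                ScopeMetrics(
                    scope=InstrumentationScope("demo.instrumentation", "0.1.0"),
                    metrics=[request_count],
                    schema_url="",
                )
            ],
            schema_url="",
        )
    ]
)

print(metrics_data.to_json())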