# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations

import dataclasses
import enum
import functools
import logging
import re
import sys
from fnmatch import fnmatch
from importlib import import_module
from re import Pattern
from typing import TYPE_CHECKING, Any, TypeVar, cast

import attr

from airflow.sdk._shared.module_loading import import_string, iter_namespace, qualname
from airflow.sdk.configuration import conf
from airflow.sdk.observability.stats import Stats
from airflow.sdk.serde.typing import is_pydantic_model

if TYPE_CHECKING:
    from types import ModuleType

log = logging.getLogger(__name__)

MAX_RECURSION_DEPTH = sys.getrecursionlimit() - 1

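# Keys of the envelope dict produced by encode() and consumed by decode(). An encoded value looks
# like {"__classname__": "<qualified class name>", "__version__": <int>, "__data__": <payload>}.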
CLASSNAME = "__classname__"
VERSION = "__version__"
DATA = "__data__"
SCHEMA_ID = "__id__"
CACHE = "__cache__"

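# Keys used by the old-style serialization format; _convert() rewrites these into the envelope above.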
OLD_TYPE = "__type"
OLD_SOURCE = "__source"
OLD_DATA = "__var"
OLD_DICT = "dict"
PYDANTIC_MODEL_QUALNAME = "pydantic.main.BaseModel"

DEFAULT_VERSION = 0

T = TypeVar("T", bool, float, int, dict, list, str, tuple, set)
U = bool | float | int | dict | list | str | tuple | set
S = list | tuple | set

_serializers: dict[str, ModuleType] = {}
_deserializers: dict[str, ModuleType] = {}
_stringifiers: dict[str, ModuleType] = {}
_extra_allowed: set[str] = set()

_primitives = (int, bool, float, str)
_builtin_collections = (frozenset, list, set, tuple)  # dict is treated specially.


def encode(cls: str, version: int, data: T) -> dict[str, str | int | T]:
    """Encode an object so it can be understood by the deserializer."""
    return {CLASSNAME: cls, VERSION: version, DATA: data}


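# Illustrative round trip through the envelope helpers:
#
#   encode("builtins.tuple", 0, [1, 2])
#   # -> {"__classname__": "builtins.tuple", "__version__": 0, "__data__": [1, 2]}
#   decode({"__classname__": "builtins.tuple", "__version__": 0, "__data__": [1, 2]})
#   # -> ("builtins.tuple", 0, [1, 2])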
def decode(d: dict[str, Any]) -> tuple[str, int, Any]:
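    """Decode a previously encoded envelope into a ``(classname, version, data)`` tuple."""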
    classname = d[CLASSNAME]
    version = d[VERSION]

    if not isinstance(classname, str) or not isinstance(version, int):
        raise ValueError(f"cannot decode {d!r}")

    data = d.get(DATA)

    return classname, version, data


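# Illustrative usage; ``Coords`` below is a stand-in for any attrs-decorated class a caller might
# define, not something provided by this module:
#
#   @attr.define
#   class Coords:
#       x: int
#       y: int
#
#   serialize(Coords(1, 2))
#   # -> {"__classname__": "<module>.Coords", "__version__": 0, "__data__": {"x": 1, "y": 2}}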
def serialize(o: object, depth: int = 0) -> U | None:
    """
    Serialize an object into a representation consisting only of built-in types.

    Primitives (int, float, bool, str) are returned as-is. Built-in collections
    are iterated over, where it is assumed that keys in a dict can be represented
    as str.

    Values that are not of a built-in type are serialized if a serializer is
    found for them. The order in which serializers are tried is:

    1. A ``serialize`` function provided by the object.
    2. A registered serializer in the namespace of ``airflow.sdk.serde.serializers``.
    3. Annotations from attr or dataclass.

    Limitations: attr and dataclass objects can lose type information for nested objects,
    as they do not store it when calling ``asdict``. This means that at deserialization such
    values will come back as a dict instead of the original object. Provide
    your own serializer to work around this.

    :param o: The object to serialize.
    :param depth: Private tracker for nested serialization.
    :raise TypeError: A serializer cannot be found.
    :raise RecursionError: The object is too nested for the function to handle.
    :return: A representation of ``o`` that consists of only built-in types.
    """
    if depth == MAX_RECURSION_DEPTH:
        raise RecursionError("maximum recursion depth reached for serialization")

    # None remains None
    if o is None:
        return o

    if isinstance(o, list):
        return [serialize(d, depth + 1) for d in o]

    if isinstance(o, dict):
        if CLASSNAME in o or SCHEMA_ID in o:
            raise AttributeError(f"reserved key {CLASSNAME} or {SCHEMA_ID} found in dict to serialize")

        return {str(k): serialize(v, depth + 1) for k, v in o.items()}

    cls = type(o)
    qn = qualname(o)
    classname = None

    # Serialize namedtuples like tuples.
    # We also override the classname returned by the builtin.py serializer. The classname
    # has to be "builtins.tuple" so that the deserializer can deserialize the object into a tuple.
    if _is_namedtuple(o):
        qn = "builtins.tuple"
        classname = qn

    if is_pydantic_model(o):
        # to match the generic Pydantic serializer and deserializer in _serializers and _deserializers
        qn = PYDANTIC_MODEL_QUALNAME
        # the actual Pydantic model class to encode
        classname = qualname(o)

    # if there is a builtin serializer available, use that
    if qn in _serializers:
        data, serialized_classname, version, is_serialized = _serializers[qn].serialize(o)
        if is_serialized:
            return encode(classname or serialized_classname, version, serialize(data, depth + 1))

    # primitive types are returned as is
    if isinstance(o, _primitives):
        if isinstance(o, enum.Enum):
            return o.value

        return o

    # custom serializers
    dct = {
        CLASSNAME: qn,
        VERSION: getattr(cls, "__version__", DEFAULT_VERSION),
    }

    # object / class brings its own
    if hasattr(o, "serialize"):
        data = getattr(o, "serialize")()

        # if we end up with a structure, ensure its values are serialized
        if isinstance(data, dict):
            data = serialize(data, depth + 1)

        dct[DATA] = data
        return dct

    # dataclasses
    if dataclasses.is_dataclass(cls):
        # fixme: unfortunately, asdict with nested dataclasses loses type information
        data = dataclasses.asdict(o)  # type: ignore[call-overload]
        dct[DATA] = serialize(data, depth + 1)
        return dct

    # attr annotated
    if attr.has(cls):
        # Only include attributes which we can pass back to the class's constructor
        data = attr.asdict(cast("attr.AttrsInstance", o), recurse=False, filter=lambda a, v: a.init)
        dct[DATA] = serialize(data, depth + 1)
        return dct

    raise TypeError(f"cannot serialize object of type {cls}")


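# Illustrative usage, assuming ``Coords`` (from the example above serialize()) is covered by the
# allowed_deserialization_classes allow list:
#
#   deserialize(serialize(Coords(1, 2)))              # -> Coords(x=1, y=2)
#   deserialize(serialize(Coords(1, 2)), full=False)  # -> "<module>.Coords@version=0(x=1,y=2)"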
def deserialize(o: T | None, full=True, type_hint: Any = None) -> object:
    """
    Deserialize an object of primitive type, using an allow list to determine if a class can be loaded.

    :param o: primitive to deserialize into an arbitrary object.
    :param full: if False, return a stringified representation
        of the object and do not load any classes
    :param type_hint: if set, it will be used to help determine what
        object to deserialize into. It does not override another
        specification if one is found
    :return: object
    """
    if o is None:
        return o

    if isinstance(o, _primitives):
        return o

    # tuples and sets are included here for backwards compatibility
    if isinstance(o, _builtin_collections):
        col = [deserialize(d) for d in o]
        if isinstance(o, tuple):
            return tuple(col)

        if isinstance(o, set):
            return set(col)

        return col

    if not isinstance(o, dict):
        # if o is not a dict at this point, it is already deserialized,
        # so return it as is
        return o

    o = _convert(o)

    # plain dict and no type hint
    if CLASSNAME not in o and not type_hint or VERSION not in o:
        return {str(k): deserialize(v, full) for k, v in o.items()}

    # custom deserialization starts here
    cls: Any
    version = 0
    value: Any = None
    classname = ""

    if type_hint:
        cls = type_hint
        classname = qualname(cls)
        version = 0  # type hinting always sets version to 0
        value = o

    if CLASSNAME in o and VERSION in o:
        classname, version, value = decode(o)

    if not classname:
        raise TypeError("classname cannot be empty")

    # only return a string representation
    if not full:
        return _stringify(classname, version, value)
    if not _match(classname) and classname not in _extra_allowed:
        raise ImportError(
            f"{classname} was not found in the allow list for deserialization imports. "
            f"To allow it, add it to allowed_deserialization_classes in the configuration"
        )

    cls = import_string(classname)

    # registered deserializer
    if classname in _deserializers:
        return _deserializers[classname].deserialize(cls, version, deserialize(value))
    if is_pydantic_model(cls):
        if PYDANTIC_MODEL_QUALNAME in _deserializers:
            return _deserializers[PYDANTIC_MODEL_QUALNAME].deserialize(cls, version, deserialize(value))

    # class has its own deserialization function
    if hasattr(cls, "deserialize"):
        return getattr(cls, "deserialize")(deserialize(value), version)

    # attr or dataclass
    if attr.has(cls) or dataclasses.is_dataclass(cls):
        class_version = getattr(cls, "__version__", 0)
        if int(version) > class_version:
            raise TypeError(
                f"serialized version of {classname} is newer than module version ({version} > {class_version})"
            )

        deserialize_value = deserialize(value)
        if not isinstance(deserialize_value, dict):
            raise TypeError(
                f"deserialized value for {classname} is not a dict, got {type(deserialize_value)}"
            )
        return cls(**deserialize_value)  # type: ignore[operator]

    # no deserializer available
    raise TypeError(f"No deserializer found for {classname}")


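# Old-style payloads look like {"__type": "<classname>", "__var": <data>}; _convert() rewrites them
# into the {"__classname__", "__version__", "__data__"} envelope, except old-style plain dicts,
# which are returned unwrapped.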
def _convert(old: dict) -> dict:
    """Convert an old-style serialization to the new style."""
    if OLD_TYPE in old and OLD_DATA in old:
        # Return old-style dicts directly as they do not need wrapping
        if old[OLD_TYPE] == OLD_DICT:
            return old[OLD_DATA]
        return {CLASSNAME: old[OLD_TYPE], VERSION: DEFAULT_VERSION, DATA: old[OLD_DATA]}

    return old


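# The allow list comes from two whitespace-separated [core] options: allowed_deserialization_classes
# (glob syntax, e.g. "airflow.*") and allowed_deserialization_classes_regexp (regular expressions).
# A classname is allowed if it matches either list.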
def _match(classname: str) -> bool:
    """Check if the given classname matches a path pattern either using glob format or regexp format."""
    return _match_glob(classname) or _match_regexp(classname)


@functools.cache
def _match_glob(classname: str):
    """Check if the given classname matches a pattern from allowed_deserialization_classes using glob syntax."""
    patterns = _get_patterns()
    return any(fnmatch(classname, p.pattern) for p in patterns)


@functools.cache
def _match_regexp(classname: str):
    """Check if the given classname matches a pattern from allowed_deserialization_classes_regexp using regexp."""
    patterns = _get_regexp_patterns()
    return any(p.match(classname) is not None for p in patterns)


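# Example of the fallback output shape when no stringifier is registered for the class
# ("x.y.Coords" is a made-up classname):
#
#   _stringify("x.y.Coords", 0, {"x": 1, "y": 2})   # -> "x.y.Coords@version=0(x=1,y=2)"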
def _stringify(classname: str, version: int, value: T | None) -> str:
    """
    Convert a previously serialized object into a somewhat human-readable format.

    This function is not designed to be exact, and it will not extensively traverse
    the whole tree of an object.
    """
    if classname in _stringifiers:
        return _stringifiers[classname].stringify(classname, version, value)

    s = f"{classname}@version={version}("
    if isinstance(value, _primitives):
        s += f"{value}"
    elif isinstance(value, _builtin_collections):
        # deserialized values can be != str
        s += ",".join(str(deserialize(v, full=False)) for v in value)
    elif isinstance(value, dict):
        s += ",".join(f"{k}={deserialize(v, full=False)}" for k, v in value.items())
    s += ")"

    return s


def _is_namedtuple(cls: Any) -> bool:
    """
    Return True if the class is a namedtuple.

    Checking is done by attributes as it is significantly faster than
    using isinstance.
    """
    return hasattr(cls, "_asdict") and hasattr(cls, "_fields") and hasattr(cls, "_field_defaults")


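# Serializer modules discovered under airflow.sdk.serde.serializers declare what they handle via
# module-level ``serializers``, ``deserializers`` and ``stringifiers`` iterables, whose items are
# either classes or qualified class name strings, e.g. (illustrative):
#
#   serializers = ["datetime.datetime"]
#   deserializers = serializers
#   stringifiers = serializers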
def _register():
    """Register builtin serializers and deserializers for types that don't have any themselves."""
    _serializers.clear()
    _deserializers.clear()
    _stringifiers.clear()

    with Stats.timer("serde.load_serializers") as timer:
        serializers_module = import_module("airflow.sdk.serde.serializers")
        for _, module_name, _ in iter_namespace(serializers_module):
            module = import_module(module_name)
            for serializers in getattr(module, "serializers", ()):
                s_qualname = serializers if isinstance(serializers, str) else qualname(serializers)
                if s_qualname in _serializers and _serializers[s_qualname] != module:
                    raise AttributeError(
                        f"duplicate {s_qualname} for serialization in {module} and {_serializers[s_qualname]}"
                    )
                log.debug("registering %s for serialization", s_qualname)
                _serializers[s_qualname] = module
            for deserializers in getattr(module, "deserializers", ()):
                d_qualname = deserializers if isinstance(deserializers, str) else qualname(deserializers)
                if d_qualname in _deserializers and _deserializers[d_qualname] != module:
                    raise AttributeError(
                        f"duplicate {d_qualname} for deserialization in {module} and {_deserializers[d_qualname]}"
                    )
                log.debug("registering %s for deserialization", d_qualname)
                _deserializers[d_qualname] = module
                _extra_allowed.add(d_qualname)
            for stringifiers in getattr(module, "stringifiers", ()):
                c_qualname = stringifiers if isinstance(stringifiers, str) else qualname(stringifiers)
                if c_qualname in _stringifiers and _stringifiers[c_qualname] != module:
                    raise AttributeError(
                        f"duplicate {c_qualname} for stringifying in {module} and {_stringifiers[c_qualname]}"
                    )
                log.debug("registering %s for stringifying", c_qualname)
                _stringifiers[c_qualname] = module

    log.debug("loading serializers took %.3f seconds", timer.duration)


@functools.cache
def _get_patterns() -> list[Pattern]:
    return [re.compile(p) for p in conf.get("core", "allowed_deserialization_classes").split()]


@functools.cache
def _get_regexp_patterns() -> list[Pattern]:
    return [re.compile(p) for p in conf.get("core", "allowed_deserialization_classes_regexp").split()]


_register()