1"""This module defines a base Exporter class. For Jinja template-based export,
2see templateexporter.py.
3"""
4
5# Copyright (c) Jupyter Development Team.
6# Distributed under the terms of the Modified BSD License.
7from __future__ import annotations
8
9import collections
10import copy
11import datetime
12import os
13import sys
14import typing as t
15
16import nbformat
17from nbformat import NotebookNode, validator
18from traitlets import Bool, HasTraits, List, TraitError, Unicode
19from traitlets.config import Config
20from traitlets.config.configurable import LoggingConfigurable
21from traitlets.utils.importstring import import_item
22
23
class ResourcesDict(collections.defaultdict):  # type:ignore[type-arg]
    """A default dict for resources."""

    def __missing__(self, key):
        """Handle missing value."""
        return ""


class FilenameExtension(Unicode):  # type:ignore[type-arg]
    """A trait for filename extensions."""

    default_value = ""
    info_text = "a filename extension, beginning with a dot"

    def validate(self, obj, value):
        """Validate the file name extension."""
        # cast to proper unicode
        value = super().validate(obj, value)

        # check that it starts with a dot
        if value and not value.startswith("."):
            msg = "FilenameExtension trait '{}' does not begin with a dot: {!r}"
            raise TraitError(msg.format(self.name, value))

        return value


class Exporter(LoggingConfigurable):
    """
    Class containing methods that sequentially run a list of preprocessors on a
    NotebookNode object and then return the modified NotebookNode object and
    accompanying resources dict.
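
    Examples
    --------
    A minimal usage sketch; ``"notebook.ipynb"`` is a placeholder path::

        exporter = Exporter()
        nb, resources = exporter.from_filename("notebook.ipynb")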
56 """
57
    enabled = Bool(True, help="Enable this exporter (and any exporters inherited from it).").tag(
        config=True
    )

    file_extension = FilenameExtension(
        help="Extension of the file that should be written to disk"
    ).tag(config=True)

    optimistic_validation = Bool(
        False,
        help="Reduces the number of validation steps so that it only occurs after all preprocessors have run.",
    ).tag(config=True)

    # MIME type of the result file, for HTTP response headers.
    # This is *not* a traitlet, because we want to be able to access it from
    # the class, not just on instances.
    output_mimetype = ""

    # Should this converter be accessible from the notebook front-end?
    # If so, should be a friendly name to display (and possibly translated).
    export_from_notebook: str = None  # type:ignore[assignment]

    # Configurability, allows the user to easily add filters and preprocessors.
    preprocessors: List[t.Any] = List(
        help="""List of preprocessors, by name or namespace, to enable."""
    ).tag(config=True)
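    # Example (a sketch of a traitlets config file; ClearOutputPreprocessor is
    # one of the default preprocessors listed below):
    #
    #     c.Exporter.preprocessors = ["nbconvert.preprocessors.ClearOutputPreprocessor"]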

    _preprocessors: List[t.Any] = List()

    default_preprocessors: List[t.Any] = List(
        [
            "nbconvert.preprocessors.TagRemovePreprocessor",
            "nbconvert.preprocessors.RegexRemovePreprocessor",
            "nbconvert.preprocessors.ClearOutputPreprocessor",
            "nbconvert.preprocessors.CoalesceStreamsPreprocessor",
            "nbconvert.preprocessors.ExecutePreprocessor",
            "nbconvert.preprocessors.SVG2PDFPreprocessor",
            "nbconvert.preprocessors.LatexPreprocessor",
            "nbconvert.preprocessors.HighlightMagicsPreprocessor",
            "nbconvert.preprocessors.ExtractOutputPreprocessor",
            "nbconvert.preprocessors.ExtractAttachmentsPreprocessor",
            "nbconvert.preprocessors.ClearMetadataPreprocessor",
        ],
        help="""List of preprocessors available by default, by name, namespace,
        instance, or type.""",
    ).tag(config=True)

    def __init__(self, config=None, **kw):
        """
        Public constructor

        Parameters
        ----------
        config : ``traitlets.config.Config``
            User configuration instance.
        `**kw`
            Additional keyword arguments passed to parent __init__

        """
        with_default_config = self.default_config
        if config:
            with_default_config.merge(config)

        super().__init__(config=with_default_config, **kw)

        self._init_preprocessors()
        self._nb_metadata = {}

    @property
    def default_config(self):
        return Config()

    def from_notebook_node(
        self, nb: NotebookNode, resources: t.Any | None = None, **kw: t.Any
    ) -> tuple[NotebookNode, dict[str, t.Any]]:
        """
        Convert a notebook from a notebook node instance.

        Parameters
        ----------
        nb : :class:`~nbformat.NotebookNode`
            Notebook node (dict-like with attr-access)
        resources : dict
            Additional resources that can be accessed read/write by
            preprocessors and filters.
        `**kw`
            Ignored

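        Examples
        --------
        A minimal sketch; the notebook is first read with ``nbformat`` and
        ``"notebook.ipynb"`` is a placeholder path::

            nb = nbformat.read("notebook.ipynb", as_version=4)
            nb_copy, resources = Exporter().from_notebook_node(nb)
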
146 """
147 nb_copy = copy.deepcopy(nb)
148 resources = self._init_resources(resources)
149
150 if "language" in nb["metadata"]:
151 resources["language"] = nb["metadata"]["language"].lower()
152
153 # Preprocess
154 nb_copy, resources = self._preprocess(nb_copy, resources)
155 notebook_name = ""
156 if resources is not None:
157 name = resources.get("metadata", {}).get("name", "")
158 path = resources.get("metadata", {}).get("path", "")
159 notebook_name = os.path.join(path, name)
160 self._nb_metadata[notebook_name] = nb_copy.metadata
161 return nb_copy, resources

    def from_filename(
        self, filename: str, resources: dict[str, t.Any] | None = None, **kw: t.Any
    ) -> tuple[NotebookNode, dict[str, t.Any]]:
        """
        Convert a notebook from a notebook file.

        Parameters
        ----------
        filename : str
            Full filename of the notebook file to open and convert.
        resources : dict
            Additional resources that can be accessed read/write by
            preprocessors and filters.
        `**kw`
            Ignored

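        Examples
        --------
        A minimal sketch; ``"notebook.ipynb"`` is a placeholder path, and the
        returned resources carry metadata derived from the filename::

            nb, resources = Exporter().from_filename("notebook.ipynb")
            resources["metadata"]["name"]  # "notebook"
            resources["metadata"]["path"]  # "" for a bare filename
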
179 """
180 # Pull the metadata from the filesystem.
181 if resources is None:
182 resources = ResourcesDict()
183 if "metadata" not in resources or resources["metadata"] == "":
184 resources["metadata"] = ResourcesDict()
185 path, basename = os.path.split(filename)
186 notebook_name = os.path.splitext(basename)[0]
187 resources["metadata"]["name"] = notebook_name
188 resources["metadata"]["path"] = path
189
190 modified_date = datetime.datetime.fromtimestamp(
191 os.path.getmtime(filename), tz=datetime.timezone.utc
192 )
193 # datetime.strftime date format for ipython
194 if sys.platform == "win32":
195 date_format = "%B %d, %Y"
196 else:
197 date_format = "%B %-d, %Y"
198 resources["metadata"]["modified_date"] = modified_date.strftime(date_format)
199
200 with open(filename, encoding="utf-8") as f:
201 return self.from_file(f, resources=resources, **kw)
202
    def from_file(
        self, file_stream: t.Any, resources: dict[str, t.Any] | None = None, **kw: t.Any
    ) -> tuple[NotebookNode, dict[str, t.Any]]:
        """
        Convert a notebook from a file-like object.

        Parameters
        ----------
        file_stream : file-like object
            Notebook file-like object to convert.
        resources : dict
            Additional resources that can be accessed read/write by
            preprocessors and filters.
        `**kw`
            Ignored

        """
        return self.from_notebook_node(
            nbformat.read(file_stream, as_version=4), resources=resources, **kw
        )

    def register_preprocessor(self, preprocessor, enabled=False):
        """
        Register a preprocessor.
        Preprocessors are classes that act upon the notebook before it is
        passed into the Jinja templating engine. Preprocessors are also
        capable of passing additional information to the Jinja
        templating engine.

        Parameters
        ----------
        preprocessor : `nbconvert.preprocessors.Preprocessor`
            A dotted module name, a type, or an instance
        enabled : bool
            Mark the preprocessor as enabled

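        Examples
        --------
        A sketch; ``my_module.MyPreprocessor`` is a hypothetical dotted name::

            exporter.register_preprocessor("my_module.MyPreprocessor", enabled=True)

        A plain callable with the ``(nb, resources) -> (nb, resources)``
        signature can also be registered::

            def noop(nb, resources):
                return nb, resources

            exporter.register_preprocessor(noop, enabled=True)
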
239 """
240 if preprocessor is None:
241 msg = "preprocessor must not be None"
242 raise TypeError(msg)
243 isclass = isinstance(preprocessor, type)
244 constructed = not isclass
245
        # Handle the preprocessor's registration based on its type
        if constructed and isinstance(
            preprocessor,
            str,
        ):
            # Preprocessor is a string, import the namespace and recursively call
            # this register_preprocessor method
            preprocessor_cls = import_item(preprocessor)
            return self.register_preprocessor(preprocessor_cls, enabled)

        if constructed and callable(preprocessor):
            # Preprocessor is a function, no need to construct it.
            # Register and return the preprocessor.
            if enabled:
                preprocessor.enabled = True
            self._preprocessors.append(preprocessor)
            return preprocessor

        if isclass and issubclass(preprocessor, HasTraits):
            # Preprocessor is configurable. Make sure to pass in new default for
            # the enabled flag if one was specified.
            self.register_preprocessor(preprocessor(parent=self), enabled)
            return None

        if isclass:
            # Preprocessor is not configurable, construct it
            self.register_preprocessor(preprocessor(), enabled)
            return None

        # Preprocessor is an instance of something without a __call__
        # attribute.
        raise TypeError(
            "preprocessor must be callable or an importable constructor, got %r" % preprocessor
        )

    def _init_preprocessors(self):
        """
        Register all of the preprocessors needed for this exporter, disabled
        unless specified explicitly.
        """
        self._preprocessors = []

        # Load default preprocessors (not necessarily enabled by default).
        for preprocessor in self.default_preprocessors:
            self.register_preprocessor(preprocessor)

        # Load user-specified preprocessors. Enable by default.
        for preprocessor in self.preprocessors:
            self.register_preprocessor(preprocessor, enabled=True)

    def _init_resources(self, resources):
        # Make sure the resources dict is of ResourcesDict type.
        if resources is None:
            resources = ResourcesDict()
        if not isinstance(resources, ResourcesDict):
            new_resources = ResourcesDict()
            new_resources.update(resources)
            resources = new_resources

        # Make sure the metadata extension exists in resources
        if "metadata" in resources:
            if not isinstance(resources["metadata"], ResourcesDict):
                new_metadata = ResourcesDict()
                new_metadata.update(resources["metadata"])
                resources["metadata"] = new_metadata
        else:
            resources["metadata"] = ResourcesDict()
        if not resources["metadata"]["name"]:
            resources["metadata"]["name"] = "Notebook"

        # Set the output extension
        resources["output_extension"] = self.file_extension
        return resources

    def _validate_preprocessor(self, nbc, preprocessor):
        try:
            nbformat.validate(nbc, relax_add_props=True)
        except nbformat.ValidationError:
            self.log.error("Notebook is invalid after preprocessor %s", preprocessor)
            raise

    def _preprocess(self, nb, resources):
        """
        Preprocess the notebook before passing it into the Jinja engine.
        To preprocess the notebook is to successively apply all the
        enabled preprocessors. Output from each preprocessor is passed
        along to the next one.

        Parameters
        ----------
        nb : notebook node
            notebook that is being exported.
        resources : dict
            Additional resources that can be accessed read/write by
            preprocessors.
        """

        # Do a copy.deepcopy first; we can never be sure what the
        # preprocessors might do to their inputs.
        nbc = copy.deepcopy(nb)
        resc = copy.deepcopy(resources)

        if hasattr(validator, "normalize"):
            _, nbc = validator.normalize(nbc)

        # Run each preprocessor on the notebook. Carry the output along
        # to each preprocessor
        for preprocessor in self._preprocessors:
            nbc, resc = preprocessor(nbc, resc)
            if not self.optimistic_validation:
                self._validate_preprocessor(nbc, preprocessor)

        if self.optimistic_validation and self._preprocessors:
            self._validate_preprocessor(nbc, preprocessor)

        return nbc, resc