Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/nbconvert/exporters/exporter.py: 28%
134 statements
« prev ^ index » next coverage.py v7.2.7, created at 2023-07-01 06:54 +0000
« prev ^ index » next coverage.py v7.2.7, created at 2023-07-01 06:54 +0000
1"""This module defines a base Exporter class. For Jinja template-based export,
2see templateexporter.py.
3"""
5# Copyright (c) Jupyter Development Team.
6# Distributed under the terms of the Modified BSD License.
9import collections
10import copy
11import datetime
12import os
13import sys
14import typing as t
16import nbformat
17from nbformat import NotebookNode, validator
18from traitlets import Bool, HasTraits, List, TraitError, Unicode
19from traitlets.config import Config
20from traitlets.config.configurable import LoggingConfigurable
21from traitlets.utils.importstring import import_item
class ResourcesDict(collections.defaultdict):
    """Resource mapping whose missing keys read as the empty string.

    Unlike a plain ``defaultdict``, the fallback value is *not* inserted
    into the mapping on access; it is only returned.
    """

    def __missing__(self, key):
        """Return the empty-string fallback for any absent *key*."""
        return ""
class FilenameExtension(Unicode):
    """A Unicode trait constrained to filename extensions (".ext" form)."""

    default_value = ""
    info_text = "a filename extension, beginning with a dot"

    def validate(self, obj, value):
        """Coerce *value* to unicode and require a leading dot when non-empty."""
        # Let the Unicode base trait perform the cast first.
        coerced = super().validate(obj, value)
        # The empty string is allowed; anything else must begin with a dot.
        if not coerced or coerced.startswith("."):
            return coerced
        msg = "FileExtension trait '{}' does not begin with a dot: {!r}"
        raise TraitError(msg.format(self.name, coerced))
class Exporter(LoggingConfigurable):
    """
    Class containing methods that sequentially run a list of preprocessors on a
    NotebookNode object and then return the modified NotebookNode object and
    accompanying resources dict.
    """

    # Whether this exporter (and subclasses) is active at all.
    enabled = Bool(True, help="Disable this exporter (and any exporters inherited from it).").tag(
        config=True
    )

    # Extension (including the leading dot) used when writing output to disk.
    file_extension = FilenameExtension(
        help="Extension of the file that should be written to disk"
    ).tag(config=True)

    # When True, notebook validation runs once after all preprocessors,
    # instead of after each preprocessor.
    # (fixed typo in help text: "preprocesors" -> "preprocessors")
    optimistic_validation = Bool(
        False,
        help="Reduces the number of validation steps so that it only occurs after all preprocessors have run.",
    ).tag(config=True)

    # MIME type of the result file, for HTTP response headers.
    # This is *not* a traitlet, because we want to be able to access it from
    # the class, not just on instances.
    output_mimetype = ""

    # Should this converter be accessible from the notebook front-end?
    # If so, should be a friendly name to display (and possibly translated).
    export_from_notebook: str = None  # type:ignore

    # Configurability, allows the user to easily add filters and preprocessors.
    preprocessors = List(help="""List of preprocessors, by name or namespace, to enable.""").tag(
        config=True
    )

    # Internal list of registered preprocessor callables/instances.
    _preprocessors = List()

    # Preprocessors registered for every exporter; each stays disabled
    # unless explicitly enabled via configuration.
    default_preprocessors = List(
        [
            "nbconvert.preprocessors.TagRemovePreprocessor",
            "nbconvert.preprocessors.RegexRemovePreprocessor",
            "nbconvert.preprocessors.ClearOutputPreprocessor",
            "nbconvert.preprocessors.ExecutePreprocessor",
            "nbconvert.preprocessors.coalesce_streams",
            "nbconvert.preprocessors.SVG2PDFPreprocessor",
            "nbconvert.preprocessors.LatexPreprocessor",
            "nbconvert.preprocessors.HighlightMagicsPreprocessor",
            "nbconvert.preprocessors.ExtractOutputPreprocessor",
            "nbconvert.preprocessors.ExtractAttachmentsPreprocessor",
            "nbconvert.preprocessors.ClearMetadataPreprocessor",
        ],
        help="""List of preprocessors available by default, by name, namespace,
        instance, or type.""",
    ).tag(config=True)
105 def __init__(self, config=None, **kw):
106 """
107 Public constructor
109 Parameters
110 ----------
111 config : ``traitlets.config.Config``
112 User configuration instance.
113 `**kw`
114 Additional keyword arguments passed to parent __init__
116 """
117 with_default_config = self.default_config
118 if config:
119 with_default_config.merge(config)
121 super().__init__(config=with_default_config, **kw)
123 self._init_preprocessors()
124 self._nb_metadata = {}
    @property
    def default_config(self):
        """Base configuration for this exporter; user config is merged on top in __init__."""
        return Config()
130 def from_notebook_node(
131 self, nb: NotebookNode, resources: t.Optional[t.Any] = None, **kw: t.Any
132 ) -> t.Tuple[NotebookNode, t.Dict]:
133 """
134 Convert a notebook from a notebook node instance.
136 Parameters
137 ----------
138 nb : :class:`~nbformat.NotebookNode`
139 Notebook node (dict-like with attr-access)
140 resources : dict
141 Additional resources that can be accessed read/write by
142 preprocessors and filters.
143 `**kw`
144 Ignored
146 """
147 nb_copy = copy.deepcopy(nb)
148 resources = self._init_resources(resources)
150 if "language" in nb["metadata"]:
151 resources["language"] = nb["metadata"]["language"].lower()
153 # Preprocess
154 nb_copy, resources = self._preprocess(nb_copy, resources)
155 notebook_name = ""
156 if resources is not None:
157 name = resources.get("metadata", {}).get("name", "")
158 path = resources.get("metadata", {}).get("path", "")
159 notebook_name = os.path.join(path, name)
160 self._nb_metadata[notebook_name] = nb_copy.metadata
161 return nb_copy, resources
163 def from_filename(
164 self, filename: str, resources: t.Optional[dict] = None, **kw: t.Any
165 ) -> t.Tuple[NotebookNode, t.Dict]:
166 """
167 Convert a notebook from a notebook file.
169 Parameters
170 ----------
171 filename : str
172 Full filename of the notebook file to open and convert.
173 resources : dict
174 Additional resources that can be accessed read/write by
175 preprocessors and filters.
176 `**kw`
177 Ignored
179 """
180 # Pull the metadata from the filesystem.
181 if resources is None:
182 resources = ResourcesDict()
183 if "metadata" not in resources or resources["metadata"] == "": # noqa
184 resources["metadata"] = ResourcesDict()
185 path, basename = os.path.split(filename)
186 notebook_name = os.path.splitext(basename)[0]
187 resources["metadata"]["name"] = notebook_name
188 resources["metadata"]["path"] = path
190 modified_date = datetime.datetime.fromtimestamp(
191 os.path.getmtime(filename), tz=datetime.timezone.utc
192 )
193 # datetime.strftime date format for ipython
194 if sys.platform == "win32":
195 date_format = "%B %d, %Y"
196 else:
197 date_format = "%B %-d, %Y"
198 resources["metadata"]["modified_date"] = modified_date.strftime(date_format)
200 with open(filename, encoding="utf-8") as f:
201 return self.from_file(f, resources=resources, **kw)
203 def from_file(
204 self, file_stream: t.Any, resources: t.Optional[dict] = None, **kw: t.Any
205 ) -> t.Tuple[NotebookNode, dict]:
206 """
207 Convert a notebook from a notebook file.
209 Parameters
210 ----------
211 file_stream : file-like object
212 Notebook file-like object to convert.
213 resources : dict
214 Additional resources that can be accessed read/write by
215 preprocessors and filters.
216 `**kw`
217 Ignored
219 """
220 return self.from_notebook_node(
221 nbformat.read(file_stream, as_version=4), resources=resources, **kw
222 )
224 def register_preprocessor(self, preprocessor, enabled=False):
225 """
226 Register a preprocessor.
227 Preprocessors are classes that act upon the notebook before it is
228 passed into the Jinja templating engine. Preprocessors are also
229 capable of passing additional information to the Jinja
230 templating engine.
232 Parameters
233 ----------
234 preprocessor : `nbconvert.preprocessors.Preprocessor`
235 A dotted module name, a type, or an instance
236 enabled : bool
237 Mark the preprocessor as enabled
239 """
240 if preprocessor is None:
241 msg = "preprocessor must not be None"
242 raise TypeError(msg)
243 isclass = isinstance(preprocessor, type)
244 constructed = not isclass
246 # Handle preprocessor's registration based on it's type
247 if constructed and isinstance(
248 preprocessor,
249 str,
250 ):
251 # Preprocessor is a string, import the namespace and recursively call
252 # this register_preprocessor method
253 preprocessor_cls = import_item(preprocessor)
254 return self.register_preprocessor(preprocessor_cls, enabled)
256 if constructed and hasattr(preprocessor, "__call__"): # noqa
257 # Preprocessor is a function, no need to construct it.
258 # Register and return the preprocessor.
259 if enabled:
260 preprocessor.enabled = True
261 self._preprocessors.append(preprocessor)
262 return preprocessor
264 elif isclass and issubclass(preprocessor, HasTraits):
265 # Preprocessor is configurable. Make sure to pass in new default for
266 # the enabled flag if one was specified.
267 self.register_preprocessor(preprocessor(parent=self), enabled)
269 elif isclass:
270 # Preprocessor is not configurable, construct it
271 self.register_preprocessor(preprocessor(), enabled)
273 else:
274 # Preprocessor is an instance of something without a __call__
275 # attribute.
276 raise TypeError(
277 "preprocessor must be callable or an importable constructor, got %r" % preprocessor
278 )
280 def _init_preprocessors(self):
281 """
282 Register all of the preprocessors needed for this exporter, disabled
283 unless specified explicitly.
284 """
285 self._preprocessors = []
287 # Load default preprocessors (not necessarily enabled by default).
288 for preprocessor in self.default_preprocessors:
289 self.register_preprocessor(preprocessor)
291 # Load user-specified preprocessors. Enable by default.
292 for preprocessor in self.preprocessors:
293 self.register_preprocessor(preprocessor, enabled=True)
295 def _init_resources(self, resources):
296 # Make sure the resources dict is of ResourcesDict type.
297 if resources is None:
298 resources = ResourcesDict()
299 if not isinstance(resources, ResourcesDict):
300 new_resources = ResourcesDict()
301 new_resources.update(resources)
302 resources = new_resources
304 # Make sure the metadata extension exists in resources
305 if "metadata" in resources:
306 if not isinstance(resources["metadata"], ResourcesDict):
307 new_metadata = ResourcesDict()
308 new_metadata.update(resources["metadata"])
309 resources["metadata"] = new_metadata
310 else:
311 resources["metadata"] = ResourcesDict()
312 if not resources["metadata"]["name"]:
313 resources["metadata"]["name"] = "Notebook"
315 # Set the output extension
316 resources["output_extension"] = self.file_extension
317 return resources
    def _validate_preprocessor(self, nbc, preprocessor):
        """Validate notebook *nbc*; log which *preprocessor* produced it and re-raise on failure."""
        try:
            nbformat.validate(nbc, relax_add_props=True)
        except nbformat.ValidationError:
            # Name the offending preprocessor before propagating the error.
            self.log.error("Notebook is invalid after preprocessor %s", preprocessor)
            raise
326 def _preprocess(self, nb, resources):
327 """
328 Preprocess the notebook before passing it into the Jinja engine.
329 To preprocess the notebook is to successively apply all the
330 enabled preprocessors. Output from each preprocessor is passed
331 along to the next one.
333 Parameters
334 ----------
335 nb : notebook node
336 notebook that is being exported.
337 resources : a dict of additional resources that
338 can be accessed read/write by preprocessors
339 """
341 # Do a copy.deepcopy first,
342 # we are never safe enough with what the preprocessors could do.
343 nbc = copy.deepcopy(nb)
344 resc = copy.deepcopy(resources)
346 if hasattr(validator, "normalize"):
347 _, nbc = validator.normalize(nbc)
349 # Run each preprocessor on the notebook. Carry the output along
350 # to each preprocessor
351 for preprocessor in self._preprocessors:
352 nbc, resc = preprocessor(nbc, resc)
353 if not self.optimistic_validation:
354 self._validate_preprocessor(nbc, preprocessor)
356 if self.optimistic_validation:
357 self._validate_preprocessor(nbc, preprocessor)
359 return nbc, resc