Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/tables/node.py: 26% (292 statements)
coverage.py v7.2.5, created at 2023-05-10 06:15 +0000


"""PyTables nodes."""

import warnings
import functools

from .registry import class_name_dict, class_id_dict
from .exceptions import (ClosedNodeError, NodeError, UndoRedoWarning,
                         PerformanceWarning)
from .path import join_path, split_path, isvisiblepath
from .utils import lazyattr
from .undoredo import move_to_shadow
from .attributeset import AttributeSet, NotLoggedAttributeSet


__docformat__ = 'reStructuredText'
"""The format of documentation strings in this module."""


def _closedrepr(oldmethod):
    """Decorate string representation method to handle closed nodes.

    If the node is closed, a string like this is returned::

        <closed MODULE.CLASS at ADDRESS>

    instead of calling `oldmethod` and returning its result.

    """

    @functools.wraps(oldmethod)
    def newmethod(self):
        if not self._v_isopen:
            return (f'<closed {self.__class__.__module__}.'
                    f'{self.__class__.__name__} at 0x{id(self):x}>')
        return oldmethod(self)

    return newmethod

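# Illustrative usage sketch (not part of the original module): how the
# ``_closedrepr`` behaviour shows up for a user. Once a node has been closed,
# its ``repr()`` falls back to the ``<closed ...>`` form instead of touching
# the no longer valid HDF5 object. The file name ``demo.h5`` is only an
# assumption made up for the example.
#
#     >>> import tables
#     >>> h5file = tables.open_file('demo.h5', 'w')
#     >>> group = h5file.create_group('/', 'g')
#     >>> h5file.close()                      # closes every node in the file
#     >>> repr(group)                         # doctest: +ELLIPSIS
#     '<closed tables.group.Group at 0x...>'
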

class MetaNode(type):
    """Node metaclass.

    This metaclass ensures that its instance classes get registered
    into several dictionaries (namely the `tables.registry.class_name_dict`
    class name dictionary and the `tables.registry.class_id_dict` class
    identifier dictionary).

    It also adds sanity checks to some methods:

    * Check that the node is open when computing its string
      representation, and provide a default string if it is closed.

    """

    def __new__(mcs, name, bases, dict_):
        # Add default behaviour for representing closed nodes.
        for mname in ['__str__', '__repr__']:
            if mname in dict_:
                dict_[mname] = _closedrepr(dict_[mname])

        return type.__new__(mcs, name, bases, dict_)

    def __init__(cls, name, bases, dict_):
        super().__init__(name, bases, dict_)

        # Always register into class name dictionary.
        class_name_dict[cls.__name__] = cls

        # Register into class identifier dictionary only if the class
        # has an identifier and it is different from its parents'.
        cid = getattr(cls, '_c_classid', None)
        if cid is not None:
            for base in bases:
                pcid = getattr(base, '_c_classid', None)
                if pcid == cid:
                    break
            else:
                class_id_dict[cid] = cls

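# Illustrative usage sketch (not part of the original module): because of
# ``MetaNode``, every concrete node class can later be looked up by its class
# name or by its class identifier. ``'Group'``/``'GROUP'`` are used on the
# assumption that the ``Group`` class defines ``_c_classid = 'GROUP'``.
#
#     >>> from tables.registry import class_name_dict, class_id_dict
#     >>> class_name_dict['Group']
#     <class 'tables.group.Group'>
#     >>> class_id_dict['GROUP']
#     <class 'tables.group.Group'>
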

class Node(metaclass=MetaNode):
    """Abstract base class for all PyTables nodes.

    This is the base class for *all* nodes in a PyTables hierarchy. It is an
    abstract class, i.e. it may not be directly instantiated; however, every
    node in the hierarchy is an instance of this class.

    A PyTables node is always hosted in a PyTables *file*, under a *parent
    group*, at a certain *depth* in the node hierarchy. A node knows its own
    *name* in the parent group and its own *path name* in the file.

    All the previous information is location-dependent, i.e. it may change when
    moving or renaming a node in the hierarchy. A node also has
    location-independent information, such as its *HDF5 object identifier* and
    its *attribute set*.

    This class gathers the operations and attributes (both location-dependent
    and independent) which are common to all PyTables nodes, whatever their
    type is. Nonetheless, due to natural naming restrictions, the names of all
    of these members start with a reserved prefix (see the Group class
    in :ref:`GroupClassDescr`).

    Sub-classes with no children (e.g. *leaf nodes*) may define new methods,
    attributes and properties to avoid natural naming restrictions. For
    instance, _v_attrs may be shortened to attrs and _f_rename to
    rename. However, the original methods and attributes should still be
    available.

    .. rubric:: Node attributes

    .. attribute:: _v_depth

        The depth of this node in the tree (a non-negative integer value).

    .. attribute:: _v_file

        The hosting File instance (see :ref:`FileClassDescr`).

    .. attribute:: _v_name

        The name of this node in its parent group (a string).

    .. attribute:: _v_pathname

        The path of this node in the tree (a string).

    .. attribute:: _v_objectid

        A node identifier (may change from run to run).

        .. versionchanged:: 3.0
            The *_v_objectID* attribute has been renamed into *_v_objectid*.

    """

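    # Illustrative usage sketch (not part of the original module): the
    # location-dependent attributes described above, as they would look for a
    # small array created by the user. File and node names are assumptions
    # made up for the example.
    #
    #     >>> import tables
    #     >>> h5file = tables.open_file('demo.h5', 'w')
    #     >>> grp = h5file.create_group('/', 'detector')
    #     >>> arr = h5file.create_array(grp, 'readings', [1, 2, 3], title='raw data')
    #     >>> arr._v_name, arr._v_pathname, arr._v_depth
    #     ('readings', '/detector/readings', 2)
    #     >>> arr._v_title
    #     'raw data'
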

    # By default, attributes accept Undo/Redo.
    _AttributeSet = AttributeSet

    # `_v_parent` is accessed via its file to avoid upwards references.
    def _g_getparent(self):
        """The parent :class:`Group` instance."""
        (parentpath, nodename) = split_path(self._v_pathname)
        return self._v_file._get_node(parentpath)

    _v_parent = property(_g_getparent)

    # '_v_attrs' is defined as a lazy read-only attribute.
    # This saves 0.7s/3.8s.
    @lazyattr
    def _v_attrs(self):
        """The associated `AttributeSet` instance.

        See Also
        --------
        tables.attributeset.AttributeSet : container for the HDF5 attributes

        """

        return self._AttributeSet(self)

    # '_v_title' is a direct read-write shorthand for the 'TITLE' attribute
    # with the empty string as a default value.
    def _g_gettitle(self):
        """A description of this node. A shorthand for the TITLE attribute."""
        if hasattr(self._v_attrs, 'TITLE'):
            return self._v_attrs.TITLE
        else:
            return ''

    def _g_settitle(self, title):
        self._v_attrs.TITLE = title

    _v_title = property(_g_gettitle, _g_settitle)

    # This may be looked up by ``__del__`` when ``__init__`` doesn't get
    # to be called. See ticket #144 for more info.
    _v_isopen = False
    """Whether this node is open or not."""

    # The ``_log`` argument is only meant to be used by ``_g_copy_as_child()``
    # to avoid logging the creation of children nodes of a copied sub-tree.
    def __init__(self, parentnode, name, _log=True):
        # Remember to assign these values in the root group constructor
        # as it does not use this method implementation!

        # If the parent node is a softlink, dereference it.
        if isinstance(parentnode, class_name_dict['SoftLink']):
            parentnode = parentnode.dereference()

        self._v_file = None
        """The hosting File instance (see :ref:`FileClassDescr`)."""

        self._v_isopen = False
        """Whether this node is open or not."""

        self._v_pathname = None
        """The path of this node in the tree (a string)."""

        self._v_name = None
        """The name of this node in its parent group (a string)."""

        self._v_depth = None
        """The depth of this node in the tree (a non-negative integer value).
        """

        self._v_maxtreedepth = parentnode._v_file.params['MAX_TREE_DEPTH']
        """Maximum tree depth before warning the user.

        .. versionchanged:: 3.0
            Renamed into *_v_maxtreedepth* from *_v_maxTreeDepth*.

        """

        self._v__deleting = False
        """Is the node being deleted?"""

        self._v_objectid = None
        """A node identifier (may change from run to run).

        .. versionchanged:: 3.0
            The *_v_objectID* attribute has been renamed into *_v_objectid*.

        """

        validate = new = self._v_new  # set by subclass constructor

        # Is the parent node a group? Is it open?
        self._g_check_group(parentnode)
        parentnode._g_check_open()
        file_ = parentnode._v_file

        # Will the file be able to host a new node?
        if new:
            file_._check_writable()

        # Bind to the parent node and set location-dependent information.
        if new:
            # Only new nodes need to be referenced.
            # Opened nodes are already known by their parent group.
            parentnode._g_refnode(self, name, validate)
        self._g_set_location(parentnode, name)

        try:
            # hdf5extension operations:
            #   Update node attributes.
            self._g_new(parentnode, name, init=True)
            #   Create or open the node and get its object ID.
            if new:
                self._v_objectid = self._g_create()
            else:
                self._v_objectid = self._g_open()

            # The node *has* been created, log that.
            if new and _log and file_.is_undo_enabled():
                self._g_log_create()

            # This allows extra operations after creating the node.
            self._g_post_init_hook()
        except Exception:
            # If anything happens, the node must be closed
            # to undo every possible registration made so far.
            # We do *not* rely on ``__del__()`` doing it later,
            # since it might never be called anyway.
            self._f_close()
            raise

    def _g_log_create(self):
        self._v_file._log('CREATE', self._v_pathname)

    def __del__(self):
        # Closed `Node` instances can not be killed and revived.
        # Instead, accessing a closed and deleted (from memory, not
        # disk) one yields a *new*, open `Node` instance. This is
        # because of two reasons:
        #
        # 1. Predictability. After closing a `Node` and deleting it,
        #    only one thing can happen when accessing it again: a new,
        #    open `Node` instance is returned. If closed nodes could be
        #    revived, one could get either a closed or an open `Node`.
        #
        # 2. Ease of use. If the user wants to access a closed node
        #    again, the only condition would be that no references to
        #    the `Node` instance were left. If closed nodes could be
        #    revived, the user would also need to force the closed
        #    `Node` out of memory, which is not a trivial task.
        #

        if not self._v_isopen:
            return  # the node is already closed or not initialized

        self._v__deleting = True

        # If we get here, the `Node` is still open.
        try:
            node_manager = self._v_file._node_manager
            node_manager.drop_node(self, check_unregistered=False)
        finally:
            # At this point the node can still be open if there is still some
            # alive reference around (e.g. if the __del__ method is called
            # explicitly by the user).
            if self._v_isopen:
                self._v__deleting = True
                self._f_close()

    def _g_pre_kill_hook(self):
        """Code to be called before killing the node."""
        pass

    def _g_create(self):
        """Create a new HDF5 node and return its object identifier."""
        raise NotImplementedError

    def _g_open(self):
        """Open an existing HDF5 node and return its object identifier."""
        raise NotImplementedError

    def _g_check_open(self):
        """Check that the node is open.

        If the node is closed, a `ClosedNodeError` is raised.

        """

        if not self._v_isopen:
            raise ClosedNodeError("the node object is closed")
        assert self._v_file.isopen, "found an open node in a closed file"

    def _g_set_location(self, parentnode, name):
        """Set location-dependent attributes.

        Sets the location-dependent attributes of this node to reflect
        that it is placed under the specified `parentnode`, with the
        specified `name`.

        This also triggers the insertion of file references to this
        node. If the maximum recommended tree depth is exceeded, a
        `PerformanceWarning` is issued.

        """

        file_ = parentnode._v_file
        parentdepth = parentnode._v_depth

        self._v_file = file_
        self._v_isopen = True

        root_uep = file_.root_uep
        if name.startswith(root_uep):
            # This has been called from File._get_node().
            assert parentdepth == 0
            if root_uep == "/":
                self._v_pathname = name
            else:
                self._v_pathname = name[len(root_uep):]
            _, self._v_name = split_path(name)
            self._v_depth = name.count("/") - root_uep.count("/") + 1
        else:
            # If we enter here it is because this has been called elsewhere.
            self._v_name = name
            self._v_pathname = join_path(parentnode._v_pathname, name)
            self._v_depth = parentdepth + 1

        # Check if the node is too deep in the tree.
        if parentdepth >= self._v_maxtreedepth:
            warnings.warn("""\
node ``%s`` is exceeding the recommended maximum depth (%d);\
 be ready to see PyTables asking for *lots* of memory and possibly slow I/O"""
                          % (self._v_pathname, self._v_maxtreedepth),
                          PerformanceWarning)

        if self._v_pathname != '/':
            file_._node_manager.cache_node(self, self._v_pathname)

    def _g_update_location(self, newparentpath):
        """Update location-dependent attributes.

        Updates location data when an ancestor node has changed its
        location in the hierarchy to `newparentpath`. In fact, this
        method is expected to be called by an ancestor of this node.

        This also triggers the update of file references to this node.
        If the maximum recommended node depth is exceeded, a
        `PerformanceWarning` is issued. This warning is assured to be
        unique.

        """

        oldpath = self._v_pathname
        newpath = join_path(newparentpath, self._v_name)
        newdepth = newpath.count('/')

        self._v_pathname = newpath
        self._v_depth = newdepth

        # Check if the node is too deep in the tree.
        if newdepth > self._v_maxtreedepth:
            warnings.warn("""\
moved descendant node is exceeding the recommended maximum depth (%d);\
 be ready to see PyTables asking for *lots* of memory and possibly slow I/O"""
                          % (self._v_maxtreedepth,), PerformanceWarning)

        node_manager = self._v_file._node_manager
        node_manager.rename_node(oldpath, newpath)

        # Tell dependent objects about the new location of this node.
        self._g_update_dependent()

    def _g_del_location(self):
        """Clear location-dependent attributes.

        This also triggers the removal of file references to this node.

        """

        node_manager = self._v_file._node_manager
        pathname = self._v_pathname

        if not self._v__deleting:
            node_manager.drop_from_cache(pathname)
            # Note: node_manager.drop_node() does not remove the node from
            # the registry if it is still open.
            node_manager.registry.pop(pathname, None)

        self._v_file = None
        self._v_isopen = False
        self._v_pathname = None
        self._v_name = None
        self._v_depth = None

    def _g_post_init_hook(self):
        """Code to be run after node creation and before creation logging."""
        pass

    def _g_update_dependent(self):
        """Update dependent objects after a location change.

        All dependent objects (but not nodes!) referencing this node
        must be updated here.

        """

        if '_v_attrs' in self.__dict__:
            self._v_attrs._g_update_node_location(self)

    def _f_close(self):
        """Close this node in the tree.

        This releases all resources held by the node, so it should not
        be used again. On nodes with data, it may be flushed to disk.

        You should not need to close nodes manually because they are
        automatically opened/closed when they are loaded/evicted from
        the integrated LRU cache.

        """

        # After calling ``_f_close()``, two conditions are met:
        #
        # 1. The node object is detached from the tree.
        # 2. *Every* attribute of the node is removed.
        #
        # Thus, cleanup operations used in ``_f_close()`` in sub-classes
        # must be run *before* calling the method in the superclass.

        if not self._v_isopen:
            return  # the node is already closed

        myDict = self.__dict__

        # Close the associated `AttributeSet`
        # only if it has already been placed in the object's dictionary.
        if '_v_attrs' in myDict:
            self._v_attrs._g_close()

        # Detach the node from the tree if necessary.
        self._g_del_location()

        # Finally, clear all remaining attributes from the object.
        myDict.clear()

        # Just add a final flag to signal that the node is closed:
        self._v_isopen = False

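    # Illustrative usage sketch (not part of the original module): after
    # ``_f_close()`` the node object refuses further tree operations, and its
    # repr falls back to the ``<closed ...>`` form. Node and file names are
    # assumptions made up for the example.
    #
    #     >>> arr = h5file.create_array('/', 'x', [1, 2, 3])
    #     >>> arr._f_close()
    #     >>> arr._f_rename('y')      # doctest: +IGNORE_EXCEPTION_DETAIL
    #     Traceback (most recent call last):
    #         ...
    #     ClosedNodeError: the node object is closed
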

    def _g_remove(self, recursive, force):
        """Remove this node from the hierarchy.

        If the node has children, recursive removal must be stated by
        giving `recursive` a true value; otherwise, a `NodeError` will
        be raised.

        If `force` is set to true, the node will be removed whether it
        has children or not (useful for deleting hard links).

        It does not log the change.

        """

        # Remove the node from the PyTables hierarchy.
        parent = self._v_parent
        parent._g_unrefnode(self._v_name)
        # Close the node itself.
        self._f_close()
        # hdf5extension operations:
        #   Remove the node from the HDF5 hierarchy.
        self._g_delete(parent)

    def _f_remove(self, recursive=False, force=False):
        """Remove this node from the hierarchy.

        If the node has children, recursive removal must be stated by giving
        recursive a true value; otherwise, a NodeError will be raised.

        If the node is a link to a Group object, and you are sure that you want
        to delete it, you can do this by setting the force flag to true.

        """

        self._g_check_open()
        file_ = self._v_file
        file_._check_writable()

        if file_.is_undo_enabled():
            self._g_remove_and_log(recursive, force)
        else:
            self._g_remove(recursive, force)

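    # Illustrative usage sketch (not part of the original module): removing a
    # group that still has children requires ``recursive=True``. Names are
    # assumptions made up for the example.
    #
    #     >>> grp = h5file.create_group('/', 'scratch')
    #     >>> _ = h5file.create_array(grp, 'tmp', [0])
    #     >>> grp._f_remove()         # doctest: +IGNORE_EXCEPTION_DETAIL
    #     Traceback (most recent call last):
    #         ...
    #     NodeError: ...
    #     >>> grp._f_remove(recursive=True)   # removes /scratch and everything below
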

    def _g_remove_and_log(self, recursive, force):
        file_ = self._v_file
        oldpathname = self._v_pathname
        # Log *before* moving to use the right shadow name.
        file_._log('REMOVE', oldpathname)
        move_to_shadow(file_, oldpathname)

    def _g_move(self, newparent, newname):
        """Move this node in the hierarchy.

        Moves the node into the given `newparent`, with the given
        `newname`.

        It does not log the change.

        """

        oldparent = self._v_parent
        oldname = self._v_name
        oldpathname = self._v_pathname  # to move the HDF5 node

        # Try to insert the node into the new parent.
        newparent._g_refnode(self, newname)
        # Remove the node from the old parent.
        oldparent._g_unrefnode(oldname)

        # Remove location information for this node.
        self._g_del_location()
        # Set new location information for this node.
        self._g_set_location(newparent, newname)

        # hdf5extension operations:
        #   Update node attributes.
        self._g_new(newparent, self._v_name, init=False)
        # Move the node.
        # self._v_parent._g_move_node(oldpathname, self._v_pathname)
        self._v_parent._g_move_node(oldparent._v_objectid, oldname,
                                    newparent._v_objectid, newname,
                                    oldpathname, self._v_pathname)

        # Tell dependent objects about the new location of this node.
        self._g_update_dependent()

    def _f_rename(self, newname, overwrite=False):
        """Rename this node in place.

        Changes the name of a node to *newname* (a string). If a node with the
        same newname already exists and overwrite is true, recursively remove
        it before renaming.

        """

        self._f_move(newname=newname, overwrite=overwrite)

    def _f_move(self, newparent=None, newname=None,
                overwrite=False, createparents=False):
        """Move or rename this node.

        Moves a node into a new parent group, or changes the name of the
        node. newparent can be a Group object (see :ref:`GroupClassDescr`) or a
        pathname in string form. If it is not specified or None, the current
        parent group is chosen as the new parent. newname must be a string
        with a new name. If it is not specified or None, the current name is
        chosen as the new name. If createparents is true, the groups needed to
        reach the given new parent group path will be created.

        Moving a node across databases is not allowed, nor is moving a node
        *into* itself. These result in a NodeError. However, moving a node
        *over* itself is allowed and simply does nothing. Moving over another
        existing node is similarly not allowed, unless the optional overwrite
        argument is true, in which case that node is recursively removed before
        moving.

        Usually, only the first argument will be used, effectively moving the
        node to a new location without changing its name. Using only the
        second argument is equivalent to renaming the node in place.

        """

        self._g_check_open()
        file_ = self._v_file
        oldparent = self._v_parent
        oldname = self._v_name

        # Set default arguments.
        if newparent is None and newname is None:
            raise NodeError("you should specify at least "
                            "a ``newparent`` or a ``newname`` parameter")
        if newparent is None:
            newparent = oldparent
        if newname is None:
            newname = oldname

        # Get destination location.
        if hasattr(newparent, '_v_file'):  # from node
            newfile = newparent._v_file
            newpath = newparent._v_pathname
        elif hasattr(newparent, 'startswith'):  # from path
            newfile = file_
            newpath = newparent
        else:
            raise TypeError("new parent is not a node nor a path: %r"
                            % (newparent,))

        # Validity checks on arguments.
        # Is it in the same file?
        if newfile is not file_:
            raise NodeError("nodes can not be moved across databases; "
                            "please make a copy of the node")

        # The movement always fails if the hosting file can not be modified.
        file_._check_writable()

        # Moving over itself?
        oldpath = oldparent._v_pathname
        if newpath == oldpath and newname == oldname:
            # This is equivalent to renaming the node to its current name,
            # and it does not change the referenced object,
            # so it is an allowed no-op.
            return

        # Moving into itself?
        self._g_check_not_contains(newpath)

        # Note that the previous checks allow us to go ahead and create
        # the parent groups if `createparents` is true. `newparent` is
        # used instead of `newpath` to avoid accepting `Node` objects
        # when `createparents` is true.
        newparent = file_._get_or_create_path(newparent, createparents)
        self._g_check_group(newparent)  # Is it a group?

        # Moving over an existing node?
        self._g_maybe_remove(newparent, newname, overwrite)

        # Move the node.
        oldpathname = self._v_pathname
        self._g_move(newparent, newname)

        # Log the change.
        if file_.is_undo_enabled():
            self._g_log_move(oldpathname)

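    # Illustrative usage sketch (not part of the original module): the two
    # common uses of ``_f_move``. Paths and names are assumptions made up for
    # the example; ``File.move_node()`` and ``File.rename_node()`` are the
    # usual file-level counterparts.
    #
    #     >>> arr = h5file.create_array('/', 'raw', [1, 2, 3])
    #     >>> arr._f_move(newparent='/archive/2023', createparents=True)
    #     >>> arr._v_pathname
    #     '/archive/2023/raw'
    #     >>> arr._f_move(newname='raw_v1')    # rename in place
    #     >>> arr._v_pathname
    #     '/archive/2023/raw_v1'
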

    def _g_log_move(self, oldpathname):
        self._v_file._log('MOVE', oldpathname, self._v_pathname)

    def _g_copy(self, newparent, newname, recursive, _log=True, **kwargs):
        """Copy this node and return the new one.

        Creates and returns a copy of the node in the given `newparent`,
        with the given `newname`. If `recursive` copy is stated, all
        descendants are copied as well. Additional keyword arguments may
        affect the way that the copy is made. Unknown arguments must be
        ignored. On recursive copies, all keyword arguments must be
        passed on to the children invocation of this method.

        If `_log` is false, the change is not logged. This is *only*
        intended to be used by ``_g_copy_as_child()`` as a means of
        optimising sub-tree copies.

        """

        raise NotImplementedError

    def _g_copy_as_child(self, newparent, **kwargs):
        """Copy this node as a child of another group.

        Copies just this node into `newparent`, not recursing children
        nor overwriting nodes nor logging the copy. This is intended to
        be used when copying whole sub-trees.

        """

        return self._g_copy(newparent, self._v_name,
                            recursive=False, _log=False, **kwargs)

    def _f_copy(self, newparent=None, newname=None,
                overwrite=False, recursive=False, createparents=False,
                **kwargs):
        """Copy this node and return the new node.

        Creates and returns a copy of the node, maybe in a different place in
        the hierarchy. newparent can be a Group object (see
        :ref:`GroupClassDescr`) or a pathname in string form. If it is not
        specified or None, the current parent group is chosen as the new
        parent. newname must be a string with a new name. If it is not
        specified or None, the current name is chosen as the new name. If
        recursive copy is stated, all descendants are copied as well. If
        createparents is true, the groups needed to reach the given new parent
        group path will be created.

        Copying a node across databases is supported but can not be
        undone. Copying a node over itself is not allowed, nor is
        recursively copying a node into itself. These result in a
        NodeError. Copying over another existing node is similarly not allowed,
        unless the optional overwrite argument is true, in which case that node
        is recursively removed before copying.

        Additional keyword arguments may be passed to customize the copying
        process. For instance, title and filters may be changed, user
        attributes may or may not be copied, data may be sub-sampled, stats
        may be collected, etc. See the documentation for the particular node
        type.

        Using only the first argument is equivalent to copying the node to a
        new location without changing its name. Using only the second argument
        is equivalent to making a copy of the node in the same group.

        """

        self._g_check_open()
        srcfile = self._v_file
        srcparent = self._v_parent
        srcname = self._v_name

        dstparent = newparent
        dstname = newname

        # Set default arguments.
        if dstparent is None and dstname is None:
            raise NodeError("you should specify at least "
                            "a ``newparent`` or a ``newname`` parameter")
        if dstparent is None:
            dstparent = srcparent
        if dstname is None:
            dstname = srcname

        # Get destination location.
        if hasattr(dstparent, '_v_file'):  # from node
            dstfile = dstparent._v_file
            dstpath = dstparent._v_pathname
        elif hasattr(dstparent, 'startswith'):  # from path
            dstfile = srcfile
            dstpath = dstparent
        else:
            raise TypeError("new parent is not a node nor a path: %r"
                            % (dstparent,))

        # Validity checks on arguments.
        if dstfile is srcfile:
            # Copying over itself?
            srcpath = srcparent._v_pathname
            if dstpath == srcpath and dstname == srcname:
                raise NodeError(
                    "source and destination nodes are the same node: ``%s``"
                    % self._v_pathname)

            # Recursively copying into itself?
            if recursive:
                self._g_check_not_contains(dstpath)

        # Note that the previous checks allow us to go ahead and create
        # the parent groups if `createparents` is true. `dstparent` is
        # used instead of `dstpath` because it may be in another file, and
        # to avoid accepting `Node` objects when `createparents` is
        # true.
        dstparent = srcfile._get_or_create_path(dstparent, createparents)
        self._g_check_group(dstparent)  # Is it a group?

        # Copying to another file with undo enabled?
        if dstfile is not srcfile and srcfile.is_undo_enabled():
            warnings.warn("copying across databases can not be undone "
                          "nor redone from this database",
                          UndoRedoWarning)

        # Copying over an existing node?
        self._g_maybe_remove(dstparent, dstname, overwrite)

        # Copy the node.
        # The constructor of the new node takes care of logging.
        return self._g_copy(dstparent, dstname, recursive, **kwargs)

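    # Illustrative usage sketch (not part of the original module): copying a
    # node within the same file and into another open file. Paths and names
    # are assumptions made up for the example (leaf classes document the extra
    # keyword arguments, such as ``title``, that they accept).
    #
    #     >>> arr = h5file.create_array('/', 'data', [1, 2, 3])
    #     >>> copy1 = arr._f_copy(newparent='/backup', createparents=True)
    #     >>> copy1._v_pathname
    #     '/backup/data'
    #     >>> other = tables.open_file('other.h5', 'w')
    #     >>> copy2 = arr._f_copy(newparent=other.root, newname='data_copy')
    #     >>> copy2._v_file is other
    #     True
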

    def _f_isvisible(self):
        """Is this node visible?"""

        self._g_check_open()
        return isvisiblepath(self._v_pathname)

    def _g_check_group(self, node):
        # Node must be defined in order to define a Group.
        # However, we need to know Group here.
        # Using class_name_dict avoids a circular import.
        if not isinstance(node, class_name_dict['Node']):
            raise TypeError("new parent is not a registered node: %s"
                            % node._v_pathname)
        if not isinstance(node, class_name_dict['Group']):
            raise TypeError("new parent node ``%s`` is not a group"
                            % node._v_pathname)

    def _g_check_not_contains(self, pathname):
        # The not-a-TARDIS test. ;)
        mypathname = self._v_pathname
        if (mypathname == '/'  # all nodes fall below the root group
                or pathname == mypathname
                or pathname.startswith(mypathname + '/')):
            raise NodeError("can not move or recursively copy node ``%s`` "
                            "into itself" % mypathname)

    def _g_maybe_remove(self, parent, name, overwrite):
        if name in parent:
            if not overwrite:
                raise NodeError(
                    f"destination group ``{parent._v_pathname}`` already "
                    f"has a node named ``{name}``; you may want to use the "
                    f"``overwrite`` argument")
            parent._f_get_child(name)._f_remove(True)

    def _g_check_name(self, name):
        """Check validity of name for this particular kind of node.

        This is invoked once the standard HDF5 and natural naming checks
        have successfully passed.

        """

        if name.startswith('_i_'):
            # This is reserved for table index groups.
            raise ValueError(
                "node name starts with reserved prefix ``_i_``: %s" % name)

    def _f_getattr(self, name):
        """Get a PyTables attribute from this node.

        If the named attribute does not exist, an AttributeError is
        raised.

        """

        return getattr(self._v_attrs, name)

    def _f_setattr(self, name, value):
        """Set a PyTables attribute for this node.

        If the node already has a large number of attributes, a
        PerformanceWarning is issued.

        """

        setattr(self._v_attrs, name, value)

    def _f_delattr(self, name):
        """Delete a PyTables attribute from this node.

        If the named attribute does not exist, an AttributeError is
        raised.

        """

        delattr(self._v_attrs, name)

    # </attribute handling>

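    # Illustrative usage sketch (not part of the original module): the
    # ``_f_*attr`` helpers and the ``_v_attrs`` natural-naming interface are
    # two views of the same HDF5 attribute set. The attribute name and value
    # are assumptions made up for the example.
    #
    #     >>> arr = h5file.create_array('/', 'temps', [21.0, 22.5])
    #     >>> arr._f_setattr('units', 'degC')
    #     >>> arr._f_getattr('units')
    #     'degC'
    #     >>> arr._v_attrs.units               # same attribute, natural naming
    #     'degC'
    #     >>> arr._f_delattr('units')
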

class NotLoggedMixin:
    # Include this class in your inheritance tree
    # to prevent changes to instances of your class from being logged.

    _AttributeSet = NotLoggedAttributeSet

    def _g_log_create(self):
        pass

    def _g_log_move(self, oldpathname):
        pass

    def _g_remove_and_log(self, recursive, force):
        self._g_remove(recursive, force)

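# Illustrative sketch (not part of the original module): how a node class can
# opt out of undo/redo logging. ``SomeInternalGroup`` is a hypothetical name
# made up for the example; PyTables combines ``NotLoggedMixin`` with concrete
# node classes in a similar way for its internal bookkeeping nodes.
#
#     class SomeInternalGroup(NotLoggedMixin, Group):
#         _c_classid = 'SOMEINTERNALGROUP'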