
Source Code for Module rekall.plugins.response.forensic_artifacts

# Rekall Memory Forensics
# Copyright 2016 Google Inc. All Rights Reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or (at
# your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#

"""This module implements plugins related to forensic artifacts.

https://github.com/ForensicArtifacts
"""

__author__ = "Michael Cohen <scudette@google.com>"
import csv
import datetime
import json
import platform
import os
import StringIO
import sys
import zipfile

import yaml

from artifacts import definitions
from artifacts import errors

from rekall import plugin
from rekall import obj
from rekall_lib import yaml_utils
from rekall.ui import text
from rekall.ui import json_renderer
from rekall.plugins.response import common

from rekall_lib import registry


class ArtifactResult(object):
    """Bundle all the results from an artifact."""

    def __init__(self, artifact_name=None, result_type=None, fields=None):
        self.artifact_name = artifact_name
        self.result_type = result_type
        self.results = []
        self.fields = fields or []

    def __iter__(self):
        return iter(self.results)

    def add_result(self, **data):
        if data:
            self.results.append(data)

    def merge(self, other):
        self.results.extend(other)

    def as_dict(self):
        return dict(fields=self.fields,
                    results=self.results,
                    artifact_name=self.artifact_name,
                    result_type=self.result_type)


class BaseArtifactResultWriter(object):
    """Writes the results of artifacts."""
    __abstract = True

    __metaclass__ = registry.MetaclassRegistry

    def __init__(self, session=None, copy_files=False,
                 create_timeline=False):
        self.session = session
        self.copy_files = copy_files
        self.create_timeline = create_timeline

    def write_result(self, result):
        """Writes the artifact result."""

    def _create_timeline(self, artifact_result):
        """Create a new timeline result from the given result.

        We use the output format suitable for the timesketch tool:
        https://github.com/google/timesketch/wiki/UserGuideTimelineFromFile
        """
        artifact_fields = artifact_result.fields
        fields = [
            dict(name="message", type="unicode"),
            dict(name="timestamp", type="int"),
            dict(name="datetime", type="unicode"),
            dict(name="timestamp_desc", type="unicode"),
        ] + artifact_fields

        new_result = ArtifactResult(
            artifact_name=artifact_result.artifact_name,
            result_type="timeline",
            fields=fields)

        for field in artifact_fields:
            # This field is a timestamp - copy the entire row into the
            # timeline.
            if field["type"] == "epoch":
                for row in artifact_result.results:
                    new_row = row.copy()
                    timestamp = row.get(field["name"])
                    if timestamp is None:
                        continue

                    new_row["timestamp"] = int(timestamp)
                    new_row["datetime"] = datetime.datetime.utcfromtimestamp(
                        timestamp).strftime("%Y-%m-%dT%H:%M:%S+00:00")
                    new_row["timestamp_desc"] = artifact_result.artifact_name
                    new_row["message"] = " ".join(
                        unicode(row[field["name"]])
                        for field in artifact_fields
                        if field["name"] in row)
                    new_result.add_result(**new_row)

        return new_result

    def __enter__(self):
        return self

    def __exit__(self, unused_type, unused_value, unused_traceback):
        return


class DirectoryBasedWriter(BaseArtifactResultWriter):
    name = "Directory"

    def __init__(self, output=None, **kwargs):
        super(DirectoryBasedWriter, self).__init__(**kwargs)
        self.dump_dir = output

        # Check if the directory already exists.
        if not os.path.isdir(self.dump_dir):
            raise plugin.PluginError("%s is not a directory" % self.dump_dir)

    def write_file(self, result):
        """Writes a FileInformation object."""
        for row in result.results:
            filename = row["filename"]
            with open(filename, "rb") as in_fd:
                with self.session.GetRenderer().open(
                        directory=self.dump_dir,
                        filename=filename, mode="wb") as out_fd:
                    while 1:
                        data = in_fd.read(1024*1024)
                        if not data:
                            break

                        out_fd.write(data)

    def _write_csv_file(self, out_fd, result):
        fieldnames = [x["name"] for x in result.fields]
        writer = csv.DictWriter(
            out_fd, dialect="excel",
            fieldnames=fieldnames)
        writer.writeheader()
        for row in result.results:
            writer.writerow(row)

    def write_result(self, result):
        """Writes the artifact result."""
        if self.copy_files and result.result_type == "file_information":
            try:
                self.write_file(result)
            except (IOError, OSError) as e:
                self.session.logging.warn("Unable to copy file: %s", e)

        with self.session.GetRenderer().open(
                directory=self.dump_dir,
                filename="artifacts/%s.json" % result.artifact_name,
                mode="wb") as out_fd:
            out_fd.write(json.dumps(result.as_dict(), sort_keys=True))

        with self.session.GetRenderer().open(
                directory=self.dump_dir,
                filename="artifacts/%s.csv" % result.artifact_name,
                mode="wb") as out_fd:
            self._write_csv_file(out_fd, result)

        if self.create_timeline:
            with self.session.GetRenderer().open(
                    directory=self.dump_dir,
                    filename="artifacts/%s.timeline.csv" %
                    result.artifact_name,
                    mode="wb") as out_fd:
                self._write_csv_file(out_fd, self._create_timeline(result))


class ZipBasedWriter(BaseArtifactResultWriter):
    name = "Zip"

    def __init__(self, output=None, **kwargs):
        super(ZipBasedWriter, self).__init__(**kwargs)
        self.output = output

    def __enter__(self):
        self.out_fd = self.session.GetRenderer().open(
            filename=self.output, mode="wb").__enter__()

        self.outzip = zipfile.ZipFile(self.out_fd, mode="w",
                                      compression=zipfile.ZIP_DEFLATED)

        return self

    def __exit__(self, *args):
        self.outzip.close()
        self.out_fd.__exit__(*args)

    def _write_csv_file(self, out_fd, result):
        fieldnames = [x["name"] for x in result.fields]
        writer = csv.DictWriter(
            out_fd, dialect="excel",
            fieldnames=fieldnames)
        writer.writeheader()
        for row in result.results:
            writer.writerow(row)

    def write_file(self, result):
        for row in result.results:
            filename = row["filename"]
            self.outzip.write(filename)

    def write_result(self, result):
        """Writes the artifact result."""
        if self.copy_files and result.result_type == "file_information":
            try:
                self.write_file(result)
            except (IOError, OSError) as e:
                # result is an ArtifactResult and is not subscriptable, so we
                # only report the error itself here.
                self.session.logging.warn(
                    "Unable to copy files into output: %s", e)

        self.outzip.writestr("artifacts/%s.json" % result.artifact_name,
                             json.dumps(result.as_dict(), sort_keys=True),
                             zipfile.ZIP_DEFLATED)

        tmp_fd = StringIO.StringIO()
        self._write_csv_file(tmp_fd, result)
        self.outzip.writestr("artifacts/%s.csv" % result.artifact_name,
                             tmp_fd.getvalue(),
                             zipfile.ZIP_DEFLATED)

        if self.create_timeline:
            tmp_fd = StringIO.StringIO()
            self._write_csv_file(tmp_fd, self._create_timeline(result))
            self.outzip.writestr("artifacts/%s.timeline.csv" %
                                 result.artifact_name,
                                 tmp_fd.getvalue(),
                                 zipfile.ZIP_DEFLATED)


# Rekall defines a new artifact type.
TYPE_INDICATOR_REKALL = "REKALL_EFILTER"


class _FieldDefinitionValidator(object):
    """Loads and validates fields in a dict.

    We check their name, types and if they are optional according to a
    template in _field_definitions.
    """
    _field_definitions = []

    def _LoadFieldDefinitions(self, data, field_definitions):
        for field in field_definitions:
            name = field["name"]

            default = field.get("default")
            required_type = field.get("type")

            if required_type in (str, unicode):
                required_type = basestring

            if default is None and required_type is not None:
                # basestring can't be instantiated.
                if required_type is basestring:
                    default = ""
                else:
                    default = required_type()

            if required_type is None and default is not None:
                required_type = type(default)

            if not field.get("optional"):
                if name not in data:
                    raise errors.FormatError(
                        u'Missing field {}.'.format(name))

            value = data.get(name, default)
            if default is not None and not isinstance(value, required_type):
                raise errors.FormatError(
                    u'field {} has type {}, should be {}.'.format(
                        name, type(data[name]), required_type))

            if field.get("checker"):
                value = field["checker"](self, data)

            setattr(self, name, value)
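
# Each _field_definitions entry describes one expected key. For example, the
# classes below use templates like:
#
#   dict(name="query", type=basestring)         # required string field
#   dict(name="fields", type=list)              # required list field
#   dict(name="supported_os", optional=True,    # optional, with a default
#        default=list(definitions.SUPPORTED_OS))
#   dict(name="key_value_pairs", default=[],    # validated by a checker
#        checker=CheckKeyValuePairs)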


class SourceType(_FieldDefinitionValidator):
    """All sources inherit from this."""

    # Common fields for all sources.
    _common_fields = [
        dict(name="type", optional=False),
        dict(name="supported_os", optional=True, type=list,
             default=list(definitions.SUPPORTED_OS)),
    ]

    def __init__(self, source_definition, artifact=None):
        attributes = source_definition["attributes"]
        # The artifact that owns us.
        self.artifact = artifact
        self.source_definition = source_definition
        self.type_indicator = source_definition["type"]
        self._LoadFieldDefinitions(attributes, self._field_definitions)
        self._LoadFieldDefinitions(source_definition, self._common_fields)

    def is_active(self, **_):
        """Indicates if the source is applicable to the environment."""
        return True

    def apply(self, artifact_name=None, fields=None, result_type=None, **_):
        """Generate ArtifactResult instances."""
        return ArtifactResult(artifact_name=artifact_name,
                              result_type=result_type,
                              fields=fields)


# These are the valid types of Rekall images. They can be used to restrict
# REKALL_EFILTER artifacts to specific types of images. The types which end in
# API refer to the API only version of the similar plugins.
REKALL_IMAGE_TYPES = [
    "Windows", "WindowsAPI",
    "Linux", "LinuxAPI",
    "Darwin", "DarwinAPI"
]
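
# For example, a source definition carrying image_type: ["WindowsAPI"]
# (a hypothetical restriction) would only be active in live API sessions on
# Windows; see RekallEFilterArtifacts.GetImageType below.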


class RekallEFilterArtifacts(SourceType):
    """Class to support Rekall Efilter artifact types."""

    allowed_types = {
        "int": int,
        "unicode": unicode,  # Unicode data.
        "str": str,  # Used for binary data.
        "float": float,
        "epoch": float,  # Dates as epoch timestamps.
        "any": str  # Used for opaque types that can not be further processed.
    }

    _field_definitions = [
        dict(name="query", type=basestring),
        dict(name="query_parameters", default=[], optional=True),
        dict(name="fields", type=list),
        dict(name="type_name", type=basestring),
        dict(name="image_type", type=list, optional=True,
             default=REKALL_IMAGE_TYPES),
    ]

    def __init__(self, source_definition, **kw):
        super(RekallEFilterArtifacts, self).__init__(source_definition, **kw)
        for column in self.fields:
            if "name" not in column or "type" not in column:
                raise errors.FormatError(
                    u"Field definition should have both name and type.")

            mapped_type = column["type"]
            if mapped_type not in self.allowed_types:
                raise errors.FormatError(
                    u"Unsupported type %s." % mapped_type)

    def GetImageType(self, session):
        """Returns one of the standard image types based on the session."""
        result = session.profile.metadata("os").capitalize()

        if session.GetParameter("live_mode") == "API":
            result += "API"

        return result

    def is_active(self, session=None):
        """Determine if this source is active."""
        return (self.image_type and
                self.GetImageType(session) in self.image_type)

    def apply(self, session=None, **kwargs):
        result = super(RekallEFilterArtifacts, self).apply(
            fields=self.fields, result_type=self.type_name, **kwargs)

        if not self.is_active(session):
            return

        search = session.plugins.search(
            query=self.query,
            query_parameters=self.query_parameters)

        for match in search.solve():
            row = {}
            for column in self.fields:
                name = column["name"]
                type = column["type"]
                value = match.get(name)
                if value is None:
                    continue

                row[name] = RekallEFilterArtifacts.allowed_types[
                    type](value)

            result.add_result(**row)

        yield result
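
# An illustrative REKALL_EFILTER source definition (hypothetical query and
# field names; the YAML layout follows the _field_definitions above):
#
#   sources:
#     - type: REKALL_EFILTER
#       attributes:
#         query: "select proc.name, proc.pid from pslist()"
#         type_name: "pslist"
#         fields:
#           - {name: name, type: unicode}
#           - {name: pid, type: int}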


class LiveModeSourceMixin(object):
    def is_active(self, session=None):
        """Determine if this source is active."""
        # We are only active in Live mode (API or Memory).
        return (session.GetParameter("live_mode") != None and
                session.profile.metadata("os").capitalize() in
                self.supported_os)


class FileSourceType(LiveModeSourceMixin, SourceType):
    _field_definitions = [
        dict(name="paths", default=[]),
        dict(name="separator", default="/", type=basestring,
             optional=True),
    ]

    # These fields will be present in the ArtifactResult object we return.
    _FIELDS = [
        dict(name="st_mode", type="unicode"),
        dict(name="st_nlink", type="int"),
        dict(name="st_uid", type="unicode"),
        dict(name="st_gid", type="unicode"),
        dict(name="st_size", type="int"),
        dict(name="st_mtime", type="epoch"),
        dict(name="filename", type="unicode"),
    ]

    def apply(self, session=None, **kwargs):
        result = super(FileSourceType, self).apply(
            fields=self._FIELDS, result_type="file_information", **kwargs)

        for hits in session.plugins.glob(
                self.paths, path_sep=self.separator,
                root=self.separator).collect():
            # Hits are FileInformation objects, and we just pick some of the
            # important fields to report.
            info = hits["path"]
            row = {}
            for field in self._FIELDS:
                name = field["name"]
                type = RekallEFilterArtifacts.allowed_types[field["type"]]
                row[name] = type(getattr(info, name))

            result.add_result(**row)

        yield result


class ArtifactGroupSourceType(SourceType):
    _field_definitions = [
        dict(name="names", type=list),
        dict(name="supported_os", optional=True,
             default=definitions.SUPPORTED_OS),
    ]

    def apply(self, collector=None, **_):
        for name in self.names:
            for result in collector.collect_artifact(name):
                yield result


class WMISourceType(LiveModeSourceMixin, SourceType):
    _field_definitions = [
        dict(name="query", type=basestring),
        dict(name="fields", type=list, optional=True, default=[]),
        dict(name="type_name", type=basestring, optional=True),
        dict(name="supported_os", optional=True,
             default=definitions.SUPPORTED_OS),
    ]

    fields = None

    def _guess_returned_fields(self, sample):
        result = []
        for key, value in sample.iteritems():
            field_type = type(value)
            if field_type is int:
                field_type = "int"
            elif field_type is str:
                field_type = "unicode"
            else:
                field_type = "unicode"

            result.append(dict(name=key, type=field_type))
        return result

    def apply(self, session=None, **kwargs):
        result = super(WMISourceType, self).apply(
            result_type=self.type_name, **kwargs)
        wmi = session.plugins.wmi(query=self.query)

        # The wmi plugin may not exist on non-windows systems.
        if wmi == None:
            return

        for collected in wmi.collect():
            match = collected["Result"]
            row = {}
            # If the user did not specify the fields, we must
            # deduce them from the first returned row.
            if not self.fields:
                self.fields = self._guess_returned_fields(match)

            result.fields = self.fields

            for column in self.fields:
                name = column["name"]
                type = column["type"]
                value = match.get(name)
                if value is None:
                    continue

                row[name] = RekallEFilterArtifacts.allowed_types[
                    type](value)

            result.add_result(**row)

        yield result
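
# An illustrative WMI source definition (hypothetical query; the type
# indicator string comes from the artifacts package definitions):
#
#   sources:
#     - type: WMI
#       attributes:
#         query: SELECT * FROM Win32_UserAccount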


class RegistryKeySourceType(LiveModeSourceMixin, SourceType):
    _field_definitions = [
        dict(name="keys", default=[]),
        dict(name="supported_os", optional=True,
             default=["Windows"]),
    ]

    _FIELDS = [
        dict(name="st_mtime", type="epoch"),
        dict(name="hive", type="unicode"),
        dict(name="key_name", type="unicode"),
        dict(name="value", type="str"),
        dict(name="value_type", type="str"),
    ]

    def apply(self, session=None, **kwargs):
        result = super(RegistryKeySourceType, self).apply(
            fields=self._FIELDS, result_type="registry_key", **kwargs)

        for hits in session.plugins.glob(
                self.keys, path_sep="\\", filesystem="Reg",
                root="\\").collect():
            # Hits are FileInformation objects, and we just pick some of the
            # important fields to report.
            info = hits["path"]
            row = {}
            for field in self._FIELDS:
                name = field["name"]
                field_type = RekallEFilterArtifacts.allowed_types[
                    field["type"]]
                data = info.get(name)
                if data is not None:
                    row[name] = field_type(data)

            result.add_result(**row)

        yield result


class RegistryValueSourceType(LiveModeSourceMixin, SourceType):
    def CheckKeyValuePairs(self, source):
        key_value_pairs = source["key_value_pairs"]
        for pair in key_value_pairs:
            if (not isinstance(pair, dict) or "key" not in pair or
                    "value" not in pair):
                raise errors.FormatError(
                    u"key_value_pairs should consist of dicts with key and "
                    "value items.")

        return key_value_pairs

    _field_definitions = [
        dict(name="key_value_pairs", default=[],
             checker=CheckKeyValuePairs),
        dict(name="supported_os", optional=True,
             default=["Windows"]),
    ]

    _FIELDS = [
        dict(name="st_mtime", type="epoch"),
        dict(name="hive", type="unicode"),
        dict(name="key_name", type="unicode"),
        dict(name="value_name", type="unicode"),
        dict(name="value_type", type="str"),
        dict(name="value", type="str"),
    ]

    def apply(self, session=None, **kwargs):
        result = super(RegistryValueSourceType, self).apply(
            fields=self._FIELDS, result_type="registry_value", **kwargs)
        globs = [u"%s\\%s" % (x["key"], x["value"])
                 for x in self.key_value_pairs]

        for hits in session.plugins.glob(
                globs, path_sep="\\", filesystem="Reg",
                root="\\").collect():
            info = hits["path"]
            row = {}
            for field in self._FIELDS:
                name = field["name"]
                field_type = RekallEFilterArtifacts.allowed_types[
                    field["type"]]
                data = info.get(name)
                if data is not None:
                    row[name] = field_type(data)

            result.add_result(**row)

        yield result
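
# An illustrative key_value_pairs entry for this source (hypothetical key and
# value name), in the shape validated by CheckKeyValuePairs above:
#
#   key_value_pairs:
#     - key: HKEY_LOCAL_MACHINE\Software\Microsoft\Windows\CurrentVersion\Run
#       value: SomeValueName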


# This lookup table maps between source type name and concrete implementations
# that we support. Artifacts which contain sources which are not implemented
# will be ignored.
SOURCE_TYPES = {
    TYPE_INDICATOR_REKALL: RekallEFilterArtifacts,
    definitions.TYPE_INDICATOR_FILE: FileSourceType,
    definitions.TYPE_INDICATOR_ARTIFACT_GROUP: ArtifactGroupSourceType,
    definitions.TYPE_INDICATOR_WMI_QUERY: WMISourceType,
    definitions.TYPE_INDICATOR_WINDOWS_REGISTRY_KEY: RegistryKeySourceType,
    definitions.TYPE_INDICATOR_WINDOWS_REGISTRY_VALUE: RegistryValueSourceType,
}


class ArtifactDefinition(_FieldDefinitionValidator):
    """The main artifact class."""

    def CheckLabels(self, art_definition):
        """Ensure labels are defined."""
        labels = art_definition.get("labels", [])
        # Keep unknown labels around in case callers want to check for
        # complete label coverage. In most cases it is desirable to allow
        # users to extend labels but when super strict validation is required
        # we want to make sure that users don't typo a label.
        self.undefined_labels = set(labels).difference(definitions.LABELS)
        return labels

    def BuildSources(self, art_definition):
        sources = art_definition["sources"]
        result = []
        self.unsupported_source_types = []
        for source in sources:
            if not isinstance(source, dict):
                raise errors.FormatError("Source is not a dict.")

            source_type_name = source.get("type")
            if source_type_name is None:
                raise errors.FormatError("Source has no type.")

            source_cls = self.source_types.get(source_type_name)
            if source_cls:
                result.append(source_cls(source, artifact=self))
            else:
                self.unsupported_source_types.append(source_type_name)

        if not result:
            if self.unsupported_source_types:
                raise errors.FormatError(
                    "No supported sources: %s" % (
                        self.unsupported_source_types,))

            raise errors.FormatError("No available sources.")

        return result

    def SupportedOS(self, art_definition):
        supported_os = art_definition.get(
            "supported_os", definitions.SUPPORTED_OS)

        undefined_supported_os = set(supported_os).difference(
            definitions.SUPPORTED_OS)

        if undefined_supported_os:
            raise errors.FormatError(
                u'supported operating system: {} '
                u'not defined.'.format(
                    u', '.join(undefined_supported_os)))

        return supported_os

    _field_definitions = [
        dict(name="name", type=basestring),
        dict(name="doc", type=basestring),
        dict(name="labels", default=[],
             checker=CheckLabels, optional=True),
        dict(name="sources", default=[],
             checker=BuildSources),
        dict(name="supported_os",
             checker=SupportedOS, optional=True),
        dict(name="conditions", default=[], optional=True),
        dict(name="returned_types", default=[], optional=True),
        dict(name="provides", type=list, optional=True),
        dict(name="urls", type=list, optional=True)
    ]

    name = "unknown"
    source_types = SOURCE_TYPES

    def __init__(self, data, source_types=None):
        self.source_types = source_types or SOURCE_TYPES
        self.data = data
        try:
            self._LoadDefinition(data)
        except Exception as e:
            exc_info = sys.exc_info()
            raise errors.FormatError(
                "Definition %s: %s" % (self.name, e)), None, exc_info[2]

    def set_implementations(self, source_types):
        return self.__class__(self.data, source_types)

    def _LoadDefinition(self, data):
        if not isinstance(data, dict):
            raise errors.FormatError(
                "Artifact definition must be a dict.")

        different_keys = set(data) - definitions.TOP_LEVEL_KEYS
        if different_keys:
            raise errors.FormatError(u'Undefined keys: {}'.format(
                different_keys))

        self._LoadFieldDefinitions(data, self._field_definitions)


class ArtifactDefinitionProfileSectionLoader(obj.ProfileSectionLoader):
    """Loads artifacts from the artifact profiles."""
    name = "$ARTIFACTS"

    def LoadIntoProfile(self, session, profile, art_definitions):
        for definition in art_definitions:
            try:
                profile.AddDefinition(definition)
            except errors.FormatError as e:
                session.logging.debug(
                    "Skipping Artifact %s: %s", definition.get("name"), e)

        return profile


class ArtifactProfile(obj.Profile):
    """A profile containing artifact definitions."""

    # This will contain the definitions.
    def __init__(self, *args, **kwargs):
        super(ArtifactProfile, self).__init__(*args, **kwargs)
        self.definitions = []
        self.definitions_by_name = {}

    def AddDefinition(self, definition):
        """Add a new definition from a dict."""
        self.definitions.append(definition)
        self.definitions_by_name[definition["name"]] = definition

    def GetDefinitionByName(self, name, source_types=None):
        if source_types is None:
            source_types = SOURCE_TYPES

        definition = self.definitions_by_name[name]
        return ArtifactDefinition(definition, source_types)

    def GetDefinitions(self, source_types=None):
        if source_types is None:
            source_types = SOURCE_TYPES

        for definition in self.definitions:
            try:
                yield ArtifactDefinition(definition, source_types)
            except errors.FormatError:
                pass


class ArtifactsCollector(plugin.TypedProfileCommand,
                         plugin.Command):
    """Collects artifacts."""

    name = "artifact_collector"

    __args = [
        dict(name="artifacts", positional=True, required=True,
             type="ArrayStringParser",
             help="A list of artifact names to collect."),

        dict(name="artifact_files", type="ArrayStringParser",
             help="A list of additional yaml files to load which contain "
             "artifact definitions."),

        dict(name="definitions", type="ArrayStringParser",
             help="An inline artifact definition in yaml format."),

        dict(name="create_timeline", type="Bool", default=False,
             help="Also generate a timeline file."),

        dict(name="copy_files", type="Bool", default=False,
             help="Copy files into the output."),

        dict(name="writer", type="Choices",
             choices=lambda: (
                 x.name for x in BaseArtifactResultWriter.classes.values()),
             help="Writer for artifact results."),

        dict(name="output_path",
             help="Path suitable for dumping files."),
    ]

    table_header = [
        dict(name="divider", type="Divider"),
        dict(name="result"),
    ]

    table_options = dict(
        suppress_headers=True
    )

    def column_types(self):
        return dict(path=common.FileInformation(filename="/etc"))

    def __init__(self, *args, **kwargs):
        super(ArtifactsCollector, self).__init__(*args, **kwargs)
        self.artifact_profile = self.session.LoadProfile("artifacts")

        extra_definitions = [
            open(x).read() for x in self.plugin_args.artifact_files]
        extra_definitions.extend(self.plugin_args.definitions or [])

        # Make a copy of the artifact registry.
        if extra_definitions:
            self.artifact_profile = self.artifact_profile.copy()

            for definition in extra_definitions:
                for definition_data in yaml.safe_load_all(definition):
                    self.artifact_profile.AddDefinition(definition_data)

        self.seen = set()
        self.supported_os = self.get_supported_os(self.session)
        if self.supported_os is None:
            raise plugin.PluginError(
                "Unable to determine running environment.")

        # Make sure the args make sense.
        if self.plugin_args.output_path is None:
            if self.plugin_args.copy_files:
                raise plugin.PluginError(
                    "Can only copy files when an output file is specified.")
            if self.plugin_args.create_timeline:
                raise plugin.PluginError(
                    "Can only create timelines when an output file "
                    "is specified.")

    @classmethod
    def get_supported_os(cls, session):
        # Determine which context we are running in. If we are running in
        # live mode, we use the platform to determine the supported OS,
        # otherwise we determine it from the profile.
        if session.GetParameter("live"):
            return platform.system()
        elif session.profile.metadata("os") == "linux":
            return "Linux"

        elif session.profile.metadata("os") == "windows":
            return "Windows"

        elif session.profile.metadata("os") == "darwin":
            return "Darwin"

    def _evaluate_conditions(self, conditions):
        # TODO: Implement an expression parser for these. For now we just
        # return True always.
        return True

    def collect_artifact(self, artifact_name):
        if artifact_name in self.seen:
            return

        self.seen.add(artifact_name)

        try:
            definition = self.artifact_profile.GetDefinitionByName(
                artifact_name)
        except KeyError:
            self.session.logging.error("Unknown artifact %s" % artifact_name)
            return

        # This artifact is not for us.
        if self.supported_os not in definition.supported_os:
            self.session.logging.debug(
                "Skipping artifact %s: Supported OS: %s, but we are %s",
                definition.name, definition.supported_os,
                self.supported_os)
            return

        if not self._evaluate_conditions(definition.conditions):
            return

        yield dict(divider="Artifact: %s" % definition.name)

        for source in definition.sources:
            # This source is not for us.
            if not source.is_active(session=self.session):
                continue

            for result in source.apply(
                    artifact_name=definition.name,
                    session=self.session,
                    collector=self):
                if isinstance(result, dict):
                    yield result
                else:
                    yield dict(result=result)

    def collect(self):
        # Figure out a sensible default for the output writer.
        if (self.plugin_args.output_path is not None and
                self.plugin_args.writer is None):

            if os.path.isdir(self.plugin_args.output_path):
                self.plugin_args.writer = "Directory"
            else:
                self.plugin_args.writer = "Zip"

        if self.plugin_args.writer:
            impl = BaseArtifactResultWriter.ImplementationByName(
                self.plugin_args.writer)
            with impl(session=self.session,
                      copy_files=self.plugin_args.copy_files,
                      create_timeline=self.plugin_args.create_timeline,
                      output=self.plugin_args.output_path) as writer:
                for x in self._collect(writer=writer):
                    yield x
        else:
            for x in self._collect():
                yield x

    def _collect(self, writer=None):
        for artifact_name in self.plugin_args.artifacts:
            for hit in self.collect_artifact(artifact_name):
                if "result" in hit and writer:
                    writer.write_result(hit["result"])
                yield hit


class ArtifactsView(plugin.TypedProfileCommand,
                    plugin.Command):
    name = "artifact_view"

    __args = [
        dict(name="artifacts", type="ArrayStringParser", positional=True,
             help="A list of artifacts to display")
    ]

    table_header = [
        dict(name="divider", type="Divider"),
        dict(name="Message")
    ]

    def collect(self):
        artifact_profile = self.session.LoadProfile("artifacts")
        for artifact in self.plugin_args.artifacts:
            definition = artifact_profile.definitions_by_name.get(artifact)
            if definition:
                yield dict(divider=artifact)
                yield dict(Message=yaml_utils.safe_dump(definition))


class ArtifactsList(plugin.TypedProfileCommand,
                    plugin.Command):
    """List details about all known artifacts."""

    name = "artifact_list"

    __args = [
        dict(name="regex", type="RegEx",
             default=".",
             help="Filter the artifact name."),
        dict(name="supported_os", type="ArrayStringParser", required=False,
             help="If specified show for these OSs, otherwise autodetect "
             "based on the current image."),
        dict(name="labels", type="ArrayStringParser",
             help="Filter by these labels."),
        dict(name="all", type="Bool",
             help="Show all artifacts."),
    ]

    table_header = [
        dict(name="Name", width=30),
        dict(name="OS", width=8),
        dict(name="Labels", width=20),
        dict(name="Types", width=20),
        dict(name="Description", width=50),
    ]

    def collect(self):
        # Empty means autodetect based on the image.
        if not self.plugin_args.supported_os:
            supported_os = set([
                ArtifactsCollector.get_supported_os(self.session)])
        else:
            supported_os = set(self.plugin_args.supported_os)

        for definition in self.session.LoadProfile(
                "artifacts").GetDefinitions():
            if (not self.plugin_args.all and
                    not supported_os.intersection(definition.supported_os)):
                continue

            # Determine the type:
            types = set()
            for source in definition.sources:
                if self.plugin_args.all or source.is_active(
                        session=self.session):
                    types.add(source.type_indicator)

            if self.plugin_args.regex.match(definition.name):
                yield (definition.name, definition.supported_os,
                       definition.labels, sorted(types), definition.doc)


class ArtifactResult_TextObjectRenderer(text.TextObjectRenderer):
    renders_type = "ArtifactResult"

    def render_row(self, target, **_):
        column_names = [x["name"] for x in target.fields]
        table = text.TextTable(
            columns=target.fields,
            renderer=self.renderer,
            session=self.session)

        if not target.results:
            return text.Cell("")

        result = [
            text.JoinedCell(*[text.Cell(x) for x in column_names]),
            text.JoinedCell(*[text.Cell("-" * len(x)) for x in column_names])]

        for row in target.results:
            ordered_row = []
            for column in column_names:
                ordered_row.append(row.get(column))

            result.append(table.get_row(*ordered_row))

        result = text.StackedCell(*result)
        return result


class ArtifactResult_DataExportObjectRenderer(
        json_renderer.StateBasedObjectRenderer):
    renders_type = "ArtifactResult"
    renderers = ["DataExportRenderer"]

    def GetState(self, item, **_):
        return dict(artifact_name=item.artifact_name,
                    result_type=item.result_type,
                    fields=item.fields,
                    results=item.results)