Coverage Report

Created: 2025-07-16 07:53

/src/libde265/libde265/contextmodel.h
Line | Count | Source
   1 |       | /*
   2 |       |  * H.265 video codec.
   3 |       |  * Copyright (c) 2013-2014 struktur AG, Dirk Farin <farin@struktur.de>
   4 |       |  *
   5 |       |  * Authors: struktur AG, Dirk Farin <farin@struktur.de>
   6 |       |  *          Min Chen <chenm003@163.com>
   7 |       |  *
   8 |       |  * This file is part of libde265.
   9 |       |  *
  10 |       |  * libde265 is free software: you can redistribute it and/or modify
  11 |       |  * it under the terms of the GNU Lesser General Public License as
  12 |       |  * published by the Free Software Foundation, either version 3 of
  13 |       |  * the License, or (at your option) any later version.
  14 |       |  *
  15 |       |  * libde265 is distributed in the hope that it will be useful,
  16 |       |  * but WITHOUT ANY WARRANTY; without even the implied warranty of
  17 |       |  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
  18 |       |  * GNU Lesser General Public License for more details.
  19 |       |  *
  20 |       |  * You should have received a copy of the GNU Lesser General Public License
  21 |       |  * along with libde265.  If not, see <http://www.gnu.org/licenses/>.
  22 |       |  */
  23 |       |
  24 |       | #ifndef DE265_CONTEXTMODEL_H
  25 |       | #define DE265_CONTEXTMODEL_H
  26 |       |
  27 |       | #include "libde265/cabac.h"
  28 |       | #include "libde265/de265.h"
  29 |       |
  30 |       | #include <string.h>
  31 |       | #include <string>
  32 |       |
  33 |       |
  34 |       | struct context_model {
  35 |       |   uint8_t MPSbit : 1;
  36 |       |   uint8_t state  : 7;
  37 |       |
  38 |     0 |   bool operator==(context_model b) const { return state==b.state && MPSbit==b.MPSbit; }
  39 |     0 |   bool operator!=(context_model b) const { return state!=b.state || MPSbit!=b.MPSbit; }
  40 |       | };
  41 |       |
  42 |       |
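The context_model struct above packs one CABAC context into a single byte: the most-probable-symbol bit plus the 7-bit probability state index. A minimal stand-alone sketch of that layout (the struct is repeated here only so the snippet compiles on its own; it is not an addition to the library):

#include <cassert>
#include <cstdint>

// Same bit-field layout as in contextmodel.h, duplicated for a
// self-contained illustration.
struct context_model {
  uint8_t MPSbit : 1;   // most probable symbol (0 or 1)
  uint8_t state  : 7;   // CABAC probability state index
};

int main() {
  // Both fields share one byte on common ABIs.
  static_assert(sizeof(context_model) == 1, "packed into a single byte");

  context_model cm{};   // value-initialized: MPSbit = 0, state = 0
  cm.MPSbit = 1;
  cm.state  = 38;
  assert(cm.MPSbit == 1 && cm.state == 38);
  return 0;
}
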
  43 |       | enum context_model_index {
  44 |       |   // SAO
  45 |       |   CONTEXT_MODEL_SAO_MERGE_FLAG = 0,
  46 |       |   CONTEXT_MODEL_SAO_TYPE_IDX   = CONTEXT_MODEL_SAO_MERGE_FLAG +1,
  47 |       |
  48 |       |   // CB-tree
  49 |       |   CONTEXT_MODEL_SPLIT_CU_FLAG  = CONTEXT_MODEL_SAO_TYPE_IDX + 1,
  50 |       |   CONTEXT_MODEL_CU_SKIP_FLAG   = CONTEXT_MODEL_SPLIT_CU_FLAG + 3,
  51 |       |
  52 |       |   // intra-prediction
  53 |       |   CONTEXT_MODEL_PART_MODE      = CONTEXT_MODEL_CU_SKIP_FLAG + 3,
  54 |       |   CONTEXT_MODEL_PREV_INTRA_LUMA_PRED_FLAG = CONTEXT_MODEL_PART_MODE + 4,
  55 |       |   CONTEXT_MODEL_INTRA_CHROMA_PRED_MODE    = CONTEXT_MODEL_PREV_INTRA_LUMA_PRED_FLAG + 1,
  56 |       |
  57 |       |   // transform-tree
  58 |       |   CONTEXT_MODEL_CBF_LUMA                  = CONTEXT_MODEL_INTRA_CHROMA_PRED_MODE + 1,
  59 |       |   CONTEXT_MODEL_CBF_CHROMA                = CONTEXT_MODEL_CBF_LUMA + 2,
  60 |       |   CONTEXT_MODEL_SPLIT_TRANSFORM_FLAG      = CONTEXT_MODEL_CBF_CHROMA + 4,
  61 |       |   CONTEXT_MODEL_CU_CHROMA_QP_OFFSET_FLAG  = CONTEXT_MODEL_SPLIT_TRANSFORM_FLAG + 3,
  62 |       |   CONTEXT_MODEL_CU_CHROMA_QP_OFFSET_IDX   = CONTEXT_MODEL_CU_CHROMA_QP_OFFSET_FLAG + 1,
  63 |       |
  64 |       |   // residual
  65 |       |   CONTEXT_MODEL_LAST_SIGNIFICANT_COEFFICIENT_X_PREFIX = CONTEXT_MODEL_CU_CHROMA_QP_OFFSET_IDX + 1,
  66 |       |   CONTEXT_MODEL_LAST_SIGNIFICANT_COEFFICIENT_Y_PREFIX = CONTEXT_MODEL_LAST_SIGNIFICANT_COEFFICIENT_X_PREFIX + 18,
  67 |       |   CONTEXT_MODEL_CODED_SUB_BLOCK_FLAG          = CONTEXT_MODEL_LAST_SIGNIFICANT_COEFFICIENT_Y_PREFIX + 18,
  68 |       |   CONTEXT_MODEL_SIGNIFICANT_COEFF_FLAG        = CONTEXT_MODEL_CODED_SUB_BLOCK_FLAG + 4,
  69 |       |   CONTEXT_MODEL_COEFF_ABS_LEVEL_GREATER1_FLAG = CONTEXT_MODEL_SIGNIFICANT_COEFF_FLAG + 42+2,
  70 |       |   CONTEXT_MODEL_COEFF_ABS_LEVEL_GREATER2_FLAG = CONTEXT_MODEL_COEFF_ABS_LEVEL_GREATER1_FLAG + 24,
  71 |       |
  72 |       |   CONTEXT_MODEL_CU_QP_DELTA_ABS        = CONTEXT_MODEL_COEFF_ABS_LEVEL_GREATER2_FLAG + 6,
  73 |       |   CONTEXT_MODEL_TRANSFORM_SKIP_FLAG    = CONTEXT_MODEL_CU_QP_DELTA_ABS + 2,
  74 |       |   CONTEXT_MODEL_RDPCM_FLAG             = CONTEXT_MODEL_TRANSFORM_SKIP_FLAG + 2,
  75 |       |   CONTEXT_MODEL_RDPCM_DIR              = CONTEXT_MODEL_RDPCM_FLAG + 2,
  76 |       |
  77 |       |   // motion
  78 |       |   CONTEXT_MODEL_MERGE_FLAG             = CONTEXT_MODEL_RDPCM_DIR + 2,
  79 |       |   CONTEXT_MODEL_MERGE_IDX              = CONTEXT_MODEL_MERGE_FLAG + 1,
  80 |       |   CONTEXT_MODEL_PRED_MODE_FLAG         = CONTEXT_MODEL_MERGE_IDX + 1,
  81 |       |   CONTEXT_MODEL_ABS_MVD_GREATER01_FLAG = CONTEXT_MODEL_PRED_MODE_FLAG + 1,
  82 |       |   CONTEXT_MODEL_MVP_LX_FLAG            = CONTEXT_MODEL_ABS_MVD_GREATER01_FLAG + 2,
  83 |       |   CONTEXT_MODEL_RQT_ROOT_CBF           = CONTEXT_MODEL_MVP_LX_FLAG + 1,
  84 |       |   CONTEXT_MODEL_REF_IDX_LX             = CONTEXT_MODEL_RQT_ROOT_CBF + 1,
  85 |       |   CONTEXT_MODEL_INTER_PRED_IDC         = CONTEXT_MODEL_REF_IDX_LX + 2,
  86 |       |   CONTEXT_MODEL_CU_TRANSQUANT_BYPASS_FLAG = CONTEXT_MODEL_INTER_PRED_IDC + 5,
  87 |       |   CONTEXT_MODEL_LOG2_RES_SCALE_ABS_PLUS1 = CONTEXT_MODEL_CU_TRANSQUANT_BYPASS_FLAG + 1,
  88 |       |   CONTEXT_MODEL_RES_SCALE_SIGN_FLAG      = CONTEXT_MODEL_LOG2_RES_SCALE_ABS_PLUS1 + 8,
  89 |       |   CONTEXT_MODEL_TABLE_LENGTH           = CONTEXT_MODEL_RES_SCALE_SIGN_FLAG + 2
  90 |       | };
  91 |       |
  92 |       |
  93 |       |
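The enum above lays out every CABAC context in one flat table: each syntax element gets a base offset, the gap to the next enumerator is the number of contexts that element owns, and CONTEXT_MODEL_TABLE_LENGTH ends up being the total table size. A small sketch under that assumption (split_cu_flag_ctx is a hypothetical helper, not part of libde265):

// The gap between adjacent enumerators is the context count per syntax
// element; e.g. split_cu_flag owns 3 consecutive entries.
static_assert(CONTEXT_MODEL_CU_SKIP_FLAG - CONTEXT_MODEL_SPLIT_CU_FLAG == 3,
              "split_cu_flag uses 3 context models");

// Hypothetical helper: select one of those contexts from a flat table,
// given a context increment ctxIdxInc in 0..2 derived from the neighbours.
inline context_model& split_cu_flag_ctx(context_model* table, int ctxIdxInc) {
  return table[CONTEXT_MODEL_SPLIT_CU_FLAG + ctxIdxInc];
}
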
  94 |       | void initialize_CABAC_models(context_model context_model_table[CONTEXT_MODEL_TABLE_LENGTH],
  95 |       |                              int initType,
  96 |       |                              int QPY);
  97 |       |
  98 |       |
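initialize_CABAC_models fills the whole table for a given slice initialization type and QP; its implementation and the per-context 8-bit init values live outside this header. A rough sketch of what the per-context derivation looks like in the HEVC spec (clause 9.3.2.2), for orientation only; this is not the libde265 code:

#include <algorithm>
#include <cstdint>

// Illustrative only: derive MPSbit/state from an 8-bit spec init value and
// the slice QP, following HEVC clause 9.3.2.2 (variable names as in the spec).
static context_model init_single_context(uint8_t initValue, int QPY) {
  int slopeIdx  = initValue >> 4;
  int offsetIdx = initValue & 15;
  int m = slopeIdx * 5 - 45;
  int n = (offsetIdx << 3) - 16;

  int qp = std::min(std::max(QPY, 0), 51);                    // Clip3(0,51,QPY)
  int preCtxState = std::min(std::max(((m * qp) >> 4) + n, 1), 126);

  context_model cm;
  cm.MPSbit = (preCtxState <= 63) ? 0 : 1;
  cm.state  = cm.MPSbit ? (preCtxState - 64) : (63 - preCtxState);
  return cm;
}
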
  99 |       | class context_model_table
 100 |       | {
 101 |       |  public:
 102 |       |   context_model_table();
 103 |       |   context_model_table(const context_model_table&);
 104 |       |   ~context_model_table();
 105 |       |
 106 |       |   void init(int initType, int QPY);
 107 |       |   void release();
 108 |       |   void decouple();
 109 |       |   context_model_table transfer();
 110 |     0 |   context_model_table copy() const { context_model_table t=*this; t.decouple(); return t; }
 111 |       |
 112 | 3.40M |   bool empty() const { return refcnt == NULL; }
 113 |       |
 114 |  329M |   context_model& operator[](int i) { return model[i]; }
 115 |       |
 116 |       |   context_model_table& operator=(const context_model_table&);
 117 |       |
 118 |       |   bool operator==(const context_model_table&) const;
 119 |       |
 120 |       |   std::string debug_dump() const;
 121 |       |
 122 |       |  private:
 123 |       |   void decouple_or_alloc_with_empty_data();
 124 |       |
 125 |       |   context_model* model; // [CONTEXT_MODEL_TABLE_LENGTH]
 126 |       |   int* refcnt;
 127 |       | };
 128 |       |
 129 |       |
 130 |       | #endif
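
The counts above show where the decoder spends its time in this class: operator[] was hit about 329M times and empty() about 3.40M times, while copy() was never executed in this run. Judging from the declarations, context_model_table wraps a reference-counted array of CONTEXT_MODEL_TABLE_LENGTH context models (model plus refcnt), with copy() producing a decoupled private copy and assignment sharing the storage. A hedged usage sketch under those assumptions:

// Assumes the copy/assignment semantics suggested by the declarations above;
// a decoder might snapshot CABAC state like this, e.g. when propagating
// contexts between CTU rows for wavefront parallel processing.
void snapshot_and_restore() {
  context_model_table ctx;
  ctx.init(/*initType=*/0, /*QPY=*/26);          // initialize every context

  context_model_table saved = ctx.copy();        // detached deep copy

  ctx[CONTEXT_MODEL_SAO_MERGE_FLAG].state = 10;  // ...decoding mutates ctx...

  ctx = saved;                                   // restore the snapshot
}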