Coverage Report

Created: 2024-09-14 07:19

/src/skia/src/gpu/graphite/render/CoverageMaskRenderStep.cpp
Line
Count
Source
1
/*
2
 * Copyright 2023 Google LLC
3
 *
4
 * Use of this source code is governed by a BSD-style license that can be
5
 * found in the LICENSE file.
6
 */
7
#include "src/gpu/graphite/render/CoverageMaskRenderStep.h"
8
9
#include "src/gpu/graphite/ContextUtils.h"
10
#include "src/gpu/graphite/DrawParams.h"
11
#include "src/gpu/graphite/DrawWriter.h"
12
#include "src/gpu/graphite/PathAtlas.h"
13
#include "src/gpu/graphite/geom/CoverageMaskShape.h"
14
#include "src/gpu/graphite/render/CommonDepthStencilSettings.h"
15
16
namespace skgpu::graphite {
17
18
// The device origin is applied *before* the maskToDeviceRemainder matrix so that it can be
19
// combined with the mask atlas origin. This is necessary so that the mask bounds can be inset or
20
// outset for clamping w/o affecting the alignment of the mask sampling.
21
0
static skvx::float2 get_device_translation(const SkM44& localToDevice) {
22
0
    float m00 = localToDevice.rc(0,0), m01 = localToDevice.rc(0,1);
23
0
    float m10 = localToDevice.rc(1,0), m11 = localToDevice.rc(1,1);
24
25
0
    float det = m00*m11 - m01*m10;
26
0
    if (SkScalarNearlyZero(det)) {
27
        // We can't extract any pre-translation, since the upper 2x2 is not invertible. Return (0,0)
28
        // so that the maskToDeviceRemainder matrix remains the full transform.
29
0
        return {0.f, 0.f};
30
0
    }
31
32
    // Calculate inv([[m00,m01][m10,m11]])*[[m30][m31]] to get the pre-remainder device translation.
33
0
    float tx = localToDevice.rc(0,3), ty = localToDevice.rc(1,3);
34
0
    skvx::float4 invT = skvx::float4{m11, -m10, -m01, m00} * skvx::float4{tx,tx,ty,ty};
35
0
    return (invT.xy() + invT.zw()) / det;
36
0
}
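
An editor's aside (not part of the file): the translation extracted above solves [[m00, m01], [m10, m11]] * t = (tx, ty) using the 2x2 adjugate over the determinant. The scalar sketch below (solve_pre_translation is a hypothetical name) mirrors the skvx arithmetic and assumes the determinant check has already passed.

// Sketch of get_device_translation()'s math with plain floats (illustration only).
static void solve_pre_translation(float m00, float m01, float m10, float m11,
                                  float tx, float ty, float out[2]) {
    float det = m00 * m11 - m01 * m10;        // assumed non-zero; the real code returns (0,0) otherwise
    out[0] = ( m11 * tx - m01 * ty) / det;    // = (invT[0] + invT[2]) / det in the skvx version
    out[1] = (-m10 * tx + m00 * ty) / det;    // = (invT[1] + invT[3]) / det in the skvx version
}
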
37
38
CoverageMaskRenderStep::CoverageMaskRenderStep()
39
        : RenderStep("CoverageMaskRenderStep",
40
                     "",
41
                     // The mask will have AA outsets baked in, but the original bounds for clipping
42
                     // still require the outset for analytic coverage.
43
                     Flags::kPerformsShading | Flags::kHasTextures | Flags::kEmitsCoverage |
44
                     Flags::kOutsetBoundsForAA,
45
                     /*uniforms=*/{{"maskToDeviceRemainder", SkSLType::kFloat3x3}},
46
                     PrimitiveType::kTriangleStrip,
47
                     kDirectDepthGreaterPass,
48
                     /*vertexAttrs=*/{},
49
                     /*instanceAttrs=*/
50
                     // Draw bounds and mask bounds are normalized relative to the mask texture,
51
                     // but 'drawBounds' is stored in float since the coords may map outside of
52
                     // [0,1] for inverse-filled masks. 'drawBounds' is relative to the logical mask
53
                     // entry's origin, while 'maskBoundsIn' is atlas-relative. Inverse fills swap
54
                     // the order in 'maskBoundsIn' to be RBLT.
55
                     {{"drawBounds", VertexAttribType::kFloat4 , SkSLType::kFloat4},  // ltrb
56
                      {"maskBoundsIn", VertexAttribType::kUShort4_norm, SkSLType::kFloat4},
57
                      // Remaining translation extracted from actual 'maskToDevice' transform.
58
                      {"deviceOrigin", VertexAttribType::kFloat2, SkSLType::kFloat2},
59
                      {"depth"     , VertexAttribType::kFloat, SkSLType::kFloat},
60
                      {"ssboIndices", VertexAttribType::kUShort2, SkSLType::kUShort2},
61
                      // deviceToLocal matrix for producing local coords for shader evaluation
62
                      {"mat0", VertexAttribType::kFloat3, SkSLType::kFloat3},
63
                      {"mat1", VertexAttribType::kFloat3, SkSLType::kFloat3},
64
                      {"mat2", VertexAttribType::kFloat3, SkSLType::kFloat3}},
65
                     /*varyings=*/
66
                     {// `maskBounds` are the atlas-relative, sorted bounds of the coverage mask.
67
                      // `textureCoords` are the atlas-relative UV coordinates of the draw, which
68
                      // can spill beyond `maskBounds` for inverse fills.
69
                      // TODO: maskBounds is constant for all fragments for a given instance,
70
                      // could we store them in the draw's SSBO?
71
                      {"maskBounds"   , SkSLType::kFloat4},
72
                      {"textureCoords", SkSLType::kFloat2},
73
                      // 'invert' is set to 0 to use unmodified coverage, and set to 1 for "1-c".
74
0
                      {"invert", SkSLType::kHalf}}) {}
75
76
0
std::string CoverageMaskRenderStep::vertexSkSL() const {
77
    // Returns the body of a vertex function, which must define a float4 devPosition variable and
78
    // must write to an already-defined float2 stepLocalCoords variable.
79
0
    return "float4 devPosition = coverage_mask_vertex_fn("
80
0
                    "float2(sk_VertexID >> 1, sk_VertexID & 1), "
81
0
                    "maskToDeviceRemainder, drawBounds, maskBoundsIn, deviceOrigin, "
82
0
                    "depth, float3x3(mat0, mat1, mat2), "
83
0
                    "maskBounds, textureCoords, invert, stepLocalCoords);\n";
84
0
}
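
For orientation (an illustrative sketch, not part of the file): float2(sk_VertexID >> 1, sk_VertexID & 1) enumerates the unit-quad corners expected by the 4-vertex triangle strip, which coverage_mask_vertex_fn then maps into the instance's draw bounds.

#include <cstdio>
// Sketch: corner pattern produced by (sk_VertexID >> 1, sk_VertexID & 1) for IDs 0..3.
// Emits (0,0), (0,1), (1,0), (1,1) -- the unit square as a triangle strip.
int main() {
    for (int id = 0; id < 4; ++id) {
        std::printf("vertex %d -> (%d, %d)\n", id, id >> 1, id & 1);
    }
    return 0;
}
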
85
86
std::string CoverageMaskRenderStep::texturesAndSamplersSkSL(
87
0
        const ResourceBindingRequirements& bindingReqs, int* nextBindingIndex) const {
88
0
    return EmitSamplerLayout(bindingReqs, nextBindingIndex) + " sampler2D pathAtlas;";
89
0
}
90
91
0
const char* CoverageMaskRenderStep::fragmentCoverageSkSL() const {
92
0
    return R"(
93
0
        half c = sample(pathAtlas, clamp(textureCoords, maskBounds.LT, maskBounds.RB)).r;
94
0
        outputCoverage = half4(mix(c, 1 - c, invert));
95
0
    )";
96
0
}
97
98
void CoverageMaskRenderStep::writeVertices(DrawWriter* dw,
99
                                           const DrawParams& params,
100
0
                                           skvx::ushort2 ssboIndices) const {
101
0
    const CoverageMaskShape& coverageMask = params.geometry().coverageMaskShape();
102
0
    const TextureProxy* proxy = coverageMask.textureProxy();
103
0
    SkASSERT(proxy);
104
105
    // A quad is a 4-vertex instance. The coordinates are derived from the vertex IDs.
106
0
    DrawWriter::Instances instances(*dw, {}, {}, 4);
107
108
    // The device origin is the translation extracted from the mask-to-device matrix so
109
    // that the remaining matrix uniform has less variance between draws.
110
0
    const auto& maskToDevice = params.transform().matrix();
111
0
    skvx::float2 deviceOrigin = get_device_translation(maskToDevice);
112
113
    // Relative to mask space (device origin and mask-to-device remainder must be applied in shader)
114
0
    skvx::float4 maskBounds = coverageMask.bounds().ltrb();
115
0
    skvx::float4 drawBounds;
116
117
0
    if (coverageMask.inverted()) {
118
        // Only mask filters trigger complex transforms, and they are never inverse filled. Since
119
        // we know this is an inverted mask, we can exactly map the draw's clip bounds to mask
120
        // space so that the clip is still fully covered without branching in the vertex shader.
121
0
        SkASSERT(maskToDevice == SkM44::Translate(deviceOrigin.x(), deviceOrigin.y()));
122
0
        drawBounds = params.clip().drawBounds().makeOffset(-deviceOrigin).ltrb();
123
124
        // If the mask is fully clipped out, then the shape's mask info should be (0,0,0,0).
125
        // If it's not fully clipped out, then the mask info should be non-empty.
126
0
        SkASSERT(!params.clip().transformedShapeBounds().isEmptyNegativeOrNaN() ^
127
0
                 all(maskBounds == 0.f));
128
129
0
        if (params.clip().transformedShapeBounds().isEmptyNegativeOrNaN()) {
130
            // The inversion check is strict inequality, so (0,0,0,0) would not be detected. Adjust
131
            // to (0,0,1/2,1/2) to restrict sampling to the top-left quarter of the top-left pixel,
132
            // which should have a value of 0 regardless of filtering mode.
133
0
            maskBounds = skvx::float4{0.f, 0.f, 0.5f, 0.5f};
134
0
        } else {
135
            // Add 1/2px outset to the mask bounds so that clamped coordinates sample the texel
136
            // center of the padding around the atlas entry.
137
0
            maskBounds += skvx::float4{-0.5f, -0.5f, 0.5f, 0.5f};
138
0
        }
139
140
        // and store RBLT so that the 'maskBoundsIn' attribute has xy > zw to detect inverse fill.
141
0
        maskBounds = skvx::shuffle<2,3,0,1>(maskBounds);
142
0
    } else {
143
        // If we aren't inverted, then the originally assigned values don't need to be adjusted, but
144
        // also ensure the mask isn't empty (otherwise the draw should have been skipped earlier).
145
0
        SkASSERT(!coverageMask.bounds().isEmptyNegativeOrNaN());
146
0
        SkASSERT(all(maskBounds.xy() < maskBounds.zw()));
147
148
        // Since the mask bounds and draw bounds are 1-to-1 with each other, the clamping of texture
149
        // coords is mostly a formality. We inset the mask bounds by 1/2px so that we clamp to the
150
        // texel center of the outer row/column of the mask. This should be a no-op for nearest
151
        // sampling but prevents any linear sampling from incorporating adjacent data; for atlases
152
        // this would just be 0 but for non-atlas coverage masks that might not have padding this
153
        // avoids filtering unknown values in an approx-fit texture.
154
0
        drawBounds = maskBounds;
155
0
        maskBounds -= skvx::float4{-0.5f, -0.5f, 0.5f, 0.5f};
156
0
    }
157
158
    // Move 'drawBounds' and 'maskBounds' into the atlas coordinate space, then adjust the
159
    // device translation to undo the atlas origin automatically in the vertex shader.
160
0
    skvx::float2 textureOrigin = skvx::cast<float>(coverageMask.textureOrigin());
161
0
    maskBounds += textureOrigin.xyxy();
162
0
    drawBounds += textureOrigin.xyxy();
163
0
    deviceOrigin -= textureOrigin;
164
165
    // Normalize drawBounds and maskBounds after possibly correcting drawBounds for inverse fills.
166
    // The maskToDevice matrix uniform will handle de-normalizing drawBounds for vertex positions.
167
0
    auto atlasSizeInv = skvx::float2{1.f / proxy->dimensions().width(),
168
0
                                     1.f / proxy->dimensions().height()};
169
0
    drawBounds *= atlasSizeInv.xyxy();
170
0
    maskBounds *= atlasSizeInv.xyxy();
171
0
    deviceOrigin *= atlasSizeInv;
172
173
    // Since the mask bounds define normalized texels of the texture, we can encode them as
174
    // ushort_norm without losing precision to save space.
175
0
    SkASSERT(all((maskBounds >= 0.f) & (maskBounds <= 1.f)));
176
0
    maskBounds = 65535.f * maskBounds + 0.5f;
177
178
0
    const SkM44& m = coverageMask.deviceToLocal();
179
0
    instances.append(1) << drawBounds << skvx::cast<uint16_t>(maskBounds) << deviceOrigin
180
0
                        << params.order().depthAsFloat() << ssboIndices
181
0
                        << m.rc(0,0) << m.rc(1,0) << m.rc(3,0)   // mat0
182
0
                        << m.rc(0,1) << m.rc(1,1) << m.rc(3,1)   // mat1
183
0
                        << m.rc(0,3) << m.rc(1,3) << m.rc(3,3);  // mat2
184
0
}
Unexecuted instantiation: skgpu::graphite::CoverageMaskRenderStep::writeVertices(skgpu::graphite::DrawWriter*, skgpu::graphite::DrawParams const&, skvx::Vec<2, unsigned short>) const
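
A hedged sketch of the kUShort4_norm packing used just above (illustration only, one component shown; encode_unorm16/decode_unorm16 are hypothetical names): values in [0, 1] are scaled by 65535 and biased by 0.5 so that the integer cast rounds to nearest, and the GPU's normalized fetch divides by 65535 on the way back.

#include <cstdint>
// Sketch of the unorm16 round trip (not Skia code); 'v' is assumed to be in [0, 1].
static uint16_t encode_unorm16(float v) {
    return static_cast<uint16_t>(65535.f * v + 0.5f);   // +0.5 so truncation rounds to nearest
}
static float decode_unorm16(uint16_t u) {
    return u / 65535.f;                                  // what a *_norm attribute fetch yields
}
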
185
186
void CoverageMaskRenderStep::writeUniformsAndTextures(const DrawParams& params,
187
0
                                                      PipelineDataGatherer* gatherer) const {
188
0
    SkDEBUGCODE(UniformExpectationsValidator uev(gatherer, this->uniforms());)
189
190
0
    const CoverageMaskShape& coverageMask = params.geometry().coverageMaskShape();
191
0
    const TextureProxy* proxy = coverageMask.textureProxy();
192
0
    SkASSERT(proxy);
193
194
    // Most coverage masks are aligned with the device pixels, so the params' transform is an
195
    // integer translation matrix. This translation is extracted as an instance attribute so that
196
    // the remaining transform changes much less frequently (only for complex-transformed
197
    // mask filters).
198
0
    skvx::float2 deviceOrigin = get_device_translation(params.transform().matrix());
199
0
    SkMatrix maskToDevice = params.transform().matrix().asM33();
200
0
    maskToDevice.preTranslate(-deviceOrigin.x(), -deviceOrigin.y());
201
202
    // The mask coordinates in the vertex shader will be normalized, so scale by the proxy size
203
    // to get back to Skia's texel-based coords.
204
0
    maskToDevice.preScale(proxy->dimensions().width(), proxy->dimensions().height());
205
206
    // Write uniforms:
207
0
    gatherer->write(maskToDevice);
208
209
    // Write textures and samplers:
210
0
    const bool pixelAligned =
211
0
            params.transform().type() <= Transform::Type::kSimpleRectStaysRect &&
212
0
            params.transform().maxScaleFactor() == 1.f &&
213
0
            all(deviceOrigin == floor(deviceOrigin + SK_ScalarNearlyZero));
214
0
    gatherer->add(sk_ref_sp(proxy), {pixelAligned ? SkFilterMode::kNearest : SkFilterMode::kLinear,
215
0
                                     SkTileMode::kClamp});
216
0
}
Unexecuted instantiation: skgpu::graphite::CoverageMaskRenderStep::writeUniformsAndTextures(skgpu::graphite::DrawParams const&, skgpu::graphite::PipelineDataGatherer*) const
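
A hedged sketch of the filter-mode decision above (nearly_integer is a hypothetical helper, not part of the file): nearest sampling is only exact when the residual device translation is effectively integral and there is no scaling or rotation, so any other transform falls back to linear filtering.

#include <cmath>
// Sketch (illustration only): the spirit of the pixelAligned test for one translation component.
static bool nearly_integer(float v, float tol = 1.f / 4096.f) {
    return std::fabs(v - std::roundf(v)) <= tol;
}
// e.g. choose SkFilterMode::kNearest when the transform stays rect-aligned, the max scale
// factor is 1, and both components of deviceOrigin pass nearly_integer(); otherwise kLinear.
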
217
218
}  // namespace skgpu::graphite