Coverage Report

Created: 2025-11-24 06:31

/src/c-blosc2/internal-complibs/lz4-1.10.0/lz4hc.c
Line | Count | Source
1
/*
2
    LZ4 HC - High Compression Mode of LZ4
3
    Copyright (C) 2011-2020, Yann Collet.
4
5
    BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php)
6
7
    Redistribution and use in source and binary forms, with or without
8
    modification, are permitted provided that the following conditions are
9
    met:
10
11
    * Redistributions of source code must retain the above copyright
12
    notice, this list of conditions and the following disclaimer.
13
    * Redistributions in binary form must reproduce the above
14
    copyright notice, this list of conditions and the following disclaimer
15
    in the documentation and/or other materials provided with the
16
    distribution.
17
18
    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
19
    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
20
    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
21
    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
22
    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
23
    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
24
    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
25
    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
26
    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
27
    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
28
    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29
30
    You can contact the author at :
31
       - LZ4 source repository : https://github.com/lz4/lz4
32
       - LZ4 public forum : https://groups.google.com/forum/#!forum/lz4c
33
*/
34
/* note : lz4hc is not an independent module, it requires lz4.h/lz4.c for proper compilation */
35
36
37
/* *************************************
38
*  Tuning Parameter
39
***************************************/
40
41
/*! HEAPMODE :
42
 *  Select how stateless HC compression functions like `LZ4_compress_HC()`
43
 *  allocate memory for their workspace:
44
 *  in stack (0:fastest), or in heap (1:default, requires malloc()).
45
 *  Since workspace is rather large, heap mode is recommended.
46
**/
47
#ifndef LZ4HC_HEAPMODE
48
#  define LZ4HC_HEAPMODE 1
49
#endif
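
The HEAPMODE comment above controls where stateless helpers such as LZ4_compress_HC() place their sizable workspace: on the stack (0) or through malloc() (1, the default). Callers who prefer to manage that memory themselves can bypass the choice entirely via the public extState entry points. A minimal sketch, assuming only that lz4hc.h is on the include path and that failure handling is reduced to returning 0:

    #include <stdlib.h>
    #include "lz4hc.h"

    /* Compress with a caller-owned workspace instead of relying on LZ4HC_HEAPMODE:
     * LZ4_sizeofStateHC() reports the required size, and
     * LZ4_compress_HC_extStateHC() runs the compressor inside that buffer. */
    static int compress_with_own_state(const char* src, int srcSize,
                                       char* dst, int dstCapacity, int clevel)
    {
        void* const state = malloc((size_t)LZ4_sizeofStateHC());
        int csize = 0;
        if (state != NULL) {
            csize = LZ4_compress_HC_extStateHC(state, src, dst,
                                               srcSize, dstCapacity, clevel);
            free(state);
        }
        return csize;  /* 0 means the call failed or the output did not fit in dst */
    }
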
50
51
52
/*===    Dependency    ===*/
53
#define LZ4_HC_STATIC_LINKING_ONLY
54
#include "lz4hc.h"
55
#include <limits.h>
56
57
58
/*===   Shared lz4.c code   ===*/
59
#ifndef LZ4_SRC_INCLUDED
60
# if defined(__GNUC__)
61
#  pragma GCC diagnostic ignored "-Wunused-function"
62
# endif
63
# if defined (__clang__)
64
#  pragma clang diagnostic ignored "-Wunused-function"
65
# endif
66
# define LZ4_COMMONDEFS_ONLY
67
# include "lz4.c"   /* LZ4_count, constants, mem */
68
#endif
69
70
71
/*===   Enums   ===*/
72
typedef enum { noDictCtx, usingDictCtxHc } dictCtx_directive;
73
74
75
/*===   Constants   ===*/
76
303k
#define OPTIMAL_ML (int)((ML_MASK-1)+MINMATCH)
77
0
#define LZ4_OPT_NUM   (1<<12)
78
79
80
/*===   Macros   ===*/
81
4.29M
#define MIN(a,b)   ( (a) < (b) ? (a) : (b) )
82
27.0M
#define MAX(a,b)   ( (a) > (b) ? (a) : (b) )
83
84
85
/*===   Levels definition   ===*/
86
typedef enum { lz4mid, lz4hc, lz4opt } lz4hc_strat_e;
87
typedef struct {
88
    lz4hc_strat_e strat;
89
    int nbSearches;
90
    U32 targetLength;
91
} cParams_t;
92
static const cParams_t k_clTable[LZ4HC_CLEVEL_MAX+1] = {
93
    { lz4mid,    2, 16 },  /* 0, unused */
94
    { lz4mid,    2, 16 },  /* 1, unused */
95
    { lz4mid,    2, 16 },  /* 2 */
96
    { lz4hc,     4, 16 },  /* 3 */
97
    { lz4hc,     8, 16 },  /* 4 */
98
    { lz4hc,    16, 16 },  /* 5 */
99
    { lz4hc,    32, 16 },  /* 6 */
100
    { lz4hc,    64, 16 },  /* 7 */
101
    { lz4hc,   128, 16 },  /* 8 */
102
    { lz4hc,   256, 16 },  /* 9 */
103
    { lz4opt,   96, 64 },  /*10==LZ4HC_CLEVEL_OPT_MIN*/
104
    { lz4opt,  512,128 },  /*11 */
105
    { lz4opt,16384,LZ4_OPT_NUM },  /* 12==LZ4HC_CLEVEL_MAX */
106
};
107
108
static cParams_t LZ4HC_getCLevelParams(int cLevel)
109
24.3k
{
110
    /* note : clevel convention is a bit different from lz4frame,
111
     * possibly something worth revisiting for consistency */
112
24.3k
    if (cLevel < 1)
113
0
        cLevel = LZ4HC_CLEVEL_DEFAULT;
114
24.3k
    cLevel = MIN(LZ4HC_CLEVEL_MAX, cLevel);
115
24.3k
    return k_clTable[cLevel];
116
24.3k
}
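
The k_clTable / LZ4HC_getCLevelParams() pair above maps each compression level to a strategy (lz4mid at level 2, lz4hc for 3-9, lz4opt for 10-12) and a search budget, with levels below 1 falling back to LZ4HC_CLEVEL_DEFAULT and larger values clamped to LZ4HC_CLEVEL_MAX. From the public API the level is simply the last argument of LZ4_compress_HC(); a small illustrative sketch, with buffer management left to the caller:

    #include "lz4hc.h"   /* declares LZ4_compress_HC() and the LZ4HC_CLEVEL_* constants */

    /* Same input compressed twice: level 2 exercises the lz4mid strategy,
     * LZ4HC_CLEVEL_MAX exercises the lz4opt strategy from the table above. */
    static int demo_levels(const char* src, int srcSize, char* dst, int dstCapacity)
    {
        int const mid  = LZ4_compress_HC(src, dst, srcSize, dstCapacity, 2);
        int const deep = LZ4_compress_HC(src, dst, srcSize, dstCapacity, LZ4HC_CLEVEL_MAX);
        return (mid > 0) && (deep > 0);   /* 0 signals failure / not enough room in dst */
    }
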
117
118
119
/*===   Hashing   ===*/
120
0
#define LZ4HC_HASHSIZE 4
121
76.4M
#define HASH_FUNCTION(i)      (((i) * 2654435761U) >> ((MINMATCH*8)-LZ4HC_HASH_LOG))
122
76.4M
static U32 LZ4HC_hashPtr(const void* ptr) { return HASH_FUNCTION(LZ4_read32(ptr)); }
123
124
#if defined(LZ4_FORCE_MEMORY_ACCESS) && (LZ4_FORCE_MEMORY_ACCESS==2)
125
/* lie to the compiler about data alignment; use with caution */
126
static U64 LZ4_read64(const void* memPtr) { return *(const U64*) memPtr; }
127
128
#elif defined(LZ4_FORCE_MEMORY_ACCESS) && (LZ4_FORCE_MEMORY_ACCESS==1)
129
/* __pack instructions are safer, but compiler specific */
130
LZ4_PACK(typedef struct { U64 u64; }) LZ4_unalign64;
131
13.6M
static U64 LZ4_read64(const void* ptr) { return ((const LZ4_unalign64*)ptr)->u64; }
132
133
#else  /* safe and portable access using memcpy() */
134
static U64 LZ4_read64(const void* memPtr)
135
{
136
    U64 val; LZ4_memcpy(&val, memPtr, sizeof(val)); return val;
137
}
138
139
#endif /* LZ4_FORCE_MEMORY_ACCESS */
140
141
7.95k
#define LZ4MID_HASHSIZE 8
142
25.0M
#define LZ4MID_HASHLOG (LZ4HC_HASH_LOG-1)
143
7.95k
#define LZ4MID_HASHTABLESIZE (1 << LZ4MID_HASHLOG)
144
145
11.3M
static U32 LZ4MID_hash4(U32 v) { return (v * 2654435761U) >> (32-LZ4MID_HASHLOG); }
146
11.3M
static U32 LZ4MID_hash4Ptr(const void* ptr) { return LZ4MID_hash4(LZ4_read32(ptr)); }
147
/* note: hash7 hashes the lower 56-bits.
148
 * It presumes input was read using little endian.*/
149
13.6M
static U32 LZ4MID_hash7(U64 v) { return (U32)(((v  << (64-56)) * 58295818150454627ULL) >> (64-LZ4MID_HASHLOG)) ; }
150
static U64 LZ4_readLE64(const void* memPtr);
151
13.6M
static U32 LZ4MID_hash8Ptr(const void* ptr) { return LZ4MID_hash7(LZ4_readLE64(ptr)); }
152
153
static U64 LZ4_readLE64(const void* memPtr)
154
13.6M
{
155
13.6M
    if (LZ4_isLittleEndian()) {
156
13.6M
        return LZ4_read64(memPtr);
157
13.6M
    } else {
158
0
        const BYTE* p = (const BYTE*)memPtr;
159
        /* note: relies on the compiler to simplify this expression */
160
0
        return (U64)p[0] | ((U64)p[1]<<8) | ((U64)p[2]<<16) | ((U64)p[3]<<24)
161
0
            | ((U64)p[4]<<32) | ((U64)p[5]<<40) | ((U64)p[6]<<48) | ((U64)p[7]<<56);
162
0
    }
163
13.6M
}
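
Both hash families above are multiplicative: HASH_FUNCTION() mixes the first 4 bytes with the Knuth constant 2654435761 and keeps the top LZ4HC_HASH_LOG bits, while LZ4MID_hash7() applies the same idea to the low 56 bits of a little-endian 8-byte read. A standalone sketch of the 4-byte variant, with an illustrative table size:

    #include <stdint.h>
    #include <string.h>

    #define DEMO_HASH_LOG 15   /* illustrative only; lz4hc.c uses LZ4HC_HASH_LOG */

    /* Multiplicative hash of the 4 bytes at p: multiply by 2654435761 and
     * keep the top DEMO_HASH_LOG bits as the table index. */
    static uint32_t demo_hash4(const void* p)
    {
        uint32_t v;
        memcpy(&v, p, sizeof(v));   /* safe unaligned read, like the memcpy fallback above */
        return (v * 2654435761U) >> (32 - DEMO_HASH_LOG);
    }
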
164
165
166
/*===   Count match length   ===*/
167
LZ4_FORCE_INLINE
168
unsigned LZ4HC_NbCommonBytes32(U32 val)
169
10.7M
{
170
10.7M
    assert(val != 0);
171
10.7M
    if (LZ4_isLittleEndian()) {
172
#     if defined(_MSC_VER) && (_MSC_VER >= 1400) && !defined(LZ4_FORCE_SW_BITCOUNT)
173
        unsigned long r;
174
        _BitScanReverse(&r, val);
175
        return (unsigned)((31 - r) >> 3);
176
#     elif (defined(__clang__) || (defined(__GNUC__) && ((__GNUC__ > 3) || \
177
                            ((__GNUC__ == 3) && (__GNUC_MINOR__ >= 4))))) && \
178
                                        !defined(LZ4_FORCE_SW_BITCOUNT)
179
        return (unsigned)__builtin_clz(val) >> 3;
180
#     else
181
        val >>= 8;
182
        val = ((((val + 0x00FFFF00) | 0x00FFFFFF) + val) |
183
              (val + 0x00FF0000)) >> 24;
184
        return (unsigned)val ^ 3;
185
#     endif
186
10.7M
    } else {
187
#     if defined(_MSC_VER) && (_MSC_VER >= 1400) && !defined(LZ4_FORCE_SW_BITCOUNT)
188
        unsigned long r;
189
        _BitScanForward(&r, val);
190
        return (unsigned)(r >> 3);
191
#     elif (defined(__clang__) || (defined(__GNUC__) && ((__GNUC__ > 3) || \
192
                            ((__GNUC__ == 3) && (__GNUC_MINOR__ >= 4))))) && \
193
                                        !defined(LZ4_FORCE_SW_BITCOUNT)
194
        return (unsigned)__builtin_ctz(val) >> 3;
195
#     else
196
        const U32 m = 0x01010101;
197
        return (unsigned)((((val - 1) ^ val) & (m - 1)) * m) >> 24;
198
#     endif
199
0
    }
200
10.7M
}
201
202
/** LZ4HC_countBack() :
203
 * @return : negative value, nb of common bytes before ip/match */
204
LZ4_FORCE_INLINE
205
int LZ4HC_countBack(const BYTE* const ip, const BYTE* const match,
206
                    const BYTE* const iMin, const BYTE* const mMin)
207
13.2M
{
208
13.2M
    int back = 0;
209
13.2M
    int const min = (int)MAX(iMin - ip, mMin - match);
210
13.2M
    assert(min <= 0);
211
13.2M
    assert(ip >= iMin); assert((size_t)(ip-iMin) < (1U<<31));
212
13.2M
    assert(match >= mMin); assert((size_t)(match - mMin) < (1U<<31));
213
214
22.5M
    while ((back - min) > 3) {
215
20.0M
        U32 const v = LZ4_read32(ip + back - 4) ^ LZ4_read32(match + back - 4);
216
20.0M
        if (v) {
217
10.7M
            return (back - (int)LZ4HC_NbCommonBytes32(v));
218
10.7M
        } else back -= 4; /* 4-byte step */
219
20.0M
    }
220
    /* check remainder if any */
221
3.87M
    while ( (back > min)
222
3.78M
         && (ip[back-1] == match[back-1]) )
223
1.35M
            back--;
224
2.51M
    return back;
225
13.2M
}
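
LZ4HC_countBack() above extends a match backwards, 4 bytes at a time and then byte by byte, returning a negative count bounded by iMin/mMin; a match starting at ip can then be widened to ip+back while its length grows by -back. A simplified byte-by-byte equivalent, shown only to make the contract concrete:

    /* Same contract as LZ4HC_countBack(), without the 4-byte fast path:
     * the result is <= 0, and -result bytes immediately before ip and match are equal. */
    static int demo_countBack(const unsigned char* ip, const unsigned char* match,
                              const unsigned char* iMin, const unsigned char* mMin)
    {
        int back = 0;
        while (ip + back > iMin && match + back > mMin
            && ip[back - 1] == match[back - 1])
            back--;
        return back;
    }
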
226
227
/*===   Chain table updates   ===*/
228
108M
#define DELTANEXTU16(table, pos) table[(U16)(pos)]   /* faster */
229
/* Make fields passed to, and updated by LZ4HC_encodeSequence explicit */
230
1.81M
#define UPDATABLE(ip, op, anchor) &ip, &op, &anchor
231
232
233
/**************************************
234
*  Init
235
**************************************/
236
static void LZ4HC_clearTables (LZ4HC_CCtx_internal* hc4)
237
0
{
238
0
    MEM_INIT(hc4->hashTable, 0, sizeof(hc4->hashTable));
239
0
    MEM_INIT(hc4->chainTable, 0xFF, sizeof(hc4->chainTable));
240
0
}
241
242
static void LZ4HC_init_internal (LZ4HC_CCtx_internal* hc4, const BYTE* start)
243
24.3k
{
244
24.3k
    size_t const bufferSize = (size_t)(hc4->end - hc4->prefixStart);
245
24.3k
    size_t newStartingOffset = bufferSize + hc4->dictLimit;
246
24.3k
    DEBUGLOG(5, "LZ4HC_init_internal");
247
24.3k
    assert(newStartingOffset >= bufferSize);  /* check overflow */
248
24.3k
    if (newStartingOffset > 1 GB) {
249
0
        LZ4HC_clearTables(hc4);
250
0
        newStartingOffset = 0;
251
0
    }
252
24.3k
    newStartingOffset += 64 KB;
253
24.3k
    hc4->nextToUpdate = (U32)newStartingOffset;
254
24.3k
    hc4->prefixStart = start;
255
24.3k
    hc4->end = start;
256
24.3k
    hc4->dictStart = start;
257
24.3k
    hc4->dictLimit = (U32)newStartingOffset;
258
24.3k
    hc4->lowLimit = (U32)newStartingOffset;
259
24.3k
}
260
261
262
/**************************************
263
*  Encode
264
**************************************/
265
/* LZ4HC_encodeSequence() :
266
 * @return : 0 if ok,
267
 *           1 if buffer issue detected */
268
LZ4_FORCE_INLINE int LZ4HC_encodeSequence (
269
    const BYTE** _ip,
270
    BYTE** _op,
271
    const BYTE** _anchor,
272
    int matchLength,
273
    int offset,
274
    limitedOutput_directive limit,
275
    BYTE* oend)
276
1.81M
{
277
5.43M
#define ip      (*_ip)
278
15.0M
#define op      (*_op)
279
5.43M
#define anchor  (*_anchor)
280
281
1.81M
    size_t length;
282
1.81M
    BYTE* const token = op++;
283
284
#if defined(LZ4_DEBUG) && (LZ4_DEBUG >= 6)
285
    static const BYTE* start = NULL;
286
    static U32 totalCost = 0;
287
    U32 const pos = (start==NULL) ? 0 : (U32)(anchor - start);
288
    U32 const ll = (U32)(ip - anchor);
289
    U32 const llAdd = (ll>=15) ? ((ll-15) / 255) + 1 : 0;
290
    U32 const mlAdd = (matchLength>=19) ? ((matchLength-19) / 255) + 1 : 0;
291
    U32 const cost = 1 + llAdd + ll + 2 + mlAdd;
292
    if (start==NULL) start = anchor;  /* only works for single segment */
293
    /* g_debuglog_enable = (pos >= 2228) & (pos <= 2262); */
294
    DEBUGLOG(6, "pos:%7u -- literals:%4u, match:%4i, offset:%5i, cost:%4u + %5u",
295
                pos,
296
                (U32)(ip - anchor), matchLength, offset,
297
                cost, totalCost);
298
    totalCost += cost;
299
#endif
300
301
    /* Encode Literal length */
302
1.81M
    length = (size_t)(ip - anchor);
303
1.81M
    LZ4_STATIC_ASSERT(notLimited == 0);
304
    /* Check output limit */
305
1.81M
    if (limit && ((op + (length / 255) + length + (2 + 1 + LASTLITERALS)) > oend)) {
306
1.46k
        DEBUGLOG(6, "Not enough room to write %i literals (%i bytes remaining)",
307
1.46k
                (int)length, (int)(oend - op));
308
1.46k
        return 1;
309
1.46k
    }
310
1.81M
    if (length >= RUN_MASK) {
311
160k
        size_t len = length - RUN_MASK;
312
160k
        *token = (RUN_MASK << ML_BITS);
313
200k
        for(; len >= 255 ; len -= 255) *op++ = 255;
314
160k
        *op++ = (BYTE)len;
315
1.65M
    } else {
316
1.65M
        *token = (BYTE)(length << ML_BITS);
317
1.65M
    }
318
319
    /* Copy Literals */
320
1.81M
    LZ4_wildCopy8(op, anchor, op + length);
321
1.81M
    op += length;
322
323
    /* Encode Offset */
324
1.81M
    assert(offset <= LZ4_DISTANCE_MAX );
325
1.81M
    assert(offset > 0);
326
1.81M
    LZ4_writeLE16(op, (U16)(offset)); op += 2;
327
328
    /* Encode MatchLength */
329
1.81M
    assert(matchLength >= MINMATCH);
330
1.81M
    length = (size_t)matchLength - MINMATCH;
331
1.81M
    if (limit && (op + (length / 255) + (1 + LASTLITERALS) > oend)) {
332
135
        DEBUGLOG(6, "Not enough room to write match length");
333
135
        return 1;   /* Check output limit */
334
135
    }
335
1.81M
    if (length >= ML_MASK) {
336
238k
        *token += ML_MASK;
337
238k
        length -= ML_MASK;
338
269k
        for(; length >= 510 ; length -= 510) { *op++ = 255; *op++ = 255; }
339
238k
        if (length >= 255) { length -= 255; *op++ = 255; }
340
238k
        *op++ = (BYTE)length;
341
1.57M
    } else {
342
1.57M
        *token += (BYTE)(length);
343
1.57M
    }
344
345
    /* Prepare next loop */
346
1.81M
    ip += matchLength;
347
1.81M
    anchor = ip;
348
349
1.81M
    return 0;
350
351
1.81M
#undef ip
352
1.81M
#undef op
353
1.81M
#undef anchor
354
1.81M
}
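
LZ4HC_encodeSequence() above writes one LZ4 sequence: a token whose high nibble is the literal length and whose low nibble is matchLength-MINMATCH (each capped at 15 and continued with extra 255-valued bytes), followed by the literals, a 2-byte little-endian offset, and the match-length continuation. A hand-worked byte layout with illustrative values:

    /* Bytes produced for literals = "hello" (5), matchLength = 20, offset = 48:
     *   token   : 0x5F             high nibble 5 (literal length),
     *                              low nibble ML_MASK = 15 (match length saturates)
     *   literals: 'h' 'e' 'l' 'l' 'o'
     *   offset  : 0x30 0x00        48 as little-endian 16-bit
     *   extra ML: 0x01             (20 - MINMATCH) - ML_MASK = 16 - 15 = 1
     */
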
355
356
357
typedef struct {
358
    int off;
359
    int len;
360
    int back;  /* negative value */
361
} LZ4HC_match_t;
362
363
LZ4HC_match_t LZ4HC_searchExtDict(const BYTE* ip, U32 ipIndex,
364
        const BYTE* const iLowLimit, const BYTE* const iHighLimit,
365
        const LZ4HC_CCtx_internal* dictCtx, U32 gDictEndIndex,
366
        int currentBestML, int nbAttempts)
367
0
{
368
0
    size_t const lDictEndIndex = (size_t)(dictCtx->end - dictCtx->prefixStart) + dictCtx->dictLimit;
369
0
    U32 lDictMatchIndex = dictCtx->hashTable[LZ4HC_hashPtr(ip)];
370
0
    U32 matchIndex = lDictMatchIndex + gDictEndIndex - (U32)lDictEndIndex;
371
0
    int offset = 0, sBack = 0;
372
0
    assert(lDictEndIndex <= 1 GB);
373
0
    if (lDictMatchIndex>0)
374
0
        DEBUGLOG(7, "lDictEndIndex = %zu, lDictMatchIndex = %u", lDictEndIndex, lDictMatchIndex);
375
0
    while (ipIndex - matchIndex <= LZ4_DISTANCE_MAX && nbAttempts--) {
376
0
        const BYTE* const matchPtr = dictCtx->prefixStart - dictCtx->dictLimit + lDictMatchIndex;
377
378
0
        if (LZ4_read32(matchPtr) == LZ4_read32(ip)) {
379
0
            int mlt;
380
0
            int back = 0;
381
0
            const BYTE* vLimit = ip + (lDictEndIndex - lDictMatchIndex);
382
0
            if (vLimit > iHighLimit) vLimit = iHighLimit;
383
0
            mlt = (int)LZ4_count(ip+MINMATCH, matchPtr+MINMATCH, vLimit) + MINMATCH;
384
0
            back = (ip > iLowLimit) ? LZ4HC_countBack(ip, matchPtr, iLowLimit, dictCtx->prefixStart) : 0;
385
0
            mlt -= back;
386
0
            if (mlt > currentBestML) {
387
0
                currentBestML = mlt;
388
0
                offset = (int)(ipIndex - matchIndex);
389
0
                sBack = back;
390
0
                DEBUGLOG(7, "found match of length %i within extDictCtx", currentBestML);
391
0
        }   }
392
393
0
        {   U32 const nextOffset = DELTANEXTU16(dictCtx->chainTable, lDictMatchIndex);
394
0
            lDictMatchIndex -= nextOffset;
395
0
            matchIndex -= nextOffset;
396
0
    }   }
397
398
0
    {   LZ4HC_match_t md;
399
0
        md.len = currentBestML;
400
0
        md.off = offset;
401
0
        md.back = sBack;
402
0
        return md;
403
0
    }
404
0
}
405
406
typedef LZ4HC_match_t (*LZ4MID_searchIntoDict_f)(const BYTE* ip, U32 ipIndex,
407
        const BYTE* const iHighLimit,
408
        const LZ4HC_CCtx_internal* dictCtx, U32 gDictEndIndex);
409
410
static LZ4HC_match_t LZ4MID_searchHCDict(const BYTE* ip, U32 ipIndex,
411
        const BYTE* const iHighLimit,
412
        const LZ4HC_CCtx_internal* dictCtx, U32 gDictEndIndex)
413
0
{
414
0
    return LZ4HC_searchExtDict(ip,ipIndex,
415
0
                            ip, iHighLimit,
416
0
                            dictCtx, gDictEndIndex,
417
0
                            MINMATCH-1, 2);
418
0
}
419
420
static LZ4HC_match_t LZ4MID_searchExtDict(const BYTE* ip, U32 ipIndex,
421
        const BYTE* const iHighLimit,
422
        const LZ4HC_CCtx_internal* dictCtx, U32 gDictEndIndex)
423
0
{
424
0
    size_t const lDictEndIndex = (size_t)(dictCtx->end - dictCtx->prefixStart) + dictCtx->dictLimit;
425
0
    const U32* const hash4Table = dictCtx->hashTable;
426
0
    const U32* const hash8Table = hash4Table + LZ4MID_HASHTABLESIZE;
427
0
    DEBUGLOG(7, "LZ4MID_searchExtDict (ipIdx=%u)", ipIndex);
428
429
    /* search long match first */
430
0
    {   U32 l8DictMatchIndex = hash8Table[LZ4MID_hash8Ptr(ip)];
431
0
        U32 m8Index = l8DictMatchIndex + gDictEndIndex - (U32)lDictEndIndex;
432
0
        assert(lDictEndIndex <= 1 GB);
433
0
        if (ipIndex - m8Index <= LZ4_DISTANCE_MAX) {
434
0
            const BYTE* const matchPtr = dictCtx->prefixStart - dictCtx->dictLimit + l8DictMatchIndex;
435
0
            const size_t safeLen = MIN(lDictEndIndex - l8DictMatchIndex, (size_t)(iHighLimit - ip));
436
0
            int mlt = (int)LZ4_count(ip, matchPtr, ip + safeLen);
437
0
            if (mlt >= MINMATCH) {
438
0
                LZ4HC_match_t md;
439
0
                DEBUGLOG(7, "Found long ExtDict match of len=%u", mlt);
440
0
                md.len = mlt;
441
0
                md.off = (int)(ipIndex - m8Index);
442
0
                md.back = 0;
443
0
                return md;
444
0
            }
445
0
        }
446
0
    }
447
448
    /* search for short match second */
449
0
    {   U32 l4DictMatchIndex = hash4Table[LZ4MID_hash4Ptr(ip)];
450
0
        U32 m4Index = l4DictMatchIndex + gDictEndIndex - (U32)lDictEndIndex;
451
0
        if (ipIndex - m4Index <= LZ4_DISTANCE_MAX) {
452
0
            const BYTE* const matchPtr = dictCtx->prefixStart - dictCtx->dictLimit + l4DictMatchIndex;
453
0
            const size_t safeLen = MIN(lDictEndIndex - l4DictMatchIndex, (size_t)(iHighLimit - ip));
454
0
            int mlt = (int)LZ4_count(ip, matchPtr, ip + safeLen);
455
0
            if (mlt >= MINMATCH) {
456
0
                LZ4HC_match_t md;
457
0
                DEBUGLOG(7, "Found short ExtDict match of len=%u", mlt);
458
0
                md.len = mlt;
459
0
                md.off = (int)(ipIndex - m4Index);
460
0
                md.back = 0;
461
0
                return md;
462
0
            }
463
0
        }
464
0
    }
465
466
    /* nothing found */
467
0
    {   LZ4HC_match_t const md = {0, 0, 0 };
468
0
        return md;
469
0
    }
470
0
}
471
472
/**************************************
473
*  Mid Compression (level 2)
474
**************************************/
475
476
LZ4_FORCE_INLINE void
477
LZ4MID_addPosition(U32* hTable, U32 hValue, U32 index)
478
24.6M
{
479
24.6M
    hTable[hValue] = index;
480
24.6M
}
481
482
3.90M
#define ADDPOS8(_p, _idx) LZ4MID_addPosition(hash8Table, LZ4MID_hash8Ptr(_p), _idx)
483
2.34M
#define ADDPOS4(_p, _idx) LZ4MID_addPosition(hash4Table, LZ4MID_hash4Ptr(_p), _idx)
484
485
/* Fill hash tables with references into dictionary.
486
 * The resulting table is only exploitable by LZ4MID (level 2) */
487
static void
488
LZ4MID_fillHTable (LZ4HC_CCtx_internal* cctx, const void* dict, size_t size)
489
0
{
490
0
    U32* const hash4Table = cctx->hashTable;
491
0
    U32* const hash8Table = hash4Table + LZ4MID_HASHTABLESIZE;
492
0
    const BYTE* const prefixPtr = (const BYTE*)dict;
493
0
    U32 const prefixIdx = cctx->dictLimit;
494
0
    U32 const target = prefixIdx + (U32)size - LZ4MID_HASHSIZE;
495
0
    U32 idx = cctx->nextToUpdate;
496
0
    assert(dict == cctx->prefixStart);
497
0
    DEBUGLOG(4, "LZ4MID_fillHTable (size:%zu)", size);
498
0
    if (size <= LZ4MID_HASHSIZE)
499
0
        return;
500
501
0
    for (; idx < target; idx += 3) {
502
0
        ADDPOS4(prefixPtr+idx-prefixIdx, idx);
503
0
        ADDPOS8(prefixPtr+idx+1-prefixIdx, idx+1);
504
0
    }
505
506
0
    idx = (size > 32 KB + LZ4MID_HASHSIZE) ? target - 32 KB : cctx->nextToUpdate;
507
0
    for (; idx < target; idx += 1) {
508
0
        ADDPOS8(prefixPtr+idx-prefixIdx, idx);
509
0
    }
510
511
0
    cctx->nextToUpdate = target;
512
0
}
513
514
static LZ4MID_searchIntoDict_f select_searchDict_function(const LZ4HC_CCtx_internal* dictCtx)
515
0
{
516
0
    if (dictCtx == NULL) return NULL;
517
0
    if (LZ4HC_getCLevelParams(dictCtx->compressionLevel).strat == lz4mid)
518
0
        return LZ4MID_searchExtDict;
519
0
    return LZ4MID_searchHCDict;
520
0
}
521
522
static int LZ4MID_compress (
523
    LZ4HC_CCtx_internal* const ctx,
524
    const char* const src,
525
    char* const dst,
526
    int* srcSizePtr,
527
    int const maxOutputSize,
528
    const limitedOutput_directive limit,
529
    const dictCtx_directive dict
530
    )
531
7.95k
{
532
7.95k
    U32* const hash4Table = ctx->hashTable;
533
7.95k
    U32* const hash8Table = hash4Table + LZ4MID_HASHTABLESIZE;
534
7.95k
    const BYTE* ip = (const BYTE*)src;
535
7.95k
    const BYTE* anchor = ip;
536
7.95k
    const BYTE* const iend = ip + *srcSizePtr;
537
7.95k
    const BYTE* const mflimit = iend - MFLIMIT;
538
7.95k
    const BYTE* const matchlimit = (iend - LASTLITERALS);
539
7.95k
    const BYTE* const ilimit = (iend - LZ4MID_HASHSIZE);
540
7.95k
    BYTE* op = (BYTE*)dst;
541
7.95k
    BYTE* oend = op + maxOutputSize;
542
543
7.95k
    const BYTE* const prefixPtr = ctx->prefixStart;
544
7.95k
    const U32 prefixIdx = ctx->dictLimit;
545
7.95k
    const U32 ilimitIdx = (U32)(ilimit - prefixPtr) + prefixIdx;
546
7.95k
    const BYTE* const dictStart = ctx->dictStart;
547
7.95k
    const U32 dictIdx = ctx->lowLimit;
548
7.95k
    const U32 gDictEndIndex = ctx->lowLimit;
549
7.95k
    const LZ4MID_searchIntoDict_f searchIntoDict = (dict == usingDictCtxHc) ? select_searchDict_function(ctx->dictCtx) : NULL;
550
7.95k
    unsigned matchLength;
551
7.95k
    unsigned matchDistance;
552
553
    /* input sanitization */
554
7.95k
    DEBUGLOG(5, "LZ4MID_compress (%i bytes)", *srcSizePtr);
555
7.95k
    if (dict == usingDictCtxHc) DEBUGLOG(5, "usingDictCtxHc");
556
7.95k
    assert(*srcSizePtr >= 0);
557
7.95k
    if (*srcSizePtr) assert(src != NULL);
558
7.95k
    if (maxOutputSize) assert(dst != NULL);
559
7.95k
    if (*srcSizePtr < 0) return 0;  /* invalid */
560
7.95k
    if (maxOutputSize < 0) return 0; /* invalid */
561
7.95k
    if (*srcSizePtr > LZ4_MAX_INPUT_SIZE) {
562
        /* forbidden: no input is allowed to be that large */
563
0
        return 0;
564
0
    }
565
7.95k
    if (limit == fillOutput) oend -= LASTLITERALS;  /* Hack for support LZ4 format restriction */
566
7.95k
    if (*srcSizePtr < LZ4_minLength)
567
7
        goto _lz4mid_last_literals;  /* Input too small, no compression (all literals) */
568
569
    /* main loop */
570
9.35M
    while (ip <= mflimit) {
571
9.34M
        const U32 ipIndex = (U32)(ip - prefixPtr) + prefixIdx;
572
        /* search long match */
573
9.34M
        {   U32 const h8 = LZ4MID_hash8Ptr(ip);
574
9.34M
            U32 const pos8 = hash8Table[h8];
575
9.34M
            assert(h8 < LZ4MID_HASHTABLESIZE);
576
9.34M
            assert(pos8 < ipIndex);
577
9.34M
            LZ4MID_addPosition(hash8Table, h8, ipIndex);
578
9.34M
            if (ipIndex - pos8 <= LZ4_DISTANCE_MAX) {
579
                /* match candidate found */
580
1.10M
                if (pos8 >= prefixIdx) {
581
1.10M
                    const BYTE* const matchPtr = prefixPtr + pos8 - prefixIdx;
582
1.10M
                    assert(matchPtr < ip);
583
1.10M
                    matchLength = LZ4_count(ip, matchPtr, matchlimit);
584
1.10M
                    if (matchLength >= MINMATCH) {
585
358k
                        DEBUGLOG(7, "found long match at pos %u (len=%u)", pos8, matchLength);
586
358k
                        matchDistance = ipIndex - pos8;
587
358k
                        goto _lz4mid_encode_sequence;
588
358k
                    }
589
1.10M
                } else {
590
0
                    if (pos8 >= dictIdx) {
591
                        /* extDict match candidate */
592
0
                        const BYTE* const matchPtr = dictStart + (pos8 - dictIdx);
593
0
                        const size_t safeLen = MIN(prefixIdx - pos8, (size_t)(matchlimit - ip));
594
0
                        matchLength = LZ4_count(ip, matchPtr, ip + safeLen);
595
0
                        if (matchLength >= MINMATCH) {
596
0
                            DEBUGLOG(7, "found long match at ExtDict pos %u (len=%u)", pos8, matchLength);
597
0
                            matchDistance = ipIndex - pos8;
598
0
                            goto _lz4mid_encode_sequence;
599
0
                        }
600
0
                    }
601
0
                }
602
1.10M
        }   }
603
        /* search short match */
604
8.98M
        {   U32 const h4 = LZ4MID_hash4Ptr(ip);
605
8.98M
            U32 const pos4 = hash4Table[h4];
606
8.98M
            assert(h4 < LZ4MID_HASHTABLESIZE);
607
8.98M
            assert(pos4 < ipIndex);
608
8.98M
            LZ4MID_addPosition(hash4Table, h4, ipIndex);
609
8.98M
            if (ipIndex - pos4 <= LZ4_DISTANCE_MAX) {
610
                /* match candidate found */
611
1.12M
                if (pos4 >= prefixIdx) {
612
                /* only search within prefix */
613
1.12M
                    const BYTE* const matchPtr = prefixPtr + (pos4 - prefixIdx);
614
1.12M
                    assert(matchPtr < ip);
615
1.12M
                    assert(matchPtr >= prefixPtr);
616
1.12M
                    matchLength = LZ4_count(ip, matchPtr, matchlimit);
617
1.12M
                    if (matchLength >= MINMATCH) {
618
                        /* short match found, let's just check ip+1 for longer */
619
425k
                        U32 const h8 = LZ4MID_hash8Ptr(ip+1);
620
425k
                        U32 const pos8 = hash8Table[h8];
621
425k
                        U32 const m2Distance = ipIndex + 1 - pos8;
622
425k
                        matchDistance = ipIndex - pos4;
623
425k
                        if ( m2Distance <= LZ4_DISTANCE_MAX
624
89.8k
                        && pos8 >= prefixIdx /* only search within prefix */
625
89.8k
                        && likely(ip < mflimit)
626
425k
                        ) {
627
89.7k
                            const BYTE* const m2Ptr = prefixPtr + (pos8 - prefixIdx);
628
89.7k
                            unsigned ml2 = LZ4_count(ip+1, m2Ptr, matchlimit);
629
89.7k
                            if (ml2 > matchLength) {
630
47.4k
                                LZ4MID_addPosition(hash8Table, h8, ipIndex+1);
631
47.4k
                                ip++;
632
47.4k
                                matchLength = ml2;
633
47.4k
                                matchDistance = m2Distance;
634
47.4k
                        }   }
635
425k
                        goto _lz4mid_encode_sequence;
636
425k
                    }
637
1.12M
                } else {
638
0
                    if (pos4 >= dictIdx) {
639
                        /* extDict match candidate */
640
0
                        const BYTE* const matchPtr = dictStart + (pos4 - dictIdx);
641
0
                        const size_t safeLen = MIN(prefixIdx - pos4, (size_t)(matchlimit - ip));
642
0
                        matchLength = LZ4_count(ip, matchPtr, ip + safeLen);
643
0
                        if (matchLength >= MINMATCH) {
644
0
                            DEBUGLOG(7, "found match at ExtDict pos %u (len=%u)", pos4, matchLength);
645
0
                            matchDistance = ipIndex - pos4;
646
0
                            goto _lz4mid_encode_sequence;
647
0
                        }
648
0
                    }
649
0
                }
650
1.12M
        }   }
651
        /* no match found in prefix */
652
8.56M
        if ( (dict == usingDictCtxHc)
653
0
          && (ipIndex - gDictEndIndex < LZ4_DISTANCE_MAX - 8) ) {
654
            /* search a match into external dictionary */
655
0
            LZ4HC_match_t dMatch = searchIntoDict(ip, ipIndex,
656
0
                    matchlimit,
657
0
                    ctx->dictCtx, gDictEndIndex);
658
0
            if (dMatch.len >= MINMATCH) {
659
0
                DEBUGLOG(7, "found Dictionary match (offset=%i)", dMatch.off);
660
0
                assert(dMatch.back == 0);
661
0
                matchLength = (unsigned)dMatch.len;
662
0
                matchDistance = (unsigned)dMatch.off;
663
0
                goto _lz4mid_encode_sequence;
664
0
            }
665
0
        }
666
        /* no match found */
667
8.56M
        ip += 1 + ((ip-anchor) >> 9);  /* skip faster over incompressible data */
668
8.56M
        continue;
669
670
784k
_lz4mid_encode_sequence:
671
        /* catch back */
672
799k
        while (((ip > anchor) & ((U32)(ip-prefixPtr) > matchDistance)) && (unlikely(ip[-1] == ip[-(int)matchDistance-1]))) {
673
14.4k
            ip--;  matchLength++;
674
14.4k
        };
675
676
        /* fill table with beginning of match */
677
784k
        ADDPOS8(ip+1, ipIndex+1);
678
784k
        ADDPOS8(ip+2, ipIndex+2);
679
784k
        ADDPOS4(ip+1, ipIndex+1);
680
681
        /* encode */
682
784k
        {   BYTE* const saved_op = op;
683
            /* LZ4HC_encodeSequence always updates @op; on success, it updates @ip and @anchor */
684
784k
            if (LZ4HC_encodeSequence(UPDATABLE(ip, op, anchor),
685
784k
                    (int)matchLength, (int)matchDistance,
686
784k
                    limit, oend) ) {
687
539
                op = saved_op;  /* restore @op value before failed LZ4HC_encodeSequence */
688
539
                goto _lz4mid_dest_overflow;
689
539
            }
690
784k
        }
691
692
        /* fill table with end of match */
693
784k
        {   U32 endMatchIdx = (U32)(ip-prefixPtr) + prefixIdx;
694
784k
            U32 pos_m2 = endMatchIdx - 2;
695
784k
            if (pos_m2 < ilimitIdx) {
696
780k
                if (likely(ip - prefixPtr > 5)) {
697
779k
                    ADDPOS8(ip-5, endMatchIdx - 5);
698
779k
                }
699
780k
                ADDPOS8(ip-3, endMatchIdx - 3);
700
780k
                ADDPOS8(ip-2, endMatchIdx - 2);
701
780k
                ADDPOS4(ip-2, endMatchIdx - 2);
702
780k
                ADDPOS4(ip-1, endMatchIdx - 1);
703
780k
            }
704
784k
        }
705
784k
    }
706
707
7.41k
_lz4mid_last_literals:
708
    /* Encode Last Literals */
709
7.41k
    {   size_t lastRunSize = (size_t)(iend - anchor);  /* literals */
710
7.41k
        size_t llAdd = (lastRunSize + 255 - RUN_MASK) / 255;
711
7.41k
        size_t const totalSize = 1 + llAdd + lastRunSize;
712
7.41k
        if (limit == fillOutput) oend += LASTLITERALS;  /* restore correct value */
713
7.41k
        if (limit && (op + totalSize > oend)) {
714
901
            if (limit == limitedOutput) return 0;  /* not enough space in @dst */
715
            /* adapt lastRunSize to fill 'dest' */
716
0
            lastRunSize  = (size_t)(oend - op) - 1 /*token*/;
717
0
            llAdd = (lastRunSize + 256 - RUN_MASK) / 256;
718
0
            lastRunSize -= llAdd;
719
0
        }
720
6.51k
        DEBUGLOG(6, "Final literal run : %i literals", (int)lastRunSize);
721
6.51k
        ip = anchor + lastRunSize;  /* can be != iend if limit==fillOutput */
722
723
6.51k
        if (lastRunSize >= RUN_MASK) {
724
1.25k
            size_t accumulator = lastRunSize - RUN_MASK;
725
1.25k
            *op++ = (RUN_MASK << ML_BITS);
726
2.49k
            for(; accumulator >= 255 ; accumulator -= 255)
727
1.24k
                *op++ = 255;
728
1.25k
            *op++ = (BYTE) accumulator;
729
5.26k
        } else {
730
5.26k
            *op++ = (BYTE)(lastRunSize << ML_BITS);
731
5.26k
        }
732
6.51k
        assert(lastRunSize <= (size_t)(oend - op));
733
6.51k
        LZ4_memcpy(op, anchor, lastRunSize);
734
6.51k
        op += lastRunSize;
735
6.51k
    }
736
737
    /* End */
738
6.51k
    DEBUGLOG(5, "compressed %i bytes into %i bytes", *srcSizePtr, (int)((char*)op - dst));
739
6.51k
    assert(ip >= (const BYTE*)src);
740
6.51k
    assert(ip <= iend);
741
6.51k
    *srcSizePtr = (int)(ip - (const BYTE*)src);
742
6.51k
    assert((char*)op >= dst);
743
6.51k
    assert(op <= oend);
744
6.51k
    assert((char*)op - dst < INT_MAX);
745
6.51k
    return (int)((char*)op - dst);
746
747
539
_lz4mid_dest_overflow:
748
539
    if (limit == fillOutput) {
749
        /* Assumption : @ip, @anchor, @optr and @matchLength must be set correctly */
750
0
        size_t const ll = (size_t)(ip - anchor);
751
0
        size_t const ll_addbytes = (ll + 240) / 255;
752
0
        size_t const ll_totalCost = 1 + ll_addbytes + ll;
753
0
        BYTE* const maxLitPos = oend - 3; /* 2 for offset, 1 for token */
754
0
        DEBUGLOG(6, "Last sequence is overflowing : %u literals, %u remaining space",
755
0
                (unsigned)ll, (unsigned)(oend-op));
756
0
        if (op + ll_totalCost <= maxLitPos) {
757
            /* ll validated; now adjust match length */
758
0
            size_t const bytesLeftForMl = (size_t)(maxLitPos - (op+ll_totalCost));
759
0
            size_t const maxMlSize = MINMATCH + (ML_MASK-1) + (bytesLeftForMl * 255);
760
0
            assert(maxMlSize < INT_MAX);
761
0
            if ((size_t)matchLength > maxMlSize) matchLength= (unsigned)maxMlSize;
762
0
            if ((oend + LASTLITERALS) - (op + ll_totalCost + 2) - 1 + matchLength >= MFLIMIT) {
763
0
            DEBUGLOG(6, "Let's encode a last sequence (ll=%u, ml=%u)", (unsigned)ll, matchLength);
764
0
                LZ4HC_encodeSequence(UPDATABLE(ip, op, anchor),
765
0
                        (int)matchLength, (int)matchDistance,
766
0
                        notLimited, oend);
767
0
        }   }
768
0
        DEBUGLOG(6, "Let's finish with a run of literals (%u bytes left)", (unsigned)(oend-op));
769
0
        goto _lz4mid_last_literals;
770
0
    }
771
    /* compression failed */
772
539
    return 0;
773
539
}
774
775
776
/**************************************
777
*  HC Compression - Search
778
**************************************/
779
780
/* Update chains up to ip (excluded) */
781
LZ4_FORCE_INLINE void LZ4HC_Insert (LZ4HC_CCtx_internal* hc4, const BYTE* ip)
782
25.4M
{
783
25.4M
    U16* const chainTable = hc4->chainTable;
784
25.4M
    U32* const hashTable  = hc4->hashTable;
785
25.4M
    const BYTE* const prefixPtr = hc4->prefixStart;
786
25.4M
    U32 const prefixIdx = hc4->dictLimit;
787
25.4M
    U32 const target = (U32)(ip - prefixPtr) + prefixIdx;
788
25.4M
    U32 idx = hc4->nextToUpdate;
789
25.4M
    assert(ip >= prefixPtr);
790
25.4M
    assert(target >= prefixIdx);
791
792
76.4M
    while (idx < target) {
793
50.9M
        U32 const h = LZ4HC_hashPtr(prefixPtr+idx-prefixIdx);
794
50.9M
        size_t delta = idx - hashTable[h];
795
50.9M
        if (delta>LZ4_DISTANCE_MAX) delta = LZ4_DISTANCE_MAX;
796
50.9M
        DELTANEXTU16(chainTable, idx) = (U16)delta;
797
50.9M
        hashTable[h] = idx;
798
50.9M
        idx++;
799
50.9M
    }
800
801
25.4M
    hc4->nextToUpdate = target;
802
25.4M
}
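
LZ4HC_Insert() above maintains the search structure used by the loops that follow: hashTable[h] holds the most recent position whose 4-byte hash is h, and DELTANEXTU16(chainTable, pos) holds the 16-bit distance back to the previous position with the same hash (clamped to LZ4_DISTANCE_MAX). Consuming the structure amounts to hopping backwards along those deltas; a sketch with the comparison and window checks stripped out:

    #include <stdint.h>

    /* Walk a hash chain built by LZ4HC_Insert(): start from the head stored in
     * hashTable and subtract the per-position delta until the window limit or
     * the attempt budget is reached. */
    static void demo_walk_chain(const uint32_t* hashTable, const uint16_t* chainTable,
                                uint32_t h, uint32_t lowestMatchIndex, int nbAttempts)
    {
        uint32_t matchIndex = hashTable[h];
        while (matchIndex >= lowestMatchIndex && nbAttempts-- > 0) {
            /* ...compare the candidate at matchIndex against the current position... */
            matchIndex -= chainTable[(uint16_t)matchIndex];
        }
    }
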
803
804
#if defined(_MSC_VER)
805
#  define LZ4HC_rotl32(x,r) _rotl(x,r)
806
#else
807
0
#  define LZ4HC_rotl32(x,r) ((x << r) | (x >> (32 - r)))
808
#endif
809
810
811
static U32 LZ4HC_rotatePattern(size_t const rotate, U32 const pattern)
812
0
{
813
0
    size_t const bitsToRotate = (rotate & (sizeof(pattern) - 1)) << 3;
814
0
    if (bitsToRotate == 0) return pattern;
815
0
    return LZ4HC_rotl32(pattern, (int)bitsToRotate);
816
0
}
817
818
/* LZ4HC_countPattern() :
819
 * pattern32 must be a sample of repetitive pattern of length 1, 2 or 4 (but not 3!) */
820
static unsigned
821
LZ4HC_countPattern(const BYTE* ip, const BYTE* const iEnd, U32 const pattern32)
822
14.0M
{
823
14.0M
    const BYTE* const iStart = ip;
824
14.0M
    reg_t const pattern = (sizeof(pattern)==8) ?
825
14.0M
        (reg_t)pattern32 + (((reg_t)pattern32) << (sizeof(pattern)*4)) : pattern32;
826
827
17.0M
    while (likely(ip < iEnd-(sizeof(pattern)-1))) {
828
17.0M
        reg_t const diff = LZ4_read_ARCH(ip) ^ pattern;
829
17.0M
        if (!diff) { ip+=sizeof(pattern); continue; }
830
14.0M
        ip += LZ4_NbCommonBytes(diff);
831
14.0M
        return (unsigned)(ip - iStart);
832
17.0M
    }
833
834
10.2k
    if (LZ4_isLittleEndian()) {
835
10.2k
        reg_t patternByte = pattern;
836
42.6k
        while ((ip<iEnd) && (*ip == (BYTE)patternByte)) {
837
32.4k
            ip++; patternByte >>= 8;
838
32.4k
        }
839
10.2k
    } else {  /* big endian */
840
0
        U32 bitOffset = (sizeof(pattern)*8) - 8;
841
0
        while (ip < iEnd) {
842
0
            BYTE const byte = (BYTE)(pattern >> bitOffset);
843
0
            if (*ip != byte) break;
844
0
            ip ++; bitOffset -= 8;
845
0
    }   }
846
847
10.2k
    return (unsigned)(ip - iStart);
848
14.0M
}
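
LZ4HC_countPattern() above measures how far a repetitive pattern of period 1, 2 or 4 extends from ip (a period of 3 cannot fit a whole number of times into the 32-bit sample, hence the "not 3!" restriction). A worked example with illustrative input:

    /* Worked example for LZ4HC_countPattern():
     *   buffer    : "aaaaaaab"   (8 bytes starting at ip)
     *   pattern32 : 0x61616161   ('a' repeated, period 1)
     *   result    : 7            matching bytes before the mismatching 'b'
     */
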
849
850
/* LZ4HC_reverseCountPattern() :
851
 * pattern must be a sample of repetitive pattern of length 1, 2 or 4 (but not 3!)
852
 * read using natural platform endianness */
853
static unsigned
854
LZ4HC_reverseCountPattern(const BYTE* ip, const BYTE* const iLow, U32 pattern)
855
13.7M
{
856
13.7M
    const BYTE* const iStart = ip;
857
858
35.7M
    while (likely(ip >= iLow+4)) {
859
35.7M
        if (LZ4_read32(ip-4) != pattern) break;
860
22.0M
        ip -= 4;
861
22.0M
    }
862
13.7M
    {   const BYTE* bytePtr = (const BYTE*)(&pattern) + 3; /* works for any endianness */
863
28.5M
        while (likely(ip>iLow)) {
864
28.5M
            if (ip[-1] != *bytePtr) break;
865
14.8M
            ip--; bytePtr--;
866
14.8M
    }   }
867
13.7M
    return (unsigned)(iStart - ip);
868
13.7M
}
869
870
/* LZ4HC_protectDictEnd() :
871
 * Checks if the match is in the last 3 bytes of the dictionary, so reading the
872
 * 4 byte MINMATCH would overflow.
873
 * @returns true if the match index is okay.
874
 */
875
static int LZ4HC_protectDictEnd(U32 const dictLimit, U32 const matchIndex)
876
27.6M
{
877
27.6M
    return ((U32)((dictLimit - 1) - matchIndex) >= 3);
878
27.6M
}
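
The unsigned comparison in LZ4HC_protectDictEnd() above folds two cases into one test; spelled out with the index ranges it implies:

    /* (U32)((dictLimit - 1) - matchIndex) >= 3 accepts:
     *   matchIndex <= dictLimit - 4 : at least MINMATCH readable bytes remain
     *                                 before the end of the dictionary
     *   matchIndex >= dictLimit     : the subtraction wraps around, so matches
     *                                 inside the prefix always pass
     * Only matchIndex in [dictLimit-3, dictLimit-1] is rejected. */
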
879
880
typedef enum { rep_untested, rep_not, rep_confirmed } repeat_state_e;
881
typedef enum { favorCompressionRatio=0, favorDecompressionSpeed } HCfavor_e;
882
883
884
LZ4_FORCE_INLINE LZ4HC_match_t
885
LZ4HC_InsertAndGetWiderMatch (
886
        LZ4HC_CCtx_internal* const hc4,
887
        const BYTE* const ip,
888
        const BYTE* const iLowLimit, const BYTE* const iHighLimit,
889
        int longest,
890
        const int maxNbAttempts,
891
        const int patternAnalysis, const int chainSwap,
892
        const dictCtx_directive dict,
893
        const HCfavor_e favorDecSpeed)
894
25.4M
{
895
25.4M
    U16* const chainTable = hc4->chainTable;
896
25.4M
    U32* const hashTable = hc4->hashTable;
897
25.4M
    const LZ4HC_CCtx_internal* const dictCtx = hc4->dictCtx;
898
25.4M
    const BYTE* const prefixPtr = hc4->prefixStart;
899
25.4M
    const U32 prefixIdx = hc4->dictLimit;
900
25.4M
    const U32 ipIndex = (U32)(ip - prefixPtr) + prefixIdx;
901
25.4M
    const int withinStartDistance = (hc4->lowLimit + (LZ4_DISTANCE_MAX + 1) > ipIndex);
902
25.4M
    const U32 lowestMatchIndex = (withinStartDistance) ? hc4->lowLimit : ipIndex - LZ4_DISTANCE_MAX;
903
25.4M
    const BYTE* const dictStart = hc4->dictStart;
904
25.4M
    const U32 dictIdx = hc4->lowLimit;
905
25.4M
    const BYTE* const dictEnd = dictStart + prefixIdx - dictIdx;
906
25.4M
    int const lookBackLength = (int)(ip-iLowLimit);
907
25.4M
    int nbAttempts = maxNbAttempts;
908
25.4M
    U32 matchChainPos = 0;
909
25.4M
    U32 const pattern = LZ4_read32(ip);
910
25.4M
    U32 matchIndex;
911
25.4M
    repeat_state_e repeat = rep_untested;
912
25.4M
    size_t srcPatternLength = 0;
913
25.4M
    int offset = 0, sBack = 0;
914
915
25.4M
    DEBUGLOG(7, "LZ4HC_InsertAndGetWiderMatch");
916
    /* First Match */
917
25.4M
    LZ4HC_Insert(hc4, ip);  /* insert all prior positions up to ip (excluded) */
918
25.4M
    matchIndex = hashTable[LZ4HC_hashPtr(ip)];
919
25.4M
    DEBUGLOG(7, "First candidate match for pos %u found at index %u / %u (lowestMatchIndex)",
920
25.4M
                ipIndex, matchIndex, lowestMatchIndex);
921
922
59.2M
    while ((matchIndex>=lowestMatchIndex) && (nbAttempts>0)) {
923
33.7M
        int matchLength=0;
924
33.7M
        nbAttempts--;
925
33.7M
        assert(matchIndex < ipIndex);
926
33.7M
        if (favorDecSpeed && (ipIndex - matchIndex < 8)) {
927
            /* do nothing:
928
             * favorDecSpeed intentionally skips matches with offset < 8 */
929
33.7M
        } else if (matchIndex >= prefixIdx) {   /* within current Prefix */
930
33.7M
            const BYTE* const matchPtr = prefixPtr + (matchIndex - prefixIdx);
931
33.7M
            assert(matchPtr < ip);
932
33.7M
            assert(longest >= 1);
933
33.7M
            if (LZ4_read16(iLowLimit + longest - 1) == LZ4_read16(matchPtr - lookBackLength + longest - 1)) {
934
15.4M
                if (LZ4_read32(matchPtr) == pattern) {
935
15.4M
                    int const back = lookBackLength ? LZ4HC_countBack(ip, matchPtr, iLowLimit, prefixPtr) : 0;
936
15.4M
                    matchLength = MINMATCH + (int)LZ4_count(ip+MINMATCH, matchPtr+MINMATCH, iHighLimit);
937
15.4M
                    matchLength -= back;
938
15.4M
                    if (matchLength > longest) {
939
1.92M
                        longest = matchLength;
940
1.92M
                        offset = (int)(ipIndex - matchIndex);
941
1.92M
                        sBack = back;
942
1.92M
                        DEBUGLOG(7, "Found match of len=%i within prefix, offset=%i, back=%i", longest, offset, -back);
943
1.92M
            }   }   }
944
33.7M
        } else {   /* lowestMatchIndex <= matchIndex < dictLimit : within Ext Dict */
945
0
            const BYTE* const matchPtr = dictStart + (matchIndex - dictIdx);
946
0
            assert(matchIndex >= dictIdx);
947
0
            if ( likely(matchIndex <= prefixIdx - 4)
948
0
              && (LZ4_read32(matchPtr) == pattern) ) {
949
0
                int back = 0;
950
0
                const BYTE* vLimit = ip + (prefixIdx - matchIndex);
951
0
                if (vLimit > iHighLimit) vLimit = iHighLimit;
952
0
                matchLength = (int)LZ4_count(ip+MINMATCH, matchPtr+MINMATCH, vLimit) + MINMATCH;
953
0
                if ((ip+matchLength == vLimit) && (vLimit < iHighLimit))
954
0
                    matchLength += LZ4_count(ip+matchLength, prefixPtr, iHighLimit);
955
0
                back = lookBackLength ? LZ4HC_countBack(ip, matchPtr, iLowLimit, dictStart) : 0;
956
0
                matchLength -= back;
957
0
                if (matchLength > longest) {
958
0
                    longest = matchLength;
959
0
                    offset = (int)(ipIndex - matchIndex);
960
0
                    sBack = back;
961
0
                    DEBUGLOG(7, "Found match of len=%i within dict, offset=%i, back=%i", longest, offset, -back);
962
0
        }   }   }
963
964
33.7M
        if (chainSwap && matchLength==longest) {   /* better match => select a better chain */
965
0
            assert(lookBackLength==0);   /* search forward only */
966
0
            if (matchIndex + (U32)longest <= ipIndex) {
967
0
                int const kTrigger = 4;
968
0
                U32 distanceToNextMatch = 1;
969
0
                int const end = longest - MINMATCH + 1;
970
0
                int step = 1;
971
0
                int accel = 1 << kTrigger;
972
0
                int pos;
973
0
                for (pos = 0; pos < end; pos += step) {
974
0
                    U32 const candidateDist = DELTANEXTU16(chainTable, matchIndex + (U32)pos);
975
0
                    step = (accel++ >> kTrigger);
976
0
                    if (candidateDist > distanceToNextMatch) {
977
0
                        distanceToNextMatch = candidateDist;
978
0
                        matchChainPos = (U32)pos;
979
0
                        accel = 1 << kTrigger;
980
0
                }   }
981
0
                if (distanceToNextMatch > 1) {
982
0
                    if (distanceToNextMatch > matchIndex) break;   /* avoid overflow */
983
0
                    matchIndex -= distanceToNextMatch;
984
0
                    continue;
985
0
        }   }   }
986
987
33.7M
        {   U32 const distNextMatch = DELTANEXTU16(chainTable, matchIndex);
988
33.7M
            if (patternAnalysis && distNextMatch==1 && matchChainPos==0) {
989
13.9M
                U32 const matchCandidateIdx = matchIndex-1;
990
                /* may be a repeated pattern */
991
13.9M
                if (repeat == rep_untested) {
992
271k
                    if ( ((pattern & 0xFFFF) == (pattern >> 16))
993
271k
                      &  ((pattern & 0xFF)   == (pattern >> 24)) ) {
994
269k
                        DEBUGLOG(7, "Repeat pattern detected, char %02X", pattern >> 24);
995
269k
                        repeat = rep_confirmed;
996
269k
                        srcPatternLength = LZ4HC_countPattern(ip+sizeof(pattern), iHighLimit, pattern) + sizeof(pattern);
997
269k
                    } else {
998
1.27k
                        repeat = rep_not;
999
1.27k
                }   }
1000
13.9M
                if ( (repeat == rep_confirmed) && (matchCandidateIdx >= lowestMatchIndex)
1001
13.8M
                  && LZ4HC_protectDictEnd(prefixIdx, matchCandidateIdx) ) {
1002
13.8M
                    const int extDict = matchCandidateIdx < prefixIdx;
1003
13.8M
                    const BYTE* const matchPtr = extDict ? dictStart + (matchCandidateIdx - dictIdx) : prefixPtr + (matchCandidateIdx - prefixIdx);
1004
13.8M
                    if (LZ4_read32(matchPtr) == pattern) {  /* good candidate */
1005
13.7M
                        const BYTE* const iLimit = extDict ? dictEnd : iHighLimit;
1006
13.7M
                        size_t forwardPatternLength = LZ4HC_countPattern(matchPtr+sizeof(pattern), iLimit, pattern) + sizeof(pattern);
1007
13.7M
                        if (extDict && matchPtr + forwardPatternLength == iLimit) {
1008
0
                            U32 const rotatedPattern = LZ4HC_rotatePattern(forwardPatternLength, pattern);
1009
0
                            forwardPatternLength += LZ4HC_countPattern(prefixPtr, iHighLimit, rotatedPattern);
1010
0
                        }
1011
13.7M
                        {   const BYTE* const lowestMatchPtr = extDict ? dictStart : prefixPtr;
1012
13.7M
                            size_t backLength = LZ4HC_reverseCountPattern(matchPtr, lowestMatchPtr, pattern);
1013
13.7M
                            size_t currentSegmentLength;
1014
13.7M
                            if (!extDict
1015
13.7M
                              && matchPtr - backLength == prefixPtr
1016
72.9k
                              && dictIdx < prefixIdx) {
1017
0
                                U32 const rotatedPattern = LZ4HC_rotatePattern((U32)(-(int)backLength), pattern);
1018
0
                                backLength += LZ4HC_reverseCountPattern(dictEnd, dictStart, rotatedPattern);
1019
0
                            }
1020
                            /* Limit backLength not go further than lowestMatchIndex */
1021
13.7M
                            backLength = matchCandidateIdx - MAX(matchCandidateIdx - (U32)backLength, lowestMatchIndex);
1022
13.7M
                            assert(matchCandidateIdx - backLength >= lowestMatchIndex);
1023
13.7M
                            currentSegmentLength = backLength + forwardPatternLength;
1024
                            /* Adjust to end of pattern if the source pattern fits, otherwise the beginning of the pattern */
1025
13.7M
                            if ( (currentSegmentLength >= srcPatternLength)   /* current pattern segment large enough to contain full srcPatternLength */
1026
9.80M
                              && (forwardPatternLength <= srcPatternLength) ) { /* haven't reached this position yet */
1027
4.55M
                                U32 const newMatchIndex = matchCandidateIdx + (U32)forwardPatternLength - (U32)srcPatternLength;  /* best position, full pattern, might be followed by more match */
1028
4.55M
                                if (LZ4HC_protectDictEnd(prefixIdx, newMatchIndex))
1029
4.55M
                                    matchIndex = newMatchIndex;
1030
0
                                else {
1031
                                    /* Can only happen if started in the prefix */
1032
0
                                    assert(newMatchIndex >= prefixIdx - 3 && newMatchIndex < prefixIdx && !extDict);
1033
0
                                    matchIndex = prefixIdx;
1034
0
                                }
1035
9.24M
                            } else {
1036
9.24M
                                U32 const newMatchIndex = matchCandidateIdx - (U32)backLength;   /* farthest position in current segment, will find a match of length currentSegmentLength + maybe some back */
1037
9.24M
                                if (!LZ4HC_protectDictEnd(prefixIdx, newMatchIndex)) {
1038
0
                                    assert(newMatchIndex >= prefixIdx - 3 && newMatchIndex < prefixIdx && !extDict);
1039
0
                                    matchIndex = prefixIdx;
1040
9.24M
                                } else {
1041
9.24M
                                    matchIndex = newMatchIndex;
1042
9.24M
                                    if (lookBackLength==0) {  /* no back possible */
1043
4.27M
                                        size_t const maxML = MIN(currentSegmentLength, srcPatternLength);
1044
4.27M
                                        if ((size_t)longest < maxML) {
1045
66.6k
                                            assert(prefixPtr - prefixIdx + matchIndex != ip);
1046
66.6k
                                            if ((size_t)(ip - prefixPtr) + prefixIdx - matchIndex > LZ4_DISTANCE_MAX) break;
1047
66.6k
                                            assert(maxML < 2 GB);
1048
66.6k
                                            longest = (int)maxML;
1049
66.6k
                                            offset = (int)(ipIndex - matchIndex);
1050
66.6k
                                            assert(sBack == 0);
1051
66.6k
                                            DEBUGLOG(7, "Found repeat pattern match of len=%i, offset=%i", longest, offset);
1052
66.6k
                                        }
1053
4.27M
                                        {   U32 const distToNextPattern = DELTANEXTU16(chainTable, matchIndex);
1054
4.27M
                                            if (distToNextPattern > matchIndex) break;  /* avoid overflow */
1055
4.27M
                                            matchIndex -= distToNextPattern;
1056
4.27M
                        }   }   }   }   }
1057
13.7M
                        continue;
1058
13.7M
                }   }
1059
13.9M
        }   }   /* PA optimization */
1060
1061
        /* follow current chain */
1062
19.9M
        matchIndex -= DELTANEXTU16(chainTable, matchIndex + matchChainPos);
1063
1064
19.9M
    }  /* while ((matchIndex>=lowestMatchIndex) && (nbAttempts)) */
1065
1066
25.4M
    if ( dict == usingDictCtxHc
1067
0
      && nbAttempts > 0
1068
0
      && withinStartDistance) {
1069
0
        size_t const dictEndOffset = (size_t)(dictCtx->end - dictCtx->prefixStart) + dictCtx->dictLimit;
1070
0
        U32 dictMatchIndex = dictCtx->hashTable[LZ4HC_hashPtr(ip)];
1071
0
        assert(dictEndOffset <= 1 GB);
1072
0
        matchIndex = dictMatchIndex + lowestMatchIndex - (U32)dictEndOffset;
1073
0
        if (dictMatchIndex>0) DEBUGLOG(7, "dictEndOffset = %zu, dictMatchIndex = %u => relative matchIndex = %i", dictEndOffset, dictMatchIndex, (int)dictMatchIndex - (int)dictEndOffset);
1074
0
        while (ipIndex - matchIndex <= LZ4_DISTANCE_MAX && nbAttempts--) {
1075
0
            const BYTE* const matchPtr = dictCtx->prefixStart - dictCtx->dictLimit + dictMatchIndex;
1076
1077
0
            if (LZ4_read32(matchPtr) == pattern) {
1078
0
                int mlt;
1079
0
                int back = 0;
1080
0
                const BYTE* vLimit = ip + (dictEndOffset - dictMatchIndex);
1081
0
                if (vLimit > iHighLimit) vLimit = iHighLimit;
1082
0
                mlt = (int)LZ4_count(ip+MINMATCH, matchPtr+MINMATCH, vLimit) + MINMATCH;
1083
0
                back = lookBackLength ? LZ4HC_countBack(ip, matchPtr, iLowLimit, dictCtx->prefixStart) : 0;
1084
0
                mlt -= back;
1085
0
                if (mlt > longest) {
1086
0
                    longest = mlt;
1087
0
                    offset = (int)(ipIndex - matchIndex);
1088
0
                    sBack = back;
1089
0
                    DEBUGLOG(7, "found match of length %i within extDictCtx", longest);
1090
0
            }   }
1091
1092
0
            {   U32 const nextOffset = DELTANEXTU16(dictCtx->chainTable, dictMatchIndex);
1093
0
                dictMatchIndex -= nextOffset;
1094
0
                matchIndex -= nextOffset;
1095
0
    }   }   }
1096
1097
25.4M
    {   LZ4HC_match_t md;
1098
25.4M
        assert(longest >= 0);
1099
25.4M
        md.len = longest;
1100
25.4M
        md.off = offset;
1101
25.4M
        md.back = sBack;
1102
25.4M
        return md;
1103
25.4M
    }
1104
25.4M
}
1105
1106
LZ4_FORCE_INLINE LZ4HC_match_t
1107
LZ4HC_InsertAndFindBestMatch(LZ4HC_CCtx_internal* const hc4,   /* Index table will be updated */
1108
                       const BYTE* const ip, const BYTE* const iLimit,
1109
                       const int maxNbAttempts,
1110
                       const int patternAnalysis,
1111
                       const dictCtx_directive dict)
1112
24.2M
{
1113
24.2M
    DEBUGLOG(7, "LZ4HC_InsertAndFindBestMatch");
1114
    /* note : LZ4HC_InsertAndGetWiderMatch() is able to modify the starting position of a match (*startpos),
1115
     * but this won't be the case here, as we define iLowLimit==ip,
1116
     * so LZ4HC_InsertAndGetWiderMatch() won't be allowed to search past ip */
1117
24.2M
    return LZ4HC_InsertAndGetWiderMatch(hc4, ip, ip, iLimit, MINMATCH-1, maxNbAttempts, patternAnalysis, 0 /*chainSwap*/, dict, favorCompressionRatio);
1118
24.2M
}
1119
1120
1121
LZ4_FORCE_INLINE int LZ4HC_compress_hashChain (
1122
    LZ4HC_CCtx_internal* const ctx,
1123
    const char* const source,
1124
    char* const dest,
1125
    int* srcSizePtr,
1126
    int const maxOutputSize,
1127
    int maxNbAttempts,
1128
    const limitedOutput_directive limit,
1129
    const dictCtx_directive dict
1130
    )
1131
16.3k
{
1132
16.3k
    const int inputSize = *srcSizePtr;
1133
16.3k
    const int patternAnalysis = (maxNbAttempts > 128);   /* levels 9+ */
1134
1135
16.3k
    const BYTE* ip = (const BYTE*) source;
1136
16.3k
    const BYTE* anchor = ip;
1137
16.3k
    const BYTE* const iend = ip + inputSize;
1138
16.3k
    const BYTE* const mflimit = iend - MFLIMIT;
1139
16.3k
    const BYTE* const matchlimit = (iend - LASTLITERALS);
1140
1141
16.3k
    BYTE* optr = (BYTE*) dest;
1142
16.3k
    BYTE* op = (BYTE*) dest;
1143
16.3k
    BYTE* oend = op + maxOutputSize;
1144
1145
16.3k
    const BYTE* start0;
1146
16.3k
    const BYTE* start2 = NULL;
1147
16.3k
    const BYTE* start3 = NULL;
1148
16.3k
    LZ4HC_match_t m0, m1, m2, m3;
1149
16.3k
    const LZ4HC_match_t nomatch = {0, 0, 0};
1150
1151
    /* init */
1152
16.3k
    DEBUGLOG(5, "LZ4HC_compress_hashChain (dict?=>%i)", dict);
1153
16.3k
    *srcSizePtr = 0;
1154
16.3k
    if (limit == fillOutput) oend -= LASTLITERALS;                  /* Hack for support LZ4 format restriction */
1155
16.3k
    if (inputSize < LZ4_minLength) goto _last_literals;             /* Input too small, no compression (all literals) */
1156
1157
    /* Main Loop */
1158
24.2M
    while (ip <= mflimit) {
1159
24.2M
        m1 = LZ4HC_InsertAndFindBestMatch(ctx, ip, matchlimit, maxNbAttempts, patternAnalysis, dict);
1160
24.2M
        if (m1.len<MINMATCH) { ip++; continue; }
1161
1162
        /* saved, in case we would skip too much */
1163
879k
        start0 = ip; m0 = m1;
1164
1165
1.11M
_Search2:
1166
1.11M
        DEBUGLOG(7, "_Search2 (currently found match of size %i)", m1.len);
1167
1.11M
        if (ip+m1.len <= mflimit) {
1168
1.10M
            start2 = ip + m1.len - 2;
1169
1.10M
            m2 = LZ4HC_InsertAndGetWiderMatch(ctx,
1170
1.10M
                            start2, ip + 0, matchlimit, m1.len,
1171
1.10M
                            maxNbAttempts, patternAnalysis, 0, dict, favorCompressionRatio);
1172
1.10M
            start2 += m2.back;
1173
1.10M
        } else {
1174
6.74k
            m2 = nomatch;  /* do not search further */
1175
6.74k
        }
1176
1177
1.11M
        if (m2.len <= m1.len) { /* No better match => encode ML1 immediately */
1178
794k
            optr = op;
1179
794k
            if (LZ4HC_encodeSequence(UPDATABLE(ip, op, anchor),
1180
794k
                    m1.len, m1.off,
1181
794k
                    limit, oend) )
1182
654
                goto _dest_overflow;
1183
793k
            continue;
1184
794k
        }
1185
1186
316k
        if (start0 < ip) {   /* first match was skipped at least once */
1187
43.3k
            if (start2 < ip + m0.len) {  /* squeezing ML1 between ML0(original ML1) and ML2 */
1188
20.8k
                ip = start0; m1 = m0;  /* restore initial Match1 */
1189
20.8k
        }   }
1190
1191
        /* Here, start0==ip */
1192
316k
        if ((start2 - ip) < 3) {  /* First Match too small : removed */
1193
190k
            ip = start2;
1194
190k
            m1 = m2;
1195
190k
            goto _Search2;
1196
190k
        }
1197
1198
151k
_Search3:
1199
151k
        if ((start2 - ip) < OPTIMAL_ML) {
1200
130k
            int correction;
1201
130k
            int new_ml = m1.len;
1202
130k
            if (new_ml > OPTIMAL_ML) new_ml = OPTIMAL_ML;
1203
130k
            if (ip+new_ml > start2 + m2.len - MINMATCH)
1204
513
                new_ml = (int)(start2 - ip) + m2.len - MINMATCH;
1205
130k
            correction = new_ml - (int)(start2 - ip);
1206
130k
            if (correction > 0) {
1207
120k
                start2 += correction;
1208
120k
                m2.len -= correction;
1209
120k
            }
1210
130k
        }
1211
1212
151k
        if (start2 + m2.len <= mflimit) {
1213
147k
            start3 = start2 + m2.len - 3;
1214
147k
            m3 = LZ4HC_InsertAndGetWiderMatch(ctx,
1215
147k
                            start3, start2, matchlimit, m2.len,
1216
147k
                            maxNbAttempts, patternAnalysis, 0, dict, favorCompressionRatio);
1217
147k
            start3 += m3.back;
1218
147k
        } else {
1219
3.60k
            m3 = nomatch;  /* do not search further */
1220
3.60k
        }
1221
1222
151k
        if (m3.len <= m2.len) {  /* No better match => encode ML1 and ML2 */
1223
            /* ip & ref are known; Now for ml */
1224
85.6k
            if (start2 < ip+m1.len) m1.len = (int)(start2 - ip);
1225
            /* Now, encode 2 sequences */
1226
85.6k
            optr = op;
1227
85.6k
            if (LZ4HC_encodeSequence(UPDATABLE(ip, op, anchor),
1228
85.6k
                    m1.len, m1.off,
1229
85.6k
                    limit, oend) )
1230
190
                goto _dest_overflow;
1231
85.4k
            ip = start2;
1232
85.4k
            optr = op;
1233
85.4k
            if (LZ4HC_encodeSequence(UPDATABLE(ip, op, anchor),
1234
85.4k
                    m2.len, m2.off,
1235
85.4k
                    limit, oend) ) {
1236
20
                m1 = m2;
1237
20
                goto _dest_overflow;
1238
20
            }
1239
85.3k
            continue;
1240
85.4k
        }
1241
1242
65.3k
        if (start3 < ip+m1.len+3) {  /* Not enough space for match 2 : remove it */
1243
42.2k
            if (start3 >= (ip+m1.len)) {  /* can write Seq1 immediately ==> Seq2 is removed, so Seq3 becomes Seq1 */
1244
40.7k
                if (start2 < ip+m1.len) {
1245
2.90k
                    int correction = (int)(ip+m1.len - start2);
1246
2.90k
                    start2 += correction;
1247
2.90k
                    m2.len -= correction;
1248
2.90k
                    if (m2.len < MINMATCH) {
1249
162
                        start2 = start3;
1250
162
                        m2 = m3;
1251
162
                    }
1252
2.90k
                }
1253
1254
40.7k
                optr = op;
1255
40.7k
                if (LZ4HC_encodeSequence(UPDATABLE(ip, op, anchor),
1256
40.7k
                        m1.len, m1.off,
1257
40.7k
                        limit, oend) )
1258
149
                    goto _dest_overflow;
1259
40.6k
                ip  = start3;
1260
40.6k
                m1 = m3;
1261
1262
40.6k
                start0 = start2;
1263
40.6k
                m0 = m2;
1264
40.6k
                goto _Search2;
1265
40.7k
            }
1266
1267
1.47k
            start2 = start3;
1268
1.47k
            m2 = m3;
1269
1.47k
            goto _Search3;
1270
42.2k
        }
1271
1272
        /*
1273
        * OK, now we have 3 ascending matches;
1274
        * let's write the first one ML1.
1275
        * ip & ref are known; Now decide ml.
1276
        */
1277
23.1k
        if (start2 < ip+m1.len) {
1278
8.61k
            if ((start2 - ip) < OPTIMAL_ML) {
1279
0
                int correction;
1280
0
                if (m1.len > OPTIMAL_ML) m1.len = OPTIMAL_ML;
1281
0
                if (ip + m1.len > start2 + m2.len - MINMATCH)
1282
0
                    m1.len = (int)(start2 - ip) + m2.len - MINMATCH;
1283
0
                correction = m1.len - (int)(start2 - ip);
1284
0
                if (correction > 0) {
1285
0
                    start2 += correction;
1286
0
                    m2.len -= correction;
1287
0
                }
1288
8.61k
            } else {
1289
8.61k
                m1.len = (int)(start2 - ip);
1290
8.61k
            }
1291
8.61k
        }
1292
23.1k
        optr = op;
1293
23.1k
        if ( LZ4HC_encodeSequence(UPDATABLE(ip, op, anchor),
1294
23.1k
                m1.len, m1.off,
1295
23.1k
                limit, oend) )
1296
48
            goto _dest_overflow;
1297
1298
        /* ML2 becomes ML1 */
1299
23.0k
        ip = start2; m1 = m2;
1300
1301
        /* ML3 becomes ML2 */
1302
23.0k
        start2 = start3; m2 = m3;
1303
1304
        /* let's find a new ML3 */
1305
23.0k
        goto _Search3;
1306
23.1k
    }
1307
1308
15.3k
_last_literals:
1309
    /* Encode Last Literals */
1310
15.3k
    {   size_t lastRunSize = (size_t)(iend - anchor);  /* literals */
1311
15.3k
        size_t llAdd = (lastRunSize + 255 - RUN_MASK) / 255;
1312
15.3k
        size_t const totalSize = 1 + llAdd + lastRunSize;
1313
15.3k
        if (limit == fillOutput) oend += LASTLITERALS;  /* restore correct value */
1314
15.3k
        if (limit && (op + totalSize > oend)) {
1315
2.19k
            if (limit == limitedOutput) return 0;
1316
            /* adapt lastRunSize to fill 'dest' */
1317
0
            lastRunSize  = (size_t)(oend - op) - 1 /*token*/;
1318
0
            llAdd = (lastRunSize + 256 - RUN_MASK) / 256;
1319
0
            lastRunSize -= llAdd;
1320
0
        }
1321
13.1k
        DEBUGLOG(6, "Final literal run : %i literals", (int)lastRunSize);
1322
13.1k
        ip = anchor + lastRunSize;  /* can be != iend if limit==fillOutput */
1323
1324
13.1k
        if (lastRunSize >= RUN_MASK) {
1325
2.87k
            size_t accumulator = lastRunSize - RUN_MASK;
1326
2.87k
            *op++ = (RUN_MASK << ML_BITS);
1327
4.78k
            for(; accumulator >= 255 ; accumulator -= 255) *op++ = 255;
1328
2.87k
            *op++ = (BYTE) accumulator;
1329
10.2k
        } else {
1330
10.2k
            *op++ = (BYTE)(lastRunSize << ML_BITS);
1331
10.2k
        }
1332
13.1k
        LZ4_memcpy(op, anchor, lastRunSize);
1333
13.1k
        op += lastRunSize;
1334
13.1k
    }
1335
1336
    /* End */
1337
0
    *srcSizePtr = (int) (((const char*)ip) - source);
1338
13.1k
    return (int) (((char*)op)-dest);
1339
1340
1.06k
_dest_overflow:
1341
1.06k
    if (limit == fillOutput) {
1342
        /* Assumption : @ip, @anchor, @optr and @m1 must be set correctly */
1343
0
        size_t const ll = (size_t)(ip - anchor);
1344
0
        size_t const ll_addbytes = (ll + 240) / 255;
1345
0
        size_t const ll_totalCost = 1 + ll_addbytes + ll;
1346
0
        BYTE* const maxLitPos = oend - 3; /* 2 for offset, 1 for token */
1347
0
        DEBUGLOG(6, "Last sequence overflowing");
1348
0
        op = optr;  /* restore correct out pointer */
1349
0
        if (op + ll_totalCost <= maxLitPos) {
1350
            /* ll validated; now adjust match length */
1351
0
            size_t const bytesLeftForMl = (size_t)(maxLitPos - (op+ll_totalCost));
1352
0
            size_t const maxMlSize = MINMATCH + (ML_MASK-1) + (bytesLeftForMl * 255);
1353
0
            assert(maxMlSize < INT_MAX); assert(m1.len >= 0);
1354
0
            if ((size_t)m1.len > maxMlSize) m1.len = (int)maxMlSize;
1355
0
            if ((oend + LASTLITERALS) - (op + ll_totalCost + 2) - 1 + m1.len >= MFLIMIT) {
1356
0
                LZ4HC_encodeSequence(UPDATABLE(ip, op, anchor), m1.len, m1.off, notLimited, oend);
1357
0
        }   }
1358
0
        goto _last_literals;
1359
0
    }
1360
    /* compression failed */
1361
1.06k
    return 0;
1362
1.06k
}
1363
1364
1365
static int LZ4HC_compress_optimal( LZ4HC_CCtx_internal* ctx,
1366
    const char* const source, char* dst,
1367
    int* srcSizePtr, int dstCapacity,
1368
    int const nbSearches, size_t sufficient_len,
1369
    const limitedOutput_directive limit, int const fullUpdate,
1370
    const dictCtx_directive dict,
1371
    const HCfavor_e favorDecSpeed);
1372
1373
LZ4_FORCE_INLINE int
1374
LZ4HC_compress_generic_internal (
1375
            LZ4HC_CCtx_internal* const ctx,
1376
            const char* const src,
1377
            char* const dst,
1378
            int* const srcSizePtr,
1379
            int const dstCapacity,
1380
            int cLevel,
1381
            const limitedOutput_directive limit,
1382
            const dictCtx_directive dict
1383
            )
1384
24.3k
{
1385
24.3k
    DEBUGLOG(5, "LZ4HC_compress_generic_internal(src=%p, srcSize=%d)",
1386
24.3k
                src, *srcSizePtr);
1387
1388
24.3k
    if (limit == fillOutput && dstCapacity < 1) return 0;   /* Impossible to store anything */
1389
24.3k
    if ((U32)*srcSizePtr > (U32)LZ4_MAX_INPUT_SIZE) return 0;  /* Unsupported input size (too large or negative) */
1390
1391
24.3k
    ctx->end += *srcSizePtr;
1392
24.3k
    {   cParams_t const cParam = LZ4HC_getCLevelParams(cLevel);
1393
24.3k
        HCfavor_e const favor = ctx->favorDecSpeed ? favorDecompressionSpeed : favorCompressionRatio;
1394
24.3k
        int result;
1395
1396
24.3k
        if (cParam.strat == lz4mid) {
1397
7.95k
            result = LZ4MID_compress(ctx,
1398
7.95k
                                src, dst, srcSizePtr, dstCapacity,
1399
7.95k
                                limit, dict);
1400
16.3k
        } else if (cParam.strat == lz4hc) {
1401
16.3k
            result = LZ4HC_compress_hashChain(ctx,
1402
16.3k
                                src, dst, srcSizePtr, dstCapacity,
1403
16.3k
                                cParam.nbSearches, limit, dict);
1404
16.3k
        } else {
1405
0
            assert(cParam.strat == lz4opt);
1406
0
            result = LZ4HC_compress_optimal(ctx,
1407
0
                                src, dst, srcSizePtr, dstCapacity,
1408
0
                                cParam.nbSearches, cParam.targetLength, limit,
1409
0
                                cLevel >= LZ4HC_CLEVEL_MAX,   /* ultra mode */
1410
0
                                dict, favor);
1411
0
        }
1412
24.3k
        if (result <= 0) ctx->dirty = 1;
1413
24.3k
        return result;
1414
24.3k
    }
1415
24.3k
}
1416
1417
static void LZ4HC_setExternalDict(LZ4HC_CCtx_internal* ctxPtr, const BYTE* newBlock);
1418
1419
static int
1420
LZ4HC_compress_generic_noDictCtx (
1421
        LZ4HC_CCtx_internal* const ctx,
1422
        const char* const src,
1423
        char* const dst,
1424
        int* const srcSizePtr,
1425
        int const dstCapacity,
1426
        int cLevel,
1427
        limitedOutput_directive limit
1428
        )
1429
24.3k
{
1430
24.3k
    assert(ctx->dictCtx == NULL);
1431
24.3k
    return LZ4HC_compress_generic_internal(ctx, src, dst, srcSizePtr, dstCapacity, cLevel, limit, noDictCtx);
1432
24.3k
}
1433
1434
static int isStateCompatible(const LZ4HC_CCtx_internal* ctx1, const LZ4HC_CCtx_internal* ctx2)
1435
0
{
1436
0
    int const isMid1 = LZ4HC_getCLevelParams(ctx1->compressionLevel).strat == lz4mid;
1437
0
    int const isMid2 = LZ4HC_getCLevelParams(ctx2->compressionLevel).strat == lz4mid;
1438
0
    return !(isMid1 ^ isMid2);
1439
0
}
1440
1441
static int
1442
LZ4HC_compress_generic_dictCtx (
1443
        LZ4HC_CCtx_internal* const ctx,
1444
        const char* const src,
1445
        char* const dst,
1446
        int* const srcSizePtr,
1447
        int const dstCapacity,
1448
        int cLevel,
1449
        limitedOutput_directive limit
1450
        )
1451
0
{
1452
0
    const size_t position = (size_t)(ctx->end - ctx->prefixStart) + (ctx->dictLimit - ctx->lowLimit);
1453
0
    assert(ctx->dictCtx != NULL);
1454
0
    if (position >= 64 KB) {
1455
0
        ctx->dictCtx = NULL;
1456
0
        return LZ4HC_compress_generic_noDictCtx(ctx, src, dst, srcSizePtr, dstCapacity, cLevel, limit);
1457
0
    } else if (position == 0 && *srcSizePtr > 4 KB && isStateCompatible(ctx, ctx->dictCtx)) {
1458
0
        LZ4_memcpy(ctx, ctx->dictCtx, sizeof(LZ4HC_CCtx_internal));
1459
0
        LZ4HC_setExternalDict(ctx, (const BYTE *)src);
1460
0
        ctx->compressionLevel = (short)cLevel;
1461
0
        return LZ4HC_compress_generic_noDictCtx(ctx, src, dst, srcSizePtr, dstCapacity, cLevel, limit);
1462
0
    } else {
1463
0
        return LZ4HC_compress_generic_internal(ctx, src, dst, srcSizePtr, dstCapacity, cLevel, limit, usingDictCtxHc);
1464
0
    }
1465
0
}
1466
1467
static int
1468
LZ4HC_compress_generic (
1469
        LZ4HC_CCtx_internal* const ctx,
1470
        const char* const src,
1471
        char* const dst,
1472
        int* const srcSizePtr,
1473
        int const dstCapacity,
1474
        int cLevel,
1475
        limitedOutput_directive limit
1476
        )
1477
24.3k
{
1478
24.3k
    if (ctx->dictCtx == NULL) {
1479
24.3k
        return LZ4HC_compress_generic_noDictCtx(ctx, src, dst, srcSizePtr, dstCapacity, cLevel, limit);
1480
24.3k
    } else {
1481
0
        return LZ4HC_compress_generic_dictCtx(ctx, src, dst, srcSizePtr, dstCapacity, cLevel, limit);
1482
0
    }
1483
24.3k
}
1484
1485
1486
0
int LZ4_sizeofStateHC(void) { return (int)sizeof(LZ4_streamHC_t); }
1487
1488
static size_t LZ4_streamHC_t_alignment(void)
1489
48.6k
{
1490
48.6k
#if LZ4_ALIGN_TEST
1491
48.6k
    typedef struct { char c; LZ4_streamHC_t t; } t_a;
1492
48.6k
    return sizeof(t_a) - sizeof(LZ4_streamHC_t);
1493
#else
1494
    return 1;  /* effectively disabled */
1495
#endif
1496
48.6k
}
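
LZ4_streamHC_t_alignment() relies on a classic portable-C probe: in a struct { char c; T t; }, the member t lands at an offset equal to T's alignment requirement, so sizeof(wrapper) - sizeof(T) recovers that alignment without needing C11 _Alignof. A minimal standalone sketch of the same trick, using an illustrative my_t type that is not part of lz4hc:

#include <stdio.h>
#include <stddef.h>

typedef struct { char c; double d; long l; } my_t;   /* illustrative type, not part of lz4hc */

/* same probe as LZ4_streamHC_t_alignment() : the padding inserted between
 * 'c' and 't' makes the size difference equal to my_t's alignment */
typedef struct { char c; my_t t; } align_probe_t;

int main(void)
{
    size_t const alignment = sizeof(align_probe_t) - sizeof(my_t);
    printf("alignment of my_t = %u bytes\n", (unsigned)alignment);
    return 0;
}
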
1497
1498
/* state is presumed correctly initialized,
1499
 * in which case its size and alignment have already been validated */
1500
int LZ4_compress_HC_extStateHC_fastReset (void* state, const char* src, char* dst, int srcSize, int dstCapacity, int compressionLevel)
1501
24.3k
{
1502
24.3k
    LZ4HC_CCtx_internal* const ctx = &((LZ4_streamHC_t*)state)->internal_donotuse;
1503
24.3k
    if (!LZ4_isAligned(state, LZ4_streamHC_t_alignment())) return 0;
1504
24.3k
    LZ4_resetStreamHC_fast((LZ4_streamHC_t*)state, compressionLevel);
1505
24.3k
    LZ4HC_init_internal (ctx, (const BYTE*)src);
1506
24.3k
    if (dstCapacity < LZ4_compressBound(srcSize))
1507
24.3k
        return LZ4HC_compress_generic (ctx, src, dst, &srcSize, dstCapacity, compressionLevel, limitedOutput);
1508
0
    else
1509
0
        return LZ4HC_compress_generic (ctx, src, dst, &srcSize, dstCapacity, compressionLevel, notLimited);
1510
24.3k
}
1511
1512
int LZ4_compress_HC_extStateHC (void* state, const char* src, char* dst, int srcSize, int dstCapacity, int compressionLevel)
1513
24.3k
{
1514
24.3k
    LZ4_streamHC_t* const ctx = LZ4_initStreamHC(state, sizeof(*ctx));
1515
24.3k
    if (ctx==NULL) return 0;   /* init failure */
1516
24.3k
    return LZ4_compress_HC_extStateHC_fastReset(state, src, dst, srcSize, dstCapacity, compressionLevel);
1517
24.3k
}
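
LZ4_compress_HC_extStateHC() re-runs the full LZ4_initStreamHC() on every call, whereas the fastReset variant above only performs the cheap reset. A rough sketch of the intended reuse pattern for many independent buffers; compress_many() and its buffers are hypothetical, and it assumes the fastReset prototype is visible in your build (some releases declare it behind LZ4_HC_STATIC_LINKING_ONLY in lz4hc.h):

#include <stdlib.h>

#define LZ4_HC_STATIC_LINKING_ONLY   /* assumption : fastReset may be a static-only prototype */
#include "lz4hc.h"

/* Reuse one heap-allocated state for many independent inputs : pay for the full
 * initialization once (LZ4_compress_HC_extStateHC), then take the cheaper
 * fastReset path for the remaining calls. */
int compress_many(const char* const* srcs, const int* srcSizes, int nbBuffers,
                  char* dst, int dstCapacity)
{
    int i, total = 0;
    void* const state = malloc((size_t)LZ4_sizeofStateHC());
    if (state == NULL) return -1;

    for (i = 0; i < nbBuffers; i++) {
        int const cSize = (i == 0)
            ? LZ4_compress_HC_extStateHC(state, srcs[i], dst + total,
                                         srcSizes[i], dstCapacity - total, 9)
            : LZ4_compress_HC_extStateHC_fastReset(state, srcs[i], dst + total,
                                                   srcSizes[i], dstCapacity - total, 9);
        if (cSize <= 0) { total = -1; break; }   /* 0 means error (e.g. dst too small) */
        total += cSize;
    }
    free(state);
    return total;
}
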
1518
1519
int LZ4_compress_HC(const char* src, char* dst, int srcSize, int dstCapacity, int compressionLevel)
1520
24.3k
{
1521
24.3k
    int cSize;
1522
24.3k
#if defined(LZ4HC_HEAPMODE) && LZ4HC_HEAPMODE==1
1523
24.3k
    LZ4_streamHC_t* const statePtr = (LZ4_streamHC_t*)ALLOC(sizeof(LZ4_streamHC_t));
1524
24.3k
    if (statePtr==NULL) return 0;
1525
#else
1526
    LZ4_streamHC_t state;
1527
    LZ4_streamHC_t* const statePtr = &state;
1528
#endif
1529
24.3k
    DEBUGLOG(5, "LZ4_compress_HC")
1530
24.3k
    cSize = LZ4_compress_HC_extStateHC(statePtr, src, dst, srcSize, dstCapacity, compressionLevel);
1531
24.3k
#if defined(LZ4HC_HEAPMODE) && LZ4HC_HEAPMODE==1
1532
24.3k
    FREEMEM(statePtr);
1533
24.3k
#endif
1534
24.3k
    return cSize;
1535
24.3k
}
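
As a usage reference, a one-shot round trip through this API only needs a destination buffer sized with LZ4_compressBound(). The sketch below is illustrative, assuming only the public lz4hc.h/lz4.h headers; the sample string, buffer handling and level 9 are arbitrary choices:

#include <stdio.h>
#include <stdlib.h>
#include "lz4hc.h"   /* also pulls in lz4.h, which provides LZ4_compressBound() */

int main(void)
{
    const char src[] = "yada yada yada yada yada yada yada yada";   /* arbitrary sample */
    int const srcSize = (int)sizeof(src);
    int const dstCapacity = LZ4_compressBound(srcSize);   /* worst-case compressed size */
    char* const dst = (char*)malloc((size_t)dstCapacity);
    int cSize;

    if (dst == NULL) return 1;
    /* the level is clamped internally : <1 becomes LZ4HC_CLEVEL_DEFAULT, >LZ4HC_CLEVEL_MAX is capped */
    cSize = LZ4_compress_HC(src, dst, srcSize, dstCapacity, 9);
    if (cSize <= 0) { free(dst); return 1; }   /* 0 means compression failed */

    printf("compressed %d bytes into %d bytes\n", srcSize, cSize);
    free(dst);
    return 0;
}
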
1536
1537
/* state is presumed sized correctly (>= sizeof(LZ4_streamHC_t)) */
1538
int LZ4_compress_HC_destSize(void* state, const char* source, char* dest, int* sourceSizePtr, int targetDestSize, int cLevel)
1539
0
{
1540
0
    LZ4_streamHC_t* const ctx = LZ4_initStreamHC(state, sizeof(*ctx));
1541
0
    if (ctx==NULL) return 0;   /* init failure */
1542
0
    LZ4HC_init_internal(&ctx->internal_donotuse, (const BYTE*) source);
1543
0
    LZ4_setCompressionLevel(ctx, cLevel);
1544
0
    return LZ4HC_compress_generic(&ctx->internal_donotuse, source, dest, sourceSizePtr, targetDestSize, cLevel, fillOutput);
1545
0
}
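
LZ4_compress_HC_destSize() inverts the usual contract: the output budget targetDestSize is fixed, and *sourceSizePtr is rewritten to report how many input bytes were actually consumed. A small sketch of that calling pattern, with arbitrary buffer sizes and fill data chosen only for illustration:

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "lz4hc.h"

int main(void)
{
    char src[4096];
    char dst[1024];                      /* hard output budget */
    int  srcSize = (int)sizeof(src);     /* in : available input ; out : bytes consumed */
    void* const state = malloc((size_t)LZ4_sizeofStateHC());
    int cSize;

    if (state == NULL) return 1;
    memset(src, 'A', sizeof(src));       /* arbitrary, highly compressible input */

    cSize = LZ4_compress_HC_destSize(state, src, dst, &srcSize, (int)sizeof(dst), 9);
    /* at most sizeof(dst) bytes were written ; srcSize now reports how much
     * of the input actually fit into that budget */
    printf("consumed %d input bytes -> produced %d output bytes\n", srcSize, cSize);

    free(state);
    return 0;
}
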
1546
1547
1548
1549
/**************************************
1550
*  Streaming Functions
1551
**************************************/
1552
/* allocation */
1553
#if !defined(LZ4_STATIC_LINKING_ONLY_DISABLE_MEMORY_ALLOCATION)
1554
LZ4_streamHC_t* LZ4_createStreamHC(void)
1555
0
{
1556
0
    LZ4_streamHC_t* const state =
1557
0
        (LZ4_streamHC_t*)ALLOC_AND_ZERO(sizeof(LZ4_streamHC_t));
1558
0
    if (state == NULL) return NULL;
1559
0
    LZ4_setCompressionLevel(state, LZ4HC_CLEVEL_DEFAULT);
1560
0
    return state;
1561
0
}
1562
1563
int LZ4_freeStreamHC (LZ4_streamHC_t* LZ4_streamHCPtr)
1564
0
{
1565
0
    DEBUGLOG(4, "LZ4_freeStreamHC(%p)", LZ4_streamHCPtr);
1566
0
    if (!LZ4_streamHCPtr) return 0;  /* support free on NULL */
1567
0
    FREEMEM(LZ4_streamHCPtr);
1568
0
    return 0;
1569
0
}
1570
#endif
1571
1572
1573
LZ4_streamHC_t* LZ4_initStreamHC (void* buffer, size_t size)
1574
24.3k
{
1575
24.3k
    LZ4_streamHC_t* const LZ4_streamHCPtr = (LZ4_streamHC_t*)buffer;
1576
24.3k
    DEBUGLOG(4, "LZ4_initStreamHC(%p, %u)", buffer, (unsigned)size);
1577
    /* check conditions */
1578
24.3k
    if (buffer == NULL) return NULL;
1579
24.3k
    if (size < sizeof(LZ4_streamHC_t)) return NULL;
1580
24.3k
    if (!LZ4_isAligned(buffer, LZ4_streamHC_t_alignment())) return NULL;
1581
    /* init */
1582
24.3k
    { LZ4HC_CCtx_internal* const hcstate = &(LZ4_streamHCPtr->internal_donotuse);
1583
24.3k
      MEM_INIT(hcstate, 0, sizeof(*hcstate)); }
1584
24.3k
    LZ4_setCompressionLevel(LZ4_streamHCPtr, LZ4HC_CLEVEL_DEFAULT);
1585
24.3k
    return LZ4_streamHCPtr;
1586
24.3k
}
1587
1588
/* just a stub */
1589
void LZ4_resetStreamHC (LZ4_streamHC_t* LZ4_streamHCPtr, int compressionLevel)
1590
0
{
1591
0
    LZ4_initStreamHC(LZ4_streamHCPtr, sizeof(*LZ4_streamHCPtr));
1592
0
    LZ4_setCompressionLevel(LZ4_streamHCPtr, compressionLevel);
1593
0
}
1594
1595
void LZ4_resetStreamHC_fast (LZ4_streamHC_t* LZ4_streamHCPtr, int compressionLevel)
1596
24.3k
{
1597
24.3k
    LZ4HC_CCtx_internal* const s = &LZ4_streamHCPtr->internal_donotuse;
1598
24.3k
    DEBUGLOG(5, "LZ4_resetStreamHC_fast(%p, %d)", LZ4_streamHCPtr, compressionLevel);
1599
24.3k
    if (s->dirty) {
1600
0
        LZ4_initStreamHC(LZ4_streamHCPtr, sizeof(*LZ4_streamHCPtr));
1601
24.3k
    } else {
1602
24.3k
        assert(s->end >= s->prefixStart);
1603
24.3k
        s->dictLimit += (U32)(s->end - s->prefixStart);
1604
24.3k
        s->prefixStart = NULL;
1605
24.3k
        s->end = NULL;
1606
24.3k
        s->dictCtx = NULL;
1607
24.3k
    }
1608
24.3k
    LZ4_setCompressionLevel(LZ4_streamHCPtr, compressionLevel);
1609
24.3k
}
1610
1611
void LZ4_setCompressionLevel(LZ4_streamHC_t* LZ4_streamHCPtr, int compressionLevel)
1612
48.6k
{
1613
48.6k
    DEBUGLOG(5, "LZ4_setCompressionLevel(%p, %d)", LZ4_streamHCPtr, compressionLevel);
1614
48.6k
    if (compressionLevel < 1) compressionLevel = LZ4HC_CLEVEL_DEFAULT;
1615
48.6k
    if (compressionLevel > LZ4HC_CLEVEL_MAX) compressionLevel = LZ4HC_CLEVEL_MAX;
1616
48.6k
    LZ4_streamHCPtr->internal_donotuse.compressionLevel = (short)compressionLevel;
1617
48.6k
}
1618
1619
void LZ4_favorDecompressionSpeed(LZ4_streamHC_t* LZ4_streamHCPtr, int favor)
1620
0
{
1621
0
    LZ4_streamHCPtr->internal_donotuse.favorDecSpeed = (favor!=0);
1622
0
}
1623
1624
/* LZ4_loadDictHC() :
1625
 * LZ4_streamHCPtr is presumed properly initialized */
1626
int LZ4_loadDictHC (LZ4_streamHC_t* LZ4_streamHCPtr,
1627
              const char* dictionary, int dictSize)
1628
0
{
1629
0
    LZ4HC_CCtx_internal* const ctxPtr = &LZ4_streamHCPtr->internal_donotuse;
1630
0
    cParams_t cp;
1631
0
    DEBUGLOG(4, "LZ4_loadDictHC(ctx:%p, dict:%p, dictSize:%d, clevel=%d)", LZ4_streamHCPtr, dictionary, dictSize, ctxPtr->compressionLevel);
1632
0
    assert(dictSize >= 0);
1633
0
    assert(LZ4_streamHCPtr != NULL);
1634
0
    if (dictSize > 64 KB) {
1635
0
        dictionary += (size_t)dictSize - 64 KB;
1636
0
        dictSize = 64 KB;
1637
0
    }
1638
    /* need a full initialization, there are bad side-effects when using resetFast() */
1639
0
    {   int const cLevel = ctxPtr->compressionLevel;
1640
0
        LZ4_initStreamHC(LZ4_streamHCPtr, sizeof(*LZ4_streamHCPtr));
1641
0
        LZ4_setCompressionLevel(LZ4_streamHCPtr, cLevel);
1642
0
        cp = LZ4HC_getCLevelParams(cLevel);
1643
0
    }
1644
0
    LZ4HC_init_internal (ctxPtr, (const BYTE*)dictionary);
1645
0
    ctxPtr->end = (const BYTE*)dictionary + dictSize;
1646
0
    if (cp.strat == lz4mid) {
1647
0
        LZ4MID_fillHTable (ctxPtr, dictionary, (size_t)dictSize);
1648
0
    } else {
1649
0
        if (dictSize >= LZ4HC_HASHSIZE) LZ4HC_Insert (ctxPtr, ctxPtr->end-3);
1650
0
    }
1651
0
    return dictSize;
1652
0
}
1653
1654
0
void LZ4_attach_HC_dictionary(LZ4_streamHC_t *working_stream, const LZ4_streamHC_t *dictionary_stream) {
1655
0
    working_stream->internal_donotuse.dictCtx = dictionary_stream != NULL ? &(dictionary_stream->internal_donotuse) : NULL;
1656
0
}
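
LZ4_loadDictHC() and LZ4_attach_HC_dictionary() combine into the shared-dictionary pattern: load the dictionary once into a dedicated stream, then point working streams at it instead of reloading it for every compression. A rough sketch under the assumption that the attach prototype is visible in your build (it may sit behind LZ4_HC_STATIC_LINKING_ONLY in some releases); compress_with_shared_dict(), its buffers and level 9 are purely illustrative:

#include <stdlib.h>

#define LZ4_HC_STATIC_LINKING_ONLY   /* assumption : attach API may be a static-only prototype */
#include "lz4hc.h"

/* dictBuf/dictSize and srcBuf/srcSize are assumed to be supplied by the caller */
int compress_with_shared_dict(const char* dictBuf, int dictSize,
                              const char* srcBuf, int srcSize,
                              char* dstBuf, int dstCapacity)
{
    int cSize = 0;
    LZ4_streamHC_t* const dictStream = LZ4_createStreamHC();
    LZ4_streamHC_t* const workStream = LZ4_createStreamHC();
    if (dictStream == NULL || workStream == NULL) goto cleanup;

    /* load the dictionary once ; only its last 64 KB are retained */
    LZ4_loadDictHC(dictStream, dictBuf, dictSize);

    /* reference it from the working stream ; nothing is copied here */
    LZ4_resetStreamHC_fast(workStream, 9);           /* level 9 is an arbitrary choice */
    LZ4_attach_HC_dictionary(workStream, dictStream);

    cSize = LZ4_compress_HC_continue(workStream, srcBuf, dstBuf, srcSize, dstCapacity);

cleanup:
    LZ4_freeStreamHC(dictStream);    /* both accept NULL */
    LZ4_freeStreamHC(workStream);
    return cSize;   /* 0 on error */
}
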
1657
1658
/* compression */
1659
1660
static void LZ4HC_setExternalDict(LZ4HC_CCtx_internal* ctxPtr, const BYTE* newBlock)
1661
0
{
1662
0
    DEBUGLOG(4, "LZ4HC_setExternalDict(%p, %p)", ctxPtr, newBlock);
1663
0
    if ( (ctxPtr->end >= ctxPtr->prefixStart + 4)
1664
0
      && (LZ4HC_getCLevelParams(ctxPtr->compressionLevel).strat != lz4mid) ) {
1665
0
        LZ4HC_Insert (ctxPtr, ctxPtr->end-3);  /* Referencing remaining dictionary content */
1666
0
    }
1667
1668
    /* Only one memory segment for extDict, so any previous extDict is lost at this stage */
1669
0
    ctxPtr->lowLimit  = ctxPtr->dictLimit;
1670
0
    ctxPtr->dictStart  = ctxPtr->prefixStart;
1671
0
    ctxPtr->dictLimit += (U32)(ctxPtr->end - ctxPtr->prefixStart);
1672
0
    ctxPtr->prefixStart = newBlock;
1673
0
    ctxPtr->end  = newBlock;
1674
0
    ctxPtr->nextToUpdate = ctxPtr->dictLimit;   /* match referencing will resume from there */
1675
1676
    /* cannot reference an extDict and a dictCtx at the same time */
1677
0
    ctxPtr->dictCtx = NULL;
1678
0
}
1679
1680
static int
1681
LZ4_compressHC_continue_generic (LZ4_streamHC_t* LZ4_streamHCPtr,
1682
                                 const char* src, char* dst,
1683
                                 int* srcSizePtr, int dstCapacity,
1684
                                 limitedOutput_directive limit)
1685
0
{
1686
0
    LZ4HC_CCtx_internal* const ctxPtr = &LZ4_streamHCPtr->internal_donotuse;
1687
0
    DEBUGLOG(5, "LZ4_compressHC_continue_generic(ctx=%p, src=%p, srcSize=%d, limit=%d)",
1688
0
                LZ4_streamHCPtr, src, *srcSizePtr, limit);
1689
0
    assert(ctxPtr != NULL);
1690
    /* auto-init if forgotten */
1691
0
    if (ctxPtr->prefixStart == NULL)
1692
0
        LZ4HC_init_internal (ctxPtr, (const BYTE*) src);
1693
1694
    /* Check overflow */
1695
0
    if ((size_t)(ctxPtr->end - ctxPtr->prefixStart) + ctxPtr->dictLimit > 2 GB) {
1696
0
        size_t dictSize = (size_t)(ctxPtr->end - ctxPtr->prefixStart);
1697
0
        if (dictSize > 64 KB) dictSize = 64 KB;
1698
0
        LZ4_loadDictHC(LZ4_streamHCPtr, (const char*)(ctxPtr->end) - dictSize, (int)dictSize);
1699
0
    }
1700
1701
    /* Check if blocks follow each other */
1702
0
    if ((const BYTE*)src != ctxPtr->end)
1703
0
        LZ4HC_setExternalDict(ctxPtr, (const BYTE*)src);
1704
1705
    /* Check overlapping input/dictionary space */
1706
0
    {   const BYTE* sourceEnd = (const BYTE*) src + *srcSizePtr;
1707
0
        const BYTE* const dictBegin = ctxPtr->dictStart;
1708
0
        const BYTE* const dictEnd   = ctxPtr->dictStart + (ctxPtr->dictLimit - ctxPtr->lowLimit);
1709
0
        if ((sourceEnd > dictBegin) && ((const BYTE*)src < dictEnd)) {
1710
0
            if (sourceEnd > dictEnd) sourceEnd = dictEnd;
1711
0
            ctxPtr->lowLimit += (U32)(sourceEnd - ctxPtr->dictStart);
1712
0
            ctxPtr->dictStart += (U32)(sourceEnd - ctxPtr->dictStart);
1713
            /* invalidate dictionary if it's too small */
1714
0
            if (ctxPtr->dictLimit - ctxPtr->lowLimit < LZ4HC_HASHSIZE) {
1715
0
                ctxPtr->lowLimit = ctxPtr->dictLimit;
1716
0
                ctxPtr->dictStart = ctxPtr->prefixStart;
1717
0
    }   }   }
1718
1719
0
    return LZ4HC_compress_generic (ctxPtr, src, dst, srcSizePtr, dstCapacity, ctxPtr->compressionLevel, limit);
1720
0
}
1721
1722
int LZ4_compress_HC_continue (LZ4_streamHC_t* LZ4_streamHCPtr, const char* src, char* dst, int srcSize, int dstCapacity)
1723
0
{
1724
0
    DEBUGLOG(5, "LZ4_compress_HC_continue");
1725
0
    if (dstCapacity < LZ4_compressBound(srcSize))
1726
0
        return LZ4_compressHC_continue_generic (LZ4_streamHCPtr, src, dst, &srcSize, dstCapacity, limitedOutput);
1727
0
    else
1728
0
        return LZ4_compressHC_continue_generic (LZ4_streamHCPtr, src, dst, &srcSize, dstCapacity, notLimited);
1729
0
}
1730
1731
int LZ4_compress_HC_continue_destSize (LZ4_streamHC_t* LZ4_streamHCPtr, const char* src, char* dst, int* srcSizePtr, int targetDestSize)
1732
0
{
1733
0
    return LZ4_compressHC_continue_generic(LZ4_streamHCPtr, src, dst, srcSizePtr, targetDestSize, fillOutput);
1734
0
}
1735
1736
1737
/* LZ4_saveDictHC :
1738
 * save history content
1739
 * into a user-provided buffer
1740
 * which is then used to continue compression
1741
 */
1742
int LZ4_saveDictHC (LZ4_streamHC_t* LZ4_streamHCPtr, char* safeBuffer, int dictSize)
1743
0
{
1744
0
    LZ4HC_CCtx_internal* const streamPtr = &LZ4_streamHCPtr->internal_donotuse;
1745
0
    int const prefixSize = (int)(streamPtr->end - streamPtr->prefixStart);
1746
0
    DEBUGLOG(5, "LZ4_saveDictHC(%p, %p, %d)", LZ4_streamHCPtr, safeBuffer, dictSize);
1747
0
    assert(prefixSize >= 0);
1748
0
    if (dictSize > 64 KB) dictSize = 64 KB;
1749
0
    if (dictSize < 4) dictSize = 0;
1750
0
    if (dictSize > prefixSize) dictSize = prefixSize;
1751
0
    if (safeBuffer == NULL) assert(dictSize == 0);
1752
0
    if (dictSize > 0)
1753
0
        LZ4_memmove(safeBuffer, streamPtr->end - dictSize, (size_t)dictSize);
1754
0
    {   U32 const endIndex = (U32)(streamPtr->end - streamPtr->prefixStart) + streamPtr->dictLimit;
1755
0
        streamPtr->end = (safeBuffer == NULL) ? NULL : (const BYTE*)safeBuffer + dictSize;
1756
0
        streamPtr->prefixStart = (const BYTE*)safeBuffer;
1757
0
        streamPtr->dictLimit = endIndex - (U32)dictSize;
1758
0
        streamPtr->lowLimit = endIndex - (U32)dictSize;
1759
0
        streamPtr->dictStart = streamPtr->prefixStart;
1760
0
        if (streamPtr->nextToUpdate < streamPtr->dictLimit)
1761
0
            streamPtr->nextToUpdate = streamPtr->dictLimit;
1762
0
    }
1763
0
    return dictSize;
1764
0
}
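
For block-chained streaming, the intended round trip is: compress each block with LZ4_compress_HC_continue(), then call LZ4_saveDictHC() to copy the last <=64 KB of history into a stable buffer before the input buffer gets reused. A simplified sketch of that loop; read_block()/write_block() are hypothetical application I/O helpers and BLOCK_SIZE is an arbitrary choice:

#include "lz4hc.h"   /* pulls in lz4.h for LZ4_COMPRESSBOUND() */

#define BLOCK_SIZE (64 * 1024)   /* arbitrary block size */

/* read_block()/write_block() stand in for the application's own I/O */
extern int  read_block(char* buf, int maxSize);
extern void write_block(const char* buf, int size);

void stream_compress(void)
{
    static char inBuf[BLOCK_SIZE];
    static char dict[64 * 1024];                        /* history kept across blocks */
    static char outBuf[LZ4_COMPRESSBOUND(BLOCK_SIZE)];

    LZ4_streamHC_t* const stream = LZ4_createStreamHC();
    if (stream == NULL) return;

    for (;;) {
        int const inSize = read_block(inBuf, BLOCK_SIZE);
        if (inSize <= 0) break;

        {   int const cSize = LZ4_compress_HC_continue(stream, inBuf, outBuf,
                                                       inSize, (int)sizeof(outBuf));
            if (cSize <= 0) break;                      /* 0 signals an error */
            write_block(outBuf, cSize);
        }

        /* copy the last <=64 KB of history into 'dict' so the next block can
         * still reference it after inBuf gets overwritten */
        LZ4_saveDictHC(stream, dict, (int)sizeof(dict));
    }
    LZ4_freeStreamHC(stream);
}
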
1765
1766
1767
/* ================================================
1768
 *  LZ4 Optimal parser (levels [LZ4HC_CLEVEL_OPT_MIN - LZ4HC_CLEVEL_MAX])
1769
 * ===============================================*/
1770
typedef struct {
1771
    int price;
1772
    int off;
1773
    int mlen;
1774
    int litlen;
1775
} LZ4HC_optimal_t;
1776
1777
/* price in bytes */
1778
LZ4_FORCE_INLINE int LZ4HC_literalsPrice(int const litlen)
1779
0
{
1780
0
    int price = litlen;
1781
0
    assert(litlen >= 0);
1782
0
    if (litlen >= (int)RUN_MASK)
1783
0
        price += 1 + ((litlen-(int)RUN_MASK) / 255);
1784
0
    return price;
1785
0
}
1786
1787
/* requires mlen >= MINMATCH */
1788
LZ4_FORCE_INLINE int LZ4HC_sequencePrice(int litlen, int mlen)
1789
0
{
1790
0
    int price = 1 + 2 ; /* token + 16-bit offset */
1791
0
    assert(litlen >= 0);
1792
0
    assert(mlen >= MINMATCH);
1793
1794
0
    price += LZ4HC_literalsPrice(litlen);
1795
1796
0
    if (mlen >= (int)(ML_MASK+MINMATCH))
1797
0
        price += 1 + ((mlen-(int)(ML_MASK+MINMATCH)) / 255);
1798
1799
0
    return price;
1800
0
}
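
To make the cost model concrete, assume the usual LZ4 block-format constants RUN_MASK = ML_MASK = 15 and MINMATCH = 4 (they are defined in lz4.c/lz4.h, so treat them as an assumption here): 20 literals followed by a 70-byte match cost 1 token + 2 offset bytes + 20 literal bytes + 1 extra literal-length byte (20 >= 15) + 1 extra match-length byte (70 >= 19) = 25 bytes. A tiny standalone re-derivation of that arithmetic, mirroring the two functions above:

#include <assert.h>
#include <stdio.h>

/* Stand-alone re-derivation of LZ4HC_literalsPrice()/LZ4HC_sequencePrice(),
 * assuming the usual LZ4 block-format constants. */
#define RUN_MASK  15   /* assumption : (1 << 4) - 1, as in lz4.c */
#define ML_MASK   15
#define MINMATCH   4

static int literalsPrice(int litlen)
{
    int price = litlen;
    if (litlen >= RUN_MASK) price += 1 + (litlen - RUN_MASK) / 255;
    return price;
}

static int sequencePrice(int litlen, int mlen)
{
    int price = 1 + 2;                 /* token + 16-bit offset */
    price += literalsPrice(litlen);
    if (mlen >= ML_MASK + MINMATCH) price += 1 + (mlen - (ML_MASK + MINMATCH)) / 255;
    return price;
}

int main(void)
{
    assert(literalsPrice(20) == 21);       /* 20 literal bytes + 1 run-length byte */
    assert(sequencePrice(20, 70) == 25);   /* token + offset + 21 + 1 */
    printf("20 literals + 70-byte match -> %d bytes\n", sequencePrice(20, 70));
    return 0;
}
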
1801
1802
LZ4_FORCE_INLINE LZ4HC_match_t
1803
LZ4HC_FindLongerMatch(LZ4HC_CCtx_internal* const ctx,
1804
                      const BYTE* ip, const BYTE* const iHighLimit,
1805
                      int minLen, int nbSearches,
1806
                      const dictCtx_directive dict,
1807
                      const HCfavor_e favorDecSpeed)
1808
0
{
1809
0
    LZ4HC_match_t const match0 = { 0 , 0, 0 };
1810
    /* note : LZ4HC_InsertAndGetWiderMatch() is able to modify the starting position of a match (*startpos),
1811
     * but this won't be the case here, as we define iLowLimit==ip,
1812
     * so LZ4HC_InsertAndGetWiderMatch() won't be allowed to search past ip */
1813
0
    LZ4HC_match_t md = LZ4HC_InsertAndGetWiderMatch(ctx, ip, ip, iHighLimit, minLen, nbSearches, 1 /*patternAnalysis*/, 1 /*chainSwap*/, dict, favorDecSpeed);
1814
0
    assert(md.back == 0);
1815
0
    if (md.len <= minLen) return match0;
1816
0
    if (favorDecSpeed) {
1817
0
        if ((md.len>18) & (md.len<=36)) md.len=18;   /* favor dec.speed (shortcut) */
1818
0
    }
1819
0
    return md;
1820
0
}
1821
1822
1823
static int LZ4HC_compress_optimal ( LZ4HC_CCtx_internal* ctx,
1824
                                    const char* const source,
1825
                                    char* dst,
1826
                                    int* srcSizePtr,
1827
                                    int dstCapacity,
1828
                                    int const nbSearches,
1829
                                    size_t sufficient_len,
1830
                                    const limitedOutput_directive limit,
1831
                                    int const fullUpdate,
1832
                                    const dictCtx_directive dict,
1833
                                    const HCfavor_e favorDecSpeed)
1834
0
{
1835
0
    int retval = 0;
1836
0
#define TRAILING_LITERALS 3
1837
0
#if defined(LZ4HC_HEAPMODE) && LZ4HC_HEAPMODE==1
1838
0
    LZ4HC_optimal_t* const opt = (LZ4HC_optimal_t*)ALLOC(sizeof(LZ4HC_optimal_t) * (LZ4_OPT_NUM + TRAILING_LITERALS));
1839
#else
1840
    LZ4HC_optimal_t opt[LZ4_OPT_NUM + TRAILING_LITERALS];   /* ~64 KB, which is a bit large for stack... */
1841
#endif
1842
1843
0
    const BYTE* ip = (const BYTE*) source;
1844
0
    const BYTE* anchor = ip;
1845
0
    const BYTE* const iend = ip + *srcSizePtr;
1846
0
    const BYTE* const mflimit = iend - MFLIMIT;
1847
0
    const BYTE* const matchlimit = iend - LASTLITERALS;
1848
0
    BYTE* op = (BYTE*) dst;
1849
0
    BYTE* opSaved = (BYTE*) dst;
1850
0
    BYTE* oend = op + dstCapacity;
1851
0
    int ovml = MINMATCH;  /* overflow - last sequence */
1852
0
    int ovoff = 0;
1853
1854
    /* init */
1855
0
#if defined(LZ4HC_HEAPMODE) && LZ4HC_HEAPMODE==1
1856
0
    if (opt == NULL) goto _return_label;
1857
0
#endif
1858
0
    DEBUGLOG(5, "LZ4HC_compress_optimal(dst=%p, dstCapa=%u)", dst, (unsigned)dstCapacity);
1859
0
    *srcSizePtr = 0;
1860
0
    if (limit == fillOutput) oend -= LASTLITERALS;   /* Hack to support LZ4 format restriction */
1861
0
    if (sufficient_len >= LZ4_OPT_NUM) sufficient_len = LZ4_OPT_NUM-1;
1862
1863
    /* Main Loop */
1864
0
    while (ip <= mflimit) {
1865
0
         int const llen = (int)(ip - anchor);
1866
0
         int best_mlen, best_off;
1867
0
         int cur, last_match_pos = 0;
1868
1869
0
         LZ4HC_match_t const firstMatch = LZ4HC_FindLongerMatch(ctx, ip, matchlimit, MINMATCH-1, nbSearches, dict, favorDecSpeed);
1870
0
         if (firstMatch.len==0) { ip++; continue; }
1871
1872
0
         if ((size_t)firstMatch.len > sufficient_len) {
1873
             /* good enough solution : immediate encoding */
1874
0
             int const firstML = firstMatch.len;
1875
0
             opSaved = op;
1876
0
             if ( LZ4HC_encodeSequence(UPDATABLE(ip, op, anchor), firstML, firstMatch.off, limit, oend) ) {  /* updates ip, op and anchor */
1877
0
                 ovml = firstML;
1878
0
                 ovoff = firstMatch.off;
1879
0
                 goto _dest_overflow;
1880
0
             }
1881
0
             continue;
1882
0
         }
1883
1884
         /* set prices for first positions (literals) */
1885
0
         {   int rPos;
1886
0
             for (rPos = 0 ; rPos < MINMATCH ; rPos++) {
1887
0
                 int const cost = LZ4HC_literalsPrice(llen + rPos);
1888
0
                 opt[rPos].mlen = 1;
1889
0
                 opt[rPos].off = 0;
1890
0
                 opt[rPos].litlen = llen + rPos;
1891
0
                 opt[rPos].price = cost;
1892
0
                 DEBUGLOG(7, "rPos:%3i => price:%3i (litlen=%i) -- initial setup",
1893
0
                             rPos, cost, opt[rPos].litlen);
1894
0
         }   }
1895
         /* set prices using initial match */
1896
0
         {   int const matchML = firstMatch.len;   /* necessarily < sufficient_len < LZ4_OPT_NUM */
1897
0
             int const offset = firstMatch.off;
1898
0
             int mlen;
1899
0
             assert(matchML < LZ4_OPT_NUM);
1900
0
             for (mlen = MINMATCH ; mlen <= matchML ; mlen++) {
1901
0
                 int const cost = LZ4HC_sequencePrice(llen, mlen);
1902
0
                 opt[mlen].mlen = mlen;
1903
0
                 opt[mlen].off = offset;
1904
0
                 opt[mlen].litlen = llen;
1905
0
                 opt[mlen].price = cost;
1906
0
                 DEBUGLOG(7, "rPos:%3i => price:%3i (matchlen=%i) -- initial setup",
1907
0
                             mlen, cost, mlen);
1908
0
         }   }
1909
0
         last_match_pos = firstMatch.len;
1910
0
         {   int addLit;
1911
0
             for (addLit = 1; addLit <= TRAILING_LITERALS; addLit ++) {
1912
0
                 opt[last_match_pos+addLit].mlen = 1; /* literal */
1913
0
                 opt[last_match_pos+addLit].off = 0;
1914
0
                 opt[last_match_pos+addLit].litlen = addLit;
1915
0
                 opt[last_match_pos+addLit].price = opt[last_match_pos].price + LZ4HC_literalsPrice(addLit);
1916
0
                 DEBUGLOG(7, "rPos:%3i => price:%3i (litlen=%i) -- initial setup",
1917
0
                             last_match_pos+addLit, opt[last_match_pos+addLit].price, addLit);
1918
0
         }   }
1919
1920
         /* check further positions */
1921
0
         for (cur = 1; cur < last_match_pos; cur++) {
1922
0
             const BYTE* const curPtr = ip + cur;
1923
0
             LZ4HC_match_t newMatch;
1924
1925
0
             if (curPtr > mflimit) break;
1926
0
             DEBUGLOG(7, "rPos:%u[%u] vs [%u]%u",
1927
0
                     cur, opt[cur].price, opt[cur+1].price, cur+1);
1928
0
             if (fullUpdate) {
1929
                 /* not useful to search here if next position has same (or lower) cost */
1930
0
                 if ( (opt[cur+1].price <= opt[cur].price)
1931
                   /* in some cases, next position has same cost, but cost rises sharply after, so a small match would still be beneficial */
1932
0
                   && (opt[cur+MINMATCH].price < opt[cur].price + 3/*min seq price*/) )
1933
0
                     continue;
1934
0
             } else {
1935
                 /* not useful to search here if next position has same (or lower) cost */
1936
0
                 if (opt[cur+1].price <= opt[cur].price) continue;
1937
0
             }
1938
1939
0
             DEBUGLOG(7, "search at rPos:%u", cur);
1940
0
             if (fullUpdate)
1941
0
                 newMatch = LZ4HC_FindLongerMatch(ctx, curPtr, matchlimit, MINMATCH-1, nbSearches, dict, favorDecSpeed);
1942
0
             else
1943
                 /* only consider candidates long enough to reach beyond the current last_match_pos; slightly faster, but misses a few bytes */
1944
0
                 newMatch = LZ4HC_FindLongerMatch(ctx, curPtr, matchlimit, last_match_pos - cur, nbSearches, dict, favorDecSpeed);
1945
0
             if (!newMatch.len) continue;
1946
1947
0
             if ( ((size_t)newMatch.len > sufficient_len)
1948
0
               || (newMatch.len + cur >= LZ4_OPT_NUM) ) {
1949
                 /* immediate encoding */
1950
0
                 best_mlen = newMatch.len;
1951
0
                 best_off = newMatch.off;
1952
0
                 last_match_pos = cur + 1;
1953
0
                 goto encode;
1954
0
             }
1955
1956
             /* before match : set price with literals at beginning */
1957
0
             {   int const baseLitlen = opt[cur].litlen;
1958
0
                 int litlen;
1959
0
                 for (litlen = 1; litlen < MINMATCH; litlen++) {
1960
0
                     int const price = opt[cur].price - LZ4HC_literalsPrice(baseLitlen) + LZ4HC_literalsPrice(baseLitlen+litlen);
1961
0
                     int const pos = cur + litlen;
1962
0
                     if (price < opt[pos].price) {
1963
0
                         opt[pos].mlen = 1; /* literal */
1964
0
                         opt[pos].off = 0;
1965
0
                         opt[pos].litlen = baseLitlen+litlen;
1966
0
                         opt[pos].price = price;
1967
0
                         DEBUGLOG(7, "rPos:%3i => price:%3i (litlen=%i)",
1968
0
                                     pos, price, opt[pos].litlen);
1969
0
             }   }   }
1970
1971
             /* set prices using match at position = cur */
1972
0
             {   int const matchML = newMatch.len;
1973
0
                 int ml = MINMATCH;
1974
1975
0
                 assert(cur + newMatch.len < LZ4_OPT_NUM);
1976
0
                 for ( ; ml <= matchML ; ml++) {
1977
0
                     int const pos = cur + ml;
1978
0
                     int const offset = newMatch.off;
1979
0
                     int price;
1980
0
                     int ll;
1981
0
                     DEBUGLOG(7, "testing price rPos %i (last_match_pos=%i)",
1982
0
                                 pos, last_match_pos);
1983
0
                     if (opt[cur].mlen == 1) {
1984
0
                         ll = opt[cur].litlen;
1985
0
                         price = ((cur > ll) ? opt[cur - ll].price : 0)
1986
0
                               + LZ4HC_sequencePrice(ll, ml);
1987
0
                     } else {
1988
0
                         ll = 0;
1989
0
                         price = opt[cur].price + LZ4HC_sequencePrice(0, ml);
1990
0
                     }
1991
1992
0
                    assert((U32)favorDecSpeed <= 1);
1993
0
                     if (pos > last_match_pos+TRAILING_LITERALS
1994
0
                      || price <= opt[pos].price - (int)favorDecSpeed) {
1995
0
                         DEBUGLOG(7, "rPos:%3i => price:%3i (matchlen=%i)",
1996
0
                                     pos, price, ml);
1997
0
                         assert(pos < LZ4_OPT_NUM);
1998
0
                         if ( (ml == matchML)  /* last pos of last match */
1999
0
                           && (last_match_pos < pos) )
2000
0
                             last_match_pos = pos;
2001
0
                         opt[pos].mlen = ml;
2002
0
                         opt[pos].off = offset;
2003
0
                         opt[pos].litlen = ll;
2004
0
                         opt[pos].price = price;
2005
0
             }   }   }
2006
             /* complete following positions with literals */
2007
0
             {   int addLit;
2008
0
                 for (addLit = 1; addLit <= TRAILING_LITERALS; addLit ++) {
2009
0
                     opt[last_match_pos+addLit].mlen = 1; /* literal */
2010
0
                     opt[last_match_pos+addLit].off = 0;
2011
0
                     opt[last_match_pos+addLit].litlen = addLit;
2012
0
                     opt[last_match_pos+addLit].price = opt[last_match_pos].price + LZ4HC_literalsPrice(addLit);
2013
0
                     DEBUGLOG(7, "rPos:%3i => price:%3i (litlen=%i)", last_match_pos+addLit, opt[last_match_pos+addLit].price, addLit);
2014
0
             }   }
2015
0
         }  /* for (cur = 1; cur < last_match_pos; cur++) */
2016
2017
0
         assert(last_match_pos < LZ4_OPT_NUM + TRAILING_LITERALS);
2018
0
         best_mlen = opt[last_match_pos].mlen;
2019
0
         best_off = opt[last_match_pos].off;
2020
0
         cur = last_match_pos - best_mlen;
2021
2022
0
encode: /* cur, last_match_pos, best_mlen, best_off must be set */
2023
0
         assert(cur < LZ4_OPT_NUM);
2024
0
         assert(last_match_pos >= 1);  /* == 1 when only one candidate */
2025
0
         DEBUGLOG(6, "reverse traversal, looking for shortest path (last_match_pos=%i)", last_match_pos);
2026
0
         {   int candidate_pos = cur;
2027
0
             int selected_matchLength = best_mlen;
2028
0
             int selected_offset = best_off;
2029
0
             while (1) {  /* from end to beginning */
2030
0
                 int const next_matchLength = opt[candidate_pos].mlen;  /* can be 1, means literal */
2031
0
                 int const next_offset = opt[candidate_pos].off;
2032
0
                 DEBUGLOG(7, "pos %i: sequence length %i", candidate_pos, selected_matchLength);
2033
0
                 opt[candidate_pos].mlen = selected_matchLength;
2034
0
                 opt[candidate_pos].off = selected_offset;
2035
0
                 selected_matchLength = next_matchLength;
2036
0
                 selected_offset = next_offset;
2037
0
                 if (next_matchLength > candidate_pos) break; /* last match elected, first match to encode */
2038
0
                 assert(next_matchLength > 0);  /* can be 1, means literal */
2039
0
                 candidate_pos -= next_matchLength;
2040
0
         }   }
2041
2042
         /* encode all recorded sequences in order */
2043
0
         {   int rPos = 0;  /* relative position (to ip) */
2044
0
             while (rPos < last_match_pos) {
2045
0
                 int const ml = opt[rPos].mlen;
2046
0
                 int const offset = opt[rPos].off;
2047
0
                 if (ml == 1) { ip++; rPos++; continue; }  /* literal; note: can end up with several literals, in which case, skip them */
2048
0
                 rPos += ml;
2049
0
                 assert(ml >= MINMATCH);
2050
0
                 assert((offset >= 1) && (offset <= LZ4_DISTANCE_MAX));
2051
0
                 opSaved = op;
2052
0
                 if ( LZ4HC_encodeSequence(UPDATABLE(ip, op, anchor), ml, offset, limit, oend) ) {  /* updates ip, op and anchor */
2053
0
                     ovml = ml;
2054
0
                     ovoff = offset;
2055
0
                     goto _dest_overflow;
2056
0
         }   }   }
2057
0
     }  /* while (ip <= mflimit) */
2058
2059
0
_last_literals:
2060
     /* Encode Last Literals */
2061
0
     {   size_t lastRunSize = (size_t)(iend - anchor);  /* literals */
2062
0
         size_t llAdd = (lastRunSize + 255 - RUN_MASK) / 255;
2063
0
         size_t const totalSize = 1 + llAdd + lastRunSize;
2064
0
         if (limit == fillOutput) oend += LASTLITERALS;  /* restore correct value */
2065
0
         if (limit && (op + totalSize > oend)) {
2066
0
             if (limit == limitedOutput) { /* Check output limit */
2067
0
                retval = 0;
2068
0
                goto _return_label;
2069
0
             }
2070
             /* adapt lastRunSize to fill 'dst' */
2071
0
             lastRunSize  = (size_t)(oend - op) - 1 /*token*/;
2072
0
             llAdd = (lastRunSize + 256 - RUN_MASK) / 256;
2073
0
             lastRunSize -= llAdd;
2074
0
         }
2075
0
         DEBUGLOG(6, "Final literal run : %i literals", (int)lastRunSize);
2076
0
         ip = anchor + lastRunSize; /* can be != iend if limit==fillOutput */
2077
2078
0
         if (lastRunSize >= RUN_MASK) {
2079
0
             size_t accumulator = lastRunSize - RUN_MASK;
2080
0
             *op++ = (RUN_MASK << ML_BITS);
2081
0
             for(; accumulator >= 255 ; accumulator -= 255) *op++ = 255;
2082
0
             *op++ = (BYTE) accumulator;
2083
0
         } else {
2084
0
             *op++ = (BYTE)(lastRunSize << ML_BITS);
2085
0
         }
2086
0
         LZ4_memcpy(op, anchor, lastRunSize);
2087
0
         op += lastRunSize;
2088
0
     }
2089
2090
     /* End */
2091
0
     *srcSizePtr = (int) (((const char*)ip) - source);
2092
0
     retval = (int) ((char*)op-dst);
2093
0
     goto _return_label;
2094
2095
0
_dest_overflow:
2096
0
if (limit == fillOutput) {
2097
     /* Assumption : ip, anchor, ovml and ovoff must be set correctly */
2098
0
     size_t const ll = (size_t)(ip - anchor);
2099
0
     size_t const ll_addbytes = (ll + 240) / 255;
2100
0
     size_t const ll_totalCost = 1 + ll_addbytes + ll;
2101
0
     BYTE* const maxLitPos = oend - 3; /* 2 for offset, 1 for token */
2102
0
     DEBUGLOG(6, "Last sequence overflowing (only %i bytes remaining)", (int)(oend-1-opSaved));
2103
0
     op = opSaved;  /* restore correct out pointer */
2104
0
     if (op + ll_totalCost <= maxLitPos) {
2105
         /* ll validated; now adjust match length */
2106
0
         size_t const bytesLeftForMl = (size_t)(maxLitPos - (op+ll_totalCost));
2107
0
         size_t const maxMlSize = MINMATCH + (ML_MASK-1) + (bytesLeftForMl * 255);
2108
0
         assert(maxMlSize < INT_MAX); assert(ovml >= 0);
2109
0
         if ((size_t)ovml > maxMlSize) ovml = (int)maxMlSize;
2110
0
         if ((oend + LASTLITERALS) - (op + ll_totalCost + 2) - 1 + ovml >= MFLIMIT) {
2111
0
             DEBUGLOG(6, "Space to end : %i + ml (%i)", (int)((oend + LASTLITERALS) - (op + ll_totalCost + 2) - 1), ovml);
2112
0
             DEBUGLOG(6, "Before : ip = %p, anchor = %p", ip, anchor);
2113
0
             LZ4HC_encodeSequence(UPDATABLE(ip, op, anchor), ovml, ovoff, notLimited, oend);
2114
0
             DEBUGLOG(6, "After : ip = %p, anchor = %p", ip, anchor);
2115
0
     }   }
2116
0
     goto _last_literals;
2117
0
}
2118
0
_return_label:
2119
0
#if defined(LZ4HC_HEAPMODE) && LZ4HC_HEAPMODE==1
2120
0
     if (opt) FREEMEM(opt);
2121
0
#endif
2122
0
     return retval;
2123
0
}
2124
2125
2126
/***************************************************
2127
*  Deprecated Functions
2128
***************************************************/
2129
2130
/* These functions currently generate deprecation warnings */
2131
2132
/* Wrappers for deprecated compression functions */
2133
0
int LZ4_compressHC(const char* src, char* dst, int srcSize) { return LZ4_compress_HC (src, dst, srcSize, LZ4_compressBound(srcSize), 0); }
2134
0
int LZ4_compressHC_limitedOutput(const char* src, char* dst, int srcSize, int maxDstSize) { return LZ4_compress_HC(src, dst, srcSize, maxDstSize, 0); }
2135
0
int LZ4_compressHC2(const char* src, char* dst, int srcSize, int cLevel) { return LZ4_compress_HC (src, dst, srcSize, LZ4_compressBound(srcSize), cLevel); }
2136
0
int LZ4_compressHC2_limitedOutput(const char* src, char* dst, int srcSize, int maxDstSize, int cLevel) { return LZ4_compress_HC(src, dst, srcSize, maxDstSize, cLevel); }
2137
0
int LZ4_compressHC_withStateHC (void* state, const char* src, char* dst, int srcSize) { return LZ4_compress_HC_extStateHC (state, src, dst, srcSize, LZ4_compressBound(srcSize), 0); }
2138
0
int LZ4_compressHC_limitedOutput_withStateHC (void* state, const char* src, char* dst, int srcSize, int maxDstSize) { return LZ4_compress_HC_extStateHC (state, src, dst, srcSize, maxDstSize, 0); }
2139
0
int LZ4_compressHC2_withStateHC (void* state, const char* src, char* dst, int srcSize, int cLevel) { return LZ4_compress_HC_extStateHC(state, src, dst, srcSize, LZ4_compressBound(srcSize), cLevel); }
2140
0
int LZ4_compressHC2_limitedOutput_withStateHC (void* state, const char* src, char* dst, int srcSize, int maxDstSize, int cLevel) { return LZ4_compress_HC_extStateHC(state, src, dst, srcSize, maxDstSize, cLevel); }
2141
0
int LZ4_compressHC_continue (LZ4_streamHC_t* ctx, const char* src, char* dst, int srcSize) { return LZ4_compress_HC_continue (ctx, src, dst, srcSize, LZ4_compressBound(srcSize)); }
2142
0
int LZ4_compressHC_limitedOutput_continue (LZ4_streamHC_t* ctx, const char* src, char* dst, int srcSize, int maxDstSize) { return LZ4_compress_HC_continue (ctx, src, dst, srcSize, maxDstSize); }
2143
2144
2145
/* Deprecated streaming functions */
2146
0
int LZ4_sizeofStreamStateHC(void) { return sizeof(LZ4_streamHC_t); }
2147
2148
/* state is presumed correctly sized, aka >= sizeof(LZ4_streamHC_t)
2149
 * @return : 0 on success, !=0 if error */
2150
int LZ4_resetStreamStateHC(void* state, char* inputBuffer)
2151
0
{
2152
0
    LZ4_streamHC_t* const hc4 = LZ4_initStreamHC(state, sizeof(*hc4));
2153
0
    if (hc4 == NULL) return 1;   /* init failed */
2154
0
    LZ4HC_init_internal (&hc4->internal_donotuse, (const BYTE*)inputBuffer);
2155
0
    return 0;
2156
0
}
2157
2158
#if !defined(LZ4_STATIC_LINKING_ONLY_DISABLE_MEMORY_ALLOCATION)
2159
void* LZ4_createHC (const char* inputBuffer)
2160
0
{
2161
0
    LZ4_streamHC_t* const hc4 = LZ4_createStreamHC();
2162
0
    if (hc4 == NULL) return NULL;   /* not enough memory */
2163
0
    LZ4HC_init_internal (&hc4->internal_donotuse, (const BYTE*)inputBuffer);
2164
0
    return hc4;
2165
0
}
2166
2167
int LZ4_freeHC (void* LZ4HC_Data)
2168
0
{
2169
0
    if (!LZ4HC_Data) return 0;  /* support free on NULL */
2170
0
    FREEMEM(LZ4HC_Data);
2171
0
    return 0;
2172
0
}
2173
#endif
2174
2175
int LZ4_compressHC2_continue (void* LZ4HC_Data, const char* src, char* dst, int srcSize, int cLevel)
2176
0
{
2177
0
    return LZ4HC_compress_generic (&((LZ4_streamHC_t*)LZ4HC_Data)->internal_donotuse, src, dst, &srcSize, 0, cLevel, notLimited);
2178
0
}
2179
2180
int LZ4_compressHC2_limitedOutput_continue (void* LZ4HC_Data, const char* src, char* dst, int srcSize, int dstCapacity, int cLevel)
2181
0
{
2182
0
    return LZ4HC_compress_generic (&((LZ4_streamHC_t*)LZ4HC_Data)->internal_donotuse, src, dst, &srcSize, dstCapacity, cLevel, limitedOutput);
2183
0
}
2184
2185
char* LZ4_slideInputBufferHC(void* LZ4HC_Data)
2186
0
{
2187
0
    LZ4HC_CCtx_internal* const s = &((LZ4_streamHC_t*)LZ4HC_Data)->internal_donotuse;
2188
0
    const BYTE* const bufferStart = s->prefixStart - s->dictLimit + s->lowLimit;
2189
0
    LZ4_resetStreamHC_fast((LZ4_streamHC_t*)LZ4HC_Data, s->compressionLevel);
2190
    /* ugly conversion trick, required to evade (const char*) -> (char*) cast-qual warning :( */
2191
0
    return (char*)(uptrval)bufferStart;
2192
0
}