45 # define LZ4_HEAPMODE 0 52 #define ACCELERATION_DEFAULT 1 71 #ifndef LZ4_FORCE_MEMORY_ACCESS 72 # if defined(__GNUC__) && \ 73 ( defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_6J__) || defined(__ARM_ARCH_6K__) \ 74 || defined(__ARM_ARCH_6Z__) || defined(__ARM_ARCH_6ZK__) || defined(__ARM_ARCH_6T2__) ) 75 # define LZ4_FORCE_MEMORY_ACCESS 2 76 # elif (defined(__INTEL_COMPILER) && !defined(_WIN32)) || defined(__GNUC__) 77 # define LZ4_FORCE_MEMORY_ACCESS 1 85 #if defined(_MSC_VER) && defined(_WIN32_WCE) 86 # define LZ4_FORCE_SW_BITCOUNT 94 #define LZ4_STATIC_LINKING_ONLY 95 #define LZ4_DISABLE_DEPRECATE_WARNINGS 105 # pragma warning(disable : 4127) 106 # pragma warning(disable : 4293) 109 #ifndef LZ4_FORCE_INLINE 111 # define LZ4_FORCE_INLINE static __forceinline 113 # if defined (__cplusplus) || defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L 115 # define LZ4_FORCE_INLINE static inline __attribute__((always_inline)) 117 # define LZ4_FORCE_INLINE static inline 120 # define LZ4_FORCE_INLINE static 139 #if defined(__PPC64__) && defined(__LITTLE_ENDIAN__) && defined(__GNUC__) 140 # define LZ4_FORCE_O2_GCC_PPC64LE __attribute__((optimize("O2"))) 141 # define LZ4_FORCE_O2_INLINE_GCC_PPC64LE __attribute__((optimize("O2"))) LZ4_FORCE_INLINE 143 # define LZ4_FORCE_O2_GCC_PPC64LE 144 # define LZ4_FORCE_O2_INLINE_GCC_PPC64LE static 147 #if (defined(__GNUC__) && (__GNUC__ >= 3)) || (defined(__INTEL_COMPILER) && (__INTEL_COMPILER >= 800)) || defined(__clang__) 148 # define expect(expr,value) (__builtin_expect ((expr),(value)) ) 150 # define expect(expr,value) (expr) 154 #define likely(expr) expect((expr) != 0, 1) 157 #define unlikely(expr) expect((expr) != 0, 0) 165 #define ALLOC(s) malloc(s) 166 #define ALLOC_AND_ZERO(s) calloc(1,s) 167 #define FREEMEM(p) free(p) 169 #define MEM_INIT(p,v,s) memset((p),(v),(s)) 175 #if defined(__cplusplus) || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) ) 177 typedef uint8_t
BYTE;
178 typedef uint16_t
U16;
179 typedef uint32_t
U32;
181 typedef uint64_t
U64;
185 typedef unsigned short U16;
188 typedef unsigned long long U64;
#if defined(__x86_64__)
/* Run-time endianness probe: a union lets us inspect the first byte of a
 * U32 holding the value 1 — that byte is 1 on little-endian hosts and 0 on
 * big-endian ones.  Optimizing compilers constant-fold the whole function,
 * so the "runtime" check costs nothing.
 * NOTE(review): the opening brace and the final `return one.c[0];` of this
 * function fall on lines elided from this view of the file — confirm against
 * the full source. */
static unsigned LZ4_isLittleEndian(void)
const union { U32 u; BYTE c[4]; } one = { 1 };
#if defined(LZ4_FORCE_MEMORY_ACCESS) && (LZ4_FORCE_MEMORY_ACCESS==2)
/* Memory-access strategy 2: unaligned loads/stores via plain pointer
 * dereference.  These casts are technically undefined behavior (alignment
 * and strict aliasing), which is why this path is only selected for targets
 * known to tolerate unaligned access (see the LZ4_FORCE_MEMORY_ACCESS
 * detection near the top of the file).  Do not "fix" the casts: the UB is a
 * deliberate, configuration-gated performance choice. */
static U16 LZ4_read16(const void* memPtr) { return *(const U16*) memPtr; }
static U32 LZ4_read32(const void* memPtr) { return *(const U32*) memPtr; }
static reg_t LZ4_read_ARCH(const void* memPtr) { return *(const reg_t*) memPtr; }

static void LZ4_write16(void* memPtr, U16 value) { *(U16*)memPtr = value; }
static void LZ4_write32(void* memPtr, U32 value) { *(U32*)memPtr = value; }
#elif defined(LZ4_FORCE_MEMORY_ACCESS) && (LZ4_FORCE_MEMORY_ACCESS==1)
/* Memory-access strategy 1: unaligned access through the `unalign` type.
 * NOTE(review): the `unalign` declaration itself is on lines elided from
 * this view — presumably a packed union of u16/u32/uArch members (as in
 * upstream lz4.c); verify against the full source.  Going through a packed
 * type lets the compiler emit whatever unaligned load/store sequence the
 * target requires, without the raw-cast aliasing violation of strategy 2. */
static U16 LZ4_read16(const void* ptr) { return ((const unalign*)ptr)->u16; }
static U32 LZ4_read32(const void* ptr) { return ((const unalign*)ptr)->u32; }
static reg_t LZ4_read_ARCH(const void* ptr) { return ((const unalign*)ptr)->uArch; }

static void LZ4_write16(void* memPtr, U16 value) { ((unalign*)memPtr)->u16 = value; }
static void LZ4_write32(void* memPtr, U32 value) { ((unalign*)memPtr)->u32 = value; }
233 static U16 LZ4_read16(
const void* memPtr)
235 U16 val; memcpy(&val, memPtr,
sizeof(val));
return val;
238 static U32 LZ4_read32(
const void* memPtr)
240 U32 val; memcpy(&val, memPtr,
sizeof(val));
return val;
243 static reg_t LZ4_read_ARCH(
const void* memPtr)
245 reg_t val; memcpy(&val, memPtr,
sizeof(val));
return val;
248 static void LZ4_write16(
void* memPtr,
U16 value)
250 memcpy(memPtr, &value,
sizeof(value));
253 static void LZ4_write32(
void* memPtr,
U32 value)
255 memcpy(memPtr, &value,
sizeof(value));
261 static U16 LZ4_readLE16(
const void* memPtr)
263 if (LZ4_isLittleEndian()) {
264 return LZ4_read16(memPtr);
266 const BYTE* p = (
const BYTE*)memPtr;
267 return (
U16)((
U16)p[0] + (p[1]<<8));
271 static void LZ4_writeLE16(
void* memPtr,
U16 value)
273 if (LZ4_isLittleEndian()) {
274 LZ4_write16(memPtr, value);
278 p[1] = (
BYTE)(value>>8);
287 const BYTE* s = (
const BYTE*)srcPtr;
290 do { memcpy(d,s,8); d+=8; s+=8; }
while (d<e);
299 #define WILDCOPYLENGTH 8 300 #define LASTLITERALS 5 301 #define MFLIMIT (WILDCOPYLENGTH+MINMATCH) 302 static const int LZ4_minLength = (
MFLIMIT+1);
309 #define MAX_DISTANCE ((1 << MAXD_LOG) - 1) 312 #define ML_MASK ((1U<<ML_BITS)-1) 313 #define RUN_BITS (8-ML_BITS) 314 #define RUN_MASK ((1U<<RUN_BITS)-1) 320 #if defined(LZ4_DEBUG) && (LZ4_DEBUG>=1) 324 # define assert(condition) ((void)0) 328 #define LZ4_STATIC_ASSERT(c) { enum { LZ4_static_assert = 1/(int)(!!(c)) }; } 330 #if defined(LZ4_DEBUG) && (LZ4_DEBUG>=2) 332 static int g_debuglog_enable = 1;
333 # define DEBUGLOG(l, ...) { \ 334 if ((g_debuglog_enable) && (l<=LZ4_DEBUG)) { \ 335 fprintf(stderr, __FILE__ ": "); \ 336 fprintf(stderr, __VA_ARGS__); \ 337 fprintf(stderr, " \n"); \ 340 # define DEBUGLOG(l, ...) {} 347 static unsigned LZ4_NbCommonBytes (
reg_t val)
349 if (LZ4_isLittleEndian()) {
350 if (
sizeof(val)==8) {
351 # if defined(_MSC_VER) && defined(_WIN64) && !defined(LZ4_FORCE_SW_BITCOUNT) 353 _BitScanForward64( &r, (
U64)val );
355 # elif (defined(__clang__) || (defined(__GNUC__) && (__GNUC__>=3))) && !defined(LZ4_FORCE_SW_BITCOUNT) 356 return (__builtin_ctzll((
U64)val) >> 3);
358 static const int DeBruijnBytePos[64] = { 0, 0, 0, 0, 0, 1, 1, 2,
359 0, 3, 1, 3, 1, 4, 2, 7,
360 0, 2, 3, 6, 1, 5, 3, 5,
361 1, 3, 4, 4, 2, 5, 6, 7,
362 7, 0, 1, 2, 3, 3, 4, 6,
363 2, 6, 5, 5, 3, 4, 5, 6,
364 7, 1, 2, 4, 6, 4, 4, 5,
365 7, 2, 6, 5, 7, 6, 7, 7 };
366 return DeBruijnBytePos[((
U64)((val & -(
long long)val) * 0x0218A392CDABBD3FULL)) >> 58];
369 # if defined(_MSC_VER) && !defined(LZ4_FORCE_SW_BITCOUNT) 371 _BitScanForward( &r, (
U32)val );
373 # elif (defined(__clang__) || (defined(__GNUC__) && (__GNUC__>=3))) && !defined(LZ4_FORCE_SW_BITCOUNT) 374 return (__builtin_ctz((
U32)val) >> 3);
376 static const int DeBruijnBytePos[32] = { 0, 0, 3, 0, 3, 1, 3, 0,
377 3, 2, 2, 1, 3, 2, 0, 1,
378 3, 3, 1, 2, 2, 2, 2, 0,
379 3, 1, 2, 0, 1, 0, 1, 1 };
380 return DeBruijnBytePos[((
U32)((val & -(
S32)val) * 0x077CB531U)) >> 27];
384 if (
sizeof(val)==8) {
385 # if defined(_MSC_VER) && defined(_WIN64) && !defined(LZ4_FORCE_SW_BITCOUNT) 387 _BitScanReverse64( &r, val );
388 return (
unsigned)(r>>3);
389 # elif (defined(__clang__) || (defined(__GNUC__) && (__GNUC__>=3))) && !defined(LZ4_FORCE_SW_BITCOUNT) 390 return (__builtin_clzll((
U64)val) >> 3);
392 static const U32 by32 =
sizeof(val)*4;
396 if (!(val>>by32)) { r=4; }
else { r=0; val>>=by32; }
397 if (!(val>>16)) { r+=2; val>>=8; }
else { val>>=24; }
402 # if defined(_MSC_VER) && !defined(LZ4_FORCE_SW_BITCOUNT) 404 _BitScanReverse( &r, (
unsigned long)val );
405 return (
unsigned)(r>>3);
406 # elif (defined(__clang__) || (defined(__GNUC__) && (__GNUC__>=3))) && !defined(LZ4_FORCE_SW_BITCOUNT) 407 return (__builtin_clz((
U32)val) >> 3);
410 if (!(val>>16)) { r=2; val>>=8; }
else { r=0; val>>=24; }
418 #define STEPSIZE sizeof(reg_t) 422 const BYTE*
const pStart = pIn;
425 reg_t const diff = LZ4_read_ARCH(pMatch) ^ LZ4_read_ARCH(pIn);
429 return LZ4_NbCommonBytes(diff);
433 reg_t const diff = LZ4_read_ARCH(pMatch) ^ LZ4_read_ARCH(pIn);
435 pIn += LZ4_NbCommonBytes(diff);
436 return (
unsigned)(pIn - pStart);
439 if ((
STEPSIZE==8) && (pIn<(pInLimit-3)) && (LZ4_read32(pMatch) == LZ4_read32(pIn))) { pIn+=4; pMatch+=4; }
440 if ((pIn<(pInLimit-1)) && (LZ4_read16(pMatch) == LZ4_read16(pIn))) { pIn+=2; pMatch+=2; }
441 if ((pIn<pInLimit) && (*pMatch == *pIn)) pIn++;
442 return (
unsigned)(pIn - pStart);
446 #ifndef LZ4_COMMONDEFS_ONLY 450 static const int LZ4_64Klimit = ((64
KB) + (
MFLIMIT-1));
451 static const U32 LZ4_skipTrigger = 6;
504 if (tableType ==
byU16)
512 static const U64 prime5bytes = 889523592379ULL;
513 static const U64 prime8bytes = 11400714785074694791ULL;
515 if (LZ4_isLittleEndian())
516 return (
U32)(((sequence << 24) * prime5bytes) >> (64 - hashLog));
518 return (
U32)(((sequence >> 24) * prime8bytes) >> (64 - hashLog));
523 if ((
sizeof(
reg_t)==8) && (tableType !=
byU16))
return LZ4_hash5(LZ4_read_ARCH(p), tableType);
524 return LZ4_hash4(LZ4_read32(p), tableType);
527 static void LZ4_putIndexOnHash(
U32 idx,
U32 h,
void* tableBase,
tableType_t const tableType)
534 case byU32: {
U32* hashTable = (
U32*) tableBase; hashTable[h] = idx;
return; }
535 case byU16: {
U16* hashTable = (
U16*) tableBase;
assert(idx < 65536); hashTable[h] = (
U16)idx;
return; }
539 static void LZ4_putPositionOnHash(
const BYTE* p,
U32 h,
546 case byPtr: {
const BYTE** hashTable = (
const BYTE**)tableBase; hashTable[h] = p;
return; }
547 case byU32: {
U32* hashTable = (
U32*) tableBase; hashTable[h] = (
U32)(p-srcBase);
return; }
548 case byU16: {
U16* hashTable = (
U16*) tableBase; hashTable[h] = (
U16)(p-srcBase);
return; }
555 LZ4_putPositionOnHash(p, h, tableBase, tableType, srcBase);
564 static U32 LZ4_getIndexOnHash(
U32 h,
const void* tableBase,
tableType_t tableType)
567 if (tableType ==
byU32) {
568 const U32*
const hashTable = (
const U32*) tableBase;
572 if (tableType ==
byU16) {
573 const U16*
const hashTable = (
const U16*) tableBase;
580 static const BYTE* LZ4_getPositionOnHash(
U32 h,
const void* tableBase,
tableType_t tableType,
const BYTE* srcBase)
582 if (tableType ==
byPtr) {
const BYTE*
const* hashTable = (
const BYTE*
const*) tableBase;
return hashTable[h]; }
583 if (tableType ==
byU32) {
const U32*
const hashTable = (
const U32*) tableBase;
return hashTable[h] + srcBase; }
584 {
const U16*
const hashTable = (
const U16*) tableBase;
return hashTable[h] + srcBase; }
592 return LZ4_getPositionOnHash(h, tableBase, tableType, srcBase);
607 || tableType ==
byPtr 608 || inputSize >= 4
KB)
610 DEBUGLOG(4,
"LZ4_prepareTable: Resetting table in %p", cctx);
615 DEBUGLOG(4,
"LZ4_prepareTable: Re-use hash table (no reset)");
624 DEBUGLOG(5,
"LZ4_prepareTable: adding 64KB to currentOffset");
647 const U32 acceleration)
649 const BYTE* ip = (
const BYTE*) source;
652 const BYTE* base = (
const BYTE*) source - startIndex;
653 const BYTE* lowLimit;
656 const BYTE*
const dictionary =
663 U32 const prefixIdxLimit = startIndex - dictSize;
664 const BYTE*
const dictEnd = dictionary + dictSize;
665 const BYTE* anchor = (
const BYTE*) source;
667 const BYTE*
const mflimitPlusOne = iend -
MFLIMIT + 1;
674 dictionary + dictSize - startIndex;
682 DEBUGLOG(5,
"LZ4_compress_generic: srcSize=%i, tableType=%u", inputSize, tableType);
684 if (outputLimited ==
fillOutput && maxOutputSize < 1)
return 0;
686 if ((tableType ==
byU16) && (inputSize>=LZ4_64Klimit))
return 0;
688 assert(acceleration >= 1);
690 lowLimit = (
const BYTE*)source - (dictDirective ==
withPrefix64k ? dictSize : 0);
704 if (inputSize<LZ4_minLength)
goto _last_literals;
716 if (tableType ==
byPtr) {
717 const BYTE* forwardIp = ip;
719 unsigned searchMatchNb = acceleration << LZ4_skipTrigger;
721 U32 const h = forwardH;
724 step = (searchMatchNb++ >> LZ4_skipTrigger);
726 if (
unlikely(forwardIp > mflimitPlusOne))
goto _last_literals;
727 assert(ip < mflimitPlusOne);
729 match = LZ4_getPositionOnHash(h, cctx->
hashTable, tableType, base);
731 LZ4_putPositionOnHash(ip, h, cctx->
hashTable, tableType, base);
734 || (LZ4_read32(match) != LZ4_read32(ip)) );
738 const BYTE* forwardIp = ip;
740 unsigned searchMatchNb = acceleration << LZ4_skipTrigger;
742 U32 const h = forwardH;
743 U32 const current = (
U32)(forwardIp - base);
744 U32 matchIndex = LZ4_getIndexOnHash(h, cctx->
hashTable, tableType);
745 assert(matchIndex <= current);
746 assert(forwardIp - base < (ptrdiff_t)(2
GB - 1));
749 step = (searchMatchNb++ >> LZ4_skipTrigger);
751 if (
unlikely(forwardIp > mflimitPlusOne))
goto _last_literals;
752 assert(ip < mflimitPlusOne);
755 if (matchIndex < startIndex) {
759 match = dictBase + matchIndex;
760 matchIndex += dictDelta;
761 lowLimit = dictionary;
763 match = base + matchIndex;
764 lowLimit = (
const BYTE*)source;
767 if (matchIndex < startIndex) {
768 DEBUGLOG(7,
"extDict candidate: matchIndex=%5u < startIndex=%5u", matchIndex, startIndex);
770 match = dictBase + matchIndex;
771 lowLimit = dictionary;
773 match = base + matchIndex;
774 lowLimit = (
const BYTE*)source;
777 match = base + matchIndex;
780 LZ4_putIndexOnHash(current, h, cctx->
hashTable, tableType);
782 if ((dictIssue ==
dictSmall) && (matchIndex < prefixIdxLimit))
continue;
783 assert(matchIndex < current);
787 if (LZ4_read32(match) == LZ4_read32(ip)) {
788 if (maybe_extMem) offset = current - matchIndex;
796 while (((ip>anchor) & (match > lowLimit)) && (
unlikely(ip[-1]==match[-1]))) { ip--; match--; }
799 {
unsigned const litLength = (unsigned)(ip - anchor);
802 (
unlikely(op + litLength + (2 + 1 + LASTLITERALS) + (litLength/255) > olimit)))
805 (
unlikely(op + (litLength+240)/255 + litLength + 2 + 1 + MFLIMIT -
MINMATCH > olimit))) {
812 for(; len >= 255 ; len-=255) *op++ = 255;
820 DEBUGLOG(6,
"seq.start:%i, literals=%u, match.start:%i",
821 (
int)(anchor-(
const BYTE*)source), litLength, (
int)(ip-(
const BYTE*)source));
834 (op + 2 + 1 + MFLIMIT -
MINMATCH > olimit)) {
842 DEBUGLOG(6,
" with offset=%u (ext if > %i)", offset, (
int)(ip - (
const BYTE*)source));
843 assert(offset <= MAX_DISTANCE && offset > 0);
844 LZ4_writeLE16(op, (
U16)offset); op+=2;
846 DEBUGLOG(6,
" with offset=%u (same segment)", (
U32)(ip - match));
848 LZ4_writeLE16(op, (
U16)(ip - match)); op+=2;
852 {
unsigned matchCode;
855 && (lowLimit==dictionary) ) {
856 const BYTE* limit = ip + (dictEnd-match);
858 if (limit > matchlimit) limit = matchlimit;
862 unsigned const more =
LZ4_count(limit, (
const BYTE*)source, matchlimit);
866 DEBUGLOG(6,
" with matchLength=%u starting in extDict", matchCode+
MINMATCH);
873 if ((outputLimited) &&
874 (
unlikely(op + (1 + LASTLITERALS) + (matchCode>>8) > olimit)) ) {
880 ip -= matchCode - newMatchCode;
881 matchCode = newMatchCode;
887 LZ4_write32(op, 0xFFFFFFFF);
888 while (matchCode >= 4*255) {
890 LZ4_write32(op, 0xFFFFFFFF);
893 op += matchCode / 255;
894 *op++ = (
BYTE)(matchCode % 255);
896 *token += (
BYTE)(matchCode);
902 if (ip >= mflimitPlusOne)
break;
908 if (tableType ==
byPtr) {
913 && (LZ4_read32(match) == LZ4_read32(ip)) )
914 { token=op++; *token=0;
goto _next_match; }
919 U32 const current = (
U32)(ip-base);
920 U32 matchIndex = LZ4_getIndexOnHash(h, cctx->
hashTable, tableType);
921 assert(matchIndex < current);
923 if (matchIndex < startIndex) {
926 match = dictBase + matchIndex;
927 lowLimit = dictionary;
928 matchIndex += dictDelta;
930 match = base + matchIndex;
931 lowLimit = (
const BYTE*)source;
934 if (matchIndex < startIndex) {
935 match = dictBase + matchIndex;
936 lowLimit = dictionary;
938 match = base + matchIndex;
939 lowLimit = (
const BYTE*)source;
942 match = base + matchIndex;
944 LZ4_putIndexOnHash(current, h, cctx->
hashTable, tableType);
945 assert(matchIndex < current);
946 if ( ((dictIssue==
dictSmall) ? (matchIndex >= prefixIdxLimit) : 1)
948 && (LZ4_read32(match) == LZ4_read32(ip)) ) {
951 if (maybe_extMem) offset = current - matchIndex;
952 DEBUGLOG(6,
"seq.start:%i, literals=%u, match.start:%i",
953 (
int)(anchor-(
const BYTE*)source), 0, (
int)(ip-(
const BYTE*)source));
965 {
size_t lastRun = (size_t)(iend - anchor);
966 if ( (outputLimited) &&
967 (op + lastRun + 1 + ((lastRun+255-
RUN_MASK)/255) > olimit)) {
970 lastRun = (olimit-op) - 1;
971 lastRun -= (lastRun+240)/255;
977 size_t accumulator = lastRun -
RUN_MASK;
979 for(; accumulator >= 255 ; accumulator-=255) *op++ = 255;
980 *op++ = (
BYTE) accumulator;
984 memcpy(op, anchor, lastRun);
985 ip = anchor + lastRun;
990 *inputConsumed = (int) (((
const char*)ip)-
source);
992 DEBUGLOG(5,
"LZ4_compress_generic: compressed %i bytes into %i bytes", inputSize, (
int)(((
char*)op) - dest));
993 return (
int)(((
char*)op) -
dest);
1003 if (inputSize < LZ4_64Klimit) {
1004 return LZ4_compress_generic(ctx, source, dest, inputSize,
NULL, 0,
notLimited,
byU16,
noDict,
noDictIssue, acceleration);
1007 return LZ4_compress_generic(ctx, source, dest, inputSize,
NULL, 0,
notLimited, tableType,
noDict,
noDictIssue, acceleration);
1010 if (inputSize < LZ4_64Klimit) {;
1011 return LZ4_compress_generic(ctx, source, dest, inputSize,
NULL, maxOutputSize,
limitedOutput,
byU16,
noDict,
noDictIssue, acceleration);
1014 return LZ4_compress_generic(ctx, source, dest, inputSize,
NULL, maxOutputSize,
limitedOutput, tableType,
noDict,
noDictIssue, acceleration);
1034 if (srcSize < LZ4_64Klimit) {
1038 return LZ4_compress_generic(ctx, src, dst, srcSize,
NULL, 0,
notLimited, tableType,
noDict,
dictSmall, acceleration);
1040 return LZ4_compress_generic(ctx, src, dst, srcSize,
NULL, 0,
notLimited, tableType,
noDict,
noDictIssue, acceleration);
1045 return LZ4_compress_generic(ctx, src, dst, srcSize,
NULL, 0,
notLimited, tableType,
noDict,
noDictIssue, acceleration);
1048 if (srcSize < LZ4_64Klimit) {
1052 return LZ4_compress_generic(ctx, src, dst, srcSize,
NULL, dstCapacity,
limitedOutput, tableType,
noDict,
dictSmall, acceleration);
1054 return LZ4_compress_generic(ctx, src, dst, srcSize,
NULL, dstCapacity,
limitedOutput, tableType,
noDict,
noDictIssue, acceleration);
1059 return LZ4_compress_generic(ctx, src, dst, srcSize,
NULL, dstCapacity,
limitedOutput, tableType,
noDict,
noDictIssue, acceleration);
1070 if (ctxPtr ==
NULL)
return 0;
1097 if (inputSize < LZ4_64Klimit)
1098 return LZ4_compress_generic(&ctx.
internal_donotuse, source, dest, inputSize,
NULL, maxOutputSize,
limitedOutput,
byU16,
noDict,
noDictIssue, acceleration);
1100 return LZ4_compress_generic(&ctx.
internal_donotuse, source, dest, inputSize,
NULL, maxOutputSize,
limitedOutput,
sizeof(
void*)==8 ?
byU32 :
byPtr,
noDict,
noDictIssue, acceleration);
1107 static int LZ4_compress_destSize_extState (
LZ4_stream_t*
state,
const char* src,
char*
dst,
int* srcSizePtr,
int targetDstSize)
1114 if (*srcSizePtr < LZ4_64Klimit) {
1115 return LZ4_compress_generic(&state->
internal_donotuse, src,
dst, *srcSizePtr, srcSizePtr, targetDstSize,
fillOutput,
byU16,
noDict,
noDictIssue, 1);
1118 return LZ4_compress_generic(&state->
internal_donotuse, src,
dst, *srcSizePtr, srcSizePtr, targetDstSize,
fillOutput, tableType,
noDict,
noDictIssue, 1);
1127 if (ctx ==
NULL)
return 0;
1133 int result = LZ4_compress_destSize_extState(ctx, src, dst, srcSizePtr, targetDstSize);
1151 DEBUGLOG(4,
"LZ4_createStream %p", lz4s);
1159 DEBUGLOG(5,
"LZ4_resetStream (ctx:%p)", LZ4_stream);
1169 if (!LZ4_stream)
return 0;
1170 DEBUGLOG(5,
"LZ4_freeStream %p", LZ4_stream);
1176 #define HASH_UNIT sizeof(reg_t) 1181 const BYTE* p = (
const BYTE*)dictionary;
1182 const BYTE*
const dictEnd = p + dictSize;
1185 DEBUGLOG(4,
"LZ4_loadDict (%i bytes from %p into %p)", dictSize, dictionary, LZ4_dict);
1200 if ((dictEnd - p) > 64
KB) p = dictEnd - 64
KB;
1220 if (dictionary_stream !=
NULL) {
1261 DEBUGLOG(5,
"LZ4_compress_fast_continue (inputSize=%i)", inputSize);
1264 LZ4_renormDictT(streamPtr, inputSize);
1269 && (dictEnd != (
const BYTE*)source) ) {
1273 dictEnd = (
const BYTE*)source;
1277 {
const BYTE* sourceEnd = (
const BYTE*) source + inputSize;
1278 if ((sourceEnd > streamPtr->
dictionary) && (sourceEnd < dictEnd)) {
1289 return LZ4_compress_generic(streamPtr, source, dest, inputSize,
NULL, maxOutputSize,
limitedOutput, tableType,
withPrefix64k,
dictSmall, acceleration);
1291 return LZ4_compress_generic(streamPtr, source, dest, inputSize,
NULL, maxOutputSize,
limitedOutput, tableType,
withPrefix64k,
noDictIssue, acceleration);
1303 if (inputSize > 4
KB) {
1309 result =
LZ4_compress_generic(streamPtr, source, dest, inputSize,
NULL, maxOutputSize,
limitedOutput, tableType,
usingExtDict,
noDictIssue, acceleration);
1311 result =
LZ4_compress_generic(streamPtr, source, dest, inputSize,
NULL, maxOutputSize,
limitedOutput, tableType,
usingDictCtx,
noDictIssue, acceleration);
1315 result =
LZ4_compress_generic(streamPtr, source, dest, inputSize,
NULL, maxOutputSize,
limitedOutput, tableType,
usingExtDict,
dictSmall, acceleration);
1317 result =
LZ4_compress_generic(streamPtr, source, dest, inputSize,
NULL, maxOutputSize,
limitedOutput, tableType,
usingExtDict,
noDictIssue, acceleration);
1333 LZ4_renormDictT(streamPtr, srcSize);
1336 result =
LZ4_compress_generic(streamPtr, source, dest, srcSize,
NULL, 0,
notLimited,
byU32,
usingExtDict,
dictSmall, 1);
1338 result =
LZ4_compress_generic(streamPtr, source, dest, srcSize,
NULL, 0,
notLimited,
byU32,
usingExtDict,
noDictIssue, 1);
1360 if ((
U32)dictSize > 64
KB) dictSize = 64
KB;
1363 memmove(safeBuffer, previousDictEnd - dictSize, dictSize);
1384 const char*
const src,
1390 int partialDecoding,
1391 int targetOutputSize,
1393 const BYTE*
const lowPrefix,
1394 const BYTE*
const dictStart,
1395 const size_t dictSize
1398 const BYTE* ip = (
const BYTE*) src;
1399 const BYTE*
const iend = ip + srcSize;
1404 BYTE* oexit = op + targetOutputSize;
1406 const BYTE*
const dictEnd = (
const BYTE*)dictStart + dictSize;
1407 const unsigned inc32table[8] = {0, 1, 2, 1, 0, 4, 4, 4};
1408 const int dec64table[8] = {0, 0, 0, -1, -4, 1, 2, 3};
1411 const int checkOffset = ((safeDecode) && (dictSize < (
int)(64
KB)));
1414 const BYTE*
const shortiend = iend - (endOnInput ? 14 : 8) - 2 ;
1415 const BYTE*
const shortoend = oend - (endOnInput ? 14 : 8) - 18 ;
1417 DEBUGLOG(5,
"LZ4_decompress_generic (srcSize:%i)", srcSize);
1420 if ((partialDecoding) && (oexit > oend-
MFLIMIT)) oexit = oend-
MFLIMIT;
1421 if ((endOnInput) && (
unlikely(outputSize==0)))
return ((srcSize==1) && (*ip==0)) ? 0 : -1;
1422 if ((!endOnInput) && (
unlikely(outputSize==0)))
return (*ip==0?1:-1);
1423 if ((endOnInput) &&
unlikely(srcSize==0))
return -1;
1430 unsigned const token = *ip++;
1431 size_t length = token >>
ML_BITS;
1433 assert(!endOnInput || ip <= iend);
1444 if ( (endOnInput ? length !=
RUN_MASK : length <= 8)
1446 &&
likely((endOnInput ? ip < shortiend : 1) & (op <= shortoend)) ) {
1448 memcpy(op, ip, endOnInput ? 16 : 8);
1449 op += length; ip += length;
1454 offset = LZ4_readLE16(ip); ip += 2;
1455 match = op - offset;
1458 if ( (length != ML_MASK)
1462 memcpy(op + 0, match + 0, 8);
1463 memcpy(op + 8, match + 8, 8);
1464 memcpy(op +16, match +16, 2);
1482 }
while (
likely(endOnInput ? ip<iend-
RUN_MASK : 1) & (s==255) );
1489 if ( ((endOnInput) && ((cpy>(partialDecoding?oexit:oend-MFLIMIT)) || (ip+length>iend-(2+1+
LASTLITERALS))) )
1492 if (partialDecoding) {
1493 if (cpy > oend)
goto _output_error;
1494 if ((endOnInput) && (ip+length > iend))
goto _output_error;
1496 if ((!endOnInput) && (cpy != oend))
goto _output_error;
1497 if ((endOnInput) && ((ip+length != iend) || (cpy > oend)))
goto _output_error;
1499 memcpy(op, ip, length);
1505 ip += length; op = cpy;
1508 offset = LZ4_readLE16(ip); ip+=2;
1509 match = op - offset;
1515 if ((checkOffset) && (
unlikely(match + dictSize < lowPrefix)))
goto _output_error;
1516 LZ4_write32(op, (
U32)offset);
1518 if (length == ML_MASK) {
1522 if ((endOnInput) && (ip > iend-
LASTLITERALS))
goto _output_error;
1533 if (length <= (
size_t)(lowPrefix-match)) {
1535 memmove(op, dictEnd - (lowPrefix-match), length);
1539 size_t const copySize = (size_t)(lowPrefix-match);
1540 size_t const restSize = length - copySize;
1541 memcpy(op, dictEnd - copySize, copySize);
1543 if (restSize > (
size_t)(op-lowPrefix)) {
1544 BYTE*
const endOfMatch = op + restSize;
1545 const BYTE* copyFrom = lowPrefix;
1546 while (op < endOfMatch) *op++ = *copyFrom++;
1548 memcpy(op, lowPrefix, restSize);
1561 match += inc32table[offset];
1562 memcpy(op+4, match, 4);
1563 match -= dec64table[offset];
1564 }
else { memcpy(op, match, 8); match+=8; }
1570 if (op < oCopyLimit) {
1572 match += oCopyLimit - op;
1575 while (op<cpy) *op++ = *match++;
1577 memcpy(op, match, 8);
1585 return (
int) (((
char*)op)-
dst);
1587 return (
int) (((
const char*)ip)-src);
1591 return (
int) (-(((
const char*)ip)-src))-1;
1649 #define LZ4_decompress_safe_extDict LZ4_decompress_safe_forceExtDict 1651 const void* dictStart,
size_t dictSize)
1655 (
BYTE*)dest, (
const BYTE*)dictStart, dictSize);
1660 const void* dictStart,
size_t dictSize)
1673 size_t prefixSize,
const void* dictStart,
size_t dictSize)
1677 (
BYTE*)dest-prefixSize, (
const BYTE*)dictStart, dictSize);
1682 size_t prefixSize,
const void* dictStart,
size_t dictSize)
1686 (
BYTE*)dest-prefixSize, (
const BYTE*)dictStart, dictSize);
1699 if (!LZ4_stream)
return 0;
1733 if (maxBlockSize < 0)
return 0;
1735 if (maxBlockSize < 16) maxBlockSize = 16;
1756 if (result <= 0)
return result;
1764 result = LZ4_decompress_safe_withSmallPrefix(source, dest, compressedSize, maxOutputSize,
1769 if (result <= 0)
return result;
1778 if (result <= 0)
return result;
1795 if (result <= 0)
return result;
1804 if (result <= 0)
return result;
1810 result = LZ4_decompress_fast_extDict(source, dest, originalSize,
1812 if (result <= 0)
return result;
1832 if (dictStart+dictSize == dest) {
1833 if (dictSize >= 64
KB - 1)
1835 return LZ4_decompress_safe_withSmallPrefix(source, dest, compressedSize, maxOutputSize, dictSize);
1842 if (dictSize==0 || dictStart+dictSize == dest)
1844 return LZ4_decompress_fast_extDict(source, dest, originalSize, dictStart, dictSize);
void * LZ4_create(char *inputBuffer)
int LZ4_compress_destSize(const char *src, char *dst, int *srcSizePtr, int targetDstSize)
LZ4_stream_t * LZ4_createStream(void)
int LZ4_compress_fast_force(const char *source, char *dest, int inputSize, int maxOutputSize, int acceleration)
LZ4_streamDecode_t_internal internal_donotuse
LZ4_FORCE_O2_GCC_PPC64LE int LZ4_decompress_fast_continue(LZ4_streamDecode_t *LZ4_streamDecode, const char *source, char *dest, int originalSize)
int LZ4_loadDict(LZ4_stream_t *LZ4_dict, const char *dictionary, int dictSize)
int LZ4_compress_limitedOutput(const char *source, char *dest, int inputSize, int maxOutputSize)
#define LZ4_HASH_SIZE_U32
LZ4_FORCE_O2_GCC_PPC64LE LZ4_FORCE_INLINE int LZ4_decompress_generic(const char *const src, char *const dst, int srcSize, int outputSize, int endOnInput, int partialDecoding, int targetOutputSize, int dict, const BYTE *const lowPrefix, const BYTE *const dictStart, const size_t dictSize)
int LZ4_setStreamDecode(LZ4_streamDecode_t *LZ4_streamDecode, const char *dictionary, int dictSize)
unsigned int currentOffset
LZ4_FORCE_INLINE void LZ4_putPosition(const BYTE *p, void *tableBase, tableType_t tableType, const BYTE *srcBase)
int LZ4_resetStreamState(void *state, char *inputBuffer)
int LZ4_uncompress_unknownOutputSize(const char *source, char *dest, int isize, int maxOutputSize)
int LZ4_compress_limitedOutput_continue(LZ4_stream_t *LZ4_stream, const char *src, char *dst, int srcSize, int dstCapacity)
#define ACCELERATION_DEFAULT
int LZ4_freeStreamDecode(LZ4_streamDecode_t *LZ4_stream)
LZ4_streamDecode_t * LZ4_createStreamDecode(void)
char * LZ4_slideInputBuffer(void *state)
int LZ4_compress_forceExtDict(LZ4_stream_t *LZ4_dict, const char *source, char *dest, int srcSize)
LZ4_FORCE_O2_INLINE_GCC_PPC64LE void LZ4_wildCopy(void *dstPtr, const void *srcPtr, void *dstEnd)
LZ4_FORCE_INLINE int LZ4_decompress_safe_doubleDict(const char *source, char *dest, int compressedSize, int maxOutputSize, size_t prefixSize, const void *dictStart, size_t dictSize)
const unsigned char * prefixEnd
int LZ4_compress_fast_extState(void *state, const char *source, char *dest, int inputSize, int maxOutputSize, int acceleration)
int LZ4_compress_default(const char *source, char *dest, int inputSize, int maxOutputSize)
int LZ4_uncompress(const char *source, char *dest, int outputSize)
LZ4_FORCE_O2_GCC_PPC64LE int LZ4_decompress_safe_continue(LZ4_streamDecode_t *LZ4_streamDecode, const char *source, char *dest, int compressedSize, int maxOutputSize)
int LZ4_decompress_fast_withPrefix64k(const char *source, char *dest, int originalSize)
const unsigned char * externalDict
LZ4_FORCE_INLINE unsigned LZ4_count(const BYTE *pIn, const BYTE *pMatch, const BYTE *pInLimit)
unsigned int hashTable[LZ4_HASH_SIZE_U32]
char int int maxOutputSize
int LZ4_decompress_safe_usingDict(const char *source, char *dest, int compressedSize, int maxOutputSize, const char *dictStart, int dictSize)
#define LZ4_HASHTABLESIZE
int LZ4_compress_withState(void *state, const char *src, char *dst, int srcSize)
void LZ4_attach_dictionary(LZ4_stream_t *working_stream, const LZ4_stream_t *dictionary_stream)
#define LZ4_DECODER_RING_BUFFER_SIZE(mbs)
LZ4_FORCE_O2_GCC_PPC64LE int LZ4_decompress_safe(const char *source, char *dest, int compressedSize, int maxDecompressedSize)
LZ4_FORCE_O2_GCC_PPC64LE int LZ4_decompress_safe_withPrefix64k(const char *source, char *dest, int compressedSize, int maxOutputSize)
int LZ4_compress_fast(const char *source, char *dest, int inputSize, int maxOutputSize, int acceleration)
#define assert(condition)
int LZ4_freeStream(LZ4_stream_t *LZ4_stream)
int LZ4_versionNumber(void)
void LZ4_resetStream(LZ4_stream_t *LZ4_stream)
#define LZ4_VERSION_STRING
#define LZ4_FORCE_O2_GCC_PPC64LE
LZ4_FORCE_INLINE const BYTE * LZ4_getPosition(const BYTE *p, const void *tableBase, tableType_t tableType, const BYTE *srcBase)
int LZ4_decoderRingBufferSize(int maxBlockSize)
LZ4_FORCE_INLINE void LZ4_prepareTable(LZ4_stream_t_internal *const cctx, const int inputSize, const tableType_t tableType)
LZ4_stream_t_internal internal_donotuse
LZ4_FORCE_INLINE int LZ4_decompress_fast_doubleDict(const char *source, char *dest, int originalSize, size_t prefixSize, const void *dictStart, size_t dictSize)
int LZ4_compress_continue(LZ4_stream_t *LZ4_stream, const char *source, char *dest, int inputSize)
int LZ4_decompress_fast_usingDict(const char *source, char *dest, int originalSize, const char *dictStart, int dictSize)
#define LZ4_STATIC_ASSERT(c)
LZ4_FORCE_INLINE int LZ4_compress_generic(LZ4_stream_t_internal *const cctx, const char *const source, char *const dest, const int inputSize, int *inputConsumed, const int maxOutputSize, const limitedOutput_directive outputLimited, const tableType_t tableType, const dict_directive dictDirective, const dictIssue_directive dictIssue, const U32 acceleration)
#define LZ4_MAX_INPUT_SIZE
#define ALLOC_AND_ZERO(s)
int LZ4_compress_limitedOutput_withState(void *state, const char *src, char *dst, int srcSize, int dstSize)
const LZ4_stream_t_internal * dictCtx
const char char int inputSize
#define LZ4_VERSION_NUMBER
LZ4_FORCE_O2_GCC_PPC64LE int LZ4_decompress_safe_partial(const char *source, char *dest, int compressedSize, int targetOutputSize, int maxDecompressedSize)
#define LZ4_decompress_safe_extDict
int LZ4_sizeofStreamState()
const char * LZ4_versionString(void)
int LZ4_compress(const char *source, char *dest, int inputSize)
int LZ4_compressBound(int isize)
#define MEM_INIT(p, v, s)
const unsigned char * dictionary
LZ4_FORCE_O2_GCC_PPC64LE int LZ4_decompress_fast(const char *source, char *dest, int originalSize)
#define LZ4_COMPRESSBOUND(isize)
#define LZ4_FORCE_O2_INLINE_GCC_PPC64LE
int LZ4_saveDict(LZ4_stream_t *LZ4_dict, char *safeBuffer, int dictSize)
void LZ4_resetStream_fast(LZ4_stream_t *ctx)
int LZ4_compress_fast_extState_fastReset(void *state, const char *src, char *dst, int srcSize, int dstCapacity, int acceleration)
LZ4_FORCE_INLINE U32 LZ4_hashPosition(const void *const p, tableType_t const tableType)
int LZ4_compress_fast_continue(LZ4_stream_t *LZ4_stream, const char *source, char *dest, int inputSize, int maxOutputSize, int acceleration)