Lines Matching +full:64 +full:- +full:byte

2  * LZ4 - Fast LZ compression algorithm
3 * Copyright (C) 2011 - 2016, Yann Collet.
4 * BSD 2 - Clause License (http://www.opensource.org/licenses/bsd - license.php)
26 * - LZ4 homepage : http://www.lz4.org
27 * - LZ4 source repository : https://github.com/lz4/lz4
30 * Sven Schmidt <4sschmid@informatik.uni-hamburg.de>
33 /*-************************************
43 static const int LZ4_64Klimit = ((64 * KB) + (MFLIMIT - 1));
45 /*-******************************
54 >> ((MINMATCH * 8) - (LZ4_HASHLOG + 1))); in LZ4_hash4()
57 >> ((MINMATCH * 8) - LZ4_HASHLOG)); in LZ4_hash4()
71 return (U32)(((sequence << 24) * prime5bytes) >> (64 - hashLog)); in LZ4_hash5()
75 return (U32)(((sequence >> 24) * prime8bytes) >> (64 - hashLog)); in LZ4_hash5()
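The LZ4_hash4()/LZ4_hash5() fragments above are the position-hashing step: a 4- or 5-byte window of input is multiplied by a large odd constant and only the top LZ4_HASHLOG bits (one more for the byU16 table) are kept as the table index. A minimal standalone sketch, using the multiplier constants from the upstream LZ4 sources and 12 as a stand-in for LZ4_HASHLOG:

#include <stdint.h>
#include <stdio.h>

/* 4-byte variant: MINMATCH * 8 == 32, so the shift keeps the top hashLog bits. */
static uint32_t hash4(uint32_t sequence, unsigned hashLog)
{
	return (sequence * 2654435761U) >> (32 - hashLog);
}

/* 5-byte variant used on 64-bit little-endian builds: `<< 24` drops all but
 * the low five input bytes before the multiply. */
static uint32_t hash5(uint64_t sequence, unsigned hashLog)
{
	static const uint64_t prime5bytes = 889523592379ULL;

	return (uint32_t)(((sequence << 24) * prime5bytes) >> (64 - hashLog));
}

int main(void)
{
	/* both results index a (1 << 12)-entry hash table */
	printf("%u %u\n", (unsigned)hash4(0x11223344u, 12),
	       (unsigned)hash5(0x0000001122334455ull, 12));
	return 0;
}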
92 const BYTE *p, in LZ4_putPositionOnHash()
96 const BYTE *srcBase) in LZ4_putPositionOnHash()
101 const BYTE **hashTable = (const BYTE **)tableBase; in LZ4_putPositionOnHash()
110 hashTable[h] = (U32)(p - srcBase); in LZ4_putPositionOnHash()
117 hashTable[h] = (U16)(p - srcBase); in LZ4_putPositionOnHash()
124 const BYTE *p, in LZ4_putPosition()
127 const BYTE *srcBase) in LZ4_putPosition()
134 static const BYTE *LZ4_getPositionOnHash( in LZ4_getPositionOnHash()
138 const BYTE *srcBase) in LZ4_getPositionOnHash()
141 const BYTE **hashTable = (const BYTE **) tableBase; in LZ4_getPositionOnHash()
160 static FORCE_INLINE const BYTE *LZ4_getPosition( in LZ4_getPosition()
161 const BYTE *p, in LZ4_getPosition()
164 const BYTE *srcBase) in LZ4_getPosition()
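The LZ4_putPositionOnHash()/LZ4_getPositionOnHash() matches show the same untyped hash table being viewed three ways, selected by tableType: raw pointers, 32-bit offsets from srcBase, or 16-bit offsets when the input is known to fit under the 64 KB limit that LZ4_64Klimit guards. A sketch of that dispatch, with invented names (table_kind, put_position, get_position) standing in for the kernel's tableType_t/byPtr/byU32/byU16:

#include <stdint.h>

typedef enum { BY_PTR, BY_U32, BY_U16 } table_kind;

void put_position(const uint8_t *p, void *tableBase, table_kind kind,
		  uint32_t h, const uint8_t *srcBase)
{
	switch (kind) {
	case BY_PTR:	/* full pointers: any input size, widest table entries */
		((const uint8_t **)tableBase)[h] = p;
		break;
	case BY_U32:	/* 32-bit offsets relative to srcBase */
		((uint32_t *)tableBase)[h] = (uint32_t)(p - srcBase);
		break;
	case BY_U16:	/* 16-bit offsets: only valid for inputs below 64 KB */
		((uint16_t *)tableBase)[h] = (uint16_t)(p - srcBase);
		break;
	}
}

/* The getter reverses the mapping: srcBase plus the stored offset, or the
 * stored pointer itself for BY_PTR. */
const uint8_t *get_position(void *tableBase, table_kind kind,
			    uint32_t h, const uint8_t *srcBase)
{
	if (kind == BY_PTR)
		return ((const uint8_t **)tableBase)[h];
	if (kind == BY_U32)
		return srcBase + ((uint32_t *)tableBase)[h];
	return srcBase + ((uint16_t *)tableBase)[h];
}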
188 const BYTE *ip = (const BYTE *) source; in LZ4_compress_generic()
189 const BYTE *base; in LZ4_compress_generic()
190 const BYTE *lowLimit; in LZ4_compress_generic()
191 const BYTE * const lowRefLimit = ip - dictPtr->dictSize; in LZ4_compress_generic()
192 const BYTE * const dictionary = dictPtr->dictionary; in LZ4_compress_generic()
193 const BYTE * const dictEnd = dictionary + dictPtr->dictSize; in LZ4_compress_generic()
194 const size_t dictDelta = dictEnd - (const BYTE *)source; in LZ4_compress_generic()
195 const BYTE *anchor = (const BYTE *) source; in LZ4_compress_generic()
196 const BYTE * const iend = ip + inputSize; in LZ4_compress_generic()
197 const BYTE * const mflimit = iend - MFLIMIT; in LZ4_compress_generic()
198 const BYTE * const matchlimit = iend - LASTLITERALS; in LZ4_compress_generic()
200 BYTE *op = (BYTE *) dest; in LZ4_compress_generic()
201 BYTE * const olimit = op + maxOutputSize; in LZ4_compress_generic()
215 base = (const BYTE *)source; in LZ4_compress_generic()
216 lowLimit = (const BYTE *)source; in LZ4_compress_generic()
219 base = (const BYTE *)source - dictPtr->currentOffset; in LZ4_compress_generic()
220 lowLimit = (const BYTE *)source - dictPtr->dictSize; in LZ4_compress_generic()
223 base = (const BYTE *)source - dictPtr->currentOffset; in LZ4_compress_generic()
224 lowLimit = (const BYTE *)source; in LZ4_compress_generic()
230 /* Size too large (not within 64K limit) */ in LZ4_compress_generic()
239 /* First Byte */ in LZ4_compress_generic()
240 LZ4_putPosition(ip, dictPtr->hashTable, tableType, base); in LZ4_compress_generic()
246 const BYTE *match; in LZ4_compress_generic()
247 BYTE *token; in LZ4_compress_generic()
251 const BYTE *forwardIp = ip; in LZ4_compress_generic()
266 dictPtr->hashTable, in LZ4_compress_generic()
270 if (match < (const BYTE *)source) { in LZ4_compress_generic()
275 lowLimit = (const BYTE *)source; in LZ4_compress_generic()
281 LZ4_putPositionOnHash(ip, h, dictPtr->hashTable, in LZ4_compress_generic()
295 && (unlikely(ip[-1] == match[refDelta - 1]))) { in LZ4_compress_generic()
296 ip--; in LZ4_compress_generic()
297 match--; in LZ4_compress_generic()
302 unsigned const int litLength = (unsigned int)(ip - anchor); in LZ4_compress_generic()
314 int len = (int)litLength - RUN_MASK; in LZ4_compress_generic()
318 for (; len >= 255; len -= 255) in LZ4_compress_generic()
320 *op++ = (BYTE)len; in LZ4_compress_generic()
322 *token = (BYTE)(litLength << ML_BITS); in LZ4_compress_generic()
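The litLength fragment above is the LZ4 literal-length encoding: lengths below RUN_MASK (15) fit in the token's high nibble, while longer runs store 15 there and spill the remainder as 255-valued bytes terminated by a byte smaller than 255. A minimal sketch, assuming the standard RUN_MASK/ML_BITS values (15 and 4):

#include <stdint.h>

#define ML_BITS  4
#define RUN_MASK ((1U << (8 - ML_BITS)) - 1)	/* 15 */

/* Writes the literal-length part of a sequence; the literal bytes themselves
 * are copied right after the bytes emitted here.  Returns the new output
 * position. */
uint8_t *encode_literal_length(uint8_t *op, uint8_t *token, unsigned litLength)
{
	if (litLength >= RUN_MASK) {
		int len = (int)(litLength - RUN_MASK);

		*token = (uint8_t)(RUN_MASK << ML_BITS);
		for (; len >= 255; len -= 255)
			*op++ = 255;
		*op++ = (uint8_t)len;
	} else {
		*token = (uint8_t)(litLength << ML_BITS);
	}
	return op;
}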
331 LZ4_writeLE16(op, (U16)(ip - match)); in LZ4_compress_generic()
340 const BYTE *limit; in LZ4_compress_generic()
343 limit = ip + (dictEnd - match); in LZ4_compress_generic()
355 (const BYTE *)source, in LZ4_compress_generic()
376 matchCode -= ML_MASK; in LZ4_compress_generic()
382 matchCode -= 4 * 255; in LZ4_compress_generic()
386 *op++ = (BYTE)(matchCode % 255); in LZ4_compress_generic()
388 *token += (BYTE)(matchCode); in LZ4_compress_generic()
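The matchCode fragment is the companion match-length encoding: the token's low nibble caps at ML_MASK (15), and the excess is spilled into 255-valued bytes plus one terminating byte. The kernel version shown above additionally batches the 255s four at a time through 32-bit stores; a simpler byte-at-a-time sketch of the same format:

#include <stdint.h>
#include <stddef.h>

#define ML_MASK 15U

/* matchCode is the match length minus MINMATCH, as produced by LZ4_count(). */
uint8_t *encode_match_length(uint8_t *op, uint8_t *token, size_t matchCode)
{
	if (matchCode >= ML_MASK) {
		*token += ML_MASK;		/* low nibble saturates at 15 */
		matchCode -= ML_MASK;
		for (; matchCode >= 255; matchCode -= 255)
			*op++ = 255;
		*op++ = (uint8_t)matchCode;	/* terminating byte < 255 */
	} else {
		*token += (uint8_t)matchCode;
	}
	return op;
}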
398 LZ4_putPosition(ip - 2, dictPtr->hashTable, tableType, base); in LZ4_compress_generic()
401 match = LZ4_getPosition(ip, dictPtr->hashTable, in LZ4_compress_generic()
405 if (match < (const BYTE *)source) { in LZ4_compress_generic()
410 lowLimit = (const BYTE *)source; in LZ4_compress_generic()
414 LZ4_putPosition(ip, dictPtr->hashTable, tableType, base); in LZ4_compress_generic()
431 size_t const lastRun = (size_t)(iend - anchor); in LZ4_compress_generic()
435 ((op - (BYTE *)dest) + lastRun + 1 + in LZ4_compress_generic()
436 ((lastRun + 255 - RUN_MASK) / 255) > (U32)maxOutputSize)) in LZ4_compress_generic()
440 size_t accumulator = lastRun - RUN_MASK; in LZ4_compress_generic()
442 for (; accumulator >= 255; accumulator -= 255) in LZ4_compress_generic()
444 *op++ = (BYTE) accumulator; in LZ4_compress_generic()
446 *op++ = (BYTE)(lastRun << ML_BITS); in LZ4_compress_generic()
455 return (int) (((char *)op) - dest); in LZ4_compress_generic()
466 LZ4_stream_t_internal *ctx = &((LZ4_stream_t *)state)->internal_donotuse; in LZ4_compress_fast_extState()
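LZ4_compress_fast_extState() is where the exported one-shot entry points end up: the caller only supplies scratch memory, which becomes the LZ4_stream_t cast above. A hedged in-kernel usage sketch, assuming the declarations in include/linux/lz4.h (LZ4_compress_default(), LZ4_MEM_COMPRESS, LZ4_compressBound()):

#include <linux/errno.h>
#include <linux/lz4.h>
#include <linux/vmalloc.h>

/* dst is typically sized with LZ4_compressBound(src_len) when the caller
 * cannot tolerate a "does not fit" result. */
static int compress_buf(const char *src, int src_len, char *dst, int dst_cap)
{
	void *wrkmem = vmalloc(LZ4_MEM_COMPRESS);
	int out;

	if (!wrkmem)
		return -ENOMEM;

	/* returns the compressed size, or 0 when dst_cap is too small */
	out = LZ4_compress_default(src, dst, src_len, dst_cap, wrkmem);

	vfree(wrkmem);
	return out;
}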
519 /*-******************************
530 const BYTE *ip = (const BYTE *) src; in LZ4_compress_destSize_generic()
531 const BYTE *base = (const BYTE *) src; in LZ4_compress_destSize_generic()
532 const BYTE *lowLimit = (const BYTE *) src; in LZ4_compress_destSize_generic()
533 const BYTE *anchor = ip; in LZ4_compress_destSize_generic()
534 const BYTE * const iend = ip + *srcSizePtr; in LZ4_compress_destSize_generic()
535 const BYTE * const mflimit = iend - MFLIMIT; in LZ4_compress_destSize_generic()
536 const BYTE * const matchlimit = iend - LASTLITERALS; in LZ4_compress_destSize_generic()
538 BYTE *op = (BYTE *) dst; in LZ4_compress_destSize_generic()
539 BYTE * const oend = op + targetDstSize; in LZ4_compress_destSize_generic()
540 BYTE * const oMaxLit = op + targetDstSize - 2 /* offset */ in LZ4_compress_destSize_generic()
541 - 8 /* because 8 + MINMATCH == MFLIMIT */ - 1 /* token */; in LZ4_compress_destSize_generic()
542 BYTE * const oMaxMatch = op + targetDstSize in LZ4_compress_destSize_generic()
543 - (LASTLITERALS + 1 /* token */); in LZ4_compress_destSize_generic()
544 BYTE * const oMaxSeq = oMaxLit - 1 /* token */; in LZ4_compress_destSize_generic()
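The oMaxLit/oMaxMatch/oMaxSeq bounds above lean on the usual LZ4 constants; a quick compile-time check of the in-source comment, with the lz4defs.h values assumed here:

#define MINMATCH	4
#define WILDCOPYLENGTH	8
#define MFLIMIT		(WILDCOPYLENGTH + MINMATCH)	/* 12 */
#define LASTLITERALS	5

_Static_assert(8 + MINMATCH == MFLIMIT,
	       "the '- 8' term in oMaxLit matches the comment");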
555 /* Size too large (not within 64K limit) */ in LZ4_compress_destSize_generic()
562 /* First Byte */ in LZ4_compress_destSize_generic()
564 LZ4_putPosition(ip, ctx->hashTable, tableType, base); in LZ4_compress_destSize_generic()
569 const BYTE *match; in LZ4_compress_destSize_generic()
570 BYTE *token; in LZ4_compress_destSize_generic()
574 const BYTE *forwardIp = ip; in LZ4_compress_destSize_generic()
588 match = LZ4_getPositionOnHash(h, ctx->hashTable, in LZ4_compress_destSize_generic()
593 ctx->hashTable, tableType, in LZ4_compress_destSize_generic()
605 && (unlikely(ip[-1] == match[-1]))) { in LZ4_compress_destSize_generic()
606 ip--; in LZ4_compress_destSize_generic()
607 match--; in LZ4_compress_destSize_generic()
612 unsigned int litLength = (unsigned int)(ip - anchor); in LZ4_compress_destSize_generic()
618 op--; in LZ4_compress_destSize_generic()
622 unsigned int len = litLength - RUN_MASK; in LZ4_compress_destSize_generic()
624 for (; len >= 255; len -= 255) in LZ4_compress_destSize_generic()
626 *op++ = (BYTE)len; in LZ4_compress_destSize_generic()
628 *token = (BYTE)(litLength << ML_BITS); in LZ4_compress_destSize_generic()
637 LZ4_writeLE16(op, (U16)(ip - match)); op += 2; in LZ4_compress_destSize_generic()
646 matchLength = (15 - 1) + (oMaxMatch - op) * 255; in LZ4_compress_destSize_generic()
652 matchLength -= ML_MASK; in LZ4_compress_destSize_generic()
654 matchLength -= 255; in LZ4_compress_destSize_generic()
657 *op++ = (BYTE)matchLength; in LZ4_compress_destSize_generic()
659 *token += (BYTE)(matchLength); in LZ4_compress_destSize_generic()
671 LZ4_putPosition(ip - 2, ctx->hashTable, tableType, base); in LZ4_compress_destSize_generic()
674 match = LZ4_getPosition(ip, ctx->hashTable, tableType, base); in LZ4_compress_destSize_generic()
675 LZ4_putPosition(ip, ctx->hashTable, tableType, base); in LZ4_compress_destSize_generic()
690 size_t lastRunSize = (size_t)(iend - anchor); in LZ4_compress_destSize_generic()
696 lastRunSize = (oend - op) - 1; in LZ4_compress_destSize_generic()
697 lastRunSize -= (lastRunSize + 240) / 255; in LZ4_compress_destSize_generic()
702 size_t accumulator = lastRunSize - RUN_MASK; in LZ4_compress_destSize_generic()
705 for (; accumulator >= 255; accumulator -= 255) in LZ4_compress_destSize_generic()
707 *op++ = (BYTE) accumulator; in LZ4_compress_destSize_generic()
709 *op++ = (BYTE)(lastRunSize<<ML_BITS); in LZ4_compress_destSize_generic()
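The `lastRunSize -= (lastRunSize + 240) / 255` adjustment reserves room for the extra length bytes of the final literal run when it has to be shrunk to fit the remaining output. A small check of that arithmetic (RUN_MASK == 15 assumed): with 300 output bytes left, 299 would-be literals become 297, which need exactly 1 token byte plus 2 continuation bytes:

#include <assert.h>
#include <stddef.h>

/* token byte + literals + (for lengths >= 15) continuation bytes */
static size_t bytes_needed(size_t literals)
{
	size_t n = 1 + literals;

	if (literals >= 15)
		n += 1 + (literals - 15) / 255;
	return n;
}

int main(void)
{
	size_t left = 300;			/* oend - op */
	size_t lastRunSize = left - 1;

	lastRunSize -= (lastRunSize + 240) / 255;	/* 299 -> 297 */
	assert(bytes_needed(lastRunSize) <= left);	/* 300 <= 300 */
	return 0;
}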
716 *srcSizePtr = (int) (((const char *)ip) - src); in LZ4_compress_destSize_generic()
717 return (int) (((char *)op) - dst); in LZ4_compress_destSize_generic()
743 &state->internal_donotuse, in LZ4_compress_destSize_extState()
748 &state->internal_donotuse, in LZ4_compress_destSize_extState()
767 /*-******************************
778 LZ4_stream_t_internal *dict = &LZ4_dict->internal_donotuse; in LZ4_loadDict()
779 const BYTE *p = (const BYTE *)dictionary; in LZ4_loadDict()
780 const BYTE * const dictEnd = p + dictSize; in LZ4_loadDict()
781 const BYTE *base; in LZ4_loadDict()
783 if ((dict->initCheck) in LZ4_loadDict()
784 || (dict->currentOffset > 1 * GB)) { in LZ4_loadDict()
790 dict->dictionary = NULL; in LZ4_loadDict()
791 dict->dictSize = 0; in LZ4_loadDict()
795 if ((dictEnd - p) > 64 * KB) in LZ4_loadDict()
796 p = dictEnd - 64 * KB; in LZ4_loadDict()
797 dict->currentOffset += 64 * KB; in LZ4_loadDict()
798 base = p - dict->currentOffset; in LZ4_loadDict()
799 dict->dictionary = p; in LZ4_loadDict()
800 dict->dictSize = (U32)(dictEnd - p); in LZ4_loadDict()
801 dict->currentOffset += dict->dictSize; in LZ4_loadDict()
803 while (p <= dictEnd - HASH_UNIT) { in LZ4_loadDict()
804 LZ4_putPosition(p, dict->hashTable, byU32, base); in LZ4_loadDict()
808 return dict->dictSize; in LZ4_loadDict()
813 const BYTE *src) in LZ4_renormDictT()
815 if ((LZ4_dict->currentOffset > 0x80000000) || in LZ4_renormDictT()
816 ((uptrval)LZ4_dict->currentOffset > (uptrval)src)) { in LZ4_renormDictT()
819 U32 const delta = LZ4_dict->currentOffset - 64 * KB; in LZ4_renormDictT()
820 const BYTE *dictEnd = LZ4_dict->dictionary + LZ4_dict->dictSize; in LZ4_renormDictT()
824 if (LZ4_dict->hashTable[i] < delta) in LZ4_renormDictT()
825 LZ4_dict->hashTable[i] = 0; in LZ4_renormDictT()
827 LZ4_dict->hashTable[i] -= delta; in LZ4_renormDictT()
829 LZ4_dict->currentOffset = 64 * KB; in LZ4_renormDictT()
830 if (LZ4_dict->dictSize > 64 * KB) in LZ4_renormDictT()
831 LZ4_dict->dictSize = 64 * KB; in LZ4_renormDictT()
832 LZ4_dict->dictionary = dictEnd - LZ4_dict->dictSize; in LZ4_renormDictT()
838 LZ4_stream_t_internal * const dict = &LZ4_dict->internal_donotuse; in LZ4_saveDict()
839 const BYTE * const previousDictEnd = dict->dictionary + dict->dictSize; in LZ4_saveDict()
841 if ((U32)dictSize > 64 * KB) { in LZ4_saveDict()
842 /* useless to define a dictionary > 64 * KB */ in LZ4_saveDict()
843 dictSize = 64 * KB; in LZ4_saveDict()
845 if ((U32)dictSize > dict->dictSize) in LZ4_saveDict()
846 dictSize = dict->dictSize; in LZ4_saveDict()
848 memmove(safeBuffer, previousDictEnd - dictSize, dictSize); in LZ4_saveDict()
850 dict->dictionary = (const BYTE *)safeBuffer; in LZ4_saveDict()
851 dict->dictSize = (U32)dictSize; in LZ4_saveDict()
860 LZ4_stream_t_internal *streamPtr = &LZ4_stream->internal_donotuse; in LZ4_compress_fast_continue()
861 const BYTE * const dictEnd = streamPtr->dictionary in LZ4_compress_fast_continue()
862 + streamPtr->dictSize; in LZ4_compress_fast_continue()
864 const BYTE *smallest = (const BYTE *) source; in LZ4_compress_fast_continue()
866 if (streamPtr->initCheck) { in LZ4_compress_fast_continue()
871 if ((streamPtr->dictSize > 0) && (smallest > dictEnd)) in LZ4_compress_fast_continue()
881 const BYTE *sourceEnd = (const BYTE *) source + inputSize; in LZ4_compress_fast_continue()
883 if ((sourceEnd > streamPtr->dictionary) in LZ4_compress_fast_continue()
885 streamPtr->dictSize = (U32)(dictEnd - sourceEnd); in LZ4_compress_fast_continue()
886 if (streamPtr->dictSize > 64 * KB) in LZ4_compress_fast_continue()
887 streamPtr->dictSize = 64 * KB; in LZ4_compress_fast_continue()
888 if (streamPtr->dictSize < 4) in LZ4_compress_fast_continue()
889 streamPtr->dictSize = 0; in LZ4_compress_fast_continue()
890 streamPtr->dictionary = dictEnd - streamPtr->dictSize; in LZ4_compress_fast_continue()
895 if (dictEnd == (const BYTE *)source) { in LZ4_compress_fast_continue()
898 if ((streamPtr->dictSize < 64 * KB) && in LZ4_compress_fast_continue()
899 (streamPtr->dictSize < streamPtr->currentOffset)) { in LZ4_compress_fast_continue()
910 streamPtr->dictSize += (U32)inputSize; in LZ4_compress_fast_continue()
911 streamPtr->currentOffset += (U32)inputSize; in LZ4_compress_fast_continue()
919 if ((streamPtr->dictSize < 64 * KB) && in LZ4_compress_fast_continue()
920 (streamPtr->dictSize < streamPtr->currentOffset)) { in LZ4_compress_fast_continue()
931 streamPtr->dictionary = (const BYTE *)source; in LZ4_compress_fast_continue()
932 streamPtr->dictSize = (U32)inputSize; in LZ4_compress_fast_continue()
933 streamPtr->currentOffset += (U32)inputSize; in LZ4_compress_fast_continue()
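Taken together, LZ4_loadDict(), LZ4_renormDictT(), LZ4_saveDict() and LZ4_compress_fast_continue() implement the streaming mode, where each block may reference up to 64 KB of previously seen data. A hedged usage sketch, assuming the declarations in include/linux/lz4.h (LZ4_resetStream(), LZ4_compress_fast_continue(), LZ4_saveDict()):

#include <linux/errno.h>
#include <linux/lz4.h>
#include <linux/slab.h>

static int compress_two_blocks(const char *blk1, int len1,
			       const char *blk2, int len2,
			       char *dst, int dst_cap,
			       char *history /* stable buffer, >= 64 KB */)
{
	LZ4_stream_t *stream;
	int n1, n2, ret = -ENOMEM;

	/* LZ4_stream_t embeds the ~16 KB hash table, so keep it off the stack */
	stream = kmalloc(sizeof(*stream), GFP_KERNEL);
	if (!stream)
		return ret;

	LZ4_resetStream(stream);
	ret = -EINVAL;

	n1 = LZ4_compress_fast_continue(stream, blk1, dst, len1, dst_cap, 1);
	if (n1 <= 0)
		goto out;

	/*
	 * Copy up to 64 KB of history into a stable buffer in case blk1's
	 * memory is reused before blk2 is compressed; larger requests are
	 * clamped, exactly as LZ4_saveDict() shows above.
	 */
	LZ4_saveDict(stream, history, 64 * 1024);

	n2 = LZ4_compress_fast_continue(stream, blk2, dst + n1, len2,
					dst_cap - n1, 1 /* acceleration */);
	if (n2 <= 0)
		goto out;

	ret = n1 + n2;
out:
	kfree(stream);
	return ret;
}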