Lines Matching refs:ip in lib/compress/zstd_lazy.c (zstd)
21 const BYTE* ip, const BYTE* iend, in ZSTD_updateDUBT() argument
33 U32 const target = (U32)(ip - base); in ZSTD_updateDUBT()
39 assert(ip + 8 <= iend); /* condition for ZSTD_hashPtr */ in ZSTD_updateDUBT()
77 const BYTE* const ip = (curr>=dictLimit) ? base + curr : dictBase + curr; in ZSTD_insertDUBT1() local
94 assert(ip < iend); /* condition for ZSTD_count */ in ZSTD_insertDUBT1()
113 matchLength += ZSTD_count(ip+matchLength, match+matchLength, iend); in ZSTD_insertDUBT1()
116 …matchLength += ZSTD_count_2segments(ip+matchLength, match+matchLength, iend, dictEnd, prefixStart); in ZSTD_insertDUBT1()
124 if (ip+matchLength == iend) { /* equal : no way to know if inf or sup */ in ZSTD_insertDUBT1()
128 if (match[matchLength] < ip[matchLength]) { /* necessarily within buffer */ in ZSTD_insertDUBT1()
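
The ZSTD_insertDUBT1 references above all follow one pattern: extend the candidate match (ZSTD_count, or ZSTD_count_2segments when the candidate crosses the dictionary boundary), then inspect the first differing byte to decide on which side of the binary tree the candidate belongs. Below is a minimal sketch of that pattern; count_match and candidate_sorts_lower are illustrative names, not zstd APIs, and the bounds assume match lies before ip in the same buffer:

    #include <stddef.h>

    /* Count how many leading bytes of ip and match are equal, never reading
     * past iend (stands in for the ZSTD_count calls on the lines above). */
    static size_t count_match(const unsigned char* ip, const unsigned char* match,
                              const unsigned char* iend)
    {
        size_t len = 0;
        while (ip + len < iend && ip[len] == match[len]) len++;
        return len;
    }

    /* Once matchLength is known, the next byte decides the sort order:
     * a smaller byte sorts the candidate below the current position, a larger
     * one above.  When ip+matchLength == iend there is no byte left to read,
     * which is the "equal : no way to know if inf or sup" case above. */
    static int candidate_sorts_lower(const unsigned char* ip, const unsigned char* match,
                                     size_t matchLength, const unsigned char* iend)
    {
        if (ip + matchLength == iend) return -1;   /* undecidable */
        return match[matchLength] < ip[matchLength];
    }
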
155 const BYTE* const ip, const BYTE* const iend, in ZSTD_DUBT_findBetterDictMatch() argument
166 size_t const h = ZSTD_hashPtr(ip, hashLog, mls); in ZSTD_DUBT_findBetterDictMatch()
171 U32 const curr = (U32)(ip-base); in ZSTD_DUBT_findBetterDictMatch()
192 …matchLength += ZSTD_count_2segments(ip+matchLength, match+matchLength, iend, dictEnd, prefixStart); in ZSTD_DUBT_findBetterDictMatch()
203 …if (ip+matchLength == iend) { /* reached end of input : ip[matchLength] is not valid, no way to … in ZSTD_DUBT_findBetterDictMatch()
208 if (match[matchLength] < ip[matchLength]) { in ZSTD_DUBT_findBetterDictMatch()
232 const BYTE* const ip, const BYTE* const iend, in ZSTD_DUBT_findBestMatch() argument
240 size_t const h = ZSTD_hashPtr(ip, hashLog, mls); in ZSTD_DUBT_findBestMatch()
244 U32 const curr = (U32)(ip-base); in ZSTD_DUBT_findBestMatch()
260 assert(ip <= iend-8); /* required for h calculation */ in ZSTD_DUBT_findBestMatch()
319 matchLength += ZSTD_count(ip+matchLength, match+matchLength, iend); in ZSTD_DUBT_findBestMatch()
322 …matchLength += ZSTD_count_2segments(ip+matchLength, match+matchLength, iend, dictEnd, prefixStart); in ZSTD_DUBT_findBestMatch()
332 if (ip+matchLength == iend) { /* equal : no way to know if inf or sup */ in ZSTD_DUBT_findBestMatch()
342 if (match[matchLength] < ip[matchLength]) { in ZSTD_DUBT_findBestMatch()
363 ms, ip, iend, in ZSTD_DUBT_findBestMatch()
383 const BYTE* const ip, const BYTE* const iLimit, in ZSTD_BtFindBestMatch() argument
389 if (ip < ms->window.base + ms->nextToUpdate) return 0; /* skipped area */ in ZSTD_BtFindBestMatch()
390 ZSTD_updateDUBT(ms, ip, iLimit, mls); in ZSTD_BtFindBestMatch()
391 return ZSTD_DUBT_findBestMatch(ms, ip, iLimit, offsetPtr, mls, dictMode); in ZSTD_BtFindBestMatch()
398 void ZSTD_dedicatedDictSearch_lazy_loadDictionary(ZSTD_matchState_t* ms, const BYTE* const ip) in ZSTD_dedicatedDictSearch_lazy_loadDictionary() argument
401 U32 const target = (U32)(ip - base); in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
518 const BYTE* const ip, const BYTE* const iLimit, in ZSTD_dedicatedDictSearch_lazy_search() argument
556 if (MEM_read32(match) == MEM_read32(ip)) { in ZSTD_dedicatedDictSearch_lazy_search()
558 currentMl = ZSTD_count_2segments(ip+4, match+4, iLimit, ddsEnd, prefixStart) + 4; in ZSTD_dedicatedDictSearch_lazy_search()
565 if (ip+currentMl == iLimit) { in ZSTD_dedicatedDictSearch_lazy_search()
593 if (MEM_read32(match) == MEM_read32(ip)) { in ZSTD_dedicatedDictSearch_lazy_search()
595 currentMl = ZSTD_count_2segments(ip+4, match+4, iLimit, ddsEnd, prefixStart) + 4; in ZSTD_dedicatedDictSearch_lazy_search()
602 … if (ip+currentMl == iLimit) break; /* best possible, avoids read overflow on next attempt */ in ZSTD_dedicatedDictSearch_lazy_search()
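
Many lines above (and in the other searchers) count a match that starts in an external segment via ZSTD_count_2segments. The sketch below shows what such a two-segment count amounts to, under the assumption that the data ending at mEnd logically continues at prefixStart; count_fwd and count_two_segments are illustrative names, not the zstd helpers:

    #include <stddef.h>

    /* Forward byte compare of a against b, bounded by aEnd only
     * (the caller guarantees b has at least as many readable bytes). */
    static size_t count_fwd(const unsigned char* a, const unsigned char* b,
                            const unsigned char* aEnd)
    {
        size_t n = 0;
        while (a + n < aEnd && a[n] == b[n]) n++;
        return n;
    }

    /* First compare against the candidate's own segment; if the match runs
     * exactly into mEnd, keep comparing against the bytes that logically
     * follow it, i.e. the current prefix starting at prefixStart. */
    static size_t count_two_segments(const unsigned char* ip, const unsigned char* match,
                                     const unsigned char* iEnd, const unsigned char* mEnd,
                                     const unsigned char* prefixStart)
    {
        const unsigned char* const bound =
            (ip + (mEnd - match) < iEnd) ? ip + (mEnd - match) : iEnd;
        size_t len = count_fwd(ip, match, bound);
        if (match + len != mEnd) return len;             /* mismatch before the boundary */
        return len + count_fwd(ip + len, prefixStart, iEnd);
    }
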
620 const BYTE* ip, U32 const mls) in ZSTD_insertAndFindFirstIndex_internal() argument
627 const U32 target = (U32)(ip - base); in ZSTD_insertAndFindFirstIndex_internal()
638 return hashTable[ZSTD_hashPtr(ip, hashLog, mls)]; in ZSTD_insertAndFindFirstIndex_internal()
641 U32 ZSTD_insertAndFindFirstIndex(ZSTD_matchState_t* ms, const BYTE* ip) { in ZSTD_insertAndFindFirstIndex() argument
643 return ZSTD_insertAndFindFirstIndex_internal(ms, cParams, ip, ms->cParams.minMatch); in ZSTD_insertAndFindFirstIndex()
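
ZSTD_insertAndFindFirstIndex_internal uses ip only to compute the target index and to hash the current position; the rest is table maintenance. Here is a self-contained sketch of a hash-chain update of that shape, with assumed sizes HASH_LOG/CHAIN_LOG and an illustrative 4-byte hash (the real code hashes mls bytes and takes its tables and nextToUpdate from ms):

    #include <stdint.h>
    #include <string.h>

    #define HASH_LOG  17
    #define CHAIN_LOG 16

    /* Illustrative 4-byte Fibonacci hash. */
    static uint32_t hash4(const unsigned char* p, unsigned hashLog)
    {
        uint32_t v; memcpy(&v, p, sizeof(v));
        return (v * 2654435761u) >> (32 - hashLog);
    }

    /* Index every not-yet-inserted position up to ip, then return the head of
     * ip's chain: hashTable[h] holds the most recent position hashing to h,
     * chainTable[pos & mask] links each position to the previous one with the
     * same hash. */
    static uint32_t insert_and_find_first_index(uint32_t* hashTable, uint32_t* chainTable,
                                                uint32_t* nextToUpdate,
                                                const unsigned char* base,
                                                const unsigned char* ip)
    {
        uint32_t const target    = (uint32_t)(ip - base);
        uint32_t const chainMask = (1u << CHAIN_LOG) - 1;
        uint32_t idx = *nextToUpdate;

        for (; idx < target; idx++) {
            uint32_t const h = hash4(base + idx, HASH_LOG);
            chainTable[idx & chainMask] = hashTable[h];  /* remember previous head */
            hashTable[h] = idx;                          /* current position becomes head */
        }
        *nextToUpdate = target;
        return hashTable[hash4(ip, HASH_LOG)];
    }
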
650 const BYTE* const ip, const BYTE* const iLimit, in ZSTD_HcFindBestMatch() argument
663 const U32 curr = (U32)(ip-base); in ZSTD_HcFindBestMatch()
677 ? ZSTD_hashPtr(ip, ddsHashLog, mls) << ZSTD_LAZY_DDSS_BUCKET_LOG : 0; in ZSTD_HcFindBestMatch()
687 matchIndex = ZSTD_insertAndFindFirstIndex_internal(ms, cParams, ip, mls); in ZSTD_HcFindBestMatch()
694 if (match[ml] == ip[ml]) /* potentially better */ in ZSTD_HcFindBestMatch()
695 currentMl = ZSTD_count(ip, match, iLimit); in ZSTD_HcFindBestMatch()
699 …if (MEM_read32(match) == MEM_read32(ip)) /* assumption : matchIndex <= dictLimit-4 (by table con… in ZSTD_HcFindBestMatch()
700 currentMl = ZSTD_count_2segments(ip+4, match+4, iLimit, dictEnd, prefixStart) + 4; in ZSTD_HcFindBestMatch()
707 … if (ip+currentMl == iLimit) break; /* best possible, avoids read overflow on next attempt */ in ZSTD_HcFindBestMatch()
717 ip, iLimit, prefixStart, curr, dictLimit, ddsIdx); in ZSTD_HcFindBestMatch()
729 matchIndex = dms->hashTable[ZSTD_hashPtr(ip, dms->cParams.hashLog, mls)]; in ZSTD_HcFindBestMatch()
735 …if (MEM_read32(match) == MEM_read32(ip)) /* assumption : matchIndex <= dictLimit-4 (by table con… in ZSTD_HcFindBestMatch()
736 currentMl = ZSTD_count_2segments(ip+4, match+4, iLimit, dmsEnd, prefixStart) + 4; in ZSTD_HcFindBestMatch()
743 … if (ip+currentMl == iLimit) break; /* best possible, avoids read overflow on next attempt */ in ZSTD_HcFindBestMatch()
935 FORCE_INLINE_TEMPLATE void ZSTD_row_update_internal(ZSTD_matchState_t* ms, const BYTE* ip, in ZSTD_row_update_internal() argument
941 const U32 target = (U32)(ip - base); in ZSTD_row_update_internal()
956 ZSTD_row_fillHashCache(ms, base, rowLog, mls, idx, ip+1); in ZSTD_row_update_internal()
968 void ZSTD_row_update(ZSTD_matchState_t* const ms, const BYTE* ip) { in ZSTD_row_update() argument
974 ZSTD_row_update_internal(ms, ip, mls, rowLog, rowMask, 0 /* dont use cache */); in ZSTD_row_update()
1122 const BYTE* const ip, const BYTE* const iLimit, in ZSTD_RowFindBestMatch() argument
1137 const U32 curr = (U32)(ip-base); in ZSTD_RowFindBestMatch()
1162 ddsIdx = ZSTD_hashPtr(ip, ddsHashLog, mls) << ZSTD_LAZY_DDSS_BUCKET_LOG; in ZSTD_RowFindBestMatch()
1172 U32 const dmsHash = (U32)ZSTD_hashPtr(ip, dms->rowHashLog + ZSTD_ROW_HASH_TAG_BITS, mls); in ZSTD_RowFindBestMatch()
1181 ZSTD_row_update_internal(ms, ip, mls, rowLog, rowMask, 1 /* useCache */); in ZSTD_RowFindBestMatch()
1227 if (match[ml] == ip[ml]) /* potentially better */ in ZSTD_RowFindBestMatch()
1228 currentMl = ZSTD_count(ip, match, iLimit); in ZSTD_RowFindBestMatch()
1232 …if (MEM_read32(match) == MEM_read32(ip)) /* assumption : matchIndex <= dictLimit-4 (by table con… in ZSTD_RowFindBestMatch()
1233 … currentMl = ZSTD_count_2segments(ip+4, match+4, iLimit, dictEnd, prefixStart) + 4; in ZSTD_RowFindBestMatch()
1240 … if (ip+currentMl == iLimit) break; /* best possible, avoids read overflow on next attempt */ in ZSTD_RowFindBestMatch()
1248 ip, iLimit, prefixStart, curr, dictLimit, ddsIdx); in ZSTD_RowFindBestMatch()
1281 if (MEM_read32(match) == MEM_read32(ip)) in ZSTD_RowFindBestMatch()
1282 … currentMl = ZSTD_count_2segments(ip+4, match+4, iLimit, dmsEnd, prefixStart) + 4; in ZSTD_RowFindBestMatch()
1289 if (ip+currentMl == iLimit) break; in ZSTD_RowFindBestMatch()
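
The row-based searcher above hashes ip once, selects a small row of candidates, and filters them by a one-byte tag before the 4-byte MEM_read32 check and the full count. The sketch below shows that tag-filtering step with illustrative types and names (row_t, row_tag_hits) and simplified sizes; the real matcher lays the tags out separately and compares them with SIMD:

    #include <stdint.h>
    #include <string.h>

    #define ROW_ENTRIES 16            /* illustrative row width */

    typedef struct {
        uint32_t index[ROW_ENTRIES];  /* candidate positions */
        uint8_t  tag[ROW_ENTRIES];    /* low hash bits of each candidate */
    } row_t;

    static uint32_t hash4(const unsigned char* p, unsigned bits)
    {
        uint32_t v; memcpy(&v, p, sizeof(v));
        return (v * 2654435761u) >> (32 - bits);
    }

    /* Keep only the entries whose stored tag equals ip's tag; survivors are
     * then verified with a 4-byte read and a full byte count, which is the
     * MEM_read32 / ZSTD_count_2segments pattern on the lines above. */
    static int row_tag_hits(const row_t* row, const unsigned char* ip,
                            unsigned hashBits, unsigned tagBits,
                            uint32_t out[ROW_ENTRIES])
    {
        uint32_t const h   = hash4(ip, hashBits);
        uint8_t  const tag = (uint8_t)(h & ((1u << tagBits) - 1));
        int n = 0, i;
        for (i = 0; i < ROW_ENTRIES; i++)
            if (row->tag[i] == tag) out[n++] = row->index[i];
        return n;
    }
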
1329 const BYTE* ip, const BYTE* const iLimit, \
1333 return ZSTD_BtFindBestMatch(ms, ip, iLimit, offBasePtr, mls, ZSTD_##dictMode); \
1339 const BYTE* ip, const BYTE* const iLimit, \
1343 return ZSTD_HcFindBestMatch(ms, ip, iLimit, offsetPtr, mls, ZSTD_##dictMode); \
1349 const BYTE* ip, const BYTE* const iLimit, \
1354 return ZSTD_RowFindBestMatch(ms, ip, iLimit, offsetPtr, mls, ZSTD_##dictMode, rowLog); \
1389 return ZSTD_BT_SEARCH_FN(dictMode, mls)(ms, ip, iend, offsetPtr);
1392 return ZSTD_HC_SEARCH_FN(dictMode, mls)(ms, ip, iend, offsetPtr);
1395 return ZSTD_ROW_SEARCH_FN(dictMode, mls, rowLog)(ms, ip, iend, offsetPtr);
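
The three FN blocks above are generated by macros so that mls (and rowLog) become compile-time constants inside each specialized search function. A minimal sketch of that generation pattern, assuming a hypothetical generic_search and GEN_SEARCH_FN (the zstd macros also encode the dictMode):

    #include <stddef.h>

    /* Stand-in for the real searcher; mls would select the minimum match length. */
    static size_t generic_search(const unsigned char* ip, const unsigned char* iend,
                                 unsigned mls)
    {
        (void)ip; (void)iend; (void)mls;
        return 0;
    }

    /* One specialized entry point per (kind, mls) pair, so the constant mls
     * can be folded into the generated body. */
    #define GEN_SEARCH_FN(kind, MLS)                                        \
        static size_t kind##_search_##MLS(const unsigned char* ip,          \
                                          const unsigned char* iend)        \
        { return generic_search(ip, iend, MLS); }

    GEN_SEARCH_FN(bt, 4)
    GEN_SEARCH_FN(bt, 5)
    GEN_SEARCH_FN(bt, 6)
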
1450 const BYTE* ip, in ZSTD_searchMax() argument
1484 const BYTE* ip = istart; in ZSTD_compressBlock_lazy_generic() local
1507 const U32 dictAndPrefixLength = (U32)((ip - prefixLowest) + (dictEnd - dictLowest)); in ZSTD_compressBlock_lazy_generic()
1510 ip += (dictAndPrefixLength == 0); in ZSTD_compressBlock_lazy_generic()
1512 U32 const curr = (U32)(ip - base); in ZSTD_compressBlock_lazy_generic()
1538 while (ip < ilimit) { in ZSTD_compressBlock_lazy_generic()
1541 const BYTE* start=ip+1; in ZSTD_compressBlock_lazy_generic()
1546 const U32 repIndex = (U32)(ip - base) + 1 - offset_1; in ZSTD_compressBlock_lazy_generic()
1552 && (MEM_read32(repMatch) == MEM_read32(ip+1)) ) { in ZSTD_compressBlock_lazy_generic()
1554 … matchLength = ZSTD_count_2segments(ip+1+4, repMatch+4, iend, repMatchEnd, prefixLowest) + 4; in ZSTD_compressBlock_lazy_generic()
1559 && ((offset_1 > 0) & (MEM_read32(ip+1-offset_1) == MEM_read32(ip+1)))) { in ZSTD_compressBlock_lazy_generic()
1560 matchLength = ZSTD_count(ip+1+4, ip+1+4-offset_1, iend) + 4; in ZSTD_compressBlock_lazy_generic()
1566 …size_t const ml2 = ZSTD_searchMax(ms, ip, iend, &offsetFound, mls, rowLog, searchMethod, dictMode); in ZSTD_compressBlock_lazy_generic()
1568 matchLength = ml2, start = ip, offcode=offsetFound; in ZSTD_compressBlock_lazy_generic()
1572 … ip += ((ip-anchor) >> kSearchStrength) + 1; /* jump faster over incompressible sections */ in ZSTD_compressBlock_lazy_generic()
1578 while (ip<ilimit) { in ZSTD_compressBlock_lazy_generic()
1580 ip ++; in ZSTD_compressBlock_lazy_generic()
1582 && (offcode) && ((offset_1>0) & (MEM_read32(ip) == MEM_read32(ip - offset_1)))) { in ZSTD_compressBlock_lazy_generic()
1583 size_t const mlRep = ZSTD_count(ip+4, ip+4-offset_1, iend) + 4; in ZSTD_compressBlock_lazy_generic()
1587 matchLength = mlRep, offcode = STORE_REPCODE_1, start = ip; in ZSTD_compressBlock_lazy_generic()
1590 const U32 repIndex = (U32)(ip - base) - offset_1; in ZSTD_compressBlock_lazy_generic()
1595 && (MEM_read32(repMatch) == MEM_read32(ip)) ) { in ZSTD_compressBlock_lazy_generic()
1597 … size_t const mlRep = ZSTD_count_2segments(ip+4, repMatch+4, iend, repMatchEnd, prefixLowest) + 4; in ZSTD_compressBlock_lazy_generic()
1601 matchLength = mlRep, offcode = STORE_REPCODE_1, start = ip; in ZSTD_compressBlock_lazy_generic()
1605 … size_t const ml2 = ZSTD_searchMax(ms, ip, iend, &offset2, mls, rowLog, searchMethod, dictMode); in ZSTD_compressBlock_lazy_generic()
1609 matchLength = ml2, offcode = offset2, start = ip; in ZSTD_compressBlock_lazy_generic()
1614 if ((depth==2) && (ip<ilimit)) { in ZSTD_compressBlock_lazy_generic()
1616 ip ++; in ZSTD_compressBlock_lazy_generic()
1618 && (offcode) && ((offset_1>0) & (MEM_read32(ip) == MEM_read32(ip - offset_1)))) { in ZSTD_compressBlock_lazy_generic()
1619 size_t const mlRep = ZSTD_count(ip+4, ip+4-offset_1, iend) + 4; in ZSTD_compressBlock_lazy_generic()
1623 matchLength = mlRep, offcode = STORE_REPCODE_1, start = ip; in ZSTD_compressBlock_lazy_generic()
1626 const U32 repIndex = (U32)(ip - base) - offset_1; in ZSTD_compressBlock_lazy_generic()
1631 && (MEM_read32(repMatch) == MEM_read32(ip)) ) { in ZSTD_compressBlock_lazy_generic()
1633 … size_t const mlRep = ZSTD_count_2segments(ip+4, repMatch+4, iend, repMatchEnd, prefixLowest) + 4; in ZSTD_compressBlock_lazy_generic()
1637 matchLength = mlRep, offcode = STORE_REPCODE_1, start = ip; in ZSTD_compressBlock_lazy_generic()
1641 … size_t const ml2 = ZSTD_searchMax(ms, ip, iend, &offset2, mls, rowLog, searchMethod, dictMode); in ZSTD_compressBlock_lazy_generic()
1645 matchLength = ml2, offcode = offset2, start = ip; in ZSTD_compressBlock_lazy_generic()
1674 anchor = ip = start + matchLength; in ZSTD_compressBlock_lazy_generic()
1679 while (ip <= ilimit) { in ZSTD_compressBlock_lazy_generic()
1680 U32 const current2 = (U32)(ip-base); in ZSTD_compressBlock_lazy_generic()
1686 && (MEM_read32(repMatch) == MEM_read32(ip)) ) { in ZSTD_compressBlock_lazy_generic()
1688 … matchLength = ZSTD_count_2segments(ip+4, repMatch+4, iend, repEnd2, prefixLowest) + 4; in ZSTD_compressBlock_lazy_generic()
1691 ip += matchLength; in ZSTD_compressBlock_lazy_generic()
1692 anchor = ip; in ZSTD_compressBlock_lazy_generic()
1700 while ( ((ip <= ilimit) & (offset_2>0)) in ZSTD_compressBlock_lazy_generic()
1701 && (MEM_read32(ip) == MEM_read32(ip - offset_2)) ) { in ZSTD_compressBlock_lazy_generic()
1703 matchLength = ZSTD_count(ip+4, ip+4-offset_2, iend) + 4; in ZSTD_compressBlock_lazy_generic()
1706 ip += matchLength; in ZSTD_compressBlock_lazy_generic()
1707 anchor = ip; in ZSTD_compressBlock_lazy_generic()
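
Most ip references in ZSTD_compressBlock_lazy_generic implement the same test: probe a repeat match by comparing 4 bytes at ip against the 4 bytes one repcode offset back, then extend it. A minimal sketch of the noDict-style check follows; read32 and try_repcode1 are illustrative names, and the real loop additionally validates the offset against the window and handles the dictMatchState/extDict cases:

    #include <stdint.h>
    #include <string.h>

    static uint32_t read32(const void* p) { uint32_t v; memcpy(&v, p, sizeof(v)); return v; }

    /* Try repcode 1 at ip.  The caller must guarantee ip+4 <= iend and that
     * ip - offset_1 still points inside the valid window.  Returns the match
     * length (>= 4) or 0 when the 4-byte probe fails. */
    static size_t try_repcode1(const unsigned char* ip, const unsigned char* iend,
                               uint32_t offset_1)
    {
        if (offset_1 == 0) return 0;
        {   const unsigned char* const rep = ip - offset_1;
            size_t len;
            if (read32(ip) != read32(rep)) return 0;
            len = 4;                                   /* probe matched: at least 4 bytes */
            while (ip + len < iend && ip[len] == rep[len]) len++;
            return len;
        }
    }
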
1871 const BYTE* ip = istart; in ZSTD_compressBlock_lazy_extDict_generic() local
1890 ip += (ip == prefixStart); in ZSTD_compressBlock_lazy_extDict_generic()
1904 while (ip < ilimit) { in ZSTD_compressBlock_lazy_extDict_generic()
1907 const BYTE* start=ip+1; in ZSTD_compressBlock_lazy_extDict_generic()
1908 U32 curr = (U32)(ip-base); in ZSTD_compressBlock_lazy_extDict_generic()
1917 if (MEM_read32(ip+1) == MEM_read32(repMatch)) { in ZSTD_compressBlock_lazy_extDict_generic()
1920 … matchLength = ZSTD_count_2segments(ip+1+4, repMatch+4, iend, repEnd, prefixStart) + 4; in ZSTD_compressBlock_lazy_extDict_generic()
1926 …size_t const ml2 = ZSTD_searchMax(ms, ip, iend, &offsetFound, mls, rowLog, searchMethod, ZSTD_extD… in ZSTD_compressBlock_lazy_extDict_generic()
1928 matchLength = ml2, start = ip, offcode=offsetFound; in ZSTD_compressBlock_lazy_extDict_generic()
1932 … ip += ((ip-anchor) >> kSearchStrength) + 1; /* jump faster over incompressible sections */ in ZSTD_compressBlock_lazy_extDict_generic()
1938 while (ip<ilimit) { in ZSTD_compressBlock_lazy_extDict_generic()
1939 ip ++; in ZSTD_compressBlock_lazy_extDict_generic()
1949 if (MEM_read32(ip) == MEM_read32(repMatch)) { in ZSTD_compressBlock_lazy_extDict_generic()
1952 … size_t const repLength = ZSTD_count_2segments(ip+4, repMatch+4, iend, repEnd, prefixStart) + 4; in ZSTD_compressBlock_lazy_extDict_generic()
1956 matchLength = repLength, offcode = STORE_REPCODE_1, start = ip; in ZSTD_compressBlock_lazy_extDict_generic()
1961 …size_t const ml2 = ZSTD_searchMax(ms, ip, iend, &offset2, mls, rowLog, searchMethod, ZSTD_extDict); in ZSTD_compressBlock_lazy_extDict_generic()
1965 matchLength = ml2, offcode = offset2, start = ip; in ZSTD_compressBlock_lazy_extDict_generic()
1970 if ((depth==2) && (ip<ilimit)) { in ZSTD_compressBlock_lazy_extDict_generic()
1971 ip ++; in ZSTD_compressBlock_lazy_extDict_generic()
1981 if (MEM_read32(ip) == MEM_read32(repMatch)) { in ZSTD_compressBlock_lazy_extDict_generic()
1984 … size_t const repLength = ZSTD_count_2segments(ip+4, repMatch+4, iend, repEnd, prefixStart) + 4; in ZSTD_compressBlock_lazy_extDict_generic()
1988 matchLength = repLength, offcode = STORE_REPCODE_1, start = ip; in ZSTD_compressBlock_lazy_extDict_generic()
1993 …size_t const ml2 = ZSTD_searchMax(ms, ip, iend, &offset2, mls, rowLog, searchMethod, ZSTD_extDict); in ZSTD_compressBlock_lazy_extDict_generic()
1997 matchLength = ml2, offcode = offset2, start = ip; in ZSTD_compressBlock_lazy_extDict_generic()
2016 anchor = ip = start + matchLength; in ZSTD_compressBlock_lazy_extDict_generic()
2020 while (ip <= ilimit) { in ZSTD_compressBlock_lazy_extDict_generic()
2021 const U32 repCurrent = (U32)(ip-base); in ZSTD_compressBlock_lazy_extDict_generic()
2028 if (MEM_read32(ip) == MEM_read32(repMatch)) { in ZSTD_compressBlock_lazy_extDict_generic()
2031 matchLength = ZSTD_count_2segments(ip+4, repMatch+4, iend, repEnd, prefixStart) + 4; in ZSTD_compressBlock_lazy_extDict_generic()
2034 ip += matchLength; in ZSTD_compressBlock_lazy_extDict_generic()
2035 anchor = ip; in ZSTD_compressBlock_lazy_extDict_generic()
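
In the extDict variant the repcode candidate may live in the old segment, so an index is first mapped to a pointer through dictBase or base before the MEM_read32(ip)/MEM_read32(repMatch) comparison. That mapping does not reference ip and therefore does not appear in this listing; the sketch below shows the usual rule as an assumption, with index_to_ptr as an illustrative name:

    #include <stdint.h>

    /* Map a window index to a byte pointer: indices below dictLimit address
     * the old (extDict) segment via dictBase, indices at or above it address
     * the current prefix via base. */
    static const unsigned char* index_to_ptr(uint32_t idx, uint32_t dictLimit,
                                             const unsigned char* base,
                                             const unsigned char* dictBase)
    {
        return (idx < dictLimit) ? dictBase + idx : base + idx;
    }

For example, a repcode candidate at index repCurrent - offset_2 would be resolved with index_to_ptr(repCurrent - offset_2, dictLimit, base, dictBase) before the 4-byte comparison and the two-segment count.
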