Searched refs:bitCost (Results 1 – 2 of 2) sorted by relevance
150      assert(bitCost <= scaleLog);  in ZSTD_rescaleFreqs()
151  … optPtr->litFreq[lit] = bitCost ? 1 << (scaleLog-bitCost) : 1 /*minimum to calculate cost*/;  in ZSTD_rescaleFreqs()
161      U32 const bitCost = FSE_getMaxNbBits(llstate.symbolTT, ll);  in ZSTD_rescaleFreqs()  local
162      assert(bitCost < scaleLog);  in ZSTD_rescaleFreqs()
163  … optPtr->litLengthFreq[ll] = bitCost ? 1 << (scaleLog-bitCost) : 1 /*minimum to calculate cost*/;  in ZSTD_rescaleFreqs()
173      U32 const bitCost = FSE_getMaxNbBits(mlstate.symbolTT, ml);  in ZSTD_rescaleFreqs()  local
174      assert(bitCost < scaleLog);  in ZSTD_rescaleFreqs()
175  … optPtr->matchLengthFreq[ml] = bitCost ? 1 << (scaleLog-bitCost) : 1 /*minimum to calculate cost*/;  in ZSTD_rescaleFreqs()
185      U32 const bitCost = FSE_getMaxNbBits(ofstate.symbolTT, of);  in ZSTD_rescaleFreqs()  local
186      assert(bitCost < scaleLog);  in ZSTD_rescaleFreqs()
[all …]
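All of the hits above follow the same pattern inside ZSTD_rescaleFreqs() (part of zstd's optimal parser): the maximum bit cost that FSE_getMaxNbBits() reports for a symbol is converted back into a pseudo-frequency, freq = 1 << (scaleLog - bitCost), with a floor of 1 so every symbol keeps a finite, non-zero weight. The following is a minimal standalone sketch of that conversion; the function and variable names are assumptions for illustration, not zstd's internal API.

#include <assert.h>
#include <stdio.h>

/* Hypothetical sketch: turn per-symbol "max bit cost" values (as
 * FSE_getMaxNbBits() would report them) back into pseudo-frequencies on a
 * 2^scaleLog scale, mirroring the pattern seen in ZSTD_rescaleFreqs():
 *   freq[s] = bitCost ? 1 << (scaleLog - bitCost) : 1
 * A cheaper symbol (fewer bits) gets a proportionally larger frequency. */
static void rescale_freqs_from_bitcosts(unsigned* freq,
                                        const unsigned* bitCosts,
                                        unsigned nbSymbols,
                                        unsigned scaleLog)
{
    for (unsigned s = 0; s < nbSymbols; s++) {
        unsigned const bitCost = bitCosts[s];
        assert(bitCost <= scaleLog);          /* cost never exceeds the scale */
        freq[s] = bitCost ? 1u << (scaleLog - bitCost)
                          : 1 /* minimum, so the symbol still has a cost */;
    }
}

int main(void)
{
    /* Example: four symbols costing 2, 3, 5 and 0 bits on a 2^8 scale. */
    unsigned const bitCosts[4] = { 2, 3, 5, 0 };
    unsigned freq[4];
    rescale_freqs_from_bitcosts(freq, bitCosts, 4, 8);
    for (unsigned s = 0; s < 4; s++)
        printf("symbol %u: bitCost=%u -> freq=%u\n", s, bitCosts[s], freq[s]);
    return 0;
}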
122      unsigned const bitCost = FSE_bitCost(cstate.symbolTT, tableLog, s, kAccuracyLog);  in ZSTD_fseBitCost()  local
125      if (bitCost >= badCost) {  in ZSTD_fseBitCost()
129      cost += (size_t)count[s] * bitCost;  in ZSTD_fseBitCost()
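The second result, ZSTD_fseBitCost(), prices an entire symbol distribution against an existing FSE table: it accumulates count[s] * bitCost over all symbols, where the per-symbol bit cost comes from FSE_bitCost() in fixed point (scaled by 1 << kAccuracyLog), and treats a cost at or above badCost as "this table cannot encode the symbol". A minimal sketch of that accumulation follows; the names and the error convention are assumptions for illustration, not zstd's exact API.

#include <stddef.h>
#include <stdio.h>

/* Hypothetical sketch of the accumulation pattern visible in ZSTD_fseBitCost():
 * total cost = sum over symbols of count[s] * bitCost[s], where bitCost is a
 * fixed-point bit count (scaled by 1 << accuracyLog in the real code).
 * A per-symbol cost reaching `badCost` means the table cannot encode the
 * distribution, reported here as (size_t)-1 (assumed error convention). */
static size_t sum_fse_bit_cost(const unsigned* count,
                               const unsigned* bitCost,
                               unsigned maxSymbol,
                               unsigned badCost)
{
    size_t cost = 0;
    for (unsigned s = 0; s <= maxSymbol; s++) {
        if (count[s] == 0) continue;                   /* unused symbols cost nothing */
        if (bitCost[s] >= badCost) return (size_t)-1;  /* table can't encode symbol s */
        cost += (size_t)count[s] * bitCost[s];
    }
    return cost;
}

int main(void)
{
    unsigned const count[3]   = { 10, 5, 1 };         /* symbol occurrence counts   */
    unsigned const bitCost[3] = { 256, 512, 1024 };   /* e.g. bits scaled by 1 << 8 */
    size_t const total = sum_fse_bit_cost(count, bitCost, 2, 1u << 13);
    printf("total scaled cost = %zu\n", total);
    return 0;
}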