Revert "Use std::min() and std::max()"
This somehow breaks TRT.
bug: 13350753
This reverts commit cf612a3abf.
Change-Id: I812f067e7cc8106b054527732dc6fe4efd7cc0fe
parent cf612a3abf
commit f63000abea
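
Context note (not part of the patch): reverting cf612a3abf means the sources go back to the
project-local min()/max() templates declared in defines.h (first hunk below) instead of
std::min()/std::max() from <algorithm>. A minimal, self-contained sketch of those templates and a
typical call site, with AK_FORCE_INLINE simplified to plain inline for illustration:

#include <cstdio>

// In the real defines.h, AK_FORCE_INLINE expands to a compiler-specific
// always-inline attribute; plain `inline` is enough for this sketch.
#define AK_FORCE_INLINE inline

// The templates this revert reintroduces: both arguments must deduce to the
// same type T, and a const reference to the chosen argument is returned.
template<typename T> AK_FORCE_INLINE const T &min(const T &a, const T &b) { return a < b ? a : b; }
template<typename T> AK_FORCE_INLINE const T &max(const T &a, const T &b) { return a > b ? a : b; }

int main() {
    // Mirrors call sites in the diff, e.g. dic_node_priority_queue.h:
    //     mMaxSize = min(maxSize, mCapacity);
    const int maxSize = 5;
    const int capacity = 3;
    printf("min = %d, max = %d\n", min(maxSize, capacity), max(maxSize, capacity));
    return 0;
}

Because T is deduced from both arguments, mixed-type calls need an explicit cast, which is why the
diff keeps casts such as static_cast<int16_t>(MAX_RESULTS) and static_cast<int>(mergedNodeCodePointCount).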
@@ -344,6 +344,9 @@ static inline void prof_out(void) {
 #define MAX_POINTER_COUNT 1
 #define MAX_POINTER_COUNT_G 2
 
+template<typename T> AK_FORCE_INLINE const T &min(const T &a, const T &b) { return a < b ? a : b; }
+template<typename T> AK_FORCE_INLINE const T &max(const T &a, const T &b) { return a > b ? a : b; }
+
 // DEBUG
 #define INPUTLENGTH_FOR_DEBUG (-1)
 #define MIN_OUTPUT_INDEX_FOR_DEBUG (-1)

@@ -17,7 +17,6 @@
 #ifndef LATINIME_DIC_NODE_PRIORITY_QUEUE_H
 #define LATINIME_DIC_NODE_PRIORITY_QUEUE_H
 
-#include <algorithm>
 #include <queue>
 #include <vector>
 
@@ -50,7 +49,7 @@ class DicNodePriorityQueue : public DicNodeReleaseListener {
 
     AK_FORCE_INLINE void setMaxSize(const int maxSize) {
        ASSERT(maxSize <= mCapacity);
-        mMaxSize = std::min(maxSize, mCapacity);
+        mMaxSize = min(maxSize, mCapacity);
     }
 
     AK_FORCE_INLINE void clearAndResizeToCapacity() {

@@ -16,7 +16,6 @@
 
 #include "suggest/core/dicnode/dic_node_utils.h"
 
-#include <algorithm>
 #include <cstring>
 
 #include "suggest/core/dicnode/dic_node.h"
@@ -118,7 +117,7 @@ namespace latinime {
         }
         actualLength0 = i + 1;
     }
-    actualLength0 = std::min(actualLength0, MAX_WORD_LENGTH);
+    actualLength0 = min(actualLength0, MAX_WORD_LENGTH);
     memmove(dest, src0, actualLength0 * sizeof(dest[0]));
     if (!src1 || length1 == 0) {
         return actualLength0;
@@ -130,7 +129,7 @@ namespace latinime {
         }
         actualLength1 = i + 1;
     }
-    actualLength1 = std::min(actualLength1, MAX_WORD_LENGTH - actualLength0);
+    actualLength1 = min(actualLength1, MAX_WORD_LENGTH - actualLength0);
     memmove(&dest[actualLength0], src1, actualLength1 * sizeof(dest[0]));
     return actualLength0 + actualLength1;
 }

@@ -17,7 +17,6 @@
 #ifndef LATINIME_DIC_NODES_CACHE_H
 #define LATINIME_DIC_NODES_CACHE_H
 
-#include <algorithm>
 #include <stdint.h>
 
 #include "defines.h"
@@ -52,7 +51,7 @@ class DicNodesCache {
         // We want to use the max capacity for the current active dic node queue.
         mActiveDicNodes->clearAndResizeToCapacity();
         // nextActiveSize is used to limit the next iteration's active dic node size.
-        const int nextActiveSizeFittingToTheCapacity = std::min(nextActiveSize, getCacheCapacity());
+        const int nextActiveSizeFittingToTheCapacity = min(nextActiveSize, getCacheCapacity());
         mNextActiveDicNodes->clearAndResize(nextActiveSizeFittingToTheCapacity);
         mTerminalDicNodes->clearAndResize(terminalSize);
         // We want to use the max capacity for the cached dic nodes that will be used for the

@@ -17,7 +17,6 @@
 #ifndef LATINIME_DIC_NODE_STATE_OUTPUT_H
 #define LATINIME_DIC_NODE_STATE_OUTPUT_H
 
-#include <algorithm>
 #include <cstring> // for memmove()
 #include <stdint.h>
 
@@ -50,8 +49,7 @@ class DicNodeStateOutput {
     void addMergedNodeCodePoints(const uint16_t mergedNodeCodePointCount,
             const int *const mergedNodeCodePoints) {
         if (mergedNodeCodePoints) {
-            const int additionalCodePointCount = std::min(
-                    static_cast<int>(mergedNodeCodePointCount),
+            const int additionalCodePointCount = min(static_cast<int>(mergedNodeCodePointCount),
                     MAX_WORD_LENGTH - mOutputtedCodePointCount);
             memmove(&mCodePointsBuf[mOutputtedCodePointCount], mergedNodeCodePoints,
                     additionalCodePointCount * sizeof(mCodePointsBuf[0]));

@@ -17,7 +17,6 @@
 #ifndef LATINIME_DIC_NODE_STATE_PREVWORD_H
 #define LATINIME_DIC_NODE_STATE_PREVWORD_H
 
-#include <algorithm>
 #include <cstring> // for memset() and memmove()
 #include <stdint.h>
 
@@ -70,7 +69,7 @@ class DicNodeStatePrevWord {
             const int prevWordNodePos, const int *const src0, const int16_t length0,
             const int *const src1, const int16_t length1,
             const int prevWordSecondWordFirstInputIndex, const int lastInputIndex) {
-        mPrevWordCount = std::min(prevWordCount, static_cast<int16_t>(MAX_RESULTS));
+        mPrevWordCount = min(prevWordCount, static_cast<int16_t>(MAX_RESULTS));
         mPrevWordProbability = prevWordProbability;
         mPrevWordPtNodePos = prevWordNodePos;
         int twoWordsLen =

@@ -17,7 +17,6 @@
 #ifndef LATINIME_DIC_NODE_STATE_SCORING_H
 #define LATINIME_DIC_NODE_STATE_SCORING_H
 
-#include <algorithm>
 #include <stdint.h>
 
 #include "defines.h"
@@ -200,7 +199,7 @@ class DicNodeStateScoring {
             mNormalizedCompoundDistance = mSpatialDistance + mLanguageDistance;
         } else {
             mNormalizedCompoundDistance = (mSpatialDistance + mLanguageDistance)
-                    / static_cast<float>(std::max(1, totalInputIndex));
+                    / static_cast<float>(max(1, totalInputIndex));
         }
     }
 };

@@ -14,13 +14,12 @@
  * limitations under the License.
  */
 
+#include <cstring>
+
 #define LOG_TAG "LatinIME: bigram_dictionary.cpp"
 
 #include "bigram_dictionary.h"
 
-#include <algorithm>
-#include <cstring>
-
 #include "defines.h"
 #include "suggest/core/dictionary/binary_dictionary_bigrams_iterator.h"
 #include "suggest/core/dictionary/dictionary.h"
@@ -143,7 +142,7 @@ int BigramDictionary::getPredictions(const int *prevWord, const int prevWordLeng
                 outBigramCodePoints, outputTypes);
         ++bigramCount;
     }
-    return std::min(bigramCount, MAX_RESULTS);
+    return min(bigramCount, MAX_RESULTS);
 }
 
 // Returns a pointer to the start of the bigram list.

@@ -18,7 +18,6 @@
 
 #include "suggest/core/layout/proximity_info.h"
 
-#include <algorithm>
 #include <cstring>
 #include <cmath>
 
@@ -64,7 +63,7 @@ ProximityInfo::ProximityInfo(JNIEnv *env, const jstring localeJStr,
                  static_cast<float>(mostCommonKeyWidth))),
          CELL_WIDTH((keyboardWidth + gridWidth - 1) / gridWidth),
          CELL_HEIGHT((keyboardHeight + gridHeight - 1) / gridHeight),
-         KEY_COUNT(std::min(keyCount, MAX_KEY_COUNT_IN_A_KEYBOARD)),
+         KEY_COUNT(min(keyCount, MAX_KEY_COUNT_IN_A_KEYBOARD)),
          KEYBOARD_WIDTH(keyboardWidth), KEYBOARD_HEIGHT(keyboardHeight),
          KEYBOARD_HYPOTENUSE(hypotf(KEYBOARD_WIDTH, KEYBOARD_HEIGHT)),
          HAS_TOUCH_POSITION_CORRECTION_DATA(keyCount > 0 && keyXCoordinates && keyYCoordinates

@@ -18,7 +18,6 @@
 
 #include "suggest/core/layout/proximity_info_state.h"
 
-#include <algorithm>
 #include <cstring> // for memset() and memmove()
 #include <sstream> // for debug prints
 #include <vector>
@@ -172,7 +171,7 @@ float ProximityInfoState::getPointToKeyLength(
     const int keyId = mProximityInfo->getKeyIndexOf(codePoint);
     if (keyId != NOT_AN_INDEX) {
         const int index = inputIndex * mProximityInfo->getKeyCount() + keyId;
-        return std::min(mSampledNormalizedSquaredLengthCache[index], mMaxPointToKeyLength);
+        return min(mSampledNormalizedSquaredLengthCache[index], mMaxPointToKeyLength);
     }
     if (CharUtils::isIntentionalOmissionCodePoint(codePoint)) {
         return 0.0f;

@@ -241,7 +241,7 @@ namespace latinime {
         // Calculate velocity by using distances and durations of
         // ProximityInfoParams::NUM_POINTS_FOR_SPEED_CALCULATION points for both forward and
         // backward.
-        const int forwardNumPoints = std::min(inputSize - 1,
+        const int forwardNumPoints = min(inputSize - 1,
                 index + ProximityInfoParams::NUM_POINTS_FOR_SPEED_CALCULATION);
         for (int j = index; j < forwardNumPoints; ++j) {
             if (i < sampledInputSize - 1 && j >= (*sampledInputIndice)[i + 1]) {
@@ -251,7 +251,7 @@ namespace latinime {
                     xCoordinates[j + 1], yCoordinates[j + 1]);
             duration += times[j + 1] - times[j];
         }
-        const int backwardNumPoints = std::max(0,
+        const int backwardNumPoints = max(0,
                 index - ProximityInfoParams::NUM_POINTS_FOR_SPEED_CALCULATION);
         for (int j = index - 1; j >= backwardNumPoints; --j) {
             if (i > 0 && j < (*sampledInputIndice)[i - 1]) {
@@ -273,7 +273,7 @@ namespace latinime {
 
     // Direction calculation.
     sampledDirections->resize(sampledInputSize - 1);
-    for (int i = std::max(0, lastSavedInputSize - 1); i < sampledInputSize - 1; ++i) {
+    for (int i = max(0, lastSavedInputSize - 1); i < sampledInputSize - 1; ++i) {
         (*sampledDirections)[i] = getDirection(sampledInputXs, sampledInputYs, i, i + 1);
     }
     return averageSpeed;
@@ -610,7 +610,7 @@ namespace latinime {
         const int inputIndex, const int keyId) {
     if (keyId != NOT_AN_INDEX) {
         const int index = inputIndex * keyCount + keyId;
-        return std::min((*sampledNormalizedSquaredLengthCache)[index], maxPointToKeyLength);
+        return min((*sampledNormalizedSquaredLengthCache)[index], maxPointToKeyLength);
     }
     // If the char is not a key on the keyboard then return the max length.
     return static_cast<float>(MAX_VALUE_FOR_WEIGHTING);
@@ -651,13 +651,13 @@ namespace latinime {
         }
 
         if (i == 0) {
-            skipProbability *= std::min(1.0f,
+            skipProbability *= min(1.0f,
                     nearestKeyDistance * ProximityInfoParams::NEAREST_DISTANCE_WEIGHT
                             + ProximityInfoParams::NEAREST_DISTANCE_BIAS);
             // Promote the first point
             skipProbability *= ProximityInfoParams::SKIP_FIRST_POINT_PROBABILITY;
         } else if (i == sampledInputSize - 1) {
-            skipProbability *= std::min(1.0f,
+            skipProbability *= min(1.0f,
                     nearestKeyDistance * ProximityInfoParams::NEAREST_DISTANCE_WEIGHT_FOR_LAST
                             + ProximityInfoParams::NEAREST_DISTANCE_BIAS_FOR_LAST);
             // Promote the last point
@@ -668,17 +668,17 @@ namespace latinime {
                     && speedRate
                             < (*sampledSpeedRates)[i + 1] - ProximityInfoParams::SPEED_MARGIN) {
                 if (currentAngle < ProximityInfoParams::CORNER_ANGLE_THRESHOLD) {
-                    skipProbability *= std::min(1.0f, speedRate
+                    skipProbability *= min(1.0f, speedRate
                             * ProximityInfoParams::SLOW_STRAIGHT_WEIGHT_FOR_SKIP_PROBABILITY);
                 } else {
                     // If the angle is small enough, we promote this point more. (e.g. pit vs put)
-                    skipProbability *= std::min(1.0f,
+                    skipProbability *= min(1.0f,
                             speedRate * ProximityInfoParams::SPEED_WEIGHT_FOR_SKIP_PROBABILITY
                                     + ProximityInfoParams::MIN_SPEED_RATE_FOR_SKIP_PROBABILITY);
                 }
             }
 
-            skipProbability *= std::min(1.0f,
+            skipProbability *= min(1.0f,
                     speedRate * nearestKeyDistance * ProximityInfoParams::NEAREST_DISTANCE_WEIGHT
                             + ProximityInfoParams::NEAREST_DISTANCE_BIAS);
 
@@ -708,10 +708,10 @@ namespace latinime {
         // (1.0f - skipProbability).
         const float inputCharProbability = 1.0f - skipProbability;
 
-        const float speedxAngleRate = std::min(speedRate * currentAngle / M_PI_F
+        const float speedxAngleRate = min(speedRate * currentAngle / M_PI_F
                 * ProximityInfoParams::SPEEDxANGLE_WEIGHT_FOR_STANDARD_DEVIATION,
                 ProximityInfoParams::MAX_SPEEDxANGLE_RATE_FOR_STANDARD_DEVIATION);
-        const float speedxNearestKeyDistanceRate = std::min(speedRate * nearestKeyDistance
+        const float speedxNearestKeyDistanceRate = min(speedRate * nearestKeyDistance
                 * ProximityInfoParams::SPEEDxNEAREST_WEIGHT_FOR_STANDARD_DEVIATION,
                 ProximityInfoParams::MAX_SPEEDxNEAREST_RATE_FOR_STANDARD_DEVIATION);
         const float sigma = speedxAngleRate + speedxNearestKeyDistanceRate
@@ -828,7 +828,7 @@ namespace latinime {
 
     // Decrease key probabilities of points which don't have the highest probability of that key
    // among nearby points. Probabilities of the first point and the last point are not suppressed.
-    for (int i = std::max(start, 1); i < sampledInputSize; ++i) {
+    for (int i = max(start, 1); i < sampledInputSize; ++i) {
         for (int j = i + 1; j < sampledInputSize; ++j) {
             if (!suppressCharProbabilities(
                     mostCommonKeyWidth, sampledInputSize, sampledLengthCache, i, j,
@@ -836,7 +836,7 @@ namespace latinime {
                 break;
             }
         }
-        for (int j = i - 1; j >= std::max(start, 0); --j) {
+        for (int j = i - 1; j >= max(start, 0); --j) {
             if (!suppressCharProbabilities(
                     mostCommonKeyWidth, sampledInputSize, sampledLengthCache, i, j,
                     charProbabilities)) {
@@ -879,7 +879,7 @@ namespace latinime {
         if (i >= lastSavedInputSize) {
             (*sampledSearchKeySets)[i].reset();
         }
-        for (int j = std::max(i, lastSavedInputSize); j < sampledInputSize; ++j) {
+        for (int j = max(i, lastSavedInputSize); j < sampledInputSize; ++j) {
             // TODO: Investigate if this is required. This may not fail.
             if ((*sampledLengthCache)[j] - (*sampledLengthCache)[i] >= readForwordLength) {
                 break;
@@ -930,7 +930,7 @@ namespace latinime {
     (*charProbabilities)[index0][NOT_AN_INDEX] += suppression;
 
     // Add the probability of the same key nearby index1
-    const float probabilityGain = std::min(suppression
+    const float probabilityGain = min(suppression
             * ProximityInfoParams::SUPPRESSION_WEIGHT_FOR_PROBABILITY_GAIN,
             (*charProbabilities)[index1][NOT_AN_INDEX]
                     * ProximityInfoParams::SKIP_PROBABALITY_WEIGHT_FOR_PROBABILITY_GAIN);

@@ -17,8 +17,6 @@
 #ifndef LATINIME_TOUCH_POSITION_CORRECTION_UTILS_H
 #define LATINIME_TOUCH_POSITION_CORRECTION_UTILS_H
 
-#include <algorithm>
-
 #include "defines.h"
 #include "suggest/core/layout/proximity_info_params.h"
 
@@ -36,7 +34,7 @@ class TouchPositionCorrectionUtils {
         static const float R2 = 1.0f;
         const float x = normalizedSquaredDistance;
         if (!isTouchPositionCorrectionEnabled) {
-            return std::min(C, x);
+            return min(C, x);
         }
 
         // factor is a piecewise linear function like:

@@ -16,8 +16,6 @@
 
 #include "suggest/core/result/suggestions_output_utils.h"
 
-#include <algorithm>
-
 #include "suggest/core/dicnode/dic_node.h"
 #include "suggest/core/dicnode/dic_node_utils.h"
 #include "suggest/core/dictionary/binary_dictionary_shortcut_iterator.h"
@@ -38,7 +36,7 @@ const int SuggestionsOutputUtils::MIN_LEN_FOR_MULTI_WORD_AUTOCORRECT = 16;
 #if DEBUG_EVALUATE_MOST_PROBABLE_STRING
     const int terminalSize = 0;
 #else
-    const int terminalSize = std::min(MAX_RESULTS,
+    const int terminalSize = min(MAX_RESULTS,
             static_cast<int>(traverseSession->getDicTraverseCache()->terminalSize()));
 #endif
     DicNode terminals[MAX_RESULTS]; // Avoiding non-POD variable length array
@@ -247,12 +245,12 @@ const int SuggestionsOutputUtils::MIN_LEN_FOR_MULTI_WORD_AUTOCORRECT = 16;
                 // shortcut entry's score == its base entry's score - 1
                 shortcutScore = finalScore;
                 // Protection against int underflow
-                shortcutScore = std::max(S_INT_MIN + 1, shortcutScore) - 1;
+                shortcutScore = max(S_INT_MIN + 1, shortcutScore) - 1;
                 kind = Dictionary::KIND_SHORTCUT;
             }
             outputTypes[outputWordIndex] = kind;
             outputScores[outputWordIndex] = shortcutScore;
-            outputScores[outputWordIndex] = std::max(S_INT_MIN + 1, shortcutScore) - 1;
+            outputScores[outputWordIndex] = max(S_INT_MIN + 1, shortcutScore) - 1;
             const int startIndex2 = outputWordIndex * MAX_WORD_LENGTH;
             DicNodeUtils::appendTwoWords(0, 0, shortcutTarget, shortcutTargetStringLength,
                     &outputCodePoints[startIndex2]);

@@ -16,8 +16,6 @@
 
 #include "suggest/policyimpl/dictionary/header/header_policy.h"
 
-#include <algorithm>
-
 namespace latinime {
 
 // Note that these are corresponding definitions in Java side in DictionaryHeader.
@@ -74,7 +72,7 @@ void HeaderPolicy::readHeaderValueOrQuestionMark(const char *const key, int *out
         outValue[1] = '\0';
         return;
     }
-    const int terminalIndex = std::min(static_cast<int>(it->second.size()), outValueSize - 1);
+    const int terminalIndex = min(static_cast<int>(it->second.size()), outValueSize - 1);
     for (int i = 0; i < terminalIndex; ++i) {
         outValue[i] = it->second[i];
     }

@@ -16,7 +16,6 @@
 
 #include "suggest/policyimpl/dictionary/utils/forgetting_curve_utils.h"
 
-#include <algorithm>
 #include <cmath>
 #include <stdlib.h>
 
@@ -73,7 +72,7 @@ const ForgettingCurveUtils::ProbabilityTable ForgettingCurveUtils::sProbabilityT
             headerPolicy->getForgettingCurveDurationToLevelDown());
     return sProbabilityTable.getProbability(
             headerPolicy->getForgettingCurveProbabilityValuesTableId(), historicalInfo->getLevel(),
-            std::min(std::max(elapsedTimeStepCount, 0), MAX_ELAPSED_TIME_STEP_COUNT));
+            min(max(elapsedTimeStepCount, 0), MAX_ELAPSED_TIME_STEP_COUNT));
 }
 
 /* static */ int ForgettingCurveUtils::getProbability(const int unigramProbability,
@@ -81,11 +80,11 @@ const ForgettingCurveUtils::ProbabilityTable ForgettingCurveUtils::sProbabilityT
     if (unigramProbability == NOT_A_PROBABILITY) {
         return NOT_A_PROBABILITY;
     } else if (bigramProbability == NOT_A_PROBABILITY) {
-        return std::min(backoff(unigramProbability), MAX_PROBABILITY);
+        return min(backoff(unigramProbability), MAX_PROBABILITY);
     } else {
         // TODO: Investigate better way to handle bigram probability.
-        return std::min(std::max(unigramProbability,
-                bigramProbability + MULTIPLIER_TWO_IN_PROBABILITY_SCALE), MAX_PROBABILITY);
+        return min(max(unigramProbability, bigramProbability + MULTIPLIER_TWO_IN_PROBABILITY_SCALE),
+                MAX_PROBABILITY);
     }
 }
 
@@ -184,7 +183,7 @@ ForgettingCurveUtils::ProbabilityTable::ProbabilityTable() : mTables() {
                         -1.0f * static_cast<float>(timeStepCount)
                                 / static_cast<float>(MAX_ELAPSED_TIME_STEP_COUNT + 1));
                 mTables[tableId][level][timeStepCount] =
-                        std::min(std::max(static_cast<int>(probability), 1), MAX_PROBABILITY);
+                        min(max(static_cast<int>(probability), 1), MAX_PROBABILITY);
             }
         }
     }

@@ -17,8 +17,6 @@
 #ifndef LATINIME_EDIT_DISTANCE_H
 #define LATINIME_EDIT_DISTANCE_H
 
-#include <algorithm>
-
 #include "defines.h"
 #include "suggest/policyimpl/utils/edit_distance_policy.h"
 
@@ -40,13 +38,13 @@ class EditDistance {
 
         for (int i = 0; i < beforeLength; ++i) {
             for (int j = 0; j < afterLength; ++j) {
-                dp[(afterLength + 1) * (i + 1) + (j + 1)] = std::min(
+                dp[(afterLength + 1) * (i + 1) + (j + 1)] = min(
                         dp[(afterLength + 1) * i + (j + 1)] + policy->getInsertionCost(i, j),
-                        std::min(
-                                dp[(afterLength + 1) * (i + 1) + j] + policy->getDeletionCost(i, j),
-                                dp[(afterLength + 1) * i + j] + policy->getSubstitutionCost(i, j)));
+                        min(dp[(afterLength + 1) * (i + 1) + j] + policy->getDeletionCost(i, j),
+                                dp[(afterLength + 1) * i + j]
+                                        + policy->getSubstitutionCost(i, j)));
                 if (policy->allowTransposition(i, j)) {
-                    dp[(afterLength + 1) * (i + 1) + (j + 1)] = std::min(
+                    dp[(afterLength + 1) * (i + 1) + (j + 1)] = min(
                             dp[(afterLength + 1) * (i + 1) + (j + 1)],
                             dp[(afterLength + 1) * (i - 1) + (j - 1)]
                                     + policy->getTranspositionCost(i, j));

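Aside (not from the patch): the edit_distance.h hunk above only swaps std::min() for the local min()
inside the dynamic-programming update; the recurrence itself is the usual Damerau-Levenshtein one
over a flattened (beforeLength + 1) x (afterLength + 1) table. A simplified, self-contained sketch
using the same flattened indexing but unit costs in place of the EditDistancePolicy callbacks:

#include <algorithm>
#include <cstdio>
#include <cstring>
#include <vector>

// Simplified illustration only: unit insertion/deletion/substitution costs and
// an adjacent-transposition step, not the project's policy-based costs.
static int editDistance(const char *before, const char *after) {
    const int beforeLength = static_cast<int>(strlen(before));
    const int afterLength = static_cast<int>(strlen(after));
    std::vector<int> dp((beforeLength + 1) * (afterLength + 1));
    for (int i = 0; i <= beforeLength; ++i) dp[(afterLength + 1) * i] = i;  // i deletions
    for (int j = 0; j <= afterLength; ++j) dp[j] = j;                       // j insertions
    for (int i = 0; i < beforeLength; ++i) {
        for (int j = 0; j < afterLength; ++j) {
            const int substitutionCost = (before[i] == after[j]) ? 0 : 1;
            dp[(afterLength + 1) * (i + 1) + (j + 1)] = std::min(
                    dp[(afterLength + 1) * i + (j + 1)] + 1,          // insertion
                    std::min(dp[(afterLength + 1) * (i + 1) + j] + 1, // deletion
                            dp[(afterLength + 1) * i + j] + substitutionCost));
            if (i > 0 && j > 0 && before[i] == after[j - 1] && before[i - 1] == after[j]) {
                dp[(afterLength + 1) * (i + 1) + (j + 1)] = std::min(
                        dp[(afterLength + 1) * (i + 1) + (j + 1)],
                        dp[(afterLength + 1) * (i - 1) + (j - 1)] + 1); // transposition
            }
        }
    }
    return dp[(afterLength + 1) * beforeLength + afterLength];
}

int main() {
    printf("%d\n", editDistance("kitten", "sitting")); // 3
    printf("%d\n", editDistance("pit", "pti"));        // 1 (adjacent transposition)
    return 0;
}
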
@@ -16,7 +16,6 @@
 
 #include "utils/autocorrection_threshold_utils.h"
 
-#include <algorithm>
 #include <cmath>
 
 #include "defines.h"
@@ -100,7 +99,7 @@ const int AutocorrectionThresholdUtils::FULL_WORD_MULTIPLIER = 2;
     const float maxScore = score >= S_INT_MAX ? static_cast<float>(S_INT_MAX)
             : static_cast<float>(MAX_INITIAL_SCORE)
                     * powf(static_cast<float>(TYPED_LETTER_MULTIPLIER),
-                            static_cast<float>(std::min(beforeLength, afterLength - spaceCount)))
+                            static_cast<float>(min(beforeLength, afterLength - spaceCount)))
                     * static_cast<float>(FULL_WORD_MULTIPLIER);
 
     return (static_cast<float>(score) / maxScore) * weight;