Rename probability to unigramProbability.

Bug: 14425059
Change-Id: I6a204c3b8fb257d037ad95a1a455ae6fb89068fd

parent d028294890
commit 9c42ad47d4
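Summary (editor's note, not part of the original change description): this is a mechanical rename. The per-node unigram probability stored by DicNodeProperties and exposed by DicNode is renamed from probability / mProbability / getProbability() to unigramProbability / mUnigramProbability / getUnigramProbability(), and the callers in the hunks below are updated accordingly. As a minimal sketch of the resulting accessor shape, here is a trimmed-down, hypothetical stand-in class (NodePropsSketch and its constant are assumptions for illustration, not LatinIME code):

    // Illustrative sketch only: shape of the renamed field and accessor.
    #include <algorithm>

    namespace sketch {
    constexpr int NOT_A_PROBABILITY = -1;  // assumption: sentinel value like LatinIME's

    class NodePropsSketch {
     public:
        void init(const int unigramProbability) { mUnigramProbability = unigramProbability; }
        // Renamed accessor: callers now ask explicitly for the unigram probability.
        int getUnigramProbability() const { return mUnigramProbability; }
     private:
        int mUnigramProbability = NOT_A_PROBABILITY;
    };

    // A caller migrates from getProbability() to getUnigramProbability():
    inline int maxUnigramProbability(const NodePropsSketch &a, const NodePropsSketch &b) {
        return std::max(a.getUnigramProbability(), b.getUnigramProbability());
    }
    }  // namespace sketch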
@@ -136,14 +136,14 @@ class DicNode {
     }
 
     void initAsChild(const DicNode *const dicNode, const int childrenPtNodeArrayPos,
-            const int probability, const int wordId, const bool isBlacklistedOrNotAWord,
+            const int unigramProbability, const int wordId, const bool isBlacklistedOrNotAWord,
             const uint16_t mergedNodeCodePointCount, const int *const mergedNodeCodePoints) {
         uint16_t newDepth = static_cast<uint16_t>(dicNode->getNodeCodePointCount() + 1);
         mIsCachedForNextSuggestion = dicNode->mIsCachedForNextSuggestion;
         const uint16_t newLeavingDepth = static_cast<uint16_t>(
                 dicNode->mDicNodeProperties.getLeavingDepth() + mergedNodeCodePointCount);
         mDicNodeProperties.init(childrenPtNodeArrayPos, mergedNodeCodePoints[0],
-                probability, wordId, isBlacklistedOrNotAWord, newDepth, newLeavingDepth,
+                unigramProbability, wordId, isBlacklistedOrNotAWord, newDepth, newLeavingDepth,
                 dicNode->mDicNodeProperties.getPrevWordIds());
         mDicNodeState.init(&dicNode->mDicNodeState, mergedNodeCodePointCount,
                 mergedNodeCodePoints);
@@ -217,8 +217,9 @@ class DicNode {
         return mDicNodeProperties.getChildrenPtNodeArrayPos();
     }
 
-    int getProbability() const {
-        return mDicNodeProperties.getProbability();
+    // TODO: Remove
+    int getUnigramProbability() const {
+        return mDicNodeProperties.getUnigramProbability();
     }
 
     AK_FORCE_INLINE bool isTerminalDicNode() const {
@@ -84,7 +84,7 @@ namespace latinime {
 /* static */ int DicNodeUtils::getBigramNodeProbability(
         const DictionaryStructureWithBufferPolicy *const dictionaryStructurePolicy,
         const DicNode *const dicNode, MultiBigramMap *const multiBigramMap) {
-    const int unigramProbability = dicNode->getProbability();
+    const int unigramProbability = dicNode->getUnigramProbability();
     if (multiBigramMap) {
         const int *const prevWordIds = dicNode->getPrevWordIds();
         return multiBigramMap->getBigramProbability(dictionaryStructurePolicy,
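Orientation (editor's note, not in the commit): the hunk above keeps the local named unigramProbability because the bigram lookup treats the node's unigram probability as the fallback value when no bigram context applies. A rough sketch of that fallback pattern, under the assumption of a backoff-style helper; all names below are illustrative, not LatinIME's actual API:

    // Sketch (assumed, simplified): prefer a context-aware bigram probability,
    // otherwise back off to the node's unigram probability.
    constexpr int kNotAProbabilitySketch = -1;  // assumed sentinel

    inline int backoffSketch(const int unigramProbability) {
        // Assumption: a real backoff would discount the unigram probability.
        return unigramProbability == kNotAProbabilitySketch ? kNotAProbabilitySketch
                                                            : unigramProbability / 2;
    }

    inline int bigramOrBackoffSketch(const bool hasBigramEntry, const int bigramProbability,
            const int unigramProbability) {
        if (hasBigramEntry && bigramProbability != kNotAProbabilitySketch) {
            return bigramProbability;
        }
        return backoffSketch(unigramProbability);
    }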
@@ -59,11 +59,11 @@ class DicNodeVector {
     }
 
     void pushLeavingChild(const DicNode *const dicNode, const int childrenPtNodeArrayPos,
-            const int probability, const int wordId, const bool isBlacklistedOrNotAWord,
+            const int unigramProbability, const int wordId, const bool isBlacklistedOrNotAWord,
             const uint16_t mergedNodeCodePointCount, const int *const mergedNodeCodePoints) {
         ASSERT(!mLock);
         mDicNodes.emplace_back();
-        mDicNodes.back().initAsChild(dicNode, childrenPtNodeArrayPos, probability,
+        mDicNodes.back().initAsChild(dicNode, childrenPtNodeArrayPos, unigramProbability,
                 wordId, isBlacklistedOrNotAWord, mergedNodeCodePointCount, mergedNodeCodePoints);
     }
 
@@ -29,19 +29,19 @@ namespace latinime {
 class DicNodeProperties {
  public:
     AK_FORCE_INLINE DicNodeProperties()
-            : mChildrenPtNodeArrayPos(NOT_A_DICT_POS), mProbability(NOT_A_PROBABILITY),
+            : mChildrenPtNodeArrayPos(NOT_A_DICT_POS), mUnigramProbability(NOT_A_PROBABILITY),
               mDicNodeCodePoint(NOT_A_CODE_POINT), mWordId(NOT_A_WORD_ID),
               mIsBlacklistedOrNotAWord(false), mDepth(0), mLeavingDepth(0) {}
 
     ~DicNodeProperties() {}
 
     // Should be called only once per DicNode is initialized.
-    void init(const int childrenPos, const int nodeCodePoint, const int probability,
+    void init(const int childrenPos, const int nodeCodePoint, const int unigramProbability,
             const int wordId, const bool isBlacklistedOrNotAWord, const uint16_t depth,
             const uint16_t leavingDepth, const int *const prevWordIds) {
         mChildrenPtNodeArrayPos = childrenPos;
         mDicNodeCodePoint = nodeCodePoint;
-        mProbability = probability;
+        mUnigramProbability = unigramProbability;
         mWordId = wordId;
         mIsBlacklistedOrNotAWord = isBlacklistedOrNotAWord;
         mDepth = depth;
@@ -53,7 +53,7 @@ class DicNodeProperties {
     void init(const int rootPtNodeArrayPos, const int *const prevWordIds) {
         mChildrenPtNodeArrayPos = rootPtNodeArrayPos;
         mDicNodeCodePoint = NOT_A_CODE_POINT;
-        mProbability = NOT_A_PROBABILITY;
+        mUnigramProbability = NOT_A_PROBABILITY;
         mWordId = NOT_A_WORD_ID;
         mIsBlacklistedOrNotAWord = false;
         mDepth = 0;
@@ -64,7 +64,7 @@ class DicNodeProperties {
     void initByCopy(const DicNodeProperties *const dicNodeProp) {
         mChildrenPtNodeArrayPos = dicNodeProp->mChildrenPtNodeArrayPos;
         mDicNodeCodePoint = dicNodeProp->mDicNodeCodePoint;
-        mProbability = dicNodeProp->mProbability;
+        mUnigramProbability = dicNodeProp->mUnigramProbability;
         mWordId = dicNodeProp->mWordId;
         mIsBlacklistedOrNotAWord = dicNodeProp->mIsBlacklistedOrNotAWord;
         mDepth = dicNodeProp->mDepth;
@@ -76,7 +76,7 @@ class DicNodeProperties {
     void init(const DicNodeProperties *const dicNodeProp, const int codePoint) {
         mChildrenPtNodeArrayPos = dicNodeProp->mChildrenPtNodeArrayPos;
         mDicNodeCodePoint = codePoint; // Overwrite the node char of a passing child
-        mProbability = dicNodeProp->mProbability;
+        mUnigramProbability = dicNodeProp->mUnigramProbability;
         mWordId = dicNodeProp->mWordId;
         mIsBlacklistedOrNotAWord = dicNodeProp->mIsBlacklistedOrNotAWord;
         mDepth = dicNodeProp->mDepth + 1; // Increment the depth of a passing child
@@ -88,8 +88,8 @@ class DicNodeProperties {
         return mChildrenPtNodeArrayPos;
     }
 
-    int getProbability() const {
-        return mProbability;
+    int getUnigramProbability() const {
+        return mUnigramProbability;
     }
 
     int getDicNodeCodePoint() const {
@@ -130,9 +130,11 @@ class DicNodeProperties {
     // Use a default copy constructor and an assign operator because shallow copies are ok
     // for this class
     int mChildrenPtNodeArrayPos;
-    int mProbability;
+    // TODO: Remove
+    int mUnigramProbability;
     int mDicNodeCodePoint;
     int mWordId;
+    // TODO: Remove
     bool mIsBlacklistedOrNotAWord;
     uint16_t mDepth;
     uint16_t mLeavingDepth;
@@ -54,15 +54,15 @@ namespace latinime {
         current.swap(next);
     }
 
-    int maxProbability = NOT_A_PROBABILITY;
+    int maxUnigramProbability = NOT_A_PROBABILITY;
     for (const DicNode &dicNode : current) {
         if (!dicNode.isTerminalDicNode()) {
             continue;
         }
         // dicNode can contain case errors, accent errors, intentional omissions or digraphs.
-        maxProbability = std::max(maxProbability, dicNode.getProbability());
+        maxUnigramProbability = std::max(maxUnigramProbability, dicNode.getUnigramProbability());
     }
-    return maxProbability;
+    return maxUnigramProbability;
 }
 
 /* static */ void DictionaryUtils::processChildDicNodes(
@@ -87,7 +87,7 @@ const int SuggestionsOutputUtils::MIN_LEN_FOR_MULTI_WORD_AUTOCORRECT = 16;
             + doubleLetterCost;
     const bool isPossiblyOffensiveWord =
             traverseSession->getDictionaryStructurePolicy()->getProbability(
-                    terminalDicNode->getProbability(), NOT_A_PROBABILITY) <= 0;
+                    terminalDicNode->getUnigramProbability(), NOT_A_PROBABILITY) <= 0;
     const bool isExactMatch =
             ErrorTypeUtils::isExactMatch(terminalDicNode->getContainedErrorTypes());
     const bool isExactMatchWithIntentionalOmission =
@@ -162,7 +162,8 @@ class TypingTraversal : public Traversal {
     }
 
     AK_FORCE_INLINE bool isGoodToTraverseNextWord(const DicNode *const dicNode) const {
-        const int probability = dicNode->getProbability();
+        // TODO: Quit using unigram probability and use probability in the context.
+        const int probability = dicNode->getUnigramProbability();
         if (probability < ScoringParams::THRESHOLD_NEXT_WORD_PROBABILITY) {
             return false;
         }