/******************************************************************************
|
|
Copyright (c) Microsoft Corporation.
|
|
|
|
Permission to use, copy, modify, and/or distribute this software for any
|
|
purpose with or without fee is hereby granted.
|
|
|
|
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
|
|
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
|
|
AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
|
|
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
|
|
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
|
|
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
|
|
PERFORMANCE OF THIS SOFTWARE.
|
|
***************************************************************************** */
|
|
|
|
/**
 * Compiler-emitted helper (TypeScript/tslib style): reads a private class
 * member from `receiver`.
 *
 * @param receiver - the object the private member is read from
 * @param state - brand: a WeakMap/WeakSet for instance members, or the class
 *   constructor function for static members (hence the `typeof === "function"` check)
 * @param kind - "m" (method), "a" (accessor), or anything else for a field
 * @param f - the method, getter, or field descriptor backing the member
 * @throws TypeError when the accessor has no getter, or when `receiver` does
 *   not carry the brand (i.e. its class never declared this private member)
 */
function __classPrivateFieldGet(receiver, state, kind, f) {
  if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter");
  // Brand check: statics compare against the constructor itself; instance
  // members check membership in the WeakMap/WeakSet.
  if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it");
  // Methods are returned as-is, accessors invoke the getter, fields read the
  // descriptor's `.value` (static) or the WeakMap entry (instance).
  return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
}
|
|
|
|
/**
 * Compiler-emitted helper (TypeScript/tslib style): writes a private class
 * member on `receiver` and returns the assigned `value`.
 *
 * @param receiver - the object the private member is written on
 * @param state - brand: a WeakMap/WeakSet for instance members, or the class
 *   constructor function for static members
 * @param value - the value to store
 * @param kind - "m" (method, not writable), "a" (accessor), else a field
 * @param f - the setter or field descriptor backing the member
 * @throws TypeError for writes to methods, accessors without setters, or
 *   receivers that fail the brand check
 */
function __classPrivateFieldSet(receiver, state, value, kind, f) {
  if (kind === "m") throw new TypeError("Private method is not writable");
  if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter");
  // Brand check, mirroring __classPrivateFieldGet.
  if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it");
  // Comma expression: perform the store, then evaluate to `value`.
  return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;
}
|
|
|
|
/**
|
|
* Fuse.js v6.6.2 - Lightweight fuzzy-search (http://fusejs.io)
|
|
*
|
|
* Copyright (c) 2022 Kiro Risk (http://kiro.me)
|
|
* All Rights Reserved. Apache Software License 2.0
|
|
*
|
|
* http://www.apache.org/licenses/LICENSE-2.0
|
|
*/
|
|
|
|
// Checks whether `value` is an array, falling back to a toString-tag probe
// in environments that predate `Array.isArray`.
function isArray(value) {
  if (Array.isArray) {
    return Array.isArray(value)
  }
  return getTag(value) === '[object Array]'
}
|
|
|
|
// Adapted from: https://github.com/lodash/lodash/blob/master/.internal/baseToString.js
const INFINITY = 1 / 0;

// Converts `value` to a string. Unlike naive coercion this preserves the
// sign of negative zero, rendering it as '-0'.
function baseToString(value) {
  // Exit early for strings to avoid a performance hit in some environments.
  if (typeof value === 'string') {
    return value
  }
  const result = value + '';
  if (result === '0' && 1 / value === -INFINITY) {
    return '-0'
  }
  return result
}
|
|
|
|
// Converts `value` to a string; `null` and `undefined` become the empty string.
function toString(value) {
  if (value == null) {
    return ''
  }
  return baseToString(value)
}
|
|
|
|
// True when `value` is a primitive string.
function isString(value) {
  const kind = typeof value;
  return kind === 'string'
}
|
|
|
|
// True when `value` is a primitive number (NaN included, since typeof NaN === 'number').
function isNumber(value) {
  const kind = typeof value;
  return kind === 'number'
}
|
|
|
|
// Adapted from: https://github.com/lodash/lodash/blob/master/isBoolean.js
// True for boolean primitives and for boxed Boolean objects.
function isBoolean(value) {
  if (value === true || value === false) {
    return true
  }
  return isObjectLike(value) && getTag(value) == '[object Boolean]'
}
|
|
|
|
// True when `typeof value` is 'object'. Note that this includes `null`
// (typeof null === 'object'); callers that need to exclude it use isObjectLike.
function isObject(value) {
  const kind = typeof value;
  return kind === 'object'
}
|
|
|
|
// Checks if `value` is object-like: typeof 'object' but not null.
function isObjectLike(value) {
  if (value === null) {
    return false
  }
  return isObject(value)
}
|
|
|
|
// True when `value` is neither `undefined` nor `null`.
// (Loose `!= null` matches exactly those two values.)
function isDefined(value) {
  return value != null
}
|
|
|
|
// True when the string is empty or consists solely of whitespace.
function isBlank(value) {
  return value.trim().length === 0
}
|
|
|
|
// Gets the `toStringTag` of `value`.
// Adapted from: https://github.com/lodash/lodash/blob/master/.internal/getTag.js
function getTag(value) {
  if (value === undefined) {
    return '[object Undefined]'
  }
  if (value === null) {
    return '[object Null]'
  }
  return Object.prototype.toString.call(value)
}
|
|
|
|
// Error messages and message factories shared across the library.

const EXTENDED_SEARCH_UNAVAILABLE = 'Extended search is not available';

const INCORRECT_INDEX_TYPE = "Incorrect 'index' type";

// Thrown by logical-search parsing when a key's value is unusable.
const LOGICAL_SEARCH_INVALID_QUERY_FOR_KEY = (key) =>
  `Invalid value for key ${key}`;

// Thrown when a search pattern exceeds the bitap machine-word limit (MAX_BITS).
const PATTERN_LENGTH_TOO_LARGE = (max) =>
  `Pattern length exceeds max of ${max}.`;

// Thrown by createKey when an object-form key is missing a required property.
const MISSING_KEY_PROPERTY = (name) => `Missing ${name} property in key`;

// Thrown by createKey when a key's weight is zero or negative.
const INVALID_KEY_WEIGHT_VALUE = (key) =>
  `Property 'weight' in key '${key}' must be a positive integer`;

// Cached own-property check; safe even if an object shadows `hasOwnProperty`.
const hasOwn = Object.prototype.hasOwnProperty;
|
|
|
|
/**
 * Stores the normalized search keys and a map from key id to key.
 * Key weights are normalized at construction so they sum to 1.
 *
 * Bug fix: the constructor previously accumulated `obj.weight` into
 * `totalWeight` twice per key, so the normalized weights summed to 0.5
 * instead of 1 and every key's effective weight was halved.
 */
class KeyStore {
  constructor(keys) {
    this._keys = [];
    this._keyMap = {};

    let totalWeight = 0;

    keys.forEach((key) => {
      let obj = createKey(key);

      this._keys.push(obj);
      this._keyMap[obj.id] = obj;

      // Count each key's weight exactly once.
      totalWeight += obj.weight;
    });

    // Normalize weights so that their sum is equal to 1
    this._keys.forEach((key) => {
      key.weight /= totalWeight;
    });
  }
  // Returns the key object for the given key id, or undefined.
  get(keyId) {
    return this._keyMap[keyId]
  }
  // Returns all normalized keys, in insertion order.
  keys() {
    return this._keys
  }
  // Serializes the key list (not the map) to a JSON string.
  toJSON() {
    return JSON.stringify(this._keys)
  }
}
|
|
|
|
/**
 * Normalizes a user-supplied key — a string, a path array, or an object of
 * the form `{ name, weight?, getFn? }` — into the internal
 * `{ path, id, weight, src, getFn }` shape.
 *
 * @throws Error when an object key lacks `name`, or when its `weight`
 *   is zero or negative.
 */
function createKey(key) {
  let getFn = null;
  let weight = 1;
  let path = null;
  let id = null;
  let src = null;

  if (isString(key) || isArray(key)) {
    // String or path-array form: the key itself is the source.
    src = key;
    path = createKeyPath(key);
    id = createKeyId(key);
  } else {
    // Object form: `name` is mandatory.
    if (!hasOwn.call(key, 'name')) {
      throw new Error(MISSING_KEY_PROPERTY('name'))
    }

    const name = key.name;
    src = name;

    // A weight is optional, but when present it must be positive.
    if (hasOwn.call(key, 'weight')) {
      weight = key.weight;

      if (weight <= 0) {
        throw new Error(INVALID_KEY_WEIGHT_VALUE(name))
      }
    }

    path = createKeyPath(name);
    id = createKeyId(name);
    getFn = key.getFn;
  }

  return { path, id, weight, src, getFn }
}
|
|
|
|
// Returns the key as a path array; dotted strings are split on '.'.
function createKeyPath(key) {
  if (isArray(key)) {
    return key
  }
  return key.split('.')
}
|
|
|
|
// Returns the canonical string id for a key; path arrays are joined with '.'.
function createKeyId(key) {
  if (isArray(key)) {
    return key.join('.')
  }
  return key
}
|
|
|
|
/**
 * Retrieves the value(s) at `path` within `obj`, descending through nested
 * objects and fanning out across arrays.
 *
 * @param obj - the object to read from
 * @param path - a dotted string or an array of path segments
 * @returns a single stringified value when no array was traversed, or an
 *   array of stringified values when any array was crossed; `undefined`
 *   when nothing is found.
 */
function get(obj, path) {
  // Accumulates every leaf value reached.
  let list = [];
  // Becomes true the moment any array is traversed; decides the return shape.
  let arr = false;

  const deepGet = (obj, path, index) => {
    if (!isDefined(obj)) {
      return
    }
    if (!path[index]) {
      // If there's no path left, we've arrived at the object we care about.
      list.push(obj);
    } else {
      let key = path[index];

      const value = obj[key];

      if (!isDefined(value)) {
        return
      }

      // If we're at the last value in the path, and if it's a string/number/bool,
      // add it to the list
      if (
        index === path.length - 1 &&
        (isString(value) || isNumber(value) || isBoolean(value))
      ) {
        list.push(toString(value));
      } else if (isArray(value)) {
        arr = true;
        // Search each item in the array.
        for (let i = 0, len = value.length; i < len; i += 1) {
          deepGet(value[i], path, index + 1);
        }
      } else if (path.length) {
        // An object. Recurse further.
        deepGet(value, path, index + 1);
      }
    }
  };

  // Backwards compatibility (since path used to be a string)
  deepGet(obj, isString(path) ? path.split('.') : path, 0);

  return arr ? list : list[0]
}
|
|
|
|
// Options controlling how individual matches are reported.
const MatchOptions = {
  // Whether the matches should be included in the result set. When `true`, each record in the result
  // set will include the indices of the matched characters.
  // These can consequently be used for highlighting purposes.
  includeMatches: false,
  // When `true`, the matching function will continue to the end of a search pattern even if
  // a perfect match has already been located in the string.
  findAllMatches: false,
  // Minimum number of characters that must be matched before a result is considered a match
  minMatchCharLength: 1
};
|
|
|
|
// Core search options.
const BasicOptions = {
  // Indicates whether comparisons should be case sensitive.
  isCaseSensitive: false,
  // Whether the score should be included in the result set.
  // A score of 0 indicates a perfect match, while a score of 1 indicates a complete mismatch.
  includeScore: false,
  // List of properties that will be searched. This also supports nested properties.
  keys: [],
  // Whether to sort the result list, by score
  shouldSort: true,
  // Default sort function: sort by ascending score, ascending index
  sortFn: (a, b) =>
    a.score === b.score ? (a.idx < b.idx ? -1 : 1) : a.score < b.score ? -1 : 1
};
|
|
|
|
// Options controlling the fuzzy (bitap) matching behavior.
const FuzzyOptions = {
  // Approximately where in the text is the pattern expected to be found?
  location: 0,
  // At what point does the match algorithm give up. A threshold of '0.0' requires a perfect match
  // (of both letters and location), a threshold of '1.0' would match anything.
  threshold: 0.6,
  // Determines how close the match must be to the fuzzy location (specified above).
  // An exact letter match which is 'distance' characters away from the fuzzy location
  // would score as a complete mismatch. A distance of '0' requires the match be at
  // the exact location specified. With a distance of '1000' and a threshold of 0.8,
  // a perfect match must be within 800 characters of the fuzzy location to be found.
  distance: 100
};
|
|
|
|
// Advanced options: extended search, custom getters, and scoring tweaks.
const AdvancedOptions = {
  // When `true`, it enables the use of unix-like search commands
  useExtendedSearch: false,
  // The get function to use when fetching an object's properties.
  // The default will search nested paths *ie foo.bar.baz*
  getFn: get,
  // When `true`, search will ignore `location` and `distance`, so it won't matter
  // where in the string the pattern appears.
  // More info: https://fusejs.io/concepts/scoring-theory.html#fuzziness-score
  ignoreLocation: false,
  // When `true`, the calculation for the relevance score (used for sorting) will
  // ignore the field-length norm.
  // More info: https://fusejs.io/concepts/scoring-theory.html#field-length-norm
  ignoreFieldNorm: false,
  // The weight to determine how much field length norm effects scoring.
  fieldNormWeight: 1
};
|
|
|
|
// The full default configuration: all option groups merged in order
// (later groups would win on key collisions, though the groups are disjoint).
var Config = {
  ...BasicOptions,
  ...MatchOptions,
  ...FuzzyOptions,
  ...AdvancedOptions
};
|
|
|
|
// Matches each run of non-space characters, i.e. the tokens of a field.
const SPACE = /[^ ]+/g;

// Field-length norm: the shorter the field, the higher the weight.
// Set to 3 decimals to reduce index size.
function norm(weight = 1, mantissa = 3) {
  // Norms only depend on the token count, so cache by that count.
  const cachedNorms = new Map();
  const scale = Math.pow(10, mantissa);

  return {
    // Returns the (rounded) norm for `value`, computing and caching it on demand.
    get(value) {
      const tokenCount = value.match(SPACE).length;

      const cached = cachedNorms.get(tokenCount);
      if (cached !== undefined) {
        return cached
      }

      // Default function is 1/sqrt(x); `weight` scales the exponent.
      const raw = 1 / Math.pow(tokenCount, 0.5 * weight);

      // In place of `toFixed(mantissa)`, for faster computation.
      const rounded = parseFloat(Math.round(raw * scale) / scale);

      cachedNorms.set(tokenCount, rounded);

      return rounded
    },
    // Drops all cached norms (called once the index has been built).
    clear() {
      cachedNorms.clear();
    }
  }
}
|
|
|
|
/**
 * Pre-computed, serializable search index over a list of documents.
 *
 * Each record stores the document's ref index (`i`) plus either:
 *  - for string docs: the text (`v`) and its field-length norm (`n`), or
 *  - for object docs: a `$` map of keyIndex -> sub-record(s), where array
 *    values yield a list of sub-records tagged with their array index.
 */
class FuseIndex {
  constructor({
    getFn = Config.getFn,
    fieldNormWeight = Config.fieldNormWeight
  } = {}) {
    // Norm calculator shared by all records; 3-decimal precision.
    this.norm = norm(fieldNormWeight, 3);
    this.getFn = getFn;
    this.isCreated = false;

    this.setIndexRecords();
  }
  // Sets the documents to index; does not build the records (see create()).
  setSources(docs = []) {
    this.docs = docs;
  }
  // Replaces the raw records wholesale (used when loading a parsed index).
  setIndexRecords(records = []) {
    this.records = records;
  }
  // Stores the normalized keys plus a key-id -> position lookup map.
  setKeys(keys = []) {
    this.keys = keys;
    this._keysMap = {};
    keys.forEach((key, idx) => {
      this._keysMap[key.id] = idx;
    });
  }
  // Builds records from `docs`. Idempotent: no-op when already created or empty.
  create() {
    if (this.isCreated || !this.docs.length) {
      return
    }

    this.isCreated = true;

    // List is Array<String>
    if (isString(this.docs[0])) {
      this.docs.forEach((doc, docIndex) => {
        this._addString(doc, docIndex);
      });
    } else {
      // List is Array<Object>
      this.docs.forEach((doc, docIndex) => {
        this._addObject(doc, docIndex);
      });
    }

    // Norm cache is only needed during the build.
    this.norm.clear();
  }
  // Adds a doc to the end of the index
  add(doc) {
    const idx = this.size();

    if (isString(doc)) {
      this._addString(doc, idx);
    } else {
      this._addObject(doc, idx);
    }
  }
  // Removes the doc at the specified index of the index
  removeAt(idx) {
    this.records.splice(idx, 1);

    // Change ref index of every subsequent doc
    for (let i = idx, len = this.size(); i < len; i += 1) {
      this.records[i].i -= 1;
    }
  }
  // Reads a value off `item` by key id, using the key's position in `keys`.
  getValueForItemAtKeyId(item, keyId) {
    return item[this._keysMap[keyId]]
  }
  // Number of records currently in the index.
  size() {
    return this.records.length
  }
  // Indexes a plain-string doc; blank/undefined strings are skipped entirely.
  _addString(doc, docIndex) {
    if (!isDefined(doc) || isBlank(doc)) {
      return
    }

    let record = {
      v: doc,
      i: docIndex,
      n: this.norm.get(doc)
    };

    this.records.push(record);
  }
  // Indexes an object doc: for each key, fetches the value (custom getFn wins)
  // and stores string leaves — flattening nested arrays iteratively.
  _addObject(doc, docIndex) {
    let record = { i: docIndex, $: {} };

    // Iterate over every key (i.e, path), and fetch the value at that key
    this.keys.forEach((key, keyIndex) => {
      let value = key.getFn ? key.getFn(doc) : this.getFn(doc, key.path);

      if (!isDefined(value)) {
        return
      }

      if (isArray(value)) {
        let subRecords = [];
        // Depth-first flattening of (possibly nested) arrays via an explicit stack.
        const stack = [{ nestedArrIndex: -1, value }];

        while (stack.length) {
          const { nestedArrIndex, value } = stack.pop();

          if (!isDefined(value)) {
            continue
          }

          if (isString(value) && !isBlank(value)) {
            let subRecord = {
              v: value,
              i: nestedArrIndex,
              n: this.norm.get(value)
            };

            subRecords.push(subRecord);
          } else if (isArray(value)) {
            value.forEach((item, k) => {
              stack.push({
                nestedArrIndex: k,
                value: item
              });
            });
          } else ; // non-string, non-array entries inside arrays are skipped
        }
        record.$[keyIndex] = subRecords;
      } else if (isString(value) && !isBlank(value)) {
        // Scalar string value: single sub-record, no array index.
        let subRecord = {
          v: value,
          n: this.norm.get(value)
        };

        record.$[keyIndex] = subRecord;
      }
    });

    this.records.push(record);
  }
  // Serializable view: keys + records (norm/getFn are reconstructed on parse).
  toJSON() {
    return {
      keys: this.keys,
      records: this.records
    }
  }
}
|
|
|
|
/**
 * Builds a ready-to-search FuseIndex from raw keys and documents.
 *
 * @param keys - key definitions (strings, path arrays, or key objects)
 * @param docs - the documents to index
 * @param options - optional `getFn` / `fieldNormWeight` overrides
 * @returns the populated FuseIndex
 */
function createIndex(
  keys,
  docs,
  { getFn = Config.getFn, fieldNormWeight = Config.fieldNormWeight } = {}
) {
  const index = new FuseIndex({ getFn, fieldNormWeight });

  index.setKeys(keys.map(createKey));
  index.setSources(docs);
  index.create();

  return index
}
|
|
|
|
/**
 * Rehydrates a FuseIndex from previously serialized data (see
 * FuseIndex#toJSON): `{ keys, records }`.
 *
 * @param data - the serialized index payload
 * @param options - optional `getFn` / `fieldNormWeight` overrides
 * @returns the reconstructed FuseIndex
 */
function parseIndex(
  data,
  { getFn = Config.getFn, fieldNormWeight = Config.fieldNormWeight } = {}
) {
  const { keys, records } = data;

  const index = new FuseIndex({ getFn, fieldNormWeight });
  index.setKeys(keys);
  index.setIndexRecords(records);

  return index
}
|
|
|
|
/**
 * Bitap score for a candidate match: the error ratio (errors / pattern
 * length), optionally penalized by how far the match location strays from
 * the expected location. Lower is better; 0 is a perfect match.
 */
function computeScore$1(
  pattern,
  {
    errors = 0,
    currentLocation = 0,
    expectedLocation = 0,
    distance = Config.distance,
    ignoreLocation = Config.ignoreLocation
  } = {}
) {
  // Base score: fraction of the pattern that is in error.
  const errorRatio = errors / pattern.length;

  // Location-insensitive mode: the error ratio is the whole score.
  if (ignoreLocation) {
    return errorRatio
  }

  // How far the candidate sits from where the match was expected.
  const locationDelta = Math.abs(expectedLocation - currentLocation);

  if (!distance) {
    // Dodge divide by zero error: any displacement is a total mismatch.
    return locationDelta ? 1.0 : errorRatio
  }

  return errorRatio + locationDelta / distance
}
|
|
|
|
/**
 * Converts a 0/1 match mask into an array of [start, end] index pairs,
 * keeping only runs of at least `minMatchCharLength` consecutive matches.
 */
function convertMaskToIndices(
  matchmask = [],
  minMatchCharLength = Config.minMatchCharLength
) {
  const indices = [];
  let start = -1;
  let pos = 0;
  const len = matchmask.length;

  while (pos < len) {
    const matched = matchmask[pos];

    if (matched && start === -1) {
      // A new run begins here.
      start = pos;
    } else if (!matched && start !== -1) {
      // The run just ended; keep it if it is long enough.
      const end = pos - 1;
      if (end - start + 1 >= minMatchCharLength) {
        indices.push([start, end]);
      }
      start = -1;
    }

    pos += 1;
  }

  // Close out a run that extends to the very end of the mask.
  // (pos - 1 - start) + 1 => pos - start
  if (matchmask[pos - 1] && pos - start >= minMatchCharLength) {
    indices.push([start, pos - 1]);
  }

  return indices
}
|
|
|
|
// Machine word size: patterns are encoded as bitmasks in a 32-bit integer.
const MAX_BITS = 32;

/**
 * Bitap (shift-or) fuzzy search of `pattern` in `text`.
 *
 * @param text - the text to search in
 * @param pattern - the pattern (must be at most MAX_BITS characters)
 * @param patternAlphabet - per-character bitmasks from createPatternAlphabet
 * @returns { isMatch, score, indices? } — score in (0, 1], lower is better
 * @throws Error when the pattern exceeds MAX_BITS characters
 */
function search(
  text,
  pattern,
  patternAlphabet,
  {
    location = Config.location,
    distance = Config.distance,
    threshold = Config.threshold,
    findAllMatches = Config.findAllMatches,
    minMatchCharLength = Config.minMatchCharLength,
    includeMatches = Config.includeMatches,
    ignoreLocation = Config.ignoreLocation
  } = {}
) {
  if (pattern.length > MAX_BITS) {
    throw new Error(PATTERN_LENGTH_TOO_LARGE(MAX_BITS))
  }

  const patternLen = pattern.length;
  // Set starting location at the beginning of the text.
  const textLen = text.length;
  // Handle the case when location > text.length
  const expectedLocation = Math.max(0, Math.min(location, textLen));
  // Highest score beyond which we give up.
  let currentThreshold = threshold;
  // Is there a nearby exact match? (speedup)
  let bestLocation = expectedLocation;

  // Performance: only compute matches when minMatchCharLength > 1
  // OR if `includeMatches` is true.
  const computeMatches = minMatchCharLength > 1 || includeMatches;
  // A mask of the matches, used for building the indices
  const matchMask = computeMatches ? Array(textLen) : [];

  let index;

  // Get all exact matches, here for speed up
  while ((index = text.indexOf(pattern, bestLocation)) > -1) {
    let score = computeScore$1(pattern, {
      currentLocation: index,
      expectedLocation,
      distance,
      ignoreLocation
    });

    currentThreshold = Math.min(score, currentThreshold);
    bestLocation = index + patternLen;

    if (computeMatches) {
      let i = 0;
      while (i < patternLen) {
        matchMask[index + i] = 1;
        i += 1;
      }
    }
  }

  // Reset the best location
  bestLocation = -1;

  let lastBitArr = [];
  let finalScore = 1;
  let binMax = patternLen + textLen;

  // Bit that signals "a full-pattern match ends here" in the bit array.
  const mask = 1 << (patternLen - 1);

  for (let i = 0; i < patternLen; i += 1) {
    // Scan for the best match; each iteration allows for one more error.
    // Run a binary search to determine how far from the match location we can stray
    // at this error level.
    let binMin = 0;
    let binMid = binMax;

    while (binMin < binMid) {
      const score = computeScore$1(pattern, {
        errors: i,
        currentLocation: expectedLocation + binMid,
        expectedLocation,
        distance,
        ignoreLocation
      });

      if (score <= currentThreshold) {
        binMin = binMid;
      } else {
        binMax = binMid;
      }

      binMid = Math.floor((binMax - binMin) / 2 + binMin);
    }

    // Use the result from this iteration as the maximum for the next.
    binMax = binMid;

    let start = Math.max(1, expectedLocation - binMid + 1);
    let finish = findAllMatches
      ? textLen
      : Math.min(expectedLocation + binMid, textLen) + patternLen;

    // Initialize the bit array
    let bitArr = Array(finish + 2);

    bitArr[finish + 1] = (1 << i) - 1;

    // Scan the window right-to-left, updating the shift-or state per character.
    for (let j = finish; j >= start; j -= 1) {
      let currentLocation = j - 1;
      let charMatch = patternAlphabet[text.charAt(currentLocation)];

      if (computeMatches) {
        // Speed up: quick bool to int conversion (i.e, `charMatch ? 1 : 0`)
        matchMask[currentLocation] = +!!charMatch;
      }

      // First pass: exact match
      bitArr[j] = ((bitArr[j + 1] << 1) | 1) & charMatch;

      // Subsequent passes: fuzzy match — combine substitution, insertion,
      // and deletion transitions from the previous error level.
      if (i) {
        bitArr[j] |=
          ((lastBitArr[j + 1] | lastBitArr[j]) << 1) | 1 | lastBitArr[j + 1];
      }

      if (bitArr[j] & mask) {
        finalScore = computeScore$1(pattern, {
          errors: i,
          currentLocation,
          expectedLocation,
          distance,
          ignoreLocation
        });

        // This match will almost certainly be better than any existing match.
        // But check anyway.
        if (finalScore <= currentThreshold) {
          // Indeed it is
          currentThreshold = finalScore;
          bestLocation = currentLocation;

          // Already passed `loc`, downhill from here on in.
          if (bestLocation <= expectedLocation) {
            break
          }

          // When passing `bestLocation`, don't exceed our current distance from `expectedLocation`.
          start = Math.max(1, 2 * expectedLocation - bestLocation);
        }
      }
    }

    // No hope for a (better) match at greater error levels.
    const score = computeScore$1(pattern, {
      errors: i + 1,
      currentLocation: expectedLocation,
      expectedLocation,
      distance,
      ignoreLocation
    });

    if (score > currentThreshold) {
      break
    }

    lastBitArr = bitArr;
  }

  const result = {
    isMatch: bestLocation >= 0,
    // Count exact matches (those with a score of 0) to be "almost" exact
    score: Math.max(0.001, finalScore)
  };

  if (computeMatches) {
    const indices = convertMaskToIndices(matchMask, minMatchCharLength);
    if (!indices.length) {
      // No run met minMatchCharLength; demote the hit.
      result.isMatch = false;
    } else if (includeMatches) {
      result.indices = indices;
    }
  }

  return result
}
|
|
|
|
/**
 * Builds the bitap alphabet for `pattern`: maps each character to a bitmask
 * where bit (len - i - 1) is set for every position i the character occupies.
 */
function createPatternAlphabet(pattern) {
  const alphabet = {};
  const len = pattern.length;

  for (let pos = 0; pos < len; pos += 1) {
    const ch = pattern.charAt(pos);
    const bit = 1 << (len - pos - 1);
    alphabet[ch] = (alphabet[ch] || 0) | bit;
  }

  return alphabet
}
|
|
|
|
/**
 * Bitap searcher for one fixed pattern. Because the bitap state lives in a
 * 32-bit word, patterns longer than MAX_BITS are split into chunks that are
 * searched independently; the chunk scores are then averaged.
 */
class BitapSearch {
  constructor(
    pattern,
    {
      location = Config.location,
      threshold = Config.threshold,
      distance = Config.distance,
      includeMatches = Config.includeMatches,
      findAllMatches = Config.findAllMatches,
      minMatchCharLength = Config.minMatchCharLength,
      isCaseSensitive = Config.isCaseSensitive,
      ignoreLocation = Config.ignoreLocation
    } = {}
  ) {
    this.options = {
      location,
      threshold,
      distance,
      includeMatches,
      findAllMatches,
      minMatchCharLength,
      isCaseSensitive,
      ignoreLocation
    };

    // Case folding happens once here; searchIn folds the text to match.
    this.pattern = isCaseSensitive ? pattern : pattern.toLowerCase();

    this.chunks = [];

    if (!this.pattern.length) {
      return
    }

    // Each chunk records its slice of the pattern, the precomputed bitap
    // alphabet for that slice, and the slice's offset in the full pattern.
    const addChunk = (pattern, startIndex) => {
      this.chunks.push({
        pattern,
        alphabet: createPatternAlphabet(pattern),
        startIndex
      });
    };

    const len = this.pattern.length;

    if (len > MAX_BITS) {
      let i = 0;
      const remainder = len % MAX_BITS;
      const end = len - remainder;

      while (i < end) {
        addChunk(this.pattern.substr(i, MAX_BITS), i);
        i += MAX_BITS;
      }

      // The trailing chunk is taken as the last MAX_BITS characters, so it
      // overlaps the previous chunk rather than being shorter.
      if (remainder) {
        const startIndex = len - MAX_BITS;
        addChunk(this.pattern.substr(startIndex), startIndex);
      }
    } else {
      addChunk(this.pattern, 0);
    }
  }

  /**
   * Searches `text` for this pattern.
   * @returns { isMatch, score, indices? } — score averaged across chunks.
   */
  searchIn(text) {
    const { isCaseSensitive, includeMatches } = this.options;

    if (!isCaseSensitive) {
      text = text.toLowerCase();
    }

    // Exact match shortcut: perfect score without running bitap.
    if (this.pattern === text) {
      let result = {
        isMatch: true,
        score: 0
      };

      if (includeMatches) {
        result.indices = [[0, text.length - 1]];
      }

      return result
    }

    // Otherwise, use Bitap algorithm
    const {
      location,
      distance,
      threshold,
      findAllMatches,
      minMatchCharLength,
      ignoreLocation
    } = this.options;

    let allIndices = [];
    let totalScore = 0;
    let hasMatches = false;

    // Search each chunk at its own offset and aggregate the results.
    this.chunks.forEach(({ pattern, alphabet, startIndex }) => {
      const { isMatch, score, indices } = search(text, pattern, alphabet, {
        location: location + startIndex,
        distance,
        threshold,
        findAllMatches,
        minMatchCharLength,
        includeMatches,
        ignoreLocation
      });

      if (isMatch) {
        hasMatches = true;
      }

      totalScore += score;

      if (isMatch && indices) {
        allIndices = [...allIndices, ...indices];
      }
    });

    // Average chunk scores; a total miss scores the worst possible 1.
    let result = {
      isMatch: hasMatches,
      score: hasMatches ? totalScore / this.chunks.length : 1
    };

    if (hasMatches && includeMatches) {
      result.indices = allIndices;
    }

    return result
  }
}
|
|
|
|
/**
 * Base class for extended-search matchers. Subclasses supply static
 * `multiRegex` (the quoted form of the token) and `singleRegex` (the bare
 * form), plus a `search(text)` returning { isMatch, score, indices }.
 */
class BaseMatch {
  constructor(pattern) {
    this.pattern = pattern;
  }
  // Returns the captured token when `pattern` is in the quoted (multi) form, else null.
  static isMultiMatch(pattern) {
    return getMatch(pattern, this.multiRegex)
  }
  // Returns the captured token when `pattern` is in the bare (single) form, else null.
  static isSingleMatch(pattern) {
    return getMatch(pattern, this.singleRegex)
  }
  // Overridden by subclasses; the base implementation matches nothing.
  search(/*text*/) {}
}
|
|
|
|
// Returns the first capture group of `pattern` matched against `exp`,
// or null when the pattern does not match at all.
function getMatch(pattern, exp) {
  const found = pattern.match(exp);
  if (!found) {
    return null
  }
  return found[1]
}
|
|
|
|
// Token: =file
|
|
|
|
// Matches only when the text equals the pattern exactly.
class ExactMatch extends BaseMatch {
  constructor(pattern) {
    super(pattern);
  }
  static get type() {
    return 'exact'
  }
  static get multiRegex() {
    return /^="(.*)"$/
  }
  static get singleRegex() {
    return /^=(.*)$/
  }
  search(text) {
    const matched = text === this.pattern;

    return {
      isMatch: matched,
      score: matched ? 0 : 1,
      indices: [0, this.pattern.length - 1]
    }
  }
}
|
|
|
|
// Token: !fire
|
|
|
|
// Token: !fire
// Matches when the text does NOT contain the pattern anywhere.
class InverseExactMatch extends BaseMatch {
  constructor(pattern) {
    super(pattern);
  }
  static get type() {
    return 'inverse-exact'
  }
  static get multiRegex() {
    return /^!"(.*)"$/
  }
  static get singleRegex() {
    return /^!(.*)$/
  }
  search(text) {
    const matched = !text.includes(this.pattern);

    return {
      isMatch: matched,
      score: matched ? 0 : 1,
      indices: [0, text.length - 1]
    }
  }
}
|
|
|
|
// Token: ^file
|
|
|
|
// Token: ^file
// Matches when the text begins with the pattern.
class PrefixExactMatch extends BaseMatch {
  constructor(pattern) {
    super(pattern);
  }
  static get type() {
    return 'prefix-exact'
  }
  static get multiRegex() {
    return /^\^"(.*)"$/
  }
  static get singleRegex() {
    return /^\^(.*)$/
  }
  search(text) {
    const matched = text.startsWith(this.pattern);

    return {
      isMatch: matched,
      score: matched ? 0 : 1,
      indices: [0, this.pattern.length - 1]
    }
  }
}
|
|
|
|
// Token: !^fire
|
|
|
|
// Token: !^fire
// Matches when the text does NOT begin with the pattern.
class InversePrefixExactMatch extends BaseMatch {
  constructor(pattern) {
    super(pattern);
  }
  static get type() {
    return 'inverse-prefix-exact'
  }
  static get multiRegex() {
    return /^!\^"(.*)"$/
  }
  static get singleRegex() {
    return /^!\^(.*)$/
  }
  search(text) {
    const matched = !text.startsWith(this.pattern);

    return {
      isMatch: matched,
      score: matched ? 0 : 1,
      indices: [0, text.length - 1]
    }
  }
}
|
|
|
|
// Token: .file$
|
|
|
|
// Token: .file$
// Matches when the text ends with the pattern.
class SuffixExactMatch extends BaseMatch {
  constructor(pattern) {
    super(pattern);
  }
  static get type() {
    return 'suffix-exact'
  }
  static get multiRegex() {
    return /^"(.*)"\$$/
  }
  static get singleRegex() {
    return /^(.*)\$$/
  }
  search(text) {
    const matched = text.endsWith(this.pattern);

    return {
      isMatch: matched,
      score: matched ? 0 : 1,
      indices: [text.length - this.pattern.length, text.length - 1]
    }
  }
}
|
|
|
|
// Token: !.file$
|
|
|
|
// Token: !.file$
// Matches when the text does NOT end with the pattern.
class InverseSuffixExactMatch extends BaseMatch {
  constructor(pattern) {
    super(pattern);
  }
  static get type() {
    return 'inverse-suffix-exact'
  }
  static get multiRegex() {
    return /^!"(.*)"\$$/
  }
  static get singleRegex() {
    return /^!(.*)\$$/
  }
  search(text) {
    const matched = !text.endsWith(this.pattern);

    return {
      isMatch: matched,
      score: matched ? 0 : 1,
      indices: [0, text.length - 1]
    }
  }
}
|
|
|
|
/**
 * The default extended-search matcher: fuzzy (bitap) matching, delegating
 * the actual search to a BitapSearch constructed with the same options.
 * Its single regex matches anything, so it is the fallback searcher.
 */
class FuzzyMatch extends BaseMatch {
  constructor(
    pattern,
    {
      location = Config.location,
      threshold = Config.threshold,
      distance = Config.distance,
      includeMatches = Config.includeMatches,
      findAllMatches = Config.findAllMatches,
      minMatchCharLength = Config.minMatchCharLength,
      isCaseSensitive = Config.isCaseSensitive,
      ignoreLocation = Config.ignoreLocation
    } = {}
  ) {
    super(pattern);
    this._bitapSearch = new BitapSearch(pattern, {
      location,
      threshold,
      distance,
      includeMatches,
      findAllMatches,
      minMatchCharLength,
      isCaseSensitive,
      ignoreLocation
    });
  }
  static get type() {
    return 'fuzzy'
  }
  static get multiRegex() {
    return /^"(.*)"$/
  }
  // Catch-all: any unquoted token is treated as a fuzzy pattern.
  static get singleRegex() {
    return /^(.*)$/
  }
  search(text) {
    return this._bitapSearch.searchIn(text)
  }
}
|
|
|
|
// Token: 'file
|
|
|
|
// Token: 'file
// Matches when the text contains the pattern; reports every occurrence.
class IncludeMatch extends BaseMatch {
  constructor(pattern) {
    super(pattern);
  }
  static get type() {
    return 'include'
  }
  static get multiRegex() {
    return /^'"(.*)"$/
  }
  static get singleRegex() {
    return /^'(.*)$/
  }
  search(text) {
    const indices = [];
    const patternLen = this.pattern.length;

    // Collect every exact occurrence, scanning left to right.
    let index = text.indexOf(this.pattern);
    while (index > -1) {
      indices.push([index, index + patternLen - 1]);
      index = text.indexOf(this.pattern, index + patternLen);
    }

    const matched = indices.length > 0;

    return {
      isMatch: matched,
      score: matched ? 0 : 1,
      indices
    }
  }
}
|
|
|
|
// ❗Order is important. DO NOT CHANGE.
// Tokens are tried in this order; FuzzyMatch is last because its single
// regex matches anything and therefore acts as the fallback.
const searchers = [
  ExactMatch,
  IncludeMatch,
  PrefixExactMatch,
  InversePrefixExactMatch,
  InverseSuffixExactMatch,
  SuffixExactMatch,
  InverseExactMatch,
  FuzzyMatch
];

const searchersLen = searchers.length;

// Regex to split by spaces, but keep anything in quotes together
const SPACE_RE = / +(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)/;

// Separator between OR clauses in an extended-search query string.
const OR_TOKEN = '|';
|
|
|
|
// Return a 2D array representation of the query, for simpler parsing.
// Example:
// "^core go$ | rb$ | py$ xy$" => [["^core", "go$"], ["rb$"], ["py$", "xy$"]]
//
// Outer array entries are OR clauses; each inner array holds the matcher
// instances that must ALL match (AND) for that clause to succeed.
function parseQuery(pattern, options = {}) {
  return pattern.split(OR_TOKEN).map((item) => {
    // Tokenize on spaces, keeping quoted phrases intact (see SPACE_RE).
    let query = item
      .trim()
      .split(SPACE_RE)
      .filter((item) => item && !!item.trim());

    let results = [];
    for (let i = 0, len = query.length; i < len; i += 1) {
      const queryItem = query[i];

      // 1. Handle multiple query matches (i.e., ones that are quoted, like `"hello world"`)
      let found = false;
      let idx = -1;
      while (!found && ++idx < searchersLen) {
        const searcher = searchers[idx];
        let token = searcher.isMultiMatch(queryItem);
        if (token) {
          // The matcher receives the captured token, not the raw query item.
          results.push(new searcher(token, options));
          found = true;
        }
      }

      if (found) {
        continue
      }

      // 2. Handle single query matches (i.e., ones that are *not* quoted)
      idx = -1;
      while (++idx < searchersLen) {
        const searcher = searchers[idx];
        let token = searcher.isSingleMatch(queryItem);
        if (token) {
          results.push(new searcher(token, options));
          break
        }
      }
    }

    return results
  })
}
|
|
|
|
// These extended matchers can return an array of matches, as opposed
// to a single match
const MultiMatchSet = new Set([FuzzyMatch.type, IncludeMatch.type]);
|
|
|
|
/**
|
|
* Command-like searching
|
|
* ======================
|
|
*
|
|
* Given multiple search terms delimited by spaces.e.g. `^jscript .python$ ruby !java`,
|
|
* search in a given text.
|
|
*
|
|
* Search syntax:
|
|
*
|
|
* | Token | Match type | Description |
|
|
* | ----------- | -------------------------- | -------------------------------------- |
|
|
* | `jscript` | fuzzy-match | Items that fuzzy match `jscript` |
|
|
* | `=scheme` | exact-match | Items that are `scheme` |
|
|
* | `'python` | include-match | Items that include `python` |
|
|
* | `!ruby` | inverse-exact-match | Items that do not include `ruby` |
|
|
* | `^java` | prefix-exact-match | Items that start with `java` |
|
|
* | `!^earlang` | inverse-prefix-exact-match | Items that do not start with `earlang` |
|
|
* | `.js$` | suffix-exact-match | Items that end with `.js` |
|
|
* | `!.go$` | inverse-suffix-exact-match | Items that do not end with `.go` |
|
|
*
|
|
* A single pipe character acts as an OR operator. For example, the following
|
|
* query matches entries that start with `core` and end with either`go`, `rb`,
|
|
* or`py`.
|
|
*
|
|
* ```
|
|
* ^core go$ | rb$ | py$
|
|
* ```
|
|
*/
|
|
/**
 * Searcher implementing the extended (command-like) query syntax documented
 * above. The pattern is parsed once into `this.query`: an array of OR groups,
 * each group an array of AND sub-searchers.
 */
class ExtendedSearch {
  constructor(
    pattern,
    {
      isCaseSensitive = Config.isCaseSensitive,
      includeMatches = Config.includeMatches,
      minMatchCharLength = Config.minMatchCharLength,
      ignoreLocation = Config.ignoreLocation,
      findAllMatches = Config.findAllMatches,
      location = Config.location,
      threshold = Config.threshold,
      distance = Config.distance
    } = {}
  ) {
    this.query = null;
    this.options = {
      isCaseSensitive,
      includeMatches,
      minMatchCharLength,
      findAllMatches,
      ignoreLocation,
      location,
      threshold,
      distance
    };

    // Normalize case once so every sub-searcher sees identical casing.
    this.pattern = isCaseSensitive ? pattern : pattern.toLowerCase();
    this.query = parseQuery(this.pattern, this.options);
  }

  // This searcher is only selected when `useExtendedSearch` is enabled.
  static condition(_, options) {
    return options.useExtendedSearch
  }

  /**
   * Tests `text` against the parsed query.
   * Returns { isMatch, score [, indices] }; score 1 means no match.
   */
  searchIn(text) {
    const query = this.query;

    if (!query) {
      return {
        isMatch: false,
        score: 1
      }
    }

    const { includeMatches, isCaseSensitive } = this.options;

    text = isCaseSensitive ? text : text.toLowerCase();

    let numMatches = 0;
    let allIndices = [];
    let totalScore = 0;

    // ORs
    for (let i = 0, qLen = query.length; i < qLen; i += 1) {
      const searchers = query[i];

      // Reset indices (per OR group)
      allIndices.length = 0;
      numMatches = 0;

      // ANDs
      for (let j = 0, pLen = searchers.length; j < pLen; j += 1) {
        const searcher = searchers[j];
        const { isMatch, indices, score } = searcher.search(text);

        if (isMatch) {
          numMatches += 1;
          totalScore += score;
          if (includeMatches) {
            const type = searcher.constructor.type;
            if (MultiMatchSet.has(type)) {
              // Multi-match searchers return an array of ranges; flatten it.
              allIndices = [...allIndices, ...indices];
            } else {
              allIndices.push(indices);
            }
          }
        } else {
          // One AND term failed: discard this group's partial results.
          totalScore = 0;
          numMatches = 0;
          allIndices.length = 0;
          break
        }
      }

      // OR condition, so if TRUE, return
      if (numMatches) {
        let result = {
          isMatch: true,
          // Average score across the matched AND terms.
          score: totalScore / numMatches
        };

        if (includeMatches) {
          result.indices = allIndices;
        }

        return result
      }
    }

    // Nothing was matched
    return {
      isMatch: false,
      score: 1
    }
  }
}
|
|
|
|
// Global registry of searcher classes consulted, in order, by createSearcher().
const registeredSearchers = [];
|
|
|
|
// Appends one or more searcher classes to the registry used by createSearcher().
function register(...args) {
  for (const searcherClass of args) {
    registeredSearchers.push(searcherClass);
  }
}
|
|
|
|
// Returns an instance of the first registered searcher whose static
// `condition` accepts the pattern/options pair; defaults to BitapSearch.
function createSearcher(pattern, options) {
  for (const searcherClass of registeredSearchers) {
    if (searcherClass.condition(pattern, options)) {
      return new searcherClass(pattern, options)
    }
  }
  return new BitapSearch(pattern, options)
}
|
|
|
|
// Operator keys recognized in logical query objects (see `parse`).
const LogicalOperator = {
  AND: '$and',
  OR: '$or'
};
|
|
|
|
// Reserved keys for the explicit `{ $path, $val }` leaf form of a query.
const KeyType = {
  PATH: '$path',
  PATTERN: '$val'
};
|
|
|
|
// True when the query object is an explicit logical expression ($and / $or).
const isExpression = (query) =>
  Boolean(query[LogicalOperator.AND] || query[LogicalOperator.OR]);
|
|
|
|
// True when the query addresses its key via the explicit `$path` form.
const isPath = (query) => Boolean(query[KeyType.PATH]);
|
|
|
|
// A leaf is a plain (non-array) object that is not a logical expression.
const isLeaf = (query) => {
  if (isArray(query) || !isObject(query)) {
    return false;
  }
  return !isExpression(query);
};
|
|
|
|
// Wraps an implicit multi-key query into an explicit `$and` of
// single-key sub-queries, preserving key order.
const convertToExplicit = (query) => {
  const children = Object.keys(query).map((key) => ({ [key]: query[key] }));
  return { [LogicalOperator.AND]: children };
};
|
|
|
|
// When `auto` is `true`, the parse function will infer and initialize and add
|
|
// the appropriate `Searcher` instance
|
|
function parse(query, options, { auto = true } = {}) {
  // Recursively normalizes `query` into a tree of { children, operator }
  // nodes whose leaves are { keyId, pattern [, searcher] }.
  const next = (query) => {
    let keys = Object.keys(query);

    const isQueryPath = isPath(query);

    // Implicit multi-key object -> explicit $and of single-key objects.
    if (!isQueryPath && keys.length > 1 && !isExpression(query)) {
      return next(convertToExplicit(query))
    }

    if (isLeaf(query)) {
      // Either the explicit { $path, $val } form or { key: pattern }.
      const key = isQueryPath ? query[KeyType.PATH] : keys[0];

      const pattern = isQueryPath ? query[KeyType.PATTERN] : query[key];

      if (!isString(pattern)) {
        throw new Error(LOGICAL_SEARCH_INVALID_QUERY_FOR_KEY(key))
      }

      const obj = {
        keyId: createKeyId(key),
        pattern
      };

      // `auto` eagerly builds the searcher so evaluation can run directly.
      if (auto) {
        obj.searcher = createSearcher(pattern, options);
      }

      return obj
    }

    // Internal node: the first key names the operator ($and / $or).
    let node = {
      children: [],
      operator: keys[0]
    };

    keys.forEach((key) => {
      const value = query[key];

      // Operator values are arrays of sub-queries; anything else is ignored.
      if (isArray(value)) {
        value.forEach((item) => {
          node.children.push(next(item));
        });
      }
    });

    return node
  };

  if (!isExpression(query)) {
    query = convertToExplicit(query);
  }

  return next(query)
}
|
|
|
|
// Practical scoring function
|
|
/**
 * Combines each result's per-match scores into a single `score`:
 * the product of matchScore ^ (weight * norm) over all matches.
 * A perfect (0) score on a weighted key is clamped to Number.EPSILON
 * so it still contributes to the product.
 */
function computeScore(
  results,
  { ignoreFieldNorm = Config.ignoreFieldNorm }
) {
  for (const result of results) {
    result.score = result.matches.reduce((acc, { key, norm, score }) => {
      const weight = key ? key.weight : null;
      const base = score === 0 && weight ? Number.EPSILON : score;
      const exponent = (weight || 1) * (ignoreFieldNorm ? 1 : norm);
      return acc * Math.pow(base, exponent);
    }, 1);
  }
}
|
|
|
|
// Copies an internal result's match records onto the public output shape,
// skipping matches without index information.
function transformMatches(result, data) {
  const matches = result.matches;
  data.matches = [];

  if (!isDefined(matches)) {
    return
  }

  for (const match of matches) {
    if (!isDefined(match.indices) || !match.indices.length) {
      continue
    }

    const { indices, value } = match;

    const entry = {
      indices,
      value
    };

    if (match.key) {
      entry.key = match.key.src;
    }

    if (match.idx > -1) {
      entry.refIndex = match.idx;
    }

    data.matches.push(entry);
  }
}
|
|
|
|
// Copies the computed score from an internal result onto the output record.
function transformScore(result, data) {
  const { score } = result;
  data.score = score;
}
|
|
|
|
/**
 * Maps internal results to the public shape { item, refIndex [, matches]
 * [, score] }, applying only the transformers the options ask for.
 */
function format(
  results,
  docs,
  {
    includeMatches = Config.includeMatches,
    includeScore = Config.includeScore
  } = {}
) {
  const transformers = [];

  if (includeMatches) transformers.push(transformMatches);
  if (includeScore) transformers.push(transformScore);

  return results.map((result) => {
    const { idx } = result;
    const data = {
      item: docs[idx],
      refIndex: idx
    };
    for (const transform of transformers) {
      transform(result, data);
    }
    return data
  })
}
|
|
|
|
/**
 * Fuse.js entry point: owns the options, key store, and search index, and
 * dispatches `search()` to string-list, object-list, or logical search.
 */
class Fuse {
  constructor(docs, options = {}, index) {
    this.options = { ...Config, ...options };

    // NOTE(review): the original bundle guarded `useExtendedSearch` with
    // `if (this.options.useExtendedSearch && !true) throw new Error(...)` —
    // a build-time feature flag folded to `!true`, making the branch
    // unreachable. The dead branch has been removed; behavior is unchanged.

    this._keyStore = new KeyStore(this.options.keys);

    this.setCollection(docs, index);
  }

  /**
   * Replaces the document collection, reusing `index` when provided.
   * @throws {Error} when `index` is not a FuseIndex.
   */
  setCollection(docs, index) {
    this._docs = docs;

    if (index && !(index instanceof FuseIndex)) {
      throw new Error(INCORRECT_INDEX_TYPE)
    }

    this._myIndex =
      index ||
      createIndex(this.options.keys, this._docs, {
        getFn: this.options.getFn,
        fieldNormWeight: this.options.fieldNormWeight
      });
  }

  /** Appends a document to the collection and index. No-op for null/undefined. */
  add(doc) {
    if (!isDefined(doc)) {
      return
    }

    this._docs.push(doc);
    this._myIndex.add(doc);
  }

  /**
   * Removes every document matching `predicate(doc, idx)`.
   * @returns the removed documents.
   */
  remove(predicate = (/* doc, idx */) => false) {
    const results = [];

    for (let i = 0, len = this._docs.length; i < len; i += 1) {
      const doc = this._docs[i];
      if (predicate(doc, i)) {
        this.removeAt(i);
        // Compensate for the splice so no element is skipped.
        i -= 1;
        len -= 1;

        results.push(doc);
      }
    }

    return results
  }

  /** Removes the document at `idx` from both the collection and the index. */
  removeAt(idx) {
    this._docs.splice(idx, 1);
    this._myIndex.removeAt(idx);
  }

  /** Returns the underlying FuseIndex. */
  getIndex() {
    return this._myIndex
  }

  /**
   * Runs a search. `query` is a pattern string or a logical query object;
   * `limit` > -1 truncates the result list.
   */
  search(query, { limit = -1 } = {}) {
    const {
      includeMatches,
      includeScore,
      shouldSort,
      sortFn,
      ignoreFieldNorm
    } = this.options;

    let results = isString(query)
      ? isString(this._docs[0])
        ? this._searchStringList(query)
        : this._searchObjectList(query)
      : this._searchLogical(query);

    computeScore(results, { ignoreFieldNorm });

    if (shouldSort) {
      results.sort(sortFn);
    }

    if (isNumber(limit) && limit > -1) {
      results = results.slice(0, limit);
    }

    return format(results, this._docs, {
      includeMatches,
      includeScore
    })
  }

  /** Searches when the collection is a flat list of strings. */
  _searchStringList(query) {
    const searcher = createSearcher(query, this.options);
    const { records } = this._myIndex;
    const results = [];

    // Iterate over every string in the index
    records.forEach(({ v: text, i: idx, n: norm }) => {
      if (!isDefined(text)) {
        return
      }

      const { isMatch, score, indices } = searcher.searchIn(text);

      if (isMatch) {
        results.push({
          item: text,
          idx,
          matches: [{ score, value: text, norm, indices }]
        });
      }
    });

    return results
  }

  /** Evaluates a logical ($and/$or) query tree against every record. */
  _searchLogical(query) {

    const expression = parse(query, this.options);

    const evaluate = (node, item, idx) => {
      if (!node.children) {
        // Leaf: run the prepared searcher against the item's value.
        const { keyId, searcher } = node;

        const matches = this._findMatches({
          key: this._keyStore.get(keyId),
          value: this._myIndex.getValueForItemAtKeyId(item, keyId),
          searcher
        });

        if (matches && matches.length) {
          return [
            {
              idx,
              item,
              matches
            }
          ]
        }

        return []
      }

      const res = [];
      for (let i = 0, len = node.children.length; i < len; i += 1) {
        const child = node.children[i];
        const result = evaluate(child, item, idx);
        if (result.length) {
          res.push(...result);
        } else if (node.operator === LogicalOperator.AND) {
          // AND short-circuits on the first non-matching child.
          return []
        }
      }
      return res
    };

    const records = this._myIndex.records;
    const resultMap = {};
    const results = [];

    records.forEach(({ $: item, i: idx }) => {
      if (isDefined(item)) {
        let expResults = evaluate(expression, item, idx);

        if (expResults.length) {
          // Dedupe when adding
          if (!resultMap[idx]) {
            resultMap[idx] = { idx, item, matches: [] };
            results.push(resultMap[idx]);
          }
          expResults.forEach(({ matches }) => {
            resultMap[idx].matches.push(...matches);
          });
        }
      }
    });

    return results
  }

  /** Searches when the collection is a list of objects, across all keys. */
  _searchObjectList(query) {
    const searcher = createSearcher(query, this.options);
    const { keys, records } = this._myIndex;
    const results = [];

    // List is Array<Object>
    records.forEach(({ $: item, i: idx }) => {
      if (!isDefined(item)) {
        return
      }

      let matches = [];

      // Iterate over every key (i.e, path), and fetch the value at that key
      keys.forEach((key, keyIndex) => {
        matches.push(
          ...this._findMatches({
            key,
            value: item[keyIndex],
            searcher
          })
        );
      });

      if (matches.length) {
        results.push({
          idx,
          item,
          matches
        });
      }
    });

    return results
  }

  /** Runs `searcher` over one indexed value (scalar record or array of records). */
  _findMatches({ key, value, searcher }) {
    if (!isDefined(value)) {
      return []
    }

    let matches = [];

    if (isArray(value)) {
      value.forEach(({ v: text, i: idx, n: norm }) => {
        if (!isDefined(text)) {
          return
        }

        const { isMatch, score, indices } = searcher.searchIn(text);

        if (isMatch) {
          matches.push({
            score,
            key,
            value: text,
            idx,
            norm,
            indices
          });
        }
      });
    } else {
      const { v: text, n: norm } = value;

      const { isMatch, score, indices } = searcher.searchIn(text);

      if (isMatch) {
        matches.push({ score, key, value: text, norm, indices });
      }
    }

    return matches
  }
}
|
|
|
|
// Static API surface mirrored from the module.
Fuse.version = '6.6.2';
Fuse.createIndex = createIndex;
Fuse.parseIndex = parseIndex;
Fuse.config = Config;

// The bare blocks below are build-time feature sections left by the bundler
// (their feature flags were folded to `true`).
{
  Fuse.parseQuery = parse;
}

{
  register(ExtendedSearch);
}
|
|
|
|
// Shared do-nothing placeholder used as a default callback.
function noop() { }
|
|
// Invokes a zero-argument function and propagates its return value.
function run(fn) {
  const result = fn();
  return result;
}
|
|
// Creates a dictionary with no prototype (safe from inherited-key lookups).
function blank_object() {
  const dict = Object.create(null);
  return dict;
}
|
|
// Runs every function in the list, in order, discarding return values.
function run_all(fns) {
  for (const fn of fns) {
    fn();
  }
}
|
|
// Type guard: is `thing` callable?
function is_function(thing) {
  const kind = typeof thing;
  return kind === 'function';
}
|
|
// Inequality check used for reactivity: NaN equals NaN, and objects /
// functions are always considered "changed" (they may mutate internally).
function safe_not_equal(a, b) {
  if (a !== a) {
    // `a` is NaN: only "equal" if `b` is NaN too.
    return b === b;
  }
  return a !== b || ((a && typeof a === 'object') || typeof a === 'function');
}
|
|
// Lazily-created <a> element used to normalize URLs via the browser's parser.
let src_url_equal_anchor;
// True when `element_src` resolves to the same absolute URL as `url`.
function src_url_equal(element_src, url) {
  if (!src_url_equal_anchor) {
    src_url_equal_anchor = document.createElement('a');
  }
  // Assigning href makes the browser absolutize/normalize the URL.
  src_url_equal_anchor.href = url;
  return element_src === src_url_equal_anchor.href;
}
|
|
// True when the object has no own enumerable keys.
function is_empty(obj) {
  const keys = Object.keys(obj);
  return keys.length === 0;
}
|
|
// Appends `node` as the last child of `target`.
function append(target, node) {
  target.appendChild(node);
}
|
|
// Inserts `node` into `target` before `anchor` (appends when no anchor).
function insert(target, node, anchor) {
  const before = anchor || null;
  target.insertBefore(node, before);
}
|
|
// Removes `node` from the DOM. Assumes the node currently has a parent.
function detach(node) {
  const parent = node.parentNode;
  parent.removeChild(node);
}
|
|
// Calls `.d(detaching)` on every truthy block in an #each block list.
function destroy_each(iterations, detaching) {
  for (const block of iterations) {
    if (block) {
      block.d(detaching);
    }
  }
}
|
|
// Creates an HTML element in the document's default namespace.
function element(name) {
  return document.createElement(name);
}
|
|
// Creates an element in the SVG namespace.
function svg_element(name) {
  return document.createElementNS('http://www.w3.org/2000/svg', name);
}
|
|
// Creates a text node holding `data`.
function text(data) {
  return document.createTextNode(data);
}
|
|
// Shorthand for a single-space text node.
function space() {
  return text(' ');
}
|
|
// Shorthand for an empty text node, used as an anchor placeholder.
function empty() {
  return text('');
}
|
|
// Attaches `handler` to `node` and returns a disposer that detaches it.
function listen(node, event, handler, options) {
  node.addEventListener(event, handler, options);
  return function () {
    node.removeEventListener(event, handler, options);
  };
}
|
|
// Wraps an event handler so the event stops bubbling before `fn` runs;
// `this` and the return value are forwarded unchanged.
function stop_propagation(fn) {
  return function (event) {
    event.stopPropagation();
    return fn.call(this, event);
  };
}
|
|
// Sets or updates an attribute, removing it for null/undefined and
// skipping redundant writes.
function attr(node, attribute, value) {
  if (value == null) {
    node.removeAttribute(attribute);
    return;
  }
  if (node.getAttribute(attribute) !== value) {
    node.setAttribute(attribute, value);
  }
}
|
|
// Snapshots an element's child nodes as a real array.
function children(element) {
  const nodes = element.childNodes;
  return Array.from(nodes);
}
|
|
// Updates a text node's content, coercing to string and skipping the
// write when the rendered text already matches.
function set_data(text, data) {
  const str = '' + data;
  if (text.wholeText !== str) {
    text.data = str;
  }
}
|
|
// Adds or removes a CSS class depending on the truthiness of `toggle`.
function toggle_class(element, name, toggle) {
  if (toggle) {
    element.classList.add(name);
  } else {
    element.classList.remove(name);
  }
}
|
|
// Builds a CustomEvent via the legacy document.createEvent API
// (initCustomEvent is deprecated but kept here for older-browser support).
function custom_event(type, detail, { bubbles = false, cancelable = false } = {}) {
  const e = document.createEvent('CustomEvent');
  e.initCustomEvent(type, bubbles, cancelable, detail);
  return e;
}
|
|
// Manages a raw-HTML fragment ({@html ...}): parses markup into nodes via a
// scratch element and keeps them insertable/updatable/removable as a unit.
class HtmlTag {
  constructor(is_svg = false) {
    this.is_svg = false;
    this.is_svg = is_svg;
    // e: scratch container element; n: current list of parsed nodes.
    this.e = this.n = null;
  }
  // create: parse html into nodes.
  c(html) {
    this.h(html);
  }
  // mount: lazily build a scratch container matching the target's namespace,
  // then insert the parsed nodes before `anchor`.
  m(html, target, anchor = null) {
    if (!this.e) {
      if (this.is_svg)
        this.e = svg_element(target.nodeName);
      else
        this.e = element(target.nodeName);
      this.t = target;
      this.c(html);
    }
    this.i(anchor);
  }
  // parse `html` via the scratch element's innerHTML.
  h(html) {
    this.e.innerHTML = html;
    this.n = Array.from(this.e.childNodes);
  }
  // insert all parsed nodes into the target before `anchor`.
  i(anchor) {
    for (let i = 0; i < this.n.length; i += 1) {
      insert(this.t, this.n[i], anchor);
    }
  }
  // patch: replace current nodes with a re-parse of `html`.
  // NOTE(review): `this.a` is never assigned anywhere in this class, so the
  // re-insert anchor is always undefined (append) — looks inherited from a
  // variant that stored the mount anchor; verify against upstream Svelte.
  p(html) {
    this.d();
    this.h(html);
    this.i(this.a);
  }
  // detach all current nodes from the DOM.
  d() {
    this.n.forEach(detach);
  }
}
|
|
|
|
// The component currently being initialized; lifecycle helpers depend on it.
let current_component;

/** Sets (or clears, with null) the component under initialization. */
function set_current_component(component) {
  current_component = component;
}

/**
 * Returns the component under initialization.
 * @throws {Error} when called outside component initialization.
 */
function get_current_component() {
  if (!current_component) {
    throw new Error('Function called outside component initialization');
  }
  return current_component;
}
|
|
// Schedules `fn` to run after the current component updates.
function afterUpdate(fn) {
  const component = get_current_component();
  component.$$.after_update.push(fn);
}
|
|
/**
 * Creates a dispatch function bound to the current component. Calling it
 * fires `type` with `detail` at the component's listeners and returns
 * whether the event went unprevented.
 */
function createEventDispatcher() {
  const component = get_current_component();
  return (type, detail, { cancelable = false } = {}) => {
    const callbacks = component.$$.callbacks[type];
    if (!callbacks) {
      // No listeners registered: treated as "not prevented".
      return true;
    }
    // TODO are there situations where events could be dispatched
    // in a server (non-DOM) environment?
    const event = custom_event(type, detail, { cancelable });
    // Iterate over a copy so listeners added/removed mid-dispatch are safe.
    for (const fn of callbacks.slice()) {
      fn.call(component, event);
    }
    return !event.defaultPrevented;
  };
}
|
|
|
|
// Components with pending state changes, processed in order by flush().
const dirty_components = [];
// bind:this callbacks, run in reverse order (children before parents).
const binding_callbacks = [];
// afterUpdate / onMount callbacks queued for the current flush.
const render_callbacks = [];
// Callbacks to run once, after a full flush completes.
const flush_callbacks = [];
// Microtask used to coalesce all updates in a tick into one flush().
const resolved_promise = Promise.resolve();
// Guards against scheduling more than one flush per microtask.
let update_scheduled = false;
|
|
// Queues a flush() on the microtask queue, at most once per tick.
function schedule_update() {
  if (update_scheduled) {
    return;
  }
  update_scheduled = true;
  resolved_promise.then(flush);
}
|
|
// Enqueues `fn` for the render phase of the current (or next) flush.
function add_render_callback(fn) {
  render_callbacks.push(fn);
}
|
|
// flush() calls callbacks in this order:
|
|
// 1. All beforeUpdate callbacks, in order: parents before children
|
|
// 2. All bind:this callbacks, in reverse order: children before parents.
|
|
// 3. All afterUpdate callbacks, in order: parents before children. EXCEPT
|
|
// for afterUpdates called during the initial onMount, which are called in
|
|
// reverse order: children before parents.
|
|
// Since callbacks might update component values, which could trigger another
|
|
// call to flush(), the following steps guard against this:
|
|
// 1. During beforeUpdate, any updated components will be added to the
|
|
// dirty_components array and will cause a reentrant call to flush(). Because
|
|
// the flush index is kept outside the function, the reentrant call will pick
|
|
// up where the earlier call left off and go through all dirty components. The
|
|
// current_component value is saved and restored so that the reentrant call will
|
|
// not interfere with the "parent" flush() call.
|
|
// 2. bind:this callbacks cannot trigger new flush() calls.
|
|
// 3. During afterUpdate, any updated components will NOT have their afterUpdate
|
|
// callback called a second time; the seen_callbacks set, outside the flush()
|
|
// function, guarantees this behavior.
|
|
// Prevents an afterUpdate callback from running twice in one flush cycle.
const seen_callbacks = new Set();
let flushidx = 0; // Do *not* move this inside the flush() function
|
|
// Drains the scheduler queues in the documented order (see comment above).
// Re-entrant: a nested flush() continues from the shared `flushidx`.
function flush() {
  // Save/restore so a reentrant flush doesn't clobber the active component.
  const saved_component = current_component;
  do {
    // first, call beforeUpdate functions
    // and update components
    while (flushidx < dirty_components.length) {
      const component = dirty_components[flushidx];
      flushidx++;
      set_current_component(component);
      update(component.$$);
    }
    set_current_component(null);
    dirty_components.length = 0;
    flushidx = 0;
    // bind:this callbacks run in reverse: children before parents.
    while (binding_callbacks.length)
      binding_callbacks.pop()();
    // then, once components are updated, call
    // afterUpdate functions. This may cause
    // subsequent updates...
    for (let i = 0; i < render_callbacks.length; i += 1) {
      const callback = render_callbacks[i];
      if (!seen_callbacks.has(callback)) {
        // ...so guard against infinite loops
        seen_callbacks.add(callback);
        callback();
      }
    }
    render_callbacks.length = 0;
  } while (dirty_components.length);
  while (flush_callbacks.length) {
    flush_callbacks.pop()();
  }
  update_scheduled = false;
  seen_callbacks.clear();
  set_current_component(saved_component);
}
|
|
// Applies pending state to one component: runs beforeUpdate hooks, patches
// the DOM fragment with the dirty bitmask, then queues afterUpdate hooks.
function update($$) {
  // A null fragment means the component was destroyed; skip.
  if ($$.fragment !== null) {
    $$.update();
    run_all($$.before_update);
    const dirty = $$.dirty;
    // Reset to the "clean" sentinel *before* patching, so state changes
    // triggered during p() re-enqueue the component correctly.
    $$.dirty = [-1];
    $$.fragment && $$.fragment.p($$.ctx, dirty);
    $$.after_update.forEach(add_render_callback);
  }
}
|
|
// Blocks currently playing an outro transition.
const outroing = new Set();

// Plays a block's intro transition (if it has one), cancelling any
// pending outro for that block first.
function transition_in(block, local) {
  if (!block || !block.i) {
    return;
  }
  outroing.delete(block);
  block.i(local);
}
|
|
|
|
// Destroys a keyed #each block (detaching its DOM) and drops it from the
// key lookup.
function destroy_block(block, lookup) {
  const DETACHING = 1;
  block.d(DETACHING);
  lookup.delete(block.key);
}
|
|
// Diffs a keyed #each list: reuses, moves, creates, and destroys blocks so
// the DOM matches `list`, minimizing moves by comparing per-key move
// distances ("deltas"). Returns the new block array.
function update_keyed_each(old_blocks, dirty, get_key, dynamic, ctx, list, lookup, node, destroy, create_each_block, next, get_context) {
  let o = old_blocks.length;
  let n = list.length;
  let i = o;
  // key -> index in the old block list.
  const old_indexes = {};
  while (i--)
    old_indexes[old_blocks[i].key] = i;
  const new_blocks = [];
  const new_lookup = new Map();
  // key -> how far the block must move between old and new positions.
  const deltas = new Map();
  i = n;
  while (i--) {
    const child_ctx = get_context(ctx, list, i);
    const key = get_key(child_ctx);
    let block = lookup.get(key);
    if (!block) {
      // New key: create the block.
      block = create_each_block(key, child_ctx);
      block.c();
    }
    else if (dynamic) {
      // Existing block: patch with the fresh context.
      block.p(child_ctx, dirty);
    }
    new_lookup.set(key, new_blocks[i] = block);
    if (key in old_indexes)
      deltas.set(key, Math.abs(i - old_indexes[key]));
  }
  const will_move = new Set();
  const did_move = new Set();
  // Mounts `block` before `next` and records it as kept.
  function insert(block) {
    transition_in(block, 1);
    block.m(node, next);
    lookup.set(block.key, block);
    next = block.first;
    n--;
  }
  // Walk both lists from the end, reconciling one block per iteration.
  while (o && n) {
    const new_block = new_blocks[n - 1];
    const old_block = old_blocks[o - 1];
    const new_key = new_block.key;
    const old_key = old_block.key;
    if (new_block === old_block) {
      // do nothing
      next = new_block.first;
      o--;
      n--;
    }
    else if (!new_lookup.has(old_key)) {
      // remove old block
      destroy(old_block, lookup);
      o--;
    }
    else if (!lookup.has(new_key) || will_move.has(new_key)) {
      insert(new_block);
    }
    else if (did_move.has(old_key)) {
      o--;
    }
    else if (deltas.get(new_key) > deltas.get(old_key)) {
      // Move whichever block has to travel further.
      did_move.add(new_key);
      insert(new_block);
    }
    else {
      will_move.add(old_key);
      o--;
    }
  }
  // Destroy leftover old blocks whose keys are gone.
  while (o--) {
    const old_block = old_blocks[o];
    if (!new_lookup.has(old_block.key))
      destroy(old_block, lookup);
  }
  // Mount any remaining new blocks.
  while (n)
    insert(new_blocks[n - 1]);
  return new_blocks;
}
|
|
// Mounts a component's fragment into the DOM and wires up its
// onMount / afterUpdate lifecycle callbacks.
function mount_component(component, target, anchor, customElement) {
  const { fragment, on_mount, on_destroy, after_update } = component.$$;
  fragment && fragment.m(target, anchor);
  if (!customElement) {
    // onMount happens before the initial afterUpdate
    add_render_callback(() => {
      // An onMount may return a cleanup function; collect those as
      // destructors.
      const new_on_destroy = on_mount.map(run).filter(is_function);
      if (on_destroy) {
        on_destroy.push(...new_on_destroy);
      }
      else {
        // Edge case - component was destroyed immediately,
        // most likely as a result of a binding initialising
        run_all(new_on_destroy);
      }
      component.$$.on_mount = [];
    });
  }
  after_update.forEach(add_render_callback);
}
|
|
// Runs destructors, detaches the fragment, and clears internal references.
// Safe to call more than once: a null fragment marks "already destroyed".
function destroy_component(component, detaching) {
  const $$ = component.$$;
  if ($$.fragment !== null) {
    run_all($$.on_destroy);
    $$.fragment && $$.fragment.d(detaching);
    // TODO null out other refs, including component.$$ (but need to
    // preserve final state?)
    $$.on_destroy = $$.fragment = null;
    $$.ctx = [];
  }
}
|
|
// Marks slot `i` of a component's dirty bitmask and schedules a flush.
function make_dirty(component, i) {
  if (component.$$.dirty[0] === -1) {
    // First change since the last flush: enqueue the component and clear
    // the [-1] "clean" sentinel.
    dirty_components.push(component);
    schedule_update();
    component.$$.dirty.fill(0);
  }
  // 31 usable bits per array element (avoids 32-bit sign issues).
  component.$$.dirty[(i / 31) | 0] |= (1 << (i % 31));
}
|
|
// Bootstraps a component instance: builds its $$ internal state, runs the
// instance function, creates the fragment, and (when options.target is
// given) hydrates/creates and mounts it.
function init(component, options, instance, create_fragment, not_equal, props, append_styles, dirty = [-1]) {
  const parent_component = current_component;
  set_current_component(component);
  const $$ = component.$$ = {
    fragment: null,
    ctx: null,
    // state
    props,
    update: noop,
    not_equal,
    bound: blank_object(),
    // lifecycle
    on_mount: [],
    on_destroy: [],
    on_disconnect: [],
    before_update: [],
    after_update: [],
    // Context is inherited from the parent unless explicitly provided.
    context: new Map(options.context || (parent_component ? parent_component.$$.context : [])),
    // everything else
    callbacks: blank_object(),
    dirty,
    skip_bound: false,
    root: options.target || parent_component.$$.root
  };
  append_styles && append_styles($$.root);
  let ready = false;
  // The third argument passed to `instance` is the `$$invalidate` function:
  // it stores the new value and, once ready, marks the slot dirty.
  $$.ctx = instance
    ? instance(component, options.props || {}, (i, ret, ...rest) => {
      const value = rest.length ? rest[0] : ret;
      if ($$.ctx && not_equal($$.ctx[i], $$.ctx[i] = value)) {
        if (!$$.skip_bound && $$.bound[i])
          $$.bound[i](value);
        if (ready)
          make_dirty(component, i);
      }
      return ret;
    })
    : [];
  $$.update();
  ready = true;
  run_all($$.before_update);
  // `false` as a special case of no DOM component
  $$.fragment = create_fragment ? create_fragment($$.ctx) : false;
  if (options.target) {
    if (options.hydrate) {
      // Claim existing server-rendered nodes, then drop the leftovers.
      const nodes = children(options.target);
      // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
      $$.fragment && $$.fragment.l(nodes);
      nodes.forEach(detach);
    }
    else {
      // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
      $$.fragment && $$.fragment.c();
    }
    if (options.intro)
      transition_in(component.$$.fragment);
    mount_component(component, options.target, options.anchor, options.customElement);
    flush();
  }
  set_current_component(parent_component);
}
|
|
/**
|
|
* Base class for Svelte components. Used when dev=false.
|
|
*/
|
|
class SvelteComponent {
  /** Tears the component down; subsequent calls are no-ops. */
  $destroy() {
    destroy_component(this, 1);
    this.$destroy = noop;
  }

  /**
   * Registers an event listener for `type` and returns an unsubscribe
   * function.
   */
  $on(type, callback) {
    let callbacks = this.$$.callbacks[type];
    if (!callbacks) {
      callbacks = [];
      this.$$.callbacks[type] = callbacks;
    }
    callbacks.push(callback);
    return () => {
      const index = callbacks.indexOf(callback);
      if (index !== -1) {
        callbacks.splice(index, 1);
      }
    };
  }

  /** Applies external prop updates without re-triggering two-way bindings. */
  $set($$props) {
    if (!this.$$set || is_empty($$props)) {
      return;
    }
    this.$$.skip_bound = true;
    this.$$set($$props);
    this.$$.skip_bound = false;
  }
}
|
|
|
|
// Public surface of this vendor bundle: Fuse (search), Svelte runtime
// helpers, and the tslib private-field shims.
export { Fuse, HtmlTag, SvelteComponent, __classPrivateFieldGet, __classPrivateFieldSet, afterUpdate, append, attr, binding_callbacks, createEventDispatcher, destroy_block, destroy_each, detach, element, empty, init, insert, listen, noop, run_all, safe_not_equal, set_data, space, src_url_equal, stop_propagation, text, toggle_class, update_keyed_each };
|
|
//# sourceMappingURL=vendor.js.map
|