I tried this solution but I got this error :
Uncaught ReferenceError: normalized is not defined
Here is my code :
var charMap = {
"à": "a", "â": "a", "é": "e", "è": "e", "ê": "e", "ë": "e",
"ï": "i", "î": "i", "ô": "o", "ö": "o", "û": "u", "ù": "u"
};
var normalize = function(str) {
$.each(charMap, function(chars, normalized) {
var regex = new RegExp('[' + chars + ']', 'gi');
str = str.replace(regex, normalized);
});
return normalized;
}
var queryTokenizer = function(q) {
var normalized = normalize(q);
return Bloodhound.tokenizers.whitespace(normalized);
};
var spectacles = new Bloodhound({
datumTokenizer: Bloodhound.tokenizers.obj.whitespace('value'),
queryTokenizer: queryTokenizer,
prefetch:'spectacles.json',
limit:10,
});
spectacles.initialize();
$('#search').typeahead({
minLength: 1,
hint:false,
highlight: true
},
{
name: 'spectacles',
displayKey: 'value',
source: spectacles.ttAdapter()
}) ;
where is my error? Thanks
Change your normalize function so that it returns the normalized string i.e.
var normalize = function (input) {
$.each(charMap, function (unnormalizedChar, normalizedChar) {
var regex = new RegExp(unnormalizedChar, 'gi');
input = input.replace(regex, normalizedChar);
});
return input;
}
See this fiddle I wrote to see it working:
http://jsfiddle.net/Fresh/SL36H/
You can see the normalized string in the browser debug console. In my example "àààèèèùùù" is converted to "aaaeeeuuu".
Note that I've changed the function parameters so that they are more accurate (i.e. chars is incorrect, it should be char) and I've also rationalised the regular expression.