I made a prefetch query like this, and the first time it ran the filter and worked.
But now that there are more entries on the server, it appears to be serving stale results from the cache. How do I force it to stop using the cache?
// NOTE(review): Bloodhound caches prefetch responses (in localStorage) by
// default, so the `filter` only runs on a cache miss — presumably why new
// server entries stop appearing after the first load. See the prefetch
// `cache` option to disable this.
var countries = new Bloodhound({
// Tokenize each datum's `name` property on whitespace for matching.
datumTokenizer: function (d) { return Bloodhound.tokenizers.whitespace(d.name); },
queryTokenizer: Bloodhound.tokenizers.whitespace,
limit: 10,
prefetch: {
// `Url` is defined elsewhere — assumed to be the app's base URL; TODO confirm.
url: Url + '/Country/JsonList',
// Reshape the raw server payload: keep only each record's Name as `name`.
filter: function (list) {
return $.map(list, function (country) { return { name: country.Name }; });
}
}
});
countries.initialize();
// Wire the Bloodhound suggestion engine into the typeahead input.
$('.countries.typeahead').typeahead(null, {
displayKey: 'name',
source: countries.ttAdapter()
});
I think this is better than the accepted answer:
// Same setup, but with prefetch caching disabled so the response is fetched
// (and the filter re-run) on every page load instead of being read back from
// localStorage.
var countries = new Bloodhound({
  // Match against the whitespace-split tokens of each datum's `name`.
  datumTokenizer: function (datum) {
    return Bloodhound.tokenizers.whitespace(datum.name);
  },
  queryTokenizer: Bloodhound.tokenizers.whitespace,
  limit: 10,
  prefetch: {
    url: Url + '/Country/JsonList',
    // Map the raw server records down to plain { name } objects.
    filter: function (records) {
      return $.map(records, function (record) {
        return { name: record.Name };
      });
    },
    cache: false // NEW! — bypass the localStorage prefetch cache
  }
});
countries.initialize();
$('.countries.typeahead').typeahead(null, {
  displayKey: 'name',
  source: countries.ttAdapter() // NOTE: .ttAdapter() is deprecated and will be removed in V1
});