diff --git a/lib/src/services/api_call.dart b/lib/src/services/api_call.dart index 057d32f..7c4e56d 100644 --- a/lib/src/services/api_call.dart +++ b/lib/src/services/api_call.dart @@ -30,17 +30,16 @@ class ApiCall extends BaseApiCall> { bool shouldCacheResult = false, }) => shouldCacheResult && config.cachedSearchResultsTTL != Duration.zero - ? _requestCache.cache( + ? _requestCache.getResponse( // SplayTreeMap ensures order of the parameters is maintained so // cache key won't differ because of different ordering of // parameters. - '$endpoint${SplayTreeMap.from(queryParams)}'.hashCode, - send, + '$endpoint${SplayTreeMap.from(queryParams)}', (node) => node.client.get( requestUri(node, endpoint, queryParams), headers: defaultHeaders, ), - config.cachedSearchResultsTTL, + send, ) : send((node) => node.client.get( requestUri(node, endpoint, queryParams), @@ -80,19 +79,17 @@ class ApiCall extends BaseApiCall> { bool shouldCacheResult = false, }) => shouldCacheResult && config.cachedSearchResultsTTL != Duration.zero - ? _requestCache.cache( + ? _requestCache.getResponse( // SplayTreeMap ensures order of the parameters is maintained so // cache key won't differ because of different ordering of // parameters. 
- '$endpoint${SplayTreeMap.from(queryParams)}${SplayTreeMap.from(additionalHeaders)}${json.encode(bodyParameters)}' - .hashCode, - send, + '$endpoint${SplayTreeMap.from(queryParams)}${SplayTreeMap.from(additionalHeaders)}${json.encode(bodyParameters)}', (node) => node.client.post( requestUri(node, endpoint, queryParams), headers: {...defaultHeaders, ...additionalHeaders}, body: json.encode(bodyParameters), ), - config.cachedSearchResultsTTL, + send, ) : send((node) => node.client.post( requestUri(node, endpoint, queryParams), diff --git a/lib/src/services/base_api_call.dart b/lib/src/services/base_api_call.dart index f44af64..6944df2 100644 --- a/lib/src/services/base_api_call.dart +++ b/lib/src/services/base_api_call.dart @@ -2,6 +2,7 @@ import 'dart:async'; import 'package:http/http.dart' as http; +import 'typedefs.dart'; import 'node_pool.dart'; import '../configuration.dart'; import '../models/node.dart'; @@ -51,7 +52,7 @@ abstract class BaseApiCall { /// /// Also sets the health status of nodes after each request so it can be put /// in/out of [NodePool]'s circulation. - Future send(Future Function(Node) request) async { + Future send(Request request) async { http.Response response; Node node; for (var triesLeft = config.numRetries;;) { diff --git a/lib/src/services/request_cache.dart b/lib/src/services/request_cache.dart index 65f835f..e8e8b6f 100644 --- a/lib/src/services/request_cache.dart +++ b/lib/src/services/request_cache.dart @@ -1,44 +1,36 @@ import 'dart:collection'; +import 'package:dcache/dcache.dart'; -import 'package:http/http.dart' as http; - -import '../models/node.dart'; +import 'typedefs.dart'; -/// Cache store which uses a [HashMap] internally to serve requests. +/// Cache store which uses an [LruCache] internally to serve requests. 
class RequestCache { - final _cachedResponses = HashMap(); + Cache> _cachedResponses; + final _cachedTimestamp = HashMap(); + final Duration timeToUse; + final int size; + + RequestCache(this.size, this.timeToUse) { + _cachedResponses = LruCache>(storage: InMemoryStorage(size)); + } /// Caches the response of the [request], identified by [key]. The cached - /// response is valid till [cacheTTL]. + /// response is valid till [timeToUse]. - Future> cache( - int key, - Future> Function(Future Function(Node)) - send, - Future Function(Node) request, - Duration cacheTTL, + Future> getResponse( + String key, + Request request, + Send> send ) async { - if (_cachedResponses.containsKey(key)) { - if (_isCacheValid(_cachedResponses[key], cacheTTL)) { - // Cache entry is still valid, return it - return Future.value(_cachedResponses[key].data); - } else { - // Cache entry has expired, so delete it explicitly - _cachedResponses.remove(key); - } + if (_cachedResponses.containsKey(key) && _isCacheValid(key)) { + return Future>.value(_cachedResponses.get(key)); } - final response = await send(request); - _cachedResponses[key] = _Cache(response, DateTime.now()); + var response = await send(request); + _cachedResponses.set(key, response); + _cachedTimestamp[key] = DateTime.now(); return response; } - bool _isCacheValid(_Cache cache, Duration cacheTTL) => - DateTime.now().difference(cache.creationTime) < cacheTTL; -} - -class _Cache { - final DateTime creationTime; - final Map data; - - const _Cache(this.data, this.creationTime); + bool _isCacheValid(String key) => + DateTime.now().difference(_cachedTimestamp[key]) < timeToUse; } diff --git a/lib/src/services/typedefs.dart b/lib/src/services/typedefs.dart new file mode 100644 index 0000000..50b9095 --- /dev/null +++ b/lib/src/services/typedefs.dart @@ -0,0 +1,6 @@ +import 'package:http/http.dart' as http; + +import '../models/node.dart'; + +typedef Request = Future Function(Node); +typedef Send = Future Function(Request); diff --git a/pubspec.yaml b/pubspec.yaml index 
5aafc7a..4585efc 100644 --- a/pubspec.yaml +++ b/pubspec.yaml @@ -11,6 +11,7 @@ dependencies: http: ^0.13.3 crypto: ^3.0.1 equatable: ^2.0.2 + dcache: ^0.4.0 dev_dependencies: test: ^1.17.7 diff --git a/test/configuration_test.dart b/test/configuration_test.dart index 9d04788..f3ce36a 100644 --- a/test/configuration_test.dart +++ b/test/configuration_test.dart @@ -27,6 +27,7 @@ void main() { retryInterval: Duration(seconds: 3), sendApiKeyAsQueryParam: true, cachedSearchResultsTTL: Duration(seconds: 30), + cacheCapacity: 101, ); group('Configuration', () { @@ -61,9 +62,12 @@ void main() { test('has a sendApiKeyAsQueryParam field', () { expect(config.sendApiKeyAsQueryParam, isTrue); }); - test('has a cacheSearchResults field', () { + test('has a cachedSearchResultsTTL field', () { expect(config.cachedSearchResultsTTL, equals(Duration(seconds: 30))); }); + test('has a cacheCapacity field', () { + expect(config.cacheCapacity, equals(101)); + }); }); group('Configuration initialization', () { @@ -180,6 +184,31 @@ void main() { ); expect(config.retryInterval, equals(Duration(milliseconds: 100))); }); + test('with missing cacheCapacity, sets cacheCapacity to 100', () { + final config = Configuration( + apiKey: 'abc123', + connectionTimeout: Duration(seconds: 10), + healthcheckInterval: Duration(seconds: 5), + nearestNode: Node( + protocol: 'http', + host: 'localhost', + path: '/path/to/service', + ), + nodes: { + Node( + protocol: 'https', + host: 'localhost', + path: '/path/to/service', + ), + }, + numRetries: 5, + retryInterval: Duration(seconds: 3), + sendApiKeyAsQueryParam: true, + cachedSearchResultsTTL: Duration(seconds: 30), + ); + + expect(config.cacheCapacity, equals(100)); + }); test( 'with missing sendApiKeyAsQueryParam, sets sendApiKeyAsQueryParam to false', () { diff --git a/test/services/request_cache_test.dart b/test/services/request_cache_test.dart index 8a50ea5..bbbaadb 100644 --- a/test/services/request_cache_test.dart +++ 
b/test/services/request_cache_test.dart @@ -6,91 +6,166 @@ import 'package:http/http.dart' as http; import 'package:typesense/src/services/request_cache.dart'; import 'package:typesense/src/models/node.dart'; +import 'package:typesense/src/services/typedefs.dart'; import '../test_utils.dart'; class MockResponse extends Mock implements http.Response {} +Future> send(request) async { + final response = await request( + Node( + protocol: protocol, + host: host, + path: pathToService, + ), + ); + return json.decode(response.body); +} + void main() { + RequestCache requestCache; + MockResponse mockResponse; + int requestNumber; + Request request; + + setUp(() { + requestCache = RequestCache(5, Duration(seconds: 1)); + + mockResponse = MockResponse(); + requestNumber = 1; + when(mockResponse.body).thenAnswer((invocation) { + switch (requestNumber++) { + case 1: + return json.encode({'value': 'initial'}); + + case 2: + return json.encode({'value': 'updated'}); + + default: + return json.encode({}); + } + }); + request = (node) => Future.value(mockResponse); + }); group('RequestCache', () { - RequestCache requestCache; - MockResponse mockResponse; - int requestNumber; - Future> Function(Future Function(Node)) - send; - Future Function(Node) request; - final cacheTTL = Duration(seconds: 1); - setUp(() { - requestCache = RequestCache(); - mockResponse = MockResponse(); - requestNumber = 1; - - when(mockResponse.body).thenAnswer((invocation) { - switch (requestNumber++) { - case 1: - return json.encode({'value': 'initial'}); - - case 2: - return json.encode({'value': 'updated'}); - - default: - return json.encode({}); - } - }); - - send = (request) async { - final response = await request( - Node(protocol: protocol, host: host, path: pathToService)); - return json.decode(response.body); - }; - request = (node) => Future.value(mockResponse); + test('has a size field', () { + expect(requestCache.size, equals(5)); + }); + test('has a timeToUse field', () { + 
expect(requestCache.timeToUse, equals(Duration(seconds: 1))); + }); + test('has a getResponse method', () async { + expect( + await requestCache.getResponse( + '/value', + request, + send + ), + equals({'value': 'initial'})); }); + }); - test('caches the response', () async { + group('RequestCache.getResponse', () { + test('returns cached response', () async { expect( - await requestCache.cache( - '/value'.hashCode, - send, + await requestCache.getResponse( + '/value', request, - cacheTTL, + send ), equals({'value': 'initial'})); expect( - await requestCache.cache( - '/value'.hashCode, - send, + await requestCache.getResponse( + '/value', request, - cacheTTL, + send ), equals({'value': 'initial'})); }); - test('refreshes the cache after TTL duration', () async { + test('refreshes the cache after timeToUse duration', () async { expect( - await requestCache.cache( - '/value'.hashCode, - send, + await requestCache.getResponse( + '/value', request, - cacheTTL, + send ), equals({'value': 'initial'})); expect( - await requestCache.cache( - '/value'.hashCode, - send, + await requestCache.getResponse( + '/value', request, - cacheTTL, + send ), equals({'value': 'initial'})); await Future.delayed(Duration(seconds: 1, milliseconds: 100)); expect( - await requestCache.cache( - '/value'.hashCode, - send, + await requestCache.getResponse( + '/value', request, - cacheTTL, + send ), equals({'value': 'updated'})); }); + test('evicts the least recently used response', () async { + requestCache = RequestCache(5, Duration(seconds: 10)); + + final mockResponses = List.generate(6, (_) => MockResponse()), + callCounters = List.filled(6, 0); + var i = 0; + + for (final mockResponse in mockResponses) { + when(mockResponse.body).thenAnswer((invocation) { + return json.encode({'$i': '${++callCounters[i]}'}); + }); + } + + // Cache size is 5, filling up the cache with different responses. 
+ for (; i < 5; i++) { + expect( + await requestCache.getResponse( + i.toString(), + (node) => Future.value(mockResponses[i]), + send + ), + equals({'$i': '1'})); + } + + // The responses should still be 1 since they're cached. + i = 0; + for (; i < 5; i++) { + expect( + await requestCache.getResponse( + i.toString(), + (node) => Future.value(mockResponses[i]), + send + ), + equals({'$i': '1'})); + } + + // Least recently used response at this moment should be index 0 and hence + // should be evicted by the following call. + expect( + await requestCache.getResponse( + 5.toString(), + (node) => Future.value(mockResponses[5]), + send + ), + equals({'5': '1'})); + + // The responses should now be 2 since each response gets evicted before + // being called again. + i = 0; + for (; i < 5; i++) { + expect( + await requestCache.getResponse( + i.toString(), + (node) => Future.value(mockResponses[i]), + send + ), + equals({'$i': '2'})); + } + }); }); }