Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Update RequestCache #112

Open
wants to merge 13 commits into
base: master
Choose a base branch
from
3 changes: 2 additions & 1 deletion lib/src/services/base_api_call.dart
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ import 'dart:async';

import 'package:http/http.dart' as http;

import 'typedefs.dart';
import 'node_pool.dart';
import '../configuration.dart';
import '../models/node.dart';
Expand Down Expand Up @@ -51,7 +52,7 @@ abstract class BaseApiCall<R extends Object> {
///
/// Also sets the health status of nodes after each request so it can be put
/// in/out of [NodePool]'s circulation.
Future<R> send(Future<http.Response> Function(Node) request) async {
Future<R> send(Request request) async {
http.Response response;
Node node;
for (var triesLeft = config.numRetries;;) {
Expand Down
47 changes: 18 additions & 29 deletions lib/src/services/request_cache.dart
Original file line number Diff line number Diff line change
@@ -1,44 +1,33 @@
import 'dart:collection';
import 'package:dcache/dcache.dart';
import 'package:http/http.dart';

import 'package:http/http.dart' as http;

import '../models/node.dart';
import 'typedefs.dart' as defs;

/// Cache store which uses a [HashMap] internally to serve requests.
class RequestCache {
final _cachedResponses = HashMap<int, _Cache>();
Cache _cachedResponses;
final Duration timeToUse;
final int size;
final defs.Send<Map<String, dynamic>> send;

RequestCache(this.size, this.timeToUse, this.send) {
_cachedResponses = LruCache<dynamic, Response>(storage: InMemoryStorage(size));
harisarang marked this conversation as resolved.
Show resolved Hide resolved
}

// TODO(harisarang): rename this function to getResponse
/// Caches the response of the [request], identified by [key]. The cached
/// response is valid till [cacheTTL].
Future<Map<String, dynamic>> cache(
Future<Map<String, dynamic>> getResponse(
int key,
harisarang marked this conversation as resolved.
Show resolved Hide resolved
Future<Map<String, dynamic>> Function(Future<http.Response> Function(Node))
send,
Future<http.Response> Function(Node) request,
Duration cacheTTL,
defs.Request request,
) async {
if (_cachedResponses.containsKey(key)) {
if (_isCacheValid(_cachedResponses[key], cacheTTL)) {
harisarang marked this conversation as resolved.
Show resolved Hide resolved
// Cache entry is still valid, return it
return Future.value(_cachedResponses[key].data);
} else {
// Cache entry has expired, so delete it explicitly
_cachedResponses.remove(key);
}
return send(_cachedResponses.get(key));
harisarang marked this conversation as resolved.
Show resolved Hide resolved
}

final response = await send(request);
_cachedResponses[key] = _Cache(response, DateTime.now());
var response = await send(request);
_cachedResponses.set(key, response);
return response;
}

bool _isCacheValid(_Cache cache, Duration cacheTTL) =>
DateTime.now().difference(cache.creationTime) < cacheTTL;
}

class _Cache {
final DateTime creationTime;
final Map<String, dynamic> data;

const _Cache(this.data, this.creationTime);
}
6 changes: 6 additions & 0 deletions lib/src/services/typedefs.dart
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
import 'package:http/http.dart' as http;

import '../models/node.dart';

/// Performs an HTTP request against the given [Node] and resolves with the
/// raw [http.Response].
typedef Request = Future<http.Response> Function(Node);

/// Executes a [Request] and resolves with a result of type [R] — in this
/// library the decoded JSON body of the response (a `Map<String, dynamic>`).
typedef Send<R> = Future<R> Function(Request);
1 change: 1 addition & 0 deletions pubspec.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ dependencies:
http: ^0.13.3
crypto: ^3.0.1
equatable: ^2.0.2
dcache: ^0.4.0

dev_dependencies:
test: ^1.17.7
Expand Down
31 changes: 30 additions & 1 deletion test/configuration_test.dart
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ void main() {
retryInterval: Duration(seconds: 3),
sendApiKeyAsQueryParam: true,
cachedSearchResultsTTL: Duration(seconds: 30),
cacheCapacity: 101,
);

group('Configuration', () {
Expand Down Expand Up @@ -61,9 +62,12 @@ void main() {
test('has a sendApiKeyAsQueryParam field', () {
expect(config.sendApiKeyAsQueryParam, isTrue);
});
test('has a cacheSearchResults field', () {
test('has a cacheSearchResultsTTL field', () {
expect(config.cachedSearchResultsTTL, equals(Duration(seconds: 30)));
});
test('has a cacheCapacity field', () {
expect(config.cacheCapacity, equals(101));
});
});

group('Configuration initialization', () {
Expand Down Expand Up @@ -180,6 +184,31 @@ void main() {
);
expect(config.retryInterval, equals(Duration(milliseconds: 100)));
});
test('with missing cacheCapacity, sets cacheCapacity to 100', () {
final config = Configuration(
apiKey: 'abc123',
connectionTimeout: Duration(seconds: 10),
healthcheckInterval: Duration(seconds: 5),
nearestNode: Node(
protocol: 'http',
host: 'localhost',
path: '/path/to/service',
),
nodes: {
Node(
protocol: 'https',
host: 'localhost',
path: '/path/to/service',
),
},
numRetries: 5,
retryInterval: Duration(seconds: 3),
sendApiKeyAsQueryParam: true,
cachedSearchResultsTTL: Duration(seconds: 30),
);

expect(config.cacheCapacity, equals(100));
});
test(
'with missing sendApiKeyAsQueryParam, sets sendApiKeyAsQueryParam to false',
() {
Expand Down
164 changes: 116 additions & 48 deletions test/services/request_cache_test.dart
Original file line number Diff line number Diff line change
Expand Up @@ -6,91 +6,159 @@ import 'package:http/http.dart' as http;

import 'package:typesense/src/services/request_cache.dart';
import 'package:typesense/src/models/node.dart';
import 'package:typesense/src/services/typedefs.dart';

import '../test_utils.dart';

class MockResponse extends Mock implements http.Response {}

Future<Map<String, dynamic>> send(request) async {
final response = await request(
Node(
protocol: protocol,
host: host,
path: pathToService,
),
);
return json.decode(response.body);
}

void main() {
RequestCache requestCache;
MockResponse mockResponse;
int requestNumber;
Request request;

setUp(() {
requestCache = RequestCache(5, Duration(seconds: 1), send);

mockResponse = MockResponse();
requestNumber = 1;
when(mockResponse.body).thenAnswer((invocation) {
switch (requestNumber++) {
case 1:
return json.encode({'value': 'initial'});

case 2:
return json.encode({'value': 'updated'});

default:
return json.encode({});
}
});
request = (node) => Future.value(mockResponse);
});
group('RequestCache', () {
RequestCache requestCache;
MockResponse mockResponse;
int requestNumber;
Future<Map<String, dynamic>> Function(Future<http.Response> Function(Node))
send;
Future<http.Response> Function(Node) request;
final cacheTTL = Duration(seconds: 1);
setUp(() {
requestCache = RequestCache();
mockResponse = MockResponse();
requestNumber = 1;

when(mockResponse.body).thenAnswer((invocation) {
switch (requestNumber++) {
case 1:
return json.encode({'value': 'initial'});

case 2:
return json.encode({'value': 'updated'});

default:
return json.encode({});
}
});

send = (request) async {
final response = await request(
Node(protocol: protocol, host: host, path: pathToService));
return json.decode(response.body);
};
request = (node) => Future.value(mockResponse);
test('has a size field', () {
expect(requestCache.size, equals(5));
});
test('has a timeToUse field', () {
expect(requestCache.timeToUse, equals(Duration(seconds: 1)));
});
test('has a getResponse method', () async {
expect(
await requestCache.getResponse(
'/value'.hashCode,
request,
),
equals({'value': 'initial'}));
});
test('has a send method', () async {
expect(await requestCache.send(request), equals({'value': 'initial'}));
});
});

test('caches the response', () async {
group('RequestCache.getResponse', () {
test('returns cached response', () async {
expect(
await requestCache.cache(
await requestCache.getResponse(
'/value'.hashCode,
send,
request,
cacheTTL,
),
equals({'value': 'initial'}));
expect(
await requestCache.cache(
await requestCache.getResponse(
'/value'.hashCode,
send,
request,
cacheTTL,
),
equals({'value': 'initial'}));
});
test('refreshes the cache after TTL duration', () async {
test('refreshes the cache after timeToUse duration', () async {
expect(
await requestCache.cache(
await requestCache.getResponse(
'/value'.hashCode,
send,
request,
cacheTTL,
),
equals({'value': 'initial'}));
expect(
await requestCache.cache(
await requestCache.getResponse(
'/value'.hashCode,
send,
request,
cacheTTL,
),
equals({'value': 'initial'}));

await Future.delayed(Duration(seconds: 1, milliseconds: 100));
expect(
await requestCache.cache(
await requestCache.getResponse(
'/value'.hashCode,
send,
request,
cacheTTL,
),
equals({'value': 'updated'}));
});
test('evicts the least recently used response', () async {
requestCache = RequestCache(5, Duration(seconds: 10), send);

final mockResponses = List.generate(6, (_) => MockResponse()),
callCounters = List.filled(6, 0);
var i = 0;

for (final mockResponse in mockResponses) {
when(mockResponse.body).thenAnswer((invocation) {
return json.encode({'$i': '${++callCounters[i]}'});
});
}

// Cache size is 5, filling up the cache with different responses.
for (; i < 5; i++) {
expect(
await requestCache.getResponse(
i,
(node) => Future.value(mockResponses[i]),
),
equals({'$i': '1'}));
}

// The responses should still be 1 since they're cached.
i = 0;
for (; i < 5; i++) {
expect(
await requestCache.getResponse(
i,
(node) => Future.value(mockResponses[i]),
),
equals({'$i': '1'}));
}

// Least recently used response at this moment should be index 0 and hence
// should be evicted by the following call.
expect(
await requestCache.getResponse(
5,
(node) => Future.value(mockResponses[5]),
),
equals({'5': '1'}));

// The responses should now be 2 since each response gets evicted before
// being called again.
i = 0;
for (; i < 5; i++) {
expect(
await requestCache.getResponse(
i,
(node) => Future.value(mockResponses[i]),
),
equals({'$i': '2'}));
}
});
});
}