// Read the per-request cache override headers, then strip them so the
// markers are never sent over the wire to the backend.
const forcedCache = httpRequest.headers.get(CachingHeaders.Cache) !== null;
const forcedNoneCache = httpRequest.headers.get(CachingHeaders.NoCache) !== null;
let headers = httpRequest.headers.delete(CachingHeaders.NoCache);
headers = headers.delete(CachingHeaders.Cache);
httpRequest = httpRequest.clone({
  headers: headers
});
if (forcedCache && forcedNoneCache) {
  // The two override headers are mutually exclusive.
  throw new Error('You cannot use cache and non-cache header at once!');
} else if (forcedNoneCache || (this.httpCacheConfig.behavior === CachingHeaders.NoCache && !forcedCache)) {
  // Caching disabled — either forced per-request or by configuration
  // (and not overridden): pass the request straight through.
  return next.handle(httpRequest);
} else if (forcedCache || (this.httpCacheConfig.behavior === CachingHeaders.Cache && !forcedNoneCache)) {
  // Caching enabled: check whether we already hold an entry for this URI.
  const key = this.httpToKey(httpRequest);
  const lastResponse = this.getCache(key);
  if (lastResponse) {
    // An Observable in the cache means an identical request is still in
    // flight — return it so the caller piggybacks on that request.
    // Otherwise return a clone of the cached response, so consumers
    // cannot mutate the cached instance.
    return (lastResponse instanceof Observable)
      ? lastResponse : of(lastResponse.clone());
  }
  // First request for this key: let it proceed and cache the response
  // once it arrives. The tap below overwrites the in-flight Observable
  // entry with the concrete HttpResponse.
  const requestHandle = next.handle(httpRequest).pipe(
    tap((stateEvent: any) => {
      if (stateEvent instanceof HttpResponse) {
        this.setCache(
          key,
          stateEvent.clone()
        );
      }
    })
  );
  // Cache the in-flight Observable so parallel requests to the same URI
  // reuse it instead of each hitting the backend. This line had been
  // commented out, which made the `instanceof Observable` branch above
  // unreachable; it now goes through setCache for symmetry with getCache.
  // NOTE(review): the Observable is cold, so a late subscriber still
  // re-triggers the HTTP call; shareReplay(1) would fix that but needs
  // an rxjs import not visible in this chunk — TODO confirm.
  // NOTE(review): a failed request leaves this Observable cached; verify
  // that setCache/getCache eviction covers the error case.
  this.setCache(key, requestHandle);
  return requestHandle;
} else {
  // Neither branch matched: httpCacheConfig.behavior is not one of the
  // CachingHeaders values this interceptor understands. Log the state
  // that led here to ease debugging, then fail loudly.
  console.error(this.httpCacheConfig);
  console.error(httpRequest.headers);
  throw new Error('There is a configuration error in your setup');