Add ability to clear the cache and remove individual keys, plus a new policy for cache size management in MB #9

Open · wants to merge 4 commits into main
996 changes: 804 additions & 192 deletions example/ios/Podfile.lock

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion example/package.json
@@ -11,7 +11,7 @@
},
"dependencies": {
"react": "18.2.0",
- "react-native": "0.72.6",
+ "react-native": "0.72.17",
"react-native-blob-util": "^0.19.2",
"react-native-url-polyfill": "^2.0.0",
"react-native-video": "^5.2.1"
2 changes: 1 addition & 1 deletion package.json
@@ -66,7 +66,7 @@
"pod-install": "^0.1.0",
"prettier": "^2.0.5",
"react": "18.2.0",
- "react-native": "0.72.6",
+ "react-native": "0.72.17",
"react-native-blob-util": "^0.19.2",
"react-native-builder-bob": "^0.23.1",
"react-native-url-polyfill": "^2.0.0",
2 changes: 1 addition & 1 deletion react-native-cache-video.podspec
@@ -11,7 +11,7 @@ Pod::Spec.new do |s|
s.license = package["license"]
s.authors = package["author"]

- s.platforms = { :ios => "11.0" }
+ s.platforms = { :ios => "12.4" }
s.source = { :git => "https://github.com/nguyenvanphituoc/react-native-cache-video.git", :tag => "#{s.version}" }

s.source_files = "ios/**/*.{h,m,mm}"
15 changes: 15 additions & 0 deletions src/Hooks/useCache.ts
@@ -59,8 +59,23 @@ export const useAsyncCache = () => {
[cacheManager, delayUpdateVideo]
);

const removeVideoFromCache = useCallback(
async (url: string) => {
if (cacheManager) {
await cacheManager.removeCachedVideo(url);
// Clear our local state if this was the current video
if (url === currentVideoUrl.current) {
currentVideoUrl.current = undefined;
setVideoUrl(undefined);
}
}
},
[cacheManager]
);

return {
setVideoPlayUrlBy,
cachedVideoUrl,
removeVideoFromCache,
};
};
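
A minimal sketch of how a component could consume the new `removeVideoFromCache` callback — assuming `useAsyncCache` is re-exported from the package entry point and that `setVideoPlayUrlBy` accepts the remote URL (its signature is outside this diff); the URL below is hypothetical:

```tsx
import React from 'react';
import { Button, Text, View } from 'react-native';
import { useAsyncCache } from 'react-native-cache-video';

const VIDEO_URL = 'https://example.com/video.mp4'; // hypothetical URL

export function CacheControls() {
  const { setVideoPlayUrlBy, cachedVideoUrl, removeVideoFromCache } =
    useAsyncCache();

  return (
    <View>
      <Text>{cachedVideoUrl ?? 'not cached yet'}</Text>
      <Button title="Play" onPress={() => setVideoPlayUrlBy(VIDEO_URL)} />
      {/* New in this PR: drop the cached copy of a single URL. If it is the
          current video, the hook also resets cachedVideoUrl to undefined. */}
      <Button
        title="Remove from cache"
        onPress={() => removeVideoFromCache(VIDEO_URL)}
      />
    </View>
  );
}
```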
8 changes: 8 additions & 0 deletions src/Libs/fileSystem.ts
@@ -133,6 +133,14 @@ export class FileSystemManager {
return {} as Awaited<ReturnType<typeof FSManager.stat>>;
}

async getStatisticList(directory?: string) {
if (directory) {
const lstat = await FSManager.lstat(directory);
return lstat;
}
return [] as Awaited<ReturnType<typeof FSManager.lstat>>;
}

async existsFile(forFile: string): Promise<boolean> {
// let key = cacheKey(forKey, folder);
// check exist and ignore timestamp path
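
A short sketch of how the new `getStatisticList` helper can be used to compute the total on-disk cache size — the same pattern `LFUSizePolicy` uses below; entry sizes may arrive as strings from the underlying lstat call, hence the `parseInt`:

```ts
import { FileBucket, FileSystemManager } from './fileSystem';

export async function getCacheDirectorySizeBytes(): Promise<number> {
  const fs = new FileSystemManager();
  // List every file in the cache bucket and sum their sizes.
  const entries = await fs.getStatisticList(
    fs.getBucketFolder(FileBucket.cache)
  );
  return entries.reduce(
    (total, entry) => total + parseInt(entry.size as unknown as string, 10),
    0
  );
}
```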
9 changes: 5 additions & 4 deletions src/Provider/MemoryCacheFreePolicy.ts
@@ -7,10 +7,11 @@ import type {
* The free policy does not track or evict anything; it simply caches every item
*/
export class FreePolicy implements MemoryCachePolicyInterface {
- constructor() {
- this.onAccess.bind(this);
- this.onEvict.bind(this);
- }
+ constructor() {}
+
+ clear() {}
+
+ removeEntry(_key: string) {}

onAccess(_cache: Map<string, any>, _key: string) {}

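
The policy interface itself (src/types/type.ts) is not part of this diff; based on how `FreePolicy`, `LFUPolicy` and `LFUSizePolicy` implement it, its post-PR shape is presumably roughly the following — an assumed sketch, not the actual file:

```ts
export interface MemoryCacheDelegate<V> {
  didEvictHandler(key: string, value: V): void | Promise<void>;
}

export interface MemoryCachePolicyInterface {
  // New in this PR: reset all tracking state.
  clear(): void;
  // New in this PR: drop tracking for a single key.
  removeEntry(key: string): void;
  onAccess(cache: Map<string, any>, key: string): void;
  // The optional triggerKey is also new; size-based policies use it to avoid
  // evicting the entry that triggered the check.
  onEvict(
    cache: Map<string, any>,
    delegate?: MemoryCacheDelegate<any>,
    triggerKey?: string
  ): void | Promise<void>;
  // Access counts, read and merged by MemoryCacheProvider.
  dataSource: { [key: string]: number };
}
```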
35 changes: 30 additions & 5 deletions src/Provider/MemoryCacheLFUPolicy.ts
@@ -13,6 +13,7 @@ import {
*
- LRU (Least Recently Used): The least recently used item is evicted. This policy is often used to keep recently accessed items in the cache.
- LFU (Least Frequently Used): The least frequently used item is evicted. This policy is based on the number of accesses to each item.
- LFUSize (Least Frequently Used by Size): The least frequently used item is evicted; eviction is triggered when the cache directory exceeds a size limit given in MB.
- FIFO (First-In-First-Out): The first item added to the cache is the first one to be evicted. This is a straightforward and easy-to-implement policy.
- Random Replacement: A random item is selected for eviction. This policy does not consider access patterns and can lead to uneven cache performance.
- MRU (Most Recently Used): The most recently used item is evicted. In contrast to LRU, MRU keeps the most recent item in the cache.
@@ -28,9 +29,14 @@ export class LFUPolicy implements MemoryCachePolicyInterface {
[key in string]: number;
};
this.capacity = capacity;
+ }
+
+ clear(): void {
+ this.referenceBit = {};
+ }

- this.onAccess.bind(this);
- this.onEvict.bind(this);
+ removeEntry(key: string): void {
+ delete this.referenceBit[key];
}

onAccess(cache: Map<string, any>, key: string) {
@@ -59,10 +65,19 @@

// Evict the least recently used item (at the end)
for (const key in this.referenceBit) {
if (!cache.has(key)) {
// Only consider keys that actually exist in the cache
delete this.referenceBit[key]; // Clean up stale reference
continue;
}

const freq = this.referenceBit[key];
- if (freq && freq < minFreq && freq !== SECOND_CHANCE_TO_COUNT) {
- minFreq = freq;
- lfuKey = key;
+ if (freq && freq < minFreq) {
+ // Consider SECOND_CHANCE_TO_COUNT items if nothing else found
+ if (freq !== SECOND_CHANCE_TO_COUNT || lfuKey === null) {
+ minFreq = freq;
+ lfuKey = key;
+ }
}
}

@@ -71,6 +86,16 @@
cache.delete(lfuKey);
delete this.referenceBit[lfuKey];
delegate && delegate.didEvictHandler(lfuKey, value);
} else if (cache.size >= this.capacity) {
// If we couldn't find anything to evict but still need space,
// evict the first item (oldest by insertion order)
const firstKey = cache.keys().next().value;
if (firstKey) {
const value = cache.get(firstKey);
cache.delete(firstKey);
delete this.referenceBit[firstKey];
delegate && delegate.didEvictHandler(firstKey, value);
}
}
}
//
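
One note on the new fallback branch above: when no key qualifies by frequency, the first key of the Map is evicted. Because `onAccess` re-inserts a key on every hit, a JS Map's insertion order doubles as recency order — a small self-contained illustration (hypothetical URLs and paths):

```ts
const cache = new Map<string, string>([
  ['https://example.com/a.mp4', '/cache/a.mp4'],
  ['https://example.com/b.mp4', '/cache/b.mp4'],
]);

// Simulate an access to 'a': delete + set moves it to the back of the
// iteration order, exactly as onAccess does.
const path = cache.get('https://example.com/a.mp4')!;
cache.delete('https://example.com/a.mp4');
cache.set('https://example.com/a.mp4', path);

// 'b' is now first in insertion order, so it is the fallback eviction victim.
console.log(cache.keys().next().value); // https://example.com/b.mp4
```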
197 changes: 197 additions & 0 deletions src/Provider/MemoryCacheLFUSizePolicy.ts
@@ -0,0 +1,197 @@
import type {
MemoryCacheDelegate,
MemoryCachePolicyInterface,
} from '../types/type';

import { SECOND_CHANCE_TO_COUNT } from '../Utils/constants';
import {
isNull,
mergeLargerNumber,
mergeWithCustomCondition,
} from '../Utils/util';

import { FileBucket, FileSystemManager } from '../Libs/fileSystem';

/**
*
- LFUSize (Least Frequently Used by Size): The least frequently used item is evicted; eviction is triggered when the cache directory exceeds a size limit given in MB.
*/
// LFUSize (Least Frequently Used by Size) replacement policy
export class LFUSizePolicy implements MemoryCachePolicyInterface {
private isEvicting = false;
private referenceBit: { [key in string]: number };
private capacityBytes: number;
private storage: FileSystemManager;

constructor(capacityMB: number) {
this.referenceBit = {} as {
[key in string]: number;
};
this.capacityBytes = capacityMB * 1024 * 1024; // Convert MB to bytes
this.storage = new FileSystemManager();
}

clear(): void {
this.referenceBit = {};
}

removeEntry(key: string): void {
delete this.referenceBit[key];
}

onAccess(cache: Map<string, any>, key: string) {
// Update access frequency for the item
const value = cache.get(key);
if (value) {
// mixed with LRU
cache.delete(key);
cache.set(key, value);
}

// On access, increment the key's count, or give a brand-new key a second chance to be counted later
this.referenceBit[key] = isNull(this.referenceBit[key])
? SECOND_CHANCE_TO_COUNT
: this.referenceBit[key]! + 1;
}

async onEvict(
cache: Map<string, any>,
delegate?: MemoryCacheDelegate<any>,
triggerKey?: string
) {
if (this.isEvicting) {
return; // Another eviction is in progress
}
try {
this.isEvicting = true;

// Get current directory size
const files = await this.storage.getStatisticList(
this.storage.getBucketFolder(FileBucket.cache)
);

let totalSize = files.reduce(
(sum, file) => sum + parseInt(file.size as unknown as string, 10),
0
);

// console.log('::::::::::::::::: REFERENCE_BIT', this.referenceBit);
// console.log('::::::::::::::::: CACHE', Object.fromEntries(cache));

// Keep evicting least frequently used items until we're under capacity
let count = 0;
while (totalSize > this.capacityBytes) {
count++;

// Don't evict when only a couple of files remain; the overage could be a single large file.
// Don't evict more than 10 files per eviction pass.
if (files.length <= 2 || count > 10) {
break;
}

const evictedKey = this.findLFUKey(files, cache, triggerKey);
// console.log('::::::::::::: COUNT', count, ':::');
// console.log('::::::::::::: EVICTKEY', count, evictedKey, ':::');
// console.log('::::::::::::: FILES', count, files.length, ':::');

if (!evictedKey) {
// Nothing left to evict or only the trigger file remains
break;
}

const cachedPath = cache.get(evictedKey);
// console.log('::::::::::::: CACHEPATH', count, cachedPath, ':::');
if (!cachedPath) {
delete this.referenceBit[evictedKey]; // Clean up stale reference
continue;
}

// Find the file size we're about to evict
const fileToEvict = files.find((f) => cachedPath.includes(f.filename));
if (!fileToEvict) {
// File doesn't exist on disk, clean up stale reference
cache.delete(evictedKey);
delete this.referenceBit[evictedKey];
continue;
}

// Evict the file
cache.delete(evictedKey);
delete this.referenceBit[evictedKey];
await delegate?.didEvictHandler(evictedKey, cachedPath);

// Update our running total
totalSize -= fileToEvict.size;
// fileToEvict is always present in files, so indexOf cannot return -1 (which would splice off the last item)
files.splice(files.indexOf(fileToEvict), 1);

// console.log('::::::::::::: NewSize:', count, '||', totalSize, ':::');
}
} finally {
this.isEvicting = false;
}
}

private findLFUKey(
files: Array<any>,
cache: Map<string, any>,
excludeKey?: string
): string | null {
let minFreq = Number.MAX_VALUE;
let lfuKey: string | null = null;

for (const key in this.referenceBit) {
// Skip the file that triggered eviction
if (key === excludeKey) continue;

const freq = this.referenceBit[key];
if (freq && freq < minFreq) {
if (freq !== SECOND_CHANCE_TO_COUNT || lfuKey === null) {
minFreq = freq;
lfuKey = key;
}
}
}

// If no key qualified by frequency (e.g. everything is still on its second chance), fall back to the oldest file on disk
if (!lfuKey && Object.keys(this.referenceBit).length > 0) {
const eligibleFiles = files.filter((file) => {
if (excludeKey) {
const excludePath = cache.get(excludeKey);
return !excludePath?.includes(file.filename);
}
return true;
});

// Find the oldest file
const oldestFile = eligibleFiles.reduce((oldest, current) => {
return oldest.lastModified < current.lastModified ? oldest : current;
});

// Find the referenceBit key that corresponds to this file
// Find which cache entry has this filename
lfuKey =
Array.from(cache.entries()).find(([_, path]) =>
path.includes(oldestFile.filename)
)?.[0] ||
cache.keys().next().value || // fallback to first (oldest) key
null;
}

return lfuKey;
}

//
get dataSource(): { [key in string]: number } {
return this.referenceBit;
}

set dataSource(data: { [key in string]: number }) {
const newDataSource = mergeWithCustomCondition(
this.referenceBit,
data,
mergeLargerNumber
);
this.referenceBit = newDataSource;
}
}
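
A minimal usage sketch for the new policy. It assumes the cache maps remote URLs to local file paths (that is how `onEvict` matches entries to files on disk); the URL and paths are hypothetical, and how the policy gets wired into the provider is outside this diff:

```ts
import { LFUSizePolicy } from './MemoryCacheLFUSizePolicy';

export async function enforceBudgetExample() {
  // 200 MB budget; internally capacityBytes = 200 * 1024 * 1024.
  const policy = new LFUSizePolicy(200);
  const cache = new Map<string, string>();

  const delegate = {
    // Called once per evicted entry; a real delegate would delete the file.
    didEvictHandler: async (key: string, localPath: string) => {
      console.log(`evicting ${key} (${localPath})`);
    },
  };

  const url = 'https://example.com/video.mp4';
  cache.set(url, '/cache/video.mp4');
  policy.onAccess(cache, url);

  // Measures the cache directory via FileSystemManager.getStatisticList and
  // evicts least-frequently-used entries until the directory fits the budget
  // (at most 10 files per pass, never down to the last two files). Passing
  // the accessed URL as triggerKey keeps that entry from being evicted.
  await policy.onEvict(cache, delegate, url);
}
```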
15 changes: 14 additions & 1 deletion src/Provider/MemoryCacheProvider.ts
@@ -31,6 +31,9 @@ export class MemoryCacheProvider<V> implements MemoryCacheInterface<V> {
get(key: string): V | undefined {
// Update access time or frequency based on the policy
this.cachePolicy.onAccess(this.cache, key);

this.cachePolicy.onEvict(this.cache, this.delegate, key);

return this.cache.get(key);
}
put(key: string, value: V): void {
@@ -43,7 +46,7 @@ export class MemoryCacheProvider<V> implements MemoryCacheInterface<V> {
// set for new key only, give it a chance to be counted
this.cachePolicy.dataSource[key] = SECOND_CHANCE_TO_COUNT;
// If the cache is full, apply the replacement policy to evict an item
- this.cachePolicy.onEvict(this.cache, this.delegate);
+ this.cachePolicy.onEvict(this.cache, this.delegate, key);
}

this.cache.set(key, value);
@@ -56,6 +59,8 @@ export class MemoryCacheProvider<V> implements MemoryCacheInterface<V> {
} else {
// remove
this.cache.delete(key);
// Also clean up policy tracking when removing
this.cachePolicy.removeEntry(key);
}
}
//
@@ -95,4 +100,12 @@
}
}
//

clear(): void {
// Clear the actual cache
this.cache = new Map<string, V>();

// Clear the policy tracking
this.cachePolicy.clear();
}
}
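
A short sketch of the provider-level additions. The provider's constructor is outside the hunks shown here, so the policy injection below is an assumption made for illustration; keys and paths are hypothetical:

```ts
import { MemoryCacheProvider } from './MemoryCacheProvider';
import { LFUPolicy } from './MemoryCacheLFUPolicy';

// Assumption: the provider accepts its eviction policy via the constructor.
const memoryCache = new MemoryCacheProvider<string>(new LFUPolicy(50));

// put() seeds the policy's access count for a new key and may trigger
// eviction, now passing the new key as the trigger.
memoryCache.put('https://example.com/a.mp4', '/cache/a.mp4');

// get() updates access tracking and now also gives the policy a chance to
// evict, again passing the accessed key as the trigger.
console.log(memoryCache.get('https://example.com/a.mp4'));

// New in this PR: clear() wipes both the Map and the policy's tracking state;
// removing a single entry (the else branch of put above) now also calls
// removeEntry() so the policy drops its counter for that key.
memoryCache.clear();
```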
4 changes: 0 additions & 4 deletions src/Provider/PreCacheProvider.ts
@@ -27,10 +27,6 @@ export class PreCacheProvider implements PreCacheInterface {
this.sessionTask = sessionTask;
this.cacheFolder = cacheFolder;
//
- this.preCacheForList.bind(this);
- this.prepareSourceMedia = this.prepareSourceMedia.bind(this);
- this.cancelCachingList = this.cancelCachingList.bind(this);
- //
}

// MARK: - Pre-cache