async-cache

2.0.0 · active · verified Thu Apr 16

async-cache is an asyncio application layer cache and dataloader for Python-based microservices and applications. It provides features like thundering herd protection, cache warmup, invalidation, and metrics. The current version is 2.0.0; releases are infrequent and driven by new feature additions.

Common errors

Warnings

Install

Imports

Quickstart

This quickstart demonstrates basic asynchronous caching with `AsyncCache` and its `InMemoryCacheBackend`, showcasing how to decorate an async function to cache its results. It also includes an example of `DataLoader` from version 2.0.0, illustrating how to batch requests for multiple items into a single backend call, effectively preventing N+1 problems in async applications.

import asyncio
import time
from async_cache import AsyncCache, InMemoryCacheBackend, DataLoader

# --- Basic Caching Example ---
async def run_basic_cache_example():
    """Demonstrate result caching of an async function with AsyncCache.

    Uses an in-memory backend with a 60-second default TTL and shows a
    cache miss, a cache hit for identical arguments, and a miss for a
    different argument set.
    """
    print("--- Basic Caching Example ---")
    # Initialize an in-memory cache backend
    cache_backend = InMemoryCacheBackend()
    # Set a default TTL of 60 seconds for cache entries
    cache = AsyncCache(cache_backend=cache_backend, default_ttl=60)

    # BUG FIX: the key template previously interpolated only {arg1}, so two
    # calls differing only in arg2 — e.g. (1, "hello") vs (1, "world") —
    # collided on the same cache entry, and the second call wrongly returned
    # the first call's cached result. Key on both arguments so distinct
    # inputs get distinct cache entries.
    # NOTE(review): assumes the key template supports multiple {param}
    # placeholders (format-style) — confirm against async_cache docs.
    @cache.cache(key="my_expensive_function:{arg1}:{arg2}")
    async def expensive_function(arg1: int, arg2: str) -> str:
        print(f"Executing expensive_function with {arg1}, {arg2}...")
        await asyncio.sleep(1)  # Simulate network call or heavy computation
        return f"Result for {arg1}, {arg2} at {time.time()}"

    print("First call (should execute function):")
    result1 = await expensive_function(1, "hello")
    print(f"Result 1: {result1}")

    print("\nSecond call (should be cached, no function execution):")
    result2 = await expensive_function(1, "hello")
    print(f"Result 2: {result2}")

    print("\nThird call (different args, not cached, executes function):")
    result3 = await expensive_function(2, "world")
    print(f"Result 3: {result3}")

# --- DataLoader Example (v2 feature) ---
async def run_dataloader_example():
    """Show request batching and deduplication with DataLoader.

    Concurrent load() calls are collected by the loader and served by a
    single invocation of the batch function, avoiding N+1 fetch patterns.
    """
    print("\n--- Dataloader Example ---")

    async def fetch_users_batch(user_ids: list[int]) -> list[str]:
        # Stand-in for a real bulk fetch: one round-trip for many IDs.
        print(f"Fetching users for IDs: {user_ids}")
        await asyncio.sleep(0.5) # Simulate batch API call
        return [f"User_{uid}_data" for uid in user_ids]

    # The loader collects individual load() calls and hands them to the
    # batch function as a single request.
    loader = DataLoader(batch_function=fetch_users_batch)

    async def get_user_data(user_id: int) -> str:
        return await loader.load(user_id)

    print("Calling get_user_data for multiple IDs (some duplicated):")
    # IDs 1, 2, 1, 3 — the repeated 1 is deduplicated by the loader, so
    # fetch_users_batch should run only once, for [1, 2, 3].
    requested_ids = (1, 2, 1, 3)
    results = await asyncio.gather(*(get_user_data(uid) for uid in requested_ids))
    print(f"Dataloader results: {results}")

async def main():
    """Run the quickstart examples sequentially on one event loop."""
    for example in (run_basic_cache_example, run_dataloader_example):
        await example()

if __name__ == "__main__":
    # Script entry point: run both examples on a fresh asyncio event loop.
    asyncio.run(main())

view raw JSON →