Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,10 @@ This project uses [*towncrier*](https://towncrier.readthedocs.io/) and the chang

## 0.2

### 0.2.2
- Valkey backend support
- Example for Valkey backend

### 0.2.1
- Fix picklecoder
- Fix connection failure transparency and add logging
Expand Down
14 changes: 13 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -11,14 +11,15 @@ backends supporting Redis, Memcached, and Amazon DynamoDB.

## Features

- Supports `redis`, `memcache`, `dynamodb`, and `in-memory` backends.
- Supports `redis`, `valkey`, `memcache`, `dynamodb`, and `in-memory` backends.
- Easy integration with [FastAPI](https://fastapi.tiangolo.com/).
- Support for HTTP cache headers like `ETag` and `Cache-Control`, as well as conditional `If-None-Match` requests.

## Requirements

- FastAPI
- `redis` when using `RedisBackend`.
- `valkey` when using `ValkeyBackend`.
- `memcache` when using `MemcacheBackend`.
- `aiobotocore` when using `DynamoBackend`.

Expand All @@ -36,6 +37,12 @@ or

or

```shell
> pip install "fastapi-cache2[valkey]"
```

or

```shell
> pip install "fastapi-cache2[memcache]"
```
Expand Down Expand Up @@ -221,6 +228,11 @@ When using the Redis backend, please make sure you pass in a redis client that d

[redis-decode]: https://redis-py.readthedocs.io/en/latest/examples/connection_examples.html#by-default-Redis-return-binary-responses,-to-decode-them-use-decode_responses=True

### ValkeyBackend

When using the Valkey backend, please make sure you pass in a Valkey client that does not decode responses (`decode_responses` **must** be `False`). Cached data is stored as `bytes` (binary), decoding these in the Valkey client would break caching.


## Tests and coverage

```shell
Expand Down
47 changes: 47 additions & 0 deletions examples/valkey/Readme.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
# Valkey Backend Example

This example demonstrates using FastAPI-Cache with Valkey as the backend.

## Prerequisites

1. Install Valkey:
```bash
# Using Docker
docker run -d -p 6379:6379 valkey/valkey:latest

# Or install locally
# See: https://valkey.io/download/
```

2. Install dependencies:
```bash
poetry install
# or
pip install fastapi-cache2[valkey]
```

## Running the Example

```bash
cd examples/valkey
fastapi dev main.py
```

## Endpoints

- `GET /` - Cached endpoint (10s TTL)
- `GET /clear` - Clear cache
- `GET /date` - Get cached date
- `GET /datetime` - Get cached datetime
- `GET /blocking` - Sync cached endpoint
- `GET /html` - Cached HTML response
- `GET /cache_response_obj` - Cached JSON response

## Configuration

The example uses these Valkey settings:
- Host: localhost
- Port: 6379
- DB: 0
- decode_responses: False (required for pickle coder)
Empty file added examples/valkey/__init__.py
Empty file.
10 changes: 10 additions & 0 deletions examples/valkey/index.html
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Title</title>
</head>
<body>
<h1>Cache HTML! {{ ret }} </h1>
</body>
</html>
112 changes: 112 additions & 0 deletions examples/valkey/main.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,112 @@
# pyright: reportGeneralTypeIssues=false
import time
from contextlib import asynccontextmanager
from typing import AsyncIterator

import pendulum
import uvicorn
from fastapi import FastAPI
from fastapi.responses import HTMLResponse
from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates
from fastapi_cache import FastAPICache
from fastapi_cache.backends.valkey import ValkeyBackend
from fastapi_cache.coder import PickleCoder
from fastapi_cache.decorator import cache
from starlette.requests import Request
from starlette.responses import JSONResponse, Response

from valkey.asyncio import Valkey


@asynccontextmanager
async def lifespan(_: FastAPI) -> AsyncIterator[None]:
    """Set up the Valkey-backed cache for the application's lifetime.

    Creates an async Valkey client, verifies connectivity with a PING
    (failing fast at startup if the server is unreachable), registers the
    backend with FastAPICache, and closes the client on shutdown.
    """
    client = Valkey(
        host="localhost",
        port=6379,
        db=0,
        # Cached payloads are stored as raw bytes; decoding responses in
        # the client would break the pickle-based coders.
        decode_responses=False,
    )

    # Test the connection before wiring up the cache.
    try:
        await client.ping()
        # Plain string (no placeholders) — an f-string was unnecessary here.
        print("✓ Connected to Valkey at localhost:6379")
    except Exception as e:
        print(f"✗ Failed to connect to Valkey: {e}")
        raise

    FastAPICache.init(ValkeyBackend(client), prefix="fastapi-cache")

    yield

    print("Closing Valkey connection...")
    # NOTE(review): valkey-py (redis-py fork) also exposes aclose(); close()
    # is the older alias — confirm which one the pinned version prefers.
    await client.close()


app = FastAPI(lifespan=lifespan)

# Serve this example directory as /static so index.html's assets resolve.
app.mount(
    path="/static",
    app=StaticFiles(directory="./"),
    name="static",
)
templates = Jinja2Templates(directory="./")
# Module-level hit counter incremented by get_ret(); demonstrates that a
# cached call does not re-execute the function body while the entry is fresh.
ret = 0


@cache(namespace="test", expire=1)
async def get_ret():
    """Increment and return the module-level hit counter (cached for 1s)."""
    global ret
    ret += 1
    return ret


@app.get("/")
@cache(namespace="test", expire=10)
async def index():
    """Root endpoint; the whole response is cached for 10 seconds."""
    current = await get_ret()
    return {"ret": current}


@app.get("/clear")
async def clear():
    """Drop every cached entry in the "test" namespace."""
    result = await FastAPICache.clear(namespace="test")
    return result


@app.get("/date")
@cache(namespace="test", expire=10)
async def get_data(request: Request, response: Response):
    """Return today's date; cached for 10 seconds."""
    today = pendulum.today()
    return today


# MUST be sync to verify threadpool + cache handling
@app.get("/blocking")
@cache(namespace="test", expire=10)  # pyright: ignore[reportArgumentType]
def blocking():
    """Synchronous endpoint: blocks for 2s, then returns a constant payload."""
    time.sleep(2)
    payload = {"ret": 42}
    return payload


@app.get("/datetime")
@cache(namespace="test", expire=2)
async def get_datetime(request: Request, response: Response):
    """Return the current timestamp; cached for 2 seconds."""
    print(request, response)
    now = pendulum.now()
    return now


@app.get("/html", response_class=HTMLResponse)
@cache(expire=60, namespace="html", coder=PickleCoder)
async def cache_html(request: Request):
    """Render index.html with the counter; rendered response cached for 60s."""
    context = {"request": request, "ret": await get_ret()}
    return templates.TemplateResponse("index.html", context)


@app.get("/cache_response_obj")
@cache(namespace="test", expire=5)
async def cache_response_obj():
    """Return a pre-built JSONResponse; cached for 5 seconds."""
    body = {"a": 1}
    return JSONResponse(body)


if __name__ == "__main__":
    # Run with auto-reload for local development.
    uvicorn.run("main:app", reload=True)
7 changes: 7 additions & 0 deletions fastapi_cache/backends/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,3 +26,10 @@
pass
else:
__all__ += ["redis"]

# Expose the valkey backend only when the optional `valkey` package is
# installed, mirroring the pattern used for the other optional backends.
try:
    from fastapi_cache.backends import valkey
except ImportError:
    pass
else:
    __all__ += ["valkey"]
40 changes: 40 additions & 0 deletions fastapi_cache/backends/valkey.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
from typing import Optional, Tuple, Union

from valkey.asyncio import Valkey
from valkey.asyncio.cluster import ValkeyCluster

from fastapi_cache.types import Backend


class ValkeyBackend(Backend):
    """Cache backend storing entries in a Valkey server or cluster.

    The client must be configured with ``decode_responses=False``: cached
    values are stored and returned as raw ``bytes``.
    """

    def __init__(self, valkey: Union["Valkey[bytes]", "ValkeyCluster[bytes]"]):
        self.valkey = valkey
        # Cluster connections cannot run transactional (MULTI/EXEC) pipelines,
        # so the pipeline below is non-transactional for clusters.
        self.is_cluster: bool = isinstance(valkey, ValkeyCluster)

    async def get_with_ttl(self, key: str) -> Tuple[int, Optional[bytes]]:
        """Return ``(ttl_seconds, value)`` for *key* in one round trip.

        TTL follows server semantics: -2 when the key does not exist,
        -1 when it exists without an expiry.
        """
        async with self.valkey.pipeline(transaction=not self.is_cluster) as pipe:
            # execute() returns a list; unpack so the declared tuple return
            # type is accurate instead of returning the raw list.
            ttl, value = await pipe.ttl(key).get(key).execute()  # type: ignore[union-attr]
            return ttl, value

    async def get(self, key: str) -> Optional[bytes]:
        """Return the cached bytes for *key*, or ``None`` when absent."""
        return await self.valkey.get(key)  # type: ignore[union-attr]

    async def set(self, key: str, value: bytes, expire: Optional[int] = None) -> None:
        """Store *value* under *key*, expiring after *expire* seconds if given."""
        await self.valkey.set(key, value, ex=expire)  # type: ignore[union-attr]

    async def clear(self, namespace: Optional[str] = None, key: Optional[str] = None) -> int:
        """Delete cached entries and return the number of keys removed.

        With *namespace*, every key matching ``{namespace}:*`` is removed via
        incremental SCAN (avoids blocking the server the way KEYS would).
        Otherwise a single *key* is deleted. With neither argument, 0 is
        returned and nothing is removed.
        """
        if namespace:
            cursor = 0
            deleted = 0
            pattern = f"{namespace}:*"

            # SCAN is complete once the server returns cursor 0.
            while True:
                cursor, keys = await self.valkey.scan(cursor, match=pattern, count=100)  # type: ignore[union-attr]
                if keys:
                    deleted += await self.valkey.delete(*keys)  # type: ignore[union-attr]
                if cursor == 0:
                    break

            return deleted
        elif key:
            return await self.valkey.delete(key)  # type: ignore[union-attr]
        return 0
Loading