When using Templates, you need to declare a `Request` parameter in the endpoint that will return a template, as shown below:
from fastapi import Request

@app.get('/')
def index(request: Request):
    # Jinja2Templates requires the incoming Request object in the
    # template context so that url_for() works inside the template.
    return templates.TemplateResponse("index.html", {"request": request})
Below are two options (with complete code samples) for streaming (live) video using FastAPI and OpenCV. Option 1 demonstrates an approach based on your question, using the HTTP protocol and FastAPI/Starlette's `StreamingResponse`. Option 2 uses the WebSocket protocol, which can easily handle HD video streaming and is supported by FastAPI/Starlette (documentation can be found here and here).
Option 1 - Using the HTTP Protocol
You can access the live streaming at http://127.0.0.1:8000/.
app.py
import cv2
import uvicorn
from fastapi import FastAPI, Request
from fastapi.templating import Jinja2Templates
from fastapi.responses import StreamingResponse

app = FastAPI()
# Single shared webcam capture, opened once at import time.
# NOTE(review): cv2.CAP_DSHOW selects the Windows DirectShow backend —
# confirm the target OS; on Linux/macOS drop the flag or use the default.
camera = cv2.VideoCapture(0, cv2.CAP_DSHOW)
# Templates are loaded from the "templates" directory next to app.py.
templates = Jinja2Templates(directory="templates")
def gen_frames():
    """Yield webcam frames encoded as JPEG, framed for a
    multipart/x-mixed-replace ("motion JPEG") HTTP response.

    Yields:
        bytes: one multipart part per frame, i.e. the boundary line,
        a Content-Type header, and the raw JPEG bytes.
    """
    while True:
        success, frame = camera.read()
        if not success:
            # Camera unavailable or stream ended — stop the generator.
            break
        ok, buffer = cv2.imencode('.jpg', frame)
        if not ok:
            # Encoding failed for this frame; skip it instead of
            # emitting garbage bytes (the original ignored this flag).
            continue
        yield (b'--frame\r\n'
               b'Content-Type: image/jpeg\r\n\r\n' + buffer.tobytes() + b'\r\n')
@app.get('/')
def index(request: Request):
    """Serve the HTML page that embeds the MJPEG video stream."""
    context = {"request": request}
    return templates.TemplateResponse("index.html", context)
@app.get('/video_feed')
def video_feed():
    """Stream JPEG frames to the browser as a multipart response."""
    media_type = 'multipart/x-mixed-replace; boundary=frame'
    return StreamingResponse(gen_frames(), media_type=media_type)
if __name__ == '__main__':
    # Run the ASGI app with Uvicorn on the local interface.
    # Fixed: uvicorn.run() does not accept a `debug` keyword (it raises
    # TypeError in current uvicorn versions); this also matches the
    # Option 2 sample. Use `reload=True` with an import string
    # ("app:app") if auto-reload is wanted during development.
    uvicorn.run(app, host='127.0.0.1', port=8000)
templates/index.html
<!DOCTYPE html>
<html>
<body>
<div class="container">
    <h3> Live Streaming </h3>
    <!-- The browser keeps this <img> continuously updated because
         /video_feed serves a multipart/x-mixed-replace stream of JPEGs -->
    <img src="{{ url_for('video_feed') }}" width="50%">
</div>
</body>
</html>
Option 2 - Using the WebSocket Protocol
You can access the live streaming at http://127.0.0.1:8000/.
app.py
from fastapi import FastAPI, Request, WebSocket, WebSocketDisconnect
from fastapi.templating import Jinja2Templates
import uvicorn
import cv2

app = FastAPI()
# Single shared webcam capture, opened once at import time.
# NOTE(review): cv2.CAP_DSHOW is the Windows DirectShow backend —
# confirm the target OS before reusing this sample elsewhere.
camera = cv2.VideoCapture(0,cv2.CAP_DSHOW)
# Templates are loaded from the "templates" directory next to app.py.
templates = Jinja2Templates(directory="templates")
@app.get('/')
def index(request: Request):
    """Serve the client page that opens the WebSocket video stream."""
    template_name = "index.html"
    return templates.TemplateResponse(template_name, {"request": request})
@app.websocket("/ws")
async def get_stream(websocket: WebSocket):
await websocket.accept()
try:
while True:
success, frame = camera.read()
if not success:
break
else:
ret, buffer = cv2.imencode('.jpg', frame)
await websocket.send_bytes(buffer.tobytes())
except WebSocketDisconnect:
print("Client disconnected")
if __name__ == '__main__':
    # Serve the ASGI app locally with Uvicorn.
    bind_host, bind_port = '127.0.0.1', 8000
    uvicorn.run(app, host=bind_host, port=bind_port)
Below is the HTML
template for establishing the WebSocket
connection, receiving the image bytes and creating a Blob
URL (which is released after the image is loaded, so that the object will subsequently be garbage collected rather than being kept in memory unnecessarily), as shown here, to display the video frame in the browser.
templates/index.html
<!DOCTYPE html>
<html>
<head>
<title>Live Streaming</title>
</head>
<body>
<img id="frame" src="">
<script>
    // Open a WebSocket to the server's /ws endpoint.
    // NOTE(review): the host is hard-coded to localhost:8000 — this page
    // breaks if served from another host/port; confirm deployment target.
    let ws = new WebSocket("ws://localhost:8000/ws");
    let image = document.getElementById("frame");
    image.onload = function(){
        URL.revokeObjectURL(this.src); // release the blob URL once the image is loaded
    }
    // Each message carries one JPEG frame as a Blob; display it by
    // pointing the <img> at a temporary object URL.
    ws.onmessage = function(event) {
        image.src = URL.createObjectURL(event.data);
    };
</script>
</body>
</html>
Below is also a Python client based on the websockets
library and OpenCV
, which you may use to connect to the server, in order to receive and display the video frames in a Python app.
client.py
import websockets
import asyncio
import cv2
import numpy as np
async def main():
    """Connect to the streaming server, then decode and display each
    received JPEG frame in an OpenCV window.

    Note: the original sample also opened the local webcam here
    (`camera = cv2.VideoCapture(0, cv2.CAP_DSHOW)`), but that handle was
    never used by the client and could conflict with the server holding
    the device — it has been removed.
    """
    url = 'ws://127.0.0.1:8000/ws'
    async with websockets.connect(url) as ws:
        while True:
            contents = await ws.recv()
            arr = np.frombuffer(contents, np.uint8)
            frame = cv2.imdecode(arr, cv2.IMREAD_UNCHANGED)
            if frame is None:
                # Payload failed to decode as an image; skip it instead
                # of passing None to cv2.imshow (which would raise).
                continue
            cv2.imshow('frame', frame)
            cv2.waitKey(1)

asyncio.run(main())