Spaces: Running on Zero
import logging
from typing import Annotated

from fastapi import FastAPI, Request
from fastapi.responses import StreamingResponse
from pydantic import BaseModel, StringConstraints
from outlines import generate

# model, sampler and stream_file are provided by this Space's local generate.py
# module (distinct from the outlines.generate module imported above).
from generate import model, sampler, stream_file

logger = logging.getLogger(__name__)
# Startup sanity check: constrain the model to produce a Status object whose
# status field matches the pattern "ok".
class Status(BaseModel):
    status: Annotated[str, StringConstraints(pattern="ok")]


# Run the check once at import time and log the whitespace-stripped result.
status_generator = generate.json(model, Status, sampler=sampler)
status_stream = status_generator.stream("status:")
status = "".join(char.strip() for char in status_stream if char.strip())
logger.warning("Model status: " + status)
async def stream_response(filename: str, prompt: str, columns: list[str], seed: int, size: int):
    """Yield generated JSONL chunks for the requested file."""
    for chunk in stream_file(
        filename=filename,
        prompt=prompt,
        columns=columns,
        seed=seed,
        size=size,
    ):
        yield chunk


async def dummy_stream():
    """Empty stream returned for HEAD requests."""
    yield ""
app = FastAPI()


@app.head("/{filename}.jsonl")
@app.get("/{filename}.jsonl")
async def read_item(
    request: Request,
    filename: str,
    prompt: str = "",
    columns: str = "",
    seed: int = 42,
    size: int = 3,
):
    if request.method == "GET":
        # Parse the comma-separated column names and stream generated rows.
        columns = [field.strip() for field in columns.split(",") if field.strip()]
        content = stream_response(
            filename,
            prompt=prompt,
            columns=columns,
            seed=seed,
            size=size,
        )
    else:
        # HEAD requests get an empty body so clients can probe the endpoint
        # without triggering any generation.
        content = dummy_stream()
    # Serve the stream as a downloadable .jsonl attachment.
    response = StreamingResponse(content, media_type="text/jsonlines")
    response.headers["Content-Disposition"] = f"attachment; filename={filename}.jsonl"
    return response
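
# --- Usage sketch (illustrative, not part of the original file) ---
# A minimal client-side example of consuming the streaming endpoint, assuming
# the app is served locally (e.g. `uvicorn app:app --port 7860`; the module
# name and port are assumptions) and that httpx is installed. The filename
# "demo" and the query parameter values are arbitrary examples.
#
# import httpx
#
# with httpx.stream(
#     "GET",
#     "http://localhost:7860/demo.jsonl",
#     params={"prompt": "movie reviews", "columns": "title,review", "seed": 42, "size": 3},
#     timeout=None,
# ) as response:
#     for line in response.iter_lines():
#         print(line)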