Runtime error

Exit code: 1. Reason: te(main)
  File "/usr/local/lib/python3.9/asyncio/base_events.py", line 647, in run_until_complete
    return future.result()
  File "/home/user/.local/lib/python3.9/site-packages/uvicorn/server.py", line 70, in serve
    await self._serve(sockets)
  File "/home/user/.local/lib/python3.9/site-packages/uvicorn/server.py", line 77, in _serve
    config.load()
  File "/home/user/.local/lib/python3.9/site-packages/uvicorn/config.py", line 435, in load
    self.loaded_app = import_from_string(self.app)
  File "/home/user/.local/lib/python3.9/site-packages/uvicorn/importer.py", line 19, in import_from_string
    module = importlib.import_module(module_str)
  File "/usr/local/lib/python3.9/importlib/__init__.py", line 127, in import_module
    return _bootstrap._gcd_import(name[level:], package, level)
  File "<frozen importlib._bootstrap>", line 1030, in _gcd_import
  File "<frozen importlib._bootstrap>", line 1007, in _find_and_load
  File "<frozen importlib._bootstrap>", line 986, in _find_and_load_unlocked
  File "<frozen importlib._bootstrap>", line 680, in _load_unlocked
  File "<frozen importlib._bootstrap_external>", line 850, in exec_module
  File "<frozen importlib._bootstrap>", line 228, in _call_with_frames_removed
  File "/app/app.py", line 77, in <module>
    index = VectorStoreIndex.from_documents({})
  File "/home/user/.local/lib/python3.9/site-packages/llama_index/core/indices/base.py", line 119, in from_documents
    return cls(
  File "/home/user/.local/lib/python3.9/site-packages/llama_index/core/indices/vector_store/base.py", line 72, in __init__
    else Settings.embed_model
  File "/home/user/.local/lib/python3.9/site-packages/llama_index/core/settings.py", line 64, in embed_model
    self._embed_model = resolve_embed_model("default")
  File "/home/user/.local/lib/python3.9/site-packages/llama_index/core/embeddings/utils.py", line 61, in resolve_embed_model
    raise ImportError(
ImportError: `llama-index-embeddings-openai` package not found, please run `pip install llama-index-embeddings-openai`
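
The failure happens at import time: /app/app.py builds a VectorStoreIndex without an explicit embedding model, so LlamaIndex falls back to its default OpenAI embeddings, which live in the separate llama-index-embeddings-openai package that is not installed in the container. The traceback also shows from_documents being called with {}, while it expects a sequence of Document objects. The snippet below is a minimal sketch of a likely fix, assuming the app really is meant to use the default OpenAI embeddings (which also needs OPENAI_API_KEY set); the sample document text is purely illustrative, not the real app code.

# First install the missing integration named in the error message:
#   pip install llama-index-embeddings-openai
from llama_index.core import Document, VectorStoreIndex

# from_documents expects an iterable of Document objects, not a dict ({}).
documents = [Document(text="example content to index")]  # illustrative placeholder

# With llama-index-embeddings-openai installed (and OPENAI_API_KEY set),
# the default embed model resolves and the index builds at import time.
index = VectorStoreIndex.from_documents(documents)

Alternatively, assigning a non-OpenAI embedding model to Settings.embed_model before constructing the index (the same Settings object visible in the traceback) avoids the OpenAI dependency entirely.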
