-
Notifications
You must be signed in to change notification settings - Fork 1
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
0 parents
commit caffc31
Showing
5 changed files
with
171 additions
and
0 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,12 @@ | ||
# Slim Python base image serving the Streamlit app.
FROM python:3.11-slim

WORKDIR /app

# Install dependencies before copying the app source so this (expensive)
# layer is cached when only application code changes.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Copy the rest of the application source.
COPY . .

# PORT may be supplied at build time; default to 80 when unset.
ARG PORT
EXPOSE ${PORT:-80}

# Shell form is required here so ${PORT:-80} is expanded by the shell
# when the container starts (exec form would pass the literal text).
CMD streamlit run --server.port ${PORT:-80} app.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,22 @@ | ||
# GenAI Starter - Llamaindex - Restack | ||
|
||
An example of how to deploy a [LlamaIndex](https://www.llamaindex.ai/) application with [Streamlit](https://streamlit.io/) with Restack.
|
||
--- | ||
|
||
### Environment Variables | ||
|
||
<details> | ||
|
||
<summary>ℹ️ OpenAI models</summary> | ||
|
||
In this example, we chose OpenAI's models for the sake of simplicity, but you're free to choose the models you prefer as LlamaIndex provides support for other models as well. In that case, we recommend you remove the `OPENAI_API_KEY` environment variable and the relevant application code.
|
||
</details> | ||
|
||
To ensure your successful deployment, set the following environment variables: | ||
|
||
```bash | ||
# Get it from https://platform.openai.com/account/api-keys | ||
OPENAI_API_KEY=<YOUR_API_KEY> | ||
``` |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,39 @@ | ||
"""Streamlit app: question answering over local documents with LlamaIndex.

Builds (or reloads a persisted) vector index over the files in ./data and
answers free-form user queries against it through a query engine.
"""

import os.path

import streamlit as st
from llama_index.core import (
    SimpleDirectoryReader,
    StorageContext,
    VectorStoreIndex,
    load_index_from_storage,
)

# Directory where the serialized index is persisted between runs.
PERSIST_DIR = "./storage"

if not os.path.exists(PERSIST_DIR):
    # First run: embed the documents in ./data and persist the index so
    # subsequent launches skip the (slow, paid) embedding step.
    documents = SimpleDirectoryReader("data").load_data()
    index = VectorStoreIndex.from_documents(documents)
    index.storage_context.persist(persist_dir=PERSIST_DIR)
else:
    # Subsequent runs: reload the previously persisted index.
    storage_context = StorageContext.from_defaults(persist_dir=PERSIST_DIR)
    index = load_index_from_storage(storage_context)

query_engine = index.as_query_engine()

# --- Streamlit UI ---------------------------------------------------------
st.title('Ask Llama about the "GPT-4 Technical Report"')
query = st.text_input(
    "What would you like to ask? (source: data/gpt-4.pdf)",
    "What insights from the report?",
)

if st.button("Submit"):
    if not query.strip():
        # Fixed: was an f-string with no placeholders (ruff F541).
        st.error("Please provide the search query.")
    else:
        try:
            response = query_engine.query(query)
            st.success(response)
        except Exception as e:
            # Boundary handler: surface any backend failure to the UI
            # instead of crashing the Streamlit session.
            st.error(f"An error occurred: {e}")
Binary file not shown.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,98 @@ | ||
aiohttp==3.9.3
aiosignal==1.3.1
altair==5.2.0
annotated-types==0.6.0
anyio==4.3.0
attrs==23.2.0
beautifulsoup4==4.12.3
blinker==1.7.0
bs4==0.0.2
cachetools==5.3.2
certifi==2024.2.2
charset-normalizer==3.3.2
click==8.1.7
dataclasses-json==0.6.4
Deprecated==1.2.14
dirtyjson==1.0.8
distro==1.9.0
frozenlist==1.4.1
fsspec==2024.2.0
gitdb==4.0.11
GitPython==3.1.42
greenlet==3.0.3
h11==0.14.0
httpcore==1.0.3
httpx==0.26.0
idna==3.6
importlib-metadata==7.0.1
Jinja2==3.1.3
joblib==1.3.2
jsonschema==4.21.1
jsonschema-specifications==2023.12.1
llama-index==0.10.9
llama-index-agent-openai==0.1.3
llama-index-core==0.10.8.post1
llama-index-embeddings-openai==0.1.4
llama-index-indices-managed-llama-cloud==0.1.1
llama-index-legacy==0.9.48
llama-index-llms-openai==0.1.3
llama-index-multi-modal-llms-openai==0.1.2
llama-index-program-openai==0.1.2
llama-index-question-gen-openai==0.1.1
llama-index-readers-file==0.1.3
llama-index-readers-llama-parse==0.1.0
llama-parse==0.3.3
llamaindex-py-client==0.1.13
markdown-it-py==3.0.0
MarkupSafe==2.1.5
marshmallow==3.20.2
mdurl==0.1.2
multidict==6.0.5
mypy-extensions==1.0.0
nest-asyncio==1.6.0
networkx==3.2.1
nltk==3.8.1
numpy==1.26.4
openai==1.12.0
packaging==23.2
pandas==2.2.0
pillow==10.2.0
protobuf==4.25.3
pyarrow==15.0.0
pydantic==2.6.1
pydantic_core==2.16.2
pydeck==0.8.1b0
Pygments==2.17.2
PyMuPDF==1.23.24
PyMuPDFb==1.23.22
pypdf==4.0.2
python-dateutil==2.8.2
pytz==2024.1
PyYAML==6.0.1
referencing==0.33.0
regex==2023.12.25
requests==2.31.0
rich==13.7.0
rpds-py==0.18.0
six==1.16.0
smmap==5.0.1
sniffio==1.3.0
soupsieve==2.5
SQLAlchemy==2.0.27
streamlit==1.31.1
tenacity==8.2.3
tiktoken==0.6.0
toml==0.10.2
toolz==0.12.1
tornado==6.4
tqdm==4.66.2
typing-inspect==0.9.0
typing_extensions==4.9.0
tzdata==2024.1
tzlocal==5.2
urllib3==2.2.1
validators==0.22.0
watchdog==4.0.0
wrapt==1.16.0
yarl==1.9.4
zipp==3.17.0