Commit

try transitioning to HF Docker space!
anakin87 committed Jan 6, 2024
1 parent ab324a5 commit 073bc44
Showing 4 changed files with 26 additions and 16 deletions.
17 changes: 17 additions & 0 deletions Dockerfile
@@ -0,0 +1,17 @@
FROM deepset/haystack:base-cpu-v1.23.0

COPY requirements.txt .
RUN pip install -r requirements.txt

# copy only the application files in /app
# Streamlit does not allow running an app from the root directory
COPY Rock_fact_checker.py app/
COPY pages app/pages
COPY app_utils app/app_utils
COPY data app/data

WORKDIR app

EXPOSE 8501

ENTRYPOINT ["streamlit", "run", "Rock_fact_checker.py"]
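
With this Dockerfile in place, the image can also be built and run locally for testing, roughly as sketched below; the image tag is illustrative, and port 8501 matches the EXPOSE directive above.

    docker build -t fact-checking-rocks .
    docker run -p 8501:8501 fact-checking-rocks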
5 changes: 2 additions & 3 deletions README.md
@@ -3,9 +3,8 @@ title: Fact Checking rocks!
emoji: 🎸
colorFrom: purple
colorTo: blue
sdk: streamlit
sdk_version: 1.19.0
app_file: Rock_fact_checker.py
sdk: docker
app_port: 8501
pinned: true
models: [sentence-transformers/msmarco-distilbert-base-tas-b, microsoft/deberta-v2-xlarge-mnli, google/flan-t5-large]
tags: [fact-checking, rock, natural language inference, dense retrieval, large language models, haystack, neural search]
12 changes: 3 additions & 9 deletions app_utils/config.py
@@ -1,4 +1,4 @@
import streamlit as st
import os

INDEX_DIR = "data/index"
STATEMENTS_PATH = "data/statements.txt"
@@ -9,17 +9,11 @@

# In HF Space, we use microsoft/deberta-v2-xlarge-mnli
# for local testing, a smaller model is better
try:
NLI_MODEL = st.secrets["NLI_MODEL"]
except:
NLI_MODEL = "valhalla/distilbart-mnli-12-1"
NLI_MODEL = os.environ.get("NLI_MODEL", "valhalla/distilbart-mnli-12-1")
print(f"Used NLI model: {NLI_MODEL}")


# In HF Space, we use google/flan-t5-large
# for local testing, a smaller model is better
try:
PROMPT_MODEL = st.secrets["PROMPT_MODEL"]
except:
PROMPT_MODEL = "google/flan-t5-small"
PROMPT_MODEL = os.environ.get("PROMPT_MODEL", "google/flan-t5-small")
print(f"Used Prompt model: {PROMPT_MODEL}")
8 changes: 4 additions & 4 deletions requirements.txt
@@ -1,10 +1,10 @@
farm-haystack[faiss,inference]==1.23.0
# we now use the Haystack Docker image, so the following line is not needed anymore
# farm-haystack[faiss,inference]==1.23.0

haystack-entailment-checker
plotly==5.14.1
pydantic<2

# commented to not interfere with streamlit SDK in HF spaces
# uncomment for local installation
# streamlit==1.19.0
streamlit==1.19.0

altair<5
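
Since farm-haystack now comes from the deepset/haystack:base-cpu-v1.23.0 base image rather than requirements.txt, a quick sanity check is to ask the built image for the installed version; the image tag below is again the illustrative one from the build example.

    docker run --entrypoint python fact-checking-rocks -c "import haystack; print(haystack.__version__)"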
