diff --git a/docker-compose.yml b/docker-compose.yml
index fb7a1e19a..4b166f490 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -34,8 +34,6 @@ services:
       # - LLM_MODEL_CONFIG_bedrock_claude_3_5_sonnet=${LLM_MODEL_CONFIG_bedrock_claude_3_5_sonnet-}
       # - LLM_MODEL_CONFIG_fireworks_qwen_72b=${LLM_MODEL_CONFIG_fireworks_qwen_72b-}
       - LLM_MODEL_CONFIG_ollama_llama3=${LLM_MODEL_CONFIG_ollama_llama3-}
-    # env_file:
-    #   - ./backend/.env
     container_name: backend
     extra_hosts:
       - host.docker.internal:host-gateway
diff --git a/example.env b/example.env
deleted file mode 100644
index 71c9dd36b..000000000
--- a/example.env
+++ /dev/null
@@ -1,36 +0,0 @@
-# Optional Backend
-EMBEDDING_MODEL="all-MiniLM-L6-v2"
-IS_EMBEDDING="true"
-KNN_MIN_SCORE="0.94"
-# Enable Gemini (default is False) | Can be False or True
-GEMINI_ENABLED=False
-# LLM_MODEL_CONFIG_ollama_llama3="llama3,http://host.docker.internal:11434"
-
-# Enable Google Cloud logs (default is False) | Can be False or True
-GCP_LOG_METRICS_ENABLED=False
-NUMBER_OF_CHUNKS_TO_COMBINE=6
-UPDATE_GRAPH_CHUNKS_PROCESSED=20
-NEO4J_URI="neo4j://database:7687"
-NEO4J_USERNAME="neo4j"
-NEO4J_PASSWORD="password"
-LANGCHAIN_API_KEY=""
-LANGCHAIN_PROJECT=""
-LANGCHAIN_TRACING_V2="true"
-LANGCHAIN_ENDPOINT="https://api.smith.langchain.com"
-GCS_FILE_CACHE=False
-ENTITY_EMBEDDING=True
-
-# Optional Frontend
-VITE_BACKEND_API_URL="http://localhost:8000"
-VITE_BLOOM_URL="https://workspace-preview.neo4j.io/workspace/explore?connectURL={CONNECT_URL}&search=Show+me+a+graph&featureGenAISuggestions=true&featureGenAISuggestionsInternal=true"
-VITE_REACT_APP_SOURCES="local,youtube,wiki,s3,web"
-VITE_ENV="DEV"
-VITE_TIME_PER_PAGE=50
-VITE_CHUNK_SIZE=5242880
-VITE_CHUNK_OVERLAP=20
-VITE_TOKENS_PER_CHUNK=100
-VITE_CHUNK_TO_COMBINE=1
-VITE_GOOGLE_CLIENT_ID=""
-VITE_CHAT_MODES=""
-VITE_BATCH_SIZE=2
-VITE_LLM_MODELS_PROD="openai_gpt_4o,openai_gpt_4o_mini,diffbot,gemini_1.5_flash"
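Note: with the commented env_file option and example.env removed, the backend no longer has a checked-in template; values reach the container through Compose variable substitution (the ${LLM_MODEL_CONFIG_ollama_llama3-} expression), which Docker Compose resolves from a .env file in the project root if one exists. A minimal sketch of such a root-level .env, assuming that setup and reusing the value from the deleted example.env:

# .env (project root, read automatically by docker compose for ${VAR-} substitution)
LLM_MODEL_CONFIG_ollama_llama3="llama3,http://host.docker.internal:11434"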