Commit new Chart releases for TrueCharts
Signed-off-by: TrueCharts-Bot <bot@truecharts.org>
parent 236250deba
commit 737c077e49
@@ -0,0 +1,30 @@
# Patterns to ignore when building packages.
# This supports shell glob matching, relative path matching, and
# negation (prefixed with !). Only one pattern per line.
.DS_Store
# Common VCS dirs
.git/
.gitignore
.bzr/
.bzrignore
.hg/
.hgignore
.svn/
# Common backup files
*.swp
*.bak
*.tmp
*~
# Various IDEs
.project
.idea/
*.tmproj
.vscode/
# OWNERS file for Kubernetes
OWNERS
# helm-docs templates
*.gotmpl
# docs folder
/docs
# icon
icon.png
@@ -0,0 +1,18 @@
---
title: Changelog
---

*for the complete changelog, please refer to the website*

**Important:**

## anything-llm-0.0.1 (2024-01-21)
@@ -0,0 +1,37 @@
annotations:
  max_scale_version: 24.04.0
  min_scale_version: 23.10.0
  truecharts.org/SCALE-support: "true"
  truecharts.org/category: AI
  truecharts.org/max_helm_version: "3.14"
  truecharts.org/min_helm_version: "3.11"
  truecharts.org/train: incubator
apiVersion: v2
appVersion: latest
dependencies:
  - name: common
    version: 17.5.1
    repository: oci://tccr.io/truecharts
    condition: ""
    alias: ""
    tags: []
    import-values: []
deprecated: false
description: Open-source ChatGPT experience for LLMs, embedders, and vector databases.
home: https://truecharts.org/charts/incubator/anything-llm
icon: https://truecharts.org/img/hotlink-ok/chart-icons/anything-llm.png
keywords:
  - anything-llm
  - ai
kubeVersion: ">=1.24.0-0"
maintainers:
  - name: TrueCharts
    email: info@truecharts.org
    url: https://truecharts.org
name: anything-llm
sources:
  - https://github.com/Mintplex-Labs/anything-llm
  - https://github.com/truecharts/charts/tree/master/charts/incubator/anything-llm
  - https://ghcr.io/mintplex-labs/anything-llm
type: application
version: 1.0.3
@@ -0,0 +1,28 @@
---
title: README
---

## General Info

TrueCharts can be installed either as _normal_ Helm Charts or as Apps on TrueNAS SCALE.
However, only installations using the TrueNAS SCALE Apps system are supported.

For more information about this App, please check the docs on the TrueCharts [website](https://truecharts.org/charts/incubator/anything-llm).

**This chart is not maintained by the upstream project, and any issues with the chart should be raised [here](https://github.com/truecharts/charts/issues/new/choose).**

## Support

- Please check our [quick-start guides for TrueNAS SCALE](https://truecharts.org/manual/SCALE/guides/scale-intro).
- See the [Website](https://truecharts.org).
- Check our [Discord](https://discord.gg/tVsPTHWTtr).
- Open an [issue](https://github.com/truecharts/charts/issues/new/choose).

---

## Sponsor TrueCharts

TrueCharts can only exist due to the incredible effort of our staff.
Please consider making a [donation](https://truecharts.org/sponsor) or contributing back to the project in any way you can!

_All Rights Reserved - The TrueCharts Project_
@@ -0,0 +1,8 @@
Open-source ChatGPT experience for LLMs, embedders, and vector databases.

This App is supplied by TrueCharts; for more information, visit the manual: [https://truecharts.org/charts/incubator/anything-llm](https://truecharts.org/charts/incubator/anything-llm)

---

TrueCharts can only exist due to the incredible effort of our staff.
Please consider making a [donation](https://truecharts.org/sponsor) or contributing back to the project in any way you can!
@@ -0,0 +1,155 @@
image:
  repository: ghcr.io/mintplex-labs/anything-llm
  pullPolicy: IfNotPresent
  tag: latest@sha256:fc85952a3d6e9b33f6cd9368ff114c769b24366c06f35800f82490271fa37dbb

securityContext:
  container:
    readOnlyRootFilesystem: false
    runAsUser: 0
    runAsGroup: 0
    capabilities:
      add:
        - SYS_ADMIN

service:
  main:
    ports:
      main:
        protocol: http
        port: 3001

workload:
  main:
    podSpec:
      containers:
        main:
          env:
            SERVER_PORT: "{{ .Values.service.main.ports.main.port }}"
            STORAGE_DIR: "{{ .Values.persistence.storage.mountPath }}"
            # forces users to use ingress if https is needed.
            # keep false.
            ENABLE_HTTPS: false
            JWT_SECRET:
              secretKeyRef:
                name: anythinglmm-secrets
                key: JWT_SECRET
            # LLM_PROVIDER='openai'
            # OPEN_AI_KEY=
            # OPEN_MODEL_PREF='gpt-3.5-turbo'

            # LLM_PROVIDER='gemini'
            # GEMINI_API_KEY=
            # GEMINI_LLM_MODEL_PREF='gemini-pro'

            # LLM_PROVIDER='azure'
            # AZURE_OPENAI_KEY=
            # AZURE_OPENAI_ENDPOINT=
            # OPEN_MODEL_PREF='my-gpt35-deployment' # This is the "deployment" on Azure you want to use. Not the base model.
            # EMBEDDING_MODEL_PREF='embedder-model' # This is the "deployment" on Azure you want to use for embeddings. Not the base model. Valid base model is text-embedding-ada-002

            # LLM_PROVIDER='anthropic'
            # ANTHROPIC_API_KEY=sk-ant-xxxx
            # ANTHROPIC_MODEL_PREF='claude-2'

            # LLM_PROVIDER='lmstudio'
            # LMSTUDIO_BASE_PATH='http://your-server:1234/v1'
            # LMSTUDIO_MODEL_TOKEN_LIMIT=4096

            # LLM_PROVIDER='localai'
            # LOCAL_AI_BASE_PATH='http://host.docker.internal:8080/v1'
            # LOCAL_AI_MODEL_PREF='luna-ai-llama2'
            # LOCAL_AI_MODEL_TOKEN_LIMIT=4096
            # LOCAL_AI_API_KEY="sk-123abc"

            # LLM_PROVIDER='ollama'
            # OLLAMA_BASE_PATH='http://host.docker.internal:11434'
            # OLLAMA_MODEL_PREF='llama2'
            # OLLAMA_MODEL_TOKEN_LIMIT=4096

            # LLM_PROVIDER='togetherai'
            # TOGETHER_AI_API_KEY='my-together-ai-key'
            # TOGETHER_AI_MODEL_PREF='mistralai/Mixtral-8x7B-Instruct-v0.1'

            # LLM_PROVIDER='mistral'
            # MISTRAL_API_KEY='example-mistral-ai-api-key'
            # MISTRAL_MODEL_PREF='mistral-tiny'

            # LLM_PROVIDER='huggingface'
            # HUGGING_FACE_LLM_ENDPOINT=https://uuid-here.us-east-1.aws.endpoints.huggingface.cloud
            # HUGGING_FACE_LLM_API_KEY=hf_xxxxxx
            # HUGGING_FACE_LLM_TOKEN_LIMIT=8000

            # EMBEDDING_ENGINE='openai'
            # OPEN_AI_KEY=sk-xxxx
            # EMBEDDING_MODEL_PREF='text-embedding-ada-002'

            # EMBEDDING_ENGINE='azure'
            # AZURE_OPENAI_ENDPOINT=
            # AZURE_OPENAI_KEY=
            # EMBEDDING_MODEL_PREF='my-embedder-model' # This is the "deployment" on Azure you want to use for embeddings. Not the base model. Valid base model is text-embedding-ada-002

            # EMBEDDING_ENGINE='localai'
            # EMBEDDING_BASE_PATH='http://localhost:8080/v1'
            # EMBEDDING_MODEL_PREF='text-embedding-ada-002'
            # EMBEDDING_MODEL_MAX_CHUNK_LENGTH=1000 # The max chunk size in chars a string to embed can be

            # Enable all below if you are using vector database: Chroma.
            # VECTOR_DB="chroma"
            # CHROMA_ENDPOINT='http://host.docker.internal:8000'
            # CHROMA_API_HEADER="X-Api-Key"
            # CHROMA_API_KEY="sk-123abc"

            # VECTOR_DB="pinecone"
            # PINECONE_API_KEY=
            # PINECONE_INDEX=

            # VECTOR_DB="lancedb"

            # VECTOR_DB="weaviate"
            # WEAVIATE_ENDPOINT="http://localhost:8080"
            # WEAVIATE_API_KEY=

            # VECTOR_DB="qdrant"
            # QDRANT_ENDPOINT="http://localhost:6333"
            # QDRANT_API_KEY=

            # VECTOR_DB="milvus"
            # MILVUS_ADDRESS="http://localhost:19530"
            # MILVUS_USERNAME=
            # MILVUS_PASSWORD=

            # VECTOR_DB="zilliz"
            # ZILLIZ_ENDPOINT="https://sample.api.gcp-us-west1.zillizcloud.com"
            # ZILLIZ_API_TOKEN=api-token-here

            # VECTOR_DB="astra"
            # ASTRA_DB_APPLICATION_TOKEN=
            # ASTRA_DB_ENDPOINT=

            # AUTH_TOKEN="hunter2" # This is the password to your application if remote hosting.
            # DISABLE_TELEMETRY="false"

            # Documentation on how to use https://github.com/kamronbatman/joi-password-complexity
            # Default is only 8 char minimum
            PASSWORDMINCHAR: 8
            PASSWORDMAXCHAR: 250
            PASSWORDLOWERCASE: 1
            PASSWORDUPPERCASE: 1
            PASSWORDNUMERIC: 1
            PASSWORDSYMBOL: 1
            PASSWORDREQUIREMENTS: 4

persistence:
  storage:
    enabled: true
    mountPath: "/app/server/storage"
  hotdir:
    enabled: true
    mountPath: "/app/collector/hotdir"
  outputs:
    enabled: true
    mountPath: "/app/collector/outputs"

portal:
  open:
    enabled: true
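The `env` block above only wires up the port, storage directory, and JWT secret; every LLM, embedder, and vector-database provider is shipped as a commented-out hint. As a hedged illustration (not part of this commit), a user-side values override along these lines could enable the Ollama provider, reusing the key names from those comments; the base path and model are placeholders:

```yaml
# Illustrative values override only: enables the Ollama LLM provider.
# Key names mirror the commented-out hints in the chart's values.yaml;
# the base path below is a placeholder for wherever Ollama is reachable.
workload:
  main:
    podSpec:
      containers:
        main:
          env:
            LLM_PROVIDER: "ollama"
            OLLAMA_BASE_PATH: "http://ollama.example.local:11434"
            OLLAMA_MODEL_PREF: "llama2"
            OLLAMA_MODEL_TOKEN_LIMIT: 4096
```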
File diff suppressed because it is too large
@@ -0,0 +1 @@
{{- include "tc.v1.common.lib.chart.notes" $ -}}
@@ -0,0 +1,13 @@
{{/* Define the secrets */}}
{{- define "anythinglmm.secrets" -}}
{{- $secretName := (printf "%s-anythinglmm-secrets" (include "tc.v1.common.lib.chart.names.fullname" $)) }}

{{- $jwtSecret := randAlphaNum 64 -}}

{{- with lookup "v1" "Secret" .Release.Namespace $secretName -}}
  {{- $jwtSecret = index .data "JWT_SECRET" | b64dec -}}
{{- end }}
enabled: true
data:
  JWT_SECRET: {{ $jwtSecret }}
{{- end -}}
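For context, the helper above generates a random 64-character JWT secret on first install and, via the `lookup` call, re-reads the value from the already-existing Secret on upgrades, so the token stays stable across chart updates. A minimal sketch of the YAML it renders (the structure comes from the template itself; the secret value shown is a made-up placeholder):

```yaml
# Hypothetical rendering of (include "anythinglmm.secrets" .);
# the JWT_SECRET value here is illustrative only.
enabled: true
data:
  JWT_SECRET: aB3kX9wQ...  # random on first install, reused from the live Secret afterwards
```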
@@ -0,0 +1,11 @@
{{/* Make sure all variables are set properly */}}
{{- include "tc.v1.common.loader.init" . -}}

{{/* Render secrets for anythinglmm */}}
{{- $secrets := include "anythinglmm.secrets" . | fromYaml -}}
{{- if $secrets -}}
  {{- $_ := set .Values.secret "anythinglmm-secrets" $secrets -}}
{{- end -}}

{{/* Render the templates */}}
{{- include "tc.v1.common.loader.apply" . -}}