Compare commits


19 Commits

Author SHA1 Message Date
yihong
7b1fc4d2e6 fix: add make test for short cut backend unittest (#28380)
Signed-off-by: yihong0618 <zouzou0208@gmail.com>
2025-11-20 11:33:42 +08:00
github-actions[bot]
204d5f1bb9 chore: translate i18n files and update type definitions (#28429)
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-11-20 11:32:55 +08:00
Will
8fc1c7d994 chore: remove redundant reimports (#28415)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: Asuka Minato <i@asukaminato.eu.org>
2025-11-20 11:28:29 +08:00
yangzheli
879869d3e3 fix(web): fix checkbox unselectable bug & optimize document-list/app-annotation styles (#28244) 2025-11-20 11:28:20 +08:00
GuanMu
1d2cdf3489 feat: add display status filtering to document list and API (#28342)
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-11-20 11:27:44 +08:00
yangzheli
a5d0e68675 feat(workflow): optimize workflow canvas pan and scroll behavior (#28250)
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-11-20 11:27:30 +08:00
github-actions[bot]
605e543372 chore: translate i18n files and update type definitions (#28425)
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-11-20 11:26:49 +08:00
-LAN-
c432f601ab fix: change TenantApi endpoint from GET to POST (#27858)
Co-authored-by: Claude <noreply@anthropic.com>
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-11-20 11:22:37 +08:00
lyzno1
e8d03a422d fix: improve email code sign-in experience (#28307) 2025-11-20 11:19:15 +08:00
Novice
6be013e072 feat: implement RFC-compliant OAuth discovery with dynamic scope selection for MCP providers (#28294)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-20 11:18:16 +08:00
znn
014cbaf387 make expand/collapse in question classifier node (#26772)
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
Co-authored-by: lyzno1 <yuanyouhuilyz@gmail.com>
Co-authored-by: lyzno1 <92089059+lyzno1@users.noreply.github.com>
2025-11-20 11:17:34 +08:00
XlKsyt
1be38183e5 fix(frontend): add missing vertical type to divider in provider config modal (#28387)
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-11-20 11:17:04 +08:00
ice
8bab42e224 style(web): fix vertical alignment of search button on apps page (#28398) 2025-11-20 11:14:09 +08:00
wangxiaolei
99e9fc751b refactor: refactor python sdk (#28118)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-20 11:10:53 +08:00
Maries
a1b735a4c0 feat: trigger billing (#28335)
Signed-off-by: lyzno1 <yuanyouhuilyz@gmail.com>
Co-authored-by: lyzno1 <yuanyouhuilyz@gmail.com>
Co-authored-by: lyzno1 <92089059+lyzno1@users.noreply.github.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-20 10:15:23 +08:00
longbingljw
c0b7ffd5d0 feat:mysql adaptation for metadb (#28188) 2025-11-20 09:44:39 +08:00
Maries
012877d8d4 fix: address user input preparation in workflow app generator (#28410)
Co-authored-by: lyzno1 <92089059+lyzno1@users.noreply.github.com>
2025-11-20 02:09:40 +08:00
Jyong
41bb6f3109 Revert "add vdb-test workflow run filter" (#28382)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-19 20:02:24 +08:00
Lloyd-Pottiger
88c9b18cb6 fix(docker): start-up TiFlash (#28376) 2025-11-19 13:59:56 +08:00
299 changed files with 13450 additions and 3619 deletions


@@ -62,7 +62,7 @@ jobs:
compose-file: |
docker/docker-compose.middleware.yaml
services: |
db
db_postgres
redis
sandbox
ssrf_proxy


@@ -8,7 +8,7 @@ concurrency:
cancel-in-progress: true
jobs:
db-migration-test:
db-migration-test-postgres:
runs-on: ubuntu-latest
steps:
@@ -45,7 +45,7 @@ jobs:
compose-file: |
docker/docker-compose.middleware.yaml
services: |
db
db_postgres
redis
- name: Prepare configs
@@ -57,3 +57,60 @@ jobs:
env:
DEBUG: true
run: uv run --directory api flask upgrade-db
db-migration-test-mysql:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 0
persist-credentials: false
- name: Setup UV and Python
uses: astral-sh/setup-uv@v6
with:
enable-cache: true
python-version: "3.12"
cache-dependency-glob: api/uv.lock
- name: Install dependencies
run: uv sync --project api
- name: Ensure offline migrations are supported
run: |
# upgrade
uv run --directory api flask db upgrade 'base:head' --sql
# downgrade
uv run --directory api flask db downgrade 'head:base' --sql
- name: Prepare middleware env for MySQL
run: |
cd docker
cp middleware.env.example middleware.env
sed -i 's/DB_TYPE=postgresql/DB_TYPE=mysql/' middleware.env
sed -i 's/DB_HOST=db_postgres/DB_HOST=db_mysql/' middleware.env
sed -i 's/DB_PORT=5432/DB_PORT=3306/' middleware.env
sed -i 's/DB_USERNAME=postgres/DB_USERNAME=mysql/' middleware.env
- name: Set up Middlewares
uses: hoverkraft-tech/compose-action@v2.0.2
with:
compose-file: |
docker/docker-compose.middleware.yaml
services: |
db_mysql
redis
- name: Prepare configs for MySQL
run: |
cd api
cp .env.example .env
sed -i 's/DB_TYPE=postgresql/DB_TYPE=mysql/' .env
sed -i 's/DB_PORT=5432/DB_PORT=3306/' .env
sed -i 's/DB_USERNAME=postgres/DB_USERNAME=root/' .env
- name: Run DB Migration
env:
DEBUG: true
run: uv run --directory api flask upgrade-db


@@ -1,10 +1,7 @@
name: Run VDB Tests
on:
push:
branches: [main]
paths:
- 'api/core/rag/*.py'
workflow_call:
concurrency:
group: vdb-tests-${{ github.head_ref || github.run_id }}
@@ -54,13 +51,13 @@ jobs:
- name: Expose Service Ports
run: sh .github/workflows/expose_service_ports.sh
- name: Set up Vector Store (TiDB)
uses: hoverkraft-tech/compose-action@v2.0.2
with:
compose-file: docker/tidb/docker-compose.yaml
services: |
tidb
tiflash
# - name: Set up Vector Store (TiDB)
# uses: hoverkraft-tech/compose-action@v2.0.2
# with:
# compose-file: docker/tidb/docker-compose.yaml
# services: |
# tidb
# tiflash
- name: Set up Vector Stores (Weaviate, Qdrant, PGVector, Milvus, PgVecto-RS, Chroma, MyScale, ElasticSearch, Couchbase, OceanBase)
uses: hoverkraft-tech/compose-action@v2.0.2
@@ -86,8 +83,8 @@ jobs:
ls -lah .
cp api/tests/integration_tests/.env.example api/tests/integration_tests/.env
- name: Check VDB Ready (TiDB)
run: uv run --project api python api/tests/integration_tests/vdb/tidb_vector/check_tiflash_ready.py
# - name: Check VDB Ready (TiDB)
# run: uv run --project api python api/tests/integration_tests/vdb/tidb_vector/check_tiflash_ready.py
- name: Test Vector Stores
run: uv run --project api bash dev/pytest/pytest_vdb.sh

.gitignore

@@ -186,6 +186,8 @@ docker/volumes/couchbase/*
docker/volumes/oceanbase/*
docker/volumes/plugin_daemon/*
docker/volumes/matrixone/*
docker/volumes/mysql/*
docker/volumes/seekdb/*
!docker/volumes/oceanbase/init.d
docker/nginx/conf.d/default.conf


@@ -70,6 +70,11 @@ type-check:
@uv run --directory api --dev basedpyright
@echo "✅ Type check complete"
test:
@echo "🧪 Running backend unit tests..."
@uv run --project api --dev dev/pytest/pytest_unit_tests.sh
@echo "✅ Tests complete"
# Build Docker images
build-web:
@echo "Building web Docker image: $(WEB_IMAGE):$(VERSION)..."
@@ -119,6 +124,7 @@ help:
@echo " make check - Check code with ruff"
@echo " make lint - Format and fix code with ruff"
@echo " make type-check - Run type checking with basedpyright"
@echo " make test - Run backend unit tests"
@echo ""
@echo "Docker Build Targets:"
@echo " make build-web - Build web Docker image"
@@ -128,4 +134,4 @@ help:
@echo " make build-push-all - Build and push all Docker images"
# Phony targets
.PHONY: build-web build-api push-web push-api build-all push-all build-push-all dev-setup prepare-docker prepare-web prepare-api dev-clean help format check lint type-check
.PHONY: build-web build-api push-web push-api build-all push-all build-push-all dev-setup prepare-docker prepare-web prepare-api dev-clean help format check lint type-check test


@@ -72,12 +72,15 @@ REDIS_CLUSTERS_PASSWORD=
# celery configuration
CELERY_BROKER_URL=redis://:difyai123456@localhost:${REDIS_PORT}/1
CELERY_BACKEND=redis
# PostgreSQL database configuration
# Database configuration
DB_TYPE=postgresql
DB_USERNAME=postgres
DB_PASSWORD=difyai123456
DB_HOST=localhost
DB_PORT=5432
DB_DATABASE=dify
SQLALCHEMY_POOL_PRE_PING=true
SQLALCHEMY_POOL_TIMEOUT=30
@@ -163,7 +166,7 @@ CONSOLE_CORS_ALLOW_ORIGINS=http://localhost:3000,*
COOKIE_DOMAIN=
# Vector database configuration
# Supported values are `weaviate`, `qdrant`, `milvus`, `myscale`, `relyt`, `pgvector`, `pgvecto-rs`, `chroma`, `opensearch`, `oracle`, `tencent`, `elasticsearch`, `elasticsearch-ja`, `analyticdb`, `couchbase`, `vikingdb`, `oceanbase`, `opengauss`, `tablestore`,`vastbase`,`tidb`,`tidb_on_qdrant`,`baidu`,`lindorm`,`huawei_cloud`,`upstash`, `matrixone`.
# Supported values are `weaviate`, `oceanbase`, `qdrant`, `milvus`, `myscale`, `relyt`, `pgvector`, `pgvecto-rs`, `chroma`, `opensearch`, `oracle`, `tencent`, `elasticsearch`, `elasticsearch-ja`, `analyticdb`, `couchbase`, `vikingdb`, `opengauss`, `tablestore`,`vastbase`,`tidb`,`tidb_on_qdrant`,`baidu`,`lindorm`,`huawei_cloud`,`upstash`, `matrixone`.
VECTOR_STORE=weaviate
# Prefix used to create collection name in vector database
VECTOR_INDEX_NAME_PREFIX=Vector_index
@@ -174,6 +177,17 @@ WEAVIATE_API_KEY=WVF5YThaHlkYwhGUSmCRgsX3tD5ngdN8pkih
WEAVIATE_GRPC_ENABLED=false
WEAVIATE_BATCH_SIZE=100
# OceanBase Vector configuration
OCEANBASE_VECTOR_HOST=127.0.0.1
OCEANBASE_VECTOR_PORT=2881
OCEANBASE_VECTOR_USER=root@test
OCEANBASE_VECTOR_PASSWORD=difyai123456
OCEANBASE_VECTOR_DATABASE=test
OCEANBASE_MEMORY_LIMIT=6G
OCEANBASE_ENABLE_HYBRID_SEARCH=false
OCEANBASE_FULLTEXT_PARSER=ik
SEEKDB_MEMORY_LIMIT=2G
# Qdrant configuration, use `http://localhost:6333` for local mode or `https://your-qdrant-cluster-url.qdrant.io` for remote mode
QDRANT_URL=http://localhost:6333
QDRANT_API_KEY=difyai123456
@@ -339,15 +353,6 @@ LINDORM_PASSWORD=admin
LINDORM_USING_UGC=True
LINDORM_QUERY_TIMEOUT=1
# OceanBase Vector configuration
OCEANBASE_VECTOR_HOST=127.0.0.1
OCEANBASE_VECTOR_PORT=2881
OCEANBASE_VECTOR_USER=root@test
OCEANBASE_VECTOR_PASSWORD=difyai123456
OCEANBASE_VECTOR_DATABASE=test
OCEANBASE_MEMORY_LIMIT=6G
OCEANBASE_ENABLE_HYBRID_SEARCH=false
# AlibabaCloud MySQL Vector configuration
ALIBABACLOUD_MYSQL_HOST=127.0.0.1
ALIBABACLOUD_MYSQL_PORT=3306


@@ -15,8 +15,8 @@
```bash
cd ../docker
cp middleware.env.example middleware.env
# change the profile to other vector database if you are not using weaviate
docker compose -f docker-compose.middleware.yaml --profile weaviate -p dify up -d
# change the profile to mysql if you are not using postgres; change the profile to another vector database if you are not using weaviate
docker compose -f docker-compose.middleware.yaml --profile postgresql --profile weaviate -p dify up -d
cd ../api
```


@@ -77,10 +77,6 @@ class AppExecutionConfig(BaseSettings):
description="Maximum number of concurrent active requests per app (0 for unlimited)",
default=0,
)
APP_DAILY_RATE_LIMIT: NonNegativeInt = Field(
description="Maximum number of requests per app per day",
default=5000,
)
class CodeExecutionSandboxConfig(BaseSettings):
@@ -1086,7 +1082,7 @@ class CeleryScheduleTasksConfig(BaseSettings):
)
TRIGGER_PROVIDER_CREDENTIAL_THRESHOLD_SECONDS: int = Field(
description="Proactive credential refresh threshold in seconds",
default=180,
default=60 * 60,
)
TRIGGER_PROVIDER_SUBSCRIPTION_THRESHOLD_SECONDS: int = Field(
description="Proactive subscription refresh threshold in seconds",


@@ -105,6 +105,12 @@ class KeywordStoreConfig(BaseSettings):
class DatabaseConfig(BaseSettings):
# Database type selector
DB_TYPE: Literal["postgresql", "mysql", "oceanbase"] = Field(
description="Database type to use. OceanBase is MySQL-compatible.",
default="postgresql",
)
DB_HOST: str = Field(
description="Hostname or IP address of the database server.",
default="localhost",
@@ -140,10 +146,10 @@ class DatabaseConfig(BaseSettings):
default="",
)
SQLALCHEMY_DATABASE_URI_SCHEME: str = Field(
description="Database URI scheme for SQLAlchemy connection.",
default="postgresql",
)
@computed_field # type: ignore[prop-decorator]
@property
def SQLALCHEMY_DATABASE_URI_SCHEME(self) -> str:
return "postgresql" if self.DB_TYPE == "postgresql" else "mysql+pymysql"
@computed_field # type: ignore[prop-decorator]
@property
@@ -204,15 +210,15 @@ class DatabaseConfig(BaseSettings):
# Parse DB_EXTRAS for 'options'
db_extras_dict = dict(parse_qsl(self.DB_EXTRAS))
options = db_extras_dict.get("options", "")
# Always include timezone
timezone_opt = "-c timezone=UTC"
if options:
# Merge user options and timezone
merged_options = f"{options} {timezone_opt}"
else:
merged_options = timezone_opt
connect_args = {"options": merged_options}
connect_args = {}
# Use the dynamic SQLALCHEMY_DATABASE_URI_SCHEME property
if self.SQLALCHEMY_DATABASE_URI_SCHEME.startswith("postgresql"):
timezone_opt = "-c timezone=UTC"
if options:
merged_options = f"{options} {timezone_opt}"
else:
merged_options = timezone_opt
connect_args = {"options": merged_options}
return {
"pool_size": self.SQLALCHEMY_POOL_SIZE,

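The net effect of the DatabaseConfig change: the SQLAlchemy URI scheme is now derived from DB_TYPE, and the PostgreSQL-only `-c timezone=UTC` connect option is attached conditionally. A minimal standalone sketch of that logic (field and property names taken from the diff; the helper method name is illustrative, as the real code builds connect_args inside its engine-options property):

```python
from pydantic import Field, computed_field
from pydantic_settings import BaseSettings


class DatabaseConfig(BaseSettings):
    # "postgresql", "mysql", or "oceanbase" (OceanBase is MySQL-compatible)
    DB_TYPE: str = Field(default="postgresql")

    @computed_field  # type: ignore[prop-decorator]
    @property
    def SQLALCHEMY_DATABASE_URI_SCHEME(self) -> str:
        # Anything that is not PostgreSQL goes through the pymysql driver.
        return "postgresql" if self.DB_TYPE == "postgresql" else "mysql+pymysql"

    def build_connect_args(self) -> dict[str, str]:
        # Only PostgreSQL understands "-c timezone=UTC" in `options`;
        # MySQL-family connections get no extra options here.
        if self.SQLALCHEMY_DATABASE_URI_SCHEME.startswith("postgresql"):
            return {"options": "-c timezone=UTC"}
        return {}


assert DatabaseConfig(DB_TYPE="oceanbase").SQLALCHEMY_DATABASE_URI_SCHEME == "mysql+pymysql"
```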

@@ -250,10 +250,8 @@ class AppApi(Resource):
args = parser.parse_args()
app_service = AppService()
# Construct ArgsDict from parsed arguments
from services.app_service import AppService as AppServiceType
args_dict: AppServiceType.ArgsDict = {
args_dict: AppService.ArgsDict = {
"name": args["name"],
"description": args.get("description", ""),
"icon_type": args.get("icon_type", ""),


@@ -10,9 +10,9 @@ from controllers.console.wraps import account_initialization_required, setup_req
from core.app.entities.app_invoke_entities import InvokeFrom
from extensions.ext_database import db
from libs.datetime_utils import parse_time_range
from libs.helper import DatetimeString
from libs.helper import DatetimeString, convert_datetime_to_date
from libs.login import current_account_with_tenant, login_required
from models import AppMode, Message
from models import AppMode
@console_ns.route("/apps/<uuid:app_id>/statistics/daily-messages")
@@ -44,8 +44,9 @@ class DailyMessageStatistic(Resource):
)
args = parser.parse_args()
sql_query = """SELECT
DATE(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
converted_created_at = convert_datetime_to_date("created_at")
sql_query = f"""SELECT
{converted_created_at} AS date,
COUNT(*) AS message_count
FROM
messages
@@ -106,6 +107,17 @@ class DailyConversationStatistic(Resource):
account, _ = current_account_with_tenant()
args = parser.parse_args()
converted_created_at = convert_datetime_to_date("created_at")
sql_query = f"""SELECT
{converted_created_at} AS date,
COUNT(DISTINCT conversation_id) AS conversation_count
FROM
messages
WHERE
app_id = :app_id
AND invoke_from != :invoke_from"""
arg_dict = {"tz": account.timezone, "app_id": app_model.id, "invoke_from": InvokeFrom.DEBUGGER}
assert account.timezone is not None
try:
@@ -113,30 +125,21 @@ class DailyConversationStatistic(Resource):
except ValueError as e:
abort(400, description=str(e))
stmt = (
sa.select(
sa.func.date(
sa.func.date_trunc("day", sa.text("created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz"))
).label("date"),
sa.func.count(sa.distinct(Message.conversation_id)).label("conversation_count"),
)
.select_from(Message)
.where(Message.app_id == app_model.id, Message.invoke_from != InvokeFrom.DEBUGGER)
)
if start_datetime_utc:
stmt = stmt.where(Message.created_at >= start_datetime_utc)
sql_query += " AND created_at >= :start"
arg_dict["start"] = start_datetime_utc
if end_datetime_utc:
stmt = stmt.where(Message.created_at < end_datetime_utc)
sql_query += " AND created_at < :end"
arg_dict["end"] = end_datetime_utc
stmt = stmt.group_by("date").order_by("date")
sql_query += " GROUP BY date ORDER BY date"
response_data = []
with db.engine.begin() as conn:
rs = conn.execute(stmt, {"tz": account.timezone})
for row in rs:
response_data.append({"date": str(row.date), "conversation_count": row.conversation_count})
rs = conn.execute(sa.text(sql_query), arg_dict)
for i in rs:
response_data.append({"date": str(i.date), "conversation_count": i.conversation_count})
return jsonify({"data": response_data})
@@ -161,8 +164,9 @@ class DailyTerminalsStatistic(Resource):
args = parser.parse_args()
sql_query = """SELECT
DATE(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
converted_created_at = convert_datetime_to_date("created_at")
sql_query = f"""SELECT
{converted_created_at} AS date,
COUNT(DISTINCT messages.from_end_user_id) AS terminal_count
FROM
messages
@@ -217,8 +221,9 @@ class DailyTokenCostStatistic(Resource):
args = parser.parse_args()
sql_query = """SELECT
DATE(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
converted_created_at = convert_datetime_to_date("created_at")
sql_query = f"""SELECT
{converted_created_at} AS date,
(SUM(messages.message_tokens) + SUM(messages.answer_tokens)) AS token_count,
SUM(total_price) AS total_price
FROM
@@ -276,8 +281,9 @@ class AverageSessionInteractionStatistic(Resource):
args = parser.parse_args()
sql_query = """SELECT
DATE(DATE_TRUNC('day', c.created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
converted_created_at = convert_datetime_to_date("c.created_at")
sql_query = f"""SELECT
{converted_created_at} AS date,
AVG(subquery.message_count) AS interactions
FROM
(
@@ -351,8 +357,9 @@ class UserSatisfactionRateStatistic(Resource):
args = parser.parse_args()
sql_query = """SELECT
DATE(DATE_TRUNC('day', m.created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
converted_created_at = convert_datetime_to_date("m.created_at")
sql_query = f"""SELECT
{converted_created_at} AS date,
COUNT(m.id) AS message_count,
COUNT(mf.id) AS feedback_count
FROM
@@ -416,8 +423,9 @@ class AverageResponseTimeStatistic(Resource):
args = parser.parse_args()
sql_query = """SELECT
DATE(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
converted_created_at = convert_datetime_to_date("created_at")
sql_query = f"""SELECT
{converted_created_at} AS date,
AVG(provider_response_latency) AS latency
FROM
messages
@@ -471,8 +479,9 @@ class TokensPerSecondStatistic(Resource):
account, _ = current_account_with_tenant()
args = parser.parse_args()
sql_query = """SELECT
DATE(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
converted_created_at = convert_datetime_to_date("created_at")
sql_query = f"""SELECT
{converted_created_at} AS date,
CASE
WHEN SUM(provider_response_latency) = 0 THEN 0
ELSE (SUM(answer_tokens) / SUM(provider_response_latency))
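
The statistics endpoints above replace the hard-coded PostgreSQL `DATE(DATE_TRUNC('day', ... AT TIME ZONE :tz))` expression with a `convert_datetime_to_date` helper imported from libs.helper. Its body is not shown in this diff; a plausible dialect-aware sketch, assuming the config accessor used elsewhere in the changeset:

```python
from configs import dify_config


def convert_datetime_to_date(column: str) -> str:
    """Render a SQL fragment converting a UTC timestamp column to a date in the :tz timezone."""
    if dify_config.SQLALCHEMY_DATABASE_URI_SCHEME.startswith("postgresql"):
        return f"DATE(DATE_TRUNC('day', {column} AT TIME ZONE 'UTC' AT TIME ZONE :tz))"
    # MySQL-compatible path; CONVERT_TZ needs the named-zone tables loaded.
    return f"DATE(CONVERT_TZ({column}, '+00:00', :tz))"
```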


@@ -162,6 +162,7 @@ class DatasetDocumentListApi(Resource):
"keyword": "Search keyword",
"sort": "Sort order (default: -created_at)",
"fetch": "Fetch full details (default: false)",
"status": "Filter documents by display status",
}
)
@api.response(200, "Documents retrieved successfully")
@@ -175,6 +176,7 @@ class DatasetDocumentListApi(Resource):
limit = request.args.get("limit", default=20, type=int)
search = request.args.get("keyword", default=None, type=str)
sort = request.args.get("sort", default="-created_at", type=str)
status = request.args.get("status", default=None, type=str)
# "yes", "true", "t", "y", "1" convert to True, while others convert to False.
try:
fetch_val = request.args.get("fetch", default="false")
@@ -203,6 +205,9 @@ class DatasetDocumentListApi(Resource):
query = select(Document).filter_by(dataset_id=str(dataset_id), tenant_id=current_tenant_id)
if status:
query = DocumentService.apply_display_status_filter(query, status)
if search:
search = f"%{search}%"
query = query.where(Document.name.like(search))


@@ -1086,7 +1086,13 @@ class ToolMCPAuthApi(Resource):
return {"result": "success"}
except MCPAuthError as e:
try:
auth_result = auth(provider_entity, args.get("authorization_code"))
# Pass the extracted OAuth metadata hints to auth()
auth_result = auth(
provider_entity,
args.get("authorization_code"),
resource_metadata_url=e.resource_metadata_url,
scope_hint=e.scope_hint,
)
with Session(db.engine) as session, session.begin():
service = MCPToolManageService(session=session)
response = service.execute_auth_actions(auth_result)
@@ -1096,7 +1102,7 @@ class ToolMCPAuthApi(Resource):
service = MCPToolManageService(session=session)
service.clear_provider_credentials(provider_id=provider_id, tenant_id=tenant_id)
raise ValueError(f"Failed to refresh token, please try to authorize again: {e}") from e
except MCPError as e:
except (MCPError, ValueError) as e:
with Session(db.engine) as session, session.begin():
service = MCPToolManageService(session=session)
service.clear_provider_credentials(provider_id=provider_id, tenant_id=tenant_id)


@@ -128,7 +128,7 @@ class TenantApi(Resource):
@login_required
@account_initialization_required
@marshal_with(tenant_fields)
def get(self):
def post(self):
if request.path == "/info":
logger.warning("Deprecated URL /info was used.")


@@ -456,12 +456,16 @@ class DocumentListApi(DatasetApiResource):
page = request.args.get("page", default=1, type=int)
limit = request.args.get("limit", default=20, type=int)
search = request.args.get("keyword", default=None, type=str)
status = request.args.get("status", default=None, type=str)
dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first()
if not dataset:
raise NotFound("Dataset not found.")
query = select(Document).filter_by(dataset_id=str(dataset_id), tenant_id=tenant_id)
if status:
query = DocumentService.apply_display_status_filter(query, status)
if search:
search = f"%{search}%"
query = query.where(Document.name.like(search))
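
With `status` wired into both the console and service document-list endpoints, a service-API caller can now filter by display status. A hypothetical request (host, key, and the status value are placeholders):

```python
import requests

resp = requests.get(
    "https://api.dify.example/v1/datasets/<dataset_id>/documents",
    params={"status": "queuing", "page": 1, "limit": 20},
    headers={"Authorization": "Bearer <dataset-api-key>"},
)
resp.raise_for_status()
print([doc["name"] for doc in resp.json()["data"]])
```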


@@ -145,7 +145,8 @@ class WorkflowAppGenerator(BaseAppGenerator):
**extract_external_trace_id_from_args(args),
}
workflow_run_id = str(uuid.uuid4())
# for trigger debug run, not prepare user inputs
# FIXME (Yeuoly): we need to remove the SKIP_PREPARE_USER_INPUTS_KEY from the args
# trigger shouldn't prepare user inputs
if self._should_prepare_user_inputs(args):
inputs = self._prepare_user_inputs(
user_inputs=inputs,


@@ -1,14 +1,10 @@
from typing import TYPE_CHECKING, Any, Optional
from typing import Any
from pydantic import BaseModel, Field
# Import InvokeFrom locally to avoid circular import
from core.app.entities.app_invoke_entities import InvokeFrom
from core.datasource.entities.datasource_entities import DatasourceInvokeFrom
if TYPE_CHECKING:
from core.app.entities.app_invoke_entities import InvokeFrom
class DatasourceRuntime(BaseModel):
"""
@@ -17,7 +13,7 @@ class DatasourceRuntime(BaseModel):
tenant_id: str
datasource_id: str | None = None
invoke_from: Optional["InvokeFrom"] = None
invoke_from: InvokeFrom | None = None
datasource_invoke_from: DatasourceInvokeFrom | None = None
credentials: dict[str, Any] = Field(default_factory=dict)
runtime_parameters: dict[str, Any] = Field(default_factory=dict)


@@ -6,7 +6,8 @@ import secrets
import urllib.parse
from urllib.parse import urljoin, urlparse
from httpx import ConnectError, HTTPStatusError, RequestError
import httpx
from httpx import RequestError
from pydantic import ValidationError
from core.entities.mcp_provider import MCPProviderEntity, MCPSupportGrantType
@@ -20,6 +21,7 @@ from core.mcp.types import (
OAuthClientMetadata,
OAuthMetadata,
OAuthTokens,
ProtectedResourceMetadata,
)
from extensions.ext_redis import redis_client
@@ -39,6 +41,131 @@ def generate_pkce_challenge() -> tuple[str, str]:
return code_verifier, code_challenge
def build_protected_resource_metadata_discovery_urls(
www_auth_resource_metadata_url: str | None, server_url: str
) -> list[str]:
"""
Build a list of URLs to try for Protected Resource Metadata discovery.
Per SEP-985, supports fallback when discovery fails at one URL.
"""
urls = []
# First priority: URL from WWW-Authenticate header
if www_auth_resource_metadata_url:
urls.append(www_auth_resource_metadata_url)
# Fallback: construct from server URL
parsed = urlparse(server_url)
base_url = f"{parsed.scheme}://{parsed.netloc}"
fallback_url = urljoin(base_url, "/.well-known/oauth-protected-resource")
if fallback_url not in urls:
urls.append(fallback_url)
return urls
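# e.g. (hypothetical server, no WWW-Authenticate hint):
#   build_protected_resource_metadata_discovery_urls(None, "https://mcp.example.com/sse")
#   -> ["https://mcp.example.com/.well-known/oauth-protected-resource"]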
def build_oauth_authorization_server_metadata_discovery_urls(auth_server_url: str | None, server_url: str) -> list[str]:
"""
Build a list of URLs to try for OAuth Authorization Server Metadata discovery.
Supports both OAuth 2.0 (RFC 8414) and OpenID Connect discovery.
Per RFC 8414 section 3:
- If issuer has no path: https://example.com/.well-known/oauth-authorization-server
- If issuer has path: https://example.com/.well-known/oauth-authorization-server{path}
Example:
- issuer: https://example.com/oauth
- metadata: https://example.com/.well-known/oauth-authorization-server/oauth
"""
urls = []
base_url = auth_server_url or server_url
parsed = urlparse(base_url)
base = f"{parsed.scheme}://{parsed.netloc}"
path = parsed.path.rstrip("/") # Remove trailing slash
# Try OpenID Connect discovery first (more common)
urls.append(urljoin(base + "/", ".well-known/openid-configuration"))
# OAuth 2.0 Authorization Server Metadata (RFC 8414)
# Include the path component if present in the issuer URL
if path:
urls.append(urljoin(base, f".well-known/oauth-authorization-server{path}"))
else:
urls.append(urljoin(base, ".well-known/oauth-authorization-server"))
return urls
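# e.g. (hypothetical issuer with a path component, per the docstring above):
#   build_oauth_authorization_server_metadata_discovery_urls("https://example.com/oauth", server_url)
#   -> ["https://example.com/.well-known/openid-configuration",
#       "https://example.com/.well-known/oauth-authorization-server/oauth"]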
def discover_protected_resource_metadata(
prm_url: str | None, server_url: str, protocol_version: str | None = None
) -> ProtectedResourceMetadata | None:
"""Discover OAuth 2.0 Protected Resource Metadata (RFC 9470)."""
urls = build_protected_resource_metadata_discovery_urls(prm_url, server_url)
headers = {"MCP-Protocol-Version": protocol_version or LATEST_PROTOCOL_VERSION, "User-Agent": "Dify"}
for url in urls:
try:
response = ssrf_proxy.get(url, headers=headers)
if response.status_code == 200:
return ProtectedResourceMetadata.model_validate(response.json())
elif response.status_code == 404:
continue # Try next URL
except (RequestError, ValidationError):
continue # Try next URL
return None
def discover_oauth_authorization_server_metadata(
auth_server_url: str | None, server_url: str, protocol_version: str | None = None
) -> OAuthMetadata | None:
"""Discover OAuth 2.0 Authorization Server Metadata (RFC 8414)."""
urls = build_oauth_authorization_server_metadata_discovery_urls(auth_server_url, server_url)
headers = {"MCP-Protocol-Version": protocol_version or LATEST_PROTOCOL_VERSION, "User-Agent": "Dify"}
for url in urls:
try:
response = ssrf_proxy.get(url, headers=headers)
if response.status_code == 200:
return OAuthMetadata.model_validate(response.json())
elif response.status_code == 404:
continue # Try next URL
except (RequestError, ValidationError):
continue # Try next URL
return None
def get_effective_scope(
scope_from_www_auth: str | None,
prm: ProtectedResourceMetadata | None,
asm: OAuthMetadata | None,
client_scope: str | None,
) -> str | None:
"""
Determine effective scope using priority-based selection strategy.
Priority order:
1. WWW-Authenticate header scope (server explicit requirement)
2. Protected Resource Metadata scopes
3. OAuth Authorization Server Metadata scopes
4. Client configured scope
"""
if scope_from_www_auth:
return scope_from_www_auth
if prm and prm.scopes_supported:
return " ".join(prm.scopes_supported)
if asm and asm.scopes_supported:
return " ".join(asm.scopes_supported)
return client_scope
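# e.g. (hypothetical values): an explicit WWW-Authenticate scope always wins;
# otherwise metadata-advertised scopes are joined with spaces:
#   get_effective_scope("mcp:read", prm, asm, "openid") -> "mcp:read"
#   get_effective_scope(None, prm_with_scopes_a_b, None, "openid") -> "a b"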
def _create_secure_redis_state(state_data: OAuthCallbackState) -> str:
"""Create a secure state parameter by storing state data in Redis and returning a random state key."""
# Generate a secure random state key
@@ -121,42 +248,36 @@ def check_support_resource_discovery(server_url: str) -> tuple[bool, str]:
return False, ""
-def discover_oauth_metadata(server_url: str, protocol_version: str | None = None) -> OAuthMetadata | None:
-    """Looks up RFC 8414 OAuth 2.0 Authorization Server Metadata."""
-    # First check if the server supports OAuth 2.0 Resource Discovery
-    support_resource_discovery, oauth_discovery_url = check_support_resource_discovery(server_url)
-    if support_resource_discovery:
-        # The oauth_discovery_url is the authorization server base URL
-        # Try OpenID Connect discovery first (more common), then OAuth 2.0
-        urls_to_try = [
-            urljoin(oauth_discovery_url + "/", ".well-known/oauth-authorization-server"),
-            urljoin(oauth_discovery_url + "/", ".well-known/openid-configuration"),
-        ]
-    else:
-        urls_to_try = [urljoin(server_url, "/.well-known/oauth-authorization-server")]
-    headers = {"MCP-Protocol-Version": protocol_version or LATEST_PROTOCOL_VERSION}
-    for url in urls_to_try:
-        try:
-            response = ssrf_proxy.get(url, headers=headers)
-            if response.status_code == 404:
-                continue
-            if not response.is_success:
-                response.raise_for_status()
-            return OAuthMetadata.model_validate(response.json())
-        except (RequestError, HTTPStatusError) as e:
-            if isinstance(e, ConnectError):
-                response = ssrf_proxy.get(url)
-                if response.status_code == 404:
-                    continue  # Try next URL
-                if not response.is_success:
-                    raise ValueError(f"HTTP {response.status_code} trying to load well-known OAuth metadata")
-                return OAuthMetadata.model_validate(response.json())
-            # For other errors, try next URL
-            continue
-    return None  # No metadata found
+def discover_oauth_metadata(
+    server_url: str,
+    resource_metadata_url: str | None = None,
+    scope_hint: str | None = None,
+    protocol_version: str | None = None,
+) -> tuple[OAuthMetadata | None, ProtectedResourceMetadata | None, str | None]:
+    """
+    Discover OAuth metadata using RFC 8414/9470 standards.
+    Args:
+        server_url: The MCP server URL
+        resource_metadata_url: Protected Resource Metadata URL from WWW-Authenticate header
+        scope_hint: Scope hint from WWW-Authenticate header
+        protocol_version: MCP protocol version
+    Returns:
+        (oauth_metadata, protected_resource_metadata, scope_hint)
+    """
+    # Discover Protected Resource Metadata
+    prm = discover_protected_resource_metadata(resource_metadata_url, server_url, protocol_version)
+    # Get authorization server URL from PRM or use server URL
+    auth_server_url = None
+    if prm and prm.authorization_servers:
+        auth_server_url = prm.authorization_servers[0]
+    # Discover OAuth Authorization Server Metadata
+    asm = discover_oauth_authorization_server_metadata(auth_server_url, server_url, protocol_version)
+    return asm, prm, scope_hint
def start_authorization(
@@ -166,6 +287,7 @@ def start_authorization(
redirect_url: str,
provider_id: str,
tenant_id: str,
scope: str | None = None,
) -> tuple[str, str]:
"""Begins the authorization flow with secure Redis state storage."""
response_type = "code"
@@ -175,13 +297,6 @@ def start_authorization(
authorization_url = metadata.authorization_endpoint
if response_type not in metadata.response_types_supported:
raise ValueError(f"Incompatible auth server: does not support response type {response_type}")
if (
not metadata.code_challenge_methods_supported
or code_challenge_method not in metadata.code_challenge_methods_supported
):
raise ValueError(
f"Incompatible auth server: does not support code challenge method {code_challenge_method}"
)
else:
authorization_url = urljoin(server_url, "/authorize")
@@ -210,10 +325,49 @@ def start_authorization(
"state": state_key,
}
# Add scope if provided
if scope:
params["scope"] = scope
authorization_url = f"{authorization_url}?{urllib.parse.urlencode(params)}"
return authorization_url, code_verifier
def _parse_token_response(response: httpx.Response) -> OAuthTokens:
"""
Parse OAuth token response supporting both JSON and form-urlencoded formats.
Per RFC 6749 Section 5.1, the standard format is JSON.
However, some legacy OAuth providers (e.g., early GitHub OAuth Apps) return
application/x-www-form-urlencoded format for backwards compatibility.
Args:
response: The HTTP response from token endpoint
Returns:
Parsed OAuth tokens
Raises:
ValueError: If response cannot be parsed
"""
content_type = response.headers.get("content-type", "").lower()
if "application/json" in content_type:
# Standard OAuth 2.0 JSON response (RFC 6749)
return OAuthTokens.model_validate(response.json())
elif "application/x-www-form-urlencoded" in content_type:
# Legacy form-urlencoded response (non-standard but used by some providers)
token_data = dict(urllib.parse.parse_qsl(response.text))
return OAuthTokens.model_validate(token_data)
else:
# No content-type or unknown - try JSON first, fallback to form-urlencoded
try:
return OAuthTokens.model_validate(response.json())
except (ValidationError, json.JSONDecodeError):
token_data = dict(urllib.parse.parse_qsl(response.text))
return OAuthTokens.model_validate(token_data)
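# e.g. both of these hypothetical responses parse to the same OAuthTokens:
#   _parse_token_response(httpx.Response(200,
#       headers={"content-type": "application/json"},
#       json={"access_token": "abc", "token_type": "bearer"}))
#   _parse_token_response(httpx.Response(200,
#       headers={"content-type": "application/x-www-form-urlencoded"},
#       text="access_token=abc&token_type=bearer"))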
def exchange_authorization(
server_url: str,
metadata: OAuthMetadata | None,
@@ -246,7 +400,7 @@ def exchange_authorization(
response = ssrf_proxy.post(token_url, data=params)
if not response.is_success:
raise ValueError(f"Token exchange failed: HTTP {response.status_code}")
return OAuthTokens.model_validate(response.json())
return _parse_token_response(response)
def refresh_authorization(
@@ -279,7 +433,7 @@ def refresh_authorization(
raise MCPRefreshTokenError(e) from e
if not response.is_success:
raise MCPRefreshTokenError(response.text)
return OAuthTokens.model_validate(response.json())
return _parse_token_response(response)
def client_credentials_flow(
@@ -322,7 +476,7 @@ def client_credentials_flow(
f"Client credentials token request failed: HTTP {response.status_code}, Response: {response.text}"
)
return OAuthTokens.model_validate(response.json())
return _parse_token_response(response)
def register_client(
@@ -352,6 +506,8 @@ def auth(
provider: MCPProviderEntity,
authorization_code: str | None = None,
state_param: str | None = None,
resource_metadata_url: str | None = None,
scope_hint: str | None = None,
) -> AuthResult:
"""
Orchestrates the full auth flow with a server using secure Redis state storage.
@@ -363,18 +519,26 @@ def auth(
provider: The MCP provider entity
authorization_code: Optional authorization code from OAuth callback
state_param: Optional state parameter from OAuth callback
resource_metadata_url: Optional Protected Resource Metadata URL from WWW-Authenticate
scope_hint: Optional scope hint from WWW-Authenticate header
Returns:
AuthResult containing actions to be performed and response data
"""
actions: list[AuthAction] = []
server_url = provider.decrypt_server_url()
server_metadata = discover_oauth_metadata(server_url)
# Discover OAuth metadata using RFC 8414/9470 standards
server_metadata, prm, scope_from_www_auth = discover_oauth_metadata(
server_url, resource_metadata_url, scope_hint, LATEST_PROTOCOL_VERSION
)
client_metadata = provider.client_metadata
provider_id = provider.id
tenant_id = provider.tenant_id
client_information = provider.retrieve_client_information()
redirect_url = provider.redirect_url
credentials = provider.decrypt_credentials()
# Determine grant type based on server metadata
if not server_metadata:
@@ -392,8 +556,8 @@ def auth(
else:
effective_grant_type = MCPSupportGrantType.CLIENT_CREDENTIALS.value
# Get stored credentials
credentials = provider.decrypt_credentials()
# Determine effective scope using priority-based strategy
effective_scope = get_effective_scope(scope_from_www_auth, prm, server_metadata, credentials.get("scope"))
if not client_information:
if authorization_code is not None:
@@ -425,12 +589,11 @@ def auth(
if effective_grant_type == MCPSupportGrantType.CLIENT_CREDENTIALS.value:
# Direct token request without user interaction
try:
scope = credentials.get("scope")
tokens = client_credentials_flow(
server_url,
server_metadata,
client_information,
scope,
effective_scope,
)
# Return action to save tokens and grant type
@@ -526,6 +689,7 @@ def auth(
redirect_url,
provider_id,
tenant_id,
effective_scope,
)
# Return action to save code verifier


@@ -90,7 +90,13 @@ class MCPClientWithAuthRetry(MCPClient):
mcp_service = MCPToolManageService(session=session)
# Perform authentication using the service's auth method
mcp_service.auth_with_actions(self.provider_entity, self.authorization_code)
# Extract OAuth metadata hints from the error
mcp_service.auth_with_actions(
self.provider_entity,
self.authorization_code,
resource_metadata_url=error.resource_metadata_url,
scope_hint=error.scope_hint,
)
# Retrieve new tokens
self.provider_entity = mcp_service.get_provider_entity(


@@ -290,7 +290,7 @@ def sse_client(
except httpx.HTTPStatusError as exc:
if exc.response.status_code == 401:
raise MCPAuthError()
raise MCPAuthError(response=exc.response)
raise MCPConnectionError()
except Exception:
logger.exception("Error connecting to SSE endpoint")


@@ -1,3 +1,10 @@
import re
from typing import TYPE_CHECKING
if TYPE_CHECKING:
import httpx
class MCPError(Exception):
pass
@@ -7,7 +14,49 @@ class MCPConnectionError(MCPError):
class MCPAuthError(MCPConnectionError):
pass
def __init__(
self,
message: str | None = None,
response: "httpx.Response | None" = None,
www_authenticate_header: str | None = None,
):
"""
MCP Authentication Error.
Args:
message: Error message
response: HTTP response object (will extract WWW-Authenticate header if provided)
www_authenticate_header: Pre-extracted WWW-Authenticate header value
"""
super().__init__(message or "Authentication failed")
# Extract OAuth metadata hints from WWW-Authenticate header
if response is not None:
www_authenticate_header = response.headers.get("WWW-Authenticate")
self.resource_metadata_url: str | None = None
self.scope_hint: str | None = None
if www_authenticate_header:
self.resource_metadata_url = self._extract_field(www_authenticate_header, "resource_metadata")
self.scope_hint = self._extract_field(www_authenticate_header, "scope")
@staticmethod
def _extract_field(www_auth: str, field_name: str) -> str | None:
"""Extract a specific field from the WWW-Authenticate header."""
# Pattern to match field="value" or field=value
pattern = rf'{field_name}="([^"]*)"'
match = re.search(pattern, www_auth)
if match:
return match.group(1)
# Try without quotes
pattern = rf"{field_name}=([^\s,]+)"
match = re.search(pattern, www_auth)
if match:
return match.group(1)
return None
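# e.g. parsing a hypothetical challenge header:
#   err = MCPAuthError(www_authenticate_header=(
#       'Bearer resource_metadata="https://mcp.example.com/.well-known/oauth-protected-resource", scope="mcp:read"'))
#   err.resource_metadata_url -> "https://mcp.example.com/.well-known/oauth-protected-resource"
#   err.scope_hint -> "mcp:read"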
class MCPRefreshTokenError(MCPError):


@@ -149,7 +149,7 @@ class BaseSession(
messages when entered.
"""
_response_streams: dict[RequestId, queue.Queue[JSONRPCResponse | JSONRPCError]]
_response_streams: dict[RequestId, queue.Queue[JSONRPCResponse | JSONRPCError | HTTPStatusError]]
_request_id: int
_in_flight: dict[RequestId, RequestResponder[ReceiveRequestT, SendResultT]]
_receive_request_type: type[ReceiveRequestT]
@@ -230,7 +230,7 @@ class BaseSession(
request_id = self._request_id
self._request_id = request_id + 1
response_queue: queue.Queue[JSONRPCResponse | JSONRPCError] = queue.Queue()
response_queue: queue.Queue[JSONRPCResponse | JSONRPCError | HTTPStatusError] = queue.Queue()
self._response_streams[request_id] = response_queue
try:
@@ -261,11 +261,17 @@ class BaseSession(
message="No response received",
)
)
elif isinstance(response_or_error, HTTPStatusError):
# HTTPStatusError from streamable_client with preserved response object
if response_or_error.response.status_code == 401:
raise MCPAuthError(response=response_or_error.response)
else:
raise MCPConnectionError(
ErrorData(code=response_or_error.response.status_code, message=str(response_or_error))
)
elif isinstance(response_or_error, JSONRPCError):
if response_or_error.error.code == 401:
raise MCPAuthError(
ErrorData(code=response_or_error.error.code, message=response_or_error.error.message)
)
raise MCPAuthError(message=response_or_error.error.message)
else:
raise MCPConnectionError(
ErrorData(code=response_or_error.error.code, message=response_or_error.error.message)
@@ -327,13 +333,17 @@ class BaseSession(
if isinstance(message, HTTPStatusError):
response_queue = self._response_streams.get(self._request_id - 1)
if response_queue is not None:
-                response_queue.put(
-                    JSONRPCError(
-                        jsonrpc="2.0",
-                        id=self._request_id - 1,
-                        error=ErrorData(code=message.response.status_code, message=message.args[0]),
-                    )
-                )
+                # For 401 errors, pass the HTTPStatusError directly to preserve response object
+                if message.response.status_code == 401:
+                    response_queue.put(message)
+                else:
+                    response_queue.put(
+                        JSONRPCError(
+                            jsonrpc="2.0",
+                            id=self._request_id - 1,
+                            error=ErrorData(code=message.response.status_code, message=message.args[0]),
+                        )
+                    )
else:
self._handle_incoming(RuntimeError(f"Received response with an unknown request ID: {message}"))
elif isinstance(message, Exception):


@@ -23,7 +23,7 @@ for reference.
not separate types in the schema.
"""
# Client support both version, not support 2025-06-18 yet.
LATEST_PROTOCOL_VERSION = "2025-03-26"
LATEST_PROTOCOL_VERSION = "2025-06-18"
# Server support 2024-11-05 to allow claude to use.
SERVER_LATEST_PROTOCOL_VERSION = "2024-11-05"
DEFAULT_NEGOTIATED_VERSION = "2025-03-26"
@@ -1330,3 +1330,13 @@ class OAuthMetadata(BaseModel):
response_types_supported: list[str]
grant_types_supported: list[str] | None = None
code_challenge_methods_supported: list[str] | None = None
scopes_supported: list[str] | None = None
class ProtectedResourceMetadata(BaseModel):
"""OAuth 2.0 Protected Resource Metadata (RFC 9470)."""
resource: str | None = None
authorization_servers: list[str]
scopes_supported: list[str] | None = None
bearer_methods_supported: list[str] | None = None


@@ -7,8 +7,7 @@ from collections.abc import Generator, Mapping
from typing import Any, Union, cast
from flask import Flask, current_app
from sqlalchemy import Float, and_, or_, select, text
from sqlalchemy import cast as sqlalchemy_cast
from sqlalchemy import and_, or_, select
from core.app.app_config.entities import (
DatasetEntity,
@@ -1023,60 +1022,55 @@ class DatasetRetrieval:
self, sequence: int, condition: str, metadata_name: str, value: Any | None, filters: list
):
if value is None and condition not in ("empty", "not empty"):
return
return filters
json_field = DatasetDocument.doc_metadata[metadata_name].as_string()
key = f"{metadata_name}_{sequence}"
key_value = f"{metadata_name}_{sequence}_value"
match condition:
case "contains":
filters.append(
(text(f"documents.doc_metadata ->> :{key} LIKE :{key_value}")).params(
**{key: metadata_name, key_value: f"%{value}%"}
)
)
filters.append(json_field.like(f"%{value}%"))
case "not contains":
filters.append(
(text(f"documents.doc_metadata ->> :{key} NOT LIKE :{key_value}")).params(
**{key: metadata_name, key_value: f"%{value}%"}
)
)
filters.append(json_field.notlike(f"%{value}%"))
case "start with":
filters.append(
(text(f"documents.doc_metadata ->> :{key} LIKE :{key_value}")).params(
**{key: metadata_name, key_value: f"{value}%"}
)
)
filters.append(json_field.like(f"{value}%"))
case "end with":
filters.append(
(text(f"documents.doc_metadata ->> :{key} LIKE :{key_value}")).params(
**{key: metadata_name, key_value: f"%{value}"}
)
)
filters.append(json_field.like(f"%{value}"))
case "is" | "=":
if isinstance(value, str):
filters.append(DatasetDocument.doc_metadata[metadata_name] == f'"{value}"')
else:
filters.append(sqlalchemy_cast(DatasetDocument.doc_metadata[metadata_name].astext, Float) == value)
filters.append(json_field == value)
elif isinstance(value, (int, float)):
filters.append(DatasetDocument.doc_metadata[metadata_name].as_float() == value)
case "is not" | "":
if isinstance(value, str):
filters.append(DatasetDocument.doc_metadata[metadata_name] != f'"{value}"')
else:
filters.append(sqlalchemy_cast(DatasetDocument.doc_metadata[metadata_name].astext, Float) != value)
filters.append(json_field != value)
elif isinstance(value, (int, float)):
filters.append(DatasetDocument.doc_metadata[metadata_name].as_float() != value)
case "empty":
filters.append(DatasetDocument.doc_metadata[metadata_name].is_(None))
case "not empty":
filters.append(DatasetDocument.doc_metadata[metadata_name].isnot(None))
case "before" | "<":
filters.append(sqlalchemy_cast(DatasetDocument.doc_metadata[metadata_name].astext, Float) < value)
filters.append(DatasetDocument.doc_metadata[metadata_name].as_float() < value)
case "after" | ">":
filters.append(sqlalchemy_cast(DatasetDocument.doc_metadata[metadata_name].astext, Float) > value)
filters.append(DatasetDocument.doc_metadata[metadata_name].as_float() > value)
case "" | "<=":
filters.append(sqlalchemy_cast(DatasetDocument.doc_metadata[metadata_name].astext, Float) <= value)
filters.append(DatasetDocument.doc_metadata[metadata_name].as_float() <= value)
case "" | ">=":
filters.append(sqlalchemy_cast(DatasetDocument.doc_metadata[metadata_name].astext, Float) >= value)
filters.append(DatasetDocument.doc_metadata[metadata_name].as_float() >= value)
case _:
pass
return filters
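# Note: doc_metadata[...].as_string() / .as_float() compile to dialect-appropriate JSON
# accessors (e.g. ->> on PostgreSQL, JSON_EXTRACT/JSON_UNQUOTE on MySQL), which is what
# lets these filters replace the PostgreSQL-only text() SQL on both databases.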
def _fetch_model_config(


@@ -13,6 +13,7 @@ from sqlalchemy.orm import Session
from yarl import URL
import contexts
from configs import dify_config
from core.helper.provider_cache import ToolProviderCredentialsCache
from core.plugin.impl.tool import PluginToolManager
from core.tools.__base.tool_provider import ToolProviderController
@@ -32,7 +33,6 @@ from services.tools.mcp_tools_manage_service import MCPToolManageService
if TYPE_CHECKING:
from core.workflow.nodes.tool.entities import ToolEntity
from configs import dify_config
from core.agent.entities import AgentToolEntity
from core.app.entities.app_invoke_entities import InvokeFrom
from core.helper.module_import_helper import load_single_subclass_from_source
@@ -63,7 +63,6 @@ from services.tools.tools_transform_service import ToolTransformService
if TYPE_CHECKING:
from core.workflow.nodes.tool.entities import ToolEntity
from core.workflow.runtime import VariablePool
logger = logging.getLogger(__name__)
@@ -618,12 +617,28 @@ class ToolManager:
"""
# according to multi credentials, select the one with is_default=True first, then created_at oldest
# for compatibility with old version
sql = """
if dify_config.SQLALCHEMY_DATABASE_URI_SCHEME == "postgresql":
# PostgreSQL: Use DISTINCT ON
sql = """
SELECT DISTINCT ON (tenant_id, provider) id
FROM tool_builtin_providers
WHERE tenant_id = :tenant_id
ORDER BY tenant_id, provider, is_default DESC, created_at DESC
"""
else:
# MySQL: Use window function to achieve same result
sql = """
SELECT id FROM (
SELECT id,
ROW_NUMBER() OVER (
PARTITION BY tenant_id, provider
ORDER BY is_default DESC, created_at DESC
) as rn
FROM tool_builtin_providers
WHERE tenant_id = :tenant_id
) ranked WHERE rn = 1
"""
with Session(db.engine, autoflush=False) as session:
ids = [row.id for row in session.execute(sa.text(sql), {"tenant_id": tenant_id}).all()]
return session.query(BuiltinToolProvider).where(BuiltinToolProvider.id.in_(ids)).all()


@@ -6,8 +6,7 @@ from collections import defaultdict
from collections.abc import Mapping, Sequence
from typing import TYPE_CHECKING, Any, cast
from sqlalchemy import Float, and_, func, or_, select, text
from sqlalchemy import cast as sqlalchemy_cast
from sqlalchemy import and_, func, literal, or_, select
from sqlalchemy.orm import sessionmaker
from core.app.app_config.entities import DatasetRetrieveConfigEntity
@@ -597,79 +596,79 @@ class KnowledgeRetrievalNode(LLMUsageTrackingMixin, Node):
if value is None and condition not in ("empty", "not empty"):
return filters
key = f"{metadata_name}_{sequence}"
key_value = f"{metadata_name}_{sequence}_value"
json_field = Document.doc_metadata[metadata_name].as_string()
match condition:
case "contains":
filters.append(
(text(f"documents.doc_metadata ->> :{key} LIKE :{key_value}")).params(
**{key: metadata_name, key_value: f"%{value}%"}
)
)
filters.append(json_field.like(f"%{value}%"))
case "not contains":
filters.append(
(text(f"documents.doc_metadata ->> :{key} NOT LIKE :{key_value}")).params(
**{key: metadata_name, key_value: f"%{value}%"}
)
)
filters.append(json_field.notlike(f"%{value}%"))
case "start with":
filters.append(
(text(f"documents.doc_metadata ->> :{key} LIKE :{key_value}")).params(
**{key: metadata_name, key_value: f"{value}%"}
)
)
filters.append(json_field.like(f"{value}%"))
case "end with":
filters.append(
(text(f"documents.doc_metadata ->> :{key} LIKE :{key_value}")).params(
**{key: metadata_name, key_value: f"%{value}"}
)
)
filters.append(json_field.like(f"%{value}"))
case "in":
if isinstance(value, str):
escaped_values = [v.strip().replace("'", "''") for v in str(value).split(",")]
escaped_value_str = ",".join(escaped_values)
value_list = [v.strip() for v in value.split(",") if v.strip()]
elif isinstance(value, (list, tuple)):
value_list = [str(v) for v in value if v is not None]
else:
escaped_value_str = str(value)
filters.append(
(text(f"documents.doc_metadata ->> :{key} = any(string_to_array(:{key_value},','))")).params(
**{key: metadata_name, key_value: escaped_value_str}
)
)
value_list = [str(value)] if value is not None else []
if not value_list:
filters.append(literal(False))
else:
filters.append(json_field.in_(value_list))
case "not in":
if isinstance(value, str):
escaped_values = [v.strip().replace("'", "''") for v in str(value).split(",")]
escaped_value_str = ",".join(escaped_values)
value_list = [v.strip() for v in value.split(",") if v.strip()]
elif isinstance(value, (list, tuple)):
value_list = [str(v) for v in value if v is not None]
else:
escaped_value_str = str(value)
filters.append(
(text(f"documents.doc_metadata ->> :{key} != all(string_to_array(:{key_value},','))")).params(
**{key: metadata_name, key_value: escaped_value_str}
)
)
case "=" | "is":
value_list = [str(value)] if value is not None else []
if not value_list:
filters.append(literal(True))
else:
filters.append(json_field.notin_(value_list))
case "is" | "=":
if isinstance(value, str):
filters.append(Document.doc_metadata[metadata_name] == f'"{value}"')
else:
filters.append(sqlalchemy_cast(Document.doc_metadata[metadata_name].astext, Float) == value)
filters.append(json_field == value)
elif isinstance(value, (int, float)):
filters.append(Document.doc_metadata[metadata_name].as_float() == value)
case "is not" | "":
if isinstance(value, str):
filters.append(Document.doc_metadata[metadata_name] != f'"{value}"')
else:
filters.append(sqlalchemy_cast(Document.doc_metadata[metadata_name].astext, Float) != value)
filters.append(json_field != value)
elif isinstance(value, (int, float)):
filters.append(Document.doc_metadata[metadata_name].as_float() != value)
case "empty":
filters.append(Document.doc_metadata[metadata_name].is_(None))
case "not empty":
filters.append(Document.doc_metadata[metadata_name].isnot(None))
case "before" | "<":
filters.append(sqlalchemy_cast(Document.doc_metadata[metadata_name].astext, Float) < value)
filters.append(Document.doc_metadata[metadata_name].as_float() < value)
case "after" | ">":
filters.append(sqlalchemy_cast(Document.doc_metadata[metadata_name].astext, Float) > value)
filters.append(Document.doc_metadata[metadata_name].as_float() > value)
case "" | "<=":
filters.append(sqlalchemy_cast(Document.doc_metadata[metadata_name].astext, Float) <= value)
filters.append(Document.doc_metadata[metadata_name].as_float() <= value)
case "" | ">=":
filters.append(sqlalchemy_cast(Document.doc_metadata[metadata_name].astext, Float) >= value)
filters.append(Document.doc_metadata[metadata_name].as_float() >= value)
case _:
pass
return filters
@classmethod


@@ -3,7 +3,6 @@ from __future__ import annotations
import importlib
import json
from collections.abc import Mapping, Sequence
from collections.abc import Mapping as TypingMapping
from copy import deepcopy
from dataclasses import dataclass
from typing import Any, Protocol
@@ -100,8 +99,8 @@ class ResponseStreamCoordinatorProtocol(Protocol):
class GraphProtocol(Protocol):
"""Structural interface required from graph instances attached to the runtime state."""
nodes: TypingMapping[str, object]
edges: TypingMapping[str, object]
nodes: Mapping[str, object]
edges: Mapping[str, object]
root_node: object
def get_outgoing_edges(self, node_id: str) -> Sequence[object]: ...

api/enums/quota_type.py

@@ -0,0 +1,209 @@
import logging
from dataclasses import dataclass
from enum import StrEnum, auto
logger = logging.getLogger(__name__)
@dataclass
class QuotaCharge:
"""
Result of a quota consumption operation.
Attributes:
success: Whether the quota charge succeeded
charge_id: UUID for refund, or None if failed/disabled
"""
success: bool
charge_id: str | None
_quota_type: "QuotaType"
def refund(self) -> None:
"""
Refund this quota charge.
Safe to call even if charge failed or was disabled.
This method guarantees no exceptions will be raised.
"""
if self.charge_id:
self._quota_type.refund(self.charge_id)
logger.info("Refunded quota for %s with charge_id: %s", self._quota_type.value, self.charge_id)
class QuotaType(StrEnum):
"""
Supported quota types for tenant feature usage.
Add additional types here whenever new billable features become available.
"""
# Trigger execution quota
TRIGGER = auto()
# Workflow execution quota
WORKFLOW = auto()
UNLIMITED = auto()
@property
def billing_key(self) -> str:
"""
Get the billing key for the feature.
"""
match self:
case QuotaType.TRIGGER:
return "trigger_event"
case QuotaType.WORKFLOW:
return "api_rate_limit"
case _:
raise ValueError(f"Invalid quota type: {self}")
def consume(self, tenant_id: str, amount: int = 1) -> QuotaCharge:
"""
Consume quota for the feature.
Args:
tenant_id: The tenant identifier
amount: Amount to consume (default: 1)
Returns:
QuotaCharge with success status and charge_id for refund
Raises:
QuotaExceededError: When quota is insufficient
"""
from configs import dify_config
from services.billing_service import BillingService
from services.errors.app import QuotaExceededError
if not dify_config.BILLING_ENABLED:
logger.debug("Billing disabled, allowing request for %s", tenant_id)
return QuotaCharge(success=True, charge_id=None, _quota_type=self)
logger.info("Consuming %d %s quota for tenant %s", amount, self.value, tenant_id)
if amount <= 0:
raise ValueError("Amount to consume must be greater than 0")
try:
response = BillingService.update_tenant_feature_plan_usage(tenant_id, self.billing_key, delta=amount)
if response.get("result") != "success":
logger.warning(
"Failed to consume quota for %s, feature %s details: %s",
tenant_id,
self.value,
response.get("detail"),
)
raise QuotaExceededError(feature=self.value, tenant_id=tenant_id, required=amount)
charge_id = response.get("history_id")
logger.debug(
"Successfully consumed %d %s quota for tenant %s, charge_id: %s",
amount,
self.value,
tenant_id,
charge_id,
)
return QuotaCharge(success=True, charge_id=charge_id, _quota_type=self)
except QuotaExceededError:
raise
except Exception:
# fail-safe: allow request on billing errors
logger.exception("Failed to consume quota for %s, feature %s", tenant_id, self.value)
return unlimited()
def check(self, tenant_id: str, amount: int = 1) -> bool:
"""
Check if tenant has sufficient quota without consuming.
Args:
tenant_id: The tenant identifier
amount: Amount to check (default: 1)
Returns:
True if quota is sufficient, False otherwise
"""
from configs import dify_config
if not dify_config.BILLING_ENABLED:
return True
if amount <= 0:
raise ValueError("Amount to check must be greater than 0")
try:
remaining = self.get_remaining(tenant_id)
return remaining >= amount if remaining != -1 else True
except Exception:
logger.exception("Failed to check quota for %s, feature %s", tenant_id, self.value)
# fail-safe: allow request on billing errors
return True
def refund(self, charge_id: str) -> None:
"""
Refund quota using charge_id from consume().
This method guarantees no exceptions will be raised.
All errors are logged but silently handled.
Args:
charge_id: The UUID returned from consume()
"""
try:
from configs import dify_config
from services.billing_service import BillingService
if not dify_config.BILLING_ENABLED:
return
if not charge_id:
logger.warning("Cannot refund: charge_id is empty")
return
logger.info("Refunding %s quota with charge_id: %s", self.value, charge_id)
response = BillingService.refund_tenant_feature_plan_usage(charge_id)
if response.get("result") == "success":
logger.debug("Successfully refunded %s quota, charge_id: %s", self.value, charge_id)
else:
logger.warning("Refund failed for charge_id: %s", charge_id)
except Exception:
# Catch ALL exceptions - refund must never fail
logger.exception("Failed to refund quota for charge_id: %s", charge_id)
# Don't raise - refund is best-effort and must be silent
def get_remaining(self, tenant_id: str) -> int:
"""
Get remaining quota for the tenant.
Args:
tenant_id: The tenant identifier
Returns:
Remaining quota amount
"""
from services.billing_service import BillingService
try:
usage_info = BillingService.get_tenant_feature_plan_usage(tenant_id, self.billing_key)
# Assuming the API returns a dict with 'remaining' or 'limit' and 'used'
if isinstance(usage_info, dict):
return usage_info.get("remaining", 0)
# If it returns a simple number, treat it as remaining
return int(usage_info) if usage_info else 0
except Exception:
logger.exception("Failed to get remaining quota for %s, feature %s", tenant_id, self.value)
return -1
def unlimited() -> QuotaCharge:
"""
Return a quota charge for unlimited quota.
This is useful for features that are not subject to quota limits, such as the UNLIMITED quota type.
"""
return QuotaCharge(success=True, charge_id=None, _quota_type=QuotaType.UNLIMITED)
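For context, a minimal usage sketch of the consume-then-refund flow this enables (hypothetical caller; QuotaType.TRIGGER_EVENTS and do_work() are illustrative placeholders, not names from this changeset):

def run_billable_operation(tenant_id: str) -> None:
    # Raises QuotaExceededError when the tenant's quota is exhausted.
    charge = QuotaType.TRIGGER_EVENTS.consume(tenant_id)
    try:
        do_work()  # placeholder for the actual billable operation
    except Exception:
        # refund() is best-effort and never raises, so it is safe in cleanup paths.
        if charge.charge_id:
            QuotaType.TRIGGER_EVENTS.refund(charge.charge_id)
        raise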

View File

@@ -10,7 +10,6 @@ from redis import RedisError
from redis.cache import CacheConfig
from redis.cluster import ClusterNode, RedisCluster
from redis.connection import Connection, SSLConnection
from redis.lock import Lock
from redis.sentinel import Sentinel
from configs import dify_config

View File

@@ -45,7 +45,6 @@ class ClickZettaVolumeConfig(BaseModel):
This method will first try to use CLICKZETTA_VOLUME_* environment variables,
then fall back to CLICKZETTA_* environment variables (for vector DB config).
"""
import os
# Helper function to get environment variable with fallback
def get_env_with_fallback(volume_key: str, fallback_key: str, default: str | None = None) -> str:

View File

@@ -38,6 +38,12 @@ class EmailType(StrEnum):
EMAIL_REGISTER = auto()
EMAIL_REGISTER_WHEN_ACCOUNT_EXIST = auto()
RESET_PASSWORD_WHEN_ACCOUNT_NOT_EXIST_NO_REGISTER = auto()
TRIGGER_EVENTS_LIMIT_SANDBOX = auto()
TRIGGER_EVENTS_LIMIT_PROFESSIONAL = auto()
TRIGGER_EVENTS_USAGE_WARNING_SANDBOX = auto()
TRIGGER_EVENTS_USAGE_WARNING_PROFESSIONAL = auto()
API_RATE_LIMIT_LIMIT_SANDBOX = auto()
API_RATE_LIMIT_WARNING_SANDBOX = auto()
class EmailLanguage(StrEnum):
@@ -445,6 +451,78 @@ def create_default_email_config() -> EmailI18nConfig:
branded_template_path="clean_document_job_mail_template_zh-CN.html",
),
},
EmailType.TRIGGER_EVENTS_LIMIT_SANDBOX: {
EmailLanguage.EN_US: EmailTemplate(
subject="Youve reached your Sandbox Trigger Events limit",
template_path="trigger_events_limit_template_en-US.html",
branded_template_path="without-brand/trigger_events_limit_template_en-US.html",
),
EmailLanguage.ZH_HANS: EmailTemplate(
subject="您的 Sandbox 触发事件额度已用尽",
template_path="trigger_events_limit_template_zh-CN.html",
branded_template_path="without-brand/trigger_events_limit_template_zh-CN.html",
),
},
EmailType.TRIGGER_EVENTS_LIMIT_PROFESSIONAL: {
EmailLanguage.EN_US: EmailTemplate(
subject="Youve reached your monthly Trigger Events limit",
template_path="trigger_events_limit_template_en-US.html",
branded_template_path="without-brand/trigger_events_limit_template_en-US.html",
),
EmailLanguage.ZH_HANS: EmailTemplate(
subject="您的月度触发事件额度已用尽",
template_path="trigger_events_limit_template_zh-CN.html",
branded_template_path="without-brand/trigger_events_limit_template_zh-CN.html",
),
},
EmailType.TRIGGER_EVENTS_USAGE_WARNING_SANDBOX: {
EmailLanguage.EN_US: EmailTemplate(
subject="Youre nearing your Sandbox Trigger Events limit",
template_path="trigger_events_usage_warning_template_en-US.html",
branded_template_path="without-brand/trigger_events_usage_warning_template_en-US.html",
),
EmailLanguage.ZH_HANS: EmailTemplate(
subject="您的 Sandbox 触发事件额度接近上限",
template_path="trigger_events_usage_warning_template_zh-CN.html",
branded_template_path="without-brand/trigger_events_usage_warning_template_zh-CN.html",
),
},
EmailType.TRIGGER_EVENTS_USAGE_WARNING_PROFESSIONAL: {
EmailLanguage.EN_US: EmailTemplate(
subject="Youre nearing your Monthly Trigger Events limit",
template_path="trigger_events_usage_warning_template_en-US.html",
branded_template_path="without-brand/trigger_events_usage_warning_template_en-US.html",
),
EmailLanguage.ZH_HANS: EmailTemplate(
subject="您的月度触发事件额度接近上限",
template_path="trigger_events_usage_warning_template_zh-CN.html",
branded_template_path="without-brand/trigger_events_usage_warning_template_zh-CN.html",
),
},
EmailType.API_RATE_LIMIT_LIMIT_SANDBOX: {
EmailLanguage.EN_US: EmailTemplate(
subject="Youve reached your API Rate Limit",
template_path="api_rate_limit_limit_template_en-US.html",
branded_template_path="without-brand/api_rate_limit_limit_template_en-US.html",
),
EmailLanguage.ZH_HANS: EmailTemplate(
subject="您的 API 速率额度已用尽",
template_path="api_rate_limit_limit_template_zh-CN.html",
branded_template_path="without-brand/api_rate_limit_limit_template_zh-CN.html",
),
},
EmailType.API_RATE_LIMIT_WARNING_SANDBOX: {
EmailLanguage.EN_US: EmailTemplate(
subject="Youre nearing your API Rate Limit",
template_path="api_rate_limit_warning_template_en-US.html",
branded_template_path="without-brand/api_rate_limit_warning_template_en-US.html",
),
EmailLanguage.ZH_HANS: EmailTemplate(
subject="您的 API 速率额度接近上限",
template_path="api_rate_limit_warning_template_zh-CN.html",
branded_template_path="without-brand/api_rate_limit_warning_template_zh-CN.html",
),
},
EmailType.EMAIL_REGISTER: {
EmailLanguage.EN_US: EmailTemplate(
subject="Register Your {application_title} Account",

View File

@@ -177,6 +177,15 @@ def timezone(timezone_string):
raise ValueError(error)
def convert_datetime_to_date(field, target_timezone: str = ":tz"):
if dify_config.DB_TYPE == "postgresql":
return f"DATE(DATE_TRUNC('day', {field} AT TIME ZONE 'UTC' AT TIME ZONE {target_timezone}))"
elif dify_config.DB_TYPE == "mysql":
return f"DATE(CONVERT_TZ({field}, 'UTC', {target_timezone}))"
else:
raise NotImplementedError(f"Unsupported database type: {dify_config.DB_TYPE}")
def generate_string(n):
letters_digits = string.ascii_letters + string.digits
result = ""

View File

@@ -8,6 +8,12 @@ Create Date: 2024-01-07 04:07:34.482983
import sqlalchemy as sa
from alembic import op
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '00bacef91f18'
down_revision = '8ec536f3c800'
@@ -17,17 +23,31 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
batch_op.add_column(sa.Column('description', sa.Text(), nullable=False))
batch_op.drop_column('description_str')
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
batch_op.add_column(sa.Column('description', sa.Text(), nullable=False))
batch_op.drop_column('description_str')
else:
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
batch_op.add_column(sa.Column('description', models.types.LongText(), nullable=False))
batch_op.drop_column('description_str')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
batch_op.add_column(sa.Column('description_str', sa.TEXT(), autoincrement=False, nullable=False))
batch_op.drop_column('description')
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
batch_op.add_column(sa.Column('description_str', sa.TEXT(), autoincrement=False, nullable=False))
batch_op.drop_column('description')
else:
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
batch_op.add_column(sa.Column('description_str', models.types.LongText(), autoincrement=False, nullable=False))
batch_op.drop_column('description')
# ### end Alembic commands ###
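The remaining migrations below repeat one pattern: fetch the bind, branch on dialect, keep the originally generated DDL for PostgreSQL, and substitute MySQL-safe equivalents (models.types.LongText for TEXT, models.types.StringUUID for native UUID, sa.func defaults in place of PG-specific server defaults). A condensed sketch of the shape, with placeholder table and column names:

def upgrade():
    conn = op.get_bind()
    if _is_pg(conn):
        # PostgreSQL: keep the auto-generated DDL as-is.
        op.add_column('some_table', sa.Column('payload', sa.Text(), nullable=False))
    else:
        # MySQL: swap in portable types and drop PG-only constructs such as
        # uuid_generate_v4() server defaults and '::character varying' casts.
        op.add_column('some_table', sa.Column('payload', models.types.LongText(), nullable=False))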

View File

@@ -10,6 +10,10 @@ from alembic import op
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '04c602f5dc9b'
down_revision = '4ff534e1eb11'
@@ -19,15 +23,28 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('tracing_app_configs',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('tracing_provider', sa.String(length=255), nullable=True),
sa.Column('tracing_config', sa.JSON(), nullable=True),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
sa.PrimaryKeyConstraint('id', name='tracing_app_config_pkey')
)
conn = op.get_bind()
if _is_pg(conn):
op.create_table('tracing_app_configs',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('tracing_provider', sa.String(length=255), nullable=True),
sa.Column('tracing_config', sa.JSON(), nullable=True),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
sa.PrimaryKeyConstraint('id', name='tracing_app_config_pkey')
)
else:
op.create_table('tracing_app_configs',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('tracing_provider', sa.String(length=255), nullable=True),
sa.Column('tracing_config', sa.JSON(), nullable=True),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.now(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.now(), nullable=False),
sa.PrimaryKeyConstraint('id', name='tracing_app_config_pkey')
)
# ### end Alembic commands ###

View File

@@ -9,6 +9,12 @@ import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '053da0c1d756'
down_revision = '4829e54d2fee'
@@ -18,16 +24,31 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('tool_conversation_variables',
sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('user_id', postgresql.UUID(), nullable=False),
sa.Column('tenant_id', postgresql.UUID(), nullable=False),
sa.Column('conversation_id', postgresql.UUID(), nullable=False),
sa.Column('variables_str', sa.Text(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='tool_conversation_variables_pkey')
)
conn = op.get_bind()
if _is_pg(conn):
op.create_table('tool_conversation_variables',
sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('user_id', postgresql.UUID(), nullable=False),
sa.Column('tenant_id', postgresql.UUID(), nullable=False),
sa.Column('conversation_id', postgresql.UUID(), nullable=False),
sa.Column('variables_str', sa.Text(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='tool_conversation_variables_pkey')
)
else:
op.create_table('tool_conversation_variables',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('user_id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('conversation_id', models.types.StringUUID(), nullable=False),
sa.Column('variables_str', models.types.LongText(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='tool_conversation_variables_pkey')
)
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
batch_op.add_column(sa.Column('privacy_policy', sa.String(length=255), nullable=True))
batch_op.alter_column('icon',

View File

@@ -9,6 +9,12 @@ import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '114eed84c228'
down_revision = 'c71211c8f604'
@@ -26,7 +32,13 @@ def upgrade():
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('tool_model_invokes', schema=None) as batch_op:
batch_op.add_column(sa.Column('tool_id', postgresql.UUID(), autoincrement=False, nullable=False))
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('tool_model_invokes', schema=None) as batch_op:
batch_op.add_column(sa.Column('tool_id', postgresql.UUID(), autoincrement=False, nullable=False))
else:
with op.batch_alter_table('tool_model_invokes', schema=None) as batch_op:
batch_op.add_column(sa.Column('tool_id', models.types.StringUUID(), autoincrement=False, nullable=False))
# ### end Alembic commands ###

View File

@@ -8,7 +8,11 @@ Create Date: 2024-07-05 14:30:59.472593
import sqlalchemy as sa
from alembic import op
import models as models
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '161cadc1af8d'
@@ -19,9 +23,16 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('dataset_permissions', schema=None) as batch_op:
# Step 1: Add column without NOT NULL constraint
op.add_column('dataset_permissions', sa.Column('tenant_id', sa.UUID(), nullable=False))
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('dataset_permissions', schema=None) as batch_op:
# Step 1: Add column without NOT NULL constraint
op.add_column('dataset_permissions', sa.Column('tenant_id', sa.UUID(), nullable=False))
else:
with op.batch_alter_table('dataset_permissions', schema=None) as batch_op:
# Step 1: Add column without NOT NULL constraint
op.add_column('dataset_permissions', sa.Column('tenant_id', models.types.StringUUID(), nullable=False))
# ### end Alembic commands ###

View File

@@ -9,6 +9,12 @@ import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '16fa53d9faec'
down_revision = '8d2d099ceb74'
@@ -18,44 +24,87 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('provider_models',
sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', postgresql.UUID(), nullable=False),
sa.Column('provider_name', sa.String(length=40), nullable=False),
sa.Column('model_name', sa.String(length=40), nullable=False),
sa.Column('model_type', sa.String(length=40), nullable=False),
sa.Column('encrypted_config', sa.Text(), nullable=True),
sa.Column('is_valid', sa.Boolean(), server_default=sa.text('false'), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='provider_model_pkey'),
sa.UniqueConstraint('tenant_id', 'provider_name', 'model_name', 'model_type', name='unique_provider_model_name')
)
conn = op.get_bind()
if _is_pg(conn):
op.create_table('provider_models',
sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', postgresql.UUID(), nullable=False),
sa.Column('provider_name', sa.String(length=40), nullable=False),
sa.Column('model_name', sa.String(length=40), nullable=False),
sa.Column('model_type', sa.String(length=40), nullable=False),
sa.Column('encrypted_config', sa.Text(), nullable=True),
sa.Column('is_valid', sa.Boolean(), server_default=sa.text('false'), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='provider_model_pkey'),
sa.UniqueConstraint('tenant_id', 'provider_name', 'model_name', 'model_type', name='unique_provider_model_name')
)
else:
op.create_table('provider_models',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('provider_name', sa.String(length=40), nullable=False),
sa.Column('model_name', sa.String(length=40), nullable=False),
sa.Column('model_type', sa.String(length=40), nullable=False),
sa.Column('encrypted_config', models.types.LongText(), nullable=True),
sa.Column('is_valid', sa.Boolean(), server_default=sa.text('false'), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='provider_model_pkey'),
sa.UniqueConstraint('tenant_id', 'provider_name', 'model_name', 'model_type', name='unique_provider_model_name')
)
with op.batch_alter_table('provider_models', schema=None) as batch_op:
batch_op.create_index('provider_model_tenant_id_provider_idx', ['tenant_id', 'provider_name'], unique=False)
op.create_table('tenant_default_models',
sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', postgresql.UUID(), nullable=False),
sa.Column('provider_name', sa.String(length=40), nullable=False),
sa.Column('model_name', sa.String(length=40), nullable=False),
sa.Column('model_type', sa.String(length=40), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='tenant_default_model_pkey')
)
if _is_pg(conn):
op.create_table('tenant_default_models',
sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', postgresql.UUID(), nullable=False),
sa.Column('provider_name', sa.String(length=40), nullable=False),
sa.Column('model_name', sa.String(length=40), nullable=False),
sa.Column('model_type', sa.String(length=40), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='tenant_default_model_pkey')
)
else:
op.create_table('tenant_default_models',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('provider_name', sa.String(length=40), nullable=False),
sa.Column('model_name', sa.String(length=40), nullable=False),
sa.Column('model_type', sa.String(length=40), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='tenant_default_model_pkey')
)
with op.batch_alter_table('tenant_default_models', schema=None) as batch_op:
batch_op.create_index('tenant_default_model_tenant_id_provider_type_idx', ['tenant_id', 'provider_name', 'model_type'], unique=False)
op.create_table('tenant_preferred_model_providers',
sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', postgresql.UUID(), nullable=False),
sa.Column('provider_name', sa.String(length=40), nullable=False),
sa.Column('preferred_provider_type', sa.String(length=40), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='tenant_preferred_model_provider_pkey')
)
if _is_pg(conn):
op.create_table('tenant_preferred_model_providers',
sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', postgresql.UUID(), nullable=False),
sa.Column('provider_name', sa.String(length=40), nullable=False),
sa.Column('preferred_provider_type', sa.String(length=40), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='tenant_preferred_model_provider_pkey')
)
else:
op.create_table('tenant_preferred_model_providers',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('provider_name', sa.String(length=40), nullable=False),
sa.Column('preferred_provider_type', sa.String(length=40), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='tenant_preferred_model_provider_pkey')
)
with op.batch_alter_table('tenant_preferred_model_providers', schema=None) as batch_op:
batch_op.create_index('tenant_preferred_model_provider_tenant_provider_idx', ['tenant_id', 'provider_name'], unique=False)

View File

@@ -8,6 +8,10 @@ Create Date: 2024-04-01 09:48:54.232201
import sqlalchemy as sa
from alembic import op
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '17b5ab037c40'
down_revision = 'a8f9b3c45e4a'
@@ -17,9 +21,14 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('dataset_keyword_tables', schema=None) as batch_op:
batch_op.add_column(sa.Column('data_source_type', sa.String(length=255), server_default=sa.text("'database'::character varying"), nullable=False))
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('dataset_keyword_tables', schema=None) as batch_op:
batch_op.add_column(sa.Column('data_source_type', sa.String(length=255), server_default=sa.text("'database'::character varying"), nullable=False))
else:
with op.batch_alter_table('dataset_keyword_tables', schema=None) as batch_op:
batch_op.add_column(sa.Column('data_source_type', sa.String(length=255), server_default=sa.text("'database'"), nullable=False))
# ### end Alembic commands ###

View File

@@ -10,6 +10,10 @@ from alembic import op
import models as models
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '63a83fcf12ba'
down_revision = '1787fbae959a'
@@ -19,21 +23,39 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('workflow__conversation_variables',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('conversation_id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('data', sa.Text(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', 'conversation_id', name=op.f('workflow__conversation_variables_pkey'))
)
conn = op.get_bind()
if _is_pg(conn):
op.create_table('workflow__conversation_variables',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('conversation_id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('data', sa.Text(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', 'conversation_id', name=op.f('workflow__conversation_variables_pkey'))
)
else:
op.create_table('workflow__conversation_variables',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('conversation_id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('data', models.types.LongText(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', 'conversation_id', name=op.f('workflow__conversation_variables_pkey'))
)
with op.batch_alter_table('workflow__conversation_variables', schema=None) as batch_op:
batch_op.create_index(batch_op.f('workflow__conversation_variables_app_id_idx'), ['app_id'], unique=False)
batch_op.create_index(batch_op.f('workflow__conversation_variables_created_at_idx'), ['created_at'], unique=False)
with op.batch_alter_table('workflows', schema=None) as batch_op:
batch_op.add_column(sa.Column('conversation_variables', sa.Text(), server_default='{}', nullable=False))
if _is_pg(conn):
with op.batch_alter_table('workflows', schema=None) as batch_op:
batch_op.add_column(sa.Column('conversation_variables', sa.Text(), server_default='{}', nullable=False))
else:
with op.batch_alter_table('workflows', schema=None) as batch_op:
batch_op.add_column(sa.Column('conversation_variables', models.types.LongText(), default='{}', nullable=False))
# ### end Alembic commands ###

View File

@@ -10,6 +10,10 @@ from alembic import op
import models as models
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '0251a1c768cc'
down_revision = 'bbadea11becb'
@@ -19,18 +23,35 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('tidb_auth_bindings',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=True),
sa.Column('cluster_id', sa.String(length=255), nullable=False),
sa.Column('cluster_name', sa.String(length=255), nullable=False),
sa.Column('active', sa.Boolean(), server_default=sa.text('false'), nullable=False),
sa.Column('status', sa.String(length=255), server_default=sa.text("'CREATING'::character varying"), nullable=False),
sa.Column('account', sa.String(length=255), nullable=False),
sa.Column('password', sa.String(length=255), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='tidb_auth_bindings_pkey')
)
conn = op.get_bind()
if _is_pg(conn):
op.create_table('tidb_auth_bindings',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=True),
sa.Column('cluster_id', sa.String(length=255), nullable=False),
sa.Column('cluster_name', sa.String(length=255), nullable=False),
sa.Column('active', sa.Boolean(), server_default=sa.text('false'), nullable=False),
sa.Column('status', sa.String(length=255), server_default=sa.text("'CREATING'::character varying"), nullable=False),
sa.Column('account', sa.String(length=255), nullable=False),
sa.Column('password', sa.String(length=255), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='tidb_auth_bindings_pkey')
)
else:
op.create_table('tidb_auth_bindings',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=True),
sa.Column('cluster_id', sa.String(length=255), nullable=False),
sa.Column('cluster_name', sa.String(length=255), nullable=False),
sa.Column('active', sa.Boolean(), server_default=sa.text('false'), nullable=False),
sa.Column('status', sa.String(length=255), server_default=sa.text("'CREATING'"), nullable=False),
sa.Column('account', sa.String(length=255), nullable=False),
sa.Column('password', sa.String(length=255), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='tidb_auth_bindings_pkey')
)
with op.batch_alter_table('tidb_auth_bindings', schema=None) as batch_op:
batch_op.create_index('tidb_auth_bindings_active_idx', ['active'], unique=False)
batch_op.create_index('tidb_auth_bindings_status_idx', ['status'], unique=False)

View File

@@ -10,6 +10,10 @@ from alembic import op
import models as models
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = 'd57ba9ebb251'
down_revision = '675b5321501b'
@@ -22,8 +26,14 @@ def upgrade():
with op.batch_alter_table('messages', schema=None) as batch_op:
batch_op.add_column(sa.Column('parent_message_id', models.types.StringUUID(), nullable=True))
# Set parent_message_id for existing messages to uuid_nil() to distinguish them from new messages with actual parent IDs or NULLs
op.execute('UPDATE messages SET parent_message_id = uuid_nil() WHERE parent_message_id IS NULL')
# Set parent_message_id for existing messages to distinguish them from new messages with actual parent IDs or NULLs
conn = op.get_bind()
if _is_pg(conn):
# PostgreSQL: Use uuid_nil() function
op.execute('UPDATE messages SET parent_message_id = uuid_nil() WHERE parent_message_id IS NULL')
else:
# MySQL: Use a specific UUID value to represent nil
op.execute("UPDATE messages SET parent_message_id = '00000000-0000-0000-0000-000000000000' WHERE parent_message_id IS NULL")
# ### end Alembic commands ###
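Both branches store the same sentinel: uuid_nil() from PostgreSQL's uuid-ossp extension returns the all-zeros UUID, so the MySQL branch simply inlines the literal. A quick illustrative check:

import uuid
# The nil UUID that uuid_nil() yields on PostgreSQL, inlined for MySQL above.
assert str(uuid.UUID(int=0)) == '00000000-0000-0000-0000-000000000000'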

View File

@@ -6,7 +6,11 @@ Create Date: 2024-09-24 09:22:43.570120
"""
from alembic import op
import models as models
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
@@ -19,30 +23,58 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op:
batch_op.alter_column('document_id',
existing_type=sa.UUID(),
nullable=True)
batch_op.alter_column('data_source_type',
existing_type=sa.TEXT(),
nullable=True)
batch_op.alter_column('segment_id',
existing_type=sa.UUID(),
nullable=True)
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op:
batch_op.alter_column('document_id',
existing_type=sa.UUID(),
nullable=True)
batch_op.alter_column('data_source_type',
existing_type=sa.TEXT(),
nullable=True)
batch_op.alter_column('segment_id',
existing_type=sa.UUID(),
nullable=True)
else:
with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op:
batch_op.alter_column('document_id',
existing_type=models.types.StringUUID(),
nullable=True)
batch_op.alter_column('data_source_type',
existing_type=models.types.LongText(),
nullable=True)
batch_op.alter_column('segment_id',
existing_type=models.types.StringUUID(),
nullable=True)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op:
batch_op.alter_column('segment_id',
existing_type=sa.UUID(),
nullable=False)
batch_op.alter_column('data_source_type',
existing_type=sa.TEXT(),
nullable=False)
batch_op.alter_column('document_id',
existing_type=sa.UUID(),
nullable=False)
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op:
batch_op.alter_column('segment_id',
existing_type=sa.UUID(),
nullable=False)
batch_op.alter_column('data_source_type',
existing_type=sa.TEXT(),
nullable=False)
batch_op.alter_column('document_id',
existing_type=sa.UUID(),
nullable=False)
else:
with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op:
batch_op.alter_column('segment_id',
existing_type=models.types.StringUUID(),
nullable=False)
batch_op.alter_column('data_source_type',
existing_type=models.types.LongText(),
nullable=False)
batch_op.alter_column('document_id',
existing_type=models.types.StringUUID(),
nullable=False)
# ### end Alembic commands ###

View File

@@ -10,6 +10,10 @@ import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '33f5fac87f29'
down_revision = '6af6a521a53e'
@@ -19,34 +23,66 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('external_knowledge_apis',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('description', sa.String(length=255), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('settings', sa.Text(), nullable=True),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_by', models.types.StringUUID(), nullable=True),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='external_knowledge_apis_pkey')
)
conn = op.get_bind()
if _is_pg(conn):
op.create_table('external_knowledge_apis',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('description', sa.String(length=255), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('settings', sa.Text(), nullable=True),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_by', models.types.StringUUID(), nullable=True),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='external_knowledge_apis_pkey')
)
else:
op.create_table('external_knowledge_apis',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('description', sa.String(length=255), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('settings', models.types.LongText(), nullable=True),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_by', models.types.StringUUID(), nullable=True),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='external_knowledge_apis_pkey')
)
with op.batch_alter_table('external_knowledge_apis', schema=None) as batch_op:
batch_op.create_index('external_knowledge_apis_name_idx', ['name'], unique=False)
batch_op.create_index('external_knowledge_apis_tenant_idx', ['tenant_id'], unique=False)
op.create_table('external_knowledge_bindings',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('external_knowledge_api_id', models.types.StringUUID(), nullable=False),
sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
sa.Column('external_knowledge_id', sa.Text(), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_by', models.types.StringUUID(), nullable=True),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='external_knowledge_bindings_pkey')
)
if _is_pg(conn):
op.create_table('external_knowledge_bindings',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('external_knowledge_api_id', models.types.StringUUID(), nullable=False),
sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
sa.Column('external_knowledge_id', sa.Text(), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_by', models.types.StringUUID(), nullable=True),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='external_knowledge_bindings_pkey')
)
else:
op.create_table('external_knowledge_bindings',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('external_knowledge_api_id', models.types.StringUUID(), nullable=False),
sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
sa.Column('external_knowledge_id', sa.String(length=512), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_by', models.types.StringUUID(), nullable=True),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='external_knowledge_bindings_pkey')
)
with op.batch_alter_table('external_knowledge_bindings', schema=None) as batch_op:
batch_op.create_index('external_knowledge_bindings_dataset_idx', ['dataset_id'], unique=False)
batch_op.create_index('external_knowledge_bindings_external_knowledge_api_idx', ['external_knowledge_api_id'], unique=False)

View File

@@ -16,6 +16,10 @@ branch_labels = None
depends_on = None
def _is_pg(conn):
return conn.dialect.name == "postgresql"
def upgrade():
def _has_name_or_size_column() -> bool:
# We cannot access the database in offline mode, so assume
@@ -46,14 +50,26 @@ def upgrade():
if _has_name_or_size_column():
return
with op.batch_alter_table("tool_files", schema=None) as batch_op:
batch_op.add_column(sa.Column("name", sa.String(), nullable=True))
batch_op.add_column(sa.Column("size", sa.Integer(), nullable=True))
op.execute("UPDATE tool_files SET name = '' WHERE name IS NULL")
op.execute("UPDATE tool_files SET size = -1 WHERE size IS NULL")
with op.batch_alter_table("tool_files", schema=None) as batch_op:
batch_op.alter_column("name", existing_type=sa.String(), nullable=False)
batch_op.alter_column("size", existing_type=sa.Integer(), nullable=False)
if _is_pg(conn):
# PostgreSQL: Keep original syntax
with op.batch_alter_table("tool_files", schema=None) as batch_op:
batch_op.add_column(sa.Column("name", sa.String(), nullable=True))
batch_op.add_column(sa.Column("size", sa.Integer(), nullable=True))
op.execute("UPDATE tool_files SET name = '' WHERE name IS NULL")
op.execute("UPDATE tool_files SET size = -1 WHERE size IS NULL")
with op.batch_alter_table("tool_files", schema=None) as batch_op:
batch_op.alter_column("name", existing_type=sa.String(), nullable=False)
batch_op.alter_column("size", existing_type=sa.Integer(), nullable=False)
else:
# MySQL: Use compatible syntax
with op.batch_alter_table("tool_files", schema=None) as batch_op:
batch_op.add_column(sa.Column("name", sa.String(length=255), nullable=True))
batch_op.add_column(sa.Column("size", sa.Integer(), nullable=True))
op.execute("UPDATE tool_files SET name = '' WHERE name IS NULL")
op.execute("UPDATE tool_files SET size = -1 WHERE size IS NULL")
with op.batch_alter_table("tool_files", schema=None) as batch_op:
batch_op.alter_column("name", existing_type=sa.String(length=255), nullable=False)
batch_op.alter_column("size", existing_type=sa.Integer(), nullable=False)
# ### end Alembic commands ###
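The substantive difference in this migration is the VARCHAR length: SQLAlchemy compiles sa.String() without a length to a bare VARCHAR, which PostgreSQL accepts but MySQL rejects. An illustrative comparison:

import sqlalchemy as sa

pg_only = sa.Column("name", sa.String(), nullable=True)              # PostgreSQL accepts VARCHAR with no length
portable = sa.Column("name", sa.String(length=255), nullable=True)   # MySQL requires an explicit length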

View File

@@ -10,6 +10,10 @@ import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '43fa78bc3b7d'
down_revision = '0251a1c768cc'
@@ -19,13 +23,25 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('whitelists',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=True),
sa.Column('category', sa.String(length=255), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='whitelists_pkey')
)
conn = op.get_bind()
if _is_pg(conn):
op.create_table('whitelists',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=True),
sa.Column('category', sa.String(length=255), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='whitelists_pkey')
)
else:
op.create_table('whitelists',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=True),
sa.Column('category', sa.String(length=255), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='whitelists_pkey')
)
with op.batch_alter_table('whitelists', schema=None) as batch_op:
batch_op.create_index('whitelists_tenant_idx', ['tenant_id'], unique=False)

View File

@@ -10,6 +10,10 @@ import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '08ec4f75af5e'
down_revision = 'ddcc8bbef391'
@@ -19,14 +23,26 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('account_plugin_permissions',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('install_permission', sa.String(length=16), server_default='everyone', nullable=False),
sa.Column('debug_permission', sa.String(length=16), server_default='noone', nullable=False),
sa.PrimaryKeyConstraint('id', name='account_plugin_permission_pkey'),
sa.UniqueConstraint('tenant_id', name='unique_tenant_plugin')
)
conn = op.get_bind()
if _is_pg(conn):
op.create_table('account_plugin_permissions',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('install_permission', sa.String(length=16), server_default='everyone', nullable=False),
sa.Column('debug_permission', sa.String(length=16), server_default='noone', nullable=False),
sa.PrimaryKeyConstraint('id', name='account_plugin_permission_pkey'),
sa.UniqueConstraint('tenant_id', name='unique_tenant_plugin')
)
else:
op.create_table('account_plugin_permissions',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('install_permission', sa.String(length=16), server_default='everyone', nullable=False),
sa.Column('debug_permission', sa.String(length=16), server_default='noone', nullable=False),
sa.PrimaryKeyConstraint('id', name='account_plugin_permission_pkey'),
sa.UniqueConstraint('tenant_id', name='unique_tenant_plugin')
)
# ### end Alembic commands ###

View File

@@ -10,6 +10,10 @@ import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = 'f4d7ce70a7ca'
down_revision = '93ad8c19c40b'
@@ -19,23 +23,43 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('upload_files', schema=None) as batch_op:
batch_op.alter_column('source_url',
existing_type=sa.VARCHAR(length=255),
type_=sa.TEXT(),
existing_nullable=False,
existing_server_default=sa.text("''::character varying"))
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('upload_files', schema=None) as batch_op:
batch_op.alter_column('source_url',
existing_type=sa.VARCHAR(length=255),
type_=sa.TEXT(),
existing_nullable=False,
existing_server_default=sa.text("''::character varying"))
else:
with op.batch_alter_table('upload_files', schema=None) as batch_op:
batch_op.alter_column('source_url',
existing_type=sa.VARCHAR(length=255),
type_=models.types.LongText(),
existing_nullable=False,
existing_default=sa.text("''"))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('upload_files', schema=None) as batch_op:
batch_op.alter_column('source_url',
existing_type=sa.TEXT(),
type_=sa.VARCHAR(length=255),
existing_nullable=False,
existing_server_default=sa.text("''::character varying"))
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('upload_files', schema=None) as batch_op:
batch_op.alter_column('source_url',
existing_type=sa.TEXT(),
type_=sa.VARCHAR(length=255),
existing_nullable=False,
existing_server_default=sa.text("''::character varying"))
else:
with op.batch_alter_table('upload_files', schema=None) as batch_op:
batch_op.alter_column('source_url',
existing_type=models.types.LongText(),
type_=sa.VARCHAR(length=255),
existing_nullable=False,
existing_default=sa.text("''"))
# ### end Alembic commands ###

View File

@@ -7,6 +7,9 @@ Create Date: 2024-11-01 06:22:27.981398
"""
from alembic import op
import models as models
def _is_pg(conn):
return conn.dialect.name == "postgresql"
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
@@ -19,49 +22,91 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
op.execute("UPDATE recommended_apps SET custom_disclaimer = '' WHERE custom_disclaimer IS NULL")
op.execute("UPDATE sites SET custom_disclaimer = '' WHERE custom_disclaimer IS NULL")
op.execute("UPDATE tool_api_providers SET custom_disclaimer = '' WHERE custom_disclaimer IS NULL")
with op.batch_alter_table('recommended_apps', schema=None) as batch_op:
batch_op.alter_column('custom_disclaimer',
existing_type=sa.VARCHAR(length=255),
type_=sa.TEXT(),
nullable=False)
if _is_pg(conn):
with op.batch_alter_table('recommended_apps', schema=None) as batch_op:
batch_op.alter_column('custom_disclaimer',
existing_type=sa.VARCHAR(length=255),
type_=sa.TEXT(),
nullable=False)
with op.batch_alter_table('sites', schema=None) as batch_op:
batch_op.alter_column('custom_disclaimer',
existing_type=sa.VARCHAR(length=255),
type_=sa.TEXT(),
nullable=False)
with op.batch_alter_table('sites', schema=None) as batch_op:
batch_op.alter_column('custom_disclaimer',
existing_type=sa.VARCHAR(length=255),
type_=sa.TEXT(),
nullable=False)
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
batch_op.alter_column('custom_disclaimer',
existing_type=sa.VARCHAR(length=255),
type_=sa.TEXT(),
nullable=False)
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
batch_op.alter_column('custom_disclaimer',
existing_type=sa.VARCHAR(length=255),
type_=sa.TEXT(),
nullable=False)
else:
with op.batch_alter_table('recommended_apps', schema=None) as batch_op:
batch_op.alter_column('custom_disclaimer',
existing_type=sa.VARCHAR(length=255),
type_=models.types.LongText(),
nullable=False)
with op.batch_alter_table('sites', schema=None) as batch_op:
batch_op.alter_column('custom_disclaimer',
existing_type=sa.VARCHAR(length=255),
type_=models.types.LongText(),
nullable=False)
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
batch_op.alter_column('custom_disclaimer',
existing_type=sa.VARCHAR(length=255),
type_=models.types.LongText(),
nullable=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
batch_op.alter_column('custom_disclaimer',
existing_type=sa.TEXT(),
type_=sa.VARCHAR(length=255),
nullable=True)
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
batch_op.alter_column('custom_disclaimer',
existing_type=sa.TEXT(),
type_=sa.VARCHAR(length=255),
nullable=True)
with op.batch_alter_table('sites', schema=None) as batch_op:
batch_op.alter_column('custom_disclaimer',
existing_type=sa.TEXT(),
type_=sa.VARCHAR(length=255),
nullable=True)
with op.batch_alter_table('sites', schema=None) as batch_op:
batch_op.alter_column('custom_disclaimer',
existing_type=sa.TEXT(),
type_=sa.VARCHAR(length=255),
nullable=True)
with op.batch_alter_table('recommended_apps', schema=None) as batch_op:
batch_op.alter_column('custom_disclaimer',
existing_type=sa.TEXT(),
type_=sa.VARCHAR(length=255),
nullable=True)
with op.batch_alter_table('recommended_apps', schema=None) as batch_op:
batch_op.alter_column('custom_disclaimer',
existing_type=sa.TEXT(),
type_=sa.VARCHAR(length=255),
nullable=True)
else:
with op.batch_alter_table('tool_api_providers', schema=None) as batch_op:
batch_op.alter_column('custom_disclaimer',
existing_type=models.types.LongText(),
type_=sa.VARCHAR(length=255),
nullable=True)
with op.batch_alter_table('sites', schema=None) as batch_op:
batch_op.alter_column('custom_disclaimer',
existing_type=models.types.LongText(),
type_=sa.VARCHAR(length=255),
nullable=True)
with op.batch_alter_table('recommended_apps', schema=None) as batch_op:
batch_op.alter_column('custom_disclaimer',
existing_type=models.types.LongText(),
type_=sa.VARCHAR(length=255),
nullable=True)
# ### end Alembic commands ###

View File

@@ -10,6 +10,10 @@ import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '09a8d1878d9b'
down_revision = 'd07474999927'
@@ -19,55 +23,103 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('conversations', schema=None) as batch_op:
batch_op.alter_column('inputs',
existing_type=postgresql.JSON(astext_type=sa.Text()),
nullable=False)
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('conversations', schema=None) as batch_op:
batch_op.alter_column('inputs',
existing_type=postgresql.JSON(astext_type=sa.Text()),
nullable=False)
with op.batch_alter_table('messages', schema=None) as batch_op:
batch_op.alter_column('inputs',
existing_type=postgresql.JSON(astext_type=sa.Text()),
nullable=False)
with op.batch_alter_table('messages', schema=None) as batch_op:
batch_op.alter_column('inputs',
existing_type=postgresql.JSON(astext_type=sa.Text()),
nullable=False)
else:
with op.batch_alter_table('conversations', schema=None) as batch_op:
batch_op.alter_column('inputs',
existing_type=sa.JSON(),
nullable=False)
with op.batch_alter_table('messages', schema=None) as batch_op:
batch_op.alter_column('inputs',
existing_type=sa.JSON(),
nullable=False)
op.execute("UPDATE workflows SET updated_at = created_at WHERE updated_at IS NULL")
op.execute("UPDATE workflows SET graph = '' WHERE graph IS NULL")
op.execute("UPDATE workflows SET features = '' WHERE features IS NULL")
with op.batch_alter_table('workflows', schema=None) as batch_op:
batch_op.alter_column('graph',
existing_type=sa.TEXT(),
nullable=False)
batch_op.alter_column('features',
existing_type=sa.TEXT(),
nullable=False)
batch_op.alter_column('updated_at',
existing_type=postgresql.TIMESTAMP(),
nullable=False)
if _is_pg(conn):
with op.batch_alter_table('workflows', schema=None) as batch_op:
batch_op.alter_column('graph',
existing_type=sa.TEXT(),
nullable=False)
batch_op.alter_column('features',
existing_type=sa.TEXT(),
nullable=False)
batch_op.alter_column('updated_at',
existing_type=postgresql.TIMESTAMP(),
nullable=False)
else:
with op.batch_alter_table('workflows', schema=None) as batch_op:
batch_op.alter_column('graph',
existing_type=models.types.LongText(),
nullable=False)
batch_op.alter_column('features',
existing_type=models.types.LongText(),
nullable=False)
batch_op.alter_column('updated_at',
existing_type=sa.TIMESTAMP(),
nullable=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('workflows', schema=None) as batch_op:
batch_op.alter_column('updated_at',
existing_type=postgresql.TIMESTAMP(),
nullable=True)
batch_op.alter_column('features',
existing_type=sa.TEXT(),
nullable=True)
batch_op.alter_column('graph',
existing_type=sa.TEXT(),
nullable=True)
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('workflows', schema=None) as batch_op:
batch_op.alter_column('updated_at',
existing_type=postgresql.TIMESTAMP(),
nullable=True)
batch_op.alter_column('features',
existing_type=sa.TEXT(),
nullable=True)
batch_op.alter_column('graph',
existing_type=sa.TEXT(),
nullable=True)
else:
with op.batch_alter_table('workflows', schema=None) as batch_op:
batch_op.alter_column('updated_at',
existing_type=sa.TIMESTAMP(),
nullable=True)
batch_op.alter_column('features',
existing_type=models.types.LongText(),
nullable=True)
batch_op.alter_column('graph',
existing_type=models.types.LongText(),
nullable=True)
with op.batch_alter_table('messages', schema=None) as batch_op:
batch_op.alter_column('inputs',
existing_type=postgresql.JSON(astext_type=sa.Text()),
nullable=True)
if _is_pg(conn):
with op.batch_alter_table('messages', schema=None) as batch_op:
batch_op.alter_column('inputs',
existing_type=postgresql.JSON(astext_type=sa.Text()),
nullable=True)
with op.batch_alter_table('conversations', schema=None) as batch_op:
batch_op.alter_column('inputs',
existing_type=postgresql.JSON(astext_type=sa.Text()),
nullable=True)
with op.batch_alter_table('conversations', schema=None) as batch_op:
batch_op.alter_column('inputs',
existing_type=postgresql.JSON(astext_type=sa.Text()),
nullable=True)
else:
with op.batch_alter_table('messages', schema=None) as batch_op:
batch_op.alter_column('inputs',
existing_type=sa.JSON(),
nullable=True)
with op.batch_alter_table('conversations', schema=None) as batch_op:
batch_op.alter_column('inputs',
existing_type=sa.JSON(),
nullable=True)
# ### end Alembic commands ###

View File

@@ -10,6 +10,10 @@ import models as models
import sqlalchemy as sa
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = 'e19037032219'
down_revision = 'd7999dfa4aae'
@@ -19,27 +23,53 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('child_chunks',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
sa.Column('document_id', models.types.StringUUID(), nullable=False),
sa.Column('segment_id', models.types.StringUUID(), nullable=False),
sa.Column('position', sa.Integer(), nullable=False),
sa.Column('content', sa.Text(), nullable=False),
sa.Column('word_count', sa.Integer(), nullable=False),
sa.Column('index_node_id', sa.String(length=255), nullable=True),
sa.Column('index_node_hash', sa.String(length=255), nullable=True),
sa.Column('type', sa.String(length=255), server_default=sa.text("'automatic'::character varying"), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_by', models.types.StringUUID(), nullable=True),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('indexing_at', sa.DateTime(), nullable=True),
sa.Column('completed_at', sa.DateTime(), nullable=True),
sa.Column('error', sa.Text(), nullable=True),
sa.PrimaryKeyConstraint('id', name='child_chunk_pkey')
)
conn = op.get_bind()
if _is_pg(conn):
op.create_table('child_chunks',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
sa.Column('document_id', models.types.StringUUID(), nullable=False),
sa.Column('segment_id', models.types.StringUUID(), nullable=False),
sa.Column('position', sa.Integer(), nullable=False),
sa.Column('content', sa.Text(), nullable=False),
sa.Column('word_count', sa.Integer(), nullable=False),
sa.Column('index_node_id', sa.String(length=255), nullable=True),
sa.Column('index_node_hash', sa.String(length=255), nullable=True),
sa.Column('type', sa.String(length=255), server_default=sa.text("'automatic'::character varying"), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_by', models.types.StringUUID(), nullable=True),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('indexing_at', sa.DateTime(), nullable=True),
sa.Column('completed_at', sa.DateTime(), nullable=True),
sa.Column('error', sa.Text(), nullable=True),
sa.PrimaryKeyConstraint('id', name='child_chunk_pkey')
)
else:
op.create_table('child_chunks',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
sa.Column('document_id', models.types.StringUUID(), nullable=False),
sa.Column('segment_id', models.types.StringUUID(), nullable=False),
sa.Column('position', sa.Integer(), nullable=False),
sa.Column('content', models.types.LongText(), nullable=False),
sa.Column('word_count', sa.Integer(), nullable=False),
sa.Column('index_node_id', sa.String(length=255), nullable=True),
sa.Column('index_node_hash', sa.String(length=255), nullable=True),
sa.Column('type', sa.String(length=255), server_default=sa.text("'automatic'"), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_by', models.types.StringUUID(), nullable=True),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('indexing_at', sa.DateTime(), nullable=True),
sa.Column('completed_at', sa.DateTime(), nullable=True),
sa.Column('error', models.types.LongText(), nullable=True),
sa.PrimaryKeyConstraint('id', name='child_chunk_pkey')
)
with op.batch_alter_table('child_chunks', schema=None) as batch_op:
batch_op.create_index('child_chunk_dataset_id_idx', ['tenant_id', 'dataset_id', 'document_id', 'segment_id', 'index_node_id'], unique=False)
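
Two portability changes recur in the non-PostgreSQL branch above: the server-side default uuid_generate_v4() disappears (MySQL has no such function, so ids must come from the application layer), and the PostgreSQL-only '::character varying' cast is stripped from string defaults. A hypothetical helper, not part of this change, could centralize the first decision:

import sqlalchemy as sa

def uuid_server_default(conn):
    # uuid_generate_v4() is PostgreSQL-only (uuid-ossp extension);
    # return None elsewhere and let the application supply ids.
    return sa.text('uuid_generate_v4()') if conn.dialect.name == "postgresql" else None

A column would then read: sa.Column('id', models.types.StringUUID(), server_default=uuid_server_default(conn), nullable=False).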

View File

@@ -10,6 +10,10 @@ import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '11b07f66c737'
down_revision = 'cf8f4fc45278'
@@ -25,15 +29,30 @@ def upgrade():
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('tool_providers',
sa.Column('id', sa.UUID(), server_default=sa.text('uuid_generate_v4()'), autoincrement=False, nullable=False),
sa.Column('tenant_id', sa.UUID(), autoincrement=False, nullable=False),
sa.Column('tool_name', sa.VARCHAR(length=40), autoincrement=False, nullable=False),
sa.Column('encrypted_credentials', sa.TEXT(), autoincrement=False, nullable=True),
sa.Column('is_enabled', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=False),
sa.Column('created_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), autoincrement=False, nullable=False),
sa.Column('updated_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), autoincrement=False, nullable=False),
sa.PrimaryKeyConstraint('id', name='tool_provider_pkey'),
sa.UniqueConstraint('tenant_id', 'tool_name', name='unique_tool_provider_tool_name')
)
conn = op.get_bind()
if _is_pg(conn):
op.create_table('tool_providers',
sa.Column('id', sa.UUID(), server_default=sa.text('uuid_generate_v4()'), autoincrement=False, nullable=False),
sa.Column('tenant_id', sa.UUID(), autoincrement=False, nullable=False),
sa.Column('tool_name', sa.VARCHAR(length=40), autoincrement=False, nullable=False),
sa.Column('encrypted_credentials', sa.TEXT(), autoincrement=False, nullable=True),
sa.Column('is_enabled', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=False),
sa.Column('created_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), autoincrement=False, nullable=False),
sa.Column('updated_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), autoincrement=False, nullable=False),
sa.PrimaryKeyConstraint('id', name='tool_provider_pkey'),
sa.UniqueConstraint('tenant_id', 'tool_name', name='unique_tool_provider_tool_name')
)
else:
op.create_table('tool_providers',
sa.Column('id', models.types.StringUUID(), autoincrement=False, nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), autoincrement=False, nullable=False),
sa.Column('tool_name', sa.VARCHAR(length=40), autoincrement=False, nullable=False),
sa.Column('encrypted_credentials', models.types.LongText(), autoincrement=False, nullable=True),
sa.Column('is_enabled', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=False),
sa.Column('created_at', sa.TIMESTAMP(), server_default=sa.func.current_timestamp(), autoincrement=False, nullable=False),
sa.Column('updated_at', sa.TIMESTAMP(), server_default=sa.func.current_timestamp(), autoincrement=False, nullable=False),
sa.PrimaryKeyConstraint('id', name='tool_provider_pkey'),
sa.UniqueConstraint('tenant_id', 'tool_name', name='unique_tool_provider_tool_name')
)
# ### end Alembic commands ###
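
The TEXT-to-LongText swap above guards against MySQL's 64 KB TEXT cap for payloads such as encrypted credentials. The repository's models.types.LongText is presumably a dialect-aware type; one way to build such a type, sketched with SQLAlchemy's variant mechanism:

import sqlalchemy as sa
from sqlalchemy.dialects.mysql import LONGTEXT

# TEXT on every backend, LONGTEXT when the bind is MySQL.
LongTextSketch = sa.Text().with_variant(LONGTEXT(), "mysql")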

View File

@@ -10,6 +10,10 @@ import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '923752d42eb6'
down_revision = 'e19037032219'
@@ -19,15 +23,29 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('dataset_auto_disable_logs',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
sa.Column('document_id', models.types.StringUUID(), nullable=False),
sa.Column('notified', sa.Boolean(), server_default=sa.text('false'), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='dataset_auto_disable_log_pkey')
)
conn = op.get_bind()
if _is_pg(conn):
op.create_table('dataset_auto_disable_logs',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
sa.Column('document_id', models.types.StringUUID(), nullable=False),
sa.Column('notified', sa.Boolean(), server_default=sa.text('false'), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='dataset_auto_disable_log_pkey')
)
else:
op.create_table('dataset_auto_disable_logs',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
sa.Column('document_id', models.types.StringUUID(), nullable=False),
sa.Column('notified', sa.Boolean(), server_default=sa.text('false'), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='dataset_auto_disable_log_pkey')
)
with op.batch_alter_table('dataset_auto_disable_logs', schema=None) as batch_op:
batch_op.create_index('dataset_auto_disable_log_created_atx', ['created_at'], unique=False)
batch_op.create_index('dataset_auto_disable_log_dataset_idx', ['dataset_id'], unique=False)

View File

@@ -10,6 +10,10 @@ import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = 'f051706725cc'
down_revision = 'ee79d9b1c156'
@@ -19,14 +23,27 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('rate_limit_logs',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('subscription_plan', sa.String(length=255), nullable=False),
sa.Column('operation', sa.String(length=255), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='rate_limit_log_pkey')
)
conn = op.get_bind()
if _is_pg(conn):
op.create_table('rate_limit_logs',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('subscription_plan', sa.String(length=255), nullable=False),
sa.Column('operation', sa.String(length=255), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='rate_limit_log_pkey')
)
else:
op.create_table('rate_limit_logs',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('subscription_plan', sa.String(length=255), nullable=False),
sa.Column('operation', sa.String(length=255), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='rate_limit_log_pkey')
)
with op.batch_alter_table('rate_limit_logs', schema=None) as batch_op:
batch_op.create_index('rate_limit_log_operation_idx', ['operation'], unique=False)
batch_op.create_index('rate_limit_log_tenant_idx', ['tenant_id'], unique=False)
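
The timestamp defaults also change shape: the literal CURRENT_TIMESTAMP(0) is the PostgreSQL-flavored spelling, while sa.func.current_timestamp() compiles to a plain CURRENT_TIMESTAMP on either backend, as in:

import sqlalchemy as sa

sa.Column('created_at', sa.DateTime(),
          server_default=sa.func.current_timestamp(),  # portable default
          nullable=False)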

View File

@@ -10,6 +10,10 @@ import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = 'd20049ed0af6'
down_revision = 'f051706725cc'
@@ -19,34 +23,66 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('dataset_metadata_bindings',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
sa.Column('metadata_id', models.types.StringUUID(), nullable=False),
sa.Column('document_id', models.types.StringUUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.PrimaryKeyConstraint('id', name='dataset_metadata_binding_pkey')
)
conn = op.get_bind()
if _is_pg(conn):
op.create_table('dataset_metadata_bindings',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
sa.Column('metadata_id', models.types.StringUUID(), nullable=False),
sa.Column('document_id', models.types.StringUUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.PrimaryKeyConstraint('id', name='dataset_metadata_binding_pkey')
)
else:
op.create_table('dataset_metadata_bindings',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
sa.Column('metadata_id', models.types.StringUUID(), nullable=False),
sa.Column('document_id', models.types.StringUUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.PrimaryKeyConstraint('id', name='dataset_metadata_binding_pkey')
)
with op.batch_alter_table('dataset_metadata_bindings', schema=None) as batch_op:
batch_op.create_index('dataset_metadata_binding_dataset_idx', ['dataset_id'], unique=False)
batch_op.create_index('dataset_metadata_binding_document_idx', ['document_id'], unique=False)
batch_op.create_index('dataset_metadata_binding_metadata_idx', ['metadata_id'], unique=False)
batch_op.create_index('dataset_metadata_binding_tenant_idx', ['tenant_id'], unique=False)
op.create_table('dataset_metadatas',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
sa.Column('type', sa.String(length=255), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.Column('updated_by', models.types.StringUUID(), nullable=True),
sa.PrimaryKeyConstraint('id', name='dataset_metadata_pkey')
)
if _is_pg(conn):
# PostgreSQL: Keep original syntax
op.create_table('dataset_metadatas',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
sa.Column('type', sa.String(length=255), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.Column('updated_by', models.types.StringUUID(), nullable=True),
sa.PrimaryKeyConstraint('id', name='dataset_metadata_pkey')
)
else:
# MySQL: Use compatible syntax
op.create_table('dataset_metadatas',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
sa.Column('type', sa.String(length=255), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.Column('updated_by', models.types.StringUUID(), nullable=True),
sa.PrimaryKeyConstraint('id', name='dataset_metadata_pkey')
)
with op.batch_alter_table('dataset_metadatas', schema=None) as batch_op:
batch_op.create_index('dataset_metadata_dataset_idx', ['dataset_id'], unique=False)
batch_op.create_index('dataset_metadata_tenant_idx', ['tenant_id'], unique=False)
@@ -54,23 +90,31 @@ def upgrade():
with op.batch_alter_table('datasets', schema=None) as batch_op:
batch_op.add_column(sa.Column('built_in_field_enabled', sa.Boolean(), server_default=sa.text('false'), nullable=False))
with op.batch_alter_table('documents', schema=None) as batch_op:
batch_op.alter_column('doc_metadata',
existing_type=postgresql.JSON(astext_type=sa.Text()),
type_=postgresql.JSONB(astext_type=sa.Text()),
existing_nullable=True)
batch_op.create_index('document_metadata_idx', ['doc_metadata'], unique=False, postgresql_using='gin')
if _is_pg(conn):
with op.batch_alter_table('documents', schema=None) as batch_op:
batch_op.alter_column('doc_metadata',
existing_type=postgresql.JSON(astext_type=sa.Text()),
type_=postgresql.JSONB(astext_type=sa.Text()),
existing_nullable=True)
batch_op.create_index('document_metadata_idx', ['doc_metadata'], unique=False, postgresql_using='gin')
else:
pass
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('documents', schema=None) as batch_op:
batch_op.drop_index('document_metadata_idx', postgresql_using='gin')
batch_op.alter_column('doc_metadata',
existing_type=postgresql.JSONB(astext_type=sa.Text()),
type_=postgresql.JSON(astext_type=sa.Text()),
existing_nullable=True)
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('documents', schema=None) as batch_op:
batch_op.drop_index('document_metadata_idx', postgresql_using='gin')
batch_op.alter_column('doc_metadata',
existing_type=postgresql.JSONB(astext_type=sa.Text()),
type_=postgresql.JSON(astext_type=sa.Text()),
existing_nullable=True)
else:
pass
with op.batch_alter_table('datasets', schema=None) as batch_op:
batch_op.drop_column('built_in_field_enabled')
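
JSONB and GIN indexes are PostgreSQL-specific, so the non-PostgreSQL branch is deliberately a no-op: doc_metadata stays a plain JSON column with no index on MySQL. If equivalent lookups ever mattered there, the closest substitute would be a functional index over one JSON path; a sketch with an assumed key name 'source' (not something this change creates):

from alembic import op
import sqlalchemy as sa

op.execute(sa.text(
    "CREATE INDEX document_metadata_source_idx "
    "ON documents ((CAST(doc_metadata->>'$.source' AS CHAR(64))))"
))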

View File

@@ -17,10 +17,23 @@ branch_labels = None
depends_on = None
def _is_pg(conn):
return conn.dialect.name == "postgresql"
def upgrade():
with op.batch_alter_table('workflows', schema=None) as batch_op:
batch_op.add_column(sa.Column('marked_name', sa.String(), nullable=False, server_default=''))
batch_op.add_column(sa.Column('marked_comment', sa.String(), nullable=False, server_default=''))
conn = op.get_bind()
if _is_pg(conn):
# PostgreSQL: Keep original syntax
with op.batch_alter_table('workflows', schema=None) as batch_op:
batch_op.add_column(sa.Column('marked_name', sa.String(), nullable=False, server_default=''))
batch_op.add_column(sa.Column('marked_comment', sa.String(), nullable=False, server_default=''))
else:
# MySQL: Use compatible syntax
with op.batch_alter_table('workflows', schema=None) as batch_op:
batch_op.add_column(sa.Column('marked_name', sa.String(length=255), nullable=False, server_default=''))
batch_op.add_column(sa.Column('marked_comment', sa.String(length=255), nullable=False, server_default=''))
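
The only difference between the two branches above is the length: PostgreSQL accepts an unbounded VARCHAR, so sa.String() compiles there, while MySQL requires an explicit length. Always writing sa.Column('marked_name', sa.String(length=255), nullable=False, server_default='') would have been portable to both.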
def downgrade():

View File

@@ -11,6 +11,10 @@ from alembic import op
import models as models
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = "2adcbe1f5dfb"
down_revision = "d28f2004b072"
@@ -20,24 +24,46 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"workflow_draft_variables",
sa.Column("id", models.types.StringUUID(), server_default=sa.text("uuid_generate_v4()"), nullable=False),
sa.Column("created_at", sa.DateTime(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=False),
sa.Column("updated_at", sa.DateTime(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=False),
sa.Column("app_id", models.types.StringUUID(), nullable=False),
sa.Column("last_edited_at", sa.DateTime(), nullable=True),
sa.Column("node_id", sa.String(length=255), nullable=False),
sa.Column("name", sa.String(length=255), nullable=False),
sa.Column("description", sa.String(length=255), nullable=False),
sa.Column("selector", sa.String(length=255), nullable=False),
sa.Column("value_type", sa.String(length=20), nullable=False),
sa.Column("value", sa.Text(), nullable=False),
sa.Column("visible", sa.Boolean(), nullable=False),
sa.Column("editable", sa.Boolean(), nullable=False),
sa.PrimaryKeyConstraint("id", name=op.f("workflow_draft_variables_pkey")),
sa.UniqueConstraint("app_id", "node_id", "name", name=op.f("workflow_draft_variables_app_id_key")),
)
conn = op.get_bind()
if _is_pg(conn):
op.create_table(
"workflow_draft_variables",
sa.Column("id", models.types.StringUUID(), server_default=sa.text("uuid_generate_v4()"), nullable=False),
sa.Column("created_at", sa.DateTime(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=False),
sa.Column("updated_at", sa.DateTime(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=False),
sa.Column("app_id", models.types.StringUUID(), nullable=False),
sa.Column("last_edited_at", sa.DateTime(), nullable=True),
sa.Column("node_id", sa.String(length=255), nullable=False),
sa.Column("name", sa.String(length=255), nullable=False),
sa.Column("description", sa.String(length=255), nullable=False),
sa.Column("selector", sa.String(length=255), nullable=False),
sa.Column("value_type", sa.String(length=20), nullable=False),
sa.Column("value", sa.Text(), nullable=False),
sa.Column("visible", sa.Boolean(), nullable=False),
sa.Column("editable", sa.Boolean(), nullable=False),
sa.PrimaryKeyConstraint("id", name=op.f("workflow_draft_variables_pkey")),
sa.UniqueConstraint("app_id", "node_id", "name", name=op.f("workflow_draft_variables_app_id_key")),
)
else:
op.create_table(
"workflow_draft_variables",
sa.Column("id", models.types.StringUUID(), nullable=False),
sa.Column("created_at", sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column("updated_at", sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column("app_id", models.types.StringUUID(), nullable=False),
sa.Column("last_edited_at", sa.DateTime(), nullable=True),
sa.Column("node_id", sa.String(length=255), nullable=False),
sa.Column("name", sa.String(length=255), nullable=False),
sa.Column("description", sa.String(length=255), nullable=False),
sa.Column("selector", sa.String(length=255), nullable=False),
sa.Column("value_type", sa.String(length=20), nullable=False),
sa.Column("value", models.types.LongText(), nullable=False),
sa.Column("visible", sa.Boolean(), nullable=False),
sa.Column("editable", sa.Boolean(), nullable=False),
sa.PrimaryKeyConstraint("id", name=op.f("workflow_draft_variables_pkey")),
sa.UniqueConstraint("app_id", "node_id", "name", name=op.f("workflow_draft_variables_app_id_key")),
)
# ### end Alembic commands ###

View File

@@ -7,6 +7,10 @@ Create Date: 2025-06-06 14:24:44.213018
"""
from alembic import op
import models as models
def _is_pg(conn):
return conn.dialect.name == "postgresql"
import sqlalchemy as sa
@@ -18,19 +22,30 @@ depends_on = None
def upgrade():
# `CREATE INDEX CONCURRENTLY` cannot run within a transaction, so use the `autocommit_block`
# context manager to wrap the index creation statement.
# Reference:
#
# - https://www.postgresql.org/docs/current/sql-createindex.html#:~:text=Another%20difference%20is,CREATE%20INDEX%20CONCURRENTLY%20cannot.
# - https://alembic.sqlalchemy.org/en/latest/api/runtime.html#alembic.runtime.migration.MigrationContext.autocommit_block
with op.get_context().autocommit_block():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
# `CREATE INDEX CONCURRENTLY` cannot run within a transaction, so use the `autocommit_block`
# context manager to wrap the index creation statement.
# Reference:
#
# - https://www.postgresql.org/docs/current/sql-createindex.html#:~:text=Another%20difference%20is,CREATE%20INDEX%20CONCURRENTLY%20cannot.
# - https://alembic.sqlalchemy.org/en/latest/api/runtime.html#alembic.runtime.migration.MigrationContext.autocommit_block
with op.get_context().autocommit_block():
op.create_index(
op.f('workflow_node_executions_tenant_id_idx'),
"workflow_node_executions",
['tenant_id', 'workflow_id', 'node_id', sa.literal_column('created_at DESC')],
unique=False,
postgresql_concurrently=True,
)
else:
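# Note: postgresql_concurrently is a PostgreSQL-only dialect option that other
# backends ignore, so this branch issues a plain CREATE INDEX inside the
# normal migration transaction.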
op.create_index(
op.f('workflow_node_executions_tenant_id_idx'),
"workflow_node_executions",
['tenant_id', 'workflow_id', 'node_id', sa.literal_column('created_at DESC')],
unique=False,
postgresql_concurrently=True,
)
with op.batch_alter_table('workflow_draft_variables', schema=None) as batch_op:
@@ -51,8 +66,13 @@ def downgrade():
# Reference:
#
# https://www.postgresql.org/docs/current/sql-createindex.html#:~:text=Another%20difference%20is,CREATE%20INDEX%20CONCURRENTLY%20cannot.
with op.get_context().autocommit_block():
op.drop_index(op.f('workflow_node_executions_tenant_id_idx'), postgresql_concurrently=True)
conn = op.get_bind()
if _is_pg(conn):
with op.get_context().autocommit_block():
op.drop_index(op.f('workflow_node_executions_tenant_id_idx'), postgresql_concurrently=True)
else:
# MySQL cannot drop an index without knowing its table, so pass table_name explicitly.
op.drop_index(op.f('workflow_node_executions_tenant_id_idx'), table_name='workflow_node_executions')
with op.batch_alter_table('workflow_draft_variables', schema=None) as batch_op:
batch_op.drop_column('node_execution_id')

View File

@@ -10,6 +10,10 @@ import models as models
import sqlalchemy as sa
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '58eb7bdb93fe'
down_revision = '0ab65e1cc7fa'
@@ -19,40 +23,80 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('app_mcp_servers',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('description', sa.String(length=255), nullable=False),
sa.Column('server_code', sa.String(length=255), nullable=False),
sa.Column('status', sa.String(length=255), server_default=sa.text("'normal'::character varying"), nullable=False),
sa.Column('parameters', sa.Text(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='app_mcp_server_pkey'),
sa.UniqueConstraint('tenant_id', 'app_id', name='unique_app_mcp_server_tenant_app_id'),
sa.UniqueConstraint('server_code', name='unique_app_mcp_server_server_code')
)
op.create_table('tool_mcp_providers',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('name', sa.String(length=40), nullable=False),
sa.Column('server_identifier', sa.String(length=24), nullable=False),
sa.Column('server_url', sa.Text(), nullable=False),
sa.Column('server_url_hash', sa.String(length=64), nullable=False),
sa.Column('icon', sa.String(length=255), nullable=True),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('user_id', models.types.StringUUID(), nullable=False),
sa.Column('encrypted_credentials', sa.Text(), nullable=True),
sa.Column('authed', sa.Boolean(), nullable=False),
sa.Column('tools', sa.Text(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='tool_mcp_provider_pkey'),
sa.UniqueConstraint('tenant_id', 'name', name='unique_mcp_provider_name'),
sa.UniqueConstraint('tenant_id', 'server_identifier', name='unique_mcp_provider_server_identifier'),
sa.UniqueConstraint('tenant_id', 'server_url_hash', name='unique_mcp_provider_server_url')
)
conn = op.get_bind()
if _is_pg(conn):
op.create_table('app_mcp_servers',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('description', sa.String(length=255), nullable=False),
sa.Column('server_code', sa.String(length=255), nullable=False),
sa.Column('status', sa.String(length=255), server_default=sa.text("'normal'::character varying"), nullable=False),
sa.Column('parameters', sa.Text(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='app_mcp_server_pkey'),
sa.UniqueConstraint('tenant_id', 'app_id', name='unique_app_mcp_server_tenant_app_id'),
sa.UniqueConstraint('server_code', name='unique_app_mcp_server_server_code')
)
else:
op.create_table('app_mcp_servers',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('description', sa.String(length=255), nullable=False),
sa.Column('server_code', sa.String(length=255), nullable=False),
sa.Column('status', sa.String(length=255), server_default=sa.text("'normal'"), nullable=False),
sa.Column('parameters', models.types.LongText(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='app_mcp_server_pkey'),
sa.UniqueConstraint('tenant_id', 'app_id', name='unique_app_mcp_server_tenant_app_id'),
sa.UniqueConstraint('server_code', name='unique_app_mcp_server_server_code')
)
if _is_pg(conn):
op.create_table('tool_mcp_providers',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('name', sa.String(length=40), nullable=False),
sa.Column('server_identifier', sa.String(length=24), nullable=False),
sa.Column('server_url', sa.Text(), nullable=False),
sa.Column('server_url_hash', sa.String(length=64), nullable=False),
sa.Column('icon', sa.String(length=255), nullable=True),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('user_id', models.types.StringUUID(), nullable=False),
sa.Column('encrypted_credentials', sa.Text(), nullable=True),
sa.Column('authed', sa.Boolean(), nullable=False),
sa.Column('tools', sa.Text(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='tool_mcp_provider_pkey'),
sa.UniqueConstraint('tenant_id', 'name', name='unique_mcp_provider_name'),
sa.UniqueConstraint('tenant_id', 'server_identifier', name='unique_mcp_provider_server_identifier'),
sa.UniqueConstraint('tenant_id', 'server_url_hash', name='unique_mcp_provider_server_url')
)
else:
op.create_table('tool_mcp_providers',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('name', sa.String(length=40), nullable=False),
sa.Column('server_identifier', sa.String(length=24), nullable=False),
sa.Column('server_url', models.types.LongText(), nullable=False),
sa.Column('server_url_hash', sa.String(length=64), nullable=False),
sa.Column('icon', sa.String(length=255), nullable=True),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('user_id', models.types.StringUUID(), nullable=False),
sa.Column('encrypted_credentials', models.types.LongText(), nullable=True),
sa.Column('authed', sa.Boolean(), nullable=False),
sa.Column('tools', models.types.LongText(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='tool_mcp_provider_pkey'),
sa.UniqueConstraint('tenant_id', 'name', name='unique_mcp_provider_name'),
sa.UniqueConstraint('tenant_id', 'server_identifier', name='unique_mcp_provider_server_identifier'),
sa.UniqueConstraint('tenant_id', 'server_url_hash', name='unique_mcp_provider_server_url')
)
# ### end Alembic commands ###

View File

@@ -27,6 +27,10 @@ import models as models
import sqlalchemy as sa
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '1c9ba48be8e4'
down_revision = '58eb7bdb93fe'
@@ -40,7 +44,11 @@ def upgrade():
# The ability to specify source timestamp has been removed because its type signature is incompatible with
# PostgreSQL 18's `uuidv7` function. This capability is rarely needed in practice, as IDs can be
# generated and controlled within the application layer.
op.execute(sa.text(r"""
conn = op.get_bind()
if _is_pg(conn):
# PostgreSQL: Create uuidv7 functions
op.execute(sa.text(r"""
/* Main function to generate a uuidv7 value with millisecond precision */
CREATE FUNCTION uuidv7() RETURNS uuid
AS
@@ -63,7 +71,7 @@ COMMENT ON FUNCTION uuidv7 IS
'Generate a uuid-v7 value with a 48-bit timestamp (millisecond precision) and 74 bits of randomness';
"""))
op.execute(sa.text(r"""
op.execute(sa.text(r"""
CREATE FUNCTION uuidv7_boundary(timestamptz) RETURNS uuid
AS
$$
@@ -79,8 +87,15 @@ COMMENT ON FUNCTION uuidv7_boundary(timestamptz) IS
'Generate a non-random uuidv7 with the given timestamp (first 48 bits) and all random bits to 0. As the smallest possible uuidv7 for that timestamp, it may be used as a boundary for partitions.';
"""
))
else:
pass
def downgrade():
op.execute(sa.text("DROP FUNCTION uuidv7"))
op.execute(sa.text("DROP FUNCTION uuidv7_boundary"))
conn = op.get_bind()
if _is_pg(conn):
op.execute(sa.text("DROP FUNCTION uuidv7"))
op.execute(sa.text("DROP FUNCTION uuidv7_boundary"))
else:
pass
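
With no server-side uuidv7() available outside PostgreSQL, v7 identifiers are generated in the application layer instead; a later revision in this set imports libs.uuid_utils.uuidv7 for exactly that. A sketch, assuming the helper returns a standard uuid.UUID:

from libs.uuid_utils import uuidv7

new_id = str(uuidv7())  # time-ordered UUIDv7, usable as a primary key on any backend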

View File

@@ -10,6 +10,10 @@ import models as models
import sqlalchemy as sa
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '71f5020c6470'
down_revision = '1c9ba48be8e4'
@@ -19,31 +23,63 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('tool_oauth_system_clients',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('plugin_id', sa.String(length=512), nullable=False),
sa.Column('provider', sa.String(length=255), nullable=False),
sa.Column('encrypted_oauth_params', sa.Text(), nullable=False),
sa.PrimaryKeyConstraint('id', name='tool_oauth_system_client_pkey'),
sa.UniqueConstraint('plugin_id', 'provider', name='tool_oauth_system_client_plugin_id_provider_idx')
)
op.create_table('tool_oauth_tenant_clients',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('plugin_id', sa.String(length=512), nullable=False),
sa.Column('provider', sa.String(length=255), nullable=False),
sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False),
sa.Column('encrypted_oauth_params', sa.Text(), nullable=False),
sa.PrimaryKeyConstraint('id', name='tool_oauth_tenant_client_pkey'),
sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', name='unique_tool_oauth_tenant_client')
)
conn = op.get_bind()
if _is_pg(conn):
op.create_table('tool_oauth_system_clients',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('plugin_id', sa.String(length=512), nullable=False),
sa.Column('provider', sa.String(length=255), nullable=False),
sa.Column('encrypted_oauth_params', sa.Text(), nullable=False),
sa.PrimaryKeyConstraint('id', name='tool_oauth_system_client_pkey'),
sa.UniqueConstraint('plugin_id', 'provider', name='tool_oauth_system_client_plugin_id_provider_idx')
)
else:
op.create_table('tool_oauth_system_clients',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('plugin_id', sa.String(length=512), nullable=False),
sa.Column('provider', sa.String(length=255), nullable=False),
sa.Column('encrypted_oauth_params', models.types.LongText(), nullable=False),
sa.PrimaryKeyConstraint('id', name='tool_oauth_system_client_pkey'),
sa.UniqueConstraint('plugin_id', 'provider', name='tool_oauth_system_client_plugin_id_provider_idx')
)
if _is_pg(conn):
op.create_table('tool_oauth_tenant_clients',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('plugin_id', sa.String(length=512), nullable=False),
sa.Column('provider', sa.String(length=255), nullable=False),
sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False),
sa.Column('encrypted_oauth_params', sa.Text(), nullable=False),
sa.PrimaryKeyConstraint('id', name='tool_oauth_tenant_client_pkey'),
sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', name='unique_tool_oauth_tenant_client')
)
else:
op.create_table('tool_oauth_tenant_clients',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
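# plugin_id is capped at 255 here (512 on PostgreSQL), presumably to keep the
# composite unique key within MySQL's utf8mb4 index-size limit.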
sa.Column('plugin_id', sa.String(length=255), nullable=False),
sa.Column('provider', sa.String(length=255), nullable=False),
sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False),
sa.Column('encrypted_oauth_params', models.types.LongText(), nullable=False),
sa.PrimaryKeyConstraint('id', name='tool_oauth_tenant_client_pkey'),
sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', name='unique_tool_oauth_tenant_client')
)
with op.batch_alter_table('tool_builtin_providers', schema=None) as batch_op:
batch_op.add_column(sa.Column('name', sa.String(length=256), server_default=sa.text("'API KEY 1'::character varying"), nullable=False))
batch_op.add_column(sa.Column('is_default', sa.Boolean(), server_default=sa.text('false'), nullable=False))
batch_op.add_column(sa.Column('credential_type', sa.String(length=32), server_default=sa.text("'api-key'::character varying"), nullable=False))
batch_op.drop_constraint(batch_op.f('unique_builtin_tool_provider'), type_='unique')
batch_op.create_unique_constraint(batch_op.f('unique_builtin_tool_provider'), ['tenant_id', 'provider', 'name'])
if _is_pg(conn):
with op.batch_alter_table('tool_builtin_providers', schema=None) as batch_op:
batch_op.add_column(sa.Column('name', sa.String(length=256), server_default=sa.text("'API KEY 1'::character varying"), nullable=False))
batch_op.add_column(sa.Column('is_default', sa.Boolean(), server_default=sa.text('false'), nullable=False))
batch_op.add_column(sa.Column('credential_type', sa.String(length=32), server_default=sa.text("'api-key'::character varying"), nullable=False))
batch_op.drop_constraint(batch_op.f('unique_builtin_tool_provider'), type_='unique')
batch_op.create_unique_constraint(batch_op.f('unique_builtin_tool_provider'), ['tenant_id', 'provider', 'name'])
else:
with op.batch_alter_table('tool_builtin_providers', schema=None) as batch_op:
batch_op.add_column(sa.Column('name', sa.String(length=256), server_default=sa.text("'API KEY 1'"), nullable=False))
batch_op.add_column(sa.Column('is_default', sa.Boolean(), server_default=sa.text('false'), nullable=False))
batch_op.add_column(sa.Column('credential_type', sa.String(length=32), server_default=sa.text("'api-key'"), nullable=False))
batch_op.drop_constraint(batch_op.f('unique_builtin_tool_provider'), type_='unique')
batch_op.create_unique_constraint(batch_op.f('unique_builtin_tool_provider'), ['tenant_id', 'provider', 'name'])
# ### end Alembic commands ###

View File

@@ -10,6 +10,10 @@ import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '8bcc02c9bd07'
down_revision = '375fe79ead14'
@@ -19,19 +23,36 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('tenant_plugin_auto_upgrade_strategies',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('strategy_setting', sa.String(length=16), server_default='fix_only', nullable=False),
sa.Column('upgrade_time_of_day', sa.Integer(), nullable=False),
sa.Column('upgrade_mode', sa.String(length=16), server_default='exclude', nullable=False),
sa.Column('exclude_plugins', sa.ARRAY(sa.String(length=255)), nullable=False),
sa.Column('include_plugins', sa.ARRAY(sa.String(length=255)), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='tenant_plugin_auto_upgrade_strategy_pkey'),
sa.UniqueConstraint('tenant_id', name='unique_tenant_plugin_auto_upgrade_strategy')
)
conn = op.get_bind()
if _is_pg(conn):
op.create_table('tenant_plugin_auto_upgrade_strategies',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('strategy_setting', sa.String(length=16), server_default='fix_only', nullable=False),
sa.Column('upgrade_time_of_day', sa.Integer(), nullable=False),
sa.Column('upgrade_mode', sa.String(length=16), server_default='exclude', nullable=False),
sa.Column('exclude_plugins', sa.ARRAY(sa.String(length=255)), nullable=False),
sa.Column('include_plugins', sa.ARRAY(sa.String(length=255)), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='tenant_plugin_auto_upgrade_strategy_pkey'),
sa.UniqueConstraint('tenant_id', name='unique_tenant_plugin_auto_upgrade_strategy')
)
else:
op.create_table('tenant_plugin_auto_upgrade_strategies',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('strategy_setting', sa.String(length=16), server_default='fix_only', nullable=False),
sa.Column('upgrade_time_of_day', sa.Integer(), nullable=False),
sa.Column('upgrade_mode', sa.String(length=16), server_default='exclude', nullable=False),
sa.Column('exclude_plugins', sa.JSON(), nullable=False),
sa.Column('include_plugins', sa.JSON(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='tenant_plugin_auto_upgrade_strategy_pkey'),
sa.UniqueConstraint('tenant_id', name='unique_tenant_plugin_auto_upgrade_strategy')
)
# ### end Alembic commands ###
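
sa.ARRAY is another PostgreSQL-only type, so the MySQL branch stores the plugin lists as JSON; code reading these columns has to accept either shape. A portable declaration could also have been made once with a dialect variant (a sketch, not what this change does):

import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

PluginList = sa.JSON().with_variant(postgresql.ARRAY(sa.String(255)), "postgresql")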

View File

@@ -7,6 +7,10 @@ Create Date: 2025-07-24 14:50:48.779833
"""
from alembic import op
import models as models
def _is_pg(conn):
return conn.dialect.name == "postgresql"
import sqlalchemy as sa
@@ -18,8 +22,18 @@ depends_on = None
def upgrade():
op.execute("ALTER TABLE tidb_auth_bindings ALTER COLUMN status SET DEFAULT 'CREATING'::character varying")
conn = op.get_bind()
if _is_pg(conn):
op.execute("ALTER TABLE tidb_auth_bindings ALTER COLUMN status SET DEFAULT 'CREATING'::character varying")
else:
op.execute("ALTER TABLE tidb_auth_bindings ALTER COLUMN status SET DEFAULT 'CREATING'")
def downgrade():
op.execute("ALTER TABLE tidb_auth_bindings ALTER COLUMN status SET DEFAULT 'CREATING'")
conn = op.get_bind()
if _is_pg(conn):
op.execute("ALTER TABLE tidb_auth_bindings ALTER COLUMN status SET DEFAULT 'CREATING'::character varying")
else:
op.execute("ALTER TABLE tidb_auth_bindings ALTER COLUMN status SET DEFAULT 'CREATING'")

View File

@@ -11,6 +11,10 @@ import models as models
import sqlalchemy as sa
from sqlalchemy.sql import table, column
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = 'e8446f481c1e'
down_revision = 'fa8b0fa6f407'
@@ -20,16 +24,30 @@ depends_on = None
def upgrade():
# Create provider_credentials table
op.create_table('provider_credentials',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('provider_name', sa.String(length=255), nullable=False),
sa.Column('credential_name', sa.String(length=255), nullable=False),
sa.Column('encrypted_config', sa.Text(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='provider_credential_pkey')
)
conn = op.get_bind()
if _is_pg(conn):
op.create_table('provider_credentials',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('provider_name', sa.String(length=255), nullable=False),
sa.Column('credential_name', sa.String(length=255), nullable=False),
sa.Column('encrypted_config', sa.Text(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='provider_credential_pkey')
)
else:
op.create_table('provider_credentials',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('provider_name', sa.String(length=255), nullable=False),
sa.Column('credential_name', sa.String(length=255), nullable=False),
sa.Column('encrypted_config', models.types.LongText(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='provider_credential_pkey')
)
# Create index for provider_credentials
with op.batch_alter_table('provider_credentials', schema=None) as batch_op:
@@ -60,27 +78,49 @@ def upgrade():
def migrate_existing_providers_data():
"""migrate providers table data to provider_credentials"""
conn = op.get_bind()
# Define table structure for data manipulation
providers_table = table('providers',
column('id', models.types.StringUUID()),
column('tenant_id', models.types.StringUUID()),
column('provider_name', sa.String()),
column('encrypted_config', sa.Text()),
column('created_at', sa.DateTime()),
column('updated_at', sa.DateTime()),
column('credential_id', models.types.StringUUID()),
)
if _is_pg(conn):
providers_table = table('providers',
column('id', models.types.StringUUID()),
column('tenant_id', models.types.StringUUID()),
column('provider_name', sa.String()),
column('encrypted_config', sa.Text()),
column('created_at', sa.DateTime()),
column('updated_at', sa.DateTime()),
column('credential_id', models.types.StringUUID()),
)
else:
providers_table = table('providers',
column('id', models.types.StringUUID()),
column('tenant_id', models.types.StringUUID()),
column('provider_name', sa.String()),
column('encrypted_config', models.types.LongText()),
column('created_at', sa.DateTime()),
column('updated_at', sa.DateTime()),
column('credential_id', models.types.StringUUID()),
)
provider_credential_table = table('provider_credentials',
column('id', models.types.StringUUID()),
column('tenant_id', models.types.StringUUID()),
column('provider_name', sa.String()),
column('credential_name', sa.String()),
column('encrypted_config', sa.Text()),
column('created_at', sa.DateTime()),
column('updated_at', sa.DateTime())
)
if _is_pg(conn):
provider_credential_table = table('provider_credentials',
column('id', models.types.StringUUID()),
column('tenant_id', models.types.StringUUID()),
column('provider_name', sa.String()),
column('credential_name', sa.String()),
column('encrypted_config', sa.Text()),
column('created_at', sa.DateTime()),
column('updated_at', sa.DateTime())
)
else:
provider_credential_table = table('provider_credentials',
column('id', models.types.StringUUID()),
column('tenant_id', models.types.StringUUID()),
column('provider_name', sa.String()),
column('credential_name', sa.String()),
column('encrypted_config', models.types.LongText()),
column('created_at', sa.DateTime()),
column('updated_at', sa.DateTime())
)
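
The data migration deliberately uses SQLAlchemy's lightweight table()/column() constructs instead of ORM models, so the revision stays pinned to the schema as of this migration rather than whatever the models look like later. Only the columns the statements touch need declaring; a minimal sketch of the pattern:

import sqlalchemy as sa
from sqlalchemy.sql import table, column
from alembic import op

providers = table('providers',
    column('id', sa.String()),
    column('encrypted_config', sa.Text()),
)
rows = op.get_bind().execute(
    sa.select(providers.c.id, providers.c.encrypted_config)
).fetchall()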
# Get database connection
conn = op.get_bind()
@@ -123,8 +163,14 @@ def migrate_existing_providers_data():
def downgrade():
# Re-add encrypted_config column to providers table
with op.batch_alter_table('providers', schema=None) as batch_op:
batch_op.add_column(sa.Column('encrypted_config', sa.Text(), nullable=True))
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('providers', schema=None) as batch_op:
batch_op.add_column(sa.Column('encrypted_config', sa.Text(), nullable=True))
else:
with op.batch_alter_table('providers', schema=None) as batch_op:
batch_op.add_column(sa.Column('encrypted_config', models.types.LongText(), nullable=True))
# Migrate data back from provider_credentials to providers

View File

@@ -13,6 +13,10 @@ import sqlalchemy as sa
from sqlalchemy.sql import table, column
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '0e154742a5fa'
down_revision = 'e8446f481c1e'
@@ -22,18 +26,34 @@ depends_on = None
def upgrade():
# Create provider_model_credentials table
op.create_table('provider_model_credentials',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('provider_name', sa.String(length=255), nullable=False),
sa.Column('model_name', sa.String(length=255), nullable=False),
sa.Column('model_type', sa.String(length=40), nullable=False),
sa.Column('credential_name', sa.String(length=255), nullable=False),
sa.Column('encrypted_config', sa.Text(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='provider_model_credential_pkey')
)
conn = op.get_bind()
if _is_pg(conn):
op.create_table('provider_model_credentials',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('provider_name', sa.String(length=255), nullable=False),
sa.Column('model_name', sa.String(length=255), nullable=False),
sa.Column('model_type', sa.String(length=40), nullable=False),
sa.Column('credential_name', sa.String(length=255), nullable=False),
sa.Column('encrypted_config', sa.Text(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='provider_model_credential_pkey')
)
else:
op.create_table('provider_model_credentials',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('provider_name', sa.String(length=255), nullable=False),
sa.Column('model_name', sa.String(length=255), nullable=False),
sa.Column('model_type', sa.String(length=40), nullable=False),
sa.Column('credential_name', sa.String(length=255), nullable=False),
sa.Column('encrypted_config', models.types.LongText(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='provider_model_credential_pkey')
)
# Create index for provider_model_credentials
with op.batch_alter_table('provider_model_credentials', schema=None) as batch_op:
@@ -66,31 +86,57 @@ def upgrade():
def migrate_existing_provider_models_data():
"""migrate provider_models table data to provider_model_credentials"""
conn = op.get_bind()
# Define table structure for data manipulation
provider_models_table = table('provider_models',
column('id', models.types.StringUUID()),
column('tenant_id', models.types.StringUUID()),
column('provider_name', sa.String()),
column('model_name', sa.String()),
column('model_type', sa.String()),
column('encrypted_config', sa.Text()),
column('created_at', sa.DateTime()),
column('updated_at', sa.DateTime()),
column('credential_id', models.types.StringUUID()),
)
if _is_pg(conn):
provider_models_table = table('provider_models',
column('id', models.types.StringUUID()),
column('tenant_id', models.types.StringUUID()),
column('provider_name', sa.String()),
column('model_name', sa.String()),
column('model_type', sa.String()),
column('encrypted_config', sa.Text()),
column('created_at', sa.DateTime()),
column('updated_at', sa.DateTime()),
column('credential_id', models.types.StringUUID()),
)
else:
provider_models_table = table('provider_models',
column('id', models.types.StringUUID()),
column('tenant_id', models.types.StringUUID()),
column('provider_name', sa.String()),
column('model_name', sa.String()),
column('model_type', sa.String()),
column('encrypted_config', models.types.LongText()),
column('created_at', sa.DateTime()),
column('updated_at', sa.DateTime()),
column('credential_id', models.types.StringUUID()),
)
provider_model_credentials_table = table('provider_model_credentials',
column('id', models.types.StringUUID()),
column('tenant_id', models.types.StringUUID()),
column('provider_name', sa.String()),
column('model_name', sa.String()),
column('model_type', sa.String()),
column('credential_name', sa.String()),
column('encrypted_config', sa.Text()),
column('created_at', sa.DateTime()),
column('updated_at', sa.DateTime())
)
if _is_pg(conn):
provider_model_credentials_table = table('provider_model_credentials',
column('id', models.types.StringUUID()),
column('tenant_id', models.types.StringUUID()),
column('provider_name', sa.String()),
column('model_name', sa.String()),
column('model_type', sa.String()),
column('credential_name', sa.String()),
column('encrypted_config', sa.Text()),
column('created_at', sa.DateTime()),
column('updated_at', sa.DateTime())
)
else:
provider_model_credentials_table = table('provider_model_credentials',
column('id', models.types.StringUUID()),
column('tenant_id', models.types.StringUUID()),
column('provider_name', sa.String()),
column('model_name', sa.String()),
column('model_type', sa.String()),
column('credential_name', sa.String()),
column('encrypted_config', models.types.LongText()),
column('created_at', sa.DateTime()),
column('updated_at', sa.DateTime())
)
# Get database connection
@@ -137,8 +183,14 @@ def migrate_existing_provider_models_data():
def downgrade():
# Re-add encrypted_config column to provider_models table
with op.batch_alter_table('provider_models', schema=None) as batch_op:
batch_op.add_column(sa.Column('encrypted_config', sa.Text(), nullable=True))
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('provider_models', schema=None) as batch_op:
batch_op.add_column(sa.Column('encrypted_config', sa.Text(), nullable=True))
else:
with op.batch_alter_table('provider_models', schema=None) as batch_op:
batch_op.add_column(sa.Column('encrypted_config', models.types.LongText(), nullable=True))
if not context.is_offline_mode():
# Migrate data back from provider_model_credentials to provider_models
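Every migration touched by this change follows the same pattern: fetch the bind, test the dialect, and branch between the original PostgreSQL DDL (server-side uuidv7() defaults, JSONB, ::varchar casts) and a MySQL-compatible fallback. A minimal, self-contained sketch of that pattern (the table and columns are illustrative, not taken from this diff; the PG branch assumes a uuidv7() SQL function exists in the database, as the real migrations do):

import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects.mysql import LONGTEXT

def _is_pg(conn):
    return conn.dialect.name == "postgresql"

def upgrade():
    conn = op.get_bind()
    if _is_pg(conn):
        # PostgreSQL: ids can be generated server-side.
        op.create_table('example',
            sa.Column('id', sa.String(36), server_default=sa.text('uuidv7()'), nullable=False),
            sa.Column('payload', sa.Text(), nullable=True),
        )
    else:
        # MySQL: no uuidv7() SQL function, so ids come from the application,
        # and TEXT (64 KB cap) is widened to LONGTEXT for large payloads.
        op.create_table('example',
            sa.Column('id', sa.String(36), nullable=False),
            sa.Column('payload', LONGTEXT(), nullable=True),
        )

Branching at op.create_table granularity, rather than per column, is the design choice the diff makes throughout.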


@@ -8,6 +8,11 @@ Create Date: 2025-08-20 17:47:17.015695
from alembic import op
import models as models
import sqlalchemy as sa
from libs.uuid_utils import uuidv7
def _is_pg(conn):
    return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
@@ -19,17 +24,33 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('oauth_provider_apps',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('app_icon', sa.String(length=255), nullable=False),
sa.Column('app_label', sa.JSON(), server_default='{}', nullable=False),
sa.Column('client_id', sa.String(length=255), nullable=False),
sa.Column('client_secret', sa.String(length=255), nullable=False),
sa.Column('redirect_uris', sa.JSON(), server_default='[]', nullable=False),
sa.Column('scope', sa.String(length=255), server_default=sa.text("'read:name read:email read:avatar read:interface_language read:timezone'"), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='oauth_provider_app_pkey')
)
conn = op.get_bind()
if _is_pg(conn):
op.create_table('oauth_provider_apps',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('app_icon', sa.String(length=255), nullable=False),
sa.Column('app_label', sa.JSON(), server_default='{}', nullable=False),
sa.Column('client_id', sa.String(length=255), nullable=False),
sa.Column('client_secret', sa.String(length=255), nullable=False),
sa.Column('redirect_uris', sa.JSON(), server_default='[]', nullable=False),
sa.Column('scope', sa.String(length=255), server_default=sa.text("'read:name read:email read:avatar read:interface_language read:timezone'"), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='oauth_provider_app_pkey')
)
else:
op.create_table('oauth_provider_apps',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('app_icon', sa.String(length=255), nullable=False),
sa.Column('app_label', sa.JSON(), default='{}', nullable=False),
sa.Column('client_id', sa.String(length=255), nullable=False),
sa.Column('client_secret', sa.String(length=255), nullable=False),
sa.Column('redirect_uris', sa.JSON(), default='[]', nullable=False),
sa.Column('scope', sa.String(length=255), server_default=sa.text("'read:name read:email read:avatar read:interface_language read:timezone'"), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='oauth_provider_app_pkey')
)
with op.batch_alter_table('oauth_provider_apps', schema=None) as batch_op:
batch_op.create_index('oauth_provider_app_client_id_idx', ['client_id'], unique=False)
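Two server-default details drive the branch above: uuidv7() and CURRENT_TIMESTAMP(0) are PostgreSQL-side expressions, while sa.func.current_timestamp() renders as standard SQL on both backends; the MySQL branch also moves the JSON defaults to client-side default=, presumably because older MySQL versions reject literal defaults on JSON columns. A throwaway check (not part of any migration) showing what the portable construct compiles to per dialect:

import sqlalchemy as sa
from sqlalchemy.dialects import mysql, postgresql

expr = sa.func.current_timestamp()
print(expr.compile(dialect=postgresql.dialect()))  # CURRENT_TIMESTAMP
print(expr.compile(dialect=mysql.dialect()))       # CURRENT_TIMESTAMP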


@@ -7,6 +7,10 @@ Create Date: 2025-08-29 10:07:54.163626
"""
from alembic import op
import models as models
import sqlalchemy as sa
def _is_pg(conn):
    return conn.dialect.name == "postgresql"
@@ -19,7 +23,12 @@ depends_on = None
def upgrade():
# Add encrypted_headers column to tool_mcp_providers table
op.add_column('tool_mcp_providers', sa.Column('encrypted_headers', sa.Text(), nullable=True))
conn = op.get_bind()
if _is_pg(conn):
op.add_column('tool_mcp_providers', sa.Column('encrypted_headers', sa.Text(), nullable=True))
else:
op.add_column('tool_mcp_providers', sa.Column('encrypted_headers', models.types.LongText(), nullable=True))
def downgrade():

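models.types.LongText is the recurring stand-in for sa.Text on MySQL throughout this change: MySQL's TEXT tops out at 64 KB, while PostgreSQL's TEXT is effectively unbounded. The type's definition is not part of this diff; a plausible sketch of it as a SQLAlchemy TypeDecorator (the project's actual implementation may differ):

import sqlalchemy as sa
from sqlalchemy.dialects.mysql import LONGTEXT

class LongText(sa.types.TypeDecorator):
    """TEXT on PostgreSQL (and others), LONGTEXT on MySQL. Sketch only."""
    impl = sa.Text
    cache_ok = True

    def load_dialect_impl(self, dialect):
        if dialect.name == "mysql":
            return dialect.type_descriptor(LONGTEXT())
        return dialect.type_descriptor(sa.Text())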

@@ -7,6 +7,9 @@ Create Date: 2025-09-11 15:37:17.771298
"""
from alembic import op
import models as models
import sqlalchemy as sa
def _is_pg(conn):
    return conn.dialect.name == "postgresql"
@@ -19,8 +22,14 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('providers', schema=None) as batch_op:
batch_op.add_column(sa.Column('credential_status', sa.String(length=20), server_default=sa.text("'active'::character varying"), nullable=True))
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('providers', schema=None) as batch_op:
batch_op.add_column(sa.Column('credential_status', sa.String(length=20), server_default=sa.text("'active'::character varying"), nullable=True))
else:
with op.batch_alter_table('providers', schema=None) as batch_op:
batch_op.add_column(sa.Column('credential_status', sa.String(length=20), server_default=sa.text("'active'"), nullable=True))
# ### end Alembic commands ###
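The only difference between the two branches above is the server default: 'active'::character varying is a PostgreSQL cast that MySQL rejects, so the fallback drops the cast. The bare quoted literal is in fact valid on both backends, so a single portable form would also have worked here:

# Portable across PostgreSQL and MySQL; only the explicit ::varchar cast is not.
sa.Column('credential_status', sa.String(length=20),
          server_default=sa.text("'active'"), nullable=True)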


@@ -9,6 +9,11 @@ from alembic import op
import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from libs.uuid_utils import uuidv7
def _is_pg(conn):
    return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '68519ad5cd18'
@@ -19,152 +24,314 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('datasource_oauth_params',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('plugin_id', sa.String(length=255), nullable=False),
sa.Column('provider', sa.String(length=255), nullable=False),
sa.Column('system_credentials', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
sa.PrimaryKeyConstraint('id', name='datasource_oauth_config_pkey'),
sa.UniqueConstraint('plugin_id', 'provider', name='datasource_oauth_config_datasource_id_provider_idx')
)
op.create_table('datasource_oauth_tenant_params',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('provider', sa.String(length=255), nullable=False),
sa.Column('plugin_id', sa.String(length=255), nullable=False),
sa.Column('client_params', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
sa.Column('enabled', sa.Boolean(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='datasource_oauth_tenant_config_pkey'),
sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', name='datasource_oauth_tenant_config_unique')
)
op.create_table('datasource_providers',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('provider', sa.String(length=255), nullable=False),
sa.Column('plugin_id', sa.String(length=255), nullable=False),
sa.Column('auth_type', sa.String(length=255), nullable=False),
sa.Column('encrypted_credentials', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
sa.Column('avatar_url', sa.Text(), nullable=True),
sa.Column('is_default', sa.Boolean(), server_default=sa.text('false'), nullable=False),
sa.Column('expires_at', sa.Integer(), server_default='-1', nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='datasource_provider_pkey'),
sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', 'name', name='datasource_provider_unique_name')
)
conn = op.get_bind()
if _is_pg(conn):
op.create_table('datasource_oauth_params',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('plugin_id', sa.String(length=255), nullable=False),
sa.Column('provider', sa.String(length=255), nullable=False),
sa.Column('system_credentials', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
sa.PrimaryKeyConstraint('id', name='datasource_oauth_config_pkey'),
sa.UniqueConstraint('plugin_id', 'provider', name='datasource_oauth_config_datasource_id_provider_idx')
)
else:
op.create_table('datasource_oauth_params',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('plugin_id', sa.String(length=255), nullable=False),
sa.Column('provider', sa.String(length=255), nullable=False),
sa.Column('system_credentials', models.types.AdjustedJSON(astext_type=sa.Text()), nullable=False),
sa.PrimaryKeyConstraint('id', name='datasource_oauth_config_pkey'),
sa.UniqueConstraint('plugin_id', 'provider', name='datasource_oauth_config_datasource_id_provider_idx')
)
if _is_pg(conn):
op.create_table('datasource_oauth_tenant_params',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('provider', sa.String(length=255), nullable=False),
sa.Column('plugin_id', sa.String(length=255), nullable=False),
sa.Column('client_params', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
sa.Column('enabled', sa.Boolean(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='datasource_oauth_tenant_config_pkey'),
sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', name='datasource_oauth_tenant_config_unique')
)
else:
op.create_table('datasource_oauth_tenant_params',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('provider', sa.String(length=255), nullable=False),
sa.Column('plugin_id', sa.String(length=255), nullable=False),
sa.Column('client_params', models.types.AdjustedJSON(astext_type=sa.Text()), nullable=False),
sa.Column('enabled', sa.Boolean(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='datasource_oauth_tenant_config_pkey'),
sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', name='datasource_oauth_tenant_config_unique')
)
if _is_pg(conn):
op.create_table('datasource_providers',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('provider', sa.String(length=255), nullable=False),
sa.Column('plugin_id', sa.String(length=255), nullable=False),
sa.Column('auth_type', sa.String(length=255), nullable=False),
sa.Column('encrypted_credentials', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
sa.Column('avatar_url', sa.Text(), nullable=True),
sa.Column('is_default', sa.Boolean(), server_default=sa.text('false'), nullable=False),
sa.Column('expires_at', sa.Integer(), server_default='-1', nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='datasource_provider_pkey'),
sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', 'name', name='datasource_provider_unique_name')
)
else:
op.create_table('datasource_providers',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('provider', sa.String(length=128), nullable=False),
sa.Column('plugin_id', sa.String(length=255), nullable=False),
sa.Column('auth_type', sa.String(length=255), nullable=False),
sa.Column('encrypted_credentials', models.types.AdjustedJSON(astext_type=sa.Text()), nullable=False),
sa.Column('avatar_url', models.types.LongText(), nullable=True),
sa.Column('is_default', sa.Boolean(), server_default=sa.text('false'), nullable=False),
sa.Column('expires_at', sa.Integer(), server_default='-1', nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='datasource_provider_pkey'),
sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', 'name', name='datasource_provider_unique_name')
)
with op.batch_alter_table('datasource_providers', schema=None) as batch_op:
batch_op.create_index('datasource_provider_auth_type_provider_idx', ['tenant_id', 'plugin_id', 'provider'], unique=False)
op.create_table('document_pipeline_execution_logs',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('pipeline_id', models.types.StringUUID(), nullable=False),
sa.Column('document_id', models.types.StringUUID(), nullable=False),
sa.Column('datasource_type', sa.String(length=255), nullable=False),
sa.Column('datasource_info', sa.Text(), nullable=False),
sa.Column('datasource_node_id', sa.String(length=255), nullable=False),
sa.Column('input_data', sa.JSON(), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=True),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='document_pipeline_execution_log_pkey')
)
if _is_pg(conn):
op.create_table('document_pipeline_execution_logs',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('pipeline_id', models.types.StringUUID(), nullable=False),
sa.Column('document_id', models.types.StringUUID(), nullable=False),
sa.Column('datasource_type', sa.String(length=255), nullable=False),
sa.Column('datasource_info', sa.Text(), nullable=False),
sa.Column('datasource_node_id', sa.String(length=255), nullable=False),
sa.Column('input_data', sa.JSON(), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=True),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='document_pipeline_execution_log_pkey')
)
else:
op.create_table('document_pipeline_execution_logs',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('pipeline_id', models.types.StringUUID(), nullable=False),
sa.Column('document_id', models.types.StringUUID(), nullable=False),
sa.Column('datasource_type', sa.String(length=255), nullable=False),
sa.Column('datasource_info', models.types.LongText(), nullable=False),
sa.Column('datasource_node_id', sa.String(length=255), nullable=False),
sa.Column('input_data', sa.JSON(), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=True),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='document_pipeline_execution_log_pkey')
)
with op.batch_alter_table('document_pipeline_execution_logs', schema=None) as batch_op:
batch_op.create_index('document_pipeline_execution_logs_document_id_idx', ['document_id'], unique=False)
op.create_table('pipeline_built_in_templates',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('description', sa.Text(), nullable=False),
sa.Column('chunk_structure', sa.String(length=255), nullable=False),
sa.Column('icon', sa.JSON(), nullable=False),
sa.Column('yaml_content', sa.Text(), nullable=False),
sa.Column('copyright', sa.String(length=255), nullable=False),
sa.Column('privacy_policy', sa.String(length=255), nullable=False),
sa.Column('position', sa.Integer(), nullable=False),
sa.Column('install_count', sa.Integer(), nullable=False),
sa.Column('language', sa.String(length=255), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.Column('updated_by', models.types.StringUUID(), nullable=True),
sa.PrimaryKeyConstraint('id', name='pipeline_built_in_template_pkey')
)
op.create_table('pipeline_customized_templates',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('description', sa.Text(), nullable=False),
sa.Column('chunk_structure', sa.String(length=255), nullable=False),
sa.Column('icon', sa.JSON(), nullable=False),
sa.Column('position', sa.Integer(), nullable=False),
sa.Column('yaml_content', sa.Text(), nullable=False),
sa.Column('install_count', sa.Integer(), nullable=False),
sa.Column('language', sa.String(length=255), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.Column('updated_by', models.types.StringUUID(), nullable=True),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='pipeline_customized_template_pkey')
)
if _is_pg(conn):
op.create_table('pipeline_built_in_templates',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('description', sa.Text(), nullable=False),
sa.Column('chunk_structure', sa.String(length=255), nullable=False),
sa.Column('icon', sa.JSON(), nullable=False),
sa.Column('yaml_content', sa.Text(), nullable=False),
sa.Column('copyright', sa.String(length=255), nullable=False),
sa.Column('privacy_policy', sa.String(length=255), nullable=False),
sa.Column('position', sa.Integer(), nullable=False),
sa.Column('install_count', sa.Integer(), nullable=False),
sa.Column('language', sa.String(length=255), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.Column('updated_by', models.types.StringUUID(), nullable=True),
sa.PrimaryKeyConstraint('id', name='pipeline_built_in_template_pkey')
)
else:
op.create_table('pipeline_built_in_templates',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('description', models.types.LongText(), nullable=False),
sa.Column('chunk_structure', sa.String(length=255), nullable=False),
sa.Column('icon', sa.JSON(), nullable=False),
sa.Column('yaml_content', models.types.LongText(), nullable=False),
sa.Column('copyright', sa.String(length=255), nullable=False),
sa.Column('privacy_policy', sa.String(length=255), nullable=False),
sa.Column('position', sa.Integer(), nullable=False),
sa.Column('install_count', sa.Integer(), nullable=False),
sa.Column('language', sa.String(length=255), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.Column('updated_by', models.types.StringUUID(), nullable=True),
sa.PrimaryKeyConstraint('id', name='pipeline_built_in_template_pkey')
)
if _is_pg(conn):
op.create_table('pipeline_customized_templates',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('description', sa.Text(), nullable=False),
sa.Column('chunk_structure', sa.String(length=255), nullable=False),
sa.Column('icon', sa.JSON(), nullable=False),
sa.Column('position', sa.Integer(), nullable=False),
sa.Column('yaml_content', sa.Text(), nullable=False),
sa.Column('install_count', sa.Integer(), nullable=False),
sa.Column('language', sa.String(length=255), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.Column('updated_by', models.types.StringUUID(), nullable=True),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='pipeline_customized_template_pkey')
)
else:
# MySQL: Use compatible syntax
op.create_table('pipeline_customized_templates',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('description', models.types.LongText(), nullable=False),
sa.Column('chunk_structure', sa.String(length=255), nullable=False),
sa.Column('icon', sa.JSON(), nullable=False),
sa.Column('position', sa.Integer(), nullable=False),
sa.Column('yaml_content', models.types.LongText(), nullable=False),
sa.Column('install_count', sa.Integer(), nullable=False),
sa.Column('language', sa.String(length=255), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.Column('updated_by', models.types.StringUUID(), nullable=True),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='pipeline_customized_template_pkey')
)
with op.batch_alter_table('pipeline_customized_templates', schema=None) as batch_op:
batch_op.create_index('pipeline_customized_template_tenant_idx', ['tenant_id'], unique=False)
op.create_table('pipeline_recommended_plugins',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('plugin_id', sa.Text(), nullable=False),
sa.Column('provider_name', sa.Text(), nullable=False),
sa.Column('position', sa.Integer(), nullable=False),
sa.Column('active', sa.Boolean(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='pipeline_recommended_plugin_pkey')
)
op.create_table('pipelines',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('description', sa.Text(), server_default=sa.text("''::character varying"), nullable=False),
sa.Column('workflow_id', models.types.StringUUID(), nullable=True),
sa.Column('is_public', sa.Boolean(), server_default=sa.text('false'), nullable=False),
sa.Column('is_published', sa.Boolean(), server_default=sa.text('false'), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=True),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_by', models.types.StringUUID(), nullable=True),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='pipeline_pkey')
)
op.create_table('workflow_draft_variable_files',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False, comment='The tenant to which the WorkflowDraftVariableFile belongs, referencing Tenant.id'),
sa.Column('app_id', models.types.StringUUID(), nullable=False, comment='The application to which the WorkflowDraftVariableFile belongs, referencing App.id'),
sa.Column('user_id', models.types.StringUUID(), nullable=False, comment='The owner of the WorkflowDraftVariableFile, referencing Account.id'),
sa.Column('upload_file_id', models.types.StringUUID(), nullable=False, comment='Reference to UploadFile containing the large variable data'),
sa.Column('size', sa.BigInteger(), nullable=False, comment='Size of the original variable content in bytes'),
sa.Column('length', sa.Integer(), nullable=True, comment='Length of the original variable content. For array and array-like types, this represents the number of elements. For object types, it indicates the number of keys. For other types, the value is NULL.'),
sa.Column('value_type', sa.String(20), nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('workflow_draft_variable_files_pkey'))
)
op.create_table('workflow_node_execution_offload',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('node_execution_id', models.types.StringUUID(), nullable=True),
sa.Column('type', sa.String(20), nullable=False),
sa.Column('file_id', models.types.StringUUID(), nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('workflow_node_execution_offload_pkey')),
sa.UniqueConstraint('node_execution_id', 'type', name=op.f('workflow_node_execution_offload_node_execution_id_key'))
)
with op.batch_alter_table('datasets', schema=None) as batch_op:
batch_op.add_column(sa.Column('keyword_number', sa.Integer(), server_default=sa.text('10'), nullable=True))
batch_op.add_column(sa.Column('icon_info', postgresql.JSONB(astext_type=sa.Text()), nullable=True))
batch_op.add_column(sa.Column('runtime_mode', sa.String(length=255), server_default=sa.text("'general'::character varying"), nullable=True))
batch_op.add_column(sa.Column('pipeline_id', models.types.StringUUID(), nullable=True))
batch_op.add_column(sa.Column('chunk_structure', sa.String(length=255), nullable=True))
batch_op.add_column(sa.Column('enable_api', sa.Boolean(), server_default=sa.text('true'), nullable=False))
if _is_pg(conn):
op.create_table('pipeline_recommended_plugins',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('plugin_id', sa.Text(), nullable=False),
sa.Column('provider_name', sa.Text(), nullable=False),
sa.Column('position', sa.Integer(), nullable=False),
sa.Column('active', sa.Boolean(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='pipeline_recommended_plugin_pkey')
)
else:
op.create_table('pipeline_recommended_plugins',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('plugin_id', models.types.LongText(), nullable=False),
sa.Column('provider_name', models.types.LongText(), nullable=False),
sa.Column('position', sa.Integer(), nullable=False),
sa.Column('active', sa.Boolean(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='pipeline_recommended_plugin_pkey')
)
if _is_pg(conn):
op.create_table('pipelines',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('description', sa.Text(), server_default=sa.text("''::character varying"), nullable=False),
sa.Column('workflow_id', models.types.StringUUID(), nullable=True),
sa.Column('is_public', sa.Boolean(), server_default=sa.text('false'), nullable=False),
sa.Column('is_published', sa.Boolean(), server_default=sa.text('false'), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=True),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_by', models.types.StringUUID(), nullable=True),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='pipeline_pkey')
)
else:
op.create_table('pipelines',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('description', models.types.LongText(), default=sa.text("''"), nullable=False),
sa.Column('workflow_id', models.types.StringUUID(), nullable=True),
sa.Column('is_public', sa.Boolean(), server_default=sa.text('false'), nullable=False),
sa.Column('is_published', sa.Boolean(), server_default=sa.text('false'), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=True),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_by', models.types.StringUUID(), nullable=True),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='pipeline_pkey')
)
if _is_pg(conn):
op.create_table('workflow_draft_variable_files',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False, comment='The tenant to which the WorkflowDraftVariableFile belongs, referencing Tenant.id'),
sa.Column('app_id', models.types.StringUUID(), nullable=False, comment='The application to which the WorkflowDraftVariableFile belongs, referencing App.id'),
sa.Column('user_id', models.types.StringUUID(), nullable=False, comment='The owner of the WorkflowDraftVariableFile, referencing Account.id'),
sa.Column('upload_file_id', models.types.StringUUID(), nullable=False, comment='Reference to UploadFile containing the large variable data'),
sa.Column('size', sa.BigInteger(), nullable=False, comment='Size of the original variable content in bytes'),
sa.Column('length', sa.Integer(), nullable=True, comment='Length of the original variable content. For array and array-like types, this represents the number of elements. For object types, it indicates the number of keys. For other types, the value is NULL.'),
sa.Column('value_type', sa.String(20), nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('workflow_draft_variable_files_pkey'))
)
else:
op.create_table('workflow_draft_variable_files',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False, comment='The tenant to which the WorkflowDraftVariableFile belongs, referencing Tenant.id'),
sa.Column('app_id', models.types.StringUUID(), nullable=False, comment='The application to which the WorkflowDraftVariableFile belongs, referencing App.id'),
sa.Column('user_id', models.types.StringUUID(), nullable=False, comment='The owner of the WorkflowDraftVariableFile, referencing Account.id'),
sa.Column('upload_file_id', models.types.StringUUID(), nullable=False, comment='Reference to UploadFile containing the large variable data'),
sa.Column('size', sa.BigInteger(), nullable=False, comment='Size of the original variable content in bytes'),
sa.Column('length', sa.Integer(), nullable=True, comment='Length of the original variable content. For array and array-like types, this represents the number of elements. For object types, it indicates the number of keys. For other types, the value is NULL.'),
sa.Column('value_type', sa.String(20), nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('workflow_draft_variable_files_pkey'))
)
if _is_pg(conn):
op.create_table('workflow_node_execution_offload',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('node_execution_id', models.types.StringUUID(), nullable=True),
sa.Column('type', sa.String(20), nullable=False),
sa.Column('file_id', models.types.StringUUID(), nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('workflow_node_execution_offload_pkey')),
sa.UniqueConstraint('node_execution_id', 'type', name=op.f('workflow_node_execution_offload_node_execution_id_key'))
)
else:
op.create_table('workflow_node_execution_offload',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('node_execution_id', models.types.StringUUID(), nullable=True),
sa.Column('type', sa.String(20), nullable=False),
sa.Column('file_id', models.types.StringUUID(), nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('workflow_node_execution_offload_pkey')),
sa.UniqueConstraint('node_execution_id', 'type', name=op.f('workflow_node_execution_offload_node_execution_id_key'))
)
if _is_pg(conn):
with op.batch_alter_table('datasets', schema=None) as batch_op:
batch_op.add_column(sa.Column('keyword_number', sa.Integer(), server_default=sa.text('10'), nullable=True))
batch_op.add_column(sa.Column('icon_info', postgresql.JSONB(astext_type=sa.Text()), nullable=True))
batch_op.add_column(sa.Column('runtime_mode', sa.String(length=255), server_default=sa.text("'general'::character varying"), nullable=True))
batch_op.add_column(sa.Column('pipeline_id', models.types.StringUUID(), nullable=True))
batch_op.add_column(sa.Column('chunk_structure', sa.String(length=255), nullable=True))
batch_op.add_column(sa.Column('enable_api', sa.Boolean(), server_default=sa.text('true'), nullable=False))
else:
with op.batch_alter_table('datasets', schema=None) as batch_op:
batch_op.add_column(sa.Column('keyword_number', sa.Integer(), server_default=sa.text('10'), nullable=True))
batch_op.add_column(sa.Column('icon_info', models.types.AdjustedJSON(astext_type=sa.Text()), nullable=True))
batch_op.add_column(sa.Column('runtime_mode', sa.String(length=255), server_default=sa.text("'general'"), nullable=True))
batch_op.add_column(sa.Column('pipeline_id', models.types.StringUUID(), nullable=True))
batch_op.add_column(sa.Column('chunk_structure', sa.String(length=255), nullable=True))
batch_op.add_column(sa.Column('enable_api', sa.Boolean(), server_default=sa.text('true'), nullable=False))
with op.batch_alter_table('workflow_draft_variables', schema=None) as batch_op:
batch_op.add_column(sa.Column('file_id', models.types.StringUUID(), nullable=True, comment='Reference to WorkflowDraftVariableFile if variable is offloaded to external storage'))
@@ -175,9 +342,12 @@ def upgrade():
comment='Indicates whether the current value is the default for a conversation variable. Always `FALSE` for other types of variables.',)
)
batch_op.create_index('workflow_draft_variable_file_id_idx', ['file_id'], unique=False)
with op.batch_alter_table('workflows', schema=None) as batch_op:
batch_op.add_column(sa.Column('rag_pipeline_variables', sa.Text(), server_default='{}', nullable=False))
if _is_pg(conn):
with op.batch_alter_table('workflows', schema=None) as batch_op:
batch_op.add_column(sa.Column('rag_pipeline_variables', sa.Text(), server_default='{}', nullable=False))
else:
with op.batch_alter_table('workflows', schema=None) as batch_op:
batch_op.add_column(sa.Column('rag_pipeline_variables', models.types.LongText(), default='{}', nullable=False))
# ### end Alembic commands ###
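models.types.AdjustedJSON(astext_type=sa.Text()) mirrors the postgresql.JSONB signature, so the PG and MySQL call sites stay symmetrical. Its implementation is likewise outside this diff; a minimal sketch of what such a type could look like (the astext_type parameter is accepted purely for signature parity, and the real AdjustedJSON may differ):

import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import JSONB

class AdjustedJSON(sa.types.TypeDecorator):
    """JSONB on PostgreSQL, generic JSON elsewhere. Sketch only."""
    impl = sa.JSON
    cache_ok = True

    def __init__(self, astext_type=None, **kwargs):
        self._astext_type = astext_type  # kept for parity with JSONB
        super().__init__(**kwargs)

    def load_dialect_impl(self, dialect):
        if dialect.name == "postgresql":
            return dialect.type_descriptor(JSONB(astext_type=self._astext_type))
        return dialect.type_descriptor(sa.JSON())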


@@ -7,6 +7,10 @@ Create Date: 2025-10-21 14:30:28.566192
"""
from alembic import op
import models as models
import sqlalchemy as sa
def _is_pg(conn):
    return conn.dialect.name == "postgresql"
@@ -29,8 +33,15 @@ def upgrade():
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('pipeline_built_in_templates', schema=None) as batch_op:
batch_op.add_column(sa.Column('created_by', sa.UUID(), autoincrement=False, nullable=False))
batch_op.add_column(sa.Column('updated_by', sa.UUID(), autoincrement=False, nullable=True))
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('pipeline_built_in_templates', schema=None) as batch_op:
batch_op.add_column(sa.Column('created_by', sa.UUID(), autoincrement=False, nullable=False))
batch_op.add_column(sa.Column('updated_by', sa.UUID(), autoincrement=False, nullable=True))
else:
with op.batch_alter_table('pipeline_built_in_templates', schema=None) as batch_op:
batch_op.add_column(sa.Column('created_by', models.types.StringUUID(), autoincrement=False, nullable=False))
batch_op.add_column(sa.Column('updated_by', models.types.StringUUID(), autoincrement=False, nullable=True))
# ### end Alembic commands ###
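The downgrade above also swaps sa.UUID() (native on PostgreSQL) for models.types.StringUUID() on MySQL, which lacks a UUID column type. The classic recipe for such a backend-agnostic type, roughly along the lines of SQLAlchemy's documented GUID example (the project's actual StringUUID may differ):

import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import UUID as PG_UUID

class StringUUID(sa.types.TypeDecorator):
    """Native UUID on PostgreSQL, CHAR(36) string elsewhere. Sketch only."""
    impl = sa.CHAR
    cache_ok = True

    def load_dialect_impl(self, dialect):
        if dialect.name == "postgresql":
            return dialect.type_descriptor(PG_UUID(as_uuid=False))
        return dialect.type_descriptor(sa.CHAR(36))

    def process_bind_param(self, value, dialect):
        return None if value is None else str(value)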


@@ -9,7 +9,10 @@ Create Date: 2025-10-22 16:11:31.805407
from alembic import op
import models as models
import sqlalchemy as sa
from libs.uuid_utils import uuidv7
def _is_pg(conn):
    return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = "03f8dcbc611e"
@@ -19,19 +22,33 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"workflow_pauses",
sa.Column("workflow_id", models.types.StringUUID(), nullable=False),
sa.Column("workflow_run_id", models.types.StringUUID(), nullable=False),
sa.Column("resumed_at", sa.DateTime(), nullable=True),
sa.Column("state_object_key", sa.String(length=255), nullable=False),
sa.Column("id", models.types.StringUUID(), server_default=sa.text("uuidv7()"), nullable=False),
sa.Column("created_at", sa.DateTime(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=False),
sa.Column("updated_at", sa.DateTime(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=False),
sa.PrimaryKeyConstraint("id", name=op.f("workflow_pauses_pkey")),
sa.UniqueConstraint("workflow_run_id", name=op.f("workflow_pauses_workflow_run_id_key")),
)
conn = op.get_bind()
if _is_pg(conn):
op.create_table(
"workflow_pauses",
sa.Column("workflow_id", models.types.StringUUID(), nullable=False),
sa.Column("workflow_run_id", models.types.StringUUID(), nullable=False),
sa.Column("resumed_at", sa.DateTime(), nullable=True),
sa.Column("state_object_key", sa.String(length=255), nullable=False),
sa.Column("id", models.types.StringUUID(), server_default=sa.text("uuidv7()"), nullable=False),
sa.Column("created_at", sa.DateTime(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=False),
sa.Column("updated_at", sa.DateTime(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=False),
sa.PrimaryKeyConstraint("id", name=op.f("workflow_pauses_pkey")),
sa.UniqueConstraint("workflow_run_id", name=op.f("workflow_pauses_workflow_run_id_key")),
)
else:
op.create_table(
"workflow_pauses",
sa.Column("workflow_id", models.types.StringUUID(), nullable=False),
sa.Column("workflow_run_id", models.types.StringUUID(), nullable=False),
sa.Column("resumed_at", sa.DateTime(), nullable=True),
sa.Column("state_object_key", sa.String(length=255), nullable=False),
sa.Column("id", models.types.StringUUID(), nullable=False),
sa.Column("created_at", sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column("updated_at", sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint("id", name=op.f("workflow_pauses_pkey")),
sa.UniqueConstraint("workflow_run_id", name=op.f("workflow_pauses_workflow_run_id_key")),
)
# ### end Alembic commands ###
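The MySQL branch drops server_default=sa.text("uuidv7()") because MySQL has no such SQL function; the newly imported libs.uuid_utils.uuidv7 suggests the id is generated application-side instead. A hedged sketch of a Python-side column default standing in for the server default (not shown in this hunk; assumes uuidv7() returns a UUID value):

# Sketch: application-side UUIDv7 default replacing the PG server default.
sa.Column("id", models.types.StringUUID(),
          default=lambda: str(uuidv7()), nullable=False)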


@@ -8,9 +8,12 @@ Create Date: 2025-10-30 15:18:49.549156
from alembic import op
import models as models
import sqlalchemy as sa
from libs.uuid_utils import uuidv7
from models.enums import AppTriggerStatus, AppTriggerType
def _is_pg(conn):
    return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '669ffd70119c'
@@ -21,125 +24,246 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('app_triggers',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('node_id', sa.String(length=64), nullable=False),
sa.Column('trigger_type', models.types.EnumText(AppTriggerType, length=50), nullable=False),
sa.Column('title', sa.String(length=255), nullable=False),
sa.Column('provider_name', sa.String(length=255), server_default='', nullable=True),
sa.Column('status', models.types.EnumText(AppTriggerStatus, length=50), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.PrimaryKeyConstraint('id', name='app_trigger_pkey')
)
conn = op.get_bind()
if _is_pg(conn):
op.create_table('app_triggers',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('node_id', sa.String(length=64), nullable=False),
sa.Column('trigger_type', models.types.EnumText(AppTriggerType, length=50), nullable=False),
sa.Column('title', sa.String(length=255), nullable=False),
sa.Column('provider_name', sa.String(length=255), server_default='', nullable=True),
sa.Column('status', models.types.EnumText(AppTriggerStatus, length=50), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.PrimaryKeyConstraint('id', name='app_trigger_pkey')
)
else:
op.create_table('app_triggers',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('node_id', sa.String(length=64), nullable=False),
sa.Column('trigger_type', models.types.EnumText(AppTriggerType, length=50), nullable=False),
sa.Column('title', sa.String(length=255), nullable=False),
sa.Column('provider_name', sa.String(length=255), server_default='', nullable=True),
sa.Column('status', models.types.EnumText(AppTriggerStatus, length=50), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.PrimaryKeyConstraint('id', name='app_trigger_pkey')
)
with op.batch_alter_table('app_triggers', schema=None) as batch_op:
batch_op.create_index('app_trigger_tenant_app_idx', ['tenant_id', 'app_id'], unique=False)
op.create_table('trigger_oauth_system_clients',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('plugin_id', sa.String(length=512), nullable=False),
sa.Column('provider', sa.String(length=255), nullable=False),
sa.Column('encrypted_oauth_params', sa.Text(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='trigger_oauth_system_client_pkey'),
sa.UniqueConstraint('plugin_id', 'provider', name='trigger_oauth_system_client_plugin_id_provider_idx')
)
op.create_table('trigger_oauth_tenant_clients',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('plugin_id', sa.String(length=512), nullable=False),
sa.Column('provider', sa.String(length=255), nullable=False),
sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False),
sa.Column('encrypted_oauth_params', sa.Text(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='trigger_oauth_tenant_client_pkey'),
sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', name='unique_trigger_oauth_tenant_client')
)
op.create_table('trigger_subscriptions',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False, comment='Subscription instance name'),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('user_id', models.types.StringUUID(), nullable=False),
sa.Column('provider_id', sa.String(length=255), nullable=False, comment='Provider identifier (e.g., plugin_id/provider_name)'),
sa.Column('endpoint_id', sa.String(length=255), nullable=False, comment='Subscription endpoint'),
sa.Column('parameters', sa.JSON(), nullable=False, comment='Subscription parameters JSON'),
sa.Column('properties', sa.JSON(), nullable=False, comment='Subscription properties JSON'),
sa.Column('credentials', sa.JSON(), nullable=False, comment='Subscription credentials JSON'),
sa.Column('credential_type', sa.String(length=50), nullable=False, comment='oauth or api_key'),
sa.Column('credential_expires_at', sa.Integer(), nullable=False, comment='OAuth token expiration timestamp, -1 for never'),
sa.Column('expires_at', sa.Integer(), nullable=False, comment='Subscription instance expiration timestamp, -1 for never'),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='trigger_provider_pkey'),
sa.UniqueConstraint('tenant_id', 'provider_id', 'name', name='unique_trigger_provider')
)
if _is_pg(conn):
op.create_table('trigger_oauth_system_clients',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('plugin_id', sa.String(length=512), nullable=False),
sa.Column('provider', sa.String(length=255), nullable=False),
sa.Column('encrypted_oauth_params', sa.Text(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='trigger_oauth_system_client_pkey'),
sa.UniqueConstraint('plugin_id', 'provider', name='trigger_oauth_system_client_plugin_id_provider_idx')
)
else:
op.create_table('trigger_oauth_system_clients',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('plugin_id', sa.String(length=512), nullable=False),
sa.Column('provider', sa.String(length=255), nullable=False),
sa.Column('encrypted_oauth_params', models.types.LongText(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='trigger_oauth_system_client_pkey'),
sa.UniqueConstraint('plugin_id', 'provider', name='trigger_oauth_system_client_plugin_id_provider_idx')
)
if _is_pg(conn):
op.create_table('trigger_oauth_tenant_clients',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('plugin_id', sa.String(length=255), nullable=False),
sa.Column('provider', sa.String(length=255), nullable=False),
sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False),
sa.Column('encrypted_oauth_params', sa.Text(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='trigger_oauth_tenant_client_pkey'),
sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', name='unique_trigger_oauth_tenant_client')
)
else:
op.create_table('trigger_oauth_tenant_clients',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('plugin_id', sa.String(length=255), nullable=False),
sa.Column('provider', sa.String(length=255), nullable=False),
sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False),
sa.Column('encrypted_oauth_params', models.types.LongText(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='trigger_oauth_tenant_client_pkey'),
sa.UniqueConstraint('tenant_id', 'plugin_id', 'provider', name='unique_trigger_oauth_tenant_client')
)
if _is_pg(conn):
op.create_table('trigger_subscriptions',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False, comment='Subscription instance name'),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('user_id', models.types.StringUUID(), nullable=False),
sa.Column('provider_id', sa.String(length=255), nullable=False, comment='Provider identifier (e.g., plugin_id/provider_name)'),
sa.Column('endpoint_id', sa.String(length=255), nullable=False, comment='Subscription endpoint'),
sa.Column('parameters', sa.JSON(), nullable=False, comment='Subscription parameters JSON'),
sa.Column('properties', sa.JSON(), nullable=False, comment='Subscription properties JSON'),
sa.Column('credentials', sa.JSON(), nullable=False, comment='Subscription credentials JSON'),
sa.Column('credential_type', sa.String(length=50), nullable=False, comment='oauth or api_key'),
sa.Column('credential_expires_at', sa.Integer(), nullable=False, comment='OAuth token expiration timestamp, -1 for never'),
sa.Column('expires_at', sa.Integer(), nullable=False, comment='Subscription instance expiration timestamp, -1 for never'),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='trigger_provider_pkey'),
sa.UniqueConstraint('tenant_id', 'provider_id', 'name', name='unique_trigger_provider')
)
else:
op.create_table('trigger_subscriptions',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False, comment='Subscription instance name'),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('user_id', models.types.StringUUID(), nullable=False),
sa.Column('provider_id', sa.String(length=255), nullable=False, comment='Provider identifier (e.g., plugin_id/provider_name)'),
sa.Column('endpoint_id', sa.String(length=255), nullable=False, comment='Subscription endpoint'),
sa.Column('parameters', sa.JSON(), nullable=False, comment='Subscription parameters JSON'),
sa.Column('properties', sa.JSON(), nullable=False, comment='Subscription properties JSON'),
sa.Column('credentials', sa.JSON(), nullable=False, comment='Subscription credentials JSON'),
sa.Column('credential_type', sa.String(length=50), nullable=False, comment='oauth or api_key'),
sa.Column('credential_expires_at', sa.Integer(), nullable=False, comment='OAuth token expiration timestamp, -1 for never'),
sa.Column('expires_at', sa.Integer(), nullable=False, comment='Subscription instance expiration timestamp, -1 for never'),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='trigger_provider_pkey'),
sa.UniqueConstraint('tenant_id', 'provider_id', 'name', name='unique_trigger_provider')
)
with op.batch_alter_table('trigger_subscriptions', schema=None) as batch_op:
batch_op.create_index('idx_trigger_providers_endpoint', ['endpoint_id'], unique=True)
batch_op.create_index('idx_trigger_providers_tenant_endpoint', ['tenant_id', 'endpoint_id'], unique=False)
batch_op.create_index('idx_trigger_providers_tenant_provider', ['tenant_id', 'provider_id'], unique=False)
op.create_table('workflow_plugin_triggers',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('node_id', sa.String(length=64), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('provider_id', sa.String(length=512), nullable=False),
sa.Column('event_name', sa.String(length=255), nullable=False),
sa.Column('subscription_id', sa.String(length=255), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='workflow_plugin_trigger_pkey'),
sa.UniqueConstraint('app_id', 'node_id', name='uniq_app_node_subscription')
)
if _is_pg(conn):
op.create_table('workflow_plugin_triggers',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('node_id', sa.String(length=64), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('provider_id', sa.String(length=512), nullable=False),
sa.Column('event_name', sa.String(length=255), nullable=False),
sa.Column('subscription_id', sa.String(length=255), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='workflow_plugin_trigger_pkey'),
sa.UniqueConstraint('app_id', 'node_id', name='uniq_app_node_subscription')
)
else:
op.create_table('workflow_plugin_triggers',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('node_id', sa.String(length=64), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('provider_id', sa.String(length=512), nullable=False),
sa.Column('event_name', sa.String(length=255), nullable=False),
sa.Column('subscription_id', sa.String(length=255), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='workflow_plugin_trigger_pkey'),
sa.UniqueConstraint('app_id', 'node_id', name='uniq_app_node_subscription')
)
with op.batch_alter_table('workflow_plugin_triggers', schema=None) as batch_op:
batch_op.create_index('workflow_plugin_trigger_tenant_subscription_idx', ['tenant_id', 'subscription_id', 'event_name'], unique=False)
op.create_table('workflow_schedule_plans',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('node_id', sa.String(length=64), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('cron_expression', sa.String(length=255), nullable=False),
sa.Column('timezone', sa.String(length=64), nullable=False),
sa.Column('next_run_at', sa.DateTime(), nullable=True),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='workflow_schedule_plan_pkey'),
sa.UniqueConstraint('app_id', 'node_id', name='uniq_app_node')
)
if _is_pg(conn):
op.create_table('workflow_schedule_plans',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('node_id', sa.String(length=64), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('cron_expression', sa.String(length=255), nullable=False),
sa.Column('timezone', sa.String(length=64), nullable=False),
sa.Column('next_run_at', sa.DateTime(), nullable=True),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='workflow_schedule_plan_pkey'),
sa.UniqueConstraint('app_id', 'node_id', name='uniq_app_node')
)
else:
op.create_table('workflow_schedule_plans',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('node_id', sa.String(length=64), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('cron_expression', sa.String(length=255), nullable=False),
sa.Column('timezone', sa.String(length=64), nullable=False),
sa.Column('next_run_at', sa.DateTime(), nullable=True),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='workflow_schedule_plan_pkey'),
sa.UniqueConstraint('app_id', 'node_id', name='uniq_app_node')
)
with op.batch_alter_table('workflow_schedule_plans', schema=None) as batch_op:
batch_op.create_index('workflow_schedule_plan_next_idx', ['next_run_at'], unique=False)
op.create_table('workflow_trigger_logs',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('workflow_id', models.types.StringUUID(), nullable=False),
sa.Column('workflow_run_id', models.types.StringUUID(), nullable=True),
sa.Column('root_node_id', sa.String(length=255), nullable=True),
sa.Column('trigger_metadata', sa.Text(), nullable=False),
sa.Column('trigger_type', models.types.EnumText(AppTriggerType, length=50), nullable=False),
sa.Column('trigger_data', sa.Text(), nullable=False),
sa.Column('inputs', sa.Text(), nullable=False),
sa.Column('outputs', sa.Text(), nullable=True),
sa.Column('status', models.types.EnumText(AppTriggerStatus, length=50), nullable=False),
sa.Column('error', sa.Text(), nullable=True),
sa.Column('queue_name', sa.String(length=100), nullable=False),
sa.Column('celery_task_id', sa.String(length=255), nullable=True),
sa.Column('retry_count', sa.Integer(), nullable=False),
sa.Column('elapsed_time', sa.Float(), nullable=True),
sa.Column('total_tokens', sa.Integer(), nullable=True),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('created_by_role', sa.String(length=255), nullable=False),
sa.Column('created_by', sa.String(length=255), nullable=False),
sa.Column('triggered_at', sa.DateTime(), nullable=True),
sa.Column('finished_at', sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint('id', name='workflow_trigger_log_pkey')
)
if _is_pg(conn):
op.create_table('workflow_trigger_logs',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('workflow_id', models.types.StringUUID(), nullable=False),
sa.Column('workflow_run_id', models.types.StringUUID(), nullable=True),
sa.Column('root_node_id', sa.String(length=255), nullable=True),
sa.Column('trigger_metadata', sa.Text(), nullable=False),
sa.Column('trigger_type', models.types.EnumText(AppTriggerType, length=50), nullable=False),
sa.Column('trigger_data', sa.Text(), nullable=False),
sa.Column('inputs', sa.Text(), nullable=False),
sa.Column('outputs', sa.Text(), nullable=True),
sa.Column('status', models.types.EnumText(AppTriggerStatus, length=50), nullable=False),
sa.Column('error', sa.Text(), nullable=True),
sa.Column('queue_name', sa.String(length=100), nullable=False),
sa.Column('celery_task_id', sa.String(length=255), nullable=True),
sa.Column('retry_count', sa.Integer(), nullable=False),
sa.Column('elapsed_time', sa.Float(), nullable=True),
sa.Column('total_tokens', sa.Integer(), nullable=True),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('created_by_role', sa.String(length=255), nullable=False),
sa.Column('created_by', sa.String(length=255), nullable=False),
sa.Column('triggered_at', sa.DateTime(), nullable=True),
sa.Column('finished_at', sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint('id', name='workflow_trigger_log_pkey')
)
else:
op.create_table('workflow_trigger_logs',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('workflow_id', models.types.StringUUID(), nullable=False),
sa.Column('workflow_run_id', models.types.StringUUID(), nullable=True),
sa.Column('root_node_id', sa.String(length=255), nullable=True),
sa.Column('trigger_metadata', models.types.LongText(), nullable=False),
sa.Column('trigger_type', models.types.EnumText(AppTriggerType, length=50), nullable=False),
sa.Column('trigger_data', models.types.LongText(), nullable=False),
sa.Column('inputs', models.types.LongText(), nullable=False),
sa.Column('outputs', models.types.LongText(), nullable=True),
sa.Column('status', models.types.EnumText(AppTriggerStatus, length=50), nullable=False),
sa.Column('error', models.types.LongText(), nullable=True),
sa.Column('queue_name', sa.String(length=100), nullable=False),
sa.Column('celery_task_id', sa.String(length=255), nullable=True),
sa.Column('retry_count', sa.Integer(), nullable=False),
sa.Column('elapsed_time', sa.Float(), nullable=True),
sa.Column('total_tokens', sa.Integer(), nullable=True),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('created_by_role', sa.String(length=255), nullable=False),
sa.Column('created_by', sa.String(length=255), nullable=False),
sa.Column('triggered_at', sa.DateTime(), nullable=True),
sa.Column('finished_at', sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint('id', name='workflow_trigger_log_pkey')
)
with op.batch_alter_table('workflow_trigger_logs', schema=None) as batch_op:
batch_op.create_index('workflow_trigger_log_created_at_idx', ['created_at'], unique=False)
batch_op.create_index('workflow_trigger_log_status_idx', ['status'], unique=False)
@@ -147,19 +271,34 @@ def upgrade():
batch_op.create_index('workflow_trigger_log_workflow_id_idx', ['workflow_id'], unique=False)
batch_op.create_index('workflow_trigger_log_workflow_run_idx', ['workflow_run_id'], unique=False)
op.create_table('workflow_webhook_triggers',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('node_id', sa.String(length=64), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('webhook_id', sa.String(length=24), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='workflow_webhook_trigger_pkey'),
sa.UniqueConstraint('app_id', 'node_id', name='uniq_node'),
sa.UniqueConstraint('webhook_id', name='uniq_webhook_id')
)
if _is_pg(conn):
op.create_table('workflow_webhook_triggers',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('node_id', sa.String(length=64), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('webhook_id', sa.String(length=24), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='workflow_webhook_trigger_pkey'),
sa.UniqueConstraint('app_id', 'node_id', name='uniq_node'),
sa.UniqueConstraint('webhook_id', name='uniq_webhook_id')
)
else:
op.create_table('workflow_webhook_triggers',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('node_id', sa.String(length=64), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('webhook_id', sa.String(length=24), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='workflow_webhook_trigger_pkey'),
sa.UniqueConstraint('app_id', 'node_id', name='uniq_node'),
sa.UniqueConstraint('webhook_id', name='uniq_webhook_id')
)
with op.batch_alter_table('workflow_webhook_triggers', schema=None) as batch_op:
batch_op.create_index('workflow_webhook_trigger_tenant_idx', ['tenant_id'], unique=False)
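The trigger tables above lean on two custom column types, models.types.EnumText and models.types.LongText, to keep the DDL portable. Their definitions live in models/types.py and are not part of this diff; the following is a hedged sketch of how an EnumText of this shape could be written, assuming it stores the enum member's string value in a plain VARCHAR:

import enum

import sqlalchemy as sa

class EnumText(sa.types.TypeDecorator):
    # Hypothetical approximation of models.types.EnumText, not the real
    # definition: persists enum members as VARCHAR so PostgreSQL and
    # MySQL behave identically.
    impl = sa.String
    cache_ok = True

    def __init__(self, enum_class: type[enum.Enum], length: int = 50):
        super().__init__(length)
        self._enum_class = enum_class

    def process_bind_param(self, value, dialect):
        if value is None:
            return None
        return value.value if isinstance(value, enum.Enum) else str(value)

    def process_result_value(self, value, dialect):
        return None if value is None else self._enum_class(value)

A column like trigger_type above would then round-trip AppTriggerType members as their string values on either backend.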
@@ -184,8 +323,14 @@ def upgrade():
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('providers', schema=None) as batch_op:
batch_op.add_column(sa.Column('credential_status', sa.VARCHAR(length=20), server_default=sa.text("'active'::character varying"), autoincrement=False, nullable=True))
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('providers', schema=None) as batch_op:
batch_op.add_column(sa.Column('credential_status', sa.VARCHAR(length=20), server_default=sa.text("'active'::character varying"), autoincrement=False, nullable=True))
else:
with op.batch_alter_table('providers', schema=None) as batch_op:
batch_op.add_column(sa.Column('credential_status', sa.VARCHAR(length=20), server_default=sa.text("'active'"), autoincrement=False, nullable=True))
with op.batch_alter_table('celery_tasksetmeta', schema=None) as batch_op:
batch_op.alter_column('taskset_id',

View File

@@ -0,0 +1,131 @@
"""empty message
Revision ID: 09cfdda155d1
Revises: 669ffd70119c
Create Date: 2025-11-15 21:02:32.472885
"""
from alembic import op
import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql, mysql
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '09cfdda155d1'
down_revision = '669ffd70119c'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('datasource_providers', schema=None) as batch_op:
batch_op.alter_column('provider',
existing_type=sa.VARCHAR(length=255),
type_=sa.String(length=128),
existing_nullable=False)
with op.batch_alter_table('external_knowledge_bindings', schema=None) as batch_op:
batch_op.alter_column('external_knowledge_id',
existing_type=sa.TEXT(),
type_=sa.String(length=512),
existing_nullable=False)
with op.batch_alter_table('tenant_plugin_auto_upgrade_strategies', schema=None) as batch_op:
batch_op.alter_column('exclude_plugins',
existing_type=postgresql.ARRAY(sa.VARCHAR(length=255)),
type_=sa.JSON(),
existing_nullable=False,
postgresql_using='to_jsonb(exclude_plugins)::json')
batch_op.alter_column('include_plugins',
existing_type=postgresql.ARRAY(sa.VARCHAR(length=255)),
type_=sa.JSON(),
existing_nullable=False,
postgresql_using='to_jsonb(include_plugins)::json')
with op.batch_alter_table('tool_oauth_tenant_clients', schema=None) as batch_op:
batch_op.alter_column('plugin_id',
existing_type=sa.VARCHAR(length=512),
type_=sa.String(length=255),
existing_nullable=False)
with op.batch_alter_table('trigger_oauth_system_clients', schema=None) as batch_op:
batch_op.alter_column('plugin_id',
existing_type=sa.VARCHAR(length=512),
type_=sa.String(length=255),
existing_nullable=False)
else:
with op.batch_alter_table('trigger_oauth_system_clients', schema=None) as batch_op:
batch_op.alter_column('plugin_id',
existing_type=mysql.VARCHAR(length=512),
type_=sa.String(length=255),
existing_nullable=False)
with op.batch_alter_table('workflows', schema=None) as batch_op:
batch_op.alter_column('updated_at',
existing_type=mysql.TIMESTAMP(),
type_=sa.DateTime(),
existing_nullable=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('trigger_oauth_system_clients', schema=None) as batch_op:
batch_op.alter_column('plugin_id',
existing_type=sa.String(length=255),
type_=sa.VARCHAR(length=512),
existing_nullable=False)
with op.batch_alter_table('tool_oauth_tenant_clients', schema=None) as batch_op:
batch_op.alter_column('plugin_id',
existing_type=sa.String(length=255),
type_=sa.VARCHAR(length=512),
existing_nullable=False)
with op.batch_alter_table('tenant_plugin_auto_upgrade_strategies', schema=None) as batch_op:
batch_op.alter_column('include_plugins',
existing_type=sa.JSON(),
type_=postgresql.ARRAY(sa.VARCHAR(length=255)),
existing_nullable=False)
batch_op.alter_column('exclude_plugins',
existing_type=sa.JSON(),
type_=postgresql.ARRAY(sa.VARCHAR(length=255)),
existing_nullable=False)
with op.batch_alter_table('external_knowledge_bindings', schema=None) as batch_op:
batch_op.alter_column('external_knowledge_id',
existing_type=sa.String(length=512),
type_=sa.TEXT(),
existing_nullable=False)
with op.batch_alter_table('datasource_providers', schema=None) as batch_op:
batch_op.alter_column('provider',
existing_type=sa.String(length=128),
type_=sa.VARCHAR(length=255),
existing_nullable=False)
else:
with op.batch_alter_table('workflows', schema=None) as batch_op:
batch_op.alter_column('updated_at',
existing_type=sa.DateTime(),
type_=mysql.TIMESTAMP(),
existing_nullable=False)
with op.batch_alter_table('trigger_oauth_system_clients', schema=None) as batch_op:
batch_op.alter_column('plugin_id',
existing_type=sa.String(length=255),
type_=mysql.VARCHAR(length=512),
existing_nullable=False)
# ### end Alembic commands ###
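One detail worth pausing on in this revision: the upgrade converts include_plugins/exclude_plugins from ARRAY(VARCHAR) to JSON with an explicit postgresql_using cast, but the downgrade reverses the types without one, and PostgreSQL cannot cast json back to varchar[] implicitly. A sketch of what a symmetric downgrade could look like; the USING expression is an assumption built from standard PostgreSQL functions, not code from this commit:

with op.batch_alter_table('tenant_plugin_auto_upgrade_strategies', schema=None) as batch_op:
    batch_op.alter_column('include_plugins',
           existing_type=sa.JSON(),
           type_=postgresql.ARRAY(sa.VARCHAR(length=255)),
           existing_nullable=False,
           # Unnest the JSON array into text rows, then re-aggregate
           # the rows as a varchar(255)[] value.
           postgresql_using='array(select json_array_elements_text(include_plugins))::varchar(255)[]')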

View File

@@ -8,6 +8,12 @@ Create Date: 2024-01-18 08:46:37.302657
import sqlalchemy as sa
from alembic import op
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '23db93619b9d'
down_revision = '8ae9bc661daa'
@@ -17,8 +23,14 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
batch_op.add_column(sa.Column('message_files', sa.Text(), nullable=True))
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
batch_op.add_column(sa.Column('message_files', sa.Text(), nullable=True))
else:
with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
batch_op.add_column(sa.Column('message_files', models.types.LongText(), nullable=True))
# ### end Alembic commands ###
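models.types.LongText stands in for sa.Text here because MySQL's TEXT column caps out at 64 KB, which is easy to overflow with serialized message files. The type's definition is outside this diff; a minimal sketch of one way to build it with SQLAlchemy's dialect variants, assuming plain TEXT remains acceptable on PostgreSQL:

import sqlalchemy as sa
from sqlalchemy.dialects.mysql import LONGTEXT

def long_text():
    # Hypothetical stand-in for models.types.LongText: renders as TEXT
    # on PostgreSQL and as LONGTEXT on MySQL via a per-dialect variant.
    return sa.Text().with_variant(LONGTEXT(), "mysql")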

View File

@@ -9,6 +9,12 @@ import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '246ba09cbbdb'
down_revision = '714aafe25d39'
@@ -18,17 +24,33 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('app_annotation_settings',
sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('app_id', postgresql.UUID(), nullable=False),
sa.Column('score_threshold', sa.Float(), server_default=sa.text('0'), nullable=False),
sa.Column('collection_binding_id', postgresql.UUID(), nullable=False),
sa.Column('created_user_id', postgresql.UUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_user_id', postgresql.UUID(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='app_annotation_settings_pkey')
)
conn = op.get_bind()
if _is_pg(conn):
op.create_table('app_annotation_settings',
sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('app_id', postgresql.UUID(), nullable=False),
sa.Column('score_threshold', sa.Float(), server_default=sa.text('0'), nullable=False),
sa.Column('collection_binding_id', postgresql.UUID(), nullable=False),
sa.Column('created_user_id', postgresql.UUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_user_id', postgresql.UUID(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='app_annotation_settings_pkey')
)
else:
op.create_table('app_annotation_settings',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('score_threshold', sa.Float(), server_default=sa.text('0'), nullable=False),
sa.Column('collection_binding_id', models.types.StringUUID(), nullable=False),
sa.Column('created_user_id', models.types.StringUUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_user_id', models.types.StringUUID(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='app_annotation_settings_pkey')
)
with op.batch_alter_table('app_annotation_settings', schema=None) as batch_op:
batch_op.create_index('app_annotation_settings_app_idx', ['app_id'], unique=False)
@@ -40,8 +62,14 @@ def upgrade():
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
batch_op.add_column(sa.Column('annotation_reply', sa.TEXT(), autoincrement=False, nullable=True))
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
batch_op.add_column(sa.Column('annotation_reply', sa.TEXT(), autoincrement=False, nullable=True))
else:
with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
batch_op.add_column(sa.Column('annotation_reply', models.types.LongText(), autoincrement=False, nullable=True))
with op.batch_alter_table('app_annotation_settings', schema=None) as batch_op:
batch_op.drop_index('app_annotation_settings_app_idx')

View File

@@ -10,6 +10,10 @@ from alembic import op
import models as models
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '2a3aebbbf4bb'
down_revision = 'c031d46af369'
@@ -19,8 +23,14 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('apps', schema=None) as batch_op:
batch_op.add_column(sa.Column('tracing', sa.Text(), nullable=True))
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('apps', schema=None) as batch_op:
batch_op.add_column(sa.Column('tracing', sa.Text(), nullable=True))
else:
with op.batch_alter_table('apps', schema=None) as batch_op:
batch_op.add_column(sa.Column('tracing', models.types.LongText(), nullable=True))
# ### end Alembic commands ###
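Every revision in this change set repeats the same shape: fetch the bind once, branch on conn.dialect.name, keep the originally generated PostgreSQL DDL verbatim, and emit MySQL-safe equivalents in the else branch. Condensed into a runnable sketch (the column name is illustrative, not from the repo):

import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import mysql

def _is_pg(conn):
    # op.get_bind() returns the live migration connection; dialect.name
    # is "postgresql" or "mysql" depending on the configured database.
    return conn.dialect.name == "postgresql"

def upgrade():
    conn = op.get_bind()
    # Pick the dialect-appropriate type once, then share the DDL.
    text_type = sa.Text() if _is_pg(conn) else mysql.LONGTEXT()
    with op.batch_alter_table('apps', schema=None) as batch_op:
        batch_op.add_column(sa.Column('example_col', text_type, nullable=True))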

View File

@@ -9,6 +9,12 @@ import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '2e9819ca5b28'
down_revision = 'ab23c11305d4'
@@ -18,19 +24,35 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('api_tokens', schema=None) as batch_op:
batch_op.add_column(sa.Column('tenant_id', postgresql.UUID(), nullable=True))
batch_op.create_index('api_token_tenant_idx', ['tenant_id', 'type'], unique=False)
batch_op.drop_column('dataset_id')
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('api_tokens', schema=None) as batch_op:
batch_op.add_column(sa.Column('tenant_id', postgresql.UUID(), nullable=True))
batch_op.create_index('api_token_tenant_idx', ['tenant_id', 'type'], unique=False)
batch_op.drop_column('dataset_id')
else:
with op.batch_alter_table('api_tokens', schema=None) as batch_op:
batch_op.add_column(sa.Column('tenant_id', models.types.StringUUID(), nullable=True))
batch_op.create_index('api_token_tenant_idx', ['tenant_id', 'type'], unique=False)
batch_op.drop_column('dataset_id')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('api_tokens', schema=None) as batch_op:
batch_op.add_column(sa.Column('dataset_id', postgresql.UUID(), autoincrement=False, nullable=True))
batch_op.drop_index('api_token_tenant_idx')
batch_op.drop_column('tenant_id')
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('api_tokens', schema=None) as batch_op:
batch_op.add_column(sa.Column('dataset_id', postgresql.UUID(), autoincrement=False, nullable=True))
batch_op.drop_index('api_token_tenant_idx')
batch_op.drop_column('tenant_id')
else:
with op.batch_alter_table('api_tokens', schema=None) as batch_op:
batch_op.add_column(sa.Column('dataset_id', models.types.StringUUID(), autoincrement=False, nullable=True))
batch_op.drop_index('api_token_tenant_idx')
batch_op.drop_column('tenant_id')
# ### end Alembic commands ###

View File

@@ -8,6 +8,12 @@ Create Date: 2024-01-24 10:58:15.644445
import sqlalchemy as sa
from alembic import op
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '380c6aa5a70d'
down_revision = 'dfb3b7f477da'
@@ -17,8 +23,14 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
batch_op.add_column(sa.Column('tool_labels_str', sa.Text(), server_default=sa.text("'{}'::text"), nullable=False))
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
batch_op.add_column(sa.Column('tool_labels_str', sa.Text(), server_default=sa.text("'{}'::text"), nullable=False))
else:
with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
batch_op.add_column(sa.Column('tool_labels_str', models.types.LongText(), default=sa.text("'{}'"), nullable=False))
# ### end Alembic commands ###

View File

@@ -10,6 +10,10 @@ from alembic import op
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '3b18fea55204'
down_revision = '7bdef072e63a'
@@ -19,13 +23,24 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('tool_label_bindings',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tool_id', sa.String(length=64), nullable=False),
sa.Column('tool_type', sa.String(length=40), nullable=False),
sa.Column('label_name', sa.String(length=40), nullable=False),
sa.PrimaryKeyConstraint('id', name='tool_label_bind_pkey')
)
conn = op.get_bind()
if _is_pg(conn):
op.create_table('tool_label_bindings',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tool_id', sa.String(length=64), nullable=False),
sa.Column('tool_type', sa.String(length=40), nullable=False),
sa.Column('label_name', sa.String(length=40), nullable=False),
sa.PrimaryKeyConstraint('id', name='tool_label_bind_pkey')
)
else:
op.create_table('tool_label_bindings',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tool_id', sa.String(length=64), nullable=False),
sa.Column('tool_type', sa.String(length=40), nullable=False),
sa.Column('label_name', sa.String(length=40), nullable=False),
sa.PrimaryKeyConstraint('id', name='tool_label_bind_pkey')
)
with op.batch_alter_table('tool_workflow_providers', schema=None) as batch_op:
batch_op.add_column(sa.Column('privacy_policy', sa.String(length=255), server_default='', nullable=True))

View File

@@ -6,9 +6,15 @@ Create Date: 2024-04-11 06:17:34.278594
"""
import sqlalchemy as sa
from alembic import op

from sqlalchemy.dialects import postgresql
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '3c7cac9521c6'
down_revision = 'c3311b089690'
@@ -18,28 +24,54 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('tag_bindings',
sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', postgresql.UUID(), nullable=True),
sa.Column('tag_id', postgresql.UUID(), nullable=True),
sa.Column('target_id', postgresql.UUID(), nullable=True),
sa.Column('created_by', postgresql.UUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='tag_binding_pkey')
)
conn = op.get_bind()
if _is_pg(conn):
op.create_table('tag_bindings',
sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', postgresql.UUID(), nullable=True),
sa.Column('tag_id', postgresql.UUID(), nullable=True),
sa.Column('target_id', postgresql.UUID(), nullable=True),
sa.Column('created_by', postgresql.UUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='tag_binding_pkey')
)
else:
op.create_table('tag_bindings',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=True),
sa.Column('tag_id', models.types.StringUUID(), nullable=True),
sa.Column('target_id', models.types.StringUUID(), nullable=True),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='tag_binding_pkey')
)
with op.batch_alter_table('tag_bindings', schema=None) as batch_op:
batch_op.create_index('tag_bind_tag_id_idx', ['tag_id'], unique=False)
batch_op.create_index('tag_bind_target_id_idx', ['target_id'], unique=False)
op.create_table('tags',
sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', postgresql.UUID(), nullable=True),
sa.Column('type', sa.String(length=16), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('created_by', postgresql.UUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='tag_pkey')
)
if _is_pg(conn):
op.create_table('tags',
sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', postgresql.UUID(), nullable=True),
sa.Column('type', sa.String(length=16), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('created_by', postgresql.UUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='tag_pkey')
)
else:
op.create_table('tags',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=True),
sa.Column('type', sa.String(length=16), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='tag_pkey')
)
with op.batch_alter_table('tags', schema=None) as batch_op:
batch_op.create_index('tag_name_idx', ['name'], unique=False)
batch_op.create_index('tag_type_idx', ['type'], unique=False)
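On PostgreSQL these tables keep postgresql.UUID() columns with a uuid_generate_v4() server default; the MySQL branch switches to models.types.StringUUID() with no server default, implying IDs are supplied client-side. A plausible sketch of such a type, assuming CHAR(36) storage outside PostgreSQL (the real StringUUID may differ):

import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import UUID as PG_UUID

class StringUUID(sa.types.TypeDecorator):
    # Hypothetical approximation of models.types.StringUUID: a native
    # UUID column on PostgreSQL, CHAR(36) elsewhere; values travel as
    # 36-character strings either way.
    impl = sa.CHAR(36)
    cache_ok = True

    def load_dialect_impl(self, dialect):
        if dialect.name == "postgresql":
            return dialect.type_descriptor(PG_UUID(as_uuid=False))
        return dialect.type_descriptor(sa.CHAR(36))

    def process_bind_param(self, value, dialect):
        return None if value is None else str(value)

A model using it would pair the column with a client-side default such as default=lambda: str(uuid.uuid4()), taking over the job uuid_generate_v4() does on the PostgreSQL side.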

View File

@@ -9,6 +9,12 @@ import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '3ef9b2b6bee6'
down_revision = '89c7899ca936'
@@ -18,44 +24,96 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('tool_api_providers',
sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('name', sa.String(length=40), nullable=False),
sa.Column('schema', sa.Text(), nullable=False),
sa.Column('schema_type_str', sa.String(length=40), nullable=False),
sa.Column('user_id', postgresql.UUID(), nullable=False),
sa.Column('tenant_id', postgresql.UUID(), nullable=False),
sa.Column('description_str', sa.Text(), nullable=False),
sa.Column('tools_str', sa.Text(), nullable=False),
sa.PrimaryKeyConstraint('id', name='tool_api_provider_pkey')
)
op.create_table('tool_builtin_providers',
sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', postgresql.UUID(), nullable=True),
sa.Column('user_id', postgresql.UUID(), nullable=False),
sa.Column('provider', sa.String(length=40), nullable=False),
sa.Column('encrypted_credentials', sa.Text(), nullable=True),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='tool_builtin_provider_pkey'),
sa.UniqueConstraint('tenant_id', 'provider', name='unique_builtin_tool_provider')
)
op.create_table('tool_published_apps',
sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('app_id', postgresql.UUID(), nullable=False),
sa.Column('user_id', postgresql.UUID(), nullable=False),
sa.Column('description', sa.Text(), nullable=False),
sa.Column('llm_description', sa.Text(), nullable=False),
sa.Column('query_description', sa.Text(), nullable=False),
sa.Column('query_name', sa.String(length=40), nullable=False),
sa.Column('tool_name', sa.String(length=40), nullable=False),
sa.Column('author', sa.String(length=40), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.ForeignKeyConstraint(['app_id'], ['apps.id'], ),
sa.PrimaryKeyConstraint('id', name='published_app_tool_pkey'),
sa.UniqueConstraint('app_id', 'user_id', name='unique_published_app_tool')
)
conn = op.get_bind()
if _is_pg(conn):
# PostgreSQL: Keep original syntax
op.create_table('tool_api_providers',
sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('name', sa.String(length=40), nullable=False),
sa.Column('schema', sa.Text(), nullable=False),
sa.Column('schema_type_str', sa.String(length=40), nullable=False),
sa.Column('user_id', postgresql.UUID(), nullable=False),
sa.Column('tenant_id', postgresql.UUID(), nullable=False),
sa.Column('description_str', sa.Text(), nullable=False),
sa.Column('tools_str', sa.Text(), nullable=False),
sa.PrimaryKeyConstraint('id', name='tool_api_provider_pkey')
)
else:
# MySQL: Use compatible syntax
op.create_table('tool_api_providers',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('name', sa.String(length=40), nullable=False),
sa.Column('schema', models.types.LongText(), nullable=False),
sa.Column('schema_type_str', sa.String(length=40), nullable=False),
sa.Column('user_id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('description_str', models.types.LongText(), nullable=False),
sa.Column('tools_str', models.types.LongText(), nullable=False),
sa.PrimaryKeyConstraint('id', name='tool_api_provider_pkey')
)
if _is_pg(conn):
# PostgreSQL: Keep original syntax
op.create_table('tool_builtin_providers',
sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', postgresql.UUID(), nullable=True),
sa.Column('user_id', postgresql.UUID(), nullable=False),
sa.Column('provider', sa.String(length=40), nullable=False),
sa.Column('encrypted_credentials', sa.Text(), nullable=True),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='tool_builtin_provider_pkey'),
sa.UniqueConstraint('tenant_id', 'provider', name='unique_builtin_tool_provider')
)
else:
# MySQL: Use compatible syntax
op.create_table('tool_builtin_providers',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=True),
sa.Column('user_id', models.types.StringUUID(), nullable=False),
sa.Column('provider', sa.String(length=40), nullable=False),
sa.Column('encrypted_credentials', models.types.LongText(), nullable=True),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='tool_builtin_provider_pkey'),
sa.UniqueConstraint('tenant_id', 'provider', name='unique_builtin_tool_provider')
)
if _is_pg(conn):
# PostgreSQL: Keep original syntax
op.create_table('tool_published_apps',
sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('app_id', postgresql.UUID(), nullable=False),
sa.Column('user_id', postgresql.UUID(), nullable=False),
sa.Column('description', sa.Text(), nullable=False),
sa.Column('llm_description', sa.Text(), nullable=False),
sa.Column('query_description', sa.Text(), nullable=False),
sa.Column('query_name', sa.String(length=40), nullable=False),
sa.Column('tool_name', sa.String(length=40), nullable=False),
sa.Column('author', sa.String(length=40), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.ForeignKeyConstraint(['app_id'], ['apps.id'], ),
sa.PrimaryKeyConstraint('id', name='published_app_tool_pkey'),
sa.UniqueConstraint('app_id', 'user_id', name='unique_published_app_tool')
)
else:
# MySQL: Use compatible syntax
op.create_table('tool_published_apps',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('user_id', models.types.StringUUID(), nullable=False),
sa.Column('description', models.types.LongText(), nullable=False),
sa.Column('llm_description', models.types.LongText(), nullable=False),
sa.Column('query_description', models.types.LongText(), nullable=False),
sa.Column('query_name', sa.String(length=40), nullable=False),
sa.Column('tool_name', sa.String(length=40), nullable=False),
sa.Column('author', sa.String(length=40), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.ForeignKeyConstraint(['app_id'], ['apps.id'], ),
sa.PrimaryKeyConstraint('id', name='published_app_tool_pkey'),
sa.UniqueConstraint('app_id', 'user_id', name='unique_published_app_tool')
)
# ### end Alembic commands ###

View File

@@ -9,6 +9,12 @@ import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '42e85ed5564d'
down_revision = 'f9107f83abab'
@@ -18,31 +24,59 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('conversations', schema=None) as batch_op:
batch_op.alter_column('app_model_config_id',
existing_type=postgresql.UUID(),
nullable=True)
batch_op.alter_column('model_provider',
existing_type=sa.VARCHAR(length=255),
nullable=True)
batch_op.alter_column('model_id',
existing_type=sa.VARCHAR(length=255),
nullable=True)
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('conversations', schema=None) as batch_op:
batch_op.alter_column('app_model_config_id',
existing_type=postgresql.UUID(),
nullable=True)
batch_op.alter_column('model_provider',
existing_type=sa.VARCHAR(length=255),
nullable=True)
batch_op.alter_column('model_id',
existing_type=sa.VARCHAR(length=255),
nullable=True)
else:
with op.batch_alter_table('conversations', schema=None) as batch_op:
batch_op.alter_column('app_model_config_id',
existing_type=models.types.StringUUID(),
nullable=True)
batch_op.alter_column('model_provider',
existing_type=sa.VARCHAR(length=255),
nullable=True)
batch_op.alter_column('model_id',
existing_type=sa.VARCHAR(length=255),
nullable=True)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('conversations', schema=None) as batch_op:
batch_op.alter_column('model_id',
existing_type=sa.VARCHAR(length=255),
nullable=False)
batch_op.alter_column('model_provider',
existing_type=sa.VARCHAR(length=255),
nullable=False)
batch_op.alter_column('app_model_config_id',
existing_type=postgresql.UUID(),
nullable=False)
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('conversations', schema=None) as batch_op:
batch_op.alter_column('model_id',
existing_type=sa.VARCHAR(length=255),
nullable=False)
batch_op.alter_column('model_provider',
existing_type=sa.VARCHAR(length=255),
nullable=False)
batch_op.alter_column('app_model_config_id',
existing_type=postgresql.UUID(),
nullable=False)
else:
with op.batch_alter_table('conversations', schema=None) as batch_op:
batch_op.alter_column('model_id',
existing_type=sa.VARCHAR(length=255),
nullable=False)
batch_op.alter_column('model_provider',
existing_type=sa.VARCHAR(length=255),
nullable=False)
batch_op.alter_column('app_model_config_id',
existing_type=models.types.StringUUID(),
nullable=False)
# ### end Alembic commands ###

View File

@@ -9,6 +9,12 @@ import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '4823da1d26cf'
down_revision = '053da0c1d756'
@@ -18,16 +24,30 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('tool_files',
sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('user_id', postgresql.UUID(), nullable=False),
sa.Column('tenant_id', postgresql.UUID(), nullable=False),
sa.Column('conversation_id', postgresql.UUID(), nullable=False),
sa.Column('file_key', sa.String(length=255), nullable=False),
sa.Column('mimetype', sa.String(length=255), nullable=False),
sa.Column('original_url', sa.String(length=255), nullable=True),
sa.PrimaryKeyConstraint('id', name='tool_file_pkey')
)
conn = op.get_bind()
if _is_pg(conn):
op.create_table('tool_files',
sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('user_id', postgresql.UUID(), nullable=False),
sa.Column('tenant_id', postgresql.UUID(), nullable=False),
sa.Column('conversation_id', postgresql.UUID(), nullable=False),
sa.Column('file_key', sa.String(length=255), nullable=False),
sa.Column('mimetype', sa.String(length=255), nullable=False),
sa.Column('original_url', sa.String(length=255), nullable=True),
sa.PrimaryKeyConstraint('id', name='tool_file_pkey')
)
else:
op.create_table('tool_files',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('user_id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('conversation_id', models.types.StringUUID(), nullable=False),
sa.Column('file_key', sa.String(length=255), nullable=False),
sa.Column('mimetype', sa.String(length=255), nullable=False),
sa.Column('original_url', sa.String(length=255), nullable=True),
sa.PrimaryKeyConstraint('id', name='tool_file_pkey')
)
# ### end Alembic commands ###

View File

@@ -8,6 +8,12 @@ Create Date: 2024-01-12 03:42:27.362415
from alembic import op
from sqlalchemy.dialects import postgresql
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '4829e54d2fee'
down_revision = '114eed84c228'
@@ -17,19 +23,39 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
batch_op.alter_column('message_chain_id',
existing_type=postgresql.UUID(),
nullable=True)
conn = op.get_bind()
if _is_pg(conn):
# PostgreSQL: Keep original syntax
with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
batch_op.alter_column('message_chain_id',
existing_type=postgresql.UUID(),
nullable=True)
else:
# MySQL: Use compatible syntax
with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
batch_op.alter_column('message_chain_id',
existing_type=models.types.StringUUID(),
nullable=True)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
batch_op.alter_column('message_chain_id',
existing_type=postgresql.UUID(),
nullable=False)
conn = op.get_bind()
if _is_pg(conn):
# PostgreSQL: Keep original syntax
with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
batch_op.alter_column('message_chain_id',
existing_type=postgresql.UUID(),
nullable=False)
else:
# MySQL: Use compatible syntax
with op.batch_alter_table('message_agent_thoughts', schema=None) as batch_op:
batch_op.alter_column('message_chain_id',
existing_type=models.types.StringUUID(),
nullable=False)
# ### end Alembic commands ###

View File

@@ -8,6 +8,10 @@ Create Date: 2023-08-28 20:58:50.077056
import sqlalchemy as sa
from alembic import op
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '4bcffcd64aa4'
down_revision = '853f9b9cd3b6'
@@ -17,29 +21,55 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('datasets', schema=None) as batch_op:
batch_op.alter_column('embedding_model',
existing_type=sa.VARCHAR(length=255),
nullable=True,
existing_server_default=sa.text("'text-embedding-ada-002'::character varying"))
batch_op.alter_column('embedding_model_provider',
existing_type=sa.VARCHAR(length=255),
nullable=True,
existing_server_default=sa.text("'openai'::character varying"))
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('datasets', schema=None) as batch_op:
batch_op.alter_column('embedding_model',
existing_type=sa.VARCHAR(length=255),
nullable=True,
existing_server_default=sa.text("'text-embedding-ada-002'::character varying"))
batch_op.alter_column('embedding_model_provider',
existing_type=sa.VARCHAR(length=255),
nullable=True,
existing_server_default=sa.text("'openai'::character varying"))
else:
with op.batch_alter_table('datasets', schema=None) as batch_op:
batch_op.alter_column('embedding_model',
existing_type=sa.VARCHAR(length=255),
nullable=True,
existing_server_default=sa.text("'text-embedding-ada-002'"))
batch_op.alter_column('embedding_model_provider',
existing_type=sa.VARCHAR(length=255),
nullable=True,
existing_server_default=sa.text("'openai'"))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('datasets', schema=None) as batch_op:
batch_op.alter_column('embedding_model_provider',
existing_type=sa.VARCHAR(length=255),
nullable=False,
existing_server_default=sa.text("'openai'::character varying"))
batch_op.alter_column('embedding_model',
existing_type=sa.VARCHAR(length=255),
nullable=False,
existing_server_default=sa.text("'text-embedding-ada-002'::character varying"))
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('datasets', schema=None) as batch_op:
batch_op.alter_column('embedding_model_provider',
existing_type=sa.VARCHAR(length=255),
nullable=False,
existing_server_default=sa.text("'openai'::character varying"))
batch_op.alter_column('embedding_model',
existing_type=sa.VARCHAR(length=255),
nullable=False,
existing_server_default=sa.text("'text-embedding-ada-002'::character varying"))
else:
with op.batch_alter_table('datasets', schema=None) as batch_op:
batch_op.alter_column('embedding_model_provider',
existing_type=sa.VARCHAR(length=255),
nullable=False,
existing_server_default=sa.text("'openai'"))
batch_op.alter_column('embedding_model',
existing_type=sa.VARCHAR(length=255),
nullable=False,
existing_server_default=sa.text("'text-embedding-ada-002'"))
# ### end Alembic commands ###
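The only adjustment this file needs is the quoting of string server defaults: PostgreSQL's autogenerated DDL spells them as 'openai'::character varying, a cast MySQL rejects, so the else branch keeps the bare literal. A small helper could factor the repetition out; illustrative only, and assuming every default is a simple string literal:

import sqlalchemy as sa
from alembic import op

def _varchar_default(value):
    # Illustrative helper, not repo code: append the ::character varying
    # cast only where the dialect understands it.
    if op.get_bind().dialect.name == "postgresql":
        return sa.text(f"'{value}'::character varying")
    return sa.text(f"'{value}'")

# Usage: existing_server_default=_varchar_default('openai')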

View File

@@ -10,6 +10,10 @@ from alembic import op
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '4e99a8df00ff'
down_revision = '64a70a7aab8b'
@@ -19,34 +23,67 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('load_balancing_model_configs',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('provider_name', sa.String(length=255), nullable=False),
sa.Column('model_name', sa.String(length=255), nullable=False),
sa.Column('model_type', sa.String(length=40), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('encrypted_config', sa.Text(), nullable=True),
sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='load_balancing_model_config_pkey')
)
conn = op.get_bind()
if _is_pg(conn):
op.create_table('load_balancing_model_configs',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('provider_name', sa.String(length=255), nullable=False),
sa.Column('model_name', sa.String(length=255), nullable=False),
sa.Column('model_type', sa.String(length=40), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('encrypted_config', sa.Text(), nullable=True),
sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='load_balancing_model_config_pkey')
)
else:
op.create_table('load_balancing_model_configs',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('provider_name', sa.String(length=255), nullable=False),
sa.Column('model_name', sa.String(length=255), nullable=False),
sa.Column('model_type', sa.String(length=40), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('encrypted_config', models.types.LongText(), nullable=True),
sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='load_balancing_model_config_pkey')
)
with op.batch_alter_table('load_balancing_model_configs', schema=None) as batch_op:
batch_op.create_index('load_balancing_model_config_tenant_provider_model_idx', ['tenant_id', 'provider_name', 'model_type'], unique=False)
op.create_table('provider_model_settings',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('provider_name', sa.String(length=255), nullable=False),
sa.Column('model_name', sa.String(length=255), nullable=False),
sa.Column('model_type', sa.String(length=40), nullable=False),
sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False),
sa.Column('load_balancing_enabled', sa.Boolean(), server_default=sa.text('false'), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='provider_model_setting_pkey')
)
if _is_pg(conn):
op.create_table('provider_model_settings',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('provider_name', sa.String(length=255), nullable=False),
sa.Column('model_name', sa.String(length=255), nullable=False),
sa.Column('model_type', sa.String(length=40), nullable=False),
sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False),
sa.Column('load_balancing_enabled', sa.Boolean(), server_default=sa.text('false'), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='provider_model_setting_pkey')
)
else:
op.create_table('provider_model_settings',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('provider_name', sa.String(length=255), nullable=False),
sa.Column('model_name', sa.String(length=255), nullable=False),
sa.Column('model_type', sa.String(length=40), nullable=False),
sa.Column('enabled', sa.Boolean(), server_default=sa.text('true'), nullable=False),
sa.Column('load_balancing_enabled', sa.Boolean(), server_default=sa.text('false'), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='provider_model_setting_pkey')
)
with op.batch_alter_table('provider_model_settings', schema=None) as batch_op:
batch_op.create_index('provider_model_setting_tenant_provider_model_idx', ['tenant_id', 'provider_name', 'model_type'], unique=False)

View File

@@ -8,6 +8,10 @@ Create Date: 2023-08-11 14:38:15.499460
import sqlalchemy as sa
from alembic import op
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '5022897aaceb'
down_revision = 'bf0aec5ba2cf'
@@ -17,10 +21,20 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('embeddings', schema=None) as batch_op:
batch_op.add_column(sa.Column('model_name', sa.String(length=40), server_default=sa.text("'text-embedding-ada-002'::character varying"), nullable=False))
batch_op.drop_constraint('embedding_hash_idx', type_='unique')
batch_op.create_unique_constraint('embedding_hash_idx', ['model_name', 'hash'])
conn = op.get_bind()
if _is_pg(conn):
# PostgreSQL: Keep original syntax
with op.batch_alter_table('embeddings', schema=None) as batch_op:
batch_op.add_column(sa.Column('model_name', sa.String(length=40), server_default=sa.text("'text-embedding-ada-002'::character varying"), nullable=False))
batch_op.drop_constraint('embedding_hash_idx', type_='unique')
batch_op.create_unique_constraint('embedding_hash_idx', ['model_name', 'hash'])
else:
# MySQL: Use compatible syntax
with op.batch_alter_table('embeddings', schema=None) as batch_op:
batch_op.add_column(sa.Column('model_name', sa.String(length=40), server_default=sa.text("'text-embedding-ada-002'"), nullable=False))
batch_op.drop_constraint('embedding_hash_idx', type_='unique')
batch_op.create_unique_constraint('embedding_hash_idx', ['model_name', 'hash'])
# ### end Alembic commands ###

View File

@@ -10,6 +10,10 @@ from alembic import op
import models as models
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '53bf8af60645'
down_revision = '8e5588e6412e'
@@ -19,23 +23,43 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('embeddings', schema=None) as batch_op:
batch_op.alter_column('provider_name',
existing_type=sa.VARCHAR(length=40),
type_=sa.String(length=255),
existing_nullable=False,
existing_server_default=sa.text("''::character varying"))
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('embeddings', schema=None) as batch_op:
batch_op.alter_column('provider_name',
existing_type=sa.VARCHAR(length=40),
type_=sa.String(length=255),
existing_nullable=False,
existing_server_default=sa.text("''::character varying"))
else:
with op.batch_alter_table('embeddings', schema=None) as batch_op:
batch_op.alter_column('provider_name',
existing_type=sa.VARCHAR(length=40),
type_=sa.String(length=255),
existing_nullable=False,
existing_server_default=sa.text("''"))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('embeddings', schema=None) as batch_op:
batch_op.alter_column('provider_name',
existing_type=sa.String(length=255),
type_=sa.VARCHAR(length=40),
existing_nullable=False,
existing_server_default=sa.text("''::character varying"))
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('embeddings', schema=None) as batch_op:
batch_op.alter_column('provider_name',
existing_type=sa.String(length=255),
type_=sa.VARCHAR(length=40),
existing_nullable=False,
existing_server_default=sa.text("''::character varying"))
else:
with op.batch_alter_table('embeddings', schema=None) as batch_op:
batch_op.alter_column('provider_name',
existing_type=sa.String(length=255),
type_=sa.VARCHAR(length=40),
existing_nullable=False,
existing_server_default=sa.text("''"))
# ### end Alembic commands ###
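The only substantive difference between the two branches is the spelling of the empty-string default: PostgreSQL reports varchar defaults back with an explicit cast (''::character varying), so keeping that form avoids spurious autogenerate diffs, while MySQL accepts only the bare quoted literal. A small helper could cut the duplication (a sketch, not something this commit adds):

import sqlalchemy as sa

def varchar_default(conn, value):
    # PostgreSQL stores varchar defaults with an explicit cast; MySQL
    # accepts only the bare quoted literal. `value` must not contain quotes.
    if conn.dialect.name == "postgresql":
        return sa.text("'%s'::character varying" % value)
    return sa.text("'%s'" % value)

# usage inside upgrade()/downgrade():
#   existing_server_default=varchar_default(conn, "")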


@@ -8,6 +8,12 @@ Create Date: 2024-03-14 04:54:56.679506
from alembic import op
from sqlalchemy.dialects import postgresql
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '563cf8bf777b'
down_revision = 'b5429b71023c'
@@ -17,19 +23,35 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('tool_files', schema=None) as batch_op:
batch_op.alter_column('conversation_id',
existing_type=postgresql.UUID(),
nullable=True)
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('tool_files', schema=None) as batch_op:
batch_op.alter_column('conversation_id',
existing_type=postgresql.UUID(),
nullable=True)
else:
with op.batch_alter_table('tool_files', schema=None) as batch_op:
batch_op.alter_column('conversation_id',
existing_type=models.types.StringUUID(),
nullable=True)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('tool_files', schema=None) as batch_op:
batch_op.alter_column('conversation_id',
existing_type=postgresql.UUID(),
nullable=False)
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('tool_files', schema=None) as batch_op:
batch_op.alter_column('conversation_id',
existing_type=postgresql.UUID(),
nullable=False)
else:
with op.batch_alter_table('tool_files', schema=None) as batch_op:
batch_op.alter_column('conversation_id',
existing_type=models.types.StringUUID(),
nullable=False)
# ### end Alembic commands ###
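The MySQL branch swaps postgresql.UUID for models.types.StringUUID. That type's definition is not part of this diff; the usual way to build such a cross-dialect UUID type is a TypeDecorator that stays native on PostgreSQL and falls back to CHAR(36) elsewhere (a sketch under that assumption, following the standard SQLAlchemy recipe):

from sqlalchemy import CHAR
from sqlalchemy.dialects import postgresql
from sqlalchemy.types import TypeDecorator

class StringUUID(TypeDecorator):
    # Native UUID on PostgreSQL, CHAR(36) text on MySQL and others.
    impl = CHAR
    cache_ok = True

    def load_dialect_impl(self, dialect):
        if dialect.name == "postgresql":
            return dialect.type_descriptor(postgresql.UUID())
        return dialect.type_descriptor(CHAR(36))

    def process_bind_param(self, value, dialect):
        # Accept uuid.UUID instances or strings; store the string form.
        return None if value is None else str(value)

    def process_result_value(self, value, dialect):
        return None if value is None else str(value)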


@@ -8,6 +8,10 @@ Create Date: 2023-06-15 13:33:00.357467
import sqlalchemy as sa
from alembic import op
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '614f77cecc48'
down_revision = 'a45f4dfde53b'
@@ -17,8 +21,14 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('accounts', schema=None) as batch_op:
batch_op.add_column(sa.Column('last_active_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False))
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('accounts', schema=None) as batch_op:
batch_op.add_column(sa.Column('last_active_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False))
else:
with op.batch_alter_table('accounts', schema=None) as batch_op:
batch_op.add_column(sa.Column('last_active_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False))
# ### end Alembic commands ###
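CURRENT_TIMESTAMP(0) pins the default to zero fractional-second digits, which PostgreSQL accepts but MySQL may reject unless the column's own precision matches. sa.func.current_timestamp() sidesteps the issue by letting each dialect render its own plain CURRENT_TIMESTAMP. The rendering can be inspected without a database (a sketch; accounts_sketch is a throwaway name):

import sqlalchemy as sa
from sqlalchemy.dialects import mysql, postgresql
from sqlalchemy.schema import CreateTable

metadata = sa.MetaData()
table = sa.Table('accounts_sketch', metadata,
    sa.Column('last_active_at', sa.DateTime(),
              server_default=sa.func.current_timestamp(), nullable=False))

# Print the DDL each backend would receive for the same column definition.
print(CreateTable(table).compile(dialect=postgresql.dialect()))
print(CreateTable(table).compile(dialect=mysql.dialect()))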

File diff suppressed because it is too large.


@@ -9,6 +9,12 @@ import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '6dcb43972bdc'
down_revision = '4bcffcd64aa4'
@@ -18,27 +24,53 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('dataset_retriever_resources',
sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('message_id', postgresql.UUID(), nullable=False),
sa.Column('position', sa.Integer(), nullable=False),
sa.Column('dataset_id', postgresql.UUID(), nullable=False),
sa.Column('dataset_name', sa.Text(), nullable=False),
sa.Column('document_id', postgresql.UUID(), nullable=False),
sa.Column('document_name', sa.Text(), nullable=False),
sa.Column('data_source_type', sa.Text(), nullable=False),
sa.Column('segment_id', postgresql.UUID(), nullable=False),
sa.Column('score', sa.Float(), nullable=True),
sa.Column('content', sa.Text(), nullable=False),
sa.Column('hit_count', sa.Integer(), nullable=True),
sa.Column('word_count', sa.Integer(), nullable=True),
sa.Column('segment_position', sa.Integer(), nullable=True),
sa.Column('index_node_hash', sa.Text(), nullable=True),
sa.Column('retriever_from', sa.Text(), nullable=False),
sa.Column('created_by', postgresql.UUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='dataset_retriever_resource_pkey')
)
conn = op.get_bind()
if _is_pg(conn):
op.create_table('dataset_retriever_resources',
sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('message_id', postgresql.UUID(), nullable=False),
sa.Column('position', sa.Integer(), nullable=False),
sa.Column('dataset_id', postgresql.UUID(), nullable=False),
sa.Column('dataset_name', sa.Text(), nullable=False),
sa.Column('document_id', postgresql.UUID(), nullable=False),
sa.Column('document_name', sa.Text(), nullable=False),
sa.Column('data_source_type', sa.Text(), nullable=False),
sa.Column('segment_id', postgresql.UUID(), nullable=False),
sa.Column('score', sa.Float(), nullable=True),
sa.Column('content', sa.Text(), nullable=False),
sa.Column('hit_count', sa.Integer(), nullable=True),
sa.Column('word_count', sa.Integer(), nullable=True),
sa.Column('segment_position', sa.Integer(), nullable=True),
sa.Column('index_node_hash', sa.Text(), nullable=True),
sa.Column('retriever_from', sa.Text(), nullable=False),
sa.Column('created_by', postgresql.UUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='dataset_retriever_resource_pkey')
)
else:
op.create_table('dataset_retriever_resources',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('message_id', models.types.StringUUID(), nullable=False),
sa.Column('position', sa.Integer(), nullable=False),
sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
sa.Column('dataset_name', models.types.LongText(), nullable=False),
sa.Column('document_id', models.types.StringUUID(), nullable=False),
sa.Column('document_name', models.types.LongText(), nullable=False),
sa.Column('data_source_type', models.types.LongText(), nullable=False),
sa.Column('segment_id', models.types.StringUUID(), nullable=False),
sa.Column('score', sa.Float(), nullable=True),
sa.Column('content', models.types.LongText(), nullable=False),
sa.Column('hit_count', sa.Integer(), nullable=True),
sa.Column('word_count', sa.Integer(), nullable=True),
sa.Column('segment_position', sa.Integer(), nullable=True),
sa.Column('index_node_hash', models.types.LongText(), nullable=True),
sa.Column('retriever_from', models.types.LongText(), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='dataset_retriever_resource_pkey')
)
with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op:
batch_op.create_index('dataset_retriever_resource_message_id_idx', ['message_id'], unique=False)
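On the MySQL side, Text columns become models.types.LongText. The type is not defined in this diff, but the motivation is that MySQL's plain TEXT tops out at 64 KB while PostgreSQL's TEXT is effectively unbounded, so LONGTEXT (4 GB) is the closer match. Roughly the same behavior can be expressed with with_variant (a sketch of an equivalent, not the commit's actual implementation):

import sqlalchemy as sa
from sqlalchemy.dialects import mysql

# TEXT everywhere except MySQL, where it compiles to LONGTEXT.
LongText = sa.Text().with_variant(mysql.LONGTEXT(), 'mysql')

content_column = sa.Column('content', LongText, nullable=False)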


@@ -9,6 +9,12 @@ import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '6e2cfb077b04'
down_revision = '77e83833755c'
@@ -18,19 +24,36 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('dataset_collection_bindings',
sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('provider_name', sa.String(length=40), nullable=False),
sa.Column('model_name', sa.String(length=40), nullable=False),
sa.Column('collection_name', sa.String(length=64), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='dataset_collection_bindings_pkey')
)
conn = op.get_bind()
if _is_pg(conn):
op.create_table('dataset_collection_bindings',
sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('provider_name', sa.String(length=40), nullable=False),
sa.Column('model_name', sa.String(length=40), nullable=False),
sa.Column('collection_name', sa.String(length=64), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='dataset_collection_bindings_pkey')
)
else:
op.create_table('dataset_collection_bindings',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('provider_name', sa.String(length=40), nullable=False),
sa.Column('model_name', sa.String(length=40), nullable=False),
sa.Column('collection_name', sa.String(length=64), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='dataset_collection_bindings_pkey')
)
with op.batch_alter_table('dataset_collection_bindings', schema=None) as batch_op:
batch_op.create_index('provider_model_name_idx', ['provider_name', 'model_name'], unique=False)
with op.batch_alter_table('datasets', schema=None) as batch_op:
batch_op.add_column(sa.Column('collection_binding_id', postgresql.UUID(), nullable=True))
if _is_pg(conn):
with op.batch_alter_table('datasets', schema=None) as batch_op:
batch_op.add_column(sa.Column('collection_binding_id', postgresql.UUID(), nullable=True))
else:
with op.batch_alter_table('datasets', schema=None) as batch_op:
batch_op.add_column(sa.Column('collection_binding_id', models.types.StringUUID(), nullable=True))
# ### end Alembic commands ###
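Every MySQL branch also drops server_default=sa.text('uuid_generate_v4()'): that function comes from PostgreSQL's uuid-ossp extension and has no MySQL equivalent here, so ids have to be generated client-side. A Python-side column default is the usual substitute (a sketch; the application models, which this diff does not show, would carry something like it):

import uuid
import sqlalchemy as sa

id_column = sa.Column('id',
    sa.CHAR(36),                        # stand-in for models.types.StringUUID
    primary_key=True,
    default=lambda: str(uuid.uuid4()))  # evaluated by SQLAlchemy per INSERT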


@@ -8,6 +8,12 @@ Create Date: 2023-12-14 06:38:02.972527
import sqlalchemy as sa
from alembic import op
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '714aafe25d39'
down_revision = 'f2a6fc85e260'
@@ -17,9 +23,16 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('app_annotation_hit_histories', schema=None) as batch_op:
batch_op.add_column(sa.Column('annotation_question', sa.Text(), nullable=False))
batch_op.add_column(sa.Column('annotation_content', sa.Text(), nullable=False))
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('app_annotation_hit_histories', schema=None) as batch_op:
batch_op.add_column(sa.Column('annotation_question', sa.Text(), nullable=False))
batch_op.add_column(sa.Column('annotation_content', sa.Text(), nullable=False))
else:
with op.batch_alter_table('app_annotation_hit_histories', schema=None) as batch_op:
batch_op.add_column(sa.Column('annotation_question', models.types.LongText(), nullable=False))
batch_op.add_column(sa.Column('annotation_content', models.types.LongText(), nullable=False))
# ### end Alembic commands ###
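A caveat that applies to hunks like this one: adding a NOT NULL text column with no default only behaves predictably while the table is empty. PostgreSQL rejects it outright on a populated table, and MySQL silently backfills the type's implicit default (an empty string for text). The portable recipe for a populated table is add-nullable, backfill, then tighten (a sketch, not part of this commit):

import sqlalchemy as sa
from alembic import op

def upgrade():
    with op.batch_alter_table('app_annotation_hit_histories') as batch_op:
        batch_op.add_column(sa.Column('annotation_question', sa.Text(), nullable=True))
    # Backfill existing rows before the column becomes mandatory.
    op.execute(sa.text("UPDATE app_annotation_hit_histories SET annotation_question = ''"))
    with op.batch_alter_table('app_annotation_hit_histories') as batch_op:
        batch_op.alter_column('annotation_question', existing_type=sa.Text(), nullable=False)
    # annotation_content would follow the same three steps.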


@@ -8,6 +8,12 @@ Create Date: 2023-09-06 17:26:40.311927
import sqlalchemy as sa
from alembic import op
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '77e83833755c'
down_revision = '6dcb43972bdc'
@@ -17,8 +23,14 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
batch_op.add_column(sa.Column('retriever_resource', sa.Text(), nullable=True))
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
batch_op.add_column(sa.Column('retriever_resource', sa.Text(), nullable=True))
else:
with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
batch_op.add_column(sa.Column('retriever_resource', models.types.LongText(), nullable=True))
# ### end Alembic commands ###


@@ -10,6 +10,10 @@ from alembic import op
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '7b45942e39bb'
down_revision = '4e99a8df00ff'
@@ -19,44 +23,75 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('data_source_api_key_auth_bindings',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('category', sa.String(length=255), nullable=False),
sa.Column('provider', sa.String(length=255), nullable=False),
sa.Column('credentials', sa.Text(), nullable=True),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('disabled', sa.Boolean(), server_default=sa.text('false'), nullable=True),
sa.PrimaryKeyConstraint('id', name='data_source_api_key_auth_binding_pkey')
)
conn = op.get_bind()
if _is_pg(conn):
# PostgreSQL: Keep original syntax
op.create_table('data_source_api_key_auth_bindings',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('category', sa.String(length=255), nullable=False),
sa.Column('provider', sa.String(length=255), nullable=False),
sa.Column('credentials', sa.Text(), nullable=True),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('disabled', sa.Boolean(), server_default=sa.text('false'), nullable=True),
sa.PrimaryKeyConstraint('id', name='data_source_api_key_auth_binding_pkey')
)
else:
# MySQL: Use compatible syntax
op.create_table('data_source_api_key_auth_bindings',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('category', sa.String(length=255), nullable=False),
sa.Column('provider', sa.String(length=255), nullable=False),
sa.Column('credentials', models.types.LongText(), nullable=True),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('disabled', sa.Boolean(), server_default=sa.text('false'), nullable=True),
sa.PrimaryKeyConstraint('id', name='data_source_api_key_auth_binding_pkey')
)
with op.batch_alter_table('data_source_api_key_auth_bindings', schema=None) as batch_op:
batch_op.create_index('data_source_api_key_auth_binding_provider_idx', ['provider'], unique=False)
batch_op.create_index('data_source_api_key_auth_binding_tenant_id_idx', ['tenant_id'], unique=False)
with op.batch_alter_table('data_source_bindings', schema=None) as batch_op:
batch_op.drop_index('source_binding_tenant_id_idx')
batch_op.drop_index('source_info_idx')
if _is_pg(conn):
batch_op.drop_index('source_info_idx', postgresql_using='gin')
else:
pass
op.rename_table('data_source_bindings', 'data_source_oauth_bindings')
with op.batch_alter_table('data_source_oauth_bindings', schema=None) as batch_op:
batch_op.create_index('source_binding_tenant_id_idx', ['tenant_id'], unique=False)
batch_op.create_index('source_info_idx', ['source_info'], unique=False, postgresql_using='gin')
if _is_pg(conn):
batch_op.create_index('source_info_idx', ['source_info'], unique=False, postgresql_using='gin')
else:
pass
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
with op.batch_alter_table('data_source_oauth_bindings', schema=None) as batch_op:
batch_op.drop_index('source_info_idx', postgresql_using='gin')
if _is_pg(conn):
batch_op.drop_index('source_info_idx', postgresql_using='gin')
else:
pass
batch_op.drop_index('source_binding_tenant_id_idx')
op.rename_table('data_source_oauth_bindings', 'data_source_bindings')
with op.batch_alter_table('data_source_bindings', schema=None) as batch_op:
batch_op.create_index('source_info_idx', ['source_info'], unique=False)
if _is_pg(conn):
batch_op.create_index('source_info_idx', ['source_info'], unique=False, postgresql_using='gin')
else:
pass
batch_op.create_index('source_binding_tenant_id_idx', ['tenant_id'], unique=False)
with op.batch_alter_table('data_source_api_key_auth_bindings', schema=None) as batch_op:
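The GIN index on source_info is the one piece with no MySQL counterpart: GIN is PostgreSQL-specific, and MySQL cannot index a JSON or text document column directly (it would need a generated column), so the else: pass branches leave the index off entirely on MySQL. The guard can equivalently wrap just the index call (a sketch):

from alembic import op

def _is_pg(conn):
    return conn.dialect.name == "postgresql"

def upgrade():
    conn = op.get_bind()
    with op.batch_alter_table('data_source_oauth_bindings', schema=None) as batch_op:
        if _is_pg(conn):
            # PostgreSQL-only: a GIN index for containment queries on source_info.
            batch_op.create_index('source_info_idx', ['source_info'],
                                  unique=False, postgresql_using='gin')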


@@ -10,6 +10,10 @@ from alembic import op
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '7bdef072e63a'
down_revision = '5fda94355fce'
@@ -19,21 +23,42 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('tool_workflow_providers',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('name', sa.String(length=40), nullable=False),
sa.Column('icon', sa.String(length=255), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('user_id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('description', sa.Text(), nullable=False),
sa.Column('parameter_configuration', sa.Text(), server_default='[]', nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='tool_workflow_provider_pkey'),
sa.UniqueConstraint('name', 'tenant_id', name='unique_workflow_tool_provider'),
sa.UniqueConstraint('tenant_id', 'app_id', name='unique_workflow_tool_provider_app_id')
)
conn = op.get_bind()
if _is_pg(conn):
# PostgreSQL: Keep original syntax
op.create_table('tool_workflow_providers',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('name', sa.String(length=40), nullable=False),
sa.Column('icon', sa.String(length=255), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('user_id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('description', sa.Text(), nullable=False),
sa.Column('parameter_configuration', sa.Text(), server_default='[]', nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='tool_workflow_provider_pkey'),
sa.UniqueConstraint('name', 'tenant_id', name='unique_workflow_tool_provider'),
sa.UniqueConstraint('tenant_id', 'app_id', name='unique_workflow_tool_provider_app_id')
)
else:
# MySQL: Use compatible syntax
op.create_table('tool_workflow_providers',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('name', sa.String(length=40), nullable=False),
sa.Column('icon', sa.String(length=255), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('user_id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('description', models.types.LongText(), nullable=False),
sa.Column('parameter_configuration', models.types.LongText(), default='[]', nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='tool_workflow_provider_pkey'),
sa.UniqueConstraint('name', 'tenant_id', name='unique_workflow_tool_provider'),
sa.UniqueConstraint('tenant_id', 'app_id', name='unique_workflow_tool_provider_app_id')
)
# ### end Alembic commands ###
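One subtle switch in the MySQL branch: parameter_configuration moves from server_default='[]' to default='[]'. MySQL cannot attach a DEFAULT to TEXT/BLOB columns (parenthesized expression defaults only arrived in 8.0.13), so the default has to become a client-side Python default, which applies only to inserts issued through SQLAlchemy. The rejected DDL is easy to see by compiling the PostgreSQL-style column against the MySQL dialect (a sketch; providers_sketch is a throwaway name):

import sqlalchemy as sa
from sqlalchemy.dialects import mysql
from sqlalchemy.schema import CreateTable

metadata = sa.MetaData()
table = sa.Table('providers_sketch', metadata,
    sa.Column('parameter_configuration', sa.Text(), server_default='[]'))

# Renders "... TEXT DEFAULT '[]' ..." -- DDL that the MySQL server rejects.
print(CreateTable(table).compile(dialect=mysql.dialect()))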


@@ -9,6 +9,12 @@ import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '7ce5a52e4eee'
down_revision = '2beac44e5f5f'
@@ -18,19 +24,40 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('tool_providers',
sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', postgresql.UUID(), nullable=False),
sa.Column('tool_name', sa.String(length=40), nullable=False),
sa.Column('encrypted_credentials', sa.Text(), nullable=True),
sa.Column('is_enabled', sa.Boolean(), server_default=sa.text('false'), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='tool_provider_pkey'),
sa.UniqueConstraint('tenant_id', 'tool_name', name='unique_tool_provider_tool_name')
)
with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
batch_op.add_column(sa.Column('sensitive_word_avoidance', sa.Text(), nullable=True))
conn = op.get_bind()
if _is_pg(conn):
# PostgreSQL: Keep original syntax
op.create_table('tool_providers',
sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', postgresql.UUID(), nullable=False),
sa.Column('tool_name', sa.String(length=40), nullable=False),
sa.Column('encrypted_credentials', sa.Text(), nullable=True),
sa.Column('is_enabled', sa.Boolean(), server_default=sa.text('false'), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='tool_provider_pkey'),
sa.UniqueConstraint('tenant_id', 'tool_name', name='unique_tool_provider_tool_name')
)
else:
# MySQL: Use compatible syntax
op.create_table('tool_providers',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('tool_name', sa.String(length=40), nullable=False),
sa.Column('encrypted_credentials', models.types.LongText(), nullable=True),
sa.Column('is_enabled', sa.Boolean(), server_default=sa.text('false'), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='tool_provider_pkey'),
sa.UniqueConstraint('tenant_id', 'tool_name', name='unique_tool_provider_tool_name')
)
if _is_pg(conn):
with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
batch_op.add_column(sa.Column('sensitive_word_avoidance', sa.Text(), nullable=True))
else:
with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
batch_op.add_column(sa.Column('sensitive_word_avoidance', models.types.LongText(), nullable=True))
# ### end Alembic commands ###


@@ -10,6 +10,10 @@ from alembic import op
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '7e6a8693e07a'
down_revision = 'b2602e131636'
@@ -19,14 +23,27 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('dataset_permissions',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
sa.Column('account_id', models.types.StringUUID(), nullable=False),
sa.Column('has_permission', sa.Boolean(), server_default=sa.text('true'), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='dataset_permission_pkey')
)
conn = op.get_bind()
if _is_pg(conn):
op.create_table('dataset_permissions',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
sa.Column('account_id', models.types.StringUUID(), nullable=False),
sa.Column('has_permission', sa.Boolean(), server_default=sa.text('true'), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='dataset_permission_pkey')
)
else:
op.create_table('dataset_permissions',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('dataset_id', models.types.StringUUID(), nullable=False),
sa.Column('account_id', models.types.StringUUID(), nullable=False),
sa.Column('has_permission', sa.Boolean(), server_default=sa.text('true'), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='dataset_permission_pkey')
)
with op.batch_alter_table('dataset_permissions', schema=None) as batch_op:
batch_op.create_index('idx_dataset_permissions_account_id', ['account_id'], unique=False)
batch_op.create_index('idx_dataset_permissions_dataset_id', ['dataset_id'], unique=False)


@@ -8,6 +8,12 @@ Create Date: 2023-12-14 07:36:50.705362
import sqlalchemy as sa
from alembic import op
import models.types
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '88072f0caa04'
down_revision = '246ba09cbbdb'
@@ -17,8 +23,14 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('tenants', schema=None) as batch_op:
batch_op.add_column(sa.Column('custom_config', sa.Text(), nullable=True))
conn = op.get_bind()
if _is_pg(conn):
with op.batch_alter_table('tenants', schema=None) as batch_op:
batch_op.add_column(sa.Column('custom_config', sa.Text(), nullable=True))
else:
with op.batch_alter_table('tenants', schema=None) as batch_op:
batch_op.add_column(sa.Column('custom_config', models.types.LongText(), nullable=True))
# ### end Alembic commands ###
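A practical way to vet a sweep of dual-dialect migrations like this is to run the whole chain against both backends and let either failure abort. A sketch, assuming the project's Alembic env.py resolves the database URL from SQLALCHEMY_DATABASE_URI (common in Flask setups); the connection strings are placeholders:

import os
import subprocess

for url in (
    'postgresql://user:pass@localhost:5432/dify_test',
    'mysql+pymysql://user:pass@localhost:3306/dify_test',
):
    subprocess.run(
        ['alembic', 'upgrade', 'head'],
        env={**os.environ, 'SQLALCHEMY_DATABASE_URI': url},
        check=True,  # raise CalledProcessError if either backend fails
    )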

Some files were not shown because too many files have changed in this diff.