feat: add basic types and database accessors for wxo integration (#12011)
* Add basic deployment persistence migrations and types * ruff * Rename db fields * Encryption updates, alembic ids, schema relationships Critical Issues Fixed 1. is_encrypted removed — update_provider_account now always encrypts, matching create_provider_account. No more heuristic that could store plaintext keys. 2. Proper Alembic revision IDs — a1b2c3d4e5f6 → 8106300be7aa, c3d4e5f6a7b8 → 2a5defa5ddc0 (randomly generated). 3. Folder ↔ Deployment relationship — Added folder on Deployment and deployments on Folder with "all, delete, delete-orphan" cascade. Important Issues Fixed 4. Schema layering — Added DeploymentRead, DeploymentProviderAccountCreate, DeploymentProviderAccountRead (no api_key!), DeploymentProviderAccountUpdate. 5. Cascade config — DeploymentProviderAccount.deployments now uses "all, delete, delete-orphan" (matching Folder.flows pattern). 6. UUID validation standardized — Both CRUDs now use _parse_uuid() that raises ValueError with context (field name + value). No more silent return None/return 0 on bad input. 7. encrypt_api_key error context — Wrapped in try/except raising RuntimeError with clear message about encryption config. Suggestions Fixed 8. Field validators — name, resource_key (Deployment) and provider_key, provider_url (DeploymentProviderAccount) now validated non-empty with .strip(). 9. (provider_account_id, resource_key) uniqueness — Added to both model and migration. 10. account_id NULL documented — Comment explaining unique constraint behavior with NULLs. 11. or 0 removed from count_deployment_rows. 
* Bit more verbose naming, but follows existing standards * Add crud tests * Harden validation, error handling, and test coverage for deployment persistence - Extract shared validators (validate_non_empty_string) to database/utils.py - Add DeploymentCreate schema with field validators - Add _UNSET sentinel to update_provider_account for nullable field handling - Extract _encrypt_api_key helper with broadened exception handling - Add empty-string validation in CRUD create functions before DB round-trip - Escalate IntegrityError and None rowcount logging to aerror with rollback - Fix parse_uuid to chain exceptions with `from exc` - Fix folder relationship nullability (Folder, not Folder | None) - Add tests for 5 previously untested CRUD functions and new validation paths * refactor: improve deployment CRUD naming, helpers, and documentation - Rename count_deployments to count_deployments_by_provider - Add update_deployment CRUD function and DeploymentUpdate schema - Extract _strip_or_raise helper to deduplicate input validation - Clarify IntegrityError messages to describe conflicts generically - Document cascade semantics on User model relationships, double- validation rationale in CRUD, and model_fields_set usage on DeploymentProviderAccountUpdate - Add missing get_deployment and update_deployment tests * fix: harden deployment model validation, logging, and SQLAlchemy compatibility - Validate pagination bounds (offset >= 0, limit > 0) in list_deployments_page - Normalize blank provider_tenant_id to None on create, matching update behavior - Centralize normalization via normalize_string_or_none utility and model validators - Remove raw exception objects from IntegrityError log messages to avoid leaking SQL - Reject unsupported types in parse_uuid with a clear TypeError - Remove `from __future__ import annotations` from table models to fix SQLAlchemy relationship mapper errors at runtime * update parent of deployment provider account migration to flow history 
migration * Add documentation and in-memory tests for deployment tables - Add inline comment on api_key column in migration noting it is stored encrypted - Add comment on DeploymentProviderAccount.api_key model field documenting encryption requirement - Add in-memory SQLite test suite covering both deployment and deployment_provider_account tables: unique constraints, CASCADE deletes, relationship loading, FK enforcement, and CRUD operations * Add missing test files * mypy * [autofix.ci] apply automated fixes * remove unused test --------- Co-authored-by: Hamza Rashid <hzarashid@gmail.com> Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
This commit is contained in:
@@ -0,0 +1,92 @@
|
||||
"""Add deployment table
|
||||
|
||||
Revision ID: 2a5defa5ddc0
|
||||
Revises: 8106300be7aa
|
||||
Create Date: 2026-03-03 12:01:00.000000
|
||||
|
||||
Phase: EXPAND
|
||||
"""
|
||||
|
||||
from collections.abc import Sequence
|
||||
|
||||
import sqlalchemy as sa
|
||||
import sqlmodel
|
||||
from alembic import op
|
||||
from langflow.utils import migration
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = "2a5defa5ddc0" # pragma: allowlist secret
|
||||
down_revision: str | None = "8106300be7aa" # pragma: allowlist secret
|
||||
branch_labels: str | Sequence[str] | None = None
|
||||
depends_on: str | Sequence[str] | None = None
|
||||
|
||||
NAME_UNIQUE_CONSTRAINT = "uq_deployment_name_in_provider"
|
||||
RESOURCE_KEY_UNIQUE_CONSTRAINT = "uq_deployment_resource_key_in_provider"
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Create the ``deployment`` table with its FKs, indexes, and unique constraints.

    Idempotent: skips creation when the table already exists (e.g. a partially
    applied migration was re-run).
    """
    conn = op.get_bind()
    # Guard for re-runs: if a previous attempt already created the table, bail out.
    if migration.table_exists("deployment", conn):
        return

    op.create_table(
        "deployment",
        sa.Column("id", sa.Uuid(), nullable=False),
        sa.Column("resource_key", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column("user_id", sa.Uuid(), nullable=False),
        sa.Column("project_id", sa.Uuid(), nullable=False),
        sa.Column("deployment_provider_account_id", sa.Uuid(), nullable=False),
        sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        # Timestamps are server-generated so application code never has to set them.
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
        # "project" is represented by a Folder row; all three FKs cascade on delete.
        sa.ForeignKeyConstraint(
            ["project_id"],
            ["folder.id"],
            name=op.f("fk_deployment_project_id_folder"),
            ondelete="CASCADE",
        ),
        sa.ForeignKeyConstraint(
            ["deployment_provider_account_id"],
            ["deployment_provider_account.id"],
            name=op.f("fk_deployment_deployment_provider_account_id_deployment_provider_account"),
            ondelete="CASCADE",
        ),
        sa.ForeignKeyConstraint(
            ["user_id"],
            ["user.id"],
            name=op.f("fk_deployment_user_id_user"),
            ondelete="CASCADE",
        ),
        sa.PrimaryKeyConstraint("id", name=op.f("pk_deployment")),
    )
    # batch_alter_table keeps index/constraint DDL portable (notably to SQLite).
    with op.batch_alter_table("deployment", schema=None) as batch_op:
        batch_op.create_index(batch_op.f("ix_deployment_name"), ["name"], unique=False)
        batch_op.create_index(batch_op.f("ix_deployment_project_id"), ["project_id"], unique=False)
        batch_op.create_index(
            batch_op.f("ix_deployment_deployment_provider_account_id"),
            ["deployment_provider_account_id"],
            unique=False,
        )
        batch_op.create_index(batch_op.f("ix_deployment_resource_key"), ["resource_key"], unique=False)
        batch_op.create_index(batch_op.f("ix_deployment_user_id"), ["user_id"], unique=False)
        # name and resource_key are each unique per provider account, not globally.
        batch_op.create_unique_constraint(NAME_UNIQUE_CONSTRAINT, ["deployment_provider_account_id", "name"])
        batch_op.create_unique_constraint(
            RESOURCE_KEY_UNIQUE_CONSTRAINT, ["deployment_provider_account_id", "resource_key"]
        )
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Drop the ``deployment`` table along with its constraints and indexes.

    Idempotent: no-op when the table is already absent.
    """
    conn = op.get_bind()
    if not migration.table_exists("deployment", conn):
        return

    # Drop in reverse creation order: unique constraints, then indexes, then the table.
    with op.batch_alter_table("deployment", schema=None) as batch_op:
        batch_op.drop_constraint(RESOURCE_KEY_UNIQUE_CONSTRAINT, type_="unique")
        batch_op.drop_constraint(NAME_UNIQUE_CONSTRAINT, type_="unique")
        batch_op.drop_index(batch_op.f("ix_deployment_user_id"))
        batch_op.drop_index(batch_op.f("ix_deployment_resource_key"))
        batch_op.drop_index(batch_op.f("ix_deployment_deployment_provider_account_id"))
        batch_op.drop_index(batch_op.f("ix_deployment_project_id"))
        batch_op.drop_index(batch_op.f("ix_deployment_name"))

    op.drop_table("deployment")
|
||||
@@ -0,0 +1,70 @@
|
||||
"""Add deployment provider account table
|
||||
|
||||
Revision ID: 8106300be7aa
|
||||
Revises: 7d327cfafab6
|
||||
Create Date: 2026-03-03 12:00:00.000000
|
||||
|
||||
Phase: EXPAND
|
||||
"""
|
||||
|
||||
from collections.abc import Sequence
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
from langflow.utils import migration
|
||||
from sqlmodel.sql.sqltypes import AutoString
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = "8106300be7aa" # pragma: allowlist secret
|
||||
down_revision: str | None = "7d327cfafab6" # pragma: allowlist secret
|
||||
branch_labels: str | Sequence[str] | None = None
|
||||
depends_on: str | Sequence[str] | None = None
|
||||
|
||||
TABLE_NAME = "deployment_provider_account"
|
||||
UNIQUE_CONSTRAINT_NAME = "uq_deployment_provider_account_user_url_tenant"
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Create the ``deployment_provider_account`` table with its FK, indexes, and unique constraint.

    Idempotent: skips creation when the table already exists.
    """
    conn = op.get_bind()
    # Guard for re-runs of a partially applied migration.
    if migration.table_exists(TABLE_NAME, conn):
        return

    op.create_table(
        TABLE_NAME,
        sa.Column("id", sa.Uuid(), nullable=False),
        sa.Column("user_id", sa.Uuid(), nullable=False),
        sa.Column("provider_tenant_id", AutoString(), nullable=True),
        sa.Column("provider_key", AutoString(), nullable=False),
        sa.Column("provider_url", AutoString(), nullable=False),
        sa.Column("api_key", AutoString(), nullable=False),  # MUST be stored encrypted
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
        sa.ForeignKeyConstraint(
            ["user_id"],
            ["user.id"],
            name=op.f("fk_deployment_provider_account_user_id_user"),
            ondelete="CASCADE",
        ),
        sa.PrimaryKeyConstraint("id", name=op.f("pk_deployment_provider_account")),
        # NULL provider_tenant_id rows do not collide under this constraint (SQL NULL != NULL).
        sa.UniqueConstraint("user_id", "provider_url", "provider_tenant_id", name=UNIQUE_CONSTRAINT_NAME),
    )

    # batch_alter_table keeps index DDL portable (notably to SQLite).
    with op.batch_alter_table(TABLE_NAME, schema=None) as batch_op:
        batch_op.create_index(batch_op.f("ix_deployment_provider_account_user_id"), ["user_id"], unique=False)
        batch_op.create_index(
            batch_op.f("ix_deployment_provider_account_provider_tenant_id"), ["provider_tenant_id"], unique=False
        )
        batch_op.create_index(batch_op.f("ix_deployment_provider_account_provider_key"), ["provider_key"], unique=False)
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Drop the ``deployment_provider_account`` table (indexes first).

    Idempotent: no-op when the table is already absent.
    """
    conn = op.get_bind()
    if not migration.table_exists(TABLE_NAME, conn):
        return

    # Drop indexes in reverse creation order before dropping the table itself.
    with op.batch_alter_table(TABLE_NAME, schema=None) as batch_op:
        batch_op.drop_index(batch_op.f("ix_deployment_provider_account_provider_key"))
        batch_op.drop_index(batch_op.f("ix_deployment_provider_account_provider_tenant_id"))
        batch_op.drop_index(batch_op.f("ix_deployment_provider_account_user_id"))

    op.drop_table(TABLE_NAME)
|
||||
@@ -1,5 +1,7 @@
|
||||
from .api_key import ApiKey
|
||||
from .auth import SSOConfig, SSOUserProfile
|
||||
from .deployment import Deployment
|
||||
from .deployment_provider_account import DeploymentProviderAccount
|
||||
from .file import File
|
||||
from .flow import Flow
|
||||
from .flow_version import FlowVersion
|
||||
@@ -13,6 +15,8 @@ from .variable import Variable
|
||||
|
||||
__all__ = [
|
||||
"ApiKey",
|
||||
"Deployment",
|
||||
"DeploymentProviderAccount",
|
||||
"File",
|
||||
"Flow",
|
||||
"FlowVersion",
|
||||
|
||||
@@ -0,0 +1,3 @@
|
||||
from .model import Deployment, DeploymentCreate, DeploymentRead, DeploymentUpdate
|
||||
|
||||
__all__ = ["Deployment", "DeploymentCreate", "DeploymentRead", "DeploymentUpdate"]
|
||||
@@ -0,0 +1,195 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime, timezone
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from lfx.log.logger import logger
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
from sqlmodel import col, delete, func, select
|
||||
|
||||
from langflow.services.database.models.deployment.model import Deployment
|
||||
from langflow.services.database.utils import parse_uuid
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from uuid import UUID
|
||||
|
||||
from sqlmodel.ext.asyncio.session import AsyncSession
|
||||
|
||||
|
||||
def _strip_or_raise(value: str, field_name: str) -> str:
|
||||
"""Return *value* stripped of whitespace, or raise if blank."""
|
||||
stripped = value.strip()
|
||||
if not stripped:
|
||||
msg = f"{field_name} must not be empty"
|
||||
raise ValueError(msg)
|
||||
return stripped
|
||||
|
||||
|
||||
async def create_deployment(
    db: AsyncSession,
    *,
    user_id: UUID,
    project_id: UUID,
    deployment_provider_account_id: UUID,
    resource_key: str,
    name: str,
) -> Deployment:
    """Insert a new ``Deployment`` row and return it refreshed from the database.

    Args:
        db: Active async session; the row is flushed (not committed) here.
        user_id: Owning user.
        project_id: Folder ("project") the deployment belongs to.
        deployment_provider_account_id: Provider account the deployment lives under.
        resource_key: Provider-side identifier; unique per provider account.
        name: Display name; unique per provider account.

    Raises:
        ValueError: If ``resource_key`` or ``name`` is blank, or the insert
            violates a unique constraint.
    """
    # The Deployment model has its own field validators, but pre-checking here
    # gives clearer errors and avoids constructing the object.
    resource_key_s = _strip_or_raise(resource_key, "resource_key")
    name_s = _strip_or_raise(name, "name")

    row = Deployment(
        user_id=user_id,
        project_id=project_id,
        deployment_provider_account_id=deployment_provider_account_id,
        resource_key=resource_key_s,
        name=name_s,
    )
    db.add(row)
    try:
        await db.flush()
    except IntegrityError as exc:
        await db.rollback()
        # Log identifying fields only -- never the raw exception, which may embed
        # SQL/parameters (matches the provider-account CRUD's logging style).
        await logger.aerror(
            "IntegrityError creating deployment (resource_key=%r, name=%r)",
            resource_key_s,
            name_s,
        )
        msg = f"Deployment conflicts with an existing record (resource_key={resource_key!r}, name={name!r})"
        raise ValueError(msg) from exc
    await db.refresh(row)
    return row
|
||||
|
||||
|
||||
async def get_deployment_by_resource_key(
    db: AsyncSession,
    *,
    user_id: UUID,
    deployment_provider_account_id: UUID,
    resource_key: str,
) -> Deployment | None:
    """Fetch one deployment by its provider-side resource key, or ``None`` if absent."""
    query = (
        select(Deployment)
        .where(Deployment.user_id == user_id)
        .where(Deployment.deployment_provider_account_id == deployment_provider_account_id)
        .where(Deployment.resource_key == resource_key.strip())
    )
    result = await db.exec(query)
    return result.first()
|
||||
|
||||
|
||||
async def get_deployment(
    db: AsyncSession,
    *,
    user_id: UUID,
    deployment_id: UUID | str,
) -> Deployment | None:
    """Fetch a deployment by id, scoped to its owner; ``None`` when not found.

    Raises:
        ValueError: If ``deployment_id`` is not a valid UUID (via ``parse_uuid``).
    """
    target_id = parse_uuid(deployment_id, field_name="deployment_id")
    query = (
        select(Deployment)
        .where(Deployment.user_id == user_id)
        .where(Deployment.id == target_id)
    )
    result = await db.exec(query)
    return result.first()
|
||||
|
||||
|
||||
async def update_deployment(
    db: AsyncSession,
    *,
    deployment: Deployment,
    name: str | None = None,
    project_id: UUID | None = None,
) -> Deployment:
    """Apply a partial update to *deployment* and flush it.

    ``None`` for a field means "leave unchanged". ``updated_at`` is always
    refreshed to the current UTC time.

    Raises:
        ValueError: If ``name`` is blank, or the update violates a unique constraint.
    """
    if name is not None:
        deployment.name = _strip_or_raise(name, "name")
    if project_id is not None:
        deployment.project_id = project_id
    deployment.updated_at = datetime.now(timezone.utc)
    db.add(deployment)
    try:
        await db.flush()
    except IntegrityError as exc:
        await db.rollback()
        # Log only the id -- never the raw exception, which may embed SQL/parameters
        # (matches the provider-account CRUD's logging style).
        await logger.aerror("IntegrityError updating deployment id=%s", deployment.id)
        msg = "Deployment update conflicts with an existing record"
        raise ValueError(msg) from exc
    await db.refresh(deployment)
    return deployment
|
||||
|
||||
|
||||
async def list_deployments_page(
    db: AsyncSession,
    *,
    user_id: UUID,
    deployment_provider_account_id: UUID,
    offset: int,
    limit: int,
) -> list[Deployment]:
    """Return one page of deployments, newest first (ties broken by id descending).

    Raises:
        ValueError: If ``offset`` is negative or ``limit`` is not positive.
    """
    if offset < 0:
        raise ValueError("offset must be greater than or equal to 0")
    if limit <= 0:
        raise ValueError("limit must be greater than 0")

    query = (
        select(Deployment)
        .where(Deployment.user_id == user_id)
        .where(Deployment.deployment_provider_account_id == deployment_provider_account_id)
        .order_by(col(Deployment.created_at).desc(), col(Deployment.id).desc())
        .offset(offset)
        .limit(limit)
    )
    rows = (await db.exec(query)).all()
    return list(rows)
|
||||
|
||||
|
||||
async def count_deployments_by_provider(
    db: AsyncSession,
    *,
    user_id: UUID,
    deployment_provider_account_id: UUID,
) -> int:
    """Count deployments owned by *user_id* under one provider account."""
    count_stmt = select(func.count(Deployment.id)).where(
        Deployment.user_id == user_id,
        Deployment.deployment_provider_account_id == deployment_provider_account_id,
    )
    total = (await db.exec(count_stmt)).one()
    return int(total)
|
||||
|
||||
|
||||
async def delete_deployment_by_resource_key(
    db: AsyncSession,
    *,
    user_id: UUID,
    deployment_provider_account_id: UUID,
    resource_key: str,
) -> int:
    """Delete the deployment matching the given resource key; return rows deleted."""
    delete_stmt = delete(Deployment).where(
        Deployment.user_id == user_id,
        Deployment.deployment_provider_account_id == deployment_provider_account_id,
        Deployment.resource_key == resource_key.strip(),
    )
    result = await db.exec(delete_stmt)
    removed = result.rowcount
    if removed is None:
        # Some drivers cannot report an affected-row count; surface that loudly.
        await logger.aerror(
            "DELETE rowcount was None for deployment resource_key=%r -- "
            "database driver may not support rowcount for DELETE statements",
            resource_key,
        )
    return int(removed or 0)
|
||||
|
||||
|
||||
async def delete_deployment_by_id(
    db: AsyncSession,
    *,
    user_id: UUID,
    deployment_id: UUID | str,
) -> int:
    """Delete a deployment by id (owner-scoped); return the number of rows removed.

    Raises:
        ValueError: If ``deployment_id`` is not a valid UUID (via ``parse_uuid``).
    """
    target_id = parse_uuid(deployment_id, field_name="deployment_id")
    delete_stmt = delete(Deployment).where(
        Deployment.user_id == user_id,
        Deployment.id == target_id,
    )
    result = await db.exec(delete_stmt)
    removed = result.rowcount
    if removed is None:
        # Some drivers cannot report an affected-row count; surface that loudly.
        await logger.aerror(
            "DELETE rowcount was None for deployment id=%s -- "
            "database driver may not support rowcount for DELETE statements",
            target_id,
        )
    return int(removed or 0)
|
||||
@@ -0,0 +1,83 @@
|
||||
from datetime import datetime
|
||||
from typing import TYPE_CHECKING
|
||||
from uuid import UUID, uuid4
|
||||
|
||||
from pydantic import field_validator
|
||||
from sqlalchemy import UniqueConstraint
|
||||
from sqlmodel import Column, DateTime, Field, Relationship, SQLModel, func
|
||||
|
||||
from langflow.services.database.utils import validate_non_empty_string, validate_non_empty_string_optional
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from langflow.services.database.models.deployment_provider_account.model import DeploymentProviderAccount
|
||||
from langflow.services.database.models.folder.model import Folder
|
||||
from langflow.services.database.models.user.model import User
|
||||
|
||||
|
||||
class Deployment(SQLModel, table=True):  # type: ignore[call-arg]
    """A deployed resource owned by a user, under a provider account and a project folder."""

    __tablename__ = "deployment"
    __table_args__ = (
        # name and resource_key are each unique per provider account, not globally.
        UniqueConstraint("deployment_provider_account_id", "name", name="uq_deployment_name_in_provider"),
        UniqueConstraint(
            "deployment_provider_account_id", "resource_key", name="uq_deployment_resource_key_in_provider"
        ),
    )

    id: UUID | None = Field(default_factory=uuid4, primary_key=True)
    # Provider-side identifier of the deployed resource.
    resource_key: str = Field(index=True)
    user_id: UUID = Field(foreign_key="user.id", index=True)
    # "project" is represented by a Folder row in the existing schema.
    project_id: UUID = Field(foreign_key="folder.id", index=True)
    # CASCADE behaviour is enforced at the migration/DDL level.
    deployment_provider_account_id: UUID = Field(foreign_key="deployment_provider_account.id", index=True)
    name: str = Field(index=True)
    # Timestamps are server-generated (server_default/onupdate), hence Optional in Python.
    created_at: datetime | None = Field(
        default=None,
        sa_column=Column(DateTime(timezone=True), server_default=func.now(), nullable=False),
    )
    updated_at: datetime | None = Field(
        default=None,
        sa_column=Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now(), nullable=False),
    )

    user: "User" = Relationship(back_populates="deployments")
    deployment_provider_account: "DeploymentProviderAccount" = Relationship(back_populates="deployments")
    folder: "Folder" = Relationship(back_populates="deployments")

    @field_validator("name", "resource_key")
    @classmethod
    def validate_non_empty(cls, v: str, info: object) -> str:
        """Reject empty/whitespace-only values (delegates to the shared validator)."""
        return validate_non_empty_string(v, info)
|
||||
|
||||
|
||||
class DeploymentCreate(SQLModel):
    """Payload schema for creating a deployment.

    ``user_id`` is intentionally absent -- presumably supplied server-side from
    the authenticated request (confirm at the API layer).
    """

    resource_key: str
    deployment_provider_account_id: UUID
    project_id: UUID
    name: str

    @field_validator("name", "resource_key")
    @classmethod
    def validate_non_empty(cls, v: str, info: object) -> str:
        """Reject empty/whitespace-only values (delegates to the shared validator)."""
        return validate_non_empty_string(v, info)
|
||||
|
||||
|
||||
class DeploymentUpdate(SQLModel):
    """Partial-update payload; ``None`` means "leave unchanged" at the CRUD layer."""

    name: str | None = None
    project_id: UUID | None = None

    @field_validator("name", mode="before")
    @classmethod
    def validate_non_empty_if_provided(cls, v: str | None, info: object) -> str | None:
        """Allow ``None`` (field omitted) but reject explicit blank strings."""
        return validate_non_empty_string_optional(v, info)
|
||||
|
||||
|
||||
class DeploymentRead(SQLModel):
    """API response schema for a deployment; exposes all persisted columns."""

    id: UUID
    resource_key: str
    user_id: UUID
    project_id: UUID
    deployment_provider_account_id: UUID
    name: str
    # Non-optional here: rows read from the DB always carry server-generated timestamps.
    created_at: datetime
    updated_at: datetime
|
||||
@@ -0,0 +1,13 @@
|
||||
from .model import (
|
||||
DeploymentProviderAccount,
|
||||
DeploymentProviderAccountCreate,
|
||||
DeploymentProviderAccountRead,
|
||||
DeploymentProviderAccountUpdate,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"DeploymentProviderAccount",
|
||||
"DeploymentProviderAccountCreate",
|
||||
"DeploymentProviderAccountRead",
|
||||
"DeploymentProviderAccountUpdate",
|
||||
]
|
||||
@@ -0,0 +1,179 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime, timezone
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from cryptography.fernet import InvalidToken
|
||||
from lfx.log.logger import logger
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
from sqlmodel import col, select
|
||||
|
||||
from langflow.services.auth import utils as auth_utils
|
||||
from langflow.services.database.models.deployment_provider_account.model import DeploymentProviderAccount
|
||||
from langflow.services.database.utils import normalize_string_or_none, parse_uuid
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from uuid import UUID
|
||||
|
||||
from sqlmodel.ext.asyncio.session import AsyncSession
|
||||
|
||||
_UNSET = object()
|
||||
|
||||
|
||||
def _strip_or_raise(value: str, field_name: str) -> str:
|
||||
"""Return *value* stripped of whitespace, or raise if blank."""
|
||||
stripped = value.strip()
|
||||
if not stripped:
|
||||
msg = f"{field_name} must not be empty"
|
||||
raise ValueError(msg)
|
||||
return stripped
|
||||
|
||||
|
||||
def _encrypt_api_key(raw: str) -> str:
    """Encrypt an API key, raising ``RuntimeError`` on failure.

    Raises:
        ValueError: If the key is blank after stripping.
        RuntimeError: If the underlying encryption call fails.
    """
    candidate = raw.strip()
    if not candidate:
        raise ValueError("api_key must not be empty")
    try:
        return auth_utils.encrypt_api_key(candidate)
    except (ValueError, InvalidToken, TypeError, AttributeError) as e:
        raise RuntimeError("Failed to encrypt API key -- check server encryption configuration") from e
|
||||
|
||||
|
||||
async def get_provider_account_by_id(
    db: AsyncSession,
    *,
    provider_id: UUID | str,
    user_id: UUID | str,
) -> DeploymentProviderAccount | None:
    """Look up a provider account by id, scoped to its owning user; ``None`` if absent.

    Raises:
        ValueError: If either id is not a valid UUID (via ``parse_uuid``).
    """
    provider_uuid = parse_uuid(provider_id, field_name="provider_id")
    user_uuid = parse_uuid(user_id, field_name="user_id")

    query = (
        select(DeploymentProviderAccount)
        .where(DeploymentProviderAccount.id == provider_uuid)
        .where(DeploymentProviderAccount.user_id == user_uuid)
    )
    result = await db.exec(query)
    return result.first()
|
||||
|
||||
|
||||
async def list_provider_accounts(
    db: AsyncSession,
    *,
    user_id: UUID | str,
) -> list[DeploymentProviderAccount]:
    """Return all provider accounts of *user_id*, newest first.

    Raises:
        ValueError: If ``user_id`` is not a valid UUID (via ``parse_uuid``).
    """
    owner = parse_uuid(user_id, field_name="user_id")
    query = (
        select(DeploymentProviderAccount)
        .where(DeploymentProviderAccount.user_id == owner)
        .order_by(col(DeploymentProviderAccount.created_at).desc())
    )
    rows = (await db.exec(query)).all()
    return list(rows)
|
||||
|
||||
|
||||
async def create_provider_account(
    db: AsyncSession,
    *,
    user_id: UUID | str,
    provider_tenant_id: str | None,
    provider_key: str,
    provider_url: str,
    api_key: str,
) -> DeploymentProviderAccount:
    """Create a provider account, encrypting *api_key* before it is stored.

    Args:
        db: Active async session; the row is flushed (not committed) here.
        user_id: Owning user (UUID or UUID string).
        provider_tenant_id: Optional tenant/organization id; blank values are
            normalized to None.
        provider_key: Provider identifier; must be non-blank.
        provider_url: Provider endpoint; must be non-blank. Unique together
            with (user_id, provider_tenant_id).
        api_key: Plaintext credential; always encrypted before being written.

    Raises:
        ValueError: On blank inputs, invalid UUIDs, or a uniqueness conflict.
        RuntimeError: If API-key encryption fails.
    """
    user_uuid = parse_uuid(user_id, field_name="user_id")

    # The model has its own field validators, but pre-checking here gives
    # clearer errors and avoids constructing the object.
    provider_key_s = _strip_or_raise(provider_key, "provider_key")
    provider_url_s = _strip_or_raise(provider_url, "provider_url")

    now = datetime.now(timezone.utc)
    # Encrypt before building the row so an encryption failure never leaves a
    # plaintext key sitting in a model instance.
    try:
        encrypted_key = _encrypt_api_key(api_key)
    except RuntimeError:
        await logger.aerror(
            "Encryption failed creating provider account (user_id=%s, provider_url=%s)",
            user_id,
            provider_url,
        )
        raise
    provider_account = DeploymentProviderAccount(
        user_id=user_uuid,
        provider_tenant_id=normalize_string_or_none(provider_tenant_id),
        provider_key=provider_key_s,
        provider_url=provider_url_s,
        api_key=encrypted_key,
        created_at=now,
        updated_at=now,
    )
    db.add(provider_account)
    try:
        await db.flush()
    except IntegrityError as exc:
        await db.rollback()
        # Log identifying fields only -- not the exception, which may embed SQL/params.
        await logger.aerror(
            "IntegrityError creating provider account (user_id=%s, provider_url=%s, provider_tenant_id=%s)",
            user_uuid,
            provider_url_s,
            provider_tenant_id,
        )
        msg = (
            f"Provider account already exists "
            f"(provider_url={provider_url!r}, provider_tenant_id={provider_tenant_id!r})"
        )
        raise ValueError(msg) from exc
    await db.refresh(provider_account)
    return provider_account
|
||||
|
||||
|
||||
async def update_provider_account(
    db: AsyncSession,
    *,
    provider_account: DeploymentProviderAccount,
    provider_tenant_id: str | None = _UNSET,  # type: ignore[assignment]
    provider_key: str | None = None,
    provider_url: str | None = None,
    api_key: str | None = None,
) -> DeploymentProviderAccount:
    """Apply a partial update to *provider_account* and flush it.

    ``provider_tenant_id`` uses the ``_UNSET`` sentinel so callers can
    distinguish "leave unchanged" (omit the argument) from "clear the value"
    (pass ``None``). For the other fields ``None`` simply means "leave
    unchanged". ``api_key`` is always encrypted before being stored.

    Raises:
        ValueError: If a provided string field is blank, or the update violates
            a unique constraint.
        RuntimeError: If API-key encryption fails.
    """
    if provider_tenant_id is not _UNSET:
        provider_account.provider_tenant_id = normalize_string_or_none(provider_tenant_id)  # type: ignore[arg-type]
    if provider_key is not None:
        provider_account.provider_key = _strip_or_raise(provider_key, "provider_key")
    if provider_url is not None:
        provider_account.provider_url = _strip_or_raise(provider_url, "provider_url")
    if api_key is not None:
        try:
            provider_account.api_key = _encrypt_api_key(api_key)
        except RuntimeError:
            await logger.aerror(
                "Encryption failed updating provider account id=%s",
                provider_account.id,
            )
            raise
    provider_account.updated_at = datetime.now(timezone.utc)
    db.add(provider_account)
    try:
        await db.flush()
    except IntegrityError as exc:
        await db.rollback()
        await logger.aerror("IntegrityError updating provider account id=%s", provider_account.id)
        msg = "Provider account update conflicts with an existing record"
        raise ValueError(msg) from exc
    await db.refresh(provider_account)
    return provider_account
|
||||
|
||||
|
||||
async def delete_provider_account(
    db: AsyncSession,
    *,
    provider_account: DeploymentProviderAccount,
) -> None:
    """Delete *provider_account* and flush, translating DB failures to ``ValueError``."""
    await db.delete(provider_account)
    try:
        await db.flush()
    except IntegrityError as exc:
        await db.rollback()
        await logger.aerror("Failed to delete provider account id=%s", provider_account.id)
        raise ValueError(f"Failed to delete provider account id={provider_account.id}") from exc
|
||||
@@ -0,0 +1,117 @@
|
||||
from datetime import datetime
|
||||
from typing import TYPE_CHECKING
|
||||
from uuid import UUID, uuid4
|
||||
|
||||
from pydantic import field_validator
|
||||
from sqlalchemy import UniqueConstraint
|
||||
from sqlmodel import Column, DateTime, Field, Relationship, SQLModel, func
|
||||
|
||||
from langflow.services.database.utils import (
|
||||
normalize_string_or_none,
|
||||
validate_non_empty_string,
|
||||
validate_non_empty_string_optional,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from langflow.services.database.models.deployment.model import Deployment
|
||||
from langflow.services.database.models.user.model import User
|
||||
|
||||
|
||||
class DeploymentProviderAccount(SQLModel, table=True):  # type: ignore[call-arg]
    """A user's account with an external deployment provider, holding its (encrypted) API key."""

    __tablename__ = "deployment_provider_account"
    __table_args__ = (
        UniqueConstraint(
            "user_id",
            "provider_url",
            "provider_tenant_id",
            name="uq_deployment_provider_account_user_url_tenant",
        ),
    )

    id: UUID | None = Field(default_factory=uuid4, primary_key=True)
    user_id: UUID = Field(foreign_key="user.id", index=True)
    # provider_tenant_id participates in a unique constraint. When NULL,
    # SQL-standard databases (PostgreSQL, SQLite) treat NULL != NULL in unique
    # constraints, so multiple rows with the same (user_id, provider_url) are
    # allowed when provider_tenant_id is NULL. This is intentional: a provider
    # may not require a tenant/organization identifier.
    provider_tenant_id: str | None = Field(default=None, index=True)
    provider_key: str = Field(index=True)
    provider_url: str = Field()
    # MUST be stored encrypted; the CRUD layer encrypts via auth_utils before writing,
    # and the Read schema intentionally excludes this field.
    api_key: str = Field()
    # Timestamps are server-generated (server_default/onupdate), hence Optional in Python.
    created_at: datetime | None = Field(
        default=None,
        sa_column=Column(DateTime(timezone=True), server_default=func.now(), nullable=False),
    )
    updated_at: datetime | None = Field(
        default=None,
        sa_column=Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now(), nullable=False),
    )

    user: "User" = Relationship(back_populates="deployment_provider_accounts")
    # ORM-level cascade mirrors the CASCADE FK in the migration DDL.
    deployments: list["Deployment"] = Relationship(
        back_populates="deployment_provider_account",
        sa_relationship_kwargs={"cascade": "all, delete, delete-orphan"},
    )

    @field_validator("provider_tenant_id", mode="before")
    @classmethod
    def normalize_tenant_id(cls, v: str | None) -> str | None:
        """Collapse blank tenant ids to None so they behave consistently in the unique constraint."""
        return normalize_string_or_none(v)

    @field_validator("provider_key", "provider_url", "api_key")
    @classmethod
    def validate_non_empty(cls, v: str, info: object) -> str:
        """Reject empty/whitespace-only values (delegates to the shared validator)."""
        return validate_non_empty_string(v, info)
|
||||
|
||||
|
||||
class DeploymentProviderAccountCreate(SQLModel):
    """Payload for creating a provider account.

    ``api_key`` arrives in plaintext here; the CRUD layer encrypts it before
    it is persisted.
    """

    provider_tenant_id: str | None = None
    provider_key: str
    provider_url: str
    api_key: str

    @field_validator("provider_tenant_id", mode="before")
    @classmethod
    def normalize_tenant_id(cls, v: str | None) -> str | None:
        """Collapse blank tenant ids to None."""
        return normalize_string_or_none(v)

    @field_validator("provider_key", "provider_url", "api_key")
    @classmethod
    def validate_non_empty(cls, v: str, info: object) -> str:
        """Reject empty/whitespace-only values."""
        return validate_non_empty_string(v, info)
|
||||
|
||||
|
||||
class DeploymentProviderAccountRead(SQLModel):
    """Public read schema for a provider account.

    ``api_key`` is deliberately absent: the stored value is encrypted and
    credentials must never be serialized into API responses.
    """

    id: UUID
    user_id: UUID
    provider_tenant_id: str | None = None
    provider_key: str
    provider_url: str
    created_at: datetime
    updated_at: datetime
|
||||
|
||||
|
||||
class DeploymentProviderAccountUpdate(SQLModel):
    """Partial-update schema for a provider account.

    Every field defaults to ``None``; API routes consuming this schema must
    inspect ``model_fields_set`` to tell "field omitted" (keep the stored
    value) apart from "field explicitly set to null" (clear the value). The
    CRUD layer's ``update_provider_account`` mirrors this with an ``_UNSET``
    sentinel on ``provider_tenant_id``.
    """

    provider_tenant_id: str | None = None
    provider_key: str | None = None
    provider_url: str | None = None
    api_key: str | None = None

    @field_validator("provider_tenant_id", mode="before")
    @classmethod
    def normalize_tenant_id(cls, v: str | None) -> str | None:
        """Turn a blank/whitespace-only tenant id into ``None``."""
        normalized = normalize_string_or_none(v)
        return normalized

    @field_validator("provider_key", "provider_url", "api_key", mode="before")
    @classmethod
    def validate_non_empty_if_provided(cls, v: str | None, info: object) -> str | None:
        """Pass ``None`` through (field not updated); otherwise require non-blank."""
        checked = validate_non_empty_string_optional(v, info)
        return checked
|
||||
@@ -4,6 +4,7 @@ from uuid import UUID, uuid4
|
||||
from sqlalchemy import Text, UniqueConstraint
|
||||
from sqlmodel import JSON, Column, Field, Relationship, SQLModel
|
||||
|
||||
from langflow.services.database.models.deployment.model import Deployment
|
||||
from langflow.services.database.models.flow.model import Flow, FlowRead
|
||||
from langflow.services.database.models.user.model import User
|
||||
|
||||
@@ -32,6 +33,9 @@ class Folder(FolderBase, table=True): # type: ignore[call-arg]
|
||||
flows: list[Flow] = Relationship(
|
||||
back_populates="folder", sa_relationship_kwargs={"cascade": "all, delete, delete-orphan"}
|
||||
)
|
||||
deployments: list[Deployment] = Relationship(
|
||||
back_populates="folder", sa_relationship_kwargs={"cascade": "all, delete, delete-orphan"}
|
||||
)
|
||||
|
||||
__table_args__ = (UniqueConstraint("user_id", "name", name="unique_folder_name"),)
|
||||
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
from fastapi_pagination import Page
|
||||
|
||||
from langflow.helpers.base_model import BaseModel
|
||||
from langflow.services.database.models.flow.model import Flow
|
||||
from langflow.services.database.models.flow.model import FlowRead
|
||||
from langflow.services.database.models.folder.model import FolderRead
|
||||
|
||||
|
||||
class FolderWithPaginatedFlows(BaseModel):
    """Response payload pairing a folder with one page of its flows.

    Flows are exposed through the ``FlowRead`` serialization schema rather
    than the ``Flow`` table model. (The source fragment contained both the
    pre- and post-diff ``flows`` annotations; only the ``FlowRead`` version
    is the intended final state.)
    """

    folder: FolderRead
    flows: Page[FlowRead]
|
||||
|
||||
@@ -10,6 +10,8 @@ from langflow.schema.serialize import UUIDstr
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from langflow.services.database.models.api_key.model import ApiKey
|
||||
from langflow.services.database.models.deployment.model import Deployment
|
||||
from langflow.services.database.models.deployment_provider_account.model import DeploymentProviderAccount
|
||||
from langflow.services.database.models.flow.model import Flow
|
||||
from langflow.services.database.models.folder.model import Folder
|
||||
from langflow.services.database.models.variable.model import Variable
|
||||
@@ -38,6 +40,17 @@ class User(SQLModel, table=True): # type: ignore[call-arg]
|
||||
)
|
||||
store_api_key: str | None = Field(default=None, nullable=True)
|
||||
flows: list["Flow"] = Relationship(back_populates="user")
|
||||
# User is a secondary parent, so cascade="delete" (no "delete-orphan").
|
||||
# Orphan management is handled by the owning models
|
||||
# (DeploymentProviderAccount, Folder) which use "all, delete, delete-orphan".
|
||||
deployment_provider_accounts: list["DeploymentProviderAccount"] = Relationship(
|
||||
back_populates="user",
|
||||
sa_relationship_kwargs={"cascade": "delete"},
|
||||
)
|
||||
deployments: list["Deployment"] = Relationship(
|
||||
back_populates="user",
|
||||
sa_relationship_kwargs={"cascade": "delete"},
|
||||
)
|
||||
variables: list["Variable"] = Relationship(
|
||||
back_populates="user",
|
||||
sa_relationship_kwargs={"cascade": "delete"},
|
||||
|
||||
@@ -3,6 +3,7 @@ from __future__ import annotations
|
||||
from contextlib import asynccontextmanager
|
||||
from dataclasses import dataclass
|
||||
from typing import TYPE_CHECKING
|
||||
from uuid import UUID
|
||||
|
||||
from alembic.util.exc import CommandError
|
||||
from lfx.log.logger import logger
|
||||
@@ -75,6 +76,57 @@ async def session_getter(db_service: DatabaseService):
|
||||
await session.close()
|
||||
|
||||
|
||||
def validate_non_empty_string(v: str, info: object) -> str:
    """Strip *v* and raise ``ValueError`` if nothing remains.

    Intended for use inside ``@field_validator`` methods on SQLModel/Pydantic
    models; the error message names the offending field when *info* carries a
    ``field_name`` attribute, falling back to ``"Field"`` otherwise.
    """
    cleaned = v.strip()
    if cleaned:
        return cleaned
    label = getattr(info, "field_name", "Field")
    msg = f"{label} must not be empty"
    raise ValueError(msg)
|
||||
|
||||
|
||||
def validate_non_empty_string_optional(v: str | None, info: object) -> str | None:
    """Like :func:`validate_non_empty_string`, except ``None`` passes through unchanged."""
    return v if v is None else validate_non_empty_string(v, info)
|
||||
|
||||
|
||||
def normalize_string_or_none(v: str | None) -> str | None:
|
||||
"""Strip whitespace from *v* and return ``None`` if the result is blank."""
|
||||
if v is None:
|
||||
return None
|
||||
stripped = v.strip()
|
||||
return stripped if stripped else None
|
||||
|
||||
|
||||
def parse_uuid(value: UUID | str, *, field_name: str = "value") -> UUID:
|
||||
"""Parse a UUID from a string or pass through a UUID.
|
||||
|
||||
Raises ValueError if the string is empty or not a valid UUID.
|
||||
The *field_name* parameter is included in the error message for context.
|
||||
"""
|
||||
if isinstance(value, UUID):
|
||||
return value
|
||||
if isinstance(value, str):
|
||||
stripped = value.strip()
|
||||
if not stripped:
|
||||
msg = f"{field_name} must not be empty"
|
||||
raise ValueError(msg)
|
||||
try:
|
||||
return UUID(stripped)
|
||||
except ValueError as exc:
|
||||
msg = f"{field_name} is not a valid UUID: {stripped!r}"
|
||||
raise ValueError(msg) from exc
|
||||
msg = f"{field_name} must be a UUID or string, got {type(value).__name__}"
|
||||
raise TypeError(msg)
|
||||
|
||||
|
||||
@dataclass
|
||||
class Result:
|
||||
name: str
|
||||
|
||||
@@ -0,0 +1,489 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from unittest.mock import AsyncMock, MagicMock, patch
|
||||
from uuid import uuid4
|
||||
|
||||
import pytest
|
||||
from langflow.services.database.models.deployment.crud import (
|
||||
count_deployments_by_provider,
|
||||
create_deployment,
|
||||
delete_deployment_by_id,
|
||||
delete_deployment_by_resource_key,
|
||||
get_deployment,
|
||||
get_deployment_by_resource_key,
|
||||
list_deployments_page,
|
||||
update_deployment,
|
||||
)
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
|
||||
DEPLOYMENT_CLASS = "langflow.services.database.models.deployment.crud.Deployment"
|
||||
|
||||
|
||||
def _make_db() -> AsyncMock:
|
||||
"""Create a mock AsyncSession with common async methods."""
|
||||
db = AsyncMock()
|
||||
db.add = MagicMock()
|
||||
return db
|
||||
|
||||
|
||||
# --- create_deployment ---
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_create_deployment_success():
    """create_deployment adds the row, flushes, refreshes, and returns it."""
    session = _make_db()
    user_id = uuid4()
    project_id = uuid4()
    account_id = uuid4()

    with patch(DEPLOYMENT_CLASS) as deployment_cls:
        row = MagicMock()
        row.resource_key = "rk-1"
        row.name = "my-deploy"
        deployment_cls.return_value = row

        created = await create_deployment(
            session,
            user_id=user_id,
            project_id=project_id,
            deployment_provider_account_id=account_id,
            resource_key="rk-1",
            name="my-deploy",
        )

        session.add.assert_called_once_with(row)
        session.flush.assert_awaited_once()
        session.refresh.assert_awaited_once_with(row)
        assert created is row
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_create_deployment_strips_whitespace():
    """Surrounding whitespace on resource_key and name is stripped before construction."""
    session = _make_db()

    with patch(DEPLOYMENT_CLASS) as deployment_cls:
        deployment_cls.return_value = MagicMock()

        await create_deployment(
            session,
            user_id=uuid4(),
            project_id=uuid4(),
            deployment_provider_account_id=uuid4(),
            resource_key="  rk-1  ",
            name="  my-deploy  ",
        )

        passed_kwargs = deployment_cls.call_args.kwargs
        assert passed_kwargs["resource_key"] == "rk-1"
        assert passed_kwargs["name"] == "my-deploy"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_create_deployment_empty_resource_key_raises():
    """A blank resource_key fails validation before any DB round-trip."""
    session = _make_db()

    with pytest.raises(ValueError, match="resource_key must not be empty"):
        await create_deployment(
            session,
            user_id=uuid4(),
            project_id=uuid4(),
            deployment_provider_account_id=uuid4(),
            resource_key=" ",
            name="my-deploy",
        )
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_create_deployment_empty_name_raises():
    """An empty name fails validation before any DB round-trip."""
    session = _make_db()

    with pytest.raises(ValueError, match="name must not be empty"):
        await create_deployment(
            session,
            user_id=uuid4(),
            project_id=uuid4(),
            deployment_provider_account_id=uuid4(),
            resource_key="rk-1",
            name="",
        )
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_create_deployment_integrity_error_raises_value_error():
    """An IntegrityError on flush is logged, rolled back, and surfaced as ValueError."""
    session = _make_db()
    session.flush.side_effect = IntegrityError("dup", params=None, orig=Exception())

    with (
        patch(DEPLOYMENT_CLASS),
        patch("langflow.services.database.models.deployment.crud.logger") as logger_mock,
    ):
        logger_mock.aerror = AsyncMock()
        with pytest.raises(ValueError, match="Deployment conflicts with an existing record"):
            await create_deployment(
                session,
                user_id=uuid4(),
                project_id=uuid4(),
                deployment_provider_account_id=uuid4(),
                resource_key="rk-1",
                name="my-deploy",
            )

        session.rollback.assert_awaited_once()
|
||||
|
||||
|
||||
# --- get_deployment ---
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_get_deployment_invalid_uuid_raises():
    """A malformed deployment_id string is rejected before any DB call."""
    session = _make_db()

    with pytest.raises(ValueError, match="deployment_id is not a valid UUID"):
        await get_deployment(session, user_id=uuid4(), deployment_id="not-a-uuid")
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_get_deployment_found():
    """The first row produced by the query is returned as-is."""
    session = _make_db()
    row = MagicMock()
    exec_result = MagicMock()
    exec_result.first.return_value = row
    session.exec.return_value = exec_result

    fetched = await get_deployment(session, user_id=uuid4(), deployment_id=uuid4())

    assert fetched is row
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_get_deployment_not_found():
    """When the query yields nothing, ``None`` is returned."""
    session = _make_db()
    exec_result = MagicMock()
    exec_result.first.return_value = None
    session.exec.return_value = exec_result

    fetched = await get_deployment(session, user_id=uuid4(), deployment_id=uuid4())

    assert fetched is None
|
||||
|
||||
|
||||
# --- get_deployment_by_resource_key ---
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_deployment_by_resource_key_found():
|
||||
db = _make_db()
|
||||
mock_deployment = MagicMock()
|
||||
mock_result = MagicMock()
|
||||
mock_result.first.return_value = mock_deployment
|
||||
db.exec.return_value = mock_result
|
||||
|
||||
result = await get_deployment_by_resource_key(
|
||||
db,
|
||||
user_id=uuid4(),
|
||||
deployment_provider_account_id=uuid4(),
|
||||
resource_key="rk-1",
|
||||
)
|
||||
|
||||
assert result is mock_deployment
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_deployment_by_resource_key_not_found():
|
||||
db = _make_db()
|
||||
mock_result = MagicMock()
|
||||
mock_result.first.return_value = None
|
||||
db.exec.return_value = mock_result
|
||||
|
||||
result = await get_deployment_by_resource_key(
|
||||
db,
|
||||
user_id=uuid4(),
|
||||
deployment_provider_account_id=uuid4(),
|
||||
resource_key="nonexistent",
|
||||
)
|
||||
|
||||
assert result is None
|
||||
|
||||
|
||||
# --- list_deployments_page ---
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_list_deployments_page_negative_offset_raises():
    """Negative offsets are rejected up front."""
    session = _make_db()

    with pytest.raises(ValueError, match="offset must be greater than or equal to 0"):
        await list_deployments_page(
            session,
            user_id=uuid4(),
            deployment_provider_account_id=uuid4(),
            offset=-1,
            limit=10,
        )
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_list_deployments_page_zero_limit_raises():
|
||||
db = _make_db()
|
||||
|
||||
with pytest.raises(ValueError, match="limit must be greater than 0"):
|
||||
await list_deployments_page(
|
||||
db,
|
||||
user_id=uuid4(),
|
||||
deployment_provider_account_id=uuid4(),
|
||||
offset=0,
|
||||
limit=0,
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_list_deployments_page_negative_limit_raises():
|
||||
db = _make_db()
|
||||
|
||||
with pytest.raises(ValueError, match="limit must be greater than 0"):
|
||||
await list_deployments_page(
|
||||
db,
|
||||
user_id=uuid4(),
|
||||
deployment_provider_account_id=uuid4(),
|
||||
offset=0,
|
||||
limit=-5,
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_list_deployments_page_returns_list():
    """The page query's rows come back as a plain list."""
    session = _make_db()
    rows = [MagicMock(), MagicMock()]
    exec_result = MagicMock()
    exec_result.all.return_value = rows
    session.exec.return_value = exec_result

    page = await list_deployments_page(
        session,
        user_id=uuid4(),
        deployment_provider_account_id=uuid4(),
        offset=0,
        limit=10,
    )

    assert page == rows
    assert isinstance(page, list)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_list_deployments_page_empty():
|
||||
db = _make_db()
|
||||
mock_result = MagicMock()
|
||||
mock_result.all.return_value = []
|
||||
db.exec.return_value = mock_result
|
||||
|
||||
result = await list_deployments_page(
|
||||
db,
|
||||
user_id=uuid4(),
|
||||
deployment_provider_account_id=uuid4(),
|
||||
offset=0,
|
||||
limit=10,
|
||||
)
|
||||
|
||||
assert result == []
|
||||
|
||||
|
||||
# --- count_deployments_by_provider ---
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_count_deployments_by_provider_returns_int():
    """The scalar produced by the count query is returned unchanged."""
    session = _make_db()
    exec_result = MagicMock()
    exec_result.one.return_value = 5
    session.exec.return_value = exec_result

    total = await count_deployments_by_provider(
        session,
        user_id=uuid4(),
        deployment_provider_account_id=uuid4(),
    )

    assert total == 5
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_count_deployments_by_provider_returns_zero():
|
||||
db = _make_db()
|
||||
mock_result = MagicMock()
|
||||
mock_result.one.return_value = 0
|
||||
db.exec.return_value = mock_result
|
||||
|
||||
result = await count_deployments_by_provider(
|
||||
db,
|
||||
user_id=uuid4(),
|
||||
deployment_provider_account_id=uuid4(),
|
||||
)
|
||||
|
||||
assert result == 0
|
||||
|
||||
|
||||
# --- delete_deployment_by_resource_key ---
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_delete_by_resource_key_returns_rowcount():
|
||||
db = _make_db()
|
||||
mock_result = MagicMock()
|
||||
mock_result.rowcount = 1
|
||||
db.exec.return_value = mock_result
|
||||
|
||||
count = await delete_deployment_by_resource_key(
|
||||
db,
|
||||
user_id=uuid4(),
|
||||
deployment_provider_account_id=uuid4(),
|
||||
resource_key="rk-1",
|
||||
)
|
||||
|
||||
assert count == 1
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_delete_by_resource_key_none_rowcount_logs_error():
    """A driver reporting no rowcount yields 0 and an error log entry."""
    session = _make_db()
    exec_result = MagicMock()
    exec_result.rowcount = None
    session.exec.return_value = exec_result

    with patch("langflow.services.database.models.deployment.crud.logger") as logger_mock:
        logger_mock.aerror = AsyncMock()
        deleted = await delete_deployment_by_resource_key(
            session,
            user_id=uuid4(),
            deployment_provider_account_id=uuid4(),
            resource_key="rk-1",
        )

    assert deleted == 0
    logger_mock.aerror.assert_awaited_once()
|
||||
|
||||
|
||||
# --- delete_deployment_by_id ---
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_delete_by_id_returns_rowcount():
|
||||
db = _make_db()
|
||||
mock_result = MagicMock()
|
||||
mock_result.rowcount = 1
|
||||
db.exec.return_value = mock_result
|
||||
|
||||
count = await delete_deployment_by_id(
|
||||
db,
|
||||
user_id=uuid4(),
|
||||
deployment_id=uuid4(),
|
||||
)
|
||||
|
||||
assert count == 1
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_delete_by_id_none_rowcount_logs_error():
|
||||
db = _make_db()
|
||||
mock_result = MagicMock()
|
||||
mock_result.rowcount = None
|
||||
db.exec.return_value = mock_result
|
||||
|
||||
with patch("langflow.services.database.models.deployment.crud.logger") as mock_logger:
|
||||
mock_logger.aerror = AsyncMock()
|
||||
count = await delete_deployment_by_id(
|
||||
db,
|
||||
user_id=uuid4(),
|
||||
deployment_id=uuid4(),
|
||||
)
|
||||
|
||||
assert count == 0
|
||||
mock_logger.aerror.assert_awaited_once()
|
||||
|
||||
|
||||
# --- update_deployment ---
|
||||
|
||||
|
||||
def _make_deployment(**overrides) -> MagicMock:
|
||||
defaults = {
|
||||
"id": uuid4(),
|
||||
"user_id": uuid4(),
|
||||
"project_id": uuid4(),
|
||||
"deployment_provider_account_id": uuid4(),
|
||||
"resource_key": "rk-1",
|
||||
"name": "my-deploy",
|
||||
}
|
||||
defaults.update(overrides)
|
||||
mock = MagicMock()
|
||||
for k, v in defaults.items():
|
||||
setattr(mock, k, v)
|
||||
return mock
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_update_deployment_name():
    """Renaming flushes and refreshes the mutated row."""
    session = _make_db()
    row = _make_deployment()

    updated = await update_deployment(session, deployment=row, name="new-name")

    assert updated.name == "new-name"
    session.flush.assert_awaited_once()
    session.refresh.assert_awaited_once()
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_update_deployment_project_id():
|
||||
db = _make_db()
|
||||
deploy = _make_deployment()
|
||||
new_pid = uuid4()
|
||||
|
||||
result = await update_deployment(db, deployment=deploy, project_id=new_pid)
|
||||
|
||||
assert result.project_id == new_pid
|
||||
db.flush.assert_awaited_once()
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_update_deployment_no_changes():
    """Calling update with no fields still flushes but leaves values alone."""
    session = _make_db()
    row = _make_deployment()
    name_before = row.name

    updated = await update_deployment(session, deployment=row)

    assert updated.name == name_before
    session.flush.assert_awaited_once()
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_update_deployment_empty_name_raises():
|
||||
db = _make_db()
|
||||
deploy = _make_deployment()
|
||||
|
||||
with pytest.raises(ValueError, match="name must not be empty"):
|
||||
await update_deployment(db, deployment=deploy, name=" ")
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_update_deployment_strips_whitespace():
    """Whitespace around the new name is stripped before assignment."""
    session = _make_db()
    row = _make_deployment()

    await update_deployment(session, deployment=row, name=" new-name ")

    assert row.name == "new-name"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_update_deployment_integrity_error_raises_value_error():
    """Constraint violations during update are logged, rolled back, and re-raised as ValueError."""
    session = _make_db()
    session.flush.side_effect = IntegrityError("dup", params=None, orig=Exception())
    row = _make_deployment()

    with patch("langflow.services.database.models.deployment.crud.logger") as logger_mock:
        logger_mock.aerror = AsyncMock()
        with pytest.raises(ValueError, match="conflicts with an existing record"):
            await update_deployment(session, deployment=row, name="duplicate-name")

    session.rollback.assert_awaited_once()
|
||||
@@ -0,0 +1,738 @@
|
||||
"""Deployment and DeploymentProviderAccount tests against in-memory SQLite.
|
||||
|
||||
Uses a real database with foreign keys enabled to verify CASCADE deletes,
|
||||
unique constraints, relationships, and CRUD operations.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from unittest.mock import patch
|
||||
from uuid import uuid4
|
||||
|
||||
import pytest
|
||||
from langflow.services.database.models.deployment.crud import (
|
||||
count_deployments_by_provider,
|
||||
create_deployment,
|
||||
delete_deployment_by_id,
|
||||
delete_deployment_by_resource_key,
|
||||
get_deployment,
|
||||
get_deployment_by_resource_key,
|
||||
list_deployments_page,
|
||||
update_deployment,
|
||||
)
|
||||
from langflow.services.database.models.deployment.model import Deployment
|
||||
from langflow.services.database.models.deployment_provider_account.crud import (
|
||||
create_provider_account,
|
||||
delete_provider_account,
|
||||
get_provider_account_by_id,
|
||||
list_provider_accounts,
|
||||
update_provider_account,
|
||||
)
|
||||
from langflow.services.database.models.deployment_provider_account.model import DeploymentProviderAccount
|
||||
from langflow.services.database.models.folder.model import Folder
|
||||
from langflow.services.database.models.user.model import User
|
||||
from sqlalchemy import event
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
from sqlalchemy.ext.asyncio import create_async_engine
|
||||
from sqlalchemy.pool import StaticPool
|
||||
from sqlmodel import SQLModel, select
|
||||
from sqlmodel.ext.asyncio.session import AsyncSession
|
||||
|
||||
_TEST_PASSWORD = "hashed" # noqa: S105 # pragma: allowlist secret
|
||||
_ENCRYPT_TARGET = "langflow.services.database.models.deployment_provider_account.crud.auth_utils"
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Fixtures
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
@pytest.fixture(name="db_engine")
def db_engine_fixture():
    """Async in-memory SQLite engine with foreign-key enforcement enabled per connection."""
    engine = create_async_engine(
        "sqlite+aiosqlite://",
        connect_args={"check_same_thread": False},
        poolclass=StaticPool,
    )

    # SQLite disables FK checks by default; turn them on for every new connection
    # so CASCADE / FK-violation tests exercise real constraint behavior.
    @event.listens_for(engine.sync_engine, "connect")
    def _enable_fk(dbapi_connection, connection_record):  # noqa: ARG001
        cur = dbapi_connection.cursor()
        cur.execute("PRAGMA foreign_keys=ON")
        cur.close()

    return engine
|
||||
|
||||
|
||||
@pytest.fixture(name="db")
async def db_fixture(db_engine):
    """Yield an AsyncSession over a freshly created schema; tear it down afterwards."""
    async with db_engine.begin() as conn:
        await conn.run_sync(SQLModel.metadata.create_all)
    async with AsyncSession(db_engine, expire_on_commit=False) as session:
        yield session
    async with db_engine.begin() as conn:
        await conn.run_sync(SQLModel.metadata.drop_all)
    await db_engine.dispose()
|
||||
|
||||
|
||||
@pytest.fixture
async def user(db: AsyncSession) -> User:
    """Persist and return an active test user."""
    account = User(username="testuser", password=_TEST_PASSWORD, is_active=True)
    db.add(account)
    await db.commit()
    await db.refresh(account)
    return account
|
||||
|
||||
|
||||
@pytest.fixture
async def folder(db: AsyncSession, user: User) -> Folder:
    """Persist and return a project folder owned by *user*."""
    project = Folder(name="test-project", user_id=user.id)
    db.add(project)
    await db.commit()
    await db.refresh(project)
    return project
|
||||
|
||||
|
||||
@pytest.fixture
async def provider_account(db: AsyncSession, user: User) -> DeploymentProviderAccount:
    """Persist a provider account owned by *user* (api_key stored as an already-'encrypted' stand-in)."""
    account = DeploymentProviderAccount(
        user_id=user.id,
        provider_tenant_id="tenant-1",
        provider_key="test-provider",
        provider_url="https://provider.example.com",
        api_key="encrypted-value",  # pragma: allowlist secret
    )
    db.add(account)
    await db.commit()
    await db.refresh(account)
    return account
|
||||
|
||||
|
||||
@pytest.fixture
async def deployment(
    db: AsyncSession,
    user: User,
    folder: Folder,
    provider_account: DeploymentProviderAccount,
) -> Deployment:
    """Persist a deployment wired to the user, folder, and provider-account fixtures."""
    row = Deployment(
        user_id=user.id,
        project_id=folder.id,
        deployment_provider_account_id=provider_account.id,
        resource_key="rk-1",
        name="my-deployment",
    )
    db.add(row)
    await db.commit()
    await db.refresh(row)
    return row
|
||||
|
||||
|
||||
# ===========================================================================
|
||||
# DeploymentProviderAccount — model-level integration
|
||||
# ===========================================================================
|
||||
|
||||
|
||||
@pytest.mark.asyncio
class TestProviderAccountModel:
    """Model-level checks: persistence, unique constraints, cascades, relationships."""

    async def test_create_and_read(self, db: AsyncSession, provider_account: DeploymentProviderAccount):
        query = select(DeploymentProviderAccount).where(DeploymentProviderAccount.id == provider_account.id)
        fetched = (await db.exec(query)).one()
        assert fetched.provider_key == "test-provider"
        assert fetched.provider_url == "https://provider.example.com"
        assert fetched.provider_tenant_id == "tenant-1"
        assert fetched.created_at is not None
        assert fetched.updated_at is not None

    async def test_unique_constraint_user_url_tenant(
        self, db: AsyncSession, user: User, provider_account: DeploymentProviderAccount
    ):
        clone = DeploymentProviderAccount(
            user_id=user.id,
            provider_tenant_id=provider_account.provider_tenant_id,
            provider_key="another-key",
            provider_url=provider_account.provider_url,
            api_key="other-encrypted",  # pragma: allowlist secret
        )
        db.add(clone)
        with pytest.raises(IntegrityError):
            await db.commit()

    async def test_null_tenant_allows_multiple_rows(self, db: AsyncSession, user: User):
        """SQL NULL != NULL in unique constraints, so two rows with tenant=NULL are allowed."""
        for idx in range(2):
            db.add(
                DeploymentProviderAccount(
                    user_id=user.id,
                    provider_tenant_id=None,
                    provider_key=f"key-{idx}",
                    provider_url="https://same-url.example.com",
                    api_key=f"enc-{idx}",
                )
            )
        await db.commit()

        query = select(DeploymentProviderAccount).where(
            DeploymentProviderAccount.user_id == user.id,
            DeploymentProviderAccount.provider_tenant_id.is_(None),  # type: ignore[union-attr]
        )
        matches = (await db.exec(query)).all()
        assert len(matches) == 2

    async def test_cascade_delete_on_user(
        self, db: AsyncSession, user: User, provider_account: DeploymentProviderAccount
    ):
        account_id = provider_account.id
        await db.delete(user)
        await db.commit()

        query = select(DeploymentProviderAccount).where(DeploymentProviderAccount.id == account_id)
        assert (await db.exec(query)).first() is None

    async def test_user_relationship(self, db: AsyncSession, provider_account: DeploymentProviderAccount):
        await db.refresh(provider_account, attribute_names=["user"])
        owner = provider_account.user
        assert owner is not None
        assert owner.username == "testuser"
|
||||
|
||||
|
||||
# ===========================================================================
|
||||
# Deployment — model-level integration
|
||||
# ===========================================================================
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
class TestDeploymentModel:
|
||||
async def test_create_and_read(self, db: AsyncSession, deployment: Deployment):
|
||||
stmt = select(Deployment).where(Deployment.id == deployment.id)
|
||||
row = (await db.exec(stmt)).one()
|
||||
assert row.name == "my-deployment"
|
||||
assert row.resource_key == "rk-1"
|
||||
assert row.created_at is not None
|
||||
assert row.updated_at is not None
|
||||
|
||||
async def test_unique_name_per_provider(
|
||||
self,
|
||||
db: AsyncSession,
|
||||
user: User,
|
||||
folder: Folder,
|
||||
provider_account: DeploymentProviderAccount,
|
||||
deployment: Deployment,
|
||||
):
|
||||
dup = Deployment(
|
||||
user_id=user.id,
|
||||
project_id=folder.id,
|
||||
deployment_provider_account_id=provider_account.id,
|
||||
resource_key="rk-different",
|
||||
name=deployment.name,
|
||||
)
|
||||
db.add(dup)
|
||||
with pytest.raises(IntegrityError):
|
||||
await db.commit()
|
||||
|
||||
async def test_unique_resource_key_per_provider(
|
||||
self,
|
||||
db: AsyncSession,
|
||||
user: User,
|
||||
folder: Folder,
|
||||
provider_account: DeploymentProviderAccount,
|
||||
deployment: Deployment,
|
||||
):
|
||||
dup = Deployment(
|
||||
user_id=user.id,
|
||||
project_id=folder.id,
|
||||
deployment_provider_account_id=provider_account.id,
|
||||
resource_key=deployment.resource_key,
|
||||
name="different-name",
|
||||
)
|
||||
db.add(dup)
|
||||
with pytest.raises(IntegrityError):
|
||||
await db.commit()
|
||||
|
||||
async def test_same_name_allowed_across_providers(
|
||||
self, db: AsyncSession, user: User, folder: Folder, deployment: Deployment
|
||||
):
|
||||
other_acct = DeploymentProviderAccount(
|
||||
user_id=user.id,
|
||||
provider_key="other-provider",
|
||||
provider_url="https://other.example.com",
|
||||
api_key="enc-other", # pragma: allowlist secret
|
||||
)
|
||||
db.add(other_acct)
|
||||
await db.commit()
|
||||
await db.refresh(other_acct)
|
||||
|
||||
d2 = Deployment(
|
||||
user_id=user.id,
|
||||
project_id=folder.id,
|
||||
deployment_provider_account_id=other_acct.id,
|
||||
resource_key=deployment.resource_key,
|
||||
name=deployment.name,
|
||||
)
|
||||
db.add(d2)
|
||||
await db.commit()
|
||||
await db.refresh(d2)
|
||||
assert d2.id is not None
|
||||
|
||||
async def test_cascade_delete_on_user(self, db: AsyncSession, user: User, deployment: Deployment):
|
||||
dep_id = deployment.id
|
||||
await db.delete(user)
|
||||
await db.commit()
|
||||
|
||||
stmt = select(Deployment).where(Deployment.id == dep_id)
|
||||
assert (await db.exec(stmt)).first() is None
|
||||
|
||||
async def test_cascade_delete_on_folder(self, db: AsyncSession, folder: Folder, deployment: Deployment):
|
||||
dep_id = deployment.id
|
||||
await db.delete(folder)
|
||||
await db.commit()
|
||||
|
||||
stmt = select(Deployment).where(Deployment.id == dep_id)
|
||||
assert (await db.exec(stmt)).first() is None
|
||||
|
||||
async def test_cascade_delete_on_provider_account(
|
||||
self, db: AsyncSession, provider_account: DeploymentProviderAccount, deployment: Deployment
|
||||
):
|
||||
dep_id = deployment.id
|
||||
await db.delete(provider_account)
|
||||
await db.commit()
|
||||
|
||||
stmt = select(Deployment).where(Deployment.id == dep_id)
|
||||
assert (await db.exec(stmt)).first() is None
|
||||
|
||||
async def test_relationships_load(self, db: AsyncSession, deployment: Deployment):
    """The user, folder, and provider-account relationships resolve on refresh."""
    # Explicitly load the lazy relationship attributes in one refresh.
    await db.refresh(deployment, attribute_names=["user", "folder", "deployment_provider_account"])
    assert deployment.user.username == "testuser"
    assert deployment.folder.name == "test-project"
    assert deployment.deployment_provider_account.provider_key == "test-provider"
||||
async def test_fk_rejects_nonexistent_folder(self, db: AsyncSession, user: User, provider_account):
    """Inserting a deployment with a project_id that matches no folder fails the FK check."""
    d = Deployment(
        user_id=user.id,
        project_id=uuid4(),  # random UUID -> no such folder exists
        deployment_provider_account_id=provider_account.id,
        resource_key="rk-orphan",
        name="orphan",
    )
    db.add(d)
    with pytest.raises(IntegrityError):
        await db.commit()
||||
# ===========================================================================
# DeploymentProviderAccount — CRUD integration
# ===========================================================================


@pytest.mark.asyncio
class TestProviderAccountCRUD:
    """Integration tests for the DeploymentProviderAccount CRUD helpers.

    Encryption is patched out (`_ENCRYPT_TARGET`) so tests control the stored
    api_key value without requiring real encryption configuration.
    """

    async def test_create_and_get(self, db: AsyncSession, user: User):
        """create_provider_account stores the encrypted key; fetch by id round-trips."""
        with patch(_ENCRYPT_TARGET) as mock_auth:
            mock_auth.encrypt_api_key.return_value = "enc-token"
            acct = await create_provider_account(
                db,
                user_id=user.id,
                provider_tenant_id="t1",
                provider_key="watsonx",
                provider_url="https://api.example.com",
                api_key="raw-key",  # pragma: allowlist secret
            )
            await db.commit()

        assert acct.id is not None
        # The persisted api_key is the encrypted value, never the raw input.
        assert acct.api_key == "enc-token"  # pragma: allowlist secret

        fetched = await get_provider_account_by_id(db, provider_id=acct.id, user_id=user.id)
        assert fetched is not None
        assert fetched.provider_key == "watsonx"

    async def test_list(self, db: AsyncSession, user: User):
        """list_provider_accounts returns every account owned by the user."""
        with patch(_ENCRYPT_TARGET) as mock_auth:
            mock_auth.encrypt_api_key.return_value = "enc"
            for i in range(3):
                await create_provider_account(
                    db,
                    user_id=user.id,
                    provider_tenant_id=f"t-{i}",
                    provider_key="k",
                    provider_url=f"https://p{i}.example.com",
                    api_key="key",  # pragma: allowlist secret
                )
            await db.commit()

        accounts = await list_provider_accounts(db, user_id=user.id)
        assert len(accounts) == 3

    async def test_update(self, db: AsyncSession, user: User):
        """update_provider_account changes the requested fields in place."""
        with patch(_ENCRYPT_TARGET) as mock_auth:
            mock_auth.encrypt_api_key.return_value = "enc"
            acct = await create_provider_account(
                db,
                user_id=user.id,
                provider_tenant_id=None,
                provider_key="k1",
                provider_url="https://p.example.com",
                api_key="key",  # pragma: allowlist secret
            )
            await db.commit()

            updated = await update_provider_account(
                db,
                provider_account=acct,
                provider_key="k2",
                provider_tenant_id="new-tenant",
            )
            await db.commit()

        assert updated.provider_key == "k2"
        assert updated.provider_tenant_id == "new-tenant"

    async def test_delete(self, db: AsyncSession, user: User):
        """delete_provider_account removes the row; subsequent lookup returns None."""
        with patch(_ENCRYPT_TARGET) as mock_auth:
            mock_auth.encrypt_api_key.return_value = "enc"
            acct = await create_provider_account(
                db,
                user_id=user.id,
                provider_tenant_id=None,
                provider_key="k",
                provider_url="https://p.example.com",
                api_key="key",  # pragma: allowlist secret
            )
            await db.commit()

        # Capture the id before deletion so we can assert on the lookup after.
        acct_id = acct.id
        assert acct_id is not None
        await delete_provider_account(db, provider_account=acct)
        await db.commit()

        assert await get_provider_account_by_id(db, provider_id=acct_id, user_id=user.id) is None

    async def test_create_duplicate_raises(self, db: AsyncSession, user: User):
        """Creating a second account that violates the uniqueness constraint raises ValueError."""
        with patch(_ENCRYPT_TARGET) as mock_auth:
            mock_auth.encrypt_api_key.return_value = "enc"
            await create_provider_account(
                db,
                user_id=user.id,
                provider_tenant_id="t1",
                provider_key="k",
                provider_url="https://p.example.com",
                api_key="key",  # pragma: allowlist secret
            )
            await db.commit()

            # Same tenant + url for the same user conflicts even with a
            # different provider_key.
            with pytest.raises(ValueError, match="Provider account already exists"):
                await create_provider_account(
                    db,
                    user_id=user.id,
                    provider_tenant_id="t1",
                    provider_key="other",
                    provider_url="https://p.example.com",
                    api_key="key2",  # pragma: allowlist secret
                )
||||
# ===========================================================================
# Deployment — CRUD integration
# ===========================================================================


@pytest.mark.asyncio
class TestDeploymentCRUD:
    """Integration tests for the Deployment CRUD helpers (create/get/list/update/delete)."""

    async def test_create_and_get(
        self,
        db: AsyncSession,
        user: User,
        folder: Folder,
        provider_account: DeploymentProviderAccount,
    ):
        """create_deployment persists a row that get_deployment can fetch by id."""
        assert folder.id is not None
        assert provider_account.id is not None

        dep = await create_deployment(
            db,
            user_id=user.id,
            project_id=folder.id,
            deployment_provider_account_id=provider_account.id,
            resource_key="rk-crud",
            name="crud-deploy",
        )
        await db.commit()

        assert dep.id is not None
        fetched = await get_deployment(db, user_id=user.id, deployment_id=dep.id)
        assert fetched is not None
        assert fetched.name == "crud-deploy"

    async def test_get_by_resource_key(
        self,
        db: AsyncSession,
        user: User,
        folder: Folder,
        provider_account: DeploymentProviderAccount,
    ):
        """get_deployment_by_resource_key finds a deployment by (account, resource_key)."""
        assert folder.id is not None
        assert provider_account.id is not None

        await create_deployment(
            db,
            user_id=user.id,
            project_id=folder.id,
            deployment_provider_account_id=provider_account.id,
            resource_key="rk-lookup",
            name="lookup-deploy",
        )
        await db.commit()

        found = await get_deployment_by_resource_key(
            db,
            user_id=user.id,
            deployment_provider_account_id=provider_account.id,
            resource_key="rk-lookup",
        )
        assert found is not None
        assert found.name == "lookup-deploy"

    async def test_list_page_and_count(
        self,
        db: AsyncSession,
        user: User,
        folder: Folder,
        provider_account: DeploymentProviderAccount,
    ):
        """count/list helpers agree: 5 rows total, paged as 3 + 2."""
        assert folder.id is not None
        assert provider_account.id is not None

        for i in range(5):
            await create_deployment(
                db,
                user_id=user.id,
                project_id=folder.id,
                deployment_provider_account_id=provider_account.id,
                resource_key=f"rk-{i}",
                name=f"deploy-{i}",
            )
        await db.commit()

        total = await count_deployments_by_provider(
            db, user_id=user.id, deployment_provider_account_id=provider_account.id
        )
        assert total == 5

        # First page: 3 of 5.
        page = await list_deployments_page(
            db,
            user_id=user.id,
            deployment_provider_account_id=provider_account.id,
            offset=0,
            limit=3,
        )
        assert len(page) == 3

        # Second page: the remaining 2.
        page2 = await list_deployments_page(
            db,
            user_id=user.id,
            deployment_provider_account_id=provider_account.id,
            offset=3,
            limit=3,
        )
        assert len(page2) == 2

    async def test_update(
        self,
        db: AsyncSession,
        user: User,
        folder: Folder,
        provider_account: DeploymentProviderAccount,
    ):
        """update_deployment renames in place and the change is persisted."""
        assert folder.id is not None
        assert provider_account.id is not None

        dep = await create_deployment(
            db,
            user_id=user.id,
            project_id=folder.id,
            deployment_provider_account_id=provider_account.id,
            resource_key="rk-upd",
            name="original",
        )
        await db.commit()

        updated = await update_deployment(db, deployment=dep, name="renamed")
        await db.commit()

        assert updated.name == "renamed"

        # Re-fetch to confirm the rename survived the commit.
        assert dep.id is not None
        fetched = await get_deployment(db, user_id=user.id, deployment_id=dep.id)
        assert fetched is not None
        assert fetched.name == "renamed"

    async def test_delete_by_id(
        self,
        db: AsyncSession,
        user: User,
        folder: Folder,
        provider_account: DeploymentProviderAccount,
    ):
        """delete_deployment_by_id reports one deleted row and the row is gone."""
        assert folder.id is not None
        assert provider_account.id is not None

        dep = await create_deployment(
            db,
            user_id=user.id,
            project_id=folder.id,
            deployment_provider_account_id=provider_account.id,
            resource_key="rk-del",
            name="to-delete",
        )
        await db.commit()

        assert dep.id is not None
        count = await delete_deployment_by_id(db, user_id=user.id, deployment_id=dep.id)
        await db.commit()
        assert count == 1

        assert await get_deployment(db, user_id=user.id, deployment_id=dep.id) is None

    async def test_delete_by_resource_key(
        self,
        db: AsyncSession,
        user: User,
        folder: Folder,
        provider_account: DeploymentProviderAccount,
    ):
        """delete_deployment_by_resource_key reports one deleted row."""
        assert folder.id is not None
        assert provider_account.id is not None

        await create_deployment(
            db,
            user_id=user.id,
            project_id=folder.id,
            deployment_provider_account_id=provider_account.id,
            resource_key="rk-delrk",
            name="to-delete-rk",
        )
        await db.commit()

        count = await delete_deployment_by_resource_key(
            db,
            user_id=user.id,
            deployment_provider_account_id=provider_account.id,
            resource_key="rk-delrk",
        )
        await db.commit()
        assert count == 1

    async def test_create_duplicate_name_raises(
        self,
        db: AsyncSession,
        user: User,
        folder: Folder,
        provider_account: DeploymentProviderAccount,
    ):
        """Two deployments with the same name under one provider account conflict."""
        assert folder.id is not None
        assert provider_account.id is not None

        await create_deployment(
            db,
            user_id=user.id,
            project_id=folder.id,
            deployment_provider_account_id=provider_account.id,
            resource_key="rk-a",
            name="same-name",
        )
        await db.commit()

        with pytest.raises(ValueError, match="conflicts with an existing record"):
            await create_deployment(
                db,
                user_id=user.id,
                project_id=folder.id,
                deployment_provider_account_id=provider_account.id,
                resource_key="rk-b",
                name="same-name",
            )

    async def test_create_duplicate_resource_key_raises(
        self,
        db: AsyncSession,
        user: User,
        folder: Folder,
        provider_account: DeploymentProviderAccount,
    ):
        """Two deployments with the same resource_key under one provider account conflict."""
        assert folder.id is not None
        assert provider_account.id is not None

        await create_deployment(
            db,
            user_id=user.id,
            project_id=folder.id,
            deployment_provider_account_id=provider_account.id,
            resource_key="rk-dup",
            name="name-a",
        )
        await db.commit()

        with pytest.raises(ValueError, match="conflicts with an existing record"):
            await create_deployment(
                db,
                user_id=user.id,
                project_id=folder.id,
                deployment_provider_account_id=provider_account.id,
                resource_key="rk-dup",
                name="name-b",
            )

    async def test_user_scoping(
        self,
        db: AsyncSession,
        user: User,
        folder: Folder,
        provider_account: DeploymentProviderAccount,
    ):
        """Deployments are scoped to user_id — another user cannot see them."""
        assert folder.id is not None
        assert provider_account.id is not None

        dep = await create_deployment(
            db,
            user_id=user.id,
            project_id=folder.id,
            deployment_provider_account_id=provider_account.id,
            resource_key="rk-scoped",
            name="scoped",
        )
        await db.commit()

        other_user = User(username="other", password=_TEST_PASSWORD, is_active=True)
        db.add(other_user)
        await db.commit()
        await db.refresh(other_user)

        assert dep.id is not None
        # Lookup with a different user_id must not leak the deployment.
        assert await get_deployment(db, user_id=other_user.id, deployment_id=dep.id) is None

    async def test_cascade_delete_via_provider_crud(
        self,
        db: AsyncSession,
        user: User,
        folder: Folder,
        provider_account: DeploymentProviderAccount,
    ):
        """Deleting a provider account via CRUD cascades to its deployments."""
        assert folder.id is not None
        assert provider_account.id is not None

        dep = await create_deployment(
            db,
            user_id=user.id,
            project_id=folder.id,
            deployment_provider_account_id=provider_account.id,
            resource_key="rk-cascade",
            name="cascade-test",
        )
        await db.commit()
        dep_id = dep.id

        await delete_provider_account(db, provider_account=provider_account)
        await db.commit()

        stmt = select(Deployment).where(Deployment.id == dep_id)
        assert (await db.exec(stmt)).first() is None
@@ -0,0 +1,101 @@
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
import pytest
|
||||
from langflow.services.database.models.deployment.model import Deployment, DeploymentCreate, DeploymentRead
|
||||
|
||||
|
||||
class TestDeploymentValidation:
    """Unit tests for the Deployment model's ``validate_non_empty`` field validator."""

    def _make_info(self, field_name: str) -> MagicMock:
        # Minimal stand-in for pydantic's ValidationInfo: only field_name is read.
        fake_info = MagicMock()
        fake_info.field_name = field_name
        return fake_info

    def test_rejects_empty_name(self):
        info = self._make_info("name")
        with pytest.raises(ValueError, match="name must not be empty"):
            Deployment.validate_non_empty("", info)

    def test_rejects_whitespace_name(self):
        info = self._make_info("name")
        with pytest.raises(ValueError, match="name must not be empty"):
            Deployment.validate_non_empty(" ", info)

    def test_rejects_empty_resource_key(self):
        info = self._make_info("resource_key")
        with pytest.raises(ValueError, match="resource_key must not be empty"):
            Deployment.validate_non_empty("", info)

    def test_rejects_whitespace_resource_key(self):
        info = self._make_info("resource_key")
        with pytest.raises(ValueError, match="resource_key must not be empty"):
            Deployment.validate_non_empty(" ", info)

    def test_strips_whitespace_from_name(self):
        # Surrounding whitespace is trimmed rather than rejected.
        assert Deployment.validate_non_empty(" hello ", self._make_info("name")) == "hello"

    def test_strips_whitespace_from_resource_key(self):
        assert Deployment.validate_non_empty(" rk-1 ", self._make_info("resource_key")) == "rk-1"
|
||||
|
||||
class TestDeploymentRead:
    """Tests for the DeploymentRead schema surface."""

    def test_has_expected_fields(self):
        # The read schema must expose exactly these fields — no more, no less.
        actual_fields = set(DeploymentRead.model_fields.keys())
        assert actual_fields == {
            "id",
            "resource_key",
            "user_id",
            "project_id",
            "deployment_provider_account_id",
            "name",
            "created_at",
            "updated_at",
        }
|
||||
|
||||
class TestDeploymentCreate:
    """Tests for the DeploymentCreate schema and its field validation."""

    def test_rejects_empty_name(self):
        from uuid import uuid4

        payload = {
            "resource_key": "rk-1",
            "deployment_provider_account_id": uuid4(),
            "project_id": uuid4(),
            "name": "",
        }
        with pytest.raises(ValueError, match="name must not be empty"):
            DeploymentCreate(**payload)

    def test_rejects_empty_resource_key(self):
        from uuid import uuid4

        payload = {
            "resource_key": " ",
            "deployment_provider_account_id": uuid4(),
            "project_id": uuid4(),
            "name": "my-deploy",
        }
        with pytest.raises(ValueError, match="resource_key must not be empty"):
            DeploymentCreate(**payload)

    def test_valid_create(self):
        from uuid import uuid4

        created = DeploymentCreate(
            resource_key="rk-1",
            deployment_provider_account_id=uuid4(),
            project_id=uuid4(),
            name="my-deploy",
        )
        # Clean inputs pass through unchanged.
        assert created.name == "my-deploy"
        assert created.resource_key == "rk-1"

    def test_has_expected_fields(self):
        # The create schema takes exactly these inputs (no id/timestamps).
        actual_fields = set(DeploymentCreate.model_fields.keys())
        assert actual_fields == {
            "resource_key",
            "deployment_provider_account_id",
            "project_id",
            "name",
        }
||||
@@ -0,0 +1,486 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from unittest.mock import AsyncMock, MagicMock, patch
|
||||
from uuid import uuid4
|
||||
|
||||
import pytest
|
||||
from cryptography.fernet import InvalidToken
|
||||
from langflow.services.database.models.deployment_provider_account.crud import (
|
||||
create_provider_account,
|
||||
delete_provider_account,
|
||||
get_provider_account_by_id,
|
||||
list_provider_accounts,
|
||||
update_provider_account,
|
||||
)
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
|
||||
MODEL_CLASS = "langflow.services.database.models.deployment_provider_account.crud.DeploymentProviderAccount"
|
||||
CRUD_AUTH = "langflow.services.database.models.deployment_provider_account.crud.auth_utils"
|
||||
CRUD_LOGGER = "langflow.services.database.models.deployment_provider_account.crud.logger"
|
||||
|
||||
|
||||
def _make_db() -> AsyncMock:
|
||||
"""Create a mock AsyncSession with common async methods."""
|
||||
db = AsyncMock()
|
||||
db.add = MagicMock()
|
||||
return db
|
||||
|
||||
|
||||
def _make_provider_account(**overrides) -> MagicMock:
|
||||
"""Create a mock provider account with sensible defaults for testing."""
|
||||
defaults = {
|
||||
"id": uuid4(),
|
||||
"user_id": uuid4(),
|
||||
"provider_tenant_id": "tenant-1",
|
||||
"provider_key": "watsonx",
|
||||
"provider_url": "https://example.com",
|
||||
"api_key": "encrypted-key", # pragma: allowlist secret
|
||||
}
|
||||
defaults.update(overrides)
|
||||
mock = MagicMock()
|
||||
for k, v in defaults.items():
|
||||
setattr(mock, k, v)
|
||||
return mock
|
||||
|
||||
|
||||
# --- get_provider_account_by_id ---


@pytest.mark.asyncio
async def test_get_provider_account_by_id_found():
    """Returns the first row from the executed statement when one exists."""
    db = _make_db()
    mock_acct = MagicMock()
    mock_result = MagicMock()
    mock_result.first.return_value = mock_acct
    db.exec.return_value = mock_result

    result = await get_provider_account_by_id(db, provider_id=uuid4(), user_id=uuid4())

    assert result is mock_acct


@pytest.mark.asyncio
async def test_get_provider_account_by_id_not_found():
    """Returns None when the query yields no row."""
    db = _make_db()
    mock_result = MagicMock()
    mock_result.first.return_value = None
    db.exec.return_value = mock_result

    result = await get_provider_account_by_id(db, provider_id=uuid4(), user_id=uuid4())

    assert result is None


@pytest.mark.asyncio
async def test_get_provider_account_by_id_invalid_uuid_raises():
    """A non-UUID provider_id fails fast with a contextual ValueError."""
    db = _make_db()

    with pytest.raises(ValueError, match="provider_id is not a valid UUID"):
        await get_provider_account_by_id(db, provider_id="not-a-uuid", user_id=uuid4())
||||
|
||||
# --- list_provider_accounts ---


@pytest.mark.asyncio
async def test_list_provider_accounts_returns_list():
    """Returns the query results materialized as a plain list."""
    db = _make_db()
    mock_items = [MagicMock(), MagicMock()]
    mock_result = MagicMock()
    mock_result.all.return_value = mock_items
    db.exec.return_value = mock_result

    result = await list_provider_accounts(db, user_id=uuid4())

    assert result == mock_items
    assert isinstance(result, list)


@pytest.mark.asyncio
async def test_list_provider_accounts_empty():
    """Returns an empty list (not None) when the user has no accounts."""
    db = _make_db()
    mock_result = MagicMock()
    mock_result.all.return_value = []
    db.exec.return_value = mock_result

    result = await list_provider_accounts(db, user_id=uuid4())

    assert result == []
||||
|
||||
# --- create_provider_account ---


@pytest.mark.asyncio
async def test_create_provider_account_success():
    """Happy path: encrypts the api_key, adds, flushes, refreshes, returns the row."""
    db = _make_db()

    with (
        patch(CRUD_AUTH) as mock_auth,
        patch(MODEL_CLASS) as mock_cls,
    ):
        mock_auth.encrypt_api_key.return_value = "encrypted"
        mock_obj = MagicMock()
        mock_obj.provider_key = "watsonx"
        mock_obj.api_key = "encrypted"  # pragma: allowlist secret
        mock_cls.return_value = mock_obj

        result = await create_provider_account(
            db,
            user_id=uuid4(),
            provider_tenant_id="tenant-1",
            provider_key="watsonx",
            provider_url="https://example.com",
            api_key="test-token",  # pragma: allowlist secret
        )

        # Standard add/flush/refresh persistence sequence.
        db.add.assert_called_once_with(mock_obj)
        db.flush.assert_awaited_once()
        db.refresh.assert_awaited_once_with(mock_obj)
        assert result is mock_obj
        assert result.provider_key == "watsonx"
        assert result.api_key == "encrypted"  # pragma: allowlist secret
        # The raw token is what gets encrypted, exactly once.
        mock_auth.encrypt_api_key.assert_called_once_with("test-token")


@pytest.mark.asyncio
async def test_create_provider_account_strips_whitespace():
    """All string inputs are stripped before being stored or encrypted."""
    db = _make_db()

    with (
        patch(CRUD_AUTH) as mock_auth,
        patch(MODEL_CLASS) as mock_cls,
    ):
        mock_auth.encrypt_api_key.return_value = "encrypted"
        mock_cls.return_value = MagicMock()

        await create_provider_account(
            db,
            user_id=uuid4(),
            provider_tenant_id=" tenant-1 ",
            provider_key=" watsonx ",
            provider_url=" https://example.com ",
            api_key=" test-token ",  # pragma: allowlist secret
        )

        # Inspect the kwargs the model constructor received.
        call_kwargs = mock_cls.call_args.kwargs
        assert call_kwargs["provider_tenant_id"] == "tenant-1"
        assert call_kwargs["provider_key"] == "watsonx"
        assert call_kwargs["provider_url"] == "https://example.com"
        mock_auth.encrypt_api_key.assert_called_once_with("test-token")


@pytest.mark.asyncio
async def test_create_provider_account_none_tenant_id():
    """A None provider_tenant_id passes through as None."""
    db = _make_db()

    with (
        patch(CRUD_AUTH) as mock_auth,
        patch(MODEL_CLASS) as mock_cls,
    ):
        mock_auth.encrypt_api_key.return_value = "encrypted"
        mock_cls.return_value = MagicMock()

        await create_provider_account(
            db,
            user_id=uuid4(),
            provider_tenant_id=None,
            provider_key="watsonx",
            provider_url="https://example.com",
            api_key="test-token",  # pragma: allowlist secret
        )

        call_kwargs = mock_cls.call_args.kwargs
        assert call_kwargs["provider_tenant_id"] is None


@pytest.mark.asyncio
async def test_create_provider_account_blank_tenant_id_normalizes_to_none():
    """A whitespace-only provider_tenant_id is normalized to None, not stored blank."""
    db = _make_db()

    with (
        patch(CRUD_AUTH) as mock_auth,
        patch(MODEL_CLASS) as mock_cls,
    ):
        mock_auth.encrypt_api_key.return_value = "encrypted"
        mock_cls.return_value = MagicMock()

        await create_provider_account(
            db,
            user_id=uuid4(),
            provider_tenant_id=" ",
            provider_key="watsonx",
            provider_url="https://example.com",
            api_key="test-token",  # pragma: allowlist secret
        )

        call_kwargs = mock_cls.call_args.kwargs
        assert call_kwargs["provider_tenant_id"] is None


@pytest.mark.asyncio
async def test_create_provider_account_empty_provider_key_raises():
    """A blank provider_key is rejected before any DB work."""
    db = _make_db()

    with pytest.raises(ValueError, match="provider_key must not be empty"):
        await create_provider_account(
            db,
            user_id=uuid4(),
            provider_tenant_id=None,
            provider_key=" ",
            provider_url="https://example.com",
            api_key="test-token",  # pragma: allowlist secret
        )


@pytest.mark.asyncio
async def test_create_provider_account_empty_provider_url_raises():
    """An empty provider_url is rejected before any DB work."""
    db = _make_db()

    with pytest.raises(ValueError, match="provider_url must not be empty"):
        await create_provider_account(
            db,
            user_id=uuid4(),
            provider_tenant_id=None,
            provider_key="watsonx",
            provider_url="",
            api_key="test-token",  # pragma: allowlist secret
        )


@pytest.mark.asyncio
async def test_create_provider_account_empty_api_key_raises():
    """A whitespace-only api_key is rejected before encryption is attempted."""
    db = _make_db()

    with pytest.raises(ValueError, match="api_key must not be empty"):
        await create_provider_account(
            db,
            user_id=uuid4(),
            provider_tenant_id=None,
            provider_key="watsonx",
            provider_url="https://example.com",
            api_key=" ",  # pragma: allowlist secret
        )


@pytest.mark.asyncio
async def test_create_provider_account_integrity_error_raises_value_error():
    """An IntegrityError on flush is surfaced as ValueError and rolls back."""
    db = _make_db()
    db.flush.side_effect = IntegrityError("dup", params=None, orig=Exception())

    with (
        patch(CRUD_AUTH) as mock_auth,
        patch(MODEL_CLASS),
        patch(CRUD_LOGGER) as mock_logger,
    ):
        mock_auth.encrypt_api_key.return_value = "encrypted"
        mock_logger.aerror = AsyncMock()
        with pytest.raises(ValueError, match="Provider account already exists"):
            await create_provider_account(
                db,
                user_id=uuid4(),
                provider_tenant_id=None,
                provider_key="watsonx",
                provider_url="https://example.com",
                api_key="test-token",  # pragma: allowlist secret
            )

        db.rollback.assert_awaited_once()


@pytest.mark.asyncio
async def test_create_provider_account_encryption_value_error():
    """A ValueError from the encryptor becomes a RuntimeError with clear context."""
    db = _make_db()

    with (
        patch(CRUD_AUTH) as mock_auth,
        patch(CRUD_LOGGER) as mock_logger,
    ):
        mock_auth.encrypt_api_key.side_effect = ValueError("bad key")
        mock_logger.aerror = AsyncMock()
        with pytest.raises(RuntimeError, match="Failed to encrypt API key"):
            await create_provider_account(
                db,
                user_id=uuid4(),
                provider_tenant_id=None,
                provider_key="watsonx",
                provider_url="https://example.com",
                api_key="test-token",  # pragma: allowlist secret
            )


@pytest.mark.asyncio
async def test_create_provider_account_encryption_invalid_token():
    """A cryptography InvalidToken from the encryptor also becomes a RuntimeError."""
    db = _make_db()

    with (
        patch(CRUD_AUTH) as mock_auth,
        patch(CRUD_LOGGER) as mock_logger,
    ):
        mock_auth.encrypt_api_key.side_effect = InvalidToken()
        mock_logger.aerror = AsyncMock()
        with pytest.raises(RuntimeError, match="Failed to encrypt API key"):
            await create_provider_account(
                db,
                user_id=uuid4(),
                provider_tenant_id=None,
                provider_key="watsonx",
                provider_url="https://example.com",
                api_key="test-token",  # pragma: allowlist secret
            )
||||
|
||||
# --- update_provider_account ---


@pytest.mark.asyncio
async def test_update_provider_account_success():
    """All updatable fields change in place; api_key is re-encrypted."""
    db = _make_db()
    acct = _make_provider_account()

    with patch(CRUD_AUTH) as mock_auth:
        mock_auth.encrypt_api_key.return_value = "new-encrypted"
        result = await update_provider_account(
            db,
            provider_account=acct,
            provider_tenant_id="new-tenant",
            provider_key="new-key",
            provider_url="https://new.example.com",
            api_key="updated-token",  # pragma: allowlist secret
        )

    assert result.provider_tenant_id == "new-tenant"
    assert result.provider_key == "new-key"
    assert result.provider_url == "https://new.example.com"
    # Stored value is the freshly encrypted token.
    assert result.api_key == "new-encrypted"  # pragma: allowlist secret
    db.flush.assert_awaited_once()
    db.refresh.assert_awaited_once()


@pytest.mark.asyncio
async def test_update_provider_account_no_changes():
    """Calling update with no fields leaves the account untouched but still flushes."""
    db = _make_db()
    acct = _make_provider_account()
    original_key = acct.provider_key

    result = await update_provider_account(db, provider_account=acct)

    assert result.provider_key == original_key
    db.flush.assert_awaited_once()


@pytest.mark.asyncio
async def test_update_provider_account_set_tenant_to_none():
    """Explicitly passing provider_tenant_id=None clears the tenant."""
    db = _make_db()
    acct = _make_provider_account(provider_tenant_id="old-tenant")

    await update_provider_account(db, provider_account=acct, provider_tenant_id=None)

    assert acct.provider_tenant_id is None


@pytest.mark.asyncio
async def test_update_provider_account_empty_tenant_normalizes_to_none():
    """A whitespace-only tenant id is normalized to None on update too."""
    db = _make_db()
    acct = _make_provider_account(provider_tenant_id="old-tenant")

    await update_provider_account(db, provider_account=acct, provider_tenant_id=" ")

    assert acct.provider_tenant_id is None


@pytest.mark.asyncio
async def test_update_provider_account_empty_provider_key_raises():
    """An empty provider_key on update is rejected."""
    db = _make_db()
    acct = _make_provider_account()

    with pytest.raises(ValueError, match="provider_key must not be empty"):
        await update_provider_account(db, provider_account=acct, provider_key="")


@pytest.mark.asyncio
async def test_update_provider_account_whitespace_provider_key_raises():
    """A whitespace-only provider_key on update is rejected."""
    db = _make_db()
    acct = _make_provider_account()

    with pytest.raises(ValueError, match="provider_key must not be empty"):
        await update_provider_account(db, provider_account=acct, provider_key=" ")


@pytest.mark.asyncio
async def test_update_provider_account_empty_provider_url_raises():
    """An empty provider_url on update is rejected."""
    db = _make_db()
    acct = _make_provider_account()

    with pytest.raises(ValueError, match="provider_url must not be empty"):
        await update_provider_account(db, provider_account=acct, provider_url="")


@pytest.mark.asyncio
async def test_update_provider_account_empty_api_key_raises():
    """A whitespace-only api_key on update is rejected before encryption."""
    db = _make_db()
    acct = _make_provider_account()

    with pytest.raises(ValueError, match="api_key must not be empty"):
        await update_provider_account(db, provider_account=acct, api_key=" ")  # pragma: allowlist secret


@pytest.mark.asyncio
async def test_update_provider_account_integrity_error_raises_value_error():
    """An IntegrityError on flush surfaces as a conflict ValueError and rolls back."""
    db = _make_db()
    db.flush.side_effect = IntegrityError("dup", params=None, orig=Exception())
    acct = _make_provider_account()

    with patch(CRUD_LOGGER) as mock_logger:
        mock_logger.aerror = AsyncMock()
        with pytest.raises(ValueError, match="conflicts with an existing record"):
            await update_provider_account(db, provider_account=acct, provider_tenant_id="new-tenant")

        db.rollback.assert_awaited_once()


@pytest.mark.asyncio
async def test_update_provider_account_encryption_error():
    """An encryption failure during update surfaces as RuntimeError."""
    db = _make_db()
    acct = _make_provider_account()

    with (
        patch(CRUD_AUTH) as mock_auth,
        patch(CRUD_LOGGER) as mock_logger,
    ):
        mock_auth.encrypt_api_key.side_effect = ValueError("bad key")
        mock_logger.aerror = AsyncMock()
        with pytest.raises(RuntimeError, match="Failed to encrypt API key"):
            await update_provider_account(
                db,
                provider_account=acct,
                api_key="updated-token",  # pragma: allowlist secret
            )
||||
|
||||
# --- delete_provider_account ---
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_delete_provider_account_success():
    """Deleting a provider account issues a session delete followed by a flush."""
    session = _make_db()
    account = _make_provider_account()

    await delete_provider_account(session, provider_account=account)

    session.delete.assert_awaited_once_with(account)
    session.flush.assert_awaited_once()
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_delete_provider_account_integrity_error_raises_value_error():
    """An IntegrityError during delete is logged, rolled back, and re-raised as ValueError."""
    session = _make_db()
    session.flush.side_effect = IntegrityError("fk", params=None, orig=Exception())
    account = _make_provider_account()

    with patch(CRUD_LOGGER) as logger_mock:
        logger_mock.aerror = AsyncMock()
        with pytest.raises(ValueError, match="Failed to delete provider account"):
            await delete_provider_account(session, provider_account=account)

    session.rollback.assert_awaited_once()
|
||||
@@ -0,0 +1,169 @@
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
import pytest
|
||||
from langflow.services.database.models.deployment_provider_account.model import (
|
||||
DeploymentProviderAccount,
|
||||
DeploymentProviderAccountCreate,
|
||||
DeploymentProviderAccountRead,
|
||||
DeploymentProviderAccountUpdate,
|
||||
)
|
||||
|
||||
|
||||
class TestDeploymentProviderAccountValidation:
    """Tests for DeploymentProviderAccount table model validators."""

    def _make_info(self, field_name: str) -> MagicMock:
        # Minimal stand-in for pydantic's ValidationInfo: only field_name is read.
        return MagicMock(field_name=field_name)

    def test_rejects_empty_provider_key(self):
        info = self._make_info("provider_key")
        with pytest.raises(ValueError, match="provider_key must not be empty"):
            DeploymentProviderAccount.validate_non_empty("", info)

    def test_rejects_whitespace_provider_key(self):
        info = self._make_info("provider_key")
        with pytest.raises(ValueError, match="provider_key must not be empty"):
            DeploymentProviderAccount.validate_non_empty(" ", info)

    def test_rejects_empty_provider_url(self):
        info = self._make_info("provider_url")
        with pytest.raises(ValueError, match="provider_url must not be empty"):
            DeploymentProviderAccount.validate_non_empty("", info)

    def test_rejects_whitespace_provider_url(self):
        info = self._make_info("provider_url")
        with pytest.raises(ValueError, match="provider_url must not be empty"):
            DeploymentProviderAccount.validate_non_empty(" ", info)

    def test_rejects_empty_api_key(self):
        info = self._make_info("api_key")
        with pytest.raises(ValueError, match="api_key must not be empty"):
            DeploymentProviderAccount.validate_non_empty("", info)

    def test_rejects_whitespace_api_key(self):
        info = self._make_info("api_key")
        with pytest.raises(ValueError, match="api_key must not be empty"):
            DeploymentProviderAccount.validate_non_empty(" ", info)

    def test_strips_whitespace(self):
        info = self._make_info("provider_key")
        assert DeploymentProviderAccount.validate_non_empty(" watsonx ", info) == "watsonx"

    def test_normalizes_blank_tenant_id_to_none(self):
        assert DeploymentProviderAccount.normalize_tenant_id(" ") is None

    def test_normalizes_empty_tenant_id_to_none(self):
        assert DeploymentProviderAccount.normalize_tenant_id("") is None

    def test_strips_tenant_id(self):
        assert DeploymentProviderAccount.normalize_tenant_id(" tenant-1 ") == "tenant-1"

    def test_none_tenant_id_passthrough(self):
        assert DeploymentProviderAccount.normalize_tenant_id(None) is None
|
||||
|
||||
|
||||
class TestDeploymentProviderAccountRead:
    """Tests for DeploymentProviderAccountRead schema."""

    def test_excludes_api_key(self):
        # The read schema must never expose the stored credential.
        assert "api_key" not in DeploymentProviderAccountRead.model_fields

    def test_has_expected_fields(self):
        actual = set(DeploymentProviderAccountRead.model_fields)
        assert actual == {
            "id",
            "user_id",
            "provider_tenant_id",
            "provider_key",
            "provider_url",
            "created_at",
            "updated_at",
        }
|
||||
|
||||
|
||||
class TestDeploymentProviderAccountCreate:
    """Tests for DeploymentProviderAccountCreate schema validators."""

    def _build(self, **overrides):
        # Construct the schema from a valid baseline payload, with per-test overrides.
        payload = {
            "provider_key": "watsonx",
            "provider_url": "https://example.com",
            "api_key": "key",  # pragma: allowlist secret
        }
        payload.update(overrides)
        return DeploymentProviderAccountCreate(**payload)

    def test_rejects_empty_provider_key(self):
        with pytest.raises(ValueError, match="provider_key must not be empty"):
            self._build(provider_key="")

    def test_rejects_empty_provider_url(self):
        with pytest.raises(ValueError, match="provider_url must not be empty"):
            self._build(provider_url="")

    def test_rejects_empty_api_key(self):
        with pytest.raises(ValueError, match="api_key must not be empty"):
            self._build(api_key="")

    def test_valid_create(self):
        assert self._build().provider_key == "watsonx"

    def test_blank_tenant_id_normalizes_to_none(self):
        assert self._build(provider_tenant_id=" ").provider_tenant_id is None

    def test_strips_tenant_id(self):
        assert self._build(provider_tenant_id=" tenant-1 ").provider_tenant_id == "tenant-1"
|
||||
|
||||
|
||||
class TestDeploymentProviderAccountUpdate:
    """Tests for DeploymentProviderAccountUpdate schema validators."""

    def test_allows_none_values(self):
        # All fields are optional; an empty update object is valid.
        update = DeploymentProviderAccountUpdate()
        assert update.provider_key is None
        assert update.provider_url is None
        assert update.api_key is None

    def test_rejects_empty_provider_key_when_provided(self):
        with pytest.raises(ValueError, match="provider_key must not be empty"):
            DeploymentProviderAccountUpdate(provider_key="")

    def test_rejects_empty_provider_url_when_provided(self):
        with pytest.raises(ValueError, match="provider_url must not be empty"):
            DeploymentProviderAccountUpdate(provider_url="")

    def test_rejects_empty_api_key_when_provided(self):
        with pytest.raises(ValueError, match="api_key must not be empty"):
            DeploymentProviderAccountUpdate(api_key="")

    def test_rejects_whitespace_provider_key_when_provided(self):
        with pytest.raises(ValueError, match="provider_key must not be empty"):
            DeploymentProviderAccountUpdate(provider_key=" ")

    def test_valid_update(self):
        update = DeploymentProviderAccountUpdate(provider_key="new-key")
        assert update.provider_key == "new-key"

    def test_blank_tenant_id_normalizes_to_none(self):
        update = DeploymentProviderAccountUpdate(provider_tenant_id=" ")
        assert update.provider_tenant_id is None

    def test_strips_tenant_id(self):
        update = DeploymentProviderAccountUpdate(provider_tenant_id=" tenant-1 ")
        assert update.provider_tenant_id == "tenant-1"
|
||||
@@ -0,0 +1,18 @@
|
||||
from langflow.services.database.utils import normalize_string_or_none
|
||||
|
||||
|
||||
class TestNormalizeStringOrNone:
    """Tests for the normalize_string_or_none utility."""

    def test_none_returns_none(self):
        result = normalize_string_or_none(None)
        assert result is None

    def test_empty_returns_none(self):
        result = normalize_string_or_none("")
        assert result is None

    def test_whitespace_returns_none(self):
        result = normalize_string_or_none(" ")
        assert result is None

    def test_strips_and_returns(self):
        result = normalize_string_or_none(" hello ")
        assert result == "hello"

    def test_non_blank_passthrough(self):
        result = normalize_string_or_none("hello")
        assert result == "hello"
|
||||
@@ -0,0 +1,47 @@
|
||||
from uuid import UUID
|
||||
|
||||
import pytest
|
||||
from langflow.services.database.utils import parse_uuid
|
||||
|
||||
|
||||
class TestParseUuid:
    """Tests for the shared parse_uuid utility."""

    # Canonical UUID literal shared across the string-parsing cases.
    _RAW = "12345678-1234-5678-1234-567812345678"

    def test_passthrough_uuid(self):
        uid = UUID(self._RAW)
        assert parse_uuid(uid) is uid

    def test_valid_string_uuid(self):
        parsed = parse_uuid(self._RAW)
        assert isinstance(parsed, UUID)
        assert str(parsed) == self._RAW

    def test_strips_whitespace(self):
        parsed = parse_uuid(f" {self._RAW} ")
        assert str(parsed) == self._RAW

    def test_empty_string_raises(self):
        with pytest.raises(ValueError, match="must not be empty"):
            parse_uuid("")

    def test_whitespace_only_raises(self):
        with pytest.raises(ValueError, match="must not be empty"):
            parse_uuid(" ")

    def test_invalid_string_raises_with_field_name(self):
        with pytest.raises(ValueError, match="my_field is not a valid UUID"):
            parse_uuid("not-a-uuid", field_name="my_field")

    def test_default_field_name_in_error(self):
        with pytest.raises(ValueError, match="value is not a valid UUID"):
            parse_uuid("not-a-uuid")

    def test_unsupported_type_raises_type_error(self):
        with pytest.raises(TypeError, match="my_field must be a UUID or string, got int"):
            parse_uuid(12345, field_name="my_field")  # type: ignore[arg-type]

    def test_unsupported_type_default_field_name(self):
        with pytest.raises(TypeError, match="value must be a UUID or string, got list"):
            parse_uuid([], field_name="value")  # type: ignore[arg-type]
|
||||
@@ -0,0 +1,35 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
import pytest
|
||||
from langflow.services.database.utils import validate_non_empty_string
|
||||
|
||||
|
||||
class TestValidateNonEmptyString:
    """Tests for the validate_non_empty_string utility."""

    def _make_info(self, field_name: str) -> MagicMock:
        # Minimal stand-in for pydantic's ValidationInfo: only field_name is read.
        return MagicMock(field_name=field_name)

    def test_returns_stripped_value(self):
        info = self._make_info("name")
        assert validate_non_empty_string(" hello ", info) == "hello"

    def test_passthrough_clean_value(self):
        info = self._make_info("name")
        assert validate_non_empty_string("hello", info) == "hello"

    def test_empty_string_raises_with_field_name(self):
        info = self._make_info("name")
        with pytest.raises(ValueError, match="name must not be empty"):
            validate_non_empty_string("", info)

    def test_whitespace_only_raises(self):
        info = self._make_info("provider_url")
        with pytest.raises(ValueError, match="provider_url must not be empty"):
            validate_non_empty_string(" ", info)

    def test_fallback_field_name_when_info_lacks_attribute(self):
        """When info has no field_name attribute, falls back to 'Field'."""
        bare_info = object()  # deliberately lacks field_name
        with pytest.raises(ValueError, match="Field must not be empty"):
            validate_non_empty_string("", bare_info)
|
||||
284
uv.lock
generated
284
uv.lock
generated
@@ -9,6 +9,7 @@ resolution-markers = [
|
||||
"python_full_version == '3.12.*' and sys_platform != 'darwin'",
|
||||
"python_full_version == '3.12.*' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
|
||||
"python_full_version == '3.12.*' and platform_machine != 'x86_64' and sys_platform == 'darwin'",
|
||||
"python_version < '0'",
|
||||
"python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux'",
|
||||
"(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')",
|
||||
"python_full_version == '3.11.*' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
|
||||
@@ -189,7 +190,7 @@ source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "aiohappyeyeballs" },
|
||||
{ name = "aiosignal" },
|
||||
{ name = "async-timeout", version = "4.0.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" },
|
||||
{ name = "async-timeout", marker = "python_full_version < '3.11'" },
|
||||
{ name = "attrs" },
|
||||
{ name = "frozenlist" },
|
||||
{ name = "multidict" },
|
||||
@@ -555,32 +556,11 @@ wheels = [
|
||||
name = "async-timeout"
|
||||
version = "4.0.3"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
resolution-markers = [
|
||||
"python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux'",
|
||||
"(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')",
|
||||
"python_full_version < '3.11' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
|
||||
"python_full_version < '3.11' and platform_machine != 'x86_64' and sys_platform == 'darwin'",
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/87/d6/21b30a550dafea84b1b8eee21b5e23fa16d010ae006011221f33dcd8d7f8/async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f", size = 8345, upload-time = "2023-08-10T16:35:56.907Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/a7/fa/e01228c2938de91d47b307831c62ab9e4001e747789d0b05baf779a6488c/async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028", size = 5721, upload-time = "2023-08-10T16:35:55.203Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "async-timeout"
|
||||
version = "5.0.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
resolution-markers = [
|
||||
"python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux'",
|
||||
"(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')",
|
||||
"python_full_version == '3.11.*' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
|
||||
"python_full_version == '3.11.*' and platform_machine != 'x86_64' and sys_platform == 'darwin'",
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/a5/ae/136395dfbfe00dfc94da3f3e136d0b13f394cba8f4841120e34226265780/async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3", size = 9274, upload-time = "2024-11-06T16:41:39.6Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/fe/ba/e2081de779ca30d473f21f5b30e0e737c438205440784c7dfc81efc2b029/async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c", size = 6233, upload-time = "2024-11-06T16:41:37.9Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "asyncer"
|
||||
version = "0.0.8"
|
||||
@@ -978,12 +958,20 @@ sdist = { url = "https://files.pythonhosted.org/packages/92/88/b8527e1b00c1811db
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/6a/80/ea4ead0c5d52a9828692e7df20f0eafe8d26e671ce4883a0a146bb91049e/caio-0.9.25-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ca6c8ecda611478b6016cb94d23fd3eb7124852b985bdec7ecaad9f3116b9619", size = 36836, upload-time = "2025-12-26T15:22:04.662Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/17/b9/36715c97c873649d1029001578f901b50250916295e3dddf20c865438865/caio-0.9.25-cp310-cp310-manylinux2010_x86_64.manylinux2014_x86_64.manylinux_2_12_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:db9b5681e4af8176159f0d6598e73b2279bb661e718c7ac23342c550bd78c241", size = 79695, upload-time = "2025-12-26T15:22:18.818Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0b/ab/07080ecb1adb55a02cbd8ec0126aa8e43af343ffabb6a71125b42670e9a1/caio-0.9.25-cp310-cp310-manylinux_2_34_aarch64.whl", hash = "sha256:bf61d7d0c4fd10ffdd98ca47f7e8db4d7408e74649ffaf4bef40b029ada3c21b", size = 79457, upload-time = "2026-03-04T22:08:16.024Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/88/95/dd55757bb671eb4c376e006c04e83beb413486821f517792ea603ef216e9/caio-0.9.25-cp310-cp310-manylinux_2_34_x86_64.whl", hash = "sha256:ab52e5b643f8bbd64a0605d9412796cd3464cb8ca88593b13e95a0f0b10508ae", size = 77705, upload-time = "2026-03-04T22:08:17.202Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ec/90/543f556fcfcfa270713eef906b6352ab048e1e557afec12925c991dc93c2/caio-0.9.25-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d6956d9e4a27021c8bd6c9677f3a59eb1d820cc32d0343cea7961a03b1371965", size = 36839, upload-time = "2025-12-26T15:21:40.267Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/51/3b/36f3e8ec38dafe8de4831decd2e44c69303d2a3892d16ceda42afed44e1b/caio-0.9.25-cp311-cp311-manylinux2010_x86_64.manylinux2014_x86_64.manylinux_2_12_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bf84bfa039f25ad91f4f52944452a5f6f405e8afab4d445450978cd6241d1478", size = 80255, upload-time = "2025-12-26T15:22:20.271Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/df/ce/65e64867d928e6aff1b4f0e12dba0ef6d5bf412c240dc1df9d421ac10573/caio-0.9.25-cp311-cp311-manylinux_2_34_aarch64.whl", hash = "sha256:ae3d62587332bce600f861a8de6256b1014d6485cfd25d68c15caf1611dd1f7c", size = 80052, upload-time = "2026-03-04T22:08:20.402Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/46/90/e278863c47e14ec58309aa2e38a45882fbe67b4cc29ec9bc8f65852d3e45/caio-0.9.25-cp311-cp311-manylinux_2_34_x86_64.whl", hash = "sha256:fc220b8533dcf0f238a6b1a4a937f92024c71e7b10b5a2dfc1c73604a25709bc", size = 78273, upload-time = "2026-03-04T22:08:21.368Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d3/25/79c98ebe12df31548ba4eaf44db11b7cad6b3e7b4203718335620939083c/caio-0.9.25-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:fb7ff95af4c31ad3f03179149aab61097a71fd85e05f89b4786de0359dffd044", size = 36983, upload-time = "2025-12-26T15:21:36.075Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a3/2b/21288691f16d479945968a0a4f2856818c1c5be56881d51d4dac9b255d26/caio-0.9.25-cp312-cp312-manylinux2010_x86_64.manylinux2014_x86_64.manylinux_2_12_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:97084e4e30dfa598449d874c4d8e0c8d5ea17d2f752ef5e48e150ff9d240cd64", size = 82012, upload-time = "2025-12-26T15:22:20.983Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/03/c4/8a1b580875303500a9c12b9e0af58cb82e47f5bcf888c2457742a138273c/caio-0.9.25-cp312-cp312-manylinux_2_34_aarch64.whl", hash = "sha256:4fa69eba47e0f041b9d4f336e2ad40740681c43e686b18b191b6c5f4c5544bfb", size = 81502, upload-time = "2026-03-04T22:08:22.381Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d1/1c/0fe770b8ffc8362c48134d1592d653a81a3d8748d764bec33864db36319d/caio-0.9.25-cp312-cp312-manylinux_2_34_x86_64.whl", hash = "sha256:6bebf6f079f1341d19f7386db9b8b1f07e8cc15ae13bfdaff573371ba0575d69", size = 80200, upload-time = "2026-03-04T22:08:23.382Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/31/57/5e6ff127e6f62c9f15d989560435c642144aa4210882f9494204bc892305/caio-0.9.25-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d6c2a3411af97762a2b03840c3cec2f7f728921ff8adda53d7ea2315a8563451", size = 36979, upload-time = "2025-12-26T15:21:35.484Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a3/9f/f21af50e72117eb528c422d4276cbac11fb941b1b812b182e0a9c70d19c5/caio-0.9.25-cp313-cp313-manylinux2010_x86_64.manylinux2014_x86_64.manylinux_2_12_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0998210a4d5cd5cb565b32ccfe4e53d67303f868a76f212e002a8554692870e6", size = 81900, upload-time = "2025-12-26T15:22:21.919Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9c/12/c39ae2a4037cb10ad5eb3578eb4d5f8c1a2575c62bba675f3406b7ef0824/caio-0.9.25-cp313-cp313-manylinux_2_34_aarch64.whl", hash = "sha256:1a177d4777141b96f175fe2c37a3d96dec7911ed9ad5f02bac38aaa1c936611f", size = 81523, upload-time = "2026-03-04T22:08:25.187Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/22/59/f8f2e950eb4f1a5a3883e198dca514b9d475415cb6cd7b78b9213a0dd45a/caio-0.9.25-cp313-cp313-manylinux_2_34_x86_64.whl", hash = "sha256:9ed3cfb28c0e99fec5e208c934e5c157d0866aa9c32aa4dc5e9b6034af6286b7", size = 80243, upload-time = "2026-03-04T22:08:26.449Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/86/93/1f76c8d1bafe3b0614e06b2195784a3765bbf7b0a067661af9e2dd47fc33/caio-0.9.25-py3-none-any.whl", hash = "sha256:06c0bb02d6b929119b1cfbe1ca403c768b2013a369e2db46bfa2a5761cf82e40", size = 19087, upload-time = "2025-12-26T15:22:00.221Z" },
|
||||
]
|
||||
|
||||
@@ -2427,26 +2415,22 @@ name = "easyocr"
|
||||
version = "1.7.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "ninja" },
|
||||
{ name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.12'" },
|
||||
{ name = "numpy", version = "2.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12'" },
|
||||
{ name = "opencv-python-headless", version = "4.11.0.86", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.12'" },
|
||||
{ name = "opencv-python-headless", version = "4.13.0.92", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12'" },
|
||||
{ name = "pillow" },
|
||||
{ name = "pyclipper" },
|
||||
{ name = "python-bidi" },
|
||||
{ name = "pyyaml" },
|
||||
{ name = "scikit-image", version = "0.25.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" },
|
||||
{ name = "scikit-image", version = "0.26.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" },
|
||||
{ name = "scipy", version = "1.15.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" },
|
||||
{ name = "scipy", version = "1.17.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" },
|
||||
{ name = "shapely" },
|
||||
{ name = "torch", version = "2.2.2", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "python_full_version < '3.13' and platform_machine == 'x86_64' and sys_platform == 'darwin'" },
|
||||
{ name = "torch", version = "2.2.2+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "python_full_version >= '3.13' and platform_machine == 'x86_64' and sys_platform == 'darwin'" },
|
||||
{ name = "ninja", marker = "platform_machine != 'x86_64' or sys_platform != 'darwin'" },
|
||||
{ name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.12' and platform_machine != 'x86_64') or (python_full_version < '3.12' and sys_platform != 'darwin')" },
|
||||
{ name = "numpy", version = "2.4.2", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.12' and platform_machine != 'x86_64') or (python_full_version >= '3.12' and sys_platform != 'darwin')" },
|
||||
{ name = "opencv-python-headless", version = "4.11.0.86", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.12' and platform_machine != 'x86_64') or (python_full_version < '3.12' and sys_platform != 'darwin')" },
|
||||
{ name = "opencv-python-headless", version = "4.13.0.92", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.12' and platform_machine != 'x86_64') or (python_full_version >= '3.12' and sys_platform != 'darwin')" },
|
||||
{ name = "pillow", marker = "platform_machine != 'x86_64' or sys_platform != 'darwin'" },
|
||||
{ name = "pyclipper", marker = "platform_machine != 'x86_64' or sys_platform != 'darwin'" },
|
||||
{ name = "python-bidi", marker = "platform_machine != 'x86_64' or sys_platform != 'darwin'" },
|
||||
{ name = "pyyaml", marker = "platform_machine != 'x86_64' or sys_platform != 'darwin'" },
|
||||
{ name = "scikit-image", version = "0.25.2", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.12' and platform_machine != 'x86_64') or (python_full_version < '3.12' and sys_platform != 'darwin')" },
|
||||
{ name = "scikit-image", version = "0.26.0", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.12' and platform_machine != 'x86_64') or (python_full_version >= '3.12' and sys_platform != 'darwin')" },
|
||||
{ name = "scipy", version = "1.15.3", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and platform_machine != 'x86_64') or (python_full_version < '3.11' and sys_platform != 'darwin')" },
|
||||
{ name = "scipy", version = "1.17.1", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and platform_machine != 'x86_64') or (python_full_version >= '3.11' and sys_platform != 'darwin')" },
|
||||
{ name = "shapely", marker = "platform_machine != 'x86_64' or sys_platform != 'darwin'" },
|
||||
{ name = "torch", version = "2.10.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "platform_machine != 'x86_64' and sys_platform == 'darwin'" },
|
||||
{ name = "torch", version = "2.10.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "sys_platform != 'darwin'" },
|
||||
{ name = "torchvision", version = "0.17.2", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "python_full_version < '3.13' and platform_machine == 'x86_64' and sys_platform == 'darwin'" },
|
||||
{ name = "torchvision", version = "0.17.2+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "python_full_version >= '3.13' and platform_machine == 'x86_64' and sys_platform == 'darwin'" },
|
||||
{ name = "torchvision", version = "0.25.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "platform_machine != 'x86_64' and sys_platform == 'darwin'" },
|
||||
{ name = "torchvision", version = "0.25.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "sys_platform != 'darwin'" },
|
||||
]
|
||||
@@ -3251,10 +3235,10 @@ name = "gassist"
|
||||
version = "0.0.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "colorama" },
|
||||
{ name = "flask" },
|
||||
{ name = "flask-cors" },
|
||||
{ name = "tqdm" },
|
||||
{ name = "colorama", marker = "(python_full_version < '3.13' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.12.*' and platform_machine == 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.13' and sys_platform != 'darwin' and sys_platform != 'linux') or sys_platform == 'win32'" },
|
||||
{ name = "flask", marker = "(python_full_version < '3.13' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.12.*' and platform_machine == 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.13' and sys_platform != 'darwin' and sys_platform != 'linux') or sys_platform == 'win32'" },
|
||||
{ name = "flask-cors", marker = "(python_full_version < '3.13' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.12.*' and platform_machine == 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.13' and sys_platform != 'darwin' and sys_platform != 'linux') or sys_platform == 'win32'" },
|
||||
{ name = "tqdm", marker = "(python_full_version < '3.13' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.12.*' and platform_machine == 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.13' and sys_platform != 'darwin' and sys_platform != 'linux') or sys_platform == 'win32'" },
|
||||
]
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/b0/2e/f79632d7300874f7f0e60b61a6ab22455a245e1556116a1729542a77b0da/gassist-0.0.1-py3-none-any.whl", hash = "sha256:bb0fac74b453153a6c74b2db40a14fdde7879cbc10ec692ed170e576c8e2b6aa", size = 23819, upload-time = "2025-05-09T18:22:23.609Z" },
|
||||
@@ -4419,9 +4403,9 @@ name = "imageio"
|
||||
version = "2.37.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.12'" },
|
||||
{ name = "numpy", version = "2.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12'" },
|
||||
{ name = "pillow" },
|
||||
{ name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.12' and platform_machine != 'x86_64') or (python_full_version < '3.12' and sys_platform != 'darwin')" },
|
||||
{ name = "numpy", version = "2.4.2", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.12' and platform_machine != 'x86_64') or (python_full_version >= '3.12' and sys_platform != 'darwin')" },
|
||||
{ name = "pillow", marker = "platform_machine != 'x86_64' or sys_platform != 'darwin'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/a3/6f/606be632e37bf8d05b253e8626c2291d74c691ddc7bcdf7d6aaf33b32f6a/imageio-2.37.2.tar.gz", hash = "sha256:0212ef2727ac9caa5ca4b2c75ae89454312f440a756fcfc8ef1993e718f50f8a", size = 389600, upload-time = "2025-11-04T14:29:39.898Z" }
|
||||
wheels = [
|
||||
@@ -4548,8 +4532,8 @@ dependencies = [
|
||||
{ name = "appnope", marker = "sys_platform == 'darwin'" },
|
||||
{ name = "comm" },
|
||||
{ name = "debugpy" },
|
||||
{ name = "ipython", version = "8.38.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" },
|
||||
{ name = "ipython", version = "9.10.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" },
|
||||
{ name = "ipython", version = "8.38.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.12'" },
|
||||
{ name = "ipython", version = "9.10.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12'" },
|
||||
{ name = "jupyter-client" },
|
||||
{ name = "jupyter-core" },
|
||||
{ name = "matplotlib-inline" },
|
||||
@@ -4570,23 +4554,27 @@ name = "ipython"
|
||||
version = "8.38.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
resolution-markers = [
|
||||
"python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux'",
|
||||
"(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')",
|
||||
"python_full_version == '3.11.*' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
|
||||
"python_full_version == '3.11.*' and platform_machine != 'x86_64' and sys_platform == 'darwin'",
|
||||
"python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux'",
|
||||
"(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')",
|
||||
"python_full_version < '3.11' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
|
||||
"python_full_version < '3.11' and platform_machine != 'x86_64' and sys_platform == 'darwin'",
|
||||
]
|
||||
dependencies = [
|
||||
{ name = "colorama", marker = "python_full_version < '3.11' and sys_platform == 'win32'" },
|
||||
{ name = "decorator", marker = "python_full_version < '3.11'" },
|
||||
{ name = "colorama", marker = "python_full_version < '3.12' and sys_platform == 'win32'" },
|
||||
{ name = "decorator", marker = "python_full_version < '3.12'" },
|
||||
{ name = "exceptiongroup", marker = "python_full_version < '3.11'" },
|
||||
{ name = "jedi", marker = "python_full_version < '3.11'" },
|
||||
{ name = "matplotlib-inline", marker = "python_full_version < '3.11'" },
|
||||
{ name = "pexpect", marker = "python_full_version < '3.11' and sys_platform != 'emscripten' and sys_platform != 'win32'" },
|
||||
{ name = "prompt-toolkit", marker = "python_full_version < '3.11'" },
|
||||
{ name = "pygments", marker = "python_full_version < '3.11'" },
|
||||
{ name = "stack-data", marker = "python_full_version < '3.11'" },
|
||||
{ name = "traitlets", marker = "python_full_version < '3.11'" },
|
||||
{ name = "typing-extensions", marker = "python_full_version < '3.11'" },
|
||||
{ name = "jedi", marker = "python_full_version < '3.12'" },
|
||||
{ name = "matplotlib-inline", marker = "python_full_version < '3.12'" },
|
||||
{ name = "pexpect", marker = "python_full_version < '3.12' and sys_platform != 'emscripten' and sys_platform != 'win32'" },
|
||||
{ name = "prompt-toolkit", marker = "python_full_version < '3.12'" },
|
||||
{ name = "pygments", marker = "python_full_version < '3.12'" },
|
||||
{ name = "stack-data", marker = "python_full_version < '3.12'" },
|
||||
{ name = "traitlets", marker = "python_full_version < '3.12'" },
|
||||
{ name = "typing-extensions", marker = "python_full_version < '3.12'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/e5/61/1810830e8b93c72dcd3c0f150c80a00c3deb229562d9423807ec92c3a539/ipython-8.38.0.tar.gz", hash = "sha256:9cfea8c903ce0867cc2f23199ed8545eb741f3a69420bfcf3743ad1cec856d39", size = 5513996, upload-time = "2026-01-05T10:59:06.901Z" }
|
||||
wheels = [
|
||||
@@ -4605,23 +4593,18 @@ resolution-markers = [
|
||||
"python_full_version == '3.12.*' and sys_platform != 'darwin'",
|
||||
"python_full_version == '3.12.*' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
|
||||
"python_full_version == '3.12.*' and platform_machine != 'x86_64' and sys_platform == 'darwin'",
|
||||
"python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux'",
|
||||
"(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')",
|
||||
"python_full_version == '3.11.*' and platform_machine == 'x86_64' and sys_platform == 'darwin'",
|
||||
"python_full_version == '3.11.*' and platform_machine != 'x86_64' and sys_platform == 'darwin'",
|
||||
]
|
||||
dependencies = [
|
||||
{ name = "colorama", marker = "python_full_version >= '3.11' and sys_platform == 'win32'" },
|
||||
{ name = "decorator", marker = "python_full_version >= '3.11'" },
|
||||
{ name = "ipython-pygments-lexers", marker = "python_full_version >= '3.11'" },
|
||||
{ name = "jedi", marker = "python_full_version >= '3.11'" },
|
||||
{ name = "matplotlib-inline", marker = "python_full_version >= '3.11'" },
|
||||
{ name = "pexpect", marker = "python_full_version >= '3.11' and sys_platform != 'emscripten' and sys_platform != 'win32'" },
|
||||
{ name = "prompt-toolkit", marker = "python_full_version >= '3.11'" },
|
||||
{ name = "pygments", marker = "python_full_version >= '3.11'" },
|
||||
{ name = "stack-data", marker = "python_full_version >= '3.11'" },
|
||||
{ name = "traitlets", marker = "python_full_version >= '3.11'" },
|
||||
{ name = "typing-extensions", marker = "python_full_version == '3.11.*'" },
|
||||
{ name = "colorama", marker = "python_full_version >= '3.12' and sys_platform == 'win32'" },
|
||||
{ name = "decorator", marker = "python_full_version >= '3.12'" },
|
||||
{ name = "ipython-pygments-lexers", marker = "python_full_version >= '3.12'" },
|
||||
{ name = "jedi", marker = "python_full_version >= '3.12'" },
|
||||
{ name = "matplotlib-inline", marker = "python_full_version >= '3.12'" },
|
||||
{ name = "pexpect", marker = "python_full_version >= '3.12' and sys_platform != 'emscripten' and sys_platform != 'win32'" },
|
||||
{ name = "prompt-toolkit", marker = "python_full_version >= '3.12'" },
|
||||
{ name = "pygments", marker = "python_full_version >= '3.12'" },
|
||||
{ name = "stack-data", marker = "python_full_version >= '3.12'" },
|
||||
{ name = "traitlets", marker = "python_full_version >= '3.12'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/a6/60/2111715ea11f39b1535bed6024b7dec7918b71e5e5d30855a5b503056b50/ipython-9.10.0.tar.gz", hash = "sha256:cd9e656be97618a0676d058134cd44e6dc7012c0e5cb36a9ce96a8c904adaf77", size = 4426526, upload-time = "2026-02-02T10:00:33.594Z" }
|
||||
wheels = [
|
||||
@@ -4633,7 +4616,7 @@ name = "ipython-pygments-lexers"
|
||||
version = "1.1.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "pygments", marker = "python_full_version >= '3.11'" },
|
||||
{ name = "pygments", marker = "python_full_version >= '3.12'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/ef/4c/5dd1d8af08107f88c7f741ead7a40854b8ac24ddf9ae850afbcf698aa552/ipython_pygments_lexers-1.1.1.tar.gz", hash = "sha256:09c0138009e56b6854f9535736f4171d855c8c08a563a0dcd8022f78355c7e81", size = 8393, upload-time = "2025-01-17T11:24:34.505Z" }
|
||||
wheels = [
|
||||
@@ -5060,7 +5043,7 @@ name = "langchain"
|
||||
version = "0.3.27"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "async-timeout", version = "4.0.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" },
|
||||
{ name = "async-timeout", marker = "python_full_version < '3.11'" },
|
||||
{ name = "langchain-core" },
|
||||
{ name = "langchain-text-splitters" },
|
||||
{ name = "langsmith" },
|
||||
@@ -6878,7 +6861,7 @@ name = "lazy-loader"
|
||||
version = "0.4"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "packaging" },
|
||||
{ name = "packaging", marker = "platform_machine != 'x86_64' or sys_platform != 'darwin'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/6f/6b/c875b30a1ba490860c93da4cabf479e03f584eba06fe5963f6f6644653d8/lazy_loader-0.4.tar.gz", hash = "sha256:47c75182589b91a4e1a85a136c074285a5ad4d9f39c63e0d7fb76391c4574cd1", size = 15431, upload-time = "2024-04-05T13:03:12.261Z" }
|
||||
wheels = [
|
||||
@@ -7690,7 +7673,7 @@ name = "mlx"
|
||||
version = "0.31.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "mlx-metal", marker = "python_full_version >= '3.12' and sys_platform == 'darwin'" },
|
||||
{ name = "mlx-metal", marker = "python_full_version >= '3.12' and platform_machine != 'x86_64' and sys_platform == 'darwin'" },
|
||||
]
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/73/54/269d13847b04b07523d44cf903e1d3c6d48f56e6e89dda7e16418b411629/mlx-0.31.0-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:38680838e0dd9a621ed4adc5a9ed8b94aeb6a4798142fbe215b821b8c6b8fc36", size = 575395, upload-time = "2026-02-27T23:49:11.886Z" },
|
||||
@@ -7712,13 +7695,13 @@ name = "mlx-lm"
|
||||
version = "0.29.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "jinja2", marker = "python_full_version >= '3.12'" },
|
||||
{ name = "mlx", marker = "python_full_version >= '3.12' and sys_platform == 'darwin'" },
|
||||
{ name = "numpy", version = "2.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12'" },
|
||||
{ name = "protobuf", marker = "python_full_version >= '3.12'" },
|
||||
{ name = "pyyaml", marker = "python_full_version >= '3.12'" },
|
||||
{ name = "sentencepiece", marker = "python_full_version >= '3.12'" },
|
||||
{ name = "transformers", marker = "python_full_version >= '3.12'" },
|
||||
{ name = "jinja2", marker = "python_full_version >= '3.12' and platform_machine != 'x86_64' and sys_platform == 'darwin'" },
|
||||
{ name = "mlx", marker = "python_full_version >= '3.12' and platform_machine != 'x86_64' and sys_platform == 'darwin'" },
|
||||
{ name = "numpy", version = "2.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12' and platform_machine != 'x86_64' and sys_platform == 'darwin'" },
|
||||
{ name = "protobuf", marker = "python_full_version >= '3.12' and platform_machine != 'x86_64' and sys_platform == 'darwin'" },
|
||||
{ name = "pyyaml", marker = "python_full_version >= '3.12' and platform_machine != 'x86_64' and sys_platform == 'darwin'" },
|
||||
{ name = "sentencepiece", marker = "python_full_version >= '3.12' and platform_machine != 'x86_64' and sys_platform == 'darwin'" },
|
||||
{ name = "transformers", marker = "python_full_version >= '3.12' and platform_machine != 'x86_64' and sys_platform == 'darwin'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/e3/62/f46e1355256a114808517947f8e83ad6be310c7288c551db0fa678f47923/mlx_lm-0.29.1.tar.gz", hash = "sha256:b99180d8f33d33a077b814e550bfb2d8a59ae003d668fd1f4b3fff62a381d34b", size = 232302, upload-time = "2025-12-16T16:58:27.959Z" }
|
||||
wheels = [
|
||||
@@ -7740,19 +7723,19 @@ name = "mlx-vlm"
|
||||
version = "0.3.3"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "datasets", marker = "python_full_version >= '3.12'" },
|
||||
{ name = "fastapi", marker = "python_full_version >= '3.12'" },
|
||||
{ name = "mlx", marker = "python_full_version >= '3.12'" },
|
||||
{ name = "mlx-lm", marker = "python_full_version >= '3.12'" },
|
||||
{ name = "numpy", version = "2.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12'" },
|
||||
{ name = "opencv-python", version = "4.13.0.92", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12'" },
|
||||
{ name = "pillow", marker = "python_full_version >= '3.12'" },
|
||||
{ name = "requests", marker = "python_full_version >= '3.12'" },
|
||||
{ name = "scipy", version = "1.17.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12'" },
|
||||
{ name = "soundfile", marker = "python_full_version >= '3.12'" },
|
||||
{ name = "tqdm", marker = "python_full_version >= '3.12'" },
|
||||
{ name = "transformers", marker = "python_full_version >= '3.12'" },
|
||||
{ name = "uvicorn", marker = "python_full_version >= '3.12'" },
|
||||
{ name = "datasets", marker = "python_full_version >= '3.12' and platform_machine != 'x86_64' and sys_platform == 'darwin'" },
|
||||
{ name = "fastapi", marker = "python_full_version >= '3.12' and platform_machine != 'x86_64' and sys_platform == 'darwin'" },
|
||||
{ name = "mlx", marker = "python_full_version >= '3.12' and platform_machine != 'x86_64' and sys_platform == 'darwin'" },
|
||||
{ name = "mlx-lm", marker = "python_full_version >= '3.12' and platform_machine != 'x86_64' and sys_platform == 'darwin'" },
|
||||
{ name = "numpy", version = "2.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12' and platform_machine != 'x86_64' and sys_platform == 'darwin'" },
|
||||
{ name = "opencv-python", version = "4.13.0.92", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12' and platform_machine != 'x86_64' and sys_platform == 'darwin'" },
|
||||
{ name = "pillow", marker = "python_full_version >= '3.12' and platform_machine != 'x86_64' and sys_platform == 'darwin'" },
|
||||
{ name = "requests", marker = "python_full_version >= '3.12' and platform_machine != 'x86_64' and sys_platform == 'darwin'" },
|
||||
{ name = "scipy", version = "1.17.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12' and platform_machine != 'x86_64' and sys_platform == 'darwin'" },
|
||||
{ name = "soundfile", marker = "python_full_version >= '3.12' and platform_machine != 'x86_64' and sys_platform == 'darwin'" },
|
||||
{ name = "tqdm", marker = "python_full_version >= '3.12' and platform_machine != 'x86_64' and sys_platform == 'darwin'" },
|
||||
{ name = "transformers", marker = "python_full_version >= '3.12' and platform_machine != 'x86_64' and sys_platform == 'darwin'" },
|
||||
{ name = "uvicorn", marker = "python_full_version >= '3.12' and platform_machine != 'x86_64' and sys_platform == 'darwin'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/ff/9f/de419334820da334203de28eaf861b57ae0d06b0882770e5e5d0671dc5dd/mlx_vlm-0.3.3.tar.gz", hash = "sha256:5a08c802d1bf32cc47bd6aebe348d3554ce21bfce417a585bba83f9d213a6e66", size = 231935, upload-time = "2025-08-20T14:52:51.323Z" }
|
||||
wheels = [
|
||||
@@ -8437,9 +8420,9 @@ name = "ocrmac"
|
||||
version = "1.0.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "click" },
|
||||
{ name = "pillow" },
|
||||
{ name = "pyobjc-framework-vision" },
|
||||
{ name = "click", marker = "sys_platform == 'darwin'" },
|
||||
{ name = "pillow", marker = "sys_platform == 'darwin'" },
|
||||
{ name = "pyobjc-framework-vision", marker = "sys_platform == 'darwin'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/5e/07/3e15ab404f75875c5e48c47163300eb90b7409044d8711fc3aaf52503f2e/ocrmac-1.0.1.tar.gz", hash = "sha256:507fe5e4cbd67b2d03f6729a52bbc11f9d0b58241134eb958a5daafd4b9d93d9", size = 1454317, upload-time = "2026-01-08T16:44:26.412Z" }
|
||||
wheels = [
|
||||
@@ -8608,7 +8591,7 @@ resolution-markers = [
|
||||
"python_full_version < '3.11' and platform_machine != 'x86_64' and sys_platform == 'darwin'",
|
||||
]
|
||||
dependencies = [
|
||||
{ name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.12'" },
|
||||
{ name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.12' and platform_machine != 'x86_64') or (python_full_version < '3.12' and sys_platform != 'darwin')" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/36/2f/5b2b3ba52c864848885ba988f24b7f105052f68da9ab0e693cc7c25b0b30/opencv-python-headless-4.11.0.86.tar.gz", hash = "sha256:996eb282ca4b43ec6a3972414de0e2331f5d9cda2b41091a49739c19fb843798", size = 95177929, upload-time = "2025-01-16T13:53:40.22Z" }
|
||||
wheels = [
|
||||
@@ -8632,7 +8615,7 @@ resolution-markers = [
|
||||
"python_full_version == '3.12.*' and platform_machine != 'x86_64' and sys_platform == 'darwin'",
|
||||
]
|
||||
dependencies = [
|
||||
{ name = "numpy", version = "2.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12'" },
|
||||
{ name = "numpy", version = "2.4.2", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.12' and platform_machine != 'x86_64') or (python_full_version >= '3.12' and sys_platform != 'darwin')" },
|
||||
]
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/79/42/2310883be3b8826ac58c3f2787b9358a2d46923d61f88fedf930bc59c60c/opencv_python_headless-4.13.0.92-cp37-abi3-macosx_13_0_arm64.whl", hash = "sha256:1a7d040ac656c11b8c38677cc8cccdc149f98535089dbe5b081e80a4e5903209", size = 46247192, upload-time = "2026-02-05T07:01:35.187Z" },
|
||||
@@ -9845,7 +9828,7 @@ name = "pexpect"
|
||||
version = "4.9.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "ptyprocess" },
|
||||
{ name = "ptyprocess", marker = "python_full_version < '3.13' or sys_platform != 'win32'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/42/92/cc564bf6381ff43ce1f4d06852fc19a2f11d180f23dc32d9588bee2f149d/pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f", size = 166450, upload-time = "2023-11-25T09:07:26.339Z" }
|
||||
wheels = [
|
||||
@@ -11104,7 +11087,7 @@ name = "pyobjc-framework-cocoa"
|
||||
version = "12.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "pyobjc-core" },
|
||||
{ name = "pyobjc-core", marker = "sys_platform == 'darwin'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/02/a3/16ca9a15e77c061a9250afbae2eae26f2e1579eb8ca9462ae2d2c71e1169/pyobjc_framework_cocoa-12.1.tar.gz", hash = "sha256:5556c87db95711b985d5efdaaf01c917ddd41d148b1e52a0c66b1a2e2c5c1640", size = 2772191, upload-time = "2025-11-14T10:13:02.069Z" }
|
||||
wheels = [
|
||||
@@ -11120,8 +11103,8 @@ name = "pyobjc-framework-coreml"
|
||||
version = "12.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "pyobjc-core" },
|
||||
{ name = "pyobjc-framework-cocoa" },
|
||||
{ name = "pyobjc-core", marker = "sys_platform == 'darwin'" },
|
||||
{ name = "pyobjc-framework-cocoa", marker = "sys_platform == 'darwin'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/30/2d/baa9ea02cbb1c200683cb7273b69b4bee5070e86f2060b77e6a27c2a9d7e/pyobjc_framework_coreml-12.1.tar.gz", hash = "sha256:0d1a4216891a18775c9e0170d908714c18e4f53f9dc79fb0f5263b2aa81609ba", size = 40465, upload-time = "2025-11-14T10:14:02.265Z" }
|
||||
wheels = [
|
||||
@@ -11137,8 +11120,8 @@ name = "pyobjc-framework-quartz"
|
||||
version = "12.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "pyobjc-core" },
|
||||
{ name = "pyobjc-framework-cocoa" },
|
||||
{ name = "pyobjc-core", marker = "sys_platform == 'darwin'" },
|
||||
{ name = "pyobjc-framework-cocoa", marker = "sys_platform == 'darwin'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/94/18/cc59f3d4355c9456fc945eae7fe8797003c4da99212dd531ad1b0de8a0c6/pyobjc_framework_quartz-12.1.tar.gz", hash = "sha256:27f782f3513ac88ec9b6c82d9767eef95a5cf4175ce88a1e5a65875fee799608", size = 3159099, upload-time = "2025-11-14T10:21:24.31Z" }
|
||||
wheels = [
|
||||
@@ -11154,10 +11137,10 @@ name = "pyobjc-framework-vision"
|
||||
version = "12.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "pyobjc-core" },
|
||||
{ name = "pyobjc-framework-cocoa" },
|
||||
{ name = "pyobjc-framework-coreml" },
|
||||
{ name = "pyobjc-framework-quartz" },
|
||||
{ name = "pyobjc-core", marker = "sys_platform == 'darwin'" },
|
||||
{ name = "pyobjc-framework-cocoa", marker = "sys_platform == 'darwin'" },
|
||||
{ name = "pyobjc-framework-coreml", marker = "sys_platform == 'darwin'" },
|
||||
{ name = "pyobjc-framework-quartz", marker = "sys_platform == 'darwin'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/c2/5a/08bb3e278f870443d226c141af14205ff41c0274da1e053b72b11dfc9fb2/pyobjc_framework_vision-12.1.tar.gz", hash = "sha256:a30959100e85dcede3a786c544e621ad6eb65ff6abf85721f805822b8c5fe9b0", size = 59538, upload-time = "2025-11-14T10:23:21.979Z" }
|
||||
wheels = [
|
||||
@@ -12043,8 +12026,7 @@ name = "redis"
|
||||
version = "5.3.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "async-timeout", version = "4.0.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" },
|
||||
{ name = "async-timeout", version = "5.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11' and python_full_version < '3.11.3'" },
|
||||
{ name = "async-timeout", marker = "python_full_version < '3.11.3'" },
|
||||
{ name = "pyjwt" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/6a/cf/128b1b6d7086200c9f387bd4be9b2572a30b90745ef078bd8b235042dc9f/redis-5.3.1.tar.gz", hash = "sha256:ca49577a531ea64039b5a36db3d6cd1a0c7a60c34124d46924a45b956e8cf14c", size = 4626200, upload-time = "2025-07-25T08:06:27.778Z" }
|
||||
@@ -12613,19 +12595,24 @@ name = "scikit-image"
|
||||
version = "0.25.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
resolution-markers = [
|
||||
"python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux'",
|
||||
"(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')",
|
||||
"python_full_version == '3.11.*' and platform_machine != 'x86_64' and sys_platform == 'darwin'",
|
||||
"python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux'",
|
||||
"(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')",
|
||||
"python_full_version < '3.11' and platform_machine != 'x86_64' and sys_platform == 'darwin'",
|
||||
]
|
||||
dependencies = [
|
||||
{ name = "imageio", marker = "python_full_version < '3.11'" },
|
||||
{ name = "lazy-loader", marker = "python_full_version < '3.11'" },
|
||||
{ name = "networkx", version = "3.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" },
|
||||
{ name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" },
|
||||
{ name = "packaging", marker = "python_full_version < '3.11'" },
|
||||
{ name = "pillow", marker = "python_full_version < '3.11'" },
|
||||
{ name = "scipy", version = "1.15.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" },
|
||||
{ name = "tifffile", version = "2025.5.10", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" },
|
||||
{ name = "imageio", marker = "(python_full_version < '3.12' and platform_machine != 'x86_64') or (python_full_version < '3.12' and sys_platform != 'darwin')" },
|
||||
{ name = "lazy-loader", marker = "(python_full_version < '3.12' and platform_machine != 'x86_64') or (python_full_version < '3.12' and sys_platform != 'darwin')" },
|
||||
{ name = "networkx", version = "3.4.2", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and platform_machine != 'x86_64') or (python_full_version < '3.11' and sys_platform != 'darwin')" },
|
||||
{ name = "networkx", version = "3.6.1", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version == '3.11.*' and platform_machine != 'x86_64') or (python_full_version == '3.11.*' and sys_platform != 'darwin')" },
|
||||
{ name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.12' and platform_machine != 'x86_64') or (python_full_version < '3.12' and sys_platform != 'darwin')" },
|
||||
{ name = "packaging", marker = "(python_full_version < '3.12' and platform_machine != 'x86_64') or (python_full_version < '3.12' and sys_platform != 'darwin')" },
|
||||
{ name = "pillow", marker = "(python_full_version < '3.12' and platform_machine != 'x86_64') or (python_full_version < '3.12' and sys_platform != 'darwin')" },
|
||||
{ name = "scipy", version = "1.15.3", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and platform_machine != 'x86_64') or (python_full_version < '3.11' and sys_platform != 'darwin')" },
|
||||
{ name = "scipy", version = "1.17.1", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version == '3.11.*' and platform_machine != 'x86_64') or (python_full_version == '3.11.*' and sys_platform != 'darwin')" },
|
||||
{ name = "tifffile", version = "2025.5.10", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.12' and platform_machine != 'x86_64') or (python_full_version < '3.12' and sys_platform != 'darwin')" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/c7/a8/3c0f256012b93dd2cb6fda9245e9f4bff7dc0486880b248005f15ea2255e/scikit_image-0.25.2.tar.gz", hash = "sha256:e5a37e6cd4d0c018a7a55b9d601357e3382826d3888c10d0213fc63bff977dde", size = 22693594, upload-time = "2025-02-18T18:05:24.538Z" }
|
||||
wheels = [
|
||||
@@ -12662,20 +12649,16 @@ resolution-markers = [
|
||||
"python_full_version >= '3.13' and platform_machine != 'x86_64' and sys_platform == 'darwin'",
|
||||
"python_full_version == '3.12.*' and sys_platform != 'darwin'",
|
||||
"python_full_version == '3.12.*' and platform_machine != 'x86_64' and sys_platform == 'darwin'",
|
||||
"python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux'",
|
||||
"(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')",
|
||||
"python_full_version == '3.11.*' and platform_machine != 'x86_64' and sys_platform == 'darwin'",
|
||||
]
|
||||
dependencies = [
|
||||
{ name = "imageio", marker = "python_full_version >= '3.11'" },
|
||||
{ name = "lazy-loader", marker = "python_full_version >= '3.11'" },
|
||||
{ name = "networkx", version = "3.6.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" },
|
||||
{ name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.11.*'" },
|
||||
{ name = "numpy", version = "2.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12'" },
|
||||
{ name = "packaging", marker = "python_full_version >= '3.11'" },
|
||||
{ name = "pillow", marker = "python_full_version >= '3.11'" },
|
||||
{ name = "scipy", version = "1.17.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" },
|
||||
{ name = "tifffile", version = "2026.2.24", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" },
|
||||
{ name = "imageio", marker = "(python_full_version >= '3.12' and platform_machine != 'x86_64') or (python_full_version >= '3.12' and sys_platform != 'darwin')" },
|
||||
{ name = "lazy-loader", marker = "(python_full_version >= '3.12' and platform_machine != 'x86_64') or (python_full_version >= '3.12' and sys_platform != 'darwin')" },
|
||||
{ name = "networkx", version = "3.6.1", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.12' and platform_machine != 'x86_64') or (python_full_version >= '3.12' and sys_platform != 'darwin')" },
|
||||
{ name = "numpy", version = "2.4.2", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.12' and platform_machine != 'x86_64') or (python_full_version >= '3.12' and sys_platform != 'darwin')" },
|
||||
{ name = "packaging", marker = "(python_full_version >= '3.12' and platform_machine != 'x86_64') or (python_full_version >= '3.12' and sys_platform != 'darwin')" },
|
||||
{ name = "pillow", marker = "(python_full_version >= '3.12' and platform_machine != 'x86_64') or (python_full_version >= '3.12' and sys_platform != 'darwin')" },
|
||||
{ name = "scipy", version = "1.17.1", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.12' and platform_machine != 'x86_64') or (python_full_version >= '3.12' and sys_platform != 'darwin')" },
|
||||
{ name = "tifffile", version = "2026.2.24", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.12' and platform_machine != 'x86_64') or (python_full_version >= '3.12' and sys_platform != 'darwin')" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/a1/b4/2528bb43c67d48053a7a649a9666432dc307d66ba02e3a6d5c40f46655df/scikit_image-0.26.0.tar.gz", hash = "sha256:f5f970ab04efad85c24714321fcc91613fcb64ef2a892a13167df2f3e59199fa", size = 22729739, upload-time = "2025-12-20T17:12:21.824Z" }
|
||||
wheels = [
|
||||
@@ -13241,8 +13224,8 @@ name = "soundfile"
|
||||
version = "0.13.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "cffi", marker = "python_full_version >= '3.12'" },
|
||||
{ name = "numpy", version = "2.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12'" },
|
||||
{ name = "cffi", marker = "python_full_version >= '3.12' and platform_machine != 'x86_64' and sys_platform == 'darwin'" },
|
||||
{ name = "numpy", version = "2.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12' and platform_machine != 'x86_64' and sys_platform == 'darwin'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/e1/41/9b873a8c055582859b239be17902a85339bec6a30ad162f98c9b0288a2cc/soundfile-0.13.1.tar.gz", hash = "sha256:b2c68dab1e30297317080a5b43df57e302584c49e2942defdde0acccc53f0e5b", size = 46156, upload-time = "2025-01-25T09:17:04.831Z" }
|
||||
wheels = [
|
||||
@@ -13565,12 +13548,15 @@ name = "tifffile"
|
||||
version = "2025.5.10"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
resolution-markers = [
|
||||
"python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux'",
|
||||
"(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')",
|
||||
"python_full_version == '3.11.*' and platform_machine != 'x86_64' and sys_platform == 'darwin'",
|
||||
"python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux'",
|
||||
"(python_full_version < '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform != 'darwin' and sys_platform != 'linux')",
|
||||
"python_full_version < '3.11' and platform_machine != 'x86_64' and sys_platform == 'darwin'",
|
||||
]
|
||||
dependencies = [
|
||||
{ name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" },
|
||||
{ name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.12' and platform_machine != 'x86_64') or (python_full_version < '3.12' and sys_platform != 'darwin')" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/44/d0/18fed0fc0916578a4463f775b0fbd9c5fed2392152d039df2fb533bfdd5d/tifffile-2025.5.10.tar.gz", hash = "sha256:018335d34283aa3fd8c263bae5c3c2b661ebc45548fde31504016fcae7bf1103", size = 365290, upload-time = "2025-05-10T19:22:34.386Z" }
|
||||
wheels = [
|
||||
@@ -13587,13 +13573,9 @@ resolution-markers = [
|
||||
"python_full_version >= '3.13' and platform_machine != 'x86_64' and sys_platform == 'darwin'",
|
||||
"python_full_version == '3.12.*' and sys_platform != 'darwin'",
|
||||
"python_full_version == '3.12.*' and platform_machine != 'x86_64' and sys_platform == 'darwin'",
|
||||
"python_full_version == '3.11.*' and platform_machine == 'aarch64' and sys_platform == 'linux'",
|
||||
"(python_full_version == '3.11.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform != 'darwin' and sys_platform != 'linux')",
|
||||
"python_full_version == '3.11.*' and platform_machine != 'x86_64' and sys_platform == 'darwin'",
|
||||
]
|
||||
dependencies = [
|
||||
{ name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.11.*'" },
|
||||
{ name = "numpy", version = "2.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12'" },
|
||||
{ name = "numpy", version = "2.4.2", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.12' and platform_machine != 'x86_64') or (python_full_version >= '3.12' and sys_platform != 'darwin')" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/6e/1c/19fc653e2b05ec0defae511b03b330ca60c95f2c47fcaaf21c52c6e84aa8/tifffile-2026.2.24.tar.gz", hash = "sha256:d73cfa6d7a8f5775a1e3c9f3bfca77c992946639fb41a5bbe888878cb6964dc6", size = 387373, upload-time = "2026-02-24T23:59:11.706Z" }
|
||||
wheels = [
|
||||
@@ -14361,11 +14343,11 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "types-pytz"
|
||||
version = "2025.2.0.20251108"
|
||||
version = "2026.1.1.20260304"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/40/ff/c047ddc68c803b46470a357454ef76f4acd8c1088f5cc4891cdd909bfcf6/types_pytz-2025.2.0.20251108.tar.gz", hash = "sha256:fca87917836ae843f07129567b74c1929f1870610681b4c92cb86a3df5817bdb", size = 10961, upload-time = "2025-11-08T02:55:57.001Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/fe/56/2f12a15ea8c5615c8fb896c4fbbb527ab1c0f776ed5860c6fc9ec26ea2c7/types_pytz-2026.1.1.20260304.tar.gz", hash = "sha256:0c3542d8e9b0160b424233440c52b83d6f58cae4b85333d54e4f961cf013e117", size = 11198, upload-time = "2026-03-04T03:57:24.445Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/e7/c1/56ef16bf5dcd255155cc736d276efa6ae0a5c26fd685e28f0412a4013c01/types_pytz-2025.2.0.20251108-py3-none-any.whl", hash = "sha256:0f1c9792cab4eb0e46c52f8845c8f77cf1e313cb3d68bf826aa867fe4717d91c", size = 10116, upload-time = "2025-11-08T02:55:56.194Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/94/b8/e77c355f179dc89d44e7ca6dbf7a46e650806df1d356a5462e5829fccea5/types_pytz-2026.1.1.20260304-py3-none-any.whl", hash = "sha256:175332c1cf7bd6b1cc56b877f70bf02def1a3f75e5adcc05385ce2c3c70e6500", size = 10126, upload-time = "2026-03-04T03:57:23.481Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -14739,8 +14721,8 @@ version = "0.5.4"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "cachetools" },
|
||||
{ name = "ipython", version = "8.38.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" },
|
||||
{ name = "ipython", version = "9.10.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" },
|
||||
{ name = "ipython", version = "8.38.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.12'" },
|
||||
{ name = "ipython", version = "9.10.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12'" },
|
||||
{ name = "loguru" },
|
||||
{ name = "opencv-python", version = "4.11.0.86", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.12'" },
|
||||
{ name = "opencv-python", version = "4.13.0.92", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12'" },
|
||||
@@ -14792,14 +14774,14 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "vulture"
|
||||
version = "2.14"
|
||||
version = "2.15"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "tomli", marker = "python_full_version < '3.11'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/8e/25/925f35db758a0f9199113aaf61d703de891676b082bd7cf73ea01d6000f7/vulture-2.14.tar.gz", hash = "sha256:cb8277902a1138deeab796ec5bef7076a6e0248ca3607a3f3dee0b6d9e9b8415", size = 58823, upload-time = "2024-12-08T17:39:43.319Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/59/c6/4f147b621b4c0899eb1770f98113334bb706ebd251ac2be979316b1985fa/vulture-2.15.tar.gz", hash = "sha256:f9d8b4ce29c69950d323f21dceab4a4d6c694403dffbed7713c4691057e561fe", size = 52438, upload-time = "2026-03-04T21:41:39.096Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/a0/56/0cc15b8ff2613c1d5c3dc1f3f576ede1c43868c1bc2e5ccaa2d4bcd7974d/vulture-2.14-py2.py3-none-any.whl", hash = "sha256:d9a90dba89607489548a49d557f8bac8112bd25d3cbc8aeef23e860811bd5ed9", size = 28915, upload-time = "2024-12-08T17:39:40.573Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1c/f3/07cf122e145bc6df976030e9935123124c3fcb5044cf407b5e71e85821b4/vulture-2.15-py3-none-any.whl", hash = "sha256:a3d8ebef918694326620eb128fa783486c8d285b23381c2b457d864ac056ef8d", size = 26895, upload-time = "2026-03-04T21:41:39.878Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
||||
Reference in New Issue
Block a user