feat(backend): support delete datasource endpoint
Showing 7 changed files with 154 additions and 55 deletions.
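The route handler for the new delete endpoint lives in one of the changed files that is not shown in this view. Below is a minimal sketch of how such a route could be wired to the repository's soft-delete logic; the URL path, the `get_db_session` dependency, and the response body are assumptions for illustration, not code from this commit.

# Hypothetical sketch only -- the real route module is not shown in this diff.
from fastapi import APIRouter, Depends, HTTPException
from sqlmodel import Session

from app.repositories.data_source import data_source_repo
# `get_db_session` is an assumed dependency that yields a SQLModel Session.
from app.api.deps import get_db_session

router = APIRouter()


@router.delete("/admin/datasources/{data_source_id}")
def delete_datasource(
    data_source_id: int,
    session: Session = Depends(get_db_session),
):
    data_source = data_source_repo.get(session, data_source_id)
    if data_source is None:
        # get() already filters out rows with deleted_at set, so a soft-deleted
        # data source is treated the same as a missing one.
        raise HTTPException(status_code=404, detail="Data source not found")
    data_source_repo.delete(session, data_source)
    return {"detail": "success"}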
@@ -0,0 +1,34 @@
"""empty message

Revision ID: bd17a4ebccc5
Revises: a8c79553c9f6
Create Date: 2024-08-08 01:20:42.069228

"""
from alembic import op
import sqlalchemy as sa
import sqlmodel.sql.sqltypes
from tidb_vector.sqlalchemy import VectorType


# revision identifiers, used by Alembic.
revision = 'bd17a4ebccc5'
down_revision = 'a8c79553c9f6'
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('data_sources', sa.Column('deleted_at', sa.DateTime(), nullable=True))
    op.drop_index('source_uri', table_name='documents')
    op.add_column('relationships', sa.Column('chunk_id', sqlmodel.sql.sqltypes.GUID(), nullable=True))
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('relationships', 'chunk_id')
    op.create_index('source_uri', 'documents', ['source_uri'], unique=True)
    op.drop_column('data_sources', 'deleted_at')
    # ### end Alembic commands ###
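The migration adds a nullable `deleted_at` column to `data_sources`, which is the marker the repository below uses for soft deletion: rows are never physically removed, they are stamped with a timestamp and filtered out of later queries. A rough sketch of how the corresponding model field might look on the SQLModel side follows; the actual `DataSource` model is in another changed file not shown here and may differ.

# Assumed shape of the model field backing the new column -- illustrative only.
from datetime import datetime
from typing import Optional
from sqlmodel import SQLModel, Field


class DataSource(SQLModel, table=True):
    __tablename__ = "data_sources"

    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
    # NULL means the data source is active; a timestamp marks it as soft-deleted.
    deleted_at: Optional[datetime] = Field(default=None, nullable=True)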
@@ -0,0 +1,89 @@
from typing import Optional
from datetime import datetime, UTC

from sqlmodel import select, Session, func
from fastapi_pagination import Params, Page
from fastapi_pagination.ext.sqlmodel import paginate

from app.models import DataSource, Document, Chunk
from app.repositories.base_repo import BaseRepo


class DataSourceRepo(BaseRepo):
    model_cls = DataSource

    def paginate(
        self,
        session: Session,
        params: Params | None = Params(),
    ) -> Page[DataSource]:
        query = (
            select(DataSource)
            .where(DataSource.deleted_at == None)
            .order_by(DataSource.created_at.desc())
        )
        return paginate(session, query, params)

    def get(
        self,
        session: Session,
        data_source_id: int,
    ) -> Optional[DataSource]:
        return session.exec(
            select(DataSource).where(
                DataSource.id == data_source_id, DataSource.deleted_at == None
            )
        ).first()

    def delete(self, session: Session, data_source: DataSource) -> None:
        data_source.deleted_at = datetime.now(UTC)
        session.add(data_source)
        session.commit()

    def overview(self, session: Session, data_source: DataSource) -> dict:
        data_source_id = data_source.id
        documents_count = session.scalar(
            select(func.count(Document.id)).where(
                Document.data_source_id == data_source_id
            )
        )
        chunks_count = session.scalar(
            select(func.count(Chunk.id)).where(
                Chunk.document.has(Document.data_source_id == data_source_id)
            )
        )

        statement = (
            select(Document.index_status, func.count(Document.id))
            .where(Document.data_source_id == data_source_id)
            .group_by(Document.index_status)
            .order_by(Document.index_status)
        )
        results = session.exec(statement).all()
        vector_index_status = {s: c for s, c in results}

        if data_source.build_kg_index:
            statement = (
                select(Chunk.index_status, func.count(Chunk.id))
                .where(Chunk.document.has(Document.data_source_id == data_source_id))
                .group_by(Chunk.index_status)
                .order_by(Chunk.index_status)
            )
            results = session.exec(statement).all()
            kg_index_status = {s: c for s, c in results}
        else:
            kg_index_status = {}

        return {
            "documents": {
                "total": documents_count,
            },
            "chunks": {
                "total": chunks_count,
            },
            "kg_index": kg_index_status,
            "vector_index": vector_index_status,
        }


data_source_repo = DataSourceRepo()
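Taken together, `delete` stamps `deleted_at` with the current UTC time instead of issuing a SQL DELETE, while `get` and `paginate` filter on `deleted_at == None`, so a soft-deleted data source disappears from listings without its documents and chunks being touched. A short usage sketch under assumed setup (the engine URL and session wiring are placeholders, not part of this commit):

# Illustrative usage only; engine setup is assumed, not shown in this diff.
from sqlmodel import Session, create_engine

from app.repositories.data_source import data_source_repo

engine = create_engine("mysql+pymysql://user:pass@localhost:4000/app")

with Session(engine) as session:
    ds = data_source_repo.get(session, data_source_id=1)
    if ds is not None:
        # Sets deleted_at on the row; later get()/paginate() calls will
        # no longer return this data source.
        data_source_repo.delete(session, ds)

    # overview() aggregates counts for a data source object you already hold,
    # returning a payload shaped like:
    # {"documents": {"total": ...}, "chunks": {"total": ...},
    #  "kg_index": {...}, "vector_index": {...}}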