ops: ci actions fix for clippy, redoc, and yarn build #1017

Merged
8 commits merged on Mar 5, 2024
12 changes: 11 additions & 1 deletion .github/workflows/chat-tests.yml
@@ -10,8 +10,18 @@ on:
- 'chat/**'
- '.github/workflows/**'
jobs:
build-test:
runs-on: self-hosted
steps:
- uses: actions/checkout@v2
- name: Setting up
run: sed -i 's/chat\///' ./chat/.eslintrc.json
- name: Install dependencies
run: yarn --cwd chat
- name: Running lint
run: yarn --cwd chat build
eslint:
runs-on: ubuntu-latest
runs-on: self-hosted
steps:
- uses: actions/checkout@v2
- name: Setting up
105 changes: 52 additions & 53 deletions .github/workflows/push-server.yml
@@ -1,53 +1,52 @@
name: Create Docker Images
on:
workflow_call:
secrets:
DOCKER_USERNAME:
required: true
DOCKER_PASSWORD:
required: true
workflow_dispatch:
push:
branches:
- 'main'

jobs:
build_server:
name: Building Server Docker Image
runs-on: ubuntu-latest
steps:
- name: Checkout the repo
uses: actions/checkout@v4

- name: Setup buildx
uses: docker/setup-buildx-action@v3

- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}

- name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
# list of Docker images to use as base name for tags
images: |
arguflow/search
tags: |
type=schedule
type=ref,event=branch
type=ref,event=pr
type=sha

- name: Build and push Docker image
uses: docker/build-push-action@v5
with:
cache-from: type=gha
cache-to: type=gha,mode=max
context: server/
file: ./server/Dockerfile.no-ocr
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
# name: Create Docker Images
# on:
# workflow_call:
# secrets:
# DOCKER_USERNAME:
# required: true
# DOCKER_PASSWORD:
# required: true
# workflow_dispatch:
# push:
# branches:
# - 'main'
#
# jobs:
# build_server:
# name: Building Server Docker Image
# steps:
# - name: Checkout the repo
# uses: actions/checkout@v4
#
# - name: Setup buildx
# uses: docker/setup-buildx-action@v3
#
# - name: Login to Docker Hub
# uses: docker/login-action@v3
# with:
# username: ${{ secrets.DOCKER_USERNAME }}
# password: ${{ secrets.DOCKER_PASSWORD }}
#
# - name: Docker meta
# id: meta
# uses: docker/metadata-action@v5
# with:
# # list of Docker images to use as base name for tags
# images: |
# arguflow/search
# tags: |
# type=schedule
# type=ref,event=branch
# type=ref,event=pr
# type=sha
#
# - name: Build and push Docker image
# uses: docker/build-push-action@v5
# with:
# cache-from: type=gha
# cache-to: type=gha,mode=max
# context: server/
# file: ./server/Dockerfile.no-ocr
# push: true
# tags: ${{ steps.meta.outputs.tags }}
# labels: ${{ steps.meta.outputs.labels }}
13 changes: 11 additions & 2 deletions .github/workflows/rust-tests.yml
@@ -15,8 +15,17 @@ jobs:
steps:
- uses: actions/checkout@v1
- run: rustup component add clippy
- uses: actions-rs/clippy-check@v1
- uses: clechasseur/rs-clippy-check@v3
with:
token: ${{ secrets.GITHUB_TOKEN }}
args: --features runtime-env --manifest-path server/Cargo.toml
redoc:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Installing Vaccum
run: npm install -g @quobix/vacuum
- name: Generating OpenAPI spec
run: cargo run --features runtime-env --manifest-path server/Cargo.toml --bin redoc_ci > openapi.json
- name: Vaccum lint
run: vacuum lint openapi.json

11 changes: 11 additions & 0 deletions .github/workflows/search-tests.yml
@@ -10,6 +10,16 @@ on:
- 'search/**'
- '.github/workflows/**'
jobs:
build-test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Setting up
run: sed -i 's/search\///' ./search/.eslintrc.json
- name: Install dependencies
run: yarn --cwd search
- name: Running lint
run: yarn --cwd search build
eslint:
runs-on: ubuntu-latest
steps:
@@ -27,3 +37,4 @@ jobs:
repo-token: '${{ secrets.GITHUB_TOKEN }}'
report-json: './search/eslint_report.json'


3 changes: 3 additions & 0 deletions server/Cargo.toml
@@ -12,6 +12,9 @@ path = "src/main.rs"
name = "ingestion-microservice"
path = "src/bin/ingestion-microservice.rs"

[[bin]]
name = "redoc_ci"
path = "src/bin/redoc_ci.rs"

[dependencies]
actix-identity = { version = "0.6.0" }
7 changes: 7 additions & 0 deletions server/src/bin/redoc_ci.rs
@@ -0,0 +1,7 @@
use trieve_server::ApiDoc;
use utoipa::OpenApi;

fn main() -> std::io::Result<()> {
println!("{}", ApiDoc::openapi().to_pretty_json().unwrap());
Ok(())
}
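
The new `redoc_ci` binary above only serializes the library's `ApiDoc` type, which is what utoipa's `OpenApi` derive generates. As a rough, hedged sketch of the shape such a definition typically takes (the title below is a placeholder; the real `ApiDoc` in `trieve_server` lists the project's handlers and schemas):

```rust
// Sketch of the kind of ApiDoc definition the binary above relies on.
// The real ApiDoc registers handlers under paths(...) and ToSchema types
// under components(schemas(...)); those lists are omitted here.
use utoipa::OpenApi;

#[derive(OpenApi)]
#[openapi(info(title = "example-api"))]
struct ApiDoc;

fn main() -> std::io::Result<()> {
    // Same pattern as redoc_ci: emit the pretty-printed spec so CI can pipe it to vacuum.
    println!("{}", ApiDoc::openapi().to_pretty_json().unwrap());
    Ok(())
}
```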
13 changes: 9 additions & 4 deletions server/src/data/models.rs
@@ -12,6 +12,11 @@ use utoipa::ToSchema;
// type alias to use in multiple places
pub type Pool = r2d2::Pool<ConnectionManager<PgConnection>>;

#[derive(Debug, Serialize, Deserialize)]
pub struct AI {
pub a : String,
}

#[derive(Debug, Serialize, Deserialize, Queryable, Insertable, Selectable, Clone, ToSchema)]
#[diesel(table_name = users)]
pub struct User {
@@ -344,7 +349,7 @@ impl SlimUser {
}
}

#[derive(Debug, Serialize, Deserialize, Clone, ToSchema)]
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct UserDTO {
pub id: uuid::Uuid,
pub email: Option<String>,
@@ -483,7 +488,7 @@ impl FileGroup {
}
}

#[derive(Debug, Serialize, Deserialize, Clone, ToSchema)]
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct UserDTOWithChunks {
pub id: uuid::Uuid,
pub email: Option<String>,
@@ -1041,7 +1046,7 @@ impl ClientDatasetConfiguration {
}
}

#[derive(Serialize, Deserialize, Debug, Clone, ToSchema)]
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct DatasetAndOrgWithSubAndPlan {
pub dataset: Dataset,
pub organization: OrganizationWithSubAndPlan,
@@ -1274,7 +1279,7 @@ impl OrganizationWithSubAndPlan {
}
}

#[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Clone, ToSchema, Ord, PartialOrd)]
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Clone, Ord, PartialOrd)]
pub enum UserRole {
Owner = 2,
Admin = 1,
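
Several of the derives above drop `ToSchema`. In utoipa, `ToSchema` is only needed on types that the OpenAPI document actually exposes as schema components, so removing it from internal DTOs is presumably part of keeping the generated spec (and the new vacuum lint) clean. A minimal sketch of the distinction, using hypothetical types rather than Trieve's:

```rust
// Hypothetical types: only the one exposed through the documented API keeps ToSchema.
use serde::{Deserialize, Serialize};
use utoipa::ToSchema;

// Returned by a documented handler and registered under components(schemas(...)),
// so it needs the derive.
#[derive(Debug, Serialize, Deserialize, ToSchema)]
pub struct PublicUser {
    pub id: String, // String instead of uuid::Uuid to keep the sketch dependency-light
    pub email: Option<String>,
}

// Internal DTO that no documented endpoint references: plain serde is enough here.
#[derive(Debug, Serialize, Deserialize)]
pub struct UserRow {
    pub id: String,
    pub password_hash: String,
}
```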
6 changes: 0 additions & 6 deletions server/src/handlers/auth_handler.rs
@@ -29,12 +29,6 @@ use serde_json::json;
use std::future::{ready, Ready};
use utoipa::ToSchema;

#[derive(Debug, Deserialize, ToSchema)]
pub struct AuthData {
pub email: String,
pub password: String,
}

#[derive(Deserialize, Debug)]
pub struct OpCallback {
pub state: String,
2 changes: 1 addition & 1 deletion server/src/handlers/event_handler.rs
@@ -44,7 +44,7 @@ pub async fn get_events(
Ok(HttpResponse::Ok().json(events))
}

#[derive(Debug, Deserialize, Serialize, Clone, ToSchema)]
#[derive(Debug, Deserialize, Serialize, Clone)]
pub struct EventId {
/// Id of the notification to target.
pub notification_id: uuid::Uuid,
26 changes: 13 additions & 13 deletions server/src/handlers/group_handler.rs
@@ -99,7 +99,7 @@ pub struct GroupData {
pub total_pages: i64,
}

#[derive(Deserialize, Serialize, ToSchema)]
#[derive(Deserialize, Serialize)]
pub struct DatasetGroupQuery {
pub dataset_id: uuid::Uuid,
pub page: u64,
@@ -164,7 +164,7 @@ pub async fn get_specific_dataset_chunk_groups(
}))
}

#[derive(Deserialize, Serialize, ToSchema)]
#[derive(Deserialize, Serialize)]
pub struct GetGroupByTrackingIDData {
pub tracking_id: String,
}
@@ -219,7 +219,7 @@ pub struct UpdateGroupByTrackingIDData {

#[utoipa::path(
put,
path = "/chunk_group/tracking_id",
path = "/chunk_group/tracking_id/{tracking_id}",
context_path = "/api",
tag = "chunk_group",
request_body(content = UpdateGroupByTrackingIDData, description = "JSON request payload to update a chunkGroup", content_type = "application/json"),
@@ -229,7 +229,7 @@ pub struct UpdateGroupByTrackingIDData {
),
params(
("TR-Dataset" = String, Header, description = "The dataset id to use for the request"),
("tracking_id" = uuid, description = "Tracking id of the chunk_group to update"),
("tracking_id" = uuid::Uuid, description = "Tracking id of the chunk_group to update"),
),
security(
("ApiKey" = ["admin"]),
@@ -264,7 +264,7 @@ pub async fn update_group_by_tracking_id(
Ok(HttpResponse::NoContent().finish())
}

#[derive(Debug, Deserialize, Serialize, ToSchema)]
#[derive(Debug, Deserialize, Serialize)]
pub struct DeleteGroupByTrackingIDData {
pub delete_chunks: Option<bool>,
}
@@ -280,7 +280,7 @@ pub struct DeleteGroupByTrackingIDData {
),
params(
("TR-Dataset" = String, Header, description = "The dataset id to use for the request"),
("tracking_id" = uuid, description = "Tracking id of the chunk_group to delete"),
("tracking_id" = uuid::Uuid, description = "Tracking id of the chunk_group to delete"),
),
security(
("ApiKey" = ["admin"]),
@@ -316,7 +316,7 @@ pub async fn delete_group_by_tracking_id(
Ok(HttpResponse::NoContent().finish())
}

#[derive(Debug, Deserialize, Serialize, ToSchema)]
#[derive(Debug, Deserialize, Serialize)]
pub struct DeleteGroupData {
pub delete_chunks: Option<bool>,
}
@@ -335,7 +335,7 @@ pub struct DeleteGroupData {
),
params(
("TR-Dataset" = String, Header, description = "The dataset id to use for the request"),
("group_id" = uuid, description = "Id of the chunk_group to delete"),
("group_id" = uuid::Uuid, description = "Id of the chunk_group to delete"),
("delete_chunks" = bool, Query, description = "Delete the chunks within the group"),
),
security(
@@ -518,7 +518,7 @@ pub struct AddChunkToGroupByTrackingIdData {
),
params(
("TR-Dataset" = String, Header, description = "The dataset id to use for the request"),
("group_id" = uuid, description = "Id of the group to add the chunk to as a bookmark"),
("tracking_id" = uuid, description = "Id of the group to add the chunk to as a bookmark"),
),
security(
("ApiKey" = ["admin"]),
@@ -569,7 +569,7 @@ pub struct BookmarkData {
pub total_pages: i64,
}

#[derive(Serialize, Deserialize, Debug, ToSchema)]
#[derive(Serialize, Deserialize, Debug)]
pub struct GetAllBookmarksData {
pub group_id: uuid::Uuid,
pub page: Option<u64>,
@@ -628,7 +628,7 @@ pub async fn get_chunks_in_group(
}))
}

#[derive(Serialize, Deserialize, Debug, ToSchema)]
#[derive(Serialize, Deserialize, Debug)]
pub struct GetAllBookmarksByTrackingIdData {
pub tracking_id: String,
pub page: Option<u64>,
@@ -726,7 +726,7 @@ pub async fn get_groups_chunk_is_in(
Ok(HttpResponse::Ok().json(groups))
}

#[derive(Deserialize, Serialize, ToSchema)]
#[derive(Deserialize, Serialize)]
pub struct DeleteBookmarkPathData {
pub chunk_id: uuid::Uuid,
}
@@ -795,7 +795,7 @@ pub async fn group_unique_search(
Ok(group)
}

#[derive(Serialize, Deserialize, Debug, ToSchema)]
#[derive(Serialize, Deserialize, Debug)]
pub struct GenerateOffGroupData {
pub group_id: uuid::Uuid,
pub page: Option<u64>,
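
Most of the group_handler changes tighten the `#[utoipa::path]` annotations: the tracking-id routes now carry the parameter in the path template itself (e.g. `/chunk_group/tracking_id/{tracking_id}`) and parameter types are written as `uuid::Uuid` rather than the bare `uuid`. A hedged sketch of the resulting pattern, with a placeholder handler body and `String` standing in for the real types:

```rust
// Sketch of the annotation pattern only; the handler body and types are
// placeholders, not Trieve's actual update_group_by_tracking_id.
use actix_web::{web, HttpResponse};

#[utoipa::path(
    put,
    path = "/chunk_group/tracking_id/{tracking_id}",
    context_path = "/api",
    tag = "chunk_group",
    params(
        ("TR-Dataset" = String, Header, description = "The dataset id to use for the request"),
        ("tracking_id" = String, Path, description = "Tracking id of the chunk_group to update"),
    ),
    responses(
        (status = 204, description = "Confirmation that the group was updated"),
    ),
)]
pub async fn update_group_by_tracking_id(tracking_id: web::Path<String>) -> HttpResponse {
    // Placeholder body; the real handler updates the group in the database.
    let _ = tracking_id.into_inner();
    HttpResponse::NoContent().finish()
}
```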