From 0884bf1e5bde7314a174dd6540dced8f13bfd075 Mon Sep 17 00:00:00 2001 From: shumon84 Date: Mon, 9 Sep 2024 02:31:50 +0900 Subject: [PATCH 01/10] feat(asset): add asset.graphql --- asset/asset.graphql | 152 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 152 insertions(+) create mode 100644 asset/asset.graphql diff --git a/asset/asset.graphql b/asset/asset.graphql new file mode 100644 index 0000000..f333b95 --- /dev/null +++ b/asset/asset.graphql @@ -0,0 +1,152 @@ +type Asset implements Node { + id: ID! + project: Project! + projectId: ID! + createdAt: DateTime! + createdBy: Operator! + createdByType: OperatorType! + createdById: ID! + items: [AssetItem!] + size: FileSize! + previewType: PreviewType + uuid: String! + thread: Thread + threadId: ID! + url: String! + fileName: String! + archiveExtractionStatus: ArchiveExtractionStatus +} +type AssetItem { + itemId: ID! + modelId: ID! +} + +type AssetFile { + name: String! + size: FileSize! + contentType: String + path: String! + filePaths: [String!] +} + +enum PreviewType { + IMAGE + IMAGE_SVG + GEO + GEO_3D_TILES + GEO_MVT + MODEL_3D + CSV + UNKNOWN +} + +enum ArchiveExtractionStatus { + SKIPPED + PENDING + IN_PROGRESS + DONE + FAILED +} + +input CreateAssetInput { + projectId: ID! + file: Upload + url: String + token: String + skipDecompression: Boolean +} + +# If `cursor` is specified, both `filename` and `contentLength` will be ignored. +input CreateAssetUploadInput { + projectId: ID! + + # The name of the file to upload. + filename: String + # The size of the file to upload. + contentLength: Int + + # Required if uploading in multiple parts. + cursor: String +} + +input UpdateAssetInput { + id: ID! + previewType: PreviewType +} + +input DeleteAssetInput { + assetId: ID! +} + +input DecompressAssetInput { + assetId: ID! +} + +type CreateAssetPayload { + asset: Asset! +} + +type UpdateAssetPayload { + asset: Asset! +} + +type DeleteAssetPayload { + assetId: ID! 
+} + +type DecompressAssetPayload { + asset: Asset! +} + +type CreateAssetUploadPayload { + # A token identifying the sequence of uploads. + # If an empty string is returned, it means that issuing URLs is not supported, and the `file` in CreateAsset must be used. + # If splitting the upload is necessary, it is guaranteed that the same value will be returned. + token: String! + # The URL to which the PUT request should be made. + # An empty string return means that the upload process has been completed. + url: String! + # The MIME type for the PUT request. + # If unspecified or an empty string, the Content-Type should not be sent. + contentType: String + # The size of the upload. + contentLength: Int! + # A cursor to obtain the URL for the next PUT request. + next: String +} + +type AssetConnection { + edges: [AssetEdge!]! + nodes: [Asset]! + pageInfo: PageInfo! + totalCount: Int! +} + +type AssetEdge { + cursor: Cursor! + node: Asset +} + +enum AssetSortType { + DATE + SIZE + NAME +} + +input AssetSort { + sortBy: AssetSortType! + direction: SortDirection +} + +extend type Query { + assetFile(assetId: ID!): AssetFile! + assets(projectId: ID!, keyword: String, sort: AssetSort, pagination: Pagination): AssetConnection! 
+} + +extend type Mutation { + createAsset(input: CreateAssetInput!): CreateAssetPayload + updateAsset(input: UpdateAssetInput!): UpdateAssetPayload + deleteAsset(input: DeleteAssetInput!): DeleteAssetPayload + decompressAsset(input: DecompressAssetInput!): DecompressAssetPayload + createAssetUpload(input: CreateAssetUploadInput!): CreateAssetUploadPayload +} From 619b70076d6b9359f799fade54f61ecd5ef47f87 Mon Sep 17 00:00:00 2001 From: shumon84 Date: Mon, 9 Sep 2024 02:32:23 +0900 Subject: [PATCH 02/10] feat(asset): add assetdomain package --- asset/assetdomain/asset/asset.go | 112 ++ asset/assetdomain/asset/asset_test.go | 151 ++ asset/assetdomain/asset/builder.go | 121 ++ asset/assetdomain/asset/builder_test.go | 322 ++++ asset/assetdomain/asset/common.go | 14 + asset/assetdomain/asset/file.go | 165 +++ asset/assetdomain/asset/file_builder.go | 74 + asset/assetdomain/asset/file_builder_test.go | 61 + asset/assetdomain/asset/file_test.go | 198 +++ asset/assetdomain/asset/id.go | 37 + asset/assetdomain/asset/list.go | 29 + asset/assetdomain/asset/list_test.go | 53 + asset/assetdomain/asset/map.go | 20 + asset/assetdomain/asset/map_test.go | 28 + asset/assetdomain/asset/preview_type.go | 128 ++ asset/assetdomain/asset/preview_type_test.go | 318 ++++ asset/assetdomain/asset/status.go | 57 + asset/assetdomain/asset/status_test.go | 96 ++ asset/assetdomain/asset/upload.go | 37 + asset/assetdomain/asset/upload_builder.go | 44 + asset/assetdomain/event/builder.go | 62 + asset/assetdomain/event/builder_test.go | 43 + asset/assetdomain/event/event.go | 82 + asset/assetdomain/event/event_test.go | 31 + asset/assetdomain/event/id.go | 17 + asset/assetdomain/file/file.go | 86 ++ asset/assetdomain/file/file_test.go | 52 + asset/assetdomain/file/testdata/test.txt | 1 + asset/assetdomain/id.go | 320 ++++ asset/assetdomain/integration/builder.go | 91 ++ asset/assetdomain/integration/builder_test.go | 519 +++++++ asset/assetdomain/integration/id.go | 19 + 
asset/assetdomain/integration/integration.go | 175 +++ .../integration/integration_test.go | 1316 +++++++++++++++++ .../integration/integration_type.go | 22 + .../integration/integration_type_test.go | 39 + asset/assetdomain/integration/list.go | 30 + asset/assetdomain/integration/list_test.go | 197 +++ asset/assetdomain/integration/webhook.go | 109 ++ .../integration/webhook_builder.go | 74 + .../integration/webhook_builder_test.go | 392 +++++ asset/assetdomain/integration/webhook_test.go | 501 +++++++ asset/assetdomain/operator/id.go | 14 + asset/assetdomain/operator/operator.go | 43 + asset/assetdomain/operator/operator_test.go | 37 + asset/assetdomain/project/builder.go | 96 ++ asset/assetdomain/project/builder_test.go | 296 ++++ asset/assetdomain/project/id.go | 40 + asset/assetdomain/project/id_test.go | 16 + asset/assetdomain/project/list.go | 20 + asset/assetdomain/project/list_test.go | 36 + asset/assetdomain/project/project.go | 141 ++ asset/assetdomain/project/project_test.go | 160 ++ asset/assetdomain/project/publication.go | 55 + asset/assetdomain/project/publication_test.go | 94 ++ asset/assetdomain/task/task.go | 45 + asset/assetdomain/thread/builder.go | 54 + asset/assetdomain/thread/builder_test.go | 126 ++ asset/assetdomain/thread/comment.go | 53 + asset/assetdomain/thread/comment_test.go | 44 + asset/assetdomain/thread/common.go | 12 + asset/assetdomain/thread/id.go | 34 + asset/assetdomain/thread/list.go | 20 + asset/assetdomain/thread/list_test.go | 37 + asset/assetdomain/thread/thread.go | 94 ++ asset/assetdomain/thread/thread_test.go | 137 ++ 66 files changed, 7947 insertions(+) create mode 100644 asset/assetdomain/asset/asset.go create mode 100644 asset/assetdomain/asset/asset_test.go create mode 100644 asset/assetdomain/asset/builder.go create mode 100644 asset/assetdomain/asset/builder_test.go create mode 100644 asset/assetdomain/asset/common.go create mode 100644 asset/assetdomain/asset/file.go create mode 100644 
asset/assetdomain/asset/file_builder.go create mode 100644 asset/assetdomain/asset/file_builder_test.go create mode 100644 asset/assetdomain/asset/file_test.go create mode 100644 asset/assetdomain/asset/id.go create mode 100644 asset/assetdomain/asset/list.go create mode 100644 asset/assetdomain/asset/list_test.go create mode 100644 asset/assetdomain/asset/map.go create mode 100644 asset/assetdomain/asset/map_test.go create mode 100644 asset/assetdomain/asset/preview_type.go create mode 100644 asset/assetdomain/asset/preview_type_test.go create mode 100644 asset/assetdomain/asset/status.go create mode 100644 asset/assetdomain/asset/status_test.go create mode 100644 asset/assetdomain/asset/upload.go create mode 100644 asset/assetdomain/asset/upload_builder.go create mode 100644 asset/assetdomain/event/builder.go create mode 100644 asset/assetdomain/event/builder_test.go create mode 100644 asset/assetdomain/event/event.go create mode 100644 asset/assetdomain/event/event_test.go create mode 100644 asset/assetdomain/event/id.go create mode 100644 asset/assetdomain/file/file.go create mode 100644 asset/assetdomain/file/file_test.go create mode 100644 asset/assetdomain/file/testdata/test.txt create mode 100644 asset/assetdomain/id.go create mode 100644 asset/assetdomain/integration/builder.go create mode 100644 asset/assetdomain/integration/builder_test.go create mode 100644 asset/assetdomain/integration/id.go create mode 100644 asset/assetdomain/integration/integration.go create mode 100644 asset/assetdomain/integration/integration_test.go create mode 100644 asset/assetdomain/integration/integration_type.go create mode 100644 asset/assetdomain/integration/integration_type_test.go create mode 100644 asset/assetdomain/integration/list.go create mode 100644 asset/assetdomain/integration/list_test.go create mode 100644 asset/assetdomain/integration/webhook.go create mode 100644 asset/assetdomain/integration/webhook_builder.go create mode 100644 
asset/assetdomain/integration/webhook_builder_test.go create mode 100644 asset/assetdomain/integration/webhook_test.go create mode 100644 asset/assetdomain/operator/id.go create mode 100644 asset/assetdomain/operator/operator.go create mode 100644 asset/assetdomain/operator/operator_test.go create mode 100644 asset/assetdomain/project/builder.go create mode 100644 asset/assetdomain/project/builder_test.go create mode 100644 asset/assetdomain/project/id.go create mode 100644 asset/assetdomain/project/id_test.go create mode 100644 asset/assetdomain/project/list.go create mode 100644 asset/assetdomain/project/list_test.go create mode 100644 asset/assetdomain/project/project.go create mode 100644 asset/assetdomain/project/project_test.go create mode 100644 asset/assetdomain/project/publication.go create mode 100644 asset/assetdomain/project/publication_test.go create mode 100644 asset/assetdomain/task/task.go create mode 100644 asset/assetdomain/thread/builder.go create mode 100644 asset/assetdomain/thread/builder_test.go create mode 100644 asset/assetdomain/thread/comment.go create mode 100644 asset/assetdomain/thread/comment_test.go create mode 100644 asset/assetdomain/thread/common.go create mode 100644 asset/assetdomain/thread/id.go create mode 100644 asset/assetdomain/thread/list.go create mode 100644 asset/assetdomain/thread/list_test.go create mode 100644 asset/assetdomain/thread/thread.go create mode 100644 asset/assetdomain/thread/thread_test.go diff --git a/asset/assetdomain/asset/asset.go b/asset/assetdomain/asset/asset.go new file mode 100644 index 0000000..d2e4648 --- /dev/null +++ b/asset/assetdomain/asset/asset.go @@ -0,0 +1,112 @@ +package asset + +import ( + "time" + + "github.com/reearth/reearthx/account/accountdomain" + "github.com/reearth/reearthx/util" +) + +type Asset struct { + id ID + project ProjectID + createdAt time.Time + user *accountdomain.UserID + integration *IntegrationID + fileName string + size uint64 + previewType *PreviewType + uuid 
string + thread ThreadID + archiveExtractionStatus *ArchiveExtractionStatus + flatFiles bool +} + +type URLResolver = func(*Asset) string + +func (a *Asset) ID() ID { + return a.id +} + +func (a *Asset) Project() ProjectID { + return a.project +} + +func (a *Asset) CreatedAt() time.Time { + if a == nil { + return time.Time{} + } + + return a.createdAt +} + +func (a *Asset) User() *accountdomain.UserID { + return a.user +} + +func (a *Asset) Integration() *IntegrationID { + return a.integration +} + +func (a *Asset) FileName() string { + return a.fileName +} + +func (a *Asset) Size() uint64 { + return a.size +} + +func (a *Asset) PreviewType() *PreviewType { + if a.previewType == nil { + return nil + } + return a.previewType +} + +func (a *Asset) UUID() string { + return a.uuid +} + +func (a *Asset) ArchiveExtractionStatus() *ArchiveExtractionStatus { + if a.archiveExtractionStatus == nil { + return nil + } + return a.archiveExtractionStatus +} + +func (a *Asset) UpdatePreviewType(p *PreviewType) { + a.previewType = util.CloneRef(p) +} + +func (a *Asset) UpdateArchiveExtractionStatus(s *ArchiveExtractionStatus) { + a.archiveExtractionStatus = util.CloneRef(s) +} + +func (a *Asset) Clone() *Asset { + if a == nil { + return nil + } + + return &Asset{ + id: a.id.Clone(), + project: a.project.Clone(), + createdAt: a.createdAt, + user: a.user.CloneRef(), + integration: a.integration.CloneRef(), + fileName: a.fileName, + size: a.size, + previewType: a.previewType, + uuid: a.uuid, + thread: a.thread.Clone(), + archiveExtractionStatus: a.archiveExtractionStatus, + flatFiles: a.flatFiles, + } +} + +func (a *Asset) Thread() ThreadID { + return a.thread +} + +func (a *Asset) FlatFiles() bool { + return a.flatFiles +} diff --git a/asset/assetdomain/asset/asset_test.go b/asset/assetdomain/asset/asset_test.go new file mode 100644 index 0000000..2897a71 --- /dev/null +++ b/asset/assetdomain/asset/asset_test.go @@ -0,0 +1,151 @@ +package asset + +import ( + "testing" + "time" + + 
"github.com/reearth/reearthx/account/accountdomain" + "github.com/samber/lo" + "github.com/stretchr/testify/assert" +) + +func TestAsset_Type(t *testing.T) { + aid := NewID() + pid := NewProjectID() + uid := accountdomain.NewUserID() + iid := NewIntegrationID() + thid := NewThreadID() + tim, _ := time.Parse(time.RFC3339, "2021-03-16T04:19:57.592Z") + var size uint64 = 15 + wantPreviewType, _ := PreviewTypeFrom("image") + gotPreviewType, _ := PreviewTypeFrom(PreviewTypeImage.String()) + wantStatus, _ := ArchiveExtractionStatusFrom("pending") + gotStatus, _ := ArchiveExtractionStatusFrom(ArchiveExtractionStatusPending.String()) + + got := Asset{ + id: aid, + project: pid, + createdAt: tim, + user: &uid, + integration: &iid, + fileName: "hoge", + size: size, + previewType: &gotPreviewType, + uuid: "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", + thread: thid, + archiveExtractionStatus: &gotStatus, + } + + assert.Equal(t, aid, got.ID()) + assert.Equal(t, pid, got.Project()) + assert.Equal(t, tim, got.CreatedAt()) + assert.Equal(t, &uid, got.User()) + assert.Equal(t, &iid, got.Integration()) + assert.Equal(t, "hoge", got.FileName()) + assert.Equal(t, size, got.Size()) + assert.Equal(t, &wantPreviewType, got.PreviewType()) + assert.Equal(t, "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", got.UUID()) + assert.Equal(t, thid, got.Thread()) + assert.Equal(t, &wantStatus, got.ArchiveExtractionStatus()) +} + +func TestAsset_CreatedAt(t *testing.T) { + // if asset is nil Asset.CreatedAt() should be time.Time{} + var got *Asset = nil + assert.Equal(t, time.Time{}, got.CreatedAt()) +} + +func TestAsset_PreviewType(t *testing.T) { + aid := NewID() + pid := NewProjectID() + uid := accountdomain.NewUserID() + tim, _ := time.Parse(time.RFC3339, "2021-03-16T04:19:57.592Z") + var size uint64 = 15 + + got := Asset{ + id: aid, + project: pid, + createdAt: tim, + user: &uid, + fileName: "hoge", + size: size, + uuid: "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", + } + + assert.Nil(t, got.PreviewType()) +} + 
+func TestAsset_Status(t *testing.T) { + aid := NewID() + pid := NewProjectID() + uid := accountdomain.NewUserID() + tim, _ := time.Parse(time.RFC3339, "2021-03-16T04:19:57.592Z") + var size uint64 = 15 + + got := Asset{ + id: aid, + project: pid, + createdAt: tim, + user: &uid, + fileName: "hoge", + size: size, + uuid: "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", + } + + assert.Nil(t, got.ArchiveExtractionStatus()) +} + +func TestAsset_UpdatePreviewType(t *testing.T) { + aid := NewID() + pid := NewProjectID() + uid := accountdomain.NewUserID() + tim, _ := time.Parse(time.RFC3339, "2021-03-16T04:19:57.592Z") + var size uint64 = 15 + + got := Asset{ + id: aid, + project: pid, + createdAt: tim, + user: &uid, + fileName: "hoge", + size: size, + uuid: "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", + } + + pt := lo.ToPtr(PreviewTypeImage) + got.UpdatePreviewType(pt) + assert.Equal(t, pt, got.PreviewType()) +} + +func TestAsset_UpdateStatus(t *testing.T) { + aid := NewID() + pid := NewProjectID() + uid := accountdomain.NewUserID() + tim, _ := time.Parse(time.RFC3339, "2021-03-16T04:19:57.592Z") + var size uint64 = 15 + + got := Asset{ + id: aid, + project: pid, + createdAt: tim, + user: &uid, + fileName: "hoge", + size: size, + uuid: "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", + } + + p := lo.ToPtr(ArchiveExtractionStatusPending) + got.UpdateArchiveExtractionStatus(p) + assert.Equal(t, p, got.ArchiveExtractionStatus()) +} + +func TestAsset_Clone(t *testing.T) { + pid := NewProjectID() + uid := accountdomain.NewUserID() + a := New().NewID().Project(pid).CreatedByUser(uid).Size(1000).Thread(NewThreadID()).NewUUID().MustBuild() + + got := a.Clone() + assert.Equal(t, a, got) + assert.NotSame(t, a, got) + assert.Nil(t, (*Asset)(nil).Clone()) +} diff --git a/asset/assetdomain/asset/builder.go b/asset/assetdomain/asset/builder.go new file mode 100644 index 0000000..8fc074b --- /dev/null +++ b/asset/assetdomain/asset/builder.go @@ -0,0 +1,121 @@ +package asset + +import ( + "time" + + 
"github.com/google/uuid" + "github.com/reearth/reearthx/account/accountdomain" +) + +type Builder struct { + a *Asset +} + +func New() *Builder { + return &Builder{a: &Asset{}} +} + +func (b *Builder) Build() (*Asset, error) { + if b.a.id.IsNil() { + return nil, ErrInvalidID + } + if b.a.project.IsNil() { + return nil, ErrNoProjectID + } + if b.a.user.IsNil() && b.a.integration.IsNil() { + return nil, ErrNoUser + } + if b.a.thread.IsNil() { + return nil, ErrNoThread + } + if b.a.size == 0 { + return nil, ErrZeroSize + } + if b.a.uuid == "" { + return nil, ErrNoUUID + } + if b.a.createdAt.IsZero() { + b.a.createdAt = b.a.id.Timestamp() + } + return b.a, nil +} + +func (b *Builder) MustBuild() *Asset { + r, err := b.Build() + if err != nil { + panic(err) + } + return r +} + +func (b *Builder) ID(id ID) *Builder { + b.a.id = id + return b +} + +func (b *Builder) NewID() *Builder { + b.a.id = NewID() + return b +} + +func (b *Builder) Project(pid ProjectID) *Builder { + b.a.project = pid + return b +} + +func (b *Builder) CreatedAt(createdAt time.Time) *Builder { + b.a.createdAt = createdAt + return b +} + +func (b *Builder) CreatedByUser(createdBy accountdomain.UserID) *Builder { + b.a.user = &createdBy + b.a.integration = nil + return b +} + +func (b *Builder) CreatedByIntegration(createdBy IntegrationID) *Builder { + b.a.integration = &createdBy + b.a.user = nil + return b +} + +func (b *Builder) FileName(name string) *Builder { + b.a.fileName = name + return b +} + +func (b *Builder) Size(size uint64) *Builder { + b.a.size = size + return b +} + +func (b *Builder) Type(t *PreviewType) *Builder { + b.a.previewType = t + return b +} + +func (b *Builder) UUID(uuid string) *Builder { + b.a.uuid = uuid + return b +} + +func (b *Builder) NewUUID() *Builder { + b.a.uuid = uuid.NewString() + return b +} + +func (b *Builder) Thread(th ThreadID) *Builder { + b.a.thread = th + return b +} + +func (b *Builder) ArchiveExtractionStatus(s *ArchiveExtractionStatus) *Builder { + 
b.a.archiveExtractionStatus = s + return b +} + +func (b *Builder) FlatFiles(flatFiles bool) *Builder { + b.a.flatFiles = flatFiles + return b +} diff --git a/asset/assetdomain/asset/builder_test.go b/asset/assetdomain/asset/builder_test.go new file mode 100644 index 0000000..e233921 --- /dev/null +++ b/asset/assetdomain/asset/builder_test.go @@ -0,0 +1,322 @@ +package asset + +import ( + "testing" + "time" + + "github.com/reearth/reearthx/account/accountdomain" + "github.com/samber/lo" + "github.com/stretchr/testify/assert" +) + +type Tests []struct { + name string + input Input + want *Asset + err error +} + +type Input struct { + id ID + project ProjectID + createdAt time.Time + createdByUser accountdomain.UserID + createdByIntegration IntegrationID + fileName string + size uint64 + previewType *PreviewType + uuid string + thread ThreadID + archiveExtractionStatus *ArchiveExtractionStatus +} + +func TestBuilder_Build(t *testing.T) { + var aid = NewID() + pid := NewProjectID() + uid := accountdomain.NewUserID() + iid := NewIntegrationID() + thid := NewThreadID() + tim, _ := time.Parse(time.RFC3339, "2021-03-16T04:19:57.592Z") + var size uint64 = 15 + + tests := Tests{ + { + name: "should create an asset", + input: Input{ + id: aid, + project: pid, + createdAt: tim, + createdByUser: uid, + fileName: "hoge", + size: size, + previewType: lo.ToPtr(PreviewTypeImage), + uuid: "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", + thread: thid, + archiveExtractionStatus: lo.ToPtr(ArchiveExtractionStatusPending), + }, + want: &Asset{ + id: aid, + project: pid, + createdAt: tim, + user: &uid, + fileName: "hoge", + size: size, + previewType: PreviewTypeFromRef(lo.ToPtr("image")), + uuid: "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", + thread: thid, + archiveExtractionStatus: lo.ToPtr(ArchiveExtractionStatusPending), + }, + }, + { + name: "fail: empty project id", + input: Input{ + id: aid, + createdByUser: uid, + fileName: "hoge", + size: size, + previewType: lo.ToPtr(PreviewTypeImage), + 
uuid: "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", + thread: thid, + archiveExtractionStatus: lo.ToPtr(ArchiveExtractionStatusPending), + }, + err: ErrNoProjectID, + }, + { + name: "fail: empty id", + input: Input{ + project: pid, + createdByUser: uid, + fileName: "hoge", + size: size, + previewType: lo.ToPtr(PreviewTypeImage), + uuid: "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", + thread: thid, + archiveExtractionStatus: lo.ToPtr(ArchiveExtractionStatusPending), + }, + err: ErrInvalidID, + }, + { + name: "fail: empty user", + input: Input{ + id: aid, + project: pid, + fileName: "hoge", + size: size, + previewType: lo.ToPtr(PreviewTypeImage), + uuid: "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", + thread: thid, + archiveExtractionStatus: lo.ToPtr(ArchiveExtractionStatusPending), + }, + err: ErrNoUser, + }, + { + name: "fail: zero size", + input: Input{ + id: aid, + project: pid, + createdByUser: uid, + fileName: "hoge", + size: 0, + previewType: lo.ToPtr(PreviewTypeImage), + uuid: "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", + thread: thid, + archiveExtractionStatus: lo.ToPtr(ArchiveExtractionStatusPending), + }, + err: ErrZeroSize, + }, + { + name: "fail: invalid threadId", + input: Input{ + id: aid, + project: pid, + createdByUser: uid, + fileName: "hoge", + size: size, + previewType: lo.ToPtr(PreviewTypeImage), + uuid: "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", + thread: ThreadID{}, + archiveExtractionStatus: lo.ToPtr(ArchiveExtractionStatusPending), + }, + err: ErrNoThread, + }, + { + name: "fail: no uuid", + input: Input{ + id: aid, + project: pid, + createdByUser: uid, + fileName: "hoge", + size: size, + previewType: PreviewTypeFromRef(lo.ToPtr(PreviewTypeImage.String())), + thread: thid, + }, + err: ErrNoUUID, + }, + { + name: "should create asset with id timestamp", + input: Input{ + id: aid, + project: pid, + createdByUser: uid, + fileName: "hoge", + size: size, + previewType: lo.ToPtr(PreviewTypeImage), + uuid: "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", + thread: thid, + 
archiveExtractionStatus: lo.ToPtr(ArchiveExtractionStatusPending), + }, + want: &Asset{ + id: aid, + project: pid, + createdAt: aid.Timestamp(), + user: &uid, + fileName: "hoge", + size: size, + previewType: PreviewTypeFromRef(lo.ToPtr("image")), + uuid: "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", + thread: thid, + archiveExtractionStatus: lo.ToPtr(ArchiveExtractionStatusPending), + }, + }, + { + name: "should create asset with id timestamp", + input: Input{ + id: aid, + project: pid, + createdByIntegration: iid, + fileName: "hoge", + size: size, + previewType: lo.ToPtr(PreviewTypeImage), + uuid: "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", + thread: thid, + archiveExtractionStatus: lo.ToPtr(ArchiveExtractionStatusPending), + }, + want: &Asset{ + id: aid, + project: pid, + createdAt: aid.Timestamp(), + integration: &iid, + fileName: "hoge", + size: size, + previewType: PreviewTypeFromRef(lo.ToPtr("image")), + uuid: "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", + thread: thid, + archiveExtractionStatus: lo.ToPtr(ArchiveExtractionStatusPending), + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ab := New(). + ID(tt.input.id). + Project(tt.input.project). + CreatedAt(tt.input.createdAt). + FileName(tt.input.fileName). + Size(tt.input.size). + Type(tt.input.previewType). + UUID(tt.input.uuid). + Thread(tt.input.thread). 
+ ArchiveExtractionStatus(tt.input.archiveExtractionStatus) + if !tt.input.createdByUser.IsNil() { + ab.CreatedByUser(tt.input.createdByUser) + } + if !tt.input.createdByIntegration.IsNil() { + ab.CreatedByIntegration(tt.input.createdByIntegration) + } + + got, err := ab.Build() + if tt.err != nil { + assert.Equal(t, tt.err, err) + } else { + assert.Equal(t, tt.want, got) + } + }) + } +} + +func TestBuilder_MustBuild(t *testing.T) { + var aid = NewID() + pid := NewProjectID() + uid := accountdomain.NewUserID() + thid := NewThreadID() + tim, _ := time.Parse(time.RFC3339, "2021-03-16T04:19:57.592Z") + var size uint64 = 15 + + tests := Tests{ + { + name: "Valid asset", + input: Input{ + id: aid, + project: pid, + createdAt: tim, + createdByUser: uid, + fileName: "hoge", + size: size, + previewType: PreviewTypeFromRef(lo.ToPtr("image")), + uuid: "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", + thread: thid, + }, + want: &Asset{ + id: aid, + project: pid, + createdAt: tim, + user: &uid, + fileName: "hoge", + size: size, + previewType: PreviewTypeFromRef(lo.ToPtr("image")), + uuid: "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", + thread: thid, + }, + }, + { + name: "fail: Invalid Id", + input: Input{ + id: ID{}, + project: pid, + createdAt: tim, + createdByUser: uid, + fileName: "hoge", + size: size, + previewType: PreviewTypeFromRef(lo.ToPtr("image")), + uuid: "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", + thread: thid, + }, + err: ErrInvalidID, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + build := func() *Asset { + t.Helper() + return New(). + ID(tt.input.id). + Project(tt.input.project). + CreatedAt(tt.input.createdAt). + CreatedByUser(tt.input.createdByUser). + FileName(tt.input.fileName). + Type(tt.input.previewType). + Size(tt.input.size). + UUID(tt.input.uuid). + Thread(tt.input.thread). 
+ MustBuild() + } + if tt.err != nil { + assert.PanicsWithValue(t, tt.err, func() { _ = build() }) + } else { + assert.Equal(t, tt.want, build()) + } + }) + } +} + +func TestBuilder_NewID(t *testing.T) { + pid := NewProjectID() + uid := accountdomain.NewUserID() + var size uint64 = 15 + a := New().NewID().Project(pid).CreatedByUser(uid).Size(size).Thread(NewThreadID()).NewUUID().MustBuild() + assert.False(t, a.id.IsNil()) +} diff --git a/asset/assetdomain/asset/common.go b/asset/assetdomain/asset/common.go new file mode 100644 index 0000000..d959484 --- /dev/null +++ b/asset/assetdomain/asset/common.go @@ -0,0 +1,14 @@ +package asset + +import ( + "github.com/reearth/reearthx/i18n" + "github.com/reearth/reearthx/rerror" +) + +var ( + ErrNoProjectID = rerror.NewE(i18n.T("projectID is required")) + ErrZeroSize = rerror.NewE(i18n.T("file size cannot be zero")) + ErrNoUser = rerror.NewE(i18n.T("createdBy is required")) + ErrNoThread = rerror.NewE(i18n.T("thread is required")) + ErrNoUUID = rerror.NewE(i18n.T("uuid is required")) +) diff --git a/asset/assetdomain/asset/file.go b/asset/assetdomain/asset/file.go new file mode 100644 index 0000000..5a800dc --- /dev/null +++ b/asset/assetdomain/asset/file.go @@ -0,0 +1,165 @@ +package asset + +import ( + "path" + "strings" + + "github.com/samber/lo" + "golang.org/x/exp/slices" +) + +type File struct { + name string + size uint64 + contentType string + path string + children []*File + files []*File +} + +func (f *File) Name() string { + if f == nil { + return "" + } + return f.name +} + +func (f *File) SetName(n string) { + f.name = n +} + +func (f *File) Size() uint64 { + if f == nil { + return 0 + } + return f.size +} + +func (f *File) ContentType() string { + if f == nil { + return "" + } + return f.contentType +} + +func (f *File) Path() string { + if f == nil { + return "" + } + return f.path +} + +func (f *File) Children() []*File { + if f == nil { + return nil + } + return slices.Clone(f.children) +} + +func (f *File) 
Files() []*File { + return slices.Clone(f.files) +} + +func (f *File) SetFiles(s []*File) { + f.files = lo.Filter(s, func(af *File, _ int) bool { + return af.Path() != f.Path() + }) +} + +func (f *File) FilePaths() []string { + return lo.Map(f.files, func(f *File, _ int) string { return f.path }) +} + +func (f *File) IsDir() bool { + return f != nil && f.children != nil +} + +func (f *File) AppendChild(c *File) { + if f == nil { + return + } + f.children = append(f.children, c) +} + +func (f *File) Clone() *File { + if f == nil { + return nil + } + + var children []*File + if f.children != nil { + children = lo.Map(f.children, func(f *File, _ int) *File { return f.Clone() }) + } + + return &File{ + name: f.name, + size: f.size, + contentType: f.contentType, + path: f.path, + children: children, + } +} + +// FlattenChildren recursively collects all children of the File object into a flat slice. +// It returns a slice of File objects containing all children in a flattened structure. +func (f *File) FlattenChildren() (res []*File) { + if f == nil { + return nil + } + if len(f.children) > 0 { + for _, c := range f.children { + res = append(res, c.FlattenChildren()...) + } + } else { + res = append(res, f) + } + return +} + +func (f *File) RootPath(uuid string) string { + if f == nil { + return "" + } + return path.Join(uuid[:2], uuid[2:], f.path) +} + +// FoldFiles organizes files into directories and returns the files as children of the parent directory. +// The parent directory refers to a zip file located in the root directory and is treated as the root directory. 
+func FoldFiles(files []*File, parent *File) *File { + files = slices.Clone(files) + slices.SortFunc(files, func(a, b *File) int { + return strings.Compare(a.Path(), b.Path()) + }) + + folded := *parent + folded.children = nil + + const rootDir = "/" + dirs := map[string]*File{ + rootDir: &folded, + } + for i := range files { + parentDir := rootDir + names := strings.TrimPrefix(files[i].Path(), "/") + for { + name, rest, found := strings.Cut(names, "/") + if !found { + break + } + names = rest + + dir := path.Join(parentDir, name) + if _, ok := dirs[dir]; !ok { + d := &File{ + name: name, + path: dir, + } + dirs[parentDir].AppendChild(d) + dirs[dir] = d + } + parentDir = dir + } + dirs[parentDir].AppendChild(files[i]) + } + return &folded +} diff --git a/asset/assetdomain/asset/file_builder.go b/asset/assetdomain/asset/file_builder.go new file mode 100644 index 0000000..96c4ff2 --- /dev/null +++ b/asset/assetdomain/asset/file_builder.go @@ -0,0 +1,74 @@ +package asset + +import ( + "mime" + "path" + "strings" + + "golang.org/x/exp/slices" +) + +type FileBuilder struct { + f *File + detectContentType bool +} + +func NewFile() *FileBuilder { + return &FileBuilder{ + f: &File{}, + } +} + +func (b *FileBuilder) Name(name string) *FileBuilder { + b.f.name = name + return b +} + +func (b *FileBuilder) ContentType(contentType string) *FileBuilder { + b.f.contentType = contentType + return b +} + +func (b *FileBuilder) Path(filePath string) *FileBuilder { + if !strings.HasPrefix(filePath, "/") && filePath != "" { + filePath = "/" + filePath + } + + b.f.path = filePath + return b +} + +func (b *FileBuilder) Size(size uint64) *FileBuilder { + b.f.size = size + return b +} + +func (b *FileBuilder) Children(children []*File) *FileBuilder { + b.f.children = slices.Clone(children) + return b +} + +func (b *FileBuilder) Files(files []*File) *FileBuilder { + b.f.files = slices.Clone(files) + return b +} + +func (b *FileBuilder) GuessContentType() *FileBuilder { + 
b.detectContentType = true + return b +} + +func (b *FileBuilder) Dir() *FileBuilder { + if b.f.children == nil { + b.f.children = []*File{} + } + return b +} + +func (b *FileBuilder) Build() *File { + if b.detectContentType { + b.f.contentType = mime.TypeByExtension(path.Ext(b.f.path)) + } + + return b.f +} diff --git a/asset/assetdomain/asset/file_builder_test.go b/asset/assetdomain/asset/file_builder_test.go new file mode 100644 index 0000000..081aa66 --- /dev/null +++ b/asset/assetdomain/asset/file_builder_test.go @@ -0,0 +1,61 @@ +package asset + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestFileBuilder_Name(t *testing.T) { + name := "aaa" + f := NewFile().Name(name).Build() + assert.Equal(t, name, f.Name()) +} + +func TestFileBuilder_ContentType(t *testing.T) { + contentType := "image/jpg" + f := NewFile().ContentType(contentType).Build() + assert.Equal(t, contentType, f.ContentType()) +} + +func TestFileBuilder_Path(t *testing.T) { + path1 := "/hoge" + path2 := "fuga" + f1 := NewFile().Path(path1).Build() + assert.Equal(t, path1, f1.Path()) + + f2 := NewFile().Path(path2).Build() + assert.Equal(t, "/"+path2, f2.Path()) +} + +func TestFileBuilder_GuessContentType(t *testing.T) { + f := NewFile().GuessContentType() + assert.Equal(t, true, f.detectContentType) +} + +func TestFileBuilder_Dir(t *testing.T) { + f := NewFile().Dir().Build() + assert.NotNil(t, f.children) +} + +func TestFileBuilder_Build(t *testing.T) { + c := []*File{NewFile().Build()} + fl := []*File{NewFile().Build()} + // ContentType should be filled automatically + f := NewFile().Name("aaa").Path("/aaa.jpg").Size(1000).GuessContentType().Files(fl).Children(c).Build() + assert.Equal(t, "aaa", f.Name()) + assert.Equal(t, "/aaa.jpg", f.Path()) + assert.Equal(t, uint64(1000), f.Size()) + assert.Equal(t, "image/jpeg", f.ContentType()) + assert.Equal(t, c, f.Children()) + assert.Equal(t, fl, f.FlattenChildren()) + + // ContentType should be blank + f2 := 
NewFile().Name("aaa").Path("/aaa.jpg").Size(1000).Files(fl).Children(c).Build() + assert.Equal(t, "aaa", f2.Name()) + assert.Equal(t, "/aaa.jpg", f2.Path()) + assert.Equal(t, uint64(1000), f2.Size()) + assert.Zero(t, f2.ContentType()) + assert.Equal(t, c, f2.Children()) + assert.Equal(t, fl, f2.FlattenChildren()) +} diff --git a/asset/assetdomain/asset/file_test.go b/asset/assetdomain/asset/file_test.go new file mode 100644 index 0000000..414f3f7 --- /dev/null +++ b/asset/assetdomain/asset/file_test.go @@ -0,0 +1,198 @@ +package asset + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestFile_FileType(t *testing.T) { + c := NewFile().Build() + fl := []*File{NewFile().Build()} + f := NewFile().Name("aaa.txt").Path("/aaa.txt").Size(10).GuessContentType().Files(fl).Children([]*File{c}).Build() + + assert.Equal(t, "aaa.txt", f.Name()) + assert.Equal(t, uint64(10), f.Size()) + assert.Equal(t, "text/plain; charset=utf-8", f.ContentType()) + assert.Equal(t, "/aaa.txt", f.Path()) + assert.Equal(t, []*File{c}, f.Children()) + assert.Equal(t, fl, f.Files()) + + f.SetName("bbb") + assert.Equal(t, "bbb", f.Name()) + + c2 := NewFile().Build() + f.AppendChild(c2) + assert.Equal(t, []*File{c, c2}, f.Children()) + + dir := NewFile().Name("dir").Path("/aaa").Children([]*File{c}).Build() + assert.True(t, dir.IsDir()) +} + +func TestFile_Children(t *testing.T) { + // nil file should return nil children + var got *File = nil + assert.Nil(t, got.Children()) + + // file.Children() should return file.children + c := []*File{} + got = &File{ + children: c, + } + assert.Equal(t, c, got.Children()) +} + +func TestFile_Files(t *testing.T) { + f := &File{ + path: "aaa", + children: []*File{ + { + path: "aaa/a", + children: []*File{ + { + path: "aaa/a/a.txt", + }, + }, + }, + { + path: "aaa/b.txt", + }, + }, + } + + assert.Equal(t, []*File{ + { + path: "aaa/a/a.txt", + }, + { + path: "aaa/b.txt", + }, + }, f.FlattenChildren()) +} + +func TestFile_SetFiles(t 
*testing.T) { + root := NewFile().Build() + files := []*File{NewFile().Path("aaa/a/a.txt").Build(), NewFile().Path("aaa/b.txt").Build()} + root.SetFiles(files) + assert.Equal(t, files, root.files) + + root2 := NewFile().Path("aaa.zip").Build() + files2 := []*File{NewFile().Path("aaa.zip").Build(), NewFile().Path("aaa/a/a.txt").Build(), NewFile().Path("aaa/b.txt").Build()} + expected := []*File{NewFile().Path("aaa/a/a.txt").Build(), NewFile().Path("aaa/b.txt").Build()} + root2.SetFiles(files2) + assert.Equal(t, expected, root2.files) +} + +func Test_FoldFiles(t *testing.T) { + assert.Equal(t, + &File{ + name: "hello.zip", path: "/hello.zip", size: 100, contentType: "application/zip", + children: []*File{ + {name: "a.txt", path: "/a.txt", size: 10, contentType: "text/plain"}, + {name: "b.txt", path: "/b.txt", size: 20, contentType: "text/plain"}, + }, + }, + FoldFiles( + []*File{ + {name: "a.txt", path: "/a.txt", size: 10, contentType: "text/plain"}, + {name: "b.txt", path: "/b.txt", size: 20, contentType: "text/plain"}, + }, + &File{name: "hello.zip", path: "/hello.zip", size: 100, contentType: "application/zip"}, + ), + ) + assert.Equal(t, + &File{ + name: "hello.zip", path: "/hello.zip", size: 100, contentType: "application/zip", + children: []*File{ + {name: "hello", path: "/hello", size: 0, contentType: "", children: []*File{ + {name: "a.txt", path: "/hello/a.txt", size: 10, contentType: "text/plain"}, + {name: "b.txt", path: "/hello/b.txt", size: 20, contentType: "text/plain"}, + }}, + }, + }, + FoldFiles( + []*File{ + {name: "a.txt", path: "/hello/a.txt", size: 10, contentType: "text/plain"}, + {name: "b.txt", path: "/hello/b.txt", size: 20, contentType: "text/plain"}, + }, + &File{name: "hello.zip", path: "/hello.zip", size: 100, contentType: "application/zip"}, + ), + ) + + assert.Equal(t, + &File{ + name: "hello.zip", path: "/hello.zip", size: 100, contentType: "application/zip", + children: []*File{ + {name: "hello", path: "/hello", size: 0, contentType: 
"", children: []*File{ + {name: "c.txt", path: "/hello/c.txt", size: 20, contentType: "text/plain"}, + {name: "good", path: "/hello/good", size: 0, contentType: "", children: []*File{ + {name: "a.txt", path: "/hello/good/a.txt", size: 10, contentType: "text/plain"}, + {name: "b.txt", path: "/hello/good/b.txt", size: 10, contentType: "text/plain"}, + }}, + }}, + }, + }, + FoldFiles( + []*File{ + {name: "a.txt", path: "/hello/good/a.txt", size: 10, contentType: "text/plain"}, + {name: "b.txt", path: "/hello/good/b.txt", size: 10, contentType: "text/plain"}, + {name: "c.txt", path: "/hello/c.txt", size: 20, contentType: "text/plain"}, + }, + &File{name: "hello.zip", path: "/hello.zip", size: 100, contentType: "application/zip"}, + ), + ) + assert.Equal(t, + &File{ + name: "hello.zip", path: "/hello.zip", size: 100, contentType: "application/zip", + children: []*File{ + {name: "hello", path: "/hello", size: 0, contentType: "", children: []*File{ + {name: "hello", path: "/hello/hello", children: []*File{ + {name: "a.txt", path: "/hello/hello/a.txt", size: 10, contentType: "text/plain"}, + {name: "b.txt", path: "/hello/hello/b.txt", size: 10, contentType: "text/plain"}, + {name: "c", path: "/hello/hello/c", children: []*File{ + {name: "d.txt", path: "/hello/hello/c/d.txt", size: 20, contentType: "text/plain"}, + }}, + }}, + }, + }, + }, + }, + FoldFiles( + []*File{ + {name: "a.txt", path: "/hello/hello/a.txt", size: 10, contentType: "text/plain"}, + {name: "b.txt", path: "/hello/hello/b.txt", size: 10, contentType: "text/plain"}, + {name: "d.txt", path: "/hello/hello/c/d.txt", size: 20, contentType: "text/plain"}, + }, + &File{name: "hello.zip", path: "/hello.zip", size: 100, contentType: "application/zip"}, + ), + ) + + assert.Equal(t, + &File{ + name: "hello.zip", path: "/hello.zip", size: 100, contentType: "application/zip", + children: []*File{ + {name: "hello", path: "/hello", contentType: "", children: []*File{ + {name: "a.txt", path: "/hello/a.txt", size: 10, 
contentType: "text/plain"}, + }}, + {name: "hello_a", path: "/hello_a", children: []*File{ + {name: "b.txt", path: "/hello_a/b.txt", size: 10, contentType: "text/plain"}, + {name: "c", path: "/hello_a/c", children: []*File{ + {name: "d.txt", path: "/hello_a/c/d.txt", size: 20, contentType: "text/plain"}, + }}, + }}, + }, + }, + FoldFiles( + []*File{ + {name: "a.txt", path: "/hello/a.txt", size: 10, contentType: "text/plain"}, + {name: "b.txt", path: "/hello_a/b.txt", size: 10, contentType: "text/plain"}, + {name: "d.txt", path: "/hello_a/c/d.txt", size: 20, contentType: "text/plain"}, + }, + &File{name: "hello.zip", path: "/hello.zip", size: 100, contentType: "application/zip"}, + ), + ) +} + +func Test_File_RootPath(t *testing.T) { + assert.Equal(t, "xx/xxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/hoge.zip", (&File{path: "hoge.zip"}).RootPath("xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx")) +} diff --git a/asset/assetdomain/asset/id.go b/asset/assetdomain/asset/id.go new file mode 100644 index 0000000..efda98b --- /dev/null +++ b/asset/assetdomain/asset/id.go @@ -0,0 +1,37 @@ +package asset + +import ( + "github.com/reearth/reearthx/account/accountdomain" + "github.com/reearth/reearthx/asset/assetdomain" + "github.com/reearth/reearthx/idx" +) + +type ID = assetdomain.AssetID +type IDList = assetdomain.AssetIDList +type ProjectID = assetdomain.ProjectID +type UserID = accountdomain.UserID +type ThreadID = assetdomain.ThreadID +type IntegrationID = assetdomain.IntegrationID + +var NewID = assetdomain.NewAssetID +var NewProjectID = assetdomain.NewProjectID +var NewUserID = accountdomain.NewUserID +var NewThreadID = assetdomain.NewThreadID +var NewIntegrationID = assetdomain.NewIntegrationID + +var MustID = assetdomain.MustAssetID +var MustProjectID = assetdomain.MustProjectID +var MustUserID = accountdomain.MustUserID +var MustThreadID = assetdomain.MustThreadID + +var IDFrom = assetdomain.AssetIDFrom +var ProjectIDFrom = assetdomain.ProjectIDFrom +var UserIDFrom = 
accountdomain.UserIDFrom +var ThreadIDFrom = assetdomain.ThreadIDFrom + +var IDFromRef = assetdomain.AssetIDFromRef +var ProjectIDFromRef = assetdomain.ProjectIDFromRef +var UserIDFromRef = accountdomain.UserIDFromRef +var ThreadIDFromRef = assetdomain.ThreadIDFromRef + +var ErrInvalidID = idx.ErrInvalidID diff --git a/asset/assetdomain/asset/list.go b/asset/assetdomain/asset/list.go new file mode 100644 index 0000000..e9cc7cd --- /dev/null +++ b/asset/assetdomain/asset/list.go @@ -0,0 +1,29 @@ +package asset + +import ( + "github.com/reearth/reearthx/util" + "github.com/samber/lo" + "golang.org/x/exp/slices" +) + +type List []*Asset + +func (l List) SortByID() List { + m := slices.Clone(l) + slices.SortFunc(m, func(a, b *Asset) int { + return a.ID().Compare(b.ID()) + }) + return m +} + +func (l List) Clone() List { + return util.Map(l, func(p *Asset) *Asset { return p.Clone() }) +} + +func (l List) Map() Map { + return lo.SliceToMap(lo.Filter(l, func(a *Asset, _ int) bool { + return a != nil + }), func(a *Asset) (ID, *Asset) { + return a.ID(), a + }) +} diff --git a/asset/assetdomain/asset/list_test.go b/asset/assetdomain/asset/list_test.go new file mode 100644 index 0000000..099a847 --- /dev/null +++ b/asset/assetdomain/asset/list_test.go @@ -0,0 +1,53 @@ +package asset + +import ( + "testing" + + "github.com/reearth/reearthx/account/accountdomain" + "github.com/stretchr/testify/assert" +) + +func TestList_SortByID(t *testing.T) { + id1 := NewID() + id2 := NewID() + + list := List{ + &Asset{id: id2}, + &Asset{id: id1}, + } + res := list.SortByID() + + assert.Equal(t, List{ + &Asset{id: id1}, + &Asset{id: id2}, + }, res) + + assert.Equal(t, List{ + &Asset{id: id2}, + &Asset{id: id1}, + }, list) +} + +func TestList_Clone(t *testing.T) { + pid := NewProjectID() + uid := accountdomain.NewUserID() + + a := New().NewID().Project(pid).CreatedByUser(uid).Size(1000).Thread(NewThreadID()).NewUUID().MustBuild() + + list := List{a} + got := list.Clone() + assert.Equal(t, 
list, got) + assert.NotSame(t, list[0], got[0]) +} + +func TestList_Map(t *testing.T) { + pid := NewProjectID() + uid := accountdomain.NewUserID() + + a := New().NewID().Project(pid).CreatedByUser(uid).Size(1000).Thread(NewThreadID()).NewUUID().MustBuild() + + assert.Equal(t, Map{ + a.ID(): a, + }, List{a, nil}.Map()) + assert.Equal(t, Map{}, List(nil).Map()) +} diff --git a/asset/assetdomain/asset/map.go b/asset/assetdomain/asset/map.go new file mode 100644 index 0000000..f15835b --- /dev/null +++ b/asset/assetdomain/asset/map.go @@ -0,0 +1,20 @@ +package asset + +import "github.com/samber/lo" + +type Map map[ID]*Asset + +func (m Map) List() List { + return lo.MapToSlice(m, func(_ ID, v *Asset) *Asset { + return v + }) +} + +func (m Map) ListFrom(ids IDList) (res List) { + for _, id := range ids { + if a, ok := m[id]; ok { + res = append(res, a) + } + } + return +} diff --git a/asset/assetdomain/asset/map_test.go b/asset/assetdomain/asset/map_test.go new file mode 100644 index 0000000..eef3506 --- /dev/null +++ b/asset/assetdomain/asset/map_test.go @@ -0,0 +1,28 @@ +package asset + +import ( + "testing" + + "github.com/reearth/reearthx/account/accountdomain" + "github.com/stretchr/testify/assert" +) + +func TestMap_List(t *testing.T) { + pid := NewProjectID() + uid := accountdomain.NewUserID() + + a := New().NewID().Project(pid).CreatedByUser(uid).Size(1000).Thread(NewThreadID()).NewUUID().MustBuild() + + assert.Equal(t, List{a}, Map{a.ID(): a}.List()) + assert.Equal(t, List{}, Map(nil).List()) +} + +func TestMap_ListFrom(t *testing.T) { + pid := NewProjectID() + uid := accountdomain.NewUserID() + + a := New().NewID().Project(pid).CreatedByUser(uid).Size(1000).Thread(NewThreadID()).NewUUID().MustBuild() + + assert.Equal(t, List{a}, Map{a.ID(): a}.ListFrom(IDList{a.ID()})) + assert.Nil(t, Map(nil).ListFrom(nil)) +} diff --git a/asset/assetdomain/asset/preview_type.go b/asset/assetdomain/asset/preview_type.go new file mode 100644 index 0000000..5ee82cb --- /dev/null 
+++ b/asset/assetdomain/asset/preview_type.go @@ -0,0 +1,128 @@ +package asset + +import ( + "path/filepath" + "strings" + + "github.com/reearth/reearthx/asset/assetdomain/file" + "github.com/samber/lo" +) + +var ( + imageExtensions = []string{".jpg", ".jpeg", ".png", ".gif", ".tiff", ".webp"} + imageSVGExtension = ".svg" + geoExtensions = []string{".kml", ".czml", ".topojson", ".geojson"} + geoMvtExtension = ".mvt" + model3dExtensions = []string{".gltf", ".glb"} + csvExtension = ".csv" +) + +type PreviewType string + +func (p PreviewType) Ref() *PreviewType { + return &p +} + +const ( + PreviewTypeImage PreviewType = "image" + PreviewTypeImageSvg PreviewType = "image_svg" + PreviewTypeGeo PreviewType = "geo" + PreviewTypeGeo3dTiles PreviewType = "geo_3d_tiles" + PreviewTypeGeoMvt PreviewType = "geo_mvt" + PreviewTypeModel3d PreviewType = "model_3d" + PreviewTypeCSV PreviewType = "csv" + PreviewTypeUnknown PreviewType = "unknown" +) + +func PreviewTypeFrom(p string) (PreviewType, bool) { + pp := strings.ToLower(p) + switch PreviewType(pp) { + case PreviewTypeImage: + return PreviewTypeImage, true + case PreviewTypeImageSvg: + return PreviewTypeImageSvg, true + case PreviewTypeGeo: + return PreviewTypeGeo, true + case PreviewTypeGeo3dTiles: + return PreviewTypeGeo3dTiles, true + case PreviewTypeGeoMvt: + return PreviewTypeGeoMvt, true + case PreviewTypeModel3d: + return PreviewTypeModel3d, true + case PreviewTypeCSV: + return PreviewTypeCSV, true + case PreviewTypeUnknown: + return PreviewTypeUnknown, true + default: + return PreviewType(""), false + } +} + +func PreviewTypeFromRef(p *string) *PreviewType { + if p == nil { + return nil + } + + pp, ok := PreviewTypeFrom(*p) + if !ok { + return nil + } + return &pp +} + +func DetectPreviewType(f *file.File) *PreviewType { + pt := PreviewTypeFromContentType(f.ContentType) + if pt != PreviewTypeUnknown { + return lo.ToPtr(pt) + } + ext := filepath.Ext(f.Name) + pt = PreviewTypeFromExtension(ext) + return lo.ToPtr(pt) +} 
+ +func PreviewTypeFromContentType(c string) PreviewType { + if strings.HasPrefix(c, "image/") { + if strings.HasPrefix(c, "image/svg") { + return PreviewTypeImageSvg + } + return PreviewTypeImage + } + if strings.HasPrefix(c, "text/csv") { + return PreviewTypeCSV + } + return PreviewTypeUnknown +} + +func PreviewTypeFromExtension(ext string) PreviewType { + if lo.Contains(imageExtensions, ext) { + return PreviewTypeImage + } + if ext == imageSVGExtension { + return PreviewTypeImageSvg + } + if lo.Contains(geoExtensions, ext) { + return PreviewTypeGeo + } + if ext == geoMvtExtension { + return PreviewTypeGeoMvt + } + if lo.Contains(model3dExtensions, ext) { + return PreviewTypeModel3d + } + if ext == csvExtension { + return PreviewTypeCSV + } + return PreviewTypeUnknown +} + +func (p PreviewType) String() string { + return string(p) +} + +func (p *PreviewType) StringRef() *string { + if p == nil { + return nil + } + p2 := string(*p) + return &p2 +} diff --git a/asset/assetdomain/asset/preview_type_test.go b/asset/assetdomain/asset/preview_type_test.go new file mode 100644 index 0000000..a4a0f1a --- /dev/null +++ b/asset/assetdomain/asset/preview_type_test.go @@ -0,0 +1,318 @@ +package asset + +import ( + "testing" + + "github.com/reearth/reearthx/asset/assetdomain/file" + "github.com/samber/lo" + "github.com/stretchr/testify/assert" +) + +func TestPreviewType_PreviewTypeFrom(t *testing.T) { + tests := []struct { + Name string + Expected struct { + TA PreviewType + Bool bool + } + }{ + { + Name: "image", + Expected: struct { + TA PreviewType + Bool bool + }{ + TA: PreviewTypeImage, + Bool: true, + }, + }, + { + Name: "IMAGE", + Expected: struct { + TA PreviewType + Bool bool + }{ + TA: PreviewTypeImage, + Bool: true, + }, + }, + { + Name: "image_svg", + Expected: struct { + TA PreviewType + Bool bool + }{ + TA: PreviewTypeImageSvg, + Bool: true, + }, + }, + { + Name: "geo", + Expected: struct { + TA PreviewType + Bool bool + }{ + TA: PreviewTypeGeo, + Bool: true, + 
}, + }, + { + Name: "geo_3d_tiles", + Expected: struct { + TA PreviewType + Bool bool + }{ + TA: PreviewTypeGeo3dTiles, + Bool: true, + }, + }, + { + Name: "geo_mvt", + Expected: struct { + TA PreviewType + Bool bool + }{ + TA: PreviewTypeGeoMvt, + Bool: true, + }, + }, + { + Name: "model_3d", + Expected: struct { + TA PreviewType + Bool bool + }{ + TA: PreviewTypeModel3d, + Bool: true, + }, + }, + { + Name: "csv", + Expected: struct { + TA PreviewType + Bool bool + }{ + TA: PreviewTypeCSV, + Bool: true, + }, + }, + { + Name: "unknown", + Expected: struct { + TA PreviewType + Bool bool + }{ + TA: PreviewTypeUnknown, + Bool: true, + }, + }, + { + Name: "undefined", + Expected: struct { + TA PreviewType + Bool bool + }{ + TA: PreviewType(""), + Bool: false, + }, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res, ok := PreviewTypeFrom(tc.Name) + assert.Equal(t, tc.Expected.TA, res) + assert.Equal(t, tc.Expected.Bool, ok) + }) + } +} + +func TestPreviewType_PreviewTypeFromRef(t *testing.T) { + i := PreviewTypeImage + is := PreviewTypeImageSvg + g := PreviewTypeGeo + g3d := PreviewTypeGeo3dTiles + mvt := PreviewTypeGeoMvt + m := PreviewTypeModel3d + c := PreviewTypeCSV + u := PreviewTypeUnknown + + tests := []struct { + Name string + Input *string + Expected *PreviewType + }{ + { + Name: "image", + Input: lo.ToPtr("image"), + Expected: &i, + }, + { + Name: "upper case image", + Input: lo.ToPtr("IMAGE"), + Expected: &i, + }, + { + Name: "image_svg", + Input: lo.ToPtr("image_svg"), + Expected: &is, + }, + { + Name: "geo", + Input: lo.ToPtr("geo"), + Expected: &g, + }, + { + Name: "geo_3d_tiles", + Input: lo.ToPtr("geo_3d_tiles"), + Expected: &g3d, + }, + { + Name: "geo_mvt", + Input: lo.ToPtr("geo_mvt"), + Expected: &mvt, + }, + { + Name: "model_3d", + Input: lo.ToPtr("model_3d"), + Expected: &m, + }, + { + Name: "csv", + Input: lo.ToPtr("csv"), + Expected: &c, + }, + { + Name: "unknown", + Input: 
lo.ToPtr("unknown"), + Expected: &u, + }, + { + Name: "undefined", + Input: lo.ToPtr("undefined"), + }, + { + Name: "nil input", + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := PreviewTypeFromRef(tc.Input) + assert.Equal(t, tc.Expected, res) + }) + } +} + +func TestPreviewType_DetectPreviewType(t *testing.T) { + f1 := file.File{ + Name: "image.png", + ContentType: "image/png", + } + want1 := PreviewTypeImage + got1 := DetectPreviewType(&f1) + assert.Equal(t, want1, *got1) + + f2 := file.File{ + Name: "file.geojson", + ContentType: "application/json", + } + want2 := PreviewTypeGeo + got2 := DetectPreviewType(&f2) + assert.Equal(t, want2, *got2) +} + +func TestPreviewType_PreviewTypeFromContentType(t *testing.T) { + c1 := "image/png" + want1 := PreviewTypeImage + got1 := PreviewTypeFromContentType(c1) + assert.Equal(t, want1, got1) + + c2 := "video/mp4" + want2 := PreviewTypeUnknown + got2 := PreviewTypeFromContentType(c2) + assert.Equal(t, want2, got2) + + c3 := "image/svg" + want3 := PreviewTypeImageSvg + got3 := PreviewTypeFromContentType(c3) + assert.Equal(t, want3, got3) + + c4 := "text/csv" + want4 := PreviewTypeCSV + got4 := PreviewTypeFromContentType(c4) + assert.Equal(t, want4, got4) +} + +func TestPreviewType_PreviewTypeFromExtension(t *testing.T) { + ext1 := ".png" + want1 := PreviewTypeImage + got1 := PreviewTypeFromExtension(ext1) + assert.Equal(t, want1, got1) + + ext2 := ".kml" + want2 := PreviewTypeGeo + got2 := PreviewTypeFromExtension(ext2) + assert.Equal(t, want2, got2) + + ext3 := ".svg" + want3 := PreviewTypeImageSvg + got3 := PreviewTypeFromExtension(ext3) + assert.Equal(t, want3, got3) + + ext4 := ".csv" + want4 := PreviewTypeCSV + got4 := PreviewTypeFromExtension(ext4) + assert.Equal(t, want4, got4) + + ext5 := ".glb" + want5 := PreviewTypeModel3d + got5 := PreviewTypeFromExtension(ext5) + assert.Equal(t, want5, got5) + + ext6 := ".mvt" + want6 := PreviewTypeGeoMvt + got6 := 
PreviewTypeFromExtension(ext6) + assert.Equal(t, want6, got6) +} + +func TestPreviewType_String(t *testing.T) { + s := "image" + pt := PreviewTypeImage + assert.Equal(t, s, pt.String()) +} + +func TestPreviewType_StringRef(t *testing.T) { + var pt1 *PreviewType + var pt2 *PreviewType = lo.ToPtr(PreviewTypeImage) + s := string(*pt2) + + tests := []struct { + Name string + Input *string + Expected *string + }{ + { + Name: "nil PreviewType pointer", + Input: pt1.StringRef(), + Expected: nil, + }, + { + Name: "PreviewType pointer", + Input: pt2.StringRef(), + Expected: &s, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.Expected, tc.Input) + }) + } +} diff --git a/asset/assetdomain/asset/status.go b/asset/assetdomain/asset/status.go new file mode 100644 index 0000000..caf4cf1 --- /dev/null +++ b/asset/assetdomain/asset/status.go @@ -0,0 +1,57 @@ +package asset + +import ( + "strings" +) + +type ArchiveExtractionStatus string + +const ( + ArchiveExtractionStatusSkipped ArchiveExtractionStatus = "skipped" + ArchiveExtractionStatusPending ArchiveExtractionStatus = "pending" + ArchiveExtractionStatusInProgress ArchiveExtractionStatus = "in_progress" + ArchiveExtractionStatusDone ArchiveExtractionStatus = "done" + ArchiveExtractionStatusFailed ArchiveExtractionStatus = "failed" +) + +func ArchiveExtractionStatusFrom(s string) (ArchiveExtractionStatus, bool) { + ss := strings.ToLower(s) + switch ArchiveExtractionStatus(ss) { + case ArchiveExtractionStatusSkipped: + return ArchiveExtractionStatusSkipped, true + case ArchiveExtractionStatusPending: + return ArchiveExtractionStatusPending, true + case ArchiveExtractionStatusInProgress: + return ArchiveExtractionStatusInProgress, true + case ArchiveExtractionStatusDone: + return ArchiveExtractionStatusDone, true + case ArchiveExtractionStatusFailed: + return ArchiveExtractionStatusFailed, true + default: + return ArchiveExtractionStatus(""), false + } 
+} + +func ArchiveExtractionStatusFromRef(s *string) *ArchiveExtractionStatus { + if s == nil { + return nil + } + + ss, ok := ArchiveExtractionStatusFrom(*s) + if !ok { + return nil + } + return &ss +} + +func (s ArchiveExtractionStatus) String() string { + return string(s) +} + +func (s *ArchiveExtractionStatus) StringRef() *string { + if s == nil { + return nil + } + s2 := string(*s) + return &s2 +} diff --git a/asset/assetdomain/asset/status_test.go b/asset/assetdomain/asset/status_test.go new file mode 100644 index 0000000..7a6bf31 --- /dev/null +++ b/asset/assetdomain/asset/status_test.go @@ -0,0 +1,96 @@ +package asset + +import ( + "testing" + + "github.com/samber/lo" + "github.com/stretchr/testify/assert" +) + +func TestStatus_StatusFrom(t *testing.T) { + s := ArchiveExtractionStatusSkipped + res, ok := ArchiveExtractionStatusFrom("skipped") + assert.Equal(t, s, res) + assert.True(t, ok) + + s = ArchiveExtractionStatusPending + res, ok = ArchiveExtractionStatusFrom("pending") + assert.Equal(t, s, res) + assert.True(t, ok) + + s = ArchiveExtractionStatusPending + res, ok = ArchiveExtractionStatusFrom("PENDING") + assert.Equal(t, s, res) + assert.True(t, ok) + + s = ArchiveExtractionStatusInProgress + res, ok = ArchiveExtractionStatusFrom("in_progress") + assert.Equal(t, s, res) + assert.True(t, ok) + + s = ArchiveExtractionStatusDone + res, ok = ArchiveExtractionStatusFrom("done") + assert.Equal(t, s, res) + assert.True(t, ok) + + s = ArchiveExtractionStatusFailed + res, ok = ArchiveExtractionStatusFrom("failed") + assert.Equal(t, s, res) + assert.True(t, ok) + + s = ArchiveExtractionStatus("") + res, ok = ArchiveExtractionStatusFrom("") + assert.Equal(t, s, res) + assert.False(t, ok) +} + +func TestStatus_StatusFromRef(t *testing.T) { + sk := ArchiveExtractionStatusSkipped + p := ArchiveExtractionStatusPending + ip := ArchiveExtractionStatusInProgress + d := ArchiveExtractionStatusDone + f := ArchiveExtractionStatusFailed + + s := lo.ToPtr("skipped") + res 
:= ArchiveExtractionStatusFromRef(s) + assert.Equal(t, &sk, res) + + s = lo.ToPtr("pending") + res = ArchiveExtractionStatusFromRef(s) + assert.Equal(t, &p, res) + + s = lo.ToPtr("PENDING") + res = ArchiveExtractionStatusFromRef(s) + assert.Equal(t, &p, res) + + s = lo.ToPtr("in_progress") + res = ArchiveExtractionStatusFromRef(s) + assert.Equal(t, &ip, res) + + s = lo.ToPtr("done") + res = ArchiveExtractionStatusFromRef(s) + assert.Equal(t, &d, res) + + s = lo.ToPtr("failed") + res = ArchiveExtractionStatusFromRef(s) + assert.Equal(t, &f, res) + + s = nil + res = ArchiveExtractionStatusFromRef(s) + assert.Nil(t, res) +} + +func TestStatus_String(t *testing.T) { + s := "pending" + st := ArchiveExtractionStatusPending + assert.Equal(t, s, st.String()) +} + +func TestStatus_StringRef(t *testing.T) { + var st1 *ArchiveExtractionStatus + assert.Nil(t, st1.StringRef()) + + st2 := lo.ToPtr(ArchiveExtractionStatusPending) + s := lo.ToPtr("pending") + assert.Equal(t, s, st2.StringRef()) +} diff --git a/asset/assetdomain/asset/upload.go b/asset/assetdomain/asset/upload.go new file mode 100644 index 0000000..5f107c1 --- /dev/null +++ b/asset/assetdomain/asset/upload.go @@ -0,0 +1,37 @@ +package asset + +import ( + "time" +) + +type Upload struct { + uuid string + project ProjectID + fileName string + expiresAt time.Time + contentLength int64 +} + +func (u *Upload) UUID() string { + return u.uuid +} + +func (u *Upload) Project() ProjectID { + return u.project +} + +func (u *Upload) FileName() string { + return u.fileName +} + +func (u *Upload) ExpiresAt() time.Time { + return u.expiresAt +} + +func (u *Upload) Expired(t time.Time) bool { + return t.After(u.expiresAt) +} + +func (u *Upload) ContentLength() int64 { + return u.contentLength +} diff --git a/asset/assetdomain/asset/upload_builder.go b/asset/assetdomain/asset/upload_builder.go new file mode 100644 index 0000000..664e0d5 --- /dev/null +++ b/asset/assetdomain/asset/upload_builder.go @@ -0,0 +1,44 @@ +package asset + 
+import ( + "time" +) + +type UploadBuilder struct { + u *Upload +} + +func NewUpload() *UploadBuilder { + return &UploadBuilder{ + u: &Upload{}, + } +} + +func (b *UploadBuilder) UUID(uuid string) *UploadBuilder { + b.u.uuid = uuid + return b +} + +func (b *UploadBuilder) Project(project ProjectID) *UploadBuilder { + b.u.project = project + return b +} + +func (b *UploadBuilder) FileName(fileName string) *UploadBuilder { + b.u.fileName = fileName + return b +} + +func (b *UploadBuilder) ExpiresAt(expiresAt time.Time) *UploadBuilder { + b.u.expiresAt = expiresAt + return b +} + +func (b *UploadBuilder) ContentLength(contentLength int64) *UploadBuilder { + b.u.contentLength = contentLength + return b +} + +func (b *UploadBuilder) Build() *Upload { + return b.u +} diff --git a/asset/assetdomain/event/builder.go b/asset/assetdomain/event/builder.go new file mode 100644 index 0000000..f0da64b --- /dev/null +++ b/asset/assetdomain/event/builder.go @@ -0,0 +1,62 @@ +package event + +import ( + "time" + + "github.com/reearth/reearthx/asset/assetdomain/operator" + "github.com/samber/lo" +) + +type Builder[T any] struct { + i *Event[T] +} + +func New[T any]() *Builder[T] { + return &Builder[T]{i: &Event[T]{}} +} + +func (b *Builder[T]) Build() (*Event[T], error) { + if b.i.id.IsNil() || !b.i.operator.Validate() { + return nil, ErrInvalidID + } + return b.i, nil +} + +func (b *Builder[T]) MustBuild() *Event[T] { + return lo.Must(b.Build()) +} + +func (b *Builder[T]) ID(id ID) *Builder[T] { + b.i.id = id + return b +} + +func (b *Builder[T]) NewID() *Builder[T] { + b.i.id = NewID() + return b +} + +func (b *Builder[T]) Timestamp(t time.Time) *Builder[T] { + b.i.timestamp = t + return b +} + +func (b *Builder[T]) Type(t Type) *Builder[T] { + b.i.ty = t + return b +} + +func (b *Builder[T]) Project(prj *Project) *Builder[T] { + b.i.prj = prj.Clone() + return b +} + +func (b *Builder[T]) Operator(o operator.Operator) *Builder[T] { + b.i.operator = o + return b +} + +func (b 
*Builder[T]) Object(o T) *Builder[T] { + b.i.object = o + return b +} diff --git a/asset/assetdomain/event/builder_test.go b/asset/assetdomain/event/builder_test.go new file mode 100644 index 0000000..9f5d3b3 --- /dev/null +++ b/asset/assetdomain/event/builder_test.go @@ -0,0 +1,43 @@ +package event + +import ( + "testing" + "time" + + "github.com/reearth/reearthx/account/accountdomain/user" + id "github.com/reearth/reearthx/asset/assetdomain" + "github.com/reearth/reearthx/asset/assetdomain/asset" + "github.com/reearth/reearthx/asset/assetdomain/operator" + "github.com/reearth/reearthx/asset/assetdomain/project" + "github.com/stretchr/testify/assert" +) + +var ( + u = user.New().NewID().Email("hoge@example.com").Name("John").MustBuild() + a = asset.New().NewID().Project(project.NewID()).NewUUID(). + Thread(id.NewThreadID()).Size(100).CreatedByUser(u.ID()). + MustBuild() +) + +func TestBuilder(t *testing.T) { + now := time.Now() + id := NewID() + + ev := New[*asset.Asset]().ID(id).Timestamp(now). + Type(AssetCreate).Operator(operator.OperatorFromUser(u.ID())).Object(a).MustBuild() + ev2 := New[*asset.Asset]().NewID().Timestamp(now). 
+ Type(AssetDecompress).Operator(operator.OperatorFromUser(u.ID())).Object(a).MustBuild() + + // ev1 + assert.Equal(t, id, ev.ID()) + assert.Equal(t, Type(AssetCreate), ev.Type()) + assert.Equal(t, operator.OperatorFromUser(u.ID()), ev.Operator()) + assert.Equal(t, a, ev.Object()) + + // ev2 + assert.NotNil(t, ev2.ID()) + + ev3, err := New[*asset.Asset]().Build() + assert.Equal(t, ErrInvalidID, err) + assert.Nil(t, ev3) +} diff --git a/asset/assetdomain/event/event.go b/asset/assetdomain/event/event.go new file mode 100644 index 0000000..f742b44 --- /dev/null +++ b/asset/assetdomain/event/event.go @@ -0,0 +1,82 @@ +package event + +import ( + "time" + + "github.com/reearth/reearthx/asset/assetdomain/operator" +) + +type Type string + +const ( + ItemCreate = "item.create" + ItemUpdate = "item.update" + ItemDelete = "item.delete" + ItemPublish = "item.publish" + ItemUnpublish = "item.unpublish" + AssetCreate = "asset.create" + AssetDecompress = "asset.decompress" + AssetDelete = "asset.delete" +) + +type Event[T any] struct { + id ID + timestamp time.Time + operator operator.Operator + ty Type + prj *Project + object T +} + +func (e *Event[T]) ID() ID { + return e.id +} + +func (e *Event[T]) Type() Type { + return e.ty +} + +func (e *Event[T]) Timestamp() time.Time { + return e.timestamp +} + +func (e *Event[T]) Operator() operator.Operator { + return e.operator +} + +func (e *Event[T]) Project() *Project { + return e.prj.Clone() +} + +func (e *Event[T]) Object() any { + return e.object +} + +func (e *Event[T]) Clone() *Event[T] { + if e == nil { + return nil + } + return &Event[T]{ + id: e.id.Clone(), + timestamp: e.timestamp, + operator: e.operator, + ty: e.ty, + prj: e.prj.Clone(), + object: e.object, + } +} + +type Project struct { + ID string + Alias string +} + +func (p *Project) Clone() *Project { + if p == nil { + return nil + } + return &Project{ + ID: p.ID, + Alias: p.Alias, + } +} diff --git a/asset/assetdomain/event/event_test.go 
b/asset/assetdomain/event/event_test.go new file mode 100644 index 0000000..c26b415 --- /dev/null +++ b/asset/assetdomain/event/event_test.go @@ -0,0 +1,31 @@ +package event + +import ( + "testing" + "time" + + "github.com/reearth/reearthx/account/accountdomain/user" + id "github.com/reearth/reearthx/asset/assetdomain" + "github.com/reearth/reearthx/asset/assetdomain/asset" + "github.com/reearth/reearthx/asset/assetdomain/operator" + "github.com/reearth/reearthx/asset/assetdomain/project" + "github.com/stretchr/testify/assert" +) + +func TestEvent(t *testing.T) { + u := user.New().NewID().Email("hoge@example.com").Name("John").MustBuild() + a := asset.New().NewID().Thread(id.NewThreadID()).NewUUID(). + Project(project.NewID()).Size(100).CreatedByUser(u.ID()).MustBuild() + now := time.Now() + eID := NewID() + ev := New[*asset.Asset]().ID(eID).Timestamp(now).Type(AssetCreate). + Operator(operator.OperatorFromUser(u.ID())).Object(a).MustBuild() + + assert.Equal(t, eID, ev.ID()) + assert.Equal(t, Type(AssetCreate), ev.Type()) + assert.Equal(t, operator.OperatorFromUser(u.ID()), ev.Operator()) + assert.Equal(t, a, ev.Object()) + assert.Equal(t, now, ev.Timestamp()) + assert.Equal(t, ev, ev.Clone()) + assert.NotSame(t, ev, ev.Clone()) +} diff --git a/asset/assetdomain/event/id.go b/asset/assetdomain/event/id.go new file mode 100644 index 0000000..a78500c --- /dev/null +++ b/asset/assetdomain/event/id.go @@ -0,0 +1,17 @@ +package event + +import ( + "github.com/reearth/reearthx/account/accountdomain" + id "github.com/reearth/reearthx/asset/assetdomain" + "github.com/reearth/reearthx/idx" +) + +type ID = id.EventID +type UserID = accountdomain.UserID +type IntegrationID = id.IntegrationID + +var NewID = id.NewEventID +var MustID = id.MustEventID +var IDFrom = id.EventIDFrom +var IDFromRef = id.EventIDFromRef +var ErrInvalidID = idx.ErrInvalidID diff --git a/asset/assetdomain/file/file.go b/asset/assetdomain/file/file.go new file mode 100644 index 0000000..5c882c5 --- /dev/null +++ 
b/asset/assetdomain/file/file.go @@ -0,0 +1,86 @@ +package file + +import ( + "fmt" + "io" + "mime" + "mime/multipart" + "net/http" + "net/url" + "path" + "strconv" + + "github.com/reearth/reearthx/i18n" + "github.com/reearth/reearthx/rerror" +) + +type File struct { + Content io.ReadCloser + Name string + Size int64 + ContentType string +} + +func FromMultipart(multipartReader *multipart.Reader, formName string) (*File, error) { + if formName == "" { + formName = "file" + } + + for { + p, err := multipartReader.NextPart() + if err == io.EOF { + break + } + if err != nil { + return nil, err + } + + if p.FormName() != formName { + if err := p.Close(); err != nil { + return nil, err + } + continue + } + + return &File{ + Content: p, + Name: p.FileName(), + Size: 0, + ContentType: p.Header.Get("Content-Type"), + }, nil + } + + return nil, rerror.NewE(i18n.T("file not found")) +} + +func FromURL(rawURL string) (*File, error) { + URL, err := url.Parse(rawURL) + if err != nil { + return nil, err + } + + res, err := http.Get(URL.String()) + if err != nil { + return nil, rerror.ErrInternalBy(err) + } + + if res.StatusCode > 300 { + return nil, rerror.ErrInternalBy(fmt.Errorf("status code is %d", res.StatusCode)) + } + + ct := res.Header.Get("Content-Type") + fs, _ := strconv.ParseInt(res.Header.Get("Content-Length"), 10, 64) + + fn := path.Base(URL.Path) + _, m, err := mime.ParseMediaType(res.Header.Get("Content-Disposition")) + if err == nil && m["filename"] != "" { + fn = m["filename"] + } + + return &File{ + Content: res.Body, + Name: fn, + ContentType: ct, + Size: fs, + }, nil +} diff --git a/asset/assetdomain/file/file_test.go b/asset/assetdomain/file/file_test.go new file mode 100644 index 0000000..9cc7f7a --- /dev/null +++ b/asset/assetdomain/file/file_test.go @@ -0,0 +1,52 @@ +package file + +import ( + "io" + "mime" + "net/http" + "os" + "path" + "testing" + + "github.com/jarcoal/httpmock" + "github.com/samber/lo" + "github.com/stretchr/testify/assert" +) + +func 
TestFromURL(t *testing.T) { + URL := "https://cms.com/xyz/test.txt" + f := lo.Must(os.Open("testdata/test.txt")) + defer f.Close() + z := lo.Must(io.ReadAll(f)) + + httpmock.Activate() + defer httpmock.Deactivate() + + httpmock.RegisterResponder("GET", URL, func(r *http.Request) (*http.Response, error) { + res := httpmock.NewBytesResponse(200, z) + res.Header.Set("Content-Type", mime.TypeByExtension(path.Ext(URL))) + res.Header.Set("Content-Length", "123") + res.Header.Set("Content-Disposition", `attachment; filename="filename.txt"`) + return res, nil + }) + + expected := File{Name: "filename.txt", Content: f, Size: 123} + + got, err := FromURL(URL) + assert.NoError(t, err) + assert.Equal(t, expected.Name, got.Name) + assert.Equal(t, z, lo.Must(io.ReadAll(got.Content))) + + httpmock.RegisterResponder("GET", URL, func(r *http.Request) (*http.Response, error) { + res := httpmock.NewBytesResponse(200, z) + res.Header.Set("Content-Type", mime.TypeByExtension(path.Ext(URL))) + return res, nil + }) + + expected = File{Name: "test.txt", Content: f, Size: 0} + + got, err = FromURL(URL) + assert.NoError(t, err) + assert.Equal(t, expected.Name, got.Name) + assert.Equal(t, z, lo.Must(io.ReadAll(got.Content))) +} diff --git a/asset/assetdomain/file/testdata/test.txt b/asset/assetdomain/file/testdata/test.txt new file mode 100644 index 0000000..b6fc4c6 --- /dev/null +++ b/asset/assetdomain/file/testdata/test.txt @@ -0,0 +1 @@ +hello \ No newline at end of file diff --git a/asset/assetdomain/id.go b/asset/assetdomain/id.go new file mode 100644 index 0000000..5be3661 --- /dev/null +++ b/asset/assetdomain/id.go @@ -0,0 +1,320 @@ +package assetdomain + +import ( + "github.com/goombaio/namegenerator" + "github.com/reearth/reearthx/account/accountdomain" + "github.com/reearth/reearthx/i18n" + "github.com/reearth/reearthx/idx" + "github.com/reearth/reearthx/rerror" + "github.com/reearth/reearthx/util" + "github.com/samber/lo" + "regexp" + "strings" +) + +type Workspace struct{} +type 
User struct{} +type Asset struct{} +type Event struct{} + +func (Workspace) Type() string { return "workspace" } +func (User) Type() string { return "user" } +func (Asset) Type() string { return "asset" } +func (Event) Type() string { return "event" } + +type WorkspaceID = idx.ID[Workspace] +type UserID = idx.ID[User] +type AssetID = idx.ID[Asset] +type EventID = idx.ID[Event] + +var NewWorkspaceID = idx.New[Workspace] +var NewUserID = idx.New[User] +var NewAssetID = idx.New[Asset] +var NewEventID = idx.New[Event] + +var MustWorkspaceID = idx.Must[Workspace] +var MustUserID = idx.Must[User] +var MustAssetID = idx.Must[Asset] +var MustEventID = idx.Must[Event] + +var WorkspaceIDFrom = idx.From[Workspace] +var UserIDFrom = idx.From[User] +var AssetIDFrom = idx.From[Asset] +var EventIDFrom = idx.From[Event] + +var WorkspaceIDFromRef = idx.FromRef[Workspace] +var UserIDFromRef = idx.FromRef[User] +var AssetIDFromRef = idx.FromRef[Asset] +var EventIDFromRef = idx.FromRef[Event] + +type WorkspaceIDList = idx.List[accountdomain.Workspace] +type UserIDList = idx.List[accountdomain.User] +type AssetIDList = idx.List[Asset] + +var WorkspaceIDListFrom = idx.ListFrom[accountdomain.Workspace] +var UserIDListFrom = idx.ListFrom[accountdomain.User] +var AssetIDListFrom = idx.ListFrom[Asset] + +type WorkspaceIDSet = idx.Set[Workspace] +type UserIDSet = idx.Set[User] +type AssetIDSet = idx.Set[Asset] + +var NewWorkspaceIDSet = idx.NewSet[Workspace] +var NewUserIDSet = idx.NewSet[User] +var NewAssetIDSet = idx.NewSet[Asset] + +type Project struct{} + +func (Project) Type() string { return "project" } + +type ProjectID = idx.ID[Project] +type ProjectIDList = idx.List[Project] + +var MustProjectID = idx.Must[Project] +var NewProjectID = idx.New[Project] +var ProjectIDFrom = idx.From[Project] +var ProjectIDFromRef = idx.FromRef[Project] +var ProjectIDListFrom = idx.ListFrom[Project] + +type Model struct{} + +func (Model) Type() string { return "model" } + +type ModelID = idx.ID[Model] 
+type ModelIDList = idx.List[Model] + +var MustModelID = idx.Must[Model] +var NewModelID = idx.New[Model] +var ModelIDFrom = idx.From[Model] +var ModelIDFromRef = idx.FromRef[Model] +var ModelIDListFrom = idx.ListFrom[Model] + +type Field struct{} + +func (Field) Type() string { return "field" } + +type FieldID = idx.ID[Field] +type FieldIDList = idx.List[Field] + +var MustFieldID = idx.Must[Field] +var NewFieldID = idx.New[Field] +var FieldIDFrom = idx.From[Field] +var FieldIDFromRef = idx.FromRef[Field] +var FieldIDListFrom = idx.ListFrom[Field] + +type Tag struct{} + +func (Tag) Type() string { return "tag" } + +type TagID = idx.ID[Tag] +type TagIDList = idx.List[Tag] + +var MustTagID = idx.Must[Tag] +var NewTagID = idx.New[Tag] +var TagIDFrom = idx.From[Tag] +var TagIDFromRef = idx.FromRef[Tag] +var TagIDListFrom = idx.ListFrom[Tag] + +type Schema struct{} + +func (Schema) Type() string { return "schema" } + +type SchemaID = idx.ID[Schema] +type SchemaIDList = idx.List[Schema] + +var MustSchemaID = idx.Must[Schema] +var NewSchemaID = idx.New[Schema] +var SchemaIDFrom = idx.From[Schema] +var SchemaIDFromRef = idx.FromRef[Schema] +var SchemaIDListFrom = idx.ListFrom[Schema] + +type Group struct{} + +func (Group) Type() string { return "group" } + +type GroupID = idx.ID[Group] +type GroupIDList = idx.List[Group] + +var MustGroupID = idx.Must[Group] +var NewGroupID = idx.New[Group] +var GroupIDFrom = idx.From[Group] +var GroupIDFromRef = idx.FromRef[Group] +var GroupIDListFrom = idx.ListFrom[Group] + +type ItemGroup struct{} + +func (ItemGroup) Type() string { return "item_group" } + +type ItemGroupID = idx.ID[ItemGroup] +type ItemGroupIDList = idx.List[ItemGroup] + +var MustItemGroupID = idx.Must[ItemGroup] +var NewItemGroupID = idx.New[ItemGroup] +var ItemGroupIDFrom = idx.From[ItemGroup] +var ItemGroupIDFromRef = idx.FromRef[ItemGroup] +var ItemGroupIDListFrom = idx.ListFrom[ItemGroup] + +type Thread struct{} + +func (Thread) Type() string { return "thread" } + 
+type ThreadID = idx.ID[Thread] +type ThreadIDList = idx.List[Thread] + +var NewThreadID = idx.New[Thread] +var MustThreadID = idx.Must[Thread] +var ThreadIDFrom = idx.From[Thread] +var ThreadIDFromRef = idx.FromRef[Thread] + +type Comment struct{} + +func (Comment) Type() string { return "comment" } + +type CommentID = idx.ID[Comment] +type CommentIDList = idx.List[Comment] + +var NewCommentID = idx.New[Comment] +var MustCommentID = idx.Must[Comment] +var CommentIDFrom = idx.From[Comment] +var CommentIDFromRef = idx.FromRef[Comment] + +type Item struct{} + +func (Item) Type() string { return "item" } + +type ItemID = idx.ID[Item] +type ItemIDList = idx.List[Item] + +var MustItemID = idx.Must[Item] +var NewItemID = idx.New[Item] +var ItemIDFrom = idx.From[Item] +var ItemIDFromRef = idx.FromRef[Item] +var ItemIDListFrom = idx.ListFrom[Item] + +type Integration struct{} + +func (Integration) Type() string { return "integration" } + +type IntegrationID = idx.ID[Integration] +type IntegrationIDList = idx.List[Integration] + +var MustIntegrationID = idx.Must[Integration] +var NewIntegrationID = idx.New[Integration] +var IntegrationIDFrom = idx.From[Integration] +var IntegrationIDFromRef = idx.FromRef[Integration] +var IntegrationIDListFrom = idx.ListFrom[Integration] + +type Webhook struct{} + +func (Webhook) Type() string { return "webhook" } + +type WebhookID = idx.ID[Webhook] +type WebhookIDList = idx.List[Webhook] + +var MustWebhookID = idx.Must[Webhook] +var NewWebhookID = idx.New[Webhook] +var WebhookIDFrom = idx.From[Webhook] +var WebhookIDFromRef = idx.FromRef[Webhook] +var WebhookIDListFrom = idx.ListFrom[Webhook] + +type Task struct{} + +func (Task) Type() string { return "task" } + +type TaskID = idx.ID[Task] + +var NewTaskID = idx.New[Task] +var MustTaskID = idx.Must[Task] +var TaskIDFrom = idx.From[Task] +var TaskIDFromRef = idx.FromRef[Task] + +type TaskIDList = idx.List[Task] + +var TaskIDListFrom = idx.ListFrom[Task] + +type TaskIDSet = idx.Set[Task] + 
+var NewTaskIDSet = idx.NewSet[Task]
+
+type Request struct{}
+
+func (Request) Type() string { return "request" }
+
+type RequestID = idx.ID[Request]
+type RequestIDList = idx.List[Request]
+
+var NewRequestID = idx.New[Request]
+var MustRequestID = idx.Must[Request]
+var RequestIDFrom = idx.From[Request]
+var RequestIDFromRef = idx.FromRef[Request]
+
+type View struct{}
+
+func (View) Type() string { return "view" }
+
+type ViewID = idx.ID[View]
+type ViewIDList = idx.List[View]
+
+var NewViewID = idx.New[View]
+var MustViewID = idx.Must[View]
+var ViewIDFrom = idx.From[View]
+var ViewIDFromRef = idx.FromRef[View]
+
+type Resource struct{}
+
+func (Resource) Type() string { return "resource" }
+
+type ResourceID = idx.ID[Resource]
+type ResourceIDList = idx.List[Resource]
+
+var NewResourceID = idx.New[Resource]
+var MustResourceID = idx.Must[Resource]
+var ResourceIDFrom = idx.From[Resource]
+var ResourceIDFromRef = idx.FromRef[Resource]
+
+var ErrInvalidKey = rerror.NewE(i18n.T("invalid key"))
+var ErrDuplicatedKey = rerror.NewE(i18n.T("duplicated key"))
+
+var keyRegexp = regexp.MustCompile("^[a-zA-Z0-9_-]{1,32}$")
+
+var ngKeys = []string{"id"}
+
+type Key struct {
+	key string
+}
+
+func NewKey(key string) Key {
+	if !keyRegexp.MatchString(key) {
+		return Key{}
+	}
+	k := Key{key}
+	return k
+}
+
+func NewKeyFromPtr(key *string) *Key {
+	return lo.ToPtr(NewKey(lo.FromPtr(key)))
+}
+
+func RandomKey() Key {
+	seed := util.Now().UTC().UnixNano()
+	return NewKey(namegenerator.NewNameGenerator(seed).Generate())
+}
+
+func (k Key) IsValid() bool {
+	return k.key != "" && !strings.HasPrefix(k.key, "_") && !strings.HasPrefix(k.key, "-") && !lo.Contains(ngKeys, k.key)
+}
+
+func (k Key) Ref() *Key {
+	return &k
+}
+
+func (k Key) String() string {
+	return k.key
+}
+
+func (k *Key) StringRef() *string {
+	if k == nil {
+		return nil
+	}
+	return lo.ToPtr(k.key)
+}
diff --git a/asset/assetdomain/integration/builder.go b/asset/assetdomain/integration/builder.go
new file mode 100644 index 0000000..bad150b --- /dev/null +++ b/asset/assetdomain/integration/builder.go @@ -0,0 +1,91 @@ +package integration + +import ( + "net/url" + "time" +) + +type Builder struct { + i *Integration +} + +func New() *Builder { + return &Builder{i: &Integration{}} +} + +func (b *Builder) Build() (*Integration, error) { + if b.i.id.IsNil() { + return nil, ErrInvalidID + } + if b.i.updatedAt.IsZero() { + b.i.updatedAt = b.i.CreatedAt() + } + return b.i, nil +} + +func (b *Builder) MustBuild() *Integration { + r, err := b.Build() + if err != nil { + panic(err) + } + return r +} + +func (b *Builder) ID(id ID) *Builder { + b.i.id = id + return b +} + +func (b *Builder) NewID() *Builder { + b.i.id = NewID() + return b +} + +func (b *Builder) UpdatedAt(updatedAt time.Time) *Builder { + b.i.updatedAt = updatedAt + return b +} + +func (b *Builder) Name(name string) *Builder { + b.i.name = name + return b +} + +func (b *Builder) Description(description string) *Builder { + b.i.description = description + return b +} + +func (b *Builder) Type(t Type) *Builder { + b.i.iType = t + return b +} + +func (b *Builder) LogoUrl(logoURL *url.URL) *Builder { + if logoURL == nil { + b.i.logoUrl = nil + } else { + b.i.logoUrl, _ = url.Parse(logoURL.String()) + } + return b +} + +func (b *Builder) GenerateToken() *Builder { + b.i.RandomToken() + return b +} + +func (b *Builder) Token(token string) *Builder { + b.i.token = token + return b +} + +func (b *Builder) Developer(developer UserID) *Builder { + b.i.developer = developer + return b +} + +func (b *Builder) Webhook(webhook []*Webhook) *Builder { + b.i.webhooks = webhook + return b +} diff --git a/asset/assetdomain/integration/builder_test.go b/asset/assetdomain/integration/builder_test.go new file mode 100644 index 0000000..8976384 --- /dev/null +++ b/asset/assetdomain/integration/builder_test.go @@ -0,0 +1,519 @@ +package integration + +import ( + "net/url" + "strings" + "testing" + "time" + + 
"github.com/reearth/reearth-cms/server/pkg/id" + "github.com/reearth/reearthx/account/accountdomain" + "github.com/samber/lo" + "github.com/stretchr/testify/assert" +) + +func TestNew(t *testing.T) { + tests := []struct { + name string + want *Builder + }{ + { + name: "new", + want: &Builder{i: &Integration{}}, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + assert.Equalf(t, tt.want, New(), "New()") + }) + } +} + +func TestBuilder_Build(t *testing.T) { + iId := id.NewIntegrationID() + now := time.Now() + type fields struct { + i *Integration + } + tests := []struct { + name string + fields fields + want *Integration + wantErr error + }{ + { + name: "no id", + fields: fields{i: &Integration{}}, + want: nil, + wantErr: ErrInvalidID, + }, + { + name: "no updated at", + fields: fields{i: &Integration{ + id: iId, + }}, + want: &Integration{ + id: iId, + updatedAt: iId.Timestamp(), + }, + wantErr: nil, + }, + { + name: "full", + fields: fields{i: &Integration{ + id: iId, + updatedAt: now, + }}, + want: &Integration{ + id: iId, + updatedAt: now, + }, + wantErr: nil, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + b := &Builder{ + i: tt.fields.i, + } + got, err := b.Build() + if tt.wantErr != nil { + assert.Equal(t, tt.wantErr, err) + assert.Nil(t, got) + return + } + assert.NoError(t, err) + assert.Equalf(t, tt.want, got, "Build()") + }) + } +} + +func TestBuilder_MustBuild(t *testing.T) { + iId := id.NewIntegrationID() + now := time.Now() + type fields struct { + i *Integration + } + tests := []struct { + name string + fields fields + want *Integration + wantErr error + }{ + { + name: "no id", + fields: fields{i: &Integration{}}, + want: nil, + wantErr: ErrInvalidID, + }, + { + name: "no updated at", + fields: fields{i: &Integration{ + id: iId, + }}, + want: &Integration{ + id: iId, + updatedAt: iId.Timestamp(), + }, + wantErr: nil, + }, + { + name: "full", 
+ fields: fields{i: &Integration{ + id: iId, + updatedAt: now, + }}, + want: &Integration{ + id: iId, + updatedAt: now, + }, + wantErr: nil, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + b := &Builder{ + i: tt.fields.i, + } + if tt.wantErr != nil { + assert.PanicsWithValue(t, tt.wantErr, func() { + b.MustBuild() + }) + } else { + assert.Equal(t, tt.want, b.MustBuild()) + } + }) + } +} + +func TestBuilder_NewID(t *testing.T) { + type fields struct { + i *Integration + } + tests := []struct { + name string + fields fields + }{ + { + name: "new", + fields: fields{i: &Integration{}}, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + b := &Builder{ + i: tt.fields.i, + } + b.NewID() + assert.False(t, b.i.id.IsEmpty()) + }) + } +} + +func TestBuilder_ID(t *testing.T) { + iId := id.NewIntegrationID() + type fields struct { + i *Integration + } + type args struct { + id ID + } + tests := []struct { + name string + fields fields + args args + want *Builder + }{ + { + name: "set", + fields: fields{i: &Integration{}}, + args: args{id: iId}, + want: &Builder{i: &Integration{id: iId}}, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + b := &Builder{ + i: tt.fields.i, + } + assert.Equalf(t, tt.want, b.ID(tt.args.id), "ID(%v)", tt.args.id) + }) + } +} + +func TestBuilder_Name(t *testing.T) { + type fields struct { + i *Integration + } + type args struct { + name string + } + tests := []struct { + name string + fields fields + args args + want *Builder + }{ + { + name: "set", + fields: fields{i: &Integration{}}, + args: args{name: "test"}, + want: &Builder{i: &Integration{name: "test"}}, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + b := &Builder{ + i: tt.fields.i, + } + assert.Equalf(t, tt.want, b.Name(tt.args.name), "Name(%v)", tt.args.name) + }) + } 
+} + +func TestBuilder_Description(t *testing.T) { + type fields struct { + i *Integration + } + type args struct { + description string + } + tests := []struct { + name string + fields fields + args args + want *Builder + }{ + { + name: "set", + fields: fields{&Integration{}}, + args: args{description: "test"}, + want: &Builder{i: &Integration{description: "test"}}, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + b := &Builder{ + i: tt.fields.i, + } + assert.Equalf(t, tt.want, b.Description(tt.args.description), "Description(%v)", tt.args.description) + }) + } +} + +func TestBuilder_Type(t *testing.T) { + type fields struct { + i *Integration + } + type args struct { + t Type + } + tests := []struct { + name string + fields fields + args args + want *Builder + }{ + { + name: "set", + fields: fields{i: &Integration{}}, + args: args{t: TypePublic}, + want: &Builder{i: &Integration{iType: TypePublic}}, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + b := &Builder{ + i: tt.fields.i, + } + assert.Equalf(t, tt.want, b.Type(tt.args.t), "Type(%v)", tt.args.t) + }) + } +} + +func TestBuilder_LogoUrl(t *testing.T) { + type fields struct { + i *Integration + } + type args struct { + logoURL *url.URL + } + tests := []struct { + name string + fields fields + args args + want *Builder + }{ + { + name: "set", + fields: fields{i: &Integration{}}, + args: args{logoURL: lo.Must(url.Parse("https://sub.hugo.com/dir?p=1#test"))}, + want: &Builder{i: &Integration{logoUrl: lo.Must(url.Parse("https://sub.hugo.com/dir?p=1#test"))}}, + }, + { + name: "set", + fields: fields{i: &Integration{}}, + args: args{logoURL: nil}, + want: &Builder{i: &Integration{logoUrl: nil}}, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + b := &Builder{ + i: tt.fields.i, + } + assert.Equalf(t, tt.want, b.LogoUrl(tt.args.logoURL), 
"LogoUrl(%v)", tt.args.logoURL) + }) + } +} + +func TestBuilder_Developer(t *testing.T) { + uId := accountdomain.NewUserID() + type fields struct { + i *Integration + } + type args struct { + developer UserID + } + tests := []struct { + name string + fields fields + args args + want *Builder + }{ + { + name: "set", + fields: fields{i: &Integration{}}, + args: args{developer: uId}, + want: &Builder{i: &Integration{developer: uId}}, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + b := &Builder{ + i: tt.fields.i, + } + assert.Equalf(t, tt.want, b.Developer(tt.args.developer), "Developer(%v)", tt.args.developer) + }) + } +} + +func TestBuilder_Webhook(t *testing.T) { + wId := id.NewWebhookID() + now := time.Now() + type fields struct { + i *Integration + } + type args struct { + webhook []*Webhook + } + tests := []struct { + name string + fields fields + args args + want *Builder + }{ + { + name: "set", + fields: fields{i: &Integration{}}, + args: args{webhook: []*Webhook{}}, + want: &Builder{i: &Integration{webhooks: []*Webhook{}}}, + }, + { + name: "set", + fields: fields{i: &Integration{}}, + args: args{webhook: []*Webhook{{ + id: wId, + name: "xyz", + url: nil, + active: true, + trigger: WebhookTrigger{}, + updatedAt: now, + }}}, + want: &Builder{i: &Integration{webhooks: []*Webhook{{ + id: wId, + name: "xyz", + url: nil, + active: true, + trigger: WebhookTrigger{}, + updatedAt: now, + }}}}, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + b := &Builder{ + i: tt.fields.i, + } + assert.Equalf(t, tt.want, b.Webhook(tt.args.webhook), "Webhook(%v)", tt.args.webhook) + }) + } +} + +func TestBuilder_Token(t *testing.T) { + type fields struct { + i *Integration + } + type args struct { + token string + } + tests := []struct { + name string + fields fields + args args + want *Builder + }{ + { + name: "set", + fields: fields{i: &Integration{}}, + args: 
args{token: "xyz"}, + want: &Builder{i: &Integration{token: "xyz"}}, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + b := &Builder{ + i: tt.fields.i, + } + assert.Equalf(t, tt.want, b.Token(tt.args.token), "Token(%v)", tt.args.token) + }) + } +} + +func TestBuilder_UpdatedAt(t *testing.T) { + now := time.Now() + type fields struct { + i *Integration + } + type args struct { + updatedAt time.Time + } + tests := []struct { + name string + fields fields + args args + want *Builder + }{ + { + name: "set", + fields: fields{i: &Integration{}}, + args: args{updatedAt: now}, + want: &Builder{i: &Integration{updatedAt: now}}, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + b := &Builder{ + i: tt.fields.i, + } + assert.Equalf(t, tt.want, b.UpdatedAt(tt.args.updatedAt), "UpdatedAt(%v)", tt.args.updatedAt) + }) + } +} + +func TestBuilder_GenerateToken(t *testing.T) { + b := New() + b.GenerateToken() + assert.NotNil(t, b.i.token) + assert.True(t, strings.HasPrefix(b.i.token, "secret_")) + assert.Equal(t, 50, len(b.i.token)) +} diff --git a/asset/assetdomain/integration/id.go b/asset/assetdomain/integration/id.go new file mode 100644 index 0000000..d26355b --- /dev/null +++ b/asset/assetdomain/integration/id.go @@ -0,0 +1,19 @@ +package integration + +import ( + "github.com/reearth/reearthx/account/accountdomain" + id "github.com/reearth/reearthx/asset/assetdomain" + "github.com/reearth/reearthx/idx" +) + +type ID = id.IntegrationID +type WebhookID = id.WebhookID +type UserID = accountdomain.UserID +type ModelID = id.ModelID + +var NewID = id.NewIntegrationID +var NewWebhookID = id.NewWebhookID +var MustID = id.MustIntegrationID +var IDFrom = id.IntegrationIDFrom +var IDFromRef = id.IntegrationIDFromRef +var ErrInvalidID = idx.ErrInvalidID diff --git a/asset/assetdomain/integration/integration.go b/asset/assetdomain/integration/integration.go new file mode 100644 
index 0000000..5265f67 --- /dev/null +++ b/asset/assetdomain/integration/integration.go @@ -0,0 +1,175 @@ +package integration + +import ( + "crypto/rand" + "math/big" + "net/url" + "time" + + "github.com/reearth/reearthx/util" + "github.com/samber/lo" + "golang.org/x/exp/slices" +) + +type Integration struct { + id ID + name string + description string + logoUrl *url.URL + iType Type + token string + developer UserID + webhooks []*Webhook + updatedAt time.Time +} + +func (i *Integration) ID() ID { + return i.id +} + +func (i *Integration) Name() string { + return i.name +} + +func (i *Integration) SetName(name string) { + i.name = name +} + +func (i *Integration) Description() string { + return i.description +} + +func (i *Integration) SetDescription(description string) { + i.description = description +} + +func (i *Integration) Type() Type { + return i.iType +} + +func (i *Integration) SetType(t Type) { + i.iType = t +} + +func (i *Integration) LogoUrl() *url.URL { + return i.logoUrl +} + +func (i *Integration) SetLogoUrl(logoUrl *url.URL) { + i.logoUrl = logoUrl +} + +func (i *Integration) Token() string { + return i.token +} + +func (i *Integration) SetToken(token string) { + i.token = token +} + +func (i *Integration) RandomToken() { + t, err := randomString(43) + if err != nil { + return + } + i.token = "secret_" + t +} + +func (i *Integration) Developer() UserID { + return i.developer +} + +func (i *Integration) SetDeveloper(developer UserID) { + i.developer = developer +} + +func (i *Integration) Webhooks() []*Webhook { + return i.webhooks +} + +func (i *Integration) Webhook(wId WebhookID) (*Webhook, bool) { + return lo.Find(i.webhooks, func(w *Webhook) bool { return w.id == wId }) +} + +func (i *Integration) AddWebhook(w *Webhook) { + if w == nil { + return + } + i.webhooks = append(i.webhooks, w) +} + +func (i *Integration) UpdateWebhook(wId WebhookID, w *Webhook) bool { + if w == nil { + return false + } + _, idx, ok := lo.FindIndexOf(i.webhooks, func(w 
*Webhook) bool { return w.id == wId }) + if !ok || idx >= len(i.webhooks) { + return false + } + i.webhooks[idx] = w + return true +} + +func (i *Integration) DeleteWebhook(wId WebhookID) bool { + _, idx, ok := lo.FindIndexOf(i.webhooks, func(w *Webhook) bool { return w.id == wId }) + if !ok || idx >= len(i.webhooks) { + return false + } + i.webhooks = slices.Delete(i.webhooks, idx, idx+1) + return true +} + +func (i *Integration) SetWebhook(webhook []*Webhook) { + i.webhooks = webhook +} + +func (i *Integration) UpdatedAt() time.Time { + if i.updatedAt.IsZero() { + return i.id.Timestamp() + } + return i.updatedAt +} + +func (i *Integration) SetUpdatedAt(updatedAt time.Time) { + i.updatedAt = updatedAt +} + +func (i *Integration) CreatedAt() time.Time { + return i.id.Timestamp() +} + +func (i *Integration) Clone() *Integration { + if i == nil { + return nil + } + + var u *url.URL = nil + if i.logoUrl != nil { + u, _ = url.Parse(i.logoUrl.String()) + } + return &Integration{ + id: i.id.Clone(), + name: i.name, + description: i.description, + logoUrl: u, + iType: i.iType, + token: i.token, + developer: i.developer, + webhooks: util.Map(i.webhooks, func(w *Webhook) *Webhook { return w.Clone() }), + updatedAt: i.updatedAt, + } +} + +func randomString(n int) (string, error) { + const letters = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" + result := make([]byte, n) + for i := 0; i < n; i++ { + randIndex, err := rand.Int(rand.Reader, big.NewInt(int64(len(letters)))) + if err != nil { + return "", err + } + result[i] = letters[randIndex.Int64()] + } + + return string(result), nil +} diff --git a/asset/assetdomain/integration/integration_test.go b/asset/assetdomain/integration/integration_test.go new file mode 100644 index 0000000..4e4347d --- /dev/null +++ b/asset/assetdomain/integration/integration_test.go @@ -0,0 +1,1316 @@ +package integration + +import ( + "net/url" + "strings" + "testing" + "time" + + "github.com/reearth/reearth-cms/server/pkg/id" 
+ "github.com/reearth/reearthx/account/accountdomain" + "github.com/samber/lo" + "github.com/stretchr/testify/assert" +) + +func TestIntegration_Clone(t *testing.T) { + iId := id.NewIntegrationID() + uId := accountdomain.NewUserID() + wId := id.NewWebhookID() + now := time.Now() + tests := []struct { + name string + i *Integration + want *Integration + }{ + { + name: "test", + i: &Integration{ + id: iId, + name: "xyz", + description: "xyz d", + logoUrl: lo.Must(url.Parse("https://sub.hugo.com/dir?p=1#test")), + iType: TypePublic, + token: "token", + developer: uId, + webhooks: []*Webhook{ + { + id: wId, + name: "w xyz", + url: lo.Must(url.Parse("https://sub.hugo2.com/dir?p=1#test")), + active: true, + trigger: WebhookTrigger{}, + updatedAt: now, + }, + }, + updatedAt: now, + }, + want: &Integration{ + id: iId, + name: "xyz", + description: "xyz d", + logoUrl: lo.Must(url.Parse("https://sub.hugo.com/dir?p=1#test")), + iType: TypePublic, + token: "token", + developer: uId, + webhooks: []*Webhook{ + { + id: wId, + name: "w xyz", + url: lo.Must(url.Parse("https://sub.hugo2.com/dir?p=1#test")), + active: true, + trigger: WebhookTrigger{}, + updatedAt: now, + }, + }, + updatedAt: now, + }, + }, + { + name: "nil", + i: nil, + want: nil, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + assert.Equalf(t, tt.want, tt.i.Clone(), "Clone()") + }) + } +} + +func TestIntegration_CreatedAt(t *testing.T) { + iId := id.NewIntegrationID() + type fields struct { + id ID + name string + description string + logoUrl *url.URL + iType Type + token string + developer UserID + webhook []*Webhook + updatedAt time.Time + } + tests := []struct { + name string + fields fields + want time.Time + }{ + { + name: "test", + fields: fields{id: iId}, + want: iId.Timestamp(), + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + i := &Integration{ + id: tt.fields.id, + name: tt.fields.name, + 
description: tt.fields.description, + logoUrl: tt.fields.logoUrl, + iType: tt.fields.iType, + token: tt.fields.token, + developer: tt.fields.developer, + webhooks: tt.fields.webhook, + updatedAt: tt.fields.updatedAt, + } + assert.Equalf(t, tt.want, i.CreatedAt(), "CreatedAt()") + }) + } +} + +func TestIntegration_Description(t *testing.T) { + type fields struct { + id ID + name string + description string + logoUrl *url.URL + iType Type + token string + developer UserID + webhook []*Webhook + updatedAt time.Time + } + tests := []struct { + name string + fields fields + want string + }{ + { + name: "test", + fields: fields{description: "xyz"}, + want: "xyz", + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + i := &Integration{ + id: tt.fields.id, + name: tt.fields.name, + description: tt.fields.description, + logoUrl: tt.fields.logoUrl, + iType: tt.fields.iType, + token: tt.fields.token, + developer: tt.fields.developer, + webhooks: tt.fields.webhook, + updatedAt: tt.fields.updatedAt, + } + assert.Equalf(t, tt.want, i.Description(), "Description()") + }) + } +} + +func TestIntegration_Developer(t *testing.T) { + uId := accountdomain.NewUserID() + type fields struct { + id ID + name string + description string + logoUrl *url.URL + iType Type + token string + developer UserID + webhook []*Webhook + updatedAt time.Time + } + tests := []struct { + name string + fields fields + want UserID + }{ + { + name: "test", + fields: fields{developer: uId}, + want: uId, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + i := &Integration{ + id: tt.fields.id, + name: tt.fields.name, + description: tt.fields.description, + logoUrl: tt.fields.logoUrl, + iType: tt.fields.iType, + token: tt.fields.token, + developer: tt.fields.developer, + webhooks: tt.fields.webhook, + updatedAt: tt.fields.updatedAt, + } + assert.Equalf(t, tt.want, i.Developer(), "Developer()") + }) + } +} + 
+func TestIntegration_ID(t *testing.T) { + iId := id.NewIntegrationID() + type fields struct { + id ID + name string + description string + logoUrl *url.URL + iType Type + token string + developer UserID + webhook []*Webhook + updatedAt time.Time + } + tests := []struct { + name string + fields fields + want ID + }{ + { + name: "test", + fields: fields{id: iId}, + want: iId, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + i := &Integration{ + id: tt.fields.id, + name: tt.fields.name, + description: tt.fields.description, + logoUrl: tt.fields.logoUrl, + iType: tt.fields.iType, + token: tt.fields.token, + developer: tt.fields.developer, + webhooks: tt.fields.webhook, + updatedAt: tt.fields.updatedAt, + } + assert.Equalf(t, tt.want, i.ID(), "ID()") + }) + } +} + +func TestIntegration_LogoUrl(t *testing.T) { + type fields struct { + id ID + name string + description string + logoUrl *url.URL + iType Type + token string + developer UserID + webhook []*Webhook + updatedAt time.Time + } + tests := []struct { + name string + fields fields + want *url.URL + }{ + { + name: "test", + fields: fields{logoUrl: lo.Must(url.Parse("https://sub.hugo.com/dir?p=1#test"))}, + want: lo.Must(url.Parse("https://sub.hugo.com/dir?p=1#test")), + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + i := &Integration{ + id: tt.fields.id, + name: tt.fields.name, + description: tt.fields.description, + logoUrl: tt.fields.logoUrl, + iType: tt.fields.iType, + token: tt.fields.token, + developer: tt.fields.developer, + webhooks: tt.fields.webhook, + updatedAt: tt.fields.updatedAt, + } + assert.Equalf(t, tt.want, i.LogoUrl(), "LogoUrl()") + }) + } +} + +func TestIntegration_Name(t *testing.T) { + type fields struct { + id ID + name string + description string + logoUrl *url.URL + iType Type + token string + developer UserID + webhook []*Webhook + updatedAt time.Time + } + tests := []struct { 
+ name string + fields fields + want string + }{ + { + name: "test", + fields: fields{name: "xyz"}, + want: "xyz", + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + i := &Integration{ + id: tt.fields.id, + name: tt.fields.name, + description: tt.fields.description, + logoUrl: tt.fields.logoUrl, + iType: tt.fields.iType, + token: tt.fields.token, + developer: tt.fields.developer, + webhooks: tt.fields.webhook, + updatedAt: tt.fields.updatedAt, + } + assert.Equalf(t, tt.want, i.Name(), "Name()") + }) + } +} + +func TestIntegration_SetDescription(t *testing.T) { + type fields struct { + id ID + name string + description string + logoUrl *url.URL + iType Type + token string + developer UserID + webhook []*Webhook + updatedAt time.Time + } + type args struct { + description string + } + tests := []struct { + name string + fields fields + args args + want string + }{ + { + name: "set", + fields: fields{}, + args: args{description: "test"}, + want: "test", + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + i := &Integration{ + id: tt.fields.id, + name: tt.fields.name, + description: tt.fields.description, + logoUrl: tt.fields.logoUrl, + iType: tt.fields.iType, + token: tt.fields.token, + developer: tt.fields.developer, + webhooks: tt.fields.webhook, + updatedAt: tt.fields.updatedAt, + } + i.SetDescription(tt.args.description) + assert.Equal(t, tt.want, i.description) + }) + } +} + +func TestIntegration_SetDeveloper(t *testing.T) { + uId := accountdomain.NewUserID() + type fields struct { + id ID + name string + description string + logoUrl *url.URL + iType Type + token string + developer UserID + webhook []*Webhook + updatedAt time.Time + } + type args struct { + developer UserID + } + tests := []struct { + name string + fields fields + args args + want UserID + }{ + { + name: "set", + fields: fields{}, + args: args{developer: uId}, + want: uId, + }, + } + for _, 
tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + i := &Integration{ + id: tt.fields.id, + name: tt.fields.name, + description: tt.fields.description, + logoUrl: tt.fields.logoUrl, + iType: tt.fields.iType, + token: tt.fields.token, + developer: tt.fields.developer, + webhooks: tt.fields.webhook, + updatedAt: tt.fields.updatedAt, + } + i.SetDeveloper(tt.args.developer) + assert.Equal(t, tt.want, i.developer) + }) + } +} + +func TestIntegration_SetLogoUrl(t *testing.T) { + type fields struct { + id ID + name string + description string + logoUrl *url.URL + iType Type + token string + developer UserID + webhook []*Webhook + updatedAt time.Time + } + type args struct { + logoUrl *url.URL + } + tests := []struct { + name string + fields fields + args args + want string + }{ + { + name: "set", + fields: fields{}, + args: args{logoUrl: lo.Must(url.Parse("https://sub.hugo.com/dir?p=1#test"))}, + want: "https://sub.hugo.com/dir?p=1#test", + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + i := &Integration{ + id: tt.fields.id, + name: tt.fields.name, + description: tt.fields.description, + logoUrl: tt.fields.logoUrl, + iType: tt.fields.iType, + token: tt.fields.token, + developer: tt.fields.developer, + webhooks: tt.fields.webhook, + updatedAt: tt.fields.updatedAt, + } + i.SetLogoUrl(tt.args.logoUrl) + assert.Equal(t, tt.want, i.logoUrl.String()) + }) + } +} + +func TestIntegration_SetName(t *testing.T) { + type fields struct { + id ID + name string + description string + logoUrl *url.URL + iType Type + token string + developer UserID + webhook []*Webhook + updatedAt time.Time + } + type args struct { + name string + } + tests := []struct { + name string + fields fields + args args + want string + }{ + { + name: "set", + fields: fields{}, + args: args{name: "test"}, + want: "test", + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() 
+ + i := &Integration{ + id: tt.fields.id, + name: tt.fields.name, + description: tt.fields.description, + logoUrl: tt.fields.logoUrl, + iType: tt.fields.iType, + token: tt.fields.token, + developer: tt.fields.developer, + webhooks: tt.fields.webhook, + updatedAt: tt.fields.updatedAt, + } + i.SetName(tt.args.name) + assert.Equal(t, tt.want, i.name) + }) + } +} + +func TestIntegration_SetToken(t *testing.T) { + type fields struct { + id ID + name string + description string + logoUrl *url.URL + iType Type + token string + developer UserID + webhook []*Webhook + updatedAt time.Time + } + type args struct { + token string + } + tests := []struct { + name string + fields fields + args args + want string + }{ + { + name: "test", + fields: fields{}, + args: args{token: "test"}, + want: "test", + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + i := &Integration{ + id: tt.fields.id, + name: tt.fields.name, + description: tt.fields.description, + logoUrl: tt.fields.logoUrl, + iType: tt.fields.iType, + token: tt.fields.token, + developer: tt.fields.developer, + webhooks: tt.fields.webhook, + updatedAt: tt.fields.updatedAt, + } + i.SetToken(tt.args.token) + assert.Equal(t, tt.want, i.token) + }) + } +} + +func TestIntegration_SetType(t *testing.T) { + type fields struct { + id ID + name string + description string + logoUrl *url.URL + iType Type + token string + developer UserID + webhook []*Webhook + updatedAt time.Time + } + type args struct { + t Type + } + tests := []struct { + name string + fields fields + args args + want Type + }{ + { + name: "set", + fields: fields{}, + args: args{t: TypePublic}, + want: TypePublic, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + i := &Integration{ + id: tt.fields.id, + name: tt.fields.name, + description: tt.fields.description, + logoUrl: tt.fields.logoUrl, + iType: tt.fields.iType, + token: tt.fields.token, + developer: 
tt.fields.developer, + webhooks: tt.fields.webhook, + updatedAt: tt.fields.updatedAt, + } + i.SetType(tt.args.t) + assert.Equal(t, tt.want, i.iType) + }) + } +} + +func TestIntegration_SetUpdatedAt(t *testing.T) { + now := time.Now() + type fields struct { + id ID + name string + description string + logoUrl *url.URL + iType Type + token string + developer UserID + webhook []*Webhook + updatedAt time.Time + } + type args struct { + updatedAt time.Time + } + tests := []struct { + name string + fields fields + args args + want time.Time + }{ + { + name: "set", + fields: fields{}, + args: args{updatedAt: now}, + want: now, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + i := &Integration{ + id: tt.fields.id, + name: tt.fields.name, + description: tt.fields.description, + logoUrl: tt.fields.logoUrl, + iType: tt.fields.iType, + token: tt.fields.token, + developer: tt.fields.developer, + webhooks: tt.fields.webhook, + updatedAt: tt.fields.updatedAt, + } + i.SetUpdatedAt(tt.args.updatedAt) + assert.Equal(t, tt.want, i.updatedAt) + }) + } +} + +func TestIntegration_SetWebhook(t *testing.T) { + wId := id.NewWebhookID() + now := time.Now() + type fields struct { + id ID + name string + description string + logoUrl *url.URL + iType Type + token string + developer UserID + webhook []*Webhook + updatedAt time.Time + } + type args struct { + webhook []*Webhook + } + tests := []struct { + name string + fields fields + args args + want []*Webhook + }{ + { + name: "set", + fields: fields{}, + args: args{webhook: []*Webhook{}}, + want: []*Webhook{}, + }, + { + name: "set", + fields: fields{}, + args: args{webhook: []*Webhook{{ + id: wId, + name: "xyz", + url: nil, + active: false, + trigger: WebhookTrigger{}, + updatedAt: now, + }}}, + want: []*Webhook{{ + id: wId, + name: "xyz", + url: nil, + active: false, + trigger: WebhookTrigger{}, + updatedAt: now, + }}, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, 
func(t *testing.T) { + t.Parallel() + + i := &Integration{ + id: tt.fields.id, + name: tt.fields.name, + description: tt.fields.description, + logoUrl: tt.fields.logoUrl, + iType: tt.fields.iType, + token: tt.fields.token, + developer: tt.fields.developer, + webhooks: tt.fields.webhook, + updatedAt: tt.fields.updatedAt, + } + i.SetWebhook(tt.args.webhook) + assert.Equal(t, tt.want, i.webhooks) + }) + } +} + +func TestIntegration_Token(t *testing.T) { + type fields struct { + id ID + name string + description string + logoUrl *url.URL + iType Type + token string + developer UserID + webhook []*Webhook + updatedAt time.Time + } + tests := []struct { + name string + fields fields + want string + }{ + { + name: "test", + fields: fields{token: "xyz"}, + want: "xyz", + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + i := &Integration{ + id: tt.fields.id, + name: tt.fields.name, + description: tt.fields.description, + logoUrl: tt.fields.logoUrl, + iType: tt.fields.iType, + token: tt.fields.token, + developer: tt.fields.developer, + webhooks: tt.fields.webhook, + updatedAt: tt.fields.updatedAt, + } + assert.Equalf(t, tt.want, i.Token(), "Token()") + }) + } +} + +func TestIntegration_Type(t *testing.T) { + type fields struct { + id ID + name string + description string + logoUrl *url.URL + iType Type + token string + developer UserID + webhook []*Webhook + updatedAt time.Time + } + tests := []struct { + name string + fields fields + want Type + }{ + { + name: "test", + fields: fields{iType: TypePublic}, + want: TypePublic, + }, + { + name: "test", + fields: fields{iType: TypePrivate}, + want: TypePrivate, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + i := &Integration{ + id: tt.fields.id, + name: tt.fields.name, + description: tt.fields.description, + logoUrl: tt.fields.logoUrl, + iType: tt.fields.iType, + token: tt.fields.token, + developer: 
tt.fields.developer, + webhooks: tt.fields.webhook, + updatedAt: tt.fields.updatedAt, + } + assert.Equalf(t, tt.want, i.Type(), "Type()") + }) + } +} + +func TestIntegration_UpdatedAt(t *testing.T) { + iId := id.NewIntegrationID() + now := time.Now() + type fields struct { + id ID + name string + description string + logoUrl *url.URL + iType Type + token string + developer UserID + webhook []*Webhook + updatedAt time.Time + } + tests := []struct { + name string + fields fields + want time.Time + }{ + { + name: "test", + fields: fields{updatedAt: now}, + want: now, + }, + { + name: "test", + fields: fields{id: iId}, + want: iId.Timestamp(), + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + i := &Integration{ + id: tt.fields.id, + name: tt.fields.name, + description: tt.fields.description, + logoUrl: tt.fields.logoUrl, + iType: tt.fields.iType, + token: tt.fields.token, + developer: tt.fields.developer, + webhooks: tt.fields.webhook, + updatedAt: tt.fields.updatedAt, + } + assert.Equalf(t, tt.want, i.UpdatedAt(), "UpdatedAt()") + }) + } +} + +func TestIntegration_Webhooks(t *testing.T) { + wId := id.NewWebhookID() + now := time.Now() + type fields struct { + id ID + name string + description string + logoUrl *url.URL + iType Type + token string + developer UserID + webhook []*Webhook + updatedAt time.Time + } + tests := []struct { + name string + fields fields + want []*Webhook + }{ + { + name: "test", + fields: fields{webhook: []*Webhook{}}, + want: []*Webhook{}, + }, + { + name: "test", + fields: fields{webhook: []*Webhook{ + { + id: wId, + name: "w xyz", + url: lo.Must(url.Parse("https://sub.hugo2.com/dir?p=1#test")), + active: true, + trigger: WebhookTrigger{}, + updatedAt: now, + }, + }}, + want: []*Webhook{ + { + id: wId, + name: "w xyz", + url: lo.Must(url.Parse("https://sub.hugo2.com/dir?p=1#test")), + active: true, + trigger: WebhookTrigger{}, + updatedAt: now, + }, + }, + }, + } + for _, tt := range 
tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + i := &Integration{ + id: tt.fields.id, + name: tt.fields.name, + description: tt.fields.description, + logoUrl: tt.fields.logoUrl, + iType: tt.fields.iType, + token: tt.fields.token, + developer: tt.fields.developer, + webhooks: tt.fields.webhook, + updatedAt: tt.fields.updatedAt, + } + assert.Equalf(t, tt.want, i.Webhooks(), "Webhook()") + }) + } +} + +func TestIntegration_Webhook(t *testing.T) { + wId := id.NewWebhookID() + now := time.Now() + type fields struct { + id ID + name string + description string + logoUrl *url.URL + iType Type + token string + developer UserID + webhooks []*Webhook + updatedAt time.Time + } + type args struct { + wId WebhookID + } + tests := []struct { + name string + fields fields + args args + want *Webhook + want1 bool + }{ + { + name: "test", + fields: fields{webhooks: []*Webhook{}}, + args: args{wId: id.NewWebhookID()}, + want: nil, + want1: false, + }, + { + name: "test", + fields: fields{webhooks: []*Webhook{ + { + id: wId, + name: "w xyz", + url: lo.Must(url.Parse("https://sub.hugo2.com/dir?p=1#test")), + active: true, + trigger: WebhookTrigger{}, + updatedAt: now, + }, + }}, + args: args{wId: wId}, + want: &Webhook{ + id: wId, + name: "w xyz", + url: lo.Must(url.Parse("https://sub.hugo2.com/dir?p=1#test")), + active: true, + trigger: WebhookTrigger{}, + updatedAt: now, + }, + want1: true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + i := &Integration{ + id: tt.fields.id, + name: tt.fields.name, + description: tt.fields.description, + logoUrl: tt.fields.logoUrl, + iType: tt.fields.iType, + token: tt.fields.token, + developer: tt.fields.developer, + webhooks: tt.fields.webhooks, + updatedAt: tt.fields.updatedAt, + } + got, got1 := i.Webhook(tt.args.wId) + assert.Equalf(t, tt.want, got, "Webhook(%v)", tt.args.wId) + assert.Equalf(t, tt.want1, got1, "Webhook(%v)", tt.args.wId) + }) + } +} + +func 
TestIntegration_AddWebhook(t *testing.T) { + wId := id.NewWebhookID() + now := time.Now() + type fields struct { + id ID + name string + description string + logoUrl *url.URL + iType Type + token string + developer UserID + webhooks []*Webhook + updatedAt time.Time + } + type args struct { + w *Webhook + } + tests := []struct { + name string + fields fields + args args + want []*Webhook + }{ + { + name: "test", + fields: fields{webhooks: []*Webhook{}}, + args: args{w: nil}, + want: []*Webhook{}, + }, + { + name: "test", + fields: fields{webhooks: []*Webhook{}}, + args: args{w: &Webhook{ + id: wId, + name: "w xyz", + url: lo.Must(url.Parse("https://sub.hugo2.com/dir?p=1#test")), + active: true, + trigger: WebhookTrigger{}, + updatedAt: now, + }}, + want: []*Webhook{ + { + id: wId, + name: "w xyz", + url: lo.Must(url.Parse("https://sub.hugo2.com/dir?p=1#test")), + active: true, + trigger: WebhookTrigger{}, + updatedAt: now, + }, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + i := &Integration{ + id: tt.fields.id, + name: tt.fields.name, + description: tt.fields.description, + logoUrl: tt.fields.logoUrl, + iType: tt.fields.iType, + token: tt.fields.token, + developer: tt.fields.developer, + webhooks: tt.fields.webhooks, + updatedAt: tt.fields.updatedAt, + } + i.AddWebhook(tt.args.w) + assert.Equal(t, tt.want, i.webhooks) + }) + } +} + +func TestIntegration_UpdateWebhook(t *testing.T) { + wId := id.NewWebhookID() + now := time.Now() + type fields struct { + id ID + name string + description string + logoUrl *url.URL + iType Type + token string + developer UserID + webhooks []*Webhook + updatedAt time.Time + } + type args struct { + wId WebhookID + w *Webhook + } + tests := []struct { + name string + fields fields + args args + want []*Webhook + want1 bool + }{ + { + name: "test", + fields: fields{webhooks: []*Webhook{}}, + args: args{wId: id.NewWebhookID(), w: nil}, + want: []*Webhook{}, + want1: false, + }, + { + name: "test", + 
fields: fields{webhooks: []*Webhook{}}, + args: args{ + wId: wId, + w: &Webhook{ + id: wId, + name: "w xyz", + url: lo.Must(url.Parse("https://sub.hugo2.com/dir?p=1#test")), + active: true, + trigger: WebhookTrigger{}, + updatedAt: now, + }}, + want: []*Webhook{}, + want1: false, + }, + { + name: "test", + fields: fields{webhooks: []*Webhook{ + { + id: wId, + name: "w xyz", + url: lo.Must(url.Parse("https://sub.hugo2.com/dir?p=1#test")), + active: true, + trigger: WebhookTrigger{}, + updatedAt: now, + }, + }}, + args: args{ + wId: wId, + w: &Webhook{ + id: wId, + name: "w xyz updated", + url: lo.Must(url.Parse("https://sub.hugo2.com/dir?p=1#test")), + active: true, + trigger: WebhookTrigger{}, + updatedAt: now, + }}, + want: []*Webhook{ + { + id: wId, + name: "w xyz updated", + url: lo.Must(url.Parse("https://sub.hugo2.com/dir?p=1#test")), + active: true, + trigger: WebhookTrigger{}, + updatedAt: now, + }, + }, + want1: true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + i := &Integration{ + id: tt.fields.id, + name: tt.fields.name, + description: tt.fields.description, + logoUrl: tt.fields.logoUrl, + iType: tt.fields.iType, + token: tt.fields.token, + developer: tt.fields.developer, + webhooks: tt.fields.webhooks, + updatedAt: tt.fields.updatedAt, + } + assert.Equal(t, tt.want1, i.UpdateWebhook(tt.args.wId, tt.args.w)) + assert.Equal(t, tt.want, i.webhooks) + }) + } +} + +func TestIntegration_DeleteWebhook(t *testing.T) { + wId := id.NewWebhookID() + now := time.Now() + type fields struct { + id ID + name string + description string + logoUrl *url.URL + iType Type + token string + developer UserID + webhooks []*Webhook + updatedAt time.Time + } + type args struct { + wId WebhookID + } + tests := []struct { + name string + fields fields + args args + want []*Webhook + want1 bool + }{ + { + name: "test", + fields: fields{webhooks: []*Webhook{}}, + args: args{wId: id.NewWebhookID()}, + want: []*Webhook{}, + want1: false, + }, + { + 
name: "test", + fields: fields{webhooks: []*Webhook{}}, + args: args{wId: wId}, + want: []*Webhook{}, + want1: false, + }, + { + name: "test", + fields: fields{webhooks: []*Webhook{ + { + id: wId, + name: "w xyz", + url: lo.Must(url.Parse("https://sub.hugo2.com/dir?p=1#test")), + active: true, + trigger: WebhookTrigger{}, + updatedAt: now, + }, + }}, + args: args{wId: wId}, + want: []*Webhook{}, + want1: true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + i := &Integration{ + id: tt.fields.id, + name: tt.fields.name, + description: tt.fields.description, + logoUrl: tt.fields.logoUrl, + iType: tt.fields.iType, + token: tt.fields.token, + developer: tt.fields.developer, + webhooks: tt.fields.webhooks, + updatedAt: tt.fields.updatedAt, + } + assert.Equal(t, tt.want1, i.DeleteWebhook(tt.args.wId)) + assert.Equal(t, tt.want, i.webhooks) + }) + } +} + +func TestIntegration_RandomToken(t *testing.T) { + i := Integration{} + i.RandomToken() + assert.NotNil(t, i.token) + assert.True(t, strings.HasPrefix(i.token, "secret_")) + assert.Equal(t, 50, len(i.token)) +} diff --git a/asset/assetdomain/integration/integration_type.go b/asset/assetdomain/integration/integration_type.go new file mode 100644 index 0000000..8b5a0f0 --- /dev/null +++ b/asset/assetdomain/integration/integration_type.go @@ -0,0 +1,22 @@ +package integration + +import "strings" + +type Type string + +const ( + TypePublic Type = "public" + + TypePrivate Type = "private" +) + +func TypeFrom(s string) Type { + switch strings.ToLower(s) { + case "public": + return TypePublic + case "private": + return TypePrivate + default: + return TypePrivate + } +} diff --git a/asset/assetdomain/integration/integration_type_test.go b/asset/assetdomain/integration/integration_type_test.go new file mode 100644 index 0000000..9d2f1d0 --- /dev/null +++ b/asset/assetdomain/integration/integration_type_test.go @@ -0,0 +1,39 @@ +package integration + +import ( + "testing" + + 
"github.com/stretchr/testify/assert" +) + +func TestTypeFrom(t *testing.T) { + tests := []struct { + name string + input string + want Type + }{ + { + name: "public", + input: "public", + want: TypePublic, + }, + { + name: "private", + input: "private", + want: TypePrivate, + }, + { + name: "other", + input: "xyz", + want: TypePrivate, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + assert.Equal(t, tt.want, TypeFrom(tt.input)) + }) + } +} diff --git a/asset/assetdomain/integration/list.go b/asset/assetdomain/integration/list.go new file mode 100644 index 0000000..06cc0bf --- /dev/null +++ b/asset/assetdomain/integration/list.go @@ -0,0 +1,30 @@ +package integration + +import ( + "github.com/reearth/reearth-cms/server/pkg/event" + "github.com/reearth/reearthx/util" + "github.com/samber/lo" + "golang.org/x/exp/slices" +) + +type List []*Integration + +func (l List) SortByID() List { + m := slices.Clone(l) + slices.SortFunc(m, func(a, b *Integration) int { + return a.ID().Compare(b.ID()) + }) + return m +} + +func (l List) Clone() List { + return util.Map(l, func(m *Integration) *Integration { return m.Clone() }) +} + +func (l List) ActiveWebhooks(ty event.Type) []*Webhook { + return lo.FlatMap(l, func(i *Integration, _ int) []*Webhook { + return lo.Filter(i.Webhooks(), func(w *Webhook, _ int) bool { + return w.Trigger().IsActive(ty) && w.Active() + }) + }) +} diff --git a/asset/assetdomain/integration/list_test.go b/asset/assetdomain/integration/list_test.go new file mode 100644 index 0000000..ea0ec6f --- /dev/null +++ b/asset/assetdomain/integration/list_test.go @@ -0,0 +1,197 @@ +package integration + +import ( + "net/url" + "testing" + "time" + + "github.com/reearth/reearth-cms/server/pkg/event" + "github.com/reearth/reearthx/account/accountdomain/user" + "github.com/samber/lo" + "github.com/stretchr/testify/assert" +) + +func TestList_SortByID(t *testing.T) { + id1 := NewID() + id2 := NewID() + + list := 
List{ + &Integration{id: id2}, + &Integration{id: id1}, + } + res := list.SortByID() + assert.Equal(t, List{ + &Integration{id: id1}, + &Integration{id: id2}, + }, res) + // test whether original list is not modified + assert.Equal(t, List{ + &Integration{id: id2}, + &Integration{id: id1}, + }, list) +} + +func TestList_Clone(t *testing.T) { + id := NewID() + list := List{&Integration{id: id}} + got := list.Clone() + assert.Equal(t, list, got) + assert.NotSame(t, list[0], got[0]) + + got[0].id = NewID() + // test whether original list is not modified + assert.Equal(t, list, List{&Integration{id: id}}) +} + +func TestList_ActiveWebhooks(t *testing.T) { + now := time.Now() + iID1 := NewID() + iID2 := NewID() + iID3 := NewID() + uID := user.NewID() + wID1 := NewWebhookID() + wID2 := NewWebhookID() + wID3 := NewWebhookID() + wID4 := NewWebhookID() + + w1 := &Webhook{ + id: wID1, + name: "w xyz", + url: lo.Must(url.Parse("https://sub.hugo2.com/dir?p=1#test")), + active: true, + trigger: WebhookTrigger{ + event.ItemCreate: true, + event.ItemUpdate: true, + event.ItemDelete: true, + event.ItemPublish: false, + event.ItemUnpublish: false, + event.AssetCreate: false, + event.AssetDecompress: false, + event.AssetDelete: false, + }, + updatedAt: now, + } + w2 := &Webhook{ + id: wID2, + name: "w abc", + url: lo.Must(url.Parse("https://sub.hugo2.com/dir?p=1#test")), + active: true, + trigger: WebhookTrigger{ + event.ItemCreate: true, + event.ItemUpdate: false, + event.ItemDelete: false, + event.ItemPublish: false, + event.ItemUnpublish: false, + event.AssetCreate: false, + event.AssetDecompress: false, + event.AssetDelete: false, + }, + updatedAt: now, + } + w3 := &Webhook{ + id: wID3, + name: "xxx", + url: lo.Must(url.Parse("https://sub.hugo2.com/dir?p=1#test")), + active: true, + trigger: WebhookTrigger{ + event.ItemCreate: true, + event.ItemUpdate: true, + event.ItemDelete: false, + event.ItemPublish: false, + event.ItemUnpublish: false, + event.AssetCreate: false, + 
event.AssetDecompress: false, + event.AssetDelete: false}, + updatedAt: now, + } + w4 := &Webhook{ + id: wID4, + name: "xxx", + url: lo.Must(url.Parse("https://sub.hugo2.com/dir?p=1#test")), + active: false, + updatedAt: now, + } + + i1 := &Integration{ + id: iID1, + name: "xyz", + description: "xyz d", + logoUrl: lo.Must(url.Parse("https://sub.hugo.com/dir?p=1#test")), + iType: TypePublic, + token: "token", + developer: uID, + webhooks: []*Webhook{ + w1, w2, + }, + } + i2 := &Integration{ + id: iID2, + name: "xxx", + description: "xyz d", + logoUrl: lo.Must(url.Parse("https://sub.hugo.com/dir?p=1#test")), + iType: TypePublic, + token: "token", + developer: uID, + webhooks: []*Webhook{ + w3, + }, + } + i3 := &Integration{ + id: iID3, + name: "xxx", + description: "xyz d", + logoUrl: lo.Must(url.Parse("https://sub.hugo.com/dir?p=1#test")), + iType: TypePublic, + token: "token", + developer: uID, + webhooks: []*Webhook{ + w4, + }, + } + iList := List{i1, i2, i3} + + // type test struct { + // eType event.Type + // expected []*Webhook + // } + + tests := []struct { + name string + eType event.Type + expected []*Webhook + }{ + { + name: "integrations have multiple active webhooks", + eType: event.Type("item.create"), + expected: []*Webhook{ + w1, w2, w3, + }, + }, + { + name: "integrations have one active webhook each", + eType: event.Type("item.update"), + expected: []*Webhook{ + w1, w3, + }, + }, + { + name: "one integration have one active webhook", + eType: event.Type("item.delete"), + expected: []*Webhook{ + w1, + }, + }, + { + name: "no integration have active webhooks", + eType: event.Type("item.publish"), + expected: []*Webhook{}, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + assert.Equal(t, tc.expected, iList.ActiveWebhooks(tc.eType)) + }) + } +} diff --git a/asset/assetdomain/integration/webhook.go b/asset/assetdomain/integration/webhook.go new file mode 100644 index 0000000..7612554 --- /dev/null +++ 
b/asset/assetdomain/integration/webhook.go @@ -0,0 +1,109 @@ +package integration + +import ( + "net/url" + "time" + + "github.com/reearth/reearth-cms/server/pkg/event" +) + +type Webhook struct { + id WebhookID + name string + url *url.URL + active bool + trigger WebhookTrigger + updatedAt time.Time + secret string +} + +type WebhookTrigger map[event.Type]bool + +func (w *Webhook) ID() WebhookID { + return w.id +} + +func (w *Webhook) Name() string { + return w.name +} + +func (w *Webhook) SetName(name string) { + w.name = name +} + +func (w *Webhook) URL() *url.URL { + return w.url +} + +func (w *Webhook) SetURL(url *url.URL) { + w.url = url +} + +func (w *Webhook) Active() bool { + return w.active +} + +func (w *Webhook) SetActive(active bool) { + w.active = active +} + +func (w *Webhook) Trigger() WebhookTrigger { + return w.trigger +} + +func (w *Webhook) SetTrigger(trigger WebhookTrigger) { + w.trigger = trigger +} + +func (w *Webhook) UpdatedAt() time.Time { + if w.updatedAt.IsZero() { + return w.id.Timestamp() + } + return w.updatedAt +} + +func (w *Webhook) CreatedAt() time.Time { + return w.id.Timestamp() +} +func (w *Webhook) SetUpdatedAt(updatedAt time.Time) { + w.updatedAt = updatedAt +} + +func (w *Webhook) Secret() string { + return w.secret +} +func (w *Webhook) SetSecret(secret string) { + w.secret = secret +} + +func (w *Webhook) Clone() *Webhook { + if w == nil { + return nil + } + + var u *url.URL = nil + if w.url != nil { + u, _ = url.Parse(w.url.String()) + } + return &Webhook{ + id: w.id.Clone(), + name: w.name, + url: u, + active: w.active, + trigger: w.trigger, + updatedAt: w.updatedAt, + secret: w.secret, + } +} + +func (t WebhookTrigger) IsActive(et event.Type) bool { + return t[et] +} + +func (t WebhookTrigger) Enable(et event.Type) { + t[et] = true +} + +func (t WebhookTrigger) Disable(et event.Type) { + delete(t, et) +} diff --git a/asset/assetdomain/integration/webhook_builder.go b/asset/assetdomain/integration/webhook_builder.go new 
file mode 100644 index 0000000..e774fda --- /dev/null +++ b/asset/assetdomain/integration/webhook_builder.go @@ -0,0 +1,74 @@ +package integration + +import ( + "net/url" + "time" + + "github.com/reearth/reearth-cms/server/pkg/id" +) + +type WebhookBuilder struct { + w *Webhook +} + +func NewWebhookBuilder() *WebhookBuilder { + return &WebhookBuilder{w: &Webhook{}} +} + +func (b *WebhookBuilder) Build() (*Webhook, error) { + if b.w.id.IsNil() { + return nil, ErrInvalidID + } + if b.w.updatedAt.IsZero() { + b.w.updatedAt = b.w.CreatedAt() + } + return b.w, nil +} + +func (b *WebhookBuilder) MustBuild() *Webhook { + r, err := b.Build() + if err != nil { + panic(err) + } + return r +} + +func (b *WebhookBuilder) NewID() *WebhookBuilder { + b.w.id = id.NewWebhookID() + return b +} + +func (b *WebhookBuilder) ID(wId WebhookID) *WebhookBuilder { + b.w.id = wId + return b +} + +func (b *WebhookBuilder) Name(name string) *WebhookBuilder { + b.w.name = name + return b +} + +func (b *WebhookBuilder) Url(url *url.URL) *WebhookBuilder { + b.w.url = url + return b +} + +func (b *WebhookBuilder) Active(active bool) *WebhookBuilder { + b.w.active = active + return b +} + +func (b *WebhookBuilder) Trigger(trigger WebhookTrigger) *WebhookBuilder { + b.w.trigger = trigger + return b +} + +func (b *WebhookBuilder) UpdatedAt(updatedAt time.Time) *WebhookBuilder { + b.w.updatedAt = updatedAt + return b +} + +func (b *WebhookBuilder) Secret(secret string) *WebhookBuilder { + b.w.secret = secret + return b +} diff --git a/asset/assetdomain/integration/webhook_builder_test.go b/asset/assetdomain/integration/webhook_builder_test.go new file mode 100644 index 0000000..5ba927e --- /dev/null +++ b/asset/assetdomain/integration/webhook_builder_test.go @@ -0,0 +1,392 @@ +package integration + +import ( + "net/url" + "testing" + "time" + + "github.com/reearth/reearth-cms/server/pkg/event" + "github.com/reearth/reearth-cms/server/pkg/id" + "github.com/samber/lo" + 
"github.com/stretchr/testify/assert" +) + +func TestNewWebhookBuilder(t *testing.T) { + tests := []struct { + name string + want *WebhookBuilder + }{ + { + name: "name", + want: &WebhookBuilder{ + w: &Webhook{}, + }, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + assert.Equalf(t, tt.want, NewWebhookBuilder(), "NewWebhookBuilder()") + }) + } +} + +func TestWebhookBuilder_Active(t *testing.T) { + type fields struct { + w *Webhook + } + type args struct { + active bool + } + tests := []struct { + name string + fields fields + args args + want *WebhookBuilder + }{ + { + name: "true", + fields: fields{w: &Webhook{}}, + args: args{active: true}, + want: &WebhookBuilder{w: &Webhook{active: true}}, + }, + { + name: "false", + fields: fields{w: &Webhook{}}, + args: args{active: false}, + want: &WebhookBuilder{w: &Webhook{active: false}}, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + b := &WebhookBuilder{ + w: tt.fields.w, + } + assert.Equalf(t, tt.want, b.Active(tt.args.active), "Active(%v)", tt.args.active) + }) + } +} + +func TestWebhookBuilder_Build(t *testing.T) { + wId := id.NewWebhookID() + now := time.Now() + type fields struct { + w *Webhook + } + tests := []struct { + name string + fields fields + want *Webhook + wantErr error + }{ + { + name: "no id", + fields: fields{w: &Webhook{}}, + want: nil, + wantErr: ErrInvalidID, + }, + { + name: "no update at", + fields: fields{w: &Webhook{id: wId}}, + want: &Webhook{id: wId, updatedAt: wId.Timestamp()}, + wantErr: nil, + }, + { + name: "full", + fields: fields{w: &Webhook{id: wId, updatedAt: now, active: true, name: "xyz", trigger: WebhookTrigger{}}}, + want: &Webhook{id: wId, updatedAt: now, active: true, name: "xyz", trigger: WebhookTrigger{}}, + wantErr: nil, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + b := &WebhookBuilder{ + w: 
tt.fields.w, + } + got, err := b.Build() + if tt.wantErr != nil { + assert.Equal(t, tt.wantErr, err) + assert.Nil(t, got) + return + } + assert.NoError(t, err) + assert.Equalf(t, tt.want, got, "Build()") + }) + } +} + +func TestWebhookBuilder_ID(t *testing.T) { + wId := id.NewWebhookID() + type fields struct { + w *Webhook + } + type args struct { + wId WebhookID + } + tests := []struct { + name string + fields fields + args args + want *WebhookBuilder + }{ + { + name: "set", + fields: fields{w: &Webhook{}}, + args: args{wId: wId}, + want: &WebhookBuilder{w: &Webhook{id: wId}}, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + b := &WebhookBuilder{ + w: tt.fields.w, + } + assert.Equalf(t, tt.want, b.ID(tt.args.wId), "ID(%v)", tt.args.wId) + }) + } +} + +func TestWebhookBuilder_MustBuild(t *testing.T) { + wId := id.NewWebhookID() + now := time.Now() + type fields struct { + w *Webhook + } + tests := []struct { + name string + fields fields + want *Webhook + wantErr error + }{ + { + name: "no id", + fields: fields{w: &Webhook{}}, + want: nil, + wantErr: ErrInvalidID, + }, + { + name: "no update at", + fields: fields{w: &Webhook{id: wId}}, + want: &Webhook{id: wId, updatedAt: wId.Timestamp()}, + wantErr: nil, + }, + { + name: "full", + fields: fields{w: &Webhook{id: wId, updatedAt: now, active: true, name: "xyz", trigger: WebhookTrigger{}}}, + want: &Webhook{id: wId, updatedAt: now, active: true, name: "xyz", trigger: WebhookTrigger{}}, + wantErr: nil, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + b := &WebhookBuilder{ + w: tt.fields.w, + } + + if tt.wantErr != nil { + assert.PanicsWithValue(t, tt.wantErr, func() { _ = b.MustBuild() }) + } else { + assert.Equal(t, tt.want, b.MustBuild()) + } + }) + } +} + +func TestWebhookBuilder_Name(t *testing.T) { + type fields struct { + w *Webhook + } + type args struct { + name string + } + tests := []struct { + 
name string + fields fields + args args + want *WebhookBuilder + }{ + { + name: "set", + fields: fields{w: &Webhook{}}, + args: args{name: "test"}, + want: &WebhookBuilder{w: &Webhook{name: "test"}}, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + b := &WebhookBuilder{ + w: tt.fields.w, + } + assert.Equalf(t, tt.want, b.Name(tt.args.name), "Name(%v)", tt.args.name) + }) + } +} + +func TestWebhookBuilder_Trigger(t *testing.T) { + type fields struct { + w *Webhook + } + type args struct { + trigger WebhookTrigger + } + tests := []struct { + name string + fields fields + args args + want *WebhookBuilder + }{ + { + name: "set", + fields: fields{w: &Webhook{}}, + args: args{trigger: WebhookTrigger{ + event.ItemCreate: true, + event.ItemUpdate: true, + event.ItemDelete: true, + event.ItemPublish: true, + event.ItemUnpublish: true, + event.AssetCreate: true, + event.AssetDecompress: true, + event.AssetDelete: true, + }}, + want: &WebhookBuilder{w: &Webhook{trigger: WebhookTrigger{ + event.ItemCreate: true, + event.ItemUpdate: true, + event.ItemDelete: true, + event.ItemPublish: true, + event.ItemUnpublish: true, + event.AssetCreate: true, + event.AssetDecompress: true, + event.AssetDelete: true, + }}}, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + b := &WebhookBuilder{ + w: tt.fields.w, + } + assert.Equalf(t, tt.want, b.Trigger(tt.args.trigger), "Trigger(%v)", tt.args.trigger) + }) + } +} + +func TestWebhookBuilder_UpdatedAt(t *testing.T) { + now := time.Now() + type fields struct { + w *Webhook + } + type args struct { + updatedAt time.Time + } + tests := []struct { + name string + fields fields + args args + want *WebhookBuilder + }{ + { + name: "set", + fields: fields{w: &Webhook{}}, + args: args{updatedAt: now}, + want: &WebhookBuilder{w: &Webhook{updatedAt: now}}, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + 
t.Parallel() + + b := &WebhookBuilder{ + w: tt.fields.w, + } + assert.Equalf(t, tt.want, b.UpdatedAt(tt.args.updatedAt), "UpdatedAt(%v)", tt.args.updatedAt) + }) + } +} + +func TestWebhookBuilder_Url(t *testing.T) { + type fields struct { + w *Webhook + } + type args struct { + url *url.URL + } + tests := []struct { + name string + fields fields + args args + want *WebhookBuilder + }{ + { + name: "set", + fields: fields{w: &Webhook{}}, + args: args{url: lo.Must(url.Parse("https://sub.hugo.com/dir?p=1#test"))}, + want: &WebhookBuilder{w: &Webhook{url: lo.Must(url.Parse("https://sub.hugo.com/dir?p=1#test"))}}, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + b := &WebhookBuilder{ + w: tt.fields.w, + } + assert.Equalf(t, tt.want, b.Url(tt.args.url), "Url(%v)", tt.args.url) + }) + } +} + +func TestWebhookBuilder_NewID(t *testing.T) { + type fields struct { + w *Webhook + } + tests := []struct { + name string + fields fields + }{ + { + name: "test", + fields: fields{w: &Webhook{}}, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + b := &WebhookBuilder{ + w: tt.fields.w, + } + b.NewID() + assert.False(t, b.w.id.IsNil()) + assert.False(t, b.w.id.IsEmpty()) + }) + } +} + +func TestWebhookBuilder_Secret(t *testing.T) { + b := NewWebhookBuilder() + b.Secret("xyz") + assert.Equal(t, "xyz", b.w.secret) +} diff --git a/asset/assetdomain/integration/webhook_test.go b/asset/assetdomain/integration/webhook_test.go new file mode 100644 index 0000000..aef0137 --- /dev/null +++ b/asset/assetdomain/integration/webhook_test.go @@ -0,0 +1,501 @@ +package integration + +import ( + "net/url" + "testing" + "time" + + "github.com/reearth/reearth-cms/server/pkg/event" + "github.com/reearth/reearth-cms/server/pkg/id" + "github.com/samber/lo" + "github.com/stretchr/testify/assert" +) + +func TestWebhook_Active(t *testing.T) { + tests := []struct { + name string + w *Webhook + want 
bool + }{ + { + name: "true", + w: &Webhook{active: true}, + want: true, + }, + { + name: "false", + w: &Webhook{active: false}, + want: false, + }, + { + name: "not set", + w: &Webhook{active: false}, + want: false, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + assert.Equalf(t, tt.want, tt.w.Active(), "Active()") + }) + } +} + +func TestWebhook_Clone(t *testing.T) { + wId := id.NewWebhookID() + now := time.Now() + tests := []struct { + name string + w *Webhook + want *Webhook + }{ + { + name: "clone", + w: &Webhook{ + id: wId, + name: "w1", + url: lo.Must(url.Parse("https://sub.hugo.com/dir?p=1#test")), + active: true, + trigger: WebhookTrigger{ + event.ItemCreate: false, + event.ItemUpdate: false, + event.ItemDelete: false, + event.ItemPublish: false, + event.ItemUnpublish: false, + event.AssetCreate: false, + event.AssetDecompress: false, + event.AssetDelete: false, + }, + updatedAt: now, + }, + want: &Webhook{ + id: wId, + name: "w1", + url: lo.Must(url.Parse("https://sub.hugo.com/dir?p=1#test")), + active: true, + trigger: WebhookTrigger{ + event.ItemCreate: false, + event.ItemUpdate: false, + event.ItemDelete: false, + event.ItemPublish: false, + event.ItemUnpublish: false, + event.AssetCreate: false, + event.AssetDecompress: false, + event.AssetDelete: false, + }, + updatedAt: now, + }, + }, + { + name: "nil", + w: nil, + want: nil, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + assert.Equalf(t, tt.want, tt.w.Clone(), "Clone()") + if tt.want != nil { + assert.NotSame(t, tt.want, tt.w) + } + }) + } +} + +func TestWebhook_CreatedAt(t *testing.T) { + wId := id.NewWebhookID() + tests := []struct { + name string + w *Webhook + want time.Time + }{ + { + name: "test", + w: &Webhook{id: wId}, + want: wId.Timestamp(), + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + assert.Equalf(t, tt.want, 
tt.w.CreatedAt(), "CreatedAt()") + }) + } +} + +func TestWebhook_ID(t *testing.T) { + wId := id.NewWebhookID() + tests := []struct { + name string + w *Webhook + want WebhookID + }{ + { + name: "set", + w: &Webhook{id: wId}, + want: wId, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + assert.Equalf(t, tt.want, tt.w.ID(), "ID()") + }) + } +} + +func TestWebhook_Name(t *testing.T) { + tests := []struct { + name string + w *Webhook + want string + }{ + { + name: "set", + w: &Webhook{name: "test"}, + want: "test", + }, + { + name: "not set", + w: &Webhook{}, + want: "", + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + assert.Equalf(t, tt.want, tt.w.Name(), "Name()") + }) + } +} + +func TestWebhook_SetActive(t *testing.T) { + type args struct { + active bool + } + tests := []struct { + name string + w *Webhook + args args + want bool + }{ + { + name: "set", + w: &Webhook{}, + args: args{active: true}, + want: true, + }, + { + name: "unset", + w: &Webhook{}, + args: args{active: false}, + want: false, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + tt.w.SetActive(tt.args.active) + assert.Equal(t, tt.want, tt.w.active) + }) + } +} + +func TestWebhook_SetName(t *testing.T) { + type args struct { + name string + } + tests := []struct { + name string + w *Webhook + args args + want string + }{ + { + name: "set", + w: &Webhook{}, + args: args{name: "xyz"}, + want: "xyz", + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + tt.w.SetName(tt.args.name) + assert.Equal(t, tt.want, tt.w.name) + }) + } +} + +func TestWebhook_SetTrigger(t *testing.T) { + type args struct { + trigger WebhookTrigger + } + tests := []struct { + name string + w *Webhook + args args + want WebhookTrigger + }{ + { + name: "set", + w: &Webhook{}, + args: args{trigger: WebhookTrigger{ + 
event.ItemCreate: false, + event.ItemUpdate: false, + event.ItemDelete: false, + event.ItemPublish: false, + event.ItemUnpublish: false, + event.AssetCreate: false, + event.AssetDecompress: false, + event.AssetDelete: false, + }}, + want: WebhookTrigger{ + event.ItemCreate: false, + event.ItemUpdate: false, + event.ItemDelete: false, + event.ItemPublish: false, + event.ItemUnpublish: false, + event.AssetCreate: false, + event.AssetDecompress: false, + event.AssetDelete: false, + }, + }, + { + name: "set true", + w: &Webhook{}, + args: args{trigger: WebhookTrigger{ + event.ItemCreate: true, + event.ItemUpdate: true, + event.ItemDelete: true, + event.ItemPublish: true, + event.ItemUnpublish: true, + event.AssetCreate: true, + event.AssetDecompress: true, + event.AssetDelete: true, + }}, + want: WebhookTrigger{ + event.ItemCreate: true, + event.ItemUpdate: true, + event.ItemDelete: true, + event.ItemPublish: true, + event.ItemUnpublish: true, + event.AssetCreate: true, + event.AssetDecompress: true, + event.AssetDelete: true, + }, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + tt.w.SetTrigger(tt.args.trigger) + assert.Equal(t, tt.want, tt.w.trigger) + }) + } +} + +func TestWebhook_SetUpdatedAt(t *testing.T) { + now := time.Now() + type args struct { + updatedAt time.Time + } + tests := []struct { + name string + w *Webhook + args args + want time.Time + }{ + { + name: "set", + w: &Webhook{}, + args: args{updatedAt: now}, + want: now, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + tt.w.SetUpdatedAt(tt.args.updatedAt) + assert.Equal(t, tt.want, tt.w.updatedAt) + }) + } +} + +func TestWebhook_SetUrl(t *testing.T) { + type args struct { + url *url.URL + } + tests := []struct { + name string + w *Webhook + args args + want string + }{ + { + name: "set", + w: &Webhook{}, + args: args{lo.Must(url.Parse("https://sub.hugo.com/dir?p=1#test"))}, + want: 
"https://sub.hugo.com/dir?p=1#test", + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + tt.w.SetURL(tt.args.url) + assert.Equal(t, tt.want, tt.w.url.String()) + }) + } +} + +func TestWebhook_Trigger(t *testing.T) { + tests := []struct { + name string + w *Webhook + want WebhookTrigger + }{ + { + name: "get falsy", + w: &Webhook{trigger: WebhookTrigger{ + event.ItemCreate: false, + event.ItemUpdate: false, + event.ItemDelete: false, + event.ItemPublish: false, + event.ItemUnpublish: false, + event.AssetCreate: false, + event.AssetDecompress: false, + event.AssetDelete: false, + }}, + want: WebhookTrigger{ + event.ItemCreate: false, + event.ItemUpdate: false, + event.ItemDelete: false, + event.ItemPublish: false, + event.ItemUnpublish: false, + event.AssetCreate: false, + event.AssetDecompress: false, + event.AssetDelete: false, + }, + }, + { + name: "get true", + w: &Webhook{trigger: WebhookTrigger{ + event.ItemCreate: true, + event.ItemUpdate: true, + event.ItemDelete: true, + event.ItemPublish: true, + event.ItemUnpublish: true, + event.AssetCreate: true, + event.AssetDecompress: true, + event.AssetDelete: true, + }}, + want: WebhookTrigger{ + event.ItemCreate: true, + event.ItemUpdate: true, + event.ItemDelete: true, + event.ItemPublish: true, + event.ItemUnpublish: true, + event.AssetCreate: true, + event.AssetDecompress: true, + event.AssetDelete: true, + }, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + assert.Equalf(t, tt.want, tt.w.Trigger(), "Trigger()") + }) + } +} + +func TestWebhook_UpdatedAt(t *testing.T) { + now := time.Now() + wId := id.NewWebhookID() + tests := []struct { + name string + w *Webhook + want time.Time + }{ + { + name: "set", + w: &Webhook{id: wId, updatedAt: now}, + want: now, + }, + { + name: "not set", + w: &Webhook{id: wId}, + want: wId.Timestamp(), + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t 
*testing.T) { + t.Parallel() + + assert.Equalf(t, tt.want, tt.w.UpdatedAt(), "UpdatedAt()") + }) + } +} + +func TestWebhook_Url(t *testing.T) { + tests := []struct { + name string + w *Webhook + want string + }{ + { + name: "set", + w: &Webhook{url: lo.Must(url.Parse("https://sub.hugo.com/dir?p=1#test"))}, + want: "https://sub.hugo.com/dir?p=1#test", + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + assert.Equalf(t, tt.want, tt.w.URL().String(), "Url()") + }) + } +} + +func TestWebhook_Secret(t *testing.T) { + w := Webhook{} + w.SetSecret("xyz") + assert.Equal(t, "xyz", w.secret) + assert.Equal(t, "xyz", w.Secret()) +} + +func TestWebhookTrigger_Enable(t *testing.T) { + wt := WebhookTrigger{} + + assert.False(t, wt.IsActive(event.ItemCreate)) + wt.Enable(event.ItemCreate) + assert.True(t, wt.IsActive(event.ItemCreate)) + wt.Disable(event.ItemCreate) + assert.False(t, wt.IsActive(event.ItemCreate)) +} diff --git a/asset/assetdomain/operator/id.go b/asset/assetdomain/operator/id.go new file mode 100644 index 0000000..45ef255 --- /dev/null +++ b/asset/assetdomain/operator/id.go @@ -0,0 +1,14 @@ +package operator + +import ( + "github.com/reearth/reearthx/account/accountdomain" + id "github.com/reearth/reearthx/asset/assetdomain" + "github.com/reearth/reearthx/idx" +) + +type ID = id.EventID +type UserID = accountdomain.UserID +type IntegrationID = id.IntegrationID + +var ErrInvalidID = idx.ErrInvalidID +var NewIntegrationID = id.NewIntegrationID diff --git a/asset/assetdomain/operator/operator.go b/asset/assetdomain/operator/operator.go new file mode 100644 index 0000000..ff7c69a --- /dev/null +++ b/asset/assetdomain/operator/operator.go @@ -0,0 +1,43 @@ +package operator + +import "github.com/reearth/reearthx/account/accountdomain" + +type Operator struct { + user *accountdomain.UserID + integration *IntegrationID + isMachine bool +} + +func OperatorFromUser(user accountdomain.UserID) Operator { + return 
Operator{ + user: &user, + } +} + +func OperatorFromIntegration(integration IntegrationID) Operator { + return Operator{ + integration: &integration, + } +} + +func OperatorFromMachine() Operator { + return Operator{ + isMachine: true, + } +} + +func (o Operator) User() *accountdomain.UserID { + return o.user.CloneRef() +} + +func (o Operator) Integration() *IntegrationID { + return o.integration.CloneRef() +} + +func (o Operator) Machine() bool { + return o.isMachine +} + +func (o Operator) Validate() bool { + return !o.user.IsNil() || !o.integration.IsNil() || o.Machine() +} diff --git a/asset/assetdomain/operator/operator_test.go b/asset/assetdomain/operator/operator_test.go new file mode 100644 index 0000000..f09d8cc --- /dev/null +++ b/asset/assetdomain/operator/operator_test.go @@ -0,0 +1,37 @@ +package operator + +import ( + "testing" + + "github.com/reearth/reearthx/account/accountdomain" + "github.com/stretchr/testify/assert" +) + +func TestOperator(t *testing.T) { + uID := accountdomain.NewUserID() + iID := NewIntegrationID() + + uOp := OperatorFromUser(uID) + iOp := OperatorFromIntegration(iID) + cmsOp := OperatorFromMachine() + + assert.NotNil(t, uOp) + assert.NotNil(t, iOp) + + assert.Equal(t, uID, *uOp.User()) + assert.Nil(t, uOp.Integration()) + assert.False(t, uOp.Machine()) + + assert.Equal(t, iID, *iOp.Integration()) + assert.Nil(t, iOp.User()) + assert.False(t, uOp.Machine()) + + assert.True(t, cmsOp.Machine()) + assert.Nil(t, cmsOp.User()) + assert.Nil(t, cmsOp.Integration()) + + assert.True(t, uOp.Validate()) + assert.True(t, iOp.Validate()) + assert.True(t, cmsOp.Validate()) + +} diff --git a/asset/assetdomain/project/builder.go b/asset/assetdomain/project/builder.go new file mode 100644 index 0000000..d45ab0b --- /dev/null +++ b/asset/assetdomain/project/builder.go @@ -0,0 +1,96 @@ +package project + +import ( + "net/url" + "time" + + "github.com/reearth/reearthx/account/accountdomain" + 
"github.com/reearth/reearthx/account/accountdomain/workspace" + "golang.org/x/exp/slices" +) + +type Builder struct { + p *Project +} + +func New() *Builder { + return &Builder{p: &Project{}} +} + +func (b *Builder) Build() (*Project, error) { + if b.p.id.IsNil() { + return nil, ErrInvalidID + } + if b.p.alias != "" && !CheckAliasPattern(b.p.alias) { + return nil, ErrInvalidAlias + } + if b.p.updatedAt.IsZero() { + b.p.updatedAt = b.p.CreatedAt() + } + + return b.p, nil +} + +func (b *Builder) MustBuild() *Project { + r, err := b.Build() + if err != nil { + panic(err) + } + return r +} + +func (b *Builder) ID(id ID) *Builder { + b.p.id = id + return b +} + +func (b *Builder) NewID() *Builder { + b.p.id = NewID() + return b +} + +func (b *Builder) UpdatedAt(updatedAt time.Time) *Builder { + b.p.updatedAt = updatedAt + return b +} + +func (b *Builder) Name(name string) *Builder { + b.p.name = name + return b +} + +func (b *Builder) Description(description string) *Builder { + b.p.description = description + return b +} + +func (b *Builder) Alias(alias string) *Builder { + b.p.alias = alias + return b +} + +func (b *Builder) ImageURL(imageURL *url.URL) *Builder { + if imageURL == nil { + b.p.imageURL = nil + } else { + // https://github.com/golang/go/issues/38351 + imageURL2 := *imageURL + b.p.imageURL = &imageURL2 + } + return b +} + +func (b *Builder) Workspace(team accountdomain.WorkspaceID) *Builder { + b.p.workspaceID = team + return b +} + +func (b *Builder) Publication(publication *Publication) *Builder { + b.p.publication = publication + return b +} + +func (b *Builder) RequestRoles(requestRoles []workspace.Role) *Builder { + b.p.requestRoles = slices.Clone(requestRoles) + return b +} diff --git a/asset/assetdomain/project/builder_test.go b/asset/assetdomain/project/builder_test.go new file mode 100644 index 0000000..d2c33b1 --- /dev/null +++ b/asset/assetdomain/project/builder_test.go @@ -0,0 +1,296 @@ +package project + +import ( + "net/url" + "reflect" + 
"testing" + "time" + + "github.com/reearth/reearthx/account/accountdomain" + "github.com/reearth/reearthx/account/accountdomain/workspace" + "github.com/stretchr/testify/assert" +) + +func TestNew(t *testing.T) { + var tb = New() + assert.NotNil(t, tb) +} + +func TestBuilder_ID(t *testing.T) { + var tb = New() + res := tb.ID(NewID()).MustBuild() + assert.NotNil(t, res.ID()) +} + +func TestBuilder_Name(t *testing.T) { + var tb = New().NewID() + res := tb.Name("foo").MustBuild() + assert.Equal(t, "foo", res.Name()) +} + +func TestBuilder_NewID(t *testing.T) { + var tb = New() + res := tb.NewID().MustBuild() + assert.NotNil(t, res.ID()) +} + +func TestBuilder_Alias(t *testing.T) { + var tb = New().NewID() + res := tb.Alias("xxxxx").MustBuild() + assert.Equal(t, "xxxxx", res.Alias()) +} + +func TestBuilder_Description(t *testing.T) { + var tb = New().NewID() + res := tb.Description("desc").MustBuild() + assert.Equal(t, "desc", res.Description()) +} + +func TestBuilder_ImageURL(t *testing.T) { + tests := []struct { + name string + image *url.URL + expectedNil bool + }{ + { + name: "image not nil", + image: &url.URL{}, + expectedNil: false, + }, + { + name: "image is nil", + image: nil, + expectedNil: true, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + tb := New().NewID() + res := tb.ImageURL(tt.image).MustBuild() + if res.imageURL == nil { + assert.True(t, tt.expectedNil) + } else { + assert.False(t, tt.expectedNil) + } + }) + } +} + +func TestBuilder_Team(t *testing.T) { + var tb = New().NewID() + res := tb.Workspace(accountdomain.NewWorkspaceID()).MustBuild() + assert.NotNil(t, res.Workspace()) +} + +func TestBuilder_UpdatedAt(t *testing.T) { + var tb = New().NewID() + d := time.Date(1900, 1, 1, 00, 00, 0, 1, time.UTC) + res := tb.UpdatedAt(d).MustBuild() + assert.True(t, reflect.DeepEqual(res.UpdatedAt(), d)) +} + +func TestBuilder_Publication(t *testing.T) { + var tb = New().NewID() + p := &Publication{} + 
res := tb.Publication(p) + assert.Equal(t, &Builder{ + p: &Project{id: tb.p.id, publication: p}, + }, res) +} + +func TestBuilder_RequestRoles(t *testing.T) { + var tb = New().NewID() + r := []workspace.Role{workspace.RoleOwner, workspace.RoleMaintainer} + res := tb.RequestRoles(r) + assert.Equal(t, &Builder{ + p: &Project{id: tb.p.id, requestRoles: r}, + }, res) +} + +func TestBuilder_Build(t *testing.T) { + d := time.Date(1900, 1, 1, 00, 00, 0, 1, time.UTC) + i, _ := url.Parse("ttt://xxx.aa/") + pid := NewID() + tid := accountdomain.NewWorkspaceID() + + type args struct { + name, description string + alias string + id ID + updatedAt time.Time + imageURL *url.URL + team accountdomain.WorkspaceID + } + + tests := []struct { + name string + args args + expected *Project + err error + }{ + { + name: "build normal project", + args: args{ + name: "xxx.aaa", + description: "ddd", + alias: "aaaaa", + id: pid, + updatedAt: d, + imageURL: i, + team: tid, + }, + expected: &Project{ + id: pid, + description: "ddd", + name: "xxx.aaa", + alias: "aaaaa", + updatedAt: d, + imageURL: i, + workspaceID: tid, + }, + }, + { + name: "zero updated at", + args: args{ + id: pid, + }, + expected: &Project{ + id: pid, + updatedAt: pid.Timestamp(), + }, + }, + { + name: "failed invalid id", + err: ErrInvalidID, + }, + { + name: "failed invalid alias", + args: args{ + id: NewID(), + alias: "xxx.aaa", + }, + expected: nil, + err: ErrInvalidAlias, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + p, err := New(). + ID(tt.args.id). + UpdatedAt(tt.args.updatedAt). + Workspace(tt.args.team). + ImageURL(tt.args.imageURL). + Name(tt.args.name). + Alias(tt.args.alias). + UpdatedAt(tt.args.updatedAt). + Description(tt.args.description). 
+ Build() + + if tt.err == nil { + assert.Equal(t, tt.expected, p) + } else { + assert.Equal(t, tt.err, err) + } + }) + } +} + +func TestBuilder_MustBuild(t *testing.T) { + d := time.Date(1900, 1, 1, 00, 00, 0, 1, time.UTC) + i, _ := url.Parse("ttt://xxx.aa/") + pid := NewID() + tid := accountdomain.NewWorkspaceID() + + type args struct { + name, description string + alias string + id ID + updatedAt time.Time + imageURL *url.URL + team accountdomain.WorkspaceID + } + + tests := []struct { + name string + args args + expected *Project + err error + }{ + { + name: "build normal project", + args: args{ + name: "xxx.aaa", + description: "ddd", + alias: "aaaaa", + id: pid, + updatedAt: d, + imageURL: i, + team: tid, + }, + expected: &Project{ + id: pid, + description: "ddd", + name: "xxx.aaa", + alias: "aaaaa", + updatedAt: d, + imageURL: i, + workspaceID: tid, + }, + }, + { + name: "zero updated at", + args: args{ + id: pid, + }, + expected: &Project{ + id: pid, + updatedAt: pid.Timestamp(), + }, + }, + { + name: "failed invalid id", + err: ErrInvalidID, + }, + { + name: "failed invalid alias", + args: args{ + id: NewID(), + alias: "xxx.aaa", + }, + err: ErrInvalidAlias, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + build := func() *Project { + t.Helper() + return New(). + ID(tt.args.id). + UpdatedAt(tt.args.updatedAt). + Workspace(tt.args.team). + ImageURL(tt.args.imageURL). + Name(tt.args.name). + Alias(tt.args.alias). + UpdatedAt(tt.args.updatedAt). + Description(tt.args.description). 
+ MustBuild() + } + + if tt.err != nil { + assert.PanicsWithValue(t, tt.err, func() { _ = build() }) + } else { + assert.Equal(t, tt.expected, build()) + } + }) + } +} diff --git a/asset/assetdomain/project/id.go b/asset/assetdomain/project/id.go new file mode 100644 index 0000000..eedd750 --- /dev/null +++ b/asset/assetdomain/project/id.go @@ -0,0 +1,40 @@ +package project + +import ( + "github.com/reearth/reearthx/account/accountdomain" + id "github.com/reearth/reearthx/asset/assetdomain" + "github.com/reearth/reearthx/idx" + "github.com/samber/lo" +) + +type ID = id.ProjectID +type WorkspaceID = id.WorkspaceID + +type IDList = id.ProjectIDList + +var NewID = id.NewProjectID +var NewWorkspaceID = accountdomain.NewWorkspaceID + +var MustID = id.MustProjectID +var MustWorkspaceID = id.MustWorkspaceID + +var IDFrom = id.ProjectIDFrom +var WorkspaceIDFrom = id.WorkspaceIDFrom + +var IDFromRef = id.ProjectIDFromRef +var WorkspaceIDFromRef = id.WorkspaceIDFromRef + +var ErrInvalidID = idx.ErrInvalidID + +type IDOrAlias string + +func (i IDOrAlias) ID() *ID { + return IDFromRef(lo.ToPtr(string(i))) +} + +func (i IDOrAlias) Alias() *string { + if string(i) != "" && i.ID() == nil { + return lo.ToPtr(string(i)) + } + return nil +} diff --git a/asset/assetdomain/project/id_test.go b/asset/assetdomain/project/id_test.go new file mode 100644 index 0000000..4db70b3 --- /dev/null +++ b/asset/assetdomain/project/id_test.go @@ -0,0 +1,16 @@ +package project + +import ( + "testing" + + "github.com/samber/lo" + "github.com/stretchr/testify/assert" +) + +func TestIDOrAlias(t *testing.T) { + i := NewID() + assert.Equal(t, &i, IDOrAlias(i.String()).ID()) + assert.Empty(t, IDOrAlias(i.String()).Alias()) + assert.Nil(t, IDOrAlias("aaa").ID()) + assert.Equal(t, lo.ToPtr("aaa"), IDOrAlias("aaa").Alias()) +} diff --git a/asset/assetdomain/project/list.go b/asset/assetdomain/project/list.go new file mode 100644 index 0000000..fab2517 --- /dev/null +++ b/asset/assetdomain/project/list.go @@ 
-0,0 +1,20 @@ +package project + +import ( + "github.com/reearth/reearthx/util" + "golang.org/x/exp/slices" +) + +type List []*Project + +func (l List) SortByID() List { + m := slices.Clone(l) + slices.SortFunc(m, func(a, b *Project) int { + return a.ID().Compare(b.ID()) + }) + return m +} + +func (l List) Clone() List { + return util.Map(l, func(p *Project) *Project { return p.Clone() }) +} diff --git a/asset/assetdomain/project/list_test.go b/asset/assetdomain/project/list_test.go new file mode 100644 index 0000000..4881272 --- /dev/null +++ b/asset/assetdomain/project/list_test.go @@ -0,0 +1,36 @@ +package project + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestList_SortByID(t *testing.T) { + id1 := NewID() + id2 := NewID() + + list := List{ + &Project{id: id2}, + &Project{id: id1}, + } + res := list.SortByID() + assert.Equal(t, List{ + &Project{id: id1}, + &Project{id: id2}, + }, res) + // test whether original list is not modified + assert.Equal(t, List{ + &Project{id: id2}, + &Project{id: id1}, + }, list) +} + +func TestList_Clone(t *testing.T) { + p := New().NewID().Name("a").MustBuild() + + list := List{p} + got := list.Clone() + assert.Equal(t, list, got) + assert.NotSame(t, list[0], got[0]) +} diff --git a/asset/assetdomain/project/project.go b/asset/assetdomain/project/project.go new file mode 100644 index 0000000..a38964b --- /dev/null +++ b/asset/assetdomain/project/project.go @@ -0,0 +1,141 @@ +package project + +import ( + "net/url" + "regexp" + "time" + + "github.com/reearth/reearthx/account/accountdomain" + "github.com/reearth/reearthx/account/accountdomain/workspace" + "github.com/reearth/reearthx/i18n" + "github.com/reearth/reearthx/rerror" + "github.com/reearth/reearthx/util" + "golang.org/x/exp/slices" +) + +var ( + ErrInvalidAlias error = rerror.NewE(i18n.T("invalid alias")) + aliasRegexp = regexp.MustCompile("^[a-zA-Z0-9_-]{5,32}$") +) + +type Project struct { + id ID + workspaceID accountdomain.WorkspaceID + 
name string + description string + alias string + imageURL *url.URL + updatedAt time.Time + publication *Publication + requestRoles []workspace.Role +} + +func (p *Project) ID() ID { + return p.id +} + +func (p *Project) UpdatedAt() time.Time { + return p.updatedAt +} + +func (p *Project) Name() string { + return p.name +} + +func (p *Project) Description() string { + return p.description +} + +func (p *Project) Alias() string { + return p.alias +} + +func (p *Project) ImageURL() *url.URL { + if p == nil || p.imageURL == nil { + return nil + } + // https://github.com/golang/go/issues/38351 + imageURL2 := *p.imageURL + return &imageURL2 +} + +func (p *Project) Workspace() accountdomain.WorkspaceID { + return p.workspaceID +} + +func (p *Project) CreatedAt() time.Time { + return p.id.Timestamp() +} + +func (p *Project) Publication() *Publication { + return p.publication +} + +func (p *Project) RequestRoles() []workspace.Role { + return p.requestRoles +} + +func (p *Project) SetUpdatedAt(updatedAt time.Time) { + p.updatedAt = updatedAt +} + +func (p *Project) SetImageURL(imageURL *url.URL) { + if imageURL == nil { + p.imageURL = nil + } else { + // https://github.com/golang/go/issues/38351 + imageURL2 := *imageURL + p.imageURL = &imageURL2 + } +} + +func (p *Project) SetPublication(publication *Publication) { + p.publication = publication +} + +func (p *Project) UpdateName(name string) { + p.name = name +} + +func (p *Project) UpdateDescription(description string) { + p.description = description +} + +func (p *Project) SetRequestRoles(sr []workspace.Role) { + p.requestRoles = slices.Clone(sr) +} + +func (p *Project) UpdateAlias(alias string) error { + if CheckAliasPattern(alias) { + p.alias = alias + } else { + return ErrInvalidAlias + } + return nil +} + +func (p *Project) UpdateTeam(team accountdomain.WorkspaceID) { + p.workspaceID = team +} + +func (p *Project) Clone() *Project { + if p == nil { + return nil + } + + return &Project{ + id: p.id.Clone(), + 
workspaceID: p.workspaceID.Clone(), + name: p.name, + description: p.description, + alias: p.alias, + imageURL: util.CopyURL(p.imageURL), + updatedAt: p.updatedAt, + publication: p.publication.Clone(), + requestRoles: p.requestRoles, + } +} + +func CheckAliasPattern(alias string) bool { + return alias != "" && aliasRegexp.Match([]byte(alias)) +} diff --git a/asset/assetdomain/project/project_test.go b/asset/assetdomain/project/project_test.go new file mode 100644 index 0000000..4cacf81 --- /dev/null +++ b/asset/assetdomain/project/project_test.go @@ -0,0 +1,160 @@ +package project + +import ( + "net/url" + "testing" + "time" + + "github.com/reearth/reearthx/account/accountdomain" + "github.com/reearth/reearthx/account/accountdomain/workspace" + "github.com/stretchr/testify/assert" +) + +func TestCheckAliasPattern(t *testing.T) { + testCase := []struct { + name, alias string + expexted bool + }{ + { + name: "accepted regex", + alias: "xxxxx", + expexted: true, + }, + { + name: "refused regex", + alias: "xxx", + expexted: false, + }, + } + + for _, tt := range testCase { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.expexted, CheckAliasPattern(tt.alias)) + }) + } +} + +func TestProject_SetUpdatedAt(t *testing.T) { + p := &Project{} + p.SetUpdatedAt(time.Date(1900, 1, 1, 00, 00, 1, 1, time.UTC)) + assert.Equal(t, time.Date(1900, 1, 1, 00, 00, 1, 1, time.UTC), p.UpdatedAt()) +} + +func TestProject_SetImageURL(t *testing.T) { + testCase := []struct { + name string + image *url.URL + p *Project + expectedNil bool + }{ + { + name: "nil image", + image: nil, + p: &Project{}, + expectedNil: true, + }, + { + name: "set new image", + image: &url.URL{}, + p: &Project{}, + expectedNil: false, + }, + } + + for _, tt := range testCase { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + tt.p.SetImageURL(tt.image) + if tt.expectedNil { + assert.Nil(t, tt.p.ImageURL()) + } else { + assert.NotNil(t, tt.p.ImageURL()) + } + }) + } 
+} + +func TestProject_UpdateName(t *testing.T) { + p := &Project{} + p.UpdateName("foo") + assert.Equal(t, "foo", p.Name()) +} + +func TestProject_Publication(t *testing.T) { + p := &Project{} + pp := &Publication{ + scope: PublicationScopePublic, + assetPublic: true, + } + p.SetPublication(pp) + assert.Equal(t, pp, p.Publication()) +} + +func TestProject_UpdateDescription(t *testing.T) { + p := &Project{} + p.UpdateDescription("aaa") + assert.Equal(t, "aaa", p.Description()) +} + +func TestProject_UpdateTeam(t *testing.T) { + p := &Project{} + p.UpdateTeam(accountdomain.NewWorkspaceID()) + assert.NotNil(t, p.Workspace()) +} + +func TestProject_SetRequestRoles(t *testing.T) { + p := &Project{} + r := []workspace.Role{workspace.RoleOwner, workspace.RoleMaintainer} + p.SetRequestRoles(r) + assert.Equal(t, p.RequestRoles(), r) +} + +func TestProject_UpdateAlias(t *testing.T) { + tests := []struct { + name, a string + expected string + err error + }{ + { + name: "accepted alias", + a: "xxxxx", + expected: "xxxxx", + err: nil, + }, + { + name: "fail: invalid alias", + a: "xxx", + expected: "", + err: ErrInvalidAlias, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + p := &Project{} + err := p.UpdateAlias(tt.a) + if tt.err == nil { + assert.Equal(t, tt.expected, p.Alias()) + } else { + assert.Equal(t, tt.err, err) + } + }) + } +} + +func TestProject_Clone(t *testing.T) { + pub := &Publication{} + r := []workspace.Role{workspace.RoleOwner, workspace.RoleMaintainer} + p := New().NewID().Name("a").Publication(pub).RequestRoles(r).MustBuild() + + got := p.Clone() + assert.Equal(t, p, got) + assert.NotSame(t, p, got) + assert.NotSame(t, p, got.publication) + assert.Nil(t, (*Project)(nil).Clone()) +} diff --git a/asset/assetdomain/project/publication.go b/asset/assetdomain/project/publication.go new file mode 100644 index 0000000..3ef30a6 --- /dev/null +++ b/asset/assetdomain/project/publication.go @@ -0,0 +1,55 @@ 
+package project + +const ( + PublicationScopePrivate PublicationScope = "private" + PublicationScopeLimited PublicationScope = "limited" + PublicationScopePublic PublicationScope = "public" +) + +type PublicationScope string + +type Publication struct { + scope PublicationScope + assetPublic bool +} + +func NewPublication(scope PublicationScope, assetPublic bool) *Publication { + p := &Publication{} + p.SetScope(scope) + p.SetAssetPublic(assetPublic) + return p +} + +func (p *Publication) Scope() PublicationScope { + if p.scope == "" { + return PublicationScopePrivate + } + return p.scope +} + +func (p *Publication) AssetPublic() bool { + return p.assetPublic +} + +func (p *Publication) SetScope(scope PublicationScope) { + if scope != PublicationScopePrivate && scope != PublicationScopeLimited && scope != PublicationScopePublic { + scope = PublicationScopePrivate + } + + p.scope = scope +} + +func (p *Publication) SetAssetPublic(assetPublic bool) { + p.assetPublic = assetPublic +} + +func (p *Publication) Clone() *Publication { + if p == nil { + return nil + } + + return &Publication{ + scope: p.scope, + assetPublic: p.assetPublic, + } +} diff --git a/asset/assetdomain/project/publication_test.go b/asset/assetdomain/project/publication_test.go new file mode 100644 index 0000000..b09a1fd --- /dev/null +++ b/asset/assetdomain/project/publication_test.go @@ -0,0 +1,94 @@ +package project + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestNewPublication(t *testing.T) { + assert.Equal(t, &Publication{ + scope: PublicationScopePrivate, + assetPublic: false, + }, NewPublication(PublicationScopePrivate, false)) + assert.Equal(t, &Publication{ + scope: PublicationScopeLimited, + assetPublic: true, + }, NewPublication(PublicationScopeLimited, true)) + assert.Equal(t, &Publication{ + scope: PublicationScopePublic, + assetPublic: false, + }, NewPublication(PublicationScopePublic, false)) + assert.Equal(t, &Publication{ + scope: 
PublicationScopePrivate, + assetPublic: true, + }, NewPublication("", true)) +} + +func TestPublication_Scope(t *testing.T) { + assert.Equal(t, PublicationScopePrivate, (&Publication{}).Scope()) + assert.Equal(t, PublicationScopePublic, (&Publication{scope: PublicationScopePublic}).Scope()) +} + +func TestPublication_AssetPublic(t *testing.T) { + assert.True(t, (&Publication{assetPublic: true}).AssetPublic()) +} + +func TestPublication_SetScope(t *testing.T) { + p := &Publication{ + scope: PublicationScopePublic, + } + p.SetScope(PublicationScopePrivate) + assert.Equal(t, &Publication{ + scope: PublicationScopePrivate, + }, p) + + p = &Publication{} + p.SetScope(PublicationScopeLimited) + assert.Equal(t, &Publication{ + scope: PublicationScopeLimited, + }, p) + + p = &Publication{} + p.SetScope(PublicationScopePublic) + assert.Equal(t, &Publication{ + scope: PublicationScopePublic, + }, p) + + p = &Publication{ + scope: PublicationScopePublic, + } + p.SetScope("") + assert.Equal(t, &Publication{ + scope: PublicationScopePrivate, + }, p) +} + +func TestPublication_SetAssetPublic(t *testing.T) { + p := &Publication{ + assetPublic: false, + } + p.SetAssetPublic(true) + assert.Equal(t, &Publication{ + assetPublic: true, + }, p) + + p = &Publication{ + assetPublic: true, + } + p.SetAssetPublic(false) + assert.Equal(t, &Publication{ + assetPublic: false, + }, p) +} + +func TestPublication_Clone(t *testing.T) { + p := &Publication{ + assetPublic: false, + scope: PublicationScopeLimited, + } + p2 := p.Clone() + assert.Equal(t, p, p2) + assert.NotSame(t, p, p2) + assert.Nil(t, (*Publication)(nil).Clone()) +} diff --git a/asset/assetdomain/task/task.go b/asset/assetdomain/task/task.go new file mode 100644 index 0000000..1a9275f --- /dev/null +++ b/asset/assetdomain/task/task.go @@ -0,0 +1,45 @@ +package task + +import ( + "github.com/reearth/reearthx/asset/assetdomain/event" + "github.com/reearth/reearthx/asset/assetdomain/integration" +) + +type Payload struct { + 
DecompressAsset *DecompressAssetPayload + CompressAsset *CompressAssetPayload + Webhook *WebhookPayload +} + +type DecompressAssetPayload struct { + AssetID string + Path string +} + +func (t *DecompressAssetPayload) Payload() Payload { + return Payload{ + DecompressAsset: t, + } +} + +type CompressAssetPayload struct { + AssetID string +} + +func (t *CompressAssetPayload) Payload() Payload { + return Payload{ + CompressAsset: t, + } +} + +type WebhookPayload struct { + Webhook *integration.Webhook + Event *event.Event[any] + Override any +} + +func (t WebhookPayload) Payload() Payload { + return Payload{ + Webhook: &t, + } +} diff --git a/asset/assetdomain/thread/builder.go b/asset/assetdomain/thread/builder.go new file mode 100644 index 0000000..414c6e6 --- /dev/null +++ b/asset/assetdomain/thread/builder.go @@ -0,0 +1,54 @@ +package thread + +import ( + "github.com/reearth/reearthx/account/accountdomain" + "golang.org/x/exp/slices" +) + +type Builder struct { + th *Thread +} + +func New() *Builder { + return &Builder{th: &Thread{}} +} + +func (b *Builder) Build() (*Thread, error) { + if b.th.id.IsNil() { + return nil, ErrInvalidID + } + + if b.th.workspace.IsNil() { + return nil, ErrNoWorkspaceID + } + + return b.th, nil +} + +func (b *Builder) MustBuild() *Thread { + th, err := b.Build() + if err != nil { + panic(err) + } + return th +} + +func (b *Builder) ID(id ID) *Builder { + b.th.id = id + return b +} + +func (b *Builder) Workspace(wid accountdomain.WorkspaceID) *Builder { + b.th.workspace = wid + return b +} + +func (b *Builder) NewID() *Builder { + b.th.id = NewID() + return b +} + +func (b *Builder) Comments(c []*Comment) *Builder { + b.th.comments = slices.Clone(c) + return b +} diff --git a/asset/assetdomain/thread/builder_test.go b/asset/assetdomain/thread/builder_test.go new file mode 100644 index 0000000..92f8dec --- /dev/null +++ b/asset/assetdomain/thread/builder_test.go @@ -0,0 +1,126 @@ +package thread + +import ( + "testing" + + 
"github.com/reearth/reearthx/account/accountdomain" + "github.com/stretchr/testify/assert" +) + +type Tests []struct { + name string + input Input + want *Thread + err error +} + +type Input struct { + id ID + workspace accountdomain.WorkspaceID + comments []*Comment +} + +func TestBuilder_Build(t *testing.T) { + var thid = NewID() + var wid = accountdomain.NewWorkspaceID() + c := []*Comment{} + + tests := Tests{ + { + name: "should create a thread", + input: Input{ + id: thid, + workspace: wid, + comments: c, + }, + want: &Thread{ + id: thid, + workspace: wid, + comments: c, + }, + }, + { + name: "fail: empty id", + input: Input{}, + err: ErrInvalidID, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := New(). + ID(tt.input.id). + Comments(tt.input.comments). + Workspace(tt.input.workspace). + Build() + if err != tt.err { + assert.Equal(t, tt.want, got) + } + }) + } +} + +func TestBuilder_MustBuild(t *testing.T) { + thid := NewID() + wid := accountdomain.NewWorkspaceID() + c := []*Comment{} + + tests := Tests{ + { + name: "should create a thread", + input: Input{ + id: thid, + workspace: wid, + comments: c, + }, + want: &Thread{ + id: thid, + workspace: wid, + comments: c, + }, + }, + { + name: "fail: empty id", + input: Input{ + workspace: wid, + comments: c, + }, + err: ErrInvalidID, + }, + { + name: "fail: empty workspace id", + input: Input{ + id: thid, + comments: c, + }, + err: ErrNoWorkspaceID, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + build := func() *Thread { + t.Helper() + return New(). + ID(tt.input.id). + Comments(tt.input.comments). + Workspace(tt.input.workspace). 
+ MustBuild() + } + if tt.err != nil { + assert.PanicsWithValue(t, tt.err, func() { _ = build() }) + } else { + assert.Equal(t, tt.want, build()) + } + }) + } +} + +func TestBuilder_NewID(t *testing.T) { + c := []*Comment{} + wid := accountdomain.NewWorkspaceID() + a := New().NewID().Workspace(wid).Comments(c).MustBuild() + assert.False(t, a.id.IsNil()) +} diff --git a/asset/assetdomain/thread/comment.go b/asset/assetdomain/thread/comment.go new file mode 100644 index 0000000..9ec3812 --- /dev/null +++ b/asset/assetdomain/thread/comment.go @@ -0,0 +1,53 @@ +package thread + +import ( + "time" + + "github.com/reearth/reearth-cms/server/pkg/operator" +) + +type Comment struct { + id CommentID + author operator.Operator + content string +} + +func NewComment(id CommentID, author operator.Operator, content string) *Comment { + return &Comment{ + id: id, + author: author, + content: content, + } +} + +func (c *Comment) ID() CommentID { + return c.id +} + +func (c *Comment) Author() operator.Operator { + return c.author +} + +func (c *Comment) Content() string { + return c.content +} + +func (c *Comment) CreatedAt() time.Time { + return c.id.Timestamp() +} + +func (c *Comment) SetContent(content string) { + c.content = content +} + +func (c *Comment) Clone() *Comment { + if c == nil { + return nil + } + + return &Comment{ + id: c.id, + author: c.author, + content: c.content, + } +} diff --git a/asset/assetdomain/thread/comment_test.go b/asset/assetdomain/thread/comment_test.go new file mode 100644 index 0000000..6214c1c --- /dev/null +++ b/asset/assetdomain/thread/comment_test.go @@ -0,0 +1,44 @@ +package thread + +import ( + "testing" + "time" + + "github.com/reearth/reearth-cms/server/pkg/operator" + "github.com/stretchr/testify/assert" +) + +func TestComment_CommentType(t *testing.T) { + cid := NewCommentID() + uid := NewUserID() + c := "xxx" + mocknow := time.Now().Truncate(time.Millisecond) + + got := Comment{ + id: cid, + author: operator.OperatorFromUser(uid), + 
content: c, + } + + assert.Equal(t, cid, got.ID()) + assert.Equal(t, uid, *got.Author().User()) + assert.Equal(t, c, got.Content()) + assert.Equal(t, mocknow, got.CreatedAt()) +} + +func TestComment_SetContent(t *testing.T) { + comment := Comment{} + comment.SetContent("xxx") + assert.Equal(t, "xxx", comment.content) +} + +func TestComment_Clone(t *testing.T) { + comment := (&Comment{ + id: NewCommentID(), + author: operator.OperatorFromUser(NewUserID()), + content: "test", + }) + assert.Nil(t, (*Comment)(nil).Clone()) + assert.Equal(t, comment, comment.Clone()) + assert.NotSame(t, comment, comment.Clone()) +} diff --git a/asset/assetdomain/thread/common.go b/asset/assetdomain/thread/common.go new file mode 100644 index 0000000..2ea6b59 --- /dev/null +++ b/asset/assetdomain/thread/common.go @@ -0,0 +1,12 @@ +package thread + +import ( + "github.com/reearth/reearthx/i18n" + "github.com/reearth/reearthx/rerror" +) + +var ( + ErrNoWorkspaceID = rerror.NewE(i18n.T("workspace id is required")) + ErrCommentAlreadyExist = rerror.NewE(i18n.T("comment already exist in this thread")) + ErrCommentDoesNotExist = rerror.NewE(i18n.T("comment does not exist in this thread")) +) diff --git a/asset/assetdomain/thread/id.go b/asset/assetdomain/thread/id.go new file mode 100644 index 0000000..259ba62 --- /dev/null +++ b/asset/assetdomain/thread/id.go @@ -0,0 +1,34 @@ +package thread + +import ( + "github.com/reearth/reearthx/account/accountdomain" + id "github.com/reearth/reearthx/asset/assetdomain" + "github.com/reearth/reearthx/idx" +) + +type ID = id.ThreadID +type CommentID = id.CommentID +type UserID = accountdomain.UserID +type WorkspaceID = id.WorkspaceID + +var NewID = id.NewThreadID +var NewCommentID = id.NewCommentID +var NewUserID = accountdomain.NewUserID +var NewWorkspaceID = accountdomain.NewWorkspaceID + +var MustID = id.MustThreadID +var MustCommentID = id.MustCommentID +var MustUserID = id.MustUserID +var MustWorkspaceID = id.MustWorkspaceID + +var IDFrom = 
id.ThreadIDFrom +var CommentIDFrom = id.CommentIDFrom +var UserIDFrom = accountdomain.UserIDFrom +var WorkspaceIDFrom = id.WorkspaceIDFrom + +var IDFromRef = id.ThreadIDFromRef +var CommentIDFromRef = id.CommentIDFromRef +var UserIDFromRef = accountdomain.UserIDFromRef +var WorkspaceIDFromRef = id.WorkspaceIDFromRef + +var ErrInvalidID = idx.ErrInvalidID diff --git a/asset/assetdomain/thread/list.go b/asset/assetdomain/thread/list.go new file mode 100644 index 0000000..7c8d6fe --- /dev/null +++ b/asset/assetdomain/thread/list.go @@ -0,0 +1,20 @@ +package thread + +import ( + "github.com/reearth/reearthx/util" + "golang.org/x/exp/slices" +) + +type List []*Thread + +func (l List) SortByID() List { + m := slices.Clone(l) + slices.SortFunc(m, func(a, b *Thread) int { + return a.ID().Compare(b.ID()) + }) + return m +} + +func (l List) Clone() List { + return util.Map(l, func(th *Thread) *Thread { return th.Clone() }) +} diff --git a/asset/assetdomain/thread/list_test.go b/asset/assetdomain/thread/list_test.go new file mode 100644 index 0000000..86b08ff --- /dev/null +++ b/asset/assetdomain/thread/list_test.go @@ -0,0 +1,37 @@ +package thread + +import ( + "testing" + + "github.com/reearth/reearthx/account/accountdomain" + "github.com/stretchr/testify/assert" +) + +func TestList_SortByID(t *testing.T) { + id1 := NewID() + id2 := NewID() + + list := List{ + &Thread{id: id2}, + &Thread{id: id1}, + } + res := list.SortByID() + assert.Equal(t, List{ + &Thread{id: id1}, + &Thread{id: id2}, + }, res) + // test whether original list is not modified + assert.Equal(t, List{ + &Thread{id: id2}, + &Thread{id: id1}, + }, list) +} + +func TestList_Clone(t *testing.T) { + th := New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild() + + list := List{th} + got := list.Clone() + assert.Equal(t, list, got) + assert.NotSame(t, list[0], got[0]) +} diff --git a/asset/assetdomain/thread/thread.go b/asset/assetdomain/thread/thread.go new file mode 100644 index 0000000..18b1933 
--- /dev/null +++ b/asset/assetdomain/thread/thread.go @@ -0,0 +1,94 @@ +package thread + +import ( + "github.com/reearth/reearth-cms/server/pkg/id" + "github.com/reearth/reearthx/account/accountdomain" + "github.com/reearth/reearthx/util" + "github.com/samber/lo" + "golang.org/x/exp/slices" +) + +type Thread struct { + id ID + workspace accountdomain.WorkspaceID + comments []*Comment +} + +func (th *Thread) ID() ID { + return th.id +} + +func (th *Thread) Workspace() accountdomain.WorkspaceID { + return th.workspace +} + +func (th *Thread) Comments() []*Comment { + if th == nil { + return nil + } + return slices.Clone(th.comments) +} + +func (th *Thread) HasComment(cid CommentID) bool { + if th == nil { + return false + } + return lo.SomeBy(th.comments, func(c *Comment) bool { return c.ID() == cid }) +} + +func (th *Thread) AddComment(c *Comment) error { + if th.comments == nil { + th.comments = []*Comment{} + } + if th.HasComment(c.ID()) { + return ErrCommentAlreadyExist + } + + th.comments = append(th.comments, c) + return nil +} + +func (th *Thread) UpdateComment(cid id.CommentID, content string) error { + c, _ := lo.Find(th.comments, func(c *Comment) bool { return c.ID() == cid }) + if c == nil { + return ErrCommentDoesNotExist + } + c.SetContent(content) + return nil +} + +func (th *Thread) DeleteComment(cid id.CommentID) error { + i := slices.IndexFunc(th.Comments(), func(c *Comment) bool { return c.ID() == cid }) + if i < 0 { + return ErrCommentDoesNotExist + } + + comments := append(th.Comments()[:i], th.Comments()[i+1:]...) + th.SetComments(comments...) 
+ return nil +} + +func (th *Thread) Comment(cid id.CommentID) *Comment { + c, _ := lo.Find(th.comments, func(c *Comment) bool { return c.ID() == cid }) + return c +} + +func (th *Thread) SetComments(comments ...*Comment) { + th.comments = slices.Clone(comments) +} + +func (th *Thread) Clone() *Thread { + if th == nil { + return nil + } + + comments := util.Map(th.comments, func(c *Comment) *Comment { + return c.Clone() + }) + + return &Thread{ + id: th.id.Clone(), + workspace: th.workspace.Clone(), + comments: comments, + } +} diff --git a/asset/assetdomain/thread/thread_test.go b/asset/assetdomain/thread/thread_test.go new file mode 100644 index 0000000..650751f --- /dev/null +++ b/asset/assetdomain/thread/thread_test.go @@ -0,0 +1,137 @@ +package thread + +import ( + "testing" + + "github.com/reearth/reearth-cms/server/pkg/id" + "github.com/reearth/reearth-cms/server/pkg/operator" + "github.com/reearth/reearthx/account/accountdomain" + "github.com/stretchr/testify/assert" +) + +func TestThread_Getters(t *testing.T) { + thid := NewID() + wid := accountdomain.NewWorkspaceID() + c := []*Comment{} + + got := Thread{ + id: thid, + workspace: wid, + comments: c, + } + + assert.Equal(t, thid, got.ID()) + assert.Equal(t, wid, got.Workspace()) + assert.Equal(t, c, got.Comments()) +} + +func TestThread_Comments(t *testing.T) { + var got *Thread = nil + assert.Nil(t, got.Comments()) + + c := []*Comment{{id: NewCommentID()}} + got = &Thread{ + comments: c, + } + assert.Equal(t, c, got.Comments()) +} + +func TestThread_HasComment(t *testing.T) { + c := NewComment(NewCommentID(), operator.OperatorFromUser(NewUserID()), "test") + thread := &Thread{ + id: NewID(), + workspace: accountdomain.NewWorkspaceID(), + comments: []*Comment{ + {id: NewCommentID()}, c, + }, + } + + ok := thread.HasComment(c.id) + assert.True(t, ok) + + ok = thread.HasComment(id.NewCommentID()) + assert.False(t, ok) + + thread = nil + ok = thread.HasComment(c.id) + assert.False(t, ok) +} + +func 
TestThread_AddComment(t *testing.T) { + thread := &Thread{ + id: NewID(), + workspace: accountdomain.NewWorkspaceID(), + } + c := NewComment(NewCommentID(), operator.OperatorFromUser(NewUserID()), "test") + err := thread.AddComment(c) + assert.NoError(t, err) + assert.True(t, thread.HasComment(c.id)) + + err = thread.AddComment(c) + assert.ErrorIs(t, err, ErrCommentAlreadyExist) +} + +func TestThread_UpdateComment(t *testing.T) { + c := NewComment(NewCommentID(), operator.OperatorFromUser(NewUserID()), "test") + thread := &Thread{ + id: NewID(), + workspace: accountdomain.NewWorkspaceID(), + comments: []*Comment{ + {id: NewCommentID()}, c, + }, + } + + err := thread.UpdateComment(NewCommentID(), "updated") + assert.ErrorIs(t, err, ErrCommentDoesNotExist) + + err = thread.UpdateComment(c.id, "updated") + assert.NoError(t, err) + assert.Equal(t, "updated", c.content) + +} + +func TestThread_DeleteComment(t *testing.T) { + c := NewComment(NewCommentID(), operator.OperatorFromUser(NewUserID()), "test") + thread := &Thread{ + id: NewID(), + workspace: accountdomain.NewWorkspaceID(), + comments: []*Comment{ + {id: NewCommentID()}, c, + }, + } + + err := thread.DeleteComment(NewCommentID()) + assert.ErrorIs(t, err, ErrCommentDoesNotExist) + + err = thread.DeleteComment(c.id) + assert.NoError(t, err) + assert.False(t, thread.HasComment(c.id)) +} + +func TestThread_Comment(t *testing.T) { + c := NewComment(NewCommentID(), operator.OperatorFromUser(NewUserID()), "test") + thread := &Thread{ + id: NewID(), + workspace: accountdomain.NewWorkspaceID(), + comments: []*Comment{ + {id: NewCommentID()}, c, + }, + } + + cc := thread.Comment(c.id) + assert.Equal(t, c, cc) + +} + +func TestThread_Clone(t *testing.T) { + thread := &Thread{ + id: NewID(), + workspace: accountdomain.NewWorkspaceID(), + comments: []*Comment{ + {id: NewCommentID()}, + }, + } + assert.Nil(t, (*Thread)(nil).Clone()) + assert.Equal(t, thread, thread.Clone()) + assert.NotSame(t, thread, thread.Clone()) +} From 
6873e1acc7d1048cb9acea3175e05694122d0abe Mon Sep 17 00:00:00 2001 From: shumon84 Date: Mon, 9 Sep 2024 02:34:22 +0900 Subject: [PATCH 03/10] feat(asset): add assetinterfaces package --- asset/assetusecase/assetinterfaces/asset.go | 70 ++++++++++++++++++++ asset/assetusecase/assetinterfaces/common.go | 20 ++++++ 2 files changed, 90 insertions(+) create mode 100644 asset/assetusecase/assetinterfaces/asset.go create mode 100644 asset/assetusecase/assetinterfaces/common.go diff --git a/asset/assetusecase/assetinterfaces/asset.go b/asset/assetusecase/assetinterfaces/asset.go new file mode 100644 index 0000000..b05ce5a --- /dev/null +++ b/asset/assetusecase/assetinterfaces/asset.go @@ -0,0 +1,70 @@ +package assetinterfaces + +import ( + "context" + id "github.com/reearth/reearthx/asset/assetdomain" + "github.com/reearth/reearthx/asset/assetdomain/asset" + "github.com/reearth/reearthx/asset/assetdomain/file" + usecase "github.com/reearth/reearthx/asset/assetusecase" + "github.com/reearth/reearthx/i18n" + "github.com/reearth/reearthx/idx" + "github.com/reearth/reearthx/rerror" + "github.com/reearth/reearthx/usecasex" +) + +type AssetFilterType string + +type CreateAssetParam struct { + ProjectID idx.ID[id.Project] + File *file.File + Token string + SkipDecompression bool +} + +type UpdateAssetParam struct { + AssetID idx.ID[id.Asset] + PreviewType *asset.PreviewType +} + +type CreateAssetUploadParam struct { + ProjectID idx.ID[id.Project] + + Filename string + ContentLength int64 + + Cursor string +} + +var ( + ErrCreateAssetFailed error = rerror.NewE(i18n.T("failed to create asset")) + ErrFileNotIncluded error = rerror.NewE(i18n.T("file not included")) +) + +type AssetFilter struct { + Sort *usecasex.Sort + Keyword *string + Pagination *usecasex.Pagination +} + +type AssetUpload struct { + URL string + UUID string + ContentType string + ContentLength int64 + Next string +} + +type Asset interface { + FindByID(context.Context, id.AssetID, *usecase.Operator) (*asset.Asset, 
error) + FindByIDs(context.Context, []id.AssetID, *usecase.Operator) (asset.List, error) + FindByProject(context.Context, id.ProjectID, AssetFilter, *usecase.Operator) (asset.List, *usecasex.PageInfo, error) + FindFileByID(context.Context, id.AssetID, *usecase.Operator) (*asset.File, error) + GetURL(*asset.Asset) string + Create(context.Context, CreateAssetParam, *usecase.Operator) (*asset.Asset, *asset.File, error) + Update(context.Context, UpdateAssetParam, *usecase.Operator) (*asset.Asset, error) + UpdateFiles(context.Context, id.AssetID, *asset.ArchiveExtractionStatus, *usecase.Operator) (*asset.Asset, error) + Delete(context.Context, id.AssetID, *usecase.Operator) (id.AssetID, error) + DecompressByID(context.Context, id.AssetID, *usecase.Operator) (*asset.Asset, error) + CreateUpload(context.Context, CreateAssetUploadParam, *usecase.Operator) (*AssetUpload, error) + RetryDecompression(context.Context, string) error +} diff --git a/asset/assetusecase/assetinterfaces/common.go b/asset/assetusecase/assetinterfaces/common.go new file mode 100644 index 0000000..2f873bd --- /dev/null +++ b/asset/assetusecase/assetinterfaces/common.go @@ -0,0 +1,20 @@ +package assetinterfaces + +import ( + "github.com/reearth/reearthx/account/accountusecase/accountinterfaces" + "github.com/reearth/reearthx/i18n" + "github.com/reearth/reearthx/rerror" +) + +type ListOperation string + +var ( + ErrOperationDenied error = rerror.NewE(i18n.T("operation denied")) + ErrInvalidOperator error = rerror.NewE(i18n.T("invalid operator")) +) + +type Container struct { + Asset Asset + Workspace accountinterfaces.Workspace + User accountinterfaces.User +} From 7d7ca2d10cf761cf65e1087373e77c9c055ea298 Mon Sep 17 00:00:00 2001 From: shumon84 Date: Mon, 9 Sep 2024 02:34:49 +0900 Subject: [PATCH 04/10] feat(asset): add assetrepo package --- asset/assetusecase/assetrepo/asset.go | 30 ++++ asset/assetusecase/assetrepo/asset_upload.go | 12 ++ asset/assetusecase/assetrepo/container.go | 140 
+++++++++++++++++++ asset/assetusecase/assetrepo/event.go | 13 ++ asset/assetusecase/assetrepo/intgration.go | 18 +++ asset/assetusecase/assetrepo/project.go | 22 +++ asset/assetusecase/assetrepo/thread.go | 21 +++ 7 files changed, 256 insertions(+) create mode 100644 asset/assetusecase/assetrepo/asset.go create mode 100644 asset/assetusecase/assetrepo/asset_upload.go create mode 100644 asset/assetusecase/assetrepo/container.go create mode 100644 asset/assetusecase/assetrepo/event.go create mode 100644 asset/assetusecase/assetrepo/intgration.go create mode 100644 asset/assetusecase/assetrepo/project.go create mode 100644 asset/assetusecase/assetrepo/thread.go diff --git a/asset/assetusecase/assetrepo/asset.go b/asset/assetusecase/assetrepo/asset.go new file mode 100644 index 0000000..e353083 --- /dev/null +++ b/asset/assetusecase/assetrepo/asset.go @@ -0,0 +1,30 @@ +package assetrepo + +import ( + "context" + + id "github.com/reearth/reearthx/asset/assetdomain" + "github.com/reearth/reearthx/asset/assetdomain/asset" + "github.com/reearth/reearthx/usecasex" +) + +type AssetFilter struct { + Sort *usecasex.Sort + Keyword *string + Pagination *usecasex.Pagination +} + +type Asset interface { + Filtered(ProjectFilter) Asset + FindByProject(context.Context, id.ProjectID, AssetFilter) ([]*asset.Asset, *usecasex.PageInfo, error) + FindByID(context.Context, id.AssetID) (*asset.Asset, error) + FindByIDs(context.Context, id.AssetIDList) ([]*asset.Asset, error) + Save(context.Context, *asset.Asset) error + Delete(context.Context, id.AssetID) error +} + +type AssetFile interface { + FindByID(context.Context, id.AssetID) (*asset.File, error) + Save(context.Context, id.AssetID, *asset.File) error + SaveFlat(context.Context, id.AssetID, *asset.File, []*asset.File) error +} diff --git a/asset/assetusecase/assetrepo/asset_upload.go b/asset/assetusecase/assetrepo/asset_upload.go new file mode 100644 index 0000000..2a724cf --- /dev/null +++ 
b/asset/assetusecase/assetrepo/asset_upload.go @@ -0,0 +1,12 @@ +package assetrepo + +import ( + "context" + + "github.com/reearth/reearthx/asset/assetdomain/asset" +) + +type AssetUpload interface { + Save(ctx context.Context, upload *asset.Upload) error + FindByID(ctx context.Context, uuid string) (*asset.Upload, error) +} diff --git a/asset/assetusecase/assetrepo/container.go b/asset/assetusecase/assetrepo/container.go new file mode 100644 index 0000000..f536cfa --- /dev/null +++ b/asset/assetusecase/assetrepo/container.go @@ -0,0 +1,140 @@ +package assetrepo + +import ( + "github.com/reearth/reearthx/account/accountdomain" + "github.com/reearth/reearthx/account/accountdomain/user" + "github.com/reearth/reearthx/account/accountusecase/accountrepo" + "github.com/reearth/reearthx/asset/assetdomain/project" + "github.com/reearth/reearthx/asset/assetusecase" + "github.com/reearth/reearthx/i18n" + "github.com/reearth/reearthx/rerror" + "github.com/reearth/reearthx/usecasex" +) + +type Container struct { + Asset Asset + AssetFile AssetFile + AssetUpload AssetUpload + User accountrepo.User + Workspace accountrepo.Workspace + Integration Integration + Project Project + Thread Thread + Event Event + Transaction usecasex.Transaction +} + +var ( + ErrOperationDenied = rerror.NewE(i18n.T("operation denied")) +) + +func (c *Container) Filtered(workspace WorkspaceFilter, project ProjectFilter) *Container { + if c == nil { + return c + } + return &Container{ + Asset: c.Asset.Filtered(project), + AssetFile: c.AssetFile, + AssetUpload: c.AssetUpload, + Transaction: c.Transaction, + Project: c.Project.Filtered(workspace), + } +} + +type WorkspaceFilter struct { + Readable user.WorkspaceIDList + Writable user.WorkspaceIDList +} + +func WorkspaceFilterFromOperator(o *assetusecase.Operator) WorkspaceFilter { + return WorkspaceFilter{ + Readable: o.AllReadableWorkspaces(), + Writable: o.AllWritableWorkspaces(), + } +} + +func (f WorkspaceFilter) Clone() WorkspaceFilter { + return 
WorkspaceFilter{ + Readable: f.Readable.Clone(), + Writable: f.Writable.Clone(), + } +} + +func (f WorkspaceFilter) Merge(g WorkspaceFilter) WorkspaceFilter { + var r, w user.WorkspaceIDList + if f.Readable != nil || g.Readable != nil { + if f.Readable == nil { + r = g.Readable.Clone() + } else { + r = append(f.Readable, g.Readable...) + } + } + if f.Writable != nil || g.Writable != nil { + if f.Writable == nil { + w = g.Writable.Clone() + } else { + w = append(f.Writable, g.Writable...) + } + } + return WorkspaceFilter{ + Readable: r, + Writable: w, + } +} + +func (f WorkspaceFilter) CanRead(id accountdomain.WorkspaceID) bool { + return f.Readable == nil || f.Readable.Has(id) || f.CanWrite(id) +} + +func (f WorkspaceFilter) CanWrite(id accountdomain.WorkspaceID) bool { + return f.Writable == nil || f.Writable.Has(id) +} + +type ProjectFilter struct { + Readable project.IDList + Writable project.IDList +} + +func ProjectFilterFromOperator(o *assetusecase.Operator) ProjectFilter { + return ProjectFilter{ + Readable: o.AllReadableProjects(), + Writable: o.AllWritableProjects(), + } +} + +func (f ProjectFilter) Clone() ProjectFilter { + return ProjectFilter{ + Readable: f.Readable.Clone(), + Writable: f.Writable.Clone(), + } +} + +func (f ProjectFilter) Merge(g ProjectFilter) ProjectFilter { + var r, w project.IDList + if f.Readable != nil || g.Readable != nil { + if f.Readable == nil { + r = g.Readable.Clone() + } else { + r = append(f.Readable, g.Readable...) + } + } + if f.Writable != nil || g.Writable != nil { + if f.Writable == nil { + w = g.Writable.Clone() + } else { + w = append(f.Writable, g.Writable...) + } + } + return ProjectFilter{ + Readable: r, + Writable: w, + } +} + +func (f ProjectFilter) CanRead(ids ...project.ID) bool { + return f.Readable == nil || f.Readable.Has(ids...) || f.CanWrite(ids...) +} + +func (f ProjectFilter) CanWrite(ids ...project.ID) bool { + return f.Writable == nil || f.Writable.Has(ids...) 
+} diff --git a/asset/assetusecase/assetrepo/event.go b/asset/assetusecase/assetrepo/event.go new file mode 100644 index 0000000..a8bf6c1 --- /dev/null +++ b/asset/assetusecase/assetrepo/event.go @@ -0,0 +1,13 @@ +package assetrepo + +import ( + "context" + + id "github.com/reearth/reearthx/asset/assetdomain" + "github.com/reearth/reearthx/asset/assetdomain/event" +) + +type Event interface { + FindByID(context.Context, id.EventID) (*event.Event[any], error) + Save(context.Context, *event.Event[any]) error +} diff --git a/asset/assetusecase/assetrepo/intgration.go b/asset/assetusecase/assetrepo/intgration.go new file mode 100644 index 0000000..8283a3b --- /dev/null +++ b/asset/assetusecase/assetrepo/intgration.go @@ -0,0 +1,18 @@ +package assetrepo + +import ( + "context" + + "github.com/reearth/reearthx/account/accountdomain" + id "github.com/reearth/reearthx/asset/assetdomain" + "github.com/reearth/reearthx/asset/assetdomain/integration" +) + +type Integration interface { + FindByIDs(context.Context, id.IntegrationIDList) (integration.List, error) + FindByUser(context.Context, accountdomain.UserID) (integration.List, error) + FindByID(context.Context, id.IntegrationID) (*integration.Integration, error) + FindByToken(context.Context, string) (*integration.Integration, error) + Save(context.Context, *integration.Integration) error + Remove(context.Context, id.IntegrationID) error +} diff --git a/asset/assetusecase/assetrepo/project.go b/asset/assetusecase/assetrepo/project.go new file mode 100644 index 0000000..cbbc46e --- /dev/null +++ b/asset/assetusecase/assetrepo/project.go @@ -0,0 +1,22 @@ +package assetrepo + +import ( + "context" + + "github.com/reearth/reearthx/account/accountdomain" + id "github.com/reearth/reearthx/asset/assetdomain" + "github.com/reearth/reearthx/asset/assetdomain/project" + "github.com/reearth/reearthx/usecasex" +) + +type Project interface { + Filtered(filter WorkspaceFilter) Project + FindByIDs(context.Context, id.ProjectIDList) 
(project.List, error) + FindByID(context.Context, id.ProjectID) (*project.Project, error) + FindByIDOrAlias(context.Context, project.IDOrAlias) (*project.Project, error) + FindByWorkspaces(context.Context, accountdomain.WorkspaceIDList, *usecasex.Pagination) (project.List, *usecasex.PageInfo, error) + FindByPublicName(context.Context, string) (*project.Project, error) + CountByWorkspace(context.Context, accountdomain.WorkspaceID) (int, error) + Save(context.Context, *project.Project) error + Remove(context.Context, id.ProjectID) error +} diff --git a/asset/assetusecase/assetrepo/thread.go b/asset/assetusecase/assetrepo/thread.go new file mode 100644 index 0000000..3b5e70a --- /dev/null +++ b/asset/assetusecase/assetrepo/thread.go @@ -0,0 +1,21 @@ +package assetrepo + +import ( + "context" + + id "github.com/reearth/reearthx/asset/assetdomain" + "github.com/reearth/reearthx/asset/assetdomain/thread" + "github.com/reearth/reearthx/i18n" + "github.com/reearth/reearthx/rerror" +) + +var ( + ErrCommentNotFound error = rerror.NewE(i18n.T("comment not found")) +) + +type Thread interface { + Save(context.Context, *thread.Thread) error + Filtered(filter WorkspaceFilter) Thread + FindByID(ctx context.Context, id id.ThreadID) (*thread.Thread, error) + FindByIDs(context.Context, id.ThreadIDList) ([]*thread.Thread, error) +} From d56c2b6cd6d551ea898959ae6f69ffe2e1042008 Mon Sep 17 00:00:00 2001 From: shumon84 Date: Mon, 9 Sep 2024 02:35:17 +0900 Subject: [PATCH 05/10] feat(asset): add assetgateway package --- asset/assetusecase/assetgateway/container.go | 13 ++++ asset/assetusecase/assetgateway/file.go | 58 +++++++++++++++++ .../assetgateway/gatewaymock/task.go | 64 +++++++++++++++++++ asset/assetusecase/assetgateway/genmock.go | 3 + asset/assetusecase/assetgateway/task.go | 12 ++++ 5 files changed, 150 insertions(+) create mode 100644 asset/assetusecase/assetgateway/container.go create mode 100644 asset/assetusecase/assetgateway/file.go create mode 100644 
asset/assetusecase/assetgateway/gatewaymock/task.go create mode 100644 asset/assetusecase/assetgateway/genmock.go create mode 100644 asset/assetusecase/assetgateway/task.go diff --git a/asset/assetusecase/assetgateway/container.go b/asset/assetusecase/assetgateway/container.go new file mode 100644 index 0000000..970dc3f --- /dev/null +++ b/asset/assetusecase/assetgateway/container.go @@ -0,0 +1,13 @@ +package assetgateway + +import ( + "github.com/reearth/reearthx/account/accountusecase/accountgateway" + "github.com/reearth/reearthx/mailer" +) + +type Container struct { + Authenticator accountgateway.Authenticator + File File + Mailer mailer.Mailer + TaskRunner TaskRunner +} diff --git a/asset/assetusecase/assetgateway/file.go b/asset/assetusecase/assetgateway/file.go new file mode 100644 index 0000000..72e9e88 --- /dev/null +++ b/asset/assetusecase/assetgateway/file.go @@ -0,0 +1,58 @@ +package assetgateway + +import ( + "context" + "io" + "mime" + "path" + "time" + + "github.com/reearth/reearthx/asset/assetdomain/asset" + "github.com/reearth/reearthx/asset/assetdomain/file" + "github.com/reearth/reearthx/i18n" + "github.com/reearth/reearthx/rerror" +) + +var ( + ErrInvalidFile error = rerror.NewE(i18n.T("invalid file")) + ErrFailedToUploadFile error = rerror.NewE(i18n.T("failed to upload file")) + ErrFileTooLarge error = rerror.NewE(i18n.T("file too large")) + ErrFailedToDeleteFile error = rerror.NewE(i18n.T("failed to delete file")) + ErrFileNotFound error = rerror.NewE(i18n.T("file not found")) + ErrUnsupportedOperation error = rerror.NewE(i18n.T("unsupported operation")) +) + +type FileEntry struct { + Name string + Size int64 +} + +type UploadAssetLink struct { + URL string + ContentType string + ContentLength int64 + Next string +} + +type IssueUploadAssetParam struct { + UUID string + Filename string + ContentLength int64 + ExpiresAt time.Time + + Cursor string +} + +func (p IssueUploadAssetParam) ContentType() string { + return 
mime.TypeByExtension(path.Ext(p.Filename)) +} + +type File interface { + ReadAsset(context.Context, string, string) (io.ReadCloser, error) + GetAssetFiles(context.Context, string) ([]FileEntry, error) + UploadAsset(context.Context, *file.File) (string, int64, error) + DeleteAsset(context.Context, string, string) error + GetURL(*asset.Asset) string + IssueUploadAssetLink(context.Context, IssueUploadAssetParam) (*UploadAssetLink, error) + UploadedAsset(context.Context, *asset.Upload) (*file.File, error) +} diff --git a/asset/assetusecase/assetgateway/gatewaymock/task.go b/asset/assetusecase/assetgateway/gatewaymock/task.go new file mode 100644 index 0000000..3357278 --- /dev/null +++ b/asset/assetusecase/assetgateway/gatewaymock/task.go @@ -0,0 +1,64 @@ +// Code generated by MockGen. DO NOT EDIT. +// Source: ./task.go + +// Package gatewaymock is a generated GoMock package. +package gatewaymock + +import ( + context "context" + reflect "reflect" + + gomock "github.com/golang/mock/gomock" + task "github.com/reearth/reearthx/asset/assetdomain/task" +) + +// MockTaskRunner is a mock of TaskRunner interface. +type MockTaskRunner struct { + ctrl *gomock.Controller + recorder *MockTaskRunnerMockRecorder +} + +// MockTaskRunnerMockRecorder is the mock recorder for MockTaskRunner. +type MockTaskRunnerMockRecorder struct { + mock *MockTaskRunner +} + +// NewMockTaskRunner creates a new mock instance. +func NewMockTaskRunner(ctrl *gomock.Controller) *MockTaskRunner { + mock := &MockTaskRunner{ctrl: ctrl} + mock.recorder = &MockTaskRunnerMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockTaskRunner) EXPECT() *MockTaskRunnerMockRecorder { + return m.recorder +} + +// Retry mocks base method. 
+func (m *MockTaskRunner) Retry(arg0 context.Context, arg1 string) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Retry", arg0, arg1) + ret0, _ := ret[0].(error) + return ret0 +} + +// Retry indicates an expected call of Retry. +func (mr *MockTaskRunnerMockRecorder) Retry(arg0, arg1 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Retry", reflect.TypeOf((*MockTaskRunner)(nil).Retry), arg0, arg1) +} + +// Run mocks base method. +func (m *MockTaskRunner) Run(arg0 context.Context, arg1 task.Payload) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Run", arg0, arg1) + ret0, _ := ret[0].(error) + return ret0 +} + +// Run indicates an expected call of Run. +func (mr *MockTaskRunnerMockRecorder) Run(arg0, arg1 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Run", reflect.TypeOf((*MockTaskRunner)(nil).Run), arg0, arg1) +} diff --git a/asset/assetusecase/assetgateway/genmock.go b/asset/assetusecase/assetgateway/genmock.go new file mode 100644 index 0000000..da0c2dd --- /dev/null +++ b/asset/assetusecase/assetgateway/genmock.go @@ -0,0 +1,3 @@ +package assetgateway + +//go:generate go run github.com/golang/mock/mockgen -source=./task.go -destination=./gatewaymock/task.go -package=gatewaymock diff --git a/asset/assetusecase/assetgateway/task.go b/asset/assetusecase/assetgateway/task.go new file mode 100644 index 0000000..d20a32b --- /dev/null +++ b/asset/assetusecase/assetgateway/task.go @@ -0,0 +1,12 @@ +package assetgateway + +import ( + "context" + + "github.com/reearth/reearthx/asset/assetdomain/task" +) + +type TaskRunner interface { + Run(context.Context, task.Payload) error + Retry(context.Context, string) error +} From b133380aa5b3feb76a255bc4b89c0b670babc58b Mon Sep 17 00:00:00 2001 From: shumon84 Date: Mon, 9 Sep 2024 02:35:53 +0900 Subject: [PATCH 06/10] feat(asset): add assetinteractor package --- 
asset/assetusecase/assetinteractor/asset.go | 559 +++++++ .../assetinteractor/asset_test.go | 1481 +++++++++++++++++ asset/assetusecase/assetinteractor/common.go | 110 ++ .../assetinteractor/common_test.go | 145 ++ 4 files changed, 2295 insertions(+) create mode 100644 asset/assetusecase/assetinteractor/asset.go create mode 100644 asset/assetusecase/assetinteractor/asset_test.go create mode 100644 asset/assetusecase/assetinteractor/common.go create mode 100644 asset/assetusecase/assetinteractor/common_test.go diff --git a/asset/assetusecase/assetinteractor/asset.go b/asset/assetusecase/assetinteractor/asset.go new file mode 100644 index 0000000..73f1d2a --- /dev/null +++ b/asset/assetusecase/assetinteractor/asset.go @@ -0,0 +1,559 @@ +package interactor + +import ( + "context" + "errors" + "fmt" + "path" + "strings" + "time" + + "github.com/google/uuid" + id "github.com/reearth/reearthx/asset/assetdomain" + "github.com/reearth/reearthx/asset/assetdomain/asset" + "github.com/reearth/reearthx/asset/assetdomain/event" + "github.com/reearth/reearthx/asset/assetdomain/file" + "github.com/reearth/reearthx/asset/assetdomain/task" + "github.com/reearth/reearthx/asset/assetdomain/thread" + usecase "github.com/reearth/reearthx/asset/assetusecase" + "github.com/reearth/reearthx/asset/assetusecase/assetgateway" + interfaces "github.com/reearth/reearthx/asset/assetusecase/assetinterfaces" + repo "github.com/reearth/reearthx/asset/assetusecase/assetrepo" + "github.com/reearth/reearthx/log" + "github.com/reearth/reearthx/rerror" + "github.com/reearth/reearthx/usecasex" + "github.com/samber/lo" +) + +type Asset struct { + repos *repo.Container + gateways *assetgateway.Container + ignoreEvent bool +} + +func NewAsset(r *repo.Container, g *assetgateway.Container) interfaces.Asset { + return &Asset{ + repos: r, + gateways: g, + } +} + +func (i *Asset) FindByID(ctx context.Context, aid id.AssetID, _ *usecase.Operator) (*asset.Asset, error) { + return i.repos.Asset.FindByID(ctx, aid) 
+} + +func (i *Asset) FindByIDs(ctx context.Context, assets []id.AssetID, _ *usecase.Operator) (asset.List, error) { + return i.repos.Asset.FindByIDs(ctx, assets) +} + +func (i *Asset) FindByProject(ctx context.Context, pid id.ProjectID, filter interfaces.AssetFilter, _ *usecase.Operator) (asset.List, *usecasex.PageInfo, error) { + return i.repos.Asset.FindByProject(ctx, pid, repo.AssetFilter{ + Sort: filter.Sort, + Keyword: filter.Keyword, + Pagination: filter.Pagination, + }) +} + +func (i *Asset) FindFileByID(ctx context.Context, aid id.AssetID, _ *usecase.Operator) (*asset.File, error) { + _, err := i.repos.Asset.FindByID(ctx, aid) + if err != nil { + return nil, err + } + + files, err := i.repos.AssetFile.FindByID(ctx, aid) + if err != nil { + return nil, err + } + + return files, nil +} + +func (i *Asset) GetURL(a *asset.Asset) string { + return i.gateways.File.GetURL(a) +} + +func (i *Asset) Create(ctx context.Context, inp interfaces.CreateAssetParam, op *usecase.Operator) (result *asset.Asset, afile *asset.File, err error) { + if op.AcOperator.User == nil && op.Integration == nil { + return nil, nil, interfaces.ErrInvalidOperator + } + + if inp.File == nil && inp.Token == "" { + return nil, nil, interfaces.ErrFileNotIncluded + } + + prj, err := i.repos.Project.FindByID(ctx, inp.ProjectID) + if err != nil { + return nil, nil, err + } + + if !op.IsWritableWorkspace(prj.Workspace()) { + return nil, nil, interfaces.ErrOperationDenied + } + + var uuid string + var file *file.File + if inp.File != nil { + var size int64 + file = inp.File + uuid, size, err = i.gateways.File.UploadAsset(ctx, inp.File) + if err != nil { + return nil, nil, err + } + file.Size = size + } + + a, f, err := usecasex.Run2[*asset.Asset, *asset.File]( + ctx, func(ctx context.Context) (*asset.Asset, *asset.File, error) { + if inp.Token != "" { + uuid = inp.Token + u, err := i.repos.AssetUpload.FindByID(ctx, uuid) + if err != nil { + return nil, nil, err + } + if u.Expired(time.Now()) { + 
return nil, nil, rerror.ErrInternalBy(fmt.Errorf("expired upload token: %s", uuid)) + } + file, err = i.gateways.File.UploadedAsset(ctx, u) + if err != nil { + return nil, nil, err + } + } + th, err := thread.New().NewID().Workspace(prj.Workspace()).Build() + if err != nil { + return nil, nil, err + } + if err := i.repos.Thread.Save(ctx, th); err != nil { + return nil, nil, err + } + + needDecompress := false + if ext := strings.ToLower(path.Ext(file.Name)); ext == ".zip" || ext == ".7z" { + needDecompress = true + } + + es := lo.ToPtr(asset.ArchiveExtractionStatusDone) + if needDecompress { + if inp.SkipDecompression { + es = lo.ToPtr(asset.ArchiveExtractionStatusSkipped) + } else { + es = lo.ToPtr(asset.ArchiveExtractionStatusPending) + } + } + + ab := asset.New(). + NewID(). + Project(inp.ProjectID). + FileName(path.Base(file.Name)). + Size(uint64(file.Size)). + Type(asset.DetectPreviewType(file)). + UUID(uuid). + Thread(th.ID()). + ArchiveExtractionStatus(es) + + if op.AcOperator.User != nil { + ab.CreatedByUser(*op.AcOperator.User) + } + if op.Integration != nil { + ab.CreatedByIntegration(*op.Integration) + } + + a, err := ab.Build() + if err != nil { + return nil, nil, err + } + + f := asset.NewFile(). + Name(file.Name). + Path(file.Name). + Size(uint64(file.Size)). + GuessContentType(). + Build() + + if err := i.repos.Asset.Save(ctx, a); err != nil { + return nil, nil, err + } + + if err := i.repos.AssetFile.Save(ctx, a.ID(), f); err != nil { + return nil, nil, err + } + + if needDecompress && !inp.SkipDecompression { + if err := i.triggerDecompressEvent(ctx, a, f); err != nil { + return nil, nil, err + } + } + return a, f, nil + }) + + if err != nil { + return nil, nil, err + } + + // In AWS, extraction is done in very short time when a zip file is small, so it often results in an error because an asset is not saved yet in MongoDB. So an event should be created after committing the transaction. 
+ if err := i.event(ctx, Event{ + Project: prj, + Workspace: prj.Workspace(), + Type: event.AssetCreate, + Object: a, + Operator: op.Operator(), + }); err != nil { + return nil, nil, err + } + + return a, f, nil +} + +func (i *Asset) DecompressByID(ctx context.Context, aId id.AssetID, operator *usecase.Operator) (*asset.Asset, error) { + if operator.AcOperator.User == nil && operator.Integration == nil { + return nil, interfaces.ErrInvalidOperator + } + + return usecasex.Run1( + ctx, func(ctx context.Context) (*asset.Asset, error) { + a, err := i.repos.Asset.FindByID(ctx, aId) + if err != nil { + return nil, err + } + + if !operator.CanUpdate(a) { + return nil, interfaces.ErrOperationDenied + } + + f, err := i.repos.AssetFile.FindByID(ctx, aId) + if err != nil { + return nil, err + } + + if err := i.triggerDecompressEvent(ctx, a, f); err != nil { + return nil, err + } + + a.UpdateArchiveExtractionStatus(lo.ToPtr(asset.ArchiveExtractionStatusPending)) + + if err := i.repos.Asset.Save(ctx, a); err != nil { + return nil, err + } + + return a, nil + }, + ) +} + +type wrappedUploadCursor struct { + UUID string + Cursor string +} + +func (c wrappedUploadCursor) String() string { + return c.UUID + "_" + c.Cursor +} + +func parseWrappedUploadCursor(c string) (*wrappedUploadCursor, error) { + uuid, cursor, found := strings.Cut(c, "_") + if !found { + return nil, fmt.Errorf("separator not found") + } + return &wrappedUploadCursor{ + UUID: uuid, + Cursor: cursor, + }, nil +} + +func wrapUploadCursor(uuid, cursor string) string { + if cursor == "" { + return "" + } + return wrappedUploadCursor{UUID: uuid, Cursor: cursor}.String() +} + +func (i *Asset) CreateUpload(ctx context.Context, inp interfaces.CreateAssetUploadParam, op *usecase.Operator) (*interfaces.AssetUpload, error) { + if op.AcOperator.User == nil && op.Integration == nil { + return nil, interfaces.ErrInvalidOperator + } + + var param *assetgateway.IssueUploadAssetParam + if inp.Cursor == "" { + if inp.Filename == 
"" { + // TODO: Change to the appropriate error + return nil, interfaces.ErrFileNotIncluded + } + + const week = 7 * 24 * time.Hour + expiresAt := time.Now().Add(1 * week) + param = &assetgateway.IssueUploadAssetParam{ + UUID: uuid.New().String(), + Filename: inp.Filename, + ContentLength: inp.ContentLength, + ExpiresAt: expiresAt, + Cursor: "", + } + } else { + wrapped, err := parseWrappedUploadCursor(inp.Cursor) + if err != nil { + return nil, fmt.Errorf("parse cursor(%s): %w", inp.Cursor, err) + } + au, err := i.repos.AssetUpload.FindByID(ctx, wrapped.UUID) + if err != nil { + return nil, fmt.Errorf("find asset upload(uuid=%s): %w", wrapped.UUID, err) + } + if inp.ProjectID.Compare(au.Project()) != 0 { + return nil, fmt.Errorf("unmatched project id(in=%s,db=%s)", inp.ProjectID, au.Project()) + } + param = &assetgateway.IssueUploadAssetParam{ + UUID: wrapped.UUID, + Filename: au.FileName(), + ContentLength: au.ContentLength(), + ExpiresAt: au.ExpiresAt(), + Cursor: wrapped.Cursor, + } + } + + prj, err := i.repos.Project.FindByID(ctx, inp.ProjectID) + if err != nil { + return nil, err + } + if !op.IsWritableWorkspace(prj.Workspace()) { + return nil, interfaces.ErrOperationDenied + } + + uploadLink, err := i.gateways.File.IssueUploadAssetLink(ctx, *param) + if errors.Is(err, assetgateway.ErrUnsupportedOperation) { + return nil, rerror.ErrNotFound + } + if err != nil { + return nil, err + } + + if inp.Cursor == "" { + u := asset.NewUpload(). + UUID(param.UUID). + Project(prj.ID()). + FileName(param.Filename). + ExpiresAt(param.ExpiresAt). + ContentLength(param.ContentLength). 
+ Build() + if err := i.repos.AssetUpload.Save(ctx, u); err != nil { + return nil, err + } + } + return &interfaces.AssetUpload{ + URL: uploadLink.URL, + UUID: param.UUID, + ContentType: uploadLink.ContentType, + ContentLength: uploadLink.ContentLength, + Next: wrapUploadCursor(param.UUID, uploadLink.Next), + }, nil +} + +func (i *Asset) triggerDecompressEvent(ctx context.Context, a *asset.Asset, f *asset.File) error { + if i.gateways.TaskRunner == nil { + log.Infof("asset: decompression of asset %s was skipped because task runner is not configured", a.ID()) + return nil + } + + taskPayload := task.DecompressAssetPayload{ + AssetID: a.ID().String(), + Path: f.RootPath(a.UUID()), + } + if err := i.gateways.TaskRunner.Run(ctx, taskPayload.Payload()); err != nil { + return err + } + + a.UpdateArchiveExtractionStatus(lo.ToPtr(asset.ArchiveExtractionStatusInProgress)) + if err := i.repos.Asset.Save(ctx, a); err != nil { + return err + } + + return nil +} + +func (i *Asset) Update(ctx context.Context, inp interfaces.UpdateAssetParam, operator *usecase.Operator) (result *asset.Asset, err error) { + if operator.AcOperator.User == nil && operator.Integration == nil { + return nil, interfaces.ErrInvalidOperator + } + + return usecasex.Run1( + ctx, func(ctx context.Context) (*asset.Asset, error) { + a, err := i.repos.Asset.FindByID(ctx, inp.AssetID) + if err != nil { + return nil, err + } + + if !operator.CanUpdate(a) { + return nil, interfaces.ErrOperationDenied + } + + if inp.PreviewType != nil { + a.UpdatePreviewType(inp.PreviewType) + } + + if err := i.repos.Asset.Save(ctx, a); err != nil { + return nil, err + } + + return a, nil + }, + ) +} + +func (i *Asset) UpdateFiles(ctx context.Context, aid id.AssetID, s *asset.ArchiveExtractionStatus, op *usecase.Operator) (*asset.Asset, error) { + if op.AcOperator.User == nil && op.Integration == nil && !op.Machine { + return nil, interfaces.ErrInvalidOperator + } + + return usecasex.Run1( + ctx, func(ctx context.Context) 
(*asset.Asset, error) { + a, err := i.repos.Asset.FindByID(ctx, aid) + if err != nil { + if err == rerror.ErrNotFound { + return nil, err + } + return nil, fmt.Errorf("failed to find an asset: %v", err) + } + + if !op.CanUpdate(a) { + return nil, interfaces.ErrOperationDenied + } + + if shouldSkipUpdate(a.ArchiveExtractionStatus(), s) { + return a, nil + } + + prj, err := i.repos.Project.FindByID(ctx, a.Project()) + if err != nil { + return nil, fmt.Errorf("failed to find a project: %v", err) + } + + srcfile, err := i.repos.AssetFile.FindByID(ctx, aid) + if err != nil { + return nil, fmt.Errorf("failed to find an asset file: %v", err) + } + + files, err := i.gateways.File.GetAssetFiles(ctx, a.UUID()) + if err != nil { + if err == assetgateway.ErrFileNotFound { + return nil, err + } + return nil, fmt.Errorf("failed to get asset files: %v", err) + } + + a.UpdateArchiveExtractionStatus(s) + if previewType := detectPreviewType(files); previewType != nil { + a.UpdatePreviewType(previewType) + } + + if err := i.repos.Asset.Save(ctx, a); err != nil { + return nil, fmt.Errorf("failed to save an asset: %v", err) + } + + srcPath := srcfile.Path() + assetFiles := lo.FilterMap(files, func(f assetgateway.FileEntry, _ int) (*asset.File, bool) { + if srcPath == f.Name { + return nil, false + } + return asset.NewFile(). + Name(path.Base(f.Name)). + Path(f.Name). + GuessContentType(). 
+ Build(), true + }) + + if err := i.repos.AssetFile.SaveFlat(ctx, a.ID(), srcfile, assetFiles); err != nil { + return nil, fmt.Errorf("failed to save asset files: %v", err) + } + + if err := i.event(ctx, Event{ + Project: prj, + Workspace: prj.Workspace(), + Type: event.AssetDecompress, + Object: a, + Operator: op.Operator(), + }); err != nil { + return nil, fmt.Errorf("failed to create an event: %v", err) + } + + return a, nil + }, + ) +} + +func detectPreviewType(files []assetgateway.FileEntry) *asset.PreviewType { + for _, entry := range files { + if path.Base(entry.Name) == "tileset.json" { + return lo.ToPtr(asset.PreviewTypeGeo3dTiles) + } + if path.Ext(entry.Name) == ".mvt" { + return lo.ToPtr(asset.PreviewTypeGeoMvt) + } + } + return nil +} + +func shouldSkipUpdate(from, to *asset.ArchiveExtractionStatus) bool { + if from.String() == asset.ArchiveExtractionStatusDone.String() { + return true + } + return from.String() == to.String() +} + +func (i *Asset) Delete(ctx context.Context, aId id.AssetID, operator *usecase.Operator) (result id.AssetID, err error) { + if operator.AcOperator.User == nil && operator.Integration == nil { + return aId, interfaces.ErrInvalidOperator + } + + return usecasex.Run1( + ctx, func(ctx context.Context) (id.AssetID, error) { + a, err := i.repos.Asset.FindByID(ctx, aId) + if err != nil { + return aId, err + } + + if !operator.CanUpdate(a) { + return aId, interfaces.ErrOperationDenied + } + + uuid := a.UUID() + filename := a.FileName() + if uuid != "" && filename != "" { + if err := i.gateways.File.DeleteAsset(ctx, uuid, filename); err != nil { + return aId, err + } + } + + err = i.repos.Asset.Delete(ctx, aId) + if err != nil { + return aId, err + } + + p, err := i.repos.Project.FindByID(ctx, a.Project()) + if err != nil { + return aId, err + } + + if err := i.event(ctx, Event{ + Project: p, + Workspace: p.Workspace(), + Type: event.AssetDelete, + Object: a, + Operator: operator.Operator(), + }); err != nil { + return aId, err + } 
+ + return aId, nil + }, + ) +} + +func (i *Asset) event(ctx context.Context, e Event) error { + if i.ignoreEvent { + return nil + } + + _, err := createEvent(ctx, i.repos, i.gateways, e) + return err +} + +func (i *Asset) RetryDecompression(ctx context.Context, id string) error { + return i.gateways.TaskRunner.Retry(ctx, id) +} diff --git a/asset/assetusecase/assetinteractor/asset_test.go b/asset/assetusecase/assetinteractor/asset_test.go new file mode 100644 index 0000000..8837868 --- /dev/null +++ b/asset/assetusecase/assetinteractor/asset_test.go @@ -0,0 +1,1481 @@ +package interactor + +import ( + "bytes" + "context" + "github.com/reearth/reearthx/asset/assetinfrastructure/assetfs" + "github.com/reearth/reearthx/asset/assetinfrastructure/assetmemory" + "io" + "path" + "runtime" + "strings" + "testing" + "time" + + "github.com/google/uuid" + "github.com/reearth/reearthx/account/accountdomain" + "github.com/reearth/reearthx/account/accountdomain/user" + "github.com/reearth/reearthx/account/accountdomain/workspace" + "github.com/reearth/reearthx/account/accountusecase" + id "github.com/reearth/reearthx/asset/assetdomain" + "github.com/reearth/reearthx/asset/assetdomain/asset" + "github.com/reearth/reearthx/asset/assetdomain/file" + "github.com/reearth/reearthx/asset/assetdomain/project" + "github.com/reearth/reearthx/asset/assetdomain/task" + usecase "github.com/reearth/reearthx/asset/assetusecase" + gateway "github.com/reearth/reearthx/asset/assetusecase/assetgateway" + interfaces "github.com/reearth/reearthx/asset/assetusecase/assetinterfaces" + "github.com/reearth/reearthx/idx" + "github.com/reearth/reearthx/rerror" + "github.com/reearth/reearthx/usecasex" + "github.com/samber/lo" + "github.com/spf13/afero" + "github.com/stretchr/testify/assert" +) + +func TestAsset_FindByID(t *testing.T) { + pid := id.NewProjectID() + id1 := id.NewAssetID() + uid1 := accountdomain.NewUserID() + a1 := asset.New(). + ID(id1). + Project(pid). + CreatedByUser(uid1). + Size(1000). 
+ Thread(id.NewThreadID()). + NewUUID(). + MustBuild() + + op := &usecase.Operator{} + + type args struct { + id id.AssetID + operator *usecase.Operator + } + + tests := []struct { + name string + seeds []*asset.Asset + args args + want *asset.Asset + wantErr error + }{ + { + name: "Not found in empty db", + seeds: []*asset.Asset{}, + args: args{ + id: id.NewAssetID(), + operator: op, + }, + want: nil, + wantErr: rerror.ErrNotFound, + }, + { + name: "Not found", + seeds: []*asset.Asset{a1}, + args: args{ + id: id.NewAssetID(), + operator: op, + }, + want: nil, + wantErr: rerror.ErrNotFound, + }, + { + name: "Found 1", + seeds: []*asset.Asset{a1}, + args: args{ + id: id1, + operator: op, + }, + want: a1, + wantErr: nil, + }, + { + name: "Found 2", + seeds: []*asset.Asset{ + a1, + asset.New(). + NewID(). + Project(id.NewProjectID()). + CreatedByUser(accountdomain.NewUserID()). + Size(1000). + Thread(id.NewThreadID()). + NewUUID(). + MustBuild(), + asset.New(). + NewID(). + Project(id.NewProjectID()). + CreatedByUser(accountdomain.NewUserID()). + Size(1000). + Thread(id.NewThreadID()). + NewUUID(). 
+ MustBuild(), + }, + args: args{ + id: id1, + operator: op, + }, + want: a1, + wantErr: nil, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + ctx := context.Background() + db := assetmemory.New() + + for _, a := range tc.seeds { + err := db.Asset.Save(ctx, a.Clone()) + assert.NoError(t, err) + } + assetUC := NewAsset(db, nil) + + got, err := assetUC.FindByID(ctx, tc.args.id, tc.args.operator) + if tc.wantErr != nil { + assert.Equal(t, tc.wantErr, err) + return + } + assert.NoError(t, err) + assert.Equal(t, tc.want, got) + }) + } +} + +func TestAsset_DecompressByID(t *testing.T) { + ws1 := workspace.New().NewID().MustBuild() + pid1 := id.NewProjectID() + id1 := id.NewAssetID() + uid1 := accountdomain.NewUserID() + u1 := user.New().ID(uid1).Name("aaa").Email("aaa@bbb.com").Workspace(ws1.ID()).MustBuild() + a1 := asset.New(). + ID(id1). + Project(pid1). + CreatedByUser(uid1). + Size(1000). + FileName("aaa.zip"). + Thread(id.NewThreadID()). + NewUUID(). 
+ MustBuild() + + type args struct { + id id.AssetID + operator *usecase.Operator + } + + tests := []struct { + name string + seeds []*asset.Asset + args args + want *asset.Asset + wantErr error + }{ + { + name: "No user or integration", + seeds: []*asset.Asset{}, + args: args{ + id: id.NewAssetID(), + operator: &usecase.Operator{ + AcOperator: &accountusecase.Operator{}, + }, + }, + want: nil, + wantErr: interfaces.ErrInvalidOperator, + }, + { + name: "Operation denied", + seeds: []*asset.Asset{a1}, + args: args{ + id: a1.ID(), + operator: &usecase.Operator{ + AcOperator: &accountusecase.Operator{ + User: lo.ToPtr(u1.ID()), + ReadableWorkspaces: []accountdomain.WorkspaceID{ws1.ID()}, + }, + }, + }, + want: nil, + wantErr: interfaces.ErrOperationDenied, + }, + { + name: "not found", + seeds: []*asset.Asset{a1}, + args: args{ + id: asset.NewID(), + operator: &usecase.Operator{ + AcOperator: &accountusecase.Operator{ + User: lo.ToPtr(u1.ID()), + OwningWorkspaces: []accountdomain.WorkspaceID{ws1.ID()}, + }, + OwningProjects: []id.ProjectID{pid1}, + }, + }, + want: nil, + wantErr: rerror.ErrNotFound, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + ctx := context.Background() + db := assetmemory.New() + + for _, a := range tc.seeds { + err := db.Asset.Save(ctx, a.Clone()) + assert.NoError(t, err) + } + assetUC := NewAsset(db, nil) + + got, err := assetUC.DecompressByID(ctx, tc.args.id, tc.args.operator) + if tc.wantErr != nil { + assert.Equal(t, tc.wantErr, err) + return + } + assert.NoError(t, err) + assert.Equal(t, tc.want, got) + }) + } +} + +func TestAsset_FindFileByID(t *testing.T) { + pid := id.NewProjectID() + id1 := id.NewAssetID() + uid1 := accountdomain.NewUserID() + a1 := asset.New(). + ID(id1). + Project(pid). + CreatedByUser(uid1). + Size(1000). + Thread(id.NewThreadID()). + NewUUID(). 
+ MustBuild() + af1 := asset.NewFile().Name("xxx").Path("/xxx.zip").GuessContentType().Build() + op := &usecase.Operator{} + + type args struct { + id id.AssetID + operator *usecase.Operator + } + + tests := []struct { + name string + seeds []*asset.Asset + seedFiles map[asset.ID]*asset.File + args args + want *asset.File + wantErr error + }{ + { + name: "Asset Not found", + seeds: []*asset.Asset{a1}, + args: args{ + id: asset.NewID(), + operator: op, + }, + want: nil, + wantErr: rerror.ErrNotFound, + }, + { + name: "Asset file Not found", + seeds: []*asset.Asset{a1}, + seedFiles: map[asset.ID]*asset.File{ + asset.NewID(): af1, + }, + args: args{ + id: id1, + operator: op, + }, + want: nil, + wantErr: rerror.ErrNotFound, + }, + { + name: "Asset file found", + seeds: []*asset.Asset{a1}, + seedFiles: map[asset.ID]*asset.File{ + id1: af1, + }, + args: args{ + id: id1, + operator: op, + }, + want: af1, + wantErr: nil, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + ctx := context.Background() + db := assetmemory.New() + + for _, a := range tc.seeds { + err := db.Asset.Save(ctx, a.Clone()) + assert.NoError(t, err) + } + for id, f := range tc.seedFiles { + err := db.AssetFile.Save(ctx, id, f.Clone()) + assert.Nil(t, err) + } + + assetUC := NewAsset(db, nil) + + got, err := assetUC.FindFileByID(ctx, tc.args.id, tc.args.operator) + if tc.wantErr != nil { + assert.Equal(t, tc.wantErr, err) + return + } + assert.NoError(t, err) + assert.Equal(t, tc.want, got) + }) + } +} + +func TestAsset_FindByIDs(t *testing.T) { + pid1 := id.NewProjectID() + uid1 := accountdomain.NewUserID() + id1 := id.NewAssetID() + id2 := id.NewAssetID() + tim, _ := time.Parse(time.RFC3339, "2021-03-16T04:19:57.592Z") + a1 := asset.New().ID(id1). + Project(pid1). + CreatedAt(tim). + CreatedByUser(uid1). + Size(1000). + Thread(id.NewThreadID()). + NewUUID(). + MustBuild() + a2 := asset.New().ID(id2). + Project(pid1). + CreatedAt(tim). 
+ CreatedByUser(uid1). + Size(1000). + Thread(id.NewThreadID()). + NewUUID(). + MustBuild() + + tests := []struct { + name string + seeds asset.List + arg id.AssetIDList + want asset.List + wantErr error + }{ + { + name: "0 count in empty db", + seeds: asset.List{}, + arg: []id.AssetID{}, + want: nil, + wantErr: nil, + }, + { + name: "0 count with asset for another workspaces", + seeds: asset.List{ + asset.New().NewID().Project(id.NewProjectID()).NewUUID(). + CreatedByUser(accountdomain.NewUserID()).Size(1000).Thread(id.NewThreadID()).MustBuild(), + }, + arg: []id.AssetID{}, + want: nil, + wantErr: nil, + }, + { + name: "1 count with single asset", + seeds: asset.List{ + a1, + }, + arg: []id.AssetID{id1}, + want: asset.List{a1}, + wantErr: nil, + }, + { + name: "1 count with multi assets", + seeds: asset.List{ + a1, + asset.New().NewID().Project(id.NewProjectID()).NewUUID(). + CreatedByUser(accountdomain.NewUserID()).Size(1000).Thread(id.NewThreadID()).MustBuild(), + asset.New().NewID().Project(id.NewProjectID()).NewUUID(). + CreatedByUser(accountdomain.NewUserID()).Size(1000).Thread(id.NewThreadID()).MustBuild(), + }, + arg: []id.AssetID{id1}, + want: asset.List{a1}, + wantErr: nil, + }, + { + name: "2 count with multi assets", + seeds: asset.List{ + a1, + a2, + asset.New().NewID().Project(id.NewProjectID()).NewUUID(). + CreatedByUser(accountdomain.NewUserID()).Size(1000).Thread(id.NewThreadID()).MustBuild(), + asset.New().NewID().Project(id.NewProjectID()).NewUUID(). 
+ CreatedByUser(accountdomain.NewUserID()).Size(1000).Thread(id.NewThreadID()).MustBuild(), + }, + arg: []id.AssetID{id1, id2}, + want: asset.List{a1, a2}, + wantErr: nil, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + ctx := context.Background() + db := assetmemory.New() + + for _, a := range tc.seeds { + err := db.Asset.Save(ctx, a.Clone()) + assert.NoError(t, err) + } + assetUC := NewAsset(db, nil) + + got, err := assetUC.FindByIDs(ctx, tc.arg, &usecase.Operator{AcOperator: &accountusecase.Operator{}}) + if tc.wantErr != nil { + assert.Equal(t, tc.wantErr, err) + return + } + assert.NoError(t, err) + assert.Equal(t, tc.want, got) + }) + } +} + +func TestAsset_FindByProject(t *testing.T) { + pid := id.NewProjectID() + aid1 := id.NewAssetID() + uid1 := accountdomain.NewUserID() + a1 := asset.New().ID(aid1).Project(pid).NewUUID(). + CreatedByUser(uid1).Size(1000).Thread(id.NewThreadID()).MustBuild() + + aid2 := id.NewAssetID() + uid2 := accountdomain.NewUserID() + a2 := asset.New().ID(aid2).Project(pid).NewUUID(). + CreatedByUser(uid2).Size(1000).Thread(id.NewThreadID()).MustBuild() + + op := &usecase.Operator{} + + type args struct { + pid id.ProjectID + f interfaces.AssetFilter + operator *usecase.Operator + } + tests := []struct { + name string + seeds asset.List + args args + want asset.List + wantErr error + }{ + { + name: "0 count in empty db", + seeds: asset.List{}, + args: args{ + pid: id.NewProjectID(), + operator: op, + }, + want: nil, + wantErr: nil, + }, + { + name: "0 count with asset for another projects", + seeds: asset.List{ + asset.New().NewID().Project(id.NewProjectID()).NewUUID(). 
+ CreatedByUser(accountdomain.NewUserID()).Size(1000).Thread(id.NewThreadID()).MustBuild(), + }, + args: args{ + pid: id.NewProjectID(), + operator: op, + }, + want: nil, + wantErr: nil, + }, + { + name: "1 count with single asset", + seeds: asset.List{ + a1, + }, + args: args{ + pid: pid, + f: interfaces.AssetFilter{ + Pagination: usecasex.CursorPagination{First: lo.ToPtr(int64(1))}.Wrap(), + }, + operator: op, + }, + want: asset.List{a1}, + wantErr: nil, + }, + { + name: "1 count with multi assets", + seeds: asset.List{ + a1, + asset.New().NewID().Project(id.NewProjectID()).NewUUID(). + CreatedByUser(accountdomain.NewUserID()).Size(1000).Thread(id.NewThreadID()).MustBuild(), + asset.New().NewID().Project(id.NewProjectID()).NewUUID(). + CreatedByUser(accountdomain.NewUserID()).Size(1000).Thread(id.NewThreadID()).MustBuild(), + }, + args: args{ + pid: pid, + f: interfaces.AssetFilter{ + Pagination: usecasex.CursorPagination{First: lo.ToPtr(int64(1))}.Wrap(), + }, + operator: op, + }, + want: asset.List{a1}, + wantErr: nil, + }, + { + name: "2 count with multi assets", + seeds: asset.List{ + a1, + a2, + asset.New().NewID().Project(id.NewProjectID()).NewUUID(). + CreatedByUser(accountdomain.NewUserID()).Size(1000).Thread(id.NewThreadID()).MustBuild(), + asset.New().NewID().Project(id.NewProjectID()).NewUUID(). 
+ CreatedByUser(accountdomain.NewUserID()).Size(1000).Thread(id.NewThreadID()).MustBuild(), + }, + args: args{ + pid: pid, + f: interfaces.AssetFilter{ + Pagination: usecasex.CursorPagination{First: lo.ToPtr(int64(2))}.Wrap(), + }, + operator: op, + }, + want: asset.List{a1, a2}, + wantErr: nil, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + ctx := context.Background() + db := assetmemory.New() + + for _, a := range tc.seeds { + err := db.Asset.Save(ctx, a.Clone()) + assert.NoError(t, err) + } + assetUC := NewAsset(db, nil) + + got, _, err := assetUC.FindByProject(ctx, tc.args.pid, tc.args.f, tc.args.operator) + if tc.wantErr != nil { + assert.Equal(t, tc.wantErr, err) + return + } + assert.NoError(t, err) + assert.Equal(t, tc.want, got) + }) + } +} + +func TestAsset_Create(t *testing.T) { + mocktime := time.Now() + ws := workspace.New().NewID().MustBuild() + ws2 := workspace.New().NewID().MustBuild() + + pid1 := id.NewProjectID() + p1 := project.New().ID(pid1).Workspace(ws.ID()).UpdatedAt(mocktime).MustBuild() + + u := user.New().NewID().Name("aaa").Email("aaa@bbb.com").Workspace(ws.ID()).MustBuild() + acop := &accountusecase.Operator{ + User: lo.ToPtr(u.ID()), + WritableWorkspaces: []accountdomain.WorkspaceID{ws.ID()}, + } + op := &usecase.Operator{ + AcOperator: acop, + } + + zipMime := "application/zip" + if runtime.GOOS == "windows" { + zipMime = "application/x-zip-compressed" + } + + buf := bytes.NewBufferString("Hello") + buf2 := bytes.NewBufferString("Hello") + buf3 := bytes.NewBufferString("Hello") + buf4 := bytes.NewBufferString("Hello") + buf5 := bytes.NewBufferString("Hello") + af := asset.NewFile().Name("aaa.txt").Size(uint64(buf.Len())).Path("aaa.txt").ContentType("text/plain; charset=utf-8").Build() + af2 := asset.NewFile().Name("aaa.txt").Size(uint64(buf2.Len())).Path("aaa.txt").ContentType("text/plain; charset=utf-8").Build() + af3 := 
asset.NewFile().Name("aaa.zip").Size(uint64(buf3.Len())).Path("aaa.zip").ContentType(zipMime).Build() + af4 := asset.NewFile().Name("aaa.zip").Size(uint64(buf4.Len())).Path("aaa.zip").ContentType(zipMime).Build() + af5 := asset.NewFile().Name("AAA.ZIP").Size(uint64(buf5.Len())).Path("AAA.ZIP").ContentType(zipMime).Build() + + type args struct { + cpp interfaces.CreateAssetParam + operator *usecase.Operator + } + tests := []struct { + name string + seeds []*asset.Asset + args args + want *asset.Asset + wantFile *asset.File + wantErr error + }{ + { + name: "Create", + seeds: []*asset.Asset{}, + args: args{ + cpp: interfaces.CreateAssetParam{ + ProjectID: p1.ID(), + File: &file.File{ + Name: "aaa.txt", + Content: io.NopCloser(buf), + Size: int64(buf.Len()), + }, + }, + operator: op, + }, + want: asset.New(). + NewID(). + Project(p1.ID()). + CreatedByUser(u.ID()). + FileName("aaa.txt"). + Size(uint64(buf.Len())). + Type(asset.PreviewTypeUnknown.Ref()). + Thread(id.NewThreadID()). + NewUUID(). + ArchiveExtractionStatus(lo.ToPtr(asset.ArchiveExtractionStatusDone)). + MustBuild(), + wantFile: af, + wantErr: nil, + }, + { + name: "Create skip decompress", + seeds: []*asset.Asset{}, + args: args{ + cpp: interfaces.CreateAssetParam{ + ProjectID: p1.ID(), + File: &file.File{ + Name: "aaa.txt", + Content: io.NopCloser(buf2), + Size: int64(buf2.Len()), + }, + SkipDecompression: true, + }, + operator: op, + }, + want: asset.New(). + NewID(). + Project(p1.ID()). + CreatedByUser(u.ID()). + FileName("aaa.txt"). + Size(uint64(buf2.Len())). + Type(asset.PreviewTypeUnknown.Ref()). + Thread(id.NewThreadID()). + NewUUID(). + ArchiveExtractionStatus(lo.ToPtr(asset.ArchiveExtractionStatusDone)). 
+ MustBuild(), + wantFile: af2, + wantErr: nil, + }, + { + name: "CreateZip", + seeds: []*asset.Asset{}, + args: args{ + cpp: interfaces.CreateAssetParam{ + ProjectID: p1.ID(), + File: &file.File{ + Name: "aaa.zip", + Content: io.NopCloser(buf3), + Size: int64(buf3.Len()), + }, + }, + operator: op, + }, + want: asset.New(). + NewID(). + Project(p1.ID()). + CreatedByUser(u.ID()). + FileName("aaa.zip"). + Size(uint64(buf3.Len())). + Type(asset.PreviewTypeUnknown.Ref()). + Thread(id.NewThreadID()). + NewUUID(). + ArchiveExtractionStatus(lo.ToPtr(asset.ArchiveExtractionStatusInProgress)). + MustBuild(), + wantFile: af3, + wantErr: nil, + }, + { + name: "CreateZip skip decompress", + seeds: []*asset.Asset{}, + args: args{ + cpp: interfaces.CreateAssetParam{ + ProjectID: p1.ID(), + File: &file.File{ + Name: "aaa.zip", + Content: io.NopCloser(buf4), + Size: int64(buf4.Len()), + }, + SkipDecompression: true, + }, + operator: op, + }, + want: asset.New(). + NewID(). + Project(p1.ID()). + CreatedByUser(u.ID()). + FileName("aaa.zip"). + Size(uint64(buf4.Len())). + Type(asset.PreviewTypeUnknown.Ref()). + Thread(id.NewThreadID()). + NewUUID(). + ArchiveExtractionStatus(lo.ToPtr(asset.ArchiveExtractionStatusSkipped)). + MustBuild(), + wantFile: af4, + wantErr: nil, + }, + { + name: "CreateZipUpper", + seeds: []*asset.Asset{}, + args: args{ + cpp: interfaces.CreateAssetParam{ + ProjectID: p1.ID(), + File: &file.File{ + Name: "AAA.ZIP", + Content: io.NopCloser(buf5), + Size: int64(buf5.Len()), + }, + }, + operator: op, + }, + want: asset.New(). + NewID(). + Project(p1.ID()). + CreatedByUser(u.ID()). + FileName("AAA.ZIP"). + Size(uint64(buf5.Len())). + Type(asset.PreviewTypeUnknown.Ref()). + Thread(id.NewThreadID()). + NewUUID(). + ArchiveExtractionStatus(lo.ToPtr(asset.ArchiveExtractionStatusInProgress)). 
+ MustBuild(), + wantFile: af5, + wantErr: nil, + }, + { + name: "Create invalid file size", + seeds: []*asset.Asset{}, + args: args{ + cpp: interfaces.CreateAssetParam{ + ProjectID: p1.ID(), + File: &file.File{ + Name: "aaa.txt", + Content: io.NopCloser(buf), + Size: 10*1024*1024*1024 + 1, + }, + }, + operator: op, + }, + want: nil, + wantFile: nil, + wantErr: gateway.ErrFileTooLarge, + }, + { + name: "Create invalid file", + seeds: []*asset.Asset{}, + args: args{ + cpp: interfaces.CreateAssetParam{ + ProjectID: p1.ID(), + File: nil, + }, + operator: op, + }, + want: nil, + wantFile: nil, + wantErr: interfaces.ErrFileNotIncluded, + }, + { + name: "Create invalid operator", + seeds: []*asset.Asset{}, + args: args{ + cpp: interfaces.CreateAssetParam{ + ProjectID: p1.ID(), + File: nil, + }, + operator: &usecase.Operator{ + AcOperator: &accountusecase.Operator{}, + }, + }, + want: nil, + wantFile: nil, + wantErr: interfaces.ErrInvalidOperator, + }, + { + name: "Create project not found", + seeds: []*asset.Asset{}, + args: args{ + cpp: interfaces.CreateAssetParam{ + ProjectID: project.NewID(), + File: &file.File{ + Name: "aaa.txt", + Content: io.NopCloser(buf), + Size: 10*1024*1024*1024 + 1, + }, + }, + operator: &usecase.Operator{ + AcOperator: &accountusecase.Operator{ + User: lo.ToPtr(u.ID()), + WritableWorkspaces: []accountdomain.WorkspaceID{ws.ID()}, + }, + }, + }, + want: nil, + wantFile: nil, + wantErr: rerror.ErrNotFound, + }, + { + name: "Create operator denied", + seeds: []*asset.Asset{}, + args: args{ + cpp: interfaces.CreateAssetParam{ + ProjectID: p1.ID(), + File: &file.File{ + Name: "aaa.txt", + Content: io.NopCloser(buf), + Size: 10*1024*1024*1024 + 1, + }, + }, + operator: &usecase.Operator{ + AcOperator: &accountusecase.Operator{ + User: lo.ToPtr(u.ID()), + WritableWorkspaces: []accountdomain.WorkspaceID{ws2.ID()}, + }, + }, + }, + want: nil, + wantFile: nil, + wantErr: interfaces.ErrOperationDenied, + }, + } + + for _, tc := range tests { + tc := tc + 
t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + ctx := context.Background() + db := assetmemory.New() + mfs := afero.NewMemMapFs() + f, _ := assetfs.NewFile(mfs, "") + runnerGw := NewMockRunner() + + err := db.User.Save(ctx, u) + assert.NoError(t, err) + + err2 := db.Project.Save(ctx, p1.Clone()) + assert.Nil(t, err2) + + for _, a := range tc.seeds { + err := db.Asset.Save(ctx, a.Clone()) + assert.NoError(t, err) + } + + assetUC := Asset{ + repos: db, + gateways: &gateway.Container{ + File: f, + TaskRunner: runnerGw, + }, + ignoreEvent: true, + } + + got, gotFile, err := assetUC.Create(ctx, tc.args.cpp, tc.args.operator) + if tc.wantErr != nil { + assert.Equal(t, tc.wantErr, err) + return + } + assert.NoError(t, err) + + if strings.HasPrefix(got.PreviewType().String(), "image/") { + assert.Equal(t, asset.PreviewTypeImage.Ref(), got.PreviewType()) + } else { + assert.Equal(t, asset.PreviewTypeUnknown.Ref(), got.PreviewType()) + } + + assert.Equal(t, tc.want.Project(), got.Project()) + assert.Equal(t, tc.want.PreviewType(), got.PreviewType()) + assert.Equal(t, tc.want.ArchiveExtractionStatus(), got.ArchiveExtractionStatus()) + + dbGot, err := db.Asset.FindByID(ctx, got.ID()) + assert.NoError(t, err) + assert.Equal(t, tc.want.Project(), dbGot.Project()) + assert.Equal(t, tc.want.PreviewType(), dbGot.PreviewType()) + assert.Equal(t, tc.want.ArchiveExtractionStatus(), dbGot.ArchiveExtractionStatus()) + + assert.Equal(t, tc.wantFile, gotFile) + }) + } +} + +func TestAsset_Update(t *testing.T) { + uid := accountdomain.NewUserID() + ws := workspace.New().NewID().MustBuild() + pid1 := id.NewProjectID() + p := project.New().ID(pid1).Workspace(ws.ID()).MustBuild() + + var pti = asset.PreviewTypeImage + var ptg = asset.PreviewTypeGeo + + aid1 := id.NewAssetID() + thid := id.NewThreadID() + a1 := asset.New().ID(aid1).Project(pid1).NewUUID(). + CreatedByUser(uid).Size(1000).Thread(thid).MustBuild() + a1Updated := asset.New().ID(aid1).Project(pid1).UUID(a1.UUID()). 
+ CreatedByUser(uid).Size(1000).Thread(thid).Type(&pti).MustBuild() + + pid2 := id.NewProjectID() + aid2 := id.NewAssetID() + a2 := asset.New().ID(aid2).Project(pid2).NewUUID(). + CreatedByUser(uid).Size(1000).Thread(id.NewThreadID()).MustBuild() + acop := &accountusecase.Operator{ + User: &uid, + OwningWorkspaces: []accountdomain.WorkspaceID{ws.ID()}, + } + op := &usecase.Operator{ + AcOperator: acop, + OwningProjects: []id.ProjectID{pid1}, + Integration: nil, + } + + type args struct { + upp interfaces.UpdateAssetParam + operator *usecase.Operator + } + tests := []struct { + name string + seeds []*asset.Asset + args args + want *asset.Asset + wantErr error + }{ + { + name: "invalid operator", + seeds: []*asset.Asset{a1, a2}, + args: args{ + upp: interfaces.UpdateAssetParam{ + AssetID: aid1, + PreviewType: &pti, + }, + operator: &usecase.Operator{ + AcOperator: &accountusecase.Operator{}, + }, + }, + want: nil, + wantErr: interfaces.ErrInvalidOperator, + }, + { + name: "operation denied", + seeds: []*asset.Asset{a1, a2}, + args: args{ + upp: interfaces.UpdateAssetParam{ + AssetID: aid1, + PreviewType: &pti, + }, + operator: &usecase.Operator{ + AcOperator: &accountusecase.Operator{ + User: &uid, + ReadableWorkspaces: []accountdomain.WorkspaceID{ws.ID()}, + }, + }, + }, + want: nil, + wantErr: interfaces.ErrOperationDenied, + }, + { + name: "update", + seeds: []*asset.Asset{a1, a2}, + args: args{ + upp: interfaces.UpdateAssetParam{ + AssetID: aid1, + PreviewType: &pti, + }, + operator: op, + }, + want: a1Updated, + wantErr: nil, + }, + { + name: "update not found", + seeds: []*asset.Asset{a1, a2}, + args: args{ + upp: interfaces.UpdateAssetParam{ + AssetID: idx.ID[id.Asset]{}, + PreviewType: &ptg, + }, + operator: op, + }, + want: nil, + wantErr: rerror.ErrNotFound, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + // t.Parallel() + + ctx := context.Background() + db := assetmemory.New() + + err := db.Project.Save(ctx, p) + 
assert.NoError(t, err) + for _, p := range tc.seeds { + err := db.Asset.Save(ctx, p.Clone()) + assert.NoError(t, err) + } + assetUC := NewAsset(db, &gateway.Container{}) + + got, err := assetUC.Update(ctx, tc.args.upp, tc.args.operator) + if tc.wantErr != nil { + assert.Equal(t, tc.wantErr, err) + return + } + assert.NoError(t, err) + assert.Equal(t, tc.want, got) + }) + } +} + +func TestAsset_UpdateFiles(t *testing.T) { + uid := accountdomain.NewUserID() + assetID1, uuid1 := asset.NewID(), "5130c89f-8f67-4766-b127-49ee6796d464" + assetID2, uuid2 := asset.NewID(), uuid.New().String() + ws := workspace.New().NewID().MustBuild() + proj := project.New().NewID().Workspace(ws.ID()).MustBuild() + + thid := id.NewThreadID() + sp := lo.ToPtr(asset.ArchiveExtractionStatusPending) + a1 := asset.New(). + ID(assetID1). + Project(proj.ID()). + CreatedByUser(uid). + Size(1000). + UUID(uuid1). + Thread(thid). + ArchiveExtractionStatus(sp). + MustBuild() + a1f := asset.NewFile().Name("xxx").Path("/xxx.zip").GuessContentType().Build() + a2 := asset.New(). + ID(assetID2). + Project(proj.ID()). + CreatedByUser(uid). + Size(1000). + UUID(uuid2). + Thread(id.NewThreadID()). + ArchiveExtractionStatus(sp). 
+ MustBuild() + a2f := asset.NewFile().Build() + acop := &accountusecase.Operator{ + User: &uid, + OwningWorkspaces: []accountdomain.WorkspaceID{ws.ID()}, + } + op := &usecase.Operator{ + AcOperator: acop, + OwningProjects: []id.ProjectID{proj.ID()}, + } + + tests := []struct { + name string + operator *usecase.Operator + seedAssets []*asset.Asset + seedFiles map[asset.ID]*asset.File + seedProjects []*project.Project + prepareFileFunc func() afero.Fs + assetID id.AssetID + status *asset.ArchiveExtractionStatus + want *asset.Asset + wantFile *asset.File + wantErr error + }{ + { + name: "invalid operator", + operator: &usecase.Operator{ + AcOperator: &accountusecase.Operator{}, + }, + prepareFileFunc: func() afero.Fs { + return mockFs() + }, + assetID: assetID1, + want: nil, + wantErr: interfaces.ErrInvalidOperator, + }, + { + name: "not found", + operator: op, + prepareFileFunc: func() afero.Fs { + return mockFs() + }, + assetID: assetID1, + want: nil, + wantErr: rerror.ErrNotFound, + }, + { + name: "operation denied", + operator: &usecase.Operator{ + AcOperator: &accountusecase.Operator{ + User: &uid, + ReadableWorkspaces: []accountdomain.WorkspaceID{ws.ID()}, + }, + }, + seedAssets: []*asset.Asset{a1.Clone(), a2.Clone()}, + seedFiles: map[asset.ID]*asset.File{ + a1.ID(): a1f, + a2.ID(): a2f, + }, + seedProjects: []*project.Project{proj}, + prepareFileFunc: func() afero.Fs { + return mockFs() + }, + assetID: assetID1, + status: sp, + want: nil, + wantErr: interfaces.ErrOperationDenied, + }, + { + name: "update asset not found", + operator: op, + prepareFileFunc: func() afero.Fs { + return mockFs() + }, + assetID: assetID1, + want: nil, + wantErr: rerror.ErrNotFound, + }, + { + name: "update file not found", + operator: op, + seedAssets: []*asset.Asset{a1.Clone(), a2.Clone()}, + seedFiles: map[asset.ID]*asset.File{ + a1.ID(): a1f, + a2.ID(): a2f, + }, + prepareFileFunc: func() afero.Fs { + return afero.NewMemMapFs() + }, + assetID: assetID1, + status: 
lo.ToPtr(asset.ArchiveExtractionStatusFailed), + want: nil, + wantErr: gateway.ErrFileNotFound, + }, + { + name: "update", + operator: op, + seedAssets: []*asset.Asset{a1.Clone(), a2.Clone()}, + seedFiles: map[asset.ID]*asset.File{ + a1.ID(): a1f, + a2.ID(): a2f, + }, + seedProjects: []*project.Project{proj}, + prepareFileFunc: func() afero.Fs { + return mockFs() + }, + assetID: assetID1, + status: sp, + want: asset.New(). + ID(assetID1). + Project(proj.ID()). + CreatedByUser(uid). + Size(1000). + UUID(uuid1). + Thread(thid). + ArchiveExtractionStatus(sp). + MustBuild(), + wantFile: asset.NewFile().Name("xxx").Path(path.Join("xxx.zip")).GuessContentType().Children([]*asset.File{ + asset.NewFile().Name("xxx").Path(path.Join("xxx")).Dir().Children([]*asset.File{ + asset.NewFile().Name("yyy").Path(path.Join("xxx", "yyy")).Dir().Children([]*asset.File{ + asset.NewFile().Name("hello.txt").Path(path.Join("xxx", "yyy", "hello.txt")).GuessContentType().Build(), + }).Build(), + asset.NewFile().Name("zzz.txt").Path(path.Join("xxx", "zzz.txt")).GuessContentType().Build(), + }).Build(), + }).Build(), + wantErr: nil, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + // t.Parallel() + + ctx := context.Background() + db := assetmemory.New() + + fileGw := lo.Must(assetfs.NewFile(tc.prepareFileFunc(), "")) + + err := db.Project.Save(ctx, proj) + assert.NoError(t, err) + for _, p := range tc.seedAssets { + err := db.Asset.Save(ctx, p.Clone()) + assert.Nil(t, err) + } + for id, f := range tc.seedFiles { + err := db.AssetFile.Save(ctx, id, f.Clone()) + assert.Nil(t, err) + } + for _, p := range tc.seedProjects { + err := db.Project.Save(ctx, p.Clone()) + assert.Nil(t, err) + } + + assetUC := Asset{ + repos: db, + gateways: &gateway.Container{ + File: fileGw, + }, + ignoreEvent: true, + } + got, err := assetUC.UpdateFiles(ctx, tc.assetID, tc.status, tc.operator) + if tc.wantErr != nil { + assert.Equal(t, tc.wantErr, err) + return + } + 
assert.NoError(t, err) + assert.Equal(t, tc.want, got) + + if tc.wantErr != nil { + gotf, err := db.AssetFile.FindByID(ctx, tc.assetID) + assert.NoError(t, err) + assert.Equal(t, tc.wantFile, gotf) + } + }) + } +} + +func TestAsset_Delete(t *testing.T) { + uid := accountdomain.NewUserID() + + ws := workspace.New().NewID().MustBuild() + proj1 := project.New().NewID().Workspace(ws.ID()).MustBuild() + aid1 := id.NewAssetID() + a1 := asset.New().ID(aid1).Project(proj1.ID()).NewUUID(). + CreatedByUser(uid).Size(1000).Thread(id.NewThreadID()).MustBuild() + + proj2 := project.New().NewID().MustBuild() + aid2 := id.NewAssetID() + a2 := asset.New().ID(aid2).Project(proj2.ID()).NewUUID(). + CreatedByUser(uid).Size(1000).Thread(id.NewThreadID()).MustBuild() + + acop := &accountusecase.Operator{ + User: &uid, + OwningWorkspaces: []accountdomain.WorkspaceID{ws.ID()}, + } + op := &usecase.Operator{ + AcOperator: acop, + OwningProjects: []id.ProjectID{proj1.ID()}, + } + type args struct { + id id.AssetID + operator *usecase.Operator + } + tests := []struct { + name string + seedsAsset []*asset.Asset + seedsProject []*project.Project + args args + want []*asset.Asset + mockAssetErr bool + wantErr error + }{ + { + name: "delete", + seedsAsset: []*asset.Asset{a1, a2}, + seedsProject: []*project.Project{proj1, proj2}, + args: args{ + id: aid1, + operator: op, + }, + want: nil, + wantErr: nil, + }, + { + name: "invalid operator", + seedsAsset: []*asset.Asset{a1, a2}, + args: args{ + id: id.NewAssetID(), + operator: &usecase.Operator{ + AcOperator: &accountusecase.Operator{}, + }, + }, + want: nil, + wantErr: interfaces.ErrInvalidOperator, + }, + { + name: "operation denied", + seedsAsset: []*asset.Asset{a1, a2}, + seedsProject: []*project.Project{proj1, proj2}, + args: args{ + id: aid1, + operator: &usecase.Operator{ + AcOperator: &accountusecase.Operator{ + User: &uid, + ReadableWorkspaces: []accountdomain.WorkspaceID{ws.ID()}, + }, + }, + }, + want: nil, + wantErr: 
interfaces.ErrOperationDenied, + }, + { + name: "delete not found", + seedsAsset: []*asset.Asset{a1, a2}, + args: args{ + id: id.NewAssetID(), + operator: op, + }, + want: nil, + wantErr: rerror.ErrNotFound, + }, + { + name: "delete od", + seedsAsset: []*asset.Asset{}, + args: args{ + id: aid2, + operator: op, + }, + want: nil, + wantErr: rerror.ErrNotFound, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + ctx := context.Background() + db := assetmemory.New() + + for _, p := range tc.seedsAsset { + err := db.Asset.Save(ctx, p.Clone()) + assert.NoError(t, err) + } + for _, p := range tc.seedsProject { + err := db.Project.Save(ctx, p.Clone()) + assert.NoError(t, err) + } + assetUC := Asset{ + repos: db, + gateways: &gateway.Container{}, + ignoreEvent: true, + } + id, err := assetUC.Delete(ctx, tc.args.id, tc.args.operator) + if tc.wantErr != nil { + assert.Equal(t, tc.wantErr, err) + return + } + assert.Equal(t, tc.args.id, id) + assert.NoError(t, err) + + _, err = db.Asset.FindByID(ctx, tc.args.id) + assert.Equal(t, rerror.ErrNotFound, err) + }) + } +} + +type file2 struct { + gateway.File +} + +func (f *file2) GetURL(*asset.Asset) string { + return "xxx" +} + +func TestAsset_GetURL(t *testing.T) { + uc := &Asset{ + gateways: &gateway.Container{ + File: &file2{}, + }, + } + assert.Equal(t, "xxx", uc.GetURL(nil)) +} + +func mockFs() afero.Fs { + files := map[string]string{ + path.Join("assets", "51", "30c89f-8f67-4766-b127-49ee6796d464", "xxx.zip"): "xxx", + path.Join("assets", "51", "30c89f-8f67-4766-b127-49ee6796d464", "xxx", "zzz.txt"): "zzz", + path.Join("assets", "51", "30c89f-8f67-4766-b127-49ee6796d464", "xxx", "yyy", "hello.txt"): "hello", + path.Join("plugins", "aaa~1.0.0", "foo.js"): "bar", + path.Join("published", "s.json"): "{}", + } + + fs := afero.NewMemMapFs() + for name, content := range files { + f, _ := fs.Create(name) + _, _ = f.WriteString(content) + _ = f.Close() + } + return fs +} + +// 
mockRunner implements gateway.TaskRunner +type mockRunner struct{} + +func NewMockRunner() gateway.TaskRunner { + return &mockRunner{} +} + +func (r *mockRunner) Run(context.Context, task.Payload) error { + return nil +} + +func (r *mockRunner) Retry(context.Context, string) error { + return nil +} + +func Test_detectPreviewType(t *testing.T) { + tests := []struct { + name string + files []gateway.FileEntry + want *asset.PreviewType + }{ + { + name: "MVT", + files: []gateway.FileEntry{ + { + Name: "test/0/123.mvt", + Size: 123, + }, + }, + want: lo.ToPtr(asset.PreviewTypeGeoMvt), + }, + { + name: "3d tiles", + files: []gateway.FileEntry{ + { + Name: "test/tileset.json", + Size: 123, + }, + }, + want: lo.ToPtr(asset.PreviewTypeGeo3dTiles), + }, + { + name: "Unknown", + files: []gateway.FileEntry{ + { + Name: "test.jpg", + Size: 123, + }, + }, + want: nil, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, detectPreviewType(tt.files)) + }) + } +} diff --git a/asset/assetusecase/assetinteractor/common.go b/asset/assetusecase/assetinteractor/common.go new file mode 100644 index 0000000..872fddd --- /dev/null +++ b/asset/assetusecase/assetinteractor/common.go @@ -0,0 +1,110 @@ +package interactor + +import ( + "context" + + "github.com/reearth/reearthx/account/accountdomain" + "github.com/reearth/reearthx/account/accountusecase/accountgateway" + "github.com/reearth/reearthx/account/accountusecase/accountinteractor" + "github.com/reearth/reearthx/account/accountusecase/accountrepo" + id "github.com/reearth/reearthx/asset/assetdomain" + "github.com/reearth/reearthx/asset/assetdomain/event" + "github.com/reearth/reearthx/asset/assetdomain/operator" + "github.com/reearth/reearthx/asset/assetdomain/project" + "github.com/reearth/reearthx/asset/assetdomain/task" + gateway "github.com/reearth/reearthx/asset/assetusecase/assetgateway" + interfaces "github.com/reearth/reearthx/asset/assetusecase/assetinterfaces" + repo 
"github.com/reearth/reearthx/asset/assetusecase/assetrepo" + "github.com/reearth/reearthx/log" + "github.com/reearth/reearthx/util" +) + +type ContainerConfig struct { + SignupSecret string + AuthSrvUIDomain string +} + +func New(r *repo.Container, g *gateway.Container, + ar *accountrepo.Container, ag *accountgateway.Container, + config ContainerConfig) interfaces.Container { + return interfaces.Container{ + Asset: NewAsset(r, g), + Workspace: accountinteractor.NewWorkspace(ar, nil), + User: accountinteractor.NewMultiUser(ar, ag, config.SignupSecret, config.AuthSrvUIDomain, ar.Users), + } +} + +type Event struct { + Project *project.Project + Workspace accountdomain.WorkspaceID + Type event.Type + Operator operator.Operator + Object any + WebhookObject any +} + +func (e *Event) EventProject() *event.Project { + if e == nil || e.Project == nil { + return nil + } + return &event.Project{ + ID: e.Project.ID().String(), + Alias: e.Project.Alias(), + } +} + +func createEvent(ctx context.Context, r *repo.Container, g *gateway.Container, e Event) (*event.Event[any], error) { + ev, err := event.New[any]().NewID().Object(e.Object).Type(e.Type).Project(e.EventProject()).Timestamp(util.Now()).Operator(e.Operator).Build() + if err != nil { + return nil, err + } + + if err := r.Event.Save(ctx, ev); err != nil { + return nil, err + } + + if err := webhook(ctx, r, g, e, ev); err != nil { + return nil, err + } + + return ev, nil +} + +func webhook(ctx context.Context, r *repo.Container, g *gateway.Container, e Event, ev *event.Event[any]) error { + if g == nil || g.TaskRunner == nil { + log.Infof("asset: webhook was not sent because task runner is not configured") + return nil + } + + ws, err := r.Workspace.FindByID(ctx, e.Workspace) + if err != nil { + return err + } + integrationIDs := ws.Members().IntegrationIDs() + + ids := make([]id.IntegrationID, len(integrationIDs)) + for i, iid := range integrationIDs { + id, err := id.IntegrationIDFrom(iid.String()) + if err != nil { + 
return err + } + ids[i] = id + } + + integrations, err := r.Integration.FindByIDs(ctx, ids) + if err != nil { + return err + } + + for _, w := range integrations.ActiveWebhooks(ev.Type()) { + if err := g.TaskRunner.Run(ctx, task.WebhookPayload{ + Webhook: w, + Event: ev, + Override: e.WebhookObject, + }.Payload()); err != nil { + return err + } + } + + return nil +} diff --git a/asset/assetusecase/assetinteractor/common_test.go b/asset/assetusecase/assetinteractor/common_test.go new file mode 100644 index 0000000..b3cfa7f --- /dev/null +++ b/asset/assetusecase/assetinteractor/common_test.go @@ -0,0 +1,145 @@ +package interactor + +import ( + "context" + "net/url" + "testing" + "time" + + "github.com/golang/mock/gomock" + "github.com/reearth/reearth-cms/server/internal/infrastructure/memory" + "github.com/reearth/reearth-cms/server/internal/usecase/gateway" + "github.com/reearth/reearth-cms/server/internal/usecase/gateway/gatewaymock" + "github.com/reearth/reearth-cms/server/internal/usecase/interfaces" + "github.com/reearth/reearth-cms/server/pkg/asset" + "github.com/reearth/reearth-cms/server/pkg/event" + "github.com/reearth/reearth-cms/server/pkg/integration" + "github.com/reearth/reearth-cms/server/pkg/operator" + "github.com/reearth/reearth-cms/server/pkg/project" + "github.com/reearth/reearth-cms/server/pkg/task" + "github.com/reearth/reearthx/account/accountdomain" + "github.com/reearth/reearthx/account/accountdomain/user" + "github.com/reearth/reearthx/account/accountdomain/workspace" + "github.com/reearth/reearthx/account/accountusecase/accountinteractor" + "github.com/reearth/reearthx/account/accountusecase/accountrepo" + "github.com/reearth/reearthx/util" + "github.com/samber/lo" + "github.com/stretchr/testify/assert" +) + +func TestCommon_createEvent(t *testing.T) { + now := util.Now() + defer util.MockNow(now)() + uID := user.NewID() + a := asset.New().NewID().Thread(asset.NewThreadID()). 
+ Project(project.NewID()).Size(100).CreatedByUser(uID).NewUUID().MustBuild() + ws := workspace.New().NewID().MustBuild() + wh := integration.NewWebhookBuilder().NewID().Name("aaa"). + Url(lo.Must(url.Parse("https://example.com"))).Active(true). + Trigger(integration.WebhookTrigger{event.AssetCreate: true}).MustBuild() + integration := integration.New().NewID().Developer(uID).Name("xxx").Webhook([]*integration.Webhook{wh}).MustBuild() + iid, err := accountdomain.IntegrationIDFrom(integration.ID().String()) + assert.NoError(t, err) + lo.Must0(ws.Members().AddIntegration(iid, workspace.RoleOwner, uID)) + + db := memory.New() + mockCtrl := gomock.NewController(t) + defer mockCtrl.Finish() + mRunner := gatewaymock.NewMockTaskRunner(mockCtrl) + gw := &gateway.Container{ + TaskRunner: mRunner, + } + + ctx := context.Background() + lo.Must0(db.Workspace.Save(ctx, ws)) + lo.Must0(db.Integration.Save(ctx, integration)) + mRunner.EXPECT().Run(ctx, gomock.Any()).Times(1).Return(nil) + + ev, err := createEvent(ctx, db, gw, Event{ + Workspace: ws.ID(), + Type: event.Type(event.AssetCreate), + Object: a, + Operator: operator.OperatorFromUser(uID), + }) + assert.NoError(t, err) + expectedEv := event.New[any]().ID(ev.ID()).Timestamp(now).Type(event.AssetCreate).Operator(operator.OperatorFromUser(uID)).Object(a).MustBuild() + assert.Equal(t, expectedEv, ev) + + ev, err = createEvent(ctx, db, gw, Event{ + Workspace: ws.ID(), + Type: event.Type(event.AssetCreate), + Object: a, + Operator: operator.Operator{}, + }) + assert.ErrorIs(t, err, event.ErrInvalidID) + assert.Nil(t, ev) +} + +func TestCommon_webhook(t *testing.T) { + now := time.Now() + uID := user.NewID() + a := asset.New().NewID().Thread(asset.NewThreadID()).NewUUID(). + Project(project.NewID()).Size(100).CreatedByUser(uID). + MustBuild() + ws := workspace.New().NewID().MustBuild() + wh := integration.NewWebhookBuilder().NewID().Name("aaa"). + Url(lo.Must(url.Parse("https://example.com"))).Active(true). 
+ Trigger(integration.WebhookTrigger{event.AssetCreate: true}).MustBuild() + integration := integration.New().NewID().Developer(uID).Name("xxx"). + Webhook([]*integration.Webhook{wh}).MustBuild() + iid, err := accountdomain.IntegrationIDFrom(integration.ID().String()) + assert.NoError(t, err) + lo.Must0(ws.Members().AddIntegration(iid, workspace.RoleOwner, uID)) + ev := event.New[any]().NewID().Timestamp(now).Type(event.AssetCreate). + Operator(operator.OperatorFromUser(uID)).Object(a).MustBuild() + + db := memory.New() + mockCtrl := gomock.NewController(t) + defer mockCtrl.Finish() + mRunner := gatewaymock.NewMockTaskRunner(mockCtrl) + gw := &gateway.Container{ + TaskRunner: mRunner, + } + + ctx := context.Background() + // no workspace + err = webhook(ctx, db, gw, Event{Workspace: ws.ID()}, ev) + assert.Error(t, err) + + lo.Must0(db.Workspace.Save(ctx, ws)) + // no webhook call since no integrtaion + mRunner.EXPECT().Run(ctx, task.WebhookPayload{ + Webhook: wh, + Event: ev, + }.Payload()).Times(0).Return(nil) + err = webhook(ctx, db, gw, Event{Workspace: ws.ID()}, ev) + assert.NoError(t, err) + + lo.Must0(db.Integration.Save(ctx, integration)) + mRunner.EXPECT().Run(ctx, task.WebhookPayload{ + Webhook: wh, + Event: ev, + }.Payload()).Times(1).Return(nil) + err = webhook(ctx, db, gw, Event{Workspace: ws.ID()}, ev) + assert.NoError(t, err) +} + +func TestNew(t *testing.T) { + uc := New(nil, nil, &accountrepo.Container{}, nil, ContainerConfig{}) + assert.NotNil(t, uc) + assert.Equal(t, interfaces.Container{ + Asset: NewAsset(nil, nil), + Workspace: accountinteractor.NewWorkspace(&accountrepo.Container{}, nil), + User: accountinteractor.NewUser(&accountrepo.Container{}, nil, "", ""), + Item: NewItem(nil, nil), + View: NewView(nil, nil), + Project: NewProject(nil, nil), + Request: NewRequest(nil, nil), + Model: NewModel(nil, nil), + Schema: NewSchema(nil, nil), + Integration: NewIntegration(nil, nil), + Thread: NewThread(nil, nil), + Group: NewGroup(nil, nil), + 
WorkspaceSettings: NewWorkspaceSettings(nil, nil), + }, uc) +} From ffffe1719b6b8a0ae0f0942431e104c077079e78 Mon Sep 17 00:00:00 2001 From: shumon84 Date: Mon, 9 Sep 2024 02:36:08 +0900 Subject: [PATCH 07/10] feat(asset): add assetusecase package --- asset/assetusecase/operator.go | 184 +++++++++++++++++++ asset/assetusecase/operator_test.go | 265 ++++++++++++++++++++++++++++ 2 files changed, 449 insertions(+) create mode 100644 asset/assetusecase/operator.go create mode 100644 asset/assetusecase/operator_test.go diff --git a/asset/assetusecase/operator.go b/asset/assetusecase/operator.go new file mode 100644 index 0000000..f50ecca --- /dev/null +++ b/asset/assetusecase/operator.go @@ -0,0 +1,184 @@ +package assetusecase + +import ( + "github.com/reearth/reearthx/account/accountdomain" + "github.com/reearth/reearthx/account/accountdomain/user" + "github.com/reearth/reearthx/account/accountdomain/workspace" + "github.com/reearth/reearthx/account/accountusecase" + id "github.com/reearth/reearthx/asset/assetdomain" + "github.com/reearth/reearthx/asset/assetdomain/integration" + "github.com/reearth/reearthx/asset/assetdomain/operator" + "github.com/reearth/reearthx/asset/assetdomain/project" +) + +type Operator struct { + Integration *integration.ID + Machine bool + Lang string + ReadableProjects project.IDList + WritableProjects project.IDList + OwningProjects project.IDList + MaintainableProjects project.IDList + + AcOperator *accountusecase.Operator +} + +type Ownable interface { + User() *accountdomain.UserID + Integration() *id.IntegrationID + Project() id.ProjectID +} + +func (o *Operator) Workspaces(r workspace.Role) []accountdomain.WorkspaceID { + if o == nil { + return nil + } + if r == workspace.RoleReader { + return o.AcOperator.ReadableWorkspaces + } + if r == workspace.RoleWriter { + return o.AcOperator.WritableWorkspaces + } + if r == workspace.RoleMaintainer { + return o.AcOperator.MaintainableWorkspaces + } + if r == workspace.RoleOwner { + return 
o.AcOperator.OwningWorkspaces + } + return nil +} + +func (o *Operator) AllReadableWorkspaces() user.WorkspaceIDList { + if o == nil { + return nil + } + return append(o.AcOperator.ReadableWorkspaces, o.AllWritableWorkspaces()...) +} + +func (o *Operator) AllWritableWorkspaces() user.WorkspaceIDList { + return append(o.AcOperator.WritableWorkspaces, o.AllMaintainingWorkspaces()...) +} + +func (o *Operator) AllMaintainingWorkspaces() user.WorkspaceIDList { + return append(o.AcOperator.MaintainableWorkspaces, o.AllOwningWorkspaces()...) +} + +func (o *Operator) AllOwningWorkspaces() user.WorkspaceIDList { + return o.AcOperator.OwningWorkspaces +} + +func (o *Operator) IsReadableWorkspace(workspace ...accountdomain.WorkspaceID) bool { + return o.AllReadableWorkspaces().Intersect(workspace).Len() > 0 +} + +func (o *Operator) IsWritableWorkspace(workspace ...accountdomain.WorkspaceID) bool { + return o.AllWritableWorkspaces().Intersect(workspace).Len() > 0 +} + +func (o *Operator) IsMaintainingWorkspace(workspace ...accountdomain.WorkspaceID) bool { + return o.AllMaintainingWorkspaces().Intersect(workspace).Len() > 0 +} + +func (o *Operator) IsOwningWorkspace(workspace ...accountdomain.WorkspaceID) bool { + return o.AllOwningWorkspaces().Intersect(workspace).Len() > 0 +} + +func (o *Operator) AddNewWorkspace(workspace accountdomain.WorkspaceID) { + o.AcOperator.OwningWorkspaces = append(o.AcOperator.OwningWorkspaces, workspace) +} + +func (o *Operator) Projects(r workspace.Role) project.IDList { + if o == nil { + return nil + } + if r == workspace.RoleReader { + return o.ReadableProjects + } + if r == workspace.RoleWriter { + return o.WritableProjects + } + if r == workspace.RoleMaintainer { + return o.MaintainableProjects + } + if r == workspace.RoleOwner { + return o.OwningProjects + } + return nil +} + +func (o *Operator) AllReadableProjects() project.IDList { + return append(o.ReadableProjects, o.AllWritableProjects()...) 
+} + +func (o *Operator) AllWritableProjects() project.IDList { + return append(o.WritableProjects, o.AllMaintainableProjects()...) +} + +func (o *Operator) AllMaintainableProjects() project.IDList { + return append(o.MaintainableProjects, o.AllOwningProjects()...) +} + +func (o *Operator) AllOwningProjects() project.IDList { + return o.OwningProjects +} + +func (o *Operator) IsReadableProject(projects ...project.ID) bool { + return o.AllReadableProjects().Intersect(projects).Len() > 0 +} + +func (o *Operator) IsWritableProject(projects ...project.ID) bool { + return o.AllWritableProjects().Intersect(projects).Len() > 0 +} + +func (o *Operator) IsMaintainingProject(projects ...project.ID) bool { + return o.AllMaintainableProjects().Intersect(projects).Len() > 0 +} + +func (o *Operator) IsOwningProject(projects ...project.ID) bool { + return o.AllOwningProjects().Intersect(projects).Len() > 0 +} + +func (o *Operator) AddNewProject(p project.ID) { + o.OwningProjects = append(o.OwningProjects, p) +} + +func (o *Operator) Operator() operator.Operator { + var eOp operator.Operator + if o.AcOperator.User != nil { + eOp = operator.OperatorFromUser(*o.AcOperator.User) + } + if o.Integration != nil { + eOp = operator.OperatorFromIntegration(*o.Integration) + } + if o.Machine { + eOp = operator.OperatorFromMachine() + } + return eOp +} + +func (o *Operator) CanUpdate(obj Ownable) bool { + isWriter := o.IsWritableProject(obj.Project()) + isMaintainer := o.IsMaintainingProject(obj.Project()) + return isMaintainer || (isWriter && o.Owns(obj)) || o.Machine +} + +func (o *Operator) Owns(obj Ownable) bool { + return (o.AcOperator.User != nil && obj.User() != nil && *o.AcOperator.User == *obj.User()) || + (o.Integration != nil && obj.Integration() != nil && *o.Integration == *obj.Integration()) +} + +func (o *Operator) RoleByProject(pid id.ProjectID) workspace.Role { + if o.IsOwningProject(pid) { + return workspace.RoleOwner + } + if o.IsMaintainingProject(pid) { + return 
workspace.RoleMaintainer + } + if o.IsWritableProject(pid) { + return workspace.RoleWriter + } + if o.IsReadableProject(pid) { + return workspace.RoleReader + } + return "" +} diff --git a/asset/assetusecase/operator_test.go b/asset/assetusecase/operator_test.go new file mode 100644 index 0000000..37caa14 --- /dev/null +++ b/asset/assetusecase/operator_test.go @@ -0,0 +1,265 @@ +package assetusecase + +import ( + "testing" + + "github.com/reearth/reearthx/account/accountdomain" + "github.com/reearth/reearthx/account/accountdomain/user" + "github.com/reearth/reearthx/account/accountdomain/workspace" + "github.com/reearth/reearthx/account/accountusecase" + id "github.com/reearth/reearthx/asset/assetdomain" + "github.com/reearth/reearthx/asset/assetdomain/project" + "github.com/stretchr/testify/assert" +) + +func TestOperator_Workspaces(t *testing.T) { + u := accountdomain.NewUserID() + w1, w2, w3, w4 := accountdomain.NewWorkspaceID(), accountdomain.NewWorkspaceID(), accountdomain.NewWorkspaceID(), accountdomain.NewWorkspaceID() + op := Operator{ + AcOperator: &accountusecase.Operator{ + User: &u, + ReadableWorkspaces: id.WorkspaceIDList{w1}, + WritableWorkspaces: id.WorkspaceIDList{w2}, + MaintainableWorkspaces: id.WorkspaceIDList{w3}, + OwningWorkspaces: id.WorkspaceIDList{w4}, + }, + Integration: nil, + } + + assert.Equal(t, op.Workspaces(workspace.RoleReader), []accountdomain.WorkspaceID{w1}) + assert.Equal(t, op.Workspaces(workspace.RoleWriter), []accountdomain.WorkspaceID{w2}) + assert.Equal(t, op.Workspaces(workspace.RoleMaintainer), []accountdomain.WorkspaceID{w3}) + assert.Equal(t, op.Workspaces(workspace.RoleOwner), []accountdomain.WorkspaceID{w4}) + assert.Nil(t, op.Workspaces("")) + assert.Nil(t, (*Operator)(nil).Workspaces("")) + + assert.Equal(t, op.AllReadableWorkspaces(), id.WorkspaceIDList{w1, w2, w3, w4}) + assert.Equal(t, op.AllWritableWorkspaces(), id.WorkspaceIDList{w2, w3, w4}) + assert.Equal(t, op.AllMaintainingWorkspaces(), 
id.WorkspaceIDList{w3, w4}) + assert.Equal(t, op.AllOwningWorkspaces(), id.WorkspaceIDList{w4}) + + assert.True(t, op.IsReadableWorkspace(w1)) + assert.True(t, op.IsReadableWorkspace(w2)) + assert.True(t, op.IsReadableWorkspace(w3)) + assert.True(t, op.IsReadableWorkspace(w4)) + assert.False(t, op.IsReadableWorkspace(accountdomain.NewWorkspaceID())) + + assert.False(t, op.IsWritableWorkspace(w1)) + assert.True(t, op.IsWritableWorkspace(w2)) + assert.True(t, op.IsWritableWorkspace(w3)) + assert.True(t, op.IsWritableWorkspace(w4)) + assert.False(t, op.IsWritableWorkspace(accountdomain.NewWorkspaceID())) + + assert.False(t, op.IsMaintainingWorkspace(w1)) + assert.False(t, op.IsMaintainingWorkspace(w2)) + assert.True(t, op.IsMaintainingWorkspace(w3)) + assert.True(t, op.IsMaintainingWorkspace(w4)) + assert.False(t, op.IsMaintainingWorkspace(accountdomain.NewWorkspaceID())) + + assert.False(t, op.IsOwningWorkspace(w1)) + assert.False(t, op.IsOwningWorkspace(w2)) + assert.False(t, op.IsOwningWorkspace(w3)) + assert.True(t, op.IsOwningWorkspace(w4)) + assert.False(t, op.IsOwningWorkspace(accountdomain.NewWorkspaceID())) + + w5 := accountdomain.NewWorkspaceID() + op.AddNewWorkspace(w5) + assert.Equal(t, user.WorkspaceIDList{w4, w5}, op.AcOperator.OwningWorkspaces) +} + +func TestOperator_Projects(t *testing.T) { + u := accountdomain.NewUserID() + p1, p2, p3, p4 := id.NewProjectID(), id.NewProjectID(), id.NewProjectID(), id.NewProjectID() + op := Operator{ + AcOperator: &accountusecase.Operator{ + User: &u, + }, + Integration: nil, + ReadableProjects: id.ProjectIDList{p1}, + WritableProjects: id.ProjectIDList{p2}, + MaintainableProjects: id.ProjectIDList{p3}, + OwningProjects: id.ProjectIDList{p4}, + } + + assert.Equal(t, op.Projects(workspace.RoleReader), project.IDList{p1}) + assert.Equal(t, op.Projects(workspace.RoleWriter), project.IDList{p2}) + assert.Equal(t, op.Projects(workspace.RoleMaintainer), project.IDList{p3}) + assert.Equal(t, op.Projects(workspace.RoleOwner), 
project.IDList{p4}) + assert.Nil(t, ((*Operator)(nil)).Projects(workspace.RoleReader)) + assert.Nil(t, op.Projects("")) + + assert.Equal(t, op.AllReadableProjects(), id.ProjectIDList{p1, p2, p3, p4}) + assert.Equal(t, op.AllWritableProjects(), id.ProjectIDList{p2, p3, p4}) + assert.Equal(t, op.AllMaintainableProjects(), id.ProjectIDList{p3, p4}) + assert.Equal(t, op.AllOwningProjects(), id.ProjectIDList{p4}) + + assert.True(t, op.IsReadableProject(p1)) + assert.True(t, op.IsReadableProject(p2)) + assert.True(t, op.IsReadableProject(p3)) + assert.True(t, op.IsReadableProject(p4)) + assert.False(t, op.IsReadableProject(id.NewProjectID())) + + assert.False(t, op.IsWritableProject(p1)) + assert.True(t, op.IsWritableProject(p2)) + assert.True(t, op.IsWritableProject(p3)) + assert.True(t, op.IsWritableProject(p4)) + assert.False(t, op.IsWritableProject(id.NewProjectID())) + + assert.False(t, op.IsMaintainingProject(p1)) + assert.False(t, op.IsMaintainingProject(p2)) + assert.True(t, op.IsMaintainingProject(p3)) + assert.True(t, op.IsMaintainingProject(p4)) + assert.False(t, op.IsMaintainingProject(id.NewProjectID())) + + assert.False(t, op.IsOwningProject(p1)) + assert.False(t, op.IsOwningProject(p2)) + assert.False(t, op.IsOwningProject(p3)) + assert.True(t, op.IsOwningProject(p4)) + assert.False(t, op.IsOwningProject(id.NewProjectID())) + + p5 := id.NewProjectID() + op.AddNewProject(p5) + assert.Equal(t, project.IDList{p4, p5}, op.OwningProjects) +} + +func TestOperator_Operator(t *testing.T) { + uId := accountdomain.NewUserID() + op := Operator{ + Integration: nil, + AcOperator: &accountusecase.Operator{ + User: &uId, + }, + } + + eOp := op.Operator() + + assert.NotNil(t, eOp.User()) + assert.Nil(t, eOp.Integration()) + assert.Equal(t, &uId, eOp.User()) + assert.False(t, eOp.Machine()) + + iId := id.NewIntegrationID() + + op = Operator{ + Integration: &iId, + AcOperator: &accountusecase.Operator{ + User: nil, + }, + } + + eOp = op.Operator() + + assert.Nil(t, 
eOp.User()) + assert.NotNil(t, eOp.Integration()) + assert.Equal(t, &iId, eOp.Integration()) + assert.False(t, eOp.Machine()) + + op = Operator{ + AcOperator: &accountusecase.Operator{}, + Machine: true, + } + + eOp = op.Operator() + + assert.Nil(t, eOp.User()) + assert.Nil(t, eOp.Integration()) + assert.True(t, eOp.Machine()) +} + +type ownable struct { + U *accountdomain.UserID + I *id.IntegrationID + P id.ProjectID +} + +func (o ownable) User() *accountdomain.UserID { + return o.U +} +func (o ownable) Integration() *id.IntegrationID { + return o.I +} +func (o ownable) Project() id.ProjectID { + return o.P +} + +func TestOperator_Checks(t *testing.T) { + uId := accountdomain.NewUserID() + pId := id.NewProjectID() + op := Operator{ + AcOperator: &accountusecase.Operator{ + User: &uId, + }, + Integration: nil, + WritableProjects: project.IDList{pId}, + } + + obj := ownable{U: &uId, P: pId} + + assert.True(t, op.Owns(obj)) + + assert.True(t, op.CanUpdate(obj)) +} + +func TestOperator_RoleByProject(t *testing.T) { + // Owner + pid := id.NewProjectID() + uid := accountdomain.NewUserID() + operator := &Operator{ + AcOperator: &accountusecase.Operator{ + User: &uid, + }, + Integration: nil, + OwningProjects: project.IDList{pid}, + } + role := operator.RoleByProject(pid) + expectedRole := workspace.RoleOwner + assert.Equal(t, expectedRole, role) + + // Maintainer + pid2 := id.NewProjectID() + uid2 := accountdomain.NewUserID() + operator2 := &Operator{ + AcOperator: &accountusecase.Operator{ + User: &uid2, + }, + Integration: nil, + MaintainableProjects: project.IDList{pid2}, + } + role2 := operator2.RoleByProject(pid2) + expectedRole2 := workspace.RoleMaintainer + assert.Equal(t, expectedRole2, role2) + + // Writer + pid3 := id.NewProjectID() + uid3 := accountdomain.NewUserID() + operator3 := &Operator{ + AcOperator: &accountusecase.Operator{ + User: &uid3, + }, Integration: nil, + WritableProjects: project.IDList{pid3}, + } + role3 := operator3.RoleByProject(pid3) + 
expectedRole3 := workspace.RoleWriter + assert.Equal(t, expectedRole3, role3) + + // Reader + pid4 := id.NewProjectID() + uid4 := accountdomain.NewUserID() + operator4 := &Operator{ + AcOperator: &accountusecase.Operator{ + User: &uid4, + }, + Integration: nil, + ReadableProjects: project.IDList{pid4}, + } + role4 := operator4.RoleByProject(pid4) + expectedRole4 := workspace.RoleReader + assert.Equal(t, expectedRole4, role4) + + // No role + pid5 := id.NewProjectID() + operator5 := &Operator{} + role5 := operator5.RoleByProject(pid5) + expectedRole5 := workspace.Role("") + assert.Equal(t, expectedRole5, role5) +} From ac69bfef1400f3f118e1b0c1121586347e68a372 Mon Sep 17 00:00:00 2001 From: shumon84 Date: Mon, 9 Sep 2024 02:37:16 +0900 Subject: [PATCH 08/10] feat(asset): add assetinfrastructure package --- asset/assetinfrastructure/assetfs/common.go | 16 + asset/assetinfrastructure/assetfs/file.go | 208 ++++ .../assetinfrastructure/assetfs/file_test.go | 177 ++++ .../assetinfrastructure/assetmemory/asset.go | 106 ++ .../assetmemory/asset_file.go | 60 ++ .../assetmemory/asset_test.go | 456 +++++++++ .../assetmemory/container.go | 31 + .../assetmemory/container_test.go | 18 + .../assetinfrastructure/assetmemory/event.go | 46 + .../assetmemory/event_test.go | 61 ++ .../assetmemory/integration.go | 109 ++ .../assetmemory/integration_test.go | 376 +++++++ .../assetmemory/project.go | 178 ++++ .../assetmemory/project_test.go | 940 ++++++++++++++++++ .../assetinfrastructure/assetmemory/thread.go | 73 ++ .../assetmemory/thread_test.go | 315 ++++++ 16 files changed, 3170 insertions(+) create mode 100644 asset/assetinfrastructure/assetfs/common.go create mode 100644 asset/assetinfrastructure/assetfs/file.go create mode 100644 asset/assetinfrastructure/assetfs/file_test.go create mode 100644 asset/assetinfrastructure/assetmemory/asset.go create mode 100644 asset/assetinfrastructure/assetmemory/asset_file.go create mode 100644 asset/assetinfrastructure/assetmemory/asset_test.go 
create mode 100644 asset/assetinfrastructure/assetmemory/container.go create mode 100644 asset/assetinfrastructure/assetmemory/container_test.go create mode 100644 asset/assetinfrastructure/assetmemory/event.go create mode 100644 asset/assetinfrastructure/assetmemory/event_test.go create mode 100644 asset/assetinfrastructure/assetmemory/integration.go create mode 100644 asset/assetinfrastructure/assetmemory/integration_test.go create mode 100644 asset/assetinfrastructure/assetmemory/project.go create mode 100644 asset/assetinfrastructure/assetmemory/project_test.go create mode 100644 asset/assetinfrastructure/assetmemory/thread.go create mode 100644 asset/assetinfrastructure/assetmemory/thread_test.go diff --git a/asset/assetinfrastructure/assetfs/common.go b/asset/assetinfrastructure/assetfs/common.go new file mode 100644 index 0000000..15ecd10 --- /dev/null +++ b/asset/assetinfrastructure/assetfs/common.go @@ -0,0 +1,16 @@ +package assetfs + +import ( + "github.com/reearth/reearthx/i18n" + "github.com/reearth/reearthx/rerror" +) + +const ( + fileSizeLimit int64 = 10 * 1024 * 1024 * 1024 // 10GB + assetDir = "assets" + defaultBase = "http://localhost:8080/assets" +) + +var ( + ErrInvalidBaseURL = rerror.NewE(i18n.T("invalid base URL")) +) diff --git a/asset/assetinfrastructure/assetfs/file.go b/asset/assetinfrastructure/assetfs/file.go new file mode 100644 index 0000000..08b3be9 --- /dev/null +++ b/asset/assetinfrastructure/assetfs/file.go @@ -0,0 +1,208 @@ +package assetfs + +import ( + "context" + "errors" + "io" + "io/fs" + "net/url" + "os" + "path/filepath" + "strings" + + "github.com/google/uuid" + "github.com/reearth/reearthx/asset/assetdomain/asset" + "github.com/reearth/reearthx/asset/assetdomain/file" + gateway "github.com/reearth/reearthx/asset/assetusecase/assetgateway" + "github.com/reearth/reearthx/rerror" + "github.com/samber/lo" + "github.com/spf13/afero" +) + +type fileRepo struct { + fs afero.Fs + urlBase *url.URL +} + +func NewFile(fs afero.Fs, 
urlBase string) (gateway.File, error) { + var b *url.URL + if urlBase == "" { + urlBase = defaultBase + } + + var err error + b, err = url.Parse(urlBase) + if err != nil { + return nil, ErrInvalidBaseURL + } + + return &fileRepo{ + fs: fs, + urlBase: b, + }, nil +} + +func (f *fileRepo) ReadAsset(_ context.Context, fileUUID string, fn string) (io.ReadCloser, error) { + if fileUUID == "" || fn == "" { + return nil, rerror.ErrNotFound + } + + p := getFSObjectPath(fileUUID, fn) + + return f.read(p) +} + +func (f *fileRepo) GetAssetFiles(_ context.Context, fileUUID string) ([]gateway.FileEntry, error) { + if fileUUID == "" { + return nil, rerror.ErrNotFound + } + + p := getFSObjectPath(fileUUID, "") + var fileEntries []gateway.FileEntry + err := afero.Walk(f.fs, p, func(path string, info fs.FileInfo, err error) error { + if err != nil { + return err + } + + if info.IsDir() { + return nil + } + + fileEntries = append(fileEntries, gateway.FileEntry{ + Name: strings.ReplaceAll(lo.Must1(filepath.Rel(p, path)), "\\", "/"), + Size: info.Size(), + }) + return nil + }) + if err != nil { + if errors.Is(err, afero.ErrFileNotFound) { + return nil, gateway.ErrFileNotFound + } else { + return nil, rerror.ErrInternalBy(err) + } + } + + if len(fileEntries) == 0 { + return nil, gateway.ErrFileNotFound + } + + return fileEntries, nil +} + +func (f *fileRepo) UploadAsset(_ context.Context, file *file.File) (string, int64, error) { + if file == nil { + return "", 0, gateway.ErrInvalidFile + } + if file.Size >= fileSizeLimit { + return "", 0, gateway.ErrFileTooLarge + } + + fileUUID := newUUID() + + p := getFSObjectPath(fileUUID, file.Name) + + size, err := f.upload(p, file.Content) + if err != nil { + return "", 0, err + } + + return fileUUID, size, nil +} + +func (f *fileRepo) DeleteAsset(_ context.Context, fileUUID string, fn string) error { + if fileUUID == "" || fn == "" { + return gateway.ErrInvalidFile + } + + p := getFSObjectPath(fileUUID, fn) + + return f.delete(p) +} + +func (f 
*fileRepo) GetURL(a *asset.Asset) string { + fileUUID := a.UUID() + return f.urlBase.JoinPath(assetDir, fileUUID[:2], fileUUID[2:], url.PathEscape(a.FileName())).String() +} + +func (f *fileRepo) IssueUploadAssetLink(ctx context.Context, param gateway.IssueUploadAssetParam) (*gateway.UploadAssetLink, error) { + return nil, gateway.ErrUnsupportedOperation +} + +func (f *fileRepo) UploadedAsset(ctx context.Context, u *asset.Upload) (*file.File, error) { + return nil, gateway.ErrUnsupportedOperation +} + +// helpers + +func (f *fileRepo) read(filename string) (io.ReadCloser, error) { + if filename == "" { + return nil, rerror.ErrNotFound + } + + file, err := f.fs.Open(filename) + if err != nil { + if os.IsNotExist(err) { + return nil, rerror.ErrNotFound + } + return nil, rerror.ErrInternalBy(err) + } + return file, nil +} + +func (f *fileRepo) upload(filename string, content io.Reader) (int64, error) { + if filename == "" || content == nil { + return 0, gateway.ErrFailedToUploadFile + } + + if fnd := filepath.Dir(filename); fnd != "" { + if err := f.fs.MkdirAll(fnd, 0755); err != nil { + return 0, rerror.ErrInternalBy(err) + } + } + + dest, err := f.fs.Create(filename) + if err != nil { + return 0, rerror.ErrInternalBy(err) + } + defer func() { + _ = dest.Close() + }() + + var size int64 + if size, err = io.Copy(dest, content); err != nil { + return 0, gateway.ErrFailedToUploadFile + } + + return size, nil +} + +func (f *fileRepo) delete(filename string) error { + if filename == "" { + return gateway.ErrFailedToUploadFile + } + + if err := f.fs.RemoveAll(filename); err != nil { + if os.IsNotExist(err) { + return nil + } + return rerror.ErrInternalBy(err) + } + return nil +} + +func getFSObjectPath(fileUUID, objectName string) string { + if fileUUID == "" || !IsValidUUID(fileUUID) { + return "" + } + + return filepath.Join(assetDir, fileUUID[:2], fileUUID[2:], objectName) +} + +func newUUID() string { + return uuid.NewString() +} + +func IsValidUUID(fileUUID string) 
bool { + _, err := uuid.Parse(fileUUID) + return err == nil +} diff --git a/asset/assetinfrastructure/assetfs/file_test.go b/asset/assetinfrastructure/assetfs/file_test.go new file mode 100644 index 0000000..cb994c0 --- /dev/null +++ b/asset/assetinfrastructure/assetfs/file_test.go @@ -0,0 +1,177 @@ +package assetfs + +import ( + "context" + "io" + "net/url" + "os" + "path" + "path/filepath" + "strings" + "testing" + + "github.com/reearth/reearthx/account/accountdomain" + id "github.com/reearth/reearthx/asset/assetdomain" + "github.com/reearth/reearthx/asset/assetdomain/asset" + "github.com/reearth/reearthx/asset/assetdomain/file" + gateway "github.com/reearth/reearthx/asset/assetusecase/assetgateway" + "github.com/reearth/reearthx/rerror" + "github.com/spf13/afero" + "github.com/stretchr/testify/assert" +) + +func TestNewFile(t *testing.T) { + f, err := NewFile(mockFs(), "") + assert.NoError(t, err) + assert.NotNil(t, f) + + f1, err := NewFile(mockFs(), "htp:#$%&''()00lde/fdaslk") + assert.Equal(t, err, ErrInvalidBaseURL) + assert.Nil(t, f1) +} + +func TestFile_ReadAsset(t *testing.T) { + f, _ := NewFile(mockFs(), "") + u := "5130c89f-8f67-4766-b127-49ee6796d464" + + r, err := f.ReadAsset(context.Background(), u, "xxx.txt") + assert.NoError(t, err) + c, err := io.ReadAll(r) + assert.NoError(t, err) + assert.Equal(t, "hello", string(c)) + assert.NoError(t, r.Close()) + + r, err = f.ReadAsset(context.Background(), u, "") + assert.ErrorIs(t, err, rerror.ErrNotFound) + assert.Nil(t, r) + + r, err = f.ReadAsset(context.Background(), u, "aaa.txt") + assert.ErrorIs(t, err, rerror.ErrNotFound) + assert.Nil(t, r) + + r, err = f.ReadAsset(context.Background(), u, "../published/s.json") + assert.ErrorIs(t, err, rerror.ErrNotFound) + assert.Nil(t, r) +} + +func TestFile_GetAssetFiles(t *testing.T) { + fs := mockFs() + f, _ := NewFile(fs, "") + + files, err := f.GetAssetFiles(context.Background(), "5130c89f-8f67-4766-b127-49ee6796d464") + assert.NoError(t, err) + 
assert.Equal(t, []gateway.FileEntry{ + {Name: "xxx.txt", Size: 5}, + {Name: path.Join("yyy", "hello.txt"), Size: 6}, + }, files) +} + +func TestFile_UploadAsset(t *testing.T) { + fs := mockFs() + f, _ := NewFile(fs, "https://example.com/assets") + + u, _, err := f.UploadAsset(context.Background(), &file.File{ + Name: "aaa.txt", + Content: io.NopCloser(strings.NewReader("aaa")), + }) + p := getFSObjectPath(u, "aaa.txt") + + assert.NoError(t, err) + assert.Contains(t, p, "aaa.txt") + + u1, _, err1 := f.UploadAsset(context.Background(), nil) + assert.Equal(t, "", u1) + assert.Same(t, gateway.ErrInvalidFile, err1) + + u2, _, err2 := f.UploadAsset(context.Background(), &file.File{ + Size: fileSizeLimit + 1, + }) + assert.Equal(t, "", u2) + assert.Same(t, gateway.ErrFileTooLarge, err2) + + u3, _, err3 := f.UploadAsset(context.Background(), &file.File{ + Content: nil, + }) + assert.Equal(t, "", u3) + assert.Error(t, err3) + + uf, _ := fs.Open(p) + c, _ := io.ReadAll(uf) + assert.Equal(t, "aaa", string(c)) +} + +func TestFile_DeleteAsset(t *testing.T) { + u := newUUID() + n := "aaa.txt" + fs := mockFs() + f, _ := NewFile(fs, "https://example.com/assets") + err := f.DeleteAsset(context.Background(), u, n) + assert.NoError(t, err) + + _, err = fs.Stat(getFSObjectPath(u, n)) + assert.ErrorIs(t, err, os.ErrNotExist) + + u1 := "" + n1 := "" + fs1 := mockFs() + f1, _ := NewFile(fs1, "https://example.com/assets") + err1 := f1.DeleteAsset(context.Background(), u1, n1) + assert.Same(t, gateway.ErrInvalidFile, err1) +} + +func TestFile_GetURL(t *testing.T) { + host := "https://example.com" + fs := mockFs() + r, err := NewFile(fs, host) + assert.NoError(t, err) + + u := newUUID() + n := "xxx.yyy" + a := asset.New().NewID(). + Project(id.NewProjectID()). + CreatedByUser(accountdomain.NewUserID()). + Size(1000).FileName(n). + UUID(u). + Thread(id.NewThreadID()). 
+ MustBuild() + + expected, err := url.JoinPath(host, assetDir, u[:2], u[2:], url.PathEscape(n)) + assert.NoError(t, err) + actual := r.GetURL(a) + assert.Equal(t, expected, actual) +} + +func TestFile_GetFSObjectPath(t *testing.T) { + u := newUUID() + n := "xxx.yyy" + assert.Equal(t, filepath.Join(assetDir, u[:2], u[2:], "xxx.yyy"), getFSObjectPath(u, n)) + + u1 := "" + n1 := "" + assert.Equal(t, "", getFSObjectPath(u1, n1)) +} + +func TestFile_IsValidUUID(t *testing.T) { + u := newUUID() + assert.Equal(t, true, IsValidUUID(u)) + + u1 := "xxxxxx" + assert.Equal(t, false, IsValidUUID(u1)) +} + +func mockFs() afero.Fs { + files := map[string]string{ + filepath.Join("assets", "51", "30c89f-8f67-4766-b127-49ee6796d464", "xxx.txt"): "hello", + filepath.Join("assets", "51", "30c89f-8f67-4766-b127-49ee6796d464", "yyy", "hello.txt"): "hello!", + filepath.Join("plugins", "aaa~1.0.0", "foo.js"): "bar", + filepath.Join("published", "s.json"): "{}", + } + + fs := afero.NewMemMapFs() + for name, content := range files { + f, _ := fs.Create(name) + _, _ = f.WriteString(content) + _ = f.Close() + } + return fs +} diff --git a/asset/assetinfrastructure/assetmemory/asset.go b/asset/assetinfrastructure/assetmemory/asset.go new file mode 100644 index 0000000..4eef6e3 --- /dev/null +++ b/asset/assetinfrastructure/assetmemory/asset.go @@ -0,0 +1,106 @@ +package assetmemory + +import ( + "context" + + id "github.com/reearth/reearthx/asset/assetdomain" + "github.com/reearth/reearthx/asset/assetdomain/asset" + repo "github.com/reearth/reearthx/asset/assetusecase/assetrepo" + "github.com/reearth/reearthx/rerror" + "github.com/reearth/reearthx/usecasex" + "github.com/reearth/reearthx/util" + "github.com/samber/lo" +) + +type Asset struct { + data *util.SyncMap[asset.ID, *asset.Asset] + err error + f repo.ProjectFilter +} + +func NewAsset() repo.Asset { + return &Asset{ + data: &util.SyncMap[id.AssetID, *asset.Asset]{}, + } +} + +func (r *Asset) Filtered(f repo.ProjectFilter) repo.Asset { + 
return &Asset{ + data: r.data, + f: r.f.Merge(f), + } +} + +func (r *Asset) FindByID(ctx context.Context, id id.AssetID) (*asset.Asset, error) { + if r.err != nil { + return nil, r.err + } + + return rerror.ErrIfNil(r.data.Find(func(key asset.ID, value *asset.Asset) bool { + return key == id && r.f.CanRead(value.Project()) + }), rerror.ErrNotFound) +} + +func (r *Asset) FindByIDs(ctx context.Context, ids id.AssetIDList) ([]*asset.Asset, error) { + if r.err != nil { + return nil, r.err + } + + res := asset.List(r.data.FindAll(func(key asset.ID, value *asset.Asset) bool { + return ids.Has(key) && r.f.CanRead(value.Project()) + })).SortByID() + return res, nil +} + +func (r *Asset) FindByProject(ctx context.Context, id id.ProjectID, filter repo.AssetFilter) ([]*asset.Asset, *usecasex.PageInfo, error) { + if !r.f.CanRead(id) { + return nil, usecasex.EmptyPageInfo(), nil + } + + if r.err != nil { + return nil, nil, r.err + } + + result := asset.List(r.data.FindAll(func(_ asset.ID, v *asset.Asset) bool { + return v.Project() == id + })).SortByID() + + var startCursor, endCursor *usecasex.Cursor + if len(result) > 0 { + startCursor = lo.ToPtr(usecasex.Cursor(result[0].ID().String())) + endCursor = lo.ToPtr(usecasex.Cursor(result[len(result)-1].ID().String())) + } + + return result, usecasex.NewPageInfo( + int64(len(result)), + startCursor, + endCursor, + true, + true, + ), nil + +} + +func (r *Asset) Save(ctx context.Context, a *asset.Asset) error { + if !r.f.CanWrite(a.Project()) { + return repo.ErrOperationDenied + } + + if r.err != nil { + return r.err + } + + r.data.Store(a.ID(), a) + return nil +} + +func (r *Asset) Delete(ctx context.Context, id id.AssetID) error { + if r.err != nil { + return r.err + } + + if a, ok := r.data.Load(id); ok && r.f.CanWrite(a.Project()) { + r.data.Delete(id) + } + return nil +} diff --git a/asset/assetinfrastructure/assetmemory/asset_file.go b/asset/assetinfrastructure/assetmemory/asset_file.go new file mode 100644 index 
0000000..1826366 --- /dev/null +++ b/asset/assetinfrastructure/assetmemory/asset_file.go @@ -0,0 +1,60 @@ +package assetmemory + +import ( + "context" + + id "github.com/reearth/reearthx/asset/assetdomain" + "github.com/reearth/reearthx/asset/assetdomain/asset" + "github.com/reearth/reearthx/rerror" + "github.com/reearth/reearthx/util" + "golang.org/x/exp/slices" +) + +type AssetFile struct { + data *util.SyncMap[asset.ID, *asset.File] + files *util.SyncMap[asset.ID, []*asset.File] + err error +} + +func NewAssetFile() *AssetFile { + return &AssetFile{ + data: &util.SyncMap[id.AssetID, *asset.File]{}, + files: &util.SyncMap[id.AssetID, []*asset.File]{}, + } +} + +func (r *AssetFile) FindByID(ctx context.Context, id id.AssetID) (*asset.File, error) { + if r.err != nil { + return nil, r.err + } + + f := r.data.Find(func(key asset.ID, value *asset.File) bool { + return key == id + }).Clone() + fs := r.files.Find(func(key asset.ID, value []*asset.File) bool { + return key == id + }) + if len(fs) > 0 { + // f = asset.FoldFiles(fs, f) + f.SetFiles(fs) + } + return rerror.ErrIfNil(f, rerror.ErrNotFound) +} + +func (r *AssetFile) Save(ctx context.Context, id id.AssetID, file *asset.File) error { + if r.err != nil { + return r.err + } + + r.data.Store(id, file.Clone()) + return nil +} + +func (r *AssetFile) SaveFlat(ctx context.Context, id id.AssetID, parent *asset.File, files []*asset.File) error { + if r.err != nil { + return r.err + } + r.data.Store(id, parent.Clone()) + r.files.Store(id, slices.Clone(files)) + return nil +} diff --git a/asset/assetinfrastructure/assetmemory/asset_test.go b/asset/assetinfrastructure/assetmemory/asset_test.go new file mode 100644 index 0000000..23e27c3 --- /dev/null +++ b/asset/assetinfrastructure/assetmemory/asset_test.go @@ -0,0 +1,456 @@ +package assetmemory + +import ( + "context" + "testing" + + "github.com/reearth/reearthx/account/accountdomain" + id "github.com/reearth/reearthx/asset/assetdomain" + 
"github.com/reearth/reearthx/asset/assetdomain/asset" + repo "github.com/reearth/reearthx/asset/assetusecase/assetrepo" + "github.com/reearth/reearthx/rerror" + "github.com/reearth/reearthx/usecasex" + "github.com/samber/lo" + "github.com/stretchr/testify/assert" +) + +func TestAssetRepo_Filtered(t *testing.T) { + tid1 := id.NewProjectID() + id1 := id.NewAssetID() + id2 := id.NewAssetID() + uid1 := accountdomain.NewUserID() + uid2 := accountdomain.NewUserID() + s := lo.ToPtr(asset.ArchiveExtractionStatusPending) + p1 := asset.New().ID(id1).Project(tid1).CreatedByUser(uid1).Size(1000).Thread(id.NewThreadID()).ArchiveExtractionStatus(s).NewUUID().MustBuild() + p2 := asset.New().ID(id2).Project(tid1).CreatedByUser(uid2).Size(1000).Thread(id.NewThreadID()).ArchiveExtractionStatus(s).NewUUID().MustBuild() + + tests := []struct { + name string + seeds asset.List + arg repo.ProjectFilter + wantErr error + mockErr bool + }{ + { + name: "project filter operation denied", + seeds: asset.List{ + p1, + p2, + }, + arg: repo.ProjectFilter{ + Readable: []id.ProjectID{}, + Writable: []id.ProjectID{}, + }, + wantErr: repo.ErrOperationDenied, + }, + { + name: "project filter operation success", + seeds: asset.List{ + p1, + p2, + }, + arg: repo.ProjectFilter{ + Readable: []id.ProjectID{tid1}, + Writable: []id.ProjectID{tid1}, + }, + wantErr: nil, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + r := NewAsset().Filtered(tc.arg) + ctx := context.Background() + for _, p := range tc.seeds { + err := r.Save(ctx, p.Clone()) + assert.ErrorIs(t, err, tc.wantErr) + } + }) + } +} + +func TestAssetRepo_FindByID(t *testing.T) { + pid1 := id.NewProjectID() + uid1 := accountdomain.NewUserID() + id1 := id.NewAssetID() + s := lo.ToPtr(asset.ArchiveExtractionStatusPending) + a1 := asset.New().ID(id1). + Project(pid1). + CreatedByUser(uid1). + Size(1000). + Thread(id.NewThreadID()). + NewUUID(). 
+ MustBuild() + tests := []struct { + name string + seeds []*asset.Asset + arg id.AssetID + want *asset.Asset + wantErr error + }{ + { + name: "Not found in empty db", + seeds: []*asset.Asset{}, + arg: id.NewAssetID(), + want: nil, + wantErr: rerror.ErrNotFound, + }, + { + name: "Not found", + seeds: []*asset.Asset{ + asset.New().NewID().Project(pid1).ArchiveExtractionStatus(s).NewUUID(). + CreatedByUser(uid1).Size(1000).Thread(id.NewThreadID()).MustBuild(), + }, + arg: id.NewAssetID(), + want: nil, + wantErr: rerror.ErrNotFound, + }, + { + name: "Found 1", + seeds: []*asset.Asset{ + a1, + }, + arg: id1, + want: a1, + wantErr: nil, + }, + { + name: "Found 2", + seeds: []*asset.Asset{ + a1, + asset.New().NewID().Project(id.NewProjectID()).ArchiveExtractionStatus(s).NewUUID(). + CreatedByUser(accountdomain.NewUserID()).Size(1000).Thread(id.NewThreadID()).MustBuild(), + asset.New().NewID().Project(id.NewProjectID()).ArchiveExtractionStatus(s).NewUUID(). + CreatedByUser(accountdomain.NewUserID()).Size(1000).Thread(id.NewThreadID()).MustBuild(), + }, + arg: id1, + want: a1, + wantErr: nil, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + r := NewAsset() + ctx := context.Background() + + for _, a := range tc.seeds { + err := r.Save(ctx, a.Clone()) + assert.NoError(t, err) + } + + got, err := r.FindByID(ctx, tc.arg) + if tc.wantErr != nil { + assert.ErrorIs(t, err, tc.wantErr) + return + } + assert.Equal(t, tc.want, got) + }) + } +} + +func TestAssetRepo_FindByIDs(t *testing.T) { + pid1 := id.NewProjectID() + uid1 := accountdomain.NewUserID() + id1 := id.NewAssetID() + id2 := id.NewAssetID() + s := lo.ToPtr(asset.ArchiveExtractionStatusPending) + a1 := asset.New().ID(id1).Project(pid1).ArchiveExtractionStatus(s).NewUUID(). + CreatedByUser(uid1).Size(1000).Thread(id.NewThreadID()).MustBuild() + a2 := asset.New().ID(id2).Project(pid1).ArchiveExtractionStatus(s).NewUUID(). 
+ CreatedByUser(uid1).Size(1000).Thread(id.NewThreadID()).MustBuild() + + tests := []struct { + name string + seeds []*asset.Asset + arg id.AssetIDList + want []*asset.Asset + wantErr error + }{ + { + name: "0 count in empty db", + seeds: []*asset.Asset{}, + arg: id.AssetIDList{}, + want: nil, + wantErr: nil, + }, + { + name: "0 count with asset for another workspaces", + seeds: []*asset.Asset{ + asset.New().NewID().Project(id.NewProjectID()).ArchiveExtractionStatus(s).NewUUID(). + CreatedByUser(accountdomain.NewUserID()).Size(1000).Thread(id.NewThreadID()).MustBuild(), + }, + arg: id.AssetIDList{}, + want: nil, + wantErr: nil, + }, + { + name: "1 count with single asset", + seeds: []*asset.Asset{ + a1, + }, + arg: id.AssetIDList{id1}, + want: []*asset.Asset{a1}, + wantErr: nil, + }, + { + name: "1 count with multi assets", + seeds: []*asset.Asset{ + a1, + asset.New().NewID().Project(id.NewProjectID()).ArchiveExtractionStatus(s).NewUUID(). + CreatedByUser(accountdomain.NewUserID()).Size(1000).Thread(id.NewThreadID()).MustBuild(), + asset.New().NewID().Project(id.NewProjectID()).ArchiveExtractionStatus(s).NewUUID(). + CreatedByUser(accountdomain.NewUserID()).Size(1000).Thread(id.NewThreadID()).MustBuild(), + }, + arg: id.AssetIDList{id1}, + want: []*asset.Asset{a1}, + wantErr: nil, + }, + { + name: "2 count with multi assets", + seeds: []*asset.Asset{ + a1, + a2, + asset.New().NewID().Project(id.NewProjectID()).ArchiveExtractionStatus(s).NewUUID(). + CreatedByUser(accountdomain.NewUserID()).Size(1000).Thread(id.NewThreadID()).MustBuild(), + asset.New().NewID().Project(id.NewProjectID()).ArchiveExtractionStatus(s).NewUUID(). 
+ CreatedByUser(accountdomain.NewUserID()).Size(1000).Thread(id.NewThreadID()).MustBuild(), + }, + arg: id.AssetIDList{id1, id2}, + want: []*asset.Asset{a1, a2}, + wantErr: nil, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + r := NewAsset() + ctx := context.Background() + for _, a := range tc.seeds { + err := r.Save(ctx, a.Clone()) + assert.NoError(t, err) + } + + got, err := r.FindByIDs(ctx, tc.arg) + if tc.wantErr != nil { + assert.ErrorIs(t, err, tc.wantErr) + return + } + + assert.Equal(t, tc.want, got) + }) + } +} + +func TestAssetRepo_FindByProject(t *testing.T) { + pid1 := id.NewProjectID() + uid1 := accountdomain.NewUserID() + s := lo.ToPtr(asset.ArchiveExtractionStatusPending) + a1 := asset.New().NewID().Project(pid1).ArchiveExtractionStatus(s).NewUUID(). + CreatedByUser(uid1).Size(1000).Thread(id.NewThreadID()).MustBuild() + a2 := asset.New().NewID().Project(pid1).ArchiveExtractionStatus(s).NewUUID(). + CreatedByUser(uid1).Size(1000).Thread(id.NewThreadID()).MustBuild() + + type args struct { + pid id.ProjectID + pInfo *usecasex.Pagination + } + tests := []struct { + name string + seeds []*asset.Asset + args args + filter *repo.ProjectFilter + want []*asset.Asset + wantErr error + }{ + { + name: "0 count in empty db", + seeds: []*asset.Asset{}, + args: args{id.NewProjectID(), nil}, + want: nil, + wantErr: nil, + }, + { + name: "0 count with asset for another workspaces", + seeds: []*asset.Asset{ + asset.New().NewID().Project(id.NewProjectID()).ArchiveExtractionStatus(s).NewUUID(). 
+ CreatedByUser(accountdomain.NewUserID()).Size(1000).Thread(id.NewThreadID()).MustBuild(), + }, + args: args{id.NewProjectID(), nil}, + want: nil, + wantErr: nil, + }, + { + name: "1 count with single asset", + seeds: []*asset.Asset{ + a1, + }, + args: args{pid1, usecasex.CursorPagination{First: lo.ToPtr(int64(1))}.Wrap()}, + want: []*asset.Asset{a1}, + wantErr: nil, + }, + { + name: "1 count with multi assets", + seeds: []*asset.Asset{ + a1, + asset.New().NewID().Project(id.NewProjectID()).ArchiveExtractionStatus(s).NewUUID(). + CreatedByUser(accountdomain.NewUserID()).Size(1000).Thread(id.NewThreadID()).MustBuild(), + asset.New().NewID().Project(id.NewProjectID()).ArchiveExtractionStatus(s).NewUUID(). + CreatedByUser(accountdomain.NewUserID()).Size(1000).Thread(id.NewThreadID()).MustBuild(), + }, + args: args{pid1, usecasex.CursorPagination{First: lo.ToPtr(int64(1))}.Wrap()}, + want: []*asset.Asset{a1}, + wantErr: nil, + }, + { + name: "2 count with multi assets", + seeds: []*asset.Asset{ + a1, + a2, + asset.New().NewID().Project(id.NewProjectID()).ArchiveExtractionStatus(s).NewUUID(). + CreatedByUser(accountdomain.NewUserID()).Size(1000).Thread(id.NewThreadID()).MustBuild(), + asset.New().NewID().Project(id.NewProjectID()).ArchiveExtractionStatus(s).NewUUID(). + CreatedByUser(accountdomain.NewUserID()).Size(1000).Thread(id.NewThreadID()).MustBuild(), + }, + args: args{pid1, usecasex.CursorPagination{First: lo.ToPtr(int64(2))}.Wrap()}, + want: []*asset.Asset{a1, a2}, + wantErr: nil, + }, + { + name: "get 1st page of 2", + seeds: []*asset.Asset{ + a1, + a2, + asset.New().NewID().Project(id.NewProjectID()).ArchiveExtractionStatus(s).NewUUID(). + CreatedByUser(accountdomain.NewUserID()).Size(1000).Thread(id.NewThreadID()).MustBuild(), + asset.New().NewID().Project(id.NewProjectID()).ArchiveExtractionStatus(s).NewUUID(). 
+ CreatedByUser(accountdomain.NewUserID()).Size(1000).Thread(id.NewThreadID()).MustBuild(), + }, + args: args{pid1, usecasex.CursorPagination{First: lo.ToPtr(int64(1))}.Wrap()}, + want: []*asset.Asset{a1, a2}, + wantErr: nil, + }, + { + name: "project filter operation succeed", + seeds: asset.List{ + a1, + asset.New().NewID().Project(id.NewProjectID()).ArchiveExtractionStatus(s).NewUUID(). + CreatedByUser(accountdomain.NewUserID()).Size(1000).Thread(id.NewThreadID()).MustBuild(), + asset.New().NewID().Project(id.NewProjectID()).ArchiveExtractionStatus(s).NewUUID(). + CreatedByUser(accountdomain.NewUserID()).Size(1000).Thread(id.NewThreadID()).MustBuild(), + }, + args: args{pid1, usecasex.CursorPagination{First: lo.ToPtr(int64(1))}.Wrap()}, + filter: &repo.ProjectFilter{Readable: []id.ProjectID{pid1}, Writable: []id.ProjectID{pid1}}, + want: []*asset.Asset{a1}, + wantErr: nil, + }, + { + name: "project filter operation denied", + seeds: asset.List{ + a1, + asset.New().NewID().Project(id.NewProjectID()).ArchiveExtractionStatus(s).NewUUID(). + CreatedByUser(accountdomain.NewUserID()).Size(1000).Thread(id.NewThreadID()).MustBuild(), + asset.New().NewID().Project(id.NewProjectID()).ArchiveExtractionStatus(s).NewUUID(). 
+ CreatedByUser(accountdomain.NewUserID()).Size(1000).Thread(id.NewThreadID()).MustBuild(), + }, + args: args{pid1, usecasex.CursorPagination{First: lo.ToPtr(int64(1))}.Wrap()}, + filter: &repo.ProjectFilter{Readable: []id.ProjectID{}, Writable: []id.ProjectID{}}, + want: nil, + wantErr: nil, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + r := NewAsset() + ctx := context.Background() + for _, a := range tc.seeds { + err := r.Save(ctx, a.Clone()) + assert.NoError(t, err) + } + + if tc.filter != nil { + r = r.Filtered(*tc.filter) + } + + got, _, err := r.FindByProject(ctx, tc.args.pid, repo.AssetFilter{}) + if tc.wantErr != nil { + assert.ErrorIs(t, err, tc.wantErr) + return + } + + assert.Equal(t, tc.want, got) + }) + } +} + +func TestAssetRepo_Delete(t *testing.T) { + pid1 := id.NewProjectID() + id1 := id.NewAssetID() + uid1 := accountdomain.NewUserID() + s := lo.ToPtr(asset.ArchiveExtractionStatusPending) + a1 := asset.New().NewID().Project(pid1).ArchiveExtractionStatus(s).NewUUID(). + CreatedByUser(uid1).Size(1000).Thread(id.NewThreadID()).MustBuild() + tests := []struct { + name string + seeds []*asset.Asset + arg id.AssetID + wantErr error + }{ + { + name: "Found 1", + seeds: []*asset.Asset{ + a1, + }, + arg: id1, + wantErr: nil, + }, + { + name: "Found 2", + seeds: []*asset.Asset{ + a1, + asset.New().NewID().Project(id.NewProjectID()).ArchiveExtractionStatus(s).NewUUID(). + CreatedByUser(accountdomain.NewUserID()).Size(1000).Thread(id.NewThreadID()).MustBuild(), + asset.New().NewID().Project(id.NewProjectID()).ArchiveExtractionStatus(s).NewUUID(). 
+ CreatedByUser(accountdomain.NewUserID()).Size(1000).Thread(id.NewThreadID()).MustBuild(), + }, + arg: id1, + wantErr: nil, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + r := NewAsset() + ctx := context.Background() + for _, a := range tc.seeds { + err := r.Save(ctx, a.Clone()) + assert.NoError(t, err) + } + + err := r.Delete(ctx, tc.arg) + if tc.wantErr != nil { + assert.ErrorIs(t, err, tc.wantErr) + return + } + assert.NoError(t, err) + _, err = r.FindByID(ctx, tc.arg) + assert.ErrorIs(t, err, rerror.ErrNotFound) + }) + } +} diff --git a/asset/assetinfrastructure/assetmemory/container.go b/asset/assetinfrastructure/assetmemory/container.go new file mode 100644 index 0000000..704969a --- /dev/null +++ b/asset/assetinfrastructure/assetmemory/container.go @@ -0,0 +1,31 @@ +package assetmemory + +import ( + "github.com/reearth/reearthx/account/accountinfrastructure/accountmemory" + "time" + + repo "github.com/reearth/reearthx/asset/assetusecase/assetrepo" + "github.com/reearth/reearthx/usecasex" +) + +func New() *repo.Container { + return &repo.Container{ + Asset: NewAsset(), + AssetFile: NewAssetFile(), + User: accountmemory.NewUser(), + Workspace: accountmemory.NewWorkspace(), + Integration: NewIntegration(), + Project: NewProject(), + Thread: NewThread(), + Event: NewEvent(), + Transaction: &usecasex.NopTransaction{}, + } +} + +func MockNow(r *repo.Container, t time.Time) func() { + p := r.Project.(*Project).now.Mock(t) + + return func() { + p() + } +} diff --git a/asset/assetinfrastructure/assetmemory/container_test.go b/asset/assetinfrastructure/assetmemory/container_test.go new file mode 100644 index 0000000..188461d --- /dev/null +++ b/asset/assetinfrastructure/assetmemory/container_test.go @@ -0,0 +1,18 @@ +package assetmemory + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestNew(t *testing.T) { + got := New() + assert.NotNil(t, got) + assert.NotNil(t, got.Asset) + 
assert.NotNil(t, got.AssetFile) + assert.NotNil(t, got.Project) + assert.NotNil(t, got.Thread) + assert.NotNil(t, got.Event) + assert.NotNil(t, got.Transaction) +} diff --git a/asset/assetinfrastructure/assetmemory/event.go b/asset/assetinfrastructure/assetmemory/event.go new file mode 100644 index 0000000..9d0bc37 --- /dev/null +++ b/asset/assetinfrastructure/assetmemory/event.go @@ -0,0 +1,46 @@ +package assetmemory + +import ( + "context" + + id "github.com/reearth/reearthx/asset/assetdomain" + "github.com/reearth/reearthx/asset/assetdomain/event" + repo "github.com/reearth/reearthx/asset/assetusecase/assetrepo" + "github.com/reearth/reearthx/rerror" + "github.com/reearth/reearthx/util" +) + +type Event struct { + data *util.SyncMap[id.EventID, *event.Event[any]] + err error +} + +func NewEvent() repo.Event { + return &Event{ + data: &util.SyncMap[id.EventID, *event.Event[any]]{}, + } +} + +func (r *Event) FindByID(_ context.Context, iId id.EventID) (*event.Event[any], error) { + if r.err != nil { + return nil, r.err + } + + i := r.data.Find(func(k id.EventID, i *event.Event[any]) bool { + return k == iId + }) + + if i != nil { + return i, nil + } + return nil, rerror.ErrNotFound +} + +func (r *Event) Save(ctx context.Context, ev *event.Event[any]) error { + if r.err != nil { + return r.err + } + + r.data.Store(ev.ID(), ev) + return nil +} diff --git a/asset/assetinfrastructure/assetmemory/event_test.go b/asset/assetinfrastructure/assetmemory/event_test.go new file mode 100644 index 0000000..80386f5 --- /dev/null +++ b/asset/assetinfrastructure/assetmemory/event_test.go @@ -0,0 +1,61 @@ +package assetmemory + +import ( + "context" + "testing" + "time" + + "github.com/reearth/reearthx/account/accountdomain/user" + id "github.com/reearth/reearthx/asset/assetdomain" + "github.com/reearth/reearthx/asset/assetdomain/asset" + "github.com/reearth/reearthx/asset/assetdomain/event" + "github.com/reearth/reearthx/asset/assetdomain/operator" + 
"github.com/reearth/reearthx/asset/assetdomain/project" + "github.com/reearth/reearthx/rerror" + "github.com/stretchr/testify/assert" +) + +func TestEvent_FindByID(t *testing.T) { + now := time.Now() + u := user.New().NewID().Email("hoge@example.com").Name("John").MustBuild() + a := asset.New().NewID().Project(project.NewID()).Size(100).NewUUID(). + CreatedByUser(u.ID()).Thread(id.NewThreadID()).MustBuild() + eID1 := event.NewID() + ev := event.New[any]().ID(eID1).Timestamp(now).Type(event.AssetCreate).Operator(operator.OperatorFromUser(u.ID())).Object(a).MustBuild() + + r := NewEvent() + ctx := context.Background() + // seed + err := r.Save(ctx, ev) + assert.NoError(t, err) + + // found + got, err := r.FindByID(ctx, eID1) + assert.NoError(t, err) + assert.Equal(t, ev, got) + + // not found + eID2 := event.NewID() + got2, err := r.FindByID(ctx, eID2) + assert.Nil(t, got2) + assert.Equal(t, rerror.ErrNotFound, err) +} + +func TestEvent_Save(t *testing.T) { + now := time.Now() + u := user.New().NewID().Email("hoge@example.com").Name("John").MustBuild() + a := asset.New().NewID().Project(project.NewID()).Size(100).NewUUID(). 
+ CreatedByUser(u.ID()).Thread(id.NewThreadID()).MustBuild() + eID1 := event.NewID() + ev := event.New[any]().ID(eID1).Timestamp(now).Type(event.AssetCreate).Operator(operator.OperatorFromUser(u.ID())).Object(a).MustBuild() + + r := NewEvent() + ctx := context.Background() + err := r.Save(ctx, ev) + assert.NoError(t, err) + assert.Equal(t, ev, r.(*Event).data.Values()[0]) + + // already exist + _ = r.Save(ctx, ev) + assert.Equal(t, 1, len(r.(*Event).data.Values())) +} diff --git a/asset/assetinfrastructure/assetmemory/integration.go b/asset/assetinfrastructure/assetmemory/integration.go new file mode 100644 index 0000000..b5eeff6 --- /dev/null +++ b/asset/assetinfrastructure/assetmemory/integration.go @@ -0,0 +1,109 @@ +package assetmemory + +import ( + "context" + "time" + + "github.com/reearth/reearthx/account/accountdomain" + id "github.com/reearth/reearthx/asset/assetdomain" + "github.com/reearth/reearthx/asset/assetdomain/integration" + repo "github.com/reearth/reearthx/asset/assetusecase/assetrepo" + "github.com/reearth/reearthx/rerror" + "github.com/reearth/reearthx/util" +) + +type Integration struct { + data *util.SyncMap[id.IntegrationID, *integration.Integration] + now *util.TimeNow + err error +} + +func NewIntegration() repo.Integration { + return &Integration{ + data: &util.SyncMap[id.IntegrationID, *integration.Integration]{}, + now: &util.TimeNow{}, + } +} + +func (r *Integration) FindByID(_ context.Context, iId id.IntegrationID) (*integration.Integration, error) { + if r.err != nil { + return nil, r.err + } + + i := r.data.Find(func(k id.IntegrationID, i *integration.Integration) bool { + return k == iId + }) + + if i != nil { + return i, nil + } + return nil, rerror.ErrNotFound +} + +func (r *Integration) FindByToken(_ context.Context, token string) (*integration.Integration, error) { + if r.err != nil { + return nil, r.err + } + + i := r.data.Find(func(_ id.IntegrationID, i *integration.Integration) bool { + return i.Token() == token + }) + + if 
i != nil { + return i, nil + } + return nil, rerror.ErrNotFound +} + +func (r *Integration) FindByIDs(_ context.Context, iIds id.IntegrationIDList) (integration.List, error) { + if r.err != nil { + return nil, r.err + } + + result := r.data.FindAll(func(k id.IntegrationID, i *integration.Integration) bool { + return iIds.Has(k) + }) + + return integration.List(result).SortByID(), nil +} + +func (r *Integration) FindByUser(_ context.Context, uID accountdomain.UserID) (integration.List, error) { + if r.err != nil { + return nil, r.err + } + + result := r.data.FindAll(func(k id.IntegrationID, i *integration.Integration) bool { + return i.Developer() == uID + }) + + return integration.List(result).SortByID(), nil +} + +func (r *Integration) Save(_ context.Context, i *integration.Integration) error { + if r.err != nil { + return r.err + } + + r.data.Store(i.ID(), i) + return nil +} + +func (r *Integration) Remove(_ context.Context, iId id.IntegrationID) error { + if r.err != nil { + return r.err + } + + if _, ok := r.data.Load(iId); ok { + r.data.Delete(iId) + return nil + } + return rerror.ErrNotFound +} + +func MockIntegrationNow(r repo.Integration, t time.Time) func() { + return r.(*Integration).now.Mock(t) +} + +func SetIntegrationError(r repo.Integration, err error) { + r.(*Integration).err = err +} diff --git a/asset/assetinfrastructure/assetmemory/integration_test.go b/asset/assetinfrastructure/assetmemory/integration_test.go new file mode 100644 index 0000000..d13cce3 --- /dev/null +++ b/asset/assetinfrastructure/assetmemory/integration_test.go @@ -0,0 +1,376 @@ +package assetmemory + +import ( + "context" + "errors" + "testing" + "time" + + "github.com/reearth/reearthx/account/accountdomain" + id "github.com/reearth/reearthx/asset/assetdomain" + "github.com/reearth/reearthx/asset/assetdomain/integration" + "github.com/reearth/reearthx/rerror" + "github.com/stretchr/testify/assert" +) + +func TestIntegrationRepo_FindByID(t *testing.T) { + now := time.Now() + 
iId1 := id.NewIntegrationID() + i1 := integration.New().ID(iId1).UpdatedAt(now).MustBuild() + + tests := []struct { + name string + seeds integration.List + arg integration.ID + want *integration.Integration + wantErr error + mockErr bool + }{ + { + name: "Not found in empty db", + seeds: integration.List{}, + arg: integration.NewID(), + want: nil, + wantErr: rerror.ErrNotFound, + }, + { + name: "Not found", + seeds: integration.List{i1}, + arg: integration.NewID(), + want: nil, + wantErr: rerror.ErrNotFound, + }, + { + name: "Found 1", + seeds: integration.List{i1}, + arg: iId1, + want: i1, + wantErr: nil, + }, + { + name: "Found 2", + seeds: integration.List{ + integration.New().NewID().UpdatedAt(now).MustBuild(), + i1, + integration.New().NewID().UpdatedAt(now).MustBuild(), + }, + arg: iId1, + want: i1, + wantErr: nil, + }, + { + name: "must mock error", + wantErr: errors.New("test"), + mockErr: true, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + r := NewIntegration() + if tc.mockErr { + SetIntegrationError(r, tc.wantErr) + } + defer MockIntegrationNow(r, now)() + ctx := context.Background() + for _, p := range tc.seeds { + err := r.Save(ctx, p.Clone()) + assert.NoError(t, err) + } + + got, err := r.FindByID(ctx, tc.arg) + if tc.wantErr != nil { + assert.ErrorIs(t, err, tc.wantErr) + return + } + + assert.NoError(t, err) + assert.Equal(t, tc.want, got) + }) + } +} + +func TestIntegrationRepo_FindByIDs(t *testing.T) { + now := time.Now() + iId1 := id.NewIntegrationID() + iId2 := id.NewIntegrationID() + i1 := integration.New().ID(iId1).UpdatedAt(now).MustBuild() + i2 := integration.New().ID(iId2).UpdatedAt(now).MustBuild() + + tests := []struct { + name string + seeds integration.List + arg id.IntegrationIDList + want integration.List + wantErr error + mockErr bool + }{ + { + name: "0 count in empty db", + seeds: integration.List{}, + arg: id.IntegrationIDList{}, + want: nil, + wantErr: nil, + }, + { + 
name: "0 count", + seeds: integration.List{i1, i2}, + arg: id.IntegrationIDList{}, + want: nil, + wantErr: nil, + }, + { + name: "1 count with single", + seeds: integration.List{i1, i2}, + arg: id.IntegrationIDList{iId2}, + want: integration.List{i2}, + wantErr: nil, + }, + { + name: "2 count with multi", + seeds: integration.List{i1, i2}, + arg: id.IntegrationIDList{iId1, iId2}, + want: integration.List{i1, i2}, + wantErr: nil, + }, + { + name: "2 count with multi (reverse order)", + seeds: integration.List{i1, i2}, + arg: id.IntegrationIDList{iId2, iId1}, + want: integration.List{i1, i2}, + wantErr: nil, + }, + { + name: "must mock error", + wantErr: errors.New("test"), + mockErr: true, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + r := NewIntegration() + if tc.mockErr { + SetIntegrationError(r, tc.wantErr) + } + defer MockIntegrationNow(r, now)() + ctx := context.Background() + for _, p := range tc.seeds { + err := r.Save(ctx, p.Clone()) + assert.NoError(t, err) + } + + got, err := r.FindByIDs(ctx, tc.arg) + if tc.wantErr != nil { + assert.ErrorIs(t, err, tc.wantErr) + return + } + + assert.NoError(t, err) + assert.Equal(t, tc.want, got) + }) + } +} + +func TestIntegrationRepo_FindByUser(t *testing.T) { + now := time.Now() + uId := accountdomain.NewUserID() + iId1 := id.NewIntegrationID() + iId2 := id.NewIntegrationID() + i1 := integration.New().ID(iId1).Developer(uId).UpdatedAt(now).MustBuild() + i2 := integration.New().ID(iId2).Developer(uId).UpdatedAt(now).MustBuild() + + tests := []struct { + name string + seeds integration.List + arg integration.UserID + want integration.List + wantErr error + mockErr bool + }{ + { + name: "Not found in empty db", + seeds: integration.List{}, + arg: uId, + want: nil, + wantErr: nil, + }, + { + name: "Not found", + seeds: integration.List{i1, i2}, + arg: accountdomain.NewUserID(), + want: nil, + wantErr: nil, + }, + { + name: "Found", + seeds: integration.List{i1, 
i2},
+			arg:     uId,
+			want:    integration.List{i1, i2},
+			wantErr: nil,
+		},
+		{
+			name:    "must mock error",
+			wantErr: errors.New("test"),
+			mockErr: true,
+		},
+	}
+
+	for _, tc := range tests {
+		tc := tc
+		t.Run(tc.name, func(t *testing.T) {
+			t.Parallel()
+
+			r := NewIntegration()
+			if tc.mockErr {
+				SetIntegrationError(r, tc.wantErr)
+			}
+			defer MockIntegrationNow(r, now)()
+			ctx := context.Background()
+			for _, p := range tc.seeds {
+				err := r.Save(ctx, p.Clone())
+				assert.NoError(t, err)
+			}
+
+			got, err := r.FindByUser(ctx, tc.arg)
+			if tc.wantErr != nil {
+				assert.ErrorIs(t, err, tc.wantErr)
+				return
+			}
+			assert.NoError(t, err)
+			assert.Equal(t, tc.want, got)
+		})
+	}
+}
+
+func TestIntegrationRepo_Save(t *testing.T) {
+	now := time.Now()
+	iId1 := id.NewIntegrationID()
+	i1 := integration.New().ID(iId1).UpdatedAt(now).MustBuild()
+
+	tests := []struct {
+		name    string
+		seeds   integration.List
+		arg     *integration.Integration
+		want    integration.List
+		wantErr error
+		mockErr bool
+	}{
+		{
+			name:    "Saved",
+			seeds:   integration.List{},
+			arg:     i1,
+			want:    integration.List{i1},
+			wantErr: nil,
+		},
+		{
+			name:    "Saved same data",
+			seeds:   integration.List{i1},
+			arg:     i1,
+			want:    integration.List{i1},
+			wantErr: nil,
+		},
+		{
+			name:    "must mock error",
+			wantErr: errors.New("test"),
+			mockErr: true,
+		},
+	}
+
+	for _, tc := range tests {
+		tc := tc
+		t.Run(tc.name, func(t *testing.T) {
+			t.Parallel()
+
+			r := NewIntegration()
+			if tc.mockErr {
+				SetIntegrationError(r, tc.wantErr)
+			}
+			ctx := context.Background()
+			for _, p := range tc.seeds {
+				err := r.Save(ctx, p.Clone())
+				if tc.wantErr != nil {
+					assert.ErrorIs(t, err, tc.wantErr)
+					return
+				}
+			}
+
+			err := r.Save(ctx, tc.arg.Clone())
+			if tc.wantErr != nil {
+				assert.ErrorIs(t, err, tc.wantErr)
+				return
+			}
+
+			assert.Equal(t, []*integration.Integration(tc.want), r.(*Integration).data.Values())
+		})
+	}
+}
+
+func TestIntegrationRepo_Remove(t *testing.T) {
+	now := time.Now()
+	iId1 := id.NewIntegrationID()
+	
i1 := integration.New().ID(iId1).UpdatedAt(now).MustBuild() + + tests := []struct { + name string + seeds integration.List + arg integration.ID + want integration.List + wantErr error + mockErr bool + }{ + { + name: "Saved", + seeds: integration.List{}, + arg: iId1, + want: integration.List{}, + wantErr: rerror.ErrNotFound, + }, + { + name: "Saved same data", + seeds: integration.List{i1}, + arg: iId1, + want: nil, + wantErr: nil, + }, + { + name: "must mock error", + wantErr: errors.New("test"), + mockErr: true, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + r := NewIntegration() + if tc.mockErr { + SetIntegrationError(r, tc.wantErr) + } + ctx := context.Background() + for _, p := range tc.seeds { + err := r.Save(ctx, p.Clone()) + if tc.wantErr != nil { + assert.ErrorIs(t, err, tc.wantErr) + return + } + } + + err := r.Remove(ctx, tc.arg) + if tc.wantErr != nil { + assert.ErrorIs(t, err, tc.wantErr) + return + } + + assert.Equal(t, []*integration.Integration(tc.want), r.(*Integration).data.Values()) + }) + } +} diff --git a/asset/assetinfrastructure/assetmemory/project.go b/asset/assetinfrastructure/assetmemory/project.go new file mode 100644 index 0000000..b075e93 --- /dev/null +++ b/asset/assetinfrastructure/assetmemory/project.go @@ -0,0 +1,178 @@ +package assetmemory + +import ( + "context" + "time" + + "github.com/reearth/reearthx/account/accountdomain" + id "github.com/reearth/reearthx/asset/assetdomain" + "github.com/reearth/reearthx/asset/assetdomain/project" + repo "github.com/reearth/reearthx/asset/assetusecase/assetrepo" + "github.com/reearth/reearthx/rerror" + "github.com/reearth/reearthx/usecasex" + "github.com/reearth/reearthx/util" + "github.com/samber/lo" +) + +type Project struct { + data *util.SyncMap[id.ProjectID, *project.Project] + f repo.WorkspaceFilter + now *util.TimeNow + err error +} + +func NewProject() repo.Project { + return &Project{ + data: &util.SyncMap[id.ProjectID, 
*project.Project]{}, + now: &util.TimeNow{}, + } +} + +func (r *Project) Filtered(f repo.WorkspaceFilter) repo.Project { + return &Project{ + data: r.data, + f: r.f.Merge(f), + now: &util.TimeNow{}, + } +} + +func (r *Project) FindByWorkspaces(_ context.Context, wids accountdomain.WorkspaceIDList, _ *usecasex.Pagination) (project.List, *usecasex.PageInfo, error) { + if r.err != nil { + return nil, nil, r.err + } + + // TODO: implement pagination + + result := project.List(r.data.FindAll(func(_ id.ProjectID, v *project.Project) bool { + return wids.Has(v.Workspace()) && r.f.CanRead(v.Workspace()) + })).SortByID() + + var startCursor, endCursor *usecasex.Cursor + if len(result) > 0 { + startCursor = lo.ToPtr(usecasex.Cursor(result[0].ID().String())) + endCursor = lo.ToPtr(usecasex.Cursor(result[len(result)-1].ID().String())) + } + + return result, usecasex.NewPageInfo( + int64(len(result)), + startCursor, + endCursor, + false, + true, + ), nil +} + +func (r *Project) FindByIDs(_ context.Context, ids id.ProjectIDList) (project.List, error) { + if r.err != nil { + return nil, r.err + } + + result := r.data.FindAll(func(k id.ProjectID, v *project.Project) bool { + return ids.Has(k) && r.f.CanRead(v.Workspace()) + }) + + return project.List(result).SortByID(), nil +} + +func (r *Project) FindByID(_ context.Context, pid id.ProjectID) (*project.Project, error) { + if r.err != nil { + return nil, r.err + } + + p := r.data.Find(func(k id.ProjectID, v *project.Project) bool { + return k == pid && r.f.CanRead(v.Workspace()) + }) + + if p != nil { + return p, nil + } + return nil, rerror.ErrNotFound +} + +func (r *Project) FindByIDOrAlias(_ context.Context, q project.IDOrAlias) (*project.Project, error) { + if r.err != nil { + return nil, r.err + } + + pid := q.ID() + alias := q.Alias() + if pid == nil && (alias == nil || *alias == "") { + return nil, rerror.ErrNotFound + } + + p := r.data.Find(func(k id.ProjectID, v *project.Project) bool { + return (pid != nil && k == *pid || 
alias != nil && v.Alias() == *alias) && r.f.CanRead(v.Workspace()) + }) + + if p != nil { + return p, nil + } + return nil, rerror.ErrNotFound +} + +func (r *Project) FindByPublicName(_ context.Context, name string) (*project.Project, error) { + if r.err != nil { + return nil, r.err + } + + if name == "" { + return nil, nil + } + + p := r.data.Find(func(_ id.ProjectID, v *project.Project) bool { + return v.Alias() == name && r.f.CanRead(v.Workspace()) + }) + + if p != nil { + return p, nil + } + return nil, rerror.ErrNotFound +} + +func (r *Project) CountByWorkspace(_ context.Context, workspace accountdomain.WorkspaceID) (c int, err error) { + if r.err != nil { + return 0, r.err + } + + if !r.f.CanRead(workspace) { + return 0, nil + } + + return r.data.CountAll(func(_ id.ProjectID, v *project.Project) bool { + return v.Workspace() == workspace + }), nil +} + +func (r *Project) Save(_ context.Context, p *project.Project) error { + if r.err != nil { + return r.err + } + + if !r.f.CanWrite(p.Workspace()) { + return repo.ErrOperationDenied + } + + p.SetUpdatedAt(r.now.Now()) + r.data.Store(p.ID(), p) + return nil +} + +func (r *Project) Remove(_ context.Context, id id.ProjectID) error { + if r.err != nil { + return r.err + } + + if p, ok := r.data.Load(id); ok && r.f.CanWrite(p.Workspace()) { + r.data.Delete(id) + return nil + } + return rerror.ErrNotFound +} + +func MockProjectNow(r repo.Project, t time.Time) func() { + return r.(*Project).now.Mock(t) +} + +func SetProjectError(r repo.Project, err error) { + r.(*Project).err = err +} diff --git a/asset/assetinfrastructure/assetmemory/project_test.go b/asset/assetinfrastructure/assetmemory/project_test.go new file mode 100644 index 0000000..6ca3d52 --- /dev/null +++ b/asset/assetinfrastructure/assetmemory/project_test.go @@ -0,0 +1,940 @@ +package assetmemory + +import ( + "context" + "errors" + "testing" + "time" + + "github.com/reearth/reearthx/account/accountdomain" + id 
"github.com/reearth/reearthx/asset/assetdomain" + "github.com/reearth/reearthx/asset/assetdomain/project" + repo "github.com/reearth/reearthx/asset/assetusecase/assetrepo" + "github.com/reearth/reearthx/rerror" + "github.com/reearth/reearthx/usecasex" + "github.com/samber/lo" + "github.com/stretchr/testify/assert" +) + +func TestProjectRepo_CountByWorkspace(t *testing.T) { + tid1 := accountdomain.NewWorkspaceID() + tests := []struct { + name string + seeds project.List + arg accountdomain.WorkspaceID + filter *repo.WorkspaceFilter + want int + wantErr error + mockErr bool + }{ + { + name: "0 count in empty db", + seeds: project.List{}, + arg: accountdomain.NewWorkspaceID(), + filter: nil, + want: 0, + wantErr: nil, + }, + { + name: "0 count with project for another workspaces", + seeds: project.List{ + project.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + }, + arg: accountdomain.NewWorkspaceID(), + filter: nil, + want: 0, + wantErr: nil, + }, + { + name: "1 count with single project", + seeds: project.List{ + project.New().NewID().Workspace(tid1).MustBuild(), + }, + arg: tid1, + filter: nil, + want: 1, + wantErr: nil, + }, + { + name: "1 count with multi projects", + seeds: project.List{ + project.New().NewID().Workspace(tid1).MustBuild(), + project.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + project.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + }, + arg: tid1, + filter: nil, + want: 1, + wantErr: nil, + }, + { + name: "2 count with multi projects", + seeds: project.List{ + project.New().NewID().Workspace(tid1).MustBuild(), + project.New().NewID().Workspace(tid1).MustBuild(), + project.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + project.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + }, + arg: tid1, + filter: nil, + want: 2, + wantErr: nil, + }, + { + name: "2 count with multi projects", + seeds: project.List{ + 
project.New().NewID().Workspace(tid1).MustBuild(), + project.New().NewID().Workspace(tid1).MustBuild(), + project.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + project.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + }, + arg: tid1, + filter: &repo.WorkspaceFilter{Readable: []accountdomain.WorkspaceID{accountdomain.NewWorkspaceID()}, Writable: []accountdomain.WorkspaceID{}}, + want: 0, + wantErr: nil, + }, + { + name: "must mock error", + wantErr: errors.New("test"), + mockErr: true, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + r := NewProject() + if tc.mockErr { + SetProjectError(r, tc.wantErr) + } + ctx := context.Background() + for _, p := range tc.seeds { + err := r.Save(ctx, p.Clone()) + assert.NoError(t, err) + } + + if tc.filter != nil { + r = r.Filtered(*tc.filter) + } + + got, err := r.CountByWorkspace(ctx, tc.arg) + if tc.wantErr != nil { + assert.ErrorIs(t, err, tc.wantErr) + return + } + assert.Equal(t, tc.want, got) + }) + } +} + +func TestProjectRepo_Filtered(t *testing.T) { + mocknow := time.Now().Truncate(time.Millisecond).UTC() + tid1 := accountdomain.NewWorkspaceID() + id1 := id.NewProjectID() + id2 := id.NewProjectID() + p1 := project.New().ID(id1).Workspace(tid1).UpdatedAt(mocknow).MustBuild() + p2 := project.New().ID(id2).Workspace(tid1).UpdatedAt(mocknow).MustBuild() + + tests := []struct { + name string + seeds project.List + arg repo.WorkspaceFilter + wantErr error + mockErr bool + }{ + { + name: "no r/w workspaces operation denied", + seeds: project.List{ + p1, + p2, + }, + arg: repo.WorkspaceFilter{ + Readable: []accountdomain.WorkspaceID{}, + Writable: []accountdomain.WorkspaceID{}, + }, + wantErr: repo.ErrOperationDenied, + }, + { + name: "r/w workspaces operation success", + seeds: project.List{ + p1, + p2, + }, + arg: repo.WorkspaceFilter{ + Readable: []accountdomain.WorkspaceID{tid1}, + Writable: 
[]accountdomain.WorkspaceID{tid1}, + }, + wantErr: nil, + }, + { + name: "must mock error", + wantErr: errors.New("test"), + mockErr: true, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + r := NewProject().Filtered(tc.arg) + if tc.mockErr { + SetProjectError(r, tc.wantErr) + } + defer MockProjectNow(r, mocknow)() + ctx := context.Background() + for _, p := range tc.seeds { + err := r.Save(ctx, p.Clone()) + assert.ErrorIs(t, err, tc.wantErr) + } + }) + } +} + +func TestProjectRepo_FindByID(t *testing.T) { + tid1 := accountdomain.NewWorkspaceID() + id1 := id.NewProjectID() + mocknow := time.Now().Truncate(time.Millisecond).UTC() + p1 := project.New().ID(id1).Workspace(tid1).UpdatedAt(mocknow).MustBuild() + tests := []struct { + name string + seeds project.List + arg id.ProjectID + filter *repo.WorkspaceFilter + want *project.Project + wantErr error + mockErr bool + }{ + { + name: "Not found in empty db", + seeds: project.List{}, + arg: id.NewProjectID(), + filter: nil, + want: nil, + wantErr: rerror.ErrNotFound, + }, + { + name: "Not found", + seeds: project.List{ + project.New().NewID().MustBuild(), + }, + arg: id.NewProjectID(), + filter: nil, + want: nil, + wantErr: rerror.ErrNotFound, + }, + { + name: "Found 1", + seeds: project.List{ + p1, + }, + arg: id1, + filter: nil, + want: p1, + wantErr: nil, + }, + { + name: "Found 2", + seeds: project.List{ + p1, + project.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + project.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + }, + arg: id1, + filter: nil, + want: p1, + wantErr: nil, + }, + { + name: "Filtered Found 0", + seeds: project.List{ + p1, + project.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + project.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + }, + arg: id1, + filter: &repo.WorkspaceFilter{Readable: 
[]accountdomain.WorkspaceID{accountdomain.NewWorkspaceID()}, Writable: []accountdomain.WorkspaceID{}}, + want: nil, + wantErr: nil, + }, + { + name: "Filtered Found 2", + seeds: project.List{ + p1, + project.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + project.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + }, + arg: id1, + filter: &repo.WorkspaceFilter{Readable: []accountdomain.WorkspaceID{tid1}, Writable: []accountdomain.WorkspaceID{}}, + want: p1, + wantErr: nil, + }, + { + name: "must mock error", + wantErr: errors.New("test"), + mockErr: true, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + r := NewProject() + if tc.mockErr { + SetProjectError(r, tc.wantErr) + } + defer MockProjectNow(r, mocknow)() + ctx := context.Background() + for _, p := range tc.seeds { + err := r.Save(ctx, p.Clone()) + assert.NoError(t, err) + } + + if tc.filter != nil { + r = r.Filtered(*tc.filter) + } + + got, err := r.FindByID(ctx, tc.arg) + if tc.wantErr != nil { + assert.ErrorIs(t, err, tc.wantErr) + return + } + assert.Equal(t, tc.want, got) + }) + } +} + +func TestProjectRepo_FindByIDs(t *testing.T) { + mocknow := time.Now().Truncate(time.Millisecond).UTC() + tid1 := accountdomain.NewWorkspaceID() + id1 := id.NewProjectID() + id2 := id.NewProjectID() + p1 := project.New().ID(id1).Workspace(tid1).UpdatedAt(mocknow).MustBuild() + p2 := project.New().ID(id2).Workspace(tid1).UpdatedAt(mocknow).MustBuild() + + tests := []struct { + name string + seeds project.List + arg id.ProjectIDList + filter *repo.WorkspaceFilter + want project.List + wantErr error + mockErr bool + }{ + { + name: "0 count in empty db", + seeds: project.List{}, + arg: []id.ProjectID{}, + filter: nil, + want: nil, + wantErr: nil, + }, + { + name: "0 count with project for another workspaces", + seeds: project.List{ + project.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + }, + arg: 
[]id.ProjectID{}, + filter: nil, + want: nil, + wantErr: nil, + }, + { + name: "1 count with single project", + seeds: project.List{ + p1, + }, + arg: []id.ProjectID{id1}, + filter: nil, + want: project.List{p1}, + wantErr: nil, + }, + { + name: "1 count with multi projects", + seeds: project.List{ + p1, + project.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + project.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + }, + arg: []id.ProjectID{id1}, + filter: nil, + want: project.List{p1}, + wantErr: nil, + }, + { + name: "2 count with multi projects", + seeds: project.List{ + p1, + p2, + project.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + project.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + }, + arg: []id.ProjectID{id1, id2}, + filter: nil, + want: project.List{p1, p2}, + wantErr: nil, + }, + { + name: "Filter 2 count with multi projects", + seeds: project.List{ + p1, + p2, + project.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + project.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + }, + arg: []id.ProjectID{id1, id2}, + filter: &repo.WorkspaceFilter{Readable: []accountdomain.WorkspaceID{accountdomain.NewWorkspaceID()}, Writable: []accountdomain.WorkspaceID{}}, + want: nil, + wantErr: nil, + }, + { + name: "Filter 2 count with multi projects", + seeds: project.List{ + p1, + p2, + project.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + project.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + }, + arg: []id.ProjectID{id1, id2}, + filter: &repo.WorkspaceFilter{Readable: []accountdomain.WorkspaceID{tid1}, Writable: []accountdomain.WorkspaceID{}}, + want: project.List{p1, p2}, + wantErr: nil, + }, + { + name: "must mock error", + wantErr: errors.New("test"), + mockErr: true, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + r 
:= NewProject() + if tc.mockErr { + SetProjectError(r, tc.wantErr) + } + defer MockProjectNow(r, mocknow)() + ctx := context.Background() + for _, p := range tc.seeds { + err := r.Save(ctx, p.Clone()) + assert.NoError(t, err) + } + + if tc.filter != nil { + r = r.Filtered(*tc.filter) + } + + got, err := r.FindByIDs(ctx, tc.arg) + if tc.wantErr != nil { + assert.ErrorIs(t, err, tc.wantErr) + return + } + + assert.Equal(t, tc.want, got) + }) + } +} + +func TestProjectRepo_FindByPublicName(t *testing.T) { + mocknow := time.Now().Truncate(time.Millisecond).UTC() + tid1 := accountdomain.NewWorkspaceID() + id1 := id.NewProjectID() + p1 := project.New(). + ID(id1). + Workspace(tid1). + Alias("xyz123"). + UpdatedAt(mocknow). + MustBuild() + + id2 := id.NewProjectID() + p2 := project.New(). + ID(id2). + Workspace(accountdomain.NewWorkspaceID()). + Alias("xyz321"). + UpdatedAt(mocknow). + MustBuild() + + tests := []struct { + name string + seeds project.List + arg string + filter *repo.WorkspaceFilter + want *project.Project + wantErr error + mockErr bool + }{ + { + name: "Not found in empty db", + seeds: project.List{}, + arg: "xyz123", + filter: nil, + want: nil, + wantErr: rerror.ErrNotFound, + }, + { + name: "Not found", + seeds: project.List{ + project.New().NewID().Alias("abc123").MustBuild(), + }, + arg: "xyz123", + filter: nil, + want: nil, + wantErr: rerror.ErrNotFound, + }, + { + name: "public Found", + seeds: project.List{ + p1, + }, + arg: "xyz123", + filter: nil, + want: p1, + wantErr: nil, + }, + { + name: "linited Found", + seeds: project.List{ + p2, + }, + arg: "xyz321", + want: p2, + filter: nil, + wantErr: nil, + }, + { + name: "Found 2", + seeds: project.List{ + p1, + project.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + project.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + }, + arg: "xyz123", + filter: nil, + want: p1, + wantErr: nil, + }, + { + name: "Filtered should not Found", + seeds: project.List{ + 
p1, + project.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + project.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + }, + arg: "xyz123", + filter: &repo.WorkspaceFilter{Readable: []accountdomain.WorkspaceID{accountdomain.NewWorkspaceID()}, Writable: []accountdomain.WorkspaceID{}}, + want: nil, + wantErr: rerror.ErrNotFound, + }, + { + name: "Filtered should Found", + seeds: project.List{ + p1, + project.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + project.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + }, + arg: "xyz123", + filter: &repo.WorkspaceFilter{Readable: []accountdomain.WorkspaceID{tid1}, Writable: []accountdomain.WorkspaceID{}}, + want: p1, + wantErr: nil, + }, + { + name: "must mock error", + wantErr: errors.New("test"), + mockErr: true, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + r := NewProject() + if tc.mockErr { + SetProjectError(r, tc.wantErr) + } + defer MockProjectNow(r, mocknow)() + ctx := context.Background() + for _, p := range tc.seeds { + err := r.Save(ctx, p.Clone()) + assert.NoError(t, err) + } + + if tc.filter != nil { + r = r.Filtered(*tc.filter) + } + + got, err := r.FindByPublicName(ctx, tc.arg) + if tc.wantErr != nil { + assert.ErrorIs(t, err, tc.wantErr) + return + } + assert.NoError(t, err) + assert.Equal(t, tc.want, got) + }) + } +} + +func TestProjectRepo_FindByWorkspaces(t *testing.T) { + mocknow := time.Now().Truncate(time.Millisecond).UTC() + tid1 := accountdomain.NewWorkspaceID() + p1 := project.New().NewID().Workspace(tid1).UpdatedAt(mocknow).MustBuild() + p2 := project.New().NewID().Workspace(tid1).UpdatedAt(mocknow).MustBuild() + + type args struct { + wids accountdomain.WorkspaceIDList + pInfo *usecasex.Pagination + } + tests := []struct { + name string + seeds project.List + args args + filter *repo.WorkspaceFilter + want project.List + wantErr error + mockErr bool 
+ }{ + { + name: "0 count in empty db", + seeds: project.List{}, + args: args{accountdomain.WorkspaceIDList{accountdomain.NewWorkspaceID()}, nil}, + filter: nil, + want: nil, + wantErr: nil, + }, + { + name: "0 count with project for another workspaces", + seeds: project.List{ + project.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + }, + args: args{accountdomain.WorkspaceIDList{accountdomain.NewWorkspaceID()}, nil}, + filter: nil, + want: nil, + wantErr: nil, + }, + { + name: "1 count with single project", + seeds: project.List{ + p1, + }, + args: args{accountdomain.WorkspaceIDList{tid1}, usecasex.CursorPagination{First: lo.ToPtr(int64(1))}.Wrap()}, + filter: nil, + want: project.List{p1}, + wantErr: nil, + }, + { + name: "1 count with multi projects", + seeds: project.List{ + p1, + project.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + project.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + }, + args: args{accountdomain.WorkspaceIDList{tid1}, usecasex.CursorPagination{First: lo.ToPtr(int64(1))}.Wrap()}, + filter: nil, + want: project.List{p1}, + wantErr: nil, + }, + { + name: "2 count with multi projects", + seeds: project.List{ + p1, + p2, + project.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + project.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + }, + args: args{accountdomain.WorkspaceIDList{tid1}, usecasex.CursorPagination{First: lo.ToPtr(int64(2))}.Wrap()}, + filter: nil, + want: project.List{p1, p2}, + wantErr: nil, + }, + { + name: "get 1st page of 2", + seeds: project.List{ + p1, + p2, + project.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + project.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + }, + args: args{accountdomain.WorkspaceIDList{tid1}, usecasex.CursorPagination{First: lo.ToPtr(int64(1))}.Wrap()}, + filter: nil, + want: project.List{p1, p2}, + wantErr: nil, + }, + { + name: "get 
last page of 2", + seeds: project.List{ + p1, + p2, + project.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + project.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + }, + args: args{accountdomain.WorkspaceIDList{tid1}, usecasex.CursorPagination{Last: lo.ToPtr(int64(1))}.Wrap()}, + filter: nil, + want: project.List{p1, p2}, + wantErr: nil, + }, + { + name: "Filtered sholud not 1 count with multi projects", + seeds: project.List{ + p1, + project.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + project.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + }, + args: args{accountdomain.WorkspaceIDList{tid1}, usecasex.CursorPagination{First: lo.ToPtr(int64(1))}.Wrap()}, + filter: &repo.WorkspaceFilter{Readable: []accountdomain.WorkspaceID{accountdomain.NewWorkspaceID()}, Writable: []accountdomain.WorkspaceID{}}, + want: nil, + wantErr: nil, + }, + { + name: "must mock error", + wantErr: errors.New("test"), + mockErr: true, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + r := NewProject() + if tc.mockErr { + SetProjectError(r, tc.wantErr) + } + defer MockProjectNow(r, mocknow)() + ctx := context.Background() + for _, p := range tc.seeds { + err := r.Save(ctx, p.Clone()) + assert.NoError(t, err) + } + + if tc.filter != nil { + r = r.Filtered(*tc.filter) + } + + got, _, err := r.FindByWorkspaces(ctx, tc.args.wids, tc.args.pInfo) + if tc.wantErr != nil { + assert.ErrorIs(t, err, tc.wantErr) + return + } + + assert.Equal(t, tc.want, got) + }) + } +} + +func TestProjectRepo_Remove(t *testing.T) { + tid1 := accountdomain.NewWorkspaceID() + id1 := id.NewProjectID() + p1 := project.New().ID(id1).Workspace(tid1).MustBuild() + tests := []struct { + name string + seeds project.List + arg id.ProjectID + filter *repo.WorkspaceFilter + wantErr error + mockErr bool + }{ + { + name: "Not found in empty db", + seeds: project.List{}, + arg: 
id.NewProjectID(), + filter: nil, + wantErr: rerror.ErrNotFound, + }, + { + name: "Not found", + seeds: project.List{ + project.New().NewID().MustBuild(), + }, + arg: id.NewProjectID(), + filter: nil, + wantErr: rerror.ErrNotFound, + }, + { + name: "Found 1", + seeds: project.List{ + p1, + }, + arg: id1, + filter: nil, + wantErr: nil, + }, + { + name: "Found 2", + seeds: project.List{ + p1, + project.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + project.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + }, + arg: id1, + filter: nil, + wantErr: nil, + }, + { + name: "Filtered should fail Found 2", + seeds: project.List{ + p1, + project.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + project.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + }, + arg: id1, + filter: &repo.WorkspaceFilter{Readable: []accountdomain.WorkspaceID{accountdomain.NewWorkspaceID()}, Writable: []accountdomain.WorkspaceID{accountdomain.NewWorkspaceID()}}, + wantErr: rerror.ErrNotFound, + }, + { + name: "Filtered should work Found 2", + seeds: project.List{ + p1, + project.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + project.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + }, + arg: id1, + filter: &repo.WorkspaceFilter{Readable: []accountdomain.WorkspaceID{}, Writable: []accountdomain.WorkspaceID{tid1}}, + wantErr: nil, + }, + { + name: "must mock error", + wantErr: errors.New("test"), + mockErr: true, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + r := NewProject() + if tc.mockErr { + SetProjectError(r, tc.wantErr) + } + ctx := context.Background() + for _, p := range tc.seeds { + err := r.Save(ctx, p.Clone()) + assert.NoError(t, err) + } + + if tc.filter != nil { + r = r.Filtered(*tc.filter) + } + + err := r.Remove(ctx, tc.arg) + if tc.wantErr != nil { + assert.ErrorIs(t, err, tc.wantErr) + return + 
} + assert.NoError(t, err) + _, err = r.FindByID(ctx, tc.arg) + assert.ErrorIs(t, err, rerror.ErrNotFound) + }) + } +} + +func TestProjectRepo_Save(t *testing.T) { + tid1 := accountdomain.NewWorkspaceID() + id1 := id.NewProjectID() + p1 := project.New().ID(id1).Workspace(tid1).UpdatedAt(time.Now().Truncate(time.Millisecond).UTC()).MustBuild() + + tests := []struct { + name string + seeds project.List + arg *project.Project + filter *repo.WorkspaceFilter + want *project.Project + wantErr error + mockErr bool + }{ + { + name: "Saved", + seeds: project.List{ + p1, + }, + arg: p1, + filter: nil, + want: p1, + wantErr: nil, + }, + { + name: "Filtered should fail - Saved", + seeds: project.List{ + p1, + }, + arg: p1, + filter: &repo.WorkspaceFilter{Readable: []accountdomain.WorkspaceID{}, Writable: []accountdomain.WorkspaceID{}}, + want: nil, + wantErr: repo.ErrOperationDenied, + }, + { + name: "Filtered should work - Saved", + seeds: project.List{ + p1, + }, + arg: p1, + filter: &repo.WorkspaceFilter{Readable: []accountdomain.WorkspaceID{tid1}, Writable: []accountdomain.WorkspaceID{tid1}}, + want: p1, + wantErr: nil, + }, + { + name: "Filtered should work - Saved same data", + seeds: project.List{ + p1, + }, + arg: p1, + filter: &repo.WorkspaceFilter{Readable: []accountdomain.WorkspaceID{}, Writable: []accountdomain.WorkspaceID{tid1}}, + want: p1, + wantErr: nil, + }, + { + name: "must mock error", + wantErr: errors.New("test"), + mockErr: true, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + r := NewProject() + if tc.filter != nil { + r = r.Filtered(*tc.filter) + } + if tc.mockErr { + SetProjectError(r, tc.wantErr) + } + ctx := context.Background() + for _, p := range tc.seeds { + err := r.Save(ctx, p.Clone()) + if tc.wantErr != nil { + assert.ErrorIs(t, err, tc.wantErr) + return + } + } + + err := r.Save(ctx, tc.arg.Clone()) + if tc.wantErr != nil { + assert.ErrorIs(t, err, tc.wantErr) + return + } + + got, 
err := r.CountByWorkspace(ctx, tc.arg.Workspace()) + if tc.wantErr != nil { + assert.Zero(t, got) + assert.Equal(t, tc.wantErr, err) + return + } + assert.NoError(t, err) + assert.Equal(t, 1, got) + }) + } +} diff --git a/asset/assetinfrastructure/assetmemory/thread.go b/asset/assetinfrastructure/assetmemory/thread.go new file mode 100644 index 0000000..a9a4d8f --- /dev/null +++ b/asset/assetinfrastructure/assetmemory/thread.go @@ -0,0 +1,73 @@ +package assetmemory + +import ( + "context" + + id "github.com/reearth/reearthx/asset/assetdomain" + "github.com/reearth/reearthx/asset/assetdomain/thread" + repo "github.com/reearth/reearthx/asset/assetusecase/assetrepo" + "github.com/reearth/reearthx/rerror" + "github.com/reearth/reearthx/util" +) + +type Thread struct { + data *util.SyncMap[thread.ID, *thread.Thread] + f repo.WorkspaceFilter + err error +} + +func NewThread() repo.Thread { + return &Thread{ + data: &util.SyncMap[id.ThreadID, *thread.Thread]{}, + } +} + +func (r *Thread) Save(_ context.Context, th *thread.Thread) error { + if r.err != nil { + return r.err + } + + if !r.f.CanWrite(th.Workspace()) { + return repo.ErrOperationDenied + } + + r.data.Store(th.ID(), th) + return nil +} + +func (r *Thread) Filtered(f repo.WorkspaceFilter) repo.Thread { + return &Thread{ + data: r.data, + f: r.f.Merge(f), + } +} + +func (r *Thread) FindByID(ctx context.Context, thid id.ThreadID) (*thread.Thread, error) { + if r.err != nil { + return nil, r.err + } + + th := r.data.Find(func(k id.ThreadID, v *thread.Thread) bool { + return k == thid && r.f.CanRead(v.Workspace()) + }) + + if th != nil { + return th, nil + } + return nil, rerror.ErrNotFound +} + +func (r *Thread) FindByIDs(ctx context.Context, ids id.ThreadIDList) ([]*thread.Thread, error) { + if r.err != nil { + return nil, r.err + } + + res := thread.List(r.data.FindAll(func(key thread.ID, value *thread.Thread) bool { + return ids.Has(key) && r.f.CanRead(value.Workspace()) + })).SortByID() + return res, nil +} + 
+func SetThreadError(r repo.Thread, err error) { + r.(*Thread).err = err +} diff --git a/asset/assetinfrastructure/assetmemory/thread_test.go b/asset/assetinfrastructure/assetmemory/thread_test.go new file mode 100644 index 0000000..cea0b1f --- /dev/null +++ b/asset/assetinfrastructure/assetmemory/thread_test.go @@ -0,0 +1,315 @@ +package assetmemory + +import ( + "context" + "errors" + "testing" + + "github.com/reearth/reearthx/account/accountdomain" + id "github.com/reearth/reearthx/asset/assetdomain" + "github.com/reearth/reearthx/asset/assetdomain/thread" + repo "github.com/reearth/reearthx/asset/assetusecase/assetrepo" + "github.com/reearth/reearthx/rerror" + "github.com/stretchr/testify/assert" +) + +func TestThreadRepo_Save(t *testing.T) { + wid1 := accountdomain.NewWorkspaceID() + id1 := id.NewThreadID() + th1 := thread.New().ID(id1).Workspace(wid1).MustBuild() + + tests := []struct { + name string + seeds thread.List + arg *thread.Thread + filter *repo.WorkspaceFilter + want *thread.Thread + wantErr error + }{ + { + name: "Save succeed", + seeds: thread.List{ + th1, + }, + arg: th1, + want: th1, + wantErr: nil, + }, + { + name: "Filtered operation error", + seeds: thread.List{ + th1, + thread.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + thread.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + }, + arg: th1, + filter: &repo.WorkspaceFilter{Readable: []accountdomain.WorkspaceID{}, Writable: []accountdomain.WorkspaceID{}}, + want: nil, + wantErr: repo.ErrOperationDenied, + }, + { + name: "Filtered succeed", + seeds: thread.List{ + th1, + thread.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + thread.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + }, + arg: th1, + filter: &repo.WorkspaceFilter{Readable: []accountdomain.WorkspaceID{wid1}, Writable: []accountdomain.WorkspaceID{wid1}}, + want: th1, + wantErr: nil, + }, + } + + for _, tc := range tests { + tc := tc + 
t.Run(tc.name, func(t *testing.T) { + r := NewThread() + ctx := context.Background() + + if tc.filter != nil { + r = r.Filtered(*tc.filter) + } + + for _, th := range tc.seeds { + err := r.Save(ctx, th) + if tc.wantErr != nil { + assert.ErrorIs(t, err, tc.wantErr) + return + } + } + + err := r.Save(ctx, tc.arg) + if tc.wantErr != nil { + assert.ErrorIs(t, err, tc.wantErr) + return + } + }) + } +} + +func TestThread_Filtered(t *testing.T) { + r := &Thread{} + wid := accountdomain.NewWorkspaceID() + + assert.Equal(t, &Thread{ + f: repo.WorkspaceFilter{ + Readable: accountdomain.WorkspaceIDList{wid}, + Writable: nil, + }, + }, r.Filtered(repo.WorkspaceFilter{ + Readable: accountdomain.WorkspaceIDList{wid}, + Writable: nil, + })) +} + +func TestThreadRepo_FindByID(t *testing.T) { + tid1 := accountdomain.NewWorkspaceID() + id1 := id.NewThreadID() + th1 := thread.New().ID(id1).Workspace(tid1).MustBuild() + tests := []struct { + name string + seeds thread.List + arg id.ThreadID + filter *repo.WorkspaceFilter + want *thread.Thread + wantErr error + mockErr bool + }{ + { + name: "Not found in empty db", + seeds: thread.List{}, + arg: id.NewThreadID(), + filter: nil, + want: nil, + wantErr: rerror.ErrNotFound, + }, + { + name: "Not found", + seeds: thread.List{ + thread.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + }, + arg: id.NewThreadID(), + filter: nil, + want: nil, + wantErr: rerror.ErrNotFound, + }, + { + name: "Found 1", + seeds: thread.List{ + th1, + }, + arg: id1, + filter: nil, + want: th1, + wantErr: nil, + }, + { + name: "Found 2", + seeds: thread.List{ + th1, + thread.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + thread.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + }, + arg: id1, + filter: nil, + want: th1, + wantErr: nil, + }, + { + name: "Filtered Found 0", + seeds: thread.List{ + th1, + thread.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + 
thread.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + }, + arg: id1, + filter: &repo.WorkspaceFilter{Readable: []accountdomain.WorkspaceID{accountdomain.NewWorkspaceID()}, Writable: []accountdomain.WorkspaceID{}}, + want: nil, + wantErr: nil, + }, + { + name: "Filtered Found 2", + seeds: thread.List{ + th1, + thread.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + thread.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + }, + arg: id1, + filter: &repo.WorkspaceFilter{Readable: []accountdomain.WorkspaceID{tid1}, Writable: []accountdomain.WorkspaceID{}}, + want: th1, + wantErr: nil, + }, + { + name: "must mock error", + wantErr: errors.New("test"), + mockErr: true, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + r := NewThread() + if tc.mockErr { + SetThreadError(r, tc.wantErr) + } + + ctx := context.Background() + for _, th := range tc.seeds { + err := r.Save(ctx, th.Clone()) + assert.Nil(t, err) + } + + if tc.filter != nil { + r = r.Filtered(*tc.filter) + } + + got, err := r.FindByID(ctx, tc.arg) + if tc.wantErr != nil { + assert.ErrorIs(t, err, tc.wantErr) + return + } + assert.Equal(t, tc.want, got) + }) + } +} + +func TestThreadRepo_FindByIDs(t *testing.T) { + wid1 := accountdomain.NewWorkspaceID() + id1 := id.NewThreadID() + id2 := id.NewThreadID() + th1 := thread.New().ID(id1).Workspace(wid1).MustBuild() + th2 := thread.New().ID(id2).Workspace(wid1).MustBuild() + + tests := []struct { + name string + seeds []*thread.Thread + arg id.ThreadIDList + want []*thread.Thread + wantErr error + mockErr bool + }{ + { + name: "0 count in empty db", + seeds: []*thread.Thread{}, + arg: id.ThreadIDList{}, + want: nil, + wantErr: nil, + }, + { + name: "0 count with thread for another workspaces", + seeds: []*thread.Thread{ + thread.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + }, + arg: id.ThreadIDList{}, + want: nil, + 
wantErr: nil, + }, + { + name: "1 count with single thread", + seeds: []*thread.Thread{ + th1, + }, + arg: id.ThreadIDList{id1}, + want: []*thread.Thread{th1}, + wantErr: nil, + }, + { + name: "1 count with multi threads", + seeds: []*thread.Thread{ + th1, + thread.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + thread.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + }, + arg: id.ThreadIDList{id1}, + want: []*thread.Thread{th1}, + wantErr: nil, + }, + { + name: "2 count with multi threads", + seeds: []*thread.Thread{ + th1, + th2, + thread.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + thread.New().NewID().Workspace(accountdomain.NewWorkspaceID()).MustBuild(), + }, + arg: id.ThreadIDList{id1, id2}, + want: []*thread.Thread{th1, th2}, + wantErr: nil, + }, + { + name: "must mock error", + wantErr: errors.New("test"), + mockErr: true, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + r := NewThread() + if tc.mockErr { + SetThreadError(r, tc.wantErr) + } + + ctx := context.Background() + for _, a := range tc.seeds { + err := r.Save(ctx, a.Clone()) + assert.Nil(t, err) + } + + got, err := r.FindByIDs(ctx, tc.arg) + if tc.wantErr != nil { + assert.ErrorIs(t, err, tc.wantErr) + return + } + + assert.Equal(t, tc.want, got) + }) + } +} From bdffd15d3ed9d9f7b9c152e1241c03f9cbaf50f9 Mon Sep 17 00:00:00 2001 From: shumon84 Date: Mon, 9 Sep 2024 02:37:40 +0900 Subject: [PATCH 09/10] feat(asset): go mod tidy --- go.mod | 91 +++++++++++++------------ go.sum | 204 +++++++++++++++++++++++++++++---------------------------- 2 files changed, 154 insertions(+), 141 deletions(-) diff --git a/go.mod b/go.mod index a1440fc..6e2260a 100644 --- a/go.mod +++ b/go.mod @@ -1,20 +1,24 @@ module github.com/reearth/reearthx -go 1.21 +go 1.22.1 + +toolchain go1.22.7 require ( - github.com/99designs/gqlgen v0.17.43 + github.com/99designs/gqlgen v0.17.44 
github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.21.0 github.com/Khan/genqlient v0.6.0 github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d github.com/auth0/go-jwt-middleware/v2 v2.2.1 - github.com/aws/aws-sdk-go-v2 v1.24.1 - github.com/aws/aws-sdk-go-v2/config v1.26.6 + github.com/aws/aws-sdk-go-v2 v1.25.3 + github.com/aws/aws-sdk-go-v2/config v1.27.7 github.com/aws/aws-sdk-go-v2/service/ses v1.19.6 github.com/goccy/go-yaml v1.11.3 github.com/golang-jwt/jwt v3.2.2+incompatible github.com/golang/gddo v0.0.0-20210115222349-20d68f94ee1f + github.com/golang/mock v1.6.0 github.com/google/uuid v1.6.0 + github.com/goombaio/namegenerator v0.0.0-20181006234301-989e774b106e github.com/gorilla/mux v1.8.1 github.com/iancoleman/strcase v0.3.0 github.com/jarcoal/httpmock v1.3.1 @@ -26,20 +30,21 @@ require ( github.com/oklog/ulid v1.3.1 github.com/pkg/errors v0.9.1 github.com/ravilushqa/otelgqlgen v0.15.0 + github.com/reearth/reearth-cms/server v0.0.0-20240903081242-e53864457087 github.com/samber/lo v1.39.0 github.com/sendgrid/sendgrid-go v3.14.0+incompatible github.com/spf13/afero v1.11.0 - github.com/stretchr/testify v1.8.4 + github.com/stretchr/testify v1.9.0 github.com/uber/jaeger-client-go v2.30.0+incompatible github.com/uber/jaeger-lib v2.4.1+incompatible github.com/vektah/gqlparser/v2 v2.5.11 github.com/zitadel/oidc v1.13.5 - go.mongodb.org/mongo-driver v1.13.1 - go.opentelemetry.io/otel v1.22.0 + go.mongodb.org/mongo-driver v1.14.0 + go.opentelemetry.io/otel v1.24.0 go.opentelemetry.io/otel/sdk v1.22.0 go.uber.org/atomic v1.11.0 go.uber.org/zap v1.26.0 - golang.org/x/crypto v0.18.0 + golang.org/x/crypto v0.21.0 golang.org/x/exp v0.0.0-20240119083558-1b970713d09a golang.org/x/text v0.14.0 gopkg.in/go-jose/go-jose.v2 v2.6.2 @@ -47,25 +52,28 @@ require ( ) require ( - cloud.google.com/go/compute v1.23.4 // indirect + cloud.google.com/go/compute v1.24.0 // indirect cloud.google.com/go/compute/metadata v0.2.3 // indirect + 
cloud.google.com/go/logging v1.9.0 // indirect + cloud.google.com/go/monitoring v1.18.0 // indirect cloud.google.com/go/trace v1.10.5 // indirect github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.45.0 // indirect github.com/HdrHistogram/hdrhistogram-go v1.1.2 // indirect github.com/agnivade/levenshtein v1.1.1 // indirect github.com/alexflint/go-arg v1.4.3 // indirect github.com/alexflint/go-scalar v1.2.0 // indirect - github.com/aws/aws-sdk-go-v2/credentials v1.16.16 // indirect - github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.14.11 // indirect - github.com/aws/aws-sdk-go-v2/internal/configsources v1.2.10 // indirect - github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.5.10 // indirect - github.com/aws/aws-sdk-go-v2/internal/ini v1.7.3 // indirect - github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.10.4 // indirect - github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.10.10 // indirect - github.com/aws/aws-sdk-go-v2/service/sso v1.18.7 // indirect - github.com/aws/aws-sdk-go-v2/service/ssooidc v1.21.7 // indirect - github.com/aws/aws-sdk-go-v2/service/sts v1.26.7 // indirect - github.com/aws/smithy-go v1.19.0 // indirect + github.com/aws/aws-sdk-go-v2/credentials v1.17.7 // indirect + github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.15.3 // indirect + github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.3 // indirect + github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.3 // indirect + github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.1 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.5 // indirect + github.com/aws/aws-sdk-go-v2/service/sso v1.20.2 // indirect + github.com/aws/aws-sdk-go-v2/service/ssooidc v1.23.2 // indirect + github.com/aws/aws-sdk-go-v2/service/sts v1.28.4 // indirect + github.com/aws/smithy-go v1.20.1 // indirect + github.com/chrispappas/golang-generics-set 
v1.0.1 // indirect github.com/davecgh/go-spew v1.1.1 // indirect github.com/dgryski/trifles v0.0.0-20200705224438-cafc02a1ee2b // indirect github.com/fatih/color v1.16.0 // indirect @@ -74,58 +82,57 @@ require ( github.com/go-logr/stdr v1.2.2 // indirect github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/golang/protobuf v1.5.3 // indirect - github.com/golang/snappy v0.0.3 // indirect + github.com/golang/snappy v0.0.4 // indirect github.com/google/s2a-go v0.1.7 // indirect github.com/googleapis/enterprise-certificate-proxy v0.3.2 // indirect - github.com/googleapis/gax-go/v2 v2.12.0 // indirect + github.com/googleapis/gax-go/v2 v2.12.2 // indirect github.com/gorilla/schema v1.2.0 // indirect github.com/gorilla/securecookie v1.1.1 // indirect - github.com/gorilla/websocket v1.5.0 // indirect + github.com/gorilla/websocket v1.5.1 // indirect github.com/hashicorp/errwrap v1.1.0 // indirect github.com/hashicorp/go-multierror v1.1.1 // indirect - github.com/hashicorp/golang-lru/v2 v2.0.3 // indirect + github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect github.com/jmespath/go-jmespath v0.4.0 // indirect - github.com/klauspost/compress v1.13.6 // indirect + github.com/klauspost/compress v1.17.5 // indirect github.com/mattn/go-colorable v0.1.13 // indirect github.com/mattn/go-isatty v0.0.20 // indirect github.com/mitchellh/mapstructure v1.5.0 // indirect - github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe // indirect + github.com/montanaflynn/stats v0.7.1 // indirect github.com/opentracing/opentracing-go v1.2.0 // indirect + github.com/paulmach/go.geojson v1.5.0 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect github.com/posener/complete v1.2.3 // indirect github.com/rs/cors v1.10.1 // indirect github.com/sendgrid/rest v2.6.9+incompatible // indirect github.com/sirupsen/logrus v1.9.3 // indirect github.com/sosodev/duration v1.2.0 // indirect - github.com/stretchr/objx v0.5.1 // indirect 
github.com/valyala/bytebufferpool v1.0.0 // indirect github.com/valyala/fasttemplate v1.2.2 // indirect github.com/xdg-go/pbkdf2 v1.0.0 // indirect github.com/xdg-go/scram v1.1.2 // indirect github.com/xdg-go/stringprep v1.0.4 // indirect - github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d // indirect + github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a // indirect github.com/zitadel/logging v0.3.4 // indirect go.opencensus.io v0.24.0 // indirect go.opentelemetry.io/contrib v1.22.0 // indirect - go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.47.0 // indirect - go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.47.0 // indirect - go.opentelemetry.io/otel/metric v1.22.0 // indirect - go.opentelemetry.io/otel/trace v1.22.0 // indirect + go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 // indirect + go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 // indirect + go.opentelemetry.io/otel/metric v1.24.0 // indirect + go.opentelemetry.io/otel/trace v1.24.0 // indirect go.uber.org/multierr v1.11.0 // indirect - golang.org/x/mod v0.14.0 // indirect - golang.org/x/net v0.20.0 // indirect - golang.org/x/oauth2 v0.16.0 // indirect + golang.org/x/mod v0.15.0 // indirect + golang.org/x/net v0.22.0 // indirect + golang.org/x/oauth2 v0.17.0 // indirect golang.org/x/sync v0.6.0 // indirect - golang.org/x/sys v0.16.0 // indirect + golang.org/x/sys v0.18.0 // indirect golang.org/x/time v0.5.0 // indirect - golang.org/x/tools v0.17.0 // indirect + golang.org/x/tools v0.18.0 // indirect golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 // indirect - google.golang.org/api v0.161.0 // indirect + google.golang.org/api v0.169.0 // indirect google.golang.org/appengine v1.6.8 // indirect - google.golang.org/genproto v0.0.0-20240125205218-1f4bbc51befe // indirect - google.golang.org/genproto/googleapis/api v0.0.0-20240125205218-1f4bbc51befe // indirect - 
google.golang.org/genproto/googleapis/rpc v0.0.0-20240125205218-1f4bbc51befe // indirect - google.golang.org/grpc v1.61.0 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20240304161311-37d4d3c04a78 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20240304161311-37d4d3c04a78 // indirect + google.golang.org/grpc v1.62.0 // indirect google.golang.org/protobuf v1.32.0 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect diff --git a/go.sum b/go.sum index fe2f940..11d6020 100644 --- a/go.sum +++ b/go.sum @@ -1,21 +1,21 @@ cloud.google.com/go v0.16.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -cloud.google.com/go v0.112.0 h1:tpFCD7hpHFlQ8yPwT3x+QeXqc2T6+n6T+hmABHfDUSM= -cloud.google.com/go/compute v1.23.4 h1:EBT9Nw4q3zyE7G45Wvv3MzolIrCJEuHys5muLY0wvAw= -cloud.google.com/go/compute v1.23.4/go.mod h1:/EJMj55asU6kAFnuZET8zqgwgJ9FvXWXOkkfQZa4ioI= +cloud.google.com/go v0.112.1 h1:uJSeirPke5UNZHIb4SxfZklVSiWWVqW4oXlETwZziwM= +cloud.google.com/go/compute v1.24.0 h1:phWcR2eWzRJaL/kOiJwfFsPs4BaKq1j6vnpZrc1YlVg= +cloud.google.com/go/compute v1.24.0/go.mod h1:kw1/T+h/+tK2LJK0wiPPx1intgdAM3j/g3hFDlscY40= cloud.google.com/go/compute/metadata v0.2.3 h1:mg4jlk7mCAj6xXp9UJ4fjI9VUI5rubuGBW5aJ7UnBMY= cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA= cloud.google.com/go/logging v1.9.0 h1:iEIOXFO9EmSiTjDmfpbRjOxECO7R8C7b8IXUGOj7xZw= cloud.google.com/go/logging v1.9.0/go.mod h1:1Io0vnZv4onoUnsVUQY3HZ3Igb1nBchky0A0y7BBBhE= -cloud.google.com/go/longrunning v0.5.4 h1:w8xEcbZodnA2BbW6sVirkkoC+1gP8wS57EUUgGS0GVg= -cloud.google.com/go/longrunning v0.5.4/go.mod h1:zqNVncI0BOP8ST6XQD1+VcvuShMmq7+xFSzOL++V0dI= -cloud.google.com/go/monitoring v1.17.0 h1:blrdvF0MkPPivSO041ihul7rFMhXdVp8Uq7F59DKXTU= -cloud.google.com/go/monitoring v1.17.0/go.mod h1:KwSsX5+8PnXv5NJnICZzW2R8pWTis8ypC4zmdRD63Tw= 
+cloud.google.com/go/longrunning v0.5.5 h1:GOE6pZFdSrTb4KAiKnXsJBtlE6mEyaW44oKyMILWnOg= +cloud.google.com/go/longrunning v0.5.5/go.mod h1:WV2LAxD8/rg5Z1cNW6FJ/ZpX4E4VnDnoTk0yawPBB7s= +cloud.google.com/go/monitoring v1.18.0 h1:NfkDLQDG2UR3WYZVQE8kwSbUIEyIqJUPl+aOQdFH1T4= +cloud.google.com/go/monitoring v1.18.0/go.mod h1:c92vVBCeq/OB4Ioyo+NbN2U7tlg5ZH41PZcdvfc+Lcg= cloud.google.com/go/trace v1.10.5 h1:0pr4lIKJ5XZFYD9GtxXEWr0KkVeigc3wlGpZco0X1oA= cloud.google.com/go/trace v1.10.5/go.mod h1:9hjCV1nGBCtXbAE4YK7OqJ8pmPYSxPA0I67JwRd5s3M= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= -github.com/99designs/gqlgen v0.17.43 h1:I4SYg6ahjowErAQcHFVKy5EcWuwJ3+Xw9z2fLpuFCPo= -github.com/99designs/gqlgen v0.17.43/go.mod h1:lO0Zjy8MkZgBdv4T1U91x09r0e0WFOdhVUutlQs1Rsc= +github.com/99designs/gqlgen v0.17.44 h1:OS2wLk/67Y+vXM75XHbwRnNYJcbuJd4OBL76RX3NQQA= +github.com/99designs/gqlgen v0.17.44/go.mod h1:UTCu3xpK2mLI5qcMNw+HKDiEL77it/1XtAjisC4sLwM= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/toml v1.3.2 h1:o7IhLm0Msx3BaB+n3Ag7L8EVlByGnpq14C4YWiu/gL8= github.com/BurntSushi/toml v1.3.2/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= @@ -46,42 +46,42 @@ github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0 h1:jfIu9sQUG6Ig github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0/go.mod h1:t2tdKJDJF9BV14lnkjHmOQgcvEKgtqs5a1N3LNdJhGE= github.com/auth0/go-jwt-middleware/v2 v2.2.1 h1:pqxEIwlCztD0T9ZygGfOrw4NK/F9iotnCnPJVADKbkE= github.com/auth0/go-jwt-middleware/v2 v2.2.1/go.mod h1:CSi0tuu0QrALbWdiQZwqFL8SbBhj4e2MJzkvNfjY0Us= -github.com/aws/aws-sdk-go-v2 v1.24.1 h1:xAojnj+ktS95YZlDf0zxWBkbFtymPeDP+rvUQIH3uAU= -github.com/aws/aws-sdk-go-v2 v1.24.1/go.mod h1:LNh45Br1YAkEKaAqvmE1m8FUx6a5b/V0oAKV7of29b4= -github.com/aws/aws-sdk-go-v2/config v1.26.6 h1:Z/7w9bUqlRI0FFQpetVuFYEsjzE3h7fpU6HuGmfPL/o= -github.com/aws/aws-sdk-go-v2/config 
v1.26.6/go.mod h1:uKU6cnDmYCvJ+pxO9S4cWDb2yWWIH5hra+32hVh1MI4= -github.com/aws/aws-sdk-go-v2/credentials v1.16.16 h1:8q6Rliyv0aUFAVtzaldUEcS+T5gbadPbWdV1WcAddK8= -github.com/aws/aws-sdk-go-v2/credentials v1.16.16/go.mod h1:UHVZrdUsv63hPXFo1H7c5fEneoVo9UXiz36QG1GEPi0= -github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.14.11 h1:c5I5iH+DZcH3xOIMlz3/tCKJDaHFwYEmxvlh2fAcFo8= -github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.14.11/go.mod h1:cRrYDYAMUohBJUtUnOhydaMHtiK/1NZ0Otc9lIb6O0Y= -github.com/aws/aws-sdk-go-v2/internal/configsources v1.2.10 h1:vF+Zgd9s+H4vOXd5BMaPWykta2a6Ih0AKLq/X6NYKn4= -github.com/aws/aws-sdk-go-v2/internal/configsources v1.2.10/go.mod h1:6BkRjejp/GR4411UGqkX8+wFMbFbqsUIimfK4XjOKR4= -github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.5.10 h1:nYPe006ktcqUji8S2mqXf9c/7NdiKriOwMvWQHgYztw= -github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.5.10/go.mod h1:6UV4SZkVvmODfXKql4LCbaZUpF7HO2BX38FgBf9ZOLw= -github.com/aws/aws-sdk-go-v2/internal/ini v1.7.3 h1:n3GDfwqF2tzEkXlv5cuy4iy7LpKDtqDMcNLfZDu9rls= -github.com/aws/aws-sdk-go-v2/internal/ini v1.7.3/go.mod h1:6fQQgfuGmw8Al/3M2IgIllycxV7ZW7WCdVSqfBeUiCY= -github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.10.4 h1:/b31bi3YVNlkzkBrm9LfpaKoaYZUxIAj4sHfOTmLfqw= -github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.10.4/go.mod h1:2aGXHFmbInwgP9ZfpmdIfOELL79zhdNYNmReK8qDfdQ= -github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.10.10 h1:DBYTXwIGQSGs9w4jKm60F5dmCQ3EEruxdc0MFh+3EY4= -github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.10.10/go.mod h1:wohMUQiFdzo0NtxbBg0mSRGZ4vL3n0dKjLTINdcIino= +github.com/aws/aws-sdk-go-v2 v1.25.3 h1:xYiLpZTQs1mzvz5PaI6uR0Wh57ippuEthxS4iK5v0n0= +github.com/aws/aws-sdk-go-v2 v1.25.3/go.mod h1:35hUlJVYd+M++iLI3ALmVwMOyRYMmRqUXpTtRGW+K9I= +github.com/aws/aws-sdk-go-v2/config v1.27.7 h1:JSfb5nOQF01iOgxFI5OIKWwDiEXWTyTgg1Mm1mHi0A4= +github.com/aws/aws-sdk-go-v2/config v1.27.7/go.mod h1:PH0/cNpoMO+B04qET699o5W92Ca79fVtbUnvMIZro4I= 
+github.com/aws/aws-sdk-go-v2/credentials v1.17.7 h1:WJd+ubWKoBeRh7A5iNMnxEOs982SyVKOJD+K8HIezu4= +github.com/aws/aws-sdk-go-v2/credentials v1.17.7/go.mod h1:UQi7LMR0Vhvs+44w5ec8Q+VS+cd10cjwgHwiVkE0YGU= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.15.3 h1:p+y7FvkK2dxS+FEwRIDHDe//ZX+jDhP8HHE50ppj4iI= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.15.3/go.mod h1:/fYB+FZbDlwlAiynK9KDXlzZl3ANI9JkD0Uhz5FjNT4= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.3 h1:ifbIbHZyGl1alsAhPIYsHOg5MuApgqOvVeI8wIugXfs= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.3/go.mod h1:oQZXg3c6SNeY6OZrDY+xHcF4VGIEoNotX2B4PrDeoJI= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.3 h1:Qvodo9gHG9F3E8SfYOspPeBt0bjSbsevK8WhRAUHcoY= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.3/go.mod h1:vCKrdLXtybdf/uQd/YfVR2r5pcbNuEYKzMQpcxmeSJw= +github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0 h1:hT8rVHwugYE2lEfdFE0QWVo81lF7jMrYJVDWI+f+VxU= +github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0/go.mod h1:8tu/lYfQfFe6IGnaOdrpVgEL2IrrDOf6/m9RQum4NkY= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.1 h1:EyBZibRTVAs6ECHZOw5/wlylS9OcTzwyjeQMudmREjE= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.1/go.mod h1:JKpmtYhhPs7D97NL/ltqz7yCkERFW5dOlHyVl66ZYF8= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.5 h1:K/NXvIftOlX+oGgWGIa3jDyYLDNsdVhsjHmsBH2GLAQ= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.5/go.mod h1:cl9HGLV66EnCmMNzq4sYOti+/xo8w34CsgzVtm2GgsY= github.com/aws/aws-sdk-go-v2/service/ses v1.19.6 h1:2WWiQwUVU39kD8EGYw/sTGU+REd5Q+BFarTccU00Asc= github.com/aws/aws-sdk-go-v2/service/ses v1.19.6/go.mod h1:huHEdSNRqZOquzLTTjbBoEpoz7snBRwu2fe1dvvhZwE= -github.com/aws/aws-sdk-go-v2/service/sso v1.18.7 h1:eajuO3nykDPdYicLlP3AGgOyVN3MOlFmZv7WGTuJPow= -github.com/aws/aws-sdk-go-v2/service/sso v1.18.7/go.mod h1:+mJNDdF+qiUlNKNC3fxn74WWNN+sOiGOEImje+3ScPM= -github.com/aws/aws-sdk-go-v2/service/ssooidc 
v1.21.7 h1:QPMJf+Jw8E1l7zqhZmMlFw6w1NmfkfiSK8mS4zOx3BA= -github.com/aws/aws-sdk-go-v2/service/ssooidc v1.21.7/go.mod h1:ykf3COxYI0UJmxcfcxcVuz7b6uADi1FkiUz6Eb7AgM8= -github.com/aws/aws-sdk-go-v2/service/sts v1.26.7 h1:NzO4Vrau795RkUdSHKEwiR01FaGzGOH1EETJ+5QHnm0= -github.com/aws/aws-sdk-go-v2/service/sts v1.26.7/go.mod h1:6h2YuIoxaMSCFf5fi1EgZAwdfkGMgDY+DVfa61uLe4U= -github.com/aws/smithy-go v1.19.0 h1:KWFKQV80DpP3vJrrA9sVAHQ5gc2z8i4EzrLhLlWXcBM= -github.com/aws/smithy-go v1.19.0/go.mod h1:NukqUGpCZIILqqiV0NIjeFh24kd/FAa4beRb6nbIUPE= +github.com/aws/aws-sdk-go-v2/service/sso v1.20.2 h1:XOPfar83RIRPEzfihnp+U6udOveKZJvPQ76SKWrLRHc= +github.com/aws/aws-sdk-go-v2/service/sso v1.20.2/go.mod h1:Vv9Xyk1KMHXrR3vNQe8W5LMFdTjSeWk0gBZBzvf3Qa0= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.23.2 h1:pi0Skl6mNl2w8qWZXcdOyg197Zsf4G97U7Sso9JXGZE= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.23.2/go.mod h1:JYzLoEVeLXk+L4tn1+rrkfhkxl6mLDEVaDSvGq9og90= +github.com/aws/aws-sdk-go-v2/service/sts v1.28.4 h1:Ppup1nVNAOWbBOrcoOxaxPeEnSFB2RnnQdguhXpmeQk= +github.com/aws/aws-sdk-go-v2/service/sts v1.28.4/go.mod h1:+K1rNPVyGxkRuv9NNiaZ4YhBFuyw2MMA9SlIJ1Zlpz8= +github.com/aws/smithy-go v1.20.1 h1:4SZlSlMr36UEqC7XOyRVb27XMeZubNcBNN+9IgEPIQw= +github.com/aws/smithy-go v1.20.1/go.mod h1:krry+ya/rV9RDcV/Q16kpu6ypI4K2czasz0NC3qS14E= github.com/bradfitz/gomemcache v0.0.0-20170208213004-1952afaa557d/go.mod h1:PmM6Mmwb0LSuEubjR8N7PtNe1KxZLtOUHtbeikc5h60= github.com/bradleyjkemp/cupaloy/v2 v2.6.0 h1:knToPYa2xtfg42U3I6punFEjaGFKWQRXJwj0JTv4mTs= github.com/bradleyjkemp/cupaloy/v2 v2.6.0/go.mod h1:bm7JXdkRd4BHJk9HpwqAI8BoAY1lps46Enkdqw6aRX0= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/chrispappas/golang-generics-set v1.0.1 h1:91l8cInAWTxCPwZ8UNg7qkkPsdFdkYS9hytsd8UJsIU= +github.com/chrispappas/golang-generics-set v1.0.1/go.mod h1:cp8j73+rlDyFF9PrjUkrRvi8L4jSRIsRK6Q1nPPIoqo= github.com/client9/misspell v0.3.4/go.mod 
h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= -github.com/cncf/xds/go v0.0.0-20231109132714-523115ebc101 h1:7To3pQ+pZo0i3dsWEbinPNFs5gPSBOsJtx3wTT94VBY= -github.com/cncf/xds/go v0.0.0-20231109132714-523115ebc101/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= @@ -93,8 +93,6 @@ github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymF github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= -github.com/envoyproxy/protoc-gen-validate v1.0.2 h1:QkIBuU5k+x7/QXPvPPnWXWlCdaBFApVqftFV6k087DA= -github.com/envoyproxy/protoc-gen-validate v1.0.2/go.mod h1:GpiZQP3dDbg4JouG/NNS7QWXpgx6x8QiMKdmN72jogE= github.com/fatih/color v1.16.0 h1:zmkK9Ngbjj+K0yRhTVONQh1p/HknKYSlNT+vZCzyokM= github.com/fatih/color v1.16.0/go.mod h1:fL2Sau1YI5c0pdGEVCbKQbLXB6edEj1ZgiY4NijnWvE= github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= @@ -145,16 +143,14 @@ github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiu github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg= github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/golang/snappy v0.0.0-20170215233205-553a64147049/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= -github.com/golang/snappy v0.0.1/go.mod 
h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= -github.com/golang/snappy v0.0.3 h1:fHPg5GQYlCeLIPB9BZqMVR5nR9A+IM5zcgeTdjMYmLA= -github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= +github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/google/go-cmp v0.1.1-0.20171103154506-982329095285/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= @@ -169,16 +165,18 @@ github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+ github.com/googleapis/enterprise-certificate-proxy v0.3.2 h1:Vie5ybvEvT75RniqhfFxPRy3Bf7vr3h0cechB90XaQs= github.com/googleapis/enterprise-certificate-proxy v0.3.2/go.mod h1:VLSiSSBs/ksPL8kq3OBOQ6WRI2QnaFynd1DCjZ62+V0= github.com/googleapis/gax-go v2.0.0+incompatible/go.mod h1:SFVmujtThgffbyetf+mdk2eWhX2bMyUtNHzFKcPA9HY= -github.com/googleapis/gax-go/v2 v2.12.0 h1:A+gCJKdRfqXkr+BIRGtZLibNXf0m1f9E4HG56etFpas= -github.com/googleapis/gax-go/v2 v2.12.0/go.mod h1:y+aIqrI5eb1YGMVJfuV3185Ts/D7qKpsEkdD5+I6QGU= +github.com/googleapis/gax-go/v2 v2.12.2 h1:mhN09QQW1jEWeMF74zGR81R30z4VJzjZsfkUhuHF+DA= 
+github.com/googleapis/gax-go/v2 v2.12.2/go.mod h1:61M8vcyyXR2kqKFxKrfA22jaA8JGF7Dc8App1U3H6jc= +github.com/goombaio/namegenerator v0.0.0-20181006234301-989e774b106e h1:XmA6L9IPRdUr28a+SK/oMchGgQy159wvzXA5tJ7l+40= +github.com/goombaio/namegenerator v0.0.0-20181006234301-989e774b106e/go.mod h1:AFIo+02s+12CEg8Gzz9kzhCbmbq6JcKNrhHffCGA9z4= github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY= github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ= github.com/gorilla/schema v1.2.0 h1:YufUaxZYCKGFuAq3c96BOhjgd5nmXiOY9NGzF247Tsc= github.com/gorilla/schema v1.2.0/go.mod h1:kgLaKoK1FELgZqMAVxx/5cbj0kT+57qxUrAlIO2eleU= github.com/gorilla/securecookie v1.1.1 h1:miw7JPhV+b/lAHSXz4qd/nN9jRiAFV5FwjeKyCS8BvQ= github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4= -github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc= -github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/gorilla/websocket v1.5.1 h1:gmztn0JnHVt9JZquRuzLw3g4wouNVzKL15iLr/zn/QY= +github.com/gorilla/websocket v1.5.1/go.mod h1:x3kM2JMyaluk02fnUJpQuwD2dCS5NDG2ZHL0uE0tcaY= github.com/gregjones/httpcache v0.0.0-20170920190843-316c5e0ff04e/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= @@ -186,8 +184,8 @@ github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brv github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= -github.com/hashicorp/golang-lru/v2 v2.0.3 h1:kmRrRLlInXvng0SmLxmQpQkpbYAvcXm7NPDrgxJa9mE= 
-github.com/hashicorp/golang-lru/v2 v2.0.3/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM= +github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k= +github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM= github.com/hashicorp/hcl v0.0.0-20170914154624-68e816d1c783/go.mod h1:oZtUIOe8dh44I2q6ScRibXws4Ajl+d+nod3AaR9vL5w= github.com/iancoleman/strcase v0.3.0 h1:nTXanmYxhfFAMjZL34Ov6gkzEsSJZ5DbhxWjvSASxEI= github.com/iancoleman/strcase v0.3.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= @@ -201,8 +199,8 @@ github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfC github.com/jpillora/opts v1.2.3 h1:Q0YuOM7y0BlunHJ7laR1TUxkUA7xW8A2rciuZ70xs8g= github.com/jpillora/opts v1.2.3/go.mod h1:7p7X/vlpKZmtaDFYKs956EujFqA6aCrOkcCaS6UBcR4= github.com/jung-kurt/gofpdf v1.0.3-0.20190309125859-24315acbbda5/go.mod h1:7Id9E/uU8ce6rXgefFLlgrJj/GYY22cpxn+r32jIOes= -github.com/klauspost/compress v1.13.6 h1:P76CopJELS0TiO2mebmnzgWaajssP/EszplttgQxcgc= -github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= +github.com/klauspost/compress v1.17.5 h1:d4vBd+7CHydUqpFBgUEKkSdtSugf9YFmSkvUYPquI5E= +github.com/klauspost/compress v1.17.5/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= @@ -233,8 +231,8 @@ github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d/go.mod h1:01TrycV0kFyex github.com/mitchellh/mapstructure v0.0.0-20170523030023-d0303fe80992/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= 
-github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe h1:iruDEfMl2E6fbMZ9s0scYfZQ84/6SPL6zC8ACM2oIL0= -github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc= +github.com/montanaflynn/stats v0.7.1 h1:etflOAAHORrCC44V+aR6Ftzort912ZU+YLiSTuV8eaE= +github.com/montanaflynn/stats v0.7.1/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow= github.com/nicksnyder/go-i18n/v2 v2.4.0 h1:3IcvPOAvnCKwNm0TB0dLDTuawWEj+ax/RERNC+diLMM= github.com/nicksnyder/go-i18n/v2 v2.4.0/go.mod h1:nxYSZE9M0bf3Y70gPQjN9ha7XNHX7gMc814+6wVyEI4= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs= @@ -243,6 +241,8 @@ github.com/oklog/ulid v1.3.1 h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4= github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U= github.com/opentracing/opentracing-go v1.2.0 h1:uEJPy/1a5RIPAJ0Ov+OIO8OxWu77jEv+1B0VhjKrZUs= github.com/opentracing/opentracing-go v1.2.0/go.mod h1:GxEUsuufX4nBwe+T+Wl9TAgYrxe9dPLANfrWvHYVTgc= +github.com/paulmach/go.geojson v1.5.0 h1:7mhpMK89SQdHFcEGomT7/LuJhwhEgfmpWYVlVmLEdQw= +github.com/paulmach/go.geojson v1.5.0/go.mod h1:DgdUy2rRVDDVgKqrjMe2vZAHMfhDTrjVKt3LmHIXGbU= github.com/pelletier/go-toml v1.0.1-0.20170904195809-1d6b12b7cb29/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= @@ -254,6 +254,8 @@ github.com/posener/complete v1.2.3/go.mod h1:WZIdtGGp+qx0sLrYKtIRAruyNpv6hFCicSg github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/ravilushqa/otelgqlgen v0.15.0 h1:U85nrlweMXTGaMChUViYM39/MXBZVeVVlpuHq+6eECQ= github.com/ravilushqa/otelgqlgen v0.15.0/go.mod h1:o+1Eju0VySmgq2BP8Vupz2YrN21Bj7D7imBqu3m2uB8= +github.com/reearth/reearth-cms/server 
v0.0.0-20240903081242-e53864457087 h1:qh7IMP4ILvhrNijpGhrbo/uGrTsix+6dp/+nQOMj+L8= +github.com/reearth/reearth-cms/server v0.0.0-20240903081242-e53864457087/go.mod h1:rw/ZONl2/2OY6UhP3uwOZxnrjcmkd2dRxH5C9k8XrwE= github.com/rs/cors v1.10.1 h1:L0uuZVXIKlI1SShY2nhFfo44TYvDPQ1w4oFkUJNfhyo= github.com/rs/cors v1.10.1/go.mod h1:XyqrcTp5zjWr1wsJ8PIRZssZ8b/WMcMf71DJnit4EMU= github.com/samber/lo v1.39.0 h1:4gTz1wUhNYLhFSKl6O+8peW0v2F4BCY034GRpU9WnuA= @@ -279,8 +281,8 @@ github.com/spf13/viper v1.0.0/go.mod h1:A8kyI5cUJhb8N+3pkfONlcEcZbueH6nhAm0Fq7Sr github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= -github.com/stretchr/objx v0.5.1 h1:4VhoImhV/Bm0ToFkXFi8hXNXwpDRZ/ynw3amt82mzq0= -github.com/stretchr/objx v0.5.1/go.mod h1:/iHQpkQwBD6DLUmQ4pE+s1TXdob1mORJ4/UFdrifcy0= +github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= @@ -288,9 +290,8 @@ github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/ github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= -github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= -github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= -github.com/stretchr/testify 
v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/uber/jaeger-client-go v2.30.0+incompatible h1:D6wyKGCecFaSRUpo8lCVbaOOb6ThwMmTEbhRwtKR97o= github.com/uber/jaeger-client-go v2.30.0+incompatible/go.mod h1:WVhlPFC8FDjOFMMWRy2pZqQJSXxYSwNYOkTr/Z6d3Kk= github.com/uber/jaeger-lib v2.4.1+incompatible h1:td4jdvLcExb4cBISKIpHuGoVXh+dVKhn2Um6rjCsSsg= @@ -307,31 +308,32 @@ github.com/xdg-go/scram v1.1.2 h1:FHX5I5B4i4hKRVRBCFRxq1iQRej7WO3hhBuJf+UUySY= github.com/xdg-go/scram v1.1.2/go.mod h1:RT/sEzTbU5y00aCK8UOx6R7YryM0iF1N2MOmC3kKLN4= github.com/xdg-go/stringprep v1.0.4 h1:XLI/Ng3O1Atzq0oBs3TWm+5ZVgkq2aqdlvP9JtoZ6c8= github.com/xdg-go/stringprep v1.0.4/go.mod h1:mPGuuIYwz7CmR2bT9j4GbQqutWS1zV24gijq1dTyGkM= -github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d h1:splanxYIlg+5LfHAM6xpdFEAYOk8iySO56hMFq6uLyA= -github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA= +github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a h1:fZHgsYlfvtyqToslyjUt3VOPF4J7aK/3MPcK7xp3PDk= +github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a/go.mod h1:ul22v+Nro/R083muKhosV54bj5niojjWZvU8xrevuH4= +github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= github.com/zitadel/logging v0.3.4 h1:9hZsTjMMTE3X2LUi0xcF9Q9EdLo+FAezeu52ireBbHM= github.com/zitadel/logging v0.3.4/go.mod h1:aPpLQhE+v6ocNK0TWrBrd363hZ95KcI17Q1ixAQwZF0= github.com/zitadel/oidc v1.13.5 h1:7jhh68NGZitLqwLiVU9Dtwa4IraJPFF1vS+4UupO93U= github.com/zitadel/oidc v1.13.5/go.mod h1:rHs1DhU3Sv3tnI6bQRVlFa3u0lCwtR7S21WHY+yXgPA= -go.mongodb.org/mongo-driver v1.13.1 h1:YIc7HTYsKndGK4RFzJ3covLz1byri52x0IoMB0Pt/vk= -go.mongodb.org/mongo-driver v1.13.1/go.mod 
h1:wcDf1JBCXy2mOW0bWHwO/IOYqdca1MPCwDtFu/Z9+eo= +go.mongodb.org/mongo-driver v1.14.0 h1:P98w8egYRjYe3XDjxhYJagTokP/H6HzlsnojRgZRd80= +go.mongodb.org/mongo-driver v1.14.0/go.mod h1:Vzb0Mk/pa7e6cWw85R4F/endUC3u0U9jGcNU603k65c= go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= go.opentelemetry.io/contrib v1.22.0 h1:QflN9z334UrOPzGGEr8VaMlWm+i+d9YLW8KzQtbvmBM= go.opentelemetry.io/contrib v1.22.0/go.mod h1:usW9bPlrjHiJFbK0a6yK/M5wNHs3nLmtrT3vzhoD3co= -go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.47.0 h1:UNQQKPfTDe1J81ViolILjTKPr9WetKW6uei2hFgJmFs= -go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.47.0/go.mod h1:r9vWsPS/3AQItv3OSlEJ/E4mbrhUbbw18meOjArPtKQ= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.47.0 h1:sv9kVfal0MK0wBMCOGr+HeJm9v803BkJxGrk2au7j08= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.47.0/go.mod h1:SK2UL73Zy1quvRPonmOmRDiWk1KBV3LyIeeIxcEApWw= -go.opentelemetry.io/otel v1.22.0 h1:xS7Ku+7yTFvDfDraDIJVpw7XPyuHlB9MCiqqX5mcJ6Y= -go.opentelemetry.io/otel v1.22.0/go.mod h1:eoV4iAi3Ea8LkAEI9+GFT44O6T/D0GWAVFyZVCC6pMI= -go.opentelemetry.io/otel/metric v1.22.0 h1:lypMQnGyJYeuYPhOM/bgjbFM6WE44W1/T45er4d8Hhg= -go.opentelemetry.io/otel/metric v1.22.0/go.mod h1:evJGjVpZv0mQ5QBRJoBF64yMuOf4xCWdXjK8pzFvliY= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 h1:4Pp6oUg3+e/6M4C0A/3kJ2VYa++dsWVTtGgLVj5xtHg= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0/go.mod h1:Mjt1i1INqiaoZOMGR1RIUJN+i3ChKoFRqzrRQhlkbs0= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 h1:jq9TW8u3so/bN+JPT166wjOI6/vQPF6Xe7nMNIltagk= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0/go.mod h1:p8pYQP+m5XfbZm9fxtSKAbM6oIllS7s2AfxrChvc7iw= +go.opentelemetry.io/otel v1.24.0 
h1:0LAOdjNmQeSTzGBzduGe/rU4tZhMwL5rWgtp9Ku5Jfo= +go.opentelemetry.io/otel v1.24.0/go.mod h1:W7b9Ozg4nkF5tWI5zsXkaKKDjdVjpD4oAt9Qi/MArHo= +go.opentelemetry.io/otel/metric v1.24.0 h1:6EhoGWWK28x1fbpA4tYTOWBkPefTDQnb8WSGXlc88kI= +go.opentelemetry.io/otel/metric v1.24.0/go.mod h1:VYhLe1rFfxuTXLgj4CBiyz+9WYBA8pNGJgDcSFRKBco= go.opentelemetry.io/otel/sdk v1.22.0 h1:6coWHw9xw7EfClIC/+O31R8IY3/+EiRFHevmHafB2Gw= go.opentelemetry.io/otel/sdk v1.22.0/go.mod h1:iu7luyVGYovrRpe2fmj3CVKouQNdTOkxtLzPvPz1DOc= -go.opentelemetry.io/otel/trace v1.22.0 h1:Hg6pPujv0XG9QaVbGOBVHunyuLcCC3jN7WEhPx83XD0= -go.opentelemetry.io/otel/trace v1.22.0/go.mod h1:RbbHXVqKES9QhzZq/fE5UnOSILqRt40a21sPw2He1xo= +go.opentelemetry.io/otel/trace v1.24.0 h1:CsKnnL4dUAr/0llH9FKuc698G04IrpWV0MQA/Y1YELI= +go.opentelemetry.io/otel/trace v1.24.0/go.mod h1:HPc3Xr/cOApsBI154IU0OI0HJexz+aw5uPdbs3UCjNU= go.uber.org/atomic v1.11.0 h1:ZvwS0R+56ePWxUNi+Atn9dWONBPp/AUETXlHW0DxSjE= go.uber.org/atomic v1.11.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0= go.uber.org/goleak v1.2.0 h1:xqgm/S+aQvhWFTtR0XK3Jvg7z8kGV8P4X14IzwN3Eqk= @@ -342,11 +344,12 @@ go.uber.org/zap v1.26.0 h1:sI7k6L95XOKS281NhVKOFCUNIvv9e0w4BF8N3u+tCRo= go.uber.org/zap v1.26.0/go.mod h1:dtElttAiwGvoJ/vj4IwHBS/gXsEu/pZ50mUIRWuG0so= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod 
h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.18.0 h1:PGVlW0xEltQnzFZ55hkuX5+KLyrMYhHld1YHO4AKcdc= -golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg= +golang.org/x/crypto v0.21.0 h1:X31++rzVUdKhX5sWmSOFZxx8UW/ldWx55cbf08iNAMA= +golang.org/x/crypto v0.21.0/go.mod h1:0BP7YvVV9gBbVKyeTG0Gyn+gZm94bibOW5BjDEYAOMs= golang.org/x/exp v0.0.0-20180321215751-8460e604b9de/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20180807140117-3d87b88a115f/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= @@ -363,9 +366,10 @@ golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvx golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= +golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= -golang.org/x/mod v0.14.0 h1:dGoOF9QVLYng8IHTm7BAyWqCqSheQ5pYWGhzW00YJr0= -golang.org/x/mod v0.14.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/mod v0.15.0 h1:SernR4v+D55NyBH2QiEQrlBAnj1ECL6AGrA5+dPaMY8= +golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -375,18 +379,19 @@ golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod 
h1:HSz+uSET+XFnRR8LxR golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= -golang.org/x/net v0.20.0 h1:aCL9BSgETF1k+blQaYUBx9hJ9LOGP3gAVemcZlf1Kpo= -golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY= +golang.org/x/net v0.22.0 h1:9sGLhx7iRIHEiX0oAJ3MRZMUCElJgy7Br1nO+AMN3Tc= +golang.org/x/net v0.22.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg= golang.org/x/oauth2 v0.0.0-20170912212905-13449ad91cb2/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= -golang.org/x/oauth2 v0.16.0 h1:aDkGMBSYxElaoP81NpoUoz2oo2R2wHdZpGToUxfyQrQ= -golang.org/x/oauth2 v0.16.0/go.mod h1:hqZ+0LWXsiVoZpeld6jVt06P3adbS2Uu911W1SsJv2o= +golang.org/x/oauth2 v0.17.0 h1:6m3ZPmLEFdVxKKWnKq4VqZ60gutO35zm+zrAHVmHyDQ= +golang.org/x/oauth2 v0.17.0/go.mod h1:OzPDGQiuQMguemayvdylqddI7qcD9lnSDb+1FiwQ5HA= golang.org/x/sync v0.0.0-20170517211232-f52d1811a629/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync 
v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ= golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= @@ -397,7 +402,8 @@ golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -408,17 +414,15 @@ golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.16.0 h1:xWw16ngr6ZMtmxDyKyIgsE93KNKz5HKmMa3b8ALHidU= -golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.18.0 
h1:DBdB3niSjOA/O0blCZBqDefyWNYveAYMNF1Wum0DYQ4= +golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= -golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/time v0.0.0-20170424234030-8be79e1e0910/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= @@ -433,10 +437,12 @@ golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3 golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= -golang.org/x/tools v0.17.0 h1:FvmRgNOcs3kOa+T20R1uhfP9F6HgG2mfxDv1vrx1Htc= -golang.org/x/tools v0.17.0/go.mod h1:xsh6VxdV005rRVaS6SSAf9oiAqljS7UZUacMZ8Bnsps= +golang.org/x/tools v0.18.0 h1:k8NLag8AGHnn+PHbl7g43CtqZAwG60vZkLqgyZgIHgQ= 
+golang.org/x/tools v0.18.0/go.mod h1:GL7B4CwcLLeo59yx/9UWWuNOW1n3VZ4f5axWfML7Lcg= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 h1:+cNy6SZtPcJQH3LJVLOSmiC7MMxXNOb3PU/VUEz+EhU= @@ -446,8 +452,8 @@ gonum.org/v1/gonum v0.8.2/go.mod h1:oe/vMfY3deqTw+1EZJhuvEW2iwGF1bW9wwu7XCu0+v0= gonum.org/v1/netlib v0.0.0-20190313105609-8cb42192e0e0/go.mod h1:wa6Ws7BG/ESfp6dHfk7C6KdzKA7wR7u/rKwOGE66zvw= gonum.org/v1/plot v0.0.0-20190515093506-e2840ee46a6b/go.mod h1:Wt8AAjI+ypCyYX3nZBvf6cAIx93T+c/OS2HFAYskSZc= google.golang.org/api v0.0.0-20170921000349-586095a6e407/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0= -google.golang.org/api v0.161.0 h1:oYzk/bs26WN10AV7iU7MVJVXBH8oCPS2hHyBiEeFoSU= -google.golang.org/api v0.161.0/go.mod h1:0mu0TpK33qnydLvWqbImq2b1eQ5FHRSDCBzAxX9ZHyw= +google.golang.org/api v0.169.0 h1:QwWPy71FgMWqJN/l6jVlFHUa29a7dcUy02I8o799nPY= +google.golang.org/api v0.169.0/go.mod h1:gpNOiMA2tZ4mf5R9Iwf4rK/Dcz0fbdIgWYWVoxmsyLg= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= @@ -457,20 +463,20 @@ google.golang.org/genproto v0.0.0-20170918111702-1e559d0a00ee/go.mod h1:JiN7NxoA google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod 
h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= -google.golang.org/genproto v0.0.0-20240125205218-1f4bbc51befe h1:USL2DhxfgRchafRvt/wYyyQNzwgL7ZiURcozOE/Pkvo= -google.golang.org/genproto v0.0.0-20240125205218-1f4bbc51befe/go.mod h1:cc8bqMqtv9gMOr0zHg2Vzff5ULhhL2IXP4sbcn32Dro= -google.golang.org/genproto/googleapis/api v0.0.0-20240125205218-1f4bbc51befe h1:0poefMBYvYbs7g5UkjS6HcxBPaTRAmznle9jnxYoAI8= -google.golang.org/genproto/googleapis/api v0.0.0-20240125205218-1f4bbc51befe/go.mod h1:4jWUdICTdgc3Ibxmr8nAJiiLHwQBY0UI0XZcEMaFKaA= -google.golang.org/genproto/googleapis/rpc v0.0.0-20240125205218-1f4bbc51befe h1:bQnxqljG/wqi4NTXu2+DJ3n7APcEA882QZ1JvhQAq9o= -google.golang.org/genproto/googleapis/rpc v0.0.0-20240125205218-1f4bbc51befe/go.mod h1:PAREbraiVEVGVdTZsVWjSbbTtSyGbAgIIvni8a8CD5s= +google.golang.org/genproto v0.0.0-20240213162025-012b6fc9bca9 h1:9+tzLLstTlPTRyJTh+ah5wIMsBW5c4tQwGTN3thOW9Y= +google.golang.org/genproto v0.0.0-20240213162025-012b6fc9bca9/go.mod h1:mqHbVIp48Muh7Ywss/AD6I5kNVKZMmAa/QEW58Gxp2s= +google.golang.org/genproto/googleapis/api v0.0.0-20240304161311-37d4d3c04a78 h1:SzXBGiWM1LNVYLCRP3e0/Gsze804l4jGoJ5lYysEO5I= +google.golang.org/genproto/googleapis/api v0.0.0-20240304161311-37d4d3c04a78/go.mod h1:O1cOfN1Cy6QEYr7VxtjOyP5AdAuR0aJ/MYZaaof623Y= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240304161311-37d4d3c04a78 h1:Xs9lu+tLXxLIfuci70nG4cpwaRC+mRQPUL7LoIeDJC4= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240304161311-37d4d3c04a78/go.mod h1:UCOku4NytXMJuLQE5VuqA5lX3PcHCBo8pxNyvkf4xBs= google.golang.org/grpc v1.2.1-0.20170921194603-d4b75ebd4f9f/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.25.1/go.mod 
h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= -google.golang.org/grpc v1.61.0 h1:TOvOcuXn30kRao+gfcvsebNEa5iZIiLkisYEkf7R7o0= -google.golang.org/grpc v1.61.0/go.mod h1:VUbo7IFqmF1QtCAstipjG0GIoq49KvMe9+h1jFLBNJs= +google.golang.org/grpc v1.62.0 h1:HQKZ/fa1bXkX1oFOvSjmZEUL8wLSaZTjCcLAlmZRtdk= +google.golang.org/grpc v1.62.0/go.mod h1:IWTG0VlJLCh1SkC58F7np9ka9mx/WNkjl4PGJaiq+QE= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= From 6ee07c6d6e3e16156f85a9651ce12f1ddf24235b Mon Sep 17 00:00:00 2001 From: shumon84 Date: Mon, 9 Sep 2024 03:06:22 +0900 Subject: [PATCH 10/10] feat(asset): fix lint --- asset/assetdomain/event/event_test.go | 8 ++--- asset/assetdomain/id.go | 1 + asset/assetdomain/integration/builder_test.go | 2 +- .../integration/integration_test.go | 2 +- asset/assetdomain/integration/list.go | 2 +- asset/assetdomain/integration/list_test.go | 2 +- asset/assetdomain/integration/webhook.go | 2 +- .../integration/webhook_builder.go | 2 +- .../integration/webhook_builder_test.go | 4 +-- asset/assetdomain/integration/webhook_test.go | 4 +-- asset/assetdomain/thread/comment.go | 2 +- asset/assetdomain/thread/comment_test.go | 2 +- asset/assetdomain/thread/thread.go | 2 +- asset/assetdomain/thread/thread_test.go | 4 +-- .../assetinteractor/asset_test.go | 4 +-- .../assetinteractor/common_test.go | 36 +++++++------------ go.mod | 3 -- go.sum | 12 ++----- 18 files changed, 38 insertions(+), 56 deletions(-) diff --git a/asset/assetdomain/event/event_test.go b/asset/assetdomain/event/event_test.go 
index c26b415..b903356 100644 --- a/asset/assetdomain/event/event_test.go +++ b/asset/assetdomain/event/event_test.go @@ -4,11 +4,11 @@ import ( "testing" "time" - "github.com/reearth/reearth-cms/server/pkg/asset" - "github.com/reearth/reearth-cms/server/pkg/id" - "github.com/reearth/reearth-cms/server/pkg/operator" - "github.com/reearth/reearth-cms/server/pkg/project" "github.com/reearth/reearthx/account/accountdomain/user" + id "github.com/reearth/reearthx/asset/assetdomain" + "github.com/reearth/reearthx/asset/assetdomain/asset" + "github.com/reearth/reearthx/asset/assetdomain/operator" + "github.com/reearth/reearthx/asset/assetdomain/project" "github.com/stretchr/testify/assert" ) diff --git a/asset/assetdomain/id.go b/asset/assetdomain/id.go index 5be3661..db9bed7 100644 --- a/asset/assetdomain/id.go +++ b/asset/assetdomain/id.go @@ -9,6 +9,7 @@ import ( "github.com/reearth/reearthx/util" "github.com/samber/lo" "regexp" + "slices" "strings" ) diff --git a/asset/assetdomain/integration/builder_test.go b/asset/assetdomain/integration/builder_test.go index 8976384..bd76b4e 100644 --- a/asset/assetdomain/integration/builder_test.go +++ b/asset/assetdomain/integration/builder_test.go @@ -6,8 +6,8 @@ import ( "testing" "time" - "github.com/reearth/reearth-cms/server/pkg/id" "github.com/reearth/reearthx/account/accountdomain" + id "github.com/reearth/reearthx/asset/assetdomain" "github.com/samber/lo" "github.com/stretchr/testify/assert" ) diff --git a/asset/assetdomain/integration/integration_test.go b/asset/assetdomain/integration/integration_test.go index 4e4347d..287389e 100644 --- a/asset/assetdomain/integration/integration_test.go +++ b/asset/assetdomain/integration/integration_test.go @@ -6,8 +6,8 @@ import ( "testing" "time" - "github.com/reearth/reearth-cms/server/pkg/id" "github.com/reearth/reearthx/account/accountdomain" + id "github.com/reearth/reearthx/asset/assetdomain" "github.com/samber/lo" "github.com/stretchr/testify/assert" ) diff --git 
a/asset/assetdomain/integration/list.go b/asset/assetdomain/integration/list.go index 06cc0bf..e08cc68 100644 --- a/asset/assetdomain/integration/list.go +++ b/asset/assetdomain/integration/list.go @@ -1,7 +1,7 @@ package integration import ( - "github.com/reearth/reearth-cms/server/pkg/event" + "github.com/reearth/reearthx/asset/assetdomain/event" "github.com/reearth/reearthx/util" "github.com/samber/lo" "golang.org/x/exp/slices" diff --git a/asset/assetdomain/integration/list_test.go b/asset/assetdomain/integration/list_test.go index ea0ec6f..1760498 100644 --- a/asset/assetdomain/integration/list_test.go +++ b/asset/assetdomain/integration/list_test.go @@ -5,8 +5,8 @@ import ( "testing" "time" - "github.com/reearth/reearth-cms/server/pkg/event" "github.com/reearth/reearthx/account/accountdomain/user" + "github.com/reearth/reearthx/asset/assetdomain/event" "github.com/samber/lo" "github.com/stretchr/testify/assert" ) diff --git a/asset/assetdomain/integration/webhook.go b/asset/assetdomain/integration/webhook.go index 7612554..de3449e 100644 --- a/asset/assetdomain/integration/webhook.go +++ b/asset/assetdomain/integration/webhook.go @@ -4,7 +4,7 @@ import ( "net/url" "time" - "github.com/reearth/reearth-cms/server/pkg/event" + "github.com/reearth/reearthx/asset/assetdomain/event" ) type Webhook struct { diff --git a/asset/assetdomain/integration/webhook_builder.go b/asset/assetdomain/integration/webhook_builder.go index e774fda..a2db96c 100644 --- a/asset/assetdomain/integration/webhook_builder.go +++ b/asset/assetdomain/integration/webhook_builder.go @@ -4,7 +4,7 @@ import ( "net/url" "time" - "github.com/reearth/reearth-cms/server/pkg/id" + id "github.com/reearth/reearthx/asset/assetdomain" ) type WebhookBuilder struct { diff --git a/asset/assetdomain/integration/webhook_builder_test.go b/asset/assetdomain/integration/webhook_builder_test.go index 5ba927e..65c4982 100644 --- a/asset/assetdomain/integration/webhook_builder_test.go +++ 
b/asset/assetdomain/integration/webhook_builder_test.go @@ -5,8 +5,8 @@ import ( "testing" "time" - "github.com/reearth/reearth-cms/server/pkg/event" - "github.com/reearth/reearth-cms/server/pkg/id" + id "github.com/reearth/reearthx/asset/assetdomain" + "github.com/reearth/reearthx/asset/assetdomain/event" "github.com/samber/lo" "github.com/stretchr/testify/assert" ) diff --git a/asset/assetdomain/integration/webhook_test.go b/asset/assetdomain/integration/webhook_test.go index aef0137..8ab119c 100644 --- a/asset/assetdomain/integration/webhook_test.go +++ b/asset/assetdomain/integration/webhook_test.go @@ -5,8 +5,8 @@ import ( "testing" "time" - "github.com/reearth/reearth-cms/server/pkg/event" - "github.com/reearth/reearth-cms/server/pkg/id" + id "github.com/reearth/reearthx/asset/assetdomain" + "github.com/reearth/reearthx/asset/assetdomain/event" "github.com/samber/lo" "github.com/stretchr/testify/assert" ) diff --git a/asset/assetdomain/thread/comment.go b/asset/assetdomain/thread/comment.go index 9ec3812..3e6bdde 100644 --- a/asset/assetdomain/thread/comment.go +++ b/asset/assetdomain/thread/comment.go @@ -3,7 +3,7 @@ package thread import ( "time" - "github.com/reearth/reearth-cms/server/pkg/operator" + "github.com/reearth/reearthx/asset/assetdomain/operator" ) type Comment struct { diff --git a/asset/assetdomain/thread/comment_test.go b/asset/assetdomain/thread/comment_test.go index 6214c1c..3dfd9c3 100644 --- a/asset/assetdomain/thread/comment_test.go +++ b/asset/assetdomain/thread/comment_test.go @@ -4,7 +4,7 @@ import ( "testing" "time" - "github.com/reearth/reearth-cms/server/pkg/operator" + "github.com/reearth/reearthx/asset/assetdomain/operator" "github.com/stretchr/testify/assert" ) diff --git a/asset/assetdomain/thread/thread.go b/asset/assetdomain/thread/thread.go index 18b1933..58a3a03 100644 --- a/asset/assetdomain/thread/thread.go +++ b/asset/assetdomain/thread/thread.go @@ -1,8 +1,8 @@ package thread import ( - 
"github.com/reearth/reearth-cms/server/pkg/id" "github.com/reearth/reearthx/account/accountdomain" + id "github.com/reearth/reearthx/asset/assetdomain" "github.com/reearth/reearthx/util" "github.com/samber/lo" "golang.org/x/exp/slices" diff --git a/asset/assetdomain/thread/thread_test.go b/asset/assetdomain/thread/thread_test.go index 650751f..dfdbeba 100644 --- a/asset/assetdomain/thread/thread_test.go +++ b/asset/assetdomain/thread/thread_test.go @@ -3,9 +3,9 @@ package thread import ( "testing" - "github.com/reearth/reearth-cms/server/pkg/id" - "github.com/reearth/reearth-cms/server/pkg/operator" "github.com/reearth/reearthx/account/accountdomain" + id "github.com/reearth/reearthx/asset/assetdomain" + "github.com/reearth/reearthx/asset/assetdomain/operator" "github.com/stretchr/testify/assert" ) diff --git a/asset/assetusecase/assetinteractor/asset_test.go b/asset/assetusecase/assetinteractor/asset_test.go index 8837868..d222404 100644 --- a/asset/assetusecase/assetinteractor/asset_test.go +++ b/asset/assetusecase/assetinteractor/asset_test.go @@ -3,8 +3,6 @@ package interactor import ( "bytes" "context" - "github.com/reearth/reearthx/asset/assetinfrastructure/assetfs" - "github.com/reearth/reearthx/asset/assetinfrastructure/assetmemory" "io" "path" "runtime" @@ -22,6 +20,8 @@ import ( "github.com/reearth/reearthx/asset/assetdomain/file" "github.com/reearth/reearthx/asset/assetdomain/project" "github.com/reearth/reearthx/asset/assetdomain/task" + "github.com/reearth/reearthx/asset/assetinfrastructure/assetfs" + "github.com/reearth/reearthx/asset/assetinfrastructure/assetmemory" usecase "github.com/reearth/reearthx/asset/assetusecase" gateway "github.com/reearth/reearthx/asset/assetusecase/assetgateway" interfaces "github.com/reearth/reearthx/asset/assetusecase/assetinterfaces" diff --git a/asset/assetusecase/assetinteractor/common_test.go b/asset/assetusecase/assetinteractor/common_test.go index b3cfa7f..aaade63 100644 --- 
a/asset/assetusecase/assetinteractor/common_test.go +++ b/asset/assetusecase/assetinteractor/common_test.go @@ -7,21 +7,21 @@ import ( "time" "github.com/golang/mock/gomock" - "github.com/reearth/reearth-cms/server/internal/infrastructure/memory" - "github.com/reearth/reearth-cms/server/internal/usecase/gateway" - "github.com/reearth/reearth-cms/server/internal/usecase/gateway/gatewaymock" - "github.com/reearth/reearth-cms/server/internal/usecase/interfaces" - "github.com/reearth/reearth-cms/server/pkg/asset" - "github.com/reearth/reearth-cms/server/pkg/event" - "github.com/reearth/reearth-cms/server/pkg/integration" - "github.com/reearth/reearth-cms/server/pkg/operator" - "github.com/reearth/reearth-cms/server/pkg/project" - "github.com/reearth/reearth-cms/server/pkg/task" "github.com/reearth/reearthx/account/accountdomain" "github.com/reearth/reearthx/account/accountdomain/user" "github.com/reearth/reearthx/account/accountdomain/workspace" "github.com/reearth/reearthx/account/accountusecase/accountinteractor" "github.com/reearth/reearthx/account/accountusecase/accountrepo" + "github.com/reearth/reearthx/asset/assetdomain/asset" + "github.com/reearth/reearthx/asset/assetdomain/event" + "github.com/reearth/reearthx/asset/assetdomain/integration" + "github.com/reearth/reearthx/asset/assetdomain/operator" + "github.com/reearth/reearthx/asset/assetdomain/project" + "github.com/reearth/reearthx/asset/assetdomain/task" + memory "github.com/reearth/reearthx/asset/assetinfrastructure/assetmemory" + gateway "github.com/reearth/reearthx/asset/assetusecase/assetgateway" + "github.com/reearth/reearthx/asset/assetusecase/assetgateway/gatewaymock" + interfaces "github.com/reearth/reearthx/asset/assetusecase/assetinterfaces" "github.com/reearth/reearthx/util" "github.com/samber/lo" "github.com/stretchr/testify/assert" @@ -128,18 +128,8 @@ func TestNew(t *testing.T) { uc := New(nil, nil, &accountrepo.Container{}, nil, ContainerConfig{}) assert.NotNil(t, uc) assert.Equal(t, 
interfaces.Container{ - Asset: NewAsset(nil, nil), - Workspace: accountinteractor.NewWorkspace(&accountrepo.Container{}, nil), - User: accountinteractor.NewUser(&accountrepo.Container{}, nil, "", ""), - Item: NewItem(nil, nil), - View: NewView(nil, nil), - Project: NewProject(nil, nil), - Request: NewRequest(nil, nil), - Model: NewModel(nil, nil), - Schema: NewSchema(nil, nil), - Integration: NewIntegration(nil, nil), - Thread: NewThread(nil, nil), - Group: NewGroup(nil, nil), - WorkspaceSettings: NewWorkspaceSettings(nil, nil), + Asset: NewAsset(nil, nil), + Workspace: accountinteractor.NewWorkspace(&accountrepo.Container{}, nil), + User: accountinteractor.NewUser(&accountrepo.Container{}, nil, "", ""), }, uc) } diff --git a/go.mod b/go.mod index 6e2260a..b6c3ed3 100644 --- a/go.mod +++ b/go.mod @@ -30,7 +30,6 @@ require ( github.com/oklog/ulid v1.3.1 github.com/pkg/errors v0.9.1 github.com/ravilushqa/otelgqlgen v0.15.0 - github.com/reearth/reearth-cms/server v0.0.0-20240903081242-e53864457087 github.com/samber/lo v1.39.0 github.com/sendgrid/sendgrid-go v3.14.0+incompatible github.com/spf13/afero v1.11.0 @@ -73,7 +72,6 @@ require ( github.com/aws/aws-sdk-go-v2/service/ssooidc v1.23.2 // indirect github.com/aws/aws-sdk-go-v2/service/sts v1.28.4 // indirect github.com/aws/smithy-go v1.20.1 // indirect - github.com/chrispappas/golang-generics-set v1.0.1 // indirect github.com/davecgh/go-spew v1.1.1 // indirect github.com/dgryski/trifles v0.0.0-20200705224438-cafc02a1ee2b // indirect github.com/fatih/color v1.16.0 // indirect @@ -99,7 +97,6 @@ require ( github.com/mitchellh/mapstructure v1.5.0 // indirect github.com/montanaflynn/stats v0.7.1 // indirect github.com/opentracing/opentracing-go v1.2.0 // indirect - github.com/paulmach/go.geojson v1.5.0 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect github.com/posener/complete v1.2.3 // indirect github.com/rs/cors v1.10.1 // indirect diff --git a/go.sum b/go.sum index 11d6020..d04844e 100644 --- a/go.sum +++ 
b/go.sum @@ -1,6 +1,6 @@ cloud.google.com/go v0.16.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -cloud.google.com/go v0.112.1 h1:uJSeirPke5UNZHIb4SxfZklVSiWWVqW4oXlETwZziwM= +cloud.google.com/go v0.112.0 h1:tpFCD7hpHFlQ8yPwT3x+QeXqc2T6+n6T+hmABHfDUSM= cloud.google.com/go/compute v1.24.0 h1:phWcR2eWzRJaL/kOiJwfFsPs4BaKq1j6vnpZrc1YlVg= cloud.google.com/go/compute v1.24.0/go.mod h1:kw1/T+h/+tK2LJK0wiPPx1intgdAM3j/g3hFDlscY40= cloud.google.com/go/compute/metadata v0.2.3 h1:mg4jlk7mCAj6xXp9UJ4fjI9VUI5rubuGBW5aJ7UnBMY= @@ -78,8 +78,6 @@ github.com/bradfitz/gomemcache v0.0.0-20170208213004-1952afaa557d/go.mod h1:PmM6 github.com/bradleyjkemp/cupaloy/v2 v2.6.0 h1:knToPYa2xtfg42U3I6punFEjaGFKWQRXJwj0JTv4mTs= github.com/bradleyjkemp/cupaloy/v2 v2.6.0/go.mod h1:bm7JXdkRd4BHJk9HpwqAI8BoAY1lps46Enkdqw6aRX0= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= -github.com/chrispappas/golang-generics-set v1.0.1 h1:91l8cInAWTxCPwZ8UNg7qkkPsdFdkYS9hytsd8UJsIU= -github.com/chrispappas/golang-generics-set v1.0.1/go.mod h1:cp8j73+rlDyFF9PrjUkrRvi8L4jSRIsRK6Q1nPPIoqo= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= @@ -241,8 +239,6 @@ github.com/oklog/ulid v1.3.1 h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4= github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U= github.com/opentracing/opentracing-go v1.2.0 h1:uEJPy/1a5RIPAJ0Ov+OIO8OxWu77jEv+1B0VhjKrZUs= github.com/opentracing/opentracing-go v1.2.0/go.mod h1:GxEUsuufX4nBwe+T+Wl9TAgYrxe9dPLANfrWvHYVTgc= -github.com/paulmach/go.geojson v1.5.0 h1:7mhpMK89SQdHFcEGomT7/LuJhwhEgfmpWYVlVmLEdQw= -github.com/paulmach/go.geojson 
v1.5.0/go.mod h1:DgdUy2rRVDDVgKqrjMe2vZAHMfhDTrjVKt3LmHIXGbU= github.com/pelletier/go-toml v1.0.1-0.20170904195809-1d6b12b7cb29/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= @@ -254,8 +250,6 @@ github.com/posener/complete v1.2.3/go.mod h1:WZIdtGGp+qx0sLrYKtIRAruyNpv6hFCicSg github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/ravilushqa/otelgqlgen v0.15.0 h1:U85nrlweMXTGaMChUViYM39/MXBZVeVVlpuHq+6eECQ= github.com/ravilushqa/otelgqlgen v0.15.0/go.mod h1:o+1Eju0VySmgq2BP8Vupz2YrN21Bj7D7imBqu3m2uB8= -github.com/reearth/reearth-cms/server v0.0.0-20240903081242-e53864457087 h1:qh7IMP4ILvhrNijpGhrbo/uGrTsix+6dp/+nQOMj+L8= -github.com/reearth/reearth-cms/server v0.0.0-20240903081242-e53864457087/go.mod h1:rw/ZONl2/2OY6UhP3uwOZxnrjcmkd2dRxH5C9k8XrwE= github.com/rs/cors v1.10.1 h1:L0uuZVXIKlI1SShY2nhFfo44TYvDPQ1w4oFkUJNfhyo= github.com/rs/cors v1.10.1/go.mod h1:XyqrcTp5zjWr1wsJ8PIRZssZ8b/WMcMf71DJnit4EMU= github.com/samber/lo v1.39.0 h1:4gTz1wUhNYLhFSKl6O+8peW0v2F4BCY034GRpU9WnuA= @@ -463,8 +457,8 @@ google.golang.org/genproto v0.0.0-20170918111702-1e559d0a00ee/go.mod h1:JiN7NxoA google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= -google.golang.org/genproto v0.0.0-20240213162025-012b6fc9bca9 h1:9+tzLLstTlPTRyJTh+ah5wIMsBW5c4tQwGTN3thOW9Y= -google.golang.org/genproto v0.0.0-20240213162025-012b6fc9bca9/go.mod h1:mqHbVIp48Muh7Ywss/AD6I5kNVKZMmAa/QEW58Gxp2s= +google.golang.org/genproto v0.0.0-20240205150955-31a09d347014 
h1:g/4bk7P6TPMkAUbUhquq98xey1slwvuVJPosdBqYJlU= +google.golang.org/genproto v0.0.0-20240205150955-31a09d347014/go.mod h1:xEgQu1e4stdSSsxPDK8Azkrk/ECl5HvdPf6nbZrTS5M= google.golang.org/genproto/googleapis/api v0.0.0-20240304161311-37d4d3c04a78 h1:SzXBGiWM1LNVYLCRP3e0/Gsze804l4jGoJ5lYysEO5I= google.golang.org/genproto/googleapis/api v0.0.0-20240304161311-37d4d3c04a78/go.mod h1:O1cOfN1Cy6QEYr7VxtjOyP5AdAuR0aJ/MYZaaof623Y= google.golang.org/genproto/googleapis/rpc v0.0.0-20240304161311-37d4d3c04a78 h1:Xs9lu+tLXxLIfuci70nG4cpwaRC+mRQPUL7LoIeDJC4=