From ac902360222ce404de2356eeb3ec0143a9380402 Mon Sep 17 00:00:00 2001 From: AzenKain Date: Sat, 25 Apr 2026 14:05:15 +0700 Subject: [PATCH] UPDATE: Project Module --- cmd/api/server.go | 6 +- db/migrations/0000010_revision.down.sql | 1 + db/migrations/0000010_revision.up.sql | 21 + db/migrations/0000011_submission.down.sql | 1 + db/migrations/0000011_submission.up.sql | 24 + db/migrations/000008_geometries.up.sql | 6 +- db/migrations/000009_project.down.sql | 1 + db/migrations/000009_project.up.sql | 26 + db/query/entities.sql | 5 +- db/query/files.sql | 6 +- db/query/geometries.sql | 13 +- db/query/project.sql | 179 ++++++ db/query/revision.sql | 30 + db/query/roles.sql | 3 +- db/query/submission.sql | 191 ++++++ db/query/users.sql | 37 +- db/query/verification.sql | 52 +- db/query/wiki.sql | 4 + db/schema.sql | 44 +- docs/docs.go | 478 ++++++++++++++- docs/swagger.json | 478 ++++++++++++++- docs/swagger.yaml | 315 +++++++++- internal/controllers/projectController.go | 200 +++++++ internal/controllers/userController.go | 80 +++ internal/dtos/request/project.go | 30 + internal/dtos/response/common.go | 2 +- internal/dtos/response/project.go | 20 + internal/gen/sqlc/entities.sql.go | 32 + internal/gen/sqlc/files.sql.go | 35 ++ internal/gen/sqlc/geometries.sql.go | 74 ++- internal/gen/sqlc/models.go | 42 +- internal/gen/sqlc/project.sql.go | 556 ++++++++++++++++++ internal/gen/sqlc/revision.sql.go | 182 ++++++ internal/gen/sqlc/submission.sql.go | 493 ++++++++++++++++ internal/gen/sqlc/users.sql.go | 78 +++ internal/gen/sqlc/verification.sql.go | 97 ++- internal/gen/sqlc/wiki.sql.go | 31 + internal/middlewares/jwtMiddleware.go | 4 +- internal/middlewares/roleMiddleware.go | 8 +- internal/models/geometry.go | 23 +- internal/models/project.go | 74 +++ internal/models/role.go | 4 +- internal/repositories/entityRepository.go | 48 +- internal/repositories/geometryRepository.go | 64 +- internal/repositories/mediaRepository.go | 50 +- 
internal/repositories/projectRepository.go | 345 +++++++++++ internal/repositories/roleRepository.go | 40 +- internal/repositories/tokenRepository.go | 6 +- internal/repositories/userRepository.go | 44 +- .../repositories/verificationRepository.go | 46 +- internal/repositories/wikiRepository.go | 47 +- internal/routes/mediaRoute.go | 6 +- internal/routes/projectRoute.go | 41 ++ internal/routes/userRoute.go | 27 +- internal/routes/verificationRoute.go | 8 +- internal/services/authService.go | 24 +- internal/services/mediaService.go | 4 +- internal/services/projectService.go | 256 ++++++++ internal/services/userService.go | 14 +- internal/services/verificationService.go | 18 +- pkg/constants/geometry.go | 66 +++ pkg/constants/project.go | 54 ++ pkg/constants/provider.go | 8 +- pkg/constants/role.go | 30 +- pkg/constants/status.go | 30 +- pkg/constants/{sream.go => stream.go} | 0 pkg/constants/token.go | 32 +- pkg/constants/verify.go | 38 +- pkg/convert/convert.go | 21 + pkg/database/seed.go | 6 +- pkg/email/email.go | 8 +- 71 files changed, 5110 insertions(+), 257 deletions(-) create mode 100644 db/migrations/0000010_revision.down.sql create mode 100644 db/migrations/0000010_revision.up.sql create mode 100644 db/migrations/0000011_submission.down.sql create mode 100644 db/migrations/0000011_submission.up.sql create mode 100644 db/migrations/000009_project.down.sql create mode 100644 db/migrations/000009_project.up.sql create mode 100644 db/query/project.sql create mode 100644 db/query/revision.sql create mode 100644 db/query/submission.sql create mode 100644 internal/controllers/projectController.go create mode 100644 internal/dtos/request/project.go create mode 100644 internal/dtos/response/project.go create mode 100644 internal/gen/sqlc/project.sql.go create mode 100644 internal/gen/sqlc/revision.sql.go create mode 100644 internal/gen/sqlc/submission.sql.go create mode 100644 internal/models/project.go create mode 100644 internal/repositories/projectRepository.go 
create mode 100644 internal/routes/projectRoute.go create mode 100644 internal/services/projectService.go create mode 100644 pkg/constants/geometry.go create mode 100644 pkg/constants/project.go rename pkg/constants/{sream.go => stream.go} (100%) diff --git a/cmd/api/server.go b/cmd/api/server.go index 635143a..5ecb141 100644 --- a/cmd/api/server.go +++ b/cmd/api/server.go @@ -87,6 +87,7 @@ func (s *FiberServer) SetupServer( entityRepo := repositories.NewEntityRepository(sqlPg, redis) geometryRepo := repositories.NewGeometryRepository(sqlPg, redis) wikiRepo := repositories.NewWikiRepository(sqlPg, redis) + projectRepo := repositories.NewProjectRepository(sqlPg, redis) // service setup authService := services.NewAuthService(userRepo, roleRepo, tokenRepo, redis) @@ -99,10 +100,11 @@ func (s *FiberServer) SetupServer( entityService := services.NewEntityService(entityRepo) geometryService := services.NewGeometryService(geometryRepo) wikiService := services.NewWikiService(wikiRepo) + projectService := services.NewProjectService(projectRepo) // controller setup authController := controllers.NewAuthController(authService, oauth) - userController := controllers.NewUserController(userService, mediaService, verificationService) + userController := controllers.NewUserController(userService, mediaService, verificationService, projectService) tileController := controllers.NewTileController(tileService) rasterTileController := controllers.NewRasterTileController(rasterTileService) roleController := controllers.NewRoleController(roleService) @@ -111,6 +113,7 @@ func (s *FiberServer) SetupServer( entityController := controllers.NewEntityController(entityService) geometryController := controllers.NewGeometryController(geometryService) wikiController := controllers.NewWikiController(wikiService) + projectController := controllers.NewProjectController(projectService) // route setup routes.AuthRoutes(s.App, authController, userRepo) @@ -123,5 +126,6 @@ func (s *FiberServer) 
SetupServer( routes.EntityRoutes(s.App, entityController) routes.GeometryRoutes(s.App, geometryController) routes.WikiRoutes(s.App, wikiController) + routes.ProjectRoutes(s.App, projectController, userRepo) routes.NotFoundRoute(s.App) } diff --git a/db/migrations/0000010_revision.down.sql b/db/migrations/0000010_revision.down.sql new file mode 100644 index 0000000..61d7b53 --- /dev/null +++ b/db/migrations/0000010_revision.down.sql @@ -0,0 +1 @@ +DROP TABLE IF EXISTS revisions; diff --git a/db/migrations/0000010_revision.up.sql b/db/migrations/0000010_revision.up.sql new file mode 100644 index 0000000..2492d3e --- /dev/null +++ b/db/migrations/0000010_revision.up.sql @@ -0,0 +1,21 @@ +CREATE TABLE IF NOT EXISTS revisions ( + id UUID PRIMARY KEY DEFAULT uuidv7(), + project_id UUID NOT NULL REFERENCES projects(id) ON DELETE CASCADE, + version_no INT NOT NULL, + snapshot_json JSONB NOT NULL, + snapshot_hash TEXT, + parent_id UUID REFERENCES revisions(id), + user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE, + edit_summary TEXT, + is_deleted BOOLEAN NOT NULL DEFAULT false, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW() +); + +CREATE INDEX idx_revisions_project_history +ON revisions (project_id, version_no DESC); + +CREATE INDEX idx_revisions_user_history +ON revisions (user_id, created_at DESC); + +CREATE INDEX idx_revisions_parent_id +ON revisions (parent_id); \ No newline at end of file diff --git a/db/migrations/0000011_submission.down.sql b/db/migrations/0000011_submission.down.sql new file mode 100644 index 0000000..f95be31 --- /dev/null +++ b/db/migrations/0000011_submission.down.sql @@ -0,0 +1 @@ +DROP TABLE IF EXISTS submissions; \ No newline at end of file diff --git a/db/migrations/0000011_submission.up.sql b/db/migrations/0000011_submission.up.sql new file mode 100644 index 0000000..99ee4f7 --- /dev/null +++ b/db/migrations/0000011_submission.up.sql @@ -0,0 +1,24 @@ +CREATE TABLE IF NOT EXISTS submissions ( + id UUID PRIMARY KEY DEFAULT uuidv7(), 
+ project_id UUID NOT NULL REFERENCES projects(id) ON DELETE CASCADE, + revision_id UUID NOT NULL REFERENCES revisions(id) ON DELETE CASCADE, + submitted_by UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE, + submitted_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + status SMALLINT NOT NULL DEFAULT 1, + reviewed_by UUID REFERENCES users(id) ON DELETE SET NULL, + reviewed_at TIMESTAMPTZ, + review_note TEXT, + is_deleted BOOLEAN NOT NULL DEFAULT false +); + +CREATE INDEX idx_submissions_revision_id +ON submissions (revision_id); + +CREATE INDEX idx_submissions_moderator_queue +ON submissions (status, submitted_at ASC); + +CREATE INDEX idx_submissions_user_history +ON submissions (submitted_by, status, submitted_at DESC); + +CREATE INDEX idx_submissions_project_queue +ON submissions (project_id, submitted_at DESC); \ No newline at end of file diff --git a/db/migrations/000008_geometries.up.sql b/db/migrations/000008_geometries.up.sql index 219a680..67edc72 100644 --- a/db/migrations/000008_geometries.up.sql +++ b/db/migrations/000008_geometries.up.sql @@ -4,7 +4,7 @@ CREATE EXTENSION IF NOT EXISTS postgis; CREATE TABLE IF NOT EXISTS geometries ( id UUID PRIMARY KEY DEFAULT uuidv7(), - geo_type VARCHAR(50) NOT NULL DEFAULT 'id', + geo_type SMALLINT NOT NULL DEFAULT 1, draw_geometry JSONB NOT NULL, binding JSONB, time_start INT, @@ -15,10 +15,6 @@ CREATE TABLE IF NOT EXISTS geometries ( updated_at TIMESTAMPTZ DEFAULT now() ); -ALTER TABLE geometries DROP CONSTRAINT IF EXISTS check_geo_type; -ALTER TABLE geometries ADD CONSTRAINT check_geo_type -CHECK (geo_type IN ('id', 'name', 'icon', 'variant', 'description')); - CREATE TABLE IF NOT EXISTS entity_geometries ( entity_id UUID REFERENCES entities(id) ON DELETE CASCADE, geometry_id UUID REFERENCES geometries(id) ON DELETE CASCADE, diff --git a/db/migrations/000009_project.down.sql b/db/migrations/000009_project.down.sql new file mode 100644 index 0000000..f17c3a8 --- /dev/null +++ b/db/migrations/000009_project.down.sql @@ 
-0,0 +1 @@ +DROP TABLE IF EXISTS projects; diff --git a/db/migrations/000009_project.up.sql b/db/migrations/000009_project.up.sql new file mode 100644 index 0000000..7f7ace3 --- /dev/null +++ b/db/migrations/000009_project.up.sql @@ -0,0 +1,27 @@ +-- gin_trgm_ops below requires pg_trgm; create it here so a fresh database migrates cleanly. +CREATE EXTENSION IF NOT EXISTS pg_trgm; +CREATE TABLE IF NOT EXISTS projects ( + id UUID PRIMARY KEY DEFAULT uuidv7(), + title TEXT NOT NULL, + description TEXT, + latest_revision_id UUID, + version_count INT NOT NULL DEFAULT 0, + project_status SMALLINT NOT NULL DEFAULT 1, + locked_by UUID, + is_deleted BOOLEAN NOT NULL DEFAULT false, + user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW() +); + +CREATE INDEX idx_projects_latest_revision_id +ON projects (latest_revision_id); + +CREATE INDEX idx_projects_user_status_updated +ON projects (user_id, project_status, updated_at DESC); + +CREATE INDEX idx_projects_status_updated +ON projects (project_status, updated_at DESC); + +CREATE INDEX idx_projects_title_trgm +ON projects USING GIN (title gin_trgm_ops); \ No newline at end of file diff --git a/db/query/entities.sql b/db/query/entities.sql index ada5136..89e1ec4 100644 --- a/db/query/entities.sql +++ b/db/query/entities.sql @@ -37,4 +37,7 @@ WHERE is_deleted = false AND name ILIKE '%' || sqlc.arg('name')::text || '%' AND (sqlc.narg('cursor_id')::uuid IS NULL OR id < sqlc.narg('cursor_id')::uuid) ORDER BY id DESC -LIMIT sqlc.arg('limit_count'); \ No newline at end of file +LIMIT sqlc.arg('limit_count'); + +-- name: GetEntitiesByIDs :many +SELECT * FROM entities WHERE id = ANY($1::uuid[]) AND is_deleted = false; diff --git a/db/query/files.sql b/db/query/files.sql index 6c4048d..3d09d65 100644 --- a/db/query/files.sql +++ b/db/query/files.sql @@ -78,4 +78,8 @@ ORDER BY created_at DESC; -- name: GetMediaByID :one SELECT * FROM medias -WHERE id = $1; \ No newline at end of file +WHERE id = $1; + +-- name: GetMediaByIDs :many +SELECT * FROM medias +WHERE id = 
ANY($1::uuid[]); \ No newline at end of file diff --git a/db/query/geometries.sql b/db/query/geometries.sql index 85cff67..f41f8be 100644 --- a/db/query/geometries.sql +++ b/db/query/geometries.sql @@ -87,4 +87,15 @@ RETURNING geometry_id; INSERT INTO entity_geometries ( entity_id, geometry_id ) -SELECT $1, unnest(@geometry_ids::uuid[]); \ No newline at end of file +SELECT $1, unnest(@geometry_ids::uuid[]); + +-- name: GetGeometriesByIDs :many +SELECT + id, geo_type, draw_geometry, binding, time_start, time_end, + ST_XMin(bbox)::float8 as min_lng, + ST_YMin(bbox)::float8 as min_lat, + ST_XMax(bbox)::float8 as max_lng, + ST_YMax(bbox)::float8 as max_lat, + is_deleted, created_at, updated_at +FROM geometries +WHERE id = ANY($1::uuid[]) AND is_deleted = false; diff --git a/db/query/project.sql b/db/query/project.sql new file mode 100644 index 0000000..254e668 --- /dev/null +++ b/db/query/project.sql @@ -0,0 +1,179 @@ +-- name: CreateProject :one +INSERT INTO projects ( + title, description, project_status, user_id +) VALUES ( + $1, $2, $3, $4 +) +-- An INSERT has no FROM clause, so the creator's profile json must come from a scalar subselect. +RETURNING + *, + (SELECT json_build_object( + 'id', u.id, + 'email', u.email, + 'display_name', up.display_name, + 'full_name', up.full_name, + 'avatar_url', up.avatar_url + ) FROM users u LEFT JOIN user_profiles up ON u.id = up.user_id WHERE u.id = projects.user_id)::json AS user, + '{}'::uuid[] AS commit_ids, + '{}'::uuid[] AS submission_ids; + +-- name: GetProjectById :one +SELECT + p.*, + COALESCE( + (SELECT array_agg(id) FROM revisions WHERE project_id = p.id), + '{}' + )::uuid[] AS commit_ids, + COALESCE( + (SELECT array_agg(id) FROM submissions WHERE project_id = p.id), + '{}' + )::uuid[] AS submission_ids, + json_build_object( + 'id', u.id, + 'email', u.email, + 'display_name', up.display_name, + 'full_name', up.full_name, + 'avatar_url', up.avatar_url + )::json AS user +FROM projects p +JOIN users u ON p.user_id = u.id +LEFT JOIN user_profiles up ON u.id = up.user_id +WHERE p.id = $1 AND p.is_deleted = false; + +-- name: UpdateProject :one +UPDATE projects +SET + title = COALESCE(sqlc.narg('title'), title), + 
description = COALESCE(sqlc.narg('description'), description), + latest_revision_id = COALESCE(sqlc.narg('latest_revision_id'), latest_revision_id), + version_count = COALESCE(sqlc.narg('version_count'), version_count), + project_status = COALESCE(sqlc.narg('status'), project_status), + locked_by = COALESCE(sqlc.narg('locked_by'), locked_by), + updated_at = NOW() +FROM projects p +JOIN users u ON p.user_id = u.id +LEFT JOIN user_profiles up ON u.id = up.user_id +-- projects.id = p.id ties the update target to the FROM self-join; without it every row is updated. +WHERE projects.id = p.id AND p.id = sqlc.arg('id') AND p.is_deleted = false +RETURNING + projects.*, + json_build_object( + 'id', u.id, + 'email', u.email, + 'display_name', up.display_name, + 'full_name', up.full_name, + 'avatar_url', up.avatar_url + )::json AS user, + COALESCE((SELECT array_agg(id) FROM revisions WHERE project_id = projects.id), '{}')::uuid[] AS commit_ids, + COALESCE((SELECT array_agg(id) FROM submissions WHERE project_id = projects.id), '{}')::uuid[] AS submission_ids; + +-- name: DeleteProject :exec +UPDATE projects +SET + is_deleted = true +WHERE id = $1; + +-- name: SearchProjects :many +SELECT + p.*, + COALESCE( + (SELECT array_agg(id) FROM revisions WHERE project_id = p.id), + '{}' + )::uuid[] AS commit_ids, + COALESCE( + (SELECT array_agg(id) FROM submissions WHERE project_id = p.id), + '{}' + )::uuid[] AS submission_ids, + json_build_object( + 'id', u.id, + 'email', u.email, + 'display_name', up.display_name, + 'full_name', up.full_name, + 'avatar_url', up.avatar_url + )::json AS user +FROM projects p +JOIN users u ON p.user_id = u.id +LEFT JOIN user_profiles up ON u.id = up.user_id +WHERE p.is_deleted = false + AND ( + sqlc.narg('statuses')::smallint[] IS NULL + OR p.project_status = ANY(sqlc.narg('statuses')::smallint[]) + ) + AND (sqlc.narg('user_ids')::uuid[] IS NULL OR p.user_id = ANY(sqlc.narg('user_ids')::uuid[])) + AND ( + sqlc.narg('search_text')::text IS NULL OR + p.title ILIKE '%' || sqlc.narg('search_text')::text || '%' OR + p.description ILIKE '%' || sqlc.narg('search_text')::text || '%' + ) + AND 
(sqlc.narg('created_from')::timestamptz IS NULL OR p.created_at >= sqlc.narg('created_from')::timestamptz) + AND (sqlc.narg('created_to')::timestamptz IS NULL OR p.created_at <= sqlc.narg('created_to')::timestamptz) +ORDER BY + CASE WHEN sqlc.narg('sort') = 'created_at' AND sqlc.narg('order') = 'asc' THEN p.created_at END ASC, + CASE WHEN sqlc.narg('sort') = 'created_at' AND sqlc.narg('order') = 'desc' THEN p.created_at END DESC, + CASE WHEN sqlc.narg('sort') = 'updated_at' AND sqlc.narg('order') = 'asc' THEN p.updated_at END ASC, + CASE WHEN sqlc.narg('sort') = 'updated_at' AND sqlc.narg('order') = 'desc' THEN p.updated_at END DESC, + CASE WHEN sqlc.narg('sort') = 'title' AND sqlc.narg('order') = 'asc' THEN p.title END ASC, + CASE WHEN sqlc.narg('sort') = 'title' AND sqlc.narg('order') = 'desc' THEN p.title END DESC, + CASE WHEN sqlc.narg('sort') IS NULL THEN p.updated_at END DESC +LIMIT sqlc.arg('limit') +OFFSET sqlc.arg('offset'); + +-- name: CountProjects :one +SELECT count(*) +FROM projects p +WHERE p.is_deleted = false + AND ( + sqlc.narg('statuses')::smallint[] IS NULL + OR p.project_status = ANY(sqlc.narg('statuses')::smallint[]) + ) + AND (sqlc.narg('user_ids')::uuid[] IS NULL OR p.user_id = ANY(sqlc.narg('user_ids')::uuid[])) + AND ( + sqlc.narg('search_text')::text IS NULL OR + p.title ILIKE '%' || sqlc.narg('search_text')::text || '%' OR + p.description ILIKE '%' || sqlc.narg('search_text')::text || '%' + ) + AND (sqlc.narg('created_from')::timestamptz IS NULL OR p.created_at >= sqlc.narg('created_from')::timestamptz) + AND (sqlc.narg('created_to')::timestamptz IS NULL OR p.created_at <= sqlc.narg('created_to')::timestamptz); + +-- name: GetProjectsByUserId :many +SELECT + p.*, + COALESCE( + (SELECT array_agg(id) FROM revisions WHERE project_id = p.id), + '{}' + )::uuid[] AS commit_ids, + COALESCE( + (SELECT array_agg(id) FROM submissions WHERE project_id = p.id), + '{}' + )::uuid[] AS submission_ids, + json_build_object( + 'id', u.id, + 'email', u.email, + 
'display_name', up.display_name, + 'full_name', up.full_name, + 'avatar_url', up.avatar_url + )::json AS user +FROM projects p +JOIN users u ON p.user_id = u.id +LEFT JOIN user_profiles up ON u.id = up.user_id +WHERE p.user_id = $1 + AND p.is_deleted = false + AND (sqlc.narg('cursor_id')::uuid IS NULL OR p.id < sqlc.narg('cursor_id')::uuid) +-- Keyset cursor is on id, so ordering must be on id too (uuidv7 ids are time-ordered). +ORDER BY p.id DESC +LIMIT sqlc.arg('limit'); + + +-- name: GetProjectsByIDs :many +SELECT + p.*, + COALESCE((SELECT array_agg(id) FROM revisions WHERE project_id = p.id), '{}')::uuid[] AS commit_ids, + COALESCE((SELECT array_agg(id) FROM submissions WHERE project_id = p.id), '{}')::uuid[] AS submission_ids, + json_build_object( + 'id', u.id, + 'email', u.email, + 'display_name', up.display_name, + 'full_name', up.full_name, + 'avatar_url', up.avatar_url + )::json AS user +FROM projects p +JOIN users u ON p.user_id = u.id +LEFT JOIN user_profiles up ON u.id = up.user_id +WHERE p.id = ANY($1::uuid[]) AND p.is_deleted = false; diff --git a/db/query/revision.sql b/db/query/revision.sql new file mode 100644 index 0000000..22d70f9 --- /dev/null +++ b/db/query/revision.sql @@ -0,0 +1,30 @@ +-- name: CreateRevision :one +INSERT INTO revisions ( + project_id, version_no, snapshot_json, snapshot_hash, parent_id, user_id, edit_summary +) VALUES ( + $1, $2, $3, $4, $5, $6, $7 +) +RETURNING *; + +-- name: GetRevisionById :one +SELECT * +FROM revisions +WHERE id = $1 AND is_deleted = false; + +-- name: DeleteRevision :exec +UPDATE revisions +SET is_deleted = true +WHERE id = $1; + +-- name: SearchRevisions :many +SELECT * +FROM revisions +WHERE is_deleted = false + AND (sqlc.narg('project_id')::uuid IS NULL OR project_id = sqlc.narg('project_id')) + AND (sqlc.narg('user_id')::uuid IS NULL OR user_id = sqlc.narg('user_id')) + AND (sqlc.narg('cursor_id')::uuid IS NULL OR id < sqlc.narg('cursor_id')::uuid) +ORDER BY version_no DESC +LIMIT sqlc.arg('limit'); + +-- name: GetRevisionsByIDs :many +SELECT * FROM revisions WHERE id = 
ANY($1::uuid[]) AND is_deleted = false; diff --git a/db/query/roles.sql b/db/query/roles.sql index c3f27f9..5aaa25e 100644 --- a/db/query/roles.sql +++ b/db/query/roles.sql @@ -61,4 +61,5 @@ UPDATE roles SET is_deleted = false, updated_at = now() -WHERE id = $1; \ No newline at end of file +WHERE id = $1; + diff --git a/db/query/submission.sql b/db/query/submission.sql new file mode 100644 index 0000000..4935a6c --- /dev/null +++ b/db/query/submission.sql @@ -0,0 +1,191 @@ +-- name: CreateSubmission :one +WITH inserted_submission AS ( + INSERT INTO submissions ( + project_id, revision_id, submitted_by, status + ) VALUES ( + $1, $2, $3, $4 + ) + RETURNING * +) +SELECT + s.id, s.project_id, s.revision_id, s.submitted_by, s.submitted_at, s.status, s.reviewed_by, s.reviewed_at, s.review_note, s.is_deleted, + json_build_object( + 'id', u.id, + 'email', u.email, + 'display_name', up.display_name, + 'full_name', up.full_name, + 'avatar_url', up.avatar_url + )::json AS submitter, + CASE WHEN s.reviewed_by IS NOT NULL THEN + json_build_object( + 'id', ru.id, + 'email', ru.email, + 'display_name', rup.display_name, + 'full_name', rup.full_name, + 'avatar_url', rup.avatar_url + )::json + ELSE NULL::json END AS reviewer +FROM inserted_submission s +JOIN users u ON s.submitted_by = u.id +LEFT JOIN user_profiles up ON u.id = up.user_id +LEFT JOIN users ru ON s.reviewed_by = ru.id +LEFT JOIN user_profiles rup ON ru.id = rup.user_id; + +-- name: GetSubmissionById :one +SELECT + s.id, s.project_id, s.revision_id, s.submitted_by, s.submitted_at, s.status, s.reviewed_by, s.reviewed_at, s.review_note, s.is_deleted, + json_build_object( + 'id', u.id, + 'email', u.email, + 'display_name', up.display_name, + 'full_name', up.full_name, + 'avatar_url', up.avatar_url + )::json AS submitter, + CASE WHEN s.reviewed_by IS NOT NULL THEN + json_build_object( + 'id', ru.id, + 'email', ru.email, + 'display_name', rup.display_name, + 'full_name', rup.full_name, + 'avatar_url', rup.avatar_url + 
)::json + ELSE NULL::json END AS reviewer +FROM submissions s +JOIN users u ON s.submitted_by = u.id +LEFT JOIN user_profiles up ON u.id = up.user_id +LEFT JOIN users ru ON s.reviewed_by = ru.id +LEFT JOIN user_profiles rup ON ru.id = rup.user_id +WHERE s.id = $1 AND s.is_deleted = false; + +-- name: UpdateSubmission :one +-- CTE mirrors CreateSubmission: update first, then join the UPDATED row to user data, +-- so the reviewer json reflects a reviewed_by set by this very call. +WITH updated_submission AS ( + UPDATE submissions + SET + status = COALESCE(sqlc.narg('status'), status), + reviewed_by = COALESCE(sqlc.narg('reviewed_by'), reviewed_by), + reviewed_at = COALESCE(sqlc.narg('reviewed_at'), reviewed_at), + review_note = COALESCE(sqlc.narg('review_note'), review_note) + WHERE id = sqlc.arg('id') + RETURNING * +) +SELECT + s.id, s.project_id, s.revision_id, s.submitted_by, s.submitted_at, s.status, s.reviewed_by, s.reviewed_at, s.review_note, s.is_deleted, + json_build_object( + 'id', u.id, + 'email', u.email, + 'display_name', up.display_name, + 'full_name', up.full_name, + 'avatar_url', up.avatar_url + )::json AS submitter, + CASE WHEN s.reviewed_by IS NOT NULL THEN + json_build_object( + 'id', ru.id, + 'email', ru.email, + 'display_name', rup.display_name, + 'full_name', rup.full_name, + 'avatar_url', rup.avatar_url + )::json + ELSE NULL::json END AS reviewer +FROM updated_submission s +JOIN users u ON s.submitted_by = u.id +LEFT JOIN user_profiles up ON u.id = up.user_id +LEFT JOIN users ru ON s.reviewed_by = ru.id +LEFT JOIN user_profiles rup ON ru.id = rup.user_id; + +-- name: DeleteSubmission :exec +UPDATE submissions +SET is_deleted = true +WHERE id = $1; + +-- name: SearchSubmissions :many +SELECT + s.id, s.project_id, s.revision_id, s.submitted_by, s.submitted_at, s.status, s.reviewed_by, s.reviewed_at, s.review_note, s.is_deleted, + json_build_object( + 'id', u.id, + 'email', u.email, + 'display_name', up.display_name, + 'full_name', up.full_name, + 'avatar_url', up.avatar_url + )::json AS submitter, + CASE WHEN s.reviewed_by IS NOT NULL THEN + json_build_object( + 'id', ru.id, + 'email', ru.email, + 'display_name', rup.display_name, + 'full_name', 
rup.full_name, + 'avatar_url', rup.avatar_url + )::json + ELSE NULL::json END AS reviewer +FROM submissions s +JOIN users u ON s.submitted_by = u.id +LEFT JOIN user_profiles up ON u.id = up.user_id +LEFT JOIN users ru ON s.reviewed_by = ru.id +LEFT JOIN user_profiles rup ON ru.id = rup.user_id +WHERE s.is_deleted = false + AND (sqlc.narg('project_id')::uuid IS NULL OR s.project_id = sqlc.narg('project_id')) + AND (sqlc.narg('submitted_by')::uuid IS NULL OR s.submitted_by = sqlc.narg('submitted_by')) + AND (sqlc.narg('reviewed_by')::uuid IS NULL OR s.reviewed_by = sqlc.narg('reviewed_by')) + AND ( + sqlc.narg('statuses')::smallint[] IS NULL + OR s.status = ANY(sqlc.narg('statuses')::smallint[]) + ) + AND (sqlc.narg('created_from')::timestamptz IS NULL OR s.submitted_at >= sqlc.narg('created_from')::timestamptz) + AND (sqlc.narg('created_to')::timestamptz IS NULL OR s.submitted_at <= sqlc.narg('created_to')::timestamptz) + AND ( + sqlc.narg('search_text')::text IS NULL OR + s.id::text ILIKE '%' || sqlc.narg('search_text')::text || '%' OR + s.review_note ILIKE '%' || sqlc.narg('search_text')::text || '%' + ) +ORDER BY + CASE WHEN sqlc.narg('sort') = 'submitted_at' AND sqlc.narg('order') = 'asc' THEN s.submitted_at END ASC, + CASE WHEN sqlc.narg('sort') = 'submitted_at' AND sqlc.narg('order') = 'desc' THEN s.submitted_at END DESC, + CASE WHEN sqlc.narg('sort') = 'reviewed_at' AND sqlc.narg('order') = 'asc' THEN s.reviewed_at END ASC, + CASE WHEN sqlc.narg('sort') = 'reviewed_at' AND sqlc.narg('order') = 'desc' THEN s.reviewed_at END DESC, + CASE WHEN sqlc.narg('sort') = 'status' AND sqlc.narg('order') = 'asc' THEN s.status END ASC, + CASE WHEN sqlc.narg('sort') = 'status' AND sqlc.narg('order') = 'desc' THEN s.status END DESC, + CASE WHEN sqlc.narg('sort') IS NULL THEN s.submitted_at END DESC +LIMIT sqlc.arg('limit') +OFFSET sqlc.arg('offset'); + +-- name: CountSubmissions :one +SELECT count(*) +FROM submissions s +WHERE s.is_deleted = false + AND 
(sqlc.narg('project_id')::uuid IS NULL OR s.project_id = sqlc.narg('project_id')) + AND (sqlc.narg('submitted_by')::uuid IS NULL OR s.submitted_by = sqlc.narg('submitted_by')) + AND (sqlc.narg('reviewed_by')::uuid IS NULL OR s.reviewed_by = sqlc.narg('reviewed_by')) + AND ( + sqlc.narg('statuses')::smallint[] IS NULL + OR s.status = ANY(sqlc.narg('statuses')::smallint[]) + ) + AND (sqlc.narg('created_from')::timestamptz IS NULL OR s.submitted_at >= sqlc.narg('created_from')::timestamptz) + AND (sqlc.narg('created_to')::timestamptz IS NULL OR s.submitted_at <= sqlc.narg('created_to')::timestamptz) + AND ( + sqlc.narg('search_text')::text IS NULL OR + s.id::text ILIKE '%' || sqlc.narg('search_text')::text || '%' OR + s.review_note ILIKE '%' || sqlc.narg('search_text')::text || '%' + ); + + +-- name: GetSubmissionsByIDs :many +SELECT + s.id, s.project_id, s.revision_id, s.submitted_by, s.submitted_at, s.status, s.reviewed_by, s.reviewed_at, s.review_note, s.is_deleted, + json_build_object( + 'id', u.id, + 'email', u.email, + 'display_name', up.display_name, + 'full_name', up.full_name, + 'avatar_url', up.avatar_url + )::json AS submitter, + CASE WHEN s.reviewed_by IS NOT NULL THEN + json_build_object( + 'id', ru.id, + 'email', ru.email, + 'display_name', rup.display_name, + 'full_name', rup.full_name, + 'avatar_url', rup.avatar_url + )::json + ELSE NULL::json END AS reviewer +FROM submissions s +JOIN users u ON s.submitted_by = u.id +LEFT JOIN user_profiles up ON u.id = up.user_id +LEFT JOIN users ru ON s.reviewed_by = ru.id +LEFT JOIN user_profiles rup ON ru.id = rup.user_id +WHERE s.id = ANY($1::uuid[]) AND s.is_deleted = false; diff --git a/db/query/users.sql b/db/query/users.sql index 8c897be..183414a 100644 --- a/db/query/users.sql +++ b/db/query/users.sql @@ -303,4 +303,39 @@ WHERE p.phone ILIKE '%' || sqlc.narg('search_text')::text || '%' ) ) - ); \ No newline at end of file + ); + +-- name: GetUsersByIDs :many +SELECT + u.id, + u.email, + u.password_hash, + 
u.token_version, + u.is_deleted, + u.created_at, + u.updated_at, + ( + SELECT json_build_object( + 'display_name', p.display_name, + 'full_name', p.full_name, + 'avatar_url', p.avatar_url, + 'bio', p.bio, + 'location', p.location, + 'website', p.website, + 'country_code', p.country_code, + 'phone', p.phone + ) + FROM user_profiles p + WHERE p.user_id = u.id + ) AS profile, + ( + SELECT COALESCE( + json_agg(json_build_object('id', r.id, 'name', r.name)), + '[]' + )::json + FROM user_roles ur + JOIN roles r ON ur.role_id = r.id + WHERE ur.user_id = u.id + ) AS roles +FROM users u +WHERE u.id = ANY($1::uuid[]) AND u.is_deleted = false; diff --git a/db/query/verification.sql b/db/query/verification.sql index 6694618..e45f99b 100644 --- a/db/query/verification.sql +++ b/db/query/verification.sql @@ -23,7 +23,7 @@ SELECT 'full_name', up.full_name, 'avatar_url', up.avatar_url )::json AS user, - NULL::json AS reviewer, -- Khi mới tạo thì reviewer luôn null + NULL::json AS reviewer, '[]'::json AS medias FROM inserted_uv i JOIN users u ON i.user_id = u.id @@ -274,4 +274,52 @@ WHERE sqlc.narg('search_text')::text IS NULL OR uv.id::text ILIKE '%' || sqlc.narg('search_text')::text || '%' OR uv.content::text ILIKE '%' || sqlc.narg('search_text')::text || '%' - ); \ No newline at end of file + ); + +-- name: GetUserVerificationsByIDs :many +SELECT + uv.id, uv.verify_type, uv.content, + uv.is_deleted, uv.status, uv.review_note, + uv.reviewed_at, uv.created_at, + json_build_object( + 'id', u.id, + 'email', u.email, + 'display_name', up.display_name, + 'full_name', up.full_name, + 'avatar_url', up.avatar_url + )::json AS user, + CASE WHEN uv.reviewed_by IS NOT NULL THEN + json_build_object( + 'id', ru.id, + 'email', ru.email, + 'display_name', rup.display_name, + 'full_name', rup.full_name, + 'avatar_url', rup.avatar_url + )::json + ELSE NULL::json END AS reviewer, + ( + SELECT COALESCE( + json_agg( + json_build_object( + 'id', m.id, + 'storage_key', m.storage_key, + 
'original_name', m.original_name, + 'mime_type', m.mime_type, + 'size', m.size, + 'file_metadata', m.file_metadata, + 'created_at', m.created_at + ) + ), + '[]' + )::json + FROM verification_medias vm + JOIN medias m ON vm.media_id = m.id + WHERE vm.verification_id = uv.id + ) AS medias +FROM user_verifications uv +JOIN users u ON uv.user_id = u.id +LEFT JOIN user_profiles up ON u.id = up.user_id +LEFT JOIN users ru ON uv.reviewed_by = ru.id +LEFT JOIN user_profiles rup ON ru.id = rup.user_id +WHERE uv.id = ANY($1::uuid[]) + AND uv.is_deleted = false; diff --git a/db/query/wiki.sql b/db/query/wiki.sql index 75d46f4..3f99722 100644 --- a/db/query/wiki.sql +++ b/db/query/wiki.sql @@ -54,3 +54,7 @@ INSERT INTO entity_wikis ( entity_id, wiki_id ) SELECT $1, unnest(@wiki_ids::uuid[]); + + +-- name: GetWikisByIDs :many +SELECT * FROM wikis WHERE id = ANY($1::uuid[]) AND is_deleted = false; diff --git a/db/schema.sql b/db/schema.sql index 74dec71..81be834 100644 --- a/db/schema.sql +++ b/db/schema.sql @@ -98,12 +98,12 @@ CREATE TABLE IF NOT EXISTS entity_wikis ( CREATE TABLE IF NOT EXISTS geometries ( id UUID PRIMARY KEY DEFAULT uuidv7(), - geo_type VARCHAR(50) NOT NULL DEFAULT 'id', + geo_type SMALLINT NOT NULL DEFAULT 1, draw_geometry JSONB NOT NULL, binding JSONB, time_start INT, time_end INT, - bbox GEOMETRY, + bbox GEOMETRY(Polygon, 4326), is_deleted BOOLEAN NOT NULL DEFAULT false, created_at TIMESTAMPTZ DEFAULT now(), updated_at TIMESTAMPTZ DEFAULT now() @@ -113,4 +113,44 @@ CREATE TABLE IF NOT EXISTS entity_geometries ( entity_id UUID REFERENCES entities(id) ON DELETE CASCADE, geometry_id UUID REFERENCES geometries(id) ON DELETE CASCADE, PRIMARY KEY (entity_id, geometry_id) +); + +CREATE TABLE IF NOT EXISTS projects ( + id UUID PRIMARY KEY DEFAULT uuidv7(), + title TEXT NOT NULL, + description TEXT, + latest_revision_id UUID, + version_count INT NOT NULL DEFAULT 0, + project_status SMALLINT NOT NULL DEFAULT 1, + locked_by UUID, + is_deleted BOOLEAN NOT NULL DEFAULT 
false, + user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW() +); + +CREATE TABLE IF NOT EXISTS revisions ( + id UUID PRIMARY KEY DEFAULT uuidv7(), + project_id UUID NOT NULL REFERENCES projects(id) ON DELETE CASCADE, + version_no INT NOT NULL, + snapshot_json JSONB NOT NULL, + snapshot_hash TEXT, + parent_id UUID REFERENCES revisions(id), + user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE, + edit_summary TEXT, + is_deleted BOOLEAN NOT NULL DEFAULT false, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW() +); + +CREATE TABLE IF NOT EXISTS submissions ( + id UUID PRIMARY KEY DEFAULT uuidv7(), + project_id UUID NOT NULL REFERENCES projects(id) ON DELETE CASCADE, + revision_id UUID NOT NULL REFERENCES revisions(id) ON DELETE CASCADE, + submitted_by UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE, + submitted_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + status SMALLINT NOT NULL DEFAULT 1, + reviewed_by UUID REFERENCES users(id) ON DELETE SET NULL, + reviewed_at TIMESTAMPTZ, + review_note TEXT, + is_deleted BOOLEAN NOT NULL DEFAULT false ); \ No newline at end of file diff --git a/docs/docs.go b/docs/docs.go index 5b564f6..37e931b 100644 --- a/docs/docs.go +++ b/docs/docs.go @@ -1305,6 +1305,323 @@ const docTemplate = `{ } } }, + "/projects": { + "get": { + "description": "Search and filter projects with pagination", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Projects" + ], + "summary": "Search projects", + "parameters": [ + { + "type": "string", + "name": "created_from", + "in": "query" + }, + { + "type": "string", + "name": "created_to", + "in": "query" + }, + { + "maximum": 100, + "minimum": 1, + "type": "integer", + "name": "limit", + "in": "query" + }, + { + "enum": [ + "asc", + "desc" + ], + "type": "string", + "name": "order", + "in": "query" + }, + { + "minimum": 1, + "type": "integer", + 
"name": "page", + "in": "query" + }, + { + "maxLength": 200, + "type": "string", + "name": "search", + "in": "query" + }, + { + "enum": [ + "created_at", + "updated_at", + "title" + ], + "type": "string", + "name": "sort", + "in": "query" + }, + { + "type": "array", + "items": { + "type": "string" + }, + "collectionFormat": "csv", + "name": "statuses", + "in": "query" + }, + { + "type": "array", + "items": { + "type": "string" + }, + "collectionFormat": "csv", + "name": "user_ids", + "in": "query" + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.PaginatedResponse" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + } + } + }, + "post": { + "security": [ + { + "BearerAuth": [] + } + ], + "description": "Create a project for the current authenticated user", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Projects" + ], + "summary": "Create a new project", + "parameters": [ + { + "description": "Project Data", + "name": "request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_request.CreateProjectDto" + } + } + ], + "responses": { + "201": { + "description": "Created", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + } + } + } + }, + "/projects/{id}": { + "get": { + "description": "Retrieve project 
details by specific ID", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Projects" + ], + "summary": "Get project by ID", + "parameters": [ + { + "type": "string", + "description": "Project ID", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + } + } + }, + "put": { + "security": [ + { + "BearerAuth": [] + } + ], + "description": "Update project properties (Title, Description, Status)", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Projects" + ], + "summary": "Update a project", + "parameters": [ + { + "type": "string", + "description": "Project ID", + "name": "id", + "in": "path", + "required": true + }, + { + "description": "Project Data", + "name": "request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_request.UpdateProjectDto" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + }, + "500": { + "description": "Internal Server Error", + 
"schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + } + } + }, + "delete": { + "security": [ + { + "BearerAuth": [] + } + ], + "description": "Delete project by ID", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Projects" + ], + "summary": "Delete a project", + "parameters": [ + { + "type": "string", + "description": "Project ID", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + } + } + } + }, "/raster-tiles/metadata": { "get": { "description": "Retrieve map metadata", @@ -1871,6 +2188,60 @@ const docTemplate = `{ } } }, + "/users/current/project": { + "get": { + "security": [ + { + "BearerAuth": [] + } + ], + "description": "Retrieve project list of the currently authenticated user", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Users" + ], + "summary": "Get current user's projects", + "parameters": [ + { + "type": "string", + "name": "cursor_id", + "in": "query" + }, + { + "maximum": 100, + "minimum": 1, + "type": "integer", + "name": "limit", + "in": "query" + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": 
"#/definitions/history-api_internal_dtos_response.CommonResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + } + } + } + }, "/users/{id}": { "get": { "security": [ @@ -2031,6 +2402,62 @@ const docTemplate = `{ } } }, + "/users/{id}/project": { + "get": { + "description": "Retrieve project list by specific user ID", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Users" + ], + "summary": "Get user's projects by user ID", + "parameters": [ + { + "type": "string", + "description": "User ID", + "name": "id", + "in": "path", + "required": true + }, + { + "type": "string", + "name": "cursor_id", + "in": "query" + }, + { + "maximum": 100, + "minimum": 1, + "type": "integer", + "name": "limit", + "in": "query" + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + } + } + } + }, "/users/{id}/restore": { "patch": { "security": [ @@ -2259,6 +2686,29 @@ const docTemplate = `{ } } }, + "history-api_internal_dtos_request.CreateProjectDto": { + "type": "object", + "required": [ + "title" + ], + "properties": { + "description": { + "type": "string" + }, + "status": { + "type": "string", + "enum": [ + "PRIVATE", + "PUBLIC", + "ARCHIVE" + ] + }, + "title": { + "type": "string", + "maxLength": 255 + } + } + }, "history-api_internal_dtos_request.CreateTokenDto": { "type": "object", "required": [ @@ -2445,6 +2895,26 @@ const docTemplate = `{ } } }, + "history-api_internal_dtos_request.UpdateProjectDto": { + "type": "object", + 
"properties": { + "description": { + "type": "string" + }, + "status": { + "type": "string", + "enum": [ + "PRIVATE", + "PUBLIC", + "ARCHIVE" + ] + }, + "title": { + "type": "string", + "maxLength": 255 + } + } + }, "history-api_internal_dtos_request.UpdateVerificationStatusDto": { "type": "object", "required": [ @@ -2552,10 +3022,10 @@ const docTemplate = `{ 4 ], "x-enum-varnames": [ - "TokenPasswordReset", - "TokenEmailVerify", - "TokenMagicLink", - "TokenUpload" + "TokenTypePasswordReset", + "TokenTypeEmailVerify", + "TokenTypeMagicLink", + "TokenTypeUpload" ] } }, diff --git a/docs/swagger.json b/docs/swagger.json index 74c1b64..61bdd9b 100644 --- a/docs/swagger.json +++ b/docs/swagger.json @@ -1298,6 +1298,323 @@ } } }, + "/projects": { + "get": { + "description": "Search and filter projects with pagination", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Projects" + ], + "summary": "Search projects", + "parameters": [ + { + "type": "string", + "name": "created_from", + "in": "query" + }, + { + "type": "string", + "name": "created_to", + "in": "query" + }, + { + "maximum": 100, + "minimum": 1, + "type": "integer", + "name": "limit", + "in": "query" + }, + { + "enum": [ + "asc", + "desc" + ], + "type": "string", + "name": "order", + "in": "query" + }, + { + "minimum": 1, + "type": "integer", + "name": "page", + "in": "query" + }, + { + "maxLength": 200, + "type": "string", + "name": "search", + "in": "query" + }, + { + "enum": [ + "created_at", + "updated_at", + "title" + ], + "type": "string", + "name": "sort", + "in": "query" + }, + { + "type": "array", + "items": { + "type": "string" + }, + "collectionFormat": "csv", + "name": "statuses", + "in": "query" + }, + { + "type": "array", + "items": { + "type": "string" + }, + "collectionFormat": "csv", + "name": "user_ids", + "in": "query" + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": 
"#/definitions/history-api_internal_dtos_response.PaginatedResponse" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + } + } + }, + "post": { + "security": [ + { + "BearerAuth": [] + } + ], + "description": "Create a project for the current authenticated user", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Projects" + ], + "summary": "Create a new project", + "parameters": [ + { + "description": "Project Data", + "name": "request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_request.CreateProjectDto" + } + } + ], + "responses": { + "201": { + "description": "Created", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + } + } + } + }, + "/projects/{id}": { + "get": { + "description": "Retrieve project details by specific ID", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Projects" + ], + "summary": "Get project by ID", + "parameters": [ + { + "type": "string", + "description": "Project ID", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + 
} + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + } + } + }, + "put": { + "security": [ + { + "BearerAuth": [] + } + ], + "description": "Update project properties (Title, Description, Status)", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Projects" + ], + "summary": "Update a project", + "parameters": [ + { + "type": "string", + "description": "Project ID", + "name": "id", + "in": "path", + "required": true + }, + { + "description": "Project Data", + "name": "request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_request.UpdateProjectDto" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + } + } + }, + "delete": { + "security": [ + { + "BearerAuth": [] + } + ], + "description": "Delete project by ID", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Projects" + ], + "summary": "Delete a project", + "parameters": [ + { + "type": "string", + "description": "Project ID", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": 
"#/definitions/history-api_internal_dtos_response.CommonResponse" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + } + } + } + }, "/raster-tiles/metadata": { "get": { "description": "Retrieve map metadata", @@ -1864,6 +2181,60 @@ } } }, + "/users/current/project": { + "get": { + "security": [ + { + "BearerAuth": [] + } + ], + "description": "Retrieve project list of the currently authenticated user", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Users" + ], + "summary": "Get current user's projects", + "parameters": [ + { + "type": "string", + "name": "cursor_id", + "in": "query" + }, + { + "maximum": 100, + "minimum": 1, + "type": "integer", + "name": "limit", + "in": "query" + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + } + } + } + }, "/users/{id}": { "get": { "security": [ @@ -2024,6 +2395,62 @@ } } }, + "/users/{id}/project": { + "get": { + "description": "Retrieve project list by specific user ID", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Users" + ], + "summary": "Get user's projects by user ID", + "parameters": [ + { + "type": "string", + "description": "User ID", + 
"name": "id", + "in": "path", + "required": true + }, + { + "type": "string", + "name": "cursor_id", + "in": "query" + }, + { + "maximum": 100, + "minimum": 1, + "type": "integer", + "name": "limit", + "in": "query" + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + } + } + } + }, "/users/{id}/restore": { "patch": { "security": [ @@ -2252,6 +2679,29 @@ } } }, + "history-api_internal_dtos_request.CreateProjectDto": { + "type": "object", + "required": [ + "title" + ], + "properties": { + "description": { + "type": "string" + }, + "status": { + "type": "string", + "enum": [ + "PRIVATE", + "PUBLIC", + "ARCHIVE" + ] + }, + "title": { + "type": "string", + "maxLength": 255 + } + } + }, "history-api_internal_dtos_request.CreateTokenDto": { "type": "object", "required": [ @@ -2438,6 +2888,26 @@ } } }, + "history-api_internal_dtos_request.UpdateProjectDto": { + "type": "object", + "properties": { + "description": { + "type": "string" + }, + "status": { + "type": "string", + "enum": [ + "PRIVATE", + "PUBLIC", + "ARCHIVE" + ] + }, + "title": { + "type": "string", + "maxLength": 255 + } + } + }, "history-api_internal_dtos_request.UpdateVerificationStatusDto": { "type": "object", "required": [ @@ -2545,10 +3015,10 @@ 4 ], "x-enum-varnames": [ - "TokenPasswordReset", - "TokenEmailVerify", - "TokenMagicLink", - "TokenUpload" + "TokenTypePasswordReset", + "TokenTypeEmailVerify", + "TokenTypeMagicLink", + "TokenTypeUpload" ] } }, diff --git a/docs/swagger.yaml b/docs/swagger.yaml index 94b7c32..85d28c5 100644 --- a/docs/swagger.yaml +++ b/docs/swagger.yaml @@ -23,6 +23,22 @@ definitions: 
required: - role_ids type: object + history-api_internal_dtos_request.CreateProjectDto: + properties: + description: + type: string + status: + enum: + - PRIVATE + - PUBLIC + - ARCHIVE + type: string + title: + maxLength: 255 + type: string + required: + - title + type: object history-api_internal_dtos_request.CreateTokenDto: properties: email: @@ -155,6 +171,20 @@ definitions: website: type: string type: object + history-api_internal_dtos_request.UpdateProjectDto: + properties: + description: + type: string + status: + enum: + - PRIVATE + - PUBLIC + - ARCHIVE + type: string + title: + maxLength: 255 + type: string + type: object history-api_internal_dtos_request.UpdateVerificationStatusDto: properties: review_note: @@ -230,10 +260,10 @@ definitions: format: int32 type: integer x-enum-varnames: - - TokenPasswordReset - - TokenEmailVerify - - TokenMagicLink - - TokenUpload + - TokenTypePasswordReset + - TokenTypeEmailVerify + - TokenTypeMagicLink + - TokenTypeUpload info: contact: email: support@swagger.io @@ -1078,6 +1108,212 @@ paths: summary: Upload media (server-side) tags: - Media + /projects: + get: + consumes: + - application/json + description: Search and filter projects with pagination + parameters: + - in: query + name: created_from + type: string + - in: query + name: created_to + type: string + - in: query + maximum: 100 + minimum: 1 + name: limit + type: integer + - enum: + - asc + - desc + in: query + name: order + type: string + - in: query + minimum: 1 + name: page + type: integer + - in: query + maxLength: 200 + name: search + type: string + - enum: + - created_at + - updated_at + - title + in: query + name: sort + type: string + - collectionFormat: csv + in: query + items: + type: string + name: statuses + type: array + - collectionFormat: csv + in: query + items: + type: string + name: user_ids + type: array + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: 
'#/definitions/history-api_internal_dtos_response.PaginatedResponse' + "400": + description: Bad Request + schema: + $ref: '#/definitions/history-api_internal_dtos_response.CommonResponse' + "500": + description: Internal Server Error + schema: + $ref: '#/definitions/history-api_internal_dtos_response.CommonResponse' + summary: Search projects + tags: + - Projects + post: + consumes: + - application/json + description: Create a project for the current authenticated user + parameters: + - description: Project Data + in: body + name: request + required: true + schema: + $ref: '#/definitions/history-api_internal_dtos_request.CreateProjectDto' + produces: + - application/json + responses: + "201": + description: Created + schema: + $ref: '#/definitions/history-api_internal_dtos_response.CommonResponse' + "400": + description: Bad Request + schema: + $ref: '#/definitions/history-api_internal_dtos_response.CommonResponse' + "500": + description: Internal Server Error + schema: + $ref: '#/definitions/history-api_internal_dtos_response.CommonResponse' + security: + - BearerAuth: [] + summary: Create a new project + tags: + - Projects + /projects/{id}: + delete: + consumes: + - application/json + description: Delete project by ID + parameters: + - description: Project ID + in: path + name: id + required: true + type: string + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/history-api_internal_dtos_response.CommonResponse' + "400": + description: Bad Request + schema: + $ref: '#/definitions/history-api_internal_dtos_response.CommonResponse' + "404": + description: Not Found + schema: + $ref: '#/definitions/history-api_internal_dtos_response.CommonResponse' + "500": + description: Internal Server Error + schema: + $ref: '#/definitions/history-api_internal_dtos_response.CommonResponse' + security: + - BearerAuth: [] + summary: Delete a project + tags: + - Projects + get: + consumes: + - application/json + description: 
Retrieve project details by specific ID + parameters: + - description: Project ID + in: path + name: id + required: true + type: string + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/history-api_internal_dtos_response.CommonResponse' + "400": + description: Bad Request + schema: + $ref: '#/definitions/history-api_internal_dtos_response.CommonResponse' + "404": + description: Not Found + schema: + $ref: '#/definitions/history-api_internal_dtos_response.CommonResponse' + "500": + description: Internal Server Error + schema: + $ref: '#/definitions/history-api_internal_dtos_response.CommonResponse' + summary: Get project by ID + tags: + - Projects + put: + consumes: + - application/json + description: Update project properties (Title, Description, Status) + parameters: + - description: Project ID + in: path + name: id + required: true + type: string + - description: Project Data + in: body + name: request + required: true + schema: + $ref: '#/definitions/history-api_internal_dtos_request.UpdateProjectDto' + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/history-api_internal_dtos_response.CommonResponse' + "400": + description: Bad Request + schema: + $ref: '#/definitions/history-api_internal_dtos_response.CommonResponse' + "404": + description: Not Found + schema: + $ref: '#/definitions/history-api_internal_dtos_response.CommonResponse' + "500": + description: Internal Server Error + schema: + $ref: '#/definitions/history-api_internal_dtos_response.CommonResponse' + security: + - BearerAuth: [] + summary: Update a project + tags: + - Projects /raster-tiles/{z}/{x}/{y}: get: description: Fetch vector or raster map tile data by Z, X, Y coordinates @@ -1420,6 +1656,43 @@ paths: summary: Get user's media by user ID tags: - Users + /users/{id}/project: + get: + consumes: + - application/json + description: Retrieve project list by specific user ID + 
parameters: + - description: User ID + in: path + name: id + required: true + type: string + - in: query + name: cursor_id + type: string + - in: query + maximum: 100 + minimum: 1 + name: limit + type: integer + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/history-api_internal_dtos_response.CommonResponse' + "400": + description: Bad Request + schema: + $ref: '#/definitions/history-api_internal_dtos_response.CommonResponse' + "500": + description: Internal Server Error + schema: + $ref: '#/definitions/history-api_internal_dtos_response.CommonResponse' + summary: Get user's projects by user ID + tags: + - Users /users/{id}/restore: patch: consumes: @@ -1611,6 +1884,40 @@ paths: summary: Change user password tags: - Users + /users/current/project: + get: + consumes: + - application/json + description: Retrieve project list of the currently authenticated user + parameters: + - in: query + name: cursor_id + type: string + - in: query + maximum: 100 + minimum: 1 + name: limit + type: integer + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/history-api_internal_dtos_response.CommonResponse' + "400": + description: Bad Request + schema: + $ref: '#/definitions/history-api_internal_dtos_response.CommonResponse' + "500": + description: Internal Server Error + schema: + $ref: '#/definitions/history-api_internal_dtos_response.CommonResponse' + security: + - BearerAuth: [] + summary: Get current user's projects + tags: + - Users /wikis: get: consumes: diff --git a/internal/controllers/projectController.go b/internal/controllers/projectController.go new file mode 100644 index 0000000..18bb3be --- /dev/null +++ b/internal/controllers/projectController.go @@ -0,0 +1,200 @@ +package controllers + +import ( + "context" + "time" + + "github.com/gofiber/fiber/v3" + + "history-api/internal/dtos/request" + "history-api/internal/dtos/response" + 
"history-api/internal/services" + "history-api/pkg/validator" +) + +type ProjectController struct { + service services.ProjectService +} + +func NewProjectController(service services.ProjectService) *ProjectController { + return &ProjectController{ + service: service, + } +} + +// GetProjectByID godoc +// @Summary Get project by ID +// @Description Retrieve project details by specific ID +// @Tags Projects +// @Accept json +// @Produce json +// @Param id path string true "Project ID" +// @Success 200 {object} response.CommonResponse +// @Failure 400 {object} response.CommonResponse +// @Failure 404 {object} response.CommonResponse +// @Failure 500 {object} response.CommonResponse +// @Router /projects/{id} [get] +func (h *ProjectController) GetProjectByID(c fiber.Ctx) error { + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + + projectID := c.Params("id") + res, err := h.service.GetProjectByID(ctx, projectID) + if err != nil { + return c.Status(fiber.StatusInternalServerError).JSON(response.CommonResponse{ + Status: false, + Message: err.Error(), + }) + } + + return c.Status(fiber.StatusOK).JSON(response.CommonResponse{ + Status: true, + Data: res, + }) +} + +// SearchProject godoc +// @Summary Search projects +// @Description Search and filter projects with pagination +// @Tags Projects +// @Accept json +// @Produce json +// @Param query query request.SearchProjectDto false "Search Query" +// @Success 200 {object} response.PaginatedResponse +// @Failure 400 {object} response.CommonResponse +// @Failure 500 {object} response.CommonResponse +// @Router /projects [get] +func (h *ProjectController) SearchProject(c fiber.Ctx) error { + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + + dto := &request.SearchProjectDto{} + if err := validator.ValidateQueryDto(c, dto); err != nil { + return c.Status(fiber.StatusBadRequest).JSON(response.CommonResponse{ + Status: false, + Errors: err, + 
}) + } + + res, err := h.service.SearchProject(ctx, dto) + if err != nil { + return c.Status(fiber.StatusInternalServerError).JSON(response.CommonResponse{ + Status: false, + Message: err.Error(), + }) + } + + return c.Status(fiber.StatusOK).JSON(res) +} + +// CreateProject godoc +// @Summary Create a new project +// @Description Create a project for the current authenticated user +// @Tags Projects +// @Accept json +// @Produce json +// @Security BearerAuth +// @Param request body request.CreateProjectDto true "Project Data" +// @Success 201 {object} response.CommonResponse +// @Failure 400 {object} response.CommonResponse +// @Failure 500 {object} response.CommonResponse +// @Router /projects [post] +func (h *ProjectController) CreateProject(c fiber.Ctx) error { + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + + dto := &request.CreateProjectDto{} + if err := validator.ValidateBodyDto(c, dto); err != nil { + return c.Status(fiber.StatusBadRequest).JSON(response.CommonResponse{ + Status: false, + Errors: err, + }) + } + + uid := c.Locals("uid").(string) + res, err := h.service.CreateProject(ctx, uid, dto) + if err != nil { + return c.Status(fiber.StatusInternalServerError).JSON(response.CommonResponse{ + Status: false, + Message: err.Error(), + }) + } + + return c.Status(fiber.StatusCreated).JSON(response.CommonResponse{ + Status: true, + Data: res, + }) +} + +// UpdateProject godoc +// @Summary Update a project +// @Description Update project properties (Title, Description, Status) +// @Tags Projects +// @Accept json +// @Produce json +// @Security BearerAuth +// @Param id path string true "Project ID" +// @Param request body request.UpdateProjectDto true "Project Data" +// @Success 200 {object} response.CommonResponse +// @Failure 400 {object} response.CommonResponse +// @Failure 404 {object} response.CommonResponse +// @Failure 500 {object} response.CommonResponse +// @Router /projects/{id} [put] +func (h 
*ProjectController) UpdateProject(c fiber.Ctx) error { + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + + projectID := c.Params("id") + dto := &request.UpdateProjectDto{} + if err := validator.ValidateBodyDto(c, dto); err != nil { + return c.Status(fiber.StatusBadRequest).JSON(response.CommonResponse{ + Status: false, + Errors: err, + }) + } + + res, err := h.service.UpdateProject(ctx, projectID, dto) + if err != nil { + return c.Status(fiber.StatusInternalServerError).JSON(response.CommonResponse{ + Status: false, + Message: err.Error(), + }) + } + + return c.Status(fiber.StatusOK).JSON(response.CommonResponse{ + Status: true, + Data: res, + }) +} + +// DeleteProject godoc +// @Summary Delete a project +// @Description Delete project by ID +// @Tags Projects +// @Accept json +// @Produce json +// @Security BearerAuth +// @Param id path string true "Project ID" +// @Success 200 {object} response.CommonResponse +// @Failure 400 {object} response.CommonResponse +// @Failure 404 {object} response.CommonResponse +// @Failure 500 {object} response.CommonResponse +// @Router /projects/{id} [delete] +func (h *ProjectController) DeleteProject(c fiber.Ctx) error { + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + + projectID := c.Params("id") + err := h.service.DeleteProject(ctx, projectID) + if err != nil { + return c.Status(fiber.StatusInternalServerError).JSON(response.CommonResponse{ + Status: false, + Message: err.Error(), + }) + } + + return c.Status(fiber.StatusOK).JSON(response.CommonResponse{ + Status: true, + Message: "Project deleted successfully", + }) +} diff --git a/internal/controllers/userController.go b/internal/controllers/userController.go index d683dff..c166d89 100644 --- a/internal/controllers/userController.go +++ b/internal/controllers/userController.go @@ -15,17 +15,20 @@ type UserController struct { service services.UserService mediaService services.MediaService 
verificationService services.VerificationService + projectService services.ProjectService } func NewUserController( svc services.UserService, mediaSvc services.MediaService, verificationSvc services.VerificationService, + projectSvc services.ProjectService, ) *UserController { return &UserController{ service: svc, mediaService: mediaSvc, verificationService: verificationSvc, + projectService: projectSvc, } } @@ -408,3 +411,80 @@ func (h *UserController) SearchUser(c fiber.Ctx) error { } return c.Status(fiber.StatusOK).JSON(res) } + +// GetUserProject godoc +// @Summary Get current user's projects +// @Description Retrieve project list of the currently authenticated user +// @Tags Users +// @Accept json +// @Produce json +// @Security BearerAuth +// @Param query query request.GetProjectsByUserDto false "Pagination Query" +// @Success 200 {object} response.CommonResponse +// @Failure 400 {object} response.CommonResponse +// @Failure 500 {object} response.CommonResponse +// @Router /users/current/project [get] +func (h *UserController) GetUserProject(c fiber.Ctx) error { + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + + dto := &request.GetProjectsByUserDto{} + if err := validator.ValidateQueryDto(c, dto); err != nil { + return c.Status(fiber.StatusBadRequest).JSON(response.CommonResponse{ + Status: false, + Errors: err, + }) + } + + res, err := h.projectService.GetProjectByUserID(ctx, c.Locals("uid").(string), dto) + if err != nil { + return c.Status(fiber.StatusInternalServerError).JSON(response.CommonResponse{ + Status: false, + Message: err.Error(), + }) + } + + return c.Status(fiber.StatusOK).JSON(response.CommonResponse{ + Status: true, + Data: res, + }) +} + +// GetProjectByUserID godoc +// @Summary Get user's projects by user ID +// @Description Retrieve project list by specific user ID +// @Tags Users +// @Accept json +// @Produce json +// @Param id path string true "User ID" +// @Param query query 
request.GetProjectsByUserDto false "Pagination Query" +// @Success 200 {object} response.CommonResponse +// @Failure 400 {object} response.CommonResponse +// @Failure 500 {object} response.CommonResponse +// @Router /users/{id}/project [get] +func (h *UserController) GetProjectByUserID(c fiber.Ctx) error { + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + + userID := c.Params("id") + dto := &request.GetProjectsByUserDto{} + if err := validator.ValidateQueryDto(c, dto); err != nil { + return c.Status(fiber.StatusBadRequest).JSON(response.CommonResponse{ + Status: false, + Errors: err, + }) + } + + res, err := h.projectService.GetProjectByUserID(ctx, userID, dto) + if err != nil { + return c.Status(fiber.StatusInternalServerError).JSON(response.CommonResponse{ + Status: false, + Message: err.Error(), + }) + } + + return c.Status(fiber.StatusOK).JSON(response.CommonResponse{ + Status: true, + Data: res, + }) +} diff --git a/internal/dtos/request/project.go b/internal/dtos/request/project.go new file mode 100644 index 0000000..7c1f37a --- /dev/null +++ b/internal/dtos/request/project.go @@ -0,0 +1,30 @@ +package request + +import "time" + +type SearchProjectDto struct { + PaginationDto + Sort string `json:"sort" query:"sort" validate:"omitempty,oneof=created_at updated_at title"` + Search string `json:"search" query:"search" validate:"omitempty,max=200"` + UserIDs []string `json:"user_ids" query:"user_ids" validate:"omitempty,dive,uuid"` + Statuses []string `json:"statuses" query:"statuses" validate:"omitempty,dive,oneof=PRIVATE PUBLIC ARCHIVE"` + CreatedFrom *time.Time `json:"created_from" query:"created_from"` + CreatedTo *time.Time `json:"created_to" query:"created_to"` +} + +type GetProjectsByUserDto struct { + CursorID string `json:"cursor_id" query:"cursor_id" validate:"omitempty,uuid"` + Limit int32 `json:"limit" query:"limit" validate:"omitempty,min=1,max=100"` +} + +type CreateProjectDto struct { + Title string 
`json:"title" validate:"required,max=255"` + Description *string `json:"description" validate:"omitempty"` + Status *string `json:"status" validate:"omitempty,oneof=PRIVATE PUBLIC ARCHIVE"` +} + +type UpdateProjectDto struct { + Title *string `json:"title" validate:"omitempty,max=255"` + Description *string `json:"description" validate:"omitempty"` + Status *string `json:"status" validate:"omitempty,oneof=PRIVATE PUBLIC ARCHIVE"` +} diff --git a/internal/dtos/response/common.go b/internal/dtos/response/common.go index d67ebd5..5da956e 100644 --- a/internal/dtos/response/common.go +++ b/internal/dtos/response/common.go @@ -15,7 +15,7 @@ type CommonResponse struct { type JWTClaims struct { UId string `json:"uid"` - Roles []constants.Role `json:"roles"` + Roles []constants.RoleType `json:"roles"` TokenVersion int32 `json:"token_version"` jwt.RegisteredClaims } diff --git a/internal/dtos/response/project.go b/internal/dtos/response/project.go new file mode 100644 index 0000000..ced7acf --- /dev/null +++ b/internal/dtos/response/project.go @@ -0,0 +1,20 @@ +package response + +import "time" + +type ProjectResponse struct { + ID string `json:"id"` + Title string `json:"title"` + Description string `json:"description"` + LatestRevisionID *string `json:"latest_revision_id,omitempty"` + VersionCount int32 `json:"version_count"` + ProjectStatus string `json:"project_status"` + LockedBy *string `json:"locked_by,omitempty"` + IsDeleted bool `json:"is_deleted"` + UserID string `json:"user_id"` + CreatedAt *time.Time `json:"created_at"` + UpdatedAt *time.Time `json:"updated_at"` + User *UserSimpleResponse `json:"user,omitempty"` + CommitIds []string `json:"commit_ids"` + SubmissionIds []string `json:"submission_ids"` +} diff --git a/internal/gen/sqlc/entities.sql.go b/internal/gen/sqlc/entities.sql.go index 14fcd28..3f7ae5c 100644 --- a/internal/gen/sqlc/entities.sql.go +++ b/internal/gen/sqlc/entities.sql.go @@ -53,6 +53,38 @@ func (q *Queries) DeleteEntity(ctx context.Context, 
id pgtype.UUID) error { return err } +const getEntitiesByIDs = `-- name: GetEntitiesByIDs :many +SELECT id, name, description, thumbnail_url, is_deleted, created_at, updated_at FROM entities WHERE id = ANY($1::uuid[]) AND is_deleted = false +` + +func (q *Queries) GetEntitiesByIDs(ctx context.Context, dollar_1 []pgtype.UUID) ([]Entity, error) { + rows, err := q.db.Query(ctx, getEntitiesByIDs, dollar_1) + if err != nil { + return nil, err + } + defer rows.Close() + items := []Entity{} + for rows.Next() { + var i Entity + if err := rows.Scan( + &i.ID, + &i.Name, + &i.Description, + &i.ThumbnailUrl, + &i.IsDeleted, + &i.CreatedAt, + &i.UpdatedAt, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + const getEntityById = `-- name: GetEntityById :one SELECT id, name, description, thumbnail_url, is_deleted, created_at, updated_at FROM entities diff --git a/internal/gen/sqlc/files.sql.go b/internal/gen/sqlc/files.sql.go index 9701012..43b7743 100644 --- a/internal/gen/sqlc/files.sql.go +++ b/internal/gen/sqlc/files.sql.go @@ -132,6 +132,41 @@ func (q *Queries) GetMediaByID(ctx context.Context, id pgtype.UUID) (Media, erro return i, err } +const getMediaByIDs = `-- name: GetMediaByIDs :many +SELECT id, user_id, storage_key, original_name, mime_type, size, file_metadata, created_at, updated_at FROM medias +WHERE id = ANY($1::uuid[]) +` + +func (q *Queries) GetMediaByIDs(ctx context.Context, dollar_1 []pgtype.UUID) ([]Media, error) { + rows, err := q.db.Query(ctx, getMediaByIDs, dollar_1) + if err != nil { + return nil, err + } + defer rows.Close() + items := []Media{} + for rows.Next() { + var i Media + if err := rows.Scan( + &i.ID, + &i.UserID, + &i.StorageKey, + &i.OriginalName, + &i.MimeType, + &i.Size, + &i.FileMetadata, + &i.CreatedAt, + &i.UpdatedAt, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return 
nil, err + } + return items, nil +} + const getMediasByUserID = `-- name: GetMediasByUserID :many SELECT id, user_id, storage_key, original_name, mime_type, size, file_metadata, created_at, updated_at FROM medias WHERE user_id = $1 diff --git a/internal/gen/sqlc/geometries.sql.go b/internal/gen/sqlc/geometries.sql.go index 6d2cfe4..b99b767 100644 --- a/internal/gen/sqlc/geometries.sql.go +++ b/internal/gen/sqlc/geometries.sql.go @@ -67,7 +67,7 @@ RETURNING id, geo_type, draw_geometry, binding, time_start, time_end, ` type CreateGeometryParams struct { - GeoType string `json:"geo_type"` + GeoType int16 `json:"geo_type"` DrawGeometry json.RawMessage `json:"draw_geometry"` Binding []byte `json:"binding"` TimeStart pgtype.Int4 `json:"time_start"` @@ -80,7 +80,7 @@ type CreateGeometryParams struct { type CreateGeometryRow struct { ID pgtype.UUID `json:"id"` - GeoType string `json:"geo_type"` + GeoType int16 `json:"geo_type"` DrawGeometry json.RawMessage `json:"draw_geometry"` Binding []byte `json:"binding"` TimeStart pgtype.Int4 `json:"time_start"` @@ -137,6 +137,68 @@ func (q *Queries) DeleteGeometry(ctx context.Context, id pgtype.UUID) error { return err } +const getGeometriesByIDs = `-- name: GetGeometriesByIDs :many +SELECT + id, geo_type, draw_geometry, binding, time_start, time_end, + ST_XMin(bbox)::float8 as min_lng, + ST_YMin(bbox)::float8 as min_lat, + ST_XMax(bbox)::float8 as max_lng, + ST_YMax(bbox)::float8 as max_lat, + is_deleted, created_at, updated_at +FROM geometries +WHERE id = ANY($1::uuid[]) AND is_deleted = false +` + +type GetGeometriesByIDsRow struct { + ID pgtype.UUID `json:"id"` + GeoType int16 `json:"geo_type"` + DrawGeometry json.RawMessage `json:"draw_geometry"` + Binding []byte `json:"binding"` + TimeStart pgtype.Int4 `json:"time_start"` + TimeEnd pgtype.Int4 `json:"time_end"` + MinLng float64 `json:"min_lng"` + MinLat float64 `json:"min_lat"` + MaxLng float64 `json:"max_lng"` + MaxLat float64 `json:"max_lat"` + IsDeleted bool 
`json:"is_deleted"` + CreatedAt pgtype.Timestamptz `json:"created_at"` + UpdatedAt pgtype.Timestamptz `json:"updated_at"` +} + +func (q *Queries) GetGeometriesByIDs(ctx context.Context, dollar_1 []pgtype.UUID) ([]GetGeometriesByIDsRow, error) { + rows, err := q.db.Query(ctx, getGeometriesByIDs, dollar_1) + if err != nil { + return nil, err + } + defer rows.Close() + items := []GetGeometriesByIDsRow{} + for rows.Next() { + var i GetGeometriesByIDsRow + if err := rows.Scan( + &i.ID, + &i.GeoType, + &i.DrawGeometry, + &i.Binding, + &i.TimeStart, + &i.TimeEnd, + &i.MinLng, + &i.MinLat, + &i.MaxLng, + &i.MaxLat, + &i.IsDeleted, + &i.CreatedAt, + &i.UpdatedAt, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + const getGeometryById = `-- name: GetGeometryById :one SELECT id, geo_type, draw_geometry, binding, time_start, time_end, ST_XMin(bbox)::float8 as min_lng, ST_YMin(bbox)::float8 as min_lat, ST_XMax(bbox)::float8 as max_lng, ST_YMax(bbox)::float8 as max_lat, @@ -147,7 +209,7 @@ WHERE id = $1 AND is_deleted = false type GetGeometryByIdRow struct { ID pgtype.UUID `json:"id"` - GeoType string `json:"geo_type"` + GeoType int16 `json:"geo_type"` DrawGeometry json.RawMessage `json:"draw_geometry"` Binding []byte `json:"binding"` TimeStart pgtype.Int4 `json:"time_start"` @@ -232,7 +294,7 @@ type SearchGeometriesParams struct { type SearchGeometriesRow struct { ID pgtype.UUID `json:"id"` - GeoType string `json:"geo_type"` + GeoType int16 `json:"geo_type"` DrawGeometry json.RawMessage `json:"draw_geometry"` Binding []byte `json:"binding"` TimeStart pgtype.Int4 `json:"time_start"` @@ -308,7 +370,7 @@ RETURNING id, geo_type, draw_geometry, binding, time_start, time_end, ` type UpdateGeometryParams struct { - GeoType pgtype.Text `json:"geo_type"` + GeoType pgtype.Int2 `json:"geo_type"` DrawGeometry []byte `json:"draw_geometry"` Binding []byte `json:"binding"` TimeStart 
pgtype.Int4 `json:"time_start"` @@ -323,7 +385,7 @@ type UpdateGeometryParams struct { type UpdateGeometryRow struct { ID pgtype.UUID `json:"id"` - GeoType string `json:"geo_type"` + GeoType int16 `json:"geo_type"` DrawGeometry json.RawMessage `json:"draw_geometry"` Binding []byte `json:"binding"` TimeStart pgtype.Int4 `json:"time_start"` diff --git a/internal/gen/sqlc/models.go b/internal/gen/sqlc/models.go index 14f6933..f1f3697 100644 --- a/internal/gen/sqlc/models.go +++ b/internal/gen/sqlc/models.go @@ -32,7 +32,7 @@ type EntityWiki struct { type Geometry struct { ID pgtype.UUID `json:"id"` - GeoType string `json:"geo_type"` + GeoType int16 `json:"geo_type"` DrawGeometry json.RawMessage `json:"draw_geometry"` Binding []byte `json:"binding"` TimeStart pgtype.Int4 `json:"time_start"` @@ -55,6 +55,33 @@ type Media struct { UpdatedAt pgtype.Timestamptz `json:"updated_at"` } +type Project struct { + ID pgtype.UUID `json:"id"` + Title string `json:"title"` + Description pgtype.Text `json:"description"` + LatestRevisionID pgtype.UUID `json:"latest_revision_id"` + VersionCount int32 `json:"version_count"` + ProjectStatus int16 `json:"project_status"` + LockedBy pgtype.UUID `json:"locked_by"` + IsDeleted bool `json:"is_deleted"` + UserID pgtype.UUID `json:"user_id"` + CreatedAt pgtype.Timestamptz `json:"created_at"` + UpdatedAt pgtype.Timestamptz `json:"updated_at"` +} + +type Revision struct { + ID pgtype.UUID `json:"id"` + ProjectID pgtype.UUID `json:"project_id"` + VersionNo int32 `json:"version_no"` + SnapshotJson json.RawMessage `json:"snapshot_json"` + SnapshotHash pgtype.Text `json:"snapshot_hash"` + ParentID pgtype.UUID `json:"parent_id"` + UserID pgtype.UUID `json:"user_id"` + EditSummary pgtype.Text `json:"edit_summary"` + IsDeleted bool `json:"is_deleted"` + CreatedAt pgtype.Timestamptz `json:"created_at"` +} + type Role struct { ID pgtype.UUID `json:"id"` Name string `json:"name"` @@ -63,6 +90,19 @@ type Role struct { UpdatedAt pgtype.Timestamptz 
`json:"updated_at"` } +type Submission struct { + ID pgtype.UUID `json:"id"` + ProjectID pgtype.UUID `json:"project_id"` + RevisionID pgtype.UUID `json:"revision_id"` + SubmittedBy pgtype.UUID `json:"submitted_by"` + SubmittedAt pgtype.Timestamptz `json:"submitted_at"` + Status int16 `json:"status"` + ReviewedBy pgtype.UUID `json:"reviewed_by"` + ReviewedAt pgtype.Timestamptz `json:"reviewed_at"` + ReviewNote pgtype.Text `json:"review_note"` + IsDeleted bool `json:"is_deleted"` +} + type User struct { ID pgtype.UUID `json:"id"` Email string `json:"email"` diff --git a/internal/gen/sqlc/project.sql.go b/internal/gen/sqlc/project.sql.go new file mode 100644 index 0000000..4fe3ef3 --- /dev/null +++ b/internal/gen/sqlc/project.sql.go @@ -0,0 +1,556 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.30.0 +// source: project.sql + +package sqlc + +import ( + "context" + + "github.com/jackc/pgx/v5/pgtype" +) + +const countProjects = `-- name: CountProjects :one +SELECT count(*) +FROM projects p +WHERE p.is_deleted = false + AND ( + $1::text[] IS NULL + OR p.project_status = ANY($1::text[]) + ) + AND ($2::uuid[] IS NULL OR p.user_id = ANY($2::uuid[])) + AND ( + $3::text IS NULL OR + p.title ILIKE '%' || $3::text || '%' OR + p.description ILIKE '%' || $3::text || '%' + ) + AND ($4::timestamptz IS NULL OR p.created_at >= $4::timestamptz) + AND ($5::timestamptz IS NULL OR p.created_at <= $5::timestamptz) +` + +type CountProjectsParams struct { + Statuses []string `json:"statuses"` + UserIds []pgtype.UUID `json:"user_ids"` + SearchText pgtype.Text `json:"search_text"` + CreatedFrom pgtype.Timestamptz `json:"created_from"` + CreatedTo pgtype.Timestamptz `json:"created_to"` +} + +func (q *Queries) CountProjects(ctx context.Context, arg CountProjectsParams) (int64, error) { + row := q.db.QueryRow(ctx, countProjects, + arg.Statuses, + arg.UserIds, + arg.SearchText, + arg.CreatedFrom, + arg.CreatedTo, + ) + var count int64 + err := row.Scan(&count) + return 
count, err +} + +const createProject = `-- name: CreateProject :one +INSERT INTO projects ( + title, description, project_status, user_id +) VALUES ( + $1, $2, $3, $4 +) +RETURNING + id, title, description, latest_revision_id, version_count, project_status, locked_by, is_deleted, user_id, created_at, updated_at, + json_build_object( + 'id', u.id, + 'email', u.email, + 'display_name', up.display_name, + 'full_name', up.full_name, + 'avatar_url', up.avatar_url + )::json AS user, + '{}'::uuid[] AS commit_ids, + '{}'::uuid[] AS submission_ids +` + +type CreateProjectParams struct { + Title string `json:"title"` + Description pgtype.Text `json:"description"` + ProjectStatus int16 `json:"project_status"` + UserID pgtype.UUID `json:"user_id"` +} + +type CreateProjectRow struct { + ID pgtype.UUID `json:"id"` + Title string `json:"title"` + Description pgtype.Text `json:"description"` + LatestRevisionID pgtype.UUID `json:"latest_revision_id"` + VersionCount int32 `json:"version_count"` + ProjectStatus int16 `json:"project_status"` + LockedBy pgtype.UUID `json:"locked_by"` + IsDeleted bool `json:"is_deleted"` + UserID pgtype.UUID `json:"user_id"` + CreatedAt pgtype.Timestamptz `json:"created_at"` + UpdatedAt pgtype.Timestamptz `json:"updated_at"` + User []byte `json:"user"` + CommitIds []pgtype.UUID `json:"commit_ids"` + SubmissionIds []pgtype.UUID `json:"submission_ids"` +} + +func (q *Queries) CreateProject(ctx context.Context, arg CreateProjectParams) (CreateProjectRow, error) { + row := q.db.QueryRow(ctx, createProject, + arg.Title, + arg.Description, + arg.ProjectStatus, + arg.UserID, + ) + var i CreateProjectRow + err := row.Scan( + &i.ID, + &i.Title, + &i.Description, + &i.LatestRevisionID, + &i.VersionCount, + &i.ProjectStatus, + &i.LockedBy, + &i.IsDeleted, + &i.UserID, + &i.CreatedAt, + &i.UpdatedAt, + &i.User, + &i.CommitIds, + &i.SubmissionIds, + ) + return i, err +} + +const deleteProject = `-- name: DeleteProject :exec +UPDATE projects +SET + is_deleted = true 
+WHERE id = $1 +` + +func (q *Queries) DeleteProject(ctx context.Context, id pgtype.UUID) error { + _, err := q.db.Exec(ctx, deleteProject, id) + return err +} + +const getProjectById = `-- name: GetProjectById :one +SELECT + p.id, p.title, p.description, p.latest_revision_id, p.version_count, p.project_status, p.locked_by, p.is_deleted, p.user_id, p.created_at, p.updated_at, + COALESCE( + (SELECT array_agg(id) FROM revisions WHERE project_id = p.id), + '{}' + )::uuid[] AS commit_ids, + COALESCE( + (SELECT array_agg(id) FROM submissions WHERE project_id = p.id), + '{}' + )::uuid[] AS submission_ids, + json_build_object( + 'id', u.id, + 'email', u.email, + 'display_name', up.display_name, + 'full_name', up.full_name, + 'avatar_url', up.avatar_url + )::json AS user +FROM projects p +JOIN users u ON p.user_id = u.id +LEFT JOIN user_profiles up ON u.id = up.user_id +WHERE p.id = $1 AND p.is_deleted = false +` + +type GetProjectByIdRow struct { + ID pgtype.UUID `json:"id"` + Title string `json:"title"` + Description pgtype.Text `json:"description"` + LatestRevisionID pgtype.UUID `json:"latest_revision_id"` + VersionCount int32 `json:"version_count"` + ProjectStatus int16 `json:"project_status"` + LockedBy pgtype.UUID `json:"locked_by"` + IsDeleted bool `json:"is_deleted"` + UserID pgtype.UUID `json:"user_id"` + CreatedAt pgtype.Timestamptz `json:"created_at"` + UpdatedAt pgtype.Timestamptz `json:"updated_at"` + CommitIds []pgtype.UUID `json:"commit_ids"` + SubmissionIds []pgtype.UUID `json:"submission_ids"` + User []byte `json:"user"` +} + +func (q *Queries) GetProjectById(ctx context.Context, id pgtype.UUID) (GetProjectByIdRow, error) { + row := q.db.QueryRow(ctx, getProjectById, id) + var i GetProjectByIdRow + err := row.Scan( + &i.ID, + &i.Title, + &i.Description, + &i.LatestRevisionID, + &i.VersionCount, + &i.ProjectStatus, + &i.LockedBy, + &i.IsDeleted, + &i.UserID, + &i.CreatedAt, + &i.UpdatedAt, + &i.CommitIds, + &i.SubmissionIds, + &i.User, + ) + return i, err 
+} + +const getProjectsByIDs = `-- name: GetProjectsByIDs :many +SELECT + p.id, p.title, p.description, p.latest_revision_id, p.version_count, p.project_status, p.locked_by, p.is_deleted, p.user_id, p.created_at, p.updated_at, + COALESCE((SELECT array_agg(id) FROM revisions WHERE project_id = p.id), '{}')::uuid[] AS commit_ids, + COALESCE((SELECT array_agg(id) FROM submissions WHERE project_id = p.id), '{}')::uuid[] AS submission_ids, + json_build_object( + 'id', u.id, + 'email', u.email, + 'display_name', up.display_name, + 'full_name', up.full_name, + 'avatar_url', up.avatar_url + )::json AS user +FROM projects p +JOIN users u ON p.user_id = u.id +LEFT JOIN user_profiles up ON u.id = up.user_id +WHERE p.id = ANY($1::uuid[]) AND p.is_deleted = false +` + +type GetProjectsByIDsRow struct { + ID pgtype.UUID `json:"id"` + Title string `json:"title"` + Description pgtype.Text `json:"description"` + LatestRevisionID pgtype.UUID `json:"latest_revision_id"` + VersionCount int32 `json:"version_count"` + ProjectStatus int16 `json:"project_status"` + LockedBy pgtype.UUID `json:"locked_by"` + IsDeleted bool `json:"is_deleted"` + UserID pgtype.UUID `json:"user_id"` + CreatedAt pgtype.Timestamptz `json:"created_at"` + UpdatedAt pgtype.Timestamptz `json:"updated_at"` + CommitIds []pgtype.UUID `json:"commit_ids"` + SubmissionIds []pgtype.UUID `json:"submission_ids"` + User []byte `json:"user"` +} + +func (q *Queries) GetProjectsByIDs(ctx context.Context, dollar_1 []pgtype.UUID) ([]GetProjectsByIDsRow, error) { + rows, err := q.db.Query(ctx, getProjectsByIDs, dollar_1) + if err != nil { + return nil, err + } + defer rows.Close() + items := []GetProjectsByIDsRow{} + for rows.Next() { + var i GetProjectsByIDsRow + if err := rows.Scan( + &i.ID, + &i.Title, + &i.Description, + &i.LatestRevisionID, + &i.VersionCount, + &i.ProjectStatus, + &i.LockedBy, + &i.IsDeleted, + &i.UserID, + &i.CreatedAt, + &i.UpdatedAt, + &i.CommitIds, + &i.SubmissionIds, + &i.User, + ); err != nil { + return 
nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getProjectsByUserId = `-- name: GetProjectsByUserId :many +SELECT + p.id, p.title, p.description, p.latest_revision_id, p.version_count, p.project_status, p.locked_by, p.is_deleted, p.user_id, p.created_at, p.updated_at, + COALESCE( + (SELECT array_agg(id) FROM revisions WHERE project_id = p.id), + '{}' + )::uuid[] AS commit_ids, + COALESCE( + (SELECT array_agg(id) FROM submissions WHERE project_id = p.id), + '{}' + )::uuid[] AS submission_ids, + json_build_object( + 'id', u.id, + 'email', u.email, + 'display_name', up.display_name, + 'full_name', up.full_name, + 'avatar_url', up.avatar_url + )::json AS user +FROM projects p +JOIN users u ON p.user_id = u.id +LEFT JOIN user_profiles up ON u.id = up.user_id +WHERE p.user_id = $1 + AND p.is_deleted = false + AND ($2::uuid IS NULL OR p.id < $2::uuid) +ORDER BY p.updated_at DESC +LIMIT $3 +` + +type GetProjectsByUserIdParams struct { + UserID pgtype.UUID `json:"user_id"` + CursorID pgtype.UUID `json:"cursor_id"` + Limit int32 `json:"limit"` +} + +type GetProjectsByUserIdRow struct { + ID pgtype.UUID `json:"id"` + Title string `json:"title"` + Description pgtype.Text `json:"description"` + LatestRevisionID pgtype.UUID `json:"latest_revision_id"` + VersionCount int32 `json:"version_count"` + ProjectStatus int16 `json:"project_status"` + LockedBy pgtype.UUID `json:"locked_by"` + IsDeleted bool `json:"is_deleted"` + UserID pgtype.UUID `json:"user_id"` + CreatedAt pgtype.Timestamptz `json:"created_at"` + UpdatedAt pgtype.Timestamptz `json:"updated_at"` + CommitIds []pgtype.UUID `json:"commit_ids"` + SubmissionIds []pgtype.UUID `json:"submission_ids"` + User []byte `json:"user"` +} + +func (q *Queries) GetProjectsByUserId(ctx context.Context, arg GetProjectsByUserIdParams) ([]GetProjectsByUserIdRow, error) { + rows, err := q.db.Query(ctx, getProjectsByUserId, arg.UserID, arg.CursorID, 
arg.Limit) + if err != nil { + return nil, err + } + defer rows.Close() + items := []GetProjectsByUserIdRow{} + for rows.Next() { + var i GetProjectsByUserIdRow + if err := rows.Scan( + &i.ID, + &i.Title, + &i.Description, + &i.LatestRevisionID, + &i.VersionCount, + &i.ProjectStatus, + &i.LockedBy, + &i.IsDeleted, + &i.UserID, + &i.CreatedAt, + &i.UpdatedAt, + &i.CommitIds, + &i.SubmissionIds, + &i.User, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const searchProjects = `-- name: SearchProjects :many +SELECT + p.id, p.title, p.description, p.latest_revision_id, p.version_count, p.project_status, p.locked_by, p.is_deleted, p.user_id, p.created_at, p.updated_at, + COALESCE( + (SELECT array_agg(id) FROM revisions WHERE project_id = p.id), + '{}' + )::uuid[] AS commit_ids, + COALESCE( + (SELECT array_agg(id) FROM submissions WHERE project_id = p.id), + '{}' + )::uuid[] AS submission_ids, + json_build_object( + 'id', u.id, + 'email', u.email, + 'display_name', up.display_name, + 'full_name', up.full_name, + 'avatar_url', up.avatar_url + )::json AS user +FROM projects p +JOIN users u ON p.user_id = u.id +LEFT JOIN user_profiles up ON u.id = up.user_id +WHERE p.is_deleted = false + AND ( + $1::text[] IS NULL + OR p.project_status = ANY($1::text[]) + ) + AND ($2::uuid[] IS NULL OR p.user_id = ANY($2::uuid[])) + AND ( + $3::text IS NULL OR + p.title ILIKE '%' || $3::text || '%' OR + p.description ILIKE '%' || $3::text || '%' + ) + AND ($4::timestamptz IS NULL OR p.created_at >= $4::timestamptz) + AND ($5::timestamptz IS NULL OR p.created_at <= $5::timestamptz) +ORDER BY + CASE WHEN $6 = 'created_at' AND $7 = 'asc' THEN p.created_at END ASC, + CASE WHEN $6 = 'created_at' AND $7 = 'desc' THEN p.created_at END DESC, + CASE WHEN $6 = 'updated_at' AND $7 = 'asc' THEN p.updated_at END ASC, + CASE WHEN $6 = 'updated_at' AND $7 = 'desc' THEN p.updated_at END DESC, + CASE 
WHEN $6 = 'title' AND $7 = 'asc' THEN p.title END ASC, + CASE WHEN $6 = 'title' AND $7 = 'desc' THEN p.title END DESC, + CASE WHEN $6 IS NULL THEN p.updated_at END DESC +LIMIT $9 +OFFSET $8 +` + +type SearchProjectsParams struct { + Statuses []string `json:"statuses"` + UserIds []pgtype.UUID `json:"user_ids"` + SearchText pgtype.Text `json:"search_text"` + CreatedFrom pgtype.Timestamptz `json:"created_from"` + CreatedTo pgtype.Timestamptz `json:"created_to"` + Sort interface{} `json:"sort"` + Order interface{} `json:"order"` + Offset int32 `json:"offset"` + Limit int32 `json:"limit"` +} + +type SearchProjectsRow struct { + ID pgtype.UUID `json:"id"` + Title string `json:"title"` + Description pgtype.Text `json:"description"` + LatestRevisionID pgtype.UUID `json:"latest_revision_id"` + VersionCount int32 `json:"version_count"` + ProjectStatus int16 `json:"project_status"` + LockedBy pgtype.UUID `json:"locked_by"` + IsDeleted bool `json:"is_deleted"` + UserID pgtype.UUID `json:"user_id"` + CreatedAt pgtype.Timestamptz `json:"created_at"` + UpdatedAt pgtype.Timestamptz `json:"updated_at"` + CommitIds []pgtype.UUID `json:"commit_ids"` + SubmissionIds []pgtype.UUID `json:"submission_ids"` + User []byte `json:"user"` +} + +func (q *Queries) SearchProjects(ctx context.Context, arg SearchProjectsParams) ([]SearchProjectsRow, error) { + rows, err := q.db.Query(ctx, searchProjects, + arg.Statuses, + arg.UserIds, + arg.SearchText, + arg.CreatedFrom, + arg.CreatedTo, + arg.Sort, + arg.Order, + arg.Offset, + arg.Limit, + ) + if err != nil { + return nil, err + } + defer rows.Close() + items := []SearchProjectsRow{} + for rows.Next() { + var i SearchProjectsRow + if err := rows.Scan( + &i.ID, + &i.Title, + &i.Description, + &i.LatestRevisionID, + &i.VersionCount, + &i.ProjectStatus, + &i.LockedBy, + &i.IsDeleted, + &i.UserID, + &i.CreatedAt, + &i.UpdatedAt, + &i.CommitIds, + &i.SubmissionIds, + &i.User, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if 
err := rows.Err(); err != nil {
+		return nil, err
+	}
+	return items, nil
+}
+
+const updateProject = `-- name: UpdateProject :one
+UPDATE projects
+SET
+    title = COALESCE($1, title),
+    description = COALESCE($2, description),
+    latest_revision_id = COALESCE($3, latest_revision_id),
+    version_count = COALESCE($4, version_count),
+    project_status = COALESCE($5, project_status),
+    locked_by = COALESCE($6, locked_by),
+    updated_at = NOW()
+FROM users u
+LEFT JOIN user_profiles up ON u.id = up.user_id
+WHERE projects.id = $7 AND projects.user_id = u.id
+  AND projects.is_deleted = false
+RETURNING
+    projects.id, projects.title, projects.description, projects.latest_revision_id, projects.version_count, projects.project_status, projects.locked_by, projects.is_deleted, projects.user_id, projects.created_at, projects.updated_at,
+    json_build_object(
+        'id', u.id,
+        'email', u.email,
+        'display_name', up.display_name,
+        'full_name', up.full_name,
+        'avatar_url', up.avatar_url
+    )::json AS user,
+    COALESCE((SELECT array_agg(id) FROM revisions WHERE project_id = projects.id), '{}')::uuid[] AS commit_ids,
+    COALESCE((SELECT array_agg(id) FROM submissions WHERE project_id = projects.id), '{}')::uuid[] AS submission_ids
+`
+
+type UpdateProjectParams struct {
+	Title            pgtype.Text `json:"title"`
+	Description      pgtype.Text `json:"description"`
+	LatestRevisionID pgtype.UUID `json:"latest_revision_id"`
+	VersionCount     pgtype.Int4 `json:"version_count"`
+	Status           pgtype.Int2 `json:"status"`
+	LockedBy         pgtype.UUID `json:"locked_by"`
+	ID               pgtype.UUID `json:"id"`
+}
+
+type UpdateProjectRow struct {
+	ID               pgtype.UUID        `json:"id"`
+	Title            string             `json:"title"`
+	Description      pgtype.Text        `json:"description"`
+	LatestRevisionID pgtype.UUID        `json:"latest_revision_id"`
+	VersionCount     int32              `json:"version_count"`
+	ProjectStatus    int16              `json:"project_status"`
+	LockedBy         pgtype.UUID        `json:"locked_by"`
+	IsDeleted        bool               `json:"is_deleted"`
+	UserID           pgtype.UUID        `json:"user_id"`
+	CreatedAt        pgtype.Timestamptz `json:"created_at"`
+	UpdatedAt        pgtype.Timestamptz `json:"updated_at"`
+	User             []byte             `json:"user"`
+	CommitIds        []pgtype.UUID      `json:"commit_ids"`
+	SubmissionIds    []pgtype.UUID      `json:"submission_ids"`
+}
+
+func (q *Queries) UpdateProject(ctx context.Context, arg UpdateProjectParams) (UpdateProjectRow, error) {
+	row := q.db.QueryRow(ctx, updateProject,
+		arg.Title,
+		arg.Description,
+		arg.LatestRevisionID,
+		arg.VersionCount,
+		arg.Status,
+		arg.LockedBy,
+		arg.ID,
+	)
+	var i UpdateProjectRow
+	err := row.Scan(
+		&i.ID,
+		&i.Title,
+		&i.Description,
+		&i.LatestRevisionID,
+		&i.VersionCount,
+		&i.ProjectStatus,
+		&i.LockedBy,
+		&i.IsDeleted,
+		&i.UserID,
+		&i.CreatedAt,
+		&i.UpdatedAt,
+		&i.User,
+		&i.CommitIds,
+		&i.SubmissionIds,
+	)
+	return i, err
+}
diff --git a/internal/gen/sqlc/revision.sql.go b/internal/gen/sqlc/revision.sql.go
new file mode 100644
index 0000000..09e19d3
--- /dev/null
+++ b/internal/gen/sqlc/revision.sql.go
@@ -0,0 +1,182 @@
+// Code generated by sqlc. DO NOT EDIT.
+// versions:
+//   sqlc v1.30.0
+// source: revision.sql
+
+package sqlc
+
+import (
+	"context"
+	"encoding/json"
+
+	"github.com/jackc/pgx/v5/pgtype"
+)
+
+const createRevision = `-- name: CreateRevision :one
+INSERT INTO revisions (
+    project_id, version_no, snapshot_json, snapshot_hash, parent_id, user_id, edit_summary
+) VALUES (
+    $1, $2, $3, $4, $5, $6, $7
+)
+RETURNING id, project_id, version_no, snapshot_json, snapshot_hash, parent_id, user_id, edit_summary, is_deleted, created_at
+`
+
+type CreateRevisionParams struct {
+	ProjectID    pgtype.UUID     `json:"project_id"`
+	VersionNo    int32           `json:"version_no"`
+	SnapshotJson json.RawMessage `json:"snapshot_json"`
+	SnapshotHash pgtype.Text     `json:"snapshot_hash"`
+	ParentID     pgtype.UUID     `json:"parent_id"`
+	UserID       pgtype.UUID     `json:"user_id"`
+	EditSummary  pgtype.Text     `json:"edit_summary"`
+}
+
+func (q *Queries) CreateRevision(ctx context.Context, arg CreateRevisionParams) (Revision, error) {
+	row := q.db.QueryRow(ctx, createRevision,
+		arg.ProjectID,
+		arg.VersionNo,
+		arg.SnapshotJson,
+		arg.SnapshotHash,
+		arg.ParentID,
+		arg.UserID,
+		arg.EditSummary,
+	)
+	var i Revision
+	err := row.Scan(
+		&i.ID,
+		&i.ProjectID,
+		&i.VersionNo,
+		&i.SnapshotJson,
+		&i.SnapshotHash,
+		&i.ParentID,
+		&i.UserID,
+		&i.EditSummary,
+		&i.IsDeleted,
+		&i.CreatedAt,
+	)
+	return i, err
+}
+
+const deleteRevision = `-- name: DeleteRevision :exec
+UPDATE revisions
+SET is_deleted = true
+WHERE id = $1
+`
+
+func (q *Queries) DeleteRevision(ctx context.Context, id pgtype.UUID) error {
+	_, err := q.db.Exec(ctx, deleteRevision, id)
+	return err
+}
+
+const getRevisionById = `-- name: GetRevisionById :one
+SELECT id, project_id, version_no, snapshot_json, snapshot_hash, parent_id, user_id, edit_summary, is_deleted, created_at
+FROM revisions
+WHERE id = $1 AND is_deleted = false
+`
+
+func (q *Queries) GetRevisionById(ctx context.Context, id pgtype.UUID) (Revision, error) {
+	row := q.db.QueryRow(ctx, getRevisionById, id)
+	var i Revision
+	err := row.Scan(
+		&i.ID,
+		&i.ProjectID,
+		&i.VersionNo,
+		&i.SnapshotJson,
+		&i.SnapshotHash,
+		&i.ParentID,
+		&i.UserID,
+		&i.EditSummary,
+		&i.IsDeleted,
+		&i.CreatedAt,
+	)
+	return i, err
+}
+
+const getRevisionsByIDs = `-- name: GetRevisionsByIDs :many
+SELECT id, project_id, version_no, snapshot_json, snapshot_hash, parent_id, user_id, edit_summary, is_deleted, created_at FROM revisions WHERE id = ANY($1::uuid[]) AND is_deleted = false
+`
+
+func (q *Queries) GetRevisionsByIDs(ctx context.Context, dollar_1 []pgtype.UUID) ([]Revision, error) {
+	rows, err := q.db.Query(ctx, getRevisionsByIDs, dollar_1)
+	if err != nil {
+		return nil, err
+	}
+	defer rows.Close()
+	items := []Revision{}
+	for rows.Next() {
+		var i Revision
+		if err := rows.Scan(
+			&i.ID,
+			&i.ProjectID,
+			&i.VersionNo,
+			&i.SnapshotJson,
+			&i.SnapshotHash,
+			&i.ParentID,
+			&i.UserID,
+			&i.EditSummary,
+			&i.IsDeleted,
+			&i.CreatedAt,
+		); err != nil {
+			return nil, err
+		}
+		items = append(items, i)
+	}
+	if err := rows.Err(); err != nil {
+		return nil, err
+	}
+	return items, nil
+}
+
+const searchRevisions = `-- name: SearchRevisions :many
+SELECT id, project_id, version_no, snapshot_json, snapshot_hash, parent_id, user_id, edit_summary, is_deleted, created_at
+FROM revisions
+WHERE is_deleted = false
+  AND ($1::uuid IS NULL OR project_id = $1)
+  AND ($2::uuid IS NULL OR user_id = $2)
+  AND ($3::uuid IS NULL OR version_no < (SELECT r2.version_no FROM revisions r2 WHERE r2.id = $3::uuid))
+ORDER BY version_no DESC
+LIMIT $4
+`
+
+type SearchRevisionsParams struct {
+	ProjectID pgtype.UUID `json:"project_id"`
+	UserID    pgtype.UUID `json:"user_id"`
+	CursorID  pgtype.UUID `json:"cursor_id"`
+	Limit     int32       `json:"limit"`
+}
+
+func (q *Queries) SearchRevisions(ctx context.Context, arg SearchRevisionsParams) ([]Revision, error) {
+	rows, err := q.db.Query(ctx, searchRevisions,
+		arg.ProjectID,
+		arg.UserID,
+		arg.CursorID,
+		arg.Limit,
+	)
+	if err != nil {
+		return nil, err
+	}
+	defer rows.Close()
+	items := []Revision{}
+	for rows.Next() {
+		var i Revision
+		if err := rows.Scan(
+			&i.ID,
+			&i.ProjectID,
+			&i.VersionNo,
+			&i.SnapshotJson,
+			&i.SnapshotHash,
+			&i.ParentID,
+			&i.UserID,
+			&i.EditSummary,
+			&i.IsDeleted,
+			&i.CreatedAt,
+		); err != nil {
+			return nil, err
+		}
+		items = append(items, i)
+	}
+	if err := rows.Err(); err != nil {
+		return nil, err
+	}
+	return items, nil
+}
diff --git a/internal/gen/sqlc/submission.sql.go b/internal/gen/sqlc/submission.sql.go
new file mode 100644
index 0000000..b2a9280
--- /dev/null
+++ b/internal/gen/sqlc/submission.sql.go
@@ -0,0 +1,493 @@
+// Code generated by sqlc. DO NOT EDIT.
+// versions:
+//   sqlc v1.30.0
+// source: submission.sql
+
+package sqlc
+
+import (
+	"context"
+
+	"github.com/jackc/pgx/v5/pgtype"
+)
+
+const countSubmissions = `-- name: CountSubmissions :one
+SELECT count(*)
+FROM submissions s
+WHERE s.is_deleted = false
+  AND ($1::uuid IS NULL OR s.project_id = $1)
+  AND ($2::uuid IS NULL OR s.submitted_by = $2)
+  AND ($3::uuid IS NULL OR s.reviewed_by = $3)
+  AND (
+    $4::text[] IS NULL
+    OR s.status::text = ANY($4::text[])
+  )
+  AND ($5::timestamptz IS NULL OR s.submitted_at >= $5::timestamptz)
+  AND ($6::timestamptz IS NULL OR s.submitted_at <= $6::timestamptz)
+  AND (
+    $7::text IS NULL OR
+    s.id::text ILIKE '%' || $7::text || '%' OR
+    s.review_note ILIKE '%' || $7::text || '%'
+  )
+`
+
+type CountSubmissionsParams struct {
+	ProjectID   pgtype.UUID        `json:"project_id"`
+	SubmittedBy pgtype.UUID        `json:"submitted_by"`
+	ReviewedBy  pgtype.UUID        `json:"reviewed_by"`
+	Statuses    []string           `json:"statuses"`
+	CreatedFrom pgtype.Timestamptz `json:"created_from"`
+	CreatedTo   pgtype.Timestamptz `json:"created_to"`
+	SearchText  pgtype.Text        `json:"search_text"`
+}
+
+func (q *Queries) CountSubmissions(ctx context.Context, arg CountSubmissionsParams) (int64, error) {
+	row := q.db.QueryRow(ctx, countSubmissions,
+		arg.ProjectID,
+		arg.SubmittedBy,
+		arg.ReviewedBy,
+		arg.Statuses,
+		arg.CreatedFrom,
+		arg.CreatedTo,
+		arg.SearchText,
+	)
+	var count int64
+	err := row.Scan(&count)
+	return count, err
+}
+
+const createSubmission = `-- name: CreateSubmission :one
+WITH inserted_submission AS (
+    INSERT INTO submissions (
+        project_id, revision_id, submitted_by, status
+    ) VALUES (
+        $1, $2, $3, $4
+    )
+    RETURNING id, project_id, revision_id, submitted_by, submitted_at, status, reviewed_by, reviewed_at, review_note, is_deleted
+)
+SELECT
+    s.id, s.project_id, s.revision_id, s.submitted_by, s.submitted_at, s.status, s.reviewed_by, s.reviewed_at, s.review_note, s.is_deleted,
+    json_build_object(
+        'id', u.id,
+        'email', u.email,
+        'display_name',
up.display_name, + 'full_name', up.full_name, + 'avatar_url', up.avatar_url + )::json AS submitter, + CASE WHEN s.reviewed_by IS NOT NULL THEN + json_build_object( + 'id', ru.id, + 'email', ru.email, + 'display_name', rup.display_name, + 'full_name', rup.full_name, + 'avatar_url', rup.avatar_url + )::json + ELSE NULL::json END AS reviewer +FROM inserted_submission s +JOIN users u ON s.submitted_by = u.id +LEFT JOIN user_profiles up ON u.id = up.user_id +LEFT JOIN users ru ON s.reviewed_by = ru.id +LEFT JOIN user_profiles rup ON ru.id = rup.user_id +` + +type CreateSubmissionParams struct { + ProjectID pgtype.UUID `json:"project_id"` + RevisionID pgtype.UUID `json:"revision_id"` + SubmittedBy pgtype.UUID `json:"submitted_by"` + Status int16 `json:"status"` +} + +type CreateSubmissionRow struct { + ID pgtype.UUID `json:"id"` + ProjectID pgtype.UUID `json:"project_id"` + RevisionID pgtype.UUID `json:"revision_id"` + SubmittedBy pgtype.UUID `json:"submitted_by"` + SubmittedAt pgtype.Timestamptz `json:"submitted_at"` + Status int16 `json:"status"` + ReviewedBy pgtype.UUID `json:"reviewed_by"` + ReviewedAt pgtype.Timestamptz `json:"reviewed_at"` + ReviewNote pgtype.Text `json:"review_note"` + IsDeleted bool `json:"is_deleted"` + Submitter []byte `json:"submitter"` + Reviewer []byte `json:"reviewer"` +} + +func (q *Queries) CreateSubmission(ctx context.Context, arg CreateSubmissionParams) (CreateSubmissionRow, error) { + row := q.db.QueryRow(ctx, createSubmission, + arg.ProjectID, + arg.RevisionID, + arg.SubmittedBy, + arg.Status, + ) + var i CreateSubmissionRow + err := row.Scan( + &i.ID, + &i.ProjectID, + &i.RevisionID, + &i.SubmittedBy, + &i.SubmittedAt, + &i.Status, + &i.ReviewedBy, + &i.ReviewedAt, + &i.ReviewNote, + &i.IsDeleted, + &i.Submitter, + &i.Reviewer, + ) + return i, err +} + +const deleteSubmission = `-- name: DeleteSubmission :exec +UPDATE submissions +SET is_deleted = true +WHERE id = $1 +` + +func (q *Queries) DeleteSubmission(ctx context.Context, id 
pgtype.UUID) error { + _, err := q.db.Exec(ctx, deleteSubmission, id) + return err +} + +const getSubmissionById = `-- name: GetSubmissionById :one +SELECT + s.id, s.project_id, s.revision_id, s.submitted_by, s.submitted_at, s.status, s.reviewed_by, s.reviewed_at, s.review_note, s.is_deleted, + json_build_object( + 'id', u.id, + 'email', u.email, + 'display_name', up.display_name, + 'full_name', up.full_name, + 'avatar_url', up.avatar_url + )::json AS submitter, + CASE WHEN s.reviewed_by IS NOT NULL THEN + json_build_object( + 'id', ru.id, + 'email', ru.email, + 'display_name', rup.display_name, + 'full_name', rup.full_name, + 'avatar_url', rup.avatar_url + )::json + ELSE NULL::json END AS reviewer +FROM submissions s +JOIN users u ON s.submitted_by = u.id +LEFT JOIN user_profiles up ON u.id = up.user_id +LEFT JOIN users ru ON s.reviewed_by = ru.id +LEFT JOIN user_profiles rup ON ru.id = rup.user_id +WHERE s.id = $1 AND s.is_deleted = false +` + +type GetSubmissionByIdRow struct { + ID pgtype.UUID `json:"id"` + ProjectID pgtype.UUID `json:"project_id"` + RevisionID pgtype.UUID `json:"revision_id"` + SubmittedBy pgtype.UUID `json:"submitted_by"` + SubmittedAt pgtype.Timestamptz `json:"submitted_at"` + Status int16 `json:"status"` + ReviewedBy pgtype.UUID `json:"reviewed_by"` + ReviewedAt pgtype.Timestamptz `json:"reviewed_at"` + ReviewNote pgtype.Text `json:"review_note"` + IsDeleted bool `json:"is_deleted"` + Submitter []byte `json:"submitter"` + Reviewer []byte `json:"reviewer"` +} + +func (q *Queries) GetSubmissionById(ctx context.Context, id pgtype.UUID) (GetSubmissionByIdRow, error) { + row := q.db.QueryRow(ctx, getSubmissionById, id) + var i GetSubmissionByIdRow + err := row.Scan( + &i.ID, + &i.ProjectID, + &i.RevisionID, + &i.SubmittedBy, + &i.SubmittedAt, + &i.Status, + &i.ReviewedBy, + &i.ReviewedAt, + &i.ReviewNote, + &i.IsDeleted, + &i.Submitter, + &i.Reviewer, + ) + return i, err +} + +const getSubmissionsByIDs = `-- name: GetSubmissionsByIDs :many 
+SELECT + s.id, s.project_id, s.revision_id, s.submitted_by, s.submitted_at, s.status, s.reviewed_by, s.reviewed_at, s.review_note, s.is_deleted, + json_build_object( + 'id', u.id, + 'email', u.email, + 'display_name', up.display_name, + 'full_name', up.full_name, + 'avatar_url', up.avatar_url + )::json AS submitter, + CASE WHEN s.reviewed_by IS NOT NULL THEN + json_build_object( + 'id', ru.id, + 'email', ru.email, + 'display_name', rup.display_name, + 'full_name', rup.full_name, + 'avatar_url', rup.avatar_url + )::json + ELSE NULL::json END AS reviewer +FROM submissions s +JOIN users u ON s.submitted_by = u.id +LEFT JOIN user_profiles up ON u.id = up.user_id +LEFT JOIN users ru ON s.reviewed_by = ru.id +LEFT JOIN user_profiles rup ON ru.id = rup.user_id +WHERE s.id = ANY($1::uuid[]) AND s.is_deleted = false +` + +type GetSubmissionsByIDsRow struct { + ID pgtype.UUID `json:"id"` + ProjectID pgtype.UUID `json:"project_id"` + RevisionID pgtype.UUID `json:"revision_id"` + SubmittedBy pgtype.UUID `json:"submitted_by"` + SubmittedAt pgtype.Timestamptz `json:"submitted_at"` + Status int16 `json:"status"` + ReviewedBy pgtype.UUID `json:"reviewed_by"` + ReviewedAt pgtype.Timestamptz `json:"reviewed_at"` + ReviewNote pgtype.Text `json:"review_note"` + IsDeleted bool `json:"is_deleted"` + Submitter []byte `json:"submitter"` + Reviewer []byte `json:"reviewer"` +} + +func (q *Queries) GetSubmissionsByIDs(ctx context.Context, dollar_1 []pgtype.UUID) ([]GetSubmissionsByIDsRow, error) { + rows, err := q.db.Query(ctx, getSubmissionsByIDs, dollar_1) + if err != nil { + return nil, err + } + defer rows.Close() + items := []GetSubmissionsByIDsRow{} + for rows.Next() { + var i GetSubmissionsByIDsRow + if err := rows.Scan( + &i.ID, + &i.ProjectID, + &i.RevisionID, + &i.SubmittedBy, + &i.SubmittedAt, + &i.Status, + &i.ReviewedBy, + &i.ReviewedAt, + &i.ReviewNote, + &i.IsDeleted, + &i.Submitter, + &i.Reviewer, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if 
err := rows.Err(); err != nil {
+		return nil, err
+	}
+	return items, nil
+}
+
+const searchSubmissions = `-- name: SearchSubmissions :many
+SELECT
+    s.id, s.project_id, s.revision_id, s.submitted_by, s.submitted_at, s.status, s.reviewed_by, s.reviewed_at, s.review_note, s.is_deleted,
+    json_build_object(
+        'id', u.id,
+        'email', u.email,
+        'display_name', up.display_name,
+        'full_name', up.full_name,
+        'avatar_url', up.avatar_url
+    )::json AS submitter,
+    CASE WHEN s.reviewed_by IS NOT NULL THEN
+        json_build_object(
+            'id', ru.id,
+            'email', ru.email,
+            'display_name', rup.display_name,
+            'full_name', rup.full_name,
+            'avatar_url', rup.avatar_url
+        )::json
+    ELSE NULL::json END AS reviewer
+FROM submissions s
+JOIN users u ON s.submitted_by = u.id
+LEFT JOIN user_profiles up ON u.id = up.user_id
+LEFT JOIN users ru ON s.reviewed_by = ru.id
+LEFT JOIN user_profiles rup ON ru.id = rup.user_id
+WHERE s.is_deleted = false
+  AND ($1::uuid IS NULL OR s.project_id = $1)
+  AND ($2::uuid IS NULL OR s.submitted_by = $2)
+  AND ($3::uuid IS NULL OR s.reviewed_by = $3)
+  AND (
+    $4::text[] IS NULL
+    OR s.status::text = ANY($4::text[])
+  )
+  AND ($5::timestamptz IS NULL OR s.submitted_at >= $5::timestamptz)
+  AND ($6::timestamptz IS NULL OR s.submitted_at <= $6::timestamptz)
+  AND (
+    $7::text IS NULL OR
+    s.id::text ILIKE '%' || $7::text || '%' OR
+    s.review_note ILIKE '%' || $7::text || '%'
+  )
+ORDER BY
+  CASE WHEN $8 = 'submitted_at' AND $9 = 'asc' THEN s.submitted_at END ASC,
+  CASE WHEN $8 = 'submitted_at' AND $9 = 'desc' THEN s.submitted_at END DESC,
+  CASE WHEN $8 = 'reviewed_at' AND $9 = 'asc' THEN s.reviewed_at END ASC,
+  CASE WHEN $8 = 'reviewed_at' AND $9 = 'desc' THEN s.reviewed_at END DESC,
+  CASE WHEN $8 = 'status' AND $9 = 'asc' THEN s.status END ASC,
+  CASE WHEN $8 = 'status' AND $9 = 'desc' THEN s.status END DESC,
+  CASE WHEN $8 IS NULL THEN s.submitted_at END DESC
+LIMIT $11
+OFFSET $10
+`
+
+type SearchSubmissionsParams struct {
+	ProjectID   pgtype.UUID
`json:"project_id"`
+	SubmittedBy pgtype.UUID        `json:"submitted_by"`
+	ReviewedBy  pgtype.UUID        `json:"reviewed_by"`
+	Statuses    []string           `json:"statuses"`
+	CreatedFrom pgtype.Timestamptz `json:"created_from"`
+	CreatedTo   pgtype.Timestamptz `json:"created_to"`
+	SearchText  pgtype.Text        `json:"search_text"`
+	Sort        interface{}        `json:"sort"`
+	Order       interface{}        `json:"order"`
+	Offset      int32              `json:"offset"`
+	Limit       int32              `json:"limit"`
+}
+
+type SearchSubmissionsRow struct {
+	ID          pgtype.UUID        `json:"id"`
+	ProjectID   pgtype.UUID        `json:"project_id"`
+	RevisionID  pgtype.UUID        `json:"revision_id"`
+	SubmittedBy pgtype.UUID        `json:"submitted_by"`
+	SubmittedAt pgtype.Timestamptz `json:"submitted_at"`
+	Status      int16              `json:"status"`
+	ReviewedBy  pgtype.UUID        `json:"reviewed_by"`
+	ReviewedAt  pgtype.Timestamptz `json:"reviewed_at"`
+	ReviewNote  pgtype.Text        `json:"review_note"`
+	IsDeleted   bool               `json:"is_deleted"`
+	Submitter   []byte             `json:"submitter"`
+	Reviewer    []byte             `json:"reviewer"`
+}
+
+func (q *Queries) SearchSubmissions(ctx context.Context, arg SearchSubmissionsParams) ([]SearchSubmissionsRow, error) {
+	rows, err := q.db.Query(ctx, searchSubmissions,
+		arg.ProjectID,
+		arg.SubmittedBy,
+		arg.ReviewedBy,
+		arg.Statuses,
+		arg.CreatedFrom,
+		arg.CreatedTo,
+		arg.SearchText,
+		arg.Sort,
+		arg.Order,
+		arg.Offset,
+		arg.Limit,
+	)
+	if err != nil {
+		return nil, err
+	}
+	defer rows.Close()
+	items := []SearchSubmissionsRow{}
+	for rows.Next() {
+		var i SearchSubmissionsRow
+		if err := rows.Scan(
+			&i.ID,
+			&i.ProjectID,
+			&i.RevisionID,
+			&i.SubmittedBy,
+			&i.SubmittedAt,
+			&i.Status,
+			&i.ReviewedBy,
+			&i.ReviewedAt,
+			&i.ReviewNote,
+			&i.IsDeleted,
+			&i.Submitter,
+			&i.Reviewer,
+		); err != nil {
+			return nil, err
+		}
+		items = append(items, i)
+	}
+	if err := rows.Err(); err != nil {
+		return nil, err
+	}
+	return items, nil
+}
+
+const updateSubmission = `-- name: UpdateSubmission :one
+UPDATE submissions
+SET
+    status = COALESCE($1, submissions.status),
+    reviewed_by = COALESCE($2, submissions.reviewed_by),
+    reviewed_at = COALESCE($3, submissions.reviewed_at),
+    review_note = COALESCE($4, submissions.review_note)
+FROM submissions s
+JOIN users u ON s.submitted_by = u.id
+LEFT JOIN user_profiles up ON u.id = up.user_id
+LEFT JOIN users ru ON COALESCE($2, s.reviewed_by) = ru.id
+LEFT JOIN user_profiles rup ON ru.id = rup.user_id
+WHERE submissions.id = s.id AND submissions.id = $5
+RETURNING
+    submissions.id, submissions.project_id, submissions.revision_id, submissions.submitted_by, submissions.submitted_at, submissions.status, submissions.reviewed_by, submissions.reviewed_at, submissions.review_note, submissions.is_deleted,
+    json_build_object(
+        'id', u.id,
+        'email', u.email,
+        'display_name', up.display_name,
+        'full_name', up.full_name,
+        'avatar_url', up.avatar_url
+    )::json AS submitter,
+    CASE WHEN submissions.reviewed_by IS NOT NULL THEN
+        json_build_object(
+            'id', ru.id,
+            'email', ru.email,
+            'display_name', rup.display_name,
+            'full_name', rup.full_name,
+            'avatar_url', rup.avatar_url
+        )::json
+    ELSE NULL::json END AS reviewer
+`
+
+type UpdateSubmissionParams struct {
+	Status     pgtype.Int2        `json:"status"`
+	ReviewedBy pgtype.UUID        `json:"reviewed_by"`
+	ReviewedAt pgtype.Timestamptz `json:"reviewed_at"`
+	ReviewNote pgtype.Text        `json:"review_note"`
+	ID         pgtype.UUID        `json:"id"`
+}
+
+type UpdateSubmissionRow struct {
+	ID          pgtype.UUID        `json:"id"`
+	ProjectID   pgtype.UUID        `json:"project_id"`
+	RevisionID  pgtype.UUID        `json:"revision_id"`
+	SubmittedBy pgtype.UUID        `json:"submitted_by"`
+	SubmittedAt pgtype.Timestamptz `json:"submitted_at"`
+	Status      int16              `json:"status"`
+	ReviewedBy  pgtype.UUID        `json:"reviewed_by"`
+	ReviewedAt  pgtype.Timestamptz `json:"reviewed_at"`
+	ReviewNote  pgtype.Text        `json:"review_note"`
+	IsDeleted   bool               `json:"is_deleted"`
+	Submitter   []byte             `json:"submitter"`
+	Reviewer    []byte             `json:"reviewer"`
+}
+
+func (q *Queries) UpdateSubmission(ctx context.Context, arg UpdateSubmissionParams) (UpdateSubmissionRow, error) {
+	row := q.db.QueryRow(ctx, updateSubmission,
+		arg.Status,
+		arg.ReviewedBy,
+		arg.ReviewedAt,
+		arg.ReviewNote,
+		arg.ID,
+	)
+	var i UpdateSubmissionRow
+	err := row.Scan(
+		&i.ID,
+
&i.ProjectID, + &i.RevisionID, + &i.SubmittedBy, + &i.SubmittedAt, + &i.Status, + &i.ReviewedBy, + &i.ReviewedAt, + &i.ReviewNote, + &i.IsDeleted, + &i.Submitter, + &i.Reviewer, + ) + return i, err +} diff --git a/internal/gen/sqlc/users.sql.go b/internal/gen/sqlc/users.sql.go index 658e074..407eaf8 100644 --- a/internal/gen/sqlc/users.sql.go +++ b/internal/gen/sqlc/users.sql.go @@ -341,6 +341,84 @@ func (q *Queries) GetUserByIDWithoutDeleted(ctx context.Context, id pgtype.UUID) return i, err } +const getUsersByIDs = `-- name: GetUsersByIDs :many +SELECT + u.id, + u.email, + u.password_hash, + u.token_version, + u.is_deleted, + u.created_at, + u.updated_at, + ( + SELECT json_build_object( + 'display_name', p.display_name, + 'full_name', p.full_name, + 'avatar_url', p.avatar_url, + 'bio', p.bio, + 'location', p.location, + 'website', p.website, + 'country_code', p.country_code, + 'phone', p.phone + ) + FROM user_profiles p + WHERE p.user_id = u.id + ) AS profile, + ( + SELECT COALESCE( + json_agg(json_build_object('id', r.id, 'name', r.name)), + '[]' + )::json + FROM user_roles ur + JOIN roles r ON ur.role_id = r.id + WHERE ur.user_id = u.id + ) AS roles +FROM users u +WHERE u.id = ANY($1::uuid[]) AND u.is_deleted = false +` + +type GetUsersByIDsRow struct { + ID pgtype.UUID `json:"id"` + Email string `json:"email"` + PasswordHash pgtype.Text `json:"password_hash"` + TokenVersion int32 `json:"token_version"` + IsDeleted bool `json:"is_deleted"` + CreatedAt pgtype.Timestamptz `json:"created_at"` + UpdatedAt pgtype.Timestamptz `json:"updated_at"` + Profile json.RawMessage `json:"profile"` + Roles []byte `json:"roles"` +} + +func (q *Queries) GetUsersByIDs(ctx context.Context, dollar_1 []pgtype.UUID) ([]GetUsersByIDsRow, error) { + rows, err := q.db.Query(ctx, getUsersByIDs, dollar_1) + if err != nil { + return nil, err + } + defer rows.Close() + items := []GetUsersByIDsRow{} + for rows.Next() { + var i GetUsersByIDsRow + if err := rows.Scan( + &i.ID, + &i.Email, + 
&i.PasswordHash, + &i.TokenVersion, + &i.IsDeleted, + &i.CreatedAt, + &i.UpdatedAt, + &i.Profile, + &i.Roles, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + const restoreUser = `-- name: RestoreUser :exec UPDATE users SET diff --git a/internal/gen/sqlc/verification.sql.go b/internal/gen/sqlc/verification.sql.go index 069b5a2..9508556 100644 --- a/internal/gen/sqlc/verification.sql.go +++ b/internal/gen/sqlc/verification.sql.go @@ -111,7 +111,7 @@ SELECT 'full_name', up.full_name, 'avatar_url', up.avatar_url )::json AS user, - NULL::json AS reviewer, -- Khi mới tạo thì reviewer luôn null + NULL::json AS reviewer, '[]'::json AS medias FROM inserted_uv i JOIN users u ON i.user_id = u.id @@ -387,6 +387,101 @@ func (q *Queries) GetUserVerifications(ctx context.Context, userID pgtype.UUID) return items, nil } +const getUserVerificationsByIDs = `-- name: GetUserVerificationsByIDs :many +SELECT + uv.id, uv.verify_type, uv.content, + uv.is_deleted, uv.status, uv.review_note, + uv.reviewed_at, uv.created_at, + json_build_object( + 'id', u.id, + 'email', u.email, + 'display_name', up.display_name, + 'full_name', up.full_name, + 'avatar_url', up.avatar_url + )::json AS user, + CASE WHEN uv.reviewed_by IS NOT NULL THEN + json_build_object( + 'id', ru.id, + 'email', ru.email, + 'display_name', rup.display_name, + 'full_name', rup.full_name, + 'avatar_url', rup.avatar_url + )::json + ELSE NULL::json END AS reviewer, + ( + SELECT COALESCE( + json_agg( + json_build_object( + 'id', m.id, + 'storage_key', m.storage_key, + 'original_name', m.original_name, + 'mime_type', m.mime_type, + 'size', m.size, + 'file_metadata', m.file_metadata, + 'created_at', m.created_at + ) + ), + '[]' + )::json + FROM verification_medias vm + JOIN medias m ON vm.media_id = m.id + WHERE vm.verification_id = uv.id + ) AS medias +FROM user_verifications uv +JOIN users u ON uv.user_id = u.id +LEFT JOIN 
user_profiles up ON u.id = up.user_id +LEFT JOIN users ru ON uv.reviewed_by = ru.id +LEFT JOIN user_profiles rup ON ru.id = rup.user_id +WHERE uv.id = ANY($1::uuid[]) + AND uv.is_deleted = false +` + +type GetUserVerificationsByIDsRow struct { + ID pgtype.UUID `json:"id"` + VerifyType int16 `json:"verify_type"` + Content pgtype.Text `json:"content"` + IsDeleted bool `json:"is_deleted"` + Status int16 `json:"status"` + ReviewNote pgtype.Text `json:"review_note"` + ReviewedAt pgtype.Timestamptz `json:"reviewed_at"` + CreatedAt pgtype.Timestamptz `json:"created_at"` + User []byte `json:"user"` + Reviewer []byte `json:"reviewer"` + Medias []byte `json:"medias"` +} + +func (q *Queries) GetUserVerificationsByIDs(ctx context.Context, dollar_1 []pgtype.UUID) ([]GetUserVerificationsByIDsRow, error) { + rows, err := q.db.Query(ctx, getUserVerificationsByIDs, dollar_1) + if err != nil { + return nil, err + } + defer rows.Close() + items := []GetUserVerificationsByIDsRow{} + for rows.Next() { + var i GetUserVerificationsByIDsRow + if err := rows.Scan( + &i.ID, + &i.VerifyType, + &i.Content, + &i.IsDeleted, + &i.Status, + &i.ReviewNote, + &i.ReviewedAt, + &i.CreatedAt, + &i.User, + &i.Reviewer, + &i.Medias, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + const searchUserVerifications = `-- name: SearchUserVerifications :many SELECT uv.id, diff --git a/internal/gen/sqlc/wiki.sql.go b/internal/gen/sqlc/wiki.sql.go index 7884e47..a7aee32 100644 --- a/internal/gen/sqlc/wiki.sql.go +++ b/internal/gen/sqlc/wiki.sql.go @@ -114,6 +114,37 @@ func (q *Queries) GetWikiById(ctx context.Context, id pgtype.UUID) (Wiki, error) return i, err } +const getWikisByIDs = `-- name: GetWikisByIDs :many +SELECT id, title, content, is_deleted, created_at, updated_at FROM wikis WHERE id = ANY($1::uuid[]) AND is_deleted = false +` + +func (q *Queries) GetWikisByIDs(ctx context.Context, dollar_1 
[]pgtype.UUID) ([]Wiki, error) { + rows, err := q.db.Query(ctx, getWikisByIDs, dollar_1) + if err != nil { + return nil, err + } + defer rows.Close() + items := []Wiki{} + for rows.Next() { + var i Wiki + if err := rows.Scan( + &i.ID, + &i.Title, + &i.Content, + &i.IsDeleted, + &i.CreatedAt, + &i.UpdatedAt, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + const searchWikis = `-- name: SearchWikis :many SELECT w.id, w.title, w.content, w.is_deleted, w.created_at, w.updated_at FROM wikis w diff --git a/internal/middlewares/jwtMiddleware.go b/internal/middlewares/jwtMiddleware.go index ee48610..8a93683 100644 --- a/internal/middlewares/jwtMiddleware.go +++ b/internal/middlewares/jwtMiddleware.go @@ -68,7 +68,7 @@ func jwtSuccess(userRepo repositories.UserRepository) fiber.Handler { return unauthorized() } - if slices.Contains(claims.Roles, constants.BANNED) { + if slices.Contains(claims.Roles, constants.RoleTypeBanned) { return c.Status(fiber.StatusForbidden).JSON(response.CommonResponse{ Status: false, Message: "User account is banned", @@ -119,7 +119,7 @@ func jwtSuccessRefresh() fiber.Handler { return unauthorized() } - if slices.Contains(claims.Roles, constants.BANNED) { + if slices.Contains(claims.Roles, constants.RoleTypeBanned) { return c.Status(fiber.StatusForbidden).JSON(response.CommonResponse{ Status: false, Message: "User account is banned", diff --git a/internal/middlewares/roleMiddleware.go b/internal/middlewares/roleMiddleware.go index 7876b4f..52410f3 100644 --- a/internal/middlewares/roleMiddleware.go +++ b/internal/middlewares/roleMiddleware.go @@ -8,7 +8,7 @@ import ( "github.com/gofiber/fiber/v3" ) -func getRoles(c fiber.Ctx) ([]constants.Role, error) { +func getRoles(c fiber.Ctx) ([]constants.RoleType, error) { claimsVal := c.Locals("user_claims") if claimsVal == nil { return nil, fiber.ErrUnauthorized @@ -22,7 +22,7 @@ func getRoles(c 
fiber.Ctx) ([]constants.Role, error) { return claims.Roles, nil } -func RequireAnyRole(required ...constants.Role) fiber.Handler { +func RequireAnyRole(required ...constants.RoleType) fiber.Handler { return func(c fiber.Ctx) error { userRoles, err := getRoles(c) if err != nil { @@ -43,7 +43,7 @@ func RequireAnyRole(required ...constants.Role) fiber.Handler { } } -func RequireAllRoles(required ...constants.Role) fiber.Handler { +func RequireAllRoles(required ...constants.RoleType) fiber.Handler { return func(c fiber.Ctx) error { userRoles, err := getRoles(c) if err != nil { @@ -61,7 +61,7 @@ func RequireAllRoles(required ...constants.Role) fiber.Handler { } } -func ForbidRoles(forbidden ...constants.Role) fiber.Handler { +func ForbidRoles(forbidden ...constants.RoleType) fiber.Handler { return func(c fiber.Ctx) error { userRoles, err := getRoles(c) if err != nil { diff --git a/internal/models/geometry.go b/internal/models/geometry.go index 9ce7934..ece95d0 100644 --- a/internal/models/geometry.go +++ b/internal/models/geometry.go @@ -3,20 +3,21 @@ package models import ( "encoding/json" "history-api/internal/dtos/response" + "history-api/pkg/constants" "time" ) type GeometryEntity struct { - ID string `json:"id"` - GeoType string `json:"geo_type"` - DrawGeometry json.RawMessage `json:"draw_geometry"` - Binding json.RawMessage `json:"binding"` - TimeStart int32 `json:"time_start"` - TimeEnd int32 `json:"time_end"` - Bbox *response.Bbox `json:"bbox"` - IsDeleted bool `json:"is_deleted"` - CreatedAt *time.Time `json:"created_at"` - UpdatedAt *time.Time `json:"updated_at"` + ID string `json:"id"` + GeoType constants.GeoType `json:"geo_type"` + DrawGeometry json.RawMessage `json:"draw_geometry"` + Binding json.RawMessage `json:"binding"` + TimeStart int32 `json:"time_start"` + TimeEnd int32 `json:"time_end"` + Bbox *response.Bbox `json:"bbox"` + IsDeleted bool `json:"is_deleted"` + CreatedAt *time.Time `json:"created_at"` + UpdatedAt *time.Time `json:"updated_at"` } func 
(g *GeometryEntity) ToResponse() *response.GeometryResponse { @@ -25,7 +26,7 @@ func (g *GeometryEntity) ToResponse() *response.GeometryResponse { } return &response.GeometryResponse{ ID: g.ID, - GeoType: g.GeoType, + GeoType: g.GeoType.String(), DrawGeometry: g.DrawGeometry, Binding: g.Binding, TimeStart: g.TimeStart, diff --git a/internal/models/project.go b/internal/models/project.go new file mode 100644 index 0000000..a355f58 --- /dev/null +++ b/internal/models/project.go @@ -0,0 +1,74 @@ +package models + +import ( + "encoding/json" + "history-api/internal/dtos/response" + "history-api/pkg/constants" + "time" +) + +type ProjectEntity struct { + ID string `json:"id"` + Title string `json:"title"` + Description string `json:"description"` + LatestRevisionID *string `json:"latest_revision_id"` + VersionCount int32 `json:"version_count"` + ProjectStatus constants.ProjectStatusType `json:"project_status"` + LockedBy *string `json:"locked_by"` + IsDeleted bool `json:"is_deleted"` + UserID string `json:"user_id"` + CreatedAt *time.Time `json:"created_at"` + UpdatedAt *time.Time `json:"updated_at"` + User *UserSimpleEntity `json:"user"` + CommitIds []string `json:"commit_ids"` + SubmissionIds []string `json:"submission_ids"` +} + +func (p *ProjectEntity) ParseUser(data []byte) error { + if len(data) == 0 || string(data) == "null" { + p.User = nil + return nil + } + return json.Unmarshal(data, &p.User) +} + +func (p *ProjectEntity) ToResponse() *response.ProjectResponse { + if p == nil { + return nil + } + var userResponse *response.UserSimpleResponse + if p.User != nil { + userResponse = p.User.ToResponse() + } + + return &response.ProjectResponse{ + ID: p.ID, + Title: p.Title, + Description: p.Description, + LatestRevisionID: p.LatestRevisionID, + VersionCount: p.VersionCount, + ProjectStatus: p.ProjectStatus.String(), + LockedBy: p.LockedBy, + IsDeleted: p.IsDeleted, + UserID: p.UserID, + CreatedAt: p.CreatedAt, + UpdatedAt: p.UpdatedAt, + User: userResponse, + 
CommitIds: p.CommitIds, + SubmissionIds: p.SubmissionIds, + } +} + +func ProjectsEntityToResponse(projects []*ProjectEntity) []*response.ProjectResponse { + out := make([]*response.ProjectResponse, 0) + if projects == nil { + return out + } + for _, project := range projects { + if project == nil { + continue + } + out = append(out, project.ToResponse()) + } + return out +} diff --git a/internal/models/role.go b/internal/models/role.go index 73e80c3..a929289 100644 --- a/internal/models/role.go +++ b/internal/models/role.go @@ -74,8 +74,8 @@ func RolesEntityToResponse(rs []*RoleEntity) []*response.RoleResponse { return out } -func RolesEntityToRoleConstant(rs []*RoleSimple) []constants.Role { - out := make([]constants.Role, 0) +func RolesEntityToRoleConstant(rs []*RoleSimple) []constants.RoleType { + out := make([]constants.RoleType, 0) if rs == nil { return out } diff --git a/internal/repositories/entityRepository.go b/internal/repositories/entityRepository.go index 26786f8..5df83b3 100644 --- a/internal/repositories/entityRepository.go +++ b/internal/repositories/entityRepository.go @@ -55,22 +55,46 @@ func (r *entityRepository) getByIDsWithFallback(ctx context.Context, ids []strin var entities []*models.EntityEntity missingToCache := make(map[string]any) + var missingPgIds []pgtype.UUID for i, b := range raws { - if len(b) > 0 { - var e models.EntityEntity - if err := json.Unmarshal(b, &e); err == nil { - entities = append(entities, &e) - } - } else { + if len(b) == 0 { pgId := pgtype.UUID{} err := pgId.Scan(ids[i]) - if err != nil { - continue + if err == nil { + missingPgIds = append(missingPgIds, pgId) } - dbEntity, err := r.GetByID(ctx, pgId) - if err == nil && dbEntity != nil { - entities = append(entities, dbEntity) - missingToCache[keys[i]] = dbEntity + } + } + + dbMap := make(map[string]*models.EntityEntity) + if len(missingPgIds) > 0 { + dbRows, err := r.q.GetEntitiesByIDs(ctx, missingPgIds) + if err == nil { + for _, row := range dbRows { + item := 
models.EntityEntity{ + ID: convert.UUIDToString(row.ID), + Name: row.Name, + Description: convert.TextToString(row.Description), + ThumbnailUrl: convert.TextToString(row.ThumbnailUrl), + IsDeleted: row.IsDeleted, + CreatedAt: convert.TimeToPtr(row.CreatedAt), + UpdatedAt: convert.TimeToPtr(row.UpdatedAt), + } + dbMap[item.ID] = &item + } + } + } + + for i, b := range raws { + if len(b) > 0 { + var u models.EntityEntity + if err := json.Unmarshal(b, &u); err == nil { + entities = append(entities, &u) + } + } else { + if item, ok := dbMap[ids[i]]; ok { + entities = append(entities, item) + missingToCache[keys[i]] = item } } } diff --git a/internal/repositories/geometryRepository.go b/internal/repositories/geometryRepository.go index a21bf37..335b44a 100644 --- a/internal/repositories/geometryRepository.go +++ b/internal/repositories/geometryRepository.go @@ -58,22 +58,54 @@ func (r *geometryRepository) getByIDsWithFallback(ctx context.Context, ids []str var geometries []*models.GeometryEntity missingToCache := make(map[string]any) + var missingPgIds []pgtype.UUID for i, b := range raws { - if len(b) > 0 { - var g models.GeometryEntity - if err := json.Unmarshal(b, &g); err == nil { - geometries = append(geometries, &g) - } - } else { + if len(b) == 0 { pgId := pgtype.UUID{} err := pgId.Scan(ids[i]) - if err != nil { - continue + if err == nil { + missingPgIds = append(missingPgIds, pgId) } - dbGeometry, err := r.GetByID(ctx, pgId) - if err == nil && dbGeometry != nil { - geometries = append(geometries, dbGeometry) - missingToCache[keys[i]] = dbGeometry + } + } + + dbMap := make(map[string]*models.GeometryEntity) + if len(missingPgIds) > 0 { + dbRows, err := r.q.GetGeometriesByIDs(ctx, missingPgIds) + if err == nil { + for _, row := range dbRows { + item := models.GeometryEntity{ + ID: convert.UUIDToString(row.ID), + GeoType: constants.ParseGeoType(row.GeoType), + DrawGeometry: row.DrawGeometry, + Binding: row.Binding, + TimeStart: convert.Int4ToInt32(row.TimeStart), 
+ TimeEnd: convert.Int4ToInt32(row.TimeEnd), + Bbox: &response.Bbox{ + MinLng: row.MinLng, + MinLat: row.MinLat, + MaxLng: row.MaxLng, + MaxLat: row.MaxLat, + }, + IsDeleted: row.IsDeleted, + CreatedAt: convert.TimeToPtr(row.CreatedAt), + UpdatedAt: convert.TimeToPtr(row.UpdatedAt), + } + dbMap[item.ID] = &item + } + } + } + + for i, b := range raws { + if len(b) > 0 { + var u models.GeometryEntity + if err := json.Unmarshal(b, &u); err == nil { + geometries = append(geometries, &u) + } + } else { + if item, ok := dbMap[ids[i]]; ok { + geometries = append(geometries, item) + missingToCache[keys[i]] = item } } } @@ -105,7 +137,7 @@ func (r *geometryRepository) GetByID(ctx context.Context, id pgtype.UUID) (*mode geometry = models.GeometryEntity{ ID: convert.UUIDToString(row.ID), - GeoType: row.GeoType, + GeoType: constants.ParseGeoType(row.GeoType), DrawGeometry: row.DrawGeometry, Binding: row.Binding, TimeStart: convert.Int4ToInt32(row.TimeStart), @@ -143,7 +175,7 @@ func (r *geometryRepository) Search(ctx context.Context, params sqlc.SearchGeome for _, row := range rows { geometry := &models.GeometryEntity{ ID: convert.UUIDToString(row.ID), - GeoType: row.GeoType, + GeoType: constants.ParseGeoType(row.GeoType), DrawGeometry: row.DrawGeometry, Binding: row.Binding, TimeStart: convert.Int4ToInt32(row.TimeStart), @@ -181,7 +213,7 @@ func (r *geometryRepository) Create(ctx context.Context, params sqlc.CreateGeome geometry := models.GeometryEntity{ ID: convert.UUIDToString(row.ID), - GeoType: row.GeoType, + GeoType: constants.ParseGeoType(row.GeoType), DrawGeometry: row.DrawGeometry, Binding: row.Binding, TimeStart: convert.Int4ToInt32(row.TimeStart), @@ -212,7 +244,7 @@ func (r *geometryRepository) Update(ctx context.Context, params sqlc.UpdateGeome } geometry := models.GeometryEntity{ ID: convert.UUIDToString(row.ID), - GeoType: row.GeoType, + GeoType: constants.ParseGeoType(row.GeoType), DrawGeometry: row.DrawGeometry, Binding: row.Binding, TimeStart: 
convert.Int4ToInt32(row.TimeStart), diff --git a/internal/repositories/mediaRepository.go b/internal/repositories/mediaRepository.go index 48c1f0b..d47d755 100644 --- a/internal/repositories/mediaRepository.go +++ b/internal/repositories/mediaRepository.go @@ -56,22 +56,48 @@ func (r *mediaRepository) getByIDsWithFallback(ctx context.Context, ids []string var medias []*models.MediaEntity missingMediasToCache := make(map[string]any) + var missingPgIds []pgtype.UUID for i, b := range raws { - if len(b) > 0 { - var m models.MediaEntity - if err := json.Unmarshal(b, &m); err == nil { - medias = append(medias, &m) - } - } else { + if len(b) == 0 { pgId := pgtype.UUID{} err := pgId.Scan(ids[i]) - if err != nil { - continue + if err == nil { + missingPgIds = append(missingPgIds, pgId) } - dbMedia, err := r.GetByID(ctx, pgId) - if err == nil && dbMedia != nil { - medias = append(medias, dbMedia) - missingMediasToCache[keys[i]] = dbMedia + } + } + + dbMap := make(map[string]*models.MediaEntity) + if len(missingPgIds) > 0 { + dbRows, err := r.q.GetMediaByIDs(ctx, missingPgIds) + if err == nil { + for _, row := range dbRows { + item := models.MediaEntity{ + ID: convert.UUIDToString(row.ID), + UserID: convert.UUIDToString(row.UserID), + StorageKey: row.StorageKey, + OriginalName: row.OriginalName, + MimeType: row.MimeType, + Size: row.Size, + FileMetadata: row.FileMetadata, + CreatedAt: convert.TimeToPtr(row.CreatedAt), + UpdatedAt: convert.TimeToPtr(row.UpdatedAt), + } + dbMap[item.ID] = &item + } + } + } + + for i, b := range raws { + if len(b) > 0 { + var u models.MediaEntity + if err := json.Unmarshal(b, &u); err == nil { + medias = append(medias, &u) + } + } else { + if item, ok := dbMap[ids[i]]; ok { + medias = append(medias, item) + missingMediasToCache[keys[i]] = item } } } diff --git a/internal/repositories/projectRepository.go b/internal/repositories/projectRepository.go new file mode 100644 index 0000000..5193db5 --- /dev/null +++ 
b/internal/repositories/projectRepository.go @@ -0,0 +1,345 @@ +package repositories + +import ( + "context" + "crypto/md5" + "encoding/json" + "fmt" + + "github.com/jackc/pgx/v5/pgtype" + + "history-api/internal/gen/sqlc" + "history-api/internal/models" + "history-api/pkg/cache" + "history-api/pkg/constants" + "history-api/pkg/convert" +) + +type ProjectRepository interface { + GetByID(ctx context.Context, id pgtype.UUID) (*models.ProjectEntity, error) + GetByIDs(ctx context.Context, ids []string) ([]*models.ProjectEntity, error) + GetByUserID(ctx context.Context, params sqlc.GetProjectsByUserIdParams) ([]*models.ProjectEntity, error) + Search(ctx context.Context, params sqlc.SearchProjectsParams) ([]*models.ProjectEntity, error) + Count(ctx context.Context, params sqlc.CountProjectsParams) (int64, error) + Create(ctx context.Context, params sqlc.CreateProjectParams) (*models.ProjectEntity, error) + Update(ctx context.Context, params sqlc.UpdateProjectParams) (*models.ProjectEntity, error) + Delete(ctx context.Context, id pgtype.UUID) error +} + +type projectRepository struct { + q *sqlc.Queries + c cache.Cache +} + +func NewProjectRepository(db sqlc.DBTX, c cache.Cache) ProjectRepository { + return &projectRepository{ + q: sqlc.New(db), + c: c, + } +} + +func (r *projectRepository) generateQueryKey(prefix string, params any) string { + b, _ := json.Marshal(params) + hash := fmt.Sprintf("%x", md5.Sum(b)) + return fmt.Sprintf("%s:%s", prefix, hash) +} + + +func (r *projectRepository) getByIDsWithFallback(ctx context.Context, ids []string) ([]*models.ProjectEntity, error) { + if len(ids) == 0 { + return []*models.ProjectEntity{}, nil + } + keys := make([]string, len(ids)) + for i, id := range ids { + keys[i] = fmt.Sprintf("project:id:%s", id) + } + raws := r.c.MGet(ctx, keys...) 
+ + var projects []*models.ProjectEntity + missingToCache := make(map[string]any) + + var missingPgIds []pgtype.UUID + for i, b := range raws { + if len(b) == 0 { + pgId := pgtype.UUID{} + err := pgId.Scan(ids[i]) + if err == nil { + missingPgIds = append(missingPgIds, pgId) + } + } + } + + dbMap := make(map[string]*models.ProjectEntity) + if len(missingPgIds) > 0 { + dbRows, err := r.q.GetProjectsByIDs(ctx, missingPgIds) + if err == nil { + for _, row := range dbRows { + item := models.ProjectEntity{ + ID: convert.UUIDToString(row.ID), + Title: row.Title, + Description: convert.TextToString(row.Description), + LatestRevisionID: convert.UUIDToStringPtr(row.LatestRevisionID), + VersionCount: row.VersionCount, + ProjectStatus: constants.ParseProjectStatusType(row.ProjectStatus), + LockedBy: convert.UUIDToStringPtr(row.LockedBy), + IsDeleted: row.IsDeleted, + UserID: convert.UUIDToString(row.UserID), + CreatedAt: convert.TimeToPtr(row.CreatedAt), + UpdatedAt: convert.TimeToPtr(row.UpdatedAt), + CommitIds: convert.ListUUIDToString(row.CommitIds), + SubmissionIds: convert.ListUUIDToString(row.SubmissionIds), + } + _ = item.ParseUser(row.User) + dbMap[item.ID] = &item + } + } + } + + for i, b := range raws { + if len(b) > 0 { + var p models.ProjectEntity + if err := json.Unmarshal(b, &p); err == nil { + projects = append(projects, &p) + } + } else { + if item, ok := dbMap[ids[i]]; ok { + projects = append(projects, item) + missingToCache[keys[i]] = item + } + } + } + + if len(missingToCache) > 0 { + _ = r.c.MSet(ctx, missingToCache, constants.NormalCacheDuration) + } + + return projects, nil +} + +func (r *projectRepository) GetByIDs(ctx context.Context, ids []string) ([]*models.ProjectEntity, error) { + return r.getByIDsWithFallback(ctx, ids) +} + +func (r *projectRepository) GetByID(ctx context.Context, id pgtype.UUID) (*models.ProjectEntity, error) { + cacheId := fmt.Sprintf("project:id:%s", convert.UUIDToString(id)) + var project models.ProjectEntity + err := 
r.c.Get(ctx, cacheId, &project) + if err == nil { + _ = r.c.Set(ctx, cacheId, project, constants.NormalCacheDuration) + return &project, nil + } + + row, err := r.q.GetProjectById(ctx, id) + if err != nil { + return nil, err + } + + project = models.ProjectEntity{ + ID: convert.UUIDToString(row.ID), + Title: row.Title, + Description: convert.TextToString(row.Description), + LatestRevisionID: convert.UUIDToStringPtr(row.LatestRevisionID), + VersionCount: row.VersionCount, + ProjectStatus: constants.ParseProjectStatusType(row.ProjectStatus), + LockedBy: convert.UUIDToStringPtr(row.LockedBy), + IsDeleted: row.IsDeleted, + UserID: convert.UUIDToString(row.UserID), + CreatedAt: convert.TimeToPtr(row.CreatedAt), + UpdatedAt: convert.TimeToPtr(row.UpdatedAt), + CommitIds: convert.ListUUIDToString(row.CommitIds), + SubmissionIds: convert.ListUUIDToString(row.SubmissionIds), + } + _ = project.ParseUser(row.User) + + _ = r.c.Set(ctx, cacheId, project, constants.NormalCacheDuration) + + return &project, nil +} + +func (r *projectRepository) GetByUserID(ctx context.Context, params sqlc.GetProjectsByUserIdParams) ([]*models.ProjectEntity, error) { + queryKey := r.generateQueryKey("project:user", params) + var cachedIDs []string + if err := r.c.Get(ctx, queryKey, &cachedIDs); err == nil && len(cachedIDs) > 0 { + return r.getByIDsWithFallback(ctx, cachedIDs) + } + + rows, err := r.q.GetProjectsByUserId(ctx, params) + if err != nil { + return nil, err + } + + var projects []*models.ProjectEntity + var ids []string + projectToCache := make(map[string]any) + + for _, row := range rows { + project := &models.ProjectEntity{ + ID: convert.UUIDToString(row.ID), + Title: row.Title, + Description: convert.TextToString(row.Description), + LatestRevisionID: convert.UUIDToStringPtr(row.LatestRevisionID), + VersionCount: row.VersionCount, + ProjectStatus: constants.ParseProjectStatusType(row.ProjectStatus), + LockedBy: convert.UUIDToStringPtr(row.LockedBy), + IsDeleted: row.IsDeleted, + 
UserID: convert.UUIDToString(row.UserID), + CreatedAt: convert.TimeToPtr(row.CreatedAt), + UpdatedAt: convert.TimeToPtr(row.UpdatedAt), + CommitIds: convert.ListUUIDToString(row.CommitIds), + SubmissionIds: convert.ListUUIDToString(row.SubmissionIds), + } + _ = project.ParseUser(row.User) + + ids = append(ids, project.ID) + projects = append(projects, project) + projectToCache[fmt.Sprintf("project:id:%s", project.ID)] = project + } + + if len(projectToCache) > 0 { + _ = r.c.MSet(ctx, projectToCache, constants.NormalCacheDuration) + } + if len(ids) > 0 { + _ = r.c.Set(ctx, queryKey, ids, constants.ListCacheDuration) + } + + return projects, nil +} + +func (r *projectRepository) Search(ctx context.Context, params sqlc.SearchProjectsParams) ([]*models.ProjectEntity, error) { + queryKey := r.generateQueryKey("project:search", params) + var cachedIDs []string + if err := r.c.Get(ctx, queryKey, &cachedIDs); err == nil && len(cachedIDs) > 0 { + return r.getByIDsWithFallback(ctx, cachedIDs) + } + + rows, err := r.q.SearchProjects(ctx, params) + if err != nil { + return nil, err + } + var projects []*models.ProjectEntity + var ids []string + projectToCache := make(map[string]any) + + for _, row := range rows { + project := &models.ProjectEntity{ + ID: convert.UUIDToString(row.ID), + Title: row.Title, + Description: convert.TextToString(row.Description), + LatestRevisionID: convert.UUIDToStringPtr(row.LatestRevisionID), + VersionCount: row.VersionCount, + ProjectStatus: constants.ParseProjectStatusType(row.ProjectStatus), + LockedBy: convert.UUIDToStringPtr(row.LockedBy), + IsDeleted: row.IsDeleted, + UserID: convert.UUIDToString(row.UserID), + CreatedAt: convert.TimeToPtr(row.CreatedAt), + UpdatedAt: convert.TimeToPtr(row.UpdatedAt), + CommitIds: convert.ListUUIDToString(row.CommitIds), + SubmissionIds: convert.ListUUIDToString(row.SubmissionIds), + } + _ = project.ParseUser(row.User) + + ids = append(ids, project.ID) + projects = append(projects, project) + 
projectToCache[fmt.Sprintf("project:id:%s", project.ID)] = project + } + + if len(projectToCache) > 0 { + _ = r.c.MSet(ctx, projectToCache, constants.NormalCacheDuration) + } + if len(ids) > 0 { + _ = r.c.Set(ctx, queryKey, ids, constants.ListCacheDuration) + } + + return projects, nil +} + +func (r *projectRepository) Count(ctx context.Context, params sqlc.CountProjectsParams) (int64, error) { + queryKey := r.generateQueryKey("project:count", params) + var count int64 + if err := r.c.Get(ctx, queryKey, &count); err == nil { + return count, nil + } + + count, err := r.q.CountProjects(ctx, params) + if err != nil { + return 0, err + } + + _ = r.c.Set(ctx, queryKey, count, constants.NormalCacheDuration) + return count, nil +} + +func (r *projectRepository) Create(ctx context.Context, params sqlc.CreateProjectParams) (*models.ProjectEntity, error) { + row, err := r.q.CreateProject(ctx, params) + if err != nil { + return nil, err + } + + project := models.ProjectEntity{ + ID: convert.UUIDToString(row.ID), + Title: row.Title, + Description: convert.TextToString(row.Description), + LatestRevisionID: convert.UUIDToStringPtr(row.LatestRevisionID), + VersionCount: row.VersionCount, + ProjectStatus: constants.ParseProjectStatusType(row.ProjectStatus), + LockedBy: convert.UUIDToStringPtr(row.LockedBy), + IsDeleted: row.IsDeleted, + UserID: convert.UUIDToString(row.UserID), + CreatedAt: convert.TimeToPtr(row.CreatedAt), + UpdatedAt: convert.TimeToPtr(row.UpdatedAt), + CommitIds: convert.ListUUIDToString(row.CommitIds), + SubmissionIds: convert.ListUUIDToString(row.SubmissionIds), + } + _ = project.ParseUser(row.User) + + _ = r.c.Set(ctx, fmt.Sprintf("project:id:%s", project.ID), project, constants.NormalCacheDuration) + + go func() { + bgCtx := context.Background() + _ = r.c.DelByPattern(bgCtx, "project:search*") + _ = r.c.DelByPattern(bgCtx, "project:user*") + _ = r.c.DelByPattern(bgCtx, "project:count*") + }() + return &project, nil +} + +func (r *projectRepository) 
Update(ctx context.Context, params sqlc.UpdateProjectParams) (*models.ProjectEntity, error) { + row, err := r.q.UpdateProject(ctx, params) + if err != nil { + return nil, err + } + project := models.ProjectEntity{ + ID: convert.UUIDToString(row.ID), + Title: row.Title, + Description: convert.TextToString(row.Description), + LatestRevisionID: convert.UUIDToStringPtr(row.LatestRevisionID), + VersionCount: row.VersionCount, + ProjectStatus: constants.ParseProjectStatusType(row.ProjectStatus), + LockedBy: convert.UUIDToStringPtr(row.LockedBy), + IsDeleted: row.IsDeleted, + UserID: convert.UUIDToString(row.UserID), + CreatedAt: convert.TimeToPtr(row.CreatedAt), + UpdatedAt: convert.TimeToPtr(row.UpdatedAt), + CommitIds: convert.ListUUIDToString(row.CommitIds), + SubmissionIds: convert.ListUUIDToString(row.SubmissionIds), + } + _ = project.ParseUser(row.User) + + _ = r.c.Set(ctx, fmt.Sprintf("project:id:%s", project.ID), project, constants.NormalCacheDuration) + return &project, nil +} + +func (r *projectRepository) Delete(ctx context.Context, id pgtype.UUID) error { + err := r.q.DeleteProject(ctx, id) + if err != nil { + return err + } + _ = r.c.Del(ctx, fmt.Sprintf("project:id:%s", convert.UUIDToString(id))) + go func() { + bgCtx := context.Background() + _ = r.c.DelByPattern(bgCtx, "project:search*") + _ = r.c.DelByPattern(bgCtx, "project:user*") + _ = r.c.DelByPattern(bgCtx, "project:count*") + }() + return nil +} diff --git a/internal/repositories/roleRepository.go b/internal/repositories/roleRepository.go index c50fa26..30eb5a4 100644 --- a/internal/repositories/roleRepository.go +++ b/internal/repositories/roleRepository.go @@ -61,6 +61,34 @@ func (r *roleRepository) getByIDsWithFallback(ctx context.Context, ids []string) var roles []*models.RoleEntity missingRolesToCache := make(map[string]any) + var missingPgIds []pgtype.UUID + for i, b := range raws { + if len(b) == 0 { + pgId := pgtype.UUID{} + err := pgId.Scan(ids[i]) + if err == nil { + missingPgIds = 
append(missingPgIds, pgId) + } + } + } + + dbMap := make(map[string]*models.RoleEntity) + if len(missingPgIds) > 0 { + dbRows, err := r.q.GetRolesByIDs(ctx, missingPgIds) + if err == nil { + for _, row := range dbRows { + item := models.RoleEntity{ + ID: convert.UUIDToString(row.ID), + Name: row.Name, + IsDeleted: row.IsDeleted, + CreatedAt: convert.TimeToPtr(row.CreatedAt), + UpdatedAt: convert.TimeToPtr(row.UpdatedAt), + } + dbMap[item.ID] = &item + } + } + } + for i, b := range raws { if len(b) > 0 { var u models.RoleEntity @@ -68,15 +96,9 @@ func (r *roleRepository) getByIDsWithFallback(ctx context.Context, ids []string) roles = append(roles, &u) } } else { - pgId := pgtype.UUID{} - err := pgId.Scan(ids[i]) - if err != nil { - continue - } - dbRole, err := r.GetByID(ctx, pgId) - if err == nil && dbRole != nil { - roles = append(roles, dbRole) - missingRolesToCache[keys[i]] = dbRole + if item, ok := dbMap[ids[i]]; ok { + roles = append(roles, item) + missingRolesToCache[keys[i]] = item } } } diff --git a/internal/repositories/tokenRepository.go b/internal/repositories/tokenRepository.go index 497c6b3..d0ad9ec 100644 --- a/internal/repositories/tokenRepository.go +++ b/internal/repositories/tokenRepository.go @@ -49,7 +49,7 @@ func (t *tokenRepository) CheckVerified(ctx context.Context, email string, token } func (t *tokenRepository) CreateUploadToken(ctx context.Context, userId string, token *models.TokenUploadEntity) error { - cacheKey := fmt.Sprintf("token:%d:%s:%s", constants.TokenUpload.Value(), userId, token.ID) + cacheKey := fmt.Sprintf("token:%d:%s:%s", constants.TokenTypeUpload.Value(), userId, token.ID) err := t.c.Set(ctx, cacheKey, token, constants.TokenUploadDuration) if err != nil { return err @@ -58,7 +58,7 @@ func (t *tokenRepository) CreateUploadToken(ctx context.Context, userId string, } func (t *tokenRepository) GetUploadToken(ctx context.Context, userId string, id string) (*models.TokenUploadEntity, error) { - cacheKey := 
fmt.Sprintf("token:%d:%s:%s", constants.TokenUpload.Value(), userId, id) + cacheKey := fmt.Sprintf("token:%d:%s:%s", constants.TokenTypeUpload.Value(), userId, id) var token models.TokenUploadEntity err := t.c.Get(ctx, cacheKey, &token) if err != nil { @@ -68,7 +68,7 @@ func (t *tokenRepository) GetUploadToken(ctx context.Context, userId string, id } func (t *tokenRepository) DeleteUploadToken(ctx context.Context, userId string, id string) error { - cacheKey := fmt.Sprintf("token:%d:%s:%s", constants.TokenUpload.Value(), userId, id) + cacheKey := fmt.Sprintf("token:%d:%s:%s", constants.TokenTypeUpload.Value(), userId, id) return t.c.Del(ctx, cacheKey) } diff --git a/internal/repositories/userRepository.go b/internal/repositories/userRepository.go index 1f89adf..d6ede42 100644 --- a/internal/repositories/userRepository.go +++ b/internal/repositories/userRepository.go @@ -63,6 +63,38 @@ func (r *userRepository) getByIDsWithFallback(ctx context.Context, ids []string) var users []*models.UserEntity missingUsersToCache := make(map[string]any) + var missingPgIds []pgtype.UUID + for i, b := range raws { + if len(b) == 0 { + pgId := pgtype.UUID{} + err := pgId.Scan(ids[i]) + if err == nil { + missingPgIds = append(missingPgIds, pgId) + } + } + } + + dbMap := make(map[string]*models.UserEntity) + if len(missingPgIds) > 0 { + dbRows, err := r.q.GetUsersByIDs(ctx, missingPgIds) + if err == nil { + for _, row := range dbRows { + item := models.UserEntity{ + ID: convert.UUIDToString(row.ID), + Email: row.Email, + PasswordHash: convert.TextToString(row.PasswordHash), + TokenVersion: row.TokenVersion, + IsDeleted: row.IsDeleted, + CreatedAt: convert.TimeToPtr(row.CreatedAt), + UpdatedAt: convert.TimeToPtr(row.UpdatedAt), + } + _ = item.ParseRoles(row.Roles) + _ = item.ParseProfile(row.Profile) + dbMap[item.ID] = &item + } + } + } + for i, b := range raws { if len(b) > 0 { var u models.UserEntity @@ -70,15 +102,9 @@ func (r *userRepository) getByIDsWithFallback(ctx 
context.Context, ids []string) users = append(users, &u) } } else { - pgId := pgtype.UUID{} - err := pgId.Scan(ids[i]) - if err != nil { - continue - } - dbUser, err := r.GetByID(ctx, pgId) - if err == nil && dbUser != nil { - users = append(users, dbUser) - missingUsersToCache[keys[i]] = dbUser + if item, ok := dbMap[ids[i]]; ok { + users = append(users, item) + missingUsersToCache[keys[i]] = item } } } diff --git a/internal/repositories/verificationRepository.go b/internal/repositories/verificationRepository.go index 9f69911..f421eb3 100644 --- a/internal/repositories/verificationRepository.go +++ b/internal/repositories/verificationRepository.go @@ -99,6 +99,40 @@ func (v *verificationRepository) getByIDsWithFallback(ctx context.Context, ids [ var verification []*models.UserVerificationEntity missingVerificationToCache := make(map[string]any) + var missingPgIds []pgtype.UUID + for i, b := range raws { + if len(b) == 0 { + pgId := pgtype.UUID{} + err := pgId.Scan(ids[i]) + if err == nil { + missingPgIds = append(missingPgIds, pgId) + } + } + } + + dbMap := make(map[string]*models.UserVerificationEntity) + if len(missingPgIds) > 0 { + dbRows, err := v.q.GetUserVerificationsByIDs(ctx, missingPgIds) + if err == nil { + for _, row := range dbRows { + item := models.UserVerificationEntity{ + ID: convert.UUIDToString(row.ID), + VerifyType: constants.ParseVerifyType(row.VerifyType), + Content: convert.TextToString(row.Content), + IsDeleted: row.IsDeleted, + Status: constants.ParseStatusType(row.Status), + ReviewNote: convert.TextToString(row.ReviewNote), + ReviewedAt: convert.TimeToPtr(row.ReviewedAt), + CreatedAt: convert.TimeToPtr(row.CreatedAt), + } + _ = item.ParseMedia(row.Medias) + _ = item.ParseUser(row.User) + _ = item.ParseReviewer(row.Reviewer) + dbMap[item.ID] = &item + } + } + } + for i, b := range raws { if len(b) > 0 { var u models.UserVerificationEntity @@ -106,15 +140,9 @@ func (v *verificationRepository) getByIDsWithFallback(ctx context.Context, ids [ 
verification = append(verification, &u) } } else { - pgId := pgtype.UUID{} - err := pgId.Scan(ids[i]) - if err != nil { - continue - } - dbUser, err := v.GetByID(ctx, pgId) - if err == nil && dbUser != nil { - verification = append(verification, dbUser) - missingVerificationToCache[keys[i]] = dbUser + if item, ok := dbMap[ids[i]]; ok { + verification = append(verification, item) + missingVerificationToCache[keys[i]] = item } } } diff --git a/internal/repositories/wikiRepository.go b/internal/repositories/wikiRepository.go index 863e12f..3a57d6b 100644 --- a/internal/repositories/wikiRepository.go +++ b/internal/repositories/wikiRepository.go @@ -57,22 +57,45 @@ func (r *wikiRepository) getByIDsWithFallback(ctx context.Context, ids []string) var wikis []*models.WikiEntity missingToCache := make(map[string]any) + var missingPgIds []pgtype.UUID for i, b := range raws { - if len(b) > 0 { - var w models.WikiEntity - if err := json.Unmarshal(b, &w); err == nil { - wikis = append(wikis, &w) - } - } else { + if len(b) == 0 { pgId := pgtype.UUID{} err := pgId.Scan(ids[i]) - if err != nil { - continue + if err == nil { + missingPgIds = append(missingPgIds, pgId) } - dbWiki, err := r.GetByID(ctx, pgId) - if err == nil && dbWiki != nil { - wikis = append(wikis, dbWiki) - missingToCache[keys[i]] = dbWiki + } + } + + dbMap := make(map[string]*models.WikiEntity) + if len(missingPgIds) > 0 { + dbRows, err := r.q.GetWikisByIDs(ctx, missingPgIds) + if err == nil { + for _, row := range dbRows { + item := models.WikiEntity{ + ID: convert.UUIDToString(row.ID), + Title: convert.TextToString(row.Title), + Content: convert.TextToString(row.Content), + IsDeleted: row.IsDeleted, + CreatedAt: convert.TimeToPtr(row.CreatedAt), + UpdatedAt: convert.TimeToPtr(row.UpdatedAt), + } + dbMap[item.ID] = &item + } + } + } + + for i, b := range raws { + if len(b) > 0 { + var u models.WikiEntity + if err := json.Unmarshal(b, &u); err == nil { + wikis = append(wikis, &u) + } + } else { + if item, ok := 
dbMap[ids[i]]; ok { + wikis = append(wikis, item) + missingToCache[keys[i]] = item } } } diff --git a/internal/routes/mediaRoute.go b/internal/routes/mediaRoute.go index 7537859..319b8c9 100644 --- a/internal/routes/mediaRoute.go +++ b/internal/routes/mediaRoute.go @@ -15,7 +15,7 @@ func MediaRoutes(app *fiber.App, controller *controllers.MediaController, userRe route.Post( "/upload", middlewares.JwtAccess(userRepo), - middlewares.RequireAnyRole(constants.ADMIN, constants.MOD), + middlewares.RequireAnyRole(constants.RoleTypeAdmin, constants.RoleTypeMod), controller.UploadServerSide, ) @@ -34,7 +34,7 @@ func MediaRoutes(app *fiber.App, controller *controllers.MediaController, userRe route.Get( "/:id", middlewares.JwtAccess(userRepo), - middlewares.RequireAnyRole(constants.ADMIN, constants.MOD), + middlewares.RequireAnyRole(constants.RoleTypeAdmin, constants.RoleTypeMod), controller.GetMediaByID, ) @@ -47,7 +47,7 @@ func MediaRoutes(app *fiber.App, controller *controllers.MediaController, userRe route.Get( "/", middlewares.JwtAccess(userRepo), - middlewares.RequireAnyRole(constants.ADMIN, constants.MOD), + middlewares.RequireAnyRole(constants.RoleTypeAdmin, constants.RoleTypeMod), controller.SearchMedia, ) route.Delete( diff --git a/internal/routes/projectRoute.go b/internal/routes/projectRoute.go new file mode 100644 index 0000000..9da48d6 --- /dev/null +++ b/internal/routes/projectRoute.go @@ -0,0 +1,41 @@ +package routes + +import ( + "history-api/internal/controllers" + "history-api/internal/middlewares" + "history-api/internal/repositories" + + "github.com/gofiber/fiber/v3" +) + +func ProjectRoutes(app *fiber.App, controller *controllers.ProjectController, userRepo repositories.UserRepository) { + route := app.Group("/projects") + + route.Get( + "/:id", + controller.GetProjectByID, + ) + + route.Put( + "/:id", + middlewares.JwtAccess(userRepo), + controller.UpdateProject, + ) + + route.Delete( + "/:id", + middlewares.JwtAccess(userRepo), + 
controller.DeleteProject, + ) + + route.Get( + "/", + controller.SearchProject, + ) + + route.Post( + "/", + middlewares.JwtAccess(userRepo), + controller.CreateProject, + ) +} diff --git a/internal/routes/userRoute.go b/internal/routes/userRoute.go index 74e39c1..652c5e6 100644 --- a/internal/routes/userRoute.go +++ b/internal/routes/userRoute.go @@ -36,6 +36,12 @@ func UserRoutes(app *fiber.App, controller *controllers.UserController, userRepo controller.GetUserApplication, ) + route.Get( + "/current/project", + middlewares.JwtAccess(userRepo), + controller.GetUserProject, + ) + route.Patch( "/current/password", middlewares.JwtAccess(userRepo), @@ -45,49 +51,56 @@ func UserRoutes(app *fiber.App, controller *controllers.UserController, userRepo route.Get( "/:id", middlewares.JwtAccess(userRepo), - middlewares.RequireAnyRole(constants.ADMIN, constants.MOD), + middlewares.RequireAnyRole(constants.RoleTypeAdmin, constants.RoleTypeMod), controller.GetUserById, ) route.Delete( "/:id", middlewares.JwtAccess(userRepo), - middlewares.RequireAnyRole(constants.ADMIN, constants.MOD), + middlewares.RequireAnyRole(constants.RoleTypeAdmin, constants.RoleTypeMod), controller.DeleteUser, ) route.Get( "/:id/media", middlewares.JwtAccess(userRepo), - middlewares.RequireAnyRole(constants.ADMIN, constants.MOD), + middlewares.RequireAnyRole(constants.RoleTypeAdmin, constants.RoleTypeMod), controller.GetMediaByUserID, ) route.Get( "/:id/application", middlewares.JwtAccess(userRepo), - middlewares.RequireAnyRole(constants.ADMIN, constants.MOD), + middlewares.RequireAnyRole(constants.RoleTypeAdmin, constants.RoleTypeMod), controller.GetVerificationByUserID, ) + route.Get( + "/:id/project", + middlewares.JwtAccess(userRepo), + middlewares.RequireAnyRole(constants.RoleTypeAdmin, constants.RoleTypeMod), + controller.GetProjectByUserID, + ) + route.Patch( "/:id/restore", middlewares.JwtAccess(userRepo), - middlewares.RequireAnyRole(constants.ADMIN, constants.MOD), + 
middlewares.RequireAnyRole(constants.RoleTypeAdmin, constants.RoleTypeMod), controller.RestoreUser, ) route.Patch( "/:id/role", middlewares.JwtAccess(userRepo), - middlewares.RequireAnyRole(constants.ADMIN, constants.MOD), + middlewares.RequireAnyRole(constants.RoleTypeAdmin, constants.RoleTypeMod), controller.ChangeRoleUser, ) route.Get( "/", middlewares.JwtAccess(userRepo), - middlewares.RequireAnyRole(constants.ADMIN, constants.MOD), + middlewares.RequireAnyRole(constants.RoleTypeAdmin, constants.RoleTypeMod), controller.SearchUser, ) diff --git a/internal/routes/verificationRoute.go b/internal/routes/verificationRoute.go index 4747422..9d846d0 100644 --- a/internal/routes/verificationRoute.go +++ b/internal/routes/verificationRoute.go @@ -15,7 +15,7 @@ func VerificationRoutes(app *fiber.App, controller *controllers.VerificationCont route.Get( "/:id", middlewares.JwtAccess(userRepo), - middlewares.RequireAnyRole(constants.ADMIN, constants.MOD), + middlewares.RequireAnyRole(constants.RoleTypeAdmin, constants.RoleTypeMod), controller.GetVerificationByID, ) @@ -28,21 +28,21 @@ func VerificationRoutes(app *fiber.App, controller *controllers.VerificationCont route.Put( "/:id/status", middlewares.JwtAccess(userRepo), - middlewares.RequireAnyRole(constants.ADMIN, constants.MOD), + middlewares.RequireAnyRole(constants.RoleTypeAdmin, constants.RoleTypeMod), controller.UpdateVerificationStatus, ) route.Get( "/", middlewares.JwtAccess(userRepo), - middlewares.RequireAnyRole(constants.ADMIN, constants.MOD), + middlewares.RequireAnyRole(constants.RoleTypeAdmin, constants.RoleTypeMod), controller.SearchVerification, ) route.Post( "/", middlewares.JwtAccess(userRepo), - middlewares.ForbidRoles(constants.HISTORIAN), + middlewares.ForbidRoles(constants.RoleTypeHistorian), controller.CreateVerification, ) diff --git a/internal/services/authService.go b/internal/services/authService.go index 927abde..1a6f71b 100644 --- a/internal/services/authService.go +++ 
b/internal/services/authService.go @@ -136,7 +136,7 @@ func (a *authService) Signin(ctx context.Context, dto *request.SignInDto) (*resp return nil, fiber.NewError(fiber.StatusInternalServerError, err.Error()) } - if user.AuthProvider != constants.LocalProvider.String() && user.PasswordHash == "" { + if user.AuthProvider != constants.ProviderTypeLocal.String() && user.PasswordHash == "" { return nil, fiber.NewError(fiber.StatusUnauthorized, "Please sign in with "+user.AuthProvider) } @@ -219,7 +219,7 @@ func (a *authService) RefreshToken(ctx context.Context, id string, refreshToken roles := models.RolesEntityToRoleConstant(user.Roles) - if slices.Contains(roles, constants.BANNED) { + if slices.Contains(roles, constants.RoleTypeBanned) { return nil, fiber.NewError(fiber.StatusUnauthorized, "User is banned!") } @@ -254,7 +254,7 @@ func (a *authService) Signup(ctx context.Context, dto *request.SignUpDto) (*resp return nil, fiber.NewError(fiber.StatusBadRequest, err.Error()) } - ok, err := a.tokenRepo.CheckVerified(ctx, dto.Email, constants.TokenEmailVerify, dto.TokenID) + ok, err := a.tokenRepo.CheckVerified(ctx, dto.Email, constants.TokenTypeEmailVerify, dto.TokenID) if err != nil { return nil, fiber.NewError(fiber.StatusInternalServerError, err.Error()) } @@ -284,7 +284,7 @@ func (a *authService) Signup(ctx context.Context, dto *request.SignUpDto) (*resp String: string(hashed), Valid: len(hashed) != 0, }, - AuthProvider: constants.LocalProvider.String(), + AuthProvider: constants.ProviderTypeLocal.String(), }, ) if err != nil { @@ -308,7 +308,7 @@ func (a *authService) Signup(ctx context.Context, dto *request.SignUpDto) (*resp if err != nil { return nil, fiber.NewError(fiber.StatusInternalServerError, err.Error()) } - role, err := a.roleRepo.GetByname(ctx, constants.USER.String()) + role, err := a.roleRepo.GetByname(ctx, constants.RoleTypeUser.String()) if err != nil { return nil, fiber.NewError(fiber.StatusInternalServerError, err.Error()) } @@ -352,7 +352,7 @@ func 
(a *authService) Signup(ctx context.Context, dto *request.SignUpDto) (*resp } func (a *authService) ForgotPassword(ctx context.Context, dto *request.ForgotPasswordDto) error { - ok, err := a.tokenRepo.CheckVerified(ctx, dto.Email, constants.TokenPasswordReset, dto.TokenID) + ok, err := a.tokenRepo.CheckVerified(ctx, dto.Email, constants.TokenTypePasswordReset, dto.TokenID) if err != nil { return fiber.NewError(fiber.StatusInternalServerError, err.Error()) } @@ -423,7 +423,7 @@ func (a *authService) SigninWithGoogle(ctx context.Context, dto *request.SigninW ctx, sqlc.UpsertUserParams{ Email: dto.Email, - AuthProvider: constants.GoogleProvider.String(), + AuthProvider: constants.ProviderTypeGoogle.String(), GoogleID: pgtype.Text{ String: dto.Sub, Valid: dto.Sub != "", @@ -454,7 +454,7 @@ func (a *authService) SigninWithGoogle(ctx context.Context, dto *request.SigninW if err != nil { return nil, fiber.NewError(fiber.StatusInternalServerError, err.Error()) } - role, err := a.roleRepo.GetByname(ctx, constants.USER.String()) + role, err := a.roleRepo.GetByname(ctx, constants.RoleTypeUser.String()) if err != nil { return nil, fiber.NewError(fiber.StatusInternalServerError, err.Error()) } @@ -521,8 +521,8 @@ func (a *authService) CreateToken(ctx context.Context, dto *request.CreateTokenD } shouldSend := true - if (dto.TokenType == constants.TokenEmailVerify && user != nil) || - (dto.TokenType == constants.TokenPasswordReset && user == nil) { + if (dto.TokenType == constants.TokenTypeEmailVerify && user != nil) || + (dto.TokenType == constants.TokenTypePasswordReset && user == nil) { shouldSend = false } @@ -575,8 +575,8 @@ func (a *authService) VerifyToken(ctx context.Context, dto *request.VerifyTokenD return nil, fiber.NewError(fiber.StatusInternalServerError, "Internal Server Error") } - if (dto.TokenType == constants.TokenEmailVerify && user != nil) || - (dto.TokenType == constants.TokenPasswordReset && user == nil) { + if (dto.TokenType == 
constants.TokenTypeEmailVerify && user != nil) || + (dto.TokenType == constants.TokenTypePasswordReset && user == nil) { return nil, genericError } diff --git a/internal/services/mediaService.go b/internal/services/mediaService.go index 8456760..e5947f4 100644 --- a/internal/services/mediaService.go +++ b/internal/services/mediaService.go @@ -71,7 +71,7 @@ func (m *mediaService) DeleteMedia(ctx context.Context, claims *response.JWTClai } shoudDelete := false - if slices.Contains(claims.Roles, constants.ADMIN) || slices.Contains(claims.Roles, constants.MOD) || media.UserID == claims.UId { + if slices.Contains(claims.Roles, constants.RoleTypeAdmin) || slices.Contains(claims.Roles, constants.RoleTypeMod) || media.UserID == claims.UId { shoudDelete = true } @@ -95,7 +95,7 @@ func (m *mediaService) BulkDeleteMedia(ctx context.Context, claims *response.JWT return fiber.NewError(fiber.StatusInternalServerError, err.Error()) } shoudDelete := false - if slices.Contains(claims.Roles, constants.ADMIN) || slices.Contains(claims.Roles, constants.MOD) { + if slices.Contains(claims.Roles, constants.RoleTypeAdmin) || slices.Contains(claims.Roles, constants.RoleTypeMod) { shoudDelete = true } listMediaIds := make([]pgtype.UUID, len(listMedia)) diff --git a/internal/services/projectService.go b/internal/services/projectService.go new file mode 100644 index 0000000..3016079 --- /dev/null +++ b/internal/services/projectService.go @@ -0,0 +1,256 @@ +package services + +import ( + "context" + "fmt" + + "github.com/gofiber/fiber/v3" + "github.com/jackc/pgx/v5/pgtype" + "golang.org/x/sync/errgroup" + + "history-api/internal/dtos/request" + "history-api/internal/dtos/response" + "history-api/internal/gen/sqlc" + "history-api/internal/models" + "history-api/internal/repositories" + "history-api/pkg/constants" + "history-api/pkg/convert" +) + +type ProjectService interface { + GetProjectByID(ctx context.Context, id string) (*response.ProjectResponse, error) + GetProjectByUserID(ctx 
context.Context, userID string, dto *request.GetProjectsByUserDto) ([]*response.ProjectResponse, error) + SearchProject(ctx context.Context, dto *request.SearchProjectDto) (*response.PaginatedResponse, error) + DeleteProject(ctx context.Context, id string) error + CreateProject(ctx context.Context, userID string, dto *request.CreateProjectDto) (*response.ProjectResponse, error) + UpdateProject(ctx context.Context, id string, dto *request.UpdateProjectDto) (*response.ProjectResponse, error) +} + +type projectService struct { + projectRepo repositories.ProjectRepository +} + +func NewProjectService(projectRepo repositories.ProjectRepository) ProjectService { + return &projectService{ + projectRepo: projectRepo, + } +} + +func (s *projectService) GetProjectByID(ctx context.Context, id string) (*response.ProjectResponse, error) { + projectUUID, err := convert.StringToUUID(id) + if err != nil { + return nil, fiber.NewError(fiber.StatusBadRequest, "Invalid project ID format") + } + + project, err := s.projectRepo.GetByID(ctx, projectUUID) + if err != nil { + return nil, fiber.NewError(fiber.StatusNotFound, "Project not found") + } + + return project.ToResponse(), nil +} + +func (s *projectService) GetProjectByUserID(ctx context.Context, userID string, dto *request.GetProjectsByUserDto) ([]*response.ProjectResponse, error) { + userUUID, err := convert.StringToUUID(userID) + if err != nil { + return nil, fiber.NewError(fiber.StatusBadRequest, "Invalid user ID format") + } + + limit := int32(20) + if dto.Limit > 0 { + limit = dto.Limit + } + + arg := sqlc.GetProjectsByUserIdParams{ + UserID: userUUID, + Limit: limit, + } + + if dto.CursorID != "" { + if cursorID, err := convert.StringToUUID(dto.CursorID); err == nil { + arg.CursorID = cursorID + } + } + + projects, err := s.projectRepo.GetByUserID(ctx, arg) + if err != nil { + return nil, fiber.NewError(fiber.StatusInternalServerError, err.Error()) + } + + return models.ProjectsEntityToResponse(projects), nil +} + +func (s 
*projectService) fillSearchArgs(arg *sqlc.SearchProjectsParams, dto *request.SearchProjectDto) { + if dto.Sort != "" { + arg.Sort = dto.Sort + } else { + arg.Sort = "updated_at" + } + + arg.Order = "desc" + if dto.Order == "asc" { + arg.Order = "asc" + } + + if len(dto.Statuses) > 0 { + for _, statusStr := range dto.Statuses { + statusType := constants.ParseProjectStatusTypeText(statusStr) + if statusType != constants.ProjectStatusTypeUnknow { + arg.Statuses = append(arg.Statuses, fmt.Sprintf("%d", statusType.Int16())) + } + } + } + + if len(dto.UserIDs) > 0 { + for _, id := range dto.UserIDs { + if u, err := convert.StringToUUID(id); err == nil { + arg.UserIds = append(arg.UserIds, u) + } + } + } + + if dto.CreatedFrom != nil { + arg.CreatedFrom = pgtype.Timestamptz{Time: *dto.CreatedFrom, Valid: true} + } + if dto.CreatedTo != nil { + arg.CreatedTo = pgtype.Timestamptz{Time: *dto.CreatedTo, Valid: true} + } + + if dto.Search != "" { + arg.SearchText = pgtype.Text{String: dto.Search, Valid: true} + } +} + +func (s *projectService) SearchProject(ctx context.Context, dto *request.SearchProjectDto) (*response.PaginatedResponse, error) { + if dto.Page < 1 { + dto.Page = 1 + } + if dto.Limit == 0 { + dto.Limit = 20 + } + offset := (dto.Page - 1) * dto.Limit + + arg := sqlc.SearchProjectsParams{ + Limit: int32(dto.Limit), + Offset: int32(offset), + } + + s.fillSearchArgs(&arg, dto) + + var rows []*models.ProjectEntity + var totalRecords int64 + + g, gCtx := errgroup.WithContext(ctx) + + g.Go(func() error { + var err error + rows, err = s.projectRepo.Search(gCtx, arg) + return err + }) + + g.Go(func() error { + countArg := sqlc.CountProjectsParams{ + Statuses: arg.Statuses, + UserIds: arg.UserIds, + SearchText: arg.SearchText, + CreatedFrom: arg.CreatedFrom, + CreatedTo: arg.CreatedTo, + } + var err error + totalRecords, err = s.projectRepo.Count(gCtx, countArg) + return err + }) + + if err := g.Wait(); err != nil { + return nil, 
fiber.NewError(fiber.StatusInternalServerError, err.Error()) + } + + projects := models.ProjectsEntityToResponse(rows) + + return response.BuildPaginatedResponse(projects, totalRecords, dto.Page, dto.Limit), nil +} + +func (s *projectService) CreateProject(ctx context.Context, userID string, dto *request.CreateProjectDto) (*response.ProjectResponse, error) { + userUUID, err := convert.StringToUUID(userID) + if err != nil { + return nil, fiber.NewError(fiber.StatusBadRequest, "Invalid user ID format") + } + + arg := sqlc.CreateProjectParams{ + Title: dto.Title, + Description: convert.PtrToText(dto.Description), + ProjectStatus: constants.ProjectStatusTypePrivate.Int16(), + UserID: userUUID, + } + + if dto.Status != nil { + statusType := constants.ParseProjectStatusTypeText(*dto.Status) + if statusType == constants.ProjectStatusTypeUnknow { + return nil, fiber.NewError(fiber.StatusBadRequest, "Invalid status type") + } + arg.ProjectStatus = statusType.Int16() + } + + project, err := s.projectRepo.Create(ctx, arg) + if err != nil { + return nil, fiber.NewError(fiber.StatusInternalServerError, "Failed to create project") + } + + return project.ToResponse(), nil +} + +func (s *projectService) UpdateProject(ctx context.Context, id string, dto *request.UpdateProjectDto) (*response.ProjectResponse, error) { + projectUUID, err := convert.StringToUUID(id) + if err != nil { + return nil, fiber.NewError(fiber.StatusBadRequest, "Invalid project ID format") + } + + _, err = s.projectRepo.GetByID(ctx, projectUUID) + if err != nil { + return nil, fiber.NewError(fiber.StatusNotFound, "Project not found") + } + + arg := sqlc.UpdateProjectParams{ + ID: projectUUID, + } + + if dto.Title != nil { + arg.Title = convert.PtrToText(dto.Title) + } + if dto.Description != nil { + arg.Description = convert.PtrToText(dto.Description) + } + if dto.Status != nil { + statusType := constants.ParseProjectStatusTypeText(*dto.Status) + if statusType == constants.ProjectStatusTypeUnknow { + return 
nil, fiber.NewError(fiber.StatusBadRequest, "Invalid status type") + } + arg.Status = pgtype.Int2{Int16: statusType.Int16(), Valid: true} + } + + project, err := s.projectRepo.Update(ctx, arg) + if err != nil { + return nil, fiber.NewError(fiber.StatusInternalServerError, "Failed to update project") + } + + return project.ToResponse(), nil +} + +func (s *projectService) DeleteProject(ctx context.Context, id string) error { + projectUUID, err := convert.StringToUUID(id) + if err != nil { + return fiber.NewError(fiber.StatusBadRequest, "Invalid project ID format") + } + + _, err = s.projectRepo.GetByID(ctx, projectUUID) + if err != nil { + return fiber.NewError(fiber.StatusNotFound, "Project not found") + } + + err = s.projectRepo.Delete(ctx, projectUUID) + if err != nil { + return fiber.NewError(fiber.StatusInternalServerError, "Failed to delete project") + } + + return nil +} diff --git a/internal/services/userService.go b/internal/services/userService.go index c125d0f..c26f2d4 100644 --- a/internal/services/userService.go +++ b/internal/services/userService.go @@ -117,16 +117,16 @@ func (u *userService) ChangeRoleUser(ctx context.Context, userId string, claims hasModRole := false for _, r := range newListRole { - if r.Name == constants.USER.String() { + if r.Name == constants.RoleTypeUser.String() { hasUserRole = true } - if r.Name == constants.ADMIN.String() { + if r.Name == constants.RoleTypeAdmin.String() { hasAdminRole = true } - if r.Name == constants.BANNED.String() { + if r.Name == constants.RoleTypeBanned.String() { hasBannedRole = true } - if r.Name == constants.MOD.String() { + if r.Name == constants.RoleTypeMod.String() { hasModRole = true } } @@ -135,7 +135,7 @@ func (u *userService) ChangeRoleUser(ctx context.Context, userId string, claims return nil, fiber.NewError(fiber.StatusNotFound, "User must have the USER role") } - if slices.Contains(claims.Roles, constants.MOD) && !slices.Contains(claims.Roles, constants.ADMIN) { + if 
slices.Contains(claims.Roles, constants.RoleTypeMod) && !slices.Contains(claims.Roles, constants.RoleTypeAdmin) { if hasAdminRole { return nil, fiber.NewError(fiber.StatusForbidden, "MOD cannot assign ADMIN role to any user") } @@ -149,7 +149,7 @@ func (u *userService) ChangeRoleUser(ctx context.Context, userId string, claims } isTargetAdminOrMod := false for _, r := range user.Roles { - if r.Name == constants.ADMIN.String() || r.Name == constants.MOD.String() { + if r.Name == constants.RoleTypeAdmin.String() || r.Name == constants.RoleTypeMod.String() { isTargetAdminOrMod = true break } @@ -159,7 +159,7 @@ func (u *userService) ChangeRoleUser(ctx context.Context, userId string, claims } } - if slices.Contains(claims.Roles, constants.ADMIN) { + if slices.Contains(claims.Roles, constants.RoleTypeAdmin) { if userId == claims.UId && hasBannedRole { return nil, fiber.NewError(fiber.StatusForbidden, "You can't assign BANNED role to yourself") } diff --git a/internal/services/verificationService.go b/internal/services/verificationService.go index 682bc40..bb0215f 100644 --- a/internal/services/verificationService.go +++ b/internal/services/verificationService.go @@ -53,7 +53,7 @@ func NewVerificationService( func (v *verificationService) CreateVerification(ctx context.Context, userId string, dto *request.CreateUserVerificationDto) (*response.UserVerificationResponse, error) { verifyType := constants.ParseVerifyTypeText(dto.VerifyType) - if verifyType == constants.VerifyUnknown { + if verifyType == constants.VerifyTypeUnknown { return nil, fiber.NewError(fiber.StatusInternalServerError, "Unknown verify type!") } @@ -124,11 +124,11 @@ func (v *verificationService) DeleteVerification(ctx context.Context, claims *re } shoudDelete := false - if slices.Contains(claims.Roles, constants.ADMIN) || slices.Contains(claims.Roles, constants.MOD) { + if slices.Contains(claims.Roles, constants.RoleTypeAdmin) || slices.Contains(claims.Roles, constants.RoleTypeMod) { shoudDelete = true } 
- if verification.User.ID == claims.UId && verification.Status == constants.StatusPending { + if verification.User.ID == claims.UId && verification.Status == constants.StatusTypePending { shoudDelete = true } @@ -182,7 +182,7 @@ func (m *verificationService) fillSearchArgs(arg *sqlc.SearchUserVerificationsPa if len(dto.Statuses) > 0 { for _, id := range dto.Statuses { - if u := constants.ParseStatusTypeText(id); u != constants.StatusUnknown { + if u := constants.ParseStatusTypeText(id); u != constants.StatusTypeUnknown { arg.Statuses = append(arg.Statuses, u.Int16()) } } @@ -190,7 +190,7 @@ func (m *verificationService) fillSearchArgs(arg *sqlc.SearchUserVerificationsPa if len(dto.VerifyTypes) > 0 { for _, id := range dto.VerifyTypes { - if u := constants.ParseVerifyTypeText(id); u != constants.VerifyUnknown { + if u := constants.ParseVerifyTypeText(id); u != constants.VerifyTypeUnknown { arg.VerifyTypes = append(arg.VerifyTypes, u.Int16()) } } @@ -276,7 +276,7 @@ func (v *verificationService) SearchVerification(ctx context.Context, dto *reque func (v *verificationService) UpdateStatusVerification(ctx context.Context, userId string, verificationId string, dto *request.UpdateVerificationStatusDto) (*response.UserVerificationResponse, error) { statusType := constants.ParseStatusTypeText(dto.Status) - if statusType == constants.StatusUnknown { + if statusType == constants.StatusTypeUnknown { return nil, fiber.NewError(fiber.StatusInternalServerError, "Unknown status type!") } verificationUUID, err := convert.StringToUUID(verificationId) @@ -289,7 +289,7 @@ func (v *verificationService) UpdateStatusVerification(ctx context.Context, user return nil, fiber.NewError(fiber.StatusInternalServerError, err.Error()) } - historianRole, err := v.roleRepo.GetByname(ctx, constants.HISTORIAN.String()) + historianRole, err := v.roleRepo.GetByname(ctx, constants.RoleTypeHistorian.String()) if err != nil { return nil, fiber.NewError(fiber.StatusInternalServerError, err.Error()) } @@ 
-304,7 +304,7 @@ func (v *verificationService) UpdateStatusVerification(ctx context.Context, user return nil, fiber.NewError(fiber.StatusInternalServerError, err.Error()) } - if verification.Status != constants.StatusPending { + if verification.Status != constants.StatusTypePending { return nil, fiber.NewError(fiber.StatusBadRequest, "Invalid status!") } @@ -340,7 +340,7 @@ func (v *verificationService) UpdateStatusVerification(ctx context.Context, user Status: statusType, } - if statusType == constants.StatusApproved { + if statusType == constants.StatusTypeApproved { roleIdList := make([]pgtype.UUID, 0) userVerification.Roles = append(userVerification.Roles, historianRole.ToRoleSimple()) diff --git a/pkg/constants/geometry.go b/pkg/constants/geometry.go new file mode 100644 index 0000000..05e9bec --- /dev/null +++ b/pkg/constants/geometry.go @@ -0,0 +1,66 @@ +package constants + +type GeoType int16 + +const ( + GeoTypeID GeoType = 1 + GeoTypeName GeoType = 2 + GeoTypeIcon GeoType = 3 + GeoTypeVariant GeoType = 4 + GeoTypeDescription GeoType = 5 + GeoTypeUnknow GeoType = 0 +) + +func (t GeoType) String() string { + switch t { + case GeoTypeID: + return "ID" + case GeoTypeName: + return "NAME" + case GeoTypeIcon: + return "ICON" + case GeoTypeVariant: + return "VARIANT" + case GeoTypeDescription: + return "DESCRIPTION" + default: + return "UNKNOWN" + } +} + +func ParseGeoTypeText(v string) GeoType { + switch v { + case "ID": + return GeoTypeID + case "NAME": + return GeoTypeName + case "ICON": + return GeoTypeIcon + case "VARIANT": + return GeoTypeVariant + case "DESCRIPTION": + return GeoTypeDescription + default: + return GeoTypeUnknow + } +} +func ParseGeoType(v int16) GeoType { + switch v { + case 1: + return GeoTypeID + case 2: + return GeoTypeName + case 3: + return GeoTypeIcon + case 4: + return GeoTypeVariant + case 5: + return GeoTypeDescription + default: + return GeoTypeUnknow + } +} + +func (t GeoType) Int16() int16 { + return int16(t) +} diff --git
a/pkg/constants/project.go b/pkg/constants/project.go new file mode 100644 index 0000000..48a5181 --- /dev/null +++ b/pkg/constants/project.go @@ -0,0 +1,54 @@ +package constants + +type ProjectStatusType int16 + + +const ( + ProjectStatusTypePrivate ProjectStatusType = 1 + ProjectStatusTypePublic ProjectStatusType = 2 + ProjectStatusTypeArchive ProjectStatusType = 3 + ProjectStatusTypeUnknow ProjectStatusType = 0 +) + +func (t ProjectStatusType) String() string { + switch t { + case ProjectStatusTypePrivate: + return "PRIVATE" + case ProjectStatusTypePublic: + return "PUBLIC" + case ProjectStatusTypeArchive: + return "ARCHIVE" + default: + return "UNKNOWN" + } +} + +func ParseProjectStatusTypeText(v string) ProjectStatusType { + switch v { + case "PRIVATE": + return ProjectStatusTypePrivate + case "PUBLIC": + return ProjectStatusTypePublic + case "ARCHIVE": + return ProjectStatusTypeArchive + default: + return ProjectStatusTypeUnknow + } +} + +func (t ProjectStatusType) Int16() int16 { + return int16(t) +} + +func ParseProjectStatusType(v int16) ProjectStatusType { + switch v { + case 1: + return ProjectStatusTypePrivate + case 2: + return ProjectStatusTypePublic + case 3: + return ProjectStatusTypeArchive + default: + return ProjectStatusTypeUnknow + } +} diff --git a/pkg/constants/provider.go b/pkg/constants/provider.go index b422627..5bf3f7e 100644 --- a/pkg/constants/provider.go +++ b/pkg/constants/provider.go @@ -3,10 +3,10 @@ package constants type ProviderType string const ( - GoogleProvider ProviderType = "google" - GithubProvider ProviderType = "github" - FacebookProvider ProviderType = "facebook" - LocalProvider ProviderType = "local" + ProviderTypeGoogle ProviderType = "google" + ProviderTypeGithub ProviderType = "github" + ProviderTypeFacebook ProviderType = "facebook" + ProviderTypeLocal ProviderType = "local" ) func (p ProviderType) String() string { diff --git a/pkg/constants/role.go b/pkg/constants/role.go index
ba1381d..afc879d 100644 --- a/pkg/constants/role.go +++ b/pkg/constants/role.go @@ -1,39 +1,39 @@ package constants -type Role string +type RoleType string const ( - ADMIN Role = "ADMIN" - MOD Role = "MOD" - USER Role = "USER" - HISTORIAN Role = "HISTORIAN" - BANNED Role = "BANNED" + RoleTypeAdmin RoleType = "ADMIN" + RoleTypeMod RoleType = "MOD" + RoleTypeUser RoleType = "USER" + RoleTypeHistorian RoleType = "HISTORIAN" + RoleTypeBanned RoleType = "BANNED" ) -func (r Role) String() string { +func (r RoleType) String() string { return string(r) } -func (r Role) Compare(other Role) bool { +func (r RoleType) Compare(other RoleType) bool { return r == other } -func (r Role) IsValid() bool { +func (r RoleType) IsValid() bool { return CheckValidRole(r) } -func CheckValidRole(r Role) bool { - return r == ADMIN || r == MOD || r == HISTORIAN || r == USER || r == BANNED +func CheckValidRole(r RoleType) bool { + return r == RoleTypeAdmin || r == RoleTypeMod || r == RoleTypeHistorian || r == RoleTypeUser || r == RoleTypeBanned } -func ParseRole(s string) (Role, bool) { - r := Role(s) +func ParseRole(s string) (RoleType, bool) { + r := RoleType(s) if CheckValidRole(r) { return r, true } return "", false } -func (r Role) ToSlice() []Role { - return []Role{r} +func (r RoleType) ToSlice() []RoleType { + return []RoleType{r} } diff --git a/pkg/constants/status.go b/pkg/constants/status.go index a6db104..ed7d1ed 100644 --- a/pkg/constants/status.go +++ b/pkg/constants/status.go @@ -3,19 +3,19 @@ package constants type StatusType int16 const ( - StatusUnknown StatusType = 0 - StatusPending StatusType = 1 - StatusApproved StatusType = 2 - StatusRejected StatusType = 3 + StatusTypeUnknown StatusType = 0 + StatusTypePending StatusType = 1 + StatusTypeApproved StatusType = 2 + StatusTypeRejected StatusType = 3 ) func (t StatusType) String() string { switch t { - case StatusPending: + case StatusTypePending: return "PENDING" - case StatusApproved: + case StatusTypeApproved: return 
"APPROVED" - case StatusRejected: + case StatusTypeRejected: return "REJECTED" default: return "UNKNOWN" @@ -29,25 +29,25 @@ func (t StatusType) Int16() int16 { func ParseStatusType(v int16) StatusType { switch v { case 1: - return StatusPending + return StatusTypePending case 2: - return StatusApproved + return StatusTypeApproved case 3: - return StatusRejected + return StatusTypeRejected default: - return StatusUnknown + return StatusTypeUnknown } } func ParseStatusTypeText(v string) StatusType { switch v { case "PENDING": - return StatusPending + return StatusTypePending case "APPROVED": - return StatusApproved + return StatusTypeApproved case "REJECTED": - return StatusRejected + return StatusTypeRejected default: - return StatusUnknown + return StatusTypeUnknown } } diff --git a/pkg/constants/sream.go b/pkg/constants/stream.go similarity index 100% rename from pkg/constants/sream.go rename to pkg/constants/stream.go diff --git a/pkg/constants/token.go b/pkg/constants/token.go index 6aac59b..59b2263 100644 --- a/pkg/constants/token.go +++ b/pkg/constants/token.go @@ -3,21 +3,21 @@ package constants type TokenType int16 const ( - TokenPasswordReset TokenType = 1 - TokenEmailVerify TokenType = 2 - TokenMagicLink TokenType = 3 - TokenUpload TokenType = 4 + TokenTypePasswordReset TokenType = 1 + TokenTypeEmailVerify TokenType = 2 + TokenTypeMagicLink TokenType = 3 + TokenTypeUpload TokenType = 4 ) func (t TokenType) String() string { switch t { - case TokenPasswordReset: + case TokenTypePasswordReset: return "PASSWORD_RESET" - case TokenEmailVerify: + case TokenTypeEmailVerify: return "EMAIL_VERIFY" - case TokenMagicLink: + case TokenTypeMagicLink: return "LOGIN_MAGIC_LINK" - case TokenUpload: + case TokenTypeUpload: return "UPLOAD" default: return "UNKNOWN" @@ -31,13 +31,13 @@ func (t TokenType) Value() int16 { func ParseTokenType(v int16) TokenType { switch v { case 1: - return TokenPasswordReset + return TokenTypePasswordReset case 2: - return TokenEmailVerify + 
return TokenTypeEmailVerify case 3: - return TokenMagicLink + return TokenTypeMagicLink case 4: - return TokenUpload + return TokenTypeUpload default: return 0 } @@ -46,13 +46,13 @@ func ParseTokenType(v int16) TokenType { func ParseTokenTypeFromString(s string) TokenType { switch s { case "PASSWORD_RESET": - return TokenPasswordReset + return TokenTypePasswordReset case "EMAIL_VERIFY": - return TokenEmailVerify + return TokenTypeEmailVerify case "LOGIN_MAGIC_LINK": - return TokenMagicLink + return TokenTypeMagicLink case "UPLOAD": - return TokenUpload + return TokenTypeUpload default: return 0 } diff --git a/pkg/constants/verify.go b/pkg/constants/verify.go index 8b120df..e7b52ce 100644 --- a/pkg/constants/verify.go +++ b/pkg/constants/verify.go @@ -3,22 +3,22 @@ package constants type VerifyType int16 const ( - VerifyUnknown VerifyType = 0 - VerifyIdCard VerifyType = 1 - VerifyEducation VerifyType = 2 - VerifyExpert VerifyType = 3 - VerifyOther VerifyType = 4 + VerifyTypeUnknown VerifyType = 0 + VerifyTypeIdCard VerifyType = 1 + VerifyTypeEducation VerifyType = 2 + VerifyTypeExpert VerifyType = 3 + VerifyTypeOther VerifyType = 4 ) func (t VerifyType) String() string { switch t { - case VerifyIdCard: + case VerifyTypeIdCard: return "ID_CARD" - case VerifyEducation: + case VerifyTypeEducation: return "EDUCATION" - case VerifyExpert: + case VerifyTypeExpert: return "EXPERT" - case VerifyOther: + case VerifyTypeOther: return "OTHER" default: return "UNKNOWN" @@ -32,29 +32,29 @@ func (t VerifyType) Int16() int16 { func ParseVerifyType(v int16) VerifyType { switch v { case 1: - return VerifyIdCard + return VerifyTypeIdCard case 2: - return VerifyEducation + return VerifyTypeEducation case 3: - return VerifyExpert + return VerifyTypeExpert case 4: - return VerifyOther + return VerifyTypeOther default: - return VerifyUnknown + return VerifyTypeUnknown } } func ParseVerifyTypeText(v string) VerifyType { switch v { case "ID_CARD": - return VerifyIdCard + return 
VerifyTypeIdCard case "EDUCATION": - return VerifyEducation + return VerifyTypeEducation case "EXPERT": - return VerifyExpert + return VerifyTypeExpert case "OTHER": - return VerifyOther + return VerifyTypeOther default: - return VerifyUnknown + return VerifyTypeUnknown } } diff --git a/pkg/convert/convert.go b/pkg/convert/convert.go index 2c53763..dbcc18c 100644 --- a/pkg/convert/convert.go +++ b/pkg/convert/convert.go @@ -13,6 +13,27 @@ func UUIDToString(v pgtype.UUID) string { return "" } +func UUIDToStringPtr(v pgtype.UUID) *string { + if v.Valid { + str := v.String() + return &str + } + return nil +} + +func ListUUIDToString(v []pgtype.UUID) []string { + if len(v) == 0 { + return []string{} + } + res := make([]string, 0, len(v)) + for _, u := range v { + if u.Valid { + res = append(res, UUIDToString(u)) + } + } + return res +} + func StringToUUID(s string) (pgtype.UUID, error) { var pgId pgtype.UUID err := pgId.Scan(s) diff --git a/pkg/database/seed.go b/pkg/database/seed.go index 6ab22a1..0546da5 100644 --- a/pkg/database/seed.go +++ b/pkg/database/seed.go @@ -53,7 +53,7 @@ func SeedSuperAdmin(pool *pgxpool.Pool) error { String: string(hashed), Valid: len(hashed) != 0, }, - AuthProvider: constants.LocalProvider.String(), + AuthProvider: constants.ProviderTypeLocal.String(), }) if err != nil { return err @@ -70,12 +70,12 @@ func SeedSuperAdmin(pool *pgxpool.Pool) error { return err } - adminRole, err := q.GetRoleByName(ctx, constants.ADMIN.String()) + adminRole, err := q.GetRoleByName(ctx, constants.RoleTypeAdmin.String()) if err != nil { return err } - useRole, err := q.GetRoleByName(ctx, constants.USER.String()) + useRole, err := q.GetRoleByName(ctx, constants.RoleTypeUser.String()) if err != nil { return err } diff --git a/pkg/email/email.go b/pkg/email/email.go index 1bbaf52..ff4e90e 100644 --- a/pkg/email/email.go +++ b/pkg/email/email.go @@ -66,10 +66,10 @@ func SendMailOTP(dto *models.TokenEntity) error { var templatePath string switch dto.TokenType { - 
case constants.TokenPasswordReset: + case constants.TokenTypePasswordReset: subject = "Your Password Reset Code" templatePath = "resources/password_reset.html" - case constants.TokenEmailVerify: + case constants.TokenTypeEmailVerify: subject = "Verify your email address" templatePath = "resources/email_verify.html" default: @@ -86,11 +86,11 @@ func SendHistorianReviewMail(dto *models.UserVerificationStorageEntity) error { var templatePath string feUrl := config.GetConfigWithDefault("FRONTEND_URL", "http://localhost:3000") switch dto.Status { - case constants.StatusApproved: + case constants.StatusTypeApproved: subject = "Your Historian Application is Approved" templatePath = "resources/historian_approved.html" - case constants.StatusRejected: + case constants.StatusTypeRejected: subject = "Your Historian Application is Rejected" templatePath = "resources/historian_rejected.html"