diff --git a/db/migrations/000006_entities.down.sql b/db/migrations/000006_entities.down.sql index f29ac13..53a1d92 100644 --- a/db/migrations/000006_entities.down.sql +++ b/db/migrations/000006_entities.down.sql @@ -1,2 +1 @@ -DROP TABLE IF EXISTS entity_types; DROP TABLE IF EXISTS entities; \ No newline at end of file diff --git a/db/migrations/000006_entities.up.sql b/db/migrations/000006_entities.up.sql index fb7def1..9c0bbff 100644 --- a/db/migrations/000006_entities.up.sql +++ b/db/migrations/000006_entities.up.sql @@ -1,48 +1,22 @@ +CREATE EXTENSION IF NOT EXISTS pg_trgm; + -CREATE TABLE IF NOT EXISTS entity_types ( - id SMALLSERIAL PRIMARY KEY, - name TEXT UNIQUE NOT NULL -); CREATE TABLE IF NOT EXISTS entities ( id UUID PRIMARY KEY DEFAULT uuidv7(), - type_id SMALLINT REFERENCES entity_types(id), name TEXT NOT NULL, - slug TEXT UNIQUE, description TEXT, thumbnail_url TEXT, - status SMALLINT DEFAULT 1, -- 1 draft, 2 published is_deleted BOOLEAN NOT NULL DEFAULT false, - reviewed_by UUID REFERENCES users(id), - reviewed_at TIMESTAMPTZ, created_at TIMESTAMPTZ DEFAULT now(), updated_at TIMESTAMPTZ DEFAULT now() ); -CREATE UNIQUE INDEX uniq_entities_slug_active -ON entities(slug) -WHERE is_deleted = false; - -CREATE INDEX idx_entities_type -ON entities(type_id) -WHERE is_deleted = false; - - -CREATE INDEX idx_entities_status_created -ON entities(status, created_at DESC) -WHERE is_deleted = false; - - -CREATE INDEX idx_entities_type_status -ON entities(type_id, status) -WHERE is_deleted = false; - - -CREATE INDEX idx_entities_reviewed_by -ON entities(reviewed_by) -WHERE is_deleted = false; - CREATE INDEX idx_entities_name_search -ON entities USING gin (name gin_trgm_ops); +ON entities USING GIN (name gin_trgm_ops); + +CREATE INDEX idx_entities_created_active +ON entities(created_at DESC) +WHERE is_deleted = false; CREATE TRIGGER trigger_entities_updated_at BEFORE UPDATE ON entities diff --git a/db/migrations/000007_wiki.down.sql b/db/migrations/000007_wiki.down.sql index c920df3..3efaebf 100644 --- 
a/db/migrations/000007_wiki.down.sql +++ b/db/migrations/000007_wiki.down.sql @@ -1,2 +1,2 @@ -DROP TABLE IF EXISTS wiki_pages; -DROP TABLE IF EXISTS wiki_versions; \ No newline at end of file +DROP TABLE IF EXISTS entity_wikis; +DROP TABLE IF EXISTS wikis; \ No newline at end of file diff --git a/db/migrations/000007_wiki.up.sql b/db/migrations/000007_wiki.up.sql index b09273c..bc57885 100644 --- a/db/migrations/000007_wiki.up.sql +++ b/db/migrations/000007_wiki.up.sql @@ -1,17 +1,30 @@ +CREATE EXTENSION IF NOT EXISTS pg_trgm; + CREATE TABLE IF NOT EXISTS wikis ( id UUID PRIMARY KEY DEFAULT uuidv7(), - entity_id UUID REFERENCES entities(id) ON DELETE CASCADE, - user_id UUID REFERENCES users(id), title TEXT, - is_deleted BOOLEAN NOT NULL DEFAULT false, - note TEXT, content TEXT, + is_deleted BOOLEAN NOT NULL DEFAULT false, created_at TIMESTAMPTZ DEFAULT now(), updated_at TIMESTAMPTZ DEFAULT now() ); -CREATE INDEX idx_wiki_entity -ON wikis(entity_id) + +CREATE TABLE IF NOT EXISTS entity_wikis ( + entity_id UUID REFERENCES entities(id) ON DELETE CASCADE, + wiki_id UUID REFERENCES wikis(id) ON DELETE CASCADE, + PRIMARY KEY (entity_id, wiki_id) +); + +CREATE INDEX idx_entity_wikis_wiki_id +ON entity_wikis(wiki_id); + +CREATE INDEX idx_wikis_created_active +ON wikis(created_at DESC) +WHERE is_deleted = false; + +CREATE INDEX idx_wikis_title_search +ON wikis USING GIN (title gin_trgm_ops) WHERE is_deleted = false; CREATE TRIGGER trigger_wikis_updated_at diff --git a/db/migrations/000008_geometries.down.sql b/db/migrations/000008_geometries.down.sql index 1cf7dc4..c040e78 100644 --- a/db/migrations/000008_geometries.down.sql +++ b/db/migrations/000008_geometries.down.sql @@ -1,3 +1,2 @@ -DROP TABLE IF EXISTS geometries; -DROP TABLE IF EXISTS geo_versions; -DROP TABLE IF EXISTS entity_geometries; \ No newline at end of file +DROP TABLE IF EXISTS entity_geometries; +DROP TABLE IF EXISTS geometries; \ No newline at end of file diff --git a/db/migrations/000008_geometries.up.sql b/db/migrations/000008_geometries.up.sql index 07b8813..9a88d22 100644 --- 
a/db/migrations/000008_geometries.up.sql +++ b/db/migrations/000008_geometries.up.sql @@ -1,27 +1,22 @@ CREATE EXTENSION IF NOT EXISTS btree_gist; +CREATE EXTENSION IF NOT EXISTS postgis; CREATE TABLE IF NOT EXISTS geometries ( id UUID PRIMARY KEY DEFAULT uuidv7(), - geom GEOMETRY, -- point / polygon / line + geo_type VARCHAR(50) NOT NULL DEFAULT 'id', + draw_geometry JSONB NOT NULL, + binding JSONB, time_start INT, time_end INT, + bbox GEOMETRY, is_deleted BOOLEAN NOT NULL DEFAULT false, - bbox GEOMETRY, -- optional created_at TIMESTAMPTZ DEFAULT now(), updated_at TIMESTAMPTZ DEFAULT now() ); -CREATE TABLE IF NOT EXISTS geo_versions ( - id UUID PRIMARY KEY DEFAULT uuidv7(), - geo_id UUID REFERENCES geometries(id) ON DELETE CASCADE, - created_user UUID REFERENCES users(id), - geom GEOMETRY, - is_deleted BOOLEAN NOT NULL DEFAULT false, - note TEXT, - reviewed_by UUID REFERENCES users(id), - reviewed_at TIMESTAMPTZ, - created_at TIMESTAMPTZ DEFAULT now() -); +ALTER TABLE geometries DROP CONSTRAINT IF EXISTS check_geo_type; +ALTER TABLE geometries ADD CONSTRAINT check_geo_type +CHECK (geo_type IN ('id', 'name', 'icon', 'variant', 'description')); CREATE TABLE IF NOT EXISTS entity_geometries ( entity_id UUID REFERENCES entities(id) ON DELETE CASCADE, @@ -29,34 +24,27 @@ CREATE TABLE IF NOT EXISTS entity_geometries ( PRIMARY KEY (entity_id, geometry_id) ); -CREATE INDEX idx_geom_spatial_active -ON geometries USING GIST (geom) -WHERE is_deleted = false; +CREATE INDEX idx_geom_draw_geometry +ON geometries USING GIN (draw_geometry); CREATE INDEX idx_geom_bbox ON geometries USING GIST (bbox) WHERE is_deleted = false; CREATE INDEX idx_geom_time_range -ON geometries -USING GIST (int4range(time_start, time_end)) -WHERE is_deleted = false; - -CREATE INDEX idx_geo_versions_geo_id -ON geo_versions(geo_id) -WHERE is_deleted = false; - -CREATE INDEX idx_geo_versions_reviewed_by -ON geo_versions(reviewed_by) -WHERE is_deleted = false; - -CREATE INDEX idx_geo_versions_created_at -ON 
geo_versions(created_at DESC) +ON geometries USING GIST (int4range(time_start, time_end)) WHERE is_deleted = false; CREATE INDEX idx_entity_geometries_geometry ON entity_geometries(geometry_id); +CREATE INDEX idx_geom_binding +ON geometries USING GIN (binding); + +CREATE INDEX idx_geom_updated_at +ON geometries (updated_at DESC) +WHERE is_deleted = false; + CREATE TRIGGER trigger_geometries_updated_at BEFORE UPDATE ON geometries FOR EACH ROW diff --git a/db/query/entities.sql b/db/query/entities.sql new file mode 100644 index 0000000..ada5136 --- /dev/null +++ b/db/query/entities.sql @@ -0,0 +1,40 @@ +-- name: CreateEntity :one +INSERT INTO entities ( + name, description, thumbnail_url +) VALUES ( + $1, $2, $3 +) +RETURNING *; + + +-- name: GetEntityById :one +SELECT * +FROM entities +WHERE id = $1 AND is_deleted = false; + + +-- name: UpdateEntity :one +UPDATE entities +SET + name = COALESCE(sqlc.narg('name'), name), + description = COALESCE(sqlc.narg('description'), description), + thumbnail_url = COALESCE(sqlc.narg('thumbnail_url'), thumbnail_url) +WHERE id = sqlc.arg('id') AND is_deleted = false +RETURNING *; + + +-- name: DeleteEntity :exec +UPDATE entities +SET + is_deleted = true +WHERE id = $1; + + +-- name: SearchEntities :many +SELECT * +FROM entities +WHERE is_deleted = false + AND name ILIKE '%' || sqlc.arg('name')::text || '%' + AND (sqlc.narg('cursor_id')::uuid IS NULL OR id < sqlc.narg('cursor_id')::uuid) +ORDER BY id DESC +LIMIT sqlc.arg('limit_count'); \ No newline at end of file diff --git a/db/query/geometries.sql b/db/query/geometries.sql new file mode 100644 index 0000000..85cff67 --- /dev/null +++ b/db/query/geometries.sql @@ -0,0 +1,90 @@ +-- name: CreateGeometry :one +INSERT INTO geometries ( + geo_type, draw_geometry, binding, time_start, time_end, bbox +) VALUES ( + $1, $2, $3, $4, $5, ST_MakeEnvelope(sqlc.arg('min_lng')::float8, sqlc.arg('min_lat')::float8, sqlc.arg('max_lng')::float8, sqlc.arg('max_lat')::float8, 4326) +) +RETURNING id, 
geo_type, draw_geometry, binding, time_start, time_end, + ST_XMin(bbox)::float8 as min_lng, ST_YMin(bbox)::float8 as min_lat, ST_XMax(bbox)::float8 as max_lng, ST_YMax(bbox)::float8 as max_lat, + is_deleted, created_at, updated_at; + +-- name: GetGeometryById :one +SELECT id, geo_type, draw_geometry, binding, time_start, time_end, + ST_XMin(bbox)::float8 as min_lng, ST_YMin(bbox)::float8 as min_lat, ST_XMax(bbox)::float8 as max_lng, ST_YMax(bbox)::float8 as max_lat, + is_deleted, created_at, updated_at +FROM geometries +WHERE id = $1 AND is_deleted = false; + +-- name: UpdateGeometry :one +UPDATE geometries +SET + geo_type = COALESCE(sqlc.narg('geo_type'), geo_type), + draw_geometry = COALESCE(sqlc.narg('draw_geometry'), draw_geometry), + binding = COALESCE(sqlc.narg('binding'), binding), + time_start = COALESCE(sqlc.narg('time_start'), time_start), + time_end = COALESCE(sqlc.narg('time_end'), time_end), + bbox = CASE + WHEN sqlc.narg('update_bbox')::boolean = true THEN + ST_MakeEnvelope(sqlc.narg('min_lng')::float8, sqlc.narg('min_lat')::float8, sqlc.narg('max_lng')::float8, sqlc.narg('max_lat')::float8, 4326) + ELSE bbox + END, + updated_at = now() +WHERE id = sqlc.arg('id') AND is_deleted = false +RETURNING id, geo_type, draw_geometry, binding, time_start, time_end, + ST_XMin(bbox)::float8 as min_lng, ST_YMin(bbox)::float8 as min_lat, ST_XMax(bbox)::float8 as max_lng, ST_YMax(bbox)::float8 as max_lat, + is_deleted, created_at, updated_at; + +-- name: DeleteGeometry :exec +UPDATE geometries +SET + is_deleted = true +WHERE id = $1; + +-- name: SearchGeometries :many +SELECT + g.id, g.geo_type, g.draw_geometry, g.binding, g.time_start, g.time_end, + ST_XMin(g.bbox)::float8 as min_lng, + ST_YMin(g.bbox)::float8 as min_lat, + ST_XMax(g.bbox)::float8 as max_lng, + ST_YMax(g.bbox)::float8 as max_lat, + g.is_deleted, g.created_at, g.updated_at +FROM geometries g +WHERE g.is_deleted = false + AND ( + sqlc.narg('search_min_lng')::float8 IS NULL OR + 
sqlc.narg('search_min_lat')::float8 IS NULL OR + sqlc.narg('search_max_lng')::float8 IS NULL OR + sqlc.narg('search_max_lat')::float8 IS NULL OR + g.bbox && ST_MakeEnvelope( + sqlc.narg('search_min_lng')::float8, + sqlc.narg('search_min_lat')::float8, + sqlc.narg('search_max_lng')::float8, + sqlc.narg('search_max_lat')::float8, + 4326 + ) + ) + AND ( + sqlc.narg('time_point')::int IS NULL OR + (g.time_start <= sqlc.narg('time_point')::int AND g.time_end >= sqlc.narg('time_point')::int) + ) + AND ( + sqlc.narg('entity_id')::uuid IS NULL OR + EXISTS ( + SELECT 1 + FROM entity_geometries eg + WHERE eg.geometry_id = g.id + AND eg.entity_id = sqlc.narg('entity_id')::uuid + ) + ) +ORDER BY g.id DESC; + +-- name: BulkDeleteEntityGeometriesByEntityId :many +DELETE FROM entity_geometries +WHERE entity_id = $1 +RETURNING geometry_id; + +-- name: CreateEntityGeometries :exec +INSERT INTO entity_geometries ( + entity_id, geometry_id +) +SELECT $1, unnest(@geometry_ids::uuid[]); \ No newline at end of file diff --git a/db/query/wiki.sql b/db/query/wiki.sql new file mode 100644 index 0000000..75d46f4 --- /dev/null +++ b/db/query/wiki.sql @@ -0,0 +1,56 @@ +-- name: CreateWiki :one +INSERT INTO wikis ( + title, content +) VALUES ( + $1, $2 +) +RETURNING *; + +-- name: GetWikiById :one +SELECT * +FROM wikis +WHERE id = $1 AND is_deleted = false; + +-- name: UpdateWiki :one +UPDATE wikis +SET + title = COALESCE(sqlc.narg('title'), title), + content = COALESCE(sqlc.narg('content'), content) +WHERE id = sqlc.arg('id') AND is_deleted = false +RETURNING *; + +-- name: DeleteWiki :exec +UPDATE wikis +SET + is_deleted = true +WHERE id = $1; + +-- name: SearchWikis :many +SELECT w.* +FROM wikis w +WHERE w.is_deleted = false + AND w.title ILIKE '%' || sqlc.arg('title')::text || '%' + AND ( + sqlc.narg('entity_id')::uuid IS NULL OR + EXISTS ( + SELECT 1 + FROM entity_wikis ew + WHERE ew.wiki_id = w.id + AND ew.entity_id = sqlc.narg('entity_id')::uuid + ) + ) + AND 
(sqlc.narg('cursor_id')::uuid IS NULL OR w.id < sqlc.narg('cursor_id')::uuid) + +ORDER BY w.id DESC +LIMIT sqlc.arg('limit_count'); + +-- name: BulkDeleteEntityWikisByEntityId :many +DELETE FROM entity_wikis +WHERE entity_id = $1 +RETURNING wiki_id; + +-- name: CreateEntityWikis :exec +INSERT INTO entity_wikis ( + entity_id, wiki_id +) +SELECT $1, unnest(@wiki_ids::uuid[]); diff --git a/db/schema.sql b/db/schema.sql index f604503..74dec71 100644 --- a/db/schema.sql +++ b/db/schema.sql @@ -68,4 +68,49 @@ CREATE TABLE IF NOT EXISTS verification_medias ( verification_id UUID REFERENCES user_verifications(id) ON DELETE CASCADE, media_id UUID REFERENCES medias(id) ON DELETE CASCADE, PRIMARY KEY (verification_id, media_id) +); + +CREATE TABLE IF NOT EXISTS entities ( + id UUID PRIMARY KEY DEFAULT uuidv7(), + name TEXT NOT NULL, + description TEXT, + thumbnail_url TEXT, + is_deleted BOOLEAN NOT NULL DEFAULT false, + created_at TIMESTAMPTZ DEFAULT now(), + updated_at TIMESTAMPTZ DEFAULT now() +); + +CREATE TABLE IF NOT EXISTS wikis ( + id UUID PRIMARY KEY DEFAULT uuidv7(), + title TEXT, + content TEXT, + is_deleted BOOLEAN NOT NULL DEFAULT false, + created_at TIMESTAMPTZ DEFAULT now(), + updated_at TIMESTAMPTZ DEFAULT now() +); + + +CREATE TABLE IF NOT EXISTS entity_wikis ( + entity_id UUID REFERENCES entities(id) ON DELETE CASCADE, + wiki_id UUID REFERENCES wikis(id) ON DELETE CASCADE, + PRIMARY KEY (entity_id, wiki_id) +); + +CREATE TABLE IF NOT EXISTS geometries ( + id UUID PRIMARY KEY DEFAULT uuidv7(), + geo_type VARCHAR(50) NOT NULL DEFAULT 'id', + draw_geometry JSONB NOT NULL, + binding JSONB, + time_start INT, + time_end INT, + bbox GEOMETRY, + is_deleted BOOLEAN NOT NULL DEFAULT false, + created_at TIMESTAMPTZ DEFAULT now(), + updated_at TIMESTAMPTZ DEFAULT now() +); + +CREATE TABLE IF NOT EXISTS entity_geometries ( + entity_id UUID REFERENCES entities(id) ON DELETE CASCADE, + geometry_id UUID REFERENCES geometries(id) ON DELETE CASCADE, + PRIMARY KEY (entity_id, 
geometry_id) ); \ No newline at end of file diff --git a/docs/docs.go b/docs/docs.go index 62175ab..7dd7f7b 100644 --- a/docs/docs.go +++ b/docs/docs.go @@ -399,6 +399,204 @@ const docTemplate = `{ } } }, + "/entities": { + "get": { + "description": "Search entities with cursor pagination", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Entities" + ], + "summary": "Search entities", + "parameters": [ + { + "type": "string", + "name": "cursor", + "in": "query" + }, + { + "maximum": 100, + "minimum": 1, + "type": "integer", + "name": "limit", + "in": "query" + }, + { + "maxLength": 255, + "type": "string", + "name": "name", + "in": "query" + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + } + } + } + }, + "/entities/{id}": { + "get": { + "description": "Get detailed information about a specific entity", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Entities" + ], + "summary": "Get entity by ID", + "parameters": [ + { + "type": "string", + "description": "Entity ID", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + } + } + } + }, + "/geometries": { + "get": { + "description": "Search geometries with cursor pagination and spatial filtering", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Geometries" + ], + "summary": "Search geometries", + "parameters": [ + 
{ + "type": "string", + "name": "entity_id", + "in": "query" + }, + { + "maximum": 90, + "minimum": -90, + "type": "number", + "name": "maxLat", + "in": "query", + "required": true + }, + { + "maximum": 180, + "minimum": -180, + "type": "number", + "name": "maxLng", + "in": "query", + "required": true + }, + { + "maximum": 90, + "minimum": -90, + "type": "number", + "name": "minLat", + "in": "query", + "required": true + }, + { + "maximum": 180, + "minimum": -180, + "type": "number", + "name": "minLng", + "in": "query", + "required": true + }, + { + "type": "integer", + "name": "time", + "in": "query" + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + } + } + } + }, + "/geometries/{id}": { + "get": { + "description": "Get detailed information about a specific geometry", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Geometries" + ], + "summary": "Get geometry by ID", + "parameters": [ + { + "type": "string", + "description": "Geometry ID", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + } + } + } + }, "/historian/application": { "get": { "security": [ @@ -1933,6 +2131,98 @@ const docTemplate = `{ } } } + }, + "/wikis": { + "get": { + "description": "Search wikis with cursor pagination", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Wikis" + ], + "summary": "Search wikis", + "parameters": [ + { + "type": 
"string", + "name": "cursor", + "in": "query" + }, + { + "type": "string", + "name": "entity_id", + "in": "query" + }, + { + "maximum": 100, + "minimum": 1, + "type": "integer", + "name": "limit", + "in": "query" + }, + { + "maxLength": 1000, + "type": "string", + "name": "title", + "in": "query" + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + } + } + } + }, + "/wikis/{id}": { + "get": { + "description": "Get detailed information about a specific wiki", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Wikis" + ], + "summary": "Get wiki by ID", + "parameters": [ + { + "type": "string", + "description": "Wiki ID", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + } + } + } } }, "definitions": { diff --git a/docs/swagger.json b/docs/swagger.json index 52f573c..8d2d6ab 100644 --- a/docs/swagger.json +++ b/docs/swagger.json @@ -392,6 +392,204 @@ } } }, + "/entities": { + "get": { + "description": "Search entities with cursor pagination", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Entities" + ], + "summary": "Search entities", + "parameters": [ + { + "type": "string", + "name": "cursor", + "in": "query" + }, + { + "maximum": 100, + "minimum": 1, + "type": "integer", + "name": "limit", + "in": "query" + }, + { + "maxLength": 255, + "type": "string", + "name": "name", + "in": "query" + } + ], + "responses": { + 
"200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + } + } + } + }, + "/entities/{id}": { + "get": { + "description": "Get detailed information about a specific entity", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Entities" + ], + "summary": "Get entity by ID", + "parameters": [ + { + "type": "string", + "description": "Entity ID", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + } + } + } + }, + "/geometries": { + "get": { + "description": "Search geometries with cursor pagination and spatial filtering", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Geometries" + ], + "summary": "Search geometries", + "parameters": [ + { + "type": "string", + "name": "entity_id", + "in": "query" + }, + { + "maximum": 90, + "minimum": -90, + "type": "number", + "name": "maxLat", + "in": "query", + "required": true + }, + { + "maximum": 180, + "minimum": -180, + "type": "number", + "name": "maxLng", + "in": "query", + "required": true + }, + { + "maximum": 90, + "minimum": -90, + "type": "number", + "name": "minLat", + "in": "query", + "required": true + }, + { + "maximum": 180, + "minimum": -180, + "type": "number", + "name": "minLng", + "in": "query", + "required": true + }, + { + "type": "integer", + "name": "time", + "in": "query" + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": 
"#/definitions/history-api_internal_dtos_response.CommonResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + } + } + } + }, + "/geometries/{id}": { + "get": { + "description": "Get detailed information about a specific geometry", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Geometries" + ], + "summary": "Get geometry by ID", + "parameters": [ + { + "type": "string", + "description": "Geometry ID", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + } + } + } + }, "/historian/application": { "get": { "security": [ @@ -1926,6 +2124,98 @@ } } } + }, + "/wikis": { + "get": { + "description": "Search wikis with cursor pagination", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Wikis" + ], + "summary": "Search wikis", + "parameters": [ + { + "type": "string", + "name": "cursor", + "in": "query" + }, + { + "type": "string", + "name": "entity_id", + "in": "query" + }, + { + "maximum": 100, + "minimum": 1, + "type": "integer", + "name": "limit", + "in": "query" + }, + { + "maxLength": 1000, + "type": "string", + "name": "title", + "in": "query" + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + } + } + } + }, + "/wikis/{id}": { + "get": { + "description": "Get detailed information about a specific wiki", 
+ "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Wikis" + ], + "summary": "Get wiki by ID", + "parameters": [ + { + "type": "string", + "description": "Wiki ID", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/history-api_internal_dtos_response.CommonResponse" + } + } + } + } } }, "definitions": { diff --git a/docs/swagger.yaml b/docs/swagger.yaml index 1dd2a93..5010e3e 100644 --- a/docs/swagger.yaml +++ b/docs/swagger.yaml @@ -493,6 +493,138 @@ paths: summary: Verify a security token tags: - Auth + /entities: + get: + consumes: + - application/json + description: Search entities with cursor pagination + parameters: + - in: query + name: cursor + type: string + - in: query + maximum: 100 + minimum: 1 + name: limit + type: integer + - in: query + maxLength: 255 + name: name + type: string + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/history-api_internal_dtos_response.CommonResponse' + "500": + description: Internal Server Error + schema: + $ref: '#/definitions/history-api_internal_dtos_response.CommonResponse' + summary: Search entities + tags: + - Entities + /entities/{id}: + get: + consumes: + - application/json + description: Get detailed information about a specific entity + parameters: + - description: Entity ID + in: path + name: id + required: true + type: string + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/history-api_internal_dtos_response.CommonResponse' + "500": + description: Internal Server Error + schema: + $ref: '#/definitions/history-api_internal_dtos_response.CommonResponse' + summary: Get entity by ID + tags: + - Entities + 
/geometries: + get: + consumes: + - application/json + description: Search geometries with cursor pagination and spatial filtering + parameters: + - in: query + name: entity_id + type: string + - in: query + maximum: 90 + minimum: -90 + name: maxLat + required: true + type: number + - in: query + maximum: 180 + minimum: -180 + name: maxLng + required: true + type: number + - in: query + maximum: 90 + minimum: -90 + name: minLat + required: true + type: number + - in: query + maximum: 180 + minimum: -180 + name: minLng + required: true + type: number + - in: query + name: time + type: integer + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/history-api_internal_dtos_response.CommonResponse' + "500": + description: Internal Server Error + schema: + $ref: '#/definitions/history-api_internal_dtos_response.CommonResponse' + summary: Search geometries + tags: + - Geometries + /geometries/{id}: + get: + consumes: + - application/json + description: Get detailed information about a specific geometry + parameters: + - description: Geometry ID + in: path + name: id + required: true + type: string + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/history-api_internal_dtos_response.CommonResponse' + "500": + description: Internal Server Error + schema: + $ref: '#/definitions/history-api_internal_dtos_response.CommonResponse' + summary: Get geometry by ID + tags: + - Geometries /historian/application: get: consumes: @@ -1480,6 +1612,66 @@ paths: summary: Change user password tags: - Users + /wikis: + get: + consumes: + - application/json + description: Search wikis with cursor pagination + parameters: + - in: query + name: cursor + type: string + - in: query + name: entity_id + type: string + - in: query + maximum: 100 + minimum: 1 + name: limit + type: integer + - in: query + maxLength: 1000 + name: title + type: string + produces: + - application/json + responses: 
+ "200": + description: OK + schema: + $ref: '#/definitions/history-api_internal_dtos_response.CommonResponse' + "500": + description: Internal Server Error + schema: + $ref: '#/definitions/history-api_internal_dtos_response.CommonResponse' + summary: Search wikis + tags: + - Wikis + /wikis/{id}: + get: + consumes: + - application/json + description: Get detailed information about a specific wiki + parameters: + - description: Wiki ID + in: path + name: id + required: true + type: string + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/history-api_internal_dtos_response.CommonResponse' + "500": + description: Internal Server Error + schema: + $ref: '#/definitions/history-api_internal_dtos_response.CommonResponse' + summary: Get wiki by ID + tags: + - Wikis securityDefinitions: BearerAuth: description: Type "Bearer " followed by a space and JWT token. diff --git a/internal/controllers/entityController.go b/internal/controllers/entityController.go new file mode 100644 index 0000000..9fb08dc --- /dev/null +++ b/internal/controllers/entityController.go @@ -0,0 +1,83 @@ +package controllers + +import ( + "context" + "history-api/internal/dtos/request" + "history-api/internal/dtos/response" + "history-api/internal/services" + "history-api/pkg/validator" + "time" + + "github.com/gofiber/fiber/v3" +) + +type EntityController struct { + service services.EntityService +} + +func NewEntityController(svc services.EntityService) *EntityController { + return &EntityController{service: svc} +} + +// GetEntityById handles fetching a single entity by ID. 
+// @Summary Get entity by ID +// @Description Get detailed information about a specific entity +// @Tags Entities +// @Accept json +// @Produce json +// @Param id path string true "Entity ID" +// @Success 200 {object} response.CommonResponse +// @Failure 500 {object} response.CommonResponse +// @Router /entities/{id} [get] +func (h *EntityController) GetEntityById(c fiber.Ctx) error { + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + id := c.Params("id") + res, err := h.service.GetEntityByID(ctx, id) + if err != nil { + return c.Status(fiber.StatusInternalServerError).JSON(response.CommonResponse{ + Status: false, + Message: err.Error(), + }) + } + return c.Status(fiber.StatusOK).JSON(response.CommonResponse{ + Status: true, + Data: res, + }) +} + +// SearchEntities handles searching for entities. +// @Summary Search entities +// @Description Search entities with cursor pagination +// @Tags Entities +// @Accept json +// @Produce json +// @Param query query request.SearchEntityDto false "Search Query" +// @Success 200 {object} response.CommonResponse +// @Failure 500 {object} response.CommonResponse +// @Router /entities [get] +func (h *EntityController) SearchEntities(c fiber.Ctx) error { + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + + dto := &request.SearchEntityDto{} + if err := validator.ValidateQueryDto(c, dto); err != nil { + return c.Status(fiber.StatusBadRequest).JSON(response.CommonResponse{ + Status: false, + Errors: err, + }) + } + + res, err := h.service.SearchEntities(ctx, dto) + if err != nil { + return c.Status(fiber.StatusInternalServerError).JSON(response.CommonResponse{ + Status: false, + Message: err.Error(), + }) + } + + return c.Status(fiber.StatusOK).JSON(response.CommonResponse{ + Status: true, + Data: res, + }) +} diff --git a/internal/controllers/geometryController.go b/internal/controllers/geometryController.go new file mode 100644 index 
0000000..981e0ef --- /dev/null +++ b/internal/controllers/geometryController.go @@ -0,0 +1,83 @@ +package controllers + +import ( + "context" + "history-api/internal/dtos/request" + "history-api/internal/dtos/response" + "history-api/internal/services" + "history-api/pkg/validator" + "time" + + "github.com/gofiber/fiber/v3" +) + +type GeometryController struct { + service services.GeometryService +} + +func NewGeometryController(svc services.GeometryService) *GeometryController { + return &GeometryController{service: svc} +} + +// GetGeometryById handles fetching a single geometry by ID. +// @Summary Get geometry by ID +// @Description Get detailed information about a specific geometry +// @Tags Geometries +// @Accept json +// @Produce json +// @Param id path string true "Geometry ID" +// @Success 200 {object} response.CommonResponse +// @Failure 500 {object} response.CommonResponse +// @Router /geometries/{id} [get] +func (h *GeometryController) GetGeometryById(c fiber.Ctx) error { + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + id := c.Params("id") + res, err := h.service.GetGeometryByID(ctx, id) + if err != nil { + return c.Status(fiber.StatusInternalServerError).JSON(response.CommonResponse{ + Status: false, + Message: err.Error(), + }) + } + return c.Status(fiber.StatusOK).JSON(response.CommonResponse{ + Status: true, + Data: res, + }) +} + +// SearchGeometries handles searching for geometries. 
+// @Summary Search geometries +// @Description Search geometries with cursor pagination and spatial filtering +// @Tags Geometries +// @Accept json +// @Produce json +// @Param query query request.SearchGeometryDto false "Search Query" +// @Success 200 {object} response.CommonResponse +// @Failure 500 {object} response.CommonResponse +// @Router /geometries [get] +func (h *GeometryController) SearchGeometries(c fiber.Ctx) error { + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + + dto := &request.SearchGeometryDto{} + if err := validator.ValidateQueryDto(c, dto); err != nil { + return c.Status(fiber.StatusBadRequest).JSON(response.CommonResponse{ + Status: false, + Errors: err, + }) + } + + res, err := h.service.SearchGeometries(ctx, dto) + if err != nil { + return c.Status(fiber.StatusInternalServerError).JSON(response.CommonResponse{ + Status: false, + Message: err.Error(), + }) + } + + return c.Status(fiber.StatusOK).JSON(response.CommonResponse{ + Status: true, + Data: res, + }) +} diff --git a/internal/controllers/wikiController.go b/internal/controllers/wikiController.go new file mode 100644 index 0000000..85e9a9e --- /dev/null +++ b/internal/controllers/wikiController.go @@ -0,0 +1,83 @@ +package controllers + +import ( + "context" + "history-api/internal/dtos/request" + "history-api/internal/dtos/response" + "history-api/internal/services" + "history-api/pkg/validator" + "time" + + "github.com/gofiber/fiber/v3" +) + +type WikiController struct { + service services.WikiService +} + +func NewWikiController(svc services.WikiService) *WikiController { + return &WikiController{service: svc} +} + +// GetWikiById handles fetching a single wiki by ID. 
+// @Summary Get wiki by ID +// @Description Get detailed information about a specific wiki +// @Tags Wikis +// @Accept json +// @Produce json +// @Param id path string true "Wiki ID" +// @Success 200 {object} response.CommonResponse +// @Failure 500 {object} response.CommonResponse +// @Router /wikis/{id} [get] +func (h *WikiController) GetWikiById(c fiber.Ctx) error { + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + id := c.Params("id") + res, err := h.service.GetWikiByID(ctx, id) + if err != nil { + return c.Status(fiber.StatusInternalServerError).JSON(response.CommonResponse{ + Status: false, + Message: err.Error(), + }) + } + return c.Status(fiber.StatusOK).JSON(response.CommonResponse{ + Status: true, + Data: res, + }) +} + +// SearchWikis handles searching for wikis. +// @Summary Search wikis +// @Description Search wikis with cursor pagination +// @Tags Wikis +// @Accept json +// @Produce json +// @Param query query request.SearchWikiDto false "Search Query" +// @Success 200 {object} response.CommonResponse +// @Failure 500 {object} response.CommonResponse +// @Router /wikis [get] +func (h *WikiController) SearchWikis(c fiber.Ctx) error { + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + + dto := &request.SearchWikiDto{} + if err := validator.ValidateQueryDto(c, dto); err != nil { + return c.Status(fiber.StatusBadRequest).JSON(response.CommonResponse{ + Status: false, + Errors: err, + }) + } + + res, err := h.service.SearchWikis(ctx, dto) + if err != nil { + return c.Status(fiber.StatusInternalServerError).JSON(response.CommonResponse{ + Status: false, + Message: err.Error(), + }) + } + + return c.Status(fiber.StatusOK).JSON(response.CommonResponse{ + Status: true, + Data: res, + }) +} diff --git a/internal/dtos/request/entity.go b/internal/dtos/request/entity.go new file mode 100644 index 0000000..8cc52ee --- /dev/null +++ b/internal/dtos/request/entity.go @@ -0,0 +1,7 
@@ +package request + +type SearchEntityDto struct { + Cursor string `json:"cursor" query:"cursor" validate:"omitempty,uuid"` + Limit int `json:"limit" query:"limit" validate:"omitempty,min=1,max=100"` + Name string `json:"name" query:"name" validate:"omitempty,max=255"` +} diff --git a/internal/dtos/request/geometry.go b/internal/dtos/request/geometry.go new file mode 100644 index 0000000..0d7af7b --- /dev/null +++ b/internal/dtos/request/geometry.go @@ -0,0 +1,10 @@ +package request + +type SearchGeometryDto struct { + MinLng *float64 `query:"min_lng" validate:"required,gte=-180,lte=180"` + MinLat *float64 `query:"min_lat" validate:"required,gte=-90,lte=90"` + MaxLng *float64 `query:"max_lng" validate:"required,gte=-180,lte=180"` + MaxLat *float64 `query:"max_lat" validate:"required,gte=-90,lte=90"` + TimePoint *int32 `json:"time" query:"time" validate:"omitempty,number"` + EntityID *string `json:"entity_id" query:"entity_id" validate:"omitempty,uuid"` +} diff --git a/internal/dtos/request/user.go b/internal/dtos/request/user.go index 1372f5d..508442f 100644 --- a/internal/dtos/request/user.go +++ b/internal/dtos/request/user.go @@ -5,13 +5,14 @@ import "time" type UpdateProfileDto struct { DisplayName *string `json:"display_name" validate:"omitempty,min=2,max=50"` FullName *string `json:"full_name" validate:"omitempty,min=2,max=100"` - AvatarUrl *string `json:"avatar_url" validate:"omitempty,url,image_url"` + AvatarUrl *string `json:"avatar_url" validate:"omitempty,image_url"` Bio *string `json:"bio" validate:"omitempty,max=255"` Location *string `json:"location" validate:"omitempty,max=100"` - Website *string `json:"website" validate:"omitempty,url"` + Website *string `json:"website" validate:"omitempty,optional_url"` CountryCode *string `json:"country_code" validate:"omitempty,len=2"` Phone *string `json:"phone" validate:"omitempty,min=8,max=20"` } + type ChangePasswordDto struct { OldPassword string `json:"old_password" validate:"required,min=8,max=64"` 
NewPassword string `json:"new_password" validate:"required,min=8,max=64,nefield=OldPassword"` @@ -26,6 +27,7 @@ type PaginationDto struct { Limit int `json:"limit" query:"limit" validate:"omitempty,min=1,max=100"` Order string `json:"order" query:"order" validate:"omitempty,oneof=asc desc"` } + type SearchUserDto struct { PaginationDto Sort string `json:"sort" query:"sort" validate:"omitempty,oneof=id created_at updated_at email is_deleted auth_provider"` diff --git a/internal/dtos/request/wiki.go b/internal/dtos/request/wiki.go new file mode 100644 index 0000000..42efd85 --- /dev/null +++ b/internal/dtos/request/wiki.go @@ -0,0 +1,8 @@ +package request + +type SearchWikiDto struct { + Cursor string `json:"cursor" query:"cursor" validate:"omitempty,uuid"` + Limit int `json:"limit" query:"limit" validate:"omitempty,min=1,max=100"` + Title string `json:"title" query:"title" validate:"omitempty,max=1000"` + EntityID string `json:"entity_id" query:"entity_id" validate:"omitempty,uuid"` +} diff --git a/internal/dtos/response/auth.go b/internal/dtos/response/auth.go index 96cc3ce..0014ea1 100644 --- a/internal/dtos/response/auth.go +++ b/internal/dtos/response/auth.go @@ -1,8 +1,8 @@ package response type AuthResponse struct { - AccessToken string `json:"access_token"` - RefreshToken string `json:"refresh_token"` + AccessToken string `json:"access_token,omitempty"` + RefreshToken string `json:"refresh_token,omitempty"` } type VerifyTokenResponse struct { diff --git a/internal/dtos/response/common.go b/internal/dtos/response/common.go index 90c0545..d67ebd5 100644 --- a/internal/dtos/response/common.go +++ b/internal/dtos/response/common.go @@ -8,9 +8,9 @@ import ( type CommonResponse struct { Status bool `json:"status"` - Data any `json:"data"` - Errors any `json:"errors"` - Message string `json:"message"` + Data any `json:"data,omitempty"` + Errors any `json:"errors,omitempty"` + Message string `json:"message,omitempty"` } type JWTClaims struct { @@ -29,10 +29,10 @@ 
type PaginationMeta struct { type PaginatedResponse struct { Status bool `json:"status"` - Message string `json:"message"` - Data any `json:"data"` - Errors any `json:"errors"` - Pagination *PaginationMeta `json:"pagination"` + Message string `json:"message,omitempty"` + Data any `json:"data,omitempty"` + Errors any `json:"errors,omitempty"` + Pagination *PaginationMeta `json:"pagination,omitempty"` } func BuildPaginatedResponse(data any, totalRecords int64, page int, limit int) *PaginatedResponse { diff --git a/internal/dtos/response/entity.go b/internal/dtos/response/entity.go new file mode 100644 index 0000000..f43c745 --- /dev/null +++ b/internal/dtos/response/entity.go @@ -0,0 +1,13 @@ +package response + +import "time" + +type EntityResponse struct { + ID string `json:"id"` + Name string `json:"name"` + Description string `json:"description,omitempty"` + ThumbnailUrl string `json:"thumbnail_url,omitempty"` + IsDeleted bool `json:"is_deleted"` + CreatedAt *time.Time `json:"created_at"` + UpdatedAt *time.Time `json:"updated_at"` +} diff --git a/internal/dtos/response/geometry.go b/internal/dtos/response/geometry.go new file mode 100644 index 0000000..5841d04 --- /dev/null +++ b/internal/dtos/response/geometry.go @@ -0,0 +1,26 @@ +package response + +import ( + "encoding/json" + "time" +) + +type Bbox struct { + MinLng float64 `json:"min_lng"` + MinLat float64 `json:"min_lat"` + MaxLng float64 `json:"max_lng"` + MaxLat float64 `json:"max_lat"` +} + +type GeometryResponse struct { + ID string `json:"id"` + GeoType string `json:"geo_type"` + DrawGeometry json.RawMessage `json:"draw_geometry"` + Binding json.RawMessage `json:"binding,omitempty"` + TimeStart int32 `json:"time_start,omitempty"` + TimeEnd int32 `json:"time_end,omitempty"` + Bbox *Bbox `json:"bbox,omitempty"` + IsDeleted bool `json:"is_deleted,omitempty"` + CreatedAt *time.Time `json:"created_at,omitempty"` + UpdatedAt *time.Time `json:"updated_at,omitempty"` +} diff --git 
a/internal/dtos/response/media.go b/internal/dtos/response/media.go index 45b26c0..58dcdf0 100644 --- a/internal/dtos/response/media.go +++ b/internal/dtos/response/media.go @@ -20,8 +20,8 @@ type MediaResponse struct { MimeType string `json:"mime_type"` Size int64 `json:"size"` FileMetadata json.RawMessage `json:"file_metadata"` - CreatedAt *time.Time `json:"created_at"` - UpdatedAt *time.Time `json:"updated_at"` + CreatedAt *time.Time `json:"created_at,omitempty"` + UpdatedAt *time.Time `json:"updated_at,omitempty"` } type MediaSimpleResponse struct { @@ -31,5 +31,5 @@ type MediaSimpleResponse struct { MimeType string `json:"mime_type"` Size int64 `json:"size"` FileMetadata json.RawMessage `json:"file_metadata"` - CreatedAt *time.Time `json:"created_at"` + CreatedAt *time.Time `json:"created_at,omitempty"` } diff --git a/internal/dtos/response/role.go b/internal/dtos/response/role.go index 525a03b..c1d1b35 100644 --- a/internal/dtos/response/role.go +++ b/internal/dtos/response/role.go @@ -10,7 +10,7 @@ type RoleSimpleResponse struct { type RoleResponse struct { ID string `json:"id"` Name string `json:"name"` - IsDeleted bool `json:"is_deleted"` - CreatedAt *time.Time `json:"created_at"` - UpdatedAt *time.Time `json:"updated_at"` + IsDeleted bool `json:"is_deleted,omitempty"` + CreatedAt *time.Time `json:"created_at,omitempty"` + UpdatedAt *time.Time `json:"updated_at,omitempty"` } diff --git a/internal/dtos/response/user.go b/internal/dtos/response/user.go index 62a8e52..1aff1a0 100644 --- a/internal/dtos/response/user.go +++ b/internal/dtos/response/user.go @@ -5,29 +5,29 @@ import "time" type UserResponse struct { ID string `json:"id"` Email string `json:"email"` - Profile *UserProfileSimpleResponse `json:"profile"` - TokenVersion int32 `json:"token_version"` - IsDeleted bool `json:"is_deleted"` - CreatedAt *time.Time `json:"created_at"` - UpdatedAt *time.Time `json:"updated_at"` - Roles []*RoleSimpleResponse `json:"roles"` + Profile *UserProfileSimpleResponse 
`json:"profile,omitempty"` + TokenVersion int32 `json:"token_version,omitempty"` + IsDeleted bool `json:"is_deleted,omitempty"` + CreatedAt *time.Time `json:"created_at,omitempty"` + UpdatedAt *time.Time `json:"updated_at,omitempty"` + Roles []*RoleSimpleResponse `json:"roles,omitempty"` } type UserSimpleResponse struct { ID string `json:"id"` Email string `json:"email"` - DisplayName string `json:"display_name"` - FullName string `json:"full_name"` - AvatarUrl string `json:"avatar_url"` + DisplayName string `json:"display_name,omitempty"` + FullName string `json:"full_name,omitempty"` + AvatarUrl string `json:"avatar_url,omitempty"` } type UserProfileSimpleResponse struct { DisplayName string `json:"display_name"` - FullName string `json:"full_name"` - AvatarUrl string `json:"avatar_url"` - Bio string `json:"bio"` - Location string `json:"location"` - Website string `json:"website"` - CountryCode string `json:"country_code"` - Phone string `json:"phone"` + FullName string `json:"full_name,omitempty"` + AvatarUrl string `json:"avatar_url,omitempty"` + Bio string `json:"bio,omitempty"` + Location string `json:"location,omitempty"` + Website string `json:"website,omitempty"` + CountryCode string `json:"country_code,omitempty"` + Phone string `json:"phone,omitempty"` } diff --git a/internal/dtos/response/verification.go b/internal/dtos/response/verification.go index 222bf28..5d91f15 100644 --- a/internal/dtos/response/verification.go +++ b/internal/dtos/response/verification.go @@ -4,13 +4,13 @@ import "time" type UserVerificationResponse struct { ID string `json:"id"` - User *UserSimpleResponse `json:"user"` + User *UserSimpleResponse `json:"user,omitempty"` VerifyType string `json:"verify_type"` Content string `json:"content"` Status string `json:"status"` - Reviewer *UserSimpleResponse `json:"reviewer"` - ReviewNote string `json:"review_note"` - ReviewedAt *time.Time `json:"reviewed_at"` - CreatedAt *time.Time `json:"created_at"` - Medias []*MediaSimpleResponse 
`json:"media"` + Reviewer *UserSimpleResponse `json:"reviewer,omitempty"` + ReviewNote string `json:"review_note,omitempty"` + ReviewedAt *time.Time `json:"reviewed_at,omitempty"` + CreatedAt *time.Time `json:"created_at,omitempty"` + Medias []*MediaSimpleResponse `json:"media,omitempty"` } diff --git a/internal/dtos/response/wiki.go b/internal/dtos/response/wiki.go new file mode 100644 index 0000000..ddd2019 --- /dev/null +++ b/internal/dtos/response/wiki.go @@ -0,0 +1,12 @@ +package response + +import "time" + +type WikiResponse struct { + ID string `json:"id"` + Title string `json:"title,omitempty"` + Content string `json:"content,omitempty"` + IsDeleted bool `json:"is_deleted,omitempty"` + CreatedAt *time.Time `json:"created_at,omitempty"` + UpdatedAt *time.Time `json:"updated_at,omitempty"` +} diff --git a/internal/gen/sqlc/entities.sql.go b/internal/gen/sqlc/entities.sql.go new file mode 100644 index 0000000..14fcd28 --- /dev/null +++ b/internal/gen/sqlc/entities.sql.go @@ -0,0 +1,156 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.30.0 +// source: entities.sql + +package sqlc + +import ( + "context" + + "github.com/jackc/pgx/v5/pgtype" +) + +const createEntity = `-- name: CreateEntity :one +INSERT INTO entities ( + name, description, thumbnail_url +) VALUES ( + $1, $2, $3 +) +RETURNING id, name, description, thumbnail_url, is_deleted, created_at, updated_at +` + +type CreateEntityParams struct { + Name string `json:"name"` + Description pgtype.Text `json:"description"` + ThumbnailUrl pgtype.Text `json:"thumbnail_url"` +} + +func (q *Queries) CreateEntity(ctx context.Context, arg CreateEntityParams) (Entity, error) { + row := q.db.QueryRow(ctx, createEntity, arg.Name, arg.Description, arg.ThumbnailUrl) + var i Entity + err := row.Scan( + &i.ID, + &i.Name, + &i.Description, + &i.ThumbnailUrl, + &i.IsDeleted, + &i.CreatedAt, + &i.UpdatedAt, + ) + return i, err +} + +const deleteEntity = `-- name: DeleteEntity :exec +UPDATE entities +SET + is_deleted = true +WHERE id = $1 +` + +func (q *Queries) DeleteEntity(ctx context.Context, id pgtype.UUID) error { + _, err := q.db.Exec(ctx, deleteEntity, id) + return err +} + +const getEntityById = `-- name: GetEntityById :one +SELECT id, name, description, thumbnail_url, is_deleted, created_at, updated_at +FROM entities +WHERE id = $1 AND is_deleted = false +` + +func (q *Queries) GetEntityById(ctx context.Context, id pgtype.UUID) (Entity, error) { + row := q.db.QueryRow(ctx, getEntityById, id) + var i Entity + err := row.Scan( + &i.ID, + &i.Name, + &i.Description, + &i.ThumbnailUrl, + &i.IsDeleted, + &i.CreatedAt, + &i.UpdatedAt, + ) + return i, err +} + +const searchEntities = `-- name: SearchEntities :many +SELECT id, name, description, thumbnail_url, is_deleted, created_at, updated_at +FROM entities +WHERE is_deleted = false + AND name ILIKE '%' || $1::text || '%' + AND ($2::uuid IS NULL OR id < $2::uuid) +ORDER BY id DESC +LIMIT $3 +` + +type SearchEntitiesParams struct { + Name string `json:"name"` + CursorID pgtype.UUID 
`json:"cursor_id"` + LimitCount int32 `json:"limit_count"` +} + +func (q *Queries) SearchEntities(ctx context.Context, arg SearchEntitiesParams) ([]Entity, error) { + rows, err := q.db.Query(ctx, searchEntities, arg.Name, arg.CursorID, arg.LimitCount) + if err != nil { + return nil, err + } + defer rows.Close() + items := []Entity{} + for rows.Next() { + var i Entity + if err := rows.Scan( + &i.ID, + &i.Name, + &i.Description, + &i.ThumbnailUrl, + &i.IsDeleted, + &i.CreatedAt, + &i.UpdatedAt, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const updateEntity = `-- name: UpdateEntity :one +UPDATE entities +SET + name = COALESCE($1, name), + description = COALESCE($2, description), + thumbnail_url = COALESCE($3, thumbnail_url) +WHERE id = $4 AND is_deleted = false +RETURNING id, name, description, thumbnail_url, is_deleted, created_at, updated_at +` + +type UpdateEntityParams struct { + Name pgtype.Text `json:"name"` + Description pgtype.Text `json:"description"` + ThumbnailUrl pgtype.Text `json:"thumbnail_url"` + ID pgtype.UUID `json:"id"` +} + +func (q *Queries) UpdateEntity(ctx context.Context, arg UpdateEntityParams) (Entity, error) { + row := q.db.QueryRow(ctx, updateEntity, + arg.Name, + arg.Description, + arg.ThumbnailUrl, + arg.ID, + ) + var i Entity + err := row.Scan( + &i.ID, + &i.Name, + &i.Description, + &i.ThumbnailUrl, + &i.IsDeleted, + &i.CreatedAt, + &i.UpdatedAt, + ) + return i, err +} diff --git a/internal/gen/sqlc/geometries.sql.go b/internal/gen/sqlc/geometries.sql.go new file mode 100644 index 0000000..6d2cfe4 --- /dev/null +++ b/internal/gen/sqlc/geometries.sql.go @@ -0,0 +1,371 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.30.0 +// source: geometries.sql + +package sqlc + +import ( + "context" + "encoding/json" + + "github.com/jackc/pgx/v5/pgtype" +) + +const bulkDeleteEntityGeometriesByEntityId = `-- name: BulkDeleteEntityGeometriesByEntityId :many +DELETE FROM entity_geometries +WHERE entity_id = $1 +RETURNING geometry_id +` + +func (q *Queries) BulkDeleteEntityGeometriesByEntityId(ctx context.Context, entityID pgtype.UUID) ([]pgtype.UUID, error) { + rows, err := q.db.Query(ctx, bulkDeleteEntityGeometriesByEntityId, entityID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []pgtype.UUID{} + for rows.Next() { + var geometry_id pgtype.UUID + if err := rows.Scan(&geometry_id); err != nil { + return nil, err + } + items = append(items, geometry_id) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const createEntityGeometries = `-- name: CreateEntityGeometries :exec +INSERT INTO entity_geometries ( + entity_id, geometry_id +) +SELECT $1, unnest($2::uuid[]) +` + +type CreateEntityGeometriesParams struct { + EntityID pgtype.UUID `json:"entity_id"` + GeometryIds []pgtype.UUID `json:"geometry_ids"` +} + +func (q *Queries) CreateEntityGeometries(ctx context.Context, arg CreateEntityGeometriesParams) error { + _, err := q.db.Exec(ctx, createEntityGeometries, arg.EntityID, arg.GeometryIds) + return err +} + +const createGeometry = `-- name: CreateGeometry :one +INSERT INTO geometries ( + geo_type, draw_geometry, binding, time_start, time_end, bbox +) VALUES ( + $1, $2, $3, $4, $5, ST_MakeEnvelope($6::float8, $7::float8, $8::float8, $9::float8, 4326) +) +RETURNING id, geo_type, draw_geometry, binding, time_start, time_end, + ST_XMin(bbox)::float8 as min_lng, ST_YMin(bbox)::float8 as min_lat, ST_XMax(bbox)::float8 as max_lng, ST_YMax(bbox)::float8 as max_lat, + is_deleted, created_at, updated_at +` + +type CreateGeometryParams struct { + GeoType string `json:"geo_type"` + DrawGeometry json.RawMessage 
`json:"draw_geometry"` + Binding []byte `json:"binding"` + TimeStart pgtype.Int4 `json:"time_start"` + TimeEnd pgtype.Int4 `json:"time_end"` + MinLng float64 `json:"min_lng"` + MinLat float64 `json:"min_lat"` + MaxLng float64 `json:"max_lng"` + MaxLat float64 `json:"max_lat"` +} + +type CreateGeometryRow struct { + ID pgtype.UUID `json:"id"` + GeoType string `json:"geo_type"` + DrawGeometry json.RawMessage `json:"draw_geometry"` + Binding []byte `json:"binding"` + TimeStart pgtype.Int4 `json:"time_start"` + TimeEnd pgtype.Int4 `json:"time_end"` + MinLng float64 `json:"min_lng"` + MinLat float64 `json:"min_lat"` + MaxLng float64 `json:"max_lng"` + MaxLat float64 `json:"max_lat"` + IsDeleted bool `json:"is_deleted"` + CreatedAt pgtype.Timestamptz `json:"created_at"` + UpdatedAt pgtype.Timestamptz `json:"updated_at"` +} + +func (q *Queries) CreateGeometry(ctx context.Context, arg CreateGeometryParams) (CreateGeometryRow, error) { + row := q.db.QueryRow(ctx, createGeometry, + arg.GeoType, + arg.DrawGeometry, + arg.Binding, + arg.TimeStart, + arg.TimeEnd, + arg.MinLng, + arg.MinLat, + arg.MaxLng, + arg.MaxLat, + ) + var i CreateGeometryRow + err := row.Scan( + &i.ID, + &i.GeoType, + &i.DrawGeometry, + &i.Binding, + &i.TimeStart, + &i.TimeEnd, + &i.MinLng, + &i.MinLat, + &i.MaxLng, + &i.MaxLat, + &i.IsDeleted, + &i.CreatedAt, + &i.UpdatedAt, + ) + return i, err +} + +const deleteGeometry = `-- name: DeleteGeometry :exec +UPDATE geometries +SET + is_deleted = true +WHERE id = $1 +` + +func (q *Queries) DeleteGeometry(ctx context.Context, id pgtype.UUID) error { + _, err := q.db.Exec(ctx, deleteGeometry, id) + return err +} + +const getGeometryById = `-- name: GetGeometryById :one +SELECT id, geo_type, draw_geometry, binding, time_start, time_end, + ST_XMin(bbox)::float8 as min_lng, ST_YMin(bbox)::float8 as min_lat, ST_XMax(bbox)::float8 as max_lng, ST_YMax(bbox)::float8 as max_lat, + is_deleted, created_at, updated_at +FROM geometries +WHERE id = $1 AND is_deleted = false 
+` + +type GetGeometryByIdRow struct { + ID pgtype.UUID `json:"id"` + GeoType string `json:"geo_type"` + DrawGeometry json.RawMessage `json:"draw_geometry"` + Binding []byte `json:"binding"` + TimeStart pgtype.Int4 `json:"time_start"` + TimeEnd pgtype.Int4 `json:"time_end"` + MinLng float64 `json:"min_lng"` + MinLat float64 `json:"min_lat"` + MaxLng float64 `json:"max_lng"` + MaxLat float64 `json:"max_lat"` + IsDeleted bool `json:"is_deleted"` + CreatedAt pgtype.Timestamptz `json:"created_at"` + UpdatedAt pgtype.Timestamptz `json:"updated_at"` +} + +func (q *Queries) GetGeometryById(ctx context.Context, id pgtype.UUID) (GetGeometryByIdRow, error) { + row := q.db.QueryRow(ctx, getGeometryById, id) + var i GetGeometryByIdRow + err := row.Scan( + &i.ID, + &i.GeoType, + &i.DrawGeometry, + &i.Binding, + &i.TimeStart, + &i.TimeEnd, + &i.MinLng, + &i.MinLat, + &i.MaxLng, + &i.MaxLat, + &i.IsDeleted, + &i.CreatedAt, + &i.UpdatedAt, + ) + return i, err +} + +const searchGeometries = `-- name: SearchGeometries :many +SELECT + g.id, g.geo_type, g.draw_geometry, g.binding, g.time_start, g.time_end, + ST_XMin(g.bbox)::float8 as min_lng, + ST_YMin(g.bbox)::float8 as min_lat, + ST_XMax(g.bbox)::float8 as max_lng, + ST_YMax(g.bbox)::float8 as max_lat, + g.is_deleted, g.created_at, g.updated_at +FROM geometries g +WHERE g.is_deleted = false + AND ( + $1::float8 IS NULL OR + $2::float8 IS NULL OR + $3::float8 IS NULL OR + $4::float8 IS NULL OR + g.bbox && ST_MakeEnvelope( + $1::float8, + $2::float8, + $3::float8, + $4::float8, + 4326 + ) + ) + AND ( + $5::int IS NULL OR + (g.time_start <= $5::int AND g.time_end >= $5::int) + ) + AND ( + $6::uuid IS NULL OR + EXISTS ( + SELECT 1 + FROM entity_geometries eg + WHERE eg.geometry_id = g.id + AND eg.entity_id = $6::uuid + ) + ) +ORDER BY g.id DESC +` + +type SearchGeometriesParams struct { + SearchMinLng pgtype.Float8 `json:"search_min_lng"` + SearchMinLat pgtype.Float8 `json:"search_min_lat"` + SearchMaxLng pgtype.Float8 
`json:"search_max_lng"` + SearchMaxLat pgtype.Float8 `json:"search_max_lat"` + TimePoint pgtype.Int4 `json:"time_point"` + EntityID pgtype.UUID `json:"entity_id"` +} + +type SearchGeometriesRow struct { + ID pgtype.UUID `json:"id"` + GeoType string `json:"geo_type"` + DrawGeometry json.RawMessage `json:"draw_geometry"` + Binding []byte `json:"binding"` + TimeStart pgtype.Int4 `json:"time_start"` + TimeEnd pgtype.Int4 `json:"time_end"` + MinLng float64 `json:"min_lng"` + MinLat float64 `json:"min_lat"` + MaxLng float64 `json:"max_lng"` + MaxLat float64 `json:"max_lat"` + IsDeleted bool `json:"is_deleted"` + CreatedAt pgtype.Timestamptz `json:"created_at"` + UpdatedAt pgtype.Timestamptz `json:"updated_at"` +} + +func (q *Queries) SearchGeometries(ctx context.Context, arg SearchGeometriesParams) ([]SearchGeometriesRow, error) { + rows, err := q.db.Query(ctx, searchGeometries, + arg.SearchMinLng, + arg.SearchMinLat, + arg.SearchMaxLng, + arg.SearchMaxLat, + arg.TimePoint, + arg.EntityID, + ) + if err != nil { + return nil, err + } + defer rows.Close() + items := []SearchGeometriesRow{} + for rows.Next() { + var i SearchGeometriesRow + if err := rows.Scan( + &i.ID, + &i.GeoType, + &i.DrawGeometry, + &i.Binding, + &i.TimeStart, + &i.TimeEnd, + &i.MinLng, + &i.MinLat, + &i.MaxLng, + &i.MaxLat, + &i.IsDeleted, + &i.CreatedAt, + &i.UpdatedAt, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const updateGeometry = `-- name: UpdateGeometry :one +UPDATE geometries +SET + geo_type = COALESCE($1, geo_type), + draw_geometry = COALESCE($2, draw_geometry), + binding = COALESCE($3, binding), + time_start = COALESCE($4, time_start), + time_end = COALESCE($5, time_end), + bbox = CASE + WHEN $6::boolean = true THEN + ST_MakeEnvelope($7::float8, $8::float8, $9::float8, $10::float8, 4326) + ELSE bbox + END, + updated_at = now() +WHERE id = $11 AND is_deleted = false +RETURNING id, 
geo_type, draw_geometry, binding, time_start, time_end, + ST_XMin(bbox)::float8 as min_lng, ST_YMin(bbox)::float8 as min_lat, ST_XMax(bbox)::float8 as max_lng, ST_YMax(bbox)::float8 as max_lat, + is_deleted, created_at, updated_at +` + +type UpdateGeometryParams struct { + GeoType pgtype.Text `json:"geo_type"` + DrawGeometry []byte `json:"draw_geometry"` + Binding []byte `json:"binding"` + TimeStart pgtype.Int4 `json:"time_start"` + TimeEnd pgtype.Int4 `json:"time_end"` + UpdateBbox pgtype.Bool `json:"update_bbox"` + MinLng pgtype.Float8 `json:"min_lng"` + MinLat pgtype.Float8 `json:"min_lat"` + MaxLng pgtype.Float8 `json:"max_lng"` + MaxLat pgtype.Float8 `json:"max_lat"` + ID pgtype.UUID `json:"id"` +} + +type UpdateGeometryRow struct { + ID pgtype.UUID `json:"id"` + GeoType string `json:"geo_type"` + DrawGeometry json.RawMessage `json:"draw_geometry"` + Binding []byte `json:"binding"` + TimeStart pgtype.Int4 `json:"time_start"` + TimeEnd pgtype.Int4 `json:"time_end"` + MinLng float64 `json:"min_lng"` + MinLat float64 `json:"min_lat"` + MaxLng float64 `json:"max_lng"` + MaxLat float64 `json:"max_lat"` + IsDeleted bool `json:"is_deleted"` + CreatedAt pgtype.Timestamptz `json:"created_at"` + UpdatedAt pgtype.Timestamptz `json:"updated_at"` +} + +func (q *Queries) UpdateGeometry(ctx context.Context, arg UpdateGeometryParams) (UpdateGeometryRow, error) { + row := q.db.QueryRow(ctx, updateGeometry, + arg.GeoType, + arg.DrawGeometry, + arg.Binding, + arg.TimeStart, + arg.TimeEnd, + arg.UpdateBbox, + arg.MinLng, + arg.MinLat, + arg.MaxLng, + arg.MaxLat, + arg.ID, + ) + var i UpdateGeometryRow + err := row.Scan( + &i.ID, + &i.GeoType, + &i.DrawGeometry, + &i.Binding, + &i.TimeStart, + &i.TimeEnd, + &i.MinLng, + &i.MinLat, + &i.MaxLng, + &i.MaxLat, + &i.IsDeleted, + &i.CreatedAt, + &i.UpdatedAt, + ) + return i, err +} diff --git a/internal/gen/sqlc/models.go b/internal/gen/sqlc/models.go index eb7605c..14f6933 100644 --- a/internal/gen/sqlc/models.go +++ 
b/internal/gen/sqlc/models.go @@ -5,9 +5,44 @@ package sqlc import ( + "encoding/json" + "github.com/jackc/pgx/v5/pgtype" ) +type Entity struct { + ID pgtype.UUID `json:"id"` + Name string `json:"name"` + Description pgtype.Text `json:"description"` + ThumbnailUrl pgtype.Text `json:"thumbnail_url"` + IsDeleted bool `json:"is_deleted"` + CreatedAt pgtype.Timestamptz `json:"created_at"` + UpdatedAt pgtype.Timestamptz `json:"updated_at"` +} + +type EntityGeometry struct { + EntityID pgtype.UUID `json:"entity_id"` + GeometryID pgtype.UUID `json:"geometry_id"` +} + +type EntityWiki struct { + EntityID pgtype.UUID `json:"entity_id"` + WikiID pgtype.UUID `json:"wiki_id"` +} + +type Geometry struct { + ID pgtype.UUID `json:"id"` + GeoType string `json:"geo_type"` + DrawGeometry json.RawMessage `json:"draw_geometry"` + Binding []byte `json:"binding"` + TimeStart pgtype.Int4 `json:"time_start"` + TimeEnd pgtype.Int4 `json:"time_end"` + Bbox interface{} `json:"bbox"` + IsDeleted bool `json:"is_deleted"` + CreatedAt pgtype.Timestamptz `json:"created_at"` + UpdatedAt pgtype.Timestamptz `json:"updated_at"` +} + type Media struct { ID pgtype.UUID `json:"id"` UserID pgtype.UUID `json:"user_id"` @@ -77,3 +112,12 @@ type VerificationMedia struct { VerificationID pgtype.UUID `json:"verification_id"` MediaID pgtype.UUID `json:"media_id"` } + +type Wiki struct { + ID pgtype.UUID `json:"id"` + Title pgtype.Text `json:"title"` + Content pgtype.Text `json:"content"` + IsDeleted bool `json:"is_deleted"` + CreatedAt pgtype.Timestamptz `json:"created_at"` + UpdatedAt pgtype.Timestamptz `json:"updated_at"` +} diff --git a/internal/gen/sqlc/wiki.sql.go b/internal/gen/sqlc/wiki.sql.go new file mode 100644 index 0000000..7884e47 --- /dev/null +++ b/internal/gen/sqlc/wiki.sql.go @@ -0,0 +1,203 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.30.0 +// source: wiki.sql + +package sqlc + +import ( + "context" + + "github.com/jackc/pgx/v5/pgtype" +) + +const bulkDeleteEntityWikisByEntityId = `-- name: BulkDeleteEntityWikisByEntityId :many +DELETE FROM entity_wikis +WHERE entity_id = $1 +RETURNING wiki_id +` + +func (q *Queries) BulkDeleteEntityWikisByEntityId(ctx context.Context, entityID pgtype.UUID) ([]pgtype.UUID, error) { + rows, err := q.db.Query(ctx, bulkDeleteEntityWikisByEntityId, entityID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []pgtype.UUID{} + for rows.Next() { + var wiki_id pgtype.UUID + if err := rows.Scan(&wiki_id); err != nil { + return nil, err + } + items = append(items, wiki_id) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const createEntityWikis = `-- name: CreateEntityWikis :exec +INSERT INTO entity_wikis ( + entity_id, wiki_id +) +SELECT $1, unnest($2::uuid[]) +` + +type CreateEntityWikisParams struct { + EntityID pgtype.UUID `json:"entity_id"` + WikiIds []pgtype.UUID `json:"wiki_ids"` +} + +func (q *Queries) CreateEntityWikis(ctx context.Context, arg CreateEntityWikisParams) error { + _, err := q.db.Exec(ctx, createEntityWikis, arg.EntityID, arg.WikiIds) + return err +} + +const createWiki = `-- name: CreateWiki :one +INSERT INTO wikis ( + title, content +) VALUES ( + $1, $2 +) +RETURNING id, title, content, is_deleted, created_at, updated_at +` + +type CreateWikiParams struct { + Title pgtype.Text `json:"title"` + Content pgtype.Text `json:"content"` +} + +func (q *Queries) CreateWiki(ctx context.Context, arg CreateWikiParams) (Wiki, error) { + row := q.db.QueryRow(ctx, createWiki, arg.Title, arg.Content) + var i Wiki + err := row.Scan( + &i.ID, + &i.Title, + &i.Content, + &i.IsDeleted, + &i.CreatedAt, + &i.UpdatedAt, + ) + return i, err +} + +const deleteWiki = `-- name: DeleteWiki :exec +UPDATE wikis +SET + is_deleted = true +WHERE id = $1 +` + +func (q *Queries) DeleteWiki(ctx 
context.Context, id pgtype.UUID) error { + _, err := q.db.Exec(ctx, deleteWiki, id) + return err +} + +const getWikiById = `-- name: GetWikiById :one +SELECT id, title, content, is_deleted, created_at, updated_at +FROM wikis +WHERE id = $1 AND is_deleted = false +` + +func (q *Queries) GetWikiById(ctx context.Context, id pgtype.UUID) (Wiki, error) { + row := q.db.QueryRow(ctx, getWikiById, id) + var i Wiki + err := row.Scan( + &i.ID, + &i.Title, + &i.Content, + &i.IsDeleted, + &i.CreatedAt, + &i.UpdatedAt, + ) + return i, err +} + +const searchWikis = `-- name: SearchWikis :many +SELECT w.id, w.title, w.content, w.is_deleted, w.created_at, w.updated_at +FROM wikis w +WHERE w.is_deleted = false + AND w.title ILIKE '%' || $1::text || '%' + AND ( + $2::uuid IS NULL OR + EXISTS ( + SELECT 1 + FROM entity_wikis ew + WHERE ew.wiki_id = w.id + AND ew.entity_id = $2::uuid + ) + ) + AND ($3::uuid IS NULL OR w.id < $3::uuid) + +ORDER BY w.id DESC +LIMIT $4 +` + +type SearchWikisParams struct { + Title string `json:"title"` + EntityID pgtype.UUID `json:"entity_id"` + CursorID pgtype.UUID `json:"cursor_id"` + LimitCount int32 `json:"limit_count"` +} + +func (q *Queries) SearchWikis(ctx context.Context, arg SearchWikisParams) ([]Wiki, error) { + rows, err := q.db.Query(ctx, searchWikis, + arg.Title, + arg.EntityID, + arg.CursorID, + arg.LimitCount, + ) + if err != nil { + return nil, err + } + defer rows.Close() + items := []Wiki{} + for rows.Next() { + var i Wiki + if err := rows.Scan( + &i.ID, + &i.Title, + &i.Content, + &i.IsDeleted, + &i.CreatedAt, + &i.UpdatedAt, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const updateWiki = `-- name: UpdateWiki :one +UPDATE wikis +SET + title = COALESCE($1, title), + content = COALESCE($2, content) +WHERE id = $3 AND is_deleted = false +RETURNING id, title, content, is_deleted, created_at, updated_at +` + +type UpdateWikiParams 
struct { + Title pgtype.Text `json:"title"` + Content pgtype.Text `json:"content"` + ID pgtype.UUID `json:"id"` +} + +func (q *Queries) UpdateWiki(ctx context.Context, arg UpdateWikiParams) (Wiki, error) { + row := q.db.QueryRow(ctx, updateWiki, arg.Title, arg.Content, arg.ID) + var i Wiki + err := row.Scan( + &i.ID, + &i.Title, + &i.Content, + &i.IsDeleted, + &i.CreatedAt, + &i.UpdatedAt, + ) + return i, err +} diff --git a/internal/models/entity.go b/internal/models/entity.go new file mode 100644 index 0000000..3fa7887 --- /dev/null +++ b/internal/models/entity.go @@ -0,0 +1,45 @@ +package models + +import ( + "history-api/internal/dtos/response" + "time" +) + +type EntityEntity struct { + ID string `json:"id"` + Name string `json:"name"` + Description string `json:"description"` + ThumbnailUrl string `json:"thumbnail_url"` + IsDeleted bool `json:"is_deleted"` + CreatedAt *time.Time `json:"created_at"` + UpdatedAt *time.Time `json:"updated_at"` +} + +func (e *EntityEntity) ToResponse() *response.EntityResponse { + if e == nil { + return nil + } + return &response.EntityResponse{ + ID: e.ID, + Name: e.Name, + Description: e.Description, + ThumbnailUrl: e.ThumbnailUrl, + IsDeleted: e.IsDeleted, + CreatedAt: e.CreatedAt, + UpdatedAt: e.UpdatedAt, + } +} + +func EntitiesEntityToResponse(es []*EntityEntity) []*response.EntityResponse { + out := make([]*response.EntityResponse, 0) + if es == nil { + return out + } + for _, e := range es { + if e == nil { + continue + } + out = append(out, e.ToResponse()) + } + return out +} diff --git a/internal/models/geometry.go b/internal/models/geometry.go new file mode 100644 index 0000000..9ce7934 --- /dev/null +++ b/internal/models/geometry.go @@ -0,0 +1,52 @@ +package models + +import ( + "encoding/json" + "history-api/internal/dtos/response" + "time" +) + +type GeometryEntity struct { + ID string `json:"id"` + GeoType string `json:"geo_type"` + DrawGeometry json.RawMessage `json:"draw_geometry"` + Binding json.RawMessage 
`json:"binding"` + TimeStart int32 `json:"time_start"` + TimeEnd int32 `json:"time_end"` + Bbox *response.Bbox `json:"bbox"` + IsDeleted bool `json:"is_deleted"` + CreatedAt *time.Time `json:"created_at"` + UpdatedAt *time.Time `json:"updated_at"` +} + +func (g *GeometryEntity) ToResponse() *response.GeometryResponse { + if g == nil { + return nil + } + return &response.GeometryResponse{ + ID: g.ID, + GeoType: g.GeoType, + DrawGeometry: g.DrawGeometry, + Binding: g.Binding, + TimeStart: g.TimeStart, + TimeEnd: g.TimeEnd, + Bbox: g.Bbox, + IsDeleted: g.IsDeleted, + CreatedAt: g.CreatedAt, + UpdatedAt: g.UpdatedAt, + } +} + +func GeometriesEntityToResponse(gs []*GeometryEntity) []*response.GeometryResponse { + out := make([]*response.GeometryResponse, 0) + if gs == nil { + return out + } + for _, g := range gs { + if g == nil { + continue + } + out = append(out, g.ToResponse()) + } + return out +} diff --git a/internal/models/wiki.go b/internal/models/wiki.go new file mode 100644 index 0000000..6ccf164 --- /dev/null +++ b/internal/models/wiki.go @@ -0,0 +1,43 @@ +package models + +import ( + "history-api/internal/dtos/response" + "time" +) + +type WikiEntity struct { + ID string `json:"id"` + Title string `json:"title"` + Content string `json:"content"` + IsDeleted bool `json:"is_deleted"` + CreatedAt *time.Time `json:"created_at"` + UpdatedAt *time.Time `json:"updated_at"` +} + +func (w *WikiEntity) ToResponse() *response.WikiResponse { + if w == nil { + return nil + } + return &response.WikiResponse{ + ID: w.ID, + Title: w.Title, + Content: w.Content, + IsDeleted: w.IsDeleted, + CreatedAt: w.CreatedAt, + UpdatedAt: w.UpdatedAt, + } +} + +func WikisEntityToResponse(ws []*WikiEntity) []*response.WikiResponse { + out := make([]*response.WikiResponse, 0) + if ws == nil { + return out + } + for _, w := range ws { + if w == nil { + continue + } + out = append(out, w.ToResponse()) + } + return out +} diff --git a/internal/repositories/entityRepository.go 
b/internal/repositories/entityRepository.go new file mode 100644 index 0000000..26786f8 --- /dev/null +++ b/internal/repositories/entityRepository.go @@ -0,0 +1,205 @@ +package repositories + +import ( + "context" + "crypto/md5" + "encoding/json" + "fmt" + + "github.com/jackc/pgx/v5/pgtype" + + "history-api/internal/gen/sqlc" + "history-api/internal/models" + "history-api/pkg/cache" + "history-api/pkg/constants" + "history-api/pkg/convert" +) + +type EntityRepository interface { + GetByID(ctx context.Context, id pgtype.UUID) (*models.EntityEntity, error) + GetByIDs(ctx context.Context, ids []string) ([]*models.EntityEntity, error) + Search(ctx context.Context, params sqlc.SearchEntitiesParams) ([]*models.EntityEntity, error) + Create(ctx context.Context, params sqlc.CreateEntityParams) (*models.EntityEntity, error) + Update(ctx context.Context, params sqlc.UpdateEntityParams) (*models.EntityEntity, error) + Delete(ctx context.Context, id pgtype.UUID) error +} + +type entityRepository struct { + q *sqlc.Queries + c cache.Cache +} + +func NewEntityRepository(db sqlc.DBTX, c cache.Cache) EntityRepository { + return &entityRepository{ + q: sqlc.New(db), + c: c, + } +} + +func (r *entityRepository) generateQueryKey(prefix string, params any) string { + b, _ := json.Marshal(params) + hash := fmt.Sprintf("%x", md5.Sum(b)) + return fmt.Sprintf("%s:%s", prefix, hash) +} + +func (r *entityRepository) getByIDsWithFallback(ctx context.Context, ids []string) ([]*models.EntityEntity, error) { + if len(ids) == 0 { + return []*models.EntityEntity{}, nil + } + keys := make([]string, len(ids)) + for i, id := range ids { + keys[i] = fmt.Sprintf("entity:id:%s", id) + } + raws := r.c.MGet(ctx, keys...) 
+ + var entities []*models.EntityEntity + missingToCache := make(map[string]any) + + for i, b := range raws { + if len(b) > 0 { + var e models.EntityEntity + if err := json.Unmarshal(b, &e); err == nil { + entities = append(entities, &e) + } + } else { + pgId := pgtype.UUID{} + err := pgId.Scan(ids[i]) + if err != nil { + continue + } + dbEntity, err := r.GetByID(ctx, pgId) + if err == nil && dbEntity != nil { + entities = append(entities, dbEntity) + missingToCache[keys[i]] = dbEntity + } + } + } + + if len(missingToCache) > 0 { + _ = r.c.MSet(ctx, missingToCache, constants.NormalCacheDuration) + } + + return entities, nil +} + +func (r *entityRepository) GetByIDs(ctx context.Context, ids []string) ([]*models.EntityEntity, error) { + return r.getByIDsWithFallback(ctx, ids) +} + +func (r *entityRepository) GetByID(ctx context.Context, id pgtype.UUID) (*models.EntityEntity, error) { + cacheId := fmt.Sprintf("entity:id:%s", convert.UUIDToString(id)) + var entity models.EntityEntity + err := r.c.Get(ctx, cacheId, &entity) + if err == nil { + _ = r.c.Set(ctx, cacheId, entity, constants.NormalCacheDuration) + return &entity, nil + } + + row, err := r.q.GetEntityById(ctx, id) + if err != nil { + return nil, err + } + + entity = models.EntityEntity{ + ID: convert.UUIDToString(row.ID), + Name: row.Name, + Description: convert.TextToString(row.Description), + ThumbnailUrl: convert.TextToString(row.ThumbnailUrl), + IsDeleted: row.IsDeleted, + CreatedAt: convert.TimeToPtr(row.CreatedAt), + UpdatedAt: convert.TimeToPtr(row.UpdatedAt), + } + _ = r.c.Set(ctx, cacheId, entity, constants.NormalCacheDuration) + + return &entity, nil +} + +func (r *entityRepository) Search(ctx context.Context, params sqlc.SearchEntitiesParams) ([]*models.EntityEntity, error) { + queryKey := r.generateQueryKey("entity:search", params) + var cachedIDs []string + if err := r.c.Get(ctx, queryKey, &cachedIDs); err == nil && len(cachedIDs) > 0 { + return r.getByIDsWithFallback(ctx, cachedIDs) + } + + 
rows, err := r.q.SearchEntities(ctx, params) + if err != nil { + return nil, err + } + var entities []*models.EntityEntity + var ids []string + entityToCache := make(map[string]any) + + for _, row := range rows { + entity := &models.EntityEntity{ + ID: convert.UUIDToString(row.ID), + Name: row.Name, + Description: convert.TextToString(row.Description), + ThumbnailUrl: convert.TextToString(row.ThumbnailUrl), + IsDeleted: row.IsDeleted, + CreatedAt: convert.TimeToPtr(row.CreatedAt), + UpdatedAt: convert.TimeToPtr(row.UpdatedAt), + } + ids = append(ids, entity.ID) + entities = append(entities, entity) + entityToCache[fmt.Sprintf("entity:id:%s", entity.ID)] = entity + } + + if len(entityToCache) > 0 { + _ = r.c.MSet(ctx, entityToCache, constants.NormalCacheDuration) + } + if len(ids) > 0 { + _ = r.c.Set(ctx, queryKey, ids, constants.ListCacheDuration) + } + + return entities, nil +} + +func (r *entityRepository) Create(ctx context.Context, params sqlc.CreateEntityParams) (*models.EntityEntity, error) { + row, err := r.q.CreateEntity(ctx, params) + if err != nil { + return nil, err + } + + entity := models.EntityEntity{ + ID: convert.UUIDToString(row.ID), + Name: row.Name, + Description: convert.TextToString(row.Description), + ThumbnailUrl: convert.TextToString(row.ThumbnailUrl), + IsDeleted: row.IsDeleted, + CreatedAt: convert.TimeToPtr(row.CreatedAt), + UpdatedAt: convert.TimeToPtr(row.UpdatedAt), + } + _ = r.c.Set(ctx, fmt.Sprintf("entity:id:%s", entity.ID), entity, constants.NormalCacheDuration) + + go func() { + _ = r.c.DelByPattern(context.Background(), "entity:search*") + }() + return &entity, nil +} + +func (r *entityRepository) Update(ctx context.Context, params sqlc.UpdateEntityParams) (*models.EntityEntity, error) { + row, err := r.q.UpdateEntity(ctx, params) + if err != nil { + return nil, err + } + entity := models.EntityEntity{ + ID: convert.UUIDToString(row.ID), + Name: row.Name, + Description: convert.TextToString(row.Description), + ThumbnailUrl: 
convert.TextToString(row.ThumbnailUrl), + IsDeleted: row.IsDeleted, + CreatedAt: convert.TimeToPtr(row.CreatedAt), + UpdatedAt: convert.TimeToPtr(row.UpdatedAt), + } + _ = r.c.Set(ctx, fmt.Sprintf("entity:id:%s", entity.ID), entity, constants.NormalCacheDuration) + return &entity, nil +} + +func (r *entityRepository) Delete(ctx context.Context, id pgtype.UUID) error { + err := r.q.DeleteEntity(ctx, id) + if err != nil { + return err + } + _ = r.c.Del(ctx, fmt.Sprintf("entity:id:%s", convert.UUIDToString(id))) + return nil +} diff --git a/internal/repositories/geometryRepository.go b/internal/repositories/geometryRepository.go new file mode 100644 index 0000000..a21bf37 --- /dev/null +++ b/internal/repositories/geometryRepository.go @@ -0,0 +1,266 @@ +package repositories + +import ( + "context" + "crypto/md5" + "encoding/json" + "fmt" + + "github.com/jackc/pgx/v5/pgtype" + + "history-api/internal/dtos/response" + "history-api/internal/gen/sqlc" + "history-api/internal/models" + "history-api/pkg/cache" + "history-api/pkg/constants" + "history-api/pkg/convert" +) + +type GeometryRepository interface { + GetByID(ctx context.Context, id pgtype.UUID) (*models.GeometryEntity, error) + GetByIDs(ctx context.Context, ids []string) ([]*models.GeometryEntity, error) + Search(ctx context.Context, params sqlc.SearchGeometriesParams) ([]*models.GeometryEntity, error) + Create(ctx context.Context, params sqlc.CreateGeometryParams) (*models.GeometryEntity, error) + Update(ctx context.Context, params sqlc.UpdateGeometryParams) (*models.GeometryEntity, error) + Delete(ctx context.Context, id pgtype.UUID) error + CreateEntityGeometries(ctx context.Context, params sqlc.CreateEntityGeometriesParams) error + BulkDeleteEntityGeometriesByEntityId(ctx context.Context, entityId pgtype.UUID) error +} + +type geometryRepository struct { + q *sqlc.Queries + c cache.Cache +} + +func NewGeometryRepository(db sqlc.DBTX, c cache.Cache) GeometryRepository { + return &geometryRepository{ + q: 
sqlc.New(db), + c: c, + } +} + +func (r *geometryRepository) generateQueryKey(prefix string, params any) string { + b, _ := json.Marshal(params) + hash := fmt.Sprintf("%x", md5.Sum(b)) + return fmt.Sprintf("%s:%s", prefix, hash) +} + +func (r *geometryRepository) getByIDsWithFallback(ctx context.Context, ids []string) ([]*models.GeometryEntity, error) { + if len(ids) == 0 { + return []*models.GeometryEntity{}, nil + } + keys := make([]string, len(ids)) + for i, id := range ids { + keys[i] = fmt.Sprintf("geometry:id:%s", id) + } + raws := r.c.MGet(ctx, keys...) + + var geometries []*models.GeometryEntity + missingToCache := make(map[string]any) + + for i, b := range raws { + if len(b) > 0 { + var g models.GeometryEntity + if err := json.Unmarshal(b, &g); err == nil { + geometries = append(geometries, &g) + } + } else { + pgId := pgtype.UUID{} + err := pgId.Scan(ids[i]) + if err != nil { + continue + } + dbGeometry, err := r.GetByID(ctx, pgId) + if err == nil && dbGeometry != nil { + geometries = append(geometries, dbGeometry) + missingToCache[keys[i]] = dbGeometry + } + } + } + + if len(missingToCache) > 0 { + _ = r.c.MSet(ctx, missingToCache, constants.NormalCacheDuration) + } + + return geometries, nil +} + +func (r *geometryRepository) GetByIDs(ctx context.Context, ids []string) ([]*models.GeometryEntity, error) { + return r.getByIDsWithFallback(ctx, ids) +} + +func (r *geometryRepository) GetByID(ctx context.Context, id pgtype.UUID) (*models.GeometryEntity, error) { + cacheId := fmt.Sprintf("geometry:id:%s", convert.UUIDToString(id)) + var geometry models.GeometryEntity + err := r.c.Get(ctx, cacheId, &geometry) + if err == nil { + _ = r.c.Set(ctx, cacheId, geometry, constants.NormalCacheDuration) + return &geometry, nil + } + + row, err := r.q.GetGeometryById(ctx, id) + if err != nil { + return nil, err + } + + geometry = models.GeometryEntity{ + ID: convert.UUIDToString(row.ID), + GeoType: row.GeoType, + DrawGeometry: row.DrawGeometry, + Binding: row.Binding, + 
TimeStart: convert.Int4ToInt32(row.TimeStart), + TimeEnd: convert.Int4ToInt32(row.TimeEnd), + Bbox: &response.Bbox{ + MinLng: row.MinLng, + MinLat: row.MinLat, + MaxLng: row.MaxLng, + MaxLat: row.MaxLat, + }, + IsDeleted: row.IsDeleted, + CreatedAt: convert.TimeToPtr(row.CreatedAt), + UpdatedAt: convert.TimeToPtr(row.UpdatedAt), + } + _ = r.c.Set(ctx, cacheId, geometry, constants.NormalCacheDuration) + + return &geometry, nil +} + +func (r *geometryRepository) Search(ctx context.Context, params sqlc.SearchGeometriesParams) ([]*models.GeometryEntity, error) { + queryKey := r.generateQueryKey("geometry:search", params) + var cachedIDs []string + if err := r.c.Get(ctx, queryKey, &cachedIDs); err == nil && len(cachedIDs) > 0 { + return r.getByIDsWithFallback(ctx, cachedIDs) + } + + rows, err := r.q.SearchGeometries(ctx, params) + if err != nil { + return nil, err + } + var geometries []*models.GeometryEntity + var ids []string + geometryToCache := make(map[string]any) + + for _, row := range rows { + geometry := &models.GeometryEntity{ + ID: convert.UUIDToString(row.ID), + GeoType: row.GeoType, + DrawGeometry: row.DrawGeometry, + Binding: row.Binding, + TimeStart: convert.Int4ToInt32(row.TimeStart), + TimeEnd: convert.Int4ToInt32(row.TimeEnd), + Bbox: &response.Bbox{ + MinLng: row.MinLng, + MinLat: row.MinLat, + MaxLng: row.MaxLng, + MaxLat: row.MaxLat, + }, + IsDeleted: row.IsDeleted, + CreatedAt: convert.TimeToPtr(row.CreatedAt), + UpdatedAt: convert.TimeToPtr(row.UpdatedAt), + } + ids = append(ids, geometry.ID) + geometries = append(geometries, geometry) + geometryToCache[fmt.Sprintf("geometry:id:%s", geometry.ID)] = geometry + } + + if len(geometryToCache) > 0 { + _ = r.c.MSet(ctx, geometryToCache, constants.NormalCacheDuration) + } + if len(ids) > 0 { + _ = r.c.Set(ctx, queryKey, ids, constants.ListCacheDuration) + } + + return geometries, nil +} + +func (r *geometryRepository) Create(ctx context.Context, params sqlc.CreateGeometryParams) (*models.GeometryEntity, 
error) { + row, err := r.q.CreateGeometry(ctx, params) + if err != nil { + return nil, err + } + + geometry := models.GeometryEntity{ + ID: convert.UUIDToString(row.ID), + GeoType: row.GeoType, + DrawGeometry: row.DrawGeometry, + Binding: row.Binding, + TimeStart: convert.Int4ToInt32(row.TimeStart), + TimeEnd: convert.Int4ToInt32(row.TimeEnd), + Bbox: &response.Bbox{ + MinLng: row.MinLng, + MinLat: row.MinLat, + MaxLng: row.MaxLng, + MaxLat: row.MaxLat, + }, + IsDeleted: row.IsDeleted, + CreatedAt: convert.TimeToPtr(row.CreatedAt), + UpdatedAt: convert.TimeToPtr(row.UpdatedAt), + } + _ = r.c.Set(ctx, fmt.Sprintf("geometry:id:%s", geometry.ID), geometry, constants.NormalCacheDuration) + + go func() { + bgCtx := context.Background() + _ = r.c.DelByPattern(bgCtx, "geometry:search*") + }() + return &geometry, nil +} + +func (r *geometryRepository) Update(ctx context.Context, params sqlc.UpdateGeometryParams) (*models.GeometryEntity, error) { + row, err := r.q.UpdateGeometry(ctx, params) + if err != nil { + return nil, err + } + geometry := models.GeometryEntity{ + ID: convert.UUIDToString(row.ID), + GeoType: row.GeoType, + DrawGeometry: row.DrawGeometry, + Binding: row.Binding, + TimeStart: convert.Int4ToInt32(row.TimeStart), + TimeEnd: convert.Int4ToInt32(row.TimeEnd), + Bbox: &response.Bbox{ + MinLng: row.MinLng, + MinLat: row.MinLat, + MaxLng: row.MaxLng, + MaxLat: row.MaxLat, + }, + IsDeleted: row.IsDeleted, + CreatedAt: convert.TimeToPtr(row.CreatedAt), + UpdatedAt: convert.TimeToPtr(row.UpdatedAt), + } + _ = r.c.Set(ctx, fmt.Sprintf("geometry:id:%s", geometry.ID), geometry, constants.NormalCacheDuration) + return &geometry, nil +} + +func (r *geometryRepository) Delete(ctx context.Context, id pgtype.UUID) error { + err := r.q.DeleteGeometry(ctx, id) + if err != nil { + return err + } + _ = r.c.Del(ctx, fmt.Sprintf("geometry:id:%s", convert.UUIDToString(id))) + return nil +} + +func (r *geometryRepository) CreateEntityGeometries(ctx context.Context, params 
sqlc.CreateEntityGeometriesParams) error { + err := r.q.CreateEntityGeometries(ctx, params) + if err != nil { + return err + } + return err +} + +func (r *geometryRepository) BulkDeleteEntityGeometriesByEntityId(ctx context.Context, entityId pgtype.UUID) error { + geometryIDs, err := r.q.BulkDeleteEntityGeometriesByEntityId(ctx, entityId) + if err != nil { + return err + } + if len(geometryIDs) > 0 { + keys := make([]string, len(geometryIDs)) + for i, id := range geometryIDs { + keys[i] = fmt.Sprintf("geometry:id:%s", convert.UUIDToString(id)) + } + go func() { + _ = r.c.Del(context.Background(), keys...) + }() + } + return nil +} diff --git a/internal/repositories/roleRepository.go b/internal/repositories/roleRepository.go index d14b2b6..c50fa26 100644 --- a/internal/repositories/roleRepository.go +++ b/internal/repositories/roleRepository.go @@ -148,8 +148,9 @@ func (r *roleRepository) Create(ctx context.Context, name string) (*models.RoleE if err != nil { return nil, err } - go func() { - bgCtx := context.Background() + + go func() { + bgCtx := context.Background() _ = r.c.DelByPattern(bgCtx, "role:all*") }() diff --git a/internal/repositories/verificationRepository.go b/internal/repositories/verificationRepository.go index 33ee88b..9f69911 100644 --- a/internal/repositories/verificationRepository.go +++ b/internal/repositories/verificationRepository.go @@ -225,8 +225,7 @@ func (v *verificationRepository) BulkVerificationMediaByMediaId(ctx context.Cont } go func() { - bgCtx := context.Background() - _ = v.c.Del(bgCtx, listCacheId...) + _ = v.c.Del(context.Background(), listCacheId...) 
}() return nil diff --git a/internal/repositories/wikiRepository.go b/internal/repositories/wikiRepository.go new file mode 100644 index 0000000..863e12f --- /dev/null +++ b/internal/repositories/wikiRepository.go @@ -0,0 +1,233 @@ +package repositories + +import ( + "context" + "crypto/md5" + "encoding/json" + "fmt" + + "github.com/jackc/pgx/v5/pgtype" + + "history-api/internal/gen/sqlc" + "history-api/internal/models" + "history-api/pkg/cache" + "history-api/pkg/constants" + "history-api/pkg/convert" +) + +type WikiRepository interface { + GetByID(ctx context.Context, id pgtype.UUID) (*models.WikiEntity, error) + GetByIDs(ctx context.Context, ids []string) ([]*models.WikiEntity, error) + Search(ctx context.Context, params sqlc.SearchWikisParams) ([]*models.WikiEntity, error) + Create(ctx context.Context, params sqlc.CreateWikiParams) (*models.WikiEntity, error) + Update(ctx context.Context, params sqlc.UpdateWikiParams) (*models.WikiEntity, error) + Delete(ctx context.Context, id pgtype.UUID) error + CreateEntityWikis(ctx context.Context, params sqlc.CreateEntityWikisParams) error + BulkDeleteEntityWikisByEntityId(ctx context.Context, entityId pgtype.UUID) error +} + +type wikiRepository struct { + q *sqlc.Queries + c cache.Cache +} + +func NewWikiRepository(db sqlc.DBTX, c cache.Cache) WikiRepository { + return &wikiRepository{ + q: sqlc.New(db), + c: c, + } +} + +func (r *wikiRepository) generateQueryKey(prefix string, params any) string { + b, _ := json.Marshal(params) + hash := fmt.Sprintf("%x", md5.Sum(b)) + return fmt.Sprintf("%s:%s", prefix, hash) +} + +func (r *wikiRepository) getByIDsWithFallback(ctx context.Context, ids []string) ([]*models.WikiEntity, error) { + if len(ids) == 0 { + return []*models.WikiEntity{}, nil + } + keys := make([]string, len(ids)) + for i, id := range ids { + keys[i] = fmt.Sprintf("wiki:id:%s", id) + } + raws := r.c.MGet(ctx, keys...) 
+ + var wikis []*models.WikiEntity + missingToCache := make(map[string]any) + + for i, b := range raws { + if len(b) > 0 { + var w models.WikiEntity + if err := json.Unmarshal(b, &w); err == nil { + wikis = append(wikis, &w) + } + } else { + pgId := pgtype.UUID{} + err := pgId.Scan(ids[i]) + if err != nil { + continue + } + dbWiki, err := r.GetByID(ctx, pgId) + if err == nil && dbWiki != nil { + wikis = append(wikis, dbWiki) + missingToCache[keys[i]] = dbWiki + } + } + } + + if len(missingToCache) > 0 { + _ = r.c.MSet(ctx, missingToCache, constants.NormalCacheDuration) + } + + return wikis, nil +} + +func (r *wikiRepository) GetByIDs(ctx context.Context, ids []string) ([]*models.WikiEntity, error) { + return r.getByIDsWithFallback(ctx, ids) +} + +func (r *wikiRepository) GetByID(ctx context.Context, id pgtype.UUID) (*models.WikiEntity, error) { + cacheId := fmt.Sprintf("wiki:id:%s", convert.UUIDToString(id)) + var wiki models.WikiEntity + err := r.c.Get(ctx, cacheId, &wiki) + if err == nil { + _ = r.c.Set(ctx, cacheId, wiki, constants.NormalCacheDuration) + return &wiki, nil + } + + row, err := r.q.GetWikiById(ctx, id) + if err != nil { + return nil, err + } + + wiki = models.WikiEntity{ + ID: convert.UUIDToString(row.ID), + Title: convert.TextToString(row.Title), + Content: convert.TextToString(row.Content), + IsDeleted: row.IsDeleted, + CreatedAt: convert.TimeToPtr(row.CreatedAt), + UpdatedAt: convert.TimeToPtr(row.UpdatedAt), + } + _ = r.c.Set(ctx, cacheId, wiki, constants.NormalCacheDuration) + + return &wiki, nil +} + +func (r *wikiRepository) Search(ctx context.Context, params sqlc.SearchWikisParams) ([]*models.WikiEntity, error) { + queryKey := r.generateQueryKey("wiki:search", params) + var cachedIDs []string + if err := r.c.Get(ctx, queryKey, &cachedIDs); err == nil && len(cachedIDs) > 0 { + return r.getByIDsWithFallback(ctx, cachedIDs) + } + + rows, err := r.q.SearchWikis(ctx, params) + if err != nil { + return nil, err + } + var wikis 
[]*models.WikiEntity + var ids []string + wikiToCache := make(map[string]any) + + for _, row := range rows { + wiki := &models.WikiEntity{ + ID: convert.UUIDToString(row.ID), + Title: convert.TextToString(row.Title), + Content: convert.TextToString(row.Content), + IsDeleted: row.IsDeleted, + CreatedAt: convert.TimeToPtr(row.CreatedAt), + UpdatedAt: convert.TimeToPtr(row.UpdatedAt), + } + ids = append(ids, wiki.ID) + wikis = append(wikis, wiki) + wikiToCache[fmt.Sprintf("wiki:id:%s", wiki.ID)] = wiki + } + + if len(wikiToCache) > 0 { + _ = r.c.MSet(ctx, wikiToCache, constants.NormalCacheDuration) + } + if len(ids) > 0 { + _ = r.c.Set(ctx, queryKey, ids, constants.ListCacheDuration) + } + + return wikis, nil +} + +func (r *wikiRepository) Create(ctx context.Context, params sqlc.CreateWikiParams) (*models.WikiEntity, error) { + row, err := r.q.CreateWiki(ctx, params) + if err != nil { + return nil, err + } + + wiki := models.WikiEntity{ + ID: convert.UUIDToString(row.ID), + Title: convert.TextToString(row.Title), + Content: convert.TextToString(row.Content), + IsDeleted: row.IsDeleted, + CreatedAt: convert.TimeToPtr(row.CreatedAt), + UpdatedAt: convert.TimeToPtr(row.UpdatedAt), + } + _ = r.c.Set(ctx, fmt.Sprintf("wiki:id:%s", wiki.ID), wiki, constants.NormalCacheDuration) + + go func() { + bgCtx := context.Background() + _ = r.c.DelByPattern(bgCtx, "wiki:search*") + }() + + return &wiki, nil +} + +func (r *wikiRepository) Update(ctx context.Context, params sqlc.UpdateWikiParams) (*models.WikiEntity, error) { + row, err := r.q.UpdateWiki(ctx, params) + if err != nil { + return nil, err + } + wiki := models.WikiEntity{ + ID: convert.UUIDToString(row.ID), + Title: convert.TextToString(row.Title), + Content: convert.TextToString(row.Content), + IsDeleted: row.IsDeleted, + CreatedAt: convert.TimeToPtr(row.CreatedAt), + UpdatedAt: convert.TimeToPtr(row.UpdatedAt), + } + _ = r.c.Set(ctx, fmt.Sprintf("wiki:id:%s", wiki.ID), wiki, constants.NormalCacheDuration) + return &wiki, 
nil +} + +func (r *wikiRepository) Delete(ctx context.Context, id pgtype.UUID) error { + err := r.q.DeleteWiki(ctx, id) + if err != nil { + return err + } + _ = r.c.Del(ctx, fmt.Sprintf("wiki:id:%s", convert.UUIDToString(id))) + go func() { + _ = r.c.DelByPattern(context.Background(), "wiki:search*") + }() + return nil +} + +func (r *wikiRepository) CreateEntityWikis(ctx context.Context, params sqlc.CreateEntityWikisParams) error { + err := r.q.CreateEntityWikis(ctx, params) + if err != nil { + return err + } + return nil +} + +func (r *wikiRepository) BulkDeleteEntityWikisByEntityId(ctx context.Context, entityId pgtype.UUID) error { + wikiIDs, err := r.q.BulkDeleteEntityWikisByEntityId(ctx, entityId) + if err != nil { + return err + } + if len(wikiIDs) > 0 { + keys := make([]string, len(wikiIDs)) + for i, id := range wikiIDs { + keys[i] = fmt.Sprintf("wiki:id:%s", convert.UUIDToString(id)) + } + go func() { + _ = r.c.Del(context.Background(), keys...) + }() + } + return nil +} diff --git a/internal/routes/entityRoute.go b/internal/routes/entityRoute.go new file mode 100644 index 0000000..50261bb --- /dev/null +++ b/internal/routes/entityRoute.go @@ -0,0 +1,13 @@ +package routes + +import ( + "history-api/internal/controllers" + + "github.com/gofiber/fiber/v3" +) + +func SetupEntityRoutes(router fiber.Router, entityController *controllers.EntityController) { + entity := router.Group("/entities") + entity.Get("/", entityController.SearchEntities) + entity.Get("/:id", entityController.GetEntityById) +} diff --git a/internal/routes/geometryRoute.go b/internal/routes/geometryRoute.go new file mode 100644 index 0000000..c0fb1b8 --- /dev/null +++ b/internal/routes/geometryRoute.go @@ -0,0 +1,13 @@ +package routes + +import ( + "history-api/internal/controllers" + + "github.com/gofiber/fiber/v3" +) + +func SetupGeometryRoutes(router fiber.Router, geometryController *controllers.GeometryController) { + geometry := router.Group("/geometries") + geometry.Get("/", 
geometryController.SearchGeometries) + geometry.Get("/:id", geometryController.GetGeometryById) +} diff --git a/internal/routes/wikiRoute.go b/internal/routes/wikiRoute.go new file mode 100644 index 0000000..051c850 --- /dev/null +++ b/internal/routes/wikiRoute.go @@ -0,0 +1,13 @@ +package routes + +import ( + "history-api/internal/controllers" + + "github.com/gofiber/fiber/v3" +) + +func SetupWikiRoutes(router fiber.Router, wikiController *controllers.WikiController) { + wiki := router.Group("/wikis") + wiki.Get("/", wikiController.SearchWikis) + wiki.Get("/:id", wikiController.GetWikiById) +} diff --git a/internal/services/entityService.go b/internal/services/entityService.go new file mode 100644 index 0000000..46498cb --- /dev/null +++ b/internal/services/entityService.go @@ -0,0 +1,68 @@ +package services + +import ( + "context" + "history-api/internal/dtos/request" + "history-api/internal/dtos/response" + "history-api/internal/gen/sqlc" + "history-api/internal/models" + "history-api/internal/repositories" + "history-api/pkg/convert" + + "github.com/gofiber/fiber/v3" +) + +type EntityService interface { + GetEntityByID(ctx context.Context, id string) (*response.EntityResponse, error) + SearchEntities(ctx context.Context, req *request.SearchEntityDto) ([]*response.EntityResponse, error) +} + +type entityService struct { + entityRepo repositories.EntityRepository +} + +func NewEntityService(entityRepo repositories.EntityRepository) EntityService { + return &entityService{ + entityRepo: entityRepo, + } +} + +func (s *entityService) GetEntityByID(ctx context.Context, id string) (*response.EntityResponse, error) { + entityId, err := convert.StringToUUID(id) + if err != nil { + return nil, fiber.NewError(fiber.StatusInternalServerError, err.Error()) + } + entity, err := s.entityRepo.GetByID(ctx, entityId) + if err != nil { + return nil, fiber.NewError(fiber.StatusNotFound, "Entity not found") + } + + return entity.ToResponse(), nil +} + +func (s *entityService) 
SearchEntities(ctx context.Context, req *request.SearchEntityDto) ([]*response.EntityResponse, error) { + limit := int32(25) + if req.Limit > 0 { + limit = int32(req.Limit) + } + + params := sqlc.SearchEntitiesParams{ + LimitCount: limit, + } + if req.Cursor != "" { + cursor, err := convert.StringToUUID(req.Cursor) + if err == nil { + params.CursorID = cursor + } + } + if req.Name != "" { + params.Name = req.Name + } + + entities, err := s.entityRepo.Search(ctx, params) + if err != nil { + return nil, fiber.NewError(fiber.StatusInternalServerError, err.Error()) + } + + return models.EntitiesEntityToResponse(entities), nil +} diff --git a/internal/services/geometryService.go b/internal/services/geometryService.go new file mode 100644 index 0000000..0d1c3e7 --- /dev/null +++ b/internal/services/geometryService.go @@ -0,0 +1,79 @@ +package services + +import ( + "context" + "history-api/internal/dtos/request" + "history-api/internal/dtos/response" + "history-api/internal/gen/sqlc" + "history-api/internal/models" + "history-api/internal/repositories" + "history-api/pkg/convert" + + "github.com/gofiber/fiber/v3" + "github.com/jackc/pgx/v5/pgtype" +) + +type GeometryService interface { + GetGeometryByID(ctx context.Context, id string) (*response.GeometryResponse, error) + SearchGeometries(ctx context.Context, req *request.SearchGeometryDto) ([]*response.GeometryResponse, error) +} + +type geometryService struct { + geometryRepo repositories.GeometryRepository +} + +func NewGeometryService(geometryRepo repositories.GeometryRepository) GeometryService { + return &geometryService{ + geometryRepo: geometryRepo, + } +} + +func (s *geometryService) GetGeometryByID(ctx context.Context, id string) (*response.GeometryResponse, error) { + geometryId, err := convert.StringToUUID(id) + if err != nil { + return nil, fiber.NewError(fiber.StatusInternalServerError, err.Error()) + } + geometry, err := s.geometryRepo.GetByID(ctx, geometryId) + if err != nil { + return nil, 
fiber.NewError(fiber.StatusNotFound, "Geometry not found") + } + + return geometry.ToResponse(), nil +} + +func (s *geometryService) SearchGeometries(ctx context.Context, req *request.SearchGeometryDto) ([]*response.GeometryResponse, error) { + params := sqlc.SearchGeometriesParams{} + + if req.MinLng != nil && req.MinLat != nil && req.MaxLng != nil && req.MaxLat != nil { + if *req.MaxLng < *req.MinLng || *req.MaxLat < *req.MinLat { + return nil, fiber.NewError(fiber.StatusBadRequest, "Invalid bounding box") + } + params.SearchMinLng = pgtype.Float8{Float64: *req.MinLng, Valid: true} + params.SearchMinLat = pgtype.Float8{Float64: *req.MinLat, Valid: true} + params.SearchMaxLng = pgtype.Float8{Float64: *req.MaxLng, Valid: true} + params.SearchMaxLat = pgtype.Float8{Float64: *req.MaxLat, Valid: true} + } else { + return nil, fiber.NewError(fiber.StatusBadRequest, "Must provid Bounding box!") + } + + if req.TimePoint != nil { + if *req.TimePoint < 0 { + return nil, fiber.NewError(fiber.StatusBadRequest, "Time point must be non-negative!") + } + params.TimePoint = pgtype.Int4{Int32: *req.TimePoint, Valid: true} + } + + if req.EntityID != nil { + entityId, err := convert.StringToUUID(*req.EntityID) + if err == nil { + params.EntityID = entityId + } + } + + geometries, err := s.geometryRepo.Search(ctx, params) + if err != nil { + return nil, fiber.NewError(fiber.StatusInternalServerError, err.Error()) + } + + return models.GeometriesEntityToResponse(geometries), nil +} diff --git a/internal/services/userService.go b/internal/services/userService.go index 0fee487..c125d0f 100644 --- a/internal/services/userService.go +++ b/internal/services/userService.go @@ -64,8 +64,16 @@ func (u *userService) ChangePassword(ctx context.Context, userId string, dto *re return fiber.NewError(fiber.StatusNotFound, "User not found") } - if err := bcrypt.CompareHashAndPassword([]byte(user.PasswordHash), []byte(dto.OldPassword)); err != nil { - return fiber.NewError(fiber.StatusUnauthorized, 
"Invalid identity or password!") + if user.PasswordHash != "" { + if dto.OldPassword == "" { + return fiber.NewError(fiber.StatusBadRequest, "Old password required") + } + + if err := bcrypt.CompareHashAndPassword([]byte(user.PasswordHash), []byte(dto.OldPassword)); err != nil { + return fiber.NewError(fiber.StatusUnauthorized, "Invalid password!") + } + } else if user.PasswordHash == "" && dto.OldPassword != "" { + return fiber.NewError(fiber.StatusBadRequest, "Invalid request") } hashPassword, err := bcrypt.GenerateFromPassword([]byte(dto.NewPassword), bcrypt.DefaultCost) diff --git a/internal/services/wikiService.go b/internal/services/wikiService.go new file mode 100644 index 0000000..fc49f17 --- /dev/null +++ b/internal/services/wikiService.go @@ -0,0 +1,74 @@ +package services + +import ( + "context" + "history-api/internal/dtos/request" + "history-api/internal/dtos/response" + "history-api/internal/gen/sqlc" + "history-api/internal/models" + "history-api/internal/repositories" + "history-api/pkg/convert" + + "github.com/gofiber/fiber/v3" +) + +type WikiService interface { + GetWikiByID(ctx context.Context, id string) (*response.WikiResponse, error) + SearchWikis(ctx context.Context, req *request.SearchWikiDto) ([]*response.WikiResponse, error) +} + +type wikiService struct { + wikiRepo repositories.WikiRepository +} + +func NewWikiService(wikiRepo repositories.WikiRepository) WikiService { + return &wikiService{ + wikiRepo: wikiRepo, + } +} + +func (s *wikiService) GetWikiByID(ctx context.Context, id string) (*response.WikiResponse, error) { + wikiId, err := convert.StringToUUID(id) + if err != nil { + return nil, fiber.NewError(fiber.StatusInternalServerError, err.Error()) + } + wiki, err := s.wikiRepo.GetByID(ctx, wikiId) + if err != nil { + return nil, fiber.NewError(fiber.StatusNotFound, "Wiki not found") + } + + return wiki.ToResponse(), nil +} + +func (s *wikiService) SearchWikis(ctx context.Context, req *request.SearchWikiDto) 
([]*response.WikiResponse, error) { + limit := int32(25) + if req.Limit > 0 { + limit = int32(req.Limit) + } + + params := sqlc.SearchWikisParams{ + LimitCount: limit, + } + if req.Cursor != "" { + cursor, err := convert.StringToUUID(req.Cursor) + if err == nil { + params.CursorID = cursor + } + } + if req.Title != "" { + params.Title = req.Title + } + if req.EntityID != "" { + entityId, err := convert.StringToUUID(req.EntityID) + if err == nil { + params.EntityID = entityId + } + } + + wikis, err := s.wikiRepo.Search(ctx, params) + if err != nil { + return nil, fiber.NewError(fiber.StatusInternalServerError, err.Error()) + } + + return models.WikisEntityToResponse(wikis), nil +} diff --git a/pkg/convert/convert.go b/pkg/convert/convert.go index 20d25eb..2c53763 100644 --- a/pkg/convert/convert.go +++ b/pkg/convert/convert.go @@ -53,3 +53,24 @@ func PtrToText(s *string) pgtype.Text { Valid: true, } } + +func TextToPtr(v pgtype.Text) *string { + if !v.Valid { + return nil + } + return &v.String +} + +func Int4ToPtr(v pgtype.Int4) *int32 { + if !v.Valid { + return nil + } + return &v.Int32 +} + +func Int4ToInt32(v pgtype.Int4) int32 { + if v.Valid { + return v.Int32 + } + return 0 +} diff --git a/pkg/validator/validator.go b/pkg/validator/validator.go index 20bb804..44745ec 100644 --- a/pkg/validator/validator.go +++ b/pkg/validator/validator.go @@ -32,6 +32,33 @@ func init() { } return isImageURL(val) }) + + validate.RegisterValidation("optional_url", func(fl validator.FieldLevel) bool { + val := fl.Field().String() + if val == "" { + return true + } + return isValidURL(val) + }) +} + + + +func isValidURL(s string) bool { + u, err := url.ParseRequestURI(s) + if err != nil { + return false + } + + if u.Scheme != "http" && u.Scheme != "https" { + return false + } + + if u.Host == "" || !strings.Contains(u.Host, ".") { + return false + } + + return true } func isImageURL(u string) bool { @@ -116,4 +143,4 @@ func ValidateBodyDto(c fiber.Ctx, dto any) []*ErrorResponse { 
} return nil -} \ No newline at end of file +}