diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 00000000..e69de29b diff --git a/.sqlx/query-09ab64836127f6edb22a5deaa33ab77d9e8155386a5be60e01b3ad7db8541a27.json b/.sqlx/query-09ab64836127f6edb22a5deaa33ab77d9e8155386a5be60e01b3ad7db8541a27.json new file mode 100644 index 00000000..9288972f --- /dev/null +++ b/.sqlx/query-09ab64836127f6edb22a5deaa33ab77d9e8155386a5be60e01b3ad7db8541a27.json @@ -0,0 +1,25 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO loader_field_enum_values (enum_id, value, created, metadata)\n VALUES ($1, $2, COALESCE($3, timezone('utc', now())), $4)\n ON CONFLICT (enum_id, value) DO UPDATE\n SET metadata = COALESCE($4, loader_field_enum_values.metadata),\n created = COALESCE($3, loader_field_enum_values.created)\n RETURNING id\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + } + ], + "parameters": { + "Left": [ + "Int4", + "Varchar", + "Timestamp", + "Jsonb" + ] + }, + "nullable": [ + false + ] + }, + "hash": "09ab64836127f6edb22a5deaa33ab77d9e8155386a5be60e01b3ad7db8541a27" +} diff --git a/.sqlx/query-0b52dc08a903a9c82234f6e1a2c59fdb631955011988910f033dd740b6a3b79b.json b/.sqlx/query-0b52dc08a903a9c82234f6e1a2c59fdb631955011988910f033dd740b6a3b79b.json new file mode 100644 index 00000000..56a912c3 --- /dev/null +++ b/.sqlx/query-0b52dc08a903a9c82234f6e1a2c59fdb631955011988910f033dd740b6a3b79b.json @@ -0,0 +1,40 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT lfe.id, lfe.enum_name, lfe.ordering, lfe.hidable \n FROM loader_field_enums lfe\n WHERE lfe.enum_name = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "enum_name", + "type_info": "Varchar" + }, + { + "ordinal": 2, + "name": "ordering", + "type_info": "Int4" + }, + { + "ordinal": 3, + "name": "hidable", + "type_info": "Bool" + } + ], + "parameters": { + "Left": [ + "Text" + ] + }, + "nullable": [ + false, + false, + 
true, + false + ] + }, + "hash": "0b52dc08a903a9c82234f6e1a2c59fdb631955011988910f033dd740b6a3b79b" +} diff --git a/.sqlx/query-177716d2b04fd2a2b63b2e14c8ffdfa554d84254b14053496c118dec24bf5049.json b/.sqlx/query-177716d2b04fd2a2b63b2e14c8ffdfa554d84254b14053496c118dec24bf5049.json deleted file mode 100644 index 3752ef7f..00000000 --- a/.sqlx/query-177716d2b04fd2a2b63b2e14c8ffdfa554d84254b14053496c118dec24bf5049.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n UPDATE mods\n SET game_versions = (\n SELECT COALESCE(ARRAY_AGG(DISTINCT gv.version) filter (where gv.version is not null), array[]::varchar[])\n FROM versions v\n INNER JOIN game_versions_versions gvv ON v.id = gvv.joining_version_id\n INNER JOIN game_versions gv on gvv.game_version_id = gv.id\n WHERE v.mod_id = mods.id AND v.status != ALL($2)\n )\n WHERE id = $1\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int8", - "TextArray" - ] - }, - "nullable": [] - }, - "hash": "177716d2b04fd2a2b63b2e14c8ffdfa554d84254b14053496c118dec24bf5049" -} diff --git a/.sqlx/query-1d6f3e926fc4a27c5af172f672b7f825f9f5fe2d538b06337ef182ab1a553398.json b/.sqlx/query-1d6f3e926fc4a27c5af172f672b7f825f9f5fe2d538b06337ef182ab1a553398.json deleted file mode 100644 index 5b3eb4aa..00000000 --- a/.sqlx/query-1d6f3e926fc4a27c5af172f672b7f825f9f5fe2d538b06337ef182ab1a553398.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT name FROM project_types pt\n INNER JOIN mods ON mods.project_type = pt.id\n WHERE mods.id = $1\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "name", - "type_info": "Varchar" - } - ], - "parameters": { - "Left": [ - "Int8" - ] - }, - "nullable": [ - false - ] - }, - "hash": "1d6f3e926fc4a27c5af172f672b7f825f9f5fe2d538b06337ef182ab1a553398" -} diff --git a/.sqlx/query-1db6be78a74ff04c52ee105e0df30acf5bbf18f1de328980bb7f3da7f5f6569e.json 
b/.sqlx/query-1db6be78a74ff04c52ee105e0df30acf5bbf18f1de328980bb7f3da7f5f6569e.json deleted file mode 100644 index 7b6e7925..00000000 --- a/.sqlx/query-1db6be78a74ff04c52ee105e0df30acf5bbf18f1de328980bb7f3da7f5f6569e.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT id FROM side_types\n WHERE name = $1\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "id", - "type_info": "Int4" - } - ], - "parameters": { - "Left": [ - "Text" - ] - }, - "nullable": [ - false - ] - }, - "hash": "1db6be78a74ff04c52ee105e0df30acf5bbf18f1de328980bb7f3da7f5f6569e" -} diff --git a/.sqlx/query-1e735a003ce305624ce8bbf181c99e41fbe8fcd836e926daf3e73aa3bb5552a6.json b/.sqlx/query-1e735a003ce305624ce8bbf181c99e41fbe8fcd836e926daf3e73aa3bb5552a6.json deleted file mode 100644 index bdf80942..00000000 --- a/.sqlx/query-1e735a003ce305624ce8bbf181c99e41fbe8fcd836e926daf3e73aa3bb5552a6.json +++ /dev/null @@ -1,124 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT v.id id, v.mod_id mod_id, v.author_id author_id, v.name version_name, v.version_number version_number,\n v.changelog changelog, v.date_published date_published, v.downloads downloads,\n v.version_type version_type, v.featured featured, v.status status, v.requested_status requested_status, v.ordering ordering,\n JSONB_AGG(DISTINCT jsonb_build_object('version', gv.version, 'created', gv.created)) filter (where gv.version is not null) game_versions,\n ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders,\n JSONB_AGG(DISTINCT jsonb_build_object('id', f.id, 'url', f.url, 'filename', f.filename, 'primary', f.is_primary, 'size', f.size, 'file_type', f.file_type)) filter (where f.id is not null) files,\n JSONB_AGG(DISTINCT jsonb_build_object('algorithm', h.algorithm, 'hash', encode(h.hash, 'escape'), 'file_id', h.file_id)) filter (where h.hash is not null) hashes,\n JSONB_AGG(DISTINCT jsonb_build_object('project_id', d.mod_dependency_id, 'version_id', 
d.dependency_id, 'dependency_type', d.dependency_type,'file_name', dependency_file_name)) filter (where d.dependency_type is not null) dependencies\n FROM versions v\n LEFT OUTER JOIN game_versions_versions gvv on v.id = gvv.joining_version_id\n LEFT OUTER JOIN game_versions gv on gvv.game_version_id = gv.id\n LEFT OUTER JOIN loaders_versions lv on v.id = lv.version_id\n LEFT OUTER JOIN loaders l on lv.loader_id = l.id\n LEFT OUTER JOIN files f on v.id = f.version_id\n LEFT OUTER JOIN hashes h on f.id = h.file_id\n LEFT OUTER JOIN dependencies d on v.id = d.dependent_id\n WHERE v.id = ANY($1)\n GROUP BY v.id\n ORDER BY v.ordering ASC NULLS LAST, v.date_published ASC;\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "id", - "type_info": "Int8" - }, - { - "ordinal": 1, - "name": "mod_id", - "type_info": "Int8" - }, - { - "ordinal": 2, - "name": "author_id", - "type_info": "Int8" - }, - { - "ordinal": 3, - "name": "version_name", - "type_info": "Varchar" - }, - { - "ordinal": 4, - "name": "version_number", - "type_info": "Varchar" - }, - { - "ordinal": 5, - "name": "changelog", - "type_info": "Varchar" - }, - { - "ordinal": 6, - "name": "date_published", - "type_info": "Timestamptz" - }, - { - "ordinal": 7, - "name": "downloads", - "type_info": "Int4" - }, - { - "ordinal": 8, - "name": "version_type", - "type_info": "Varchar" - }, - { - "ordinal": 9, - "name": "featured", - "type_info": "Bool" - }, - { - "ordinal": 10, - "name": "status", - "type_info": "Varchar" - }, - { - "ordinal": 11, - "name": "requested_status", - "type_info": "Varchar" - }, - { - "ordinal": 12, - "name": "ordering", - "type_info": "Int4" - }, - { - "ordinal": 13, - "name": "game_versions", - "type_info": "Jsonb" - }, - { - "ordinal": 14, - "name": "loaders", - "type_info": "VarcharArray" - }, - { - "ordinal": 15, - "name": "files", - "type_info": "Jsonb" - }, - { - "ordinal": 16, - "name": "hashes", - "type_info": "Jsonb" - }, - { - "ordinal": 17, - "name": "dependencies", - 
"type_info": "Jsonb" - } - ], - "parameters": { - "Left": [ - "Int8Array" - ] - }, - "nullable": [ - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - true, - true, - null, - null, - null, - null, - null - ] - }, - "hash": "1e735a003ce305624ce8bbf181c99e41fbe8fcd836e926daf3e73aa3bb5552a6" -} diff --git a/.sqlx/query-21ef50f46b7b3e62b91e7d067c1cb33806e14c33bb76d63c2711f822c44261f6.json b/.sqlx/query-21ef50f46b7b3e62b91e7d067c1cb33806e14c33bb76d63c2711f822c44261f6.json deleted file mode 100644 index 2212ad16..00000000 --- a/.sqlx/query-21ef50f46b7b3e62b91e7d067c1cb33806e14c33bb76d63c2711f822c44261f6.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT name FROM project_types pt\n INNER JOIN mods ON mods.project_type = pt.id\n WHERE mods.id = $1\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "name", - "type_info": "Varchar" - } - ], - "parameters": { - "Left": [ - "Int8" - ] - }, - "nullable": [ - false - ] - }, - "hash": "21ef50f46b7b3e62b91e7d067c1cb33806e14c33bb76d63c2711f822c44261f6" -} diff --git a/.sqlx/query-3afbc93a8945e7ae07e39a88752f400c06f9c8a8132fd7a05dcc55c6eab5d2e7.json b/.sqlx/query-3afbc93a8945e7ae07e39a88752f400c06f9c8a8132fd7a05dcc55c6eab5d2e7.json new file mode 100644 index 00000000..0ecfb803 --- /dev/null +++ b/.sqlx/query-3afbc93a8945e7ae07e39a88752f400c06f9c8a8132fd7a05dcc55c6eab5d2e7.json @@ -0,0 +1,126 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT m.id id, m.title title, m.description description, m.color color,\n m.icon_url icon_url, m.slug slug,\n pt.name project_type, u.username username, u.avatar_url avatar_url,\n ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null) categories,\n ARRAY_AGG(DISTINCT lo.loader) filter (where lo.loader is not null) loaders,\n ARRAY_AGG(DISTINCT pt.name) filter (where pt.name is not null) project_types,\n ARRAY_AGG(DISTINCT g.name) filter (where g.name is not null) games,\n 
ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null and mg.featured is false) gallery,\n ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null and mg.featured is true) featured_gallery,\n JSONB_AGG(\n DISTINCT jsonb_build_object(\n 'field_id', vf.field_id,\n 'int_value', vf.int_value,\n 'enum_value', vf.enum_value,\n 'string_value', vf.string_value\n )\n ) filter (where vf.field_id is not null) version_fields,\n JSONB_AGG(\n DISTINCT jsonb_build_object(\n 'lf_id', lf.id,\n 'loader_name', lo.loader,\n 'field', lf.field,\n 'field_type', lf.field_type,\n 'enum_type', lf.enum_type,\n 'min_val', lf.min_val,\n 'max_val', lf.max_val,\n 'optional', lf.optional\n )\n ) filter (where lf.id is not null) loader_fields,\n JSONB_AGG(\n DISTINCT jsonb_build_object(\n 'id', lfev.id,\n 'enum_id', lfev.enum_id,\n 'value', lfev.value,\n 'ordering', lfev.ordering,\n 'created', lfev.created,\n 'metadata', lfev.metadata\n ) \n ) filter (where lfev.id is not null) loader_field_enum_values\n FROM mods m\n LEFT OUTER JOIN mods_categories mc ON joining_mod_id = m.id AND mc.is_additional = FALSE\n LEFT OUTER JOIN categories c ON mc.joining_category_id = c.id\n LEFT OUTER JOIN versions v ON v.mod_id = m.id AND v.status != ALL($2)\n LEFT OUTER JOIN loaders_versions lv ON lv.version_id = v.id\n LEFT OUTER JOIN loaders lo ON lo.id = lv.loader_id\n LEFT JOIN loaders_project_types lpt ON lpt.joining_loader_id = lo.id\n LEFT JOIN project_types pt ON pt.id = lpt.joining_project_type_id\n LEFT JOIN loaders_project_types_games lptg ON lptg.loader_id = lo.id AND lptg.project_type_id = pt.id\n LEFT JOIN games g ON lptg.game_id = g.id\n LEFT OUTER JOIN mods_gallery mg ON mg.mod_id = m.id\n INNER JOIN team_members tm ON tm.team_id = m.team_id AND tm.role = $3 AND tm.accepted = TRUE\n INNER JOIN users u ON tm.user_id = u.id\n LEFT OUTER JOIN version_fields vf on v.id = vf.version_id\n LEFT OUTER JOIN loader_fields lf on vf.field_id = lf.id\n LEFT OUTER JOIN 
loader_field_enums lfe on lf.enum_type = lfe.id\n LEFT OUTER JOIN loader_field_enum_values lfev on lfev.enum_id = lfe.id\n WHERE m.id = $1\n GROUP BY m.id, pt.id, u.id;\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "title", + "type_info": "Varchar" + }, + { + "ordinal": 2, + "name": "description", + "type_info": "Varchar" + }, + { + "ordinal": 3, + "name": "color", + "type_info": "Int4" + }, + { + "ordinal": 4, + "name": "icon_url", + "type_info": "Varchar" + }, + { + "ordinal": 5, + "name": "slug", + "type_info": "Varchar" + }, + { + "ordinal": 6, + "name": "project_type", + "type_info": "Varchar" + }, + { + "ordinal": 7, + "name": "username", + "type_info": "Varchar" + }, + { + "ordinal": 8, + "name": "avatar_url", + "type_info": "Varchar" + }, + { + "ordinal": 9, + "name": "categories", + "type_info": "VarcharArray" + }, + { + "ordinal": 10, + "name": "loaders", + "type_info": "VarcharArray" + }, + { + "ordinal": 11, + "name": "project_types", + "type_info": "VarcharArray" + }, + { + "ordinal": 12, + "name": "games", + "type_info": "VarcharArray" + }, + { + "ordinal": 13, + "name": "gallery", + "type_info": "VarcharArray" + }, + { + "ordinal": 14, + "name": "featured_gallery", + "type_info": "VarcharArray" + }, + { + "ordinal": 15, + "name": "version_fields", + "type_info": "Jsonb" + }, + { + "ordinal": 16, + "name": "loader_fields", + "type_info": "Jsonb" + }, + { + "ordinal": 17, + "name": "loader_field_enum_values", + "type_info": "Jsonb" + } + ], + "parameters": { + "Left": [ + "Int8", + "TextArray", + "Text" + ] + }, + "nullable": [ + false, + false, + false, + true, + true, + true, + false, + false, + true, + null, + null, + null, + null, + null, + null, + null, + null, + null + ] + }, + "hash": "3afbc93a8945e7ae07e39a88752f400c06f9c8a8132fd7a05dcc55c6eab5d2e7" +} diff --git a/.sqlx/query-3d384766d179f804c17e03d1917da65cc6043f88971ddc3fd23ba3be00717dfc.json 
b/.sqlx/query-3d384766d179f804c17e03d1917da65cc6043f88971ddc3fd23ba3be00717dfc.json deleted file mode 100644 index 8e6b0322..00000000 --- a/.sqlx/query-3d384766d179f804c17e03d1917da65cc6043f88971ddc3fd23ba3be00717dfc.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT gv.id id, gv.version version_, gv.type type_, gv.created created, gv.major FROM game_versions gv\n ORDER BY created DESC\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "id", - "type_info": "Int4" - }, - { - "ordinal": 1, - "name": "version_", - "type_info": "Varchar" - }, - { - "ordinal": 2, - "name": "type_", - "type_info": "Varchar" - }, - { - "ordinal": 3, - "name": "created", - "type_info": "Timestamptz" - }, - { - "ordinal": 4, - "name": "major", - "type_info": "Bool" - } - ], - "parameters": { - "Left": [] - }, - "nullable": [ - false, - false, - false, - false, - false - ] - }, - "hash": "3d384766d179f804c17e03d1917da65cc6043f88971ddc3fd23ba3be00717dfc" -} diff --git a/.sqlx/query-4514723bdc1eb8a781215075bec51af1cc6fabe88a469338d5a59533eabf80c5.json b/.sqlx/query-4514723bdc1eb8a781215075bec51af1cc6fabe88a469338d5a59533eabf80c5.json deleted file mode 100644 index 18ddca84..00000000 --- a/.sqlx/query-4514723bdc1eb8a781215075bec51af1cc6fabe88a469338d5a59533eabf80c5.json +++ /dev/null @@ -1,168 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT m.id id, m.project_type project_type, m.title title, m.description description, m.downloads downloads, m.follows follows,\n m.icon_url icon_url, m.published published, m.approved approved, m.updated updated,\n m.team_id team_id, m.license license, m.slug slug, m.status status_name, m.color color,\n cs.name client_side_type, ss.name server_side_type, pt.name project_type_name, u.username username,\n ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null and mc.is_additional is false) categories,\n ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null and 
mc.is_additional is true) additional_categories,\n ARRAY_AGG(DISTINCT lo.loader) filter (where lo.loader is not null) loaders,\n ARRAY_AGG(DISTINCT gv.version) filter (where gv.version is not null) versions,\n ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null and mg.featured is false) gallery,\n ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null and mg.featured is true) featured_gallery\n FROM mods m\n LEFT OUTER JOIN mods_categories mc ON joining_mod_id = m.id\n LEFT OUTER JOIN categories c ON mc.joining_category_id = c.id\n LEFT OUTER JOIN versions v ON v.mod_id = m.id AND v.status != ALL($1)\n LEFT OUTER JOIN game_versions_versions gvv ON gvv.joining_version_id = v.id\n LEFT OUTER JOIN game_versions gv ON gvv.game_version_id = gv.id\n LEFT OUTER JOIN loaders_versions lv ON lv.version_id = v.id\n LEFT OUTER JOIN loaders lo ON lo.id = lv.loader_id\n LEFT OUTER JOIN mods_gallery mg ON mg.mod_id = m.id\n INNER JOIN project_types pt ON pt.id = m.project_type\n INNER JOIN side_types cs ON m.client_side = cs.id\n INNER JOIN side_types ss ON m.server_side = ss.id\n INNER JOIN team_members tm ON tm.team_id = m.team_id AND tm.role = $3 AND tm.accepted = TRUE\n INNER JOIN users u ON tm.user_id = u.id\n WHERE m.status = ANY($2)\n GROUP BY m.id, cs.id, ss.id, pt.id, u.id;\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "id", - "type_info": "Int8" - }, - { - "ordinal": 1, - "name": "project_type", - "type_info": "Int4" - }, - { - "ordinal": 2, - "name": "title", - "type_info": "Varchar" - }, - { - "ordinal": 3, - "name": "description", - "type_info": "Varchar" - }, - { - "ordinal": 4, - "name": "downloads", - "type_info": "Int4" - }, - { - "ordinal": 5, - "name": "follows", - "type_info": "Int4" - }, - { - "ordinal": 6, - "name": "icon_url", - "type_info": "Varchar" - }, - { - "ordinal": 7, - "name": "published", - "type_info": "Timestamptz" - }, - { - "ordinal": 8, - "name": "approved", - "type_info": "Timestamptz" 
- }, - { - "ordinal": 9, - "name": "updated", - "type_info": "Timestamptz" - }, - { - "ordinal": 10, - "name": "team_id", - "type_info": "Int8" - }, - { - "ordinal": 11, - "name": "license", - "type_info": "Varchar" - }, - { - "ordinal": 12, - "name": "slug", - "type_info": "Varchar" - }, - { - "ordinal": 13, - "name": "status_name", - "type_info": "Varchar" - }, - { - "ordinal": 14, - "name": "color", - "type_info": "Int4" - }, - { - "ordinal": 15, - "name": "client_side_type", - "type_info": "Varchar" - }, - { - "ordinal": 16, - "name": "server_side_type", - "type_info": "Varchar" - }, - { - "ordinal": 17, - "name": "project_type_name", - "type_info": "Varchar" - }, - { - "ordinal": 18, - "name": "username", - "type_info": "Varchar" - }, - { - "ordinal": 19, - "name": "categories", - "type_info": "VarcharArray" - }, - { - "ordinal": 20, - "name": "additional_categories", - "type_info": "VarcharArray" - }, - { - "ordinal": 21, - "name": "loaders", - "type_info": "VarcharArray" - }, - { - "ordinal": 22, - "name": "versions", - "type_info": "VarcharArray" - }, - { - "ordinal": 23, - "name": "gallery", - "type_info": "VarcharArray" - }, - { - "ordinal": 24, - "name": "featured_gallery", - "type_info": "VarcharArray" - } - ], - "parameters": { - "Left": [ - "TextArray", - "TextArray", - "Text" - ] - }, - "nullable": [ - false, - false, - false, - false, - false, - false, - true, - false, - true, - false, - false, - false, - true, - false, - true, - false, - false, - false, - false, - null, - null, - null, - null, - null, - null - ] - }, - "hash": "4514723bdc1eb8a781215075bec51af1cc6fabe88a469338d5a59533eabf80c5" -} diff --git a/.sqlx/query-458630d00e46183c65f95729d2647d3635f629cfb892fc8ac1964d8ecc269576.json b/.sqlx/query-458630d00e46183c65f95729d2647d3635f629cfb892fc8ac1964d8ecc269576.json new file mode 100644 index 00000000..3b9f7c91 --- /dev/null +++ b/.sqlx/query-458630d00e46183c65f95729d2647d3635f629cfb892fc8ac1964d8ecc269576.json @@ -0,0 +1,52 @@ +{ + "db_name": 
"PostgreSQL", + "query": "\n SELECT id, enum_id, value, ordering, metadata, created FROM loader_field_enum_values\n WHERE enum_id = ANY($1)\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "enum_id", + "type_info": "Int4" + }, + { + "ordinal": 2, + "name": "value", + "type_info": "Varchar" + }, + { + "ordinal": 3, + "name": "ordering", + "type_info": "Int4" + }, + { + "ordinal": 4, + "name": "metadata", + "type_info": "Jsonb" + }, + { + "ordinal": 5, + "name": "created", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Int4Array" + ] + }, + "nullable": [ + false, + false, + false, + true, + true, + false + ] + }, + "hash": "458630d00e46183c65f95729d2647d3635f629cfb892fc8ac1964d8ecc269576" +} diff --git a/.sqlx/query-4a54d350b4695c32a802675506e85b0506fc62a63ca0ee5f38890824301d6515.json b/.sqlx/query-4a54d350b4695c32a802675506e85b0506fc62a63ca0ee5f38890824301d6515.json deleted file mode 100644 index 3ecd8c38..00000000 --- a/.sqlx/query-4a54d350b4695c32a802675506e85b0506fc62a63ca0ee5f38890824301d6515.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n UPDATE mods\n SET server_side = $1\n WHERE (id = $2)\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int4", - "Int8" - ] - }, - "nullable": [] - }, - "hash": "4a54d350b4695c32a802675506e85b0506fc62a63ca0ee5f38890824301d6515" -} diff --git a/.sqlx/query-507314fdcacaa3c7751738c9d0baee2b90aec719b6b203f922824eced5ea8369.json b/.sqlx/query-507314fdcacaa3c7751738c9d0baee2b90aec719b6b203f922824eced5ea8369.json deleted file mode 100644 index 5d4ebbde..00000000 --- a/.sqlx/query-507314fdcacaa3c7751738c9d0baee2b90aec719b6b203f922824eced5ea8369.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n DELETE FROM game_versions_versions WHERE joining_version_id = $1\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int8" - ] - }, - 
"nullable": [] - }, - "hash": "507314fdcacaa3c7751738c9d0baee2b90aec719b6b203f922824eced5ea8369" -} diff --git a/.sqlx/query-5295fba2053675c8414c0b37a59943535b9a438a642ea1c68045e987f05ade13.json b/.sqlx/query-5295fba2053675c8414c0b37a59943535b9a438a642ea1c68045e987f05ade13.json deleted file mode 100644 index 27a8b53e..00000000 --- a/.sqlx/query-5295fba2053675c8414c0b37a59943535b9a438a642ea1c68045e987f05ade13.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT l.id id, l.loader loader, l.icon icon,\n ARRAY_AGG(DISTINCT pt.name) filter (where pt.name is not null) project_types\n FROM loaders l\n LEFT OUTER JOIN loaders_project_types lpt ON joining_loader_id = l.id\n LEFT OUTER JOIN project_types pt ON lpt.joining_project_type_id = pt.id\n GROUP BY l.id;\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "id", - "type_info": "Int4" - }, - { - "ordinal": 1, - "name": "loader", - "type_info": "Varchar" - }, - { - "ordinal": 2, - "name": "icon", - "type_info": "Varchar" - }, - { - "ordinal": 3, - "name": "project_types", - "type_info": "VarcharArray" - } - ], - "parameters": { - "Left": [] - }, - "nullable": [ - false, - false, - false, - null - ] - }, - "hash": "5295fba2053675c8414c0b37a59943535b9a438a642ea1c68045e987f05ade13" -} diff --git a/.sqlx/query-59e95e832615c375753bfc9a56b07c02d916399adfa52fb11a79b8f7b56ecf8b.json b/.sqlx/query-59e95e832615c375753bfc9a56b07c02d916399adfa52fb11a79b8f7b56ecf8b.json deleted file mode 100644 index 12069b4d..00000000 --- a/.sqlx/query-59e95e832615c375753bfc9a56b07c02d916399adfa52fb11a79b8f7b56ecf8b.json +++ /dev/null @@ -1,114 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT m.id id, m.title title, m.description description, m.color color,\n m.icon_url icon_url, m.slug slug, cs.name client_side_type, ss.name server_side_type,\n pt.name project_type, u.username username, u.avatar_url avatar_url,\n ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null) 
categories,\n ARRAY_AGG(DISTINCT lo.loader) filter (where lo.loader is not null) loaders,\n JSONB_AGG(DISTINCT jsonb_build_object('id', gv.id, 'version', gv.version, 'type', gv.type, 'created', gv.created, 'major', gv.major)) filter (where gv.version is not null) versions,\n ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null and mg.featured is false) gallery,\n ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null and mg.featured is true) featured_gallery\n FROM mods m\n LEFT OUTER JOIN mods_categories mc ON joining_mod_id = m.id AND mc.is_additional = FALSE\n LEFT OUTER JOIN categories c ON mc.joining_category_id = c.id\n LEFT OUTER JOIN versions v ON v.mod_id = m.id AND v.status != ALL($2)\n LEFT OUTER JOIN game_versions_versions gvv ON gvv.joining_version_id = v.id\n LEFT OUTER JOIN game_versions gv ON gvv.game_version_id = gv.id\n LEFT OUTER JOIN loaders_versions lv ON lv.version_id = v.id\n LEFT OUTER JOIN loaders lo ON lo.id = lv.loader_id\n LEFT OUTER JOIN mods_gallery mg ON mg.mod_id = m.id\n INNER JOIN project_types pt ON pt.id = m.project_type\n INNER JOIN side_types cs ON m.client_side = cs.id\n INNER JOIN side_types ss ON m.server_side = ss.id\n INNER JOIN team_members tm ON tm.team_id = m.team_id AND tm.role = $3 AND tm.accepted = TRUE\n INNER JOIN users u ON tm.user_id = u.id\n WHERE m.id = $1\n GROUP BY m.id, cs.id, ss.id, pt.id, u.id;\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "id", - "type_info": "Int8" - }, - { - "ordinal": 1, - "name": "title", - "type_info": "Varchar" - }, - { - "ordinal": 2, - "name": "description", - "type_info": "Varchar" - }, - { - "ordinal": 3, - "name": "color", - "type_info": "Int4" - }, - { - "ordinal": 4, - "name": "icon_url", - "type_info": "Varchar" - }, - { - "ordinal": 5, - "name": "slug", - "type_info": "Varchar" - }, - { - "ordinal": 6, - "name": "client_side_type", - "type_info": "Varchar" - }, - { - "ordinal": 7, - "name": "server_side_type", - 
"type_info": "Varchar" - }, - { - "ordinal": 8, - "name": "project_type", - "type_info": "Varchar" - }, - { - "ordinal": 9, - "name": "username", - "type_info": "Varchar" - }, - { - "ordinal": 10, - "name": "avatar_url", - "type_info": "Varchar" - }, - { - "ordinal": 11, - "name": "categories", - "type_info": "VarcharArray" - }, - { - "ordinal": 12, - "name": "loaders", - "type_info": "VarcharArray" - }, - { - "ordinal": 13, - "name": "versions", - "type_info": "Jsonb" - }, - { - "ordinal": 14, - "name": "gallery", - "type_info": "VarcharArray" - }, - { - "ordinal": 15, - "name": "featured_gallery", - "type_info": "VarcharArray" - } - ], - "parameters": { - "Left": [ - "Int8", - "TextArray", - "Text" - ] - }, - "nullable": [ - false, - false, - false, - true, - true, - true, - false, - false, - false, - false, - true, - null, - null, - null, - null, - null - ] - }, - "hash": "59e95e832615c375753bfc9a56b07c02d916399adfa52fb11a79b8f7b56ecf8b" -} diff --git a/.sqlx/query-622496d06b9d1e5019d7dcb45ac768558305f1270c1c43ef767f54b9baf5b5af.json b/.sqlx/query-622496d06b9d1e5019d7dcb45ac768558305f1270c1c43ef767f54b9baf5b5af.json new file mode 100644 index 00000000..b92f9f5a --- /dev/null +++ b/.sqlx/query-622496d06b9d1e5019d7dcb45ac768558305f1270c1c43ef767f54b9baf5b5af.json @@ -0,0 +1,56 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT lf.id, lf.field, lf.field_type, lf.optional, lf.min_val, lf.max_val, lf.enum_type\n FROM loader_fields lf\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "field", + "type_info": "Varchar" + }, + { + "ordinal": 2, + "name": "field_type", + "type_info": "Varchar" + }, + { + "ordinal": 3, + "name": "optional", + "type_info": "Bool" + }, + { + "ordinal": 4, + "name": "min_val", + "type_info": "Int4" + }, + { + "ordinal": 5, + "name": "max_val", + "type_info": "Int4" + }, + { + "ordinal": 6, + "name": "enum_type", + "type_info": "Int4" + } + ], + "parameters": { 
+ "Left": [] + }, + "nullable": [ + false, + false, + false, + false, + true, + true, + true + ] + }, + "hash": "622496d06b9d1e5019d7dcb45ac768558305f1270c1c43ef767f54b9baf5b5af" +} diff --git a/.sqlx/query-683e186dc086ef21d2f82c0d427fcee16c613fb93ea74d6eb0da684363ca7b13.json b/.sqlx/query-683e186dc086ef21d2f82c0d427fcee16c613fb93ea74d6eb0da684363ca7b13.json new file mode 100644 index 00000000..b079a30f --- /dev/null +++ b/.sqlx/query-683e186dc086ef21d2f82c0d427fcee16c613fb93ea74d6eb0da684363ca7b13.json @@ -0,0 +1,28 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT id, project_type FROM categories\n WHERE category = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "project_type", + "type_info": "Int4" + } + ], + "parameters": { + "Left": [ + "Text" + ] + }, + "nullable": [ + false, + false + ] + }, + "hash": "683e186dc086ef21d2f82c0d427fcee16c613fb93ea74d6eb0da684363ca7b13" +} diff --git a/.sqlx/query-6b89c2b2557e304c2a3a02d7824327685f9be696254bf2370d0c995aafc6a2d8.json b/.sqlx/query-6b89c2b2557e304c2a3a02d7824327685f9be696254bf2370d0c995aafc6a2d8.json deleted file mode 100644 index 603c03e6..00000000 --- a/.sqlx/query-6b89c2b2557e304c2a3a02d7824327685f9be696254bf2370d0c995aafc6a2d8.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n UPDATE mods\n SET loaders = (\n SELECT COALESCE(ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null), array[]::varchar[])\n FROM versions v\n INNER JOIN loaders_versions lv ON lv.version_id = v.id\n INNER JOIN loaders l on lv.loader_id = l.id\n WHERE v.mod_id = mods.id AND v.status != ALL($2)\n )\n WHERE id = $1\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int8", - "TextArray" - ] - }, - "nullable": [] - }, - "hash": "6b89c2b2557e304c2a3a02d7824327685f9be696254bf2370d0c995aafc6a2d8" -} diff --git a/.sqlx/query-72c75313688dfd88a659c5250c71b9899abd6186ab32a067a7d4b8a0846ebd18.json 
b/.sqlx/query-72c75313688dfd88a659c5250c71b9899abd6186ab32a067a7d4b8a0846ebd18.json deleted file mode 100644 index a5495efc..00000000 --- a/.sqlx/query-72c75313688dfd88a659c5250c71b9899abd6186ab32a067a7d4b8a0846ebd18.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n INSERT INTO game_versions (version, type, created)\n VALUES ($1, COALESCE($2, 'other'), COALESCE($3, timezone('utc', now())))\n ON CONFLICT (version) DO UPDATE\n SET type = COALESCE($2, game_versions.type),\n created = COALESCE($3, game_versions.created)\n RETURNING id\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "id", - "type_info": "Int4" - } - ], - "parameters": { - "Left": [ - "Varchar", - "Text", - "Timestamp" - ] - }, - "nullable": [ - false - ] - }, - "hash": "72c75313688dfd88a659c5250c71b9899abd6186ab32a067a7d4b8a0846ebd18" -} diff --git a/.sqlx/query-72d6b5f2f11d88981db82c7247c9e7e5ebfd8d34985a1a8209d6628e66490f37.json b/.sqlx/query-72d6b5f2f11d88981db82c7247c9e7e5ebfd8d34985a1a8209d6628e66490f37.json deleted file mode 100644 index abc96eab..00000000 --- a/.sqlx/query-72d6b5f2f11d88981db82c7247c9e7e5ebfd8d34985a1a8209d6628e66490f37.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT id FROM categories\n WHERE category = $1\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "id", - "type_info": "Int4" - } - ], - "parameters": { - "Left": [ - "Text" - ] - }, - "nullable": [ - false - ] - }, - "hash": "72d6b5f2f11d88981db82c7247c9e7e5ebfd8d34985a1a8209d6628e66490f37" -} diff --git a/.sqlx/query-85c6de008681d9fc9dc51b17330bed09204010813111e66a7ca84bc0e603f537.json b/.sqlx/query-85c6de008681d9fc9dc51b17330bed09204010813111e66a7ca84bc0e603f537.json deleted file mode 100644 index 84cad42e..00000000 --- a/.sqlx/query-85c6de008681d9fc9dc51b17330bed09204010813111e66a7ca84bc0e603f537.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT name FROM side_types\n ", - 
"describe": { - "columns": [ - { - "ordinal": 0, - "name": "name", - "type_info": "Varchar" - } - ], - "parameters": { - "Left": [] - }, - "nullable": [ - false - ] - }, - "hash": "85c6de008681d9fc9dc51b17330bed09204010813111e66a7ca84bc0e603f537" -} diff --git a/.sqlx/query-8cfa1380907e20fe18180d4f2ae929b7178f81056788ffb207a6c5e4bbcc7a7d.json b/.sqlx/query-8cfa1380907e20fe18180d4f2ae929b7178f81056788ffb207a6c5e4bbcc7a7d.json new file mode 100644 index 00000000..874d0bc0 --- /dev/null +++ b/.sqlx/query-8cfa1380907e20fe18180d4f2ae929b7178f81056788ffb207a6c5e4bbcc7a7d.json @@ -0,0 +1,18 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO version_fields (field_id, version_id, int_value, string_value, enum_value)\n SELECT * FROM UNNEST($1::integer[], $2::bigint[], $3::integer[], $4::text[], $5::integer[])\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int4Array", + "Int8Array", + "Int4Array", + "TextArray", + "Int4Array" + ] + }, + "nullable": [] + }, + "hash": "8cfa1380907e20fe18180d4f2ae929b7178f81056788ffb207a6c5e4bbcc7a7d" +} diff --git a/.sqlx/query-923d1d1e5e9b879479a244479952df15841d35b96fbdcadc7d5af8d6b4671f9e.json b/.sqlx/query-923d1d1e5e9b879479a244479952df15841d35b96fbdcadc7d5af8d6b4671f9e.json new file mode 100644 index 00000000..65c31f42 --- /dev/null +++ b/.sqlx/query-923d1d1e5e9b879479a244479952df15841d35b96fbdcadc7d5af8d6b4671f9e.json @@ -0,0 +1,44 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT l.id id, l.loader loader, l.icon icon,\n ARRAY_AGG(DISTINCT pt.name) filter (where pt.name is not null) project_types,\n ARRAY_AGG(DISTINCT g.name) filter (where g.name is not null) games\n FROM loaders l \n LEFT OUTER JOIN loaders_project_types lpt ON joining_loader_id = l.id\n LEFT OUTER JOIN project_types pt ON lpt.joining_project_type_id = pt.id\n LEFT OUTER JOIN loaders_project_types_games lptg ON lptg.loader_id = lpt.joining_loader_id AND lptg.project_type_id = lpt.joining_project_type_id\n LEFT OUTER JOIN games g ON 
lptg.game_id = g.id\n GROUP BY l.id;\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "loader", + "type_info": "Varchar" + }, + { + "ordinal": 2, + "name": "icon", + "type_info": "Varchar" + }, + { + "ordinal": 3, + "name": "project_types", + "type_info": "VarcharArray" + }, + { + "ordinal": 4, + "name": "games", + "type_info": "VarcharArray" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + false, + false, + false, + null, + null + ] + }, + "hash": "923d1d1e5e9b879479a244479952df15841d35b96fbdcadc7d5af8d6b4671f9e" +} diff --git a/.sqlx/query-a2f510708f04ad72fe36af9fa96bfb775fb088579fe23bcb87f50f5a8578f3c0.json b/.sqlx/query-a2f510708f04ad72fe36af9fa96bfb775fb088579fe23bcb87f50f5a8578f3c0.json new file mode 100644 index 00000000..897894b7 --- /dev/null +++ b/.sqlx/query-a2f510708f04ad72fe36af9fa96bfb775fb088579fe23bcb87f50f5a8578f3c0.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "\n DELETE FROM version_fields vf\n WHERE vf.version_id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [] + }, + "hash": "a2f510708f04ad72fe36af9fa96bfb775fb088579fe23bcb87f50f5a8578f3c0" +} diff --git a/.sqlx/query-acd2e72610008d4fe240cdfadc1c70c997443f7319a5c535df967d56d24bd54a.json b/.sqlx/query-acd2e72610008d4fe240cdfadc1c70c997443f7319a5c535df967d56d24bd54a.json new file mode 100644 index 00000000..a80e8a36 --- /dev/null +++ b/.sqlx/query-acd2e72610008d4fe240cdfadc1c70c997443f7319a5c535df967d56d24bd54a.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n DELETE FROM version_fields \n WHERE version_id = $1\n AND field_id = ANY($2)\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Int4Array" + ] + }, + "nullable": [] + }, + "hash": "acd2e72610008d4fe240cdfadc1c70c997443f7319a5c535df967d56d24bd54a" +} diff --git 
a/.sqlx/query-bee1abe8313d17a56d93b06a31240e338c3973bc7a7374799ced3df5e38d3134.json b/.sqlx/query-bee1abe8313d17a56d93b06a31240e338c3973bc7a7374799ced3df5e38d3134.json deleted file mode 100644 index 0db64c92..00000000 --- a/.sqlx/query-bee1abe8313d17a56d93b06a31240e338c3973bc7a7374799ced3df5e38d3134.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n DELETE FROM game_versions_versions gvv\n WHERE gvv.joining_version_id = $1\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int8" - ] - }, - "nullable": [] - }, - "hash": "bee1abe8313d17a56d93b06a31240e338c3973bc7a7374799ced3df5e38d3134" -} diff --git a/.sqlx/query-c1fddbf97350871b79cb0c235b1f7488c6616b7c1dfbde76a712fd57e91ba158.json b/.sqlx/query-c1fddbf97350871b79cb0c235b1f7488c6616b7c1dfbde76a712fd57e91ba158.json deleted file mode 100644 index 698f31f3..00000000 --- a/.sqlx/query-c1fddbf97350871b79cb0c235b1f7488c6616b7c1dfbde76a712fd57e91ba158.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT id FROM game_versions\n WHERE version = $1\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "id", - "type_info": "Int4" - } - ], - "parameters": { - "Left": [ - "Text" - ] - }, - "nullable": [ - false - ] - }, - "hash": "c1fddbf97350871b79cb0c235b1f7488c6616b7c1dfbde76a712fd57e91ba158" -} diff --git a/.sqlx/query-c5d44333c62223bd3e68185d1fb3f95152fafec593da8d06c9b2b665218a02be.json b/.sqlx/query-c5d44333c62223bd3e68185d1fb3f95152fafec593da8d06c9b2b665218a02be.json deleted file mode 100644 index 78fd9eda..00000000 --- a/.sqlx/query-c5d44333c62223bd3e68185d1fb3f95152fafec593da8d06c9b2b665218a02be.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n UPDATE mods\n SET client_side = $1\n WHERE (id = $2)\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int4", - "Int8" - ] - }, - "nullable": [] - }, - "hash": "c5d44333c62223bd3e68185d1fb3f95152fafec593da8d06c9b2b665218a02be" -} 
diff --git a/.sqlx/query-cab90ea34929643f9e9814150c4dbd027fc0bd427bfba5e6eb99c989af53b680.json b/.sqlx/query-cab90ea34929643f9e9814150c4dbd027fc0bd427bfba5e6eb99c989af53b680.json new file mode 100644 index 00000000..0951bcc2 --- /dev/null +++ b/.sqlx/query-cab90ea34929643f9e9814150c4dbd027fc0bd427bfba5e6eb99c989af53b680.json @@ -0,0 +1,180 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT m.id id, v.id version_id, m.title title, m.description description, m.downloads downloads, m.follows follows,\n m.icon_url icon_url, m.published published, m.approved approved, m.updated updated,\n m.team_id team_id, m.license license, m.slug slug, m.status status_name, m.color color,\n pt.name project_type_name, u.username username,\n ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null and mc.is_additional is false) categories,\n ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null and mc.is_additional is true) additional_categories,\n ARRAY_AGG(DISTINCT lo.loader) filter (where lo.loader is not null) loaders,\n ARRAY_AGG(DISTINCT pt.name) filter (where pt.name is not null) project_types,\n ARRAY_AGG(DISTINCT g.name) filter (where g.name is not null) games,\n ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null and mg.featured is false) gallery,\n ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null and mg.featured is true) featured_gallery,\n JSONB_AGG(\n DISTINCT jsonb_build_object(\n 'field_id', vf.field_id,\n 'int_value', vf.int_value,\n 'enum_value', vf.enum_value,\n 'string_value', vf.string_value\n )\n ) filter (where vf.field_id is not null) version_fields,\n JSONB_AGG(\n DISTINCT jsonb_build_object(\n 'lf_id', lf.id,\n 'loader_name', lo.loader,\n 'field', lf.field,\n 'field_type', lf.field_type,\n 'enum_type', lf.enum_type,\n 'min_val', lf.min_val,\n 'max_val', lf.max_val,\n 'optional', lf.optional\n )\n ) filter (where lf.id is not null) loader_fields,\n JSONB_AGG(\n DISTINCT jsonb_build_object(\n 
'id', lfev.id,\n 'enum_id', lfev.enum_id,\n 'value', lfev.value,\n 'ordering', lfev.ordering,\n 'created', lfev.created,\n 'metadata', lfev.metadata\n ) \n ) filter (where lfev.id is not null) loader_field_enum_values\n\n FROM versions v\n INNER JOIN mods m ON v.mod_id = m.id AND m.status = ANY($2)\n LEFT OUTER JOIN mods_categories mc ON joining_mod_id = m.id\n LEFT OUTER JOIN categories c ON mc.joining_category_id = c.id\n LEFT OUTER JOIN loaders_versions lv ON lv.version_id = v.id\n LEFT OUTER JOIN loaders lo ON lo.id = lv.loader_id\n LEFT JOIN loaders_project_types lpt ON lpt.joining_loader_id = lo.id\n LEFT JOIN project_types pt ON pt.id = lpt.joining_project_type_id\n LEFT JOIN loaders_project_types_games lptg ON lptg.loader_id = lo.id AND lptg.project_type_id = pt.id\n LEFT JOIN games g ON lptg.game_id = g.id\n LEFT OUTER JOIN mods_gallery mg ON mg.mod_id = m.id\n INNER JOIN team_members tm ON tm.team_id = m.team_id AND tm.role = $3 AND tm.accepted = TRUE\n INNER JOIN users u ON tm.user_id = u.id\n LEFT OUTER JOIN version_fields vf on v.id = vf.version_id\n LEFT OUTER JOIN loader_fields lf on vf.field_id = lf.id\n LEFT OUTER JOIN loader_field_enums lfe on lf.enum_type = lfe.id\n LEFT OUTER JOIN loader_field_enum_values lfev on lfev.enum_id = lfe.id\n WHERE v.status != ANY($1)\n GROUP BY v.id, m.id, pt.id, u.id;\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "version_id", + "type_info": "Int8" + }, + { + "ordinal": 2, + "name": "title", + "type_info": "Varchar" + }, + { + "ordinal": 3, + "name": "description", + "type_info": "Varchar" + }, + { + "ordinal": 4, + "name": "downloads", + "type_info": "Int4" + }, + { + "ordinal": 5, + "name": "follows", + "type_info": "Int4" + }, + { + "ordinal": 6, + "name": "icon_url", + "type_info": "Varchar" + }, + { + "ordinal": 7, + "name": "published", + "type_info": "Timestamptz" + }, + { + "ordinal": 8, + "name": "approved", + "type_info": 
"Timestamptz" + }, + { + "ordinal": 9, + "name": "updated", + "type_info": "Timestamptz" + }, + { + "ordinal": 10, + "name": "team_id", + "type_info": "Int8" + }, + { + "ordinal": 11, + "name": "license", + "type_info": "Varchar" + }, + { + "ordinal": 12, + "name": "slug", + "type_info": "Varchar" + }, + { + "ordinal": 13, + "name": "status_name", + "type_info": "Varchar" + }, + { + "ordinal": 14, + "name": "color", + "type_info": "Int4" + }, + { + "ordinal": 15, + "name": "project_type_name", + "type_info": "Varchar" + }, + { + "ordinal": 16, + "name": "username", + "type_info": "Varchar" + }, + { + "ordinal": 17, + "name": "categories", + "type_info": "VarcharArray" + }, + { + "ordinal": 18, + "name": "additional_categories", + "type_info": "VarcharArray" + }, + { + "ordinal": 19, + "name": "loaders", + "type_info": "VarcharArray" + }, + { + "ordinal": 20, + "name": "project_types", + "type_info": "VarcharArray" + }, + { + "ordinal": 21, + "name": "games", + "type_info": "VarcharArray" + }, + { + "ordinal": 22, + "name": "gallery", + "type_info": "VarcharArray" + }, + { + "ordinal": 23, + "name": "featured_gallery", + "type_info": "VarcharArray" + }, + { + "ordinal": 24, + "name": "version_fields", + "type_info": "Jsonb" + }, + { + "ordinal": 25, + "name": "loader_fields", + "type_info": "Jsonb" + }, + { + "ordinal": 26, + "name": "loader_field_enum_values", + "type_info": "Jsonb" + } + ], + "parameters": { + "Left": [ + "TextArray", + "TextArray", + "Text" + ] + }, + "nullable": [ + false, + false, + false, + false, + false, + false, + true, + false, + true, + false, + false, + false, + true, + false, + true, + false, + false, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null + ] + }, + "hash": "cab90ea34929643f9e9814150c4dbd027fc0bd427bfba5e6eb99c989af53b680" +} diff --git a/.sqlx/query-ef59f99fc0ab66ff5779d0e71c4a2134e2f26eed002ff9ea5626ea3e23518594.json 
b/.sqlx/query-ef59f99fc0ab66ff5779d0e71c4a2134e2f26eed002ff9ea5626ea3e23518594.json deleted file mode 100644 index 101e5838..00000000 --- a/.sqlx/query-ef59f99fc0ab66ff5779d0e71c4a2134e2f26eed002ff9ea5626ea3e23518594.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT name FROM project_types pt\n INNER JOIN mods ON mods.project_type = pt.id\n WHERE mods.id = $1\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "name", - "type_info": "Varchar" - } - ], - "parameters": { - "Left": [ - "Int8" - ] - }, - "nullable": [ - false - ] - }, - "hash": "ef59f99fc0ab66ff5779d0e71c4a2134e2f26eed002ff9ea5626ea3e23518594" -} diff --git a/.sqlx/query-ffcc8c65721465514ad39a0e9bd6138eda0fa32dd3399a8e850a76beb1f1bf16.json b/.sqlx/query-f73ffab12a96eb9480615e333d40cde031df280039cd8e435cfca5e15ed3d1c4.json similarity index 53% rename from .sqlx/query-ffcc8c65721465514ad39a0e9bd6138eda0fa32dd3399a8e850a76beb1f1bf16.json rename to .sqlx/query-f73ffab12a96eb9480615e333d40cde031df280039cd8e435cfca5e15ed3d1c4.json index d47d4f1a..91e7b818 100644 --- a/.sqlx/query-ffcc8c65721465514ad39a0e9bd6138eda0fa32dd3399a8e850a76beb1f1bf16.json +++ b/.sqlx/query-f73ffab12a96eb9480615e333d40cde031df280039cd8e435cfca5e15ed3d1c4.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT m.id id, m.project_type project_type, m.title title, m.description description, m.downloads downloads, m.follows follows,\n m.icon_url icon_url, m.body body, m.published published,\n m.updated updated, m.approved approved, m.queued, m.status status, m.requested_status requested_status,\n m.issues_url issues_url, m.source_url source_url, m.wiki_url wiki_url, m.discord_url discord_url, m.license_url license_url,\n m.team_id team_id, m.organization_id organization_id, m.client_side client_side, m.server_side server_side, m.license license, m.slug slug, m.moderation_message moderation_message, m.moderation_message_body moderation_message_body,\n cs.name 
client_side_type, ss.name server_side_type, pt.name project_type_name, m.webhook_sent, m.color,\n t.id thread_id, m.monetization_status monetization_status, m.loaders loaders, m.game_versions game_versions,\n ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null and mc.is_additional is false) categories,\n ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null and mc.is_additional is true) additional_categories,\n JSONB_AGG(DISTINCT jsonb_build_object('id', v.id, 'date_published', v.date_published)) filter (where v.id is not null) versions,\n JSONB_AGG(DISTINCT jsonb_build_object('image_url', mg.image_url, 'featured', mg.featured, 'title', mg.title, 'description', mg.description, 'created', mg.created, 'ordering', mg.ordering)) filter (where mg.image_url is not null) gallery,\n JSONB_AGG(DISTINCT jsonb_build_object('platform_id', md.joining_platform_id, 'platform_short', dp.short, 'platform_name', dp.name,'url', md.url)) filter (where md.joining_platform_id is not null) donations\n FROM mods m\n INNER JOIN project_types pt ON pt.id = m.project_type\n INNER JOIN side_types cs ON m.client_side = cs.id\n INNER JOIN side_types ss ON m.server_side = ss.id\n INNER JOIN threads t ON t.mod_id = m.id\n LEFT JOIN mods_gallery mg ON mg.mod_id = m.id\n LEFT JOIN mods_donations md ON md.joining_mod_id = m.id\n LEFT JOIN donation_platforms dp ON md.joining_platform_id = dp.id\n LEFT JOIN mods_categories mc ON mc.joining_mod_id = m.id\n LEFT JOIN categories c ON mc.joining_category_id = c.id\n LEFT JOIN versions v ON v.mod_id = m.id AND v.status = ANY($3)\n WHERE m.id = ANY($1) OR m.slug = ANY($2)\n GROUP BY pt.id, cs.id, ss.id, t.id, m.id;\n ", + "query": "\n SELECT m.id id, m.title title, m.description description, m.downloads downloads, m.follows follows,\n m.icon_url icon_url, m.body body, m.published published,\n m.updated updated, m.approved approved, m.queued, m.status status, m.requested_status requested_status,\n m.issues_url issues_url, 
m.source_url source_url, m.wiki_url wiki_url, m.discord_url discord_url, m.license_url license_url,\n m.team_id team_id, m.organization_id organization_id, m.license license, m.slug slug, m.moderation_message moderation_message, m.moderation_message_body moderation_message_body,\n m.webhook_sent, m.color,\n t.id thread_id, m.monetization_status monetization_status,\n ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders,\n ARRAY_AGG(DISTINCT pt.name) filter (where pt.name is not null) project_types,\n ARRAY_AGG(DISTINCT g.name) filter (where g.name is not null) games,\n ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null and mc.is_additional is false) categories,\n ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null and mc.is_additional is true) additional_categories,\n JSONB_AGG(DISTINCT jsonb_build_object('id', v.id, 'date_published', v.date_published)) filter (where v.id is not null) versions,\n JSONB_AGG(DISTINCT jsonb_build_object('image_url', mg.image_url, 'featured', mg.featured, 'title', mg.title, 'description', mg.description, 'created', mg.created, 'ordering', mg.ordering)) filter (where mg.image_url is not null) gallery,\n JSONB_AGG(DISTINCT jsonb_build_object('platform_id', md.joining_platform_id, 'platform_short', dp.short, 'platform_name', dp.name,'url', md.url)) filter (where md.joining_platform_id is not null) donations\n FROM mods m \n INNER JOIN threads t ON t.mod_id = m.id\n LEFT JOIN mods_gallery mg ON mg.mod_id = m.id\n LEFT JOIN mods_donations md ON md.joining_mod_id = m.id\n LEFT JOIN donation_platforms dp ON md.joining_platform_id = dp.id\n LEFT JOIN mods_categories mc ON mc.joining_mod_id = m.id\n LEFT JOIN categories c ON mc.joining_category_id = c.id\n LEFT JOIN versions v ON v.mod_id = m.id AND v.status = ANY($3)\n LEFT JOIN loaders_versions lv ON lv.version_id = v.id\n LEFT JOIN loaders l on lv.loader_id = l.id\n LEFT JOIN loaders_project_types lpt ON lpt.joining_loader_id = l.id\n LEFT 
JOIN project_types pt ON pt.id = lpt.joining_project_type_id\n LEFT JOIN loaders_project_types_games lptg ON lptg.loader_id = l.id AND lptg.project_type_id = pt.id\n LEFT JOIN games g ON lptg.game_id = g.id\n WHERE m.id = ANY($1) OR m.slug = ANY($2)\n GROUP BY t.id, m.id;\n ", "describe": { "columns": [ { @@ -10,201 +10,176 @@ }, { "ordinal": 1, - "name": "project_type", - "type_info": "Int4" - }, - { - "ordinal": 2, "name": "title", "type_info": "Varchar" }, { - "ordinal": 3, + "ordinal": 2, "name": "description", "type_info": "Varchar" }, { - "ordinal": 4, + "ordinal": 3, "name": "downloads", "type_info": "Int4" }, { - "ordinal": 5, + "ordinal": 4, "name": "follows", "type_info": "Int4" }, { - "ordinal": 6, + "ordinal": 5, "name": "icon_url", "type_info": "Varchar" }, { - "ordinal": 7, + "ordinal": 6, "name": "body", "type_info": "Varchar" }, { - "ordinal": 8, + "ordinal": 7, "name": "published", "type_info": "Timestamptz" }, { - "ordinal": 9, + "ordinal": 8, "name": "updated", "type_info": "Timestamptz" }, { - "ordinal": 10, + "ordinal": 9, "name": "approved", "type_info": "Timestamptz" }, { - "ordinal": 11, + "ordinal": 10, "name": "queued", "type_info": "Timestamptz" }, { - "ordinal": 12, + "ordinal": 11, "name": "status", "type_info": "Varchar" }, { - "ordinal": 13, + "ordinal": 12, "name": "requested_status", "type_info": "Varchar" }, { - "ordinal": 14, + "ordinal": 13, "name": "issues_url", "type_info": "Varchar" }, { - "ordinal": 15, + "ordinal": 14, "name": "source_url", "type_info": "Varchar" }, { - "ordinal": 16, + "ordinal": 15, "name": "wiki_url", "type_info": "Varchar" }, { - "ordinal": 17, + "ordinal": 16, "name": "discord_url", "type_info": "Varchar" }, { - "ordinal": 18, + "ordinal": 17, "name": "license_url", "type_info": "Varchar" }, { - "ordinal": 19, + "ordinal": 18, "name": "team_id", "type_info": "Int8" }, { - "ordinal": 20, + "ordinal": 19, "name": "organization_id", "type_info": "Int8" }, { - "ordinal": 21, - "name": "client_side", - 
"type_info": "Int4" - }, - { - "ordinal": 22, - "name": "server_side", - "type_info": "Int4" - }, - { - "ordinal": 23, + "ordinal": 20, "name": "license", "type_info": "Varchar" }, { - "ordinal": 24, + "ordinal": 21, "name": "slug", "type_info": "Varchar" }, { - "ordinal": 25, + "ordinal": 22, "name": "moderation_message", "type_info": "Varchar" }, { - "ordinal": 26, + "ordinal": 23, "name": "moderation_message_body", "type_info": "Varchar" }, { - "ordinal": 27, - "name": "client_side_type", - "type_info": "Varchar" - }, - { - "ordinal": 28, - "name": "server_side_type", - "type_info": "Varchar" - }, - { - "ordinal": 29, - "name": "project_type_name", - "type_info": "Varchar" - }, - { - "ordinal": 30, + "ordinal": 24, "name": "webhook_sent", "type_info": "Bool" }, { - "ordinal": 31, + "ordinal": 25, "name": "color", "type_info": "Int4" }, { - "ordinal": 32, + "ordinal": 26, "name": "thread_id", "type_info": "Int8" }, { - "ordinal": 33, + "ordinal": 27, "name": "monetization_status", "type_info": "Varchar" }, { - "ordinal": 34, + "ordinal": 28, "name": "loaders", "type_info": "VarcharArray" }, { - "ordinal": 35, - "name": "game_versions", + "ordinal": 29, + "name": "project_types", "type_info": "VarcharArray" }, { - "ordinal": 36, + "ordinal": 30, + "name": "games", + "type_info": "VarcharArray" + }, + { + "ordinal": 31, "name": "categories", "type_info": "VarcharArray" }, { - "ordinal": 37, + "ordinal": 32, "name": "additional_categories", "type_info": "VarcharArray" }, { - "ordinal": 38, + "ordinal": 33, "name": "versions", "type_info": "Jsonb" }, { - "ordinal": 39, + "ordinal": 34, "name": "gallery", "type_info": "Jsonb" }, { - "ordinal": 40, + "ordinal": 35, "name": "donations", "type_info": "Jsonb" } @@ -222,7 +197,6 @@ false, false, false, - false, true, false, false, @@ -239,20 +213,16 @@ false, true, false, - false, - false, true, true, true, false, - false, - false, - false, true, false, false, - false, - false, + null, + null, + null, null, null, null, @@ 
-260,5 +230,5 @@ null ] }, - "hash": "ffcc8c65721465514ad39a0e9bd6138eda0fa32dd3399a8e850a76beb1f1bf16" + "hash": "f73ffab12a96eb9480615e333d40cde031df280039cd8e435cfca5e15ed3d1c4" } diff --git a/.sqlx/query-f7aee6fbd3415c7819d9ae1a75a0ae5753aaa3373c3ac9bc04adb3087781b49f.json b/.sqlx/query-f7aee6fbd3415c7819d9ae1a75a0ae5753aaa3373c3ac9bc04adb3087781b49f.json new file mode 100644 index 00000000..d67b3518 --- /dev/null +++ b/.sqlx/query-f7aee6fbd3415c7819d9ae1a75a0ae5753aaa3373c3ac9bc04adb3087781b49f.json @@ -0,0 +1,148 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT v.id id, v.mod_id mod_id, v.author_id author_id, v.name version_name, v.version_number version_number,\n v.changelog changelog, v.date_published date_published, v.downloads downloads,\n v.version_type version_type, v.featured featured, v.status status, v.requested_status requested_status, v.ordering ordering,\n ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders,\n ARRAY_AGG(DISTINCT pt.name) filter (where pt.name is not null) project_types,\n ARRAY_AGG(DISTINCT g.name) filter (where g.name is not null) games,\n JSONB_AGG(DISTINCT jsonb_build_object('id', f.id, 'url', f.url, 'filename', f.filename, 'primary', f.is_primary, 'size', f.size, 'file_type', f.file_type)) filter (where f.id is not null) files,\n JSONB_AGG(DISTINCT jsonb_build_object('algorithm', h.algorithm, 'hash', encode(h.hash, 'escape'), 'file_id', h.file_id)) filter (where h.hash is not null) hashes,\n JSONB_AGG(DISTINCT jsonb_build_object('project_id', d.mod_dependency_id, 'version_id', d.dependency_id, 'dependency_type', d.dependency_type,'file_name', dependency_file_name)) filter (where d.dependency_type is not null) dependencies,\n \n JSONB_AGG(\n DISTINCT jsonb_build_object(\n 'field_id', vf.field_id,\n 'int_value', vf.int_value,\n 'enum_value', vf.enum_value,\n 'string_value', vf.string_value\n )\n ) filter (where vf.field_id is not null) version_fields,\n JSONB_AGG(\n DISTINCT jsonb_build_object(\n 
'lf_id', lf.id,\n 'loader_name', l.loader,\n 'field', lf.field,\n 'field_type', lf.field_type,\n 'enum_type', lf.enum_type,\n 'min_val', lf.min_val,\n 'max_val', lf.max_val,\n 'optional', lf.optional\n )\n ) filter (where lf.id is not null) loader_fields,\n JSONB_AGG(\n DISTINCT jsonb_build_object(\n 'id', lfev.id,\n 'enum_id', lfev.enum_id,\n 'value', lfev.value,\n 'ordering', lfev.ordering,\n 'created', lfev.created,\n 'metadata', lfev.metadata\n ) \n ) filter (where lfev.id is not null) loader_field_enum_values\n \n FROM versions v\n LEFT OUTER JOIN loaders_versions lv on v.id = lv.version_id\n LEFT OUTER JOIN loaders l on lv.loader_id = l.id\n LEFT OUTER JOIN loaders_project_types lpt on l.id = lpt.joining_loader_id\n LEFT JOIN project_types pt on lpt.joining_project_type_id = pt.id\n LEFT OUTER JOIN loaders_project_types_games lptg on l.id = lptg.loader_id AND pt.id = lptg.project_type_id\n LEFT JOIN games g on lptg.game_id = g.id\n LEFT OUTER JOIN files f on v.id = f.version_id\n LEFT OUTER JOIN hashes h on f.id = h.file_id\n LEFT OUTER JOIN dependencies d on v.id = d.dependent_id\n LEFT OUTER JOIN version_fields vf on v.id = vf.version_id\n LEFT OUTER JOIN loader_fields lf on vf.field_id = lf.id\n LEFT OUTER JOIN loader_field_enums lfe on lf.enum_type = lfe.id\n LEFT OUTER JOIN loader_field_enum_values lfev on lfe.id = lfev.enum_id\n\n WHERE v.id = ANY($1)\n GROUP BY v.id\n ORDER BY v.ordering ASC NULLS LAST, v.date_published ASC;\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "mod_id", + "type_info": "Int8" + }, + { + "ordinal": 2, + "name": "author_id", + "type_info": "Int8" + }, + { + "ordinal": 3, + "name": "version_name", + "type_info": "Varchar" + }, + { + "ordinal": 4, + "name": "version_number", + "type_info": "Varchar" + }, + { + "ordinal": 5, + "name": "changelog", + "type_info": "Varchar" + }, + { + "ordinal": 6, + "name": "date_published", + "type_info": 
"Timestamptz" + }, + { + "ordinal": 7, + "name": "downloads", + "type_info": "Int4" + }, + { + "ordinal": 8, + "name": "version_type", + "type_info": "Varchar" + }, + { + "ordinal": 9, + "name": "featured", + "type_info": "Bool" + }, + { + "ordinal": 10, + "name": "status", + "type_info": "Varchar" + }, + { + "ordinal": 11, + "name": "requested_status", + "type_info": "Varchar" + }, + { + "ordinal": 12, + "name": "ordering", + "type_info": "Int4" + }, + { + "ordinal": 13, + "name": "loaders", + "type_info": "VarcharArray" + }, + { + "ordinal": 14, + "name": "project_types", + "type_info": "VarcharArray" + }, + { + "ordinal": 15, + "name": "games", + "type_info": "VarcharArray" + }, + { + "ordinal": 16, + "name": "files", + "type_info": "Jsonb" + }, + { + "ordinal": 17, + "name": "hashes", + "type_info": "Jsonb" + }, + { + "ordinal": 18, + "name": "dependencies", + "type_info": "Jsonb" + }, + { + "ordinal": 19, + "name": "version_fields", + "type_info": "Jsonb" + }, + { + "ordinal": 20, + "name": "loader_fields", + "type_info": "Jsonb" + }, + { + "ordinal": 21, + "name": "loader_field_enum_values", + "type_info": "Jsonb" + } + ], + "parameters": { + "Left": [ + "Int8Array" + ] + }, + "nullable": [ + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + true, + true, + null, + null, + null, + null, + null, + null, + null, + null, + null + ] + }, + "hash": "f7aee6fbd3415c7819d9ae1a75a0ae5753aaa3373c3ac9bc04adb3087781b49f" +} diff --git a/.sqlx/query-fa54ed32004b883daa44eeb413fc2e07b45883608afc6ac91ac6f74736a12256.json b/.sqlx/query-fa54ed32004b883daa44eeb413fc2e07b45883608afc6ac91ac6f74736a12256.json deleted file mode 100644 index c4163630..00000000 --- a/.sqlx/query-fa54ed32004b883daa44eeb413fc2e07b45883608afc6ac91ac6f74736a12256.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n INSERT INTO game_versions_versions (game_version_id, joining_version_id)\n SELECT * FROM UNNEST($1::integer[], 
$2::bigint[])\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int4Array", - "Int8Array" - ] - }, - "nullable": [] - }, - "hash": "fa54ed32004b883daa44eeb413fc2e07b45883608afc6ac91ac6f74736a12256" -} diff --git a/.sqlx/query-b36877d60945eaae76680770a5d28d2cbb26cfbb0ec94ecc8f0741f48178ec1c.json b/.sqlx/query-fefb4f07a0f0c0cf74e554d120f8707d698fc8b4dbb66d2830f4ec0229bc1019.json similarity index 59% rename from .sqlx/query-b36877d60945eaae76680770a5d28d2cbb26cfbb0ec94ecc8f0741f48178ec1c.json rename to .sqlx/query-fefb4f07a0f0c0cf74e554d120f8707d698fc8b4dbb66d2830f4ec0229bc1019.json index 74091817..e2b9c106 100644 --- a/.sqlx/query-b36877d60945eaae76680770a5d28d2cbb26cfbb0ec94ecc8f0741f48178ec1c.json +++ b/.sqlx/query-fefb4f07a0f0c0cf74e554d120f8707d698fc8b4dbb66d2830f4ec0229bc1019.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n INSERT INTO mods (\n id, team_id, title, description, body,\n published, downloads, icon_url, issues_url,\n source_url, wiki_url, status, requested_status, discord_url,\n client_side, server_side, license_url, license,\n slug, project_type, color, monetization_status\n )\n VALUES (\n $1, $2, $3, $4, $5,\n $6, $7, $8, $9,\n $10, $11, $12, $13, $14,\n $15, $16, $17, $18,\n LOWER($19), $20, $21, $22\n )\n ", + "query": "\n INSERT INTO mods (\n id, team_id, title, description, body,\n published, downloads, icon_url, issues_url,\n source_url, wiki_url, status, requested_status, discord_url,\n license_url, license,\n slug, color, monetization_status\n )\n VALUES (\n $1, $2, $3, $4, $5,\n $6, $7, $8, $9,\n $10, $11, $12, $13, $14,\n $15, $16, \n LOWER($17), $18, $19\n )\n ", "describe": { "columns": [], "parameters": { @@ -19,17 +19,14 @@ "Varchar", "Varchar", "Varchar", - "Int4", - "Int4", "Varchar", "Varchar", "Text", "Int4", - "Int4", "Varchar" ] }, "nullable": [] }, - "hash": "b36877d60945eaae76680770a5d28d2cbb26cfbb0ec94ecc8f0741f48178ec1c" + "hash": 
"fefb4f07a0f0c0cf74e554d120f8707d698fc8b4dbb66d2830f4ec0229bc1019" } diff --git a/migrations/20231005230721_dynamic-fields.sql b/migrations/20231005230721_dynamic-fields.sql new file mode 100644 index 00000000..7bf0052f --- /dev/null +++ b/migrations/20231005230721_dynamic-fields.sql @@ -0,0 +1,151 @@ +CREATE TABLE games ( + id int PRIMARY KEY, -- Only used in db + name varchar(64), + CONSTRAINT unique_game_name UNIQUE (name) +); +INSERT INTO games(id, name) VALUES (1, 'minecraft-java'); +INSERT INTO games(id, name) VALUES (2, 'minecraft-bedrock'); + +ALTER TABLE loaders ADD CONSTRAINT unique_loader_name UNIQUE (loader); + +CREATE TABLE loader_field_enums ( + id serial PRIMARY KEY, + enum_name varchar(64) NOT NULL, + ordering int NULL, + hidable BOOLEAN NOT NULL DEFAULT FALSE +); + +CREATE TABLE loader_field_enum_values ( + id serial PRIMARY KEY, + enum_id integer REFERENCES loader_field_enums NOT NULL, + value varchar(64) NOT NULL, + ordering int NULL, + created timestamptz NOT NULL DEFAULT CURRENT_TIMESTAMP, + -- metadata is json of all the extra data for this enum value + metadata jsonb NULL, + + original_id integer, -- This is for mapping only- it is dropped before the end of the migration + + CONSTRAINT unique_variant_per_enum UNIQUE (enum_id, value) + +); + +CREATE TABLE loader_fields ( + id serial PRIMARY KEY, + field varchar(64) UNIQUE NOT NULL, + -- "integer", "text", "enum", "bool", + -- "array_integer", "array_text", "array_enum", "array_bool" + field_type varchar(64) NOT NULL, + -- only for enum + enum_type integer REFERENCES loader_field_enums NULL, + optional BOOLEAN NOT NULL DEFAULT true, + -- for int- min/max val, for text- min len, for enum- min items, for bool- nothing + min_val integer NULL, + max_val integer NULL +); + +CREATE TABLE loader_fields_loaders ( + loader_id integer REFERENCES loaders NOT NULL, + loader_field_id integer REFERENCES loader_fields NOT NULL, + CONSTRAINT unique_loader_field UNIQUE (loader_id, loader_field_id) +); + +ALTER 
TABLE loaders ADD COLUMN hidable boolean NOT NULL default false; + +CREATE TABLE version_fields ( + version_id bigint REFERENCES versions NOT NULL, + field_id integer REFERENCES loader_fields NOT NULL, + -- for int/bool values + int_value integer NULL, + enum_value integer REFERENCES loader_field_enum_values NULL, + string_value text NULL +); + +-- Convert side_types +INSERT INTO loader_field_enums (id, enum_name, hidable) VALUES (1, 'side_types', true); +INSERT INTO loader_field_enum_values (original_id, enum_id, value) SELECT id, 1, name FROM side_types st; + +INSERT INTO loader_fields (field, field_type, enum_type, optional, min_val, max_val) SELECT 'client_side', 'enum', 1, false, 1, 1; +INSERT INTO loader_fields ( field, field_type, enum_type, optional, min_val, max_val) SELECT 'server_side', 'enum', 1, false, 1, 1; + +INSERT INTO loader_fields_loaders (loader_id, loader_field_id) SELECT l.id, lf.id FROM loaders l CROSS JOIN loader_fields lf WHERE lf.field = 'client_side' AND l.loader = ANY( ARRAY['forge', 'fabric', 'quilt', 'modloader','rift','liteloader', 'neoforge']); +INSERT INTO loader_fields_loaders (loader_id, loader_field_id) SELECT l.id, lf.id FROM loaders l CROSS JOIN loader_fields lf WHERE lf.field = 'server_side' AND l.loader = ANY( ARRAY['forge', 'fabric', 'quilt', 'modloader','rift','liteloader', 'neoforge']); + +INSERT INTO version_fields (version_id, field_id, enum_value) +SELECT v.id, 1, m.client_side +FROM versions v +INNER JOIN mods m ON v.mod_id = m.id +INNER JOIN loader_field_enum_values lfev ON m.client_side = lfev.original_id +WHERE client_side IS NOT NULL AND lfev.enum_id = 1; + +INSERT INTO version_fields (version_id, field_id, enum_value) +SELECT v.id, 1, m.server_side +FROM versions v +INNER JOIN mods m ON v.mod_id = m.id +INNER JOIN loader_field_enum_values lfev ON m.client_side = lfev.original_id +WHERE server_side IS NOT NULL AND lfev.enum_id = 1; + +ALTER TABLE mods DROP COLUMN client_side; +ALTER TABLE mods DROP COLUMN 
server_side; +DROP TABLE side_types; + +-- Convert game_versions +INSERT INTO loader_field_enums (id, enum_name, hidable) VALUES (2, 'game_versions', true); +INSERT INTO loader_field_enum_values (original_id, enum_id, value, created, metadata) +SELECT id, 2, version, created, json_build_object('type', type, 'major', major) FROM game_versions; + +INSERT INTO loader_fields (field, field_type, enum_type, optional, min_val) VALUES('game_versions', 'array_enum', 2, false, 0); + +INSERT INTO version_fields(version_id, field_id, enum_value) +SELECT gvv.joining_version_id, 2, lfev.id +FROM game_versions_versions gvv INNER JOIN loader_field_enum_values lfev ON gvv.game_version_id = lfev.original_id +WHERE lfev.enum_id = 2; + +ALTER TABLE mods DROP COLUMN loaders; +ALTER TABLE mods DROP COLUMN game_versions; +DROP TABLE game_versions_versions; +DROP TABLE game_versions; + +-- Convert project types +-- we are creating a new loader type- 'mrpack'- for minecraft modpacks +INSERT INTO loaders (loader) VALUES ('mrpack'); + +-- For the loader 'mrpack', we create loader fields for every loader +-- That way we keep information like "this modpack is a fabric modpack" +INSERT INTO loader_field_enums (id, enum_name, hidable) VALUES (3, 'mrpack_loaders', true); +INSERT INTO loader_field_enum_values (original_id, enum_id, value) SELECT id, 2, loader FROM loaders WHERE loader != 'mrpack'; +INSERT INTO loader_fields (field, field_type, enum_type, optional, min_val) VALUES('mrpack_loaders', 'array_enum', 3, false, 0); +INSERT INTO loader_fields_loaders (loader_id, loader_field_id) +SELECT l.id, lf.id FROM loaders l CROSS JOIN loader_fields lf WHERE lf.field = 'mrpack_loaders' AND l.loader = 'mrpack'; + +INSERT INTO version_fields(version_id, field_id, enum_value) +SELECT v.id, lf.id, lfev.id +FROM versions v +INNER JOIN mods m ON v.mod_id = m.id +INNER JOIN loaders_versions lv ON v.id = lv.version_id +INNER JOIN loaders l ON lv.loader_id = l.id +CROSS JOIN loader_fields lf +LEFT JOIN 
loader_field_enum_values lfev ON lf.enum_type = lfev.enum_id AND lfev.original_id = l.id +WHERE m.project_type = (SELECT id FROM project_types WHERE name = 'modpack') AND lf.field = 'mrpack_loaders'; + +INSERT INTO loaders_project_types (joining_loader_id, joining_project_type_id) SELECT DISTINCT l.id, pt.id FROM loaders l CROSS JOIN project_types pt WHERE pt.name = 'modpack' AND l.loader = 'mrpack'; + +--- Non-mrpack loaders no longer support modpacks +DELETE FROM loaders_project_types WHERE joining_loader_id != (SELECT id FROM loaders WHERE loader = 'mrpack') AND joining_project_type_id = (SELECT id FROM project_types WHERE name = 'modpack'); + +CREATE TABLE loaders_project_types_games ( + loader_id integer REFERENCES loaders NOT NULL, + project_type_id integer REFERENCES project_types NOT NULL, + game_id integer REFERENCES games NOT NULL, + PRIMARY KEY (loader_id, project_type_id, game_id) +); + +-- all past loader_project_types are minecraft-java as the only game before this migration is minecraft-java +INSERT INTO loaders_project_types_games (loader_id, project_type_id, game_id) SELECT joining_loader_id, joining_project_type_id, 1 FROM loaders_project_types; + +-- Now that loaders are inferred, we can drop the project_type column from mods +ALTER TABLE mods DROP COLUMN project_type; + + +-- Drop original_id columns +ALTER TABLE loader_field_enum_values DROP COLUMN original_id; \ No newline at end of file diff --git a/src/database/models/categories.rs b/src/database/models/categories.rs index abb6d7f2..95d054f2 100644 --- a/src/database/models/categories.rs +++ b/src/database/models/categories.rs @@ -1,9 +1,9 @@ +use std::collections::HashMap; + + use crate::database::redis::RedisPool; use super::ids::*; use super::DatabaseError; -use chrono::DateTime; -use chrono::Utc; use futures::TryStreamExt; use serde::{Deserialize, Serialize}; @@ -14,29 +14,6 @@ pub struct ProjectType { pub name: String, } -pub struct SideType { - pub id: SideTypeId, - pub name: String, -} - -#[derive(Serialize, 
Deserialize)] -pub struct Loader { - pub id: LoaderId, - pub loader: String, - pub icon: String, - pub supported_project_types: Vec, -} - -#[derive(Clone, Serialize, Deserialize, Debug)] -pub struct GameVersion { - pub id: GameVersionId, - pub version: String, - #[serde(rename = "type")] - pub type_: String, - pub created: DateTime, - pub major: bool, -} - #[derive(Serialize, Deserialize)] pub struct Category { pub id: CategoryId, @@ -59,21 +36,32 @@ pub struct DonationPlatform { } impl Category { - pub async fn get_id<'a, E>(name: &str, exec: E) -> Result, DatabaseError> + // Gets hashmap of category ids matching a name + // Multiple categories can have the same name, but different project types, so we need to return a hashmap + // ProjectTypeId -> CategoryId + pub async fn get_ids<'a, E>( + name: &str, + exec: E, + ) -> Result, DatabaseError> where E: sqlx::Executor<'a, Database = sqlx::Postgres>, { let result = sqlx::query!( " - SELECT id FROM categories + SELECT id, project_type FROM categories WHERE category = $1 ", name, ) - .fetch_optional(exec) + .fetch_all(exec) .await?; - Ok(result.map(|r| CategoryId(r.id))) + let mut map = HashMap::new(); + for r in result { + map.insert(ProjectTypeId(r.project_type), CategoryId(r.id)); + } + + Ok(map) } pub async fn get_id_project<'a, E>( @@ -139,221 +127,6 @@ impl Category { } } -impl Loader { - pub async fn get_id<'a, E>(name: &str, exec: E) -> Result, DatabaseError> - where - E: sqlx::Executor<'a, Database = sqlx::Postgres>, - { - let result = sqlx::query!( - " - SELECT id FROM loaders - WHERE loader = $1 - ", - name - ) - .fetch_optional(exec) - .await?; - - Ok(result.map(|r| LoaderId(r.id))) - } - - pub async fn list<'a, E>(exec: E, redis: &RedisPool) -> Result, DatabaseError> - where - E: sqlx::Executor<'a, Database = sqlx::Postgres>, - { - let res: Option> = redis - .get_deserialized_from_json(TAGS_NAMESPACE, "loader") - .await?; - - if let Some(res) = res { - return Ok(res); - } - - let result = sqlx::query!( - 
" - SELECT l.id id, l.loader loader, l.icon icon, - ARRAY_AGG(DISTINCT pt.name) filter (where pt.name is not null) project_types - FROM loaders l - LEFT OUTER JOIN loaders_project_types lpt ON joining_loader_id = l.id - LEFT OUTER JOIN project_types pt ON lpt.joining_project_type_id = pt.id - GROUP BY l.id; - " - ) - .fetch_many(exec) - .try_filter_map(|e| async { - Ok(e.right().map(|x| Loader { - id: LoaderId(x.id), - loader: x.loader, - icon: x.icon, - supported_project_types: x - .project_types - .unwrap_or_default() - .iter() - .map(|x| x.to_string()) - .collect(), - })) - }) - .try_collect::>() - .await?; - - redis - .set_serialized_to_json(TAGS_NAMESPACE, "loader", &result, None) - .await?; - - Ok(result) - } -} - -#[derive(Default)] -pub struct GameVersionBuilder<'a> { - pub version: Option<&'a str>, - pub version_type: Option<&'a str>, - pub date: Option<&'a DateTime>, -} - -impl GameVersion { - pub fn builder() -> GameVersionBuilder<'static> { - GameVersionBuilder::default() - } - - pub async fn get_id<'a, E>( - version: &str, - exec: E, - ) -> Result, DatabaseError> - where - E: sqlx::Executor<'a, Database = sqlx::Postgres>, - { - let result = sqlx::query!( - " - SELECT id FROM game_versions - WHERE version = $1 - ", - version - ) - .fetch_optional(exec) - .await?; - - Ok(result.map(|r| GameVersionId(r.id))) - } - - pub async fn list<'a, E>(exec: E, redis: &RedisPool) -> Result, DatabaseError> - where - E: sqlx::Executor<'a, Database = sqlx::Postgres>, - { - let res: Option> = redis - .get_deserialized_from_json(TAGS_NAMESPACE, "game_version") - .await?; - - if let Some(res) = res { - return Ok(res); - } - - let result = sqlx::query!( - " - SELECT gv.id id, gv.version version_, gv.type type_, gv.created created, gv.major FROM game_versions gv - ORDER BY created DESC - " - ) - .fetch_many(exec) - .try_filter_map(|e| async { Ok(e.right().map(|c| GameVersion { - id: GameVersionId(c.id), - version: c.version_, - type_: c.type_, - created: c.created, - major: 
c.major - })) }) - .try_collect::>() - .await?; - - redis - .set_serialized_to_json(TAGS_NAMESPACE, "game_version", &result, None) - .await?; - Ok(result) - } - - pub async fn list_filter<'a, E>( - version_type_option: Option<&str>, - major_option: Option, - exec: E, - redis: &RedisPool, - ) -> Result, DatabaseError> - where - E: sqlx::Executor<'a, Database = sqlx::Postgres>, - { - let result = Self::list(exec, redis) - .await? - .into_iter() - .filter(|x| { - let mut bool = true; - - if let Some(version_type) = version_type_option { - bool &= &*x.type_ == version_type; - } - if let Some(major) = major_option { - bool &= x.major == major; - } - - bool - }) - .collect(); - - Ok(result) - } -} - -impl<'a> GameVersionBuilder<'a> { - /// The game version. Spaces must be replaced with '_' for it to be valid - pub fn version(self, version: &'a str) -> Result, DatabaseError> { - Ok(Self { - version: Some(version), - ..self - }) - } - - pub fn version_type( - self, - version_type: &'a str, - ) -> Result, DatabaseError> { - Ok(Self { - version_type: Some(version_type), - ..self - }) - } - - pub fn created(self, created: &'a DateTime) -> GameVersionBuilder<'a> { - Self { - date: Some(created), - ..self - } - } - - pub async fn insert<'b, E>(self, exec: E) -> Result - where - E: sqlx::Executor<'b, Database = sqlx::Postgres>, - { - // This looks like a mess, but it *should* work - // This allows game versions to be partially updated without - // replacing the unspecified fields with defaults. 
- let result = sqlx::query!( - " - INSERT INTO game_versions (version, type, created) - VALUES ($1, COALESCE($2, 'other'), COALESCE($3, timezone('utc', now()))) - ON CONFLICT (version) DO UPDATE - SET type = COALESCE($2, game_versions.type), - created = COALESCE($3, game_versions.created) - RETURNING id - ", - self.version, - self.version_type, - self.date.map(chrono::DateTime::naive_utc), - ) - .fetch_one(exec) - .await?; - - Ok(GameVersionId(result.id)) - } -} - impl DonationPlatform { pub async fn get_id<'a, E>( id: &str, @@ -509,51 +282,3 @@ impl ProjectType { Ok(result) } } - -impl SideType { - pub async fn get_id<'a, E>(name: &str, exec: E) -> Result, DatabaseError> - where - E: sqlx::Executor<'a, Database = sqlx::Postgres>, - { - let result = sqlx::query!( - " - SELECT id FROM side_types - WHERE name = $1 - ", - name - ) - .fetch_optional(exec) - .await?; - - Ok(result.map(|r| SideTypeId(r.id))) - } - - pub async fn list<'a, E>(exec: E, redis: &RedisPool) -> Result, DatabaseError> - where - E: sqlx::Executor<'a, Database = sqlx::Postgres>, - { - let res: Option> = redis - .get_deserialized_from_json(TAGS_NAMESPACE, "side_type") - .await?; - - if let Some(res) = res { - return Ok(res); - } - - let result = sqlx::query!( - " - SELECT name FROM side_types - " - ) - .fetch_many(exec) - .try_filter_map(|e| async { Ok(e.right().map(|c| c.name)) }) - .try_collect::>() - .await?; - - redis - .set_serialized_to_json(TAGS_NAMESPACE, "side_type", &result, None) - .await?; - - Ok(result) - } -} diff --git a/src/database/models/ids.rs b/src/database/models/ids.rs index b8953462..03463976 100644 --- a/src/database/models/ids.rs +++ b/src/database/models/ids.rs @@ -202,7 +202,7 @@ pub struct OrganizationId(pub i64); #[derive(Copy, Clone, Debug, Type, PartialEq, Eq, Hash, Serialize, Deserialize)] #[sqlx(transparent)] pub struct ProjectId(pub i64); -#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize)] +#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize, 
PartialEq, Eq, Hash)] #[sqlx(transparent)] pub struct ProjectTypeId(pub i32); @@ -219,10 +219,7 @@ pub struct DonationPlatformId(pub i32); #[derive(Copy, Clone, Debug, Type, PartialEq, Eq, Hash, Serialize, Deserialize)] #[sqlx(transparent)] pub struct VersionId(pub i64); -#[derive(Copy, Clone, Debug, Type, Deserialize, Serialize)] -#[sqlx(transparent)] -pub struct GameVersionId(pub i32); -#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize)] +#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize, PartialEq, Eq, Hash)] #[sqlx(transparent)] pub struct LoaderId(pub i32); #[derive(Copy, Clone, Debug, Type, Serialize, Deserialize)] @@ -270,6 +267,18 @@ pub struct SessionId(pub i64); #[sqlx(transparent)] pub struct ImageId(pub i64); +#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash)] +#[sqlx(transparent)] +pub struct LoaderFieldId(pub i32); + +#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash)] +#[sqlx(transparent)] +pub struct LoaderFieldEnumId(pub i32); + +#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash)] +#[sqlx(transparent)] +pub struct LoaderFieldEnumValueId(pub i32); + #[derive(Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash)] #[sqlx(transparent)] pub struct OAuthClientId(pub i64); diff --git a/src/database/models/legacy_loader_fields.rs b/src/database/models/legacy_loader_fields.rs new file mode 100644 index 00000000..d322768e --- /dev/null +++ b/src/database/models/legacy_loader_fields.rs @@ -0,0 +1,208 @@ +// In V3, we switched to dynamic loader fields for a better support for more loaders, games, and potential metadata. +// This file contains the legacy loader fields, which are still used by V2 projects. +// They are still useful to have in several places where minecraft-java functionality is hardcoded- for example, +// for fetching data from forge, maven, etc. 
+// These fields only apply to minecraft-java, and are hardcoded to the minecraft-java game. + +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use serde_json::json; + +use crate::database::redis::RedisPool; + +use super::{ + loader_fields::{LoaderFieldEnum, LoaderFieldEnumValue, VersionField, VersionFieldValue}, + DatabaseError, LoaderFieldEnumValueId, +}; + +#[derive(Clone, Serialize, Deserialize, Debug)] +pub struct MinecraftGameVersion { + pub id: LoaderFieldEnumValueId, + pub version: String, + #[serde(rename = "type")] + pub type_: String, + pub created: DateTime, + pub major: bool, +} + +impl MinecraftGameVersion { + // The name under which this legacy field is stored as a LoaderField + pub const FIELD_NAME: &'static str = "game_versions"; + + pub fn builder() -> MinecraftGameVersionBuilder<'static> { + MinecraftGameVersionBuilder::default() + } + + pub async fn list<'a, E>( + exec: E, + redis: &RedisPool, + ) -> Result, DatabaseError> + where + E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy, + { + let game_version_enum = LoaderFieldEnum::get(Self::FIELD_NAME, exec, redis) + .await? + .ok_or_else(|| { + DatabaseError::SchemaError("Could not find game version enum.".to_string()) + })?; + let game_version_enum_values = + LoaderFieldEnumValue::list(game_version_enum.id, exec, redis).await?; + Ok(game_version_enum_values + .into_iter() + .map(MinecraftGameVersion::from_enum_value) + .collect()) + } + + // TODO: remove this + pub async fn list_transaction( + transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, + redis: &RedisPool, + ) -> Result, DatabaseError> { + let game_version_enum = LoaderFieldEnum::get(Self::FIELD_NAME, &mut **transaction, redis) + .await? 
+ .ok_or_else(|| { + DatabaseError::SchemaError("Could not find game version enum.".to_string()) + })?; + let game_version_enum_values = + LoaderFieldEnumValue::list(game_version_enum.id, &mut **transaction, redis).await?; + Ok(game_version_enum_values + .into_iter() + .map(MinecraftGameVersion::from_enum_value) + .collect()) + } + + // Tries to create a MinecraftGameVersion from a VersionField + // Clones on success + pub fn try_from_version_field( + version_field: &VersionField, + ) -> Result, DatabaseError> { + if version_field.field_name != Self::FIELD_NAME { + return Err(DatabaseError::SchemaError(format!( + "Field name {} is not {}", + version_field.field_name, + Self::FIELD_NAME + ))); + } + let game_versions = match version_field.clone() { + VersionField { + value: VersionFieldValue::ArrayEnum(_, values), + .. + } => values.into_iter().map(Self::from_enum_value).collect(), + VersionField { + value: VersionFieldValue::Enum(_, value), + .. + } => { + vec![Self::from_enum_value(value)] + } + _ => { + return Err(DatabaseError::SchemaError(format!( + "Game version requires field value to be an enum: {:?}", + version_field + ))); + } + }; + Ok(game_versions) + } + + pub fn from_enum_value(loader_field_enum_value: LoaderFieldEnumValue) -> MinecraftGameVersion { + MinecraftGameVersion { + id: loader_field_enum_value.id, + version: loader_field_enum_value.value, + created: loader_field_enum_value.created, + type_: loader_field_enum_value + .metadata + .get("type") + .and_then(|x| x.as_str()) + .map(|x| x.to_string()) + .unwrap_or_default(), + major: loader_field_enum_value + .metadata + .get("major") + .and_then(|x| x.as_bool()) + .unwrap_or_default(), + } + } +} + +#[derive(Default)] +pub struct MinecraftGameVersionBuilder<'a> { + pub version: Option<&'a str>, + pub version_type: Option<&'a str>, + pub date: Option<&'a DateTime>, +} + +impl<'a> MinecraftGameVersionBuilder<'a> { + pub fn new() -> Self { + Self::default() + } + /// The game version. 
Spaces must be replaced with '_' for it to be valid + pub fn version( + self, + version: &'a str, + ) -> Result, DatabaseError> { + Ok(Self { + version: Some(version), + ..self + }) + } + + pub fn version_type( + self, + version_type: &'a str, + ) -> Result, DatabaseError> { + Ok(Self { + version_type: Some(version_type), + ..self + }) + } + + pub fn created(self, created: &'a DateTime) -> MinecraftGameVersionBuilder<'a> { + Self { + date: Some(created), + ..self + } + } + + pub async fn insert<'b, E>( + self, + exec: E, + redis: &RedisPool, + ) -> Result + where + E: sqlx::Executor<'b, Database = sqlx::Postgres> + Copy, + { + let game_versions_enum = LoaderFieldEnum::get("game_versions", exec, redis) + .await? + .ok_or(DatabaseError::SchemaError( + "Missing loaders field: 'game_versions'".to_string(), + ))?; + + // Get enum id for game versions + let metadata = json!({ + "type": self.version_type, + "major": false + }); + + // This looks like a mess, but it *should* work + // This allows game versions to be partially updated without + // replacing the unspecified fields with defaults. 
+ let result = sqlx::query!( + " + INSERT INTO loader_field_enum_values (enum_id, value, created, metadata) + VALUES ($1, $2, COALESCE($3, timezone('utc', now())), $4) + ON CONFLICT (enum_id, value) DO UPDATE + SET metadata = COALESCE($4, loader_field_enum_values.metadata), + created = COALESCE($3, loader_field_enum_values.created) + RETURNING id + ", + game_versions_enum.id.0, + self.version, + self.date.map(chrono::DateTime::naive_utc), + metadata + ) + .fetch_one(exec) + .await?; + + Ok(LoaderFieldEnumValueId(result.id)) + } +} diff --git a/src/database/models/loader_fields.rs b/src/database/models/loader_fields.rs new file mode 100644 index 00000000..3dba4e03 --- /dev/null +++ b/src/database/models/loader_fields.rs @@ -0,0 +1,959 @@ +use std::collections::HashMap; + +use super::ids::*; +use super::DatabaseError; +use crate::database::redis::RedisPool; +use chrono::DateTime; +use chrono::Utc; +use futures::TryStreamExt; +use itertools::Itertools; +use serde::{Deserialize, Serialize}; + +const LOADER_ID: &str = "loader_id"; +const LOADERS_LIST_NAMESPACE: &str = "loaders"; +const LOADER_FIELDS_NAMESPACE: &str = "loader_fields"; +const LOADER_FIELD_ENUMS_ID_NAMESPACE: &str = "loader_field_enums"; +const LOADER_FIELD_ENUM_VALUES_NAMESPACE: &str = "loader_field_enum_values"; + +#[derive(Clone, Serialize, Deserialize, Debug, Copy)] +pub enum Game { + MinecraftJava, + // MinecraftBedrock + // Future games +} + +impl Game { + pub fn name(&self) -> &'static str { + match self { + Game::MinecraftJava => "minecraft-java", + // Game::MinecraftBedrock => "minecraft-bedrock" + // Future games + } + } + + pub fn from_name(name: &str) -> Option { + match name { + "minecraft-java" => Some(Game::MinecraftJava), + // "minecraft-bedrock" => Some(Game::MinecraftBedrock) + // Future games + _ => None, + } + } +} + +#[derive(Serialize, Deserialize, Clone)] +pub struct Loader { + pub id: LoaderId, + pub loader: String, + pub icon: String, + pub supported_project_types: Vec, + pub 
supported_games: Vec, +} + +impl Loader { + pub async fn get_id<'a, E>( + name: &str, + exec: E, + redis: &RedisPool, + ) -> Result, DatabaseError> + where + E: sqlx::Executor<'a, Database = sqlx::Postgres>, + { + let cached_id: Option = redis.get_deserialized_from_json(LOADER_ID, name).await?; + if let Some(cached_id) = cached_id { + return Ok(Some(LoaderId(cached_id))); + } + + let result = sqlx::query!( + " + SELECT id FROM loaders + WHERE loader = $1 + ", + name + ) + .fetch_optional(exec) + .await? + .map(|r| LoaderId(r.id)); + + if let Some(result) = result { + redis + .set_serialized_to_json(LOADER_ID, name, &result.0, None) + .await?; + } + + Ok(result) + } + + pub async fn list<'a, E>(exec: E, redis: &RedisPool) -> Result, DatabaseError> + where + E: sqlx::Executor<'a, Database = sqlx::Postgres>, + { + let cached_loaders: Option> = redis + .get_deserialized_from_json(LOADERS_LIST_NAMESPACE, "all") + .await?; + if let Some(cached_loaders) = cached_loaders { + return Ok(cached_loaders); + } + + let result = sqlx::query!( + " + SELECT l.id id, l.loader loader, l.icon icon, + ARRAY_AGG(DISTINCT pt.name) filter (where pt.name is not null) project_types, + ARRAY_AGG(DISTINCT g.name) filter (where g.name is not null) games + FROM loaders l + LEFT OUTER JOIN loaders_project_types lpt ON joining_loader_id = l.id + LEFT OUTER JOIN project_types pt ON lpt.joining_project_type_id = pt.id + LEFT OUTER JOIN loaders_project_types_games lptg ON lptg.loader_id = lpt.joining_loader_id AND lptg.project_type_id = lpt.joining_project_type_id + LEFT OUTER JOIN games g ON lptg.game_id = g.id + GROUP BY l.id; + ", + ) + .fetch_many(exec) + .try_filter_map(|e| async { + Ok(e.right().map(|x| Loader { + id: LoaderId(x.id), + loader: x.loader, + icon: x.icon, + supported_project_types: x + .project_types + .unwrap_or_default() + .iter() + .map(|x| x.to_string()) + .collect(), + supported_games: x + .games + .unwrap_or_default() + .iter() + .filter_map(|x| Game::from_name(x)) + 
.collect(), + })) + }) + .try_collect::>() + .await?; + + redis + .set_serialized_to_json(LOADERS_LIST_NAMESPACE, "all", &result, None) + .await?; + + Ok(result) + } +} + +#[derive(Clone, Serialize, Deserialize, Debug)] +pub struct LoaderField { + pub id: LoaderFieldId, + pub field: String, + pub field_type: LoaderFieldType, + pub optional: bool, + pub min_val: Option, + pub max_val: Option, +} + +#[derive(Clone, Serialize, Deserialize, Debug)] +pub enum LoaderFieldType { + Integer, + Text, + Enum(LoaderFieldEnumId), + Boolean, + ArrayInteger, + ArrayText, + ArrayEnum(LoaderFieldEnumId), + ArrayBoolean, +} +impl LoaderFieldType { + pub fn build(field_type_name: &str, loader_field_enum: Option) -> Option { + Some(match (field_type_name, loader_field_enum) { + ("integer", _) => LoaderFieldType::Integer, + ("text", _) => LoaderFieldType::Text, + ("boolean", _) => LoaderFieldType::Boolean, + ("array_integer", _) => LoaderFieldType::ArrayInteger, + ("array_text", _) => LoaderFieldType::ArrayText, + ("array_boolean", _) => LoaderFieldType::ArrayBoolean, + ("enum", Some(id)) => LoaderFieldType::Enum(LoaderFieldEnumId(id)), + ("array_enum", Some(id)) => LoaderFieldType::ArrayEnum(LoaderFieldEnumId(id)), + _ => return None, + }) + } + + pub fn to_str(&self) -> &'static str { + match self { + LoaderFieldType::Integer => "integer", + LoaderFieldType::Text => "text", + LoaderFieldType::Boolean => "boolean", + LoaderFieldType::ArrayInteger => "array_integer", + LoaderFieldType::ArrayText => "array_text", + LoaderFieldType::ArrayBoolean => "array_boolean", + LoaderFieldType::Enum(_) => "enum", + LoaderFieldType::ArrayEnum(_) => "array_enum", + } + } +} + +#[derive(Clone, Serialize, Deserialize, Debug)] +pub struct LoaderFieldEnum { + pub id: LoaderFieldEnumId, + pub enum_name: String, + pub ordering: Option, + pub hidable: bool, +} + +#[derive(Clone, Serialize, Deserialize, Debug, PartialEq, Eq)] +pub struct LoaderFieldEnumValue { + pub id: LoaderFieldEnumValueId, + pub enum_id: 
LoaderFieldEnumId, + pub value: String, + pub ordering: Option, + pub created: DateTime, + #[serde(flatten)] + pub metadata: serde_json::Value, +} + +#[derive(Clone, Serialize, Deserialize, Debug, PartialEq, Eq)] +pub struct VersionField { + pub version_id: VersionId, + pub field_id: LoaderFieldId, + pub field_name: String, + pub value: VersionFieldValue, +} +#[derive(Clone, Serialize, Deserialize, Debug, PartialEq, Eq)] +pub enum VersionFieldValue { + Integer(i32), + Text(String), + Enum(LoaderFieldEnumId, LoaderFieldEnumValue), + Boolean(bool), + ArrayInteger(Vec), + ArrayText(Vec), + ArrayEnum(LoaderFieldEnumId, Vec), + ArrayBoolean(Vec), +} + +#[derive(Clone, Serialize, Deserialize, Debug)] +pub struct QueryVersionField { + pub version_id: VersionId, + pub field_id: LoaderFieldId, + pub int_value: Option, + pub enum_value: Option, + pub string_value: Option, +} + +impl QueryVersionField { + pub fn with_int_value(mut self, int_value: i32) -> Self { + self.int_value = Some(int_value); + self + } + + pub fn with_enum_value(mut self, enum_value: LoaderFieldEnumValue) -> Self { + self.enum_value = Some(enum_value); + self + } + + pub fn with_string_value(mut self, string_value: String) -> Self { + self.string_value = Some(string_value); + self + } +} + +#[derive(Clone, Serialize, Deserialize, Debug)] +pub struct SideType { + pub id: SideTypeId, + pub name: String, +} + +impl LoaderField { + pub async fn get_field<'a, E>( + field: &str, + exec: E, + redis: &RedisPool, + ) -> Result, DatabaseError> + where + E: sqlx::Executor<'a, Database = sqlx::Postgres>, + { + let fields = Self::get_fields(exec, redis).await?; + Ok(fields.into_iter().find(|f| f.field == field)) + } + + // Gets all fields for a given loader + // Returns all as this there are probably relatively few fields per loader + // TODO: in the future, this should be to get all fields in relation to something + // - e.g. get all fields for a given game? 
+ pub async fn get_fields<'a, E>( + exec: E, + redis: &RedisPool, + ) -> Result, DatabaseError> + where + E: sqlx::Executor<'a, Database = sqlx::Postgres>, + { + let cached_fields = redis + .get_deserialized_from_json(LOADER_FIELDS_NAMESPACE, 0) // 0 => whatever we search for fields by + .await?; + if let Some(cached_fields) = cached_fields { + return Ok(cached_fields); + } + + let result = sqlx::query!( + " + SELECT lf.id, lf.field, lf.field_type, lf.optional, lf.min_val, lf.max_val, lf.enum_type + FROM loader_fields lf + ", + ) + .fetch_many(exec) + .try_filter_map(|e| async { + Ok(e.right().and_then(|r| { + Some(LoaderField { + id: LoaderFieldId(r.id), + field_type: LoaderFieldType::build(&r.field_type, r.enum_type)?, + field: r.field, + optional: r.optional, + min_val: r.min_val, + max_val: r.max_val, + }) + })) + }) + .try_collect::>() + .await?; + + redis + .set_serialized_to_json(LOADER_FIELDS_NAMESPACE, &0, &result, None) + .await?; + + Ok(result) + } +} + +impl LoaderFieldEnum { + pub async fn get<'a, E>( + enum_name: &str, // Note: NOT loader field name + exec: E, + redis: &RedisPool, + ) -> Result, DatabaseError> + where + E: sqlx::Executor<'a, Database = sqlx::Postgres>, + { + let cached_enum = redis + .get_deserialized_from_json(LOADER_FIELD_ENUMS_ID_NAMESPACE, enum_name) + .await?; + if let Some(cached_enum) = cached_enum { + return Ok(cached_enum); + } + + let result = sqlx::query!( + " + SELECT lfe.id, lfe.enum_name, lfe.ordering, lfe.hidable + FROM loader_field_enums lfe + WHERE lfe.enum_name = $1 + ", + enum_name + ) + .fetch_optional(exec) + .await? 
+ .map(|l| LoaderFieldEnum { + id: LoaderFieldEnumId(l.id), + enum_name: l.enum_name, + ordering: l.ordering, + hidable: l.hidable, + }); + + redis + .set_serialized_to_json(LOADER_FIELD_ENUMS_ID_NAMESPACE, enum_name, &result, None) + .await?; + + Ok(result) + } +} + +impl LoaderFieldEnumValue { + pub async fn list<'a, E>( + loader_field_enum_id: LoaderFieldEnumId, + exec: E, + redis: &RedisPool, + ) -> Result, DatabaseError> + where + E: sqlx::Executor<'a, Database = sqlx::Postgres>, + { + Ok(Self::list_many(&[loader_field_enum_id], exec, redis) + .await? + .into_iter() + .next() + .map(|x| x.1) + .unwrap_or_default()) + } + + pub async fn list_many_loader_fields<'a, E>( + loader_fields: &[LoaderField], + exec: E, + redis: &RedisPool, + ) -> Result>, DatabaseError> + where + E: sqlx::Executor<'a, Database = sqlx::Postgres>, + { + let get_enum_id = |x: &LoaderField| match x.field_type { + LoaderFieldType::Enum(id) | LoaderFieldType::ArrayEnum(id) => Some(id), + _ => None, + }; + + let enum_ids = loader_fields + .iter() + .filter_map(|x| get_enum_id(x)) + .collect::>(); + let values = Self::list_many(&enum_ids, exec, redis) + .await? 
+ .into_iter() + .collect::>(); + + let mut res = HashMap::new(); + for lf in loader_fields { + if let Some(id) = get_enum_id(lf) { + res.insert(lf.id, values.get(&id).unwrap_or(&Vec::new()).to_vec()); + } + } + Ok(res) + } + + pub async fn list_many<'a, E>( + loader_field_enum_ids: &[LoaderFieldEnumId], + exec: E, + redis: &RedisPool, + ) -> Result)>, DatabaseError> + where + E: sqlx::Executor<'a, Database = sqlx::Postgres>, + { + let mut found_enums = Vec::new(); + let mut remaining_enums: Vec = loader_field_enum_ids.to_vec(); + + if !remaining_enums.is_empty() { + let enums = redis + .multi_get::( + LOADER_FIELD_ENUM_VALUES_NAMESPACE, + loader_field_enum_ids.iter().map(|x| x.0), + ) + .await?; + + for lfe in enums { + if let Some(lfe) = lfe.and_then(|x| { + serde_json::from_str::<(LoaderFieldEnumId, Vec)>(&x).ok() + }) { + remaining_enums.retain(|x| lfe.0 .0 != x.0); + found_enums.push(lfe.1); + continue; + } + } + } + + let remaining_enums = remaining_enums.iter().map(|x| x.0).collect::>(); + let result = sqlx::query!( + " + SELECT id, enum_id, value, ordering, metadata, created FROM loader_field_enum_values + WHERE enum_id = ANY($1) + ", + &remaining_enums + ) + .fetch_many(exec) + .try_filter_map(|e| async { + Ok(e.right().map(|c| LoaderFieldEnumValue { + id: LoaderFieldEnumValueId(c.id), + enum_id: LoaderFieldEnumId(c.enum_id), + value: c.value, + ordering: c.ordering, + created: c.created, + metadata: c.metadata.unwrap_or_default(), + })) + }) + .try_collect::>() + .await?; + + // Convert from an Vec to a Vec<(LoaderFieldEnumId, Vec)> + let cachable_enum_sets: Vec<(LoaderFieldEnumId, Vec)> = result + .clone() + .into_iter() + .group_by(|x| x.enum_id) + .into_iter() + .map(|(k, v)| (k, v.collect::>().to_vec())) + .collect(); + for (k, v) in cachable_enum_sets.iter() { + redis + .set_serialized_to_json(LOADER_FIELD_ENUM_VALUES_NAMESPACE, k.0, v, None) + .await?; + } + + Ok(cachable_enum_sets) + } + + // Matches filter against metadata of enum values + pub 
async fn list_filter<'a, E>( + loader_field_enum_id: LoaderFieldEnumId, + filter: HashMap, + exec: E, + redis: &RedisPool, + ) -> Result, DatabaseError> + where + E: sqlx::Executor<'a, Database = sqlx::Postgres>, + { + let result = Self::list(loader_field_enum_id, exec, redis) + .await? + .into_iter() + .filter(|x| { + let mut bool = true; + for (key, value) in filter.iter() { + if let Some(metadata_value) = x.metadata.get(key) { + bool &= metadata_value == value; + } else { + bool = false; + } + } + bool + }) + .collect(); + + Ok(result) + } +} + +impl VersionField { + pub async fn insert_many( + items: Vec, + transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, + ) -> Result<(), DatabaseError> { + let mut query_version_fields = vec![]; + for item in items { + let base = QueryVersionField { + version_id: item.version_id, + field_id: item.field_id, + int_value: None, + enum_value: None, + string_value: None, + }; + + match item.value { + VersionFieldValue::Integer(i) => { + query_version_fields.push(base.clone().with_int_value(i)) + } + VersionFieldValue::Text(s) => { + query_version_fields.push(base.clone().with_string_value(s)) + } + VersionFieldValue::Boolean(b) => { + query_version_fields.push(base.clone().with_int_value(if b { 1 } else { 0 })) + } + VersionFieldValue::ArrayInteger(v) => { + for i in v { + query_version_fields.push(base.clone().with_int_value(i)); + } + } + VersionFieldValue::ArrayText(v) => { + for s in v { + query_version_fields.push(base.clone().with_string_value(s)); + } + } + VersionFieldValue::ArrayBoolean(v) => { + for b in v { + query_version_fields.push(base.clone().with_int_value(if b { + 1 + } else { + 0 + })); + } + } + VersionFieldValue::Enum(_, v) => { + query_version_fields.push(base.clone().with_enum_value(v)) + } + VersionFieldValue::ArrayEnum(_, v) => { + for ev in v { + query_version_fields.push(base.clone().with_enum_value(ev)); + } + } + }; + } + + let (field_ids, version_ids, int_values, enum_values, string_values): ( 
+ Vec<_>, + Vec<_>, + Vec<_>, + Vec<_>, + Vec<_>, + ) = query_version_fields + .iter() + .map(|l| { + ( + l.field_id.0, + l.version_id.0, + l.int_value, + l.enum_value.as_ref().map(|e| e.id.0), + l.string_value.clone(), + ) + }) + .multiunzip(); + + sqlx::query!( + " + INSERT INTO version_fields (field_id, version_id, int_value, string_value, enum_value) + SELECT * FROM UNNEST($1::integer[], $2::bigint[], $3::integer[], $4::text[], $5::integer[]) + ", + &field_ids[..], + &version_ids[..], + &int_values[..] as &[Option], + &string_values[..] as &[Option], + &enum_values[..] as &[Option] + ) + .execute(&mut **transaction) + .await?; + + Ok(()) + } + + pub fn check_parse( + version_id: VersionId, + loader_field: LoaderField, + value: serde_json::Value, + enum_variants: Vec, + ) -> Result { + let value = VersionFieldValue::parse(&loader_field, value, enum_variants)?; + Ok(VersionField { + version_id, + field_id: loader_field.id, + field_name: loader_field.field, + value, + }) + } + + pub fn from_query_json( + version_id: i64, + loader_fields: Option, + version_fields: Option, + loader_field_enum_values: Option, + ) -> Vec { + #[derive(Deserialize, Debug)] + struct JsonLoaderField { + lf_id: i32, + field: String, + field_type: String, + enum_type: Option, + min_val: Option, + max_val: Option, + optional: bool, + } + + #[derive(Deserialize, Debug)] + struct JsonVersionField { + field_id: i32, + int_value: Option, + enum_value: Option, + string_value: Option, + } + + #[derive(Deserialize, Debug)] + struct JsonLoaderFieldEnumValue { + id: i32, + enum_id: i32, + value: String, + ordering: Option, + created: DateTime, + metadata: Option, + } + + let query_loader_fields: Vec = loader_fields + .and_then(|x| serde_json::from_value(x).ok()) + .unwrap_or_default(); + let query_version_field_combined: Vec = version_fields + .and_then(|x| serde_json::from_value(x).ok()) + .unwrap_or_default(); + let query_loader_field_enum_values: Vec = + loader_field_enum_values + .and_then(|x| 
serde_json::from_value(x).ok()) + .unwrap_or_default(); + let version_id = VersionId(version_id); + query_loader_fields + .into_iter() + .filter_map(|q| { + let loader_field_type = match LoaderFieldType::build(&q.field_type, q.enum_type) { + Some(lft) => lft, + None => return None, + }; + let loader_field = LoaderField { + id: LoaderFieldId(q.lf_id), + field: q.field.clone(), + field_type: loader_field_type, + optional: q.optional, + min_val: q.min_val, + max_val: q.max_val, + }; + let values = query_version_field_combined + .iter() + .filter_map(|qvf| { + if qvf.field_id == q.lf_id { + let lfev = query_loader_field_enum_values + .iter() + .find(|x| Some(x.id) == qvf.enum_value); + + Some(QueryVersionField { + version_id, + field_id: LoaderFieldId(qvf.field_id), + int_value: qvf.int_value, + enum_value: lfev.map(|lfev| LoaderFieldEnumValue { + id: LoaderFieldEnumValueId(lfev.id), + enum_id: LoaderFieldEnumId(lfev.enum_id), + value: lfev.value.clone(), + ordering: lfev.ordering, + created: lfev.created, + metadata: lfev.metadata.clone().unwrap_or_default(), + }), + string_value: qvf.string_value.clone(), + }) + } else { + None + } + }) + .collect::>(); + + VersionField::build(loader_field, version_id, values).ok() + }) + .collect() + } + + pub fn build( + loader_field: LoaderField, + version_id: VersionId, + query_version_fields: Vec, + ) -> Result { + let value = VersionFieldValue::build(&loader_field.field_type, query_version_fields)?; + Ok(VersionField { + version_id, + field_id: loader_field.id, + field_name: loader_field.field, + value, + }) + } +} + +impl VersionFieldValue { + // Build from user-submitted JSON data + // value is the attempted value of the field, which will be tried to parse to the correct type + // enum_array is the list of valid enum variants for the field, if it is an enum (see LoaderFieldEnumValue::list_many_loader_fields) + pub fn parse( + loader_field: &LoaderField, + value: serde_json::Value, + enum_array: Vec, + ) -> Result { + let 
field_name = &loader_field.field; + let field_type = &loader_field.field_type; + + let error_value = value.clone(); + let incorrect_type_error = |field_type: &str| { + format!( + "Provided value '{v}' for {field_name} could not be parsed to {field_type} ", + v = serde_json::to_string(&error_value).unwrap_or_default() + ) + }; + + Ok(match field_type { + LoaderFieldType::Integer => VersionFieldValue::Integer( + serde_json::from_value(value).map_err(|_| incorrect_type_error("integer"))?, + ), + LoaderFieldType::Text => VersionFieldValue::Text( + value + .as_str() + .ok_or_else(|| incorrect_type_error("string"))? + .to_string(), + ), + LoaderFieldType::Boolean => VersionFieldValue::Boolean( + value + .as_bool() + .ok_or_else(|| incorrect_type_error("boolean"))?, + ), + LoaderFieldType::ArrayInteger => VersionFieldValue::ArrayInteger({ + let array_values: Vec = serde_json::from_value(value) + .map_err(|_| incorrect_type_error("array of integers"))?; + array_values.into_iter().collect() + }), + LoaderFieldType::ArrayText => VersionFieldValue::ArrayText({ + let array_values: Vec = serde_json::from_value(value) + .map_err(|_| incorrect_type_error("array of strings"))?; + array_values.into_iter().collect() + }), + LoaderFieldType::ArrayBoolean => VersionFieldValue::ArrayBoolean({ + let array_values: Vec = serde_json::from_value(value) + .map_err(|_| incorrect_type_error("array of booleans"))?; + array_values.into_iter().map(|v| v != 0).collect() + }), + LoaderFieldType::Enum(id) => VersionFieldValue::Enum(*id, { + let enum_value = value.as_str().ok_or_else(|| incorrect_type_error("enum"))?; + if let Some(ev) = enum_array.into_iter().find(|v| v.value == enum_value) { + ev + } else { + return Err(format!( + "Provided value '{enum_value}' is not a valid variant for {field_name}" + )); + } + }), + LoaderFieldType::ArrayEnum(id) => VersionFieldValue::ArrayEnum(*id, { + let array_values: Vec = serde_json::from_value(value) + .map_err(|_| incorrect_type_error("array of enums"))?; 
+ let mut enum_values = vec![]; + for av in array_values { + if let Some(ev) = enum_array.iter().find(|v| v.value == av) { + enum_values.push(ev.clone()); + } else { + return Err(format!( + "Provided value '{av}' is not a valid variant for {field_name}" + )); + } + } + enum_values + }), + }) + } + + // Build from internal query data + // This encapsulates reundant behavior in db querie -> object conversions + pub fn build( + field_type: &LoaderFieldType, + qvfs: Vec, + ) -> Result { + let field_name = field_type.to_str(); + let get_first = |qvfs: Vec| -> Result { + if qvfs.len() > 1 { + return Err(DatabaseError::SchemaError(format!( + "Multiple fields for field {}", + field_name + ))); + } + qvfs.into_iter().next().ok_or_else(|| { + DatabaseError::SchemaError(format!("No version fields for field {}", field_name)) + }) + }; + + let did_not_exist_error = |field_name: &str, desired_field: &str| { + DatabaseError::SchemaError(format!( + "Field name {} for field {} in does not exist", + desired_field, field_name + )) + }; + + Ok(match field_type { + LoaderFieldType::Integer => VersionFieldValue::Integer( + get_first(qvfs)? + .int_value + .ok_or(did_not_exist_error(field_name, "int_value"))?, + ), + LoaderFieldType::Text => VersionFieldValue::Text( + get_first(qvfs)? + .string_value + .ok_or(did_not_exist_error(field_name, "string_value"))?, + ), + LoaderFieldType::Boolean => VersionFieldValue::Boolean( + get_first(qvfs)? + .int_value + .ok_or(did_not_exist_error(field_name, "int_value"))? 
+ != 0, + ), + LoaderFieldType::ArrayInteger => VersionFieldValue::ArrayInteger( + qvfs.into_iter() + .map(|qvf| { + qvf.int_value + .ok_or(did_not_exist_error(field_name, "int_value")) + }) + .collect::>()?, + ), + LoaderFieldType::ArrayText => VersionFieldValue::ArrayText( + qvfs.into_iter() + .map(|qvf| { + qvf.string_value + .ok_or(did_not_exist_error(field_name, "string_value")) + }) + .collect::>()?, + ), + LoaderFieldType::ArrayBoolean => VersionFieldValue::ArrayBoolean( + qvfs.into_iter() + .map(|qvf| { + Ok::( + qvf.int_value + .ok_or(did_not_exist_error(field_name, "int_value"))? + != 0, + ) + }) + .collect::>()?, + ), + + LoaderFieldType::Enum(id) => VersionFieldValue::Enum( + *id, + get_first(qvfs)? + .enum_value + .ok_or(did_not_exist_error(field_name, "enum_value"))?, + ), + LoaderFieldType::ArrayEnum(id) => VersionFieldValue::ArrayEnum( + *id, + qvfs.into_iter() + .map(|qvf| { + qvf.enum_value + .ok_or(did_not_exist_error(field_name, "enum_value")) + }) + .collect::>()?, + ), + }) + } + + // Serialize to internal value, such as for converting to user-facing JSON + pub fn serialize_internal(&self) -> serde_json::Value { + match self { + VersionFieldValue::Integer(i) => serde_json::Value::Number((*i).into()), + VersionFieldValue::Text(s) => serde_json::Value::String(s.clone()), + VersionFieldValue::Boolean(b) => serde_json::Value::Bool(*b), + VersionFieldValue::ArrayInteger(v) => serde_json::Value::Array( + v.iter() + .map(|i| serde_json::Value::Number((*i).into())) + .collect(), + ), + VersionFieldValue::ArrayText(v) => serde_json::Value::Array( + v.iter() + .map(|s| serde_json::Value::String(s.clone())) + .collect(), + ), + VersionFieldValue::ArrayBoolean(v) => { + serde_json::Value::Array(v.iter().map(|b| serde_json::Value::Bool(*b)).collect()) + } + VersionFieldValue::Enum(_, v) => serde_json::Value::String(v.value.clone()), + VersionFieldValue::ArrayEnum(_, v) => serde_json::Value::Array( + v.iter() + .map(|v| 
serde_json::Value::String(v.value.clone())) + .collect(), + ), + } + } + + // For conversion to an interanl string(s), such as for search facets, filtering, or direct hardcoding + // No matter the type, it will be converted to a Vec, whre the non-array types will have a single element + pub fn as_strings(&self) -> Vec { + match self { + VersionFieldValue::Integer(i) => vec![i.to_string()], + VersionFieldValue::Text(s) => vec![s.clone()], + VersionFieldValue::Boolean(b) => vec![b.to_string()], + VersionFieldValue::ArrayInteger(v) => v.iter().map(|i| i.to_string()).collect(), + VersionFieldValue::ArrayText(v) => v.clone(), + VersionFieldValue::ArrayBoolean(v) => v.iter().map(|b| b.to_string()).collect(), + VersionFieldValue::Enum(_, v) => vec![v.value.clone()], + VersionFieldValue::ArrayEnum(_, v) => v.iter().map(|v| v.value.clone()).collect(), + } + } + + pub fn contains_json_value(&self, value: &serde_json::Value) -> bool { + match self { + VersionFieldValue::Integer(i) => value.as_i64() == Some(*i as i64), + VersionFieldValue::Text(s) => value.as_str() == Some(s), + VersionFieldValue::Boolean(b) => value.as_bool() == Some(*b), + VersionFieldValue::ArrayInteger(v) => value + .as_i64() + .map(|i| v.contains(&(i as i32))) + .unwrap_or(false), + VersionFieldValue::ArrayText(v) => value + .as_str() + .map(|s| v.contains(&s.to_string())) + .unwrap_or(false), + VersionFieldValue::ArrayBoolean(v) => { + value.as_bool().map(|b| v.contains(&b)).unwrap_or(false) + } + VersionFieldValue::Enum(_, v) => value.as_str() == Some(&v.value), + VersionFieldValue::ArrayEnum(_, v) => value + .as_str() + .map(|s| v.iter().any(|v| v.value == s)) + .unwrap_or(false), + } + } +} diff --git a/src/database/models/mod.rs b/src/database/models/mod.rs index 5d5bc34f..eb4335cb 100644 --- a/src/database/models/mod.rs +++ b/src/database/models/mod.rs @@ -5,6 +5,8 @@ pub mod collection_item; pub mod flow_item; pub mod ids; pub mod image_item; +pub mod legacy_loader_fields; +pub mod loader_fields; 
pub mod notification_item; pub mod oauth_client_authorization_item; pub mod oauth_client_item; @@ -43,4 +45,6 @@ pub enum DatabaseError { RedisPool(#[from] deadpool_redis::PoolError), #[error("Error while serializing with the cache: {0}")] SerdeCacheError(#[from] serde_json::Error), + #[error("Schema error: {0}")] + SchemaError(String), } diff --git a/src/database/models/project_item.rs b/src/database/models/project_item.rs index 365dd473..a7589a3d 100644 --- a/src/database/models/project_item.rs +++ b/src/database/models/project_item.rs @@ -141,7 +141,6 @@ impl ModCategory { #[derive(Clone)] pub struct ProjectBuilder { pub project_id: ProjectId, - pub project_type_id: ProjectTypeId, pub team_id: TeamId, pub organization_id: Option, pub title: String, @@ -158,8 +157,6 @@ pub struct ProjectBuilder { pub initial_versions: Vec, pub status: ProjectStatus, pub requested_status: Option, - pub client_side: SideTypeId, - pub server_side: SideTypeId, pub license: String, pub slug: Option, pub donation_urls: Vec, @@ -175,7 +172,6 @@ impl ProjectBuilder { ) -> Result { let project_struct = Project { id: self.project_id, - project_type: self.project_type_id, team_id: self.team_id, organization_id: self.organization_id, title: self.title, @@ -200,8 +196,6 @@ impl ProjectBuilder { wiki_url: self.wiki_url, license_url: self.license_url, discord_url: self.discord_url, - client_side: self.client_side, - server_side: self.server_side, license: self.license, slug: self.slug, moderation_message: None, @@ -210,7 +204,6 @@ impl ProjectBuilder { color: self.color, monetization_status: self.monetization_status, loaders: vec![], - game_versions: vec![], }; project_struct.insert(&mut *transaction).await?; @@ -244,16 +237,12 @@ impl ProjectBuilder { .collect_vec(); ModCategory::insert_many(mod_categories, &mut *transaction).await?; - Project::update_game_versions(self.project_id, &mut *transaction).await?; - Project::update_loaders(self.project_id, &mut *transaction).await?; - 
Ok(self.project_id) } } #[derive(Clone, Debug, Serialize, Deserialize)] pub struct Project { pub id: ProjectId, - pub project_type: ProjectTypeId, pub team_id: TeamId, pub organization_id: Option, pub title: String, @@ -274,8 +263,6 @@ pub struct Project { pub wiki_url: Option, pub license_url: Option, pub discord_url: Option, - pub client_side: SideTypeId, - pub server_side: SideTypeId, pub license: String, pub slug: Option, pub moderation_message: Option, @@ -284,7 +271,6 @@ pub struct Project { pub color: Option, pub monetization_status: MonetizationStatus, pub loaders: Vec, - pub game_versions: Vec, } impl Project { @@ -298,15 +284,15 @@ impl Project { id, team_id, title, description, body, published, downloads, icon_url, issues_url, source_url, wiki_url, status, requested_status, discord_url, - client_side, server_side, license_url, license, - slug, project_type, color, monetization_status + license_url, license, + slug, color, monetization_status ) VALUES ( $1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, - $15, $16, $17, $18, - LOWER($19), $20, $21, $22 + $15, $16, + LOWER($17), $18, $19 ) ", self.id as ProjectId, @@ -323,12 +309,9 @@ impl Project { self.status.as_str(), self.requested_status.map(|x| x.as_str()), self.discord_url.as_ref(), - self.client_side as SideTypeId, - self.server_side as SideTypeId, self.license_url.as_ref(), &self.license, self.slug.as_ref(), - self.project_type as ProjectTypeId, self.color.map(|x| x as i32), self.monetization_status.as_str(), ) @@ -552,7 +535,6 @@ impl Project { .flatten() .collect(), ); - if !project_ids.is_empty() { let projects = redis .multi_get::(PROJECTS_NAMESPACE, project_ids) @@ -571,31 +553,31 @@ impl Project { } } } - if !remaining_strings.is_empty() { let project_ids_parsed: Vec = remaining_strings .iter() .flat_map(|x| parse_base62(&x.to_string()).ok()) .map(|x| x as i64) .collect(); + let db_projects: Vec = sqlx::query!( " - SELECT m.id id, m.project_type project_type, m.title title, 
m.description description, m.downloads downloads, m.follows follows, + SELECT m.id id, m.title title, m.description description, m.downloads downloads, m.follows follows, m.icon_url icon_url, m.body body, m.published published, m.updated updated, m.approved approved, m.queued, m.status status, m.requested_status requested_status, m.issues_url issues_url, m.source_url source_url, m.wiki_url wiki_url, m.discord_url discord_url, m.license_url license_url, - m.team_id team_id, m.organization_id organization_id, m.client_side client_side, m.server_side server_side, m.license license, m.slug slug, m.moderation_message moderation_message, m.moderation_message_body moderation_message_body, - cs.name client_side_type, ss.name server_side_type, pt.name project_type_name, m.webhook_sent, m.color, - t.id thread_id, m.monetization_status monetization_status, m.loaders loaders, m.game_versions game_versions, + m.team_id team_id, m.organization_id organization_id, m.license license, m.slug slug, m.moderation_message moderation_message, m.moderation_message_body moderation_message_body, + m.webhook_sent, m.color, + t.id thread_id, m.monetization_status monetization_status, + ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders, + ARRAY_AGG(DISTINCT pt.name) filter (where pt.name is not null) project_types, + ARRAY_AGG(DISTINCT g.name) filter (where g.name is not null) games, ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null and mc.is_additional is false) categories, ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null and mc.is_additional is true) additional_categories, JSONB_AGG(DISTINCT jsonb_build_object('id', v.id, 'date_published', v.date_published)) filter (where v.id is not null) versions, JSONB_AGG(DISTINCT jsonb_build_object('image_url', mg.image_url, 'featured', mg.featured, 'title', mg.title, 'description', mg.description, 'created', mg.created, 'ordering', mg.ordering)) filter (where mg.image_url is not null) 
gallery, JSONB_AGG(DISTINCT jsonb_build_object('platform_id', md.joining_platform_id, 'platform_short', dp.short, 'platform_name', dp.name,'url', md.url)) filter (where md.joining_platform_id is not null) donations - FROM mods m - INNER JOIN project_types pt ON pt.id = m.project_type - INNER JOIN side_types cs ON m.client_side = cs.id - INNER JOIN side_types ss ON m.server_side = ss.id + FROM mods m INNER JOIN threads t ON t.mod_id = m.id LEFT JOIN mods_gallery mg ON mg.mod_id = m.id LEFT JOIN mods_donations md ON md.joining_mod_id = m.id @@ -603,8 +585,14 @@ impl Project { LEFT JOIN mods_categories mc ON mc.joining_mod_id = m.id LEFT JOIN categories c ON mc.joining_category_id = c.id LEFT JOIN versions v ON v.mod_id = m.id AND v.status = ANY($3) + LEFT JOIN loaders_versions lv ON lv.version_id = v.id + LEFT JOIN loaders l on lv.loader_id = l.id + LEFT JOIN loaders_project_types lpt ON lpt.joining_loader_id = l.id + LEFT JOIN project_types pt ON pt.id = lpt.joining_project_type_id + LEFT JOIN loaders_project_types_games lptg ON lptg.loader_id = l.id AND lptg.project_type_id = pt.id + LEFT JOIN games g ON lptg.game_id = g.id WHERE m.id = ANY($1) OR m.slug = ANY($2) - GROUP BY pt.id, cs.id, ss.id, t.id, m.id; + GROUP BY t.id, m.id; ", &project_ids_parsed, &remaining_strings.into_iter().map(|x| x.to_string().to_lowercase()).collect::>(), @@ -614,11 +602,9 @@ impl Project { .try_filter_map(|e| async { Ok(e.right().map(|m| { let id = m.id; - QueryProject { inner: Project { id: ProjectId(id), - project_type: ProjectTypeId(m.project_type), team_id: TeamId(m.team_id), organization_id: m.organization_id.map(OrganizationId), title: m.title.clone(), @@ -633,14 +619,12 @@ impl Project { wiki_url: m.wiki_url.clone(), license_url: m.license_url.clone(), discord_url: m.discord_url.clone(), - client_side: SideTypeId(m.client_side), status: ProjectStatus::from_string( &m.status, ), requested_status: m.requested_status.map(|x| ProjectStatus::from_string( &x, )), - server_side: 
SideTypeId(m.server_side), license: m.license.clone(), slug: m.slug.clone(), body: m.body.clone(), @@ -654,12 +638,12 @@ impl Project { monetization_status: MonetizationStatus::from_string( &m.monetization_status, ), - loaders: m.loaders, - game_versions: m.game_versions, + loaders: m.loaders.unwrap_or_default(), }, - project_type: m.project_type_name, categories: m.categories.unwrap_or_default(), additional_categories: m.additional_categories.unwrap_or_default(), + project_types: m.project_types.unwrap_or_default(), + games: m.games.unwrap_or_default(), versions: { #[derive(Deserialize)] struct Version { @@ -674,7 +658,6 @@ impl Project { .unwrap_or_default(); versions.sort_by(|a, b| a.date_published.cmp(&b.date_published)); - versions.into_iter().map(|x| x.id).collect() }, gallery_items: { @@ -689,8 +672,6 @@ impl Project { donation_urls: serde_json::from_value( m.donations.unwrap_or_default(), ).ok().unwrap_or_default(), - client_side: crate::models::projects::SideType::from_string(&m.client_side_type), - server_side: crate::models::projects::SideType::from_string(&m.server_side_type), thread_id: ThreadId(m.thread_id), }})) }) @@ -768,56 +749,6 @@ impl Project { Ok(dependencies) } - pub async fn update_game_versions( - id: ProjectId, - transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, - ) -> Result<(), sqlx::error::Error> { - sqlx::query!( - " - UPDATE mods - SET game_versions = ( - SELECT COALESCE(ARRAY_AGG(DISTINCT gv.version) filter (where gv.version is not null), array[]::varchar[]) - FROM versions v - INNER JOIN game_versions_versions gvv ON v.id = gvv.joining_version_id - INNER JOIN game_versions gv on gvv.game_version_id = gv.id - WHERE v.mod_id = mods.id AND v.status != ALL($2) - ) - WHERE id = $1 - ", - id as ProjectId, - &*crate::models::projects::VersionStatus::iterator().filter(|x| x.is_hidden()).map(|x| x.to_string()).collect::>() - ) - .execute(&mut **transaction) - .await?; - - Ok(()) - } - - pub async fn update_loaders( - id: ProjectId, - 
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, - ) -> Result<(), sqlx::error::Error> { - sqlx::query!( - " - UPDATE mods - SET loaders = ( - SELECT COALESCE(ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null), array[]::varchar[]) - FROM versions v - INNER JOIN loaders_versions lv ON lv.version_id = v.id - INNER JOIN loaders l on lv.loader_id = l.id - WHERE v.mod_id = mods.id AND v.status != ALL($2) - ) - WHERE id = $1 - ", - id as ProjectId, - &*crate::models::projects::VersionStatus::iterator().filter(|x| x.is_hidden()).map(|x| x.to_string()).collect::>() - ) - .execute(&mut **transaction) - .await?; - - Ok(()) - } - pub async fn clear_cache( id: ProjectId, slug: Option, @@ -845,13 +776,12 @@ impl Project { #[derive(Clone, Debug, Serialize, Deserialize)] pub struct QueryProject { pub inner: Project, - pub project_type: String, pub categories: Vec, pub additional_categories: Vec, pub versions: Vec, + pub project_types: Vec, + pub games: Vec, pub donation_urls: Vec, pub gallery_items: Vec, - pub client_side: crate::models::projects::SideType, - pub server_side: crate::models::projects::SideType, pub thread_id: ThreadId, } diff --git a/src/database/models/version_item.rs b/src/database/models/version_item.rs index 565e3aae..9cff920b 100644 --- a/src/database/models/version_item.rs +++ b/src/database/models/version_item.rs @@ -1,4 +1,5 @@ use super::ids::*; +use super::loader_fields::VersionField; use super::DatabaseError; use crate::database::redis::RedisPool; use crate::models::projects::{FileType, VersionStatus}; @@ -9,7 +10,7 @@ use std::cmp::Ordering; use std::collections::HashMap; use std::iter; -const VERSIONS_NAMESPACE: &str = "versions"; +pub const VERSIONS_NAMESPACE: &str = "versions"; const VERSION_FILES_NAMESPACE: &str = "versions_files"; #[derive(Clone)] @@ -22,8 +23,8 @@ pub struct VersionBuilder { pub changelog: String, pub files: Vec, pub dependencies: Vec, - pub game_versions: Vec, pub loaders: Vec, + pub version_fields: Vec, 
pub version_type: String, pub featured: bool, pub status: VersionStatus, @@ -234,7 +235,6 @@ impl VersionBuilder { let VersionBuilder { dependencies, loaders, - game_versions, files, version_id, .. @@ -249,17 +249,13 @@ impl VersionBuilder { .collect_vec(); LoaderVersion::insert_many(loader_versions, transaction).await?; - let game_version_versions = game_versions - .iter() - .map(|v| VersionVersion::new(*v, version_id)) - .collect_vec(); - VersionVersion::insert_many(game_version_versions, transaction).await?; + VersionField::insert_many(self.version_fields, transaction).await?; Ok(self.version_id) } } -#[derive(derive_new::new)] +#[derive(derive_new::new, Serialize, Deserialize)] pub struct LoaderVersion { pub loader_id: LoaderId, pub version_id: VersionId, @@ -289,36 +285,6 @@ impl LoaderVersion { } } -#[derive(derive_new::new)] -pub struct VersionVersion { - pub game_version_id: GameVersionId, - pub joining_version_id: VersionId, -} - -impl VersionVersion { - pub async fn insert_many( - items: Vec, - transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, - ) -> Result<(), DatabaseError> { - let (game_version_ids, version_ids): (Vec<_>, Vec<_>) = items - .into_iter() - .map(|i| (i.game_version_id.0, i.joining_version_id.0)) - .unzip(); - sqlx::query!( - " - INSERT INTO game_versions_versions (game_version_id, joining_version_id) - SELECT * FROM UNNEST($1::integer[], $2::bigint[]) - ", - &game_version_ids[..], - &version_ids[..], - ) - .execute(&mut **transaction) - .await?; - - Ok(()) - } -} - #[derive(Clone, Deserialize, Serialize, PartialEq, Eq)] pub struct Version { pub id: VersionId, @@ -401,8 +367,8 @@ impl Version { sqlx::query!( " - DELETE FROM game_versions_versions gvv - WHERE gvv.joining_version_id = $1 + DELETE FROM version_fields vf + WHERE vf.version_id = $1 ", id as VersionId, ) @@ -494,14 +460,11 @@ impl Version { .execute(&mut **transaction) .await?; - crate::database::models::Project::update_game_versions( + 
crate::database::models::Project::clear_cache( ProjectId(project_id.mod_id), - &mut *transaction, - ) - .await?; - crate::database::models::Project::update_loaders( - ProjectId(project_id.mod_id), - &mut *transaction, + None, + None, + redis, ) .await?; @@ -559,19 +522,59 @@ impl Version { SELECT v.id id, v.mod_id mod_id, v.author_id author_id, v.name version_name, v.version_number version_number, v.changelog changelog, v.date_published date_published, v.downloads downloads, v.version_type version_type, v.featured featured, v.status status, v.requested_status requested_status, v.ordering ordering, - JSONB_AGG(DISTINCT jsonb_build_object('version', gv.version, 'created', gv.created)) filter (where gv.version is not null) game_versions, ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders, + ARRAY_AGG(DISTINCT pt.name) filter (where pt.name is not null) project_types, + ARRAY_AGG(DISTINCT g.name) filter (where g.name is not null) games, JSONB_AGG(DISTINCT jsonb_build_object('id', f.id, 'url', f.url, 'filename', f.filename, 'primary', f.is_primary, 'size', f.size, 'file_type', f.file_type)) filter (where f.id is not null) files, JSONB_AGG(DISTINCT jsonb_build_object('algorithm', h.algorithm, 'hash', encode(h.hash, 'escape'), 'file_id', h.file_id)) filter (where h.hash is not null) hashes, - JSONB_AGG(DISTINCT jsonb_build_object('project_id', d.mod_dependency_id, 'version_id', d.dependency_id, 'dependency_type', d.dependency_type,'file_name', dependency_file_name)) filter (where d.dependency_type is not null) dependencies + JSONB_AGG(DISTINCT jsonb_build_object('project_id', d.mod_dependency_id, 'version_id', d.dependency_id, 'dependency_type', d.dependency_type,'file_name', dependency_file_name)) filter (where d.dependency_type is not null) dependencies, + + JSONB_AGG( + DISTINCT jsonb_build_object( + 'field_id', vf.field_id, + 'int_value', vf.int_value, + 'enum_value', vf.enum_value, + 'string_value', vf.string_value + ) + ) filter (where 
vf.field_id is not null) version_fields, + JSONB_AGG( + DISTINCT jsonb_build_object( + 'lf_id', lf.id, + 'loader_name', l.loader, + 'field', lf.field, + 'field_type', lf.field_type, + 'enum_type', lf.enum_type, + 'min_val', lf.min_val, + 'max_val', lf.max_val, + 'optional', lf.optional + ) + ) filter (where lf.id is not null) loader_fields, + JSONB_AGG( + DISTINCT jsonb_build_object( + 'id', lfev.id, + 'enum_id', lfev.enum_id, + 'value', lfev.value, + 'ordering', lfev.ordering, + 'created', lfev.created, + 'metadata', lfev.metadata + ) + ) filter (where lfev.id is not null) loader_field_enum_values + FROM versions v - LEFT OUTER JOIN game_versions_versions gvv on v.id = gvv.joining_version_id - LEFT OUTER JOIN game_versions gv on gvv.game_version_id = gv.id LEFT OUTER JOIN loaders_versions lv on v.id = lv.version_id LEFT OUTER JOIN loaders l on lv.loader_id = l.id + LEFT OUTER JOIN loaders_project_types lpt on l.id = lpt.joining_loader_id + LEFT JOIN project_types pt on lpt.joining_project_type_id = pt.id + LEFT OUTER JOIN loaders_project_types_games lptg on l.id = lptg.loader_id AND pt.id = lptg.project_type_id + LEFT JOIN games g on lptg.game_id = g.id LEFT OUTER JOIN files f on v.id = f.version_id LEFT OUTER JOIN hashes h on f.id = h.file_id LEFT OUTER JOIN dependencies d on v.id = d.dependent_id + LEFT OUTER JOIN version_fields vf on v.id = vf.version_id + LEFT OUTER JOIN loader_fields lf on vf.field_id = lf.id + LEFT OUTER JOIN loader_field_enums lfe on lf.enum_type = lfe.id + LEFT OUTER JOIN loader_field_enum_values lfev on lfe.id = lfev.enum_id + WHERE v.id = ANY($1) GROUP BY v.id ORDER BY v.ordering ASC NULLS LAST, v.date_published ASC; @@ -664,24 +667,10 @@ impl Version { files }, - game_versions: { - #[derive(Deserialize)] - struct GameVersion { - pub version: String, - pub created: DateTime, - } - - let mut game_versions: Vec = serde_json::from_value( - v.game_versions.unwrap_or_default(), - ) - .ok() - .unwrap_or_default(); - - game_versions.sort_by(|a, 
b| a.created.cmp(&b.created)); - - game_versions.into_iter().map(|x| x.version).collect() - }, + version_fields: VersionField::from_query_json(v.id, v.loader_fields, v.version_fields, v.loader_field_enum_values), loaders: v.loaders.unwrap_or_default(), + project_types: v.project_types.unwrap_or_default(), + games: v.games.unwrap_or_default(), dependencies: serde_json::from_value( v.dependencies.unwrap_or_default(), ) @@ -751,7 +740,6 @@ impl Version { .collect::>(), ) .await?; - for file in files { if let Some(mut file) = file.and_then(|x| serde_json::from_str::>(&x).ok()) @@ -861,8 +849,10 @@ pub struct QueryVersion { pub inner: Version, pub files: Vec, - pub game_versions: Vec, + pub version_fields: Vec, pub loaders: Vec, + pub project_types: Vec, + pub games: Vec, pub dependencies: Vec, } diff --git a/src/lib.rs b/src/lib.rs index d5fc97dc..0912e4e1 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -135,7 +135,7 @@ pub fn app_setup( } }); - scheduler::schedule_versions(&mut scheduler, pool.clone()); + scheduler::schedule_versions(&mut scheduler, pool.clone(), redis_pool.clone()); let session_queue = web::Data::new(AuthQueue::new()); @@ -159,7 +159,7 @@ pub fn app_setup( let reader = maxmind.clone(); { - let reader_ref = reader.clone(); + let reader_ref = reader; scheduler.run(std::time::Duration::from_secs(60 * 60 * 24), move || { let reader_ref = reader_ref.clone(); diff --git a/src/models/mod.rs b/src/models/mod.rs index 7c97ad31..c4ff81a2 100644 --- a/src/models/mod.rs +++ b/src/models/mod.rs @@ -1,16 +1,19 @@ -pub mod analytics; -pub mod collections; -pub mod error; -pub mod ids; -pub mod images; -pub mod notifications; -pub mod oauth_clients; -pub mod organizations; -pub mod pack; -pub mod pats; -pub mod projects; -pub mod reports; -pub mod sessions; -pub mod teams; -pub mod threads; -pub mod users; +pub mod v2; +pub mod v3; + +pub use v3::analytics; +pub use v3::collections; +pub use v3::error; +pub use v3::ids; +pub use v3::images; +pub use v3::notifications; 
+pub use v3::oauth_clients; +pub use v3::organizations; +pub use v3::pack; +pub use v3::pats; +pub use v3::projects; +pub use v3::reports; +pub use v3::sessions; +pub use v3::teams; +pub use v3::threads; +pub use v3::users; diff --git a/src/models/v2/mod.rs b/src/models/v2/mod.rs new file mode 100644 index 00000000..5df1866a --- /dev/null +++ b/src/models/v2/mod.rs @@ -0,0 +1,2 @@ +// Legacy models from V2, where its useful to keep the struct for rerouting/conversion +pub mod projects; diff --git a/src/models/v2/projects.rs b/src/models/v2/projects.rs new file mode 100644 index 00000000..7d2a0b85 --- /dev/null +++ b/src/models/v2/projects.rs @@ -0,0 +1,307 @@ +use super::super::ids::OrganizationId; +use super::super::teams::TeamId; +use super::super::users::UserId; +use crate::database::models::legacy_loader_fields::MinecraftGameVersion; +use crate::database::models::{version_item, DatabaseError}; +use crate::database::redis::RedisPool; +use crate::models::ids::{ProjectId, VersionId}; +use crate::models::projects::{ + Dependency, DonationLink, GalleryItem, License, Loader, ModeratorMessage, MonetizationStatus, + Project, ProjectStatus, Version, VersionFile, VersionStatus, VersionType, +}; +use crate::models::threads::ThreadId; +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; + +/// A project returned from the API +#[derive(Serialize, Deserialize, Clone)] +pub struct LegacyProject { + /// Relevant V2 fields- these were removed or modfified in V3, + /// and are now part of the dynamic fields system + /// The support range for the client project* + pub client_side: LegacySideType, + /// The support range for the server project + pub server_side: LegacySideType, + /// A list of game versions this project supports + pub game_versions: Vec, + + // All other fields are the same as V3 + // If they change, or their constituent types change, we may need to + // add a new struct for them here. 
+ pub id: ProjectId, + pub slug: Option, + pub project_type: String, + pub team: TeamId, + pub organization: Option, + pub title: String, + pub description: String, + pub body: String, + pub body_url: Option, + pub published: DateTime, + pub updated: DateTime, + pub approved: Option>, + pub queued: Option>, + pub status: ProjectStatus, + pub requested_status: Option, + pub moderator_message: Option, + pub license: License, + pub downloads: u32, + pub followers: u32, + pub categories: Vec, + pub additional_categories: Vec, + pub loaders: Vec, + pub versions: Vec, + pub icon_url: Option, + pub issues_url: Option, + pub source_url: Option, + pub wiki_url: Option, + pub discord_url: Option, + pub donation_urls: Option>, + pub gallery: Vec, + pub color: Option, + pub thread_id: ThreadId, + pub monetization_status: MonetizationStatus, +} + +impl LegacyProject { + // Convert from a standard V3 project to a V2 project + // Requires any queried versions to be passed in, to get access to certain version fields contained within. + // - This can be any version, because the fields are ones that used to be on the project itself. + // - Its conceivable that certain V3 projects that have many different ones may not have the same fields on all of them. + // TODO: Should this return an error instead for v2 users? + // It's safe to use a db version_item for this as the only info is side types, game versions, and loader fields (for loaders), which used to be public on project anyway. + pub fn from(data: Project, versions_item: Option) -> Self { + let mut client_side = LegacySideType::Unknown; + let mut server_side = LegacySideType::Unknown; + let mut game_versions = Vec::new(); + + // V2 versions only have one project type- v3 versions can rarely have multiple. + // We'll just use the first one. 
+ let mut project_type = data.project_types.get(0).cloned().unwrap_or_default(); + let mut loaders = data.loaders; + + if let Some(versions_item) = versions_item { + client_side = versions_item + .version_fields + .iter() + .find(|f| f.field_name == "client_side") + .and_then(|f| { + Some(LegacySideType::from_string( + f.value.serialize_internal().as_str()?, + )) + }) + .unwrap_or(LegacySideType::Unknown); + server_side = versions_item + .version_fields + .iter() + .find(|f| f.field_name == "server_side") + .and_then(|f| { + Some(LegacySideType::from_string( + f.value.serialize_internal().as_str()?, + )) + }) + .unwrap_or(LegacySideType::Unknown); + game_versions = versions_item + .version_fields + .iter() + .find(|f| f.field_name == "game_versions") + .and_then(|f| MinecraftGameVersion::try_from_version_field(f).ok()) + .map(|v| v.into_iter().map(|v| v.version).collect()) + .unwrap_or(Vec::new()); + + // - if loader is mrpack, this is a modpack + // the loaders are whatever the corresponding loader fields are + if versions_item.loaders == vec!["mrpack".to_string()] { + project_type = "modpack".to_string(); + if let Some(mrpack_loaders) = versions_item + .version_fields + .iter() + .find(|f| f.field_name == "mrpack_loaders") + { + loaders = mrpack_loaders.value.as_strings(); + } + } + } + + Self { + id: data.id, + slug: data.slug, + project_type, + team: data.team, + organization: data.organization, + title: data.title, + description: data.description, + body: data.body, + body_url: data.body_url, + published: data.published, + updated: data.updated, + approved: data.approved, + queued: data.queued, + status: data.status, + requested_status: data.requested_status, + moderator_message: data.moderator_message, + license: data.license, + downloads: data.downloads, + followers: data.followers, + categories: data.categories, + additional_categories: data.additional_categories, + loaders, + versions: data.versions, + icon_url: data.icon_url, + issues_url: 
data.issues_url, + source_url: data.source_url, + wiki_url: data.wiki_url, + discord_url: data.discord_url, + donation_urls: data.donation_urls, + gallery: data.gallery, + color: data.color, + thread_id: data.thread_id, + monetization_status: data.monetization_status, + client_side, + server_side, + game_versions, + } + } + + // Because from needs a version_item, this is a helper function to get many from one db query. + pub async fn from_many<'a, E>( + data: Vec, + exec: E, + redis: &RedisPool, + ) -> Result, DatabaseError> + where + E: sqlx::Executor<'a, Database = sqlx::Postgres>, + { + let version_ids: Vec<_> = data + .iter() + .filter_map(|p| p.versions.get(0).map(|i| (*i).into())) + .collect(); + let example_versions = version_item::Version::get_many(&version_ids, exec, redis).await?; + let mut legacy_projects = Vec::new(); + for project in data { + let version_item = example_versions + .iter() + .find(|v| v.inner.project_id == project.id.into()) + .cloned(); + let project = LegacyProject::from(project, version_item); + legacy_projects.push(project); + } + Ok(legacy_projects) + } +} + +#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)] +#[serde(rename_all = "kebab-case")] +pub enum LegacySideType { + Required, + Optional, + Unsupported, + Unknown, +} + +impl std::fmt::Display for LegacySideType { + fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result { + write!(fmt, "{}", self.as_str()) + } +} + +impl LegacySideType { + // These are constant, so this can remove unnecessary allocations (`to_string`) + pub fn as_str(&self) -> &'static str { + match self { + LegacySideType::Required => "required", + LegacySideType::Optional => "optional", + LegacySideType::Unsupported => "unsupported", + LegacySideType::Unknown => "unknown", + } + } + + pub fn from_string(string: &str) -> LegacySideType { + match string { + "required" => LegacySideType::Required, + "optional" => LegacySideType::Optional, + "unsupported" => LegacySideType::Unsupported, + 
_ => LegacySideType::Unknown, + } + } +} + +/// A specific version of a project +#[derive(Serialize, Deserialize, Clone)] +pub struct LegacyVersion { + /// Relevant V2 fields- these were removed or modified in V3, + /// and are now part of the dynamic fields system + /// A list of game versions this project supports + pub game_versions: Vec, + /// A list of loaders this project supports + pub loaders: Vec, + + // TODO: remove this once we have v3 testing, as this is a v3 field and tests for it should be isolated to v3 + pub ordering: Option, + + pub id: VersionId, + pub project_id: ProjectId, + pub author_id: UserId, + pub featured: bool, + pub name: String, + pub version_number: String, + pub changelog: String, + pub changelog_url: Option, + pub date_published: DateTime, + pub downloads: u32, + pub version_type: VersionType, + pub status: VersionStatus, + pub requested_status: Option, + pub files: Vec, + pub dependencies: Vec, +} + +impl From for LegacyVersion { + fn from(data: Version) -> Self { + let mut game_versions = Vec::new(); + if let Some(value) = data.fields.get("game_versions").and_then(|v| v.as_array()) { + for gv in value { + if let Some(game_version) = gv.as_str() { + game_versions.push(game_version.to_string()); + } + } + } + + // - if loader is mrpack, this is a modpack + // the v2 loaders are whatever the corresponding loader fields are + let mut loaders = data.loaders.into_iter().map(|l| l.0).collect::>(); + if loaders == vec!["mrpack".to_string()] { + if let Some((_, mrpack_loaders)) = data + .fields + .into_iter() + .find(|(key, _)| key == "mrpack_loaders") + { + if let Ok(mrpack_loaders) = serde_json::from_value(mrpack_loaders) { + loaders = mrpack_loaders; + } + } + } + let loaders = loaders.into_iter().map(Loader).collect::>(); + + Self { + id: data.id, + project_id: data.project_id, + author_id: data.author_id, + featured: data.featured, + name: data.name, + version_number: data.version_number, + changelog: data.changelog, + changelog_url: 
data.changelog_url, + date_published: data.date_published, + downloads: data.downloads, + version_type: data.version_type, + status: data.status, + requested_status: data.requested_status, + files: data.files, + dependencies: data.dependencies, + game_versions, + ordering: data.ordering, + loaders, + } + } +} diff --git a/src/models/analytics.rs b/src/models/v3/analytics.rs similarity index 100% rename from src/models/analytics.rs rename to src/models/v3/analytics.rs diff --git a/src/models/collections.rs b/src/models/v3/collections.rs similarity index 100% rename from src/models/collections.rs rename to src/models/v3/collections.rs diff --git a/src/models/error.rs b/src/models/v3/error.rs similarity index 100% rename from src/models/error.rs rename to src/models/v3/error.rs diff --git a/src/models/ids.rs b/src/models/v3/ids.rs similarity index 100% rename from src/models/ids.rs rename to src/models/v3/ids.rs diff --git a/src/models/images.rs b/src/models/v3/images.rs similarity index 100% rename from src/models/images.rs rename to src/models/v3/images.rs diff --git a/src/models/v3/mod.rs b/src/models/v3/mod.rs new file mode 100644 index 00000000..7c97ad31 --- /dev/null +++ b/src/models/v3/mod.rs @@ -0,0 +1,16 @@ +pub mod analytics; +pub mod collections; +pub mod error; +pub mod ids; +pub mod images; +pub mod notifications; +pub mod oauth_clients; +pub mod organizations; +pub mod pack; +pub mod pats; +pub mod projects; +pub mod reports; +pub mod sessions; +pub mod teams; +pub mod threads; +pub mod users; diff --git a/src/models/notifications.rs b/src/models/v3/notifications.rs similarity index 100% rename from src/models/notifications.rs rename to src/models/v3/notifications.rs diff --git a/src/models/oauth_clients.rs b/src/models/v3/oauth_clients.rs similarity index 100% rename from src/models/oauth_clients.rs rename to src/models/v3/oauth_clients.rs diff --git a/src/models/organizations.rs b/src/models/v3/organizations.rs similarity index 100% rename from 
src/models/organizations.rs rename to src/models/v3/organizations.rs diff --git a/src/models/pack.rs b/src/models/v3/pack.rs similarity index 100% rename from src/models/pack.rs rename to src/models/v3/pack.rs diff --git a/src/models/pats.rs b/src/models/v3/pats.rs similarity index 98% rename from src/models/pats.rs rename to src/models/v3/pats.rs index 83f9b1c5..d4ef6e28 100644 --- a/src/models/pats.rs +++ b/src/models/v3/pats.rs @@ -132,9 +132,7 @@ impl Scopes { } pub fn parse_from_oauth_scopes(scopes: &str) -> Result { - let scopes = scopes - .replace(['+', ' '], "|") - .replace("%20", "|"); + let scopes = scopes.replace(['+', ' '], "|").replace("%20", "|"); bitflags::parser::from_str(&scopes) } diff --git a/src/models/projects.rs b/src/models/v3/projects.rs similarity index 93% rename from src/models/projects.rs rename to src/models/v3/projects.rs index 51f1f675..63c6593b 100644 --- a/src/models/projects.rs +++ b/src/models/v3/projects.rs @@ -1,3 +1,5 @@ +use std::collections::HashMap; + use super::ids::{Base62Id, OrganizationId}; use super::teams::TeamId; use super::users::UserId; @@ -27,8 +29,10 @@ pub struct Project { pub id: ProjectId, /// The slug of a project, used for vanity URLs pub slug: Option, - /// The project type of the project - pub project_type: String, + /// The aggregated project typs of the versions of this project + pub project_types: Vec, + /// The aggregated games of the versions of this project + pub games: Vec, /// The team of people that has ownership of this project. pub team: TeamId, /// The optional organization of people that have ownership of this project. @@ -66,11 +70,6 @@ pub struct Project { /// The license of this project pub license: License, - /// The support range for the client project* - pub client_side: SideType, - /// The support range for the server project - pub server_side: SideType, - /// The total number of downloads the project has had. 
pub downloads: u32, /// The total number of followers this project has accumulated @@ -81,8 +80,6 @@ pub struct Project { /// A list of the categories that the project is in. pub additional_categories: Vec, - /// A list of game versions this project supports - pub game_versions: Vec, /// A list of loaders this project supports pub loaders: Vec, @@ -120,7 +117,8 @@ impl From for Project { Self { id: m.id.into(), slug: m.slug, - project_type: data.project_type, + project_types: data.project_types, + games: data.games, team: m.team_id.into(), organization: m.organization_id.map(|i| i.into()), title: m.title, @@ -162,13 +160,10 @@ impl From for Project { }, url: m.license_url, }, - client_side: data.client_side, - server_side: data.server_side, downloads: m.downloads as u32, followers: m.follows as u32, categories: data.categories, additional_categories: data.additional_categories, - game_versions: m.game_versions, loaders: m.loaders, versions: data.versions.into_iter().map(|v| v.into()).collect(), icon_url: m.icon_url, @@ -462,11 +457,14 @@ pub struct Version { pub author_id: UserId, /// Whether the version is featured or not pub featured: bool, - /// The name of this version pub name: String, /// The version number. Ideally will follow semantic versioning pub version_number: String, + /// Project types for which this version is compatible with, extracted from Loader + pub project_types: Vec, + /// Games for which this version is compatible with, extracted from Loader/Project types + pub games: Vec, /// The changelog for this version of the project. pub changelog: String, /// A link to the changelog for this version of the project. Deprecated, always None @@ -487,26 +485,40 @@ pub struct Version { pub files: Vec, /// A list of projects that this version depends on. pub dependencies: Vec, - /// A list of versions of Minecraft that this version of the project supports. 
- pub game_versions: Vec, + /// The loaders that this version works on pub loaders: Vec, /// Ordering override, lower is returned first pub ordering: Option, + + // All other fields are loader-specific VersionFields + // These are flattened during serialization + #[serde(deserialize_with = "skip_nulls")] + #[serde(flatten)] + pub fields: HashMap, +} + +pub fn skip_nulls<'de, D>(deserializer: D) -> Result, D::Error> +where + D: serde::Deserializer<'de>, +{ + let mut map = HashMap::deserialize(deserializer)?; + map.retain(|_, v: &mut serde_json::Value| !v.is_null()); + Ok(map) } impl From for Version { fn from(data: QueryVersion) -> Version { let v = data.inner; - Version { id: v.id.into(), project_id: v.project_id.into(), author_id: v.author_id.into(), - featured: v.featured, name: v.name, version_number: v.version_number, + project_types: data.project_types, + games: data.games, changelog: v.changelog, changelog_url: None, date_published: v.date_published, @@ -543,8 +555,14 @@ impl From for Version { dependency_type: DependencyType::from_string(d.dependency_type.as_str()), }) .collect(), - game_versions: data.game_versions.into_iter().map(GameVersion).collect(), loaders: data.loaders.into_iter().map(Loader).collect(), + // Only add the internal component of the field for display + // "ie": "game_versions",["1.2.3"] instead of "game_versions",ArrayEnum(...) 
+ fields: data + .version_fields + .into_iter() + .map(|vf| (vf.field_name, vf.value.serialize_internal())) + .collect(), } } } @@ -658,7 +676,7 @@ pub struct VersionFile { /// A dendency which describes what versions are required, break support, or are optional to the /// version's functionality -#[derive(Serialize, Deserialize, Clone)] +#[derive(Serialize, Deserialize, Clone, Debug)] pub struct Dependency { /// The specific version id that the dependency uses pub version_id: Option, @@ -670,7 +688,7 @@ pub struct Dependency { pub dependency_type: DependencyType, } -#[derive(Serialize, Deserialize, Copy, Clone, Eq, PartialEq)] +#[derive(Serialize, Deserialize, Copy, Clone, Eq, PartialEq, Debug)] #[serde(rename_all = "lowercase")] pub enum VersionType { Release, @@ -695,7 +713,7 @@ impl VersionType { } } -#[derive(Serialize, Deserialize, Copy, Clone)] +#[derive(Serialize, Deserialize, Copy, Clone, Debug)] #[serde(rename_all = "lowercase")] pub enum DependencyType { Required, @@ -766,19 +784,14 @@ impl FileType { } } -/// A specific version of Minecraft -#[derive(Serialize, Deserialize, Clone, PartialEq, Eq)] -#[serde(transparent)] -pub struct GameVersion(pub String); - /// A project loader -#[derive(Serialize, Deserialize, Clone)] +#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq)] #[serde(transparent)] pub struct Loader(pub String); // These fields must always succeed parsing; deserialize errors aren't // processed correctly (don't return JSON errors) -#[derive(Serialize, Deserialize)] +#[derive(Serialize, Deserialize, Debug)] pub struct SearchRequest { pub query: Option, pub offset: Option, @@ -787,7 +800,7 @@ pub struct SearchRequest { pub new_filters: Option, - // Deprecated values below. WILL BE REMOVED V3! + // TODO: Deprecated values below. WILL BE REMOVED V3! 
pub facets: Option, pub filters: Option, pub version: Option, diff --git a/src/models/reports.rs b/src/models/v3/reports.rs similarity index 100% rename from src/models/reports.rs rename to src/models/v3/reports.rs diff --git a/src/models/sessions.rs b/src/models/v3/sessions.rs similarity index 100% rename from src/models/sessions.rs rename to src/models/v3/sessions.rs diff --git a/src/models/teams.rs b/src/models/v3/teams.rs similarity index 100% rename from src/models/teams.rs rename to src/models/v3/teams.rs diff --git a/src/models/threads.rs b/src/models/v3/threads.rs similarity index 100% rename from src/models/threads.rs rename to src/models/v3/threads.rs diff --git a/src/models/users.rs b/src/models/v3/users.rs similarity index 100% rename from src/models/users.rs rename to src/models/v3/users.rs diff --git a/src/routes/maven.rs b/src/routes/maven.rs index f719073a..23d08637 100644 --- a/src/routes/maven.rs +++ b/src/routes/maven.rs @@ -1,4 +1,5 @@ -use crate::database::models::categories::Loader; +use crate::database::models::legacy_loader_fields::MinecraftGameVersion; +use crate::database::models::loader_fields::Loader; use crate::database::models::project_item::QueryProject; use crate::database::models::version_item::{QueryFile, QueryVersion}; use crate::database::redis::RedisPool; @@ -22,6 +23,8 @@ pub fn config(cfg: &mut web::ServiceConfig) { cfg.service(version_file); } +// TODO: These were modified in v3 and should be tested + #[derive(Default, Debug, Clone, YaSerialize)] #[yaserde(root = "metadata", rename = "metadata")] pub struct Metadata { @@ -198,8 +201,19 @@ async fn find_version( if !loaders.is_empty() { bool &= x.loaders.iter().any(|y| loaders.contains(y)); } + + // For maven in particular, we will hardcode it to use GameVersions rather than generic loader fields, as this is minecraft-java exclusive if !game_versions.is_empty() { - bool &= x.game_versions.iter().any(|y| game_versions.contains(y)); + let version_game_versions = x + 
.version_fields + .clone() + .into_iter() + .find_map(|v| MinecraftGameVersion::try_from_version_field(&v).ok()); + if let Some(version_game_versions) = version_game_versions { + bool &= version_game_versions + .iter() + .any(|y| game_versions.contains(&y.version)); + } } bool @@ -216,7 +230,6 @@ async fn find_version( fn find_file<'a>( project_id: &str, vcoords: &str, - project: &QueryProject, version: &'a QueryVersion, file: &str, ) -> Option<&'a QueryFile> { @@ -224,21 +237,27 @@ fn find_file<'a>( return Some(selected_file); } - let fileext = match project.project_type.as_str() { - "mod" => "jar", - "modpack" => "mrpack", - _ => return None, - }; + // Minecraft mods are not going to be both a mod and a modpack, so this minecraft-specific handling is fine + // As there can be multiple project types, returns the first allowable match + let mut fileexts = vec![]; + for project_type in version.project_types.iter() { + match project_type.as_str() { + "mod" => fileexts.push("jar"), + "modpack" => fileexts.push("mrpack"), + _ => (), + } + } - if file == format!("{}-{}.{}", &project_id, &vcoords, fileext) { - version - .files - .iter() - .find(|x| x.primary) - .or_else(|| version.files.iter().last()) - } else { - None + for fileext in fileexts { + if file == format!("{}-{}.{}", &project_id, &vcoords, fileext) { + return version + .files + .iter() + .find(|x| x.primary) + .or_else(|| version.files.iter().last()); + } } + None } #[route( @@ -297,7 +316,7 @@ pub async fn version_file( return Ok(HttpResponse::Ok() .content_type("text/xml") .body(yaserde::ser::to_string(&respdata).map_err(ApiError::Xml)?)); - } else if let Some(selected_file) = find_file(&project_id, &vnum, &project, &version, &file) { + } else if let Some(selected_file) = find_file(&project_id, &vnum, &version, &file) { return Ok(HttpResponse::TemporaryRedirect() .append_header(("location", &*selected_file.url)) .body("")); @@ -342,7 +361,7 @@ pub async fn version_file_sha1( return 
Ok(HttpResponse::NotFound().body("")); } - Ok(find_file(&project_id, &vnum, &project, &version, &file) + Ok(find_file(&project_id, &vnum, &version, &file) .and_then(|file| file.hashes.get("sha1")) .map(|hash_str| HttpResponse::Ok().body(hash_str.clone())) .unwrap_or_else(|| HttpResponse::NotFound().body(""))) @@ -384,7 +403,7 @@ pub async fn version_file_sha512( return Ok(HttpResponse::NotFound().body("")); } - Ok(find_file(&project_id, &vnum, &project, &version, &file) + Ok(find_file(&project_id, &vnum, &version, &file) .and_then(|file| file.hashes.get("sha512")) .map(|hash_str| HttpResponse::Ok().body(hash_str.clone())) .unwrap_or_else(|| HttpResponse::NotFound().body(""))) diff --git a/src/routes/mod.rs b/src/routes/mod.rs index 4a80c6d2..6e381d51 100644 --- a/src/routes/mod.rs +++ b/src/routes/mod.rs @@ -11,6 +11,8 @@ use futures::FutureExt; pub mod v2; pub mod v3; +pub mod v2_reroute; + mod analytics; mod index; mod maven; @@ -118,6 +120,8 @@ pub enum ApiError { PasswordStrengthCheck(#[from] zxcvbn::ZxcvbnError), #[error("{0}")] Mail(#[from] crate::auth::email::MailError), + #[error("Error while rerouting request: {0}")] + Reroute(#[from] reqwest::Error), } impl actix_web::ResponseError for ApiError { @@ -144,6 +148,7 @@ impl actix_web::ResponseError for ApiError { ApiError::PasswordHashing(..) => StatusCode::INTERNAL_SERVER_ERROR, ApiError::PasswordStrengthCheck(..) => StatusCode::BAD_REQUEST, ApiError::Mail(..) => StatusCode::INTERNAL_SERVER_ERROR, + ApiError::Reroute(..) => StatusCode::INTERNAL_SERVER_ERROR, } } @@ -171,6 +176,7 @@ impl actix_web::ResponseError for ApiError { ApiError::PasswordStrengthCheck(..) => "strength_check_error", ApiError::Mail(..) => "mail_error", ApiError::Clickhouse(..) => "clickhouse_error", + ApiError::Reroute(..) 
=> "reroute_error", }, description: &self.to_string(), }) diff --git a/src/routes/updates.rs b/src/routes/updates.rs index 004621a9..f4d6d2f8 100644 --- a/src/routes/updates.rs +++ b/src/routes/updates.rs @@ -6,6 +6,7 @@ use sqlx::PgPool; use crate::auth::{filter_authorized_versions, get_user_from_headers, is_authorized}; use crate::database; +use crate::database::models::legacy_loader_fields::MinecraftGameVersion; use crate::database::redis::RedisPool; use crate::models::pats::Scopes; use crate::models::projects::VersionType; @@ -95,19 +96,29 @@ pub async fn forge_updates( }; for version in versions { + // For forge in particular, we will hardcode it to use GameVersions rather than generic loader fields, as this is minecraft-java exclusive + // Will have duplicates between game_versions (for non-forge loaders), but that's okay as + // before v3 this was stored to the project and not the version + let game_versions: Vec = version + .fields + .iter() + .find(|(key, _)| key.as_str() == MinecraftGameVersion::FIELD_NAME) + .and_then(|(_, value)| serde_json::from_value::>(value.clone()).ok()) + .unwrap_or_default(); + if version.version_type == VersionType::Release { - for game_version in &version.game_versions { + for game_version in &game_versions { response .promos - .entry(format!("{}-recommended", game_version.0)) + .entry(format!("{}-recommended", game_version)) .or_insert_with(|| version.version_number.clone()); } } - for game_version in &version.game_versions { + for game_version in &game_versions { response .promos - .entry(format!("{}-latest", game_version.0)) + .entry(format!("{}-latest", game_version)) .or_insert_with(|| version.version_number.clone()); } } diff --git a/src/routes/v2/admin.rs b/src/routes/v2/admin.rs index 40e54e8e..be914e91 100644 --- a/src/routes/v2/admin.rs +++ b/src/routes/v2/admin.rs @@ -9,6 +9,7 @@ use crate::queue::analytics::AnalyticsQueue; use crate::queue::maxmind::MaxMindIndexer; use crate::queue::session::AuthQueue; use 
crate::routes::ApiError; +use crate::search::SearchConfig; use crate::util::date::get_current_tenths_of_ms; use crate::util::guards::admin_key_guard; use crate::util::routes::read_from_payload; @@ -27,7 +28,8 @@ pub fn config(cfg: &mut web::ServiceConfig) { cfg.service( web::scope("admin") .service(count_download) - .service(trolley_webhook), + .service(trolley_webhook) + .service(force_reindex), ); } @@ -308,3 +310,13 @@ pub async fn trolley_webhook( Ok(HttpResponse::NoContent().finish()) } + +#[post("/_force_reindex", guard = "admin_key_guard")] +pub async fn force_reindex( + pool: web::Data, + config: web::Data, +) -> Result { + use crate::search::indexing::index_projects; + index_projects(pool.as_ref().clone(), &config).await?; + Ok(HttpResponse::NoContent().finish()) +} diff --git a/src/routes/v2/analytics_get.rs b/src/routes/v2/analytics_get.rs index 901dac2d..8dd6532a 100644 --- a/src/routes/v2/analytics_get.rs +++ b/src/routes/v2/analytics_get.rs @@ -1,24 +1,12 @@ use super::ApiError; use crate::database::redis::RedisPool; -use crate::{ - auth::{filter_authorized_projects, filter_authorized_versions, get_user_from_headers}, - database::models::{project_item, user_item, version_item}, - models::{ - ids::{ - base62_impl::{parse_base62, to_base62}, - ProjectId, VersionId, - }, - pats::Scopes, - }, - queue::session::AuthQueue, -}; +use crate::routes::v3; +use crate::{models::ids::VersionId, queue::session::AuthQueue}; use actix_web::{get, web, HttpRequest, HttpResponse}; -use chrono::{DateTime, Duration, Utc}; +use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; -use sqlx::postgres::types::PgInterval; use sqlx::PgPool; use std::collections::HashMap; -use std::convert::TryInto; pub fn config(cfg: &mut web::ServiceConfig) { cfg.service( @@ -76,66 +64,22 @@ pub async fn playtimes_get( pool: web::Data, redis: web::Data, ) -> Result { - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::ANALYTICS]), + let 
data = data.into_inner(); + v3::analytics_get::playtimes_get( + req, + clickhouse, + web::Query(v3::analytics_get::GetData { + project_ids: data.project_ids, + version_ids: data.version_ids, + start_date: data.start_date, + end_date: data.end_date, + resolution_minutes: data.resolution_minutes, + }), + session_queue, + pool, + redis, ) .await - .map(|x| x.1)?; - - let project_ids = data - .project_ids - .as_ref() - .map(|ids| serde_json::from_str::>(ids)) - .transpose()?; - let version_ids = data - .version_ids - .as_ref() - .map(|ids| serde_json::from_str::>(ids)) - .transpose()?; - - if project_ids.is_some() && version_ids.is_some() { - return Err(ApiError::InvalidInput( - "Only one of 'project_ids' or 'version_ids' should be used.".to_string(), - )); - } - - let start_date = data.start_date.unwrap_or(Utc::now() - Duration::weeks(2)); - let end_date = data.end_date.unwrap_or(Utc::now()); - let resolution_minutes = data.resolution_minutes.unwrap_or(60 * 24); - - // Convert String list to list of ProjectIds or VersionIds - // - Filter out unauthorized projects/versions - // - If no project_ids or version_ids are provided, we default to all projects the user has access to - let (project_ids, version_ids) = - filter_allowed_ids(project_ids, version_ids, user, &pool, &redis).await?; - - // Get the views - let playtimes = crate::clickhouse::fetch_playtimes( - project_ids, - version_ids, - start_date, - end_date, - resolution_minutes, - clickhouse.into_inner(), - ) - .await?; - - let mut hm = HashMap::new(); - for playtime in playtimes { - let id_string = to_base62(playtime.id); - if !hm.contains_key(&id_string) { - hm.insert(id_string.clone(), HashMap::new()); - } - if let Some(hm) = hm.get_mut(&id_string) { - hm.insert(playtime.time, playtime.total_seconds); - } - } - - Ok(HttpResponse::Ok().json(hm)) } /// Get view data for a set of projects or versions @@ -156,66 +100,22 @@ pub async fn views_get( pool: web::Data, redis: web::Data, ) -> Result { - let user = 
get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::ANALYTICS]), + let data = data.into_inner(); + v3::analytics_get::views_get( + req, + clickhouse, + web::Query(v3::analytics_get::GetData { + project_ids: data.project_ids, + version_ids: data.version_ids, + start_date: data.start_date, + end_date: data.end_date, + resolution_minutes: data.resolution_minutes, + }), + session_queue, + pool, + redis, ) .await - .map(|x| x.1)?; - - let project_ids = data - .project_ids - .as_ref() - .map(|ids| serde_json::from_str::>(ids)) - .transpose()?; - let version_ids = data - .version_ids - .as_ref() - .map(|ids| serde_json::from_str::>(ids)) - .transpose()?; - - if project_ids.is_some() && version_ids.is_some() { - return Err(ApiError::InvalidInput( - "Only one of 'project_ids' or 'version_ids' should be used.".to_string(), - )); - } - - let start_date = data.start_date.unwrap_or(Utc::now() - Duration::weeks(2)); - let end_date = data.end_date.unwrap_or(Utc::now()); - let resolution_minutes = data.resolution_minutes.unwrap_or(60 * 24); - - // Convert String list to list of ProjectIds or VersionIds - // - Filter out unauthorized projects/versions - // - If no project_ids or version_ids are provided, we default to all projects the user has access to - let (project_ids, version_ids) = - filter_allowed_ids(project_ids, version_ids, user, &pool, &redis).await?; - - // Get the views - let views = crate::clickhouse::fetch_views( - project_ids, - version_ids, - start_date, - end_date, - resolution_minutes, - clickhouse.into_inner(), - ) - .await?; - - let mut hm = HashMap::new(); - for views in views { - let id_string = to_base62(views.id); - if !hm.contains_key(&id_string) { - hm.insert(id_string.clone(), HashMap::new()); - } - if let Some(hm) = hm.get_mut(&id_string) { - hm.insert(views.time, views.total_views); - } - } - - Ok(HttpResponse::Ok().json(hm)) } /// Get download data for a set of projects or versions @@ -236,66 +136,22 @@ pub async fn 
downloads_get( pool: web::Data, redis: web::Data, ) -> Result { - let user_option = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::ANALYTICS]), + let data = data.into_inner(); + v3::analytics_get::downloads_get( + req, + clickhouse, + web::Query(v3::analytics_get::GetData { + project_ids: data.project_ids, + version_ids: data.version_ids, + start_date: data.start_date, + end_date: data.end_date, + resolution_minutes: data.resolution_minutes, + }), + session_queue, + pool, + redis, ) .await - .map(|x| x.1)?; - - let project_ids = data - .project_ids - .as_ref() - .map(|ids| serde_json::from_str::>(ids)) - .transpose()?; - let version_ids = data - .version_ids - .as_ref() - .map(|ids| serde_json::from_str::>(ids)) - .transpose()?; - - if project_ids.is_some() && version_ids.is_some() { - return Err(ApiError::InvalidInput( - "Only one of 'project_ids' or 'version_ids' should be used.".to_string(), - )); - } - - let start_date = data.start_date.unwrap_or(Utc::now() - Duration::weeks(2)); - let end_date = data.end_date.unwrap_or(Utc::now()); - let resolution_minutes = data.resolution_minutes.unwrap_or(60 * 24); - - // Convert String list to list of ProjectIds or VersionIds - // - Filter out unauthorized projects/versions - // - If no project_ids or version_ids are provided, we default to all projects the user has access to - let (project_ids, version_ids) = - filter_allowed_ids(project_ids, version_ids, user_option, &pool, &redis).await?; - - // Get the downloads - let downloads = crate::clickhouse::fetch_downloads( - project_ids, - version_ids, - start_date, - end_date, - resolution_minutes, - clickhouse.into_inner(), - ) - .await?; - - let mut hm = HashMap::new(); - for downloads in downloads { - let id_string = to_base62(downloads.id); - if !hm.contains_key(&id_string) { - hm.insert(id_string.clone(), HashMap::new()); - } - if let Some(hm) = hm.get_mut(&id_string) { - hm.insert(downloads.time, downloads.total_downloads); - } - 
} - - Ok(HttpResponse::Ok().json(hm)) } /// Get payout data for a set of projects @@ -315,77 +171,21 @@ pub async fn revenue_get( pool: web::Data, redis: web::Data, ) -> Result { - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::PAYOUTS_READ]), + let data = data.into_inner(); + v3::analytics_get::revenue_get( + req, + web::Query(v3::analytics_get::GetData { + project_ids: data.project_ids, + version_ids: None, + start_date: data.start_date, + end_date: data.end_date, + resolution_minutes: data.resolution_minutes, + }), + session_queue, + pool, + redis, ) .await - .map(|x| x.1)?; - - let project_ids = data - .project_ids - .as_ref() - .map(|ids| serde_json::from_str::>(ids)) - .transpose()?; - - let start_date = data.start_date.unwrap_or(Utc::now() - Duration::weeks(2)); - let end_date = data.end_date.unwrap_or(Utc::now()); - let resolution_minutes = data.resolution_minutes.unwrap_or(60 * 24); - - // Round up/down to nearest duration as we are using pgadmin, does not have rounding in the fetch command - // Round start_date down to nearest resolution - let diff = start_date.timestamp() % (resolution_minutes as i64 * 60); - let start_date = start_date - Duration::seconds(diff); - - // Round end_date up to nearest resolution - let diff = end_date.timestamp() % (resolution_minutes as i64 * 60); - let end_date = end_date + Duration::seconds((resolution_minutes as i64 * 60) - diff); - - // Convert String list to list of ProjectIds or VersionIds - // - Filter out unauthorized projects/versions - // - If no project_ids or version_ids are provided, we default to all projects the user has access to - let (project_ids, _) = filter_allowed_ids(project_ids, None, user, &pool, &redis).await?; - - let duration: PgInterval = Duration::minutes(resolution_minutes as i64) - .try_into() - .unwrap(); - // Get the revenue data - let payouts_values = sqlx::query!( - " - SELECT mod_id, SUM(amount) amount_sum, DATE_BIN($4::interval, created, 
TIMESTAMP '2001-01-01') AS interval_start - FROM payouts_values - WHERE mod_id = ANY($1) AND created BETWEEN $2 AND $3 - GROUP by mod_id, interval_start ORDER BY interval_start - ", - &project_ids.unwrap_or_default().into_iter().map(|x| x.0 as i64).collect::>(), - start_date, - end_date, - duration, - ) - .fetch_all(&**pool) - .await?; - - let mut hm = HashMap::new(); - for value in payouts_values { - if let Some(mod_id) = value.mod_id { - if let Some(amount) = value.amount_sum { - if let Some(interval_start) = value.interval_start { - let id_string = to_base62(mod_id as u64); - if !hm.contains_key(&id_string) { - hm.insert(id_string.clone(), HashMap::new()); - } - if let Some(hm) = hm.get_mut(&id_string) { - hm.insert(interval_start.timestamp(), amount); - } - } - } - } - } - - Ok(HttpResponse::Ok().json(hm)) } /// Get country data for a set of projects or versions @@ -409,64 +209,22 @@ pub async fn countries_downloads_get( pool: web::Data, redis: web::Data, ) -> Result { - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::ANALYTICS]), + let data = data.into_inner(); + v3::analytics_get::countries_downloads_get( + req, + clickhouse, + web::Query(v3::analytics_get::GetData { + project_ids: data.project_ids, + version_ids: data.version_ids, + start_date: data.start_date, + end_date: data.end_date, + resolution_minutes: data.resolution_minutes, + }), + session_queue, + pool, + redis, ) .await - .map(|x| x.1)?; - - let project_ids = data - .project_ids - .as_ref() - .map(|ids| serde_json::from_str::>(ids)) - .transpose()?; - let version_ids = data - .version_ids - .as_ref() - .map(|ids| serde_json::from_str::>(ids)) - .transpose()?; - - if project_ids.is_some() && version_ids.is_some() { - return Err(ApiError::InvalidInput( - "Only one of 'project_ids' or 'version_ids' should be used.".to_string(), - )); - } - - let start_date = data.start_date.unwrap_or(Utc::now() - Duration::weeks(2)); - let end_date = 
data.end_date.unwrap_or(Utc::now()); - - // Convert String list to list of ProjectIds or VersionIds - // - Filter out unauthorized projects/versions - // - If no project_ids or version_ids are provided, we default to all projects the user has access to - let (project_ids, version_ids) = - filter_allowed_ids(project_ids, version_ids, user, &pool, &redis).await?; - - // Get the countries - let countries = crate::clickhouse::fetch_countries( - project_ids, - version_ids, - start_date, - end_date, - clickhouse.into_inner(), - ) - .await?; - - let mut hm = HashMap::new(); - for views in countries { - let id_string = to_base62(views.id); - if !hm.contains_key(&id_string) { - hm.insert(id_string.clone(), HashMap::new()); - } - if let Some(hm) = hm.get_mut(&id_string) { - hm.insert(views.country, views.total_downloads); - } - } - - Ok(HttpResponse::Ok().json(hm)) } /// Get country data for a set of projects or versions @@ -490,126 +248,20 @@ pub async fn countries_views_get( pool: web::Data, redis: web::Data, ) -> Result { - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::ANALYTICS]), + let data = data.into_inner(); + v3::analytics_get::countries_views_get( + req, + clickhouse, + web::Query(v3::analytics_get::GetData { + project_ids: data.project_ids, + version_ids: data.version_ids, + start_date: data.start_date, + end_date: data.end_date, + resolution_minutes: data.resolution_minutes, + }), + session_queue, + pool, + redis, ) .await - .map(|x| x.1)?; - - let project_ids = data - .project_ids - .as_ref() - .map(|ids| serde_json::from_str::>(ids)) - .transpose()?; - let version_ids = data - .version_ids - .as_ref() - .map(|ids| serde_json::from_str::>(ids)) - .transpose()?; - - if project_ids.is_some() && version_ids.is_some() { - return Err(ApiError::InvalidInput( - "Only one of 'project_ids' or 'version_ids' should be used.".to_string(), - )); - } - - let start_date = data.start_date.unwrap_or(Utc::now() - 
Duration::weeks(2)); - let end_date = data.end_date.unwrap_or(Utc::now()); - - // Convert String list to list of ProjectIds or VersionIds - // - Filter out unauthorized projects/versions - // - If no project_ids or version_ids are provided, we default to all projects the user has access to - let (project_ids, version_ids) = - filter_allowed_ids(project_ids, version_ids, user, &pool, &redis).await?; - - // Get the countries - let countries = crate::clickhouse::fetch_countries( - project_ids, - version_ids, - start_date, - end_date, - clickhouse.into_inner(), - ) - .await?; - - let mut hm = HashMap::new(); - for views in countries { - let id_string = to_base62(views.id); - if !hm.contains_key(&id_string) { - hm.insert(id_string.clone(), HashMap::new()); - } - if let Some(hm) = hm.get_mut(&id_string) { - hm.insert(views.country, views.total_views); - } - } - - Ok(HttpResponse::Ok().json(hm)) -} - -async fn filter_allowed_ids( - mut project_ids: Option>, - version_ids: Option>, - user: crate::models::users::User, - pool: &web::Data, - redis: &RedisPool, -) -> Result<(Option>, Option>), ApiError> { - if project_ids.is_some() && version_ids.is_some() { - return Err(ApiError::InvalidInput( - "Only one of 'project_ids' or 'version_ids' should be used.".to_string(), - )); - } - - // If no project_ids or version_ids are provided, we default to all projects the user has access to - if project_ids.is_none() && version_ids.is_none() { - project_ids = Some( - user_item::User::get_projects(user.id.into(), &***pool, redis) - .await? 
- .into_iter() - .map(|x| ProjectId::from(x).to_string()) - .collect(), - ); - } - - // Convert String list to list of ProjectIds or VersionIds - // - Filter out unauthorized projects/versions - - let project_ids = if let Some(project_ids) = project_ids { - // Submitted project_ids are filtered by the user's permissions - let ids = project_ids - .iter() - .map(|id| Ok(ProjectId(parse_base62(id)?).into())) - .collect::, ApiError>>()?; - let projects = project_item::Project::get_many_ids(&ids, &***pool, redis).await?; - let ids: Vec = filter_authorized_projects(projects, &Some(user.clone()), pool) - .await? - .into_iter() - .map(|x| x.id) - .collect::>(); - Some(ids) - } else { - None - }; - let version_ids = if let Some(version_ids) = version_ids { - // Submitted version_ids are filtered by the user's permissions - let ids = version_ids - .iter() - .map(|id| Ok(VersionId(parse_base62(id)?).into())) - .collect::, ApiError>>()?; - let versions = version_item::Version::get_many(&ids, &***pool, redis).await?; - let ids: Vec = filter_authorized_versions(versions, &Some(user), pool) - .await? 
- .into_iter() - .map(|x| x.id) - .collect::>(); - Some(ids) - } else { - None - }; - - // Only one of project_ids or version_ids will be Some - Ok((project_ids, version_ids)) } diff --git a/src/routes/v2/collections.rs b/src/routes/v2/collections.rs index f37816bc..32412ab5 100644 --- a/src/routes/v2/collections.rs +++ b/src/routes/v2/collections.rs @@ -1,28 +1,16 @@ -use crate::auth::checks::{filter_authorized_collections, is_authorized_collection}; -use crate::auth::get_user_from_headers; -use crate::database::models::{collection_item, generate_collection_id, project_item}; use crate::database::redis::RedisPool; use crate::file_hosting::FileHost; -use crate::models::collections::{Collection, CollectionStatus}; -use crate::models::ids::base62_impl::parse_base62; -use crate::models::ids::{CollectionId, ProjectId}; -use crate::models::pats::Scopes; +use crate::models::collections::CollectionStatus; use crate::queue::session::AuthQueue; -use crate::routes::ApiError; -use crate::util::routes::read_from_payload; -use crate::util::validate::validation_errors_to_string; -use crate::{database, models}; +use crate::routes::v3::project_creation::CreateError; +use crate::routes::{v3, ApiError}; use actix_web::web::Data; use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse}; -use chrono::Utc; -use itertools::Itertools; use serde::{Deserialize, Serialize}; use sqlx::PgPool; use std::sync::Arc; use validator::Validate; -use super::project_creation::CreateError; - pub fn config(cfg: &mut web::ServiceConfig) { cfg.service(collections_get); cfg.service(collection_create); @@ -62,68 +50,18 @@ pub async fn collection_create( session_queue: Data, ) -> Result { let collection_create_data = collection_create_data.into_inner(); - - // The currently logged in user - let current_user = get_user_from_headers( - &req, - &**client, - &redis, - &session_queue, - Some(&[Scopes::COLLECTION_CREATE]), + v3::collections::collection_create( + req, + 
web::Json(v3::collections::CollectionCreateData { + title: collection_create_data.title, + description: collection_create_data.description, + projects: collection_create_data.projects, + }), + client, + redis, + session_queue, ) - .await? - .1; - - collection_create_data - .validate() - .map_err(|err| CreateError::InvalidInput(validation_errors_to_string(err, None)))?; - - let mut transaction = client.begin().await?; - - let collection_id: CollectionId = generate_collection_id(&mut transaction).await?.into(); - - let initial_project_ids = project_item::Project::get_many( - &collection_create_data.projects, - &mut *transaction, - &redis, - ) - .await? - .into_iter() - .map(|x| x.inner.id.into()) - .collect::>(); - - let collection_builder_actual = collection_item::CollectionBuilder { - collection_id: collection_id.into(), - user_id: current_user.id.into(), - title: collection_create_data.title, - description: collection_create_data.description, - status: CollectionStatus::Listed, - projects: initial_project_ids - .iter() - .copied() - .map(|x| x.into()) - .collect(), - }; - let collection_builder = collection_builder_actual.clone(); - - let now = Utc::now(); - collection_builder_actual.insert(&mut transaction).await?; - - let response = crate::models::collections::Collection { - id: collection_id, - user: collection_builder.user_id.into(), - title: collection_builder.title.clone(), - description: collection_builder.description.clone(), - created: now, - updated: now, - icon_url: None, - color: None, - status: collection_builder.status, - projects: initial_project_ids, - }; - transaction.commit().await?; - - Ok(HttpResponse::Ok().json(response)) + .await } #[derive(Serialize, Deserialize)] @@ -138,28 +76,14 @@ pub async fn collections_get( redis: web::Data, session_queue: web::Data, ) -> Result { - let ids = serde_json::from_str::>(&ids.ids)?; - let ids = ids - .into_iter() - .map(|x| parse_base62(x).map(|x| database::models::CollectionId(x as i64))) - .collect::, 
_>>()?; - - let collections_data = database::models::Collection::get_many(&ids, &**pool, &redis).await?; - - let user_option = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::COLLECTION_READ]), + v3::collections::collections_get( + req, + web::Query(v3::collections::CollectionIds { ids: ids.ids }), + pool, + redis, + session_queue, ) .await - .map(|x| x.1) - .ok(); - - let collections = filter_authorized_collections(collections_data, &user_option, &pool).await?; - - Ok(HttpResponse::Ok().json(collections)) } #[get("{id}")] @@ -170,27 +94,7 @@ pub async fn collection_get( redis: web::Data, session_queue: web::Data, ) -> Result { - let string = info.into_inner().0; - - let id = database::models::CollectionId(parse_base62(&string)? as i64); - let collection_data = database::models::Collection::get(id, &**pool, &redis).await?; - let user_option = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::COLLECTION_READ]), - ) - .await - .map(|x| x.1) - .ok(); - - if let Some(data) = collection_data { - if is_authorized_collection(&data, &user_option).await? { - return Ok(HttpResponse::Ok().json(Collection::from(data))); - } - } - Ok(HttpResponse::NotFound().body("")) + v3::collections::collection_get(req, info, pool, redis, session_queue).await } #[derive(Deserialize, Validate)] @@ -216,131 +120,21 @@ pub async fn collection_edit( redis: web::Data, session_queue: web::Data, ) -> Result { - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::COLLECTION_WRITE]), + let new_collection = new_collection.into_inner(); + v3::collections::collection_edit( + req, + info, + pool, + web::Json(v3::collections::EditCollection { + title: new_collection.title, + description: new_collection.description, + status: new_collection.status, + new_projects: new_collection.new_projects, + }), + redis, + session_queue, ) - .await? 
- .1; - - new_collection - .validate() - .map_err(|err| ApiError::Validation(validation_errors_to_string(err, None)))?; - - let string = info.into_inner().0; - let id = database::models::CollectionId(parse_base62(&string)? as i64); - let result = database::models::Collection::get(id, &**pool, &redis).await?; - - if let Some(collection_item) = result { - if !can_modify_collection(&collection_item, &user) { - return Ok(HttpResponse::Unauthorized().body("")); - } - - let id = collection_item.id; - - let mut transaction = pool.begin().await?; - - if let Some(title) = &new_collection.title { - sqlx::query!( - " - UPDATE collections - SET title = $1 - WHERE (id = $2) - ", - title.trim(), - id as database::models::ids::CollectionId, - ) - .execute(&mut *transaction) - .await?; - } - - if let Some(description) = &new_collection.description { - sqlx::query!( - " - UPDATE collections - SET description = $1 - WHERE (id = $2) - ", - description, - id as database::models::ids::CollectionId, - ) - .execute(&mut *transaction) - .await?; - } - - if let Some(status) = &new_collection.status { - if !(user.role.is_mod() - || collection_item.status.is_approved() && status.can_be_requested()) - { - return Err(ApiError::CustomAuthentication( - "You don't have permission to set this status!".to_string(), - )); - } - - sqlx::query!( - " - UPDATE collections - SET status = $1 - WHERE (id = $2) - ", - status.to_string(), - id as database::models::ids::CollectionId, - ) - .execute(&mut *transaction) - .await?; - } - - if let Some(new_project_ids) = &new_collection.new_projects { - // Delete all existing projects - sqlx::query!( - " - DELETE FROM collections_mods - WHERE collection_id = $1 - ", - collection_item.id as database::models::ids::CollectionId, - ) - .execute(&mut *transaction) - .await?; - - let collection_item_ids = new_project_ids - .iter() - .map(|_| collection_item.id.0) - .collect_vec(); - let mut validated_project_ids = Vec::new(); - for project_id in new_project_ids { - let 
project = database::models::Project::get(project_id, &**pool, &redis) - .await? - .ok_or_else(|| { - ApiError::InvalidInput(format!( - "The specified project {project_id} does not exist!" - )) - })?; - validated_project_ids.push(project.inner.id.0); - } - // Insert- don't throw an error if it already exists - sqlx::query!( - " - INSERT INTO collections_mods (collection_id, mod_id) - SELECT * FROM UNNEST ($1::int8[], $2::int8[]) - ON CONFLICT DO NOTHING - ", - &collection_item_ids[..], - &validated_project_ids[..], - ) - .execute(&mut *transaction) - .await?; - } - - database::models::Collection::clear_cache(collection_item.id, &redis).await?; - - transaction.commit().await?; - Ok(HttpResponse::NoContent().body("")) - } else { - Ok(HttpResponse::NotFound().body("")) - } + .await } #[derive(Serialize, Deserialize)] @@ -357,82 +151,20 @@ pub async fn collection_icon_edit( pool: web::Data, redis: web::Data, file_host: web::Data>, - mut payload: web::Payload, + payload: web::Payload, session_queue: web::Data, ) -> Result { - if let Some(content_type) = crate::util::ext::get_image_content_type(&ext.ext) { - let cdn_url = dotenvy::var("CDN_URL")?; - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::COLLECTION_WRITE]), - ) - .await? - .1; - - let string = info.into_inner().0; - let id = database::models::CollectionId(parse_base62(&string)? as i64); - let collection_item = database::models::Collection::get(id, &**pool, &redis) - .await? 
- .ok_or_else(|| { - ApiError::InvalidInput("The specified collection does not exist!".to_string()) - })?; - - if !can_modify_collection(&collection_item, &user) { - return Ok(HttpResponse::Unauthorized().body("")); - } - - if let Some(icon) = collection_item.icon_url { - let name = icon.split(&format!("{cdn_url}/")).nth(1); - - if let Some(icon_path) = name { - file_host.delete_file_version("", icon_path).await?; - } - } - - let bytes = - read_from_payload(&mut payload, 262144, "Icons must be smaller than 256KiB").await?; - - let color = crate::util::img::get_color_from_img(&bytes)?; - - let hash = sha1::Sha1::from(&bytes).hexdigest(); - let collection_id: CollectionId = collection_item.id.into(); - let upload_data = file_host - .upload_file( - content_type, - &format!("data/{}/{}.{}", collection_id, hash, ext.ext), - bytes.freeze(), - ) - .await?; - - let mut transaction = pool.begin().await?; - - sqlx::query!( - " - UPDATE collections - SET icon_url = $1, color = $2 - WHERE (id = $3) - ", - format!("{}/{}", cdn_url, upload_data.file_name), - color.map(|x| x as i32), - collection_item.id as database::models::ids::CollectionId, - ) - .execute(&mut *transaction) - .await?; - - database::models::Collection::clear_cache(collection_item.id, &redis).await?; - - transaction.commit().await?; - - Ok(HttpResponse::NoContent().body("")) - } else { - Err(ApiError::InvalidInput(format!( - "Invalid format for collection icon: {}", - ext.ext - ))) - } + v3::collections::collection_icon_edit( + web::Query(v3::collections::Extension { ext: ext.ext }), + req, + info, + pool, + redis, + file_host, + payload, + session_queue, + ) + .await } #[delete("{id}/icon")] @@ -444,54 +176,7 @@ pub async fn delete_collection_icon( file_host: web::Data>, session_queue: web::Data, ) -> Result { - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::COLLECTION_WRITE]), - ) - .await? 
- .1; - - let string = info.into_inner().0; - let id = database::models::CollectionId(parse_base62(&string)? as i64); - let collection_item = database::models::Collection::get(id, &**pool, &redis) - .await? - .ok_or_else(|| { - ApiError::InvalidInput("The specified collection does not exist!".to_string()) - })?; - if !can_modify_collection(&collection_item, &user) { - return Ok(HttpResponse::Unauthorized().body("")); - } - - let cdn_url = dotenvy::var("CDN_URL")?; - if let Some(icon) = collection_item.icon_url { - let name = icon.split(&format!("{cdn_url}/")).nth(1); - - if let Some(icon_path) = name { - file_host.delete_file_version("", icon_path).await?; - } - } - - let mut transaction = pool.begin().await?; - - sqlx::query!( - " - UPDATE collections - SET icon_url = NULL, color = NULL - WHERE (id = $1) - ", - collection_item.id as database::models::ids::CollectionId, - ) - .execute(&mut *transaction) - .await?; - - database::models::Collection::clear_cache(collection_item.id, &redis).await?; - - transaction.commit().await?; - - Ok(HttpResponse::NoContent().body("")) + v3::collections::delete_collection_icon(req, info, pool, redis, file_host, session_queue).await } #[delete("{id}")] @@ -502,44 +187,5 @@ pub async fn collection_delete( redis: web::Data, session_queue: web::Data, ) -> Result { - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::COLLECTION_DELETE]), - ) - .await? - .1; - - let string = info.into_inner().0; - let id = database::models::CollectionId(parse_base62(&string)? as i64); - let collection = database::models::Collection::get(id, &**pool, &redis) - .await? 
- .ok_or_else(|| { - ApiError::InvalidInput("The specified collection does not exist!".to_string()) - })?; - if !can_modify_collection(&collection, &user) { - return Ok(HttpResponse::Unauthorized().body("")); - } - let mut transaction = pool.begin().await?; - - let result = - database::models::Collection::remove(collection.id, &mut transaction, &redis).await?; - database::models::Collection::clear_cache(collection.id, &redis).await?; - - transaction.commit().await?; - - if result.is_some() { - Ok(HttpResponse::NoContent().body("")) - } else { - Ok(HttpResponse::NotFound().body("")) - } -} - -fn can_modify_collection( - collection: &database::models::Collection, - user: &models::users::User, -) -> bool { - collection.user_id == user.id.into() || user.role.is_mod() + v3::collections::collection_delete(req, info, pool, redis, session_queue).await } diff --git a/src/routes/v2/images.rs b/src/routes/v2/images.rs index 0d1eecbb..da6d2aea 100644 --- a/src/routes/v2/images.rs +++ b/src/routes/v2/images.rs @@ -1,17 +1,11 @@ use std::sync::Arc; -use crate::auth::{get_user_from_headers, is_authorized, is_authorized_version}; -use crate::database; -use crate::database::models::{project_item, report_item, thread_item, version_item}; use crate::database::redis::RedisPool; use crate::file_hosting::FileHost; use crate::models::ids::{ThreadMessageId, VersionId}; -use crate::models::images::{Image, ImageContext}; use crate::models::reports::ReportId; use crate::queue::session::AuthQueue; -use crate::routes::v2::threads::is_authorized_thread; -use crate::routes::ApiError; -use crate::util::routes::read_from_payload; +use crate::routes::{v3, ApiError}; use actix_web::{post, web, HttpRequest, HttpResponse}; use serde::{Deserialize, Serialize}; use sqlx::PgPool; @@ -40,195 +34,26 @@ pub async fn images_add( req: HttpRequest, web::Query(data): web::Query, file_host: web::Data>, - mut payload: web::Payload, + payload: web::Payload, pool: web::Data, redis: web::Data, session_queue: 
web::Data, ) -> Result { - if let Some(content_type) = crate::util::ext::get_image_content_type(&data.ext) { - let mut context = ImageContext::from_str(&data.context, None); - - let scopes = vec![context.relevant_scope()]; - - let cdn_url = dotenvy::var("CDN_URL")?; - let user = get_user_from_headers(&req, &**pool, &redis, &session_queue, Some(&scopes)) - .await? - .1; - - // Attempt to associated a supplied id with the context - // If the context cannot be found, or the user is not authorized to upload images for the context, return an error - match &mut context { - ImageContext::Project { project_id } => { - if let Some(id) = data.project_id { - let project = project_item::Project::get(&id, &**pool, &redis).await?; - if let Some(project) = project { - if is_authorized(&project.inner, &Some(user.clone()), &pool).await? { - *project_id = Some(project.inner.id.into()); - } else { - return Err(ApiError::CustomAuthentication( - "You are not authorized to upload images for this project" - .to_string(), - )); - } - } else { - return Err(ApiError::InvalidInput( - "The project could not be found.".to_string(), - )); - } - } - } - ImageContext::Version { version_id } => { - if let Some(id) = data.version_id { - let version = version_item::Version::get(id.into(), &**pool, &redis).await?; - if let Some(version) = version { - if is_authorized_version(&version.inner, &Some(user.clone()), &pool).await? - { - *version_id = Some(version.inner.id.into()); - } else { - return Err(ApiError::CustomAuthentication( - "You are not authorized to upload images for this version" - .to_string(), - )); - } - } else { - return Err(ApiError::InvalidInput( - "The version could not be found.".to_string(), - )); - } - } - } - ImageContext::ThreadMessage { thread_message_id } => { - if let Some(id) = data.thread_message_id { - let thread_message = thread_item::ThreadMessage::get(id.into(), &**pool) - .await? 
- .ok_or_else(|| { - ApiError::InvalidInput( - "The thread message could not found.".to_string(), - ) - })?; - let thread = thread_item::Thread::get(thread_message.thread_id, &**pool) - .await? - .ok_or_else(|| { - ApiError::InvalidInput( - "The thread associated with the thread message could not be found" - .to_string(), - ) - })?; - if is_authorized_thread(&thread, &user, &pool).await? { - *thread_message_id = Some(thread_message.id.into()); - } else { - return Err(ApiError::CustomAuthentication( - "You are not authorized to upload images for this thread message" - .to_string(), - )); - } - } - } - ImageContext::Report { report_id } => { - if let Some(id) = data.report_id { - let report = report_item::Report::get(id.into(), &**pool) - .await? - .ok_or_else(|| { - ApiError::InvalidInput("The report could not be found.".to_string()) - })?; - let thread = thread_item::Thread::get(report.thread_id, &**pool) - .await? - .ok_or_else(|| { - ApiError::InvalidInput( - "The thread associated with the report could not be found." - .to_string(), - ) - })?; - if is_authorized_thread(&thread, &user, &pool).await? 
{ - *report_id = Some(report.id.into()); - } else { - return Err(ApiError::CustomAuthentication( - "You are not authorized to upload images for this report".to_string(), - )); - } - } - } - ImageContext::Unknown => { - return Err(ApiError::InvalidInput( - "Context must be one of: project, version, thread_message, report".to_string(), - )); - } - } - - // Upload the image to the file host - let bytes = - read_from_payload(&mut payload, 1_048_576, "Icons must be smaller than 1MiB").await?; - - let hash = sha1::Sha1::from(&bytes).hexdigest(); - let upload_data = file_host - .upload_file( - content_type, - &format!("data/cached_images/{}.{}", hash, data.ext), - bytes.freeze(), - ) - .await?; - - let mut transaction = pool.begin().await?; - - let db_image: database::models::Image = database::models::Image { - id: database::models::generate_image_id(&mut transaction).await?, - url: format!("{}/{}", cdn_url, upload_data.file_name), - size: upload_data.content_length as u64, - created: chrono::Utc::now(), - owner_id: database::models::UserId::from(user.id), - context: context.context_as_str().to_string(), - project_id: if let ImageContext::Project { - project_id: Some(id), - } = context - { - Some(database::models::ProjectId::from(id)) - } else { - None - }, - version_id: if let ImageContext::Version { - version_id: Some(id), - } = context - { - Some(database::models::VersionId::from(id)) - } else { - None - }, - thread_message_id: if let ImageContext::ThreadMessage { - thread_message_id: Some(id), - } = context - { - Some(database::models::ThreadMessageId::from(id)) - } else { - None - }, - report_id: if let ImageContext::Report { - report_id: Some(id), - } = context - { - Some(database::models::ReportId::from(id)) - } else { - None - }, - }; - - // Insert - db_image.insert(&mut transaction).await?; - - let image = Image { - id: db_image.id.into(), - url: db_image.url, - size: db_image.size, - created: db_image.created, - owner_id: db_image.owner_id.into(), - context, - 
}; - - transaction.commit().await?; - - Ok(HttpResponse::Ok().json(image)) - } else { - Err(ApiError::InvalidInput( - "The specified file is not an image!".to_string(), - )) - } + v3::images::images_add( + req, + web::Query(v3::images::ImageUpload { + ext: data.ext, + context: data.context, + project_id: data.project_id, + version_id: data.version_id, + thread_message_id: data.thread_message_id, + report_id: data.report_id, + }), + file_host, + payload, + pool, + redis, + session_queue, + ) + .await } diff --git a/src/routes/v2/mod.rs b/src/routes/v2/mod.rs index 3f95ae6d..1dcb5f75 100644 --- a/src/routes/v2/mod.rs +++ b/src/routes/v2/mod.rs @@ -9,12 +9,12 @@ pub(crate) mod project_creation; mod projects; mod reports; mod statistics; -mod tags; +pub mod tags; mod teams; mod threads; mod users; mod version_creation; -mod version_file; +pub mod version_file; mod versions; pub use super::ApiError; diff --git a/src/routes/v2/moderation.rs b/src/routes/v2/moderation.rs index ebebf654..f44214de 100644 --- a/src/routes/v2/moderation.rs +++ b/src/routes/v2/moderation.rs @@ -1,9 +1,7 @@ use super::ApiError; -use crate::database; use crate::database::redis::RedisPool; -use crate::models::projects::ProjectStatus; use crate::queue::session::AuthQueue; -use crate::{auth::check_is_moderator_from_headers, models::pats::Scopes}; +use crate::routes::v3; use actix_web::{get, web, HttpRequest, HttpResponse}; use serde::Deserialize; use sqlx::PgPool; @@ -30,37 +28,12 @@ pub async fn get_projects( count: web::Query, session_queue: web::Data, ) -> Result { - check_is_moderator_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::PROJECT_READ]), + v3::moderation::get_projects( + req, + pool, + redis, + web::Query(v3::moderation::ResultCount { count: count.count }), + session_queue, ) - .await?; - - use futures::stream::TryStreamExt; - - let project_ids = sqlx::query!( - " - SELECT id FROM mods - WHERE status = $1 - ORDER BY queued ASC - LIMIT $2; - ", - 
ProjectStatus::Processing.as_str(), - count.count as i64 - ) - .fetch_many(&**pool) - .try_filter_map(|e| async { Ok(e.right().map(|m| database::models::ProjectId(m.id))) }) - .try_collect::>() - .await?; - - let projects: Vec<_> = database::Project::get_many_ids(&project_ids, &**pool, &redis) - .await? - .into_iter() - .map(crate::models::projects::Project::from) - .collect(); - - Ok(HttpResponse::Ok().json(projects)) + .await } diff --git a/src/routes/v2/notifications.rs b/src/routes/v2/notifications.rs index 10d7aa12..af04cafb 100644 --- a/src/routes/v2/notifications.rs +++ b/src/routes/v2/notifications.rs @@ -1,10 +1,7 @@ -use crate::auth::get_user_from_headers; -use crate::database; use crate::database::redis::RedisPool; use crate::models::ids::NotificationId; -use crate::models::notifications::Notification; -use crate::models::pats::Scopes; use crate::queue::session::AuthQueue; +use crate::routes::v3; use crate::routes::ApiError; use actix_web::{delete, get, patch, web, HttpRequest, HttpResponse}; use serde::{Deserialize, Serialize}; @@ -36,36 +33,14 @@ pub async fn notifications_get( redis: web::Data, session_queue: web::Data, ) -> Result { - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::NOTIFICATION_READ]), + v3::notifications::notifications_get( + req, + web::Query(v3::notifications::NotificationIds { ids: ids.ids }), + pool, + redis, + session_queue, ) - .await? - .1; - - use database::models::notification_item::Notification as DBNotification; - use database::models::NotificationId as DBNotificationId; - - let notification_ids: Vec = - serde_json::from_str::>(ids.ids.as_str())? 
- .into_iter() - .map(DBNotificationId::from) - .collect(); - - let notifications_data: Vec = - database::models::notification_item::Notification::get_many(¬ification_ids, &**pool) - .await?; - - let notifications: Vec = notifications_data - .into_iter() - .filter(|n| n.user_id == user.id.into() || user.role.is_admin()) - .map(Notification::from) - .collect(); - - Ok(HttpResponse::Ok().json(notifications)) + .await } #[get("{id}")] @@ -76,30 +51,7 @@ pub async fn notification_get( redis: web::Data, session_queue: web::Data, ) -> Result { - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::NOTIFICATION_READ]), - ) - .await? - .1; - - let id = info.into_inner().0; - - let notification_data = - database::models::notification_item::Notification::get(id.into(), &**pool).await?; - - if let Some(data) = notification_data { - if user.id == data.user_id.into() || user.role.is_admin() { - Ok(HttpResponse::Ok().json(Notification::from(data))) - } else { - Ok(HttpResponse::NotFound().body("")) - } - } else { - Ok(HttpResponse::NotFound().body("")) - } + v3::notifications::notification_get(req, info, pool, redis, session_queue).await } #[patch("{id}")] @@ -110,43 +62,7 @@ pub async fn notification_read( redis: web::Data, session_queue: web::Data, ) -> Result { - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::NOTIFICATION_WRITE]), - ) - .await? 
- .1; - - let id = info.into_inner().0; - - let notification_data = - database::models::notification_item::Notification::get(id.into(), &**pool).await?; - - if let Some(data) = notification_data { - if data.user_id == user.id.into() || user.role.is_admin() { - let mut transaction = pool.begin().await?; - - database::models::notification_item::Notification::read( - id.into(), - &mut transaction, - &redis, - ) - .await?; - - transaction.commit().await?; - - Ok(HttpResponse::NoContent().body("")) - } else { - Err(ApiError::CustomAuthentication( - "You are not authorized to read this notification!".to_string(), - )) - } - } else { - Ok(HttpResponse::NotFound().body("")) - } + v3::notifications::notification_read(req, info, pool, redis, session_queue).await } #[delete("{id}")] @@ -157,43 +73,7 @@ pub async fn notification_delete( redis: web::Data, session_queue: web::Data, ) -> Result { - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::NOTIFICATION_WRITE]), - ) - .await? 
- .1; - - let id = info.into_inner().0; - - let notification_data = - database::models::notification_item::Notification::get(id.into(), &**pool).await?; - - if let Some(data) = notification_data { - if data.user_id == user.id.into() || user.role.is_admin() { - let mut transaction = pool.begin().await?; - - database::models::notification_item::Notification::remove( - id.into(), - &mut transaction, - &redis, - ) - .await?; - - transaction.commit().await?; - - Ok(HttpResponse::NoContent().body("")) - } else { - Err(ApiError::CustomAuthentication( - "You are not authorized to delete this notification!".to_string(), - )) - } - } else { - Ok(HttpResponse::NotFound().body("")) - } + v3::notifications::notification_delete(req, info, pool, redis, session_queue).await } #[patch("notifications")] @@ -204,45 +84,14 @@ pub async fn notifications_read( redis: web::Data, session_queue: web::Data, ) -> Result { - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::NOTIFICATION_WRITE]), - ) - .await? - .1; - - let notification_ids = serde_json::from_str::>(&ids.ids)? 
- .into_iter() - .map(|x| x.into()) - .collect::>(); - - let mut transaction = pool.begin().await?; - - let notifications_data = - database::models::notification_item::Notification::get_many(¬ification_ids, &**pool) - .await?; - - let mut notifications: Vec = Vec::new(); - - for notification in notifications_data { - if notification.user_id == user.id.into() || user.role.is_admin() { - notifications.push(notification.id); - } - } - - database::models::notification_item::Notification::read_many( - ¬ifications, - &mut transaction, - &redis, + v3::notifications::notifications_read( + req, + web::Query(v3::notifications::NotificationIds { ids: ids.ids }), + pool, + redis, + session_queue, ) - .await?; - - transaction.commit().await?; - - Ok(HttpResponse::NoContent().body("")) + .await } #[delete("notifications")] @@ -253,43 +102,12 @@ pub async fn notifications_delete( redis: web::Data, session_queue: web::Data, ) -> Result { - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::NOTIFICATION_WRITE]), - ) - .await? - .1; - - let notification_ids = serde_json::from_str::>(&ids.ids)? 
- .into_iter() - .map(|x| x.into()) - .collect::>(); - - let mut transaction = pool.begin().await?; - - let notifications_data = - database::models::notification_item::Notification::get_many(¬ification_ids, &**pool) - .await?; - - let mut notifications: Vec = Vec::new(); - - for notification in notifications_data { - if notification.user_id == user.id.into() || user.role.is_admin() { - notifications.push(notification.id); - } - } - - database::models::notification_item::Notification::remove_many( - ¬ifications, - &mut transaction, - &redis, + v3::notifications::notifications_delete( + req, + web::Query(v3::notifications::NotificationIds { ids: ids.ids }), + pool, + redis, + session_queue, ) - .await?; - - transaction.commit().await?; - - Ok(HttpResponse::NoContent().body("")) + .await } diff --git a/src/routes/v2/organizations.rs b/src/routes/v2/organizations.rs index e4dc5f07..15ea4e6f 100644 --- a/src/routes/v2/organizations.rs +++ b/src/routes/v2/organizations.rs @@ -1,25 +1,14 @@ -use std::collections::HashMap; -use std::sync::Arc; - -use crate::auth::{filter_authorized_projects, get_user_from_headers}; -use crate::database::models::team_item::TeamMember; -use crate::database::models::{generate_organization_id, team_item, Organization}; use crate::database::redis::RedisPool; use crate::file_hosting::FileHost; -use crate::models::ids::base62_impl::parse_base62; -use crate::models::organizations::OrganizationId; -use crate::models::pats::Scopes; -use crate::models::teams::{OrganizationPermissions, ProjectPermissions}; +use crate::models::projects::Project; +use crate::models::v2::projects::LegacyProject; use crate::queue::session::AuthQueue; -use crate::routes::v2::project_creation::CreateError; -use crate::routes::ApiError; -use crate::util::routes::read_from_payload; -use crate::util::validate::validation_errors_to_string; -use crate::{database, models}; +use crate::routes::v3::project_creation::CreateError; +use crate::routes::{v2_reroute, v3, ApiError}; use 
actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse}; -use rust_decimal::Decimal; use serde::{Deserialize, Serialize}; use sqlx::PgPool; +use std::sync::Arc; use validator::Validate; pub fn config(cfg: &mut web::ServiceConfig) { @@ -58,82 +47,18 @@ pub async fn organization_create( redis: web::Data, session_queue: web::Data, ) -> Result { - let current_user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::ORGANIZATION_CREATE]), + let new_organization = new_organization.into_inner(); + v3::organizations::organization_create( + req, + web::Json(v3::organizations::NewOrganization { + title: new_organization.title, + description: new_organization.description, + }), + pool.clone(), + redis.clone(), + session_queue, ) - .await? - .1; - - new_organization - .validate() - .map_err(|err| CreateError::ValidationError(validation_errors_to_string(err, None)))?; - - let mut transaction = pool.begin().await?; - - // Try title - let title_organization_id_option: Option = parse_base62(&new_organization.title).ok(); - let mut organization_strings = vec![]; - if let Some(title_organization_id) = title_organization_id_option { - organization_strings.push(title_organization_id.to_string()); - } - organization_strings.push(new_organization.title.clone()); - let results = Organization::get_many(&organization_strings, &mut *transaction, &redis).await?; - if !results.is_empty() { - return Err(CreateError::SlugCollision); - } - - let organization_id = generate_organization_id(&mut transaction).await?; - - // Create organization managerial team - let team = team_item::TeamBuilder { - members: vec![team_item::TeamMemberBuilder { - user_id: current_user.id.into(), - role: models::teams::OWNER_ROLE.to_owned(), - permissions: ProjectPermissions::all(), - organization_permissions: Some(OrganizationPermissions::all()), - accepted: true, - payouts_split: Decimal::ONE_HUNDRED, - ordering: 0, - }], - }; - let team_id = team.insert(&mut 
transaction).await?; - - // Create organization - let organization = Organization { - id: organization_id, - title: new_organization.title.clone(), - description: new_organization.description.clone(), - team_id, - icon_url: None, - color: None, - }; - organization.clone().insert(&mut transaction).await?; - transaction.commit().await?; - - // Only member is the owner, the logged in one - let member_data = TeamMember::get_from_team_full(team_id, &**pool, &redis) - .await? - .into_iter() - .next(); - let members_data = if let Some(member_data) = member_data { - vec![crate::models::teams::TeamMember::from_model( - member_data, - current_user.clone(), - false, - )] - } else { - return Err(CreateError::InvalidInput( - "Failed to get created team.".to_owned(), // should never happen - )); - }; - - let organization = models::organizations::Organization::from(organization, members_data); - - Ok(HttpResponse::Ok().json(organization)) + .await } #[get("{id}")] @@ -144,57 +69,7 @@ pub async fn organization_get( redis: web::Data, session_queue: web::Data, ) -> Result { - let id = info.into_inner().0; - let current_user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::ORGANIZATION_READ]), - ) - .await - .map(|x| x.1) - .ok(); - let user_id = current_user.as_ref().map(|x| x.id.into()); - - let organization_data = Organization::get(&id, &**pool, &redis).await?; - if let Some(data) = organization_data { - let members_data = TeamMember::get_from_team_full(data.team_id, &**pool, &redis).await?; - - let users = crate::database::models::User::get_many_ids( - &members_data.iter().map(|x| x.user_id).collect::>(), - &**pool, - &redis, - ) - .await?; - let logged_in = current_user - .as_ref() - .and_then(|user| { - members_data - .iter() - .find(|x| x.user_id == user.id.into() && x.accepted) - }) - .is_some(); - let team_members: Vec<_> = members_data - .into_iter() - .filter(|x| { - logged_in - || x.accepted - || user_id - .map(|y: 
crate::database::models::UserId| y == x.user_id) - .unwrap_or(false) - }) - .flat_map(|data| { - users.iter().find(|x| x.id == data.user_id).map(|user| { - crate::models::teams::TeamMember::from(data, user.clone(), !logged_in) - }) - }) - .collect(); - - let organization = models::organizations::Organization::from(data, team_members); - return Ok(HttpResponse::Ok().json(organization)); - } - Ok(HttpResponse::NotFound().body("")) + v3::organizations::organization_get(req, info, pool.clone(), redis.clone(), session_queue).await } #[derive(Deserialize)] @@ -209,72 +84,14 @@ pub async fn organizations_get( redis: web::Data, session_queue: web::Data, ) -> Result { - let ids = serde_json::from_str::>(&ids.ids)?; - let organizations_data = Organization::get_many(&ids, &**pool, &redis).await?; - let team_ids = organizations_data - .iter() - .map(|x| x.team_id) - .collect::>(); - - let teams_data = TeamMember::get_from_team_full_many(&team_ids, &**pool, &redis).await?; - let users = database::models::User::get_many_ids( - &teams_data.iter().map(|x| x.user_id).collect::>(), - &**pool, - &redis, - ) - .await?; - - let current_user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::ORGANIZATION_READ]), + v3::organizations::organizations_get( + req, + web::Query(v3::organizations::OrganizationIds { ids: ids.ids }), + pool, + redis, + session_queue, ) .await - .map(|x| x.1) - .ok(); - let user_id = current_user.as_ref().map(|x| x.id.into()); - - let mut organizations = vec![]; - - let mut team_groups = HashMap::new(); - for item in teams_data { - team_groups.entry(item.team_id).or_insert(vec![]).push(item); - } - - for data in organizations_data { - let members_data = team_groups.remove(&data.team_id).unwrap_or(vec![]); - let logged_in = current_user - .as_ref() - .and_then(|user| { - members_data - .iter() - .find(|x| x.user_id == user.id.into() && x.accepted) - }) - .is_some(); - - let team_members: Vec<_> = members_data - .into_iter() - 
.filter(|x| { - logged_in - || x.accepted - || user_id - .map(|y: crate::database::models::UserId| y == x.user_id) - .unwrap_or(false) - }) - .flat_map(|data| { - users.iter().find(|x| x.id == data.user_id).map(|user| { - crate::models::teams::TeamMember::from(data, user.clone(), !logged_in) - }) - }) - .collect(); - - let organization = models::organizations::Organization::from(data, team_members); - organizations.push(organization); - } - - Ok(HttpResponse::Ok().json(organizations)) } #[derive(Serialize, Deserialize, Validate)] @@ -298,132 +115,19 @@ pub async fn organizations_edit( redis: web::Data, session_queue: web::Data, ) -> Result { - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::ORGANIZATION_WRITE]), + let new_organization = new_organization.into_inner(); + v3::organizations::organizations_edit( + req, + info, + web::Json(v3::organizations::OrganizationEdit { + description: new_organization.description, + title: new_organization.title, + }), + pool.clone(), + redis.clone(), + session_queue, ) - .await? - .1; - - new_organization - .validate() - .map_err(|err| ApiError::Validation(validation_errors_to_string(err, None)))?; - - let string = info.into_inner().0; - let result = database::models::Organization::get(&string, &**pool, &redis).await?; - if let Some(organization_item) = result { - let id = organization_item.id; - - let team_member = database::models::TeamMember::get_from_user_id( - organization_item.team_id, - user.id.into(), - &**pool, - ) - .await?; - - let permissions = - OrganizationPermissions::get_permissions_by_role(&user.role, &team_member); - - if let Some(perms) = permissions { - let mut transaction = pool.begin().await?; - if let Some(description) = &new_organization.description { - if !perms.contains(OrganizationPermissions::EDIT_DETAILS) { - return Err(ApiError::CustomAuthentication( - "You do not have the permissions to edit the description of this organization!" 
- .to_string(), - )); - } - sqlx::query!( - " - UPDATE organizations - SET description = $1 - WHERE (id = $2) - ", - description, - id as database::models::ids::OrganizationId, - ) - .execute(&mut *transaction) - .await?; - } - - if let Some(title) = &new_organization.title { - if !perms.contains(OrganizationPermissions::EDIT_DETAILS) { - return Err(ApiError::CustomAuthentication( - "You do not have the permissions to edit the title of this organization!" - .to_string(), - )); - } - - let title_organization_id_option: Option = parse_base62(title).ok(); - if let Some(title_organization_id) = title_organization_id_option { - let results = sqlx::query!( - " - SELECT EXISTS(SELECT 1 FROM organizations WHERE id=$1) - ", - title_organization_id as i64 - ) - .fetch_one(&mut *transaction) - .await?; - - if results.exists.unwrap_or(true) { - return Err(ApiError::InvalidInput( - "Title collides with other organization's id!".to_string(), - )); - } - } - - // Make sure the new title is different from the old one - // We are able to unwrap here because the title is always set - if !title.eq(&organization_item.title.clone()) { - let results = sqlx::query!( - " - SELECT EXISTS(SELECT 1 FROM organizations WHERE title = LOWER($1)) - ", - title - ) - .fetch_one(&mut *transaction) - .await?; - - if results.exists.unwrap_or(true) { - return Err(ApiError::InvalidInput( - "Title collides with other organization's id!".to_string(), - )); - } - } - - sqlx::query!( - " - UPDATE organizations - SET title = LOWER($1) - WHERE (id = $2) - ", - Some(title), - id as database::models::ids::OrganizationId, - ) - .execute(&mut *transaction) - .await?; - } - - database::models::Organization::clear_cache( - organization_item.id, - Some(organization_item.title), - &redis, - ) - .await?; - - transaction.commit().await?; - Ok(HttpResponse::NoContent().body("")) - } else { - Err(ApiError::CustomAuthentication( - "You do not have permission to edit this organization!".to_string(), - )) - } - } else { - 
Ok(HttpResponse::NotFound().body("")) - } + .await } #[delete("{id}")] @@ -434,60 +138,8 @@ pub async fn organization_delete( redis: web::Data, session_queue: web::Data, ) -> Result { - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::ORGANIZATION_DELETE]), - ) - .await? - .1; - let string = info.into_inner().0; - - let organization = database::models::Organization::get(&string, &**pool, &redis) - .await? - .ok_or_else(|| { - ApiError::InvalidInput("The specified organization does not exist!".to_string()) - })?; - - if !user.role.is_admin() { - let team_member = database::models::TeamMember::get_from_user_id_organization( - organization.id, - user.id.into(), - &**pool, - ) + v3::organizations::organization_delete(req, info, pool.clone(), redis.clone(), session_queue) .await - .map_err(ApiError::Database)? - .ok_or_else(|| { - ApiError::InvalidInput("The specified organization does not exist!".to_string()) - })?; - - let permissions = - OrganizationPermissions::get_permissions_by_role(&user.role, &Some(team_member)) - .unwrap_or_default(); - - if !permissions.contains(OrganizationPermissions::DELETE_ORGANIZATION) { - return Err(ApiError::CustomAuthentication( - "You don't have permission to delete this organization!".to_string(), - )); - } - } - - let mut transaction = pool.begin().await?; - let result = - database::models::Organization::remove(organization.id, &mut transaction, &redis).await?; - - transaction.commit().await?; - - database::models::Organization::clear_cache(organization.id, Some(organization.title), &redis) - .await?; - - if result.is_some() { - Ok(HttpResponse::NoContent().body("")) - } else { - Ok(HttpResponse::NotFound().body("")) - } } #[get("{id}/projects")] @@ -498,40 +150,23 @@ pub async fn organization_projects_get( redis: web::Data, session_queue: web::Data, ) -> Result { - let info = info.into_inner().0; - let current_user = get_user_from_headers( - &req, - &**pool, - &redis, - 
&session_queue, - Some(&[Scopes::ORGANIZATION_READ, Scopes::PROJECT_READ]), - ) - .await - .map(|x| x.1) - .ok(); - - let possible_organization_id: Option = parse_base62(&info).ok(); - use futures::TryStreamExt; - - let project_ids = sqlx::query!( - " - SELECT m.id FROM organizations o - INNER JOIN mods m ON m.organization_id = o.id - WHERE (o.id = $1 AND $1 IS NOT NULL) OR (o.title = $2 AND $2 IS NOT NULL) - ", - possible_organization_id.map(|x| x as i64), - info + let response = v3::organizations::organization_projects_get( + req, + info, + pool.clone(), + redis.clone(), + session_queue, ) - .fetch_many(&**pool) - .try_filter_map(|e| async { Ok(e.right().map(|m| crate::database::models::ProjectId(m.id))) }) - .try_collect::>() .await?; - let projects_data = - crate::database::models::Project::get_many_ids(&project_ids, &**pool, &redis).await?; - - let projects = filter_authorized_projects(projects_data, ¤t_user, &pool).await?; - Ok(HttpResponse::Ok().json(projects)) + // Convert v3 projects to v2 + match v2_reroute::extract_ok_json::>(response).await { + Ok(project) => { + let legacy_projects = LegacyProject::from_many(project, &**pool, &redis).await?; + Ok(HttpResponse::Ok().json(legacy_projects)) + } + Err(response) => Ok(response), + } } #[derive(Deserialize)] @@ -547,98 +182,18 @@ pub async fn organization_projects_add( redis: web::Data, session_queue: web::Data, ) -> Result { - let info = info.into_inner().0; - let current_user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::PROJECT_WRITE, Scopes::ORGANIZATION_WRITE]), + let project_info = project_info.into_inner(); + v3::organizations::organization_projects_add( + req, + info, + web::Json(v3::organizations::OrganizationProjectAdd { + project_id: project_info.project_id, + }), + pool.clone(), + redis.clone(), + session_queue, ) - .await? - .1; - - let organization = database::models::Organization::get(&info, &**pool, &redis) - .await? 
- .ok_or_else(|| { - ApiError::InvalidInput("The specified organization does not exist!".to_string()) - })?; - - let project_item = database::models::Project::get(&project_info.project_id, &**pool, &redis) - .await? - .ok_or_else(|| { - ApiError::InvalidInput("The specified project does not exist!".to_string()) - })?; - if project_item.inner.organization_id.is_some() { - return Err(ApiError::InvalidInput( - "The specified project is already owned by an organization!".to_string(), - )); - } - - let project_team_member = database::models::TeamMember::get_from_user_id_project( - project_item.inner.id, - current_user.id.into(), - &**pool, - ) - .await? - .ok_or_else(|| ApiError::InvalidInput("You are not a member of this project!".to_string()))?; - - let organization_team_member = database::models::TeamMember::get_from_user_id_organization( - organization.id, - current_user.id.into(), - &**pool, - ) - .await? - .ok_or_else(|| { - ApiError::InvalidInput("You are not a member of this organization!".to_string()) - })?; - - // Require ownership of a project to add it to an organization - if !current_user.role.is_admin() - && !project_team_member - .role - .eq(crate::models::teams::OWNER_ROLE) - { - return Err(ApiError::CustomAuthentication( - "You need to be an owner of a project to add it to an organization!".to_string(), - )); - } - - let permissions = OrganizationPermissions::get_permissions_by_role( - ¤t_user.role, - &Some(organization_team_member), - ) - .unwrap_or_default(); - if permissions.contains(OrganizationPermissions::ADD_PROJECT) { - let mut transaction = pool.begin().await?; - sqlx::query!( - " - UPDATE mods - SET organization_id = $1 - WHERE (id = $2) - ", - organization.id as database::models::OrganizationId, - project_item.inner.id as database::models::ids::ProjectId - ) - .execute(&mut *transaction) - .await?; - - transaction.commit().await?; - - database::models::TeamMember::clear_cache(project_item.inner.team_id, &redis).await?; - 
database::models::Project::clear_cache( - project_item.inner.id, - project_item.inner.slug, - None, - &redis, - ) - .await?; - } else { - return Err(ApiError::CustomAuthentication( - "You do not have permission to add projects to this organization!".to_string(), - )); - } - Ok(HttpResponse::Ok().finish()) + .await } #[delete("{organization_id}/projects/{project_id}")] @@ -649,83 +204,14 @@ pub async fn organization_projects_remove( redis: web::Data, session_queue: web::Data, ) -> Result { - let (organization_id, project_id) = info.into_inner(); - let current_user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::PROJECT_WRITE, Scopes::ORGANIZATION_WRITE]), - ) - .await? - .1; - - let organization = database::models::Organization::get(&organization_id, &**pool, &redis) - .await? - .ok_or_else(|| { - ApiError::InvalidInput("The specified organization does not exist!".to_string()) - })?; - - let project_item = database::models::Project::get(&project_id, &**pool, &redis) - .await? - .ok_or_else(|| { - ApiError::InvalidInput("The specified project does not exist!".to_string()) - })?; - - if !project_item - .inner - .organization_id - .eq(&Some(organization.id)) - { - return Err(ApiError::InvalidInput( - "The specified project is not owned by this organization!".to_string(), - )); - } - - let organization_team_member = database::models::TeamMember::get_from_user_id_organization( - organization.id, - current_user.id.into(), - &**pool, + v3::organizations::organization_projects_remove( + req, + info, + pool.clone(), + redis.clone(), + session_queue, ) - .await? 
- .ok_or_else(|| { - ApiError::InvalidInput("You are not a member of this organization!".to_string()) - })?; - - let permissions = OrganizationPermissions::get_permissions_by_role( - ¤t_user.role, - &Some(organization_team_member), - ) - .unwrap_or_default(); - if permissions.contains(OrganizationPermissions::REMOVE_PROJECT) { - let mut transaction = pool.begin().await?; - sqlx::query!( - " - UPDATE mods - SET organization_id = NULL - WHERE (id = $1) - ", - project_item.inner.id as database::models::ids::ProjectId - ) - .execute(&mut *transaction) - .await?; - - transaction.commit().await?; - - database::models::TeamMember::clear_cache(project_item.inner.team_id, &redis).await?; - database::models::Project::clear_cache( - project_item.inner.id, - project_item.inner.slug, - None, - &redis, - ) - .await?; - } else { - return Err(ApiError::CustomAuthentication( - "You do not have permission to add projects to this organization!".to_string(), - )); - } - Ok(HttpResponse::Ok().finish()) + .await } #[derive(Serialize, Deserialize)] @@ -742,102 +228,20 @@ pub async fn organization_icon_edit( pool: web::Data, redis: web::Data, file_host: web::Data>, - mut payload: web::Payload, + payload: web::Payload, session_queue: web::Data, ) -> Result { - if let Some(content_type) = crate::util::ext::get_image_content_type(&ext.ext) { - let cdn_url = dotenvy::var("CDN_URL")?; - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::ORGANIZATION_WRITE]), - ) - .await? - .1; - let string = info.into_inner().0; - - let organization_item = database::models::Organization::get(&string, &**pool, &redis) - .await? 
- .ok_or_else(|| { - ApiError::InvalidInput("The specified organization does not exist!".to_string()) - })?; - - if !user.role.is_mod() { - let team_member = database::models::TeamMember::get_from_user_id( - organization_item.team_id, - user.id.into(), - &**pool, - ) - .await - .map_err(ApiError::Database)?; - - let permissions = - OrganizationPermissions::get_permissions_by_role(&user.role, &team_member) - .unwrap_or_default(); - - if !permissions.contains(OrganizationPermissions::EDIT_DETAILS) { - return Err(ApiError::CustomAuthentication( - "You don't have permission to edit this organization's icon.".to_string(), - )); - } - } - - if let Some(icon) = organization_item.icon_url { - let name = icon.split(&format!("{cdn_url}/")).nth(1); - - if let Some(icon_path) = name { - file_host.delete_file_version("", icon_path).await?; - } - } - - let bytes = - read_from_payload(&mut payload, 262144, "Icons must be smaller than 256KiB").await?; - - let color = crate::util::img::get_color_from_img(&bytes)?; - - let hash = sha1::Sha1::from(&bytes).hexdigest(); - let organization_id: OrganizationId = organization_item.id.into(); - let upload_data = file_host - .upload_file( - content_type, - &format!("data/{}/{}.{}", organization_id, hash, ext.ext), - bytes.freeze(), - ) - .await?; - - let mut transaction = pool.begin().await?; - - sqlx::query!( - " - UPDATE organizations - SET icon_url = $1, color = $2 - WHERE (id = $3) - ", - format!("{}/{}", cdn_url, upload_data.file_name), - color.map(|x| x as i32), - organization_item.id as database::models::ids::OrganizationId, - ) - .execute(&mut *transaction) - .await?; - - database::models::Organization::clear_cache( - organization_item.id, - Some(organization_item.title), - &redis, - ) - .await?; - - transaction.commit().await?; - - Ok(HttpResponse::NoContent().body("")) - } else { - Err(ApiError::InvalidInput(format!( - "Invalid format for project icon: {}", - ext.ext - ))) - } + v3::organizations::organization_icon_edit( + 
web::Query(v3::organizations::Extension { ext: ext.ext }), + req, + info, + pool.clone(), + redis.clone(), + file_host, + payload, + session_queue, + ) + .await } #[delete("{id}/icon")] @@ -849,73 +253,13 @@ pub async fn delete_organization_icon( file_host: web::Data>, session_queue: web::Data, ) -> Result { - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::ORGANIZATION_WRITE]), + v3::organizations::delete_organization_icon( + req, + info, + pool.clone(), + redis.clone(), + file_host, + session_queue, ) - .await? - .1; - let string = info.into_inner().0; - - let organization_item = database::models::Organization::get(&string, &**pool, &redis) - .await? - .ok_or_else(|| { - ApiError::InvalidInput("The specified organization does not exist!".to_string()) - })?; - - if !user.role.is_mod() { - let team_member = database::models::TeamMember::get_from_user_id( - organization_item.team_id, - user.id.into(), - &**pool, - ) - .await - .map_err(ApiError::Database)?; - - let permissions = - OrganizationPermissions::get_permissions_by_role(&user.role, &team_member) - .unwrap_or_default(); - - if !permissions.contains(OrganizationPermissions::EDIT_DETAILS) { - return Err(ApiError::CustomAuthentication( - "You don't have permission to edit this organization's icon.".to_string(), - )); - } - } - - let cdn_url = dotenvy::var("CDN_URL")?; - if let Some(icon) = organization_item.icon_url { - let name = icon.split(&format!("{cdn_url}/")).nth(1); - - if let Some(icon_path) = name { - file_host.delete_file_version("", icon_path).await?; - } - } - - let mut transaction = pool.begin().await?; - - sqlx::query!( - " - UPDATE organizations - SET icon_url = NULL, color = NULL - WHERE (id = $1) - ", - organization_item.id as database::models::ids::OrganizationId, - ) - .execute(&mut *transaction) - .await?; - - database::models::Organization::clear_cache( - organization_item.id, - Some(organization_item.title), - &redis, - ) - .await?; - - 
transaction.commit().await?; - - Ok(HttpResponse::NoContent().body("")) + .await } diff --git a/src/routes/v2/project_creation.rs b/src/routes/v2/project_creation.rs index dadfc096..6e36de5c 100644 --- a/src/routes/v2/project_creation.rs +++ b/src/routes/v2/project_creation.rs @@ -1,147 +1,32 @@ -use super::version_creation::InitialVersionData; -use crate::auth::{get_user_from_headers, AuthenticationError}; -use crate::database::models::thread_item::ThreadBuilder; -use crate::database::models::{self, image_item, User}; +use crate::database::models::version_item; use crate::database::redis::RedisPool; -use crate::file_hosting::{FileHost, FileHostingError}; -use crate::models::error::ApiError; -use crate::models::ids::base62_impl::parse_base62; +use crate::file_hosting::FileHost; +use crate::models; use crate::models::ids::ImageId; -use crate::models::images::{Image, ImageContext}; -use crate::models::pats::Scopes; -use crate::models::projects::{ - DonationLink, License, MonetizationStatus, ProjectId, ProjectStatus, SideType, VersionId, - VersionStatus, -}; -use crate::models::teams::ProjectPermissions; -use crate::models::threads::ThreadType; -use crate::models::users::UserId; +use crate::models::projects::{DonationLink, Loader, Project, ProjectStatus, SideType}; +use crate::models::v2::projects::LegacyProject; use crate::queue::session::AuthQueue; -use crate::search::indexing::IndexingError; -use crate::util::routes::read_from_field; -use crate::util::validate::validation_errors_to_string; -use actix_multipart::{Field, Multipart}; -use actix_web::http::StatusCode; +use crate::routes::v3::project_creation::default_project_type; +use crate::routes::v3::project_creation::{CreateError, NewGalleryItem}; +use crate::routes::{v2_reroute, v3}; +use actix_multipart::Multipart; use actix_web::web::Data; use actix_web::{post, HttpRequest, HttpResponse}; -use chrono::Utc; -use futures::stream::StreamExt; -use image::ImageError; -use rust_decimal::Decimal; use 
serde::{Deserialize, Serialize}; +use serde_json::json; use sqlx::postgres::PgPool; + +use std::collections::HashMap; use std::sync::Arc; -use thiserror::Error; use validator::Validate; +use super::version_creation::InitialVersionData; + pub fn config(cfg: &mut actix_web::web::ServiceConfig) { cfg.service(project_create); } -#[derive(Error, Debug)] -pub enum CreateError { - #[error("Environment Error")] - EnvError(#[from] dotenvy::Error), - #[error("An unknown database error occurred")] - SqlxDatabaseError(#[from] sqlx::Error), - #[error("Database Error: {0}")] - DatabaseError(#[from] models::DatabaseError), - #[error("Indexing Error: {0}")] - IndexingError(#[from] IndexingError), - #[error("Error while parsing multipart payload: {0}")] - MultipartError(#[from] actix_multipart::MultipartError), - #[error("Error while parsing JSON: {0}")] - SerDeError(#[from] serde_json::Error), - #[error("Error while validating input: {0}")] - ValidationError(String), - #[error("Error while uploading file: {0}")] - FileHostingError(#[from] FileHostingError), - #[error("Error while validating uploaded file: {0}")] - FileValidationError(#[from] crate::validate::ValidationError), - #[error("{}", .0)] - MissingValueError(String), - #[error("Invalid format for image: {0}")] - InvalidIconFormat(String), - #[error("Error with multipart data: {0}")] - InvalidInput(String), - #[error("Invalid game version: {0}")] - InvalidGameVersion(String), - #[error("Invalid loader: {0}")] - InvalidLoader(String), - #[error("Invalid category: {0}")] - InvalidCategory(String), - #[error("Invalid file type for version file: {0}")] - InvalidFileType(String), - #[error("Slug collides with other project's id!")] - SlugCollision, - #[error("Authentication Error: {0}")] - Unauthorized(#[from] AuthenticationError), - #[error("Authentication Error: {0}")] - CustomAuthenticationError(String), - #[error("Image Parsing Error: {0}")] - ImageError(#[from] ImageError), -} - -impl actix_web::ResponseError for 
CreateError { - fn status_code(&self) -> StatusCode { - match self { - CreateError::EnvError(..) => StatusCode::INTERNAL_SERVER_ERROR, - CreateError::SqlxDatabaseError(..) => StatusCode::INTERNAL_SERVER_ERROR, - CreateError::DatabaseError(..) => StatusCode::INTERNAL_SERVER_ERROR, - CreateError::IndexingError(..) => StatusCode::INTERNAL_SERVER_ERROR, - CreateError::FileHostingError(..) => StatusCode::INTERNAL_SERVER_ERROR, - CreateError::SerDeError(..) => StatusCode::BAD_REQUEST, - CreateError::MultipartError(..) => StatusCode::BAD_REQUEST, - CreateError::MissingValueError(..) => StatusCode::BAD_REQUEST, - CreateError::InvalidIconFormat(..) => StatusCode::BAD_REQUEST, - CreateError::InvalidInput(..) => StatusCode::BAD_REQUEST, - CreateError::InvalidGameVersion(..) => StatusCode::BAD_REQUEST, - CreateError::InvalidLoader(..) => StatusCode::BAD_REQUEST, - CreateError::InvalidCategory(..) => StatusCode::BAD_REQUEST, - CreateError::InvalidFileType(..) => StatusCode::BAD_REQUEST, - CreateError::Unauthorized(..) => StatusCode::UNAUTHORIZED, - CreateError::CustomAuthenticationError(..) => StatusCode::UNAUTHORIZED, - CreateError::SlugCollision => StatusCode::BAD_REQUEST, - CreateError::ValidationError(..) => StatusCode::BAD_REQUEST, - CreateError::FileValidationError(..) => StatusCode::BAD_REQUEST, - CreateError::ImageError(..) => StatusCode::BAD_REQUEST, - } - } - - fn error_response(&self) -> HttpResponse { - HttpResponse::build(self.status_code()).json(ApiError { - error: match self { - CreateError::EnvError(..) => "environment_error", - CreateError::SqlxDatabaseError(..) => "database_error", - CreateError::DatabaseError(..) => "database_error", - CreateError::IndexingError(..) => "indexing_error", - CreateError::FileHostingError(..) => "file_hosting_error", - CreateError::SerDeError(..) => "invalid_input", - CreateError::MultipartError(..) => "invalid_input", - CreateError::MissingValueError(..) => "invalid_input", - CreateError::InvalidIconFormat(..) 
=> "invalid_input", - CreateError::InvalidInput(..) => "invalid_input", - CreateError::InvalidGameVersion(..) => "invalid_input", - CreateError::InvalidLoader(..) => "invalid_input", - CreateError::InvalidCategory(..) => "invalid_input", - CreateError::InvalidFileType(..) => "invalid_input", - CreateError::Unauthorized(..) => "unauthorized", - CreateError::CustomAuthenticationError(..) => "unauthorized", - CreateError::SlugCollision => "invalid_input", - CreateError::ValidationError(..) => "invalid_input", - CreateError::FileValidationError(..) => "invalid_input", - CreateError::ImageError(..) => "invalid_image", - }, - description: &self.to_string(), - }) - } -} - -fn default_project_type() -> String { - "mod".to_string() -} - -fn default_requested_status() -> ProjectStatus { +pub fn default_requested_status() -> ProjectStatus { ProjectStatus::Approved } @@ -248,757 +133,113 @@ struct ProjectCreateData { pub organization_id: Option, } -#[derive(Serialize, Deserialize, Validate, Clone)] -pub struct NewGalleryItem { - /// The name of the multipart item where the gallery media is located - pub item: String, - /// Whether the gallery item should show in search or not - pub featured: bool, - #[validate(length(min = 1, max = 2048))] - /// The title of the gallery item - pub title: Option, - #[validate(length(min = 1, max = 2048))] - /// The description of the gallery item - pub description: Option, - pub ordering: i64, -} - -pub struct UploadedFile { - pub file_id: String, - pub file_name: String, -} - -pub async fn undo_uploads( - file_host: &dyn FileHost, - uploaded_files: &[UploadedFile], -) -> Result<(), CreateError> { - for file in uploaded_files { - file_host - .delete_file_version(&file.file_id, &file.file_name) - .await?; - } - Ok(()) -} - #[post("project")] pub async fn project_create( req: HttpRequest, - mut payload: Multipart, + payload: Multipart, client: Data, redis: Data, file_host: Data>, session_queue: Data, ) -> Result { - let mut transaction = 
client.begin().await?; - let mut uploaded_files = Vec::new(); - - let result = project_create_inner( - req, - &mut payload, - &mut transaction, - &***file_host, - &mut uploaded_files, - &client, - &redis, - &session_queue, + // Convert V2 multipart payload to V3 multipart payload + let payload = v2_reroute::alter_actix_multipart( + payload, + req.headers().clone(), + |legacy_create: ProjectCreateData| { + // Side types will be applied to each version + let client_side = legacy_create.client_side; + let server_side = legacy_create.server_side; + + let project_type = legacy_create.project_type; + + let initial_versions = legacy_create + .initial_versions + .into_iter() + .map(|v| { + let mut fields = HashMap::new(); + fields.insert("client_side".to_string(), json!(client_side)); + fields.insert("server_side".to_string(), json!(server_side)); + fields.insert("game_versions".to_string(), json!(v.game_versions)); + + // Modpacks now use the "mrpack" loader, and loaders are converted to loader fields. + // Setting of 'project_type' directly is removed, it's loader-based now. 
+ if project_type == "modpack" { + fields.insert("mrpack_loaders".to_string(), json!(v.loaders)); + } + + let loaders = if project_type == "modpack" { + vec![Loader("mrpack".to_string())] + } else { + v.loaders + }; + + v3::version_creation::InitialVersionData { + project_id: v.project_id, + file_parts: v.file_parts, + version_number: v.version_number, + version_title: v.version_title, + version_body: v.version_body, + dependencies: v.dependencies, + release_channel: v.release_channel, + loaders, + featured: v.featured, + primary_file: v.primary_file, + status: v.status, + file_types: v.file_types, + uploaded_images: v.uploaded_images, + ordering: v.ordering, + fields, + } + }) + .collect(); + + Ok(v3::project_creation::ProjectCreateData { + title: legacy_create.title, + slug: legacy_create.slug, + description: legacy_create.description, + body: legacy_create.body, + initial_versions, + categories: legacy_create.categories, + additional_categories: legacy_create.additional_categories, + issues_url: legacy_create.issues_url, + source_url: legacy_create.source_url, + wiki_url: legacy_create.wiki_url, + license_url: legacy_create.license_url, + discord_url: legacy_create.discord_url, + donation_urls: legacy_create.donation_urls, + is_draft: legacy_create.is_draft, + license_id: legacy_create.license_id, + gallery_items: legacy_create.gallery_items, + requested_status: legacy_create.requested_status, + uploaded_images: legacy_create.uploaded_images, + organization_id: legacy_create.organization_id, + }) + }, ) - .await; - - if result.is_err() { - let undo_result = undo_uploads(&***file_host, &uploaded_files).await; - let rollback_result = transaction.rollback().await; - - undo_result?; - if let Err(e) = rollback_result { - return Err(e.into()); - } - } else { - transaction.commit().await?; - } - - result -} -/* - -Project Creation Steps: -Get logged in user - Must match the author in the version creation - -1. 
Data - - Gets "data" field from multipart form; must be first - - Verification: string lengths - - Create versions - - Some shared logic with version creation - - Create list of VersionBuilders - - Create ProjectBuilder - -2. Upload - - Icon: check file format & size - - Upload to backblaze & record URL - - Project files - - Check for matching version - - File size limits? - - Check file type - - Eventually, malware scan - - Upload to backblaze & create VersionFileBuilder - - + .await?; -3. Creation - - Database stuff - - Add project data to indexing queue -*/ - -#[allow(clippy::too_many_arguments)] -async fn project_create_inner( - req: HttpRequest, - payload: &mut Multipart, - transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, - file_host: &dyn FileHost, - uploaded_files: &mut Vec, - pool: &PgPool, - redis: &RedisPool, - session_queue: &AuthQueue, -) -> Result { - // The base URL for files uploaded to backblaze - let cdn_url = dotenvy::var("CDN_URL")?; - - // The currently logged in user - let current_user = get_user_from_headers( - &req, - pool, - redis, + // Call V3 project creation + let response = v3::project_creation::project_create( + req, + payload, + client.clone(), + redis.clone(), + file_host, session_queue, - Some(&[Scopes::PROJECT_CREATE]), ) - .await? - .1; - - let project_id: ProjectId = models::generate_project_id(transaction).await?.into(); - - let project_create_data; - let mut versions; - let mut versions_map = std::collections::HashMap::new(); - let mut gallery_urls = Vec::new(); - - let all_game_versions = - models::categories::GameVersion::list(&mut **transaction, redis).await?; - let all_loaders = models::categories::Loader::list(&mut **transaction, redis).await?; - - { - // The first multipart field must be named "data" and contain a - // JSON `ProjectCreateData` object. 
- - let mut field = payload - .next() - .await - .map(|m| m.map_err(CreateError::MultipartError)) - .unwrap_or_else(|| { - Err(CreateError::MissingValueError(String::from( - "No `data` field in multipart upload", - ))) - })?; - - let content_disposition = field.content_disposition(); - let name = content_disposition - .get_name() - .ok_or_else(|| CreateError::MissingValueError(String::from("Missing content name")))?; - - if name != "data" { - return Err(CreateError::InvalidInput(String::from( - "`data` field must come before file fields", - ))); - } - let mut data = Vec::new(); - while let Some(chunk) = field.next().await { - data.extend_from_slice(&chunk.map_err(CreateError::MultipartError)?); - } - let create_data: ProjectCreateData = serde_json::from_slice(&data)?; - - create_data - .validate() - .map_err(|err| CreateError::InvalidInput(validation_errors_to_string(err, None)))?; - - let slug_project_id_option: Option = parse_base62(&create_data.slug).ok(); - - if let Some(slug_project_id) = slug_project_id_option { - let results = sqlx::query!( - " - SELECT EXISTS(SELECT 1 FROM mods WHERE id=$1) - ", - slug_project_id as i64 - ) - .fetch_one(&mut **transaction) - .await - .map_err(|e| CreateError::DatabaseError(e.into()))?; - - if results.exists.unwrap_or(false) { - return Err(CreateError::SlugCollision); - } - } - - { - let results = sqlx::query!( - " - SELECT EXISTS(SELECT 1 FROM mods WHERE slug = LOWER($1)) - ", - create_data.slug - ) - .fetch_one(&mut **transaction) - .await - .map_err(|e| CreateError::DatabaseError(e.into()))?; - - if results.exists.unwrap_or(false) { - return Err(CreateError::SlugCollision); - } - } - - // Create VersionBuilders for the versions specified in `initial_versions` - versions = Vec::with_capacity(create_data.initial_versions.len()); - for (i, data) in create_data.initial_versions.iter().enumerate() { - // Create a map of multipart field names to version indices - for name in &data.file_parts { - if 
versions_map.insert(name.to_owned(), i).is_some() { - // If the name is already used - return Err(CreateError::InvalidInput(String::from( - "Duplicate multipart field name", - ))); - } - } - versions.push( - create_initial_version( - data, - project_id, - current_user.id, - &all_game_versions, - &all_loaders, - &create_data.project_type, - transaction, - ) - .await?, - ); - } - project_create_data = create_data; - } - - let project_type_id = models::categories::ProjectType::get_id( - project_create_data.project_type.as_str(), - &mut **transaction, - ) - .await? - .ok_or_else(|| { - CreateError::InvalidInput(format!( - "Project Type {} does not exist.", - project_create_data.project_type.clone() - )) - })?; - - let mut icon_data = None; - - let mut error = None; - while let Some(item) = payload.next().await { - let mut field: Field = item?; - - if error.is_some() { - continue; - } - - let result = async { - let content_disposition = field.content_disposition().clone(); - - let name = content_disposition.get_name().ok_or_else(|| { - CreateError::MissingValueError("Missing content name".to_string()) - })?; - - let (file_name, file_extension) = - super::version_creation::get_name_ext(&content_disposition)?; - - if name == "icon" { - if icon_data.is_some() { - return Err(CreateError::InvalidInput(String::from( - "Projects can only have one icon", - ))); - } - // Upload the icon to the cdn - icon_data = Some( - process_icon_upload( - uploaded_files, - project_id.0, - file_extension, - file_host, - field, - &cdn_url, - ) - .await?, - ); - return Ok(()); - } - - if let Some(gallery_items) = &project_create_data.gallery_items { - if gallery_items.iter().filter(|a| a.featured).count() > 1 { - return Err(CreateError::InvalidInput(String::from( - "Only one gallery image can be featured.", - ))); - } - - if let Some(item) = gallery_items.iter().find(|x| x.item == name) { - let data = read_from_field( - &mut field, - 5 * (1 << 20), - "Gallery image exceeds the maximum of 5MiB.", 
- ) - .await?; - - let hash = sha1::Sha1::from(&data).hexdigest(); - let (_, file_extension) = - super::version_creation::get_name_ext(&content_disposition)?; - let content_type = crate::util::ext::get_image_content_type(file_extension) - .ok_or_else(|| { - CreateError::InvalidIconFormat(file_extension.to_string()) - })?; - - let url = format!("data/{project_id}/images/{hash}.{file_extension}"); - let upload_data = file_host - .upload_file(content_type, &url, data.freeze()) - .await?; - - uploaded_files.push(UploadedFile { - file_id: upload_data.file_id, - file_name: upload_data.file_name, - }); - - gallery_urls.push(crate::models::projects::GalleryItem { - url: format!("{cdn_url}/{url}"), - featured: item.featured, - title: item.title.clone(), - description: item.description.clone(), - created: Utc::now(), - ordering: item.ordering, - }); - - return Ok(()); - } - } - - let index = if let Some(i) = versions_map.get(name) { - *i - } else { - return Err(CreateError::InvalidInput(format!( - "File `{file_name}` (field {name}) isn't specified in the versions data" - ))); + .await?; + + // Convert response to V2 format + match v2_reroute::extract_ok_json::(response).await { + Ok(project) => { + let version_item = match project.versions.first() { + Some(vid) => version_item::Version::get((*vid).into(), &**client, &redis).await?, + None => None, }; - - // `index` is always valid for these lists - let created_version = versions.get_mut(index).unwrap(); - let version_data = project_create_data.initial_versions.get(index).unwrap(); - - // Upload the new jar file - super::version_creation::upload_file( - &mut field, - file_host, - version_data.file_parts.len(), - uploaded_files, - &mut created_version.files, - &mut created_version.dependencies, - &cdn_url, - &content_disposition, - project_id, - created_version.version_id.into(), - &project_create_data.project_type, - version_data.loaders.clone(), - version_data.game_versions.clone(), - all_game_versions.clone(), - 
version_data.primary_file.is_some(), - version_data.primary_file.as_deref() == Some(name), - None, - transaction, - ) - .await?; - - Ok(()) - } - .await; - - if result.is_err() { - error = result.err(); - } - } - - if let Some(error) = error { - return Err(error); - } - - { - // Check to make sure that all specified files were uploaded - for (version_data, builder) in project_create_data - .initial_versions - .iter() - .zip(versions.iter()) - { - if version_data.file_parts.len() != builder.files.len() { - return Err(CreateError::InvalidInput(String::from( - "Some files were specified in initial_versions but not uploaded", - ))); - } + let project = LegacyProject::from(project, version_item); + Ok(HttpResponse::Ok().json(project)) } - - // Convert the list of category names to actual categories - let mut categories = Vec::with_capacity(project_create_data.categories.len()); - for category in &project_create_data.categories { - let id = models::categories::Category::get_id_project( - category, - project_type_id, - &mut **transaction, - ) - .await? - .ok_or_else(|| CreateError::InvalidCategory(category.clone()))?; - categories.push(id); - } - - let mut additional_categories = - Vec::with_capacity(project_create_data.additional_categories.len()); - for category in &project_create_data.additional_categories { - let id = models::categories::Category::get_id_project( - category, - project_type_id, - &mut **transaction, - ) - .await? 
- .ok_or_else(|| CreateError::InvalidCategory(category.clone()))?; - additional_categories.push(id); - } - - let team = models::team_item::TeamBuilder { - members: vec![models::team_item::TeamMemberBuilder { - user_id: current_user.id.into(), - role: crate::models::teams::OWNER_ROLE.to_owned(), - // Allow all permissions for project creator, even if attached to a project - permissions: ProjectPermissions::all(), - organization_permissions: None, - accepted: true, - payouts_split: Decimal::ONE_HUNDRED, - ordering: 0, - }], - }; - - let team_id = team.insert(transaction).await?; - - let status; - if project_create_data.is_draft.unwrap_or(false) { - status = ProjectStatus::Draft; - } else { - status = ProjectStatus::Processing; - - if project_create_data.initial_versions.is_empty() { - return Err(CreateError::InvalidInput(String::from( - "Project submitted for review with no initial versions", - ))); - } - } - - if !project_create_data.requested_status.can_be_requested() { - return Err(CreateError::InvalidInput(String::from( - "Specified requested status is not allowed to be requested", - ))); - } - - let client_side_id = models::categories::SideType::get_id( - project_create_data.client_side.as_str(), - &mut **transaction, - ) - .await? - .ok_or_else(|| { - CreateError::InvalidInput("Client side type specified does not exist.".to_string()) - })?; - - let server_side_id = models::categories::SideType::get_id( - project_create_data.server_side.as_str(), - &mut **transaction, - ) - .await? 
- .ok_or_else(|| { - CreateError::InvalidInput("Server side type specified does not exist.".to_string()) - })?; - - let license_id = - spdx::Expression::parse(&project_create_data.license_id).map_err(|err| { - CreateError::InvalidInput(format!("Invalid SPDX license identifier: {err}")) - })?; - - let mut donation_urls = vec![]; - - if let Some(urls) = &project_create_data.donation_urls { - for url in urls { - let platform_id = - models::categories::DonationPlatform::get_id(&url.id, &mut **transaction) - .await? - .ok_or_else(|| { - CreateError::InvalidInput(format!( - "Donation platform {} does not exist.", - url.id.clone() - )) - })?; - - donation_urls.push(models::project_item::DonationUrl { - platform_id, - platform_short: "".to_string(), - platform_name: "".to_string(), - url: url.url.clone(), - }) - } - } - - let project_builder_actual = models::project_item::ProjectBuilder { - project_id: project_id.into(), - project_type_id, - team_id, - organization_id: project_create_data.organization_id, - title: project_create_data.title, - description: project_create_data.description, - body: project_create_data.body, - icon_url: icon_data.clone().map(|x| x.0), - issues_url: project_create_data.issues_url, - source_url: project_create_data.source_url, - wiki_url: project_create_data.wiki_url, - - license_url: project_create_data.license_url, - discord_url: project_create_data.discord_url, - categories, - additional_categories, - initial_versions: versions, - status, - requested_status: Some(project_create_data.requested_status), - client_side: client_side_id, - server_side: server_side_id, - license: license_id.to_string(), - slug: Some(project_create_data.slug), - donation_urls, - gallery_items: gallery_urls - .iter() - .map(|x| models::project_item::GalleryItem { - image_url: x.url.clone(), - featured: x.featured, - title: x.title.clone(), - description: x.description.clone(), - created: x.created, - ordering: x.ordering, - }) - .collect(), - color: 
icon_data.and_then(|x| x.1), - monetization_status: MonetizationStatus::Monetized, - }; - let project_builder = project_builder_actual.clone(); - - let now = Utc::now(); - - let id = project_builder_actual.insert(transaction).await?; - User::clear_project_cache(&[current_user.id.into()], redis).await?; - - for image_id in project_create_data.uploaded_images { - if let Some(db_image) = - image_item::Image::get(image_id.into(), &mut **transaction, redis).await? - { - let image: Image = db_image.into(); - if !matches!(image.context, ImageContext::Project { .. }) - || image.context.inner_id().is_some() - { - return Err(CreateError::InvalidInput(format!( - "Image {} is not unused and in the 'project' context", - image_id - ))); - } - - sqlx::query!( - " - UPDATE uploaded_images - SET mod_id = $1 - WHERE id = $2 - ", - id as models::ids::ProjectId, - image_id.0 as i64 - ) - .execute(&mut **transaction) - .await?; - - image_item::Image::clear_cache(image.id.into(), redis).await?; - } else { - return Err(CreateError::InvalidInput(format!( - "Image {} does not exist", - image_id - ))); - } - } - - let thread_id = ThreadBuilder { - type_: ThreadType::Project, - members: vec![], - project_id: Some(id), - report_id: None, - } - .insert(transaction) - .await?; - - let response = crate::models::projects::Project { - id: project_id, - slug: project_builder.slug.clone(), - project_type: project_create_data.project_type.clone(), - team: team_id.into(), - organization: project_create_data.organization_id.map(|x| x.into()), - title: project_builder.title.clone(), - description: project_builder.description.clone(), - body: project_builder.body.clone(), - body_url: None, - published: now, - updated: now, - approved: None, - queued: None, - status, - requested_status: project_builder.requested_status, - moderator_message: None, - license: License { - id: project_create_data.license_id.clone(), - name: "".to_string(), - url: project_builder.license_url.clone(), - }, - client_side: 
project_create_data.client_side, - server_side: project_create_data.server_side, - downloads: 0, - followers: 0, - categories: project_create_data.categories, - additional_categories: project_create_data.additional_categories, - game_versions: vec![], - loaders: vec![], - versions: project_builder - .initial_versions - .iter() - .map(|v| v.version_id.into()) - .collect::>(), - icon_url: project_builder.icon_url.clone(), - issues_url: project_builder.issues_url.clone(), - source_url: project_builder.source_url.clone(), - wiki_url: project_builder.wiki_url.clone(), - discord_url: project_builder.discord_url.clone(), - donation_urls: project_create_data.donation_urls.clone(), - gallery: gallery_urls, - color: project_builder.color, - thread_id: thread_id.into(), - monetization_status: MonetizationStatus::Monetized, - }; - - Ok(HttpResponse::Ok().json(response)) - } -} - -async fn create_initial_version( - version_data: &InitialVersionData, - project_id: ProjectId, - author: UserId, - all_game_versions: &[models::categories::GameVersion], - all_loaders: &[models::categories::Loader], - project_type: &str, - transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, -) -> Result { - if version_data.project_id.is_some() { - return Err(CreateError::InvalidInput(String::from( - "Found project id in initial version for new project", - ))); - } - - version_data - .validate() - .map_err(|err| CreateError::ValidationError(validation_errors_to_string(err, None)))?; - - // Randomly generate a new id to be used for the version - let version_id: VersionId = models::generate_version_id(transaction).await?.into(); - - let game_versions = version_data - .game_versions - .iter() - .map(|x| { - all_game_versions - .iter() - .find(|y| y.version == x.0) - .ok_or_else(|| CreateError::InvalidGameVersion(x.0.clone())) - .map(|y| y.id) - }) - .collect::, CreateError>>()?; - - let loaders = version_data - .loaders - .iter() - .map(|x| { - all_loaders - .iter() - .find(|y| { - y.loader == x.0 - 
&& y.supported_project_types - .contains(&project_type.to_string()) - }) - .ok_or_else(|| CreateError::InvalidLoader(x.0.clone())) - .map(|y| y.id) - }) - .collect::, CreateError>>()?; - - let dependencies = version_data - .dependencies - .iter() - .map(|d| models::version_item::DependencyBuilder { - version_id: d.version_id.map(|x| x.into()), - project_id: d.project_id.map(|x| x.into()), - dependency_type: d.dependency_type.to_string(), - file_name: None, - }) - .collect::>(); - - let version = models::version_item::VersionBuilder { - version_id: version_id.into(), - project_id: project_id.into(), - author_id: author.into(), - name: version_data.version_title.clone(), - version_number: version_data.version_number.clone(), - changelog: version_data.version_body.clone().unwrap_or_default(), - files: Vec::new(), - dependencies, - game_versions, - loaders, - featured: version_data.featured, - status: VersionStatus::Listed, - version_type: version_data.release_channel.to_string(), - requested_status: None, - ordering: version_data.ordering, - }; - - Ok(version) -} - -async fn process_icon_upload( - uploaded_files: &mut Vec, - id: u64, - file_extension: &str, - file_host: &dyn FileHost, - mut field: Field, - cdn_url: &str, -) -> Result<(String, Option), CreateError> { - if let Some(content_type) = crate::util::ext::get_image_content_type(file_extension) { - let data = read_from_field(&mut field, 262144, "Icons must be smaller than 256KiB").await?; - - let color = crate::util::img::get_color_from_img(&data)?; - - let hash = sha1::Sha1::from(&data).hexdigest(); - let upload_data = file_host - .upload_file( - content_type, - &format!("data/{id}/{hash}.{file_extension}"), - data.freeze(), - ) - .await?; - - uploaded_files.push(UploadedFile { - file_id: upload_data.file_id, - file_name: upload_data.file_name.clone(), - }); - - Ok((format!("{}/{}", cdn_url, upload_data.file_name), color)) - } else { - Err(CreateError::InvalidIconFormat(file_extension.to_string())) + 
Err(response) => Ok(response), } } diff --git a/src/routes/v2/projects.rs b/src/routes/v2/projects.rs index 7da763d8..f85f785c 100644 --- a/src/routes/v2/projects.rs +++ b/src/routes/v2/projects.rs @@ -1,40 +1,24 @@ -use crate::auth::{filter_authorized_projects, get_user_from_headers, is_authorized}; -use crate::database; -use crate::database::models::image_item; -use crate::database::models::notification_item::NotificationBuilder; -use crate::database::models::project_item::{GalleryItem, ModCategory}; -use crate::database::models::thread_item::ThreadMessageBuilder; +use crate::database::models::{project_item, version_item}; use crate::database::redis::RedisPool; use crate::file_hosting::FileHost; use crate::models; -use crate::models::ids::base62_impl::parse_base62; -use crate::models::images::ImageContext; -use crate::models::notifications::NotificationBody; -use crate::models::pats::Scopes; use crate::models::projects::{ - DonationLink, MonetizationStatus, Project, ProjectId, ProjectStatus, SearchRequest, SideType, + DonationLink, MonetizationStatus, Project, ProjectStatus, SearchRequest, SideType, }; -use crate::models::teams::ProjectPermissions; -use crate::models::threads::MessageBody; +use crate::models::v2::projects::LegacyProject; use crate::queue::session::AuthQueue; -use crate::routes::ApiError; +use crate::routes::v3::projects::ProjectIds; +use crate::routes::{v2_reroute, v3, ApiError}; use crate::search::{search_for_project, SearchConfig, SearchError}; -use crate::util::img; -use crate::util::routes::read_from_payload; -use crate::util::validate::validation_errors_to_string; use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse}; use chrono::{DateTime, Utc}; -use futures::TryStreamExt; -use meilisearch_sdk::indexes::IndexesResults; use serde::{Deserialize, Serialize}; use serde_json::json; use sqlx::PgPool; +use std::collections::HashMap; use std::sync::Arc; use validator::Validate; -use database::models as db_models; -use 
db_models::ids as db_ids; - pub fn config(cfg: &mut web::ServiceConfig) { cfg.service(project_search); cfg.service(projects_get); @@ -70,7 +54,47 @@ pub async fn project_search( web::Query(info): web::Query, config: web::Data, ) -> Result { + // TODO: make this nicer + // Search now uses loader_fields instead of explicit 'client_side' and 'server_side' fields + // While the backend for this has changed, it doesnt affect much + // in the API calls except that 'versions:x' is now 'game_versions:x' + let facets: Option>> = if let Some(facets) = info.facets { + let facets = serde_json::from_str::>>(&facets)?; + Some( + facets + .into_iter() + .map(|facet| { + facet + .into_iter() + .map(|facet| { + let version = match facet.split(':').nth(1) { + Some(version) => version, + None => return facet.to_string(), + }; + + if facet.starts_with("versions:") { + format!("game_versions:{}", version) + } else { + facet.to_string() + } + }) + .collect::>() + }) + .collect(), + ) + } else { + None + }; + + let info = SearchRequest { + facets: facets.and_then(|x| serde_json::to_string(&x).ok()), + ..info + }; + let results = search_for_project(&info, &config).await?; + + // TODO: convert to v2 format-we may need a new v2 struct for this for 'original' format + Ok(HttpResponse::Ok().json(results)) } @@ -86,37 +110,22 @@ pub async fn random_projects_get( pool: web::Data, redis: web::Data, ) -> Result { - count - .validate() - .map_err(|err| ApiError::Validation(validation_errors_to_string(err, None)))?; - - let project_ids = sqlx::query!( - " - SELECT id FROM mods TABLESAMPLE SYSTEM_ROWS($1) WHERE status = ANY($2) - ", - count.count as i32, - &*crate::models::projects::ProjectStatus::iterator() - .filter(|x| x.is_searchable()) - .map(|x| x.to_string()) - .collect::>(), - ) - .fetch_many(&**pool) - .try_filter_map(|e| async { Ok(e.right().map(|m| db_ids::ProjectId(m.id))) }) - .try_collect::>() - .await?; - - let projects_data = db_models::Project::get_many_ids(&project_ids, &**pool, 
&redis) - .await? - .into_iter() - .map(Project::from) - .collect::>(); - - Ok(HttpResponse::Ok().json(projects_data)) -} - -#[derive(Serialize, Deserialize)] -pub struct ProjectIds { - pub ids: String, + let count = v3::projects::RandomProjects { count: count.count }; + + let response = + v3::projects::random_projects_get(web::Query(count), pool.clone(), redis.clone()).await?; + // Convert response to V2 format + match v2_reroute::extract_ok_json::(response).await { + Ok(project) => { + let version_item = match project.versions.first() { + Some(vid) => version_item::Version::get((*vid).into(), &**pool, &redis).await?, + None => None, + }; + let project = LegacyProject::from(project, version_item); + Ok(HttpResponse::Ok().json(project)) + } + Err(response) => Ok(response), + } } #[get("projects")] @@ -127,23 +136,24 @@ pub async fn projects_get( redis: web::Data, session_queue: web::Data, ) -> Result { - let ids = serde_json::from_str::>(&ids.ids)?; - let projects_data = db_models::Project::get_many(&ids, &**pool, &redis).await?; - - let user_option = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::PROJECT_READ]), + // Call V3 project creation + let response = v3::projects::projects_get( + req, + web::Query(ids), + pool.clone(), + redis.clone(), + session_queue, ) - .await - .map(|x| x.1) - .ok(); - - let projects = filter_authorized_projects(projects_data, &user_option, &pool).await?; + .await?; - Ok(HttpResponse::Ok().json(projects)) + // Convert response to V2 format + match v2_reroute::extract_ok_json::>(response).await { + Ok(project) => { + let legacy_projects = LegacyProject::from_many(project, &**pool, &redis).await?; + Ok(HttpResponse::Ok().json(legacy_projects)) + } + Err(response) => Ok(response), + } } #[get("{id}")] @@ -154,26 +164,24 @@ pub async fn project_get( redis: web::Data, session_queue: web::Data, ) -> Result { - let string = info.into_inner().0; - - let project_data = db_models::Project::get(&string, 
&**pool, &redis).await?; - let user_option = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::PROJECT_READ]), - ) - .await - .map(|x| x.1) - .ok(); - - if let Some(data) = project_data { - if is_authorized(&data.inner, &user_option, &pool).await? { - return Ok(HttpResponse::Ok().json(Project::from(data))); + // Convert V2 data to V3 data + + // Call V3 project creation + let response = + v3::projects::project_get(req, info, pool.clone(), redis.clone(), session_queue).await?; + + // Convert response to V2 format + match v2_reroute::extract_ok_json::(response).await { + Ok(project) => { + let version_item = match project.versions.first() { + Some(vid) => version_item::Version::get((*vid).into(), &**pool, &redis).await?, + None => None, + }; + let project = LegacyProject::from(project, version_item); + Ok(HttpResponse::Ok().json(project)) } + Err(response) => Ok(response), } - Ok(HttpResponse::NotFound().body("")) } //checks the validity of a project id or slug @@ -183,17 +191,7 @@ pub async fn project_get_check( pool: web::Data, redis: web::Data, ) -> Result { - let slug = info.into_inner().0; - - let project_data = db_models::Project::get(&slug, &**pool, &redis).await?; - - if let Some(project) = project_data { - Ok(HttpResponse::Ok().json(json! ({ - "id": models::ids::ProjectId::from(project.inner.id) - }))) - } else { - Ok(HttpResponse::NotFound().body("")) - } + v3::projects::project_get_check(info, pool, redis).await } #[derive(Serialize)] @@ -210,73 +208,8 @@ pub async fn dependency_list( redis: web::Data, session_queue: web::Data, ) -> Result { - let string = info.into_inner().0; - - let result = db_models::Project::get(&string, &**pool, &redis).await?; - - let user_option = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::PROJECT_READ]), - ) - .await - .map(|x| x.1) - .ok(); - - if let Some(project) = result { - if !is_authorized(&project.inner, &user_option, &pool).await? 
{ - return Ok(HttpResponse::NotFound().body("")); - } - - let dependencies = - database::Project::get_dependencies(project.inner.id, &**pool, &redis).await?; - - let project_ids = dependencies - .iter() - .filter_map(|x| { - if x.0.is_none() { - if let Some(mod_dependency_id) = x.2 { - Some(mod_dependency_id) - } else { - x.1 - } - } else { - x.1 - } - }) - .collect::>(); - - let dep_version_ids = dependencies - .iter() - .filter_map(|x| x.0) - .collect::>(); - let (projects_result, versions_result) = futures::future::try_join( - database::Project::get_many_ids(&project_ids, &**pool, &redis), - database::Version::get_many(&dep_version_ids, &**pool, &redis), - ) - .await?; - - let mut projects = projects_result - .into_iter() - .map(models::projects::Project::from) - .collect::>(); - let mut versions = versions_result - .into_iter() - .map(models::projects::Version::from) - .collect::>(); - - projects.sort_by(|a, b| b.published.cmp(&a.published)); - projects.dedup_by(|a, b| a.id == b.id); - - versions.sort_by(|a, b| b.date_published.cmp(&a.date_published)); - versions.dedup_by(|a, b| a.id == b.id); - - Ok(HttpResponse::Ok().json(DependencyInfo { projects, versions })) - } else { - Ok(HttpResponse::NotFound().body("")) - } + // TODO: requires V2 conversion and tests, probably + v3::projects::dependency_list(req, info, pool, redis, session_queue).await } #[derive(Serialize, Deserialize, Validate)] @@ -388,753 +321,80 @@ pub async fn project_edit( redis: web::Data, session_queue: web::Data, ) -> Result { - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::PROJECT_WRITE]), - ) - .await? 
- .1; - - new_project - .validate() - .map_err(|err| ApiError::Validation(validation_errors_to_string(err, None)))?; - - let string = info.into_inner().0; - let result = db_models::Project::get(&string, &**pool, &redis).await?; - - if let Some(project_item) = result { - let id = project_item.inner.id; - - let (team_member, organization_team_member) = - db_models::TeamMember::get_for_project_permissions( - &project_item.inner, - user.id.into(), - &**pool, - ) - .await?; - - let permissions = ProjectPermissions::get_permissions_by_role( - &user.role, - &team_member, - &organization_team_member, - ); - - if let Some(perms) = permissions { - let mut transaction = pool.begin().await?; - - if let Some(title) = &new_project.title { - if !perms.contains(ProjectPermissions::EDIT_DETAILS) { - return Err(ApiError::CustomAuthentication( - "You do not have the permissions to edit the title of this project!" - .to_string(), - )); - } - - sqlx::query!( - " - UPDATE mods - SET title = $1 - WHERE (id = $2) - ", - title.trim(), - id as db_ids::ProjectId, - ) - .execute(&mut *transaction) - .await?; - } - - if let Some(description) = &new_project.description { - if !perms.contains(ProjectPermissions::EDIT_DETAILS) { - return Err(ApiError::CustomAuthentication( - "You do not have the permissions to edit the description of this project!" - .to_string(), - )); - } - - sqlx::query!( - " - UPDATE mods - SET description = $1 - WHERE (id = $2) - ", - description, - id as db_ids::ProjectId, - ) - .execute(&mut *transaction) - .await?; - } - - if let Some(status) = &new_project.status { - if !perms.contains(ProjectPermissions::EDIT_DETAILS) { - return Err(ApiError::CustomAuthentication( - "You do not have the permissions to edit the status of this project!" 
- .to_string(), - )); - } - - if !(user.role.is_mod() - || !project_item.inner.status.is_approved() - && status == &ProjectStatus::Processing - || project_item.inner.status.is_approved() && status.can_be_requested()) - { - return Err(ApiError::CustomAuthentication( - "You don't have permission to set this status!".to_string(), - )); - } - - if status == &ProjectStatus::Processing { - if project_item.versions.is_empty() { - return Err(ApiError::InvalidInput(String::from( - "Project submitted for review with no initial versions", - ))); - } - - sqlx::query!( - " - UPDATE mods - SET moderation_message = NULL, moderation_message_body = NULL, queued = NOW() - WHERE (id = $1) - ", - id as db_ids::ProjectId, - ) - .execute(&mut *transaction) - .await?; - - sqlx::query!( - " - UPDATE threads - SET show_in_mod_inbox = FALSE - WHERE id = $1 - ", - project_item.thread_id as db_ids::ThreadId, - ) - .execute(&mut *transaction) - .await?; - } - - if status.is_approved() && !project_item.inner.status.is_approved() { - sqlx::query!( - " - UPDATE mods - SET approved = NOW() - WHERE id = $1 AND approved IS NULL - ", - id as db_ids::ProjectId, - ) - .execute(&mut *transaction) - .await?; - } - - if status.is_searchable() && !project_item.inner.webhook_sent { - if let Ok(webhook_url) = dotenvy::var("PUBLIC_DISCORD_WEBHOOK") { - crate::util::webhook::send_discord_webhook( - project_item.inner.id.into(), - &pool, - &redis, - webhook_url, - None, - ) - .await - .ok(); - - sqlx::query!( - " - UPDATE mods - SET webhook_sent = TRUE - WHERE id = $1 - ", - id as db_ids::ProjectId, - ) - .execute(&mut *transaction) - .await?; - } - } - - if user.role.is_mod() { - if let Ok(webhook_url) = dotenvy::var("MODERATION_DISCORD_WEBHOOK") { - crate::util::webhook::send_discord_webhook( - project_item.inner.id.into(), - &pool, - &redis, - webhook_url, - Some( - format!( - "**[{}]({}/user/{})** changed project status from **{}** to **{}**", - user.username, - dotenvy::var("SITE_URL")?, - user.username, - 
&project_item.inner.status.as_friendly_str(), - status.as_friendly_str(), - ) - .to_string(), - ), - ) - .await - .ok(); - } - } - - if team_member.map(|x| !x.accepted).unwrap_or(true) { - let notified_members = sqlx::query!( - " - SELECT tm.user_id id - FROM team_members tm - WHERE tm.team_id = $1 AND tm.accepted - ", - project_item.inner.team_id as db_ids::TeamId - ) - .fetch_many(&mut *transaction) - .try_filter_map(|e| async { Ok(e.right().map(|c| db_models::UserId(c.id))) }) - .try_collect::>() - .await?; - - NotificationBuilder { - body: NotificationBody::StatusChange { - project_id: project_item.inner.id.into(), - old_status: project_item.inner.status, - new_status: *status, - }, - } - .insert_many(notified_members, &mut transaction, &redis) - .await?; - } - - ThreadMessageBuilder { - author_id: Some(user.id.into()), - body: MessageBody::StatusChange { - new_status: *status, - old_status: project_item.inner.status, - }, - thread_id: project_item.thread_id, - } - .insert(&mut transaction) - .await?; - - sqlx::query!( - " - UPDATE mods - SET status = $1 - WHERE (id = $2) - ", - status.as_str(), - id as db_ids::ProjectId, - ) - .execute(&mut *transaction) - .await?; - - if project_item.inner.status.is_searchable() && !status.is_searchable() { - delete_from_index(id.into(), config).await?; - } - } - - if let Some(requested_status) = &new_project.requested_status { - if !perms.contains(ProjectPermissions::EDIT_DETAILS) { - return Err(ApiError::CustomAuthentication( - "You do not have the permissions to edit the requested status of this project!" 
- .to_string(), - )); - } - - if !requested_status - .map(|x| x.can_be_requested()) - .unwrap_or(true) - { - return Err(ApiError::InvalidInput(String::from( - "Specified status cannot be requested!", - ))); - } - - sqlx::query!( - " - UPDATE mods - SET requested_status = $1 - WHERE (id = $2) - ", - requested_status.map(|x| x.as_str()), - id as db_ids::ProjectId, - ) - .execute(&mut *transaction) - .await?; - } - - if perms.contains(ProjectPermissions::EDIT_DETAILS) { - if new_project.categories.is_some() { - sqlx::query!( - " - DELETE FROM mods_categories - WHERE joining_mod_id = $1 AND is_additional = FALSE - ", - id as db_ids::ProjectId, - ) - .execute(&mut *transaction) - .await?; - } - - if new_project.additional_categories.is_some() { - sqlx::query!( - " - DELETE FROM mods_categories - WHERE joining_mod_id = $1 AND is_additional = TRUE - ", - id as db_ids::ProjectId, - ) - .execute(&mut *transaction) - .await?; - } - } - - if let Some(categories) = &new_project.categories { - edit_project_categories( - categories, - &perms, - id as db_ids::ProjectId, - false, - &mut transaction, - ) - .await?; - } - - if let Some(categories) = &new_project.additional_categories { - edit_project_categories( - categories, - &perms, - id as db_ids::ProjectId, - true, - &mut transaction, - ) - .await?; - } - - if let Some(issues_url) = &new_project.issues_url { - if !perms.contains(ProjectPermissions::EDIT_DETAILS) { - return Err(ApiError::CustomAuthentication( - "You do not have the permissions to edit the issues URL of this project!" - .to_string(), - )); - } - - sqlx::query!( - " - UPDATE mods - SET issues_url = $1 - WHERE (id = $2) - ", - issues_url.as_deref(), - id as db_ids::ProjectId, - ) - .execute(&mut *transaction) - .await?; - } - - if let Some(source_url) = &new_project.source_url { - if !perms.contains(ProjectPermissions::EDIT_DETAILS) { - return Err(ApiError::CustomAuthentication( - "You do not have the permissions to edit the source URL of this project!" 
- .to_string(), - )); - } - - sqlx::query!( - " - UPDATE mods - SET source_url = $1 - WHERE (id = $2) - ", - source_url.as_deref(), - id as db_ids::ProjectId, - ) - .execute(&mut *transaction) - .await?; - } - - if let Some(wiki_url) = &new_project.wiki_url { - if !perms.contains(ProjectPermissions::EDIT_DETAILS) { - return Err(ApiError::CustomAuthentication( - "You do not have the permissions to edit the wiki URL of this project!" - .to_string(), - )); - } - - sqlx::query!( - " - UPDATE mods - SET wiki_url = $1 - WHERE (id = $2) - ", - wiki_url.as_deref(), - id as db_ids::ProjectId, - ) - .execute(&mut *transaction) - .await?; - } - - if let Some(license_url) = &new_project.license_url { - if !perms.contains(ProjectPermissions::EDIT_DETAILS) { - return Err(ApiError::CustomAuthentication( - "You do not have the permissions to edit the license URL of this project!" - .to_string(), - )); - } - - sqlx::query!( - " - UPDATE mods - SET license_url = $1 - WHERE (id = $2) - ", - license_url.as_deref(), - id as db_ids::ProjectId, - ) - .execute(&mut *transaction) - .await?; - } - - if let Some(discord_url) = &new_project.discord_url { - if !perms.contains(ProjectPermissions::EDIT_DETAILS) { - return Err(ApiError::CustomAuthentication( - "You do not have the permissions to edit the discord URL of this project!" - .to_string(), - )); - } - - sqlx::query!( - " - UPDATE mods - SET discord_url = $1 - WHERE (id = $2) - ", - discord_url.as_deref(), - id as db_ids::ProjectId, - ) - .execute(&mut *transaction) - .await?; - } - - if let Some(slug) = &new_project.slug { - if !perms.contains(ProjectPermissions::EDIT_DETAILS) { - return Err(ApiError::CustomAuthentication( - "You do not have the permissions to edit the slug of this project!" 
- .to_string(), - )); - } - - let slug_project_id_option: Option = parse_base62(slug).ok(); - if let Some(slug_project_id) = slug_project_id_option { - let results = sqlx::query!( - " - SELECT EXISTS(SELECT 1 FROM mods WHERE id=$1) - ", - slug_project_id as i64 - ) - .fetch_one(&mut *transaction) - .await?; - - if results.exists.unwrap_or(true) { - return Err(ApiError::InvalidInput( - "Slug collides with other project's id!".to_string(), - )); - } - } - - // Make sure the new slug is different from the old one - // We are able to unwrap here because the slug is always set - if !slug.eq(&project_item.inner.slug.clone().unwrap_or_default()) { - let results = sqlx::query!( - " - SELECT EXISTS(SELECT 1 FROM mods WHERE slug = LOWER($1)) - ", - slug - ) - .fetch_one(&mut *transaction) - .await?; - - if results.exists.unwrap_or(true) { - return Err(ApiError::InvalidInput( - "Slug collides with other project's id!".to_string(), - )); - } - } - - sqlx::query!( - " - UPDATE mods - SET slug = LOWER($1) - WHERE (id = $2) - ", - Some(slug), - id as db_ids::ProjectId, - ) - .execute(&mut *transaction) - .await?; - } - - if let Some(new_side) = &new_project.client_side { - if !perms.contains(ProjectPermissions::EDIT_DETAILS) { - return Err(ApiError::CustomAuthentication( - "You do not have the permissions to edit the side type of this mod!" - .to_string(), - )); - } - - let side_type_id = - db_models::categories::SideType::get_id(new_side.as_str(), &mut *transaction) - .await? - .expect("No database entry found for side type"); - - sqlx::query!( - " - UPDATE mods - SET client_side = $1 - WHERE (id = $2) - ", - side_type_id as db_models::SideTypeId, - id as db_ids::ProjectId, - ) - .execute(&mut *transaction) - .await?; - } - - if let Some(new_side) = &new_project.server_side { - if !perms.contains(ProjectPermissions::EDIT_DETAILS) { - return Err(ApiError::CustomAuthentication( - "You do not have the permissions to edit the side type of this project!" 
- .to_string(), - )); - } - - let side_type_id = - db_models::categories::SideType::get_id(new_side.as_str(), &mut *transaction) - .await? - .expect("No database entry found for side type"); - - sqlx::query!( - " - UPDATE mods - SET server_side = $1 - WHERE (id = $2) - ", - side_type_id as db_models::SideTypeId, - id as db_ids::ProjectId, - ) - .execute(&mut *transaction) - .await?; - } - - if let Some(license) = &new_project.license_id { - if !perms.contains(ProjectPermissions::EDIT_DETAILS) { - return Err(ApiError::CustomAuthentication( - "You do not have the permissions to edit the license of this project!" - .to_string(), - )); - } - - let mut license = license.clone(); - - if license.to_lowercase() == "arr" { - license = models::projects::DEFAULT_LICENSE_ID.to_string(); - } - - spdx::Expression::parse(&license).map_err(|err| { - ApiError::InvalidInput(format!("Invalid SPDX license identifier: {err}")) - })?; - - sqlx::query!( - " - UPDATE mods - SET license = $1 - WHERE (id = $2) - ", - license, - id as db_ids::ProjectId, - ) - .execute(&mut *transaction) - .await?; - } - if let Some(donations) = &new_project.donation_urls { - if !perms.contains(ProjectPermissions::EDIT_DETAILS) { - return Err(ApiError::CustomAuthentication( - "You do not have the permissions to edit the donation links of this project!" - .to_string(), - )); - } - - sqlx::query!( - " - DELETE FROM mods_donations - WHERE joining_mod_id = $1 - ", - id as db_ids::ProjectId, - ) - .execute(&mut *transaction) - .await?; - - for donation in donations { - let platform_id = db_models::categories::DonationPlatform::get_id( - &donation.id, - &mut *transaction, - ) - .await? 
- .ok_or_else(|| { - ApiError::InvalidInput(format!( - "Platform {} does not exist.", - donation.id.clone() - )) - })?; - - sqlx::query!( - " - INSERT INTO mods_donations (joining_mod_id, joining_platform_id, url) - VALUES ($1, $2, $3) - ", - id as db_ids::ProjectId, - platform_id as db_ids::DonationPlatformId, - donation.url - ) - .execute(&mut *transaction) - .await?; - } - } - - if let Some(moderation_message) = &new_project.moderation_message { - if !user.role.is_mod() - && (!project_item.inner.status.is_approved() || moderation_message.is_some()) - { - return Err(ApiError::CustomAuthentication( - "You do not have the permissions to edit the moderation message of this project!" - .to_string(), - )); - } - - sqlx::query!( - " - UPDATE mods - SET moderation_message = $1 - WHERE (id = $2) - ", - moderation_message.as_deref(), - id as db_ids::ProjectId, - ) - .execute(&mut *transaction) - .await?; - } - - if let Some(moderation_message_body) = &new_project.moderation_message_body { - if !user.role.is_mod() - && (!project_item.inner.status.is_approved() - || moderation_message_body.is_some()) - { - return Err(ApiError::CustomAuthentication( - "You do not have the permissions to edit the moderation message body of this project!" - .to_string(), - )); - } - - sqlx::query!( - " - UPDATE mods - SET moderation_message_body = $1 - WHERE (id = $2) - ", - moderation_message_body.as_deref(), - id as db_ids::ProjectId, - ) - .execute(&mut *transaction) - .await?; - } - - if let Some(body) = &new_project.body { - if !perms.contains(ProjectPermissions::EDIT_BODY) { - return Err(ApiError::CustomAuthentication( - "You do not have the permissions to edit the body of this project!" 
- .to_string(), - )); - } - - sqlx::query!( - " - UPDATE mods - SET body = $1 - WHERE (id = $2) - ", - body, - id as db_ids::ProjectId, - ) - .execute(&mut *transaction) - .await?; - } - - if let Some(monetization_status) = &new_project.monetization_status { - if !perms.contains(ProjectPermissions::EDIT_DETAILS) { - return Err(ApiError::CustomAuthentication( - "You do not have the permissions to edit the monetization status of this project!" - .to_string(), - )); - } - - if (*monetization_status == MonetizationStatus::ForceDemonetized - || project_item.inner.monetization_status - == MonetizationStatus::ForceDemonetized) - && !user.role.is_mod() - { - return Err(ApiError::CustomAuthentication( - "You do not have the permissions to edit the monetization status of this project!" - .to_string(), - )); - } - - sqlx::query!( - " - UPDATE mods - SET monetization_status = $1 - WHERE (id = $2) - ", - monetization_status.as_str(), - id as db_ids::ProjectId, - ) - .execute(&mut *transaction) - .await?; - } - - // check new description and body for links to associated images - // if they no longer exist in the description or body, delete them - let checkable_strings: Vec<&str> = vec![&new_project.description, &new_project.body] - .into_iter() - .filter_map(|x| x.as_ref().map(|y| y.as_str())) - .collect(); + let v2_new_project = new_project.into_inner(); + let client_side = v2_new_project.client_side.clone(); + let server_side = v2_new_project.server_side.clone(); + let new_slug = v2_new_project.slug.clone(); + + // TODO: Some kind of handling here to ensure project type is fine. + // We expect the version uploaded to be of loader type modpack, but there might not be a way to check here for that. + // After all, theoretically, they could be creating a genuine 'fabric' mod, and modpack no longer carries information on whether its a mod or modpack, + // as those are out to the versions. 
+ + // Ideally this would, if the project 'should' be a modpack: + // - change the loaders to mrpack only + // - add categories to the project for the corresponding loaders + + let new_project = v3::projects::EditProject { + title: v2_new_project.title, + description: v2_new_project.description, + body: v2_new_project.body, + categories: v2_new_project.categories, + additional_categories: v2_new_project.additional_categories, + issues_url: v2_new_project.issues_url, + source_url: v2_new_project.source_url, + wiki_url: v2_new_project.wiki_url, + license_url: v2_new_project.license_url, + discord_url: v2_new_project.discord_url, + donation_urls: v2_new_project.donation_urls, + license_id: v2_new_project.license_id, + slug: v2_new_project.slug, + status: v2_new_project.status, + requested_status: v2_new_project.requested_status, + moderation_message: v2_new_project.moderation_message, + moderation_message_body: v2_new_project.moderation_message_body, + monetization_status: v2_new_project.monetization_status, + }; - let context = ImageContext::Project { - project_id: Some(id.into()), - }; + // This returns 204 or failure so we don't need to do anything with it + let project_id = info.clone().0; + let mut response = v3::projects::project_edit( + req.clone(), + info, + pool.clone(), + config, + web::Json(new_project), + redis.clone(), + session_queue.clone(), + ) + .await?; - img::delete_unused_images(context, checkable_strings, &mut transaction, &redis).await?; - db_models::Project::clear_cache( - project_item.inner.id, - project_item.inner.slug, - None, - &redis, + // If client and server side were set, we will call + // the version setting route for each version to set the side types for each of them. 
+ if response.status().is_success() && (client_side.is_some() || server_side.is_some()) { + let project_item = + project_item::Project::get(&new_slug.unwrap_or(project_id), &**pool, &redis).await?; + let version_ids = project_item.map(|x| x.versions).unwrap_or_default(); + let versions = version_item::Version::get_many(&version_ids, &**pool, &redis).await?; + for version in versions { + let mut fields = HashMap::new(); + fields.insert("client_side".to_string(), json!(client_side)); + fields.insert("server_side".to_string(), json!(server_side)); + response = v3::versions::version_edit_helper( + req.clone(), + (version.inner.id.into(),), + pool.clone(), + redis.clone(), + v3::versions::EditVersion { + fields, + ..Default::default() + }, + session_queue.clone(), ) .await?; - - transaction.commit().await?; - Ok(HttpResponse::NoContent().body("")) - } else { - Err(ApiError::CustomAuthentication( - "You do not have permission to edit this project!".to_string(), - )) } - } else { - Ok(HttpResponse::NotFound().body("")) } -} - -#[derive(derive_new::new)] -pub struct CategoryChanges<'a> { - pub categories: &'a Option>, - pub add_categories: &'a Option>, - pub remove_categories: &'a Option>, + Ok(response) } #[derive(Deserialize, Validate)] @@ -1209,360 +469,30 @@ pub async fn projects_edit( redis: web::Data, session_queue: web::Data, ) -> Result { - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::PROJECT_WRITE]), + let bulk_edit_project = bulk_edit_project.into_inner(); + v3::projects::projects_edit( + req, + web::Query(ids), + pool.clone(), + web::Json(v3::projects::BulkEditProject { + categories: bulk_edit_project.categories, + add_categories: bulk_edit_project.add_categories, + remove_categories: bulk_edit_project.remove_categories, + additional_categories: bulk_edit_project.additional_categories, + add_additional_categories: bulk_edit_project.add_additional_categories, + remove_additional_categories: 
bulk_edit_project.remove_additional_categories, + donation_urls: bulk_edit_project.donation_urls, + add_donation_urls: bulk_edit_project.add_donation_urls, + remove_donation_urls: bulk_edit_project.remove_donation_urls, + issues_url: bulk_edit_project.issues_url, + source_url: bulk_edit_project.source_url, + wiki_url: bulk_edit_project.wiki_url, + discord_url: bulk_edit_project.discord_url, + }), + redis, + session_queue, ) - .await? - .1; - - bulk_edit_project - .validate() - .map_err(|err| ApiError::Validation(validation_errors_to_string(err, None)))?; - - let project_ids: Vec = serde_json::from_str::>(&ids.ids)? - .into_iter() - .map(|x| x.into()) - .collect(); - - let projects_data = db_models::Project::get_many_ids(&project_ids, &**pool, &redis).await?; - - if let Some(id) = project_ids - .iter() - .find(|x| !projects_data.iter().any(|y| x == &&y.inner.id)) - { - return Err(ApiError::InvalidInput(format!( - "Project {} not found", - ProjectId(id.0 as u64) - ))); - } - - let team_ids = projects_data - .iter() - .map(|x| x.inner.team_id) - .collect::>(); - let team_members = - db_models::TeamMember::get_from_team_full_many(&team_ids, &**pool, &redis).await?; - - let organization_ids = projects_data - .iter() - .filter_map(|x| x.inner.organization_id) - .collect::>(); - let organizations = - db_models::Organization::get_many_ids(&organization_ids, &**pool, &redis).await?; - - let organization_team_ids = organizations - .iter() - .map(|x| x.team_id) - .collect::>(); - let organization_team_members = - db_models::TeamMember::get_from_team_full_many(&organization_team_ids, &**pool, &redis) - .await?; - - let categories = db_models::categories::Category::list(&**pool, &redis).await?; - let donation_platforms = db_models::categories::DonationPlatform::list(&**pool, &redis).await?; - - let mut transaction = pool.begin().await?; - - for project in projects_data { - if !user.role.is_mod() { - let team_member = team_members - .iter() - .find(|x| x.team_id == 
project.inner.team_id && x.user_id == user.id.into()); - - let organization = project - .inner - .organization_id - .and_then(|oid| organizations.iter().find(|x| x.id == oid)); - - let organization_team_member = if let Some(organization) = organization { - organization_team_members - .iter() - .find(|x| x.team_id == organization.team_id && x.user_id == user.id.into()) - } else { - None - }; - - let permissions = ProjectPermissions::get_permissions_by_role( - &user.role, - &team_member.cloned(), - &organization_team_member.cloned(), - ) - .unwrap_or_default(); - - if team_member.is_some() { - if !permissions.contains(ProjectPermissions::EDIT_DETAILS) { - return Err(ApiError::CustomAuthentication(format!( - "You do not have the permissions to bulk edit project {}!", - project.inner.title - ))); - } - } else if project.inner.status.is_hidden() { - return Err(ApiError::InvalidInput(format!( - "Project {} not found", - ProjectId(project.inner.id.0 as u64) - ))); - } else { - return Err(ApiError::CustomAuthentication(format!( - "You are not a member of project {}!", - project.inner.title - ))); - }; - } - - bulk_edit_project_categories( - &categories, - &project.categories, - project.inner.id as db_ids::ProjectId, - CategoryChanges::new( - &bulk_edit_project.categories, - &bulk_edit_project.add_categories, - &bulk_edit_project.remove_categories, - ), - 3, - false, - &mut transaction, - ) - .await?; - - bulk_edit_project_categories( - &categories, - &project.additional_categories, - project.inner.id as db_ids::ProjectId, - CategoryChanges::new( - &bulk_edit_project.additional_categories, - &bulk_edit_project.add_additional_categories, - &bulk_edit_project.remove_additional_categories, - ), - 256, - true, - &mut transaction, - ) - .await?; - - let project_donations: Vec = project - .donation_urls - .into_iter() - .map(|d| DonationLink { - id: d.platform_short, - platform: d.platform_name, - url: d.url, - }) - .collect(); - let mut set_donation_links = - if let 
Some(donation_links) = bulk_edit_project.donation_urls.clone() { - donation_links - } else { - project_donations.clone() - }; - - if let Some(delete_donations) = &bulk_edit_project.remove_donation_urls { - for donation in delete_donations { - if let Some(pos) = set_donation_links - .iter() - .position(|x| donation.url == x.url && donation.id == x.id) - { - set_donation_links.remove(pos); - } - } - } - - if let Some(add_donations) = &bulk_edit_project.add_donation_urls { - set_donation_links.append(&mut add_donations.clone()); - } - - if set_donation_links != project_donations { - sqlx::query!( - " - DELETE FROM mods_donations - WHERE joining_mod_id = $1 - ", - project.inner.id as db_ids::ProjectId, - ) - .execute(&mut *transaction) - .await?; - - for donation in set_donation_links { - let platform_id = donation_platforms - .iter() - .find(|x| x.short == donation.id) - .ok_or_else(|| { - ApiError::InvalidInput(format!( - "Platform {} does not exist.", - donation.id.clone() - )) - })? - .id; - - sqlx::query!( - " - INSERT INTO mods_donations (joining_mod_id, joining_platform_id, url) - VALUES ($1, $2, $3) - ", - project.inner.id as db_ids::ProjectId, - platform_id as db_ids::DonationPlatformId, - donation.url - ) - .execute(&mut *transaction) - .await?; - } - } - - if let Some(issues_url) = &bulk_edit_project.issues_url { - sqlx::query!( - " - UPDATE mods - SET issues_url = $1 - WHERE (id = $2) - ", - issues_url.as_deref(), - project.inner.id as db_ids::ProjectId, - ) - .execute(&mut *transaction) - .await?; - } - - if let Some(source_url) = &bulk_edit_project.source_url { - sqlx::query!( - " - UPDATE mods - SET source_url = $1 - WHERE (id = $2) - ", - source_url.as_deref(), - project.inner.id as db_ids::ProjectId, - ) - .execute(&mut *transaction) - .await?; - } - - if let Some(wiki_url) = &bulk_edit_project.wiki_url { - sqlx::query!( - " - UPDATE mods - SET wiki_url = $1 - WHERE (id = $2) - ", - wiki_url.as_deref(), - project.inner.id as db_ids::ProjectId, - ) - 
.execute(&mut *transaction) - .await?; - } - - if let Some(discord_url) = &bulk_edit_project.discord_url { - sqlx::query!( - " - UPDATE mods - SET discord_url = $1 - WHERE (id = $2) - ", - discord_url.as_deref(), - project.inner.id as db_ids::ProjectId, - ) - .execute(&mut *transaction) - .await?; - } - - db_models::Project::clear_cache(project.inner.id, project.inner.slug, None, &redis).await?; - } - - transaction.commit().await?; - - Ok(HttpResponse::NoContent().body("")) -} - -pub async fn bulk_edit_project_categories( - all_db_categories: &[db_models::categories::Category], - project_categories: &Vec, - project_id: db_ids::ProjectId, - bulk_changes: CategoryChanges<'_>, - max_num_categories: usize, - is_additional: bool, - transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, -) -> Result<(), ApiError> { - let mut set_categories = if let Some(categories) = bulk_changes.categories.clone() { - categories - } else { - project_categories.clone() - }; - - if let Some(delete_categories) = &bulk_changes.remove_categories { - for category in delete_categories { - if let Some(pos) = set_categories.iter().position(|x| x == category) { - set_categories.remove(pos); - } - } - } - - if let Some(add_categories) = &bulk_changes.add_categories { - for category in add_categories { - if set_categories.len() < max_num_categories { - set_categories.push(category.clone()); - } else { - break; - } - } - } - - if &set_categories != project_categories { - sqlx::query!( - " - DELETE FROM mods_categories - WHERE joining_mod_id = $1 AND is_additional = $2 - ", - project_id as db_ids::ProjectId, - is_additional - ) - .execute(&mut **transaction) - .await?; - - let mut mod_categories = Vec::new(); - for category in set_categories { - let category_id = all_db_categories - .iter() - .find(|x| x.category == category) - .ok_or_else(|| { - ApiError::InvalidInput(format!("Category {} does not exist.", category.clone())) - })? 
- .id; - mod_categories.push(ModCategory::new(project_id, category_id, is_additional)); - } - ModCategory::insert_many(mod_categories, &mut *transaction).await?; - } - - Ok(()) -} - -pub async fn edit_project_categories( - categories: &Vec, - perms: &ProjectPermissions, - project_id: db_ids::ProjectId, - additional: bool, - transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, -) -> Result<(), ApiError> { - if !perms.contains(ProjectPermissions::EDIT_DETAILS) { - let additional_str = if additional { "additional " } else { "" }; - return Err(ApiError::CustomAuthentication(format!( - "You do not have the permissions to edit the {additional_str}categories of this project!" - ))); - } - - let mut mod_categories = Vec::new(); - for category in categories { - let category_id = db_models::categories::Category::get_id(category, &mut **transaction) - .await? - .ok_or_else(|| { - ApiError::InvalidInput(format!("Category {} does not exist.", category.clone())) - })?; - mod_categories.push(ModCategory::new(project_id, category_id, additional)); - } - ModCategory::insert_many(mod_categories, &mut *transaction).await?; - - Ok(()) + .await } #[derive(Deserialize)] @@ -1580,84 +510,19 @@ pub async fn project_schedule( session_queue: web::Data, scheduling_data: web::Json, ) -> Result { - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::PROJECT_WRITE]), + let scheduling_data = scheduling_data.into_inner(); + v3::projects::project_schedule( + req, + info, + pool, + redis, + session_queue, + web::Json(v3::projects::SchedulingData { + time: scheduling_data.time, + requested_status: scheduling_data.requested_status, + }), ) - .await? 
- .1; - - if scheduling_data.time < Utc::now() { - return Err(ApiError::InvalidInput( - "You cannot schedule a project to be released in the past!".to_string(), - )); - } - - if !scheduling_data.requested_status.can_be_requested() { - return Err(ApiError::InvalidInput( - "Specified requested status cannot be requested!".to_string(), - )); - } - - let string = info.into_inner().0; - let result = db_models::Project::get(&string, &**pool, &redis).await?; - - if let Some(project_item) = result { - let (team_member, organization_team_member) = - db_models::TeamMember::get_for_project_permissions( - &project_item.inner, - user.id.into(), - &**pool, - ) - .await?; - - let permissions = ProjectPermissions::get_permissions_by_role( - &user.role, - &team_member.clone(), - &organization_team_member.clone(), - ) - .unwrap_or_default(); - - if !user.role.is_mod() && !permissions.contains(ProjectPermissions::EDIT_DETAILS) { - return Err(ApiError::CustomAuthentication( - "You do not have permission to edit this project's scheduling data!".to_string(), - )); - } - - if !project_item.inner.status.is_approved() { - return Err(ApiError::InvalidInput( - "This project has not been approved yet. 
Submit to the queue with the private status to schedule it in the future!".to_string(), - )); - } - - sqlx::query!( - " - UPDATE mods - SET status = $1, approved = $2 - WHERE (id = $3) - ", - ProjectStatus::Scheduled.as_str(), - scheduling_data.time, - project_item.inner.id as db_ids::ProjectId, - ) - .execute(&**pool) - .await?; - - db_models::Project::clear_cache( - project_item.inner.id, - project_item.inner.slug, - None, - &redis, - ) - .await?; - - Ok(HttpResponse::NoContent().body("")) - } else { - Ok(HttpResponse::NotFound().body("")) - } + .await } #[derive(Serialize, Deserialize)] @@ -1674,113 +539,20 @@ pub async fn project_icon_edit( pool: web::Data, redis: web::Data, file_host: web::Data>, - mut payload: web::Payload, + payload: web::Payload, session_queue: web::Data, ) -> Result { - if let Some(content_type) = crate::util::ext::get_image_content_type(&ext.ext) { - let cdn_url = dotenvy::var("CDN_URL")?; - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::PROJECT_WRITE]), - ) - .await? - .1; - let string = info.into_inner().0; - - let project_item = db_models::Project::get(&string, &**pool, &redis) - .await? 
- .ok_or_else(|| { - ApiError::InvalidInput("The specified project does not exist!".to_string()) - })?; - - if !user.role.is_mod() { - let (team_member, organization_team_member) = - db_models::TeamMember::get_for_project_permissions( - &project_item.inner, - user.id.into(), - &**pool, - ) - .await?; - - // Hide the project - if team_member.is_none() && organization_team_member.is_none() { - return Err(ApiError::CustomAuthentication( - "The specified project does not exist!".to_string(), - )); - } - - let permissions = ProjectPermissions::get_permissions_by_role( - &user.role, - &team_member, - &organization_team_member, - ) - .unwrap_or_default(); - - if !permissions.contains(ProjectPermissions::EDIT_DETAILS) { - return Err(ApiError::CustomAuthentication( - "You don't have permission to edit this project's icon.".to_string(), - )); - } - } - - if let Some(icon) = project_item.inner.icon_url { - let name = icon.split(&format!("{cdn_url}/")).nth(1); - - if let Some(icon_path) = name { - file_host.delete_file_version("", icon_path).await?; - } - } - - let bytes = - read_from_payload(&mut payload, 262144, "Icons must be smaller than 256KiB").await?; - - let color = crate::util::img::get_color_from_img(&bytes)?; - - let hash = sha1::Sha1::from(&bytes).hexdigest(); - let project_id: ProjectId = project_item.inner.id.into(); - let upload_data = file_host - .upload_file( - content_type, - &format!("data/{}/{}.{}", project_id, hash, ext.ext), - bytes.freeze(), - ) - .await?; - - let mut transaction = pool.begin().await?; - - sqlx::query!( - " - UPDATE mods - SET icon_url = $1, color = $2 - WHERE (id = $3) - ", - format!("{}/{}", cdn_url, upload_data.file_name), - color.map(|x| x as i32), - project_item.inner.id as db_ids::ProjectId, - ) - .execute(&mut *transaction) - .await?; - - db_models::Project::clear_cache( - project_item.inner.id, - project_item.inner.slug, - None, - &redis, - ) - .await?; - - transaction.commit().await?; - - Ok(HttpResponse::NoContent().body("")) - 
} else { - Err(ApiError::InvalidInput(format!( - "Invalid format for project icon: {}", - ext.ext - ))) - } + v3::projects::project_icon_edit( + web::Query(v3::projects::Extension { ext: ext.ext }), + req, + info, + pool, + redis, + file_host, + payload, + session_queue, + ) + .await } #[delete("{id}/icon")] @@ -1792,80 +564,7 @@ pub async fn delete_project_icon( file_host: web::Data>, session_queue: web::Data, ) -> Result { - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::PROJECT_WRITE]), - ) - .await? - .1; - let string = info.into_inner().0; - - let project_item = db_models::Project::get(&string, &**pool, &redis) - .await? - .ok_or_else(|| { - ApiError::InvalidInput("The specified project does not exist!".to_string()) - })?; - - if !user.role.is_mod() { - let (team_member, organization_team_member) = - db_models::TeamMember::get_for_project_permissions( - &project_item.inner, - user.id.into(), - &**pool, - ) - .await?; - - // Hide the project - if team_member.is_none() && organization_team_member.is_none() { - return Err(ApiError::CustomAuthentication( - "The specified project does not exist!".to_string(), - )); - } - let permissions = ProjectPermissions::get_permissions_by_role( - &user.role, - &team_member, - &organization_team_member, - ) - .unwrap_or_default(); - - if !permissions.contains(ProjectPermissions::EDIT_DETAILS) { - return Err(ApiError::CustomAuthentication( - "You don't have permission to edit this project's icon.".to_string(), - )); - } - } - - let cdn_url = dotenvy::var("CDN_URL")?; - if let Some(icon) = project_item.inner.icon_url { - let name = icon.split(&format!("{cdn_url}/")).nth(1); - - if let Some(icon_path) = name { - file_host.delete_file_version("", icon_path).await?; - } - } - - let mut transaction = pool.begin().await?; - - sqlx::query!( - " - UPDATE mods - SET icon_url = NULL, color = NULL - WHERE (id = $1) - ", - project_item.inner.id as db_ids::ProjectId, - ) - .execute(&mut 
*transaction) - .await?; - - db_models::Project::clear_cache(project_item.inner.id, project_item.inner.slug, None, &redis) - .await?; - - transaction.commit().await?; - - Ok(HttpResponse::NoContent().body("")) + v3::projects::delete_project_icon(req, info, pool, redis, file_host, session_queue).await } #[derive(Serialize, Deserialize, Validate)] @@ -1888,136 +587,26 @@ pub async fn add_gallery_item( pool: web::Data, redis: web::Data, file_host: web::Data>, - mut payload: web::Payload, + payload: web::Payload, session_queue: web::Data, ) -> Result { - if let Some(content_type) = crate::util::ext::get_image_content_type(&ext.ext) { - item.validate() - .map_err(|err| ApiError::Validation(validation_errors_to_string(err, None)))?; - - let cdn_url = dotenvy::var("CDN_URL")?; - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::PROJECT_WRITE]), - ) - .await? - .1; - let string = info.into_inner().0; - - let project_item = db_models::Project::get(&string, &**pool, &redis) - .await? 
- .ok_or_else(|| { - ApiError::InvalidInput("The specified project does not exist!".to_string()) - })?; - - if project_item.gallery_items.len() > 64 { - return Err(ApiError::CustomAuthentication( - "You have reached the maximum of gallery images to upload.".to_string(), - )); - } - - if !user.role.is_admin() { - let (team_member, organization_team_member) = - db_models::TeamMember::get_for_project_permissions( - &project_item.inner, - user.id.into(), - &**pool, - ) - .await?; - - // Hide the project - if team_member.is_none() && organization_team_member.is_none() { - return Err(ApiError::CustomAuthentication( - "The specified project does not exist!".to_string(), - )); - } - - let permissions = ProjectPermissions::get_permissions_by_role( - &user.role, - &team_member, - &organization_team_member, - ) - .unwrap_or_default(); - - if !permissions.contains(ProjectPermissions::EDIT_DETAILS) { - return Err(ApiError::CustomAuthentication( - "You don't have permission to edit this project's gallery.".to_string(), - )); - } - } - - let bytes = read_from_payload( - &mut payload, - 5 * (1 << 20), - "Gallery image exceeds the maximum of 5MiB.", - ) - .await?; - let hash = sha1::Sha1::from(&bytes).hexdigest(); - - let id: ProjectId = project_item.inner.id.into(); - let url = format!("data/{}/images/{}.{}", id, hash, &*ext.ext); - - let file_url = format!("{cdn_url}/{url}"); - if project_item - .gallery_items - .iter() - .any(|x| x.image_url == file_url) - { - return Err(ApiError::InvalidInput( - "You may not upload duplicate gallery images!".to_string(), - )); - } - - file_host - .upload_file(content_type, &url, bytes.freeze()) - .await?; - - let mut transaction = pool.begin().await?; - - if item.featured { - sqlx::query!( - " - UPDATE mods_gallery - SET featured = $2 - WHERE mod_id = $1 - ", - project_item.inner.id as db_ids::ProjectId, - false, - ) - .execute(&mut *transaction) - .await?; - } - - let gallery_item = vec![db_models::project_item::GalleryItem { - image_url: 
file_url, + v3::projects::add_gallery_item( + web::Query(v3::projects::Extension { ext: ext.ext }), + req, + web::Query(v3::projects::GalleryCreateQuery { featured: item.featured, title: item.title, description: item.description, - created: Utc::now(), - ordering: item.ordering.unwrap_or(0), - }]; - GalleryItem::insert_many(gallery_item, project_item.inner.id, &mut transaction).await?; - - db_models::Project::clear_cache( - project_item.inner.id, - project_item.inner.slug, - None, - &redis, - ) - .await?; - - transaction.commit().await?; - - Ok(HttpResponse::NoContent().body("")) - } else { - Err(ApiError::InvalidInput(format!( - "Invalid format for gallery image: {}", - ext.ext - ))) - } + ordering: item.ordering, + }), + info, + pool, + redis, + file_host, + payload, + session_queue, + ) + .await } #[derive(Serialize, Deserialize, Validate)] @@ -2051,148 +640,21 @@ pub async fn edit_gallery_item( redis: web::Data, session_queue: web::Data, ) -> Result { - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::PROJECT_WRITE]), - ) - .await? - .1; - let string = info.into_inner().0; - - item.validate() - .map_err(|err| ApiError::Validation(validation_errors_to_string(err, None)))?; - - let project_item = db_models::Project::get(&string, &**pool, &redis) - .await? 
- .ok_or_else(|| { - ApiError::InvalidInput("The specified project does not exist!".to_string()) - })?; - - if !user.role.is_mod() { - let (team_member, organization_team_member) = - db_models::TeamMember::get_for_project_permissions( - &project_item.inner, - user.id.into(), - &**pool, - ) - .await?; - - // Hide the project - if team_member.is_none() && organization_team_member.is_none() { - return Err(ApiError::CustomAuthentication( - "The specified project does not exist!".to_string(), - )); - } - let permissions = ProjectPermissions::get_permissions_by_role( - &user.role, - &team_member, - &organization_team_member, - ) - .unwrap_or_default(); - - if !permissions.contains(ProjectPermissions::EDIT_DETAILS) { - return Err(ApiError::CustomAuthentication( - "You don't have permission to edit this project's gallery.".to_string(), - )); - } - } - let mut transaction = pool.begin().await?; - - let id = sqlx::query!( - " - SELECT id FROM mods_gallery - WHERE image_url = $1 - ", - item.url + v3::projects::edit_gallery_item( + req, + web::Query(v3::projects::GalleryEditQuery { + url: item.url, + featured: item.featured, + title: item.title, + description: item.description, + ordering: item.ordering, + }), + info, + pool, + redis, + session_queue, ) - .fetch_optional(&mut *transaction) - .await? - .ok_or_else(|| { - ApiError::InvalidInput(format!( - "Gallery item at URL {} is not part of the project's gallery.", - item.url - )) - })? 
- .id; - - let mut transaction = pool.begin().await?; - - if let Some(featured) = item.featured { - if featured { - sqlx::query!( - " - UPDATE mods_gallery - SET featured = $2 - WHERE mod_id = $1 - ", - project_item.inner.id as db_ids::ProjectId, - false, - ) - .execute(&mut *transaction) - .await?; - } - - sqlx::query!( - " - UPDATE mods_gallery - SET featured = $2 - WHERE id = $1 - ", - id, - featured - ) - .execute(&mut *transaction) - .await?; - } - if let Some(title) = item.title { - sqlx::query!( - " - UPDATE mods_gallery - SET title = $2 - WHERE id = $1 - ", - id, - title - ) - .execute(&mut *transaction) - .await?; - } - if let Some(description) = item.description { - sqlx::query!( - " - UPDATE mods_gallery - SET description = $2 - WHERE id = $1 - ", - id, - description - ) - .execute(&mut *transaction) - .await?; - } - if let Some(ordering) = item.ordering { - sqlx::query!( - " - UPDATE mods_gallery - SET ordering = $2 - WHERE id = $1 - ", - id, - ordering - ) - .execute(&mut *transaction) - .await?; - } - - db_models::Project::clear_cache(project_item.inner.id, project_item.inner.slug, None, &redis) - .await?; - - transaction.commit().await?; - - Ok(HttpResponse::NoContent().body("")) + .await } #[derive(Serialize, Deserialize)] @@ -2210,96 +672,16 @@ pub async fn delete_gallery_item( file_host: web::Data>, session_queue: web::Data, ) -> Result { - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::PROJECT_WRITE]), + v3::projects::delete_gallery_item( + req, + web::Query(v3::projects::GalleryDeleteQuery { url: item.url }), + info, + pool, + redis, + file_host, + session_queue, ) - .await? - .1; - let string = info.into_inner().0; - - let project_item = db_models::Project::get(&string, &**pool, &redis) - .await? 
- .ok_or_else(|| { - ApiError::InvalidInput("The specified project does not exist!".to_string()) - })?; - - if !user.role.is_mod() { - let (team_member, organization_team_member) = - db_models::TeamMember::get_for_project_permissions( - &project_item.inner, - user.id.into(), - &**pool, - ) - .await?; - - // Hide the project - if team_member.is_none() && organization_team_member.is_none() { - return Err(ApiError::CustomAuthentication( - "The specified project does not exist!".to_string(), - )); - } - - let permissions = ProjectPermissions::get_permissions_by_role( - &user.role, - &team_member, - &organization_team_member, - ) - .unwrap_or_default(); - - if !permissions.contains(ProjectPermissions::EDIT_DETAILS) { - return Err(ApiError::CustomAuthentication( - "You don't have permission to edit this project's gallery.".to_string(), - )); - } - } - let mut transaction = pool.begin().await?; - - let id = sqlx::query!( - " - SELECT id FROM mods_gallery - WHERE image_url = $1 - ", - item.url - ) - .fetch_optional(&mut *transaction) - .await? - .ok_or_else(|| { - ApiError::InvalidInput(format!( - "Gallery item at URL {} is not part of the project's gallery.", - item.url - )) - })? 
- .id; - - let cdn_url = dotenvy::var("CDN_URL")?; - let name = item.url.split(&format!("{cdn_url}/")).nth(1); - - if let Some(icon_path) = name { - file_host.delete_file_version("", icon_path).await?; - } - - let mut transaction = pool.begin().await?; - - sqlx::query!( - " - DELETE FROM mods_gallery - WHERE id = $1 - ", - id - ) - .execute(&mut *transaction) - .await?; - - db_models::Project::clear_cache(project_item.inner.id, project_item.inner.slug, None, &redis) - .await?; - - transaction.commit().await?; - - Ok(HttpResponse::NoContent().body("")) + .await } #[delete("{id}")] @@ -2311,83 +693,7 @@ pub async fn project_delete( config: web::Data, session_queue: web::Data, ) -> Result { - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::PROJECT_DELETE]), - ) - .await? - .1; - let string = info.into_inner().0; - - let project = db_models::Project::get(&string, &**pool, &redis) - .await? - .ok_or_else(|| { - ApiError::InvalidInput("The specified project does not exist!".to_string()) - })?; - - if !user.role.is_admin() { - let (team_member, organization_team_member) = - db_models::TeamMember::get_for_project_permissions( - &project.inner, - user.id.into(), - &**pool, - ) - .await?; - - // Hide the project - if team_member.is_none() && organization_team_member.is_none() { - return Err(ApiError::CustomAuthentication( - "The specified project does not exist!".to_string(), - )); - } - - let permissions = ProjectPermissions::get_permissions_by_role( - &user.role, - &team_member, - &organization_team_member, - ) - .unwrap_or_default(); - - if !permissions.contains(ProjectPermissions::DELETE_PROJECT) { - return Err(ApiError::CustomAuthentication( - "You don't have permission to delete this project!".to_string(), - )); - } - } - - let mut transaction = pool.begin().await?; - let context = ImageContext::Project { - project_id: Some(project.inner.id.into()), - }; - let uploaded_images = 
db_models::Image::get_many_contexted(context, &mut transaction).await?; - for image in uploaded_images { - image_item::Image::remove(image.id, &mut transaction, &redis).await?; - } - - sqlx::query!( - " - DELETE FROM collections_mods - WHERE mod_id = $1 - ", - project.inner.id as db_ids::ProjectId, - ) - .execute(&mut *transaction) - .await?; - - let result = db_models::Project::remove(project.inner.id, &mut transaction, &redis).await?; - - transaction.commit().await?; - - delete_from_index(project.inner.id.into(), config).await?; - - if result.is_some() { - Ok(HttpResponse::NoContent().body("")) - } else { - Ok(HttpResponse::NotFound().body("")) - } + v3::projects::project_delete(req, info, pool, redis, config, session_queue).await } #[post("{id}/follow")] @@ -2398,75 +704,7 @@ pub async fn project_follow( redis: web::Data, session_queue: web::Data, ) -> Result { - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::USER_WRITE]), - ) - .await? - .1; - let string = info.into_inner().0; - - let result = db_models::Project::get(&string, &**pool, &redis) - .await? - .ok_or_else(|| { - ApiError::InvalidInput("The specified project does not exist!".to_string()) - })?; - - let user_id: db_ids::UserId = user.id.into(); - let project_id: db_ids::ProjectId = result.inner.id; - - if !is_authorized(&result.inner, &Some(user), &pool).await? { - return Ok(HttpResponse::NotFound().body("")); - } - - let following = sqlx::query!( - " - SELECT EXISTS(SELECT 1 FROM mod_follows mf WHERE mf.follower_id = $1 AND mf.mod_id = $2) - ", - user_id as db_ids::UserId, - project_id as db_ids::ProjectId - ) - .fetch_one(&**pool) - .await? 
- .exists - .unwrap_or(false); - - if !following { - let mut transaction = pool.begin().await?; - - sqlx::query!( - " - UPDATE mods - SET follows = follows + 1 - WHERE id = $1 - ", - project_id as db_ids::ProjectId, - ) - .execute(&mut *transaction) - .await?; - - sqlx::query!( - " - INSERT INTO mod_follows (follower_id, mod_id) - VALUES ($1, $2) - ", - user_id as db_ids::UserId, - project_id as db_ids::ProjectId - ) - .execute(&mut *transaction) - .await?; - - transaction.commit().await?; - - Ok(HttpResponse::NoContent().body("")) - } else { - Err(ApiError::InvalidInput( - "You are already following this project!".to_string(), - )) - } + v3::projects::project_follow(req, info, pool, redis, session_queue).await } #[delete("{id}/follow")] @@ -2477,84 +715,5 @@ pub async fn project_unfollow( redis: web::Data, session_queue: web::Data, ) -> Result { - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::USER_WRITE]), - ) - .await? - .1; - let string = info.into_inner().0; - - let result = db_models::Project::get(&string, &**pool, &redis) - .await? - .ok_or_else(|| { - ApiError::InvalidInput("The specified project does not exist!".to_string()) - })?; - - let user_id: db_ids::UserId = user.id.into(); - let project_id = result.inner.id; - - let following = sqlx::query!( - " - SELECT EXISTS(SELECT 1 FROM mod_follows mf WHERE mf.follower_id = $1 AND mf.mod_id = $2) - ", - user_id as db_ids::UserId, - project_id as db_ids::ProjectId - ) - .fetch_one(&**pool) - .await? 
- .exists - .unwrap_or(false); - - if following { - let mut transaction = pool.begin().await?; - - sqlx::query!( - " - UPDATE mods - SET follows = follows - 1 - WHERE id = $1 - ", - project_id as db_ids::ProjectId, - ) - .execute(&mut *transaction) - .await?; - - sqlx::query!( - " - DELETE FROM mod_follows - WHERE follower_id = $1 AND mod_id = $2 - ", - user_id as db_ids::UserId, - project_id as db_ids::ProjectId - ) - .execute(&mut *transaction) - .await?; - - transaction.commit().await?; - - Ok(HttpResponse::NoContent().body("")) - } else { - Err(ApiError::InvalidInput( - "You are not following this project!".to_string(), - )) - } -} - -pub async fn delete_from_index( - id: ProjectId, - config: web::Data, -) -> Result<(), meilisearch_sdk::errors::Error> { - let client = meilisearch_sdk::client::Client::new(&*config.address, &*config.key); - - let indexes: IndexesResults = client.get_indexes().await?; - - for index in indexes.results { - index.delete_document(id.to_string()).await?; - } - - Ok(()) + v3::projects::project_unfollow(req, info, pool, redis, session_queue).await } diff --git a/src/routes/v2/reports.rs b/src/routes/v2/reports.rs index 2589778a..f167eceb 100644 --- a/src/routes/v2/reports.rs +++ b/src/routes/v2/reports.rs @@ -1,20 +1,9 @@ -use crate::auth::{check_is_moderator_from_headers, get_user_from_headers}; -use crate::database; -use crate::database::models::image_item; -use crate::database::models::thread_item::{ThreadBuilder, ThreadMessageBuilder}; use crate::database::redis::RedisPool; use crate::models::ids::ImageId; -use crate::models::ids::{base62_impl::parse_base62, ProjectId, UserId, VersionId}; -use crate::models::images::{Image, ImageContext}; -use crate::models::pats::Scopes; -use crate::models::reports::{ItemType, Report}; -use crate::models::threads::{MessageBody, ThreadType}; +use crate::models::reports::ItemType; use crate::queue::session::AuthQueue; -use crate::routes::ApiError; -use crate::util::img; +use crate::routes::{v3, 
ApiError}; use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse}; -use chrono::Utc; -use futures::StreamExt; use serde::Deserialize; use sqlx::PgPool; use validator::Validate; @@ -44,177 +33,11 @@ pub struct CreateReport { pub async fn report_create( req: HttpRequest, pool: web::Data, - mut body: web::Payload, + body: web::Payload, redis: web::Data, session_queue: web::Data, ) -> Result { - let mut transaction = pool.begin().await?; - - let current_user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::REPORT_CREATE]), - ) - .await? - .1; - - let mut bytes = web::BytesMut::new(); - while let Some(item) = body.next().await { - bytes.extend_from_slice(&item.map_err(|_| { - ApiError::InvalidInput("Error while parsing request payload!".to_string()) - })?); - } - let new_report: CreateReport = serde_json::from_slice(bytes.as_ref())?; - - let id = crate::database::models::generate_report_id(&mut transaction).await?; - let report_type = crate::database::models::categories::ReportType::get_id( - &new_report.report_type, - &mut *transaction, - ) - .await? 
- .ok_or_else(|| { - ApiError::InvalidInput(format!("Invalid report type: {}", new_report.report_type)) - })?; - - let mut report = crate::database::models::report_item::Report { - id, - report_type_id: report_type, - project_id: None, - version_id: None, - user_id: None, - body: new_report.body.clone(), - reporter: current_user.id.into(), - created: Utc::now(), - closed: false, - }; - - match new_report.item_type { - ItemType::Project => { - let project_id = ProjectId(parse_base62(new_report.item_id.as_str())?); - - let result = sqlx::query!( - "SELECT EXISTS(SELECT 1 FROM mods WHERE id = $1)", - project_id.0 as i64 - ) - .fetch_one(&mut *transaction) - .await?; - - if !result.exists.unwrap_or(false) { - return Err(ApiError::InvalidInput(format!( - "Project could not be found: {}", - new_report.item_id - ))); - } - - report.project_id = Some(project_id.into()) - } - ItemType::Version => { - let version_id = VersionId(parse_base62(new_report.item_id.as_str())?); - - let result = sqlx::query!( - "SELECT EXISTS(SELECT 1 FROM versions WHERE id = $1)", - version_id.0 as i64 - ) - .fetch_one(&mut *transaction) - .await?; - - if !result.exists.unwrap_or(false) { - return Err(ApiError::InvalidInput(format!( - "Version could not be found: {}", - new_report.item_id - ))); - } - - report.version_id = Some(version_id.into()) - } - ItemType::User => { - let user_id = UserId(parse_base62(new_report.item_id.as_str())?); - - let result = sqlx::query!( - "SELECT EXISTS(SELECT 1 FROM users WHERE id = $1)", - user_id.0 as i64 - ) - .fetch_one(&mut *transaction) - .await?; - - if !result.exists.unwrap_or(false) { - return Err(ApiError::InvalidInput(format!( - "User could not be found: {}", - new_report.item_id - ))); - } - - report.user_id = Some(user_id.into()) - } - ItemType::Unknown => { - return Err(ApiError::InvalidInput(format!( - "Invalid report item type: {}", - new_report.item_type.as_str() - ))) - } - } - - report.insert(&mut transaction).await?; - - for image_id in 
new_report.uploaded_images { - if let Some(db_image) = - image_item::Image::get(image_id.into(), &mut *transaction, &redis).await? - { - let image: Image = db_image.into(); - if !matches!(image.context, ImageContext::Report { .. }) - || image.context.inner_id().is_some() - { - return Err(ApiError::InvalidInput(format!( - "Image {} is not unused and in the 'report' context", - image_id - ))); - } - - sqlx::query!( - " - UPDATE uploaded_images - SET report_id = $1 - WHERE id = $2 - ", - id.0 as i64, - image_id.0 as i64 - ) - .execute(&mut *transaction) - .await?; - - image_item::Image::clear_cache(image.id.into(), &redis).await?; - } else { - return Err(ApiError::InvalidInput(format!( - "Image {} could not be found", - image_id - ))); - } - } - - let thread_id = ThreadBuilder { - type_: ThreadType::Report, - members: vec![], - project_id: None, - report_id: Some(report.id), - } - .insert(&mut transaction) - .await?; - - transaction.commit().await?; - - Ok(HttpResponse::Ok().json(Report { - id: id.into(), - report_type: new_report.report_type.clone(), - item_id: new_report.item_id.clone(), - item_type: new_report.item_type.clone(), - reporter: current_user.id, - body: new_report.body.clone(), - created: Utc::now(), - closed: false, - thread_id: thread_id.into(), - })) + v3::reports::report_create(req, pool, body, redis, session_queue).await } #[derive(Deserialize)] @@ -240,65 +63,17 @@ pub async fn reports( count: web::Query, session_queue: web::Data, ) -> Result { - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::REPORT_READ]), + v3::reports::reports( + req, + pool, + redis, + web::Query(v3::reports::ReportsRequestOptions { + count: count.count, + all: count.all, + }), + session_queue, ) - .await? 
- .1; - - use futures::stream::TryStreamExt; - - let report_ids = if user.role.is_mod() && count.all { - sqlx::query!( - " - SELECT id FROM reports - WHERE closed = FALSE - ORDER BY created ASC - LIMIT $1; - ", - count.count as i64 - ) - .fetch_many(&**pool) - .try_filter_map(|e| async { - Ok(e.right() - .map(|m| crate::database::models::ids::ReportId(m.id))) - }) - .try_collect::>() - .await? - } else { - sqlx::query!( - " - SELECT id FROM reports - WHERE closed = FALSE AND reporter = $1 - ORDER BY created ASC - LIMIT $2; - ", - user.id.0 as i64, - count.count as i64 - ) - .fetch_many(&**pool) - .try_filter_map(|e| async { - Ok(e.right() - .map(|m| crate::database::models::ids::ReportId(m.id))) - }) - .try_collect::>() - .await? - }; - - let query_reports = - crate::database::models::report_item::Report::get_many(&report_ids, &**pool).await?; - - let mut reports: Vec = Vec::new(); - - for x in query_reports { - reports.push(x.into()); - } - - Ok(HttpResponse::Ok().json(reports)) + .await } #[derive(Deserialize)] @@ -314,32 +89,14 @@ pub async fn reports_get( redis: web::Data, session_queue: web::Data, ) -> Result { - let report_ids: Vec = - serde_json::from_str::>(&ids.ids)? - .into_iter() - .map(|x| x.into()) - .collect(); - - let reports_data = - crate::database::models::report_item::Report::get_many(&report_ids, &**pool).await?; - - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::REPORT_READ]), + v3::reports::reports_get( + req, + web::Query(v3::reports::ReportIds { ids: ids.ids }), + pool, + redis, + session_queue, ) - .await? 
- .1; - - let all_reports = reports_data - .into_iter() - .filter(|x| user.role.is_mod() || x.reporter == user.id.into()) - .map(|x| x.into()) - .collect::>(); - - Ok(HttpResponse::Ok().json(all_reports)) + .await } #[get("report/{id}")] @@ -350,29 +107,7 @@ pub async fn report_get( info: web::Path<(crate::models::reports::ReportId,)>, session_queue: web::Data, ) -> Result { - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::REPORT_READ]), - ) - .await? - .1; - let id = info.into_inner().0.into(); - - let report = crate::database::models::report_item::Report::get(id, &**pool).await?; - - if let Some(report) = report { - if !user.role.is_mod() && report.reporter != user.id.into() { - return Ok(HttpResponse::NotFound().body("")); - } - - let report: Report = report.into(); - Ok(HttpResponse::Ok().json(report)) - } else { - Ok(HttpResponse::NotFound().body("")) - } + v3::reports::report_get(req, pool, redis, info, session_queue).await } #[derive(Deserialize, Validate)] @@ -391,101 +126,19 @@ pub async fn report_edit( session_queue: web::Data, edit_report: web::Json, ) -> Result { - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::REPORT_WRITE]), + let edit_report = edit_report.into_inner(); + v3::reports::report_edit( + req, + pool, + redis, + info, + session_queue, + web::Json(v3::reports::EditReport { + body: edit_report.body, + closed: edit_report.closed, + }), ) - .await? 
- .1; - let id = info.into_inner().0.into(); - - let report = crate::database::models::report_item::Report::get(id, &**pool).await?; - - if let Some(report) = report { - if !user.role.is_mod() && report.reporter != user.id.into() { - return Ok(HttpResponse::NotFound().body("")); - } - - let mut transaction = pool.begin().await?; - - if let Some(edit_body) = &edit_report.body { - sqlx::query!( - " - UPDATE reports - SET body = $1 - WHERE (id = $2) - ", - edit_body, - id as crate::database::models::ids::ReportId, - ) - .execute(&mut *transaction) - .await?; - } - - if let Some(edit_closed) = edit_report.closed { - if !user.role.is_mod() { - return Err(ApiError::InvalidInput( - "You cannot reopen a report!".to_string(), - )); - } - - ThreadMessageBuilder { - author_id: Some(user.id.into()), - body: if !edit_closed && report.closed { - MessageBody::ThreadReopen - } else { - MessageBody::ThreadClosure - }, - thread_id: report.thread_id, - } - .insert(&mut transaction) - .await?; - - sqlx::query!( - " - UPDATE reports - SET closed = $1 - WHERE (id = $2) - ", - edit_closed, - id as crate::database::models::ids::ReportId, - ) - .execute(&mut *transaction) - .await?; - - sqlx::query!( - " - UPDATE threads - SET show_in_mod_inbox = $1 - WHERE id = $2 - ", - !(edit_closed || report.closed), - report.thread_id.0, - ) - .execute(&mut *transaction) - .await?; - } - - // delete any images no longer in the body - let checkable_strings: Vec<&str> = vec![&edit_report.body] - .into_iter() - .filter_map(|x: &Option| x.as_ref().map(|y| y.as_str())) - .collect(); - let image_context = ImageContext::Report { - report_id: Some(id.into()), - }; - img::delete_unused_images(image_context, checkable_strings, &mut transaction, &redis) - .await?; - - transaction.commit().await?; - - Ok(HttpResponse::NoContent().body("")) - } else { - Ok(HttpResponse::NotFound().body("")) - } + .await } #[delete("report/{id}")] @@ -496,35 +149,5 @@ pub async fn report_delete( redis: web::Data, session_queue: 
web::Data, ) -> Result { - check_is_moderator_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::REPORT_DELETE]), - ) - .await?; - - let mut transaction = pool.begin().await?; - - let id = info.into_inner().0; - let context = ImageContext::Report { - report_id: Some(id), - }; - let uploaded_images = - database::models::Image::get_many_contexted(context, &mut transaction).await?; - for image in uploaded_images { - image_item::Image::remove(image.id, &mut transaction, &redis).await?; - } - - let result = - crate::database::models::report_item::Report::remove_full(id.into(), &mut transaction) - .await?; - transaction.commit().await?; - - if result.is_some() { - Ok(HttpResponse::NoContent().body("")) - } else { - Ok(HttpResponse::NotFound().body("")) - } + v3::reports::report_delete(req, pool, info, redis, session_queue).await } diff --git a/src/routes/v2/statistics.rs b/src/routes/v2/statistics.rs index a5220a8e..962bc39f 100644 --- a/src/routes/v2/statistics.rs +++ b/src/routes/v2/statistics.rs @@ -1,6 +1,5 @@ -use crate::routes::ApiError; +use crate::routes::{v3, ApiError}; use actix_web::{get, web, HttpResponse}; -use serde_json::json; use sqlx::PgPool; pub fn config(cfg: &mut web::ServiceConfig) { @@ -9,78 +8,5 @@ pub fn config(cfg: &mut web::ServiceConfig) { #[get("statistics")] pub async fn get_stats(pool: web::Data) -> Result { - let projects = sqlx::query!( - " - SELECT COUNT(id) - FROM mods - WHERE status = ANY($1) - ", - &*crate::models::projects::ProjectStatus::iterator() - .filter(|x| x.is_searchable()) - .map(|x| x.to_string()) - .collect::>(), - ) - .fetch_one(&**pool) - .await?; - - let versions = sqlx::query!( - " - SELECT COUNT(v.id) - FROM versions v - INNER JOIN mods m on v.mod_id = m.id AND m.status = ANY($1) - WHERE v.status = ANY($2) - ", - &*crate::models::projects::ProjectStatus::iterator() - .filter(|x| x.is_searchable()) - .map(|x| x.to_string()) - .collect::>(), - 
&*crate::models::projects::VersionStatus::iterator() - .filter(|x| x.is_listed()) - .map(|x| x.to_string()) - .collect::>(), - ) - .fetch_one(&**pool) - .await?; - - let authors = sqlx::query!( - " - SELECT COUNT(DISTINCT u.id) - FROM users u - INNER JOIN team_members tm on u.id = tm.user_id AND tm.accepted = TRUE - INNER JOIN mods m on tm.team_id = m.team_id AND m.status = ANY($1) - ", - &*crate::models::projects::ProjectStatus::iterator() - .filter(|x| x.is_searchable()) - .map(|x| x.to_string()) - .collect::>(), - ) - .fetch_one(&**pool) - .await?; - - let files = sqlx::query!( - " - SELECT COUNT(f.id) FROM files f - INNER JOIN versions v on f.version_id = v.id AND v.status = ANY($2) - INNER JOIN mods m on v.mod_id = m.id AND m.status = ANY($1) - ", - &*crate::models::projects::ProjectStatus::iterator() - .filter(|x| x.is_searchable()) - .map(|x| x.to_string()) - .collect::>(), - &*crate::models::projects::VersionStatus::iterator() - .filter(|x| x.is_listed()) - .map(|x| x.to_string()) - .collect::>(), - ) - .fetch_one(&**pool) - .await?; - - let json = json!({ - "projects": projects.count, - "versions": versions.count, - "authors": authors.count, - "files": files.count, - }); - - Ok(HttpResponse::Ok().json(json)) + v3::statistics::get_stats(pool).await } diff --git a/src/routes/v2/tags.rs b/src/routes/v2/tags.rs index 56ffaac5..2f4075ea 100644 --- a/src/routes/v2/tags.rs +++ b/src/routes/v2/tags.rs @@ -1,10 +1,12 @@ +use std::collections::HashMap; + use super::ApiError; -use crate::database::models; -use crate::database::models::categories::{DonationPlatform, ProjectType, ReportType, SideType}; +use crate::database::models::loader_fields::LoaderFieldEnumValue; use crate::database::redis::RedisPool; +use crate::routes::v3::tags::{LoaderData as LoaderDataV3, LoaderFieldsEnumQuery}; +use crate::routes::{v2_reroute, v3}; use actix_web::{get, web, HttpResponse}; use chrono::{DateTime, Utc}; -use models::categories::{Category, GameVersion, Loader}; use sqlx::PgPool; 
pub fn config(cfg: &mut web::ServiceConfig) { @@ -24,10 +26,10 @@ pub fn config(cfg: &mut web::ServiceConfig) { #[derive(serde::Serialize, serde::Deserialize)] pub struct CategoryData { - icon: String, - name: String, - project_type: String, - header: String, + pub icon: String, + pub name: String, + pub project_type: String, + pub header: String, } #[get("category")] @@ -35,25 +37,14 @@ pub async fn category_list( pool: web::Data, redis: web::Data, ) -> Result { - let results = Category::list(&**pool, &redis) - .await? - .into_iter() - .map(|x| CategoryData { - icon: x.icon, - name: x.category, - project_type: x.project_type, - header: x.header, - }) - .collect::>(); - - Ok(HttpResponse::Ok().json(results)) + v3::tags::category_list(pool, redis).await } #[derive(serde::Serialize, serde::Deserialize)] pub struct LoaderData { - icon: String, - name: String, - supported_project_types: Vec, + pub icon: String, + pub name: String, + pub supported_project_types: Vec, } #[get("loader")] @@ -61,22 +52,26 @@ pub async fn loader_list( pool: web::Data, redis: web::Data, ) -> Result { - let mut results = Loader::list(&**pool, &redis) - .await? 
- .into_iter() - .map(|x| LoaderData { - icon: x.icon, - name: x.loader, - supported_project_types: x.supported_project_types, - }) - .collect::>(); - - results.sort_by(|a, b| a.name.to_lowercase().cmp(&b.name.to_lowercase())); - - Ok(HttpResponse::Ok().json(results)) + let response = v3::tags::loader_list(pool, redis).await?; + + // Convert to V2 format + match v2_reroute::extract_ok_json::>(response).await { + Ok(loaders) => { + let loaders = loaders + .into_iter() + .map(|l| LoaderData { + icon: l.icon, + name: l.name, + supported_project_types: l.supported_project_types, + }) + .collect::>(); + Ok(HttpResponse::Ok().json(loaders)) + } + Err(response) => Ok(response), + } } -#[derive(serde::Serialize)] +#[derive(serde::Serialize, serde::Deserialize)] pub struct GameVersionQueryData { pub version: String, pub version_type: String, @@ -97,21 +92,50 @@ pub async fn game_version_list( query: web::Query, redis: web::Data, ) -> Result { - let results: Vec = if query.type_.is_some() || query.major.is_some() { - GameVersion::list_filter(query.type_.as_deref(), query.major, &**pool, &redis).await? - } else { - GameVersion::list(&**pool, &redis).await? 
+ let mut filters = HashMap::new(); + if let Some(type_) = &query.type_ { + filters.insert("type".to_string(), serde_json::json!(type_)); } - .into_iter() - .map(|x| GameVersionQueryData { - version: x.version, - version_type: x.type_, - date: x.created, - major: x.major, - }) - .collect(); - - Ok(HttpResponse::Ok().json(results)) + if let Some(major) = query.major { + filters.insert("major".to_string(), serde_json::json!(major)); + } + let response = v3::tags::loader_fields_list( + pool, + web::Query(LoaderFieldsEnumQuery { + loader_field: "game_versions".to_string(), + filters: Some(filters), + }), + redis, + ) + .await?; + + // Convert to V2 format + Ok( + match v2_reroute::extract_ok_json::>(response).await { + Ok(fields) => { + let fields = fields + .into_iter() + .map(|f| GameVersionQueryData { + version: f.value, + version_type: f + .metadata + .get("type") + .and_then(|m| m.as_str()) + .unwrap_or_default() + .to_string(), + date: f.created, + major: f + .metadata + .get("major") + .and_then(|m| m.as_bool()) + .unwrap_or_default(), + }) + .collect::>(); + HttpResponse::Ok().json(fields) + } + Err(response) => response, + }, + ) } #[derive(serde::Serialize)] @@ -122,17 +146,7 @@ pub struct License { #[get("license")] pub async fn license_list() -> HttpResponse { - let licenses = spdx::identifiers::LICENSES; - let mut results: Vec = Vec::with_capacity(licenses.len()); - - for (short, name, _) in licenses { - results.push(License { - short: short.to_string(), - name: name.to_string(), - }); - } - - HttpResponse::Ok().json(results) + v3::tags::license_list().await } #[derive(serde::Serialize)] @@ -143,25 +157,7 @@ pub struct LicenseText { #[get("license/{id}")] pub async fn license_text(params: web::Path<(String,)>) -> Result { - let license_id = params.into_inner().0; - - if license_id == *crate::models::projects::DEFAULT_LICENSE_ID { - return Ok(HttpResponse::Ok().json(LicenseText { - title: "All Rights Reserved".to_string(), - body: "All rights reserved 
unless explicitly stated.".to_string(), - })); - } - - if let Some(license) = spdx::license_id(&license_id) { - return Ok(HttpResponse::Ok().json(LicenseText { - title: license.full_name.to_string(), - body: license.text().to_string(), - })); - } - - Err(ApiError::InvalidInput( - "Invalid SPDX identifier specified".to_string(), - )) + v3::tags::license_text(params).await } #[derive(serde::Serialize)] @@ -175,15 +171,7 @@ pub async fn donation_platform_list( pool: web::Data, redis: web::Data, ) -> Result { - let results: Vec = DonationPlatform::list(&**pool, &redis) - .await? - .into_iter() - .map(|x| DonationPlatformQueryData { - short: x.short, - name: x.name, - }) - .collect(); - Ok(HttpResponse::Ok().json(results)) + v3::tags::donation_platform_list(pool, redis).await } #[get("report_type")] @@ -191,8 +179,7 @@ pub async fn report_type_list( pool: web::Data, redis: web::Data, ) -> Result { - let results = ReportType::list(&**pool, &redis).await?; - Ok(HttpResponse::Ok().json(results)) + v3::tags::report_type_list(pool, redis).await } #[get("project_type")] @@ -200,8 +187,7 @@ pub async fn project_type_list( pool: web::Data, redis: web::Data, ) -> Result { - let results = ProjectType::list(&**pool, &redis).await?; - Ok(HttpResponse::Ok().json(results)) + v3::tags::project_type_list(pool, redis).await } #[get("side_type")] @@ -209,6 +195,24 @@ pub async fn side_type_list( pool: web::Data, redis: web::Data, ) -> Result { - let results = SideType::list(&**pool, &redis).await?; - Ok(HttpResponse::Ok().json(results)) + let response = v3::tags::loader_fields_list( + pool, + web::Query(LoaderFieldsEnumQuery { + loader_field: "client_side".to_string(), // same as server_side + filters: None, + }), + redis, + ) + .await?; + + // Convert to V2 format + Ok( + match v2_reroute::extract_ok_json::>(response).await { + Ok(fields) => { + let fields = fields.into_iter().map(|f| f.value).collect::>(); + HttpResponse::Ok().json(fields) + } + Err(response) => response, + }, + ) } 
diff --git a/src/routes/v2/teams.rs b/src/routes/v2/teams.rs index 254e6609..87b2df16 100644 --- a/src/routes/v2/teams.rs +++ b/src/routes/v2/teams.rs @@ -1,15 +1,8 @@ -use crate::auth::{get_user_from_headers, is_authorized}; -use crate::database::models::notification_item::NotificationBuilder; -use crate::database::models::team_item::TeamAssociationId; -use crate::database::models::{Organization, Team, TeamMember, User}; use crate::database::redis::RedisPool; -use crate::database::Project; -use crate::models::notifications::NotificationBody; -use crate::models::pats::Scopes; use crate::models::teams::{OrganizationPermissions, ProjectPermissions, TeamId}; use crate::models::users::UserId; use crate::queue::session::AuthQueue; -use crate::routes::ApiError; +use crate::routes::{v3, ApiError}; use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse}; use rust_decimal::Decimal; use serde::{Deserialize, Serialize}; @@ -41,75 +34,7 @@ pub async fn team_members_get_project( redis: web::Data, session_queue: web::Data, ) -> Result { - let string = info.into_inner().0; - let project_data = crate::database::models::Project::get(&string, &**pool, &redis).await?; - - if let Some(project) = project_data { - let current_user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::PROJECT_READ]), - ) - .await - .map(|x| x.1) - .ok(); - - if !is_authorized(&project.inner, ¤t_user, &pool).await? 
{ - return Ok(HttpResponse::NotFound().body("")); - } - let mut members_data = - TeamMember::get_from_team_full(project.inner.team_id, &**pool, &redis).await?; - let mut member_user_ids = members_data.iter().map(|x| x.user_id).collect::>(); - - // Adds the organization's team members to the list of members, if the project is associated with an organization - if let Some(oid) = project.inner.organization_id { - let organization_data = Organization::get_id(oid, &**pool, &redis).await?; - if let Some(organization_data) = organization_data { - let org_team = - TeamMember::get_from_team_full(organization_data.team_id, &**pool, &redis) - .await?; - for member in org_team { - if !member_user_ids.contains(&member.user_id) { - member_user_ids.push(member.user_id); - members_data.push(member); - } - } - } - } - - let users = - crate::database::models::User::get_many_ids(&member_user_ids, &**pool, &redis).await?; - - let user_id = current_user.as_ref().map(|x| x.id.into()); - - let logged_in = current_user - .and_then(|user| { - members_data - .iter() - .find(|x| x.user_id == user.id.into() && x.accepted) - }) - .is_some(); - let team_members: Vec<_> = members_data - .into_iter() - .filter(|x| { - logged_in - || x.accepted - || user_id - .map(|y: crate::database::models::UserId| y == x.user_id) - .unwrap_or(false) - }) - .flat_map(|data| { - users.iter().find(|x| x.id == data.user_id).map(|user| { - crate::models::teams::TeamMember::from(data, user.clone(), !logged_in) - }) - }) - .collect(); - Ok(HttpResponse::Ok().json(team_members)) - } else { - Ok(HttpResponse::NotFound().body("")) - } + v3::teams::team_members_get_project(req, info, pool, redis, session_queue).await } #[get("{id}/members")] @@ -120,61 +45,7 @@ pub async fn team_members_get_organization( redis: web::Data, session_queue: web::Data, ) -> Result { - let string = info.into_inner().0; - let organization_data = - crate::database::models::Organization::get(&string, &**pool, &redis).await?; - - if let 
Some(organization) = organization_data { - let current_user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::ORGANIZATION_READ]), - ) - .await - .map(|x| x.1) - .ok(); - - let members_data = - TeamMember::get_from_team_full(organization.team_id, &**pool, &redis).await?; - let users = crate::database::models::User::get_many_ids( - &members_data.iter().map(|x| x.user_id).collect::>(), - &**pool, - &redis, - ) - .await?; - - let user_id = current_user.as_ref().map(|x| x.id.into()); - - let logged_in = current_user - .and_then(|user| { - members_data - .iter() - .find(|x| x.user_id == user.id.into() && x.accepted) - }) - .is_some(); - - let team_members: Vec<_> = members_data - .into_iter() - .filter(|x| { - logged_in - || x.accepted - || user_id - .map(|y: crate::database::models::UserId| y == x.user_id) - .unwrap_or(false) - }) - .flat_map(|data| { - users.iter().find(|x| x.id == data.user_id).map(|user| { - crate::models::teams::TeamMember::from(data, user.clone(), !logged_in) - }) - }) - .collect(); - - Ok(HttpResponse::Ok().json(team_members)) - } else { - Ok(HttpResponse::NotFound().body("")) - } + v3::teams::team_members_get_organization(req, info, pool, redis, session_queue).await } // Returns all members of a team, but not necessarily those of a project-team's organization (unlike team_members_get_project) @@ -186,53 +57,7 @@ pub async fn team_members_get( redis: web::Data, session_queue: web::Data, ) -> Result { - let id = info.into_inner().0; - let members_data = TeamMember::get_from_team_full(id.into(), &**pool, &redis).await?; - let users = crate::database::models::User::get_many_ids( - &members_data.iter().map(|x| x.user_id).collect::>(), - &**pool, - &redis, - ) - .await?; - - let current_user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::PROJECT_READ]), - ) - .await - .map(|x| x.1) - .ok(); - let user_id = current_user.as_ref().map(|x| x.id.into()); - - let logged_in = 
current_user - .and_then(|user| { - members_data - .iter() - .find(|x| x.user_id == user.id.into() && x.accepted) - }) - .is_some(); - - let team_members: Vec<_> = members_data - .into_iter() - .filter(|x| { - logged_in - || x.accepted - || user_id - .map(|y: crate::database::models::UserId| y == x.user_id) - .unwrap_or(false) - }) - .flat_map(|data| { - users - .iter() - .find(|x| x.id == data.user_id) - .map(|user| crate::models::teams::TeamMember::from(data, user.clone(), !logged_in)) - }) - .collect(); - - Ok(HttpResponse::Ok().json(team_members)) + v3::teams::team_members_get(req, info, pool, redis, session_queue).await } #[derive(Serialize, Deserialize)] @@ -248,61 +73,14 @@ pub async fn teams_get( redis: web::Data, session_queue: web::Data, ) -> Result { - use itertools::Itertools; - - let team_ids = serde_json::from_str::>(&ids.ids)? - .into_iter() - .map(|x| x.into()) - .collect::>(); - - let teams_data = TeamMember::get_from_team_full_many(&team_ids, &**pool, &redis).await?; - let users = crate::database::models::User::get_many_ids( - &teams_data.iter().map(|x| x.user_id).collect::>(), - &**pool, - &redis, - ) - .await?; - - let current_user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::PROJECT_READ]), + v3::teams::teams_get( + req, + web::Query(v3::teams::TeamIds { ids: ids.ids }), + pool, + redis, + session_queue, ) .await - .map(|x| x.1) - .ok(); - - let teams_groups = teams_data.into_iter().group_by(|data| data.team_id.0); - - let mut teams: Vec> = vec![]; - - for (_, member_data) in &teams_groups { - let members = member_data.collect::>(); - - let logged_in = current_user - .as_ref() - .and_then(|user| { - members - .iter() - .find(|x| x.user_id == user.id.into() && x.accepted) - }) - .is_some(); - - let team_members = members - .into_iter() - .filter(|x| logged_in || x.accepted) - .flat_map(|data| { - users.iter().find(|x| x.id == data.user_id).map(|user| { - crate::models::teams::TeamMember::from(data, 
user.clone(), !logged_in) - }) - }); - - teams.push(team_members.collect()); - } - - Ok(HttpResponse::Ok().json(teams)) } #[post("{id}/join")] @@ -313,53 +91,7 @@ pub async fn join_team( redis: web::Data, session_queue: web::Data, ) -> Result { - let team_id = info.into_inner().0.into(); - let current_user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::PROJECT_WRITE]), - ) - .await? - .1; - - let member = - TeamMember::get_from_user_id_pending(team_id, current_user.id.into(), &**pool).await?; - - if let Some(member) = member { - if member.accepted { - return Err(ApiError::InvalidInput( - "You are already a member of this team".to_string(), - )); - } - let mut transaction = pool.begin().await?; - - // Edit Team Member to set Accepted to True - TeamMember::edit_team_member( - team_id, - current_user.id.into(), - None, - None, - None, - Some(true), - None, - None, - &mut transaction, - ) - .await?; - - User::clear_project_cache(&[current_user.id.into()], &redis).await?; - TeamMember::clear_cache(team_id, &redis).await?; - - transaction.commit().await?; - } else { - return Err(ApiError::InvalidInput( - "There is no pending request from this team".to_string(), - )); - } - - Ok(HttpResponse::NoContent().body("")) + v3::teams::join_team(req, info, pool, redis, session_queue).await } fn default_role() -> String { @@ -394,165 +126,22 @@ pub async fn add_team_member( redis: web::Data, session_queue: web::Data, ) -> Result { - let team_id = info.into_inner().0.into(); - - let mut transaction = pool.begin().await?; - - let current_user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::PROJECT_WRITE]), + v3::teams::add_team_member( + req, + info, + pool, + web::Json(v3::teams::NewTeamMember { + user_id: new_member.user_id, + role: new_member.role.clone(), + permissions: new_member.permissions, + organization_permissions: new_member.organization_permissions, + payouts_split: 
new_member.payouts_split, + ordering: new_member.ordering, + }), + redis, + session_queue, ) - .await? - .1; - let team_association = Team::get_association(team_id, &**pool) - .await? - .ok_or_else(|| ApiError::InvalidInput("The team specified does not exist".to_string()))?; - let member = TeamMember::get_from_user_id(team_id, current_user.id.into(), &**pool).await?; - match team_association { - // If team is associated with a project, check if they have permissions to invite users to that project - TeamAssociationId::Project(pid) => { - let organization = - Organization::get_associated_organization_project_id(pid, &**pool).await?; - let organization_team_member = if let Some(organization) = &organization { - TeamMember::get_from_user_id(organization.team_id, current_user.id.into(), &**pool) - .await? - } else { - None - }; - let permissions = ProjectPermissions::get_permissions_by_role( - ¤t_user.role, - &member, - &organization_team_member, - ) - .unwrap_or_default(); - - if !permissions.contains(ProjectPermissions::MANAGE_INVITES) { - return Err(ApiError::CustomAuthentication( - "You don't have permission to invite users to this team".to_string(), - )); - } - if !permissions.contains(new_member.permissions) { - return Err(ApiError::InvalidInput( - "The new member has permissions that you don't have".to_string(), - )); - } - - if new_member.organization_permissions.is_some() { - return Err(ApiError::InvalidInput( - "The organization permissions of a project team member cannot be set" - .to_string(), - )); - } - } - // If team is associated with an organization, check if they have permissions to invite users to that organization - TeamAssociationId::Organization(_) => { - let organization_permissions = - OrganizationPermissions::get_permissions_by_role(¤t_user.role, &member) - .unwrap_or_default(); - if !organization_permissions.contains(OrganizationPermissions::MANAGE_INVITES) { - return Err(ApiError::CustomAuthentication( - "You don't have permission to invite 
users to this organization".to_string(), - )); - } - if !organization_permissions - .contains(new_member.organization_permissions.unwrap_or_default()) - { - return Err(ApiError::InvalidInput( - "The new member has organization permissions that you don't have".to_string(), - )); - } - if !organization_permissions - .contains(OrganizationPermissions::EDIT_MEMBER_DEFAULT_PERMISSIONS) - && !new_member.permissions.is_empty() - { - return Err(ApiError::CustomAuthentication( - "You do not have permission to give this user default project permissions. Ensure 'permissions' is set if it is not, and empty (0)." - .to_string(), - )); - } - } - } - - if new_member.role == crate::models::teams::OWNER_ROLE { - return Err(ApiError::InvalidInput( - "The `Owner` role is restricted to one person".to_string(), - )); - } - - if new_member.payouts_split < Decimal::ZERO || new_member.payouts_split > Decimal::from(5000) { - return Err(ApiError::InvalidInput( - "Payouts split must be between 0 and 5000!".to_string(), - )); - } - - let request = - TeamMember::get_from_user_id_pending(team_id, new_member.user_id.into(), &**pool).await?; - - if let Some(req) = request { - if req.accepted { - return Err(ApiError::InvalidInput( - "The user is already a member of that team".to_string(), - )); - } else { - return Err(ApiError::InvalidInput( - "There is already a pending member request for this user".to_string(), - )); - } - } - crate::database::models::User::get_id(new_member.user_id.into(), &**pool, &redis) - .await? 
- .ok_or_else(|| ApiError::InvalidInput("An invalid User ID specified".to_string()))?; - - let new_id = crate::database::models::ids::generate_team_member_id(&mut transaction).await?; - TeamMember { - id: new_id, - team_id, - user_id: new_member.user_id.into(), - role: new_member.role.clone(), - permissions: new_member.permissions, - organization_permissions: new_member.organization_permissions, - accepted: false, - payouts_split: new_member.payouts_split, - ordering: new_member.ordering, - } - .insert(&mut transaction) - .await?; - - match team_association { - TeamAssociationId::Project(pid) => { - NotificationBuilder { - body: NotificationBody::TeamInvite { - project_id: pid.into(), - team_id: team_id.into(), - invited_by: current_user.id, - role: new_member.role.clone(), - }, - } - .insert(new_member.user_id.into(), &mut transaction, &redis) - .await?; - } - TeamAssociationId::Organization(oid) => { - NotificationBuilder { - body: NotificationBody::OrganizationInvite { - organization_id: oid.into(), - team_id: team_id.into(), - invited_by: current_user.id, - role: new_member.role.clone(), - }, - } - .insert(new_member.user_id.into(), &mut transaction, &redis) - .await?; - } - } - - TeamMember::clear_cache(team_id, &redis).await?; - - transaction.commit().await?; - - Ok(HttpResponse::NoContent().body("")) + .await } #[derive(Serialize, Deserialize, Clone)] @@ -573,143 +162,21 @@ pub async fn edit_team_member( redis: web::Data, session_queue: web::Data, ) -> Result { - let ids = info.into_inner(); - let id = ids.0.into(); - let user_id = ids.1.into(); - - let current_user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::PROJECT_WRITE]), - ) - .await? - .1; - - let team_association = Team::get_association(id, &**pool) - .await? 
- .ok_or_else(|| ApiError::InvalidInput("The team specified does not exist".to_string()))?; - let member = TeamMember::get_from_user_id(id, current_user.id.into(), &**pool).await?; - let edit_member_db = TeamMember::get_from_user_id_pending(id, user_id, &**pool) - .await? - .ok_or_else(|| { - ApiError::CustomAuthentication( - "You don't have permission to edit members of this team".to_string(), - ) - })?; - - let mut transaction = pool.begin().await?; - - if &*edit_member_db.role == crate::models::teams::OWNER_ROLE - && (edit_member.role.is_some() || edit_member.permissions.is_some()) - { - return Err(ApiError::InvalidInput( - "The owner's permission and role of a team cannot be edited".to_string(), - )); - } - - match team_association { - TeamAssociationId::Project(project_id) => { - let organization = - Organization::get_associated_organization_project_id(project_id, &**pool).await?; - let organization_team_member = if let Some(organization) = &organization { - TeamMember::get_from_user_id(organization.team_id, current_user.id.into(), &**pool) - .await? 
- } else { - None - }; - let permissions = ProjectPermissions::get_permissions_by_role( - ¤t_user.role, - &member.clone(), - &organization_team_member, - ) - .unwrap_or_default(); - if !permissions.contains(ProjectPermissions::EDIT_MEMBER) { - return Err(ApiError::CustomAuthentication( - "You don't have permission to edit members of this team".to_string(), - )); - } - - if let Some(new_permissions) = edit_member.permissions { - if !permissions.contains(new_permissions) { - return Err(ApiError::InvalidInput( - "The new permissions have permissions that you don't have".to_string(), - )); - } - } - - if edit_member.organization_permissions.is_some() { - return Err(ApiError::InvalidInput( - "The organization permissions of a project team member cannot be edited" - .to_string(), - )); - } - } - TeamAssociationId::Organization(_) => { - let organization_permissions = - OrganizationPermissions::get_permissions_by_role(¤t_user.role, &member) - .unwrap_or_default(); - - if !organization_permissions.contains(OrganizationPermissions::EDIT_MEMBER) { - return Err(ApiError::CustomAuthentication( - "You don't have permission to edit members of this team".to_string(), - )); - } - - if let Some(new_permissions) = edit_member.organization_permissions { - if !organization_permissions.contains(new_permissions) { - return Err(ApiError::InvalidInput( - "The new organization permissions have permissions that you don't have" - .to_string(), - )); - } - } - - if edit_member.permissions.is_some() - && !organization_permissions - .contains(OrganizationPermissions::EDIT_MEMBER_DEFAULT_PERMISSIONS) - { - return Err(ApiError::CustomAuthentication( - "You do not have permission to give this user default project permissions." 
- .to_string(), - )); - } - } - } - - if let Some(payouts_split) = edit_member.payouts_split { - if payouts_split < Decimal::ZERO || payouts_split > Decimal::from(5000) { - return Err(ApiError::InvalidInput( - "Payouts split must be between 0 and 5000!".to_string(), - )); - } - } - - if edit_member.role.as_deref() == Some(crate::models::teams::OWNER_ROLE) { - return Err(ApiError::InvalidInput( - "The `Owner` role is restricted to one person".to_string(), - )); - } - - TeamMember::edit_team_member( - id, - user_id, - edit_member.permissions, - edit_member.organization_permissions, - edit_member.role.clone(), - None, - edit_member.payouts_split, - edit_member.ordering, - &mut transaction, + v3::teams::edit_team_member( + req, + info, + pool, + web::Json(v3::teams::EditTeamMember { + permissions: edit_member.permissions, + organization_permissions: edit_member.organization_permissions, + role: edit_member.role.clone(), + payouts_split: edit_member.payouts_split, + ordering: edit_member.ordering, + }), + redis, + session_queue, ) - .await?; - - TeamMember::clear_cache(id, &redis).await?; - - transaction.commit().await?; - - Ok(HttpResponse::NoContent().body("")) + .await } #[derive(Deserialize)] @@ -726,94 +193,17 @@ pub async fn transfer_ownership( redis: web::Data, session_queue: web::Data, ) -> Result { - let id = info.into_inner().0; - - let current_user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::PROJECT_WRITE]), + v3::teams::transfer_ownership( + req, + info, + pool, + web::Json(v3::teams::TransferOwnership { + user_id: new_owner.user_id, + }), + redis, + session_queue, ) - .await? 
- .1; - - // Forbid transferring ownership of a project team that is owned by an organization - // These are owned by the organization owner, and must be removed from the organization first - let pid = Team::get_association(id.into(), &**pool).await?; - if let Some(TeamAssociationId::Project(pid)) = pid { - let result = Project::get_id(pid, &**pool, &redis).await?; - if let Some(project_item) = result { - if project_item.inner.organization_id.is_some() { - return Err(ApiError::InvalidInput( - "You cannot transfer ownership of a project team that is owend by an organization" - .to_string(), - )); - } - } - } - - if !current_user.role.is_admin() { - let member = TeamMember::get_from_user_id(id.into(), current_user.id.into(), &**pool) - .await? - .ok_or_else(|| { - ApiError::CustomAuthentication( - "You don't have permission to edit members of this team".to_string(), - ) - })?; - - if member.role != crate::models::teams::OWNER_ROLE { - return Err(ApiError::CustomAuthentication( - "You don't have permission to edit the ownership of this team".to_string(), - )); - } - } - - let new_member = TeamMember::get_from_user_id(id.into(), new_owner.user_id.into(), &**pool) - .await? 
- .ok_or_else(|| { - ApiError::InvalidInput("The new owner specified does not exist".to_string()) - })?; - - if !new_member.accepted { - return Err(ApiError::InvalidInput( - "You can only transfer ownership to members who are currently in your team".to_string(), - )); - } - - let mut transaction = pool.begin().await?; - - TeamMember::edit_team_member( - id.into(), - current_user.id.into(), - None, - None, - Some(crate::models::teams::DEFAULT_ROLE.to_string()), - None, - None, - None, - &mut transaction, - ) - .await?; - - TeamMember::edit_team_member( - id.into(), - new_owner.user_id.into(), - Some(ProjectPermissions::all()), - Some(OrganizationPermissions::all()), - Some(crate::models::teams::OWNER_ROLE.to_string()), - None, - None, - None, - &mut transaction, - ) - .await?; - - TeamMember::clear_cache(id.into(), &redis).await?; - - transaction.commit().await?; - - Ok(HttpResponse::NoContent().body("")) + .await } #[delete("{id}/members/{user_id}")] @@ -824,126 +214,5 @@ pub async fn remove_team_member( redis: web::Data, session_queue: web::Data, ) -> Result { - let ids = info.into_inner(); - let id = ids.0.into(); - let user_id = ids.1.into(); - - let current_user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::PROJECT_WRITE]), - ) - .await? - .1; - - let team_association = Team::get_association(id, &**pool) - .await? 
- .ok_or_else(|| ApiError::InvalidInput("The team specified does not exist".to_string()))?; - let member = TeamMember::get_from_user_id(id, current_user.id.into(), &**pool).await?; - - let delete_member = TeamMember::get_from_user_id_pending(id, user_id, &**pool).await?; - - if let Some(delete_member) = delete_member { - if delete_member.role == crate::models::teams::OWNER_ROLE { - // The owner cannot be removed from a team - return Err(ApiError::CustomAuthentication( - "The owner can't be removed from a team".to_string(), - )); - } - - let mut transaction = pool.begin().await?; - - // Organization attached to a project this team is attached to - match team_association { - TeamAssociationId::Project(pid) => { - let organization = - Organization::get_associated_organization_project_id(pid, &**pool).await?; - let organization_team_member = if let Some(organization) = &organization { - TeamMember::get_from_user_id( - organization.team_id, - current_user.id.into(), - &**pool, - ) - .await? - } else { - None - }; - let permissions = ProjectPermissions::get_permissions_by_role( - ¤t_user.role, - &member, - &organization_team_member, - ) - .unwrap_or_default(); - - if delete_member.accepted { - // Members other than the owner can either leave the team, or be - // removed by a member with the REMOVE_MEMBER permission. 
- if Some(delete_member.user_id) == member.as_ref().map(|m| m.user_id) - || permissions.contains(ProjectPermissions::REMOVE_MEMBER) - // true as if the permission exists, but the member does not, they are part of an org - { - TeamMember::delete(id, user_id, &mut transaction).await?; - } else { - return Err(ApiError::CustomAuthentication( - "You do not have permission to remove a member from this team" - .to_string(), - )); - } - } else if Some(delete_member.user_id) == member.as_ref().map(|m| m.user_id) - || permissions.contains(ProjectPermissions::MANAGE_INVITES) - // true as if the permission exists, but the member does not, they are part of an org - { - // This is a pending invite rather than a member, so the - // user being invited or team members with the MANAGE_INVITES - // permission can remove it. - TeamMember::delete(id, user_id, &mut transaction).await?; - } else { - return Err(ApiError::CustomAuthentication( - "You do not have permission to cancel a team invite".to_string(), - )); - } - } - TeamAssociationId::Organization(_) => { - let organization_permissions = - OrganizationPermissions::get_permissions_by_role(¤t_user.role, &member) - .unwrap_or_default(); - // Organization teams requires a TeamMember, so we can 'unwrap' - if delete_member.accepted { - // Members other than the owner can either leave the team, or be - // removed by a member with the REMOVE_MEMBER permission. 
- if Some(delete_member.user_id) == member.map(|m| m.user_id) - || organization_permissions.contains(OrganizationPermissions::REMOVE_MEMBER) - { - TeamMember::delete(id, user_id, &mut transaction).await?; - } else { - return Err(ApiError::CustomAuthentication( - "You do not have permission to remove a member from this organization" - .to_string(), - )); - } - } else if Some(delete_member.user_id) == member.map(|m| m.user_id) - || organization_permissions.contains(OrganizationPermissions::MANAGE_INVITES) - { - // This is a pending invite rather than a member, so the - // user being invited or team members with the MANAGE_INVITES - // permission can remove it. - TeamMember::delete(id, user_id, &mut transaction).await?; - } else { - return Err(ApiError::CustomAuthentication( - "You do not have permission to cancel an organization invite".to_string(), - )); - } - } - } - - TeamMember::clear_cache(id, &redis).await?; - User::clear_project_cache(&[delete_member.user_id], &redis).await?; - - transaction.commit().await?; - Ok(HttpResponse::NoContent().body("")) - } else { - Ok(HttpResponse::NotFound().body("")) - } + v3::teams::remove_team_member(req, info, pool, redis, session_queue).await } diff --git a/src/routes/v2/threads.rs b/src/routes/v2/threads.rs index 44e76efc..6b7e8c2b 100644 --- a/src/routes/v2/threads.rs +++ b/src/routes/v2/threads.rs @@ -1,23 +1,12 @@ use std::sync::Arc; -use crate::auth::{check_is_moderator_from_headers, get_user_from_headers}; -use crate::database; -use crate::database::models::image_item; -use crate::database::models::notification_item::NotificationBuilder; -use crate::database::models::thread_item::ThreadMessageBuilder; use crate::database::redis::RedisPool; use crate::file_hosting::FileHost; use crate::models::ids::ThreadMessageId; -use crate::models::images::{Image, ImageContext}; -use crate::models::notifications::NotificationBody; -use crate::models::pats::Scopes; -use crate::models::projects::ProjectStatus; -use 
crate::models::threads::{MessageBody, Thread, ThreadId, ThreadType}; -use crate::models::users::User; +use crate::models::threads::{MessageBody, ThreadId}; use crate::queue::session::AuthQueue; -use crate::routes::ApiError; +use crate::routes::{v3, ApiError}; use actix_web::{delete, get, post, web, HttpRequest, HttpResponse}; -use futures::TryStreamExt; use serde::Deserialize; use sqlx::PgPool; @@ -33,194 +22,6 @@ pub fn config(cfg: &mut web::ServiceConfig) { cfg.service(threads_get); } -pub async fn is_authorized_thread( - thread: &database::models::Thread, - user: &User, - pool: &PgPool, -) -> Result { - if user.role.is_mod() { - return Ok(true); - } - - let user_id: database::models::UserId = user.id.into(); - Ok(match thread.type_ { - ThreadType::Report => { - if let Some(report_id) = thread.report_id { - let report_exists = sqlx::query!( - "SELECT EXISTS(SELECT 1 FROM reports WHERE id = $1 AND reporter = $2)", - report_id as database::models::ids::ReportId, - user_id as database::models::ids::UserId, - ) - .fetch_one(pool) - .await? - .exists; - - report_exists.unwrap_or(false) - } else { - false - } - } - ThreadType::Project => { - if let Some(project_id) = thread.project_id { - let project_exists = sqlx::query!( - "SELECT EXISTS(SELECT 1 FROM mods m INNER JOIN team_members tm ON tm.team_id = m.team_id AND tm.user_id = $2 WHERE m.id = $1)", - project_id as database::models::ids::ProjectId, - user_id as database::models::ids::UserId, - ) - .fetch_one(pool) - .await? 
- .exists; - - project_exists.unwrap_or(false) - } else { - false - } - } - ThreadType::DirectMessage => thread.members.contains(&user_id), - }) -} - -pub async fn filter_authorized_threads( - threads: Vec, - user: &User, - pool: &web::Data, - redis: &RedisPool, -) -> Result, ApiError> { - let user_id: database::models::UserId = user.id.into(); - - let mut return_threads = Vec::new(); - let mut check_threads = Vec::new(); - - for thread in threads { - if user.role.is_mod() - || (thread.type_ == ThreadType::DirectMessage && thread.members.contains(&user_id)) - { - return_threads.push(thread); - } else { - check_threads.push(thread); - } - } - - if !check_threads.is_empty() { - let project_thread_ids = check_threads - .iter() - .filter(|x| x.type_ == ThreadType::Project) - .flat_map(|x| x.project_id.map(|x| x.0)) - .collect::>(); - - if !project_thread_ids.is_empty() { - sqlx::query!( - " - SELECT m.id FROM mods m - INNER JOIN team_members tm ON tm.team_id = m.team_id AND user_id = $2 - WHERE m.id = ANY($1) - ", - &*project_thread_ids, - user_id as database::models::ids::UserId, - ) - .fetch_many(&***pool) - .try_for_each(|e| { - if let Some(row) = e.right() { - check_threads.retain(|x| { - let bool = x.project_id.map(|x| x.0) == Some(row.id); - - if bool { - return_threads.push(x.clone()); - } - - !bool - }); - } - - futures::future::ready(Ok(())) - }) - .await?; - } - - let report_thread_ids = check_threads - .iter() - .filter(|x| x.type_ == ThreadType::Report) - .flat_map(|x| x.report_id.map(|x| x.0)) - .collect::>(); - - if !report_thread_ids.is_empty() { - sqlx::query!( - " - SELECT id FROM reports - WHERE id = ANY($1) AND reporter = $2 - ", - &*report_thread_ids, - user_id as database::models::ids::UserId, - ) - .fetch_many(&***pool) - .try_for_each(|e| { - if let Some(row) = e.right() { - check_threads.retain(|x| { - let bool = x.report_id.map(|x| x.0) == Some(row.id); - - if bool { - return_threads.push(x.clone()); - } - - !bool - }); - } - - 
futures::future::ready(Ok(())) - }) - .await?; - } - } - - let mut user_ids = return_threads - .iter() - .flat_map(|x| x.members.clone()) - .collect::>(); - user_ids.append( - &mut return_threads - .iter() - .flat_map(|x| { - x.messages - .iter() - .filter_map(|x| x.author_id) - .collect::>() - }) - .collect::>(), - ); - - let users: Vec = database::models::User::get_many_ids(&user_ids, &***pool, redis) - .await? - .into_iter() - .map(From::from) - .collect(); - - let mut final_threads = Vec::new(); - - for thread in return_threads { - let mut authors = thread.members.clone(); - - authors.append( - &mut thread - .messages - .iter() - .filter_map(|x| x.author_id) - .collect::>(), - ); - - final_threads.push(Thread::from( - thread, - users - .iter() - .filter(|x| authors.contains(&x.id.into())) - .cloned() - .collect(), - user, - )); - } - - Ok(final_threads) -} - #[get("{id}")] pub async fn thread_get( req: HttpRequest, @@ -229,42 +30,7 @@ pub async fn thread_get( redis: web::Data, session_queue: web::Data, ) -> Result { - let string = info.into_inner().0.into(); - - let thread_data = database::models::Thread::get(string, &**pool).await?; - - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::THREAD_READ]), - ) - .await? - .1; - - if let Some(mut data) = thread_data { - if is_authorized_thread(&data, &user, &pool).await? { - let authors = &mut data.members; - - authors.append( - &mut data - .messages - .iter() - .filter_map(|x| x.author_id) - .collect::>(), - ); - - let users: Vec = database::models::User::get_many_ids(authors, &**pool, &redis) - .await? 
- .into_iter() - .map(From::from) - .collect(); - - return Ok(HttpResponse::Ok().json(Thread::from(data, users, &user))); - } - } - Ok(HttpResponse::NotFound().body("")) + v3::threads::thread_get(req, info, pool, redis, session_queue).await } #[derive(Deserialize)] @@ -280,27 +46,14 @@ pub async fn threads_get( redis: web::Data, session_queue: web::Data, ) -> Result { - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::THREAD_READ]), + v3::threads::threads_get( + req, + web::Query(v3::threads::ThreadIds { ids: ids.ids }), + pool, + redis, + session_queue, ) - .await? - .1; - - let thread_ids: Vec = - serde_json::from_str::>(&ids.ids)? - .into_iter() - .map(|x| x.into()) - .collect(); - - let threads_data = database::models::Thread::get_many(&thread_ids, &**pool).await?; - - let threads = filter_authorized_threads(threads_data, &user, &pool, &redis).await?; - - Ok(HttpResponse::Ok().json(threads)) + .await } #[derive(Deserialize)] @@ -317,193 +70,18 @@ pub async fn thread_send_message( redis: web::Data, session_queue: web::Data, ) -> Result { - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::THREAD_WRITE]), + let new_message = new_message.into_inner(); + v3::threads::thread_send_message( + req, + info, + pool, + web::Json(v3::threads::NewThreadMessage { + body: new_message.body, + }), + redis, + session_queue, ) - .await? - .1; - - let string: database::models::ThreadId = info.into_inner().0.into(); - - if let MessageBody::Text { - body, - replying_to, - private, - .. 
- } = &new_message.body - { - if body.len() > 65536 { - return Err(ApiError::InvalidInput( - "Input body is too long!".to_string(), - )); - } - - if *private && !user.role.is_mod() { - return Err(ApiError::InvalidInput( - "You are not allowed to send private messages!".to_string(), - )); - } - - if let Some(replying_to) = replying_to { - let thread_message = - database::models::ThreadMessage::get((*replying_to).into(), &**pool).await?; - - if let Some(thread_message) = thread_message { - if thread_message.thread_id != string { - return Err(ApiError::InvalidInput( - "Message replied to is from another thread!".to_string(), - )); - } - } else { - return Err(ApiError::InvalidInput( - "Message replied to does not exist!".to_string(), - )); - } - } - } else { - return Err(ApiError::InvalidInput( - "You may only send text messages through this route!".to_string(), - )); - } - - let result = database::models::Thread::get(string, &**pool).await?; - - if let Some(thread) = result { - if !is_authorized_thread(&thread, &user, &pool).await? 
{ - return Ok(HttpResponse::NotFound().body("")); - } - - let mut transaction = pool.begin().await?; - - let id = ThreadMessageBuilder { - author_id: Some(user.id.into()), - body: new_message.body.clone(), - thread_id: thread.id, - } - .insert(&mut transaction) - .await?; - - let mod_notif = if let Some(project_id) = thread.project_id { - let project = database::models::Project::get_id(project_id, &**pool, &redis).await?; - - if let Some(project) = project { - if project.inner.status != ProjectStatus::Processing && user.role.is_mod() { - let members = database::models::TeamMember::get_from_team_full( - project.inner.team_id, - &**pool, - &redis, - ) - .await?; - - NotificationBuilder { - body: NotificationBody::ModeratorMessage { - thread_id: thread.id.into(), - message_id: id.into(), - project_id: Some(project.inner.id.into()), - report_id: None, - }, - } - .insert_many( - members.into_iter().map(|x| x.user_id).collect(), - &mut transaction, - &redis, - ) - .await?; - } - } - - !user.role.is_mod() - } else if let Some(report_id) = thread.report_id { - let report = database::models::report_item::Report::get(report_id, &**pool).await?; - - if let Some(report) = report { - if report.closed && !user.role.is_mod() { - return Err(ApiError::InvalidInput( - "You may not reply to a closed report".to_string(), - )); - } - - if user.id != report.reporter.into() { - NotificationBuilder { - body: NotificationBody::ModeratorMessage { - thread_id: thread.id.into(), - message_id: id.into(), - project_id: None, - report_id: Some(report.id.into()), - }, - } - .insert(report.reporter, &mut transaction, &redis) - .await?; - } - } - - !user.role.is_mod() - } else { - false - }; - - sqlx::query!( - " - UPDATE threads - SET show_in_mod_inbox = $1 - WHERE id = $2 - ", - mod_notif, - thread.id.0, - ) - .execute(&mut *transaction) - .await?; - - if let MessageBody::Text { - associated_images, .. 
- } = &new_message.body - { - for image_id in associated_images { - if let Some(db_image) = - image_item::Image::get((*image_id).into(), &mut *transaction, &redis).await? - { - let image: Image = db_image.into(); - if !matches!(image.context, ImageContext::ThreadMessage { .. }) - || image.context.inner_id().is_some() - { - return Err(ApiError::InvalidInput(format!( - "Image {} is not unused and in the 'thread_message' context", - image_id - ))); - } - - sqlx::query!( - " - UPDATE uploaded_images - SET thread_message_id = $1 - WHERE id = $2 - ", - thread.id.0, - image_id.0 as i64 - ) - .execute(&mut *transaction) - .await?; - - image_item::Image::clear_cache(image.id.into(), &redis).await?; - } else { - return Err(ApiError::InvalidInput(format!( - "Image {} does not exist", - image_id - ))); - } - } - } - - transaction.commit().await?; - - Ok(HttpResponse::NoContent().body("")) - } else { - Ok(HttpResponse::NotFound().body("")) - } + .await } #[get("inbox")] @@ -513,30 +91,7 @@ pub async fn moderation_inbox( redis: web::Data, session_queue: web::Data, ) -> Result { - let user = check_is_moderator_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::THREAD_READ]), - ) - .await?; - - let ids = sqlx::query!( - " - SELECT id - FROM threads - WHERE show_in_mod_inbox = TRUE - " - ) - .fetch_many(&**pool) - .try_filter_map(|e| async { Ok(e.right().map(|m| database::models::ThreadId(m.id))) }) - .try_collect::>() - .await?; - - let threads_data = database::models::Thread::get_many(&ids, &**pool).await?; - let threads = filter_authorized_threads(threads_data, &user, &pool, &redis).await?; - Ok(HttpResponse::Ok().json(threads)) + v3::threads::moderation_inbox(req, pool, redis, session_queue).await } #[post("{id}/read")] @@ -547,32 +102,7 @@ pub async fn thread_read( redis: web::Data, session_queue: web::Data, ) -> Result { - check_is_moderator_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::THREAD_READ]), - ) - .await?; - 
- let id = info.into_inner().0; - let mut transaction = pool.begin().await?; - - sqlx::query!( - " - UPDATE threads - SET show_in_mod_inbox = FALSE - WHERE id = $1 - ", - id.0 as i64, - ) - .execute(&mut *transaction) - .await?; - - transaction.commit().await?; - - Ok(HttpResponse::NoContent().body("")) + v3::threads::thread_read(req, info, pool, redis, session_queue).await } #[delete("{id}")] @@ -584,45 +114,5 @@ pub async fn message_delete( session_queue: web::Data, file_host: web::Data>, ) -> Result { - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::THREAD_WRITE]), - ) - .await? - .1; - - let result = database::models::ThreadMessage::get(info.into_inner().0.into(), &**pool).await?; - - if let Some(thread) = result { - if !user.role.is_mod() && thread.author_id != Some(user.id.into()) { - return Err(ApiError::CustomAuthentication( - "You cannot delete this message!".to_string(), - )); - } - - let mut transaction = pool.begin().await?; - - let context = ImageContext::ThreadMessage { - thread_message_id: Some(thread.id.into()), - }; - let images = database::Image::get_many_contexted(context, &mut transaction).await?; - let cdn_url = dotenvy::var("CDN_URL")?; - for image in images { - let name = image.url.split(&format!("{cdn_url}/")).nth(1); - if let Some(icon_path) = name { - file_host.delete_file_version("", icon_path).await?; - } - database::Image::remove(image.id, &mut transaction, &redis).await?; - } - - database::models::ThreadMessage::remove_full(thread.id, &mut transaction).await?; - transaction.commit().await?; - - Ok(HttpResponse::NoContent().body("")) - } else { - Ok(HttpResponse::NotFound().body("")) - } + v3::threads::message_delete(req, info, pool, redis, session_queue, file_host).await } diff --git a/src/routes/v2/users.rs b/src/routes/v2/users.rs index 250a3a80..830d81a3 100644 --- a/src/routes/v2/users.rs +++ b/src/routes/v2/users.rs @@ -1,27 +1,17 @@ -use crate::auth::get_user_from_headers; -use 
crate::database::models::User; use crate::database::redis::RedisPool; use crate::file_hosting::FileHost; -use crate::models::collections::{Collection, CollectionStatus}; -use crate::models::notifications::Notification; -use crate::models::pats::Scopes; use crate::models::projects::Project; -use crate::models::users::{ - Badges, Payout, PayoutStatus, RecipientStatus, Role, UserId, UserPayoutData, -}; +use crate::models::users::{Badges, Role}; +use crate::models::v2::projects::LegacyProject; use crate::queue::payouts::PayoutsQueue; use crate::queue::session::AuthQueue; -use crate::routes::ApiError; -use crate::util::routes::read_from_payload; -use crate::util::validate::validation_errors_to_string; +use crate::routes::{v2_reroute, v3, ApiError}; use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse}; use lazy_static::lazy_static; use regex::Regex; use rust_decimal::Decimal; use serde::{Deserialize, Serialize}; -use serde_json::json; use sqlx::PgPool; -use std::collections::HashMap; use std::sync::Arc; use tokio::sync::Mutex; use validator::Validate; @@ -54,24 +44,7 @@ pub async fn user_auth_get( redis: web::Data, session_queue: web::Data, ) -> Result { - let (scopes, mut user) = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::USER_READ]), - ) - .await?; - - if !scopes.contains(Scopes::USER_READ_EMAIL) { - user.email = None; - } - - if !scopes.contains(Scopes::PAYOUTS_READ) { - user.payout_data = None; - } - - Ok(HttpResponse::Ok().json(user)) + v3::users::user_auth_get(req, pool, redis, session_queue).await } #[derive(Serialize, Deserialize)] @@ -85,13 +58,7 @@ pub async fn users_get( pool: web::Data, redis: web::Data, ) -> Result { - let user_ids = serde_json::from_str::>(&ids.ids)?; - - let users_data = User::get_many(&user_ids, &**pool, &redis).await?; - - let users: Vec = users_data.into_iter().map(From::from).collect(); - - Ok(HttpResponse::Ok().json(users)) + 
v3::users::users_get(web::Query(v3::users::UserIds { ids: ids.ids }), pool, redis).await } #[get("{id}")] @@ -100,14 +67,7 @@ pub async fn user_get( pool: web::Data, redis: web::Data, ) -> Result { - let user_data = User::get(&info.into_inner().0, &**pool, &redis).await?; - - if let Some(data) = user_data { - let response: crate::models::users::User = data.into(); - Ok(HttpResponse::Ok().json(response)) - } else { - Ok(HttpResponse::NotFound().body("")) - } + v3::users::user_get(info, pool, redis).await } #[get("{user_id}/projects")] @@ -118,39 +78,16 @@ pub async fn projects_list( redis: web::Data, session_queue: web::Data, ) -> Result { - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::PROJECT_READ]), - ) - .await - .map(|x| x.1) - .ok(); - - let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?; - - if let Some(id) = id_option.map(|x| x.id) { - let user_id: UserId = id.into(); - - let can_view_private = user - .map(|y| y.role.is_mod() || y.id == user_id) - .unwrap_or(false); - - let project_data = User::get_projects(id, &**pool, &redis).await?; - - let response: Vec<_> = - crate::database::Project::get_many_ids(&project_data, &**pool, &redis) - .await? 
- .into_iter() - .filter(|x| can_view_private || x.inner.status.is_searchable()) - .map(Project::from) - .collect(); - - Ok(HttpResponse::Ok().json(response)) - } else { - Ok(HttpResponse::NotFound().body("")) + let response = + v3::users::projects_list(req, info, pool.clone(), redis.clone(), session_queue).await?; + + // Convert to V2 projects + match v2_reroute::extract_ok_json::>(response).await { + Ok(project) => { + let legacy_projects = LegacyProject::from_many(project, &**pool, &redis).await?; + Ok(HttpResponse::Ok().json(legacy_projects)) + } + Err(response) => Ok(response), } } @@ -162,40 +99,7 @@ pub async fn collections_list( redis: web::Data, session_queue: web::Data, ) -> Result { - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::COLLECTION_READ]), - ) - .await - .map(|x| x.1) - .ok(); - - let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?; - - if let Some(id) = id_option.map(|x| x.id) { - let user_id: UserId = id.into(); - - let can_view_private = user - .map(|y| y.role.is_mod() || y.id == user_id) - .unwrap_or(false); - - let project_data = User::get_collections(id, &**pool).await?; - - let response: Vec<_> = - crate::database::models::Collection::get_many(&project_data, &**pool, &redis) - .await? 
- .into_iter() - .filter(|x| can_view_private || matches!(x.status, CollectionStatus::Listed)) - .map(Collection::from) - .collect(); - - Ok(HttpResponse::Ok().json(response)) - } else { - Ok(HttpResponse::NotFound().body("")) - } + v3::users::collections_list(req, info, pool, redis, session_queue).await } #[get("{user_id}/organizations")] @@ -206,79 +110,7 @@ pub async fn orgs_list( redis: web::Data, session_queue: web::Data, ) -> Result { - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::PROJECT_READ]), - ) - .await - .map(|x| x.1) - .ok(); - - let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?; - - if let Some(id) = id_option.map(|x| x.id) { - let org_data = User::get_organizations(id, &**pool).await?; - - let organizations_data = - crate::database::models::organization_item::Organization::get_many_ids( - &org_data, &**pool, &redis, - ) - .await?; - - let team_ids = organizations_data - .iter() - .map(|x| x.team_id) - .collect::>(); - - let teams_data = crate::database::models::TeamMember::get_from_team_full_many( - &team_ids, &**pool, &redis, - ) - .await?; - let users = User::get_many_ids( - &teams_data.iter().map(|x| x.user_id).collect::>(), - &**pool, - &redis, - ) - .await?; - - let mut organizations = vec![]; - let mut team_groups = HashMap::new(); - for item in teams_data { - team_groups.entry(item.team_id).or_insert(vec![]).push(item); - } - - for data in organizations_data { - let members_data = team_groups.remove(&data.team_id).unwrap_or(vec![]); - let logged_in = user - .as_ref() - .and_then(|user| { - members_data - .iter() - .find(|x| x.user_id == user.id.into() && x.accepted) - }) - .is_some(); - - let team_members: Vec<_> = members_data - .into_iter() - .filter(|x| logged_in || x.accepted || id == x.user_id) - .flat_map(|data| { - users.iter().find(|x| x.id == data.user_id).map(|user| { - crate::models::teams::TeamMember::from(data, user.clone(), !logged_in) - }) - }) - 
.collect(); - - let organization = crate::models::organizations::Organization::from(data, team_members); - organizations.push(organization); - } - - Ok(HttpResponse::Ok().json(organizations)) - } else { - Ok(HttpResponse::NotFound().body("")) - } + v3::users::orgs_list(req, info, pool, redis, session_queue).await } lazy_static! { @@ -316,137 +148,22 @@ pub async fn user_edit( redis: web::Data, session_queue: web::Data, ) -> Result { - let (_scopes, user) = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::USER_WRITE]), + let new_user = new_user.into_inner(); + v3::users::user_edit( + req, + info, + web::Json(v3::users::EditUser { + username: new_user.username, + name: new_user.name, + bio: new_user.bio, + role: new_user.role, + badges: new_user.badges, + }), + pool, + redis, + session_queue, ) - .await?; - - new_user - .validate() - .map_err(|err| ApiError::Validation(validation_errors_to_string(err, None)))?; - - let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?; - - if let Some(actual_user) = id_option { - let id = actual_user.id; - let user_id: UserId = id.into(); - - if user.id == user_id || user.role.is_mod() { - let mut transaction = pool.begin().await?; - - if let Some(username) = &new_user.username { - let existing_user_id_option = User::get(username, &**pool, &redis).await?; - - if existing_user_id_option - .map(|x| UserId::from(x.id)) - .map(|id| id == user.id) - .unwrap_or(true) - { - sqlx::query!( - " - UPDATE users - SET username = $1 - WHERE (id = $2) - ", - username, - id as crate::database::models::ids::UserId, - ) - .execute(&mut *transaction) - .await?; - } else { - return Err(ApiError::InvalidInput(format!( - "Username {username} is taken!" 
- ))); - } - } - - if let Some(name) = &new_user.name { - sqlx::query!( - " - UPDATE users - SET name = $1 - WHERE (id = $2) - ", - name.as_deref(), - id as crate::database::models::ids::UserId, - ) - .execute(&mut *transaction) - .await?; - } - - if let Some(bio) = &new_user.bio { - sqlx::query!( - " - UPDATE users - SET bio = $1 - WHERE (id = $2) - ", - bio.as_deref(), - id as crate::database::models::ids::UserId, - ) - .execute(&mut *transaction) - .await?; - } - - if let Some(role) = &new_user.role { - if !user.role.is_admin() { - return Err(ApiError::CustomAuthentication( - "You do not have the permissions to edit the role of this user!" - .to_string(), - )); - } - - let role = role.to_string(); - - sqlx::query!( - " - UPDATE users - SET role = $1 - WHERE (id = $2) - ", - role, - id as crate::database::models::ids::UserId, - ) - .execute(&mut *transaction) - .await?; - } - - if let Some(badges) = &new_user.badges { - if !user.role.is_admin() { - return Err(ApiError::CustomAuthentication( - "You do not have the permissions to edit the badges of this user!" 
- .to_string(), - )); - } - - sqlx::query!( - " - UPDATE users - SET badges = $1 - WHERE (id = $2) - ", - badges.bits() as i64, - id as crate::database::models::ids::UserId, - ) - .execute(&mut *transaction) - .await?; - } - - User::clear_caches(&[(id, Some(actual_user.username))], &redis).await?; - transaction.commit().await?; - Ok(HttpResponse::NoContent().body("")) - } else { - Err(ApiError::CustomAuthentication( - "You do not have permission to edit this user!".to_string(), - )) - } - } else { - Ok(HttpResponse::NotFound().body("")) - } + .await } #[derive(Serialize, Deserialize)] @@ -463,75 +180,20 @@ pub async fn user_icon_edit( pool: web::Data, redis: web::Data, file_host: web::Data>, - mut payload: web::Payload, + payload: web::Payload, session_queue: web::Data, ) -> Result { - if let Some(content_type) = crate::util::ext::get_image_content_type(&ext.ext) { - let cdn_url = dotenvy::var("CDN_URL")?; - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::USER_WRITE]), - ) - .await? 
- .1; - let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?; - - if let Some(actual_user) = id_option { - if user.id != actual_user.id.into() && !user.role.is_mod() { - return Err(ApiError::CustomAuthentication( - "You don't have permission to edit this user's icon.".to_string(), - )); - } - - let icon_url = actual_user.avatar_url; - let user_id: UserId = actual_user.id.into(); - - if let Some(icon) = icon_url { - let name = icon.split(&format!("{cdn_url}/")).nth(1); - - if let Some(icon_path) = name { - file_host.delete_file_version("", icon_path).await?; - } - } - - let bytes = - read_from_payload(&mut payload, 2097152, "Icons must be smaller than 2MiB").await?; - - let hash = sha1::Sha1::from(&bytes).hexdigest(); - let upload_data = file_host - .upload_file( - content_type, - &format!("user/{}/{}.{}", user_id, hash, ext.ext), - bytes.freeze(), - ) - .await?; - - sqlx::query!( - " - UPDATE users - SET avatar_url = $1 - WHERE (id = $2) - ", - format!("{}/{}", cdn_url, upload_data.file_name), - actual_user.id as crate::database::models::ids::UserId, - ) - .execute(&**pool) - .await?; - User::clear_caches(&[(actual_user.id, None)], &redis).await?; - - Ok(HttpResponse::NoContent().body("")) - } else { - Ok(HttpResponse::NotFound().body("")) - } - } else { - Err(ApiError::InvalidInput(format!( - "Invalid format for user icon: {}", - ext.ext - ))) - } + v3::users::user_icon_edit( + web::Query(v3::users::Extension { ext: ext.ext }), + req, + info, + pool, + redis, + file_host, + payload, + session_queue, + ) + .await } #[derive(Deserialize)] @@ -553,44 +215,18 @@ pub async fn user_delete( redis: web::Data, session_queue: web::Data, ) -> Result { - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::USER_DELETE]), + let removal_type = removal_type.into_inner(); + v3::users::user_delete( + req, + info, + pool, + web::Query(v3::users::RemovalType { + removal_type: removal_type.removal_type, + }), + redis, + 
session_queue, ) - .await? - .1; - let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?; - - if let Some(id) = id_option.map(|x| x.id) { - if !user.role.is_admin() && user.id != id.into() { - return Err(ApiError::CustomAuthentication( - "You do not have permission to delete this user!".to_string(), - )); - } - - let mut transaction = pool.begin().await?; - - let result = User::remove( - id, - removal_type.removal_type == "full", - &mut transaction, - &redis, - ) - .await?; - - transaction.commit().await?; - - if result.is_some() { - Ok(HttpResponse::NoContent().body("")) - } else { - Ok(HttpResponse::NotFound().body("")) - } - } else { - Ok(HttpResponse::NotFound().body("")) - } + .await } #[get("{id}/follows")] @@ -601,52 +237,7 @@ pub async fn user_follows( redis: web::Data, session_queue: web::Data, ) -> Result { - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::USER_READ]), - ) - .await? - .1; - let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?; - - if let Some(id) = id_option.map(|x| x.id) { - if !user.role.is_admin() && user.id != id.into() { - return Err(ApiError::CustomAuthentication( - "You do not have permission to see the projects this user follows!".to_string(), - )); - } - - use futures::TryStreamExt; - - let project_ids = sqlx::query!( - " - SELECT mf.mod_id FROM mod_follows mf - WHERE mf.follower_id = $1 - ", - id as crate::database::models::ids::UserId, - ) - .fetch_many(&**pool) - .try_filter_map(|e| async { - Ok(e.right() - .map(|m| crate::database::models::ProjectId(m.mod_id))) - }) - .try_collect::>() - .await?; - - let projects: Vec<_> = - crate::database::Project::get_many_ids(&project_ids, &**pool, &redis) - .await? 
- .into_iter() - .map(Project::from) - .collect(); - - Ok(HttpResponse::Ok().json(projects)) - } else { - Ok(HttpResponse::NotFound().body("")) - } + v3::users::user_follows(req, info, pool, redis, session_queue).await } #[get("{id}/notifications")] @@ -657,39 +248,7 @@ pub async fn user_notifications( redis: web::Data, session_queue: web::Data, ) -> Result { - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::NOTIFICATION_READ]), - ) - .await? - .1; - let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?; - - if let Some(id) = id_option.map(|x| x.id) { - if !user.role.is_admin() && user.id != id.into() { - return Err(ApiError::CustomAuthentication( - "You do not have permission to see the notifications of this user!".to_string(), - )); - } - - let mut notifications: Vec = - crate::database::models::notification_item::Notification::get_many_user( - id, &**pool, &redis, - ) - .await? - .into_iter() - .map(Into::into) - .collect(); - - notifications.sort_by(|a, b| b.created.cmp(&a.created)); - - Ok(HttpResponse::Ok().json(notifications)) - } else { - Ok(HttpResponse::NotFound().body("")) - } + v3::users::user_notifications(req, info, pool, redis, session_queue).await } #[get("{id}/payouts")] @@ -700,74 +259,7 @@ pub async fn user_payouts( redis: web::Data, session_queue: web::Data, ) -> Result { - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::PAYOUTS_READ]), - ) - .await? 
- .1; - let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?; - - if let Some(id) = id_option.map(|x| x.id) { - if !user.role.is_admin() && user.id != id.into() { - return Err(ApiError::CustomAuthentication( - "You do not have permission to see the payouts of this user!".to_string(), - )); - } - - let (all_time, last_month, payouts) = futures::future::try_join3( - sqlx::query!( - " - SELECT SUM(pv.amount) amount - FROM payouts_values pv - WHERE pv.user_id = $1 - ", - id as crate::database::models::UserId - ) - .fetch_one(&**pool), - sqlx::query!( - " - SELECT SUM(pv.amount) amount - FROM payouts_values pv - WHERE pv.user_id = $1 AND created > NOW() - '1 month'::interval - ", - id as crate::database::models::UserId - ) - .fetch_one(&**pool), - sqlx::query!( - " - SELECT hp.created, hp.amount, hp.status - FROM historical_payouts hp - WHERE hp.user_id = $1 - ORDER BY hp.created DESC - ", - id as crate::database::models::UserId - ) - .fetch_many(&**pool) - .try_filter_map(|e| async { - Ok(e.right().map(|row| Payout { - created: row.created, - amount: row.amount, - status: PayoutStatus::from_string(&row.status), - })) - }) - .try_collect::>(), - ) - .await?; - - use futures::TryStreamExt; - - Ok(HttpResponse::Ok().json(json!({ - "all_time": all_time.amount, - "last_month": last_month.amount, - "payouts": payouts, - }))) - } else { - Ok(HttpResponse::NotFound().body("")) - } + v3::users::user_payouts(req, info, pool, redis, session_queue).await } #[derive(Deserialize)] @@ -785,44 +277,18 @@ pub async fn user_payouts_fees( session_queue: web::Data, payouts_queue: web::Data>, ) -> Result { - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::PAYOUTS_READ]), + v3::users::user_payouts_fees( + req, + info, + web::Query(v3::users::FeeEstimateAmount { + amount: amount.amount, + }), + pool, + redis, + session_queue, + payouts_queue, ) - .await? 
- .1; - let actual_user = User::get(&info.into_inner().0, &**pool, &redis).await?; - - if let Some(actual_user) = actual_user { - if !user.role.is_admin() && user.id != actual_user.id.into() { - return Err(ApiError::CustomAuthentication( - "You do not have permission to request payouts of this user!".to_string(), - )); - } - - if let Some(UserPayoutData { - trolley_id: Some(trolley_id), - .. - }) = user.payout_data - { - let payouts = payouts_queue - .lock() - .await - .get_estimated_fees(&trolley_id, amount.amount) - .await?; - - Ok(HttpResponse::Ok().json(payouts)) - } else { - Err(ApiError::InvalidInput( - "You must set up your trolley account first!".to_string(), - )) - } - } else { - Ok(HttpResponse::NotFound().body("")) - } + .await } #[derive(Deserialize)] @@ -840,87 +306,16 @@ pub async fn user_payouts_request( redis: web::Data, session_queue: web::Data, ) -> Result { - let mut payouts_queue = payouts_queue.lock().await; - - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::PAYOUTS_WRITE]), + v3::users::user_payouts_request( + req, + info, + pool, + web::Json(v3::users::PayoutData { + amount: data.amount, + }), + payouts_queue, + redis, + session_queue, ) - .await? - .1; - let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?; - - if let Some(id) = id_option.map(|x| x.id) { - if !user.role.is_admin() && user.id != id.into() { - return Err(ApiError::CustomAuthentication( - "You do not have permission to request payouts of this user!".to_string(), - )); - } - - if let Some(UserPayoutData { - trolley_id: Some(trolley_id), - trolley_status: Some(trolley_status), - balance, - .. 
- }) = user.payout_data - { - if trolley_status == RecipientStatus::Active { - return if data.amount < balance { - let mut transaction = pool.begin().await?; - - let (batch_id, payment_id) = - payouts_queue.send_payout(&trolley_id, data.amount).await?; - - sqlx::query!( - " - INSERT INTO historical_payouts (user_id, amount, status, batch_id, payment_id) - VALUES ($1, $2, $3, $4, $5) - ", - id as crate::database::models::ids::UserId, - data.amount, - "processing", - batch_id, - payment_id, - ) - .execute(&mut *transaction) - .await?; - - sqlx::query!( - " - UPDATE users - SET balance = balance - $1 - WHERE id = $2 - ", - data.amount, - id as crate::database::models::ids::UserId - ) - .execute(&mut *transaction) - .await?; - - User::clear_caches(&[(id, None)], &redis).await?; - - transaction.commit().await?; - - Ok(HttpResponse::NoContent().body("")) - } else { - Err(ApiError::InvalidInput( - "You do not have enough funds to make this payout!".to_string(), - )) - }; - } else { - return Err(ApiError::InvalidInput( - "Please complete payout information via the trolley dashboard!".to_string(), - )); - } - } - - Err(ApiError::InvalidInput( - "You are not enrolled in the payouts program yet!".to_string(), - )) - } else { - Ok(HttpResponse::NotFound().body("")) - } + .await } diff --git a/src/routes/v2/version_creation.rs b/src/routes/v2/version_creation.rs index e94284cf..5652e829 100644 --- a/src/routes/v2/version_creation.rs +++ b/src/routes/v2/version_creation.rs @@ -1,37 +1,24 @@ -use super::project_creation::{CreateError, UploadedFile}; -use crate::auth::get_user_from_headers; -use crate::database::models::notification_item::NotificationBuilder; -use crate::database::models::version_item::{ - DependencyBuilder, VersionBuilder, VersionFileBuilder, -}; -use crate::database::models::{self, image_item, Organization}; use crate::database::redis::RedisPool; use crate::file_hosting::FileHost; -use crate::models::images::{Image, ImageContext, ImageId}; -use 
crate::models::notifications::NotificationBody; -use crate::models::pack::PackFileHash; -use crate::models::pats::Scopes; +use crate::models::ids::ImageId; use crate::models::projects::{ - Dependency, DependencyType, FileType, GameVersion, Loader, ProjectId, Version, VersionFile, - VersionId, VersionStatus, VersionType, + Dependency, FileType, Loader, ProjectId, Version, VersionId, VersionStatus, VersionType, }; -use crate::models::teams::ProjectPermissions; +use crate::models::v2::projects::LegacyVersion; use crate::queue::session::AuthQueue; -use crate::util::routes::read_from_field; -use crate::util::validate::validation_errors_to_string; -use crate::validate::{validate_file, ValidationResult}; -use actix_multipart::{Field, Multipart}; +use crate::routes::v3::project_creation::CreateError; +use crate::routes::{v2_reroute, v3}; +use actix_multipart::Multipart; use actix_web::web::Data; use actix_web::{post, web, HttpRequest, HttpResponse}; -use chrono::Utc; -use futures::stream::StreamExt; use serde::{Deserialize, Serialize}; +use serde_json::json; use sqlx::postgres::PgPool; use std::collections::HashMap; use std::sync::Arc; use validator::Validate; -fn default_requested_status() -> VersionStatus { +pub fn default_requested_status() -> VersionStatus { VersionStatus::Listed } @@ -61,7 +48,7 @@ pub struct InitialVersionData { )] pub dependencies: Vec, #[validate(length(min = 1))] - pub game_versions: Vec, + pub game_versions: Vec, #[serde(alias = "version_type")] pub release_channel: VersionType, #[validate(length(min = 1))] @@ -91,420 +78,72 @@ struct InitialFileData { #[post("version")] pub async fn version_create( req: HttpRequest, - mut payload: Multipart, + payload: Multipart, client: Data, redis: Data, file_host: Data>, session_queue: Data, ) -> Result { - let mut transaction = client.begin().await?; - let mut uploaded_files = Vec::new(); - - let result = version_create_inner( - req, - &mut payload, - &mut transaction, - &redis, - &***file_host, - &mut 
uploaded_files, - &client, - &session_queue, + let payload = v2_reroute::alter_actix_multipart( + payload, + req.headers().clone(), + |legacy_create: InitialVersionData| { + // Convert input data to V3 format + let mut fields = HashMap::new(); + fields.insert( + "game_versions".to_string(), + json!(legacy_create.game_versions), + ); + + // TODO: Some kind of handling here to ensure project type is fine. + // We expect the version uploaded to be of loader type modpack, but there might not be a way to check here for that. + // After all, theoretically, they could be creating a genuine 'fabric' mod, and modpack no longer carries information on whether its a mod or modpack, + // as those are out to the versions. + + // Ideally this would, if the project 'should' be a modpack: + // - change the loaders to mrpack only + // - add loader fields to the project for the corresponding loaders + + Ok(v3::version_creation::InitialVersionData { + project_id: legacy_create.project_id, + file_parts: legacy_create.file_parts, + version_number: legacy_create.version_number, + version_title: legacy_create.version_title, + version_body: legacy_create.version_body, + dependencies: legacy_create.dependencies, + release_channel: legacy_create.release_channel, + loaders: legacy_create.loaders, + featured: legacy_create.featured, + primary_file: legacy_create.primary_file, + status: legacy_create.status, + file_types: legacy_create.file_types, + uploaded_images: legacy_create.uploaded_images, + ordering: legacy_create.ordering, + fields, + }) + }, ) - .await; - - if result.is_err() { - let undo_result = - super::project_creation::undo_uploads(&***file_host, &uploaded_files).await; - let rollback_result = transaction.rollback().await; - - undo_result?; - if let Err(e) = rollback_result { - return Err(e.into()); - } - } else { - transaction.commit().await?; - } - - result -} - -#[allow(clippy::too_many_arguments)] -async fn version_create_inner( - req: HttpRequest, - payload: &mut Multipart, 
- transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, - redis: &RedisPool, - file_host: &dyn FileHost, - uploaded_files: &mut Vec, - pool: &PgPool, - session_queue: &AuthQueue, -) -> Result { - let cdn_url = dotenvy::var("CDN_URL")?; - - let mut initial_version_data = None; - let mut version_builder = None; - - let all_game_versions = - models::categories::GameVersion::list(&mut **transaction, redis).await?; - let all_loaders = models::categories::Loader::list(&mut **transaction, redis).await?; + .await?; - let user = get_user_from_headers( - &req, - pool, - redis, + // Call V3 project creation + let response = v3::version_creation::version_create( + req, + payload, + client.clone(), + redis.clone(), + file_host, session_queue, - Some(&[Scopes::VERSION_CREATE]), ) - .await? - .1; - - let mut error = None; - while let Some(item) = payload.next().await { - let mut field: Field = item?; - - if error.is_some() { - continue; - } - - let result = async { - let content_disposition = field.content_disposition().clone(); - let name = content_disposition.get_name().ok_or_else(|| { - CreateError::MissingValueError("Missing content name".to_string()) - })?; - - if name == "data" { - let mut data = Vec::new(); - while let Some(chunk) = field.next().await { - data.extend_from_slice(&chunk?); - } - - let version_create_data: InitialVersionData = serde_json::from_slice(&data)?; - initial_version_data = Some(version_create_data); - let version_create_data = initial_version_data.as_ref().unwrap(); - if version_create_data.project_id.is_none() { - return Err(CreateError::MissingValueError( - "Missing project id".to_string(), - )); - } - - version_create_data.validate().map_err(|err| { - CreateError::ValidationError(validation_errors_to_string(err, None)) - })?; - - if !version_create_data.status.can_be_requested() { - return Err(CreateError::InvalidInput( - "Status specified cannot be requested".to_string(), - )); - } - - let project_id: models::ProjectId = 
version_create_data.project_id.unwrap().into(); - - // Ensure that the project this version is being added to exists - let results = sqlx::query!( - "SELECT EXISTS(SELECT 1 FROM mods WHERE id=$1)", - project_id as models::ProjectId - ) - .fetch_one(&mut **transaction) - .await?; - - if !results.exists.unwrap_or(false) { - return Err(CreateError::InvalidInput( - "An invalid project id was supplied".to_string(), - )); - } - - // Check that the user creating this version is a team member - // of the project the version is being added to. - let team_member = models::TeamMember::get_from_user_id_project( - project_id, - user.id.into(), - &mut **transaction, - ) - .await?; - - // Get organization attached, if exists, and the member project permissions - let organization = models::Organization::get_associated_organization_project_id( - project_id, - &mut **transaction, - ) - .await?; - - let organization_team_member = if let Some(organization) = &organization { - models::TeamMember::get_from_user_id( - organization.team_id, - user.id.into(), - &mut **transaction, - ) - .await? - } else { - None - }; - - let permissions = ProjectPermissions::get_permissions_by_role( - &user.role, - &team_member, - &organization_team_member, - ) - .unwrap_or_default(); - - if !permissions.contains(ProjectPermissions::UPLOAD_VERSION) { - return Err(CreateError::CustomAuthenticationError( - "You don't have permission to upload this version!".to_string(), - )); - } - - let version_id: VersionId = models::generate_version_id(transaction).await?.into(); - - let project_type = sqlx::query!( - " - SELECT name FROM project_types pt - INNER JOIN mods ON mods.project_type = pt.id - WHERE mods.id = $1 - ", - project_id as models::ProjectId, - ) - .fetch_one(&mut **transaction) - .await? 
- .name; - - let game_versions = version_create_data - .game_versions - .iter() - .map(|x| { - all_game_versions - .iter() - .find(|y| y.version == x.0) - .ok_or_else(|| CreateError::InvalidGameVersion(x.0.clone())) - .map(|y| y.id) - }) - .collect::, CreateError>>()?; - - let loaders = version_create_data - .loaders - .iter() - .map(|x| { - all_loaders - .iter() - .find(|y| { - y.loader == x.0 && y.supported_project_types.contains(&project_type) - }) - .ok_or_else(|| CreateError::InvalidLoader(x.0.clone())) - .map(|y| y.id) - }) - .collect::, CreateError>>()?; - - let dependencies = version_create_data - .dependencies - .iter() - .map(|d| models::version_item::DependencyBuilder { - version_id: d.version_id.map(|x| x.into()), - project_id: d.project_id.map(|x| x.into()), - dependency_type: d.dependency_type.to_string(), - file_name: None, - }) - .collect::>(); - - version_builder = Some(VersionBuilder { - version_id: version_id.into(), - project_id, - author_id: user.id.into(), - name: version_create_data.version_title.clone(), - version_number: version_create_data.version_number.clone(), - changelog: version_create_data.version_body.clone().unwrap_or_default(), - files: Vec::new(), - dependencies, - game_versions, - loaders, - version_type: version_create_data.release_channel.to_string(), - featured: version_create_data.featured, - status: version_create_data.status, - requested_status: None, - ordering: version_create_data.ordering, - }); - - return Ok(()); - } - - let version = version_builder.as_mut().ok_or_else(|| { - CreateError::InvalidInput(String::from("`data` field must come before file fields")) - })?; - - let project_type = sqlx::query!( - " - SELECT name FROM project_types pt - INNER JOIN mods ON mods.project_type = pt.id - WHERE mods.id = $1 - ", - version.project_id as models::ProjectId, - ) - .fetch_one(&mut **transaction) - .await? 
- .name; - - let version_data = initial_version_data - .clone() - .ok_or_else(|| CreateError::InvalidInput("`data` field is required".to_string()))?; - - upload_file( - &mut field, - file_host, - version_data.file_parts.len(), - uploaded_files, - &mut version.files, - &mut version.dependencies, - &cdn_url, - &content_disposition, - version.project_id.into(), - version.version_id.into(), - &project_type, - version_data.loaders, - version_data.game_versions, - all_game_versions.clone(), - version_data.primary_file.is_some(), - version_data.primary_file.as_deref() == Some(name), - version_data.file_types.get(name).copied().flatten(), - transaction, - ) - .await?; - - Ok(()) - } - .await; - - if result.is_err() { - error = result.err(); - } - } - - if let Some(error) = error { - return Err(error); - } - - let version_data = initial_version_data - .ok_or_else(|| CreateError::InvalidInput("`data` field is required".to_string()))?; - let builder = version_builder - .ok_or_else(|| CreateError::InvalidInput("`data` field is required".to_string()))?; - - if builder.files.is_empty() { - return Err(CreateError::InvalidInput( - "Versions must have at least one file uploaded to them".to_string(), - )); - } - - use futures::stream::TryStreamExt; - - let users = sqlx::query!( - " - SELECT follower_id FROM mod_follows - WHERE mod_id = $1 - ", - builder.project_id as crate::database::models::ids::ProjectId - ) - .fetch_many(&mut **transaction) - .try_filter_map(|e| async { Ok(e.right().map(|m| models::ids::UserId(m.follower_id))) }) - .try_collect::>() .await?; - let project_id: ProjectId = builder.project_id.into(); - let version_id: VersionId = builder.version_id.into(); - - NotificationBuilder { - body: NotificationBody::ProjectUpdate { - project_id, - version_id, - }, - } - .insert_many(users, transaction, redis) - .await?; - - let response = Version { - id: builder.version_id.into(), - project_id: builder.project_id.into(), - author_id: user.id, - featured: builder.featured, - 
name: builder.name.clone(), - version_number: builder.version_number.clone(), - changelog: builder.changelog.clone(), - changelog_url: None, - date_published: Utc::now(), - downloads: 0, - version_type: version_data.release_channel, - status: builder.status, - requested_status: builder.requested_status, - ordering: builder.ordering, - files: builder - .files - .iter() - .map(|file| VersionFile { - hashes: file - .hashes - .iter() - .map(|hash| { - ( - hash.algorithm.clone(), - // This is a hack since the hashes are currently stored as ASCII - // in the database, but represented here as a Vec. At some - // point we need to change the hash to be the real bytes in the - // database and add more processing here. - String::from_utf8(hash.hash.clone()).unwrap(), - ) - }) - .collect(), - url: file.url.clone(), - filename: file.filename.clone(), - primary: file.primary, - size: file.size, - file_type: file.file_type, - }) - .collect::>(), - dependencies: version_data.dependencies, - game_versions: version_data.game_versions, - loaders: version_data.loaders, - }; - - let project_id = builder.project_id; - builder.insert(transaction).await?; - - for image_id in version_data.uploaded_images { - if let Some(db_image) = - image_item::Image::get(image_id.into(), &mut **transaction, redis).await? - { - let image: Image = db_image.into(); - if !matches!(image.context, ImageContext::Report { .. 
}) - || image.context.inner_id().is_some() - { - return Err(CreateError::InvalidInput(format!( - "Image {} is not unused and in the 'version' context", - image_id - ))); - } - - sqlx::query!( - " - UPDATE uploaded_images - SET version_id = $1 - WHERE id = $2 - ", - version_id.0 as i64, - image_id.0 as i64 - ) - .execute(&mut **transaction) - .await?; - - image_item::Image::clear_cache(image.id.into(), redis).await?; - } else { - return Err(CreateError::InvalidInput(format!( - "Image {} does not exist", - image_id - ))); + // Convert response to V2 format + match v2_reroute::extract_ok_json::(response).await { + Ok(version) => { + let v2_version = LegacyVersion::from(version); + Ok(HttpResponse::Ok().json(v2_version)) } + Err(response) => Ok(response), } - - models::Project::update_game_versions(project_id, transaction).await?; - models::Project::update_loaders(project_id, transaction).await?; - models::Project::clear_cache(project_id, None, Some(true), redis).await?; - - Ok(HttpResponse::Ok().json(response)) } // under /api/v1/version/{version_id} @@ -512,452 +151,21 @@ async fn version_create_inner( pub async fn upload_file_to_version( req: HttpRequest, url_data: web::Path<(VersionId,)>, - mut payload: Multipart, + payload: Multipart, client: Data, redis: Data, file_host: Data>, session_queue: web::Data, ) -> Result { - let mut transaction = client.begin().await?; - let mut uploaded_files = Vec::new(); - - let version_id = models::VersionId::from(url_data.into_inner().0); - - let result = upload_file_to_version_inner( + let response = v3::version_creation::upload_file_to_version( req, - &mut payload, - client, - &mut transaction, - redis, - &***file_host, - &mut uploaded_files, - version_id, - &session_queue, - ) - .await; - - if result.is_err() { - let undo_result = - super::project_creation::undo_uploads(&***file_host, &uploaded_files).await; - let rollback_result = transaction.rollback().await; - - undo_result?; - if let Err(e) = rollback_result { - return 
Err(e.into()); - } - } else { - transaction.commit().await?; - } - - result -} - -#[allow(clippy::too_many_arguments)] -async fn upload_file_to_version_inner( - req: HttpRequest, - payload: &mut Multipart, - client: Data, - transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, - redis: Data, - file_host: &dyn FileHost, - uploaded_files: &mut Vec, - version_id: models::VersionId, - session_queue: &AuthQueue, -) -> Result { - let cdn_url = dotenvy::var("CDN_URL")?; - - let mut initial_file_data: Option = None; - let mut file_builders: Vec = Vec::new(); - - let user = get_user_from_headers( - &req, - &**client, - &redis, + url_data, + payload, + client.clone(), + redis.clone(), + file_host, session_queue, - Some(&[Scopes::VERSION_WRITE]), - ) - .await? - .1; - - let result = models::Version::get(version_id, &**client, &redis).await?; - - let version = match result { - Some(v) => v, - None => { - return Err(CreateError::InvalidInput( - "An invalid version id was supplied".to_string(), - )); - } - }; - - if !user.role.is_admin() { - let team_member = models::TeamMember::get_from_user_id_project( - version.inner.project_id, - user.id.into(), - &mut **transaction, - ) - .await?; - - let organization = Organization::get_associated_organization_project_id( - version.inner.project_id, - &**client, - ) - .await?; - - let organization_team_member = if let Some(organization) = &organization { - models::TeamMember::get_from_user_id( - organization.team_id, - user.id.into(), - &mut **transaction, - ) - .await? 
- } else { - None - }; - - let permissions = ProjectPermissions::get_permissions_by_role( - &user.role, - &team_member, - &organization_team_member, - ) - .unwrap_or_default(); - - if !permissions.contains(ProjectPermissions::UPLOAD_VERSION) { - return Err(CreateError::CustomAuthenticationError( - "You don't have permission to upload files to this version!".to_string(), - )); - } - } - - let project_id = ProjectId(version.inner.project_id.0 as u64); - - let project_type = sqlx::query!( - " - SELECT name FROM project_types pt - INNER JOIN mods ON mods.project_type = pt.id - WHERE mods.id = $1 - ", - version.inner.project_id as models::ProjectId, - ) - .fetch_one(&mut **transaction) - .await? - .name; - - let all_game_versions = - models::categories::GameVersion::list(&mut **transaction, &redis).await?; - - let mut error = None; - while let Some(item) = payload.next().await { - let mut field: Field = item?; - - if error.is_some() { - continue; - } - - let result = async { - let content_disposition = field.content_disposition().clone(); - let name = content_disposition.get_name().ok_or_else(|| { - CreateError::MissingValueError("Missing content name".to_string()) - })?; - - if name == "data" { - let mut data = Vec::new(); - while let Some(chunk) = field.next().await { - data.extend_from_slice(&chunk?); - } - let file_data: InitialFileData = serde_json::from_slice(&data)?; - - initial_file_data = Some(file_data); - return Ok(()); - } - - let file_data = initial_file_data.as_ref().ok_or_else(|| { - CreateError::InvalidInput(String::from("`data` field must come before file fields")) - })?; - - let mut dependencies = version - .dependencies - .iter() - .map(|x| DependencyBuilder { - project_id: x.project_id, - version_id: x.version_id, - file_name: x.file_name.clone(), - dependency_type: x.dependency_type.clone(), - }) - .collect(); - - upload_file( - &mut field, - file_host, - 0, - uploaded_files, - &mut file_builders, - &mut dependencies, - &cdn_url, - 
&content_disposition, - project_id, - version_id.into(), - &project_type, - version.loaders.clone().into_iter().map(Loader).collect(), - version - .game_versions - .clone() - .into_iter() - .map(GameVersion) - .collect(), - all_game_versions.clone(), - true, - false, - file_data.file_types.get(name).copied().flatten(), - transaction, - ) - .await?; - - Ok(()) - } - .await; - - if result.is_err() { - error = result.err(); - } - } - - if let Some(error) = error { - return Err(error); - } - - if file_builders.is_empty() { - return Err(CreateError::InvalidInput( - "At least one file must be specified".to_string(), - )); - } else { - VersionFileBuilder::insert_many(file_builders, version_id, transaction).await?; - } - - // Clear version cache - models::Version::clear_cache(&version, &redis).await?; - - Ok(HttpResponse::NoContent().body("")) -} - -// This function is used for adding a file to a version, uploading the initial -// files for a version, and for uploading the initial version files for a project -#[allow(clippy::too_many_arguments)] -pub async fn upload_file( - field: &mut Field, - file_host: &dyn FileHost, - total_files_len: usize, - uploaded_files: &mut Vec, - version_files: &mut Vec, - dependencies: &mut Vec, - cdn_url: &str, - content_disposition: &actix_web::http::header::ContentDisposition, - project_id: ProjectId, - version_id: VersionId, - project_type: &str, - loaders: Vec, - game_versions: Vec, - all_game_versions: Vec, - ignore_primary: bool, - force_primary: bool, - file_type: Option, - transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, -) -> Result<(), CreateError> { - let (file_name, file_extension) = get_name_ext(content_disposition)?; - - if file_name.contains('/') { - return Err(CreateError::InvalidInput( - "File names must not contain slashes!".to_string(), - )); - } - - let content_type = crate::util::ext::project_file_type(file_extension) - .ok_or_else(|| CreateError::InvalidFileType(file_extension.to_string()))?; - - let data = 
read_from_field( - field, 500 * (1 << 20), - "Project file exceeds the maximum of 500MiB. Contact a moderator or admin to request permission to upload larger files." - ).await?; - - let hash = sha1::Sha1::from(&data).hexdigest(); - let exists = sqlx::query!( - " - SELECT EXISTS(SELECT 1 FROM hashes h - INNER JOIN files f ON f.id = h.file_id - INNER JOIN versions v ON v.id = f.version_id - WHERE h.algorithm = $2 AND h.hash = $1 AND v.mod_id != $3) - ", - hash.as_bytes(), - "sha1", - project_id.0 as i64 - ) - .fetch_one(&mut **transaction) - .await? - .exists - .unwrap_or(false); - - if exists { - return Err(CreateError::InvalidInput( - "Duplicate files are not allowed to be uploaded to Modrinth!".to_string(), - )); - } - - let validation_result = validate_file( - data.clone().into(), - file_extension.to_string(), - project_type.to_string(), - loaders.clone(), - game_versions.clone(), - all_game_versions.clone(), - file_type, ) .await?; - - if let ValidationResult::PassWithPackDataAndFiles { - ref format, - ref files, - } = validation_result - { - if dependencies.is_empty() { - let hashes: Vec> = format - .files - .iter() - .filter_map(|x| x.hashes.get(&PackFileHash::Sha1)) - .map(|x| x.as_bytes().to_vec()) - .collect(); - - let res = sqlx::query!( - " - SELECT v.id version_id, v.mod_id project_id, h.hash hash FROM hashes h - INNER JOIN files f on h.file_id = f.id - INNER JOIN versions v on f.version_id = v.id - WHERE h.algorithm = 'sha1' AND h.hash = ANY($1) - ", - &*hashes - ) - .fetch_all(&mut **transaction) - .await?; - - for file in &format.files { - if let Some(dep) = res.iter().find(|x| { - Some(&*x.hash) == file.hashes.get(&PackFileHash::Sha1).map(|x| x.as_bytes()) - }) { - dependencies.push(DependencyBuilder { - project_id: Some(models::ProjectId(dep.project_id)), - version_id: Some(models::VersionId(dep.version_id)), - file_name: None, - dependency_type: DependencyType::Embedded.to_string(), - }); - } else if let Some(first_download) = 
file.downloads.first() { - dependencies.push(DependencyBuilder { - project_id: None, - version_id: None, - file_name: Some( - first_download - .rsplit('/') - .next() - .unwrap_or(first_download) - .to_string(), - ), - dependency_type: DependencyType::Embedded.to_string(), - }); - } - } - - for file in files { - if !file.is_empty() { - dependencies.push(DependencyBuilder { - project_id: None, - version_id: None, - file_name: Some(file.to_string()), - dependency_type: DependencyType::Embedded.to_string(), - }); - } - } - } - } - - let data = data.freeze(); - - let primary = (validation_result.is_passed() - && version_files.iter().all(|x| !x.primary) - && !ignore_primary) - || force_primary - || total_files_len == 1; - - let file_path_encode = format!( - "data/{}/versions/{}/{}", - project_id, - version_id, - urlencoding::encode(file_name) - ); - let file_path = format!("data/{}/versions/{}/{}", project_id, version_id, &file_name); - - let upload_data = file_host - .upload_file(content_type, &file_path, data) - .await?; - - uploaded_files.push(UploadedFile { - file_id: upload_data.file_id, - file_name: file_path, - }); - - let sha1_bytes = upload_data.content_sha1.into_bytes(); - let sha512_bytes = upload_data.content_sha512.into_bytes(); - - if version_files.iter().any(|x| { - x.hashes - .iter() - .any(|y| y.hash == sha1_bytes || y.hash == sha512_bytes) - }) { - return Err(CreateError::InvalidInput( - "Duplicate files are not allowed to be uploaded to Modrinth!".to_string(), - )); - } - - if let ValidationResult::Warning(msg) = validation_result { - if primary { - return Err(CreateError::InvalidInput(msg.to_string())); - } - } - - version_files.push(VersionFileBuilder { - filename: file_name.to_string(), - url: format!("{cdn_url}/{file_path_encode}"), - hashes: vec![ - models::version_item::HashBuilder { - algorithm: "sha1".to_string(), - // This is an invalid cast - the database expects the hash's - // bytes, but this is the string version. 
- hash: sha1_bytes, - }, - models::version_item::HashBuilder { - algorithm: "sha512".to_string(), - // This is an invalid cast - the database expects the hash's - // bytes, but this is the string version. - hash: sha512_bytes, - }, - ], - primary, - size: upload_data.content_length, - file_type, - }); - - Ok(()) -} - -pub fn get_name_ext( - content_disposition: &actix_web::http::header::ContentDisposition, -) -> Result<(&str, &str), CreateError> { - let file_name = content_disposition - .get_filename() - .ok_or_else(|| CreateError::MissingValueError("Missing content file name".to_string()))?; - let file_extension = if let Some(last_period) = file_name.rfind('.') { - file_name.get((last_period + 1)..).unwrap_or("") - } else { - return Err(CreateError::MissingValueError( - "Missing content file extension".to_string(), - )); - }; - Ok((file_name, file_extension)) + Ok(response) } diff --git a/src/routes/v2/version_file.rs b/src/routes/v2/version_file.rs index 5d98dc4a..36906347 100644 --- a/src/routes/v2/version_file.rs +++ b/src/routes/v2/version_file.rs @@ -1,17 +1,11 @@ use super::ApiError; -use crate::auth::{ - filter_authorized_projects, filter_authorized_versions, get_user_from_headers, - is_authorized_version, -}; use crate::database::redis::RedisPool; -use crate::models::ids::VersionId; -use crate::models::pats::Scopes; -use crate::models::projects::VersionType; -use crate::models::teams::ProjectPermissions; +use crate::models::projects::{Project, Version, VersionType}; +use crate::models::v2::projects::{LegacyProject, LegacyVersion}; use crate::queue::session::AuthQueue; -use crate::{database, models}; +use crate::routes::v3::version_file::{default_algorithm, HashQuery}; +use crate::routes::{v2_reroute, v3}; use actix_web::{delete, get, post, web, HttpRequest, HttpResponse}; -use itertools::Itertools; use serde::{Deserialize, Serialize}; use sqlx::PgPool; use std::collections::HashMap; @@ -34,17 +28,6 @@ pub fn config(cfg: &mut web::ServiceConfig) { ); } 
-#[derive(Serialize, Deserialize)] -pub struct HashQuery { - #[serde(default = "default_algorithm")] - pub algorithm: String, - pub version_id: Option, -} - -fn default_algorithm() -> String { - "sha1".into() -} - // under /api/v1/version_file/{hash} #[get("{version_id}")] pub async fn get_version_from_hash( @@ -55,46 +38,20 @@ pub async fn get_version_from_hash( hash_query: web::Query, session_queue: web::Data, ) -> Result { - let user_option = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::VERSION_READ]), - ) - .await - .map(|x| x.1) - .ok(); - let hash = info.into_inner().0.to_lowercase(); - let file = database::models::Version::get_file_from_hash( - hash_query.algorithm.clone(), - hash, - hash_query.version_id.map(|x| x.into()), - &**pool, - &redis, - ) - .await?; - if let Some(file) = file { - let version = database::models::Version::get(file.version_id, &**pool, &redis).await?; - if let Some(version) = version { - if !is_authorized_version(&version.inner, &user_option, &pool).await? 
{ - return Ok(HttpResponse::NotFound().body("")); - } - - Ok(HttpResponse::Ok().json(models::projects::Version::from(version))) - } else { - Ok(HttpResponse::NotFound().body("")) + let response = + v3::version_file::get_version_from_hash(req, info, pool, redis, hash_query, session_queue) + .await; + + // Convert response to V2 format + match v2_reroute::extract_ok_json::(response?).await { + Ok(version) => { + let v2_version = LegacyVersion::from(version); + Ok(HttpResponse::Ok().json(v2_version)) } - } else { - Ok(HttpResponse::NotFound().body("")) + Err(response) => Ok(response), } } -#[derive(Serialize, Deserialize)] -pub struct DownloadRedirect { - pub url: String, -} - // under /api/v1/version_file/{hash}/download #[get("{version_id}/download")] pub async fn download_version( @@ -105,44 +62,7 @@ pub async fn download_version( hash_query: web::Query, session_queue: web::Data, ) -> Result { - let user_option = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::VERSION_READ]), - ) - .await - .map(|x| x.1) - .ok(); - - let hash = info.into_inner().0.to_lowercase(); - let file = database::models::Version::get_file_from_hash( - hash_query.algorithm.clone(), - hash, - hash_query.version_id.map(|x| x.into()), - &**pool, - &redis, - ) - .await?; - - if let Some(file) = file { - let version = database::models::Version::get(file.version_id, &**pool, &redis).await?; - - if let Some(version) = version { - if !is_authorized_version(&version.inner, &user_option, &pool).await? 
{ - return Ok(HttpResponse::NotFound().body("")); - } - - Ok(HttpResponse::TemporaryRedirect() - .append_header(("Location", &*file.url)) - .json(DownloadRedirect { url: file.url })) - } else { - Ok(HttpResponse::NotFound().body("")) - } - } else { - Ok(HttpResponse::NotFound().body("")) - } + v3::version_file::download_version(req, info, pool, redis, hash_query, session_queue).await } // under /api/v1/version_file/{hash} @@ -155,113 +75,10 @@ pub async fn delete_file( hash_query: web::Query, session_queue: web::Data, ) -> Result { - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::VERSION_WRITE]), - ) - .await? - .1; - - let hash = info.into_inner().0.to_lowercase(); - - let file = database::models::Version::get_file_from_hash( - hash_query.algorithm.clone(), - hash, - hash_query.version_id.map(|x| x.into()), - &**pool, - &redis, - ) - .await?; - - if let Some(row) = file { - if !user.role.is_admin() { - let team_member = database::models::TeamMember::get_from_user_id_version( - row.version_id, - user.id.into(), - &**pool, - ) - .await - .map_err(ApiError::Database)?; - - let organization = - database::models::Organization::get_associated_organization_project_id( - row.project_id, - &**pool, - ) - .await - .map_err(ApiError::Database)?; - - let organization_team_member = if let Some(organization) = &organization { - database::models::TeamMember::get_from_user_id_organization( - organization.id, - user.id.into(), - &**pool, - ) - .await - .map_err(ApiError::Database)? 
- } else { - None - }; - - let permissions = ProjectPermissions::get_permissions_by_role( - &user.role, - &team_member, - &organization_team_member, - ) - .unwrap_or_default(); - - if !permissions.contains(ProjectPermissions::DELETE_VERSION) { - return Err(ApiError::CustomAuthentication( - "You don't have permission to delete this file!".to_string(), - )); - } - } - - let version = database::models::Version::get(row.version_id, &**pool, &redis).await?; - if let Some(version) = version { - if version.files.len() < 2 { - return Err(ApiError::InvalidInput( - "Versions must have at least one file uploaded to them".to_string(), - )); - } - - database::models::Version::clear_cache(&version, &redis).await?; - } - - let mut transaction = pool.begin().await?; - - sqlx::query!( - " - DELETE FROM hashes - WHERE file_id = $1 - ", - row.id.0 - ) - .execute(&mut *transaction) - .await?; - - sqlx::query!( - " - DELETE FROM files - WHERE files.id = $1 - ", - row.id.0, - ) - .execute(&mut *transaction) - .await?; - - transaction.commit().await?; - - Ok(HttpResponse::NoContent().body("")) - } else { - Ok(HttpResponse::NotFound().body("")) - } + v3::version_file::delete_file(req, info, pool, redis, hash_query, session_queue).await } -#[derive(Deserialize)] +#[derive(Serialize, Deserialize)] pub struct UpdateData { pub loaders: Option>, pub game_versions: Option>, @@ -278,65 +95,40 @@ pub async fn get_update_from_hash( update_data: web::Json, session_queue: web::Data, ) -> Result { - let user_option = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::VERSION_READ]), - ) - .await - .map(|x| x.1) - .ok(); - let hash = info.into_inner().0.to_lowercase(); - - if let Some(file) = database::models::Version::get_file_from_hash( - hash_query.algorithm.clone(), - hash, - hash_query.version_id.map(|x| x.into()), - &**pool, - &redis, + let update_data = update_data.into_inner(); + let mut loader_fields = HashMap::new(); + let mut game_versions = vec![]; + for 
gv in update_data.game_versions.into_iter().flatten() { + game_versions.push(serde_json::json!(gv.clone())); + } + if !game_versions.is_empty() { + loader_fields.insert("game_versions".to_string(), game_versions); + } + let update_data = v3::version_file::UpdateData { + loaders: update_data.loaders.clone(), + version_types: update_data.version_types.clone(), + loader_fields: Some(loader_fields), + }; + + let response = v3::version_file::get_update_from_hash( + req, + info, + pool, + redis, + hash_query, + web::Json(update_data), + session_queue, ) - .await? - { - if let Some(project) = - database::models::Project::get_id(file.project_id, &**pool, &redis).await? - { - let mut versions = - database::models::Version::get_many(&project.versions, &**pool, &redis) - .await? - .into_iter() - .filter(|x| { - let mut bool = true; - - if let Some(version_types) = &update_data.version_types { - bool &= version_types - .iter() - .any(|y| y.as_str() == x.inner.version_type); - } - if let Some(loaders) = &update_data.loaders { - bool &= x.loaders.iter().any(|y| loaders.contains(y)); - } - if let Some(game_versions) = &update_data.game_versions { - bool &= x.game_versions.iter().any(|y| game_versions.contains(y)); - } - - bool - }) - .sorted() - .collect::>(); - - if let Some(first) = versions.pop() { - if !is_authorized_version(&first.inner, &user_option, &pool).await? 
{ - return Ok(HttpResponse::NotFound().body("")); - } + .await?; - return Ok(HttpResponse::Ok().json(models::projects::Version::from(first))); - } + // Convert response to V2 format + match v2_reroute::extract_ok_json::(response).await { + Ok(version) => { + let v2_version = LegacyVersion::from(version); + Ok(HttpResponse::Ok().json(v2_version)) } + Err(response) => Ok(response), } - - Ok(HttpResponse::NotFound().body("")) } // Requests above with multiple versions below @@ -356,44 +148,34 @@ pub async fn get_versions_from_hashes( file_data: web::Json, session_queue: web::Data, ) -> Result { - let user_option = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::VERSION_READ]), - ) - .await - .map(|x| x.1) - .ok(); - - let files = database::models::Version::get_files_from_hash( - file_data.algorithm.clone(), - &file_data.hashes, - &**pool, - &redis, - ) - .await?; - - let version_ids = files.iter().map(|x| x.version_id).collect::>(); - let versions_data = filter_authorized_versions( - database::models::Version::get_many(&version_ids, &**pool, &redis).await?, - &user_option, - &pool, + let file_data = file_data.into_inner(); + let file_data = v3::version_file::FileHashes { + algorithm: file_data.algorithm, + hashes: file_data.hashes, + }; + let response = v3::version_file::get_versions_from_hashes( + req, + pool, + redis, + web::Json(file_data), + session_queue, ) .await?; - let mut response = HashMap::new(); - - for version in versions_data { - for file in files.iter().filter(|x| x.version_id == version.id.into()) { - if let Some(hash) = file.hashes.get(&file_data.algorithm) { - response.insert(hash.clone(), version.clone()); - } + // Convert to V2 + match v2_reroute::extract_ok_json::>(response).await { + Ok(versions) => { + let v2_versions = versions + .into_iter() + .map(|(hash, version)| { + let v2_version = LegacyVersion::from(version); + (hash, v2_version) + }) + .collect::>(); + Ok(HttpResponse::Ok().json(v2_versions)) } + 
Err(response) => Ok(response), } - - Ok(HttpResponse::Ok().json(response)) } #[post("project")] @@ -404,45 +186,46 @@ pub async fn get_projects_from_hashes( file_data: web::Json, session_queue: web::Data, ) -> Result { - let user_option = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::PROJECT_READ, Scopes::VERSION_READ]), - ) - .await - .map(|x| x.1) - .ok(); - - let files = database::models::Version::get_files_from_hash( - file_data.algorithm.clone(), - &file_data.hashes, - &**pool, - &redis, + let file_data = file_data.into_inner(); + let file_data = v3::version_file::FileHashes { + algorithm: file_data.algorithm, + hashes: file_data.hashes, + }; + let response = v3::version_file::get_projects_from_hashes( + req, + pool.clone(), + redis.clone(), + web::Json(file_data), + session_queue, ) .await?; - let project_ids = files.iter().map(|x| x.project_id).collect::>(); - - let projects_data = filter_authorized_projects( - database::models::Project::get_many_ids(&project_ids, &**pool, &redis).await?, - &user_option, - &pool, - ) - .await?; - - let mut response = HashMap::new(); + // Convert to V2 + match v2_reroute::extract_ok_json::>(response).await { + Ok(projects_hashes) => { + let hash_to_project_id = projects_hashes + .iter() + .map(|(hash, project)| { + let project_id = project.id; + (hash.clone(), project_id) + }) + .collect::>(); + let legacy_projects = + LegacyProject::from_many(projects_hashes.into_values().collect(), &**pool, &redis) + .await?; + let legacy_projects_hashes = hash_to_project_id + .into_iter() + .filter_map(|(hash, project_id)| { + let legacy_project = + legacy_projects.iter().find(|x| x.id == project_id)?.clone(); + Some((hash, legacy_project)) + }) + .collect::>(); - for project in projects_data { - for file in files.iter().filter(|x| x.project_id == project.id.into()) { - if let Some(hash) = file.hashes.get(&file_data.algorithm) { - response.insert(hash.clone(), project.clone()); - } + 
Ok(HttpResponse::Ok().json(legacy_projects_hashes)) } + Err(response) => Ok(response), } - - Ok(HttpResponse::Ok().json(response)) } #[derive(Deserialize)] @@ -463,85 +246,44 @@ pub async fn update_files( update_data: web::Json, session_queue: web::Data, ) -> Result { - let user_option = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::VERSION_READ]), - ) - .await - .map(|x| x.1) - .ok(); - - let files = database::models::Version::get_files_from_hash( - update_data.algorithm.clone(), - &update_data.hashes, - &**pool, - &redis, - ) - .await?; - - let projects = database::models::Project::get_many_ids( - &files.iter().map(|x| x.project_id).collect::>(), - &**pool, - &redis, - ) - .await?; - let all_versions = database::models::Version::get_many( - &projects - .iter() - .flat_map(|x| x.versions.clone()) - .collect::>(), - &**pool, - &redis, - ) - .await?; - - let mut response = HashMap::new(); - - for project in projects { - for file in files.iter().filter(|x| x.project_id == project.inner.id) { - let version = all_versions - .iter() - .filter(|x| x.inner.project_id == file.project_id) - .filter(|x| { - let mut bool = true; - - if let Some(version_types) = &update_data.version_types { - bool &= version_types - .iter() - .any(|y| y.as_str() == x.inner.version_type); - } - if let Some(loaders) = &update_data.loaders { - bool &= x.loaders.iter().any(|y| loaders.contains(y)); - } - if let Some(game_versions) = &update_data.game_versions { - bool &= x.game_versions.iter().any(|y| game_versions.contains(y)); - } - - bool + let update_data = update_data.into_inner(); + let mut loader_fields = HashMap::new(); + let mut game_versions = vec![]; + for gv in update_data.game_versions.into_iter().flatten() { + game_versions.push(serde_json::json!(gv.clone())); + } + if !game_versions.is_empty() { + loader_fields.insert("game_versions".to_string(), game_versions); + } + let update_data = v3::version_file::ManyUpdateData { + loaders: 
update_data.loaders.clone(), + version_types: update_data.version_types.clone(), + loader_fields: Some(loader_fields), + algorithm: update_data.algorithm, + hashes: update_data.hashes, + }; + + let response = + v3::version_file::update_files(req, pool, redis, web::Json(update_data), session_queue) + .await?; + + // Convert response to V2 format + match v2_reroute::extract_ok_json::>(response).await { + Ok(returned_versions) => { + let v3_versions = returned_versions + .into_iter() + .map(|(hash, version)| { + let v2_version = LegacyVersion::from(version); + (hash, v2_version) }) - .sorted() - .next(); - - if let Some(version) = version { - if is_authorized_version(&version.inner, &user_option, &pool).await? { - if let Some(hash) = file.hashes.get(&update_data.algorithm) { - response.insert( - hash.clone(), - models::projects::Version::from(version.clone()), - ); - } - } - } + .collect::>(); + Ok(HttpResponse::Ok().json(v3_versions)) } + Err(response) => Ok(response), } - - Ok(HttpResponse::Ok().json(response)) } -#[derive(Deserialize)] +#[derive(Serialize, Deserialize)] pub struct FileUpdateData { pub hash: String, pub loaders: Option>, @@ -564,86 +306,52 @@ pub async fn update_individual_files( update_data: web::Json, session_queue: web::Data, ) -> Result { - let user_option = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::VERSION_READ]), - ) - .await - .map(|x| x.1) - .ok(); - - let files = database::models::Version::get_files_from_hash( - update_data.algorithm.clone(), - &update_data + let update_data = update_data.into_inner(); + let update_data = v3::version_file::ManyFileUpdateData { + algorithm: update_data.algorithm, + hashes: update_data .hashes - .iter() - .map(|x| x.hash.clone()) - .collect::>(), - &**pool, - &redis, - ) - .await?; - - let projects = database::models::Project::get_many_ids( - &files.iter().map(|x| x.project_id).collect::>(), - &**pool, - &redis, - ) - .await?; - let all_versions = 
database::models::Version::get_many( - &projects - .iter() - .flat_map(|x| x.versions.clone()) - .collect::>(), - &**pool, - &redis, + .into_iter() + .map(|x| { + let mut loader_fields = HashMap::new(); + let mut game_versions = vec![]; + for gv in x.game_versions.into_iter().flatten() { + game_versions.push(serde_json::json!(gv.clone())); + } + if !game_versions.is_empty() { + loader_fields.insert("game_versions".to_string(), game_versions); + } + v3::version_file::FileUpdateData { + hash: x.hash.clone(), + loaders: x.loaders.clone(), + loader_fields: Some(loader_fields), + version_types: x.version_types, + } + }) + .collect(), + }; + + let response = v3::version_file::update_individual_files( + req, + pool, + redis, + web::Json(update_data), + session_queue, ) .await?; - let mut response = HashMap::new(); - - for project in projects { - for file in files.iter().filter(|x| x.project_id == project.inner.id) { - if let Some(hash) = file.hashes.get(&update_data.algorithm) { - if let Some(query_file) = update_data.hashes.iter().find(|x| &x.hash == hash) { - let version = all_versions - .iter() - .filter(|x| x.inner.project_id == file.project_id) - .filter(|x| { - let mut bool = true; - - if let Some(version_types) = &query_file.version_types { - bool &= version_types - .iter() - .any(|y| y.as_str() == x.inner.version_type); - } - if let Some(loaders) = &query_file.loaders { - bool &= x.loaders.iter().any(|y| loaders.contains(y)); - } - if let Some(game_versions) = &query_file.game_versions { - bool &= x.game_versions.iter().any(|y| game_versions.contains(y)); - } - - bool - }) - .sorted() - .next(); - - if let Some(version) = version { - if is_authorized_version(&version.inner, &user_option, &pool).await? 
{ - response.insert( - hash.clone(), - models::projects::Version::from(version.clone()), - ); - } - } - } - } + // Convert response to V2 format + match v2_reroute::extract_ok_json::>(response).await { + Ok(returned_versions) => { + let v3_versions = returned_versions + .into_iter() + .map(|(hash, version)| { + let v2_version = LegacyVersion::from(version); + (hash, v2_version) + }) + .collect::>(); + Ok(HttpResponse::Ok().json(v3_versions)) } + Err(response) => Ok(response), } - - Ok(HttpResponse::Ok().json(response)) } diff --git a/src/routes/v2/versions.rs b/src/routes/v2/versions.rs index 44517a84..255ecfc2 100644 --- a/src/routes/v2/versions.rs +++ b/src/routes/v2/versions.rs @@ -1,20 +1,13 @@ +use std::collections::HashMap; + use super::ApiError; -use crate::auth::{ - filter_authorized_versions, get_user_from_headers, is_authorized, is_authorized_version, -}; -use crate::database; -use crate::database::models::version_item::{DependencyBuilder, LoaderVersion, VersionVersion}; -use crate::database::models::{image_item, Organization}; use crate::database::redis::RedisPool; use crate::models; -use crate::models::ids::base62_impl::parse_base62; -use crate::models::images::ImageContext; -use crate::models::pats::Scopes; -use crate::models::projects::{Dependency, FileType, VersionStatus, VersionType}; -use crate::models::teams::ProjectPermissions; +use crate::models::ids::VersionId; +use crate::models::projects::{Dependency, FileType, Version, VersionStatus, VersionType}; +use crate::models::v2::projects::LegacyVersion; use crate::queue::session::AuthQueue; -use crate::util::img; -use crate::util::validate::validation_errors_to_string; +use crate::routes::{v2_reroute, v3}; use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse}; use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; @@ -54,115 +47,46 @@ pub async fn version_list( redis: web::Data, session_queue: web::Data, ) -> Result { - let string = info.into_inner().0; - - let result 
= database::models::Project::get(&string, &**pool, &redis).await?; - - let user_option = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::PROJECT_READ, Scopes::VERSION_READ]), - ) - .await - .map(|x| x.1) - .ok(); - - if let Some(project) = result { - if !is_authorized(&project.inner, &user_option, &pool).await? { - return Ok(HttpResponse::NotFound().body("")); - } - - let version_filters = filters - .game_versions - .as_ref() - .map(|x| serde_json::from_str::>(x).unwrap_or_default()); - let loader_filters = filters - .loaders - .as_ref() - .map(|x| serde_json::from_str::>(x).unwrap_or_default()); - let mut versions = database::models::Version::get_many(&project.versions, &**pool, &redis) - .await? - .into_iter() - .skip(filters.offset.unwrap_or(0)) - .take(filters.limit.unwrap_or(usize::MAX)) - .filter(|x| { - let mut bool = true; - - if let Some(version_type) = filters.version_type { - bool &= &*x.inner.version_type == version_type.as_str(); + let loader_fields = if let Some(game_versions) = filters.game_versions { + // TODO: extract this logic which is similar to the other v2->v3 version_file functions + let mut loader_fields = HashMap::new(); + serde_json::from_str::>(&game_versions) + .ok() + .and_then(|versions| { + let mut game_versions: Vec = vec![]; + for gv in versions { + game_versions.push(serde_json::json!(gv.clone())); } - if let Some(loaders) = &loader_filters { - bool &= x.loaders.iter().any(|y| loaders.contains(y)); - } - if let Some(game_versions) = &version_filters { - bool &= x.game_versions.iter().any(|y| game_versions.contains(y)); - } - - bool - }) - .collect::>(); - - let mut response = versions - .iter() - .filter(|version| { - filters - .featured - .map(|featured| featured == version.inner.featured) - .unwrap_or(true) + loader_fields.insert("game_versions".to_string(), game_versions); + serde_json::to_string(&loader_fields).ok() }) - .cloned() - .collect::>(); + } else { + None + }; - versions.sort(); 
+ let filters = v3::versions::VersionListFilters { + loader_fields, + loaders: filters.loaders, + featured: filters.featured, + version_type: filters.version_type, + limit: filters.limit, + offset: filters.offset, + }; - // Attempt to populate versions with "auto featured" versions - if response.is_empty() && !versions.is_empty() && filters.featured.unwrap_or(false) { - let (loaders, game_versions) = futures::future::try_join( - database::models::categories::Loader::list(&**pool, &redis), - database::models::categories::GameVersion::list_filter( - None, - Some(true), - &**pool, - &redis, - ), - ) + let response = + v3::versions::version_list(req, info, web::Query(filters), pool, redis, session_queue) .await?; - let mut joined_filters = Vec::new(); - for game_version in &game_versions { - for loader in &loaders { - joined_filters.push((game_version, loader)) - } - } - - joined_filters.into_iter().for_each(|filter| { - versions - .iter() - .find(|version| { - version.game_versions.contains(&filter.0.version) - && version.loaders.contains(&filter.1.loader) - }) - .map(|version| response.push(version.clone())) - .unwrap_or(()); - }); - - if response.is_empty() { - versions - .into_iter() - .for_each(|version| response.push(version)); - } + // Convert response to V2 format + match v2_reroute::extract_ok_json::>(response).await { + Ok(versions) => { + let v2_versions = versions + .into_iter() + .map(LegacyVersion::from) + .collect::>(); + Ok(HttpResponse::Ok().json(v2_versions)) } - - response.sort(); - response.dedup_by(|a, b| a.inner.id == b.inner.id); - - let response = filter_authorized_versions(response, &user_option, &pool).await?; - - Ok(HttpResponse::Ok().json(response)) - } else { - Ok(HttpResponse::NotFound().body("")) + Err(response) => Ok(response), } } @@ -176,41 +100,16 @@ pub async fn version_project_get( session_queue: web::Data, ) -> Result { let id = info.into_inner(); - - let result = database::models::Project::get(&id.0, &**pool, &redis).await?; - - 
let user_option = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::PROJECT_READ, Scopes::VERSION_READ]), - ) - .await - .map(|x| x.1) - .ok(); - - if let Some(project) = result { - if !is_authorized(&project.inner, &user_option, &pool).await? { - return Ok(HttpResponse::NotFound().body("")); - } - - let versions = - database::models::Version::get_many(&project.versions, &**pool, &redis).await?; - - let id_opt = parse_base62(&id.1).ok(); - let version = versions - .into_iter() - .find(|x| Some(x.inner.id.0 as u64) == id_opt || x.inner.version_number == id.1); - - if let Some(version) = version { - if is_authorized_version(&version.inner, &user_option, &pool).await? { - return Ok(HttpResponse::Ok().json(models::projects::Version::from(version))); - } + let response = + v3::versions::version_project_get_helper(req, id, pool, redis, session_queue).await?; + // Convert response to V2 format + match v2_reroute::extract_ok_json::(response).await { + Ok(version) => { + let v2_version = LegacyVersion::from(version); + Ok(HttpResponse::Ok().json(v2_version)) } + Err(response) => Ok(response), } - - Ok(HttpResponse::NotFound().body("")) } #[derive(Serialize, Deserialize)] @@ -226,26 +125,21 @@ pub async fn versions_get( redis: web::Data, session_queue: web::Data, ) -> Result { - let version_ids = serde_json::from_str::>(&ids.ids)? 
- .into_iter() - .map(|x| x.into()) - .collect::>(); - let versions_data = database::models::Version::get_many(&version_ids, &**pool, &redis).await?; - - let user_option = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::VERSION_READ]), - ) - .await - .map(|x| x.1) - .ok(); - - let versions = filter_authorized_versions(versions_data, &user_option, &pool).await?; - - Ok(HttpResponse::Ok().json(versions)) + let ids = v3::versions::VersionIds { ids: ids.ids }; + let response = + v3::versions::versions_get(req, web::Query(ids), pool, redis, session_queue).await?; + + // Convert response to V2 format + match v2_reroute::extract_ok_json::>(response).await { + Ok(versions) => { + let v2_versions = versions + .into_iter() + .map(LegacyVersion::from) + .collect::>(); + Ok(HttpResponse::Ok().json(v2_versions)) + } + Err(response) => Ok(response), + } } #[get("{version_id}")] @@ -257,26 +151,15 @@ pub async fn version_get( session_queue: web::Data, ) -> Result { let id = info.into_inner().0; - let version_data = database::models::Version::get(id.into(), &**pool, &redis).await?; - - let user_option = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::VERSION_READ]), - ) - .await - .map(|x| x.1) - .ok(); - - if let Some(data) = version_data { - if is_authorized_version(&data.inner, &user_option, &pool).await? 
{ - return Ok(HttpResponse::Ok().json(models::projects::Version::from(data))); + let response = v3::versions::version_get_helper(req, id, pool, redis, session_queue).await?; + // Convert response to V2 format + match v2_reroute::extract_ok_json::(response).await { + Ok(version) => { + let v2_version = LegacyVersion::from(version); + Ok(HttpResponse::Ok().json(v2_version)) } + Err(response) => Ok(response), } - - Ok(HttpResponse::NotFound().body("")) } #[derive(Serialize, Deserialize, Validate)] @@ -299,7 +182,7 @@ pub struct EditVersion { custom(function = "crate::util::validate::validate_deps") )] pub dependencies: Option>, - pub game_versions: Option>, + pub game_versions: Option>, pub loaders: Option>, pub featured: Option, pub primary_file: Option<(String, String)>, @@ -319,415 +202,56 @@ pub struct EditVersionFileType { #[patch("{id}")] pub async fn version_edit( req: HttpRequest, - info: web::Path<(models::ids::VersionId,)>, + info: web::Path<(VersionId,)>, pool: web::Data, redis: web::Data, new_version: web::Json, session_queue: web::Data, ) -> Result { - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::VERSION_WRITE]), - ) - .await? 
- .1; - - new_version - .validate() - .map_err(|err| ApiError::Validation(validation_errors_to_string(err, None)))?; + let new_version = new_version.into_inner(); - let version_id = info.into_inner().0; - let id = version_id.into(); - - let result = database::models::Version::get(id, &**pool, &redis).await?; - - if let Some(version_item) = result { - let project_item = - database::models::Project::get_id(version_item.inner.project_id, &**pool, &redis) - .await?; - - let team_member = database::models::TeamMember::get_from_user_id_project( - version_item.inner.project_id, - user.id.into(), - &**pool, - ) - .await?; - - let organization = Organization::get_associated_organization_project_id( - version_item.inner.project_id, - &**pool, - ) - .await?; - - let organization_team_member = if let Some(organization) = &organization { - database::models::TeamMember::get_from_user_id( - organization.team_id, - user.id.into(), - &**pool, - ) - .await? - } else { - None - }; - - let permissions = ProjectPermissions::get_permissions_by_role( - &user.role, - &team_member, - &organization_team_member, + let mut fields = HashMap::new(); + if new_version.game_versions.is_some() { + fields.insert( + "game_versions".to_string(), + serde_json::json!(new_version.game_versions), ); + } - if let Some(perms) = permissions { - if !perms.contains(ProjectPermissions::UPLOAD_VERSION) { - return Err(ApiError::CustomAuthentication( - "You do not have the permissions to edit this version!".to_string(), - )); - } - - let mut transaction = pool.begin().await?; - - if let Some(name) = &new_version.name { - sqlx::query!( - " - UPDATE versions - SET name = $1 - WHERE (id = $2) - ", - name.trim(), - id as database::models::ids::VersionId, - ) - .execute(&mut *transaction) - .await?; - } - - if let Some(number) = &new_version.version_number { - sqlx::query!( - " - UPDATE versions - SET version_number = $1 - WHERE (id = $2) - ", - number, - id as database::models::ids::VersionId, - ) - .execute(&mut 
*transaction) - .await?; - } - - if let Some(version_type) = &new_version.version_type { - sqlx::query!( - " - UPDATE versions - SET version_type = $1 - WHERE (id = $2) - ", - version_type.as_str(), - id as database::models::ids::VersionId, - ) - .execute(&mut *transaction) - .await?; - } - - if let Some(dependencies) = &new_version.dependencies { - if let Some(project) = project_item { - if project.project_type != "modpack" { - sqlx::query!( - " - DELETE FROM dependencies WHERE dependent_id = $1 - ", - id as database::models::ids::VersionId, - ) - .execute(&mut *transaction) - .await?; - - let builders = dependencies - .iter() - .map(|x| database::models::version_item::DependencyBuilder { - project_id: x.project_id.map(|x| x.into()), - version_id: x.version_id.map(|x| x.into()), - file_name: x.file_name.clone(), - dependency_type: x.dependency_type.to_string(), - }) - .collect::>(); - - DependencyBuilder::insert_many( - builders, - version_item.inner.id, - &mut transaction, - ) - .await?; - } - } - } - - if let Some(game_versions) = &new_version.game_versions { - sqlx::query!( - " - DELETE FROM game_versions_versions WHERE joining_version_id = $1 - ", - id as database::models::ids::VersionId, - ) - .execute(&mut *transaction) - .await?; - - let mut version_versions = Vec::new(); - for game_version in game_versions { - let game_version_id = database::models::categories::GameVersion::get_id( - &game_version.0, - &mut *transaction, - ) - .await? 
- .ok_or_else(|| { - ApiError::InvalidInput( - "No database entry for game version provided.".to_string(), - ) - })?; - - version_versions.push(VersionVersion::new(game_version_id, id)); - } - VersionVersion::insert_many(version_versions, &mut transaction).await?; - - database::models::Project::update_game_versions( - version_item.inner.project_id, - &mut transaction, - ) - .await?; - } - - if let Some(loaders) = &new_version.loaders { - sqlx::query!( - " - DELETE FROM loaders_versions WHERE version_id = $1 - ", - id as database::models::ids::VersionId, - ) - .execute(&mut *transaction) - .await?; - - let mut loader_versions = Vec::new(); - for loader in loaders { - let loader_id = - database::models::categories::Loader::get_id(&loader.0, &mut *transaction) - .await? - .ok_or_else(|| { - ApiError::InvalidInput( - "No database entry for loader provided.".to_string(), - ) - })?; - loader_versions.push(LoaderVersion::new(loader_id, id)); - } - LoaderVersion::insert_many(loader_versions, &mut transaction).await?; - - database::models::Project::update_loaders( - version_item.inner.project_id, - &mut transaction, - ) - .await?; - } - - if let Some(featured) = &new_version.featured { - sqlx::query!( - " - UPDATE versions - SET featured = $1 - WHERE (id = $2) - ", - featured, - id as database::models::ids::VersionId, - ) - .execute(&mut *transaction) - .await?; - } - - if let Some(primary_file) = &new_version.primary_file { - let result = sqlx::query!( - " - SELECT f.id id FROM hashes h - INNER JOIN files f ON h.file_id = f.id - WHERE h.algorithm = $2 AND h.hash = $1 - ", - primary_file.1.as_bytes(), - primary_file.0 - ) - .fetch_optional(&**pool) - .await? 
- .ok_or_else(|| { - ApiError::InvalidInput(format!( - "Specified file with hash {} does not exist.", - primary_file.1.clone() - )) - })?; - - sqlx::query!( - " - UPDATE files - SET is_primary = FALSE - WHERE (version_id = $1) - ", - id as database::models::ids::VersionId, - ) - .execute(&mut *transaction) - .await?; - - sqlx::query!( - " - UPDATE files - SET is_primary = TRUE - WHERE (id = $1) - ", - result.id, - ) - .execute(&mut *transaction) - .await?; - } - - if let Some(body) = &new_version.changelog { - sqlx::query!( - " - UPDATE versions - SET changelog = $1 - WHERE (id = $2) - ", - body, - id as database::models::ids::VersionId, - ) - .execute(&mut *transaction) - .await?; - } - - if let Some(downloads) = &new_version.downloads { - if !user.role.is_mod() { - return Err(ApiError::CustomAuthentication( - "You don't have permission to set the downloads of this mod".to_string(), - )); - } - - sqlx::query!( - " - UPDATE versions - SET downloads = $1 - WHERE (id = $2) - ", - *downloads as i32, - id as database::models::ids::VersionId, - ) - .execute(&mut *transaction) - .await?; - - let diff = *downloads - (version_item.inner.downloads as u32); - - sqlx::query!( - " - UPDATE mods - SET downloads = downloads + $1 - WHERE (id = $2) - ", - diff as i32, - version_item.inner.project_id as database::models::ids::ProjectId, - ) - .execute(&mut *transaction) - .await?; - } - - if let Some(status) = &new_version.status { - if !status.can_be_requested() { - return Err(ApiError::InvalidInput( - "The requested status cannot be set!".to_string(), - )); - } - - sqlx::query!( - " - UPDATE versions - SET status = $1 - WHERE (id = $2) - ", - status.as_str(), - id as database::models::ids::VersionId, - ) - .execute(&mut *transaction) - .await?; - } - - if let Some(file_types) = &new_version.file_types { - for file_type in file_types { - let result = sqlx::query!( - " - SELECT f.id id FROM hashes h - INNER JOIN files f ON h.file_id = f.id - WHERE h.algorithm = $2 AND h.hash = $1 - 
", - file_type.hash.as_bytes(), - file_type.algorithm - ) - .fetch_optional(&**pool) - .await? - .ok_or_else(|| { - ApiError::InvalidInput(format!( - "Specified file with hash {} does not exist.", - file_type.algorithm.clone() - )) - })?; - - sqlx::query!( - " - UPDATE files - SET file_type = $2 - WHERE (id = $1) - ", - result.id, - file_type.file_type.as_ref().map(|x| x.as_str()), - ) - .execute(&mut *transaction) - .await?; - } - } - - if let Some(ordering) = &new_version.ordering { - sqlx::query!( - " - UPDATE versions - SET ordering = $1 - WHERE (id = $2) - ", - ordering.to_owned() as Option, - id as database::models::ids::VersionId, - ) - .execute(&mut *transaction) - .await?; - } - - // delete any images no longer in the changelog - let checkable_strings: Vec<&str> = vec![&new_version.changelog] - .into_iter() - .filter_map(|x| x.as_ref().map(|y| y.as_str())) - .collect(); - let context = ImageContext::Version { - version_id: Some(version_item.inner.id.into()), - }; - - img::delete_unused_images(context, checkable_strings, &mut transaction, &redis).await?; + let new_version = v3::versions::EditVersion { + name: new_version.name, + version_number: new_version.version_number, + changelog: new_version.changelog, + version_type: new_version.version_type, + dependencies: new_version.dependencies, + loaders: new_version.loaders, + featured: new_version.featured, + primary_file: new_version.primary_file, + downloads: new_version.downloads, + status: new_version.status, + file_types: new_version.file_types.map(|v| { + v.into_iter() + .map(|evft| v3::versions::EditVersionFileType { + algorithm: evft.algorithm, + hash: evft.hash, + file_type: evft.file_type, + }) + .collect::>() + }), + ordering: new_version.ordering, + fields, + }; - database::models::Version::clear_cache(&version_item, &redis).await?; - database::models::Project::clear_cache( - version_item.inner.project_id, - None, - Some(true), - &redis, - ) - .await?; - transaction.commit().await?; - 
Ok(HttpResponse::NoContent().body("")) - } else { - Err(ApiError::CustomAuthentication( - "You do not have permission to edit this version!".to_string(), - )) - } - } else { - Ok(HttpResponse::NotFound().body("")) - } + let response = v3::versions::version_edit( + req, + info, + pool, + redis, + web::Json(serde_json::to_value(new_version)?), + session_queue, + ) + .await?; + Ok(response) } #[derive(Deserialize)] @@ -745,92 +269,18 @@ pub async fn version_schedule( scheduling_data: web::Json, session_queue: web::Data, ) -> Result { - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::VERSION_WRITE]), + v3::versions::version_schedule( + req, + info, + pool, + redis, + web::Json(v3::versions::SchedulingData { + time: scheduling_data.time, + requested_status: scheduling_data.requested_status, + }), + session_queue, ) - .await? - .1; - - if scheduling_data.time < Utc::now() { - return Err(ApiError::InvalidInput( - "You cannot schedule a version to be released in the past!".to_string(), - )); - } - - if !scheduling_data.requested_status.can_be_requested() { - return Err(ApiError::InvalidInput( - "Specified requested status cannot be requested!".to_string(), - )); - } - - let string = info.into_inner().0; - let result = database::models::Version::get(string.into(), &**pool, &redis).await?; - - if let Some(version_item) = result { - let team_member = database::models::TeamMember::get_from_user_id_project( - version_item.inner.project_id, - user.id.into(), - &**pool, - ) - .await?; - - let organization_item = - database::models::Organization::get_associated_organization_project_id( - version_item.inner.project_id, - &**pool, - ) - .await - .map_err(ApiError::Database)?; - - let organization_team_member = if let Some(organization) = &organization_item { - database::models::TeamMember::get_from_user_id( - organization.team_id, - user.id.into(), - &**pool, - ) - .await? 
- } else { - None - }; - - let permissions = ProjectPermissions::get_permissions_by_role( - &user.role, - &team_member, - &organization_team_member, - ) - .unwrap_or_default(); - - if !user.role.is_mod() && !permissions.contains(ProjectPermissions::EDIT_DETAILS) { - return Err(ApiError::CustomAuthentication( - "You do not have permission to edit this version's scheduling data!".to_string(), - )); - } - - let mut transaction = pool.begin().await?; - sqlx::query!( - " - UPDATE versions - SET status = $1, date_published = $2 - WHERE (id = $3) - ", - VersionStatus::Scheduled.as_str(), - scheduling_data.time, - version_item.inner.id as database::models::ids::VersionId, - ) - .execute(&mut *transaction) - .await?; - - database::models::Version::clear_cache(&version_item, &redis).await?; - transaction.commit().await?; - - Ok(HttpResponse::NoContent().body("")) - } else { - Ok(HttpResponse::NotFound().body("")) - } + .await } #[delete("{version_id}")] @@ -841,81 +291,5 @@ pub async fn version_delete( redis: web::Data, session_queue: web::Data, ) -> Result { - let user = get_user_from_headers( - &req, - &**pool, - &redis, - &session_queue, - Some(&[Scopes::VERSION_DELETE]), - ) - .await? - .1; - let id = info.into_inner().0; - - let version = database::models::Version::get(id.into(), &**pool, &redis) - .await? - .ok_or_else(|| { - ApiError::InvalidInput("The specified version does not exist!".to_string()) - })?; - - if !user.role.is_admin() { - let team_member = database::models::TeamMember::get_from_user_id_project( - version.inner.project_id, - user.id.into(), - &**pool, - ) - .await - .map_err(ApiError::Database)?; - - let organization = - Organization::get_associated_organization_project_id(version.inner.project_id, &**pool) - .await?; - - let organization_team_member = if let Some(organization) = &organization { - database::models::TeamMember::get_from_user_id( - organization.team_id, - user.id.into(), - &**pool, - ) - .await? 
- } else { - None - }; - let permissions = ProjectPermissions::get_permissions_by_role( - &user.role, - &team_member, - &organization_team_member, - ) - .unwrap_or_default(); - - if !permissions.contains(ProjectPermissions::DELETE_VERSION) { - return Err(ApiError::CustomAuthentication( - "You do not have permission to delete versions in this team".to_string(), - )); - } - } - - let mut transaction = pool.begin().await?; - let context = ImageContext::Version { - version_id: Some(version.inner.id.into()), - }; - let uploaded_images = - database::models::Image::get_many_contexted(context, &mut transaction).await?; - for image in uploaded_images { - image_item::Image::remove(image.id, &mut transaction, &redis).await?; - } - - let result = - database::models::Version::remove_full(version.inner.id, &redis, &mut transaction).await?; - - database::models::Project::clear_cache(version.inner.project_id, None, Some(true), &redis) - .await?; - - transaction.commit().await?; - - if result.is_some() { - Ok(HttpResponse::NoContent().body("")) - } else { - Ok(HttpResponse::NotFound().body("")) - } + v3::versions::version_delete(req, info, pool, redis, session_queue).await } diff --git a/src/routes/v2_reroute.rs b/src/routes/v2_reroute.rs new file mode 100644 index 00000000..7e2c16cf --- /dev/null +++ b/src/routes/v2_reroute.rs @@ -0,0 +1,112 @@ +use super::v3::project_creation::CreateError; +use crate::util::actix::{generate_multipart, MultipartSegment, MultipartSegmentData}; +use actix_multipart::Multipart; +use actix_web::http::header::{HeaderMap, TryIntoHeaderPair}; +use actix_web::HttpResponse; +use futures::{stream, StreamExt}; +use serde_json::json; + +pub async fn extract_ok_json(response: HttpResponse) -> Result +where + T: serde::de::DeserializeOwned, +{ + if response.status() == actix_web::http::StatusCode::OK { + let failure_http_response = || { + HttpResponse::InternalServerError().json(json!({ + "error": "reroute_error", + "description": "Could not parse response from 
V2 redirection of route." + })) + }; + // Takes json out of HttpResponse, mutates it, then regenerates the HttpResponse + let body = response.into_body(); + let bytes = actix_web::body::to_bytes(body) + .await + .map_err(|_| failure_http_response())?; + let json_value: T = serde_json::from_slice(&bytes).map_err(|_| failure_http_response())?; + Ok(json_value) + } else { + Err(response) + } +} + +pub async fn alter_actix_multipart( + mut multipart: Multipart, + mut headers: HeaderMap, + mut closure: impl FnMut(T) -> Result, +) -> Result +where + T: serde::de::DeserializeOwned, + U: serde::Serialize, +{ + let mut segments: Vec = Vec::new(); + + if let Some(field) = multipart.next().await { + let mut field = field?; + let content_disposition = field.content_disposition().clone(); + let field_name = content_disposition.get_name().unwrap_or(""); + let field_filename = content_disposition.get_filename(); + let field_content_type = field.content_type(); + let field_content_type = field_content_type.map(|ct| ct.to_string()); + + let mut buffer = Vec::new(); + while let Some(chunk) = field.next().await { + let data = chunk?; + buffer.extend_from_slice(&data); + } + + { + let json_value: T = serde_json::from_slice(&buffer)?; + let json_value: U = closure(json_value)?; + buffer = serde_json::to_vec(&json_value)?; + } + + segments.push(MultipartSegment { + name: field_name.to_string(), + filename: field_filename.map(|s| s.to_string()), + content_type: field_content_type, + data: MultipartSegmentData::Binary(buffer), + }) + } + + while let Some(field) = multipart.next().await { + let mut field = field?; + let content_disposition = field.content_disposition().clone(); + let field_name = content_disposition.get_name().unwrap_or(""); + let field_filename = content_disposition.get_filename(); + let field_content_type = field.content_type(); + let field_content_type = field_content_type.map(|ct| ct.to_string()); + + let mut buffer = Vec::new(); + while let Some(chunk) = 
field.next().await { + let data = chunk?; + buffer.extend_from_slice(&data); + } + + segments.push(MultipartSegment { + name: field_name.to_string(), + filename: field_filename.map(|s| s.to_string()), + content_type: field_content_type, + data: MultipartSegmentData::Binary(buffer), + }) + } + + let (boundary, payload) = generate_multipart(segments); + + match ( + "Content-Type", + format!("multipart/form-data; boundary={}", boundary).as_str(), + ) + .try_into_pair() + { + Ok((key, value)) => { + headers.insert(key, value); + } + Err(err) => { + CreateError::InvalidInput(format!("Error inserting test header: {:?}.", err)); + } + }; + + let new_multipart = Multipart::new(&headers, stream::once(async { Ok(payload) })); + + Ok(new_multipart) +} diff --git a/src/routes/v3/analytics_get.rs b/src/routes/v3/analytics_get.rs new file mode 100644 index 00000000..dc31c69c --- /dev/null +++ b/src/routes/v3/analytics_get.rs @@ -0,0 +1,611 @@ +use super::ApiError; +use crate::database::redis::RedisPool; +use crate::{ + auth::{filter_authorized_projects, filter_authorized_versions, get_user_from_headers}, + database::models::{project_item, user_item, version_item}, + models::{ + ids::{ + base62_impl::{parse_base62, to_base62}, + ProjectId, VersionId, + }, + pats::Scopes, + }, + queue::session::AuthQueue, +}; +use actix_web::{web, HttpRequest, HttpResponse}; +use chrono::{DateTime, Duration, Utc}; +use serde::{Deserialize, Serialize}; +use sqlx::postgres::types::PgInterval; +use sqlx::PgPool; +use std::collections::HashMap; +use std::convert::TryInto; + +pub fn config(cfg: &mut web::ServiceConfig) { + cfg.service( + web::scope("analytics") + .route("playtime", web::get().to(playtimes_get)) + .route("views", web::get().to(views_get)) + .route("downloads", web::get().to(downloads_get)) + .route("revenue", web::get().to(revenue_get)) + .route( + "countries/downloads", + web::get().to(countries_downloads_get), + ) + .route("countries/views", web::get().to(countries_views_get)), + ); 
+} + +/// The json data to be passed to fetch analytic data +/// Either a list of project_ids or version_ids can be used, but not both. Unauthorized projects/versions will be filtered out. +/// start_date and end_date are optional, and default to two weeks ago, and the maximum date respectively. +/// resolution_minutes is optional. This refers to the window by which we are looking (every day, every minute, etc) and defaults to 1440 (1 day) +#[derive(Serialize, Deserialize, Clone, Debug)] +pub struct GetData { + // only one of project_ids or version_ids should be used + // if neither are provided, all projects the user has access to will be used + pub project_ids: Option, + pub version_ids: Option, + + pub start_date: Option>, // defaults to 2 weeks ago + pub end_date: Option>, // defaults to now + + pub resolution_minutes: Option, // defaults to 1 day. Ignored in routes that do not aggregate over a resolution (eg: /countries) +} + +/// Get playtime data for a set of projects or versions +/// Data is returned as a hashmap of project/version ids to a hashmap of days to playtime data +/// eg: +/// { +/// "4N1tEhnO": { +/// "20230824": 23 +/// } +///} +/// Either a list of project_ids or version_ids can be used, but not both. Unauthorized projects/versions will be filtered out. 
+#[derive(Serialize, Deserialize, Clone)] +pub struct FetchedPlaytime { + pub time: u64, + pub total_seconds: u64, + pub loader_seconds: HashMap, + pub game_version_seconds: HashMap, + pub parent_seconds: HashMap, +} +pub async fn playtimes_get( + req: HttpRequest, + clickhouse: web::Data, + data: web::Query, + session_queue: web::Data, + pool: web::Data, + redis: web::Data, +) -> Result { + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::ANALYTICS]), + ) + .await + .map(|x| x.1)?; + + let project_ids = data + .project_ids + .as_ref() + .map(|ids| serde_json::from_str::>(ids)) + .transpose()?; + let version_ids = data + .version_ids + .as_ref() + .map(|ids| serde_json::from_str::>(ids)) + .transpose()?; + + if project_ids.is_some() && version_ids.is_some() { + return Err(ApiError::InvalidInput( + "Only one of 'project_ids' or 'version_ids' should be used.".to_string(), + )); + } + + let start_date = data.start_date.unwrap_or(Utc::now() - Duration::weeks(2)); + let end_date = data.end_date.unwrap_or(Utc::now()); + let resolution_minutes = data.resolution_minutes.unwrap_or(60 * 24); + + // Convert String list to list of ProjectIds or VersionIds + // - Filter out unauthorized projects/versions + // - If no project_ids or version_ids are provided, we default to all projects the user has access to + let (project_ids, version_ids) = + filter_allowed_ids(project_ids, version_ids, user, &pool, &redis).await?; + + // Get the views + let playtimes = crate::clickhouse::fetch_playtimes( + project_ids, + version_ids, + start_date, + end_date, + resolution_minutes, + clickhouse.into_inner(), + ) + .await?; + + let mut hm = HashMap::new(); + for playtime in playtimes { + let id_string = to_base62(playtime.id); + if !hm.contains_key(&id_string) { + hm.insert(id_string.clone(), HashMap::new()); + } + if let Some(hm) = hm.get_mut(&id_string) { + hm.insert(playtime.time, playtime.total_seconds); + } + } + + 
Ok(HttpResponse::Ok().json(hm)) +} + +/// Get view data for a set of projects or versions +/// Data is returned as a hashmap of project/version ids to a hashmap of days to views +/// eg: +/// { +/// "4N1tEhnO": { +/// "20230824": 1090 +/// } +///} +/// Either a list of project_ids or version_ids can be used, but not both. Unauthorized projects/versions will be filtered out. +pub async fn views_get( + req: HttpRequest, + clickhouse: web::Data, + data: web::Query, + session_queue: web::Data, + pool: web::Data, + redis: web::Data, +) -> Result { + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::ANALYTICS]), + ) + .await + .map(|x| x.1)?; + + let project_ids = data + .project_ids + .as_ref() + .map(|ids| serde_json::from_str::>(ids)) + .transpose()?; + let version_ids = data + .version_ids + .as_ref() + .map(|ids| serde_json::from_str::>(ids)) + .transpose()?; + + if project_ids.is_some() && version_ids.is_some() { + return Err(ApiError::InvalidInput( + "Only one of 'project_ids' or 'version_ids' should be used.".to_string(), + )); + } + + let start_date = data.start_date.unwrap_or(Utc::now() - Duration::weeks(2)); + let end_date = data.end_date.unwrap_or(Utc::now()); + let resolution_minutes = data.resolution_minutes.unwrap_or(60 * 24); + + // Convert String list to list of ProjectIds or VersionIds + // - Filter out unauthorized projects/versions + // - If no project_ids or version_ids are provided, we default to all projects the user has access to + let (project_ids, version_ids) = + filter_allowed_ids(project_ids, version_ids, user, &pool, &redis).await?; + + // Get the views + let views = crate::clickhouse::fetch_views( + project_ids, + version_ids, + start_date, + end_date, + resolution_minutes, + clickhouse.into_inner(), + ) + .await?; + + let mut hm = HashMap::new(); + for views in views { + let id_string = to_base62(views.id); + if !hm.contains_key(&id_string) { + hm.insert(id_string.clone(), HashMap::new()); + 
} + if let Some(hm) = hm.get_mut(&id_string) { + hm.insert(views.time, views.total_views); + } + } + + Ok(HttpResponse::Ok().json(hm)) +} + +/// Get download data for a set of projects or versions +/// Data is returned as a hashmap of project/version ids to a hashmap of days to downloads +/// eg: +/// { +/// "4N1tEhnO": { +/// "20230824": 32 +/// } +///} +/// Either a list of project_ids or version_ids can be used, but not both. Unauthorized projects/versions will be filtered out. +pub async fn downloads_get( + req: HttpRequest, + clickhouse: web::Data, + data: web::Query, + session_queue: web::Data, + pool: web::Data, + redis: web::Data, +) -> Result { + let user_option = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::ANALYTICS]), + ) + .await + .map(|x| x.1)?; + + let project_ids = data + .project_ids + .as_ref() + .map(|ids| serde_json::from_str::>(ids)) + .transpose()?; + let version_ids = data + .version_ids + .as_ref() + .map(|ids| serde_json::from_str::>(ids)) + .transpose()?; + + if project_ids.is_some() && version_ids.is_some() { + return Err(ApiError::InvalidInput( + "Only one of 'project_ids' or 'version_ids' should be used.".to_string(), + )); + } + + let start_date = data.start_date.unwrap_or(Utc::now() - Duration::weeks(2)); + let end_date = data.end_date.unwrap_or(Utc::now()); + let resolution_minutes = data.resolution_minutes.unwrap_or(60 * 24); + + // Convert String list to list of ProjectIds or VersionIds + // - Filter out unauthorized projects/versions + // - If no project_ids or version_ids are provided, we default to all projects the user has access to + let (project_ids, version_ids) = + filter_allowed_ids(project_ids, version_ids, user_option, &pool, &redis).await?; + + // Get the downloads + let downloads = crate::clickhouse::fetch_downloads( + project_ids, + version_ids, + start_date, + end_date, + resolution_minutes, + clickhouse.into_inner(), + ) + .await?; + + let mut hm = HashMap::new(); + for 
downloads in downloads { + let id_string = to_base62(downloads.id); + if !hm.contains_key(&id_string) { + hm.insert(id_string.clone(), HashMap::new()); + } + if let Some(hm) = hm.get_mut(&id_string) { + hm.insert(downloads.time, downloads.total_downloads); + } + } + + Ok(HttpResponse::Ok().json(hm)) +} + +/// Get payout data for a set of projects +/// Data is returned as a hashmap of project ids to a hashmap of days to amount earned per day +/// eg: +/// { +/// "4N1tEhnO": { +/// "20230824": 0.001 +/// } +///} +/// ONLY project IDs can be used. Unauthorized projects will be filtered out. +pub async fn revenue_get( + req: HttpRequest, + data: web::Query, + session_queue: web::Data, + pool: web::Data, + redis: web::Data, +) -> Result { + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::PAYOUTS_READ]), + ) + .await + .map(|x| x.1)?; + + let project_ids = data + .project_ids + .as_ref() + .map(|ids| serde_json::from_str::>(ids)) + .transpose()?; + + let start_date = data.start_date.unwrap_or(Utc::now() - Duration::weeks(2)); + let end_date = data.end_date.unwrap_or(Utc::now()); + let resolution_minutes = data.resolution_minutes.unwrap_or(60 * 24); + + // Round up/down to nearest duration as we are using pgadmin, does not have rounding in the fetch command + // Round start_date down to nearest resolution + let diff = start_date.timestamp() % (resolution_minutes as i64 * 60); + let start_date = start_date - Duration::seconds(diff); + + // Round end_date up to nearest resolution + let diff = end_date.timestamp() % (resolution_minutes as i64 * 60); + let end_date = end_date + Duration::seconds((resolution_minutes as i64 * 60) - diff); + + // Convert String list to list of ProjectIds or VersionIds + // - Filter out unauthorized projects/versions + // - If no project_ids or version_ids are provided, we default to all projects the user has access to + let (project_ids, _) = filter_allowed_ids(project_ids, None, user, &pool, 
&redis).await?; + + let duration: PgInterval = Duration::minutes(resolution_minutes as i64) + .try_into() + .map_err(|_| ApiError::InvalidInput("Invalid resolution_minutes".to_string()))?; + // Get the revenue data + let payouts_values = sqlx::query!( + " + SELECT mod_id, SUM(amount) amount_sum, DATE_BIN($4::interval, created, TIMESTAMP '2001-01-01') AS interval_start + FROM payouts_values + WHERE mod_id = ANY($1) AND created BETWEEN $2 AND $3 + GROUP by mod_id, interval_start ORDER BY interval_start + ", + &project_ids.unwrap_or_default().into_iter().map(|x| x.0 as i64).collect::>(), + start_date, + end_date, + duration, + ) + .fetch_all(&**pool) + .await?; + + let mut hm = HashMap::new(); + for value in payouts_values { + if let Some(mod_id) = value.mod_id { + if let Some(amount) = value.amount_sum { + if let Some(interval_start) = value.interval_start { + let id_string = to_base62(mod_id as u64); + if !hm.contains_key(&id_string) { + hm.insert(id_string.clone(), HashMap::new()); + } + if let Some(hm) = hm.get_mut(&id_string) { + hm.insert(interval_start.timestamp(), amount); + } + } + } + } + } + + Ok(HttpResponse::Ok().json(hm)) +} + +/// Get country data for a set of projects or versions +/// Data is returned as a hashmap of project/version ids to a hashmap of coutnry to downloads. +/// Unknown countries are labeled "". +/// This is usuable to see significant performing countries per project +/// eg: +/// { +/// "4N1tEhnO": { +/// "CAN": 22 +/// } +///} +/// Either a list of project_ids or version_ids can be used, but not both. Unauthorized projects/versions will be filtered out. 
+/// For this endpoint, provided dates are a range to aggregate over, not specific days to fetch +pub async fn countries_downloads_get( + req: HttpRequest, + clickhouse: web::Data, + data: web::Query, + session_queue: web::Data, + pool: web::Data, + redis: web::Data, +) -> Result { + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::ANALYTICS]), + ) + .await + .map(|x| x.1)?; + + let project_ids = data + .project_ids + .as_ref() + .map(|ids| serde_json::from_str::>(ids)) + .transpose()?; + let version_ids = data + .version_ids + .as_ref() + .map(|ids| serde_json::from_str::>(ids)) + .transpose()?; + + if project_ids.is_some() && version_ids.is_some() { + return Err(ApiError::InvalidInput( + "Only one of 'project_ids' or 'version_ids' should be used.".to_string(), + )); + } + + let start_date = data.start_date.unwrap_or(Utc::now() - Duration::weeks(2)); + let end_date = data.end_date.unwrap_or(Utc::now()); + + // Convert String list to list of ProjectIds or VersionIds + // - Filter out unauthorized projects/versions + // - If no project_ids or version_ids are provided, we default to all projects the user has access to + let (project_ids, version_ids) = + filter_allowed_ids(project_ids, version_ids, user, &pool, &redis).await?; + + // Get the countries + let countries = crate::clickhouse::fetch_countries( + project_ids, + version_ids, + start_date, + end_date, + clickhouse.into_inner(), + ) + .await?; + + let mut hm = HashMap::new(); + for views in countries { + let id_string = to_base62(views.id); + if !hm.contains_key(&id_string) { + hm.insert(id_string.clone(), HashMap::new()); + } + if let Some(hm) = hm.get_mut(&id_string) { + hm.insert(views.country, views.total_downloads); + } + } + + Ok(HttpResponse::Ok().json(hm)) +} + +/// Get country data for a set of projects or versions +/// Data is returned as a hashmap of project/version ids to a hashmap of coutnry to views. +/// Unknown countries are labeled "". 
+/// This is usuable to see significant performing countries per project +/// eg: +/// { +/// "4N1tEhnO": { +/// "CAN": 56165 +/// } +///} +/// Either a list of project_ids or version_ids can be used, but not both. Unauthorized projects/versions will be filtered out. +/// For this endpoint, provided dates are a range to aggregate over, not specific days to fetch +pub async fn countries_views_get( + req: HttpRequest, + clickhouse: web::Data, + data: web::Query, + session_queue: web::Data, + pool: web::Data, + redis: web::Data, +) -> Result { + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::ANALYTICS]), + ) + .await + .map(|x| x.1)?; + + let project_ids = data + .project_ids + .as_ref() + .map(|ids| serde_json::from_str::>(ids)) + .transpose()?; + let version_ids = data + .version_ids + .as_ref() + .map(|ids| serde_json::from_str::>(ids)) + .transpose()?; + + if project_ids.is_some() && version_ids.is_some() { + return Err(ApiError::InvalidInput( + "Only one of 'project_ids' or 'version_ids' should be used.".to_string(), + )); + } + + let start_date = data.start_date.unwrap_or(Utc::now() - Duration::weeks(2)); + let end_date = data.end_date.unwrap_or(Utc::now()); + + // Convert String list to list of ProjectIds or VersionIds + // - Filter out unauthorized projects/versions + // - If no project_ids or version_ids are provided, we default to all projects the user has access to + let (project_ids, version_ids) = + filter_allowed_ids(project_ids, version_ids, user, &pool, &redis).await?; + + // Get the countries + let countries = crate::clickhouse::fetch_countries( + project_ids, + version_ids, + start_date, + end_date, + clickhouse.into_inner(), + ) + .await?; + + let mut hm = HashMap::new(); + for views in countries { + let id_string = to_base62(views.id); + if !hm.contains_key(&id_string) { + hm.insert(id_string.clone(), HashMap::new()); + } + if let Some(hm) = hm.get_mut(&id_string) { + hm.insert(views.country, 
views.total_views); + } + } + + Ok(HttpResponse::Ok().json(hm)) +} + +async fn filter_allowed_ids( + mut project_ids: Option>, + version_ids: Option>, + user: crate::models::users::User, + pool: &web::Data, + redis: &RedisPool, +) -> Result<(Option>, Option>), ApiError> { + if project_ids.is_some() && version_ids.is_some() { + return Err(ApiError::InvalidInput( + "Only one of 'project_ids' or 'version_ids' should be used.".to_string(), + )); + } + + // If no project_ids or version_ids are provided, we default to all projects the user has access to + if project_ids.is_none() && version_ids.is_none() { + project_ids = Some( + user_item::User::get_projects(user.id.into(), &***pool, redis) + .await? + .into_iter() + .map(|x| ProjectId::from(x).to_string()) + .collect(), + ); + } + + // Convert String list to list of ProjectIds or VersionIds + // - Filter out unauthorized projects/versions + + let project_ids = if let Some(project_ids) = project_ids { + // Submitted project_ids are filtered by the user's permissions + let ids = project_ids + .iter() + .map(|id| Ok(ProjectId(parse_base62(id)?).into())) + .collect::, ApiError>>()?; + let projects = project_item::Project::get_many_ids(&ids, &***pool, redis).await?; + let ids: Vec = filter_authorized_projects(projects, &Some(user.clone()), pool) + .await? + .into_iter() + .map(|x| x.id) + .collect::>(); + Some(ids) + } else { + None + }; + let version_ids = if let Some(version_ids) = version_ids { + // Submitted version_ids are filtered by the user's permissions + let ids = version_ids + .iter() + .map(|id| Ok(VersionId(parse_base62(id)?).into())) + .collect::, ApiError>>()?; + let versions = version_item::Version::get_many(&ids, &***pool, redis).await?; + let ids: Vec = filter_authorized_versions(versions, &Some(user), pool) + .await? 
+ .into_iter() + .map(|x| x.id) + .collect::>(); + Some(ids) + } else { + None + }; + + // Only one of project_ids or version_ids will be Some + Ok((project_ids, version_ids)) +} diff --git a/src/routes/v3/collections.rs b/src/routes/v3/collections.rs new file mode 100644 index 00000000..5ee0d823 --- /dev/null +++ b/src/routes/v3/collections.rs @@ -0,0 +1,538 @@ +use crate::auth::checks::{filter_authorized_collections, is_authorized_collection}; +use crate::auth::get_user_from_headers; +use crate::database::models::{collection_item, generate_collection_id, project_item}; +use crate::database::redis::RedisPool; +use crate::file_hosting::FileHost; +use crate::models::collections::{Collection, CollectionStatus}; +use crate::models::ids::base62_impl::parse_base62; +use crate::models::ids::{CollectionId, ProjectId}; +use crate::models::pats::Scopes; +use crate::queue::session::AuthQueue; +use crate::routes::v3::project_creation::CreateError; +use crate::routes::ApiError; +use crate::util::routes::read_from_payload; +use crate::util::validate::validation_errors_to_string; +use crate::{database, models}; +use actix_web::web::Data; +use actix_web::{web, HttpRequest, HttpResponse}; +use chrono::Utc; +use itertools::Itertools; +use serde::{Deserialize, Serialize}; +use sqlx::PgPool; +use std::sync::Arc; +use validator::Validate; + +pub fn config(cfg: &mut web::ServiceConfig) { + cfg.route("collections", web::get().to(collections_get)); + cfg.route("collection", web::post().to(collection_create)); + + cfg.service( + web::scope("collection") + .route("{id}", web::get().to(collection_get)) + .route("{id}", web::delete().to(collection_delete)) + .route("{id}", web::patch().to(collection_edit)) + .route("{id}/icon", web::patch().to(collection_icon_edit)) + .route("{id}/icon", web::delete().to(delete_collection_icon)), + ); +} + +#[derive(Serialize, Deserialize, Validate, Clone)] +pub struct CollectionCreateData { + #[validate( + length(min = 3, max = 64), + custom(function = 
"crate::util::validate::validate_name") + )] + /// The title or name of the project. + pub title: String, + #[validate(length(min = 3, max = 255))] + /// A short description of the collection. + pub description: String, + #[validate(length(max = 32))] + #[serde(default = "Vec::new")] + /// A list of initial projects to use with the created collection + pub projects: Vec, +} + +pub async fn collection_create( + req: HttpRequest, + collection_create_data: web::Json, + client: Data, + redis: Data, + session_queue: Data, +) -> Result { + let collection_create_data = collection_create_data.into_inner(); + + // The currently logged in user + let current_user = get_user_from_headers( + &req, + &**client, + &redis, + &session_queue, + Some(&[Scopes::COLLECTION_CREATE]), + ) + .await? + .1; + + collection_create_data + .validate() + .map_err(|err| CreateError::InvalidInput(validation_errors_to_string(err, None)))?; + + let mut transaction = client.begin().await?; + + let collection_id: CollectionId = generate_collection_id(&mut transaction).await?.into(); + + let initial_project_ids = project_item::Project::get_many( + &collection_create_data.projects, + &mut *transaction, + &redis, + ) + .await? 
+ .into_iter() + .map(|x| x.inner.id.into()) + .collect::>(); + + let collection_builder_actual = collection_item::CollectionBuilder { + collection_id: collection_id.into(), + user_id: current_user.id.into(), + title: collection_create_data.title, + description: collection_create_data.description, + status: CollectionStatus::Listed, + projects: initial_project_ids + .iter() + .copied() + .map(|x| x.into()) + .collect(), + }; + let collection_builder = collection_builder_actual.clone(); + + let now = Utc::now(); + collection_builder_actual.insert(&mut transaction).await?; + + let response = crate::models::collections::Collection { + id: collection_id, + user: collection_builder.user_id.into(), + title: collection_builder.title.clone(), + description: collection_builder.description.clone(), + created: now, + updated: now, + icon_url: None, + color: None, + status: collection_builder.status, + projects: initial_project_ids, + }; + transaction.commit().await?; + + Ok(HttpResponse::Ok().json(response)) +} + +#[derive(Serialize, Deserialize)] +pub struct CollectionIds { + pub ids: String, +} +pub async fn collections_get( + req: HttpRequest, + web::Query(ids): web::Query, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let ids = serde_json::from_str::>(&ids.ids)?; + let ids = ids + .into_iter() + .map(|x| parse_base62(x).map(|x| database::models::CollectionId(x as i64))) + .collect::, _>>()?; + + let collections_data = database::models::Collection::get_many(&ids, &**pool, &redis).await?; + + let user_option = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::COLLECTION_READ]), + ) + .await + .map(|x| x.1) + .ok(); + + let collections = filter_authorized_collections(collections_data, &user_option, &pool).await?; + + Ok(HttpResponse::Ok().json(collections)) +} + +pub async fn collection_get( + req: HttpRequest, + info: web::Path<(String,)>, + pool: web::Data, + redis: web::Data, + session_queue: 
web::Data, +) -> Result { + let string = info.into_inner().0; + + let id = database::models::CollectionId(parse_base62(&string)? as i64); + let collection_data = database::models::Collection::get(id, &**pool, &redis).await?; + let user_option = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::COLLECTION_READ]), + ) + .await + .map(|x| x.1) + .ok(); + + if let Some(data) = collection_data { + if is_authorized_collection(&data, &user_option).await? { + return Ok(HttpResponse::Ok().json(Collection::from(data))); + } + } + Ok(HttpResponse::NotFound().body("")) +} + +#[derive(Deserialize, Validate)] +pub struct EditCollection { + #[validate( + length(min = 3, max = 64), + custom(function = "crate::util::validate::validate_name") + )] + pub title: Option, + #[validate(length(min = 3, max = 256))] + pub description: Option, + pub status: Option, + #[validate(length(max = 64))] + pub new_projects: Option>, +} + +pub async fn collection_edit( + req: HttpRequest, + info: web::Path<(String,)>, + pool: web::Data, + new_collection: web::Json, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::COLLECTION_WRITE]), + ) + .await? + .1; + + new_collection + .validate() + .map_err(|err| ApiError::Validation(validation_errors_to_string(err, None)))?; + + let string = info.into_inner().0; + let id = database::models::CollectionId(parse_base62(&string)? 
as i64); + let result = database::models::Collection::get(id, &**pool, &redis).await?; + + if let Some(collection_item) = result { + if !can_modify_collection(&collection_item, &user) { + return Ok(HttpResponse::Unauthorized().body("")); + } + + let id = collection_item.id; + + let mut transaction = pool.begin().await?; + + if let Some(title) = &new_collection.title { + sqlx::query!( + " + UPDATE collections + SET title = $1 + WHERE (id = $2) + ", + title.trim(), + id as database::models::ids::CollectionId, + ) + .execute(&mut *transaction) + .await?; + } + + if let Some(description) = &new_collection.description { + sqlx::query!( + " + UPDATE collections + SET description = $1 + WHERE (id = $2) + ", + description, + id as database::models::ids::CollectionId, + ) + .execute(&mut *transaction) + .await?; + } + + if let Some(status) = &new_collection.status { + if !(user.role.is_mod() + || collection_item.status.is_approved() && status.can_be_requested()) + { + return Err(ApiError::CustomAuthentication( + "You don't have permission to set this status!".to_string(), + )); + } + + sqlx::query!( + " + UPDATE collections + SET status = $1 + WHERE (id = $2) + ", + status.to_string(), + id as database::models::ids::CollectionId, + ) + .execute(&mut *transaction) + .await?; + } + + if let Some(new_project_ids) = &new_collection.new_projects { + // Delete all existing projects + sqlx::query!( + " + DELETE FROM collections_mods + WHERE collection_id = $1 + ", + collection_item.id as database::models::ids::CollectionId, + ) + .execute(&mut *transaction) + .await?; + + let collection_item_ids = new_project_ids + .iter() + .map(|_| collection_item.id.0) + .collect_vec(); + let mut validated_project_ids = Vec::new(); + for project_id in new_project_ids { + let project = database::models::Project::get(project_id, &**pool, &redis) + .await? + .ok_or_else(|| { + ApiError::InvalidInput(format!( + "The specified project {project_id} does not exist!" 
+ )) + })?; + validated_project_ids.push(project.inner.id.0); + } + // Insert- don't throw an error if it already exists + sqlx::query!( + " + INSERT INTO collections_mods (collection_id, mod_id) + SELECT * FROM UNNEST ($1::int8[], $2::int8[]) + ON CONFLICT DO NOTHING + ", + &collection_item_ids[..], + &validated_project_ids[..], + ) + .execute(&mut *transaction) + .await?; + } + + database::models::Collection::clear_cache(collection_item.id, &redis).await?; + + transaction.commit().await?; + Ok(HttpResponse::NoContent().body("")) + } else { + Ok(HttpResponse::NotFound().body("")) + } +} + +#[derive(Serialize, Deserialize)] +pub struct Extension { + pub ext: String, +} + +#[allow(clippy::too_many_arguments)] +pub async fn collection_icon_edit( + web::Query(ext): web::Query, + req: HttpRequest, + info: web::Path<(String,)>, + pool: web::Data, + redis: web::Data, + file_host: web::Data>, + mut payload: web::Payload, + session_queue: web::Data, +) -> Result { + if let Some(content_type) = crate::util::ext::get_image_content_type(&ext.ext) { + let cdn_url = dotenvy::var("CDN_URL")?; + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::COLLECTION_WRITE]), + ) + .await? + .1; + + let string = info.into_inner().0; + let id = database::models::CollectionId(parse_base62(&string)? as i64); + let collection_item = database::models::Collection::get(id, &**pool, &redis) + .await? 
+ .ok_or_else(|| { + ApiError::InvalidInput("The specified collection does not exist!".to_string()) + })?; + + if !can_modify_collection(&collection_item, &user) { + return Ok(HttpResponse::Unauthorized().body("")); + } + + if let Some(icon) = collection_item.icon_url { + let name = icon.split(&format!("{cdn_url}/")).nth(1); + + if let Some(icon_path) = name { + file_host.delete_file_version("", icon_path).await?; + } + } + + let bytes = + read_from_payload(&mut payload, 262144, "Icons must be smaller than 256KiB").await?; + + let color = crate::util::img::get_color_from_img(&bytes)?; + + let hash = sha1::Sha1::from(&bytes).hexdigest(); + let collection_id: CollectionId = collection_item.id.into(); + let upload_data = file_host + .upload_file( + content_type, + &format!("data/{}/{}.{}", collection_id, hash, ext.ext), + bytes.freeze(), + ) + .await?; + + let mut transaction = pool.begin().await?; + + sqlx::query!( + " + UPDATE collections + SET icon_url = $1, color = $2 + WHERE (id = $3) + ", + format!("{}/{}", cdn_url, upload_data.file_name), + color.map(|x| x as i32), + collection_item.id as database::models::ids::CollectionId, + ) + .execute(&mut *transaction) + .await?; + + database::models::Collection::clear_cache(collection_item.id, &redis).await?; + + transaction.commit().await?; + + Ok(HttpResponse::NoContent().body("")) + } else { + Err(ApiError::InvalidInput(format!( + "Invalid format for collection icon: {}", + ext.ext + ))) + } +} + +pub async fn delete_collection_icon( + req: HttpRequest, + info: web::Path<(String,)>, + pool: web::Data, + redis: web::Data, + file_host: web::Data>, + session_queue: web::Data, +) -> Result { + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::COLLECTION_WRITE]), + ) + .await? + .1; + + let string = info.into_inner().0; + let id = database::models::CollectionId(parse_base62(&string)? 
as i64); + let collection_item = database::models::Collection::get(id, &**pool, &redis) + .await? + .ok_or_else(|| { + ApiError::InvalidInput("The specified collection does not exist!".to_string()) + })?; + if !can_modify_collection(&collection_item, &user) { + return Ok(HttpResponse::Unauthorized().body("")); + } + + let cdn_url = dotenvy::var("CDN_URL")?; + if let Some(icon) = collection_item.icon_url { + let name = icon.split(&format!("{cdn_url}/")).nth(1); + + if let Some(icon_path) = name { + file_host.delete_file_version("", icon_path).await?; + } + } + + let mut transaction = pool.begin().await?; + + sqlx::query!( + " + UPDATE collections + SET icon_url = NULL, color = NULL + WHERE (id = $1) + ", + collection_item.id as database::models::ids::CollectionId, + ) + .execute(&mut *transaction) + .await?; + + database::models::Collection::clear_cache(collection_item.id, &redis).await?; + + transaction.commit().await?; + + Ok(HttpResponse::NoContent().body("")) +} + +pub async fn collection_delete( + req: HttpRequest, + info: web::Path<(String,)>, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::COLLECTION_DELETE]), + ) + .await? + .1; + + let string = info.into_inner().0; + let id = database::models::CollectionId(parse_base62(&string)? as i64); + let collection = database::models::Collection::get(id, &**pool, &redis) + .await? 
+ .ok_or_else(|| { + ApiError::InvalidInput("The specified collection does not exist!".to_string()) + })?; + if !can_modify_collection(&collection, &user) { + return Ok(HttpResponse::Unauthorized().body("")); + } + let mut transaction = pool.begin().await?; + + let result = + database::models::Collection::remove(collection.id, &mut transaction, &redis).await?; + database::models::Collection::clear_cache(collection.id, &redis).await?; + + transaction.commit().await?; + + if result.is_some() { + Ok(HttpResponse::NoContent().body("")) + } else { + Ok(HttpResponse::NotFound().body("")) + } +} + +fn can_modify_collection( + collection: &database::models::Collection, + user: &models::users::User, +) -> bool { + collection.user_id == user.id.into() || user.role.is_mod() +} diff --git a/src/routes/v3/images.rs b/src/routes/v3/images.rs new file mode 100644 index 00000000..e287426a --- /dev/null +++ b/src/routes/v3/images.rs @@ -0,0 +1,234 @@ +use std::sync::Arc; + +use crate::auth::{get_user_from_headers, is_authorized, is_authorized_version}; +use crate::database; +use crate::database::models::{project_item, report_item, thread_item, version_item}; +use crate::database::redis::RedisPool; +use crate::file_hosting::FileHost; +use crate::models::ids::{ThreadMessageId, VersionId}; +use crate::models::images::{Image, ImageContext}; +use crate::models::reports::ReportId; +use crate::queue::session::AuthQueue; +use crate::routes::ApiError; +use crate::util::routes::read_from_payload; +use actix_web::{web, HttpRequest, HttpResponse}; +use serde::{Deserialize, Serialize}; +use sqlx::PgPool; + +use super::threads::is_authorized_thread; + +pub fn config(cfg: &mut web::ServiceConfig) { + cfg.route("image", web::post().to(images_add)); +} + +#[derive(Serialize, Deserialize)] +pub struct ImageUpload { + pub ext: String, + + // Context must be an allowed context + // currently: project, version, thread_message, report + pub context: String, + + // Optional context id to associate with + 
pub project_id: Option, // allow slug or id + pub version_id: Option, + pub thread_message_id: Option, + pub report_id: Option, +} + +pub async fn images_add( + req: HttpRequest, + web::Query(data): web::Query, + file_host: web::Data>, + mut payload: web::Payload, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + if let Some(content_type) = crate::util::ext::get_image_content_type(&data.ext) { + let mut context = ImageContext::from_str(&data.context, None); + + let scopes = vec![context.relevant_scope()]; + + let cdn_url = dotenvy::var("CDN_URL")?; + let user = get_user_from_headers(&req, &**pool, &redis, &session_queue, Some(&scopes)) + .await? + .1; + + // Attempt to associated a supplied id with the context + // If the context cannot be found, or the user is not authorized to upload images for the context, return an error + match &mut context { + ImageContext::Project { project_id } => { + if let Some(id) = data.project_id { + let project = project_item::Project::get(&id, &**pool, &redis).await?; + if let Some(project) = project { + if is_authorized(&project.inner, &Some(user.clone()), &pool).await? { + *project_id = Some(project.inner.id.into()); + } else { + return Err(ApiError::CustomAuthentication( + "You are not authorized to upload images for this project" + .to_string(), + )); + } + } else { + return Err(ApiError::InvalidInput( + "The project could not be found.".to_string(), + )); + } + } + } + ImageContext::Version { version_id } => { + if let Some(id) = data.version_id { + let version = version_item::Version::get(id.into(), &**pool, &redis).await?; + if let Some(version) = version { + if is_authorized_version(&version.inner, &Some(user.clone()), &pool).await? 
+ { + *version_id = Some(version.inner.id.into()); + } else { + return Err(ApiError::CustomAuthentication( + "You are not authorized to upload images for this version" + .to_string(), + )); + } + } else { + return Err(ApiError::InvalidInput( + "The version could not be found.".to_string(), + )); + } + } + } + ImageContext::ThreadMessage { thread_message_id } => { + if let Some(id) = data.thread_message_id { + let thread_message = thread_item::ThreadMessage::get(id.into(), &**pool) + .await? + .ok_or_else(|| { + ApiError::InvalidInput( + "The thread message could not found.".to_string(), + ) + })?; + let thread = thread_item::Thread::get(thread_message.thread_id, &**pool) + .await? + .ok_or_else(|| { + ApiError::InvalidInput( + "The thread associated with the thread message could not be found" + .to_string(), + ) + })?; + if is_authorized_thread(&thread, &user, &pool).await? { + *thread_message_id = Some(thread_message.id.into()); + } else { + return Err(ApiError::CustomAuthentication( + "You are not authorized to upload images for this thread message" + .to_string(), + )); + } + } + } + ImageContext::Report { report_id } => { + if let Some(id) = data.report_id { + let report = report_item::Report::get(id.into(), &**pool) + .await? + .ok_or_else(|| { + ApiError::InvalidInput("The report could not be found.".to_string()) + })?; + let thread = thread_item::Thread::get(report.thread_id, &**pool) + .await? + .ok_or_else(|| { + ApiError::InvalidInput( + "The thread associated with the report could not be found." + .to_string(), + ) + })?; + if is_authorized_thread(&thread, &user, &pool).await? 
{ + *report_id = Some(report.id.into()); + } else { + return Err(ApiError::CustomAuthentication( + "You are not authorized to upload images for this report".to_string(), + )); + } + } + } + ImageContext::Unknown => { + return Err(ApiError::InvalidInput( + "Context must be one of: project, version, thread_message, report".to_string(), + )); + } + } + + // Upload the image to the file host + let bytes = + read_from_payload(&mut payload, 1_048_576, "Icons must be smaller than 1MiB").await?; + + let hash = sha1::Sha1::from(&bytes).hexdigest(); + let upload_data = file_host + .upload_file( + content_type, + &format!("data/cached_images/{}.{}", hash, data.ext), + bytes.freeze(), + ) + .await?; + + let mut transaction = pool.begin().await?; + + let db_image: database::models::Image = database::models::Image { + id: database::models::generate_image_id(&mut transaction).await?, + url: format!("{}/{}", cdn_url, upload_data.file_name), + size: upload_data.content_length as u64, + created: chrono::Utc::now(), + owner_id: database::models::UserId::from(user.id), + context: context.context_as_str().to_string(), + project_id: if let ImageContext::Project { + project_id: Some(id), + } = context + { + Some(database::models::ProjectId::from(id)) + } else { + None + }, + version_id: if let ImageContext::Version { + version_id: Some(id), + } = context + { + Some(database::models::VersionId::from(id)) + } else { + None + }, + thread_message_id: if let ImageContext::ThreadMessage { + thread_message_id: Some(id), + } = context + { + Some(database::models::ThreadMessageId::from(id)) + } else { + None + }, + report_id: if let ImageContext::Report { + report_id: Some(id), + } = context + { + Some(database::models::ReportId::from(id)) + } else { + None + }, + }; + + // Insert + db_image.insert(&mut transaction).await?; + + let image = Image { + id: db_image.id.into(), + url: db_image.url, + size: db_image.size, + created: db_image.created, + owner_id: db_image.owner_id.into(), + context, + 
}; + + transaction.commit().await?; + + Ok(HttpResponse::Ok().json(image)) + } else { + Err(ApiError::InvalidInput( + "The specified file is not an image!".to_string(), + )) + } +} diff --git a/src/routes/v3/mod.rs b/src/routes/v3/mod.rs index d90429c2..c715d85b 100644 --- a/src/routes/v3/mod.rs +++ b/src/routes/v3/mod.rs @@ -3,13 +3,42 @@ use crate::{auth::oauth, util::cors::default_cors}; use actix_web::{web, HttpResponse}; use serde_json::json; +pub mod analytics_get; +pub mod collections; +pub mod images; +pub mod moderation; +pub mod notifications; +pub mod organizations; +pub mod project_creation; +pub mod projects; +pub mod reports; +pub mod statistics; +pub mod tags; +pub mod teams; +pub mod threads; +pub mod users; +pub mod version_creation; +pub mod version_file; +pub mod versions; + pub mod oauth_clients; pub fn config(cfg: &mut web::ServiceConfig) { cfg.service( web::scope("v3") .wrap(default_cors()) - .route("", web::get().to(hello_world)) + .configure(analytics_get::config) + .configure(collections::config) + .configure(images::config) + .configure(organizations::config) + .configure(project_creation::config) + .configure(projects::config) + .configure(reports::config) + .configure(tags::config) + .configure(teams::config) + .configure(threads::config) + .configure(version_file::config) + .configure(versions::config) .configure(oauth::config) .configure(oauth_clients::config), ); diff --git a/src/routes/v3/moderation.rs b/src/routes/v3/moderation.rs new file mode 100644 index 00000000..8b72e036 --- /dev/null +++ b/src/routes/v3/moderation.rs @@ -0,0 +1,65 @@ +use super::ApiError; +use crate::database; +use crate::database::redis::RedisPool; +use crate::models::projects::ProjectStatus; +use crate::queue::session::AuthQueue; +use crate::{auth::check_is_moderator_from_headers, models::pats::Scopes}; +use actix_web::{web, HttpRequest, HttpResponse}; +use serde::Deserialize; +use sqlx::PgPool; + +pub fn config(cfg: &mut web::ServiceConfig) { + 
cfg.route("moderation/projects", web::get().to(get_projects)); +} + +#[derive(Deserialize)] +pub struct ResultCount { + #[serde(default = "default_count")] + pub count: i16, +} + +fn default_count() -> i16 { + 100 +} + +pub async fn get_projects( + req: HttpRequest, + pool: web::Data, + redis: web::Data, + count: web::Query, + session_queue: web::Data, +) -> Result { + check_is_moderator_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::PROJECT_READ]), + ) + .await?; + + use futures::stream::TryStreamExt; + + let project_ids = sqlx::query!( + " + SELECT id FROM mods + WHERE status = $1 + ORDER BY queued ASC + LIMIT $2; + ", + ProjectStatus::Processing.as_str(), + count.count as i64 + ) + .fetch_many(&**pool) + .try_filter_map(|e| async { Ok(e.right().map(|m| database::models::ProjectId(m.id))) }) + .try_collect::>() + .await?; + + let projects: Vec<_> = database::Project::get_many_ids(&project_ids, &**pool, &redis) + .await? + .into_iter() + .map(crate::models::projects::Project::from) + .collect(); + + Ok(HttpResponse::Ok().json(projects)) +} diff --git a/src/routes/v3/notifications.rs b/src/routes/v3/notifications.rs new file mode 100644 index 00000000..3eda349e --- /dev/null +++ b/src/routes/v3/notifications.rs @@ -0,0 +1,289 @@ +use crate::auth::get_user_from_headers; +use crate::database; +use crate::database::redis::RedisPool; +use crate::models::ids::NotificationId; +use crate::models::notifications::Notification; +use crate::models::pats::Scopes; +use crate::queue::session::AuthQueue; +use crate::routes::ApiError; +use actix_web::{web, HttpRequest, HttpResponse}; +use serde::{Deserialize, Serialize}; +use sqlx::PgPool; + +pub fn config(cfg: &mut web::ServiceConfig) { + cfg.route("notifications", web::get().to(notifications_get)); + cfg.route("notifications", web::patch().to(notifications_read)); + cfg.route("notifications", web::delete().to(notifications_delete)); + + cfg.service( + web::scope("notification") + .route("{id}", 
web::get().to(notification_get)) + .route("{id}", web::patch().to(notification_read)) + .route("{id}", web::delete().to(notification_delete)), + ); +} + +#[derive(Serialize, Deserialize)] +pub struct NotificationIds { + pub ids: String, +} + +pub async fn notifications_get( + req: HttpRequest, + web::Query(ids): web::Query, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::NOTIFICATION_READ]), + ) + .await? + .1; + + use database::models::notification_item::Notification as DBNotification; + use database::models::NotificationId as DBNotificationId; + + let notification_ids: Vec = + serde_json::from_str::>(ids.ids.as_str())? + .into_iter() + .map(DBNotificationId::from) + .collect(); + + let notifications_data: Vec = + database::models::notification_item::Notification::get_many(¬ification_ids, &**pool) + .await?; + + let notifications: Vec = notifications_data + .into_iter() + .filter(|n| n.user_id == user.id.into() || user.role.is_admin()) + .map(Notification::from) + .collect(); + + Ok(HttpResponse::Ok().json(notifications)) +} + +pub async fn notification_get( + req: HttpRequest, + info: web::Path<(NotificationId,)>, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::NOTIFICATION_READ]), + ) + .await? 
+ .1; + + let id = info.into_inner().0; + + let notification_data = + database::models::notification_item::Notification::get(id.into(), &**pool).await?; + + if let Some(data) = notification_data { + if user.id == data.user_id.into() || user.role.is_admin() { + Ok(HttpResponse::Ok().json(Notification::from(data))) + } else { + Ok(HttpResponse::NotFound().body("")) + } + } else { + Ok(HttpResponse::NotFound().body("")) + } +} + +pub async fn notification_read( + req: HttpRequest, + info: web::Path<(NotificationId,)>, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::NOTIFICATION_WRITE]), + ) + .await? + .1; + + let id = info.into_inner().0; + + let notification_data = + database::models::notification_item::Notification::get(id.into(), &**pool).await?; + + if let Some(data) = notification_data { + if data.user_id == user.id.into() || user.role.is_admin() { + let mut transaction = pool.begin().await?; + + database::models::notification_item::Notification::read( + id.into(), + &mut transaction, + &redis, + ) + .await?; + + transaction.commit().await?; + + Ok(HttpResponse::NoContent().body("")) + } else { + Err(ApiError::CustomAuthentication( + "You are not authorized to read this notification!".to_string(), + )) + } + } else { + Ok(HttpResponse::NotFound().body("")) + } +} + +pub async fn notification_delete( + req: HttpRequest, + info: web::Path<(NotificationId,)>, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::NOTIFICATION_WRITE]), + ) + .await? 
+ .1; + + let id = info.into_inner().0; + + let notification_data = + database::models::notification_item::Notification::get(id.into(), &**pool).await?; + + if let Some(data) = notification_data { + if data.user_id == user.id.into() || user.role.is_admin() { + let mut transaction = pool.begin().await?; + + database::models::notification_item::Notification::remove( + id.into(), + &mut transaction, + &redis, + ) + .await?; + + transaction.commit().await?; + + Ok(HttpResponse::NoContent().body("")) + } else { + Err(ApiError::CustomAuthentication( + "You are not authorized to delete this notification!".to_string(), + )) + } + } else { + Ok(HttpResponse::NotFound().body("")) + } +} + +pub async fn notifications_read( + req: HttpRequest, + web::Query(ids): web::Query, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::NOTIFICATION_WRITE]), + ) + .await? + .1; + + let notification_ids = serde_json::from_str::>(&ids.ids)? 
+ .into_iter() + .map(|x| x.into()) + .collect::>(); + + let mut transaction = pool.begin().await?; + + let notifications_data = + database::models::notification_item::Notification::get_many(¬ification_ids, &**pool) + .await?; + + let mut notifications: Vec = Vec::new(); + + for notification in notifications_data { + if notification.user_id == user.id.into() || user.role.is_admin() { + notifications.push(notification.id); + } + } + + database::models::notification_item::Notification::read_many( + ¬ifications, + &mut transaction, + &redis, + ) + .await?; + + transaction.commit().await?; + + Ok(HttpResponse::NoContent().body("")) +} + +pub async fn notifications_delete( + req: HttpRequest, + web::Query(ids): web::Query, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::NOTIFICATION_WRITE]), + ) + .await? + .1; + + let notification_ids = serde_json::from_str::>(&ids.ids)? 
+ .into_iter() + .map(|x| x.into()) + .collect::>(); + + let mut transaction = pool.begin().await?; + + let notifications_data = + database::models::notification_item::Notification::get_many(¬ification_ids, &**pool) + .await?; + + let mut notifications: Vec = Vec::new(); + + for notification in notifications_data { + if notification.user_id == user.id.into() || user.role.is_admin() { + notifications.push(notification.id); + } + } + + database::models::notification_item::Notification::remove_many( + ¬ifications, + &mut transaction, + &redis, + ) + .await?; + + transaction.commit().await?; + + Ok(HttpResponse::NoContent().body("")) +} diff --git a/src/routes/v3/oauth_clients.rs b/src/routes/v3/oauth_clients.rs index b04cb9bb..0378a708 100644 --- a/src/routes/v3/oauth_clients.rs +++ b/src/routes/v3/oauth_clients.rs @@ -35,7 +35,7 @@ use crate::{ pats::Scopes, }, queue::session::AuthQueue, - routes::v2::project_creation::CreateError, + routes::v3::project_creation::CreateError, util::validate::validation_errors_to_string, }; diff --git a/src/routes/v3/organizations.rs b/src/routes/v3/organizations.rs new file mode 100644 index 00000000..a61e33b4 --- /dev/null +++ b/src/routes/v3/organizations.rs @@ -0,0 +1,918 @@ +use std::collections::HashMap; +use std::sync::Arc; + +use super::ApiError; +use crate::auth::{filter_authorized_projects, get_user_from_headers}; +use crate::database::models::team_item::TeamMember; +use crate::database::models::{generate_organization_id, team_item, Organization}; +use crate::database::redis::RedisPool; +use crate::file_hosting::FileHost; +use crate::models::ids::base62_impl::parse_base62; +use crate::models::organizations::OrganizationId; +use crate::models::pats::Scopes; +use crate::models::teams::{OrganizationPermissions, ProjectPermissions}; +use crate::queue::session::AuthQueue; +use crate::routes::v3::project_creation::CreateError; +use crate::util::routes::read_from_payload; +use crate::util::validate::validation_errors_to_string; 
+use crate::{database, models}; +use actix_web::{web, HttpRequest, HttpResponse}; +use rust_decimal::Decimal; +use serde::{Deserialize, Serialize}; +use sqlx::PgPool; +use validator::Validate; + +pub fn config(cfg: &mut web::ServiceConfig) { + cfg.service( + web::scope("organization") + .route("{id}/projects", web::get().to(organization_projects_get)) + .route("{id}", web::get().to(organization_get)) + .route("{id}", web::patch().to(organizations_edit)) + .route("{id}", web::delete().to(organization_delete)) + .route("{id}/projects", web::post().to(organization_projects_add)) + .route( + "{id}/projects", + web::delete().to(organization_projects_remove), + ) + .route("{id}/icon", web::patch().to(organization_icon_edit)) + .route("{id}/icon", web::delete().to(delete_organization_icon)) + .route( + "{id}/members", + web::get().to(super::teams::team_members_get_organization), + ), + ); +} + +pub async fn organization_projects_get( + req: HttpRequest, + info: web::Path<(String,)>, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let info = info.into_inner().0; + let current_user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::ORGANIZATION_READ, Scopes::PROJECT_READ]), + ) + .await + .map(|x| x.1) + .ok(); + + let possible_organization_id: Option = parse_base62(&info).ok(); + use futures::TryStreamExt; + + let project_ids = sqlx::query!( + " + SELECT m.id FROM organizations o + INNER JOIN mods m ON m.organization_id = o.id + WHERE (o.id = $1 AND $1 IS NOT NULL) OR (o.title = $2 AND $2 IS NOT NULL) + ", + possible_organization_id.map(|x| x as i64), + info + ) + .fetch_many(&**pool) + .try_filter_map(|e| async { Ok(e.right().map(|m| crate::database::models::ProjectId(m.id))) }) + .try_collect::>() + .await?; + + let projects_data = + crate::database::models::Project::get_many_ids(&project_ids, &**pool, &redis).await?; + + let projects = filter_authorized_projects(projects_data, ¤t_user, 
&pool).await?; + Ok(HttpResponse::Ok().json(projects)) +} + +#[derive(Deserialize, Validate)] +pub struct NewOrganization { + #[validate( + length(min = 3, max = 64), + regex = "crate::util::validate::RE_URL_SAFE" + )] + // Title of the organization, also used as slug + pub title: String, + #[validate(length(min = 3, max = 256))] + pub description: String, +} + +pub async fn organization_create( + req: HttpRequest, + new_organization: web::Json, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let current_user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::ORGANIZATION_CREATE]), + ) + .await? + .1; + + new_organization + .validate() + .map_err(|err| CreateError::ValidationError(validation_errors_to_string(err, None)))?; + + let mut transaction = pool.begin().await?; + + // Try title + let title_organization_id_option: Option = + serde_json::from_str(&format!("\"{}\"", new_organization.title)).ok(); + let mut organization_strings = vec![]; + if let Some(title_organization_id) = title_organization_id_option { + organization_strings.push(title_organization_id.to_string()); + } + organization_strings.push(new_organization.title.clone()); + let results = Organization::get_many(&organization_strings, &mut *transaction, &redis).await?; + if !results.is_empty() { + return Err(CreateError::SlugCollision); + } + + let organization_id = generate_organization_id(&mut transaction).await?; + + // Create organization managerial team + let team = team_item::TeamBuilder { + members: vec![team_item::TeamMemberBuilder { + user_id: current_user.id.into(), + role: crate::models::teams::OWNER_ROLE.to_owned(), + permissions: ProjectPermissions::all(), + organization_permissions: Some(OrganizationPermissions::all()), + accepted: true, + payouts_split: Decimal::ONE_HUNDRED, + ordering: 0, + }], + }; + let team_id = team.insert(&mut transaction).await?; + + // Create organization + let organization = Organization { 
+ id: organization_id, + title: new_organization.title.clone(), + description: new_organization.description.clone(), + team_id, + icon_url: None, + color: None, + }; + organization.clone().insert(&mut transaction).await?; + transaction.commit().await?; + + // Only member is the owner, the logged in one + let member_data = TeamMember::get_from_team_full(team_id, &**pool, &redis) + .await? + .into_iter() + .next(); + let members_data = if let Some(member_data) = member_data { + vec![crate::models::teams::TeamMember::from_model( + member_data, + current_user.clone(), + false, + )] + } else { + return Err(CreateError::InvalidInput( + "Failed to get created team.".to_owned(), // should never happen + )); + }; + + let organization = models::organizations::Organization::from(organization, members_data); + + Ok(HttpResponse::Ok().json(organization)) +} + +pub async fn organization_get( + req: HttpRequest, + info: web::Path<(String,)>, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let id = info.into_inner().0; + let current_user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::ORGANIZATION_READ]), + ) + .await + .map(|x| x.1) + .ok(); + let user_id = current_user.as_ref().map(|x| x.id.into()); + + let organization_data = Organization::get(&id, &**pool, &redis).await?; + if let Some(data) = organization_data { + let members_data = TeamMember::get_from_team_full(data.team_id, &**pool, &redis).await?; + + let users = crate::database::models::User::get_many_ids( + &members_data.iter().map(|x| x.user_id).collect::>(), + &**pool, + &redis, + ) + .await?; + let logged_in = current_user + .as_ref() + .and_then(|user| { + members_data + .iter() + .find(|x| x.user_id == user.id.into() && x.accepted) + }) + .is_some(); + let team_members: Vec<_> = members_data + .into_iter() + .filter(|x| { + logged_in + || x.accepted + || user_id + .map(|y: crate::database::models::UserId| y == x.user_id) + 
.unwrap_or(false) + }) + .flat_map(|data| { + users.iter().find(|x| x.id == data.user_id).map(|user| { + crate::models::teams::TeamMember::from(data, user.clone(), !logged_in) + }) + }) + .collect(); + + let organization = models::organizations::Organization::from(data, team_members); + return Ok(HttpResponse::Ok().json(organization)); + } + Ok(HttpResponse::NotFound().body("")) +} + +#[derive(Deserialize)] +pub struct OrganizationIds { + pub ids: String, +} + +pub async fn organizations_get( + req: HttpRequest, + web::Query(ids): web::Query, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let ids = serde_json::from_str::>(&ids.ids)?; + let organizations_data = Organization::get_many(&ids, &**pool, &redis).await?; + let team_ids = organizations_data + .iter() + .map(|x| x.team_id) + .collect::>(); + + let teams_data = TeamMember::get_from_team_full_many(&team_ids, &**pool, &redis).await?; + let users = crate::database::models::User::get_many_ids( + &teams_data.iter().map(|x| x.user_id).collect::>(), + &**pool, + &redis, + ) + .await?; + + let current_user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::ORGANIZATION_READ]), + ) + .await + .map(|x| x.1) + .ok(); + let user_id = current_user.as_ref().map(|x| x.id.into()); + + let mut organizations = vec![]; + + let mut team_groups = HashMap::new(); + for item in teams_data { + team_groups.entry(item.team_id).or_insert(vec![]).push(item); + } + + for data in organizations_data { + let members_data = team_groups.remove(&data.team_id).unwrap_or(vec![]); + let logged_in = current_user + .as_ref() + .and_then(|user| { + members_data + .iter() + .find(|x| x.user_id == user.id.into() && x.accepted) + }) + .is_some(); + + let team_members: Vec<_> = members_data + .into_iter() + .filter(|x| { + logged_in + || x.accepted + || user_id + .map(|y: crate::database::models::UserId| y == x.user_id) + .unwrap_or(false) + }) + .flat_map(|data| { + 
users.iter().find(|x| x.id == data.user_id).map(|user| { + crate::models::teams::TeamMember::from(data, user.clone(), !logged_in) + }) + }) + .collect(); + + let organization = models::organizations::Organization::from(data, team_members); + organizations.push(organization); + } + + Ok(HttpResponse::Ok().json(organizations)) +} + +#[derive(Serialize, Deserialize, Validate)] +pub struct OrganizationEdit { + #[validate(length(min = 3, max = 256))] + pub description: Option, + #[validate( + length(min = 3, max = 64), + regex = "crate::util::validate::RE_URL_SAFE" + )] + // Title of the organization, also used as slug + pub title: Option, +} + +pub async fn organizations_edit( + req: HttpRequest, + info: web::Path<(String,)>, + new_organization: web::Json, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::ORGANIZATION_WRITE]), + ) + .await? + .1; + + new_organization + .validate() + .map_err(|err| ApiError::Validation(validation_errors_to_string(err, None)))?; + + let string = info.into_inner().0; + let result = database::models::Organization::get(&string, &**pool, &redis).await?; + if let Some(organization_item) = result { + let id = organization_item.id; + + let team_member = database::models::TeamMember::get_from_user_id( + organization_item.team_id, + user.id.into(), + &**pool, + ) + .await?; + + let permissions = + OrganizationPermissions::get_permissions_by_role(&user.role, &team_member); + + if let Some(perms) = permissions { + let mut transaction = pool.begin().await?; + if let Some(description) = &new_organization.description { + if !perms.contains(OrganizationPermissions::EDIT_DETAILS) { + return Err(ApiError::CustomAuthentication( + "You do not have the permissions to edit the description of this organization!" 
+ .to_string(), + )); + } + sqlx::query!( + " + UPDATE organizations + SET description = $1 + WHERE (id = $2) + ", + description, + id as database::models::ids::OrganizationId, + ) + .execute(&mut *transaction) + .await?; + } + + if let Some(title) = &new_organization.title { + if !perms.contains(OrganizationPermissions::EDIT_DETAILS) { + return Err(ApiError::CustomAuthentication( + "You do not have the permissions to edit the title of this organization!" + .to_string(), + )); + } + + let title_organization_id_option: Option = parse_base62(title).ok(); + if let Some(title_organization_id) = title_organization_id_option { + let results = sqlx::query!( + " + SELECT EXISTS(SELECT 1 FROM organizations WHERE id=$1) + ", + title_organization_id as i64 + ) + .fetch_one(&mut *transaction) + .await?; + + if results.exists.unwrap_or(true) { + return Err(ApiError::InvalidInput( + "Title collides with other organization's id!".to_string(), + )); + } + } + + // Make sure the new title is different from the old one + // We are able to unwrap here because the title is always set + if !title.eq(&organization_item.title.clone()) { + let results = sqlx::query!( + " + SELECT EXISTS(SELECT 1 FROM organizations WHERE title = LOWER($1)) + ", + title + ) + .fetch_one(&mut *transaction) + .await?; + + if results.exists.unwrap_or(true) { + return Err(ApiError::InvalidInput( + "Title collides with other organization's id!".to_string(), + )); + } + } + + sqlx::query!( + " + UPDATE organizations + SET title = LOWER($1) + WHERE (id = $2) + ", + Some(title), + id as database::models::ids::OrganizationId, + ) + .execute(&mut *transaction) + .await?; + } + + database::models::Organization::clear_cache( + organization_item.id, + Some(organization_item.title), + &redis, + ) + .await?; + + transaction.commit().await?; + Ok(HttpResponse::NoContent().body("")) + } else { + Err(ApiError::CustomAuthentication( + "You do not have permission to edit this organization!".to_string(), + )) + } + } else { + 
Ok(HttpResponse::NotFound().body("")) + } +} + +pub async fn organization_delete( + req: HttpRequest, + info: web::Path<(String,)>, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::ORGANIZATION_DELETE]), + ) + .await? + .1; + let string = info.into_inner().0; + + let organization = database::models::Organization::get(&string, &**pool, &redis) + .await? + .ok_or_else(|| { + ApiError::InvalidInput("The specified organization does not exist!".to_string()) + })?; + + if !user.role.is_admin() { + let team_member = database::models::TeamMember::get_from_user_id_organization( + organization.id, + user.id.into(), + &**pool, + ) + .await + .map_err(ApiError::Database)? + .ok_or_else(|| { + ApiError::InvalidInput("The specified organization does not exist!".to_string()) + })?; + + let permissions = + OrganizationPermissions::get_permissions_by_role(&user.role, &Some(team_member)) + .unwrap_or_default(); + + if !permissions.contains(OrganizationPermissions::DELETE_ORGANIZATION) { + return Err(ApiError::CustomAuthentication( + "You don't have permission to delete this organization!".to_string(), + )); + } + } + + let mut transaction = pool.begin().await?; + let result = + database::models::Organization::remove(organization.id, &mut transaction, &redis).await?; + + transaction.commit().await?; + + database::models::Organization::clear_cache(organization.id, Some(organization.title), &redis) + .await?; + + if result.is_some() { + Ok(HttpResponse::NoContent().body("")) + } else { + Ok(HttpResponse::NotFound().body("")) + } +} + +#[derive(Deserialize)] +pub struct OrganizationProjectAdd { + pub project_id: String, // Also allow title/slug +} +pub async fn organization_projects_add( + req: HttpRequest, + info: web::Path<(String,)>, + project_info: web::Json, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let info = 
info.into_inner().0; + let current_user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::PROJECT_WRITE, Scopes::ORGANIZATION_WRITE]), + ) + .await? + .1; + + let organization = database::models::Organization::get(&info, &**pool, &redis) + .await? + .ok_or_else(|| { + ApiError::InvalidInput("The specified organization does not exist!".to_string()) + })?; + + let project_item = database::models::Project::get(&project_info.project_id, &**pool, &redis) + .await? + .ok_or_else(|| { + ApiError::InvalidInput("The specified project does not exist!".to_string()) + })?; + if project_item.inner.organization_id.is_some() { + return Err(ApiError::InvalidInput( + "The specified project is already owned by an organization!".to_string(), + )); + } + + let project_team_member = database::models::TeamMember::get_from_user_id_project( + project_item.inner.id, + current_user.id.into(), + &**pool, + ) + .await? + .ok_or_else(|| ApiError::InvalidInput("You are not a member of this project!".to_string()))?; + + let organization_team_member = database::models::TeamMember::get_from_user_id_organization( + organization.id, + current_user.id.into(), + &**pool, + ) + .await? 
+ .ok_or_else(|| { + ApiError::InvalidInput("You are not a member of this organization!".to_string()) + })?; + + // Require ownership of a project to add it to an organization + if !current_user.role.is_admin() + && !project_team_member + .role + .eq(crate::models::teams::OWNER_ROLE) + { + return Err(ApiError::CustomAuthentication( + "You need to be an owner of a project to add it to an organization!".to_string(), + )); + } + + let permissions = OrganizationPermissions::get_permissions_by_role( + ¤t_user.role, + &Some(organization_team_member), + ) + .unwrap_or_default(); + if permissions.contains(OrganizationPermissions::ADD_PROJECT) { + let mut transaction = pool.begin().await?; + sqlx::query!( + " + UPDATE mods + SET organization_id = $1 + WHERE (id = $2) + ", + organization.id as database::models::OrganizationId, + project_item.inner.id as database::models::ids::ProjectId + ) + .execute(&mut *transaction) + .await?; + + transaction.commit().await?; + + database::models::TeamMember::clear_cache(project_item.inner.team_id, &redis).await?; + database::models::Project::clear_cache( + project_item.inner.id, + project_item.inner.slug, + None, + &redis, + ) + .await?; + } else { + return Err(ApiError::CustomAuthentication( + "You do not have permission to add projects to this organization!".to_string(), + )); + } + Ok(HttpResponse::Ok().finish()) +} + +pub async fn organization_projects_remove( + req: HttpRequest, + info: web::Path<(String, String)>, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let (organization_id, project_id) = info.into_inner(); + let current_user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::PROJECT_WRITE, Scopes::ORGANIZATION_WRITE]), + ) + .await? + .1; + + let organization = database::models::Organization::get(&organization_id, &**pool, &redis) + .await? 
+ .ok_or_else(|| { + ApiError::InvalidInput("The specified organization does not exist!".to_string()) + })?; + + let project_item = database::models::Project::get(&project_id, &**pool, &redis) + .await? + .ok_or_else(|| { + ApiError::InvalidInput("The specified project does not exist!".to_string()) + })?; + + if !project_item + .inner + .organization_id + .eq(&Some(organization.id)) + { + return Err(ApiError::InvalidInput( + "The specified project is not owned by this organization!".to_string(), + )); + } + + let organization_team_member = database::models::TeamMember::get_from_user_id_organization( + organization.id, + current_user.id.into(), + &**pool, + ) + .await? + .ok_or_else(|| { + ApiError::InvalidInput("You are not a member of this organization!".to_string()) + })?; + + let permissions = OrganizationPermissions::get_permissions_by_role( + ¤t_user.role, + &Some(organization_team_member), + ) + .unwrap_or_default(); + if permissions.contains(OrganizationPermissions::REMOVE_PROJECT) { + let mut transaction = pool.begin().await?; + sqlx::query!( + " + UPDATE mods + SET organization_id = NULL + WHERE (id = $1) + ", + project_item.inner.id as database::models::ids::ProjectId + ) + .execute(&mut *transaction) + .await?; + + transaction.commit().await?; + + database::models::TeamMember::clear_cache(project_item.inner.team_id, &redis).await?; + database::models::Project::clear_cache( + project_item.inner.id, + project_item.inner.slug, + None, + &redis, + ) + .await?; + } else { + return Err(ApiError::CustomAuthentication( + "You do not have permission to add projects to this organization!".to_string(), + )); + } + Ok(HttpResponse::Ok().finish()) +} + +#[derive(Serialize, Deserialize)] +pub struct Extension { + pub ext: String, +} + +#[allow(clippy::too_many_arguments)] +pub async fn organization_icon_edit( + web::Query(ext): web::Query, + req: HttpRequest, + info: web::Path<(String,)>, + pool: web::Data, + redis: web::Data, + file_host: web::Data>, + mut payload: 
web::Payload, + session_queue: web::Data, +) -> Result { + if let Some(content_type) = crate::util::ext::get_image_content_type(&ext.ext) { + let cdn_url = dotenvy::var("CDN_URL")?; + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::ORGANIZATION_WRITE]), + ) + .await? + .1; + let string = info.into_inner().0; + + let organization_item = database::models::Organization::get(&string, &**pool, &redis) + .await? + .ok_or_else(|| { + ApiError::InvalidInput("The specified organization does not exist!".to_string()) + })?; + + if !user.role.is_mod() { + let team_member = database::models::TeamMember::get_from_user_id( + organization_item.team_id, + user.id.into(), + &**pool, + ) + .await + .map_err(ApiError::Database)?; + + let permissions = + OrganizationPermissions::get_permissions_by_role(&user.role, &team_member) + .unwrap_or_default(); + + if !permissions.contains(OrganizationPermissions::EDIT_DETAILS) { + return Err(ApiError::CustomAuthentication( + "You don't have permission to edit this organization's icon.".to_string(), + )); + } + } + + if let Some(icon) = organization_item.icon_url { + let name = icon.split(&format!("{cdn_url}/")).nth(1); + + if let Some(icon_path) = name { + file_host.delete_file_version("", icon_path).await?; + } + } + + let bytes = + read_from_payload(&mut payload, 262144, "Icons must be smaller than 256KiB").await?; + + let color = crate::util::img::get_color_from_img(&bytes)?; + + let hash = sha1::Sha1::from(&bytes).hexdigest(); + let organization_id: OrganizationId = organization_item.id.into(); + let upload_data = file_host + .upload_file( + content_type, + &format!("data/{}/{}.{}", organization_id, hash, ext.ext), + bytes.freeze(), + ) + .await?; + + let mut transaction = pool.begin().await?; + + sqlx::query!( + " + UPDATE organizations + SET icon_url = $1, color = $2 + WHERE (id = $3) + ", + format!("{}/{}", cdn_url, upload_data.file_name), + color.map(|x| x as i32), + organization_item.id 
as database::models::ids::OrganizationId, + ) + .execute(&mut *transaction) + .await?; + + database::models::Organization::clear_cache( + organization_item.id, + Some(organization_item.title), + &redis, + ) + .await?; + + transaction.commit().await?; + + Ok(HttpResponse::NoContent().body("")) + } else { + Err(ApiError::InvalidInput(format!( + "Invalid format for project icon: {}", + ext.ext + ))) + } +} + +pub async fn delete_organization_icon( + req: HttpRequest, + info: web::Path<(String,)>, + pool: web::Data, + redis: web::Data, + file_host: web::Data>, + session_queue: web::Data, +) -> Result { + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::ORGANIZATION_WRITE]), + ) + .await? + .1; + let string = info.into_inner().0; + + let organization_item = database::models::Organization::get(&string, &**pool, &redis) + .await? + .ok_or_else(|| { + ApiError::InvalidInput("The specified organization does not exist!".to_string()) + })?; + + if !user.role.is_mod() { + let team_member = database::models::TeamMember::get_from_user_id( + organization_item.team_id, + user.id.into(), + &**pool, + ) + .await + .map_err(ApiError::Database)?; + + let permissions = + OrganizationPermissions::get_permissions_by_role(&user.role, &team_member) + .unwrap_or_default(); + + if !permissions.contains(OrganizationPermissions::EDIT_DETAILS) { + return Err(ApiError::CustomAuthentication( + "You don't have permission to edit this organization's icon.".to_string(), + )); + } + } + + let cdn_url = dotenvy::var("CDN_URL")?; + if let Some(icon) = organization_item.icon_url { + let name = icon.split(&format!("{cdn_url}/")).nth(1); + + if let Some(icon_path) = name { + file_host.delete_file_version("", icon_path).await?; + } + } + + let mut transaction = pool.begin().await?; + + sqlx::query!( + " + UPDATE organizations + SET icon_url = NULL, color = NULL + WHERE (id = $1) + ", + organization_item.id as database::models::ids::OrganizationId, + ) + 
.execute(&mut *transaction) + .await?; + + database::models::Organization::clear_cache( + organization_item.id, + Some(organization_item.title), + &redis, + ) + .await?; + + transaction.commit().await?; + + Ok(HttpResponse::NoContent().body("")) +} diff --git a/src/routes/v3/project_creation.rs b/src/routes/v3/project_creation.rs new file mode 100644 index 00000000..637aa46a --- /dev/null +++ b/src/routes/v3/project_creation.rs @@ -0,0 +1,975 @@ +use super::version_creation::InitialVersionData; +use crate::auth::{get_user_from_headers, AuthenticationError}; +use crate::database::models::loader_fields::{ + Loader, LoaderField, LoaderFieldEnumValue, VersionField, +}; +use crate::database::models::thread_item::ThreadBuilder; +use crate::database::models::{self, image_item, User}; +use crate::database::redis::RedisPool; +use crate::file_hosting::{FileHost, FileHostingError}; +use crate::models::error::ApiError; +use crate::models::ids::{ImageId, OrganizationId}; +use crate::models::images::{Image, ImageContext}; +use crate::models::pats::Scopes; +use crate::models::projects::{ + DonationLink, License, MonetizationStatus, ProjectId, ProjectStatus, VersionId, VersionStatus, +}; +use crate::models::teams::ProjectPermissions; +use crate::models::threads::ThreadType; +use crate::models::users::UserId; +use crate::queue::session::AuthQueue; +use crate::search::indexing::IndexingError; +use crate::util::routes::read_from_field; +use crate::util::validate::validation_errors_to_string; +use actix_multipart::{Field, Multipart}; +use actix_web::http::StatusCode; +use actix_web::web::{self, Data}; +use actix_web::{HttpRequest, HttpResponse}; +use chrono::Utc; +use futures::stream::StreamExt; +use image::ImageError; +use itertools::Itertools; +use rust_decimal::Decimal; +use serde::{Deserialize, Serialize}; +use sqlx::postgres::PgPool; +use std::sync::Arc; +use thiserror::Error; +use validator::Validate; + +pub fn config(cfg: &mut actix_web::web::ServiceConfig) { + 
cfg.route("create", web::post().to(project_create)); +} + +#[derive(Error, Debug)] +pub enum CreateError { + #[error("Environment Error")] + EnvError(#[from] dotenvy::Error), + #[error("An unknown database error occurred")] + SqlxDatabaseError(#[from] sqlx::Error), + #[error("Database Error: {0}")] + DatabaseError(#[from] models::DatabaseError), + #[error("Indexing Error: {0}")] + IndexingError(#[from] IndexingError), + #[error("Error while parsing multipart payload: {0}")] + MultipartError(#[from] actix_multipart::MultipartError), + #[error("Error while parsing JSON: {0}")] + SerDeError(#[from] serde_json::Error), + #[error("Error while validating input: {0}")] + ValidationError(String), + #[error("Error while uploading file: {0}")] + FileHostingError(#[from] FileHostingError), + #[error("Error while validating uploaded file: {0}")] + FileValidationError(#[from] crate::validate::ValidationError), + #[error("{}", .0)] + MissingValueError(String), + #[error("Invalid format for image: {0}")] + InvalidIconFormat(String), + #[error("Error with multipart data: {0}")] + InvalidInput(String), + #[error("Invalid game version: {0}")] + InvalidGameVersion(String), + #[error("Invalid loader: {0}")] + InvalidLoader(String), + #[error("Invalid category: {0}")] + InvalidCategory(String), + #[error("Invalid file type for version file: {0}")] + InvalidFileType(String), + #[error("Slug collides with other project's id!")] + SlugCollision, + #[error("Authentication Error: {0}")] + Unauthorized(#[from] AuthenticationError), + #[error("Authentication Error: {0}")] + CustomAuthenticationError(String), + #[error("Image Parsing Error: {0}")] + ImageError(#[from] ImageError), + #[error("Reroute Error: {0}")] + RerouteError(#[from] reqwest::Error), +} + +impl actix_web::ResponseError for CreateError { + fn status_code(&self) -> StatusCode { + match self { + CreateError::EnvError(..) => StatusCode::INTERNAL_SERVER_ERROR, + CreateError::SqlxDatabaseError(..) 
=> StatusCode::INTERNAL_SERVER_ERROR, + CreateError::DatabaseError(..) => StatusCode::INTERNAL_SERVER_ERROR, + CreateError::IndexingError(..) => StatusCode::INTERNAL_SERVER_ERROR, + CreateError::FileHostingError(..) => StatusCode::INTERNAL_SERVER_ERROR, + CreateError::SerDeError(..) => StatusCode::BAD_REQUEST, + CreateError::MultipartError(..) => StatusCode::BAD_REQUEST, + CreateError::MissingValueError(..) => StatusCode::BAD_REQUEST, + CreateError::InvalidIconFormat(..) => StatusCode::BAD_REQUEST, + CreateError::InvalidInput(..) => StatusCode::BAD_REQUEST, + CreateError::InvalidGameVersion(..) => StatusCode::BAD_REQUEST, + CreateError::InvalidLoader(..) => StatusCode::BAD_REQUEST, + CreateError::InvalidCategory(..) => StatusCode::BAD_REQUEST, + CreateError::InvalidFileType(..) => StatusCode::BAD_REQUEST, + CreateError::Unauthorized(..) => StatusCode::UNAUTHORIZED, + CreateError::CustomAuthenticationError(..) => StatusCode::UNAUTHORIZED, + CreateError::SlugCollision => StatusCode::BAD_REQUEST, + CreateError::ValidationError(..) => StatusCode::BAD_REQUEST, + CreateError::FileValidationError(..) => StatusCode::BAD_REQUEST, + CreateError::ImageError(..) => StatusCode::BAD_REQUEST, + CreateError::RerouteError(..) => StatusCode::INTERNAL_SERVER_ERROR, + } + } + + fn error_response(&self) -> HttpResponse { + HttpResponse::build(self.status_code()).json(ApiError { + error: match self { + CreateError::EnvError(..) => "environment_error", + CreateError::SqlxDatabaseError(..) => "database_error", + CreateError::DatabaseError(..) => "database_error", + CreateError::IndexingError(..) => "indexing_error", + CreateError::FileHostingError(..) => "file_hosting_error", + CreateError::SerDeError(..) => "invalid_input", + CreateError::MultipartError(..) => "invalid_input", + CreateError::MissingValueError(..) => "invalid_input", + CreateError::InvalidIconFormat(..) => "invalid_input", + CreateError::InvalidInput(..) => "invalid_input", + CreateError::InvalidGameVersion(..) 
=> "invalid_input", + CreateError::InvalidLoader(..) => "invalid_input", + CreateError::InvalidCategory(..) => "invalid_input", + CreateError::InvalidFileType(..) => "invalid_input", + CreateError::Unauthorized(..) => "unauthorized", + CreateError::CustomAuthenticationError(..) => "unauthorized", + CreateError::SlugCollision => "invalid_input", + CreateError::ValidationError(..) => "invalid_input", + CreateError::FileValidationError(..) => "invalid_input", + CreateError::ImageError(..) => "invalid_image", + CreateError::RerouteError(..) => "reroute_error", + }, + description: &self.to_string(), + }) + } +} + +pub fn default_project_type() -> String { + "mod".to_string() +} + +fn default_requested_status() -> ProjectStatus { + ProjectStatus::Approved +} + +#[derive(Serialize, Deserialize, Validate, Clone)] +pub struct ProjectCreateData { + #[validate( + length(min = 3, max = 64), + custom(function = "crate::util::validate::validate_name") + )] + #[serde(alias = "mod_name")] + /// The title or name of the project. + pub title: String, + #[validate( + length(min = 3, max = 64), + regex = "crate::util::validate::RE_URL_SAFE" + )] + #[serde(alias = "mod_slug")] + /// The slug of a project, used for vanity URLs + pub slug: String, + #[validate(length(min = 3, max = 255))] + #[serde(alias = "mod_description")] + /// A short description of the project. + pub description: String, + #[validate(length(max = 65536))] + #[serde(alias = "mod_body")] + /// A long description of the project, in markdown. + pub body: String, + + #[validate(length(max = 32))] + #[validate] + /// A list of initial versions to upload with the created project + pub initial_versions: Vec, + #[validate(length(max = 3))] + /// A list of the categories that the project is in. + pub categories: Vec, + #[validate(length(max = 256))] + #[serde(default = "Vec::new")] + /// A list of the categories that the project is in. 
+ pub additional_categories: Vec, + + #[validate( + custom(function = "crate::util::validate::validate_url"), + length(max = 2048) + )] + /// An optional link to where to submit bugs or issues with the project. + pub issues_url: Option, + #[validate( + custom(function = "crate::util::validate::validate_url"), + length(max = 2048) + )] + /// An optional link to the source code for the project. + pub source_url: Option, + #[validate( + custom(function = "crate::util::validate::validate_url"), + length(max = 2048) + )] + /// An optional link to the project's wiki page or other relevant information. + pub wiki_url: Option, + #[validate( + custom(function = "crate::util::validate::validate_url"), + length(max = 2048) + )] + /// An optional link to the project's license page + pub license_url: Option, + #[validate( + custom(function = "crate::util::validate::validate_url"), + length(max = 2048) + )] + /// An optional link to the project's discord. + pub discord_url: Option, + /// An optional list of all donation links the project has\ + #[validate] + pub donation_urls: Option>, + + /// An optional boolean. If true, the project will be created as a draft. 
+ pub is_draft: Option, + + /// The license id that the project follows + pub license_id: String, + + #[validate(length(max = 64))] + #[validate] + /// The multipart names of the gallery items to upload + pub gallery_items: Option>, + #[serde(default = "default_requested_status")] + /// The status of the mod to be set once it is approved + pub requested_status: ProjectStatus, + + // Associations to uploaded images in body/description + #[validate(length(max = 10))] + #[serde(default)] + pub uploaded_images: Vec, + + /// The id of the organization to create the project in + pub organization_id: Option, +} + +#[derive(Serialize, Deserialize, Validate, Clone)] +pub struct NewGalleryItem { + /// The name of the multipart item where the gallery media is located + pub item: String, + /// Whether the gallery item should show in search or not + pub featured: bool, + #[validate(length(min = 1, max = 2048))] + /// The title of the gallery item + pub title: Option, + #[validate(length(min = 1, max = 2048))] + /// The description of the gallery item + pub description: Option, + pub ordering: i64, +} + +pub struct UploadedFile { + pub file_id: String, + pub file_name: String, +} + +pub async fn undo_uploads( + file_host: &dyn FileHost, + uploaded_files: &[UploadedFile], +) -> Result<(), CreateError> { + for file in uploaded_files { + file_host + .delete_file_version(&file.file_id, &file.file_name) + .await?; + } + Ok(()) +} + +pub async fn project_create( + req: HttpRequest, + mut payload: Multipart, + client: Data, + redis: Data, + file_host: Data>, + session_queue: Data, +) -> Result { + let mut transaction = client.begin().await?; + let mut uploaded_files = Vec::new(); + + let result = project_create_inner( + req, + &mut payload, + &mut transaction, + &***file_host, + &mut uploaded_files, + &client, + &redis, + &session_queue, + ) + .await; + + if result.is_err() { + let undo_result = undo_uploads(&***file_host, &uploaded_files).await; + let rollback_result = 
transaction.rollback().await; + + undo_result?; + if let Err(e) = rollback_result { + return Err(e.into()); + } + } else { + transaction.commit().await?; + } + + result +} +/* + +Project Creation Steps: +Get logged in user + Must match the author in the version creation + +1. Data + - Gets "data" field from multipart form; must be first + - Verification: string lengths + - Create versions + - Some shared logic with version creation + - Create list of VersionBuilders + - Create ProjectBuilder + +2. Upload + - Icon: check file format & size + - Upload to backblaze & record URL + - Project files + - Check for matching version + - File size limits? + - Check file type + - Eventually, malware scan + - Upload to backblaze & create VersionFileBuilder + - + +3. Creation + - Database stuff + - Add project data to indexing queue +*/ + +#[allow(clippy::too_many_arguments)] +async fn project_create_inner( + req: HttpRequest, + payload: &mut Multipart, + transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, + file_host: &dyn FileHost, + uploaded_files: &mut Vec, + pool: &PgPool, + redis: &RedisPool, + session_queue: &AuthQueue, +) -> Result { + // The base URL for files uploaded to backblaze + let cdn_url = dotenvy::var("CDN_URL")?; + + // The currently logged in user + let current_user = get_user_from_headers( + &req, + pool, + redis, + session_queue, + Some(&[Scopes::PROJECT_CREATE]), + ) + .await? + .1; + + let project_id: ProjectId = models::generate_project_id(transaction).await?.into(); + let all_loaders = models::loader_fields::Loader::list(&mut **transaction, redis).await?; + + let project_create_data: ProjectCreateData; + let mut versions; + let mut versions_map = std::collections::HashMap::new(); + let mut gallery_urls = Vec::new(); + { + // The first multipart field must be named "data" and contain a + // JSON `ProjectCreateData` object. 
+ + let mut field = payload + .next() + .await + .map(|m| m.map_err(CreateError::MultipartError)) + .unwrap_or_else(|| { + Err(CreateError::MissingValueError(String::from( + "No `data` field in multipart upload", + ))) + })?; + + let content_disposition = field.content_disposition(); + let name = content_disposition + .get_name() + .ok_or_else(|| CreateError::MissingValueError(String::from("Missing content name")))?; + + if name != "data" { + return Err(CreateError::InvalidInput(String::from( + "`data` field must come before file fields", + ))); + } + + let mut data = Vec::new(); + while let Some(chunk) = field.next().await { + data.extend_from_slice(&chunk.map_err(CreateError::MultipartError)?); + } + let create_data: ProjectCreateData = serde_json::from_slice(&data)?; + + create_data + .validate() + .map_err(|err| CreateError::InvalidInput(validation_errors_to_string(err, None)))?; + + let slug_project_id_option: Option = + serde_json::from_str(&format!("\"{}\"", create_data.slug)).ok(); + + if let Some(slug_project_id) = slug_project_id_option { + let slug_project_id: models::ids::ProjectId = slug_project_id.into(); + let results = sqlx::query!( + " + SELECT EXISTS(SELECT 1 FROM mods WHERE id=$1) + ", + slug_project_id as models::ids::ProjectId + ) + .fetch_one(&mut **transaction) + .await + .map_err(|e| CreateError::DatabaseError(e.into()))?; + + if results.exists.unwrap_or(false) { + return Err(CreateError::SlugCollision); + } + } + + { + let results = sqlx::query!( + " + SELECT EXISTS(SELECT 1 FROM mods WHERE slug = LOWER($1)) + ", + create_data.slug + ) + .fetch_one(&mut **transaction) + .await + .map_err(|e| CreateError::DatabaseError(e.into()))?; + + if results.exists.unwrap_or(false) { + return Err(CreateError::SlugCollision); + } + } + + // Create VersionBuilders for the versions specified in `initial_versions` + versions = Vec::with_capacity(create_data.initial_versions.len()); + for (i, data) in create_data.initial_versions.iter().enumerate() { + // 
Create a map of multipart field names to version indices + for name in &data.file_parts { + if versions_map.insert(name.to_owned(), i).is_some() { + // If the name is already used + return Err(CreateError::InvalidInput(String::from( + "Duplicate multipart field name", + ))); + } + } + versions.push( + create_initial_version( + data, + project_id, + current_user.id, + &all_loaders, + transaction, + redis, + ) + .await?, + ); + } + + project_create_data = create_data; + } + + let mut icon_data = None; + + let mut error = None; + while let Some(item) = payload.next().await { + let mut field: Field = item?; + + if error.is_some() { + continue; + } + + let result = async { + let content_disposition = field.content_disposition().clone(); + + let name = content_disposition.get_name().ok_or_else(|| { + CreateError::MissingValueError("Missing content name".to_string()) + })?; + + let (file_name, file_extension) = + super::version_creation::get_name_ext(&content_disposition)?; + + if name == "icon" { + if icon_data.is_some() { + return Err(CreateError::InvalidInput(String::from( + "Projects can only have one icon", + ))); + } + // Upload the icon to the cdn + icon_data = Some( + process_icon_upload( + uploaded_files, + project_id.0, + file_extension, + file_host, + field, + &cdn_url, + ) + .await?, + ); + return Ok(()); + } + if let Some(gallery_items) = &project_create_data.gallery_items { + if gallery_items.iter().filter(|a| a.featured).count() > 1 { + return Err(CreateError::InvalidInput(String::from( + "Only one gallery image can be featured.", + ))); + } + if let Some(item) = gallery_items.iter().find(|x| x.item == name) { + let data = read_from_field( + &mut field, + 5 * (1 << 20), + "Gallery image exceeds the maximum of 5MiB.", + ) + .await?; + let hash = sha1::Sha1::from(&data).hexdigest(); + let (_, file_extension) = + super::version_creation::get_name_ext(&content_disposition)?; + let content_type = crate::util::ext::get_image_content_type(file_extension) + 
.ok_or_else(|| { + CreateError::InvalidIconFormat(file_extension.to_string()) + })?; + let url = format!("data/{project_id}/images/{hash}.{file_extension}"); + let upload_data = file_host + .upload_file(content_type, &url, data.freeze()) + .await?; + uploaded_files.push(UploadedFile { + file_id: upload_data.file_id, + file_name: upload_data.file_name, + }); + gallery_urls.push(crate::models::projects::GalleryItem { + url: format!("{cdn_url}/{url}"), + featured: item.featured, + title: item.title.clone(), + description: item.description.clone(), + created: Utc::now(), + ordering: item.ordering, + }); + return Ok(()); + } + } + let index = if let Some(i) = versions_map.get(name) { + *i + } else { + return Err(CreateError::InvalidInput(format!( + "File `{file_name}` (field {name}) isn't specified in the versions data" + ))); + }; + // `index` is always valid for these lists + let created_version = versions.get_mut(index).unwrap(); + let version_data = project_create_data.initial_versions.get(index).unwrap(); + // TODO: maybe redundant is this calculation done elsewhere? 
+ + // Upload the new jar file + super::version_creation::upload_file( + &mut field, + file_host, + version_data.file_parts.len(), + uploaded_files, + &mut created_version.files, + &mut created_version.dependencies, + &cdn_url, + &content_disposition, + project_id, + created_version.version_id.into(), + &created_version.version_fields, + version_data.loaders.clone(), + version_data.primary_file.is_some(), + version_data.primary_file.as_deref() == Some(name), + None, + transaction, + redis, + ) + .await?; + + Ok(()) + } + .await; + + if result.is_err() { + error = result.err(); + } + } + + if let Some(error) = error { + return Err(error); + } + + { + // Check to make sure that all specified files were uploaded + for (version_data, builder) in project_create_data + .initial_versions + .iter() + .zip(versions.iter()) + { + if version_data.file_parts.len() != builder.files.len() { + return Err(CreateError::InvalidInput(String::from( + "Some files were specified in initial_versions but not uploaded", + ))); + } + } + + // Convert the list of category names to actual categories + let mut categories = Vec::with_capacity(project_create_data.categories.len()); + for category in &project_create_data.categories { + let ids = models::categories::Category::get_ids(category, &mut **transaction).await?; + if ids.is_empty() { + return Err(CreateError::InvalidCategory(category.clone())); + } + + // TODO: We should filter out categories that don't match the project type of any of the versions + // ie: if mod and modpack both share a name this should only have modpack if it only has a modpack as a version + categories.extend(ids.values()); + } + + let mut additional_categories = + Vec::with_capacity(project_create_data.additional_categories.len()); + for category in &project_create_data.additional_categories { + let ids = models::categories::Category::get_ids(category, &mut **transaction).await?; + if ids.is_empty() { + return Err(CreateError::InvalidCategory(category.clone())); + } 
+ // TODO: We should filter out categories that don't match the project type of any of the versions + // ie: if mod and modpack both share a name this should only have modpack if it only has a modpack as a version + additional_categories.extend(ids.values()); + } + + let team = models::team_item::TeamBuilder { + members: vec![models::team_item::TeamMemberBuilder { + user_id: current_user.id.into(), + role: crate::models::teams::OWNER_ROLE.to_owned(), + // Allow all permissions for project creator, even if attached to a project + permissions: ProjectPermissions::all(), + organization_permissions: None, + accepted: true, + payouts_split: Decimal::ONE_HUNDRED, + ordering: 0, + }], + }; + + let team_id = team.insert(&mut *transaction).await?; + + let status; + if project_create_data.is_draft.unwrap_or(false) { + status = ProjectStatus::Draft; + } else { + status = ProjectStatus::Processing; + if project_create_data.initial_versions.is_empty() { + return Err(CreateError::InvalidInput(String::from( + "Project submitted for review with no initial versions", + ))); + } + } + + let license_id = + spdx::Expression::parse(&project_create_data.license_id).map_err(|err| { + CreateError::InvalidInput(format!("Invalid SPDX license identifier: {err}")) + })?; + + let mut donation_urls = vec![]; + + if let Some(urls) = &project_create_data.donation_urls { + for url in urls { + let platform_id = + models::categories::DonationPlatform::get_id(&url.id, &mut **transaction) + .await? 
+ .ok_or_else(|| { + CreateError::InvalidInput(format!( + "Donation platform {} does not exist.", + url.id.clone() + )) + })?; + + donation_urls.push(models::project_item::DonationUrl { + platform_id, + platform_short: "".to_string(), + platform_name: "".to_string(), + url: url.url.clone(), + }) + } + } + + let project_builder_actual = models::project_item::ProjectBuilder { + project_id: project_id.into(), + team_id, + organization_id: project_create_data.organization_id.map(|x| x.into()), + title: project_create_data.title, + description: project_create_data.description, + body: project_create_data.body, + icon_url: icon_data.clone().map(|x| x.0), + issues_url: project_create_data.issues_url, + source_url: project_create_data.source_url, + wiki_url: project_create_data.wiki_url, + + license_url: project_create_data.license_url, + discord_url: project_create_data.discord_url, + categories, + additional_categories, + initial_versions: versions, + status, + requested_status: Some(project_create_data.requested_status), + license: license_id.to_string(), + slug: Some(project_create_data.slug), + donation_urls, + gallery_items: gallery_urls + .iter() + .map(|x| models::project_item::GalleryItem { + image_url: x.url.clone(), + featured: x.featured, + title: x.title.clone(), + description: x.description.clone(), + created: x.created, + ordering: x.ordering, + }) + .collect(), + color: icon_data.and_then(|x| x.1), + monetization_status: MonetizationStatus::Monetized, + }; + let project_builder = project_builder_actual.clone(); + + let now = Utc::now(); + + let id = project_builder_actual.insert(&mut *transaction).await?; + User::clear_project_cache(&[current_user.id.into()], redis).await?; + + for image_id in project_create_data.uploaded_images { + if let Some(db_image) = + image_item::Image::get(image_id.into(), &mut **transaction, redis).await? + { + let image: Image = db_image.into(); + if !matches!(image.context, ImageContext::Project { .. 
}) + || image.context.inner_id().is_some() + { + return Err(CreateError::InvalidInput(format!( + "Image {} is not unused and in the 'project' context", + image_id + ))); + } + + sqlx::query!( + " + UPDATE uploaded_images + SET mod_id = $1 + WHERE id = $2 + ", + id as models::ids::ProjectId, + image_id.0 as i64 + ) + .execute(&mut **transaction) + .await?; + + image_item::Image::clear_cache(image.id.into(), redis).await?; + } else { + return Err(CreateError::InvalidInput(format!( + "Image {} does not exist", + image_id + ))); + } + } + + let thread_id = ThreadBuilder { + type_: ThreadType::Project, + members: vec![], + project_id: Some(id), + report_id: None, + } + .insert(&mut *transaction) + .await?; + + let loaders = project_builder + .initial_versions + .iter() + .flat_map(|v| v.loaders.clone()) + .unique() + .collect::>(); + let (project_types, games) = Loader::list(&mut **transaction, redis) + .await? + .into_iter() + .fold( + (Vec::new(), Vec::new()), + |(mut project_types, mut games), loader| { + if loaders.contains(&loader.id) { + project_types.extend(loader.supported_project_types); + games.extend(loader.supported_games.iter().map(|x| x.name().to_string())); + } + (project_types, games) + }, + ); + + let response = crate::models::projects::Project { + id: project_id, + slug: project_builder.slug.clone(), + project_types, + games, + team: team_id.into(), + organization: project_create_data.organization_id, + title: project_builder.title.clone(), + description: project_builder.description.clone(), + body: project_builder.body.clone(), + body_url: None, + published: now, + updated: now, + approved: None, + queued: None, + status, + requested_status: project_builder.requested_status, + moderator_message: None, + license: License { + id: project_create_data.license_id.clone(), + name: "".to_string(), + url: project_builder.license_url.clone(), + }, + downloads: 0, + followers: 0, + categories: project_create_data.categories, + additional_categories: 
project_create_data.additional_categories, + loaders: vec![], + versions: project_builder + .initial_versions + .iter() + .map(|v| v.version_id.into()) + .collect::>(), + icon_url: project_builder.icon_url.clone(), + issues_url: project_builder.issues_url.clone(), + source_url: project_builder.source_url.clone(), + wiki_url: project_builder.wiki_url.clone(), + discord_url: project_builder.discord_url.clone(), + donation_urls: project_create_data.donation_urls.clone(), + gallery: gallery_urls, + color: project_builder.color, + thread_id: thread_id.into(), + monetization_status: MonetizationStatus::Monetized, + }; + + Ok(HttpResponse::Ok().json(response)) + } +} + +async fn create_initial_version( + version_data: &InitialVersionData, + project_id: ProjectId, + author: UserId, + all_loaders: &[models::loader_fields::Loader], + transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, + redis: &RedisPool, +) -> Result { + if version_data.project_id.is_some() { + return Err(CreateError::InvalidInput(String::from( + "Found project id in initial version for new project", + ))); + } + + version_data + .validate() + .map_err(|err| CreateError::ValidationError(validation_errors_to_string(err, None)))?; + + // Randomly generate a new id to be used for the version + let version_id: VersionId = models::generate_version_id(transaction).await?.into(); + + let loaders = version_data + .loaders + .iter() + .map(|x| { + all_loaders + .iter() + .find(|y| y.loader == x.0) + .ok_or_else(|| CreateError::InvalidLoader(x.0.clone())) + .map(|y| y.id) + }) + .collect::, CreateError>>()?; + + let loader_fields = LoaderField::get_fields(&mut **transaction, redis).await?; + let mut version_fields = vec![]; + let mut loader_field_enum_values = + LoaderFieldEnumValue::list_many_loader_fields(&loader_fields, &mut **transaction, redis) + .await?; + for (key, value) in version_data.fields.iter() { + let loader_field = loader_fields + .iter() + .find(|lf| &lf.field == key) + .ok_or_else(|| { + 
CreateError::InvalidInput(format!("Loader field '{key}' does not exist!")) + })?; + let enum_variants = loader_field_enum_values + .remove(&loader_field.id) + .unwrap_or_default(); + let vf: VersionField = VersionField::check_parse( + version_id.into(), + loader_field.clone(), + value.clone(), + enum_variants, + ) + .map_err(CreateError::InvalidInput)?; + version_fields.push(vf); + } + + let dependencies = version_data + .dependencies + .iter() + .map(|d| models::version_item::DependencyBuilder { + version_id: d.version_id.map(|x| x.into()), + project_id: d.project_id.map(|x| x.into()), + dependency_type: d.dependency_type.to_string(), + file_name: None, + }) + .collect::>(); + + let version = models::version_item::VersionBuilder { + version_id: version_id.into(), + project_id: project_id.into(), + author_id: author.into(), + name: version_data.version_title.clone(), + version_number: version_data.version_number.clone(), + changelog: version_data.version_body.clone().unwrap_or_default(), + files: Vec::new(), + dependencies, + loaders, + version_fields, + featured: version_data.featured, + status: VersionStatus::Listed, + version_type: version_data.release_channel.to_string(), + requested_status: None, + ordering: version_data.ordering, + }; + + Ok(version) +} + +async fn process_icon_upload( + uploaded_files: &mut Vec, + id: u64, + file_extension: &str, + file_host: &dyn FileHost, + mut field: Field, + cdn_url: &str, +) -> Result<(String, Option), CreateError> { + if let Some(content_type) = crate::util::ext::get_image_content_type(file_extension) { + let data = read_from_field(&mut field, 262144, "Icons must be smaller than 256KiB").await?; + + let color = crate::util::img::get_color_from_img(&data)?; + + let hash = sha1::Sha1::from(&data).hexdigest(); + let upload_data = file_host + .upload_file( + content_type, + &format!("data/{id}/{hash}.{file_extension}"), + data.freeze(), + ) + .await?; + + uploaded_files.push(UploadedFile { + file_id: upload_data.file_id, + 
file_name: upload_data.file_name.clone(), + }); + + Ok((format!("{}/{}", cdn_url, upload_data.file_name), color)) + } else { + Err(CreateError::InvalidIconFormat(file_extension.to_string())) + } +} diff --git a/src/routes/v3/projects.rs b/src/routes/v3/projects.rs new file mode 100644 index 00000000..d35182aa --- /dev/null +++ b/src/routes/v3/projects.rs @@ -0,0 +1,2495 @@ +use std::sync::Arc; + +use crate::auth::{filter_authorized_projects, get_user_from_headers, is_authorized}; +use crate::database::models::notification_item::NotificationBuilder; +use crate::database::models::project_item::{GalleryItem, ModCategory}; +use crate::database::models::thread_item::ThreadMessageBuilder; +use crate::database::models::{ids as db_ids, image_item}; +use crate::database::redis::RedisPool; +use crate::database::{self, models as db_models}; +use crate::file_hosting::FileHost; +use crate::models; +use crate::models::ids::base62_impl::parse_base62; +use crate::models::images::ImageContext; +use crate::models::notifications::NotificationBody; +use crate::models::pats::Scopes; +use crate::models::projects::{ + DonationLink, MonetizationStatus, Project, ProjectId, ProjectStatus, SearchRequest, +}; +use crate::models::teams::ProjectPermissions; +use crate::models::threads::MessageBody; +use crate::queue::session::AuthQueue; +use crate::routes::ApiError; +use crate::search::{search_for_project, SearchConfig, SearchError}; +use crate::util::img; +use crate::util::routes::read_from_payload; +use crate::util::validate::validation_errors_to_string; +use actix_web::{web, HttpRequest, HttpResponse}; +use chrono::{DateTime, Utc}; +use futures::TryStreamExt; +use meilisearch_sdk::indexes::IndexesResults; +use serde::{Deserialize, Serialize}; +use serde_json::json; +use sqlx::PgPool; +use validator::Validate; + +pub fn config(cfg: &mut web::ServiceConfig) { + cfg.route("search", web::get().to(project_search)); + cfg.route("projects", web::get().to(projects_get)); + cfg.route("projects", 
web::patch().to(projects_edit)); + cfg.route("projects_random", web::get().to(random_projects_get)); + + cfg.service( + web::scope("project") + .route("{id}", web::get().to(project_get)) + .route("{id}/check", web::get().to(project_get_check)) + .route("{id}", web::delete().to(project_get)) + .route("{id}", web::patch().to(project_edit)) + .route("{id}/icon", web::patch().to(project_icon_edit)) + .route("{id}/icon", web::delete().to(delete_project_icon)) + .route("{id}/gallery", web::post().to(add_gallery_item)) + .route("{id}/gallery", web::patch().to(edit_gallery_item)) + .route("{id}/gallery", web::delete().to(delete_gallery_item)) + .route("{id}/follow", web::post().to(project_follow)) + .route("{id}/follow", web::delete().to(project_unfollow)) + .route("{id}/schedule", web::post().to(project_schedule)) + .service( + web::scope("{project_id}") + .route( + "members", + web::get().to(super::teams::team_members_get_project), + ) + .route("versions", web::get().to(super::versions::version_list)) + .route( + "version/{slug}", + web::get().to(super::versions::version_project_get), + ) + .route("dependencies", web::get().to(dependency_list)), + ), + ); +} + +#[derive(Deserialize, Validate)] +pub struct RandomProjects { + #[validate(range(min = 1, max = 100))] + pub count: u32, +} + +pub async fn random_projects_get( + web::Query(count): web::Query, + pool: web::Data, + redis: web::Data, +) -> Result { + count + .validate() + .map_err(|err| ApiError::Validation(validation_errors_to_string(err, None)))?; + + let project_ids = sqlx::query!( + " + SELECT id FROM mods TABLESAMPLE SYSTEM_ROWS($1) WHERE status = ANY($2) + ", + count.count as i32, + &*crate::models::projects::ProjectStatus::iterator() + .filter(|x| x.is_searchable()) + .map(|x| x.to_string()) + .collect::>(), + ) + .fetch_many(&**pool) + .try_filter_map(|e| async { Ok(e.right().map(|m| db_ids::ProjectId(m.id))) }) + .try_collect::>() + .await?; + + let projects_data = 
db_models::Project::get_many_ids(&project_ids, &**pool, &redis) + .await? + .into_iter() + .map(Project::from) + .collect::>(); + + Ok(HttpResponse::Ok().json(projects_data)) +} + +#[derive(Serialize, Deserialize)] +pub struct ProjectIds { + pub ids: String, +} + +pub async fn projects_get( + req: HttpRequest, + web::Query(ids): web::Query, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let ids = serde_json::from_str::>(&ids.ids)?; + let projects_data = db_models::Project::get_many(&ids, &**pool, &redis).await?; + + let user_option = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::PROJECT_READ]), + ) + .await + .map(|x| x.1) + .ok(); + + let projects = filter_authorized_projects(projects_data, &user_option, &pool).await?; + + Ok(HttpResponse::Ok().json(projects)) +} + +pub async fn project_get( + req: HttpRequest, + info: web::Path<(String,)>, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let string = info.into_inner().0; + + let project_data = db_models::Project::get(&string, &**pool, &redis).await?; + let user_option = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::PROJECT_READ]), + ) + .await + .map(|x| x.1) + .ok(); + + if let Some(data) = project_data { + if is_authorized(&data.inner, &user_option, &pool).await? 
{ + return Ok(HttpResponse::Ok().json(Project::from(data))); + } + } + Ok(HttpResponse::NotFound().body("")) +} + +#[derive(Serialize, Deserialize, Validate)] +pub struct EditProject { + #[validate( + length(min = 3, max = 64), + custom(function = "crate::util::validate::validate_name") + )] + pub title: Option, + #[validate(length(min = 3, max = 256))] + pub description: Option, + #[validate(length(max = 65536))] + pub body: Option, + #[validate(length(max = 3))] + pub categories: Option>, + #[validate(length(max = 256))] + pub additional_categories: Option>, + #[serde( + default, + skip_serializing_if = "Option::is_none", + with = "::serde_with::rust::double_option" + )] + #[validate( + custom(function = "crate::util::validate::validate_url"), + length(max = 2048) + )] + pub issues_url: Option>, + #[serde( + default, + skip_serializing_if = "Option::is_none", + with = "::serde_with::rust::double_option" + )] + #[validate( + custom(function = "crate::util::validate::validate_url"), + length(max = 2048) + )] + pub source_url: Option>, + #[serde( + default, + skip_serializing_if = "Option::is_none", + with = "::serde_with::rust::double_option" + )] + #[validate( + custom(function = "crate::util::validate::validate_url"), + length(max = 2048) + )] + pub wiki_url: Option>, + #[serde( + default, + skip_serializing_if = "Option::is_none", + with = "::serde_with::rust::double_option" + )] + #[validate( + custom(function = "crate::util::validate::validate_url"), + length(max = 2048) + )] + pub license_url: Option>, + #[serde( + default, + skip_serializing_if = "Option::is_none", + with = "::serde_with::rust::double_option" + )] + #[validate( + custom(function = "crate::util::validate::validate_url"), + length(max = 2048) + )] + pub discord_url: Option>, + #[validate] + pub donation_urls: Option>, + pub license_id: Option, + #[validate( + length(min = 3, max = 64), + regex = "crate::util::validate::RE_URL_SAFE" + )] + pub slug: Option, + pub status: Option, + #[serde( + 
default, + skip_serializing_if = "Option::is_none", + with = "::serde_with::rust::double_option" + )] + pub requested_status: Option>, + #[serde( + default, + skip_serializing_if = "Option::is_none", + with = "::serde_with::rust::double_option" + )] + #[validate(length(max = 2000))] + pub moderation_message: Option>, + #[serde( + default, + skip_serializing_if = "Option::is_none", + with = "::serde_with::rust::double_option" + )] + #[validate(length(max = 65536))] + pub moderation_message_body: Option>, + pub monetization_status: Option, +} + +pub async fn project_edit( + req: HttpRequest, + info: web::Path<(String,)>, + pool: web::Data, + config: web::Data, + new_project: web::Json, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::PROJECT_WRITE]), + ) + .await? + .1; + + new_project + .validate() + .map_err(|err| ApiError::Validation(validation_errors_to_string(err, None)))?; + + let string = info.into_inner().0; + let result = db_models::Project::get(&string, &**pool, &redis).await?; + if let Some(project_item) = result { + let id = project_item.inner.id; + + let (team_member, organization_team_member) = + db_models::TeamMember::get_for_project_permissions( + &project_item.inner, + user.id.into(), + &**pool, + ) + .await?; + + let permissions = ProjectPermissions::get_permissions_by_role( + &user.role, + &team_member, + &organization_team_member, + ); + + if let Some(perms) = permissions { + let mut transaction = pool.begin().await?; + + if let Some(title) = &new_project.title { + if !perms.contains(ProjectPermissions::EDIT_DETAILS) { + return Err(ApiError::CustomAuthentication( + "You do not have the permissions to edit the title of this project!" 
+ .to_string(), + )); + } + + sqlx::query!( + " + UPDATE mods + SET title = $1 + WHERE (id = $2) + ", + title.trim(), + id as db_ids::ProjectId, + ) + .execute(&mut *transaction) + .await?; + } + + if let Some(description) = &new_project.description { + if !perms.contains(ProjectPermissions::EDIT_DETAILS) { + return Err(ApiError::CustomAuthentication( + "You do not have the permissions to edit the description of this project!" + .to_string(), + )); + } + + sqlx::query!( + " + UPDATE mods + SET description = $1 + WHERE (id = $2) + ", + description, + id as db_ids::ProjectId, + ) + .execute(&mut *transaction) + .await?; + } + + if let Some(status) = &new_project.status { + if !perms.contains(ProjectPermissions::EDIT_DETAILS) { + return Err(ApiError::CustomAuthentication( + "You do not have the permissions to edit the status of this project!" + .to_string(), + )); + } + + if !(user.role.is_mod() + || !project_item.inner.status.is_approved() + && status == &ProjectStatus::Processing + || project_item.inner.status.is_approved() && status.can_be_requested()) + { + return Err(ApiError::CustomAuthentication( + "You don't have permission to set this status!".to_string(), + )); + } + + if status == &ProjectStatus::Processing { + if project_item.versions.is_empty() { + return Err(ApiError::InvalidInput(String::from( + "Project submitted for review with no initial versions", + ))); + } + + sqlx::query!( + " + UPDATE mods + SET moderation_message = NULL, moderation_message_body = NULL, queued = NOW() + WHERE (id = $1) + ", + id as db_ids::ProjectId, + ) + .execute(&mut *transaction) + .await?; + + sqlx::query!( + " + UPDATE threads + SET show_in_mod_inbox = FALSE + WHERE id = $1 + ", + project_item.thread_id as db_ids::ThreadId, + ) + .execute(&mut *transaction) + .await?; + } + + if status.is_approved() && !project_item.inner.status.is_approved() { + sqlx::query!( + " + UPDATE mods + SET approved = NOW() + WHERE id = $1 AND approved IS NULL + ", + id as db_ids::ProjectId, + ) 
+ .execute(&mut *transaction) + .await?; + } + if status.is_searchable() && !project_item.inner.webhook_sent { + if let Ok(webhook_url) = dotenvy::var("PUBLIC_DISCORD_WEBHOOK") { + crate::util::webhook::send_discord_webhook( + project_item.inner.id.into(), + &pool, + &redis, + webhook_url, + None, + ) + .await + .ok(); + + sqlx::query!( + " + UPDATE mods + SET webhook_sent = TRUE + WHERE id = $1 + ", + id as db_ids::ProjectId, + ) + .execute(&mut *transaction) + .await?; + } + } + + if user.role.is_mod() { + if let Ok(webhook_url) = dotenvy::var("MODERATION_DISCORD_WEBHOOK") { + crate::util::webhook::send_discord_webhook( + project_item.inner.id.into(), + &pool, + &redis, + webhook_url, + Some( + format!( + "**[{}]({}/user/{})** changed project status from **{}** to **{}**", + user.username, + dotenvy::var("SITE_URL")?, + user.username, + &project_item.inner.status.as_friendly_str(), + status.as_friendly_str(), + ) + .to_string(), + ), + ) + .await + .ok(); + } + } + + if team_member.map(|x| !x.accepted).unwrap_or(true) { + let notified_members = sqlx::query!( + " + SELECT tm.user_id id + FROM team_members tm + WHERE tm.team_id = $1 AND tm.accepted + ", + project_item.inner.team_id as db_ids::TeamId + ) + .fetch_many(&mut *transaction) + .try_filter_map(|e| async { Ok(e.right().map(|c| db_models::UserId(c.id))) }) + .try_collect::>() + .await?; + + NotificationBuilder { + body: NotificationBody::StatusChange { + project_id: project_item.inner.id.into(), + old_status: project_item.inner.status, + new_status: *status, + }, + } + .insert_many(notified_members, &mut transaction, &redis) + .await?; + } + + ThreadMessageBuilder { + author_id: Some(user.id.into()), + body: MessageBody::StatusChange { + new_status: *status, + old_status: project_item.inner.status, + }, + thread_id: project_item.thread_id, + } + .insert(&mut transaction) + .await?; + + sqlx::query!( + " + UPDATE mods + SET status = $1 + WHERE (id = $2) + ", + status.as_str(), + id as db_ids::ProjectId, + ) 
+ .execute(&mut *transaction) + .await?; + + if project_item.inner.status.is_searchable() && !status.is_searchable() { + delete_from_index(id.into(), config).await?; + } + } + + if let Some(requested_status) = &new_project.requested_status { + if !perms.contains(ProjectPermissions::EDIT_DETAILS) { + return Err(ApiError::CustomAuthentication( + "You do not have the permissions to edit the requested status of this project!" + .to_string(), + )); + } + + if !requested_status + .map(|x| x.can_be_requested()) + .unwrap_or(true) + { + return Err(ApiError::InvalidInput(String::from( + "Specified status cannot be requested!", + ))); + } + + sqlx::query!( + " + UPDATE mods + SET requested_status = $1 + WHERE (id = $2) + ", + requested_status.map(|x| x.as_str()), + id as db_ids::ProjectId, + ) + .execute(&mut *transaction) + .await?; + } + + if perms.contains(ProjectPermissions::EDIT_DETAILS) { + if new_project.categories.is_some() { + sqlx::query!( + " + DELETE FROM mods_categories + WHERE joining_mod_id = $1 AND is_additional = FALSE + ", + id as db_ids::ProjectId, + ) + .execute(&mut *transaction) + .await?; + } + + if new_project.additional_categories.is_some() { + sqlx::query!( + " + DELETE FROM mods_categories + WHERE joining_mod_id = $1 AND is_additional = TRUE + ", + id as db_ids::ProjectId, + ) + .execute(&mut *transaction) + .await?; + } + } + + if let Some(categories) = &new_project.categories { + edit_project_categories( + categories, + &perms, + id as db_ids::ProjectId, + false, + &mut transaction, + ) + .await?; + } + + if let Some(categories) = &new_project.additional_categories { + edit_project_categories( + categories, + &perms, + id as db_ids::ProjectId, + true, + &mut transaction, + ) + .await?; + } + + if let Some(issues_url) = &new_project.issues_url { + if !perms.contains(ProjectPermissions::EDIT_DETAILS) { + return Err(ApiError::CustomAuthentication( + "You do not have the permissions to edit the issues URL of this project!" 
+ .to_string(), + )); + } + + sqlx::query!( + " + UPDATE mods + SET issues_url = $1 + WHERE (id = $2) + ", + issues_url.as_deref(), + id as db_ids::ProjectId, + ) + .execute(&mut *transaction) + .await?; + } + + if let Some(source_url) = &new_project.source_url { + if !perms.contains(ProjectPermissions::EDIT_DETAILS) { + return Err(ApiError::CustomAuthentication( + "You do not have the permissions to edit the source URL of this project!" + .to_string(), + )); + } + + sqlx::query!( + " + UPDATE mods + SET source_url = $1 + WHERE (id = $2) + ", + source_url.as_deref(), + id as db_ids::ProjectId, + ) + .execute(&mut *transaction) + .await?; + } + + if let Some(wiki_url) = &new_project.wiki_url { + if !perms.contains(ProjectPermissions::EDIT_DETAILS) { + return Err(ApiError::CustomAuthentication( + "You do not have the permissions to edit the wiki URL of this project!" + .to_string(), + )); + } + + sqlx::query!( + " + UPDATE mods + SET wiki_url = $1 + WHERE (id = $2) + ", + wiki_url.as_deref(), + id as db_ids::ProjectId, + ) + .execute(&mut *transaction) + .await?; + } + + if let Some(license_url) = &new_project.license_url { + if !perms.contains(ProjectPermissions::EDIT_DETAILS) { + return Err(ApiError::CustomAuthentication( + "You do not have the permissions to edit the license URL of this project!" + .to_string(), + )); + } + + sqlx::query!( + " + UPDATE mods + SET license_url = $1 + WHERE (id = $2) + ", + license_url.as_deref(), + id as db_ids::ProjectId, + ) + .execute(&mut *transaction) + .await?; + } + + if let Some(discord_url) = &new_project.discord_url { + if !perms.contains(ProjectPermissions::EDIT_DETAILS) { + return Err(ApiError::CustomAuthentication( + "You do not have the permissions to edit the discord URL of this project!" 
+ .to_string(), + )); + } + + sqlx::query!( + " + UPDATE mods + SET discord_url = $1 + WHERE (id = $2) + ", + discord_url.as_deref(), + id as db_ids::ProjectId, + ) + .execute(&mut *transaction) + .await?; + } + + if let Some(slug) = &new_project.slug { + if !perms.contains(ProjectPermissions::EDIT_DETAILS) { + return Err(ApiError::CustomAuthentication( + "You do not have the permissions to edit the slug of this project!" + .to_string(), + )); + } + + let slug_project_id_option: Option = parse_base62(slug).ok(); + if let Some(slug_project_id) = slug_project_id_option { + let results = sqlx::query!( + " + SELECT EXISTS(SELECT 1 FROM mods WHERE id=$1) + ", + slug_project_id as i64 + ) + .fetch_one(&mut *transaction) + .await?; + + if results.exists.unwrap_or(true) { + return Err(ApiError::InvalidInput( + "Slug collides with other project's id!".to_string(), + )); + } + } + + // Make sure the new slug is different from the old one + // We are able to unwrap here because the slug is always set + if !slug.eq(&project_item.inner.slug.clone().unwrap_or_default()) { + let results = sqlx::query!( + " + SELECT EXISTS(SELECT 1 FROM mods WHERE slug = LOWER($1)) + ", + slug + ) + .fetch_one(&mut *transaction) + .await?; + + if results.exists.unwrap_or(true) { + return Err(ApiError::InvalidInput( + "Slug collides with other project's id!".to_string(), + )); + } + } + + sqlx::query!( + " + UPDATE mods + SET slug = LOWER($1) + WHERE (id = $2) + ", + Some(slug), + id as db_ids::ProjectId, + ) + .execute(&mut *transaction) + .await?; + } + + if let Some(license) = &new_project.license_id { + if !perms.contains(ProjectPermissions::EDIT_DETAILS) { + return Err(ApiError::CustomAuthentication( + "You do not have the permissions to edit the license of this project!" 
+ .to_string(), + )); + } + + let mut license = license.clone(); + + if license.to_lowercase() == "arr" { + license = models::projects::DEFAULT_LICENSE_ID.to_string(); + } + + spdx::Expression::parse(&license).map_err(|err| { + ApiError::InvalidInput(format!("Invalid SPDX license identifier: {err}")) + })?; + + sqlx::query!( + " + UPDATE mods + SET license = $1 + WHERE (id = $2) + ", + license, + id as db_ids::ProjectId, + ) + .execute(&mut *transaction) + .await?; + } + if let Some(donations) = &new_project.donation_urls { + if !perms.contains(ProjectPermissions::EDIT_DETAILS) { + return Err(ApiError::CustomAuthentication( + "You do not have the permissions to edit the donation links of this project!" + .to_string(), + )); + } + + sqlx::query!( + " + DELETE FROM mods_donations + WHERE joining_mod_id = $1 + ", + id as db_ids::ProjectId, + ) + .execute(&mut *transaction) + .await?; + + for donation in donations { + let platform_id = db_models::categories::DonationPlatform::get_id( + &donation.id, + &mut *transaction, + ) + .await? + .ok_or_else(|| { + ApiError::InvalidInput(format!( + "Platform {} does not exist.", + donation.id.clone() + )) + })?; + + sqlx::query!( + " + INSERT INTO mods_donations (joining_mod_id, joining_platform_id, url) + VALUES ($1, $2, $3) + ", + id as db_ids::ProjectId, + platform_id as db_ids::DonationPlatformId, + donation.url + ) + .execute(&mut *transaction) + .await?; + } + } + + if let Some(moderation_message) = &new_project.moderation_message { + if !user.role.is_mod() + && (!project_item.inner.status.is_approved() || moderation_message.is_some()) + { + return Err(ApiError::CustomAuthentication( + "You do not have the permissions to edit the moderation message of this project!" 
+ .to_string(), + )); + } + + sqlx::query!( + " + UPDATE mods + SET moderation_message = $1 + WHERE (id = $2) + ", + moderation_message.as_deref(), + id as db_ids::ProjectId, + ) + .execute(&mut *transaction) + .await?; + } + + if let Some(moderation_message_body) = &new_project.moderation_message_body { + if !user.role.is_mod() + && (!project_item.inner.status.is_approved() + || moderation_message_body.is_some()) + { + return Err(ApiError::CustomAuthentication( + "You do not have the permissions to edit the moderation message body of this project!" + .to_string(), + )); + } + + sqlx::query!( + " + UPDATE mods + SET moderation_message_body = $1 + WHERE (id = $2) + ", + moderation_message_body.as_deref(), + id as db_ids::ProjectId, + ) + .execute(&mut *transaction) + .await?; + } + + if let Some(body) = &new_project.body { + if !perms.contains(ProjectPermissions::EDIT_BODY) { + return Err(ApiError::CustomAuthentication( + "You do not have the permissions to edit the body of this project!" + .to_string(), + )); + } + + sqlx::query!( + " + UPDATE mods + SET body = $1 + WHERE (id = $2) + ", + body, + id as db_ids::ProjectId, + ) + .execute(&mut *transaction) + .await?; + } + + if let Some(monetization_status) = &new_project.monetization_status { + if !perms.contains(ProjectPermissions::EDIT_DETAILS) { + return Err(ApiError::CustomAuthentication( + "You do not have the permissions to edit the monetization status of this project!" + .to_string(), + )); + } + + if (*monetization_status == MonetizationStatus::ForceDemonetized + || project_item.inner.monetization_status + == MonetizationStatus::ForceDemonetized) + && !user.role.is_mod() + { + return Err(ApiError::CustomAuthentication( + "You do not have the permissions to edit the monetization status of this project!" 
+ .to_string(), + )); + } + + sqlx::query!( + " + UPDATE mods + SET monetization_status = $1 + WHERE (id = $2) + ", + monetization_status.as_str(), + id as db_ids::ProjectId, + ) + .execute(&mut *transaction) + .await?; + } + + // check new description and body for links to associated images + // if they no longer exist in the description or body, delete them + let checkable_strings: Vec<&str> = vec![&new_project.description, &new_project.body] + .into_iter() + .filter_map(|x| x.as_ref().map(|y| y.as_str())) + .collect(); + + let context = ImageContext::Project { + project_id: Some(id.into()), + }; + + img::delete_unused_images(context, checkable_strings, &mut transaction, &redis).await?; + db_models::Project::clear_cache( + project_item.inner.id, + project_item.inner.slug, + None, + &redis, + ) + .await?; + + transaction.commit().await?; + Ok(HttpResponse::NoContent().body("")) + } else { + Err(ApiError::CustomAuthentication( + "You do not have permission to edit this project!".to_string(), + )) + } + } else { + Ok(HttpResponse::NotFound().body("")) + } +} + +pub async fn edit_project_categories( + categories: &Vec, + perms: &ProjectPermissions, + project_id: db_ids::ProjectId, + additional: bool, + transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, +) -> Result<(), ApiError> { + if !perms.contains(ProjectPermissions::EDIT_DETAILS) { + let additional_str = if additional { "additional " } else { "" }; + return Err(ApiError::CustomAuthentication(format!( + "You do not have the permissions to edit the {additional_str}categories of this project!" 
+    )));
+    }
+
+    // Resolve each requested category name to its db id(s) and stage the
+    // join-table rows before a single bulk insert.
+    let mut mod_categories = Vec::new();
+    for category in categories {
+        let category_ids =
+            db_models::categories::Category::get_ids(category, &mut **transaction).await?;
+        // TODO: We should filter out categories that don't match the project type of any of the versions
+        // ie: if mod and modpack both share a name this should only have modpack if it only has a modpack as a version
+
+        let mcategories = category_ids
+            .values()
+            .map(|x| ModCategory::new(project_id, *x, additional))
+            .collect::>();
+        mod_categories.extend(mcategories);
+    }
+    // Insert all staged rows inside the caller's transaction so the earlier
+    // DELETE and this INSERT commit (or roll back) together.
+    ModCategory::insert_many(mod_categories, &mut *transaction).await?;
+
+    Ok(())
+}
+
+// Search endpoint: forwards the parsed query to the search backend and
+// returns its results as JSON, unmodified.
+pub async fn project_search(
+    web::Query(info): web::Query,
+    config: web::Data,
+) -> Result {
+    let results = search_for_project(&info, &config).await?;
+    Ok(HttpResponse::Ok().json(results))
+}
+
+// Removes a project's document from every meilisearch index.
+// NOTE(review): iterates all indexes returned by the server — assumes
+// deleting a document that is absent from an index is a no-op; confirm.
+pub async fn delete_from_index(
+    id: ProjectId,
+    config: web::Data,
+) -> Result<(), meilisearch_sdk::errors::Error> {
+    let client = meilisearch_sdk::client::Client::new(&*config.address, &*config.key);
+
+    let indexes: IndexesResults = client.get_indexes().await?;
+
+    for index in indexes.results {
+        index.delete_document(id.to_string()).await?;
+    }
+
+    Ok(())
+}
+
+//checks the validity of a project id or slug
+pub async fn project_get_check(
+    info: web::Path<(String,)>,
+    pool: web::Data,
+    redis: web::Data,
+) -> Result {
+    let slug = info.into_inner().0;
+
+    // Project::get resolves either a base62 id or a slug.
+    let project_data = db_models::Project::get(&slug, &**pool, &redis).await?;
+
+    if let Some(project) = project_data {
+        Ok(HttpResponse::Ok().json(json!
({
+        "id": models::ids::ProjectId::from(project.inner.id)
+    })))
+    } else {
+        Ok(HttpResponse::NotFound().body(""))
+    }
+}
+
+// Response payload for `dependency_list`: the full projects and versions this
+// project depends on, sorted newest-first and deduplicated by id.
+#[derive(Serialize)]
+struct DependencyInfo {
+    pub projects: Vec,
+    pub versions: Vec,
+}
+
+// Lists a project's dependencies; unauthorized viewers of hidden projects
+// get a 404 rather than a 403 so the project's existence is not leaked.
+pub async fn dependency_list(
+    req: HttpRequest,
+    info: web::Path<(String,)>,
+    pool: web::Data,
+    redis: web::Data,
+    session_queue: web::Data,
+) -> Result {
+    let string = info.into_inner().0;
+
+    let result = db_models::Project::get(&string, &**pool, &redis).await?;
+
+    // Authentication is optional: anonymous users may list dependencies of
+    // publicly visible projects, so auth failures are swallowed with .ok().
+    let user_option = get_user_from_headers(
+        &req,
+        &**pool,
+        &redis,
+        &session_queue,
+        Some(&[Scopes::PROJECT_READ]),
+    )
+    .await
+    .map(|x| x.1)
+    .ok();
+
+    if let Some(project) = result {
+        if !is_authorized(&project.inner, &user_option, &pool).await? {
+            return Ok(HttpResponse::NotFound().body(""));
+        }
+
+        let dependencies =
+            database::Project::get_dependencies(project.inner.id, &**pool, &redis).await?;
+
+        // Collect the project ids to fetch: when a dependency has no version
+        // id (x.0), prefer its explicit mod dependency id (x.2), else x.1.
+        // NOTE(review): tuple-field meanings inferred from usage here —
+        // confirm against `get_dependencies`' return type.
+        let project_ids = dependencies
+            .iter()
+            .filter_map(|x| {
+                if x.0.is_none() {
+                    if let Some(mod_dependency_id) = x.2 {
+                        Some(mod_dependency_id)
+                    } else {
+                        x.1
+                    }
+                } else {
+                    x.1
+                }
+            })
+            .collect::>();
+
+        let dep_version_ids = dependencies
+            .iter()
+            .filter_map(|x| x.0)
+            .collect::>();
+        // Fetch dependent projects and versions concurrently.
+        let (projects_result, versions_result) = futures::future::try_join(
+            database::Project::get_many_ids(&project_ids, &**pool, &redis),
+            database::Version::get_many(&dep_version_ids, &**pool, &redis),
+        )
+        .await?;
+
+        let mut projects = projects_result
+            .into_iter()
+            .map(models::projects::Project::from)
+            .collect::>();
+        let mut versions = versions_result
+            .into_iter()
+            .map(models::projects::Version::from)
+            .collect::>();
+
+        // Sort newest-first, then drop duplicate ids.
+        // NOTE(review): dedup_by removes only *adjacent* duplicates — this
+        // relies on entries with equal ids sorting adjacently; confirm.
+        projects.sort_by(|a, b| b.published.cmp(&a.published));
+        projects.dedup_by(|a, b| a.id == b.id);
+
+        versions.sort_by(|a, b| b.date_published.cmp(&a.date_published));
+        versions.dedup_by(|a, b| a.id == b.id);
+
+        Ok(HttpResponse::Ok().json(DependencyInfo { projects, versions }))
+    } else {
+        Ok(HttpResponse::NotFound().body(""))
+    }
+}
+
+// Borrowed view over one (set / add / remove) category triple from
+// `BulkEditProject`, letting the same merge logic serve both the primary and
+// the additional category lists.
+#[derive(derive_new::new)]
+pub struct CategoryChanges<'a> {
+    pub categories: &'a Option>,
+    pub add_categories: &'a Option>,
+    pub remove_categories: &'a Option>,
+}
+
+// PATCH body for bulk-editing several projects at once. For each field group,
+// the plain field replaces the whole list, while the `add_…` / `remove_…`
+// variants adjust the existing list.
+#[derive(Deserialize, Validate)]
+pub struct BulkEditProject {
+    #[validate(length(max = 3))]
+    pub categories: Option>,
+    #[validate(length(max = 3))]
+    pub add_categories: Option>,
+    pub remove_categories: Option>,
+
+    // NOTE(review): max = 256 here vs max = 3 on add_additional_categories
+    // below — confirm the intended limits are not swapped.
+    #[validate(length(max = 256))]
+    pub additional_categories: Option>,
+    #[validate(length(max = 3))]
+    pub add_additional_categories: Option>,
+    pub remove_additional_categories: Option>,
+
+    #[validate]
+    pub donation_urls: Option>,
+    #[validate]
+    pub add_donation_urls: Option>,
+    #[validate]
+    pub remove_donation_urls: Option>,
+
+    // double_option distinguishes "field absent" (leave unchanged) from
+    // "field: null" (clear the stored URL).
+    #[serde(
+        default,
+        skip_serializing_if = "Option::is_none",
+        with = "::serde_with::rust::double_option"
+    )]
+    #[validate(
+        custom(function = "crate::util::validate::validate_url"),
+        length(max = 2048)
+    )]
+    pub issues_url: Option>,
+    #[serde(
+        default,
+        skip_serializing_if = "Option::is_none",
+        with = "::serde_with::rust::double_option"
+    )]
+    #[validate(
+        custom(function = "crate::util::validate::validate_url"),
+        length(max = 2048)
+    )]
+    pub source_url: Option>,
+    #[serde(
+        default,
+        skip_serializing_if = "Option::is_none",
+        with = "::serde_with::rust::double_option"
+    )]
+    #[validate(
+        custom(function = "crate::util::validate::validate_url"),
+        length(max = 2048)
+    )]
+    pub wiki_url: Option>,
+    #[serde(
+        default,
+        skip_serializing_if = "Option::is_none",
+        with = "::serde_with::rust::double_option"
+    )]
+    #[validate(
+        custom(function = "crate::util::validate::validate_url"),
+        length(max = 2048)
+    )]
+    pub discord_url: Option>,
+}
+
+// Bulk PATCH handler: applies one `BulkEditProject` payload to every project
+// id listed in the `ids` query parameter.
+pub async fn projects_edit(
+    req: HttpRequest,
+    web::Query(ids): web::Query,
+    pool: web::Data,
+    bulk_edit_project: web::Json,
+    redis: web::Data,
+    session_queue: web::Data,
+) -> Result {
+    let user = get_user_from_headers(
+        &req,
+        &**pool,
+        &redis,
+        &session_queue,
Some(&[Scopes::PROJECT_WRITE]), + ) + .await? + .1; + + bulk_edit_project + .validate() + .map_err(|err| ApiError::Validation(validation_errors_to_string(err, None)))?; + + let project_ids: Vec = serde_json::from_str::>(&ids.ids)? + .into_iter() + .map(|x| x.into()) + .collect(); + + let projects_data = db_models::Project::get_many_ids(&project_ids, &**pool, &redis).await?; + + if let Some(id) = project_ids + .iter() + .find(|x| !projects_data.iter().any(|y| x == &&y.inner.id)) + { + return Err(ApiError::InvalidInput(format!( + "Project {} not found", + ProjectId(id.0 as u64) + ))); + } + + let team_ids = projects_data + .iter() + .map(|x| x.inner.team_id) + .collect::>(); + let team_members = + db_models::TeamMember::get_from_team_full_many(&team_ids, &**pool, &redis).await?; + + let organization_ids = projects_data + .iter() + .filter_map(|x| x.inner.organization_id) + .collect::>(); + let organizations = + db_models::Organization::get_many_ids(&organization_ids, &**pool, &redis).await?; + + let organization_team_ids = organizations + .iter() + .map(|x| x.team_id) + .collect::>(); + let organization_team_members = + db_models::TeamMember::get_from_team_full_many(&organization_team_ids, &**pool, &redis) + .await?; + + let categories = db_models::categories::Category::list(&**pool, &redis).await?; + let donation_platforms = db_models::categories::DonationPlatform::list(&**pool, &redis).await?; + + let mut transaction = pool.begin().await?; + + for project in projects_data { + if !user.role.is_mod() { + let team_member = team_members + .iter() + .find(|x| x.team_id == project.inner.team_id && x.user_id == user.id.into()); + + let organization = project + .inner + .organization_id + .and_then(|oid| organizations.iter().find(|x| x.id == oid)); + + let organization_team_member = if let Some(organization) = organization { + organization_team_members + .iter() + .find(|x| x.team_id == organization.team_id && x.user_id == user.id.into()) + } else { + None + }; + + let 
permissions = ProjectPermissions::get_permissions_by_role( + &user.role, + &team_member.cloned(), + &organization_team_member.cloned(), + ) + .unwrap_or_default(); + + if team_member.is_some() { + if !permissions.contains(ProjectPermissions::EDIT_DETAILS) { + return Err(ApiError::CustomAuthentication(format!( + "You do not have the permissions to bulk edit project {}!", + project.inner.title + ))); + } + } else if project.inner.status.is_hidden() { + return Err(ApiError::InvalidInput(format!( + "Project {} not found", + ProjectId(project.inner.id.0 as u64) + ))); + } else { + return Err(ApiError::CustomAuthentication(format!( + "You are not a member of project {}!", + project.inner.title + ))); + }; + } + + bulk_edit_project_categories( + &categories, + &project.categories, + project.inner.id as db_ids::ProjectId, + CategoryChanges::new( + &bulk_edit_project.categories, + &bulk_edit_project.add_categories, + &bulk_edit_project.remove_categories, + ), + 3, + false, + &mut transaction, + ) + .await?; + + bulk_edit_project_categories( + &categories, + &project.additional_categories, + project.inner.id as db_ids::ProjectId, + CategoryChanges::new( + &bulk_edit_project.additional_categories, + &bulk_edit_project.add_additional_categories, + &bulk_edit_project.remove_additional_categories, + ), + 256, + true, + &mut transaction, + ) + .await?; + + let project_donations: Vec = project + .donation_urls + .into_iter() + .map(|d| DonationLink { + id: d.platform_short, + platform: d.platform_name, + url: d.url, + }) + .collect(); + let mut set_donation_links = + if let Some(donation_links) = bulk_edit_project.donation_urls.clone() { + donation_links + } else { + project_donations.clone() + }; + + if let Some(delete_donations) = &bulk_edit_project.remove_donation_urls { + for donation in delete_donations { + if let Some(pos) = set_donation_links + .iter() + .position(|x| donation.url == x.url && donation.id == x.id) + { + set_donation_links.remove(pos); + } + } + } + + if let 
Some(add_donations) = &bulk_edit_project.add_donation_urls { + set_donation_links.append(&mut add_donations.clone()); + } + + if set_donation_links != project_donations { + sqlx::query!( + " + DELETE FROM mods_donations + WHERE joining_mod_id = $1 + ", + project.inner.id as db_ids::ProjectId, + ) + .execute(&mut *transaction) + .await?; + + for donation in set_donation_links { + let platform_id = donation_platforms + .iter() + .find(|x| x.short == donation.id) + .ok_or_else(|| { + ApiError::InvalidInput(format!( + "Platform {} does not exist.", + donation.id.clone() + )) + })? + .id; + + sqlx::query!( + " + INSERT INTO mods_donations (joining_mod_id, joining_platform_id, url) + VALUES ($1, $2, $3) + ", + project.inner.id as db_ids::ProjectId, + platform_id as db_ids::DonationPlatformId, + donation.url + ) + .execute(&mut *transaction) + .await?; + } + } + + if let Some(issues_url) = &bulk_edit_project.issues_url { + sqlx::query!( + " + UPDATE mods + SET issues_url = $1 + WHERE (id = $2) + ", + issues_url.as_deref(), + project.inner.id as db_ids::ProjectId, + ) + .execute(&mut *transaction) + .await?; + } + + if let Some(source_url) = &bulk_edit_project.source_url { + sqlx::query!( + " + UPDATE mods + SET source_url = $1 + WHERE (id = $2) + ", + source_url.as_deref(), + project.inner.id as db_ids::ProjectId, + ) + .execute(&mut *transaction) + .await?; + } + + if let Some(wiki_url) = &bulk_edit_project.wiki_url { + sqlx::query!( + " + UPDATE mods + SET wiki_url = $1 + WHERE (id = $2) + ", + wiki_url.as_deref(), + project.inner.id as db_ids::ProjectId, + ) + .execute(&mut *transaction) + .await?; + } + + if let Some(discord_url) = &bulk_edit_project.discord_url { + sqlx::query!( + " + UPDATE mods + SET discord_url = $1 + WHERE (id = $2) + ", + discord_url.as_deref(), + project.inner.id as db_ids::ProjectId, + ) + .execute(&mut *transaction) + .await?; + } + + db_models::Project::clear_cache(project.inner.id, project.inner.slug, None, &redis).await?; + } + + 
transaction.commit().await?; + + Ok(HttpResponse::NoContent().body("")) +} + +pub async fn bulk_edit_project_categories( + all_db_categories: &[db_models::categories::Category], + project_categories: &Vec, + project_id: db_ids::ProjectId, + bulk_changes: CategoryChanges<'_>, + max_num_categories: usize, + is_additional: bool, + transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, +) -> Result<(), ApiError> { + let mut set_categories = if let Some(categories) = bulk_changes.categories.clone() { + categories + } else { + project_categories.clone() + }; + + if let Some(delete_categories) = &bulk_changes.remove_categories { + for category in delete_categories { + if let Some(pos) = set_categories.iter().position(|x| x == category) { + set_categories.remove(pos); + } + } + } + + if let Some(add_categories) = &bulk_changes.add_categories { + for category in add_categories { + if set_categories.len() < max_num_categories { + set_categories.push(category.clone()); + } else { + break; + } + } + } + + if &set_categories != project_categories { + sqlx::query!( + " + DELETE FROM mods_categories + WHERE joining_mod_id = $1 AND is_additional = $2 + ", + project_id as db_ids::ProjectId, + is_additional + ) + .execute(&mut **transaction) + .await?; + + let mut mod_categories = Vec::new(); + for category in set_categories { + let category_id = all_db_categories + .iter() + .find(|x| x.category == category) + .ok_or_else(|| { + ApiError::InvalidInput(format!("Category {} does not exist.", category.clone())) + })? 
+ .id; + mod_categories.push(ModCategory::new(project_id, category_id, is_additional)); + } + ModCategory::insert_many(mod_categories, &mut *transaction).await?; + } + + Ok(()) +} + +#[derive(Deserialize)] +pub struct SchedulingData { + pub time: DateTime, + pub requested_status: ProjectStatus, +} + +pub async fn project_schedule( + req: HttpRequest, + info: web::Path<(String,)>, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, + scheduling_data: web::Json, +) -> Result { + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::PROJECT_WRITE]), + ) + .await? + .1; + + if scheduling_data.time < Utc::now() { + return Err(ApiError::InvalidInput( + "You cannot schedule a project to be released in the past!".to_string(), + )); + } + + if !scheduling_data.requested_status.can_be_requested() { + return Err(ApiError::InvalidInput( + "Specified requested status cannot be requested!".to_string(), + )); + } + + let string = info.into_inner().0; + let result = db_models::Project::get(&string, &**pool, &redis).await?; + + if let Some(project_item) = result { + let (team_member, organization_team_member) = + db_models::TeamMember::get_for_project_permissions( + &project_item.inner, + user.id.into(), + &**pool, + ) + .await?; + + let permissions = ProjectPermissions::get_permissions_by_role( + &user.role, + &team_member.clone(), + &organization_team_member.clone(), + ) + .unwrap_or_default(); + + if !user.role.is_mod() && !permissions.contains(ProjectPermissions::EDIT_DETAILS) { + return Err(ApiError::CustomAuthentication( + "You do not have permission to edit this project's scheduling data!".to_string(), + )); + } + + if !project_item.inner.status.is_approved() { + return Err(ApiError::InvalidInput( + "This project has not been approved yet. 
Submit to the queue with the private status to schedule it in the future!".to_string(), + )); + } + + sqlx::query!( + " + UPDATE mods + SET status = $1, approved = $2 + WHERE (id = $3) + ", + ProjectStatus::Scheduled.as_str(), + scheduling_data.time, + project_item.inner.id as db_ids::ProjectId, + ) + .execute(&**pool) + .await?; + + db_models::Project::clear_cache( + project_item.inner.id, + project_item.inner.slug, + None, + &redis, + ) + .await?; + + Ok(HttpResponse::NoContent().body("")) + } else { + Ok(HttpResponse::NotFound().body("")) + } +} + +#[derive(Serialize, Deserialize)] +pub struct Extension { + pub ext: String, +} + +#[allow(clippy::too_many_arguments)] +pub async fn project_icon_edit( + web::Query(ext): web::Query, + req: HttpRequest, + info: web::Path<(String,)>, + pool: web::Data, + redis: web::Data, + file_host: web::Data>, + mut payload: web::Payload, + session_queue: web::Data, +) -> Result { + if let Some(content_type) = crate::util::ext::get_image_content_type(&ext.ext) { + let cdn_url = dotenvy::var("CDN_URL")?; + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::PROJECT_WRITE]), + ) + .await? + .1; + let string = info.into_inner().0; + + let project_item = db_models::Project::get(&string, &**pool, &redis) + .await? 
+ .ok_or_else(|| { + ApiError::InvalidInput("The specified project does not exist!".to_string()) + })?; + + if !user.role.is_mod() { + let (team_member, organization_team_member) = + db_models::TeamMember::get_for_project_permissions( + &project_item.inner, + user.id.into(), + &**pool, + ) + .await?; + + // Hide the project + if team_member.is_none() && organization_team_member.is_none() { + return Err(ApiError::CustomAuthentication( + "The specified project does not exist!".to_string(), + )); + } + + let permissions = ProjectPermissions::get_permissions_by_role( + &user.role, + &team_member, + &organization_team_member, + ) + .unwrap_or_default(); + + if !permissions.contains(ProjectPermissions::EDIT_DETAILS) { + return Err(ApiError::CustomAuthentication( + "You don't have permission to edit this project's icon.".to_string(), + )); + } + } + + if let Some(icon) = project_item.inner.icon_url { + let name = icon.split(&format!("{cdn_url}/")).nth(1); + + if let Some(icon_path) = name { + file_host.delete_file_version("", icon_path).await?; + } + } + + let bytes = + read_from_payload(&mut payload, 262144, "Icons must be smaller than 256KiB").await?; + + let color = crate::util::img::get_color_from_img(&bytes)?; + + let hash = sha1::Sha1::from(&bytes).hexdigest(); + let project_id: ProjectId = project_item.inner.id.into(); + let upload_data = file_host + .upload_file( + content_type, + &format!("data/{}/{}.{}", project_id, hash, ext.ext), + bytes.freeze(), + ) + .await?; + + let mut transaction = pool.begin().await?; + + sqlx::query!( + " + UPDATE mods + SET icon_url = $1, color = $2 + WHERE (id = $3) + ", + format!("{}/{}", cdn_url, upload_data.file_name), + color.map(|x| x as i32), + project_item.inner.id as db_ids::ProjectId, + ) + .execute(&mut *transaction) + .await?; + + db_models::Project::clear_cache( + project_item.inner.id, + project_item.inner.slug, + None, + &redis, + ) + .await?; + + transaction.commit().await?; + + Ok(HttpResponse::NoContent().body("")) + 
} else { + Err(ApiError::InvalidInput(format!( + "Invalid format for project icon: {}", + ext.ext + ))) + } +} + +pub async fn delete_project_icon( + req: HttpRequest, + info: web::Path<(String,)>, + pool: web::Data, + redis: web::Data, + file_host: web::Data>, + session_queue: web::Data, +) -> Result { + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::PROJECT_WRITE]), + ) + .await? + .1; + let string = info.into_inner().0; + + let project_item = db_models::Project::get(&string, &**pool, &redis) + .await? + .ok_or_else(|| { + ApiError::InvalidInput("The specified project does not exist!".to_string()) + })?; + + if !user.role.is_mod() { + let (team_member, organization_team_member) = + db_models::TeamMember::get_for_project_permissions( + &project_item.inner, + user.id.into(), + &**pool, + ) + .await?; + + // Hide the project + if team_member.is_none() && organization_team_member.is_none() { + return Err(ApiError::CustomAuthentication( + "The specified project does not exist!".to_string(), + )); + } + let permissions = ProjectPermissions::get_permissions_by_role( + &user.role, + &team_member, + &organization_team_member, + ) + .unwrap_or_default(); + + if !permissions.contains(ProjectPermissions::EDIT_DETAILS) { + return Err(ApiError::CustomAuthentication( + "You don't have permission to edit this project's icon.".to_string(), + )); + } + } + + let cdn_url = dotenvy::var("CDN_URL")?; + if let Some(icon) = project_item.inner.icon_url { + let name = icon.split(&format!("{cdn_url}/")).nth(1); + + if let Some(icon_path) = name { + file_host.delete_file_version("", icon_path).await?; + } + } + + let mut transaction = pool.begin().await?; + + sqlx::query!( + " + UPDATE mods + SET icon_url = NULL, color = NULL + WHERE (id = $1) + ", + project_item.inner.id as db_ids::ProjectId, + ) + .execute(&mut *transaction) + .await?; + + db_models::Project::clear_cache(project_item.inner.id, project_item.inner.slug, None, &redis) + 
.await?; + + transaction.commit().await?; + + Ok(HttpResponse::NoContent().body("")) +} + +#[derive(Serialize, Deserialize, Validate)] +pub struct GalleryCreateQuery { + pub featured: bool, + #[validate(length(min = 1, max = 255))] + pub title: Option, + #[validate(length(min = 1, max = 2048))] + pub description: Option, + pub ordering: Option, +} + +#[allow(clippy::too_many_arguments)] +pub async fn add_gallery_item( + web::Query(ext): web::Query, + req: HttpRequest, + web::Query(item): web::Query, + info: web::Path<(String,)>, + pool: web::Data, + redis: web::Data, + file_host: web::Data>, + mut payload: web::Payload, + session_queue: web::Data, +) -> Result { + if let Some(content_type) = crate::util::ext::get_image_content_type(&ext.ext) { + item.validate() + .map_err(|err| ApiError::Validation(validation_errors_to_string(err, None)))?; + + let cdn_url = dotenvy::var("CDN_URL")?; + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::PROJECT_WRITE]), + ) + .await? + .1; + let string = info.into_inner().0; + + let project_item = db_models::Project::get(&string, &**pool, &redis) + .await? 
+ .ok_or_else(|| { + ApiError::InvalidInput("The specified project does not exist!".to_string()) + })?; + + if project_item.gallery_items.len() > 64 { + return Err(ApiError::CustomAuthentication( + "You have reached the maximum of gallery images to upload.".to_string(), + )); + } + + if !user.role.is_admin() { + let (team_member, organization_team_member) = + db_models::TeamMember::get_for_project_permissions( + &project_item.inner, + user.id.into(), + &**pool, + ) + .await?; + + // Hide the project + if team_member.is_none() && organization_team_member.is_none() { + return Err(ApiError::CustomAuthentication( + "The specified project does not exist!".to_string(), + )); + } + + let permissions = ProjectPermissions::get_permissions_by_role( + &user.role, + &team_member, + &organization_team_member, + ) + .unwrap_or_default(); + + if !permissions.contains(ProjectPermissions::EDIT_DETAILS) { + return Err(ApiError::CustomAuthentication( + "You don't have permission to edit this project's gallery.".to_string(), + )); + } + } + + let bytes = read_from_payload( + &mut payload, + 5 * (1 << 20), + "Gallery image exceeds the maximum of 5MiB.", + ) + .await?; + let hash = sha1::Sha1::from(&bytes).hexdigest(); + + let id: ProjectId = project_item.inner.id.into(); + let url = format!("data/{}/images/{}.{}", id, hash, &*ext.ext); + + let file_url = format!("{cdn_url}/{url}"); + if project_item + .gallery_items + .iter() + .any(|x| x.image_url == file_url) + { + return Err(ApiError::InvalidInput( + "You may not upload duplicate gallery images!".to_string(), + )); + } + + file_host + .upload_file(content_type, &url, bytes.freeze()) + .await?; + + let mut transaction = pool.begin().await?; + + if item.featured { + sqlx::query!( + " + UPDATE mods_gallery + SET featured = $2 + WHERE mod_id = $1 + ", + project_item.inner.id as db_ids::ProjectId, + false, + ) + .execute(&mut *transaction) + .await?; + } + + let gallery_item = vec![db_models::project_item::GalleryItem { + image_url: 
file_url, + featured: item.featured, + title: item.title, + description: item.description, + created: Utc::now(), + ordering: item.ordering.unwrap_or(0), + }]; + GalleryItem::insert_many(gallery_item, project_item.inner.id, &mut transaction).await?; + + db_models::Project::clear_cache( + project_item.inner.id, + project_item.inner.slug, + None, + &redis, + ) + .await?; + + transaction.commit().await?; + + Ok(HttpResponse::NoContent().body("")) + } else { + Err(ApiError::InvalidInput(format!( + "Invalid format for gallery image: {}", + ext.ext + ))) + } +} + +#[derive(Serialize, Deserialize, Validate)] +pub struct GalleryEditQuery { + /// The url of the gallery item to edit + pub url: String, + pub featured: Option, + #[serde( + default, + skip_serializing_if = "Option::is_none", + with = "::serde_with::rust::double_option" + )] + #[validate(length(min = 1, max = 255))] + pub title: Option>, + #[serde( + default, + skip_serializing_if = "Option::is_none", + with = "::serde_with::rust::double_option" + )] + #[validate(length(min = 1, max = 2048))] + pub description: Option>, + pub ordering: Option, +} + +pub async fn edit_gallery_item( + req: HttpRequest, + web::Query(item): web::Query, + info: web::Path<(String,)>, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::PROJECT_WRITE]), + ) + .await? + .1; + let string = info.into_inner().0; + + item.validate() + .map_err(|err| ApiError::Validation(validation_errors_to_string(err, None)))?; + + let project_item = db_models::Project::get(&string, &**pool, &redis) + .await? 
+ .ok_or_else(|| { + ApiError::InvalidInput("The specified project does not exist!".to_string()) + })?; + + if !user.role.is_mod() { + let (team_member, organization_team_member) = + db_models::TeamMember::get_for_project_permissions( + &project_item.inner, + user.id.into(), + &**pool, + ) + .await?; + + // Hide the project + if team_member.is_none() && organization_team_member.is_none() { + return Err(ApiError::CustomAuthentication( + "The specified project does not exist!".to_string(), + )); + } + let permissions = ProjectPermissions::get_permissions_by_role( + &user.role, + &team_member, + &organization_team_member, + ) + .unwrap_or_default(); + + if !permissions.contains(ProjectPermissions::EDIT_DETAILS) { + return Err(ApiError::CustomAuthentication( + "You don't have permission to edit this project's gallery.".to_string(), + )); + } + } + let mut transaction = pool.begin().await?; + + let id = sqlx::query!( + " + SELECT id FROM mods_gallery + WHERE image_url = $1 + ", + item.url + ) + .fetch_optional(&mut *transaction) + .await? + .ok_or_else(|| { + ApiError::InvalidInput(format!( + "Gallery item at URL {} is not part of the project's gallery.", + item.url + )) + })? 
+ .id; + + let mut transaction = pool.begin().await?; + + if let Some(featured) = item.featured { + if featured { + sqlx::query!( + " + UPDATE mods_gallery + SET featured = $2 + WHERE mod_id = $1 + ", + project_item.inner.id as db_ids::ProjectId, + false, + ) + .execute(&mut *transaction) + .await?; + } + + sqlx::query!( + " + UPDATE mods_gallery + SET featured = $2 + WHERE id = $1 + ", + id, + featured + ) + .execute(&mut *transaction) + .await?; + } + if let Some(title) = item.title { + sqlx::query!( + " + UPDATE mods_gallery + SET title = $2 + WHERE id = $1 + ", + id, + title + ) + .execute(&mut *transaction) + .await?; + } + if let Some(description) = item.description { + sqlx::query!( + " + UPDATE mods_gallery + SET description = $2 + WHERE id = $1 + ", + id, + description + ) + .execute(&mut *transaction) + .await?; + } + if let Some(ordering) = item.ordering { + sqlx::query!( + " + UPDATE mods_gallery + SET ordering = $2 + WHERE id = $1 + ", + id, + ordering + ) + .execute(&mut *transaction) + .await?; + } + + db_models::Project::clear_cache(project_item.inner.id, project_item.inner.slug, None, &redis) + .await?; + + transaction.commit().await?; + + Ok(HttpResponse::NoContent().body("")) +} + +#[derive(Serialize, Deserialize)] +pub struct GalleryDeleteQuery { + pub url: String, +} + +pub async fn delete_gallery_item( + req: HttpRequest, + web::Query(item): web::Query, + info: web::Path<(String,)>, + pool: web::Data, + redis: web::Data, + file_host: web::Data>, + session_queue: web::Data, +) -> Result { + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::PROJECT_WRITE]), + ) + .await? + .1; + let string = info.into_inner().0; + + let project_item = db_models::Project::get(&string, &**pool, &redis) + .await? 
+ .ok_or_else(|| { + ApiError::InvalidInput("The specified project does not exist!".to_string()) + })?; + + if !user.role.is_mod() { + let (team_member, organization_team_member) = + db_models::TeamMember::get_for_project_permissions( + &project_item.inner, + user.id.into(), + &**pool, + ) + .await?; + + // Hide the project + if team_member.is_none() && organization_team_member.is_none() { + return Err(ApiError::CustomAuthentication( + "The specified project does not exist!".to_string(), + )); + } + + let permissions = ProjectPermissions::get_permissions_by_role( + &user.role, + &team_member, + &organization_team_member, + ) + .unwrap_or_default(); + + if !permissions.contains(ProjectPermissions::EDIT_DETAILS) { + return Err(ApiError::CustomAuthentication( + "You don't have permission to edit this project's gallery.".to_string(), + )); + } + } + let mut transaction = pool.begin().await?; + + let id = sqlx::query!( + " + SELECT id FROM mods_gallery + WHERE image_url = $1 + ", + item.url + ) + .fetch_optional(&mut *transaction) + .await? + .ok_or_else(|| { + ApiError::InvalidInput(format!( + "Gallery item at URL {} is not part of the project's gallery.", + item.url + )) + })? 
+ .id; + + let cdn_url = dotenvy::var("CDN_URL")?; + let name = item.url.split(&format!("{cdn_url}/")).nth(1); + + if let Some(icon_path) = name { + file_host.delete_file_version("", icon_path).await?; + } + + let mut transaction = pool.begin().await?; + + sqlx::query!( + " + DELETE FROM mods_gallery + WHERE id = $1 + ", + id + ) + .execute(&mut *transaction) + .await?; + + db_models::Project::clear_cache(project_item.inner.id, project_item.inner.slug, None, &redis) + .await?; + + transaction.commit().await?; + + Ok(HttpResponse::NoContent().body("")) +} + +pub async fn project_delete( + req: HttpRequest, + info: web::Path<(String,)>, + pool: web::Data, + redis: web::Data, + config: web::Data, + session_queue: web::Data, +) -> Result { + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::PROJECT_DELETE]), + ) + .await? + .1; + let string = info.into_inner().0; + + let project = db_models::Project::get(&string, &**pool, &redis) + .await? + .ok_or_else(|| { + ApiError::InvalidInput("The specified project does not exist!".to_string()) + })?; + + if !user.role.is_admin() { + let (team_member, organization_team_member) = + db_models::TeamMember::get_for_project_permissions( + &project.inner, + user.id.into(), + &**pool, + ) + .await?; + + // Hide the project + if team_member.is_none() && organization_team_member.is_none() { + return Err(ApiError::CustomAuthentication( + "The specified project does not exist!".to_string(), + )); + } + + let permissions = ProjectPermissions::get_permissions_by_role( + &user.role, + &team_member, + &organization_team_member, + ) + .unwrap_or_default(); + + if !permissions.contains(ProjectPermissions::DELETE_PROJECT) { + return Err(ApiError::CustomAuthentication( + "You don't have permission to delete this project!".to_string(), + )); + } + } + + let mut transaction = pool.begin().await?; + let context = ImageContext::Project { + project_id: Some(project.inner.id.into()), + }; + let 
uploaded_images = db_models::Image::get_many_contexted(context, &mut transaction).await?; + for image in uploaded_images { + image_item::Image::remove(image.id, &mut transaction, &redis).await?; + } + + sqlx::query!( + " + DELETE FROM collections_mods + WHERE mod_id = $1 + ", + project.inner.id as db_ids::ProjectId, + ) + .execute(&mut *transaction) + .await?; + + let result = db_models::Project::remove(project.inner.id, &mut transaction, &redis).await?; + + transaction.commit().await?; + + delete_from_index(project.inner.id.into(), config).await?; + + if result.is_some() { + Ok(HttpResponse::NoContent().body("")) + } else { + Ok(HttpResponse::NotFound().body("")) + } +} + +pub async fn project_follow( + req: HttpRequest, + info: web::Path<(String,)>, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::USER_WRITE]), + ) + .await? + .1; + let string = info.into_inner().0; + + let result = db_models::Project::get(&string, &**pool, &redis) + .await? + .ok_or_else(|| { + ApiError::InvalidInput("The specified project does not exist!".to_string()) + })?; + + let user_id: db_ids::UserId = user.id.into(); + let project_id: db_ids::ProjectId = result.inner.id; + + if !is_authorized(&result.inner, &Some(user), &pool).await? { + return Ok(HttpResponse::NotFound().body("")); + } + + let following = sqlx::query!( + " + SELECT EXISTS(SELECT 1 FROM mod_follows mf WHERE mf.follower_id = $1 AND mf.mod_id = $2) + ", + user_id as db_ids::UserId, + project_id as db_ids::ProjectId + ) + .fetch_one(&**pool) + .await? 
+ .exists + .unwrap_or(false); + + if !following { + let mut transaction = pool.begin().await?; + + sqlx::query!( + " + UPDATE mods + SET follows = follows + 1 + WHERE id = $1 + ", + project_id as db_ids::ProjectId, + ) + .execute(&mut *transaction) + .await?; + + sqlx::query!( + " + INSERT INTO mod_follows (follower_id, mod_id) + VALUES ($1, $2) + ", + user_id as db_ids::UserId, + project_id as db_ids::ProjectId + ) + .execute(&mut *transaction) + .await?; + + transaction.commit().await?; + + Ok(HttpResponse::NoContent().body("")) + } else { + Err(ApiError::InvalidInput( + "You are already following this project!".to_string(), + )) + } +} + +pub async fn project_unfollow( + req: HttpRequest, + info: web::Path<(String,)>, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::USER_WRITE]), + ) + .await? + .1; + let string = info.into_inner().0; + + let result = db_models::Project::get(&string, &**pool, &redis) + .await? + .ok_or_else(|| { + ApiError::InvalidInput("The specified project does not exist!".to_string()) + })?; + + let user_id: db_ids::UserId = user.id.into(); + let project_id = result.inner.id; + + let following = sqlx::query!( + " + SELECT EXISTS(SELECT 1 FROM mod_follows mf WHERE mf.follower_id = $1 AND mf.mod_id = $2) + ", + user_id as db_ids::UserId, + project_id as db_ids::ProjectId + ) + .fetch_one(&**pool) + .await? 
+ .exists + .unwrap_or(false); + + if following { + let mut transaction = pool.begin().await?; + + sqlx::query!( + " + UPDATE mods + SET follows = follows - 1 + WHERE id = $1 + ", + project_id as db_ids::ProjectId, + ) + .execute(&mut *transaction) + .await?; + + sqlx::query!( + " + DELETE FROM mod_follows + WHERE follower_id = $1 AND mod_id = $2 + ", + user_id as db_ids::UserId, + project_id as db_ids::ProjectId + ) + .execute(&mut *transaction) + .await?; + + transaction.commit().await?; + + Ok(HttpResponse::NoContent().body("")) + } else { + Err(ApiError::InvalidInput( + "You are not following this project!".to_string(), + )) + } +} diff --git a/src/routes/v3/reports.rs b/src/routes/v3/reports.rs new file mode 100644 index 00000000..04e69c8c --- /dev/null +++ b/src/routes/v3/reports.rs @@ -0,0 +1,524 @@ +use crate::auth::{check_is_moderator_from_headers, get_user_from_headers}; +use crate::database; +use crate::database::models::image_item; +use crate::database::models::thread_item::{ThreadBuilder, ThreadMessageBuilder}; +use crate::database::redis::RedisPool; +use crate::models::ids::ImageId; +use crate::models::ids::{base62_impl::parse_base62, ProjectId, UserId, VersionId}; +use crate::models::images::{Image, ImageContext}; +use crate::models::pats::Scopes; +use crate::models::reports::{ItemType, Report}; +use crate::models::threads::{MessageBody, ThreadType}; +use crate::queue::session::AuthQueue; +use crate::routes::ApiError; +use crate::util::img; +use actix_web::{web, HttpRequest, HttpResponse}; +use chrono::Utc; +use futures::StreamExt; +use serde::Deserialize; +use sqlx::PgPool; +use validator::Validate; + +pub fn config(cfg: &mut web::ServiceConfig) { + cfg.route("report", web::post().to(report_create)); + cfg.route("report", web::get().to(reports)); + cfg.route("reports", web::get().to(reports_get)); + cfg.route("report/{id}", web::get().to(report_get)); + cfg.route("report/{id}", web::patch().to(report_edit)); + cfg.route("report/{id}", 
web::delete().to(report_delete)); +} + +#[derive(Deserialize, Validate)] +pub struct CreateReport { + pub report_type: String, + pub item_id: String, + pub item_type: ItemType, + pub body: String, + // Associations to uploaded images + #[validate(length(max = 10))] + #[serde(default)] + pub uploaded_images: Vec, +} + +pub async fn report_create( + req: HttpRequest, + pool: web::Data, + mut body: web::Payload, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let mut transaction = pool.begin().await?; + + let current_user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::REPORT_CREATE]), + ) + .await? + .1; + + let mut bytes = web::BytesMut::new(); + while let Some(item) = body.next().await { + bytes.extend_from_slice(&item.map_err(|_| { + ApiError::InvalidInput("Error while parsing request payload!".to_string()) + })?); + } + let new_report: CreateReport = serde_json::from_slice(bytes.as_ref())?; + + let id = crate::database::models::generate_report_id(&mut transaction).await?; + let report_type = crate::database::models::categories::ReportType::get_id( + &new_report.report_type, + &mut *transaction, + ) + .await? 
+ .ok_or_else(|| { + ApiError::InvalidInput(format!("Invalid report type: {}", new_report.report_type)) + })?; + + let mut report = crate::database::models::report_item::Report { + id, + report_type_id: report_type, + project_id: None, + version_id: None, + user_id: None, + body: new_report.body.clone(), + reporter: current_user.id.into(), + created: Utc::now(), + closed: false, + }; + + match new_report.item_type { + ItemType::Project => { + let project_id = ProjectId(parse_base62(new_report.item_id.as_str())?); + + let result = sqlx::query!( + "SELECT EXISTS(SELECT 1 FROM mods WHERE id = $1)", + project_id.0 as i64 + ) + .fetch_one(&mut *transaction) + .await?; + + if !result.exists.unwrap_or(false) { + return Err(ApiError::InvalidInput(format!( + "Project could not be found: {}", + new_report.item_id + ))); + } + + report.project_id = Some(project_id.into()) + } + ItemType::Version => { + let version_id = VersionId(parse_base62(new_report.item_id.as_str())?); + + let result = sqlx::query!( + "SELECT EXISTS(SELECT 1 FROM versions WHERE id = $1)", + version_id.0 as i64 + ) + .fetch_one(&mut *transaction) + .await?; + + if !result.exists.unwrap_or(false) { + return Err(ApiError::InvalidInput(format!( + "Version could not be found: {}", + new_report.item_id + ))); + } + + report.version_id = Some(version_id.into()) + } + ItemType::User => { + let user_id = UserId(parse_base62(new_report.item_id.as_str())?); + + let result = sqlx::query!( + "SELECT EXISTS(SELECT 1 FROM users WHERE id = $1)", + user_id.0 as i64 + ) + .fetch_one(&mut *transaction) + .await?; + + if !result.exists.unwrap_or(false) { + return Err(ApiError::InvalidInput(format!( + "User could not be found: {}", + new_report.item_id + ))); + } + + report.user_id = Some(user_id.into()) + } + ItemType::Unknown => { + return Err(ApiError::InvalidInput(format!( + "Invalid report item type: {}", + new_report.item_type.as_str() + ))) + } + } + + report.insert(&mut transaction).await?; + + for image_id in 
new_report.uploaded_images { + if let Some(db_image) = + image_item::Image::get(image_id.into(), &mut *transaction, &redis).await? + { + let image: Image = db_image.into(); + if !matches!(image.context, ImageContext::Report { .. }) + || image.context.inner_id().is_some() + { + return Err(ApiError::InvalidInput(format!( + "Image {} is not unused and in the 'report' context", + image_id + ))); + } + + sqlx::query!( + " + UPDATE uploaded_images + SET report_id = $1 + WHERE id = $2 + ", + id.0 as i64, + image_id.0 as i64 + ) + .execute(&mut *transaction) + .await?; + + image_item::Image::clear_cache(image.id.into(), &redis).await?; + } else { + return Err(ApiError::InvalidInput(format!( + "Image {} could not be found", + image_id + ))); + } + } + + let thread_id = ThreadBuilder { + type_: ThreadType::Report, + members: vec![], + project_id: None, + report_id: Some(report.id), + } + .insert(&mut transaction) + .await?; + + transaction.commit().await?; + + Ok(HttpResponse::Ok().json(Report { + id: id.into(), + report_type: new_report.report_type.clone(), + item_id: new_report.item_id.clone(), + item_type: new_report.item_type.clone(), + reporter: current_user.id, + body: new_report.body.clone(), + created: Utc::now(), + closed: false, + thread_id: thread_id.into(), + })) +} + +#[derive(Deserialize)] +pub struct ReportsRequestOptions { + #[serde(default = "default_count")] + pub count: i16, + #[serde(default = "default_all")] + pub all: bool, +} + +fn default_count() -> i16 { + 100 +} +fn default_all() -> bool { + true +} + +pub async fn reports( + req: HttpRequest, + pool: web::Data, + redis: web::Data, + count: web::Query, + session_queue: web::Data, +) -> Result { + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::REPORT_READ]), + ) + .await? 
+ .1; + + use futures::stream::TryStreamExt; + + let report_ids = if user.role.is_mod() && count.all { + sqlx::query!( + " + SELECT id FROM reports + WHERE closed = FALSE + ORDER BY created ASC + LIMIT $1; + ", + count.count as i64 + ) + .fetch_many(&**pool) + .try_filter_map(|e| async { + Ok(e.right() + .map(|m| crate::database::models::ids::ReportId(m.id))) + }) + .try_collect::>() + .await? + } else { + sqlx::query!( + " + SELECT id FROM reports + WHERE closed = FALSE AND reporter = $1 + ORDER BY created ASC + LIMIT $2; + ", + user.id.0 as i64, + count.count as i64 + ) + .fetch_many(&**pool) + .try_filter_map(|e| async { + Ok(e.right() + .map(|m| crate::database::models::ids::ReportId(m.id))) + }) + .try_collect::>() + .await? + }; + + let query_reports = + crate::database::models::report_item::Report::get_many(&report_ids, &**pool).await?; + + let mut reports: Vec = Vec::new(); + + for x in query_reports { + reports.push(x.into()); + } + + Ok(HttpResponse::Ok().json(reports)) +} + +#[derive(Deserialize)] +pub struct ReportIds { + pub ids: String, +} + +pub async fn reports_get( + req: HttpRequest, + web::Query(ids): web::Query, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let report_ids: Vec = + serde_json::from_str::>(&ids.ids)? + .into_iter() + .map(|x| x.into()) + .collect(); + + let reports_data = + crate::database::models::report_item::Report::get_many(&report_ids, &**pool).await?; + + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::REPORT_READ]), + ) + .await? 
+ .1; + + let all_reports = reports_data + .into_iter() + .filter(|x| user.role.is_mod() || x.reporter == user.id.into()) + .map(|x| x.into()) + .collect::>(); + + Ok(HttpResponse::Ok().json(all_reports)) +} + +pub async fn report_get( + req: HttpRequest, + pool: web::Data, + redis: web::Data, + info: web::Path<(crate::models::reports::ReportId,)>, + session_queue: web::Data, +) -> Result { + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::REPORT_READ]), + ) + .await? + .1; + let id = info.into_inner().0.into(); + + let report = crate::database::models::report_item::Report::get(id, &**pool).await?; + + if let Some(report) = report { + if !user.role.is_mod() && report.reporter != user.id.into() { + return Ok(HttpResponse::NotFound().body("")); + } + + let report: Report = report.into(); + Ok(HttpResponse::Ok().json(report)) + } else { + Ok(HttpResponse::NotFound().body("")) + } +} + +#[derive(Deserialize, Validate)] +pub struct EditReport { + #[validate(length(max = 65536))] + pub body: Option, + pub closed: Option, +} + +pub async fn report_edit( + req: HttpRequest, + pool: web::Data, + redis: web::Data, + info: web::Path<(crate::models::reports::ReportId,)>, + session_queue: web::Data, + edit_report: web::Json, +) -> Result { + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::REPORT_WRITE]), + ) + .await? 
+ .1; + let id = info.into_inner().0.into(); + + let report = crate::database::models::report_item::Report::get(id, &**pool).await?; + + if let Some(report) = report { + if !user.role.is_mod() && report.reporter != user.id.into() { + return Ok(HttpResponse::NotFound().body("")); + } + + let mut transaction = pool.begin().await?; + + if let Some(edit_body) = &edit_report.body { + sqlx::query!( + " + UPDATE reports + SET body = $1 + WHERE (id = $2) + ", + edit_body, + id as crate::database::models::ids::ReportId, + ) + .execute(&mut *transaction) + .await?; + } + + if let Some(edit_closed) = edit_report.closed { + if !user.role.is_mod() { + return Err(ApiError::InvalidInput( + "You cannot reopen a report!".to_string(), + )); + } + + ThreadMessageBuilder { + author_id: Some(user.id.into()), + body: if !edit_closed && report.closed { + MessageBody::ThreadReopen + } else { + MessageBody::ThreadClosure + }, + thread_id: report.thread_id, + } + .insert(&mut transaction) + .await?; + + sqlx::query!( + " + UPDATE reports + SET closed = $1 + WHERE (id = $2) + ", + edit_closed, + id as crate::database::models::ids::ReportId, + ) + .execute(&mut *transaction) + .await?; + + sqlx::query!( + " + UPDATE threads + SET show_in_mod_inbox = $1 + WHERE id = $2 + ", + !(edit_closed || report.closed), + report.thread_id.0, + ) + .execute(&mut *transaction) + .await?; + } + + // delete any images no longer in the body + let checkable_strings: Vec<&str> = vec![&edit_report.body] + .into_iter() + .filter_map(|x: &Option| x.as_ref().map(|y| y.as_str())) + .collect(); + let image_context = ImageContext::Report { + report_id: Some(id.into()), + }; + img::delete_unused_images(image_context, checkable_strings, &mut transaction, &redis) + .await?; + + transaction.commit().await?; + + Ok(HttpResponse::NoContent().body("")) + } else { + Ok(HttpResponse::NotFound().body("")) + } +} + +pub async fn report_delete( + req: HttpRequest, + pool: web::Data, + info: 
web::Path<(crate::models::reports::ReportId,)>, + redis: web::Data, + session_queue: web::Data, +) -> Result { + check_is_moderator_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::REPORT_DELETE]), + ) + .await?; + + let mut transaction = pool.begin().await?; + + let id = info.into_inner().0; + let context = ImageContext::Report { + report_id: Some(id), + }; + let uploaded_images = + database::models::Image::get_many_contexted(context, &mut transaction).await?; + for image in uploaded_images { + image_item::Image::remove(image.id, &mut transaction, &redis).await?; + } + + let result = + crate::database::models::report_item::Report::remove_full(id.into(), &mut transaction) + .await?; + transaction.commit().await?; + + if result.is_some() { + Ok(HttpResponse::NoContent().body("")) + } else { + Ok(HttpResponse::NotFound().body("")) + } +} diff --git a/src/routes/v3/statistics.rs b/src/routes/v3/statistics.rs new file mode 100644 index 00000000..7a0def23 --- /dev/null +++ b/src/routes/v3/statistics.rs @@ -0,0 +1,85 @@ +use crate::routes::ApiError; +use actix_web::{web, HttpResponse}; +use serde_json::json; +use sqlx::PgPool; + +pub fn config(cfg: &mut web::ServiceConfig) { + cfg.route("statistics", web::get().to(get_stats)); +} + +pub async fn get_stats(pool: web::Data) -> Result { + let projects = sqlx::query!( + " + SELECT COUNT(id) + FROM mods + WHERE status = ANY($1) + ", + &*crate::models::projects::ProjectStatus::iterator() + .filter(|x| x.is_searchable()) + .map(|x| x.to_string()) + .collect::>(), + ) + .fetch_one(&**pool) + .await?; + + let versions = sqlx::query!( + " + SELECT COUNT(v.id) + FROM versions v + INNER JOIN mods m on v.mod_id = m.id AND m.status = ANY($1) + WHERE v.status = ANY($2) + ", + &*crate::models::projects::ProjectStatus::iterator() + .filter(|x| x.is_searchable()) + .map(|x| x.to_string()) + .collect::>(), + &*crate::models::projects::VersionStatus::iterator() + .filter(|x| x.is_listed()) + .map(|x| 
x.to_string()) + .collect::>(), + ) + .fetch_one(&**pool) + .await?; + + let authors = sqlx::query!( + " + SELECT COUNT(DISTINCT u.id) + FROM users u + INNER JOIN team_members tm on u.id = tm.user_id AND tm.accepted = TRUE + INNER JOIN mods m on tm.team_id = m.team_id AND m.status = ANY($1) + ", + &*crate::models::projects::ProjectStatus::iterator() + .filter(|x| x.is_searchable()) + .map(|x| x.to_string()) + .collect::>(), + ) + .fetch_one(&**pool) + .await?; + + let files = sqlx::query!( + " + SELECT COUNT(f.id) FROM files f + INNER JOIN versions v on f.version_id = v.id AND v.status = ANY($2) + INNER JOIN mods m on v.mod_id = m.id AND m.status = ANY($1) + ", + &*crate::models::projects::ProjectStatus::iterator() + .filter(|x| x.is_searchable()) + .map(|x| x.to_string()) + .collect::>(), + &*crate::models::projects::VersionStatus::iterator() + .filter(|x| x.is_listed()) + .map(|x| x.to_string()) + .collect::>(), + ) + .fetch_one(&**pool) + .await?; + + let json = json!({ + "projects": projects.count, + "versions": versions.count, + "authors": authors.count, + "files": files.count, + }); + + Ok(HttpResponse::Ok().json(json)) +} diff --git a/src/routes/v3/tags.rs b/src/routes/v3/tags.rs new file mode 100644 index 00000000..a82beaf1 --- /dev/null +++ b/src/routes/v3/tags.rs @@ -0,0 +1,210 @@ +use std::collections::HashMap; + +use super::ApiError; +use crate::database::models::categories::{Category, DonationPlatform, ProjectType, ReportType}; +use crate::database::models::loader_fields::{ + Loader, LoaderField, LoaderFieldEnumValue, LoaderFieldType, +}; +use crate::database::redis::RedisPool; +use actix_web::{web, HttpResponse}; +use serde_json::Value; +use sqlx::PgPool; + +pub fn config(cfg: &mut web::ServiceConfig) { + cfg.service( + web::scope("tag") + .route("category", web::get().to(category_list)) + .route("loader", web::get().to(loader_list)), + ) + .route("loader_fields", web::get().to(loader_fields_list)) + .route("license", web::get().to(license_list)) + 
.route("license/{id}", web::get().to(license_text)) + .route("donation_platform", web::get().to(donation_platform_list)) + .route("report_type", web::get().to(report_type_list)) + .route("project_type", web::get().to(project_type_list)); +} + +#[derive(serde::Serialize, serde::Deserialize)] +pub struct CategoryData { + pub icon: String, + pub name: String, + pub project_type: String, + pub header: String, +} + +pub async fn category_list( + pool: web::Data, + redis: web::Data, +) -> Result { + let results = Category::list(&**pool, &redis) + .await? + .into_iter() + .map(|x| CategoryData { + icon: x.icon, + name: x.category, + project_type: x.project_type, + header: x.header, + }) + .collect::>(); + + Ok(HttpResponse::Ok().json(results)) +} + +#[derive(serde::Serialize, serde::Deserialize)] +pub struct LoaderData { + pub icon: String, + pub name: String, + pub supported_project_types: Vec, + pub supported_games: Vec, +} + +pub async fn loader_list( + pool: web::Data, + redis: web::Data, +) -> Result { + let mut results = Loader::list(&**pool, &redis) + .await? + .into_iter() + .map(|x| LoaderData { + icon: x.icon, + name: x.loader, + supported_project_types: x.supported_project_types, + supported_games: x + .supported_games + .iter() + .map(|x| x.name().to_string()) + .collect(), + }) + .collect::>(); + + results.sort_by(|a, b| a.name.to_lowercase().cmp(&b.name.to_lowercase())); + + Ok(HttpResponse::Ok().json(results)) +} + +#[derive(serde::Deserialize, serde::Serialize)] +pub struct LoaderFieldsEnumQuery { + pub loader_field: String, + pub filters: Option>, // For metadata +} + +// Provides the variants for any enumerable loader field. +pub async fn loader_fields_list( + pool: web::Data, + query: web::Query, + redis: web::Data, +) -> Result { + let query = query.into_inner(); + let loader_field = LoaderField::get_field(&query.loader_field, &**pool, &redis) + .await? 
+ .ok_or_else(|| { + ApiError::InvalidInput(format!( + "'{}' was not a valid loader field.", + query.loader_field + )) + })?; + + let loader_field_enum_id = match loader_field.field_type { + LoaderFieldType::Enum(enum_id) | LoaderFieldType::ArrayEnum(enum_id) => enum_id, + _ => { + return Err(ApiError::InvalidInput(format!( + "'{}' is not an enumerable field, but an '{}' field.", + query.loader_field, + loader_field.field_type.to_str() + ))) + } + }; + + let results: Vec<_> = if let Some(filters) = query.filters { + LoaderFieldEnumValue::list_filter(loader_field_enum_id, filters, &**pool, &redis).await? + } else { + LoaderFieldEnumValue::list(loader_field_enum_id, &**pool, &redis).await? + }; + + Ok(HttpResponse::Ok().json(results)) +} + +#[derive(serde::Serialize)] +pub struct License { + short: String, + name: String, +} + +pub async fn license_list() -> HttpResponse { + let licenses = spdx::identifiers::LICENSES; + let mut results: Vec = Vec::with_capacity(licenses.len()); + + for (short, name, _) in licenses { + results.push(License { + short: short.to_string(), + name: name.to_string(), + }); + } + + HttpResponse::Ok().json(results) +} + +#[derive(serde::Serialize)] +pub struct LicenseText { + title: String, + body: String, +} + +pub async fn license_text(params: web::Path<(String,)>) -> Result { + let license_id = params.into_inner().0; + + if license_id == *crate::models::projects::DEFAULT_LICENSE_ID { + return Ok(HttpResponse::Ok().json(LicenseText { + title: "All Rights Reserved".to_string(), + body: "All rights reserved unless explicitly stated.".to_string(), + })); + } + + if let Some(license) = spdx::license_id(&license_id) { + return Ok(HttpResponse::Ok().json(LicenseText { + title: license.full_name.to_string(), + body: license.text().to_string(), + })); + } + + Err(ApiError::InvalidInput( + "Invalid SPDX identifier specified".to_string(), + )) +} + +#[derive(serde::Serialize)] +pub struct DonationPlatformQueryData { + short: String, + name: String, 
+} + +pub async fn donation_platform_list( + pool: web::Data, + redis: web::Data, +) -> Result { + let results: Vec = DonationPlatform::list(&**pool, &redis) + .await? + .into_iter() + .map(|x| DonationPlatformQueryData { + short: x.short, + name: x.name, + }) + .collect(); + Ok(HttpResponse::Ok().json(results)) +} + +pub async fn report_type_list( + pool: web::Data, + redis: web::Data, +) -> Result { + let results = ReportType::list(&**pool, &redis).await?; + Ok(HttpResponse::Ok().json(results)) +} + +pub async fn project_type_list( + pool: web::Data, + redis: web::Data, +) -> Result { + let results = ProjectType::list(&**pool, &redis).await?; + Ok(HttpResponse::Ok().json(results)) +} diff --git a/src/routes/v3/teams.rs b/src/routes/v3/teams.rs new file mode 100644 index 00000000..05f19c69 --- /dev/null +++ b/src/routes/v3/teams.rs @@ -0,0 +1,943 @@ +use crate::auth::{get_user_from_headers, is_authorized}; +use crate::database::models::notification_item::NotificationBuilder; +use crate::database::models::team_item::TeamAssociationId; +use crate::database::models::{Organization, Team, TeamMember, User}; +use crate::database::redis::RedisPool; +use crate::database::Project; +use crate::models::notifications::NotificationBody; +use crate::models::pats::Scopes; +use crate::models::teams::{OrganizationPermissions, ProjectPermissions, TeamId}; +use crate::models::users::UserId; +use crate::queue::session::AuthQueue; +use crate::routes::ApiError; +use actix_web::{web, HttpRequest, HttpResponse}; +use rust_decimal::Decimal; +use serde::{Deserialize, Serialize}; +use sqlx::PgPool; + +pub fn config(cfg: &mut web::ServiceConfig) { + cfg.route("teams", web::get().to(teams_get)); + + cfg.service( + web::scope("team") + .route("{id}/members", web::get().to(team_members_get)) + .route("{id}/members/{user_id}", web::patch().to(edit_team_member)) + .route( + "{id}/members/{user_id}", + web::delete().to(remove_team_member), + ) + .route("{id}/members", 
web::post().to(add_team_member)) + .route("{id}/join", web::post().to(join_team)) + .route("{id}/owner", web::patch().to(transfer_ownership)), + ); +} + +// Returns all members of a project, +// including the team members of the project's team, but +// also the members of the organization's team if the project is associated with an organization +// (Unlike team_members_get_project, which only returns the members of the project's team) +pub async fn team_members_get_project( + req: HttpRequest, + info: web::Path<(String,)>, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let string = info.into_inner().0; + let project_data = crate::database::models::Project::get(&string, &**pool, &redis).await?; + + if let Some(project) = project_data { + let current_user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::PROJECT_READ]), + ) + .await + .map(|x| x.1) + .ok(); + + if !is_authorized(&project.inner, ¤t_user, &pool).await? 
{ + return Ok(HttpResponse::NotFound().body("")); + } + let mut members_data = + TeamMember::get_from_team_full(project.inner.team_id, &**pool, &redis).await?; + let mut member_user_ids = members_data.iter().map(|x| x.user_id).collect::>(); + + // Adds the organization's team members to the list of members, if the project is associated with an organization + if let Some(oid) = project.inner.organization_id { + let organization_data = Organization::get_id(oid, &**pool, &redis).await?; + if let Some(organization_data) = organization_data { + let org_team = + TeamMember::get_from_team_full(organization_data.team_id, &**pool, &redis) + .await?; + for member in org_team { + if !member_user_ids.contains(&member.user_id) { + member_user_ids.push(member.user_id); + members_data.push(member); + } + } + } + } + + let users = + crate::database::models::User::get_many_ids(&member_user_ids, &**pool, &redis).await?; + + let user_id = current_user.as_ref().map(|x| x.id.into()); + + let logged_in = current_user + .and_then(|user| { + members_data + .iter() + .find(|x| x.user_id == user.id.into() && x.accepted) + }) + .is_some(); + let team_members: Vec<_> = members_data + .into_iter() + .filter(|x| { + logged_in + || x.accepted + || user_id + .map(|y: crate::database::models::UserId| y == x.user_id) + .unwrap_or(false) + }) + .flat_map(|data| { + users.iter().find(|x| x.id == data.user_id).map(|user| { + crate::models::teams::TeamMember::from(data, user.clone(), !logged_in) + }) + }) + .collect(); + Ok(HttpResponse::Ok().json(team_members)) + } else { + Ok(HttpResponse::NotFound().body("")) + } +} + +pub async fn team_members_get_organization( + req: HttpRequest, + info: web::Path<(String,)>, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let string = info.into_inner().0; + let organization_data = + crate::database::models::Organization::get(&string, &**pool, &redis).await?; + + if let Some(organization) = organization_data { + let current_user 
= get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::ORGANIZATION_READ]), + ) + .await + .map(|x| x.1) + .ok(); + + let members_data = + TeamMember::get_from_team_full(organization.team_id, &**pool, &redis).await?; + let users = crate::database::models::User::get_many_ids( + &members_data.iter().map(|x| x.user_id).collect::>(), + &**pool, + &redis, + ) + .await?; + + let user_id = current_user.as_ref().map(|x| x.id.into()); + + let logged_in = current_user + .and_then(|user| { + members_data + .iter() + .find(|x| x.user_id == user.id.into() && x.accepted) + }) + .is_some(); + + let team_members: Vec<_> = members_data + .into_iter() + .filter(|x| { + logged_in + || x.accepted + || user_id + .map(|y: crate::database::models::UserId| y == x.user_id) + .unwrap_or(false) + }) + .flat_map(|data| { + users.iter().find(|x| x.id == data.user_id).map(|user| { + crate::models::teams::TeamMember::from(data, user.clone(), !logged_in) + }) + }) + .collect(); + + Ok(HttpResponse::Ok().json(team_members)) + } else { + Ok(HttpResponse::NotFound().body("")) + } +} + +// Returns all members of a team, but not necessarily those of a project-team's organization (unlike team_members_get_project) +pub async fn team_members_get( + req: HttpRequest, + info: web::Path<(TeamId,)>, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let id = info.into_inner().0; + let members_data = TeamMember::get_from_team_full(id.into(), &**pool, &redis).await?; + let users = crate::database::models::User::get_many_ids( + &members_data.iter().map(|x| x.user_id).collect::>(), + &**pool, + &redis, + ) + .await?; + + let current_user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::PROJECT_READ]), + ) + .await + .map(|x| x.1) + .ok(); + let user_id = current_user.as_ref().map(|x| x.id.into()); + + let logged_in = current_user + .and_then(|user| { + members_data + .iter() + .find(|x| x.user_id == 
user.id.into() && x.accepted) + }) + .is_some(); + + let team_members: Vec<_> = members_data + .into_iter() + .filter(|x| { + logged_in + || x.accepted + || user_id + .map(|y: crate::database::models::UserId| y == x.user_id) + .unwrap_or(false) + }) + .flat_map(|data| { + users + .iter() + .find(|x| x.id == data.user_id) + .map(|user| crate::models::teams::TeamMember::from(data, user.clone(), !logged_in)) + }) + .collect(); + + Ok(HttpResponse::Ok().json(team_members)) +} + +#[derive(Serialize, Deserialize)] +pub struct TeamIds { + pub ids: String, +} + +pub async fn teams_get( + req: HttpRequest, + web::Query(ids): web::Query, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + use itertools::Itertools; + + let team_ids = serde_json::from_str::>(&ids.ids)? + .into_iter() + .map(|x| x.into()) + .collect::>(); + + let teams_data = TeamMember::get_from_team_full_many(&team_ids, &**pool, &redis).await?; + let users = crate::database::models::User::get_many_ids( + &teams_data.iter().map(|x| x.user_id).collect::>(), + &**pool, + &redis, + ) + .await?; + + let current_user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::PROJECT_READ]), + ) + .await + .map(|x| x.1) + .ok(); + + let teams_groups = teams_data.into_iter().group_by(|data| data.team_id.0); + + let mut teams: Vec> = vec![]; + + for (_, member_data) in &teams_groups { + let members = member_data.collect::>(); + + let logged_in = current_user + .as_ref() + .and_then(|user| { + members + .iter() + .find(|x| x.user_id == user.id.into() && x.accepted) + }) + .is_some(); + + let team_members = members + .into_iter() + .filter(|x| logged_in || x.accepted) + .flat_map(|data| { + users.iter().find(|x| x.id == data.user_id).map(|user| { + crate::models::teams::TeamMember::from(data, user.clone(), !logged_in) + }) + }); + + teams.push(team_members.collect()); + } + + Ok(HttpResponse::Ok().json(teams)) +} + +pub async fn join_team( + req: HttpRequest, 
+ info: web::Path<(TeamId,)>, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let team_id = info.into_inner().0.into(); + let current_user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::PROJECT_WRITE]), + ) + .await? + .1; + + let member = + TeamMember::get_from_user_id_pending(team_id, current_user.id.into(), &**pool).await?; + + if let Some(member) = member { + if member.accepted { + return Err(ApiError::InvalidInput( + "You are already a member of this team".to_string(), + )); + } + let mut transaction = pool.begin().await?; + + // Edit Team Member to set Accepted to True + TeamMember::edit_team_member( + team_id, + current_user.id.into(), + None, + None, + None, + Some(true), + None, + None, + &mut transaction, + ) + .await?; + + User::clear_project_cache(&[current_user.id.into()], &redis).await?; + TeamMember::clear_cache(team_id, &redis).await?; + + transaction.commit().await?; + } else { + return Err(ApiError::InvalidInput( + "There is no pending request from this team".to_string(), + )); + } + + Ok(HttpResponse::NoContent().body("")) +} + +fn default_role() -> String { + "Member".to_string() +} + +fn default_ordering() -> i64 { + 0 +} + +#[derive(Serialize, Deserialize, Clone)] +pub struct NewTeamMember { + pub user_id: UserId, + #[serde(default = "default_role")] + pub role: String, + #[serde(default)] + pub permissions: ProjectPermissions, + #[serde(default)] + pub organization_permissions: Option, + #[serde(default)] + pub payouts_split: Decimal, + #[serde(default = "default_ordering")] + pub ordering: i64, +} + +pub async fn add_team_member( + req: HttpRequest, + info: web::Path<(TeamId,)>, + pool: web::Data, + new_member: web::Json, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let team_id = info.into_inner().0.into(); + + let mut transaction = pool.begin().await?; + + let current_user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, 
+ Some(&[Scopes::PROJECT_WRITE]), + ) + .await? + .1; + let team_association = Team::get_association(team_id, &**pool) + .await? + .ok_or_else(|| ApiError::InvalidInput("The team specified does not exist".to_string()))?; + let member = TeamMember::get_from_user_id(team_id, current_user.id.into(), &**pool).await?; + match team_association { + // If team is associated with a project, check if they have permissions to invite users to that project + TeamAssociationId::Project(pid) => { + let organization = + Organization::get_associated_organization_project_id(pid, &**pool).await?; + let organization_team_member = if let Some(organization) = &organization { + TeamMember::get_from_user_id(organization.team_id, current_user.id.into(), &**pool) + .await? + } else { + None + }; + let permissions = ProjectPermissions::get_permissions_by_role( + ¤t_user.role, + &member, + &organization_team_member, + ) + .unwrap_or_default(); + + if !permissions.contains(ProjectPermissions::MANAGE_INVITES) { + return Err(ApiError::CustomAuthentication( + "You don't have permission to invite users to this team".to_string(), + )); + } + if !permissions.contains(new_member.permissions) { + return Err(ApiError::InvalidInput( + "The new member has permissions that you don't have".to_string(), + )); + } + + if new_member.organization_permissions.is_some() { + return Err(ApiError::InvalidInput( + "The organization permissions of a project team member cannot be set" + .to_string(), + )); + } + } + // If team is associated with an organization, check if they have permissions to invite users to that organization + TeamAssociationId::Organization(_) => { + let organization_permissions = + OrganizationPermissions::get_permissions_by_role(¤t_user.role, &member) + .unwrap_or_default(); + if !organization_permissions.contains(OrganizationPermissions::MANAGE_INVITES) { + return Err(ApiError::CustomAuthentication( + "You don't have permission to invite users to this organization".to_string(), + )); + } + if 
!organization_permissions + .contains(new_member.organization_permissions.unwrap_or_default()) + { + return Err(ApiError::InvalidInput( + "The new member has organization permissions that you don't have".to_string(), + )); + } + if !organization_permissions + .contains(OrganizationPermissions::EDIT_MEMBER_DEFAULT_PERMISSIONS) + && !new_member.permissions.is_empty() + { + return Err(ApiError::CustomAuthentication( + "You do not have permission to give this user default project permissions. Ensure 'permissions' is set if it is not, and empty (0)." + .to_string(), + )); + } + } + } + + if new_member.role == crate::models::teams::OWNER_ROLE { + return Err(ApiError::InvalidInput( + "The `Owner` role is restricted to one person".to_string(), + )); + } + + if new_member.payouts_split < Decimal::ZERO || new_member.payouts_split > Decimal::from(5000) { + return Err(ApiError::InvalidInput( + "Payouts split must be between 0 and 5000!".to_string(), + )); + } + + let request = + TeamMember::get_from_user_id_pending(team_id, new_member.user_id.into(), &**pool).await?; + + if let Some(req) = request { + if req.accepted { + return Err(ApiError::InvalidInput( + "The user is already a member of that team".to_string(), + )); + } else { + return Err(ApiError::InvalidInput( + "There is already a pending member request for this user".to_string(), + )); + } + } + crate::database::models::User::get_id(new_member.user_id.into(), &**pool, &redis) + .await? 
+ .ok_or_else(|| ApiError::InvalidInput("An invalid User ID specified".to_string()))?; + + let new_id = crate::database::models::ids::generate_team_member_id(&mut transaction).await?; + TeamMember { + id: new_id, + team_id, + user_id: new_member.user_id.into(), + role: new_member.role.clone(), + permissions: new_member.permissions, + organization_permissions: new_member.organization_permissions, + accepted: false, + payouts_split: new_member.payouts_split, + ordering: new_member.ordering, + } + .insert(&mut transaction) + .await?; + + match team_association { + TeamAssociationId::Project(pid) => { + NotificationBuilder { + body: NotificationBody::TeamInvite { + project_id: pid.into(), + team_id: team_id.into(), + invited_by: current_user.id, + role: new_member.role.clone(), + }, + } + .insert(new_member.user_id.into(), &mut transaction, &redis) + .await?; + } + TeamAssociationId::Organization(oid) => { + NotificationBuilder { + body: NotificationBody::OrganizationInvite { + organization_id: oid.into(), + team_id: team_id.into(), + invited_by: current_user.id, + role: new_member.role.clone(), + }, + } + .insert(new_member.user_id.into(), &mut transaction, &redis) + .await?; + } + } + + TeamMember::clear_cache(team_id, &redis).await?; + + transaction.commit().await?; + + Ok(HttpResponse::NoContent().body("")) +} + +#[derive(Serialize, Deserialize, Clone)] +pub struct EditTeamMember { + pub permissions: Option, + pub organization_permissions: Option, + pub role: Option, + pub payouts_split: Option, + pub ordering: Option, +} + +pub async fn edit_team_member( + req: HttpRequest, + info: web::Path<(TeamId, UserId)>, + pool: web::Data, + edit_member: web::Json, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let ids = info.into_inner(); + let id = ids.0.into(); + let user_id = ids.1.into(); + + let current_user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::PROJECT_WRITE]), + ) + .await? 
+ .1; + + let team_association = Team::get_association(id, &**pool) + .await? + .ok_or_else(|| ApiError::InvalidInput("The team specified does not exist".to_string()))?; + let member = TeamMember::get_from_user_id(id, current_user.id.into(), &**pool).await?; + let edit_member_db = TeamMember::get_from_user_id_pending(id, user_id, &**pool) + .await? + .ok_or_else(|| { + ApiError::CustomAuthentication( + "You don't have permission to edit members of this team".to_string(), + ) + })?; + + let mut transaction = pool.begin().await?; + + if &*edit_member_db.role == crate::models::teams::OWNER_ROLE + && (edit_member.role.is_some() || edit_member.permissions.is_some()) + { + return Err(ApiError::InvalidInput( + "The owner's permission and role of a team cannot be edited".to_string(), + )); + } + + match team_association { + TeamAssociationId::Project(project_id) => { + let organization = + Organization::get_associated_organization_project_id(project_id, &**pool).await?; + let organization_team_member = if let Some(organization) = &organization { + TeamMember::get_from_user_id(organization.team_id, current_user.id.into(), &**pool) + .await? 
+ } else { + None + }; + let permissions = ProjectPermissions::get_permissions_by_role( + ¤t_user.role, + &member.clone(), + &organization_team_member, + ) + .unwrap_or_default(); + if !permissions.contains(ProjectPermissions::EDIT_MEMBER) { + return Err(ApiError::CustomAuthentication( + "You don't have permission to edit members of this team".to_string(), + )); + } + + if let Some(new_permissions) = edit_member.permissions { + if !permissions.contains(new_permissions) { + return Err(ApiError::InvalidInput( + "The new permissions have permissions that you don't have".to_string(), + )); + } + } + + if edit_member.organization_permissions.is_some() { + return Err(ApiError::InvalidInput( + "The organization permissions of a project team member cannot be edited" + .to_string(), + )); + } + } + TeamAssociationId::Organization(_) => { + let organization_permissions = + OrganizationPermissions::get_permissions_by_role(¤t_user.role, &member) + .unwrap_or_default(); + + if !organization_permissions.contains(OrganizationPermissions::EDIT_MEMBER) { + return Err(ApiError::CustomAuthentication( + "You don't have permission to edit members of this team".to_string(), + )); + } + + if let Some(new_permissions) = edit_member.organization_permissions { + if !organization_permissions.contains(new_permissions) { + return Err(ApiError::InvalidInput( + "The new organization permissions have permissions that you don't have" + .to_string(), + )); + } + } + + if edit_member.permissions.is_some() + && !organization_permissions + .contains(OrganizationPermissions::EDIT_MEMBER_DEFAULT_PERMISSIONS) + { + return Err(ApiError::CustomAuthentication( + "You do not have permission to give this user default project permissions." 
+ .to_string(), + )); + } + } + } + + if let Some(payouts_split) = edit_member.payouts_split { + if payouts_split < Decimal::ZERO || payouts_split > Decimal::from(5000) { + return Err(ApiError::InvalidInput( + "Payouts split must be between 0 and 5000!".to_string(), + )); + } + } + + if edit_member.role.as_deref() == Some(crate::models::teams::OWNER_ROLE) { + return Err(ApiError::InvalidInput( + "The `Owner` role is restricted to one person".to_string(), + )); + } + + TeamMember::edit_team_member( + id, + user_id, + edit_member.permissions, + edit_member.organization_permissions, + edit_member.role.clone(), + None, + edit_member.payouts_split, + edit_member.ordering, + &mut transaction, + ) + .await?; + + TeamMember::clear_cache(id, &redis).await?; + + transaction.commit().await?; + + Ok(HttpResponse::NoContent().body("")) +} + +#[derive(Deserialize)] +pub struct TransferOwnership { + pub user_id: UserId, +} + +pub async fn transfer_ownership( + req: HttpRequest, + info: web::Path<(TeamId,)>, + pool: web::Data, + new_owner: web::Json, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let id = info.into_inner().0; + + let current_user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::PROJECT_WRITE]), + ) + .await? 
+ .1; + + // Forbid transferring ownership of a project team that is owned by an organization + // These are owned by the organization owner, and must be removed from the organization first + let pid = Team::get_association(id.into(), &**pool).await?; + if let Some(TeamAssociationId::Project(pid)) = pid { + let result = Project::get_id(pid, &**pool, &redis).await?; + if let Some(project_item) = result { + if project_item.inner.organization_id.is_some() { + return Err(ApiError::InvalidInput( + "You cannot transfer ownership of a project team that is owend by an organization" + .to_string(), + )); + } + } + } + + if !current_user.role.is_admin() { + let member = TeamMember::get_from_user_id(id.into(), current_user.id.into(), &**pool) + .await? + .ok_or_else(|| { + ApiError::CustomAuthentication( + "You don't have permission to edit members of this team".to_string(), + ) + })?; + + if member.role != crate::models::teams::OWNER_ROLE { + return Err(ApiError::CustomAuthentication( + "You don't have permission to edit the ownership of this team".to_string(), + )); + } + } + + let new_member = TeamMember::get_from_user_id(id.into(), new_owner.user_id.into(), &**pool) + .await? 
+ .ok_or_else(|| { + ApiError::InvalidInput("The new owner specified does not exist".to_string()) + })?; + + if !new_member.accepted { + return Err(ApiError::InvalidInput( + "You can only transfer ownership to members who are currently in your team".to_string(), + )); + } + + let mut transaction = pool.begin().await?; + + TeamMember::edit_team_member( + id.into(), + current_user.id.into(), + None, + None, + Some(crate::models::teams::DEFAULT_ROLE.to_string()), + None, + None, + None, + &mut transaction, + ) + .await?; + + TeamMember::edit_team_member( + id.into(), + new_owner.user_id.into(), + Some(ProjectPermissions::all()), + Some(OrganizationPermissions::all()), + Some(crate::models::teams::OWNER_ROLE.to_string()), + None, + None, + None, + &mut transaction, + ) + .await?; + + TeamMember::clear_cache(id.into(), &redis).await?; + + transaction.commit().await?; + + Ok(HttpResponse::NoContent().body("")) +} + +pub async fn remove_team_member( + req: HttpRequest, + info: web::Path<(TeamId, UserId)>, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let ids = info.into_inner(); + let id = ids.0.into(); + let user_id = ids.1.into(); + + let current_user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::PROJECT_WRITE]), + ) + .await? + .1; + + let team_association = Team::get_association(id, &**pool) + .await? 
+ .ok_or_else(|| ApiError::InvalidInput("The team specified does not exist".to_string()))?; + let member = TeamMember::get_from_user_id(id, current_user.id.into(), &**pool).await?; + + let delete_member = TeamMember::get_from_user_id_pending(id, user_id, &**pool).await?; + + if let Some(delete_member) = delete_member { + if delete_member.role == crate::models::teams::OWNER_ROLE { + // The owner cannot be removed from a team + return Err(ApiError::CustomAuthentication( + "The owner can't be removed from a team".to_string(), + )); + } + + let mut transaction = pool.begin().await?; + + // Organization attached to a project this team is attached to + match team_association { + TeamAssociationId::Project(pid) => { + let organization = + Organization::get_associated_organization_project_id(pid, &**pool).await?; + let organization_team_member = if let Some(organization) = &organization { + TeamMember::get_from_user_id( + organization.team_id, + current_user.id.into(), + &**pool, + ) + .await? + } else { + None + }; + let permissions = ProjectPermissions::get_permissions_by_role( + ¤t_user.role, + &member, + &organization_team_member, + ) + .unwrap_or_default(); + + if delete_member.accepted { + // Members other than the owner can either leave the team, or be + // removed by a member with the REMOVE_MEMBER permission. 
+ if Some(delete_member.user_id) == member.as_ref().map(|m| m.user_id) + || permissions.contains(ProjectPermissions::REMOVE_MEMBER) + // true as if the permission exists, but the member does not, they are part of an org + { + TeamMember::delete(id, user_id, &mut transaction).await?; + } else { + return Err(ApiError::CustomAuthentication( + "You do not have permission to remove a member from this team" + .to_string(), + )); + } + } else if Some(delete_member.user_id) == member.as_ref().map(|m| m.user_id) + || permissions.contains(ProjectPermissions::MANAGE_INVITES) + // true as if the permission exists, but the member does not, they are part of an org + { + // This is a pending invite rather than a member, so the + // user being invited or team members with the MANAGE_INVITES + // permission can remove it. + TeamMember::delete(id, user_id, &mut transaction).await?; + } else { + return Err(ApiError::CustomAuthentication( + "You do not have permission to cancel a team invite".to_string(), + )); + } + } + TeamAssociationId::Organization(_) => { + let organization_permissions = + OrganizationPermissions::get_permissions_by_role(¤t_user.role, &member) + .unwrap_or_default(); + // Organization teams requires a TeamMember, so we can 'unwrap' + if delete_member.accepted { + // Members other than the owner can either leave the team, or be + // removed by a member with the REMOVE_MEMBER permission. 
+ if Some(delete_member.user_id) == member.map(|m| m.user_id) + || organization_permissions.contains(OrganizationPermissions::REMOVE_MEMBER) + { + TeamMember::delete(id, user_id, &mut transaction).await?; + } else { + return Err(ApiError::CustomAuthentication( + "You do not have permission to remove a member from this organization" + .to_string(), + )); + } + } else if Some(delete_member.user_id) == member.map(|m| m.user_id) + || organization_permissions.contains(OrganizationPermissions::MANAGE_INVITES) + { + // This is a pending invite rather than a member, so the + // user being invited or team members with the MANAGE_INVITES + // permission can remove it. + TeamMember::delete(id, user_id, &mut transaction).await?; + } else { + return Err(ApiError::CustomAuthentication( + "You do not have permission to cancel an organization invite".to_string(), + )); + } + } + } + + TeamMember::clear_cache(id, &redis).await?; + User::clear_project_cache(&[delete_member.user_id], &redis).await?; + + transaction.commit().await?; + Ok(HttpResponse::NoContent().body("")) + } else { + Ok(HttpResponse::NotFound().body("")) + } +} diff --git a/src/routes/v3/threads.rs b/src/routes/v3/threads.rs new file mode 100644 index 00000000..9f10c2b5 --- /dev/null +++ b/src/routes/v3/threads.rs @@ -0,0 +1,622 @@ +use std::sync::Arc; + +use crate::auth::{check_is_moderator_from_headers, get_user_from_headers}; +use crate::database; +use crate::database::models::image_item; +use crate::database::models::notification_item::NotificationBuilder; +use crate::database::models::thread_item::ThreadMessageBuilder; +use crate::database::redis::RedisPool; +use crate::file_hosting::FileHost; +use crate::models::ids::ThreadMessageId; +use crate::models::images::{Image, ImageContext}; +use crate::models::notifications::NotificationBody; +use crate::models::pats::Scopes; +use crate::models::projects::ProjectStatus; +use crate::models::threads::{MessageBody, Thread, ThreadId, ThreadType}; +use 
crate::models::users::User; +use crate::queue::session::AuthQueue; +use crate::routes::ApiError; +use actix_web::{web, HttpRequest, HttpResponse}; +use futures::TryStreamExt; +use serde::Deserialize; +use sqlx::PgPool; + +pub fn config(cfg: &mut web::ServiceConfig) { + cfg.service( + web::scope("thread") + .route("{id}", web::get().to(thread_get)) + .route("inbox", web::get().to(moderation_inbox)) + .route("{id}", web::post().to(thread_send_message)) + .route("{id}/read", web::post().to(thread_read)), + ); + cfg.service(web::scope("message").route("{id}", web::delete().to(message_delete))); + cfg.route("threads", web::get().to(threads_get)); +} + +pub async fn is_authorized_thread( + thread: &database::models::Thread, + user: &User, + pool: &PgPool, +) -> Result { + if user.role.is_mod() { + return Ok(true); + } + + let user_id: database::models::UserId = user.id.into(); + Ok(match thread.type_ { + ThreadType::Report => { + if let Some(report_id) = thread.report_id { + let report_exists = sqlx::query!( + "SELECT EXISTS(SELECT 1 FROM reports WHERE id = $1 AND reporter = $2)", + report_id as database::models::ids::ReportId, + user_id as database::models::ids::UserId, + ) + .fetch_one(pool) + .await? + .exists; + + report_exists.unwrap_or(false) + } else { + false + } + } + ThreadType::Project => { + if let Some(project_id) = thread.project_id { + let project_exists = sqlx::query!( + "SELECT EXISTS(SELECT 1 FROM mods m INNER JOIN team_members tm ON tm.team_id = m.team_id AND tm.user_id = $2 WHERE m.id = $1)", + project_id as database::models::ids::ProjectId, + user_id as database::models::ids::UserId, + ) + .fetch_one(pool) + .await? 
+ .exists; + + project_exists.unwrap_or(false) + } else { + false + } + } + ThreadType::DirectMessage => thread.members.contains(&user_id), + }) +} + +pub async fn filter_authorized_threads( + threads: Vec, + user: &User, + pool: &web::Data, + redis: &RedisPool, +) -> Result, ApiError> { + let user_id: database::models::UserId = user.id.into(); + + let mut return_threads = Vec::new(); + let mut check_threads = Vec::new(); + + for thread in threads { + if user.role.is_mod() + || (thread.type_ == ThreadType::DirectMessage && thread.members.contains(&user_id)) + { + return_threads.push(thread); + } else { + check_threads.push(thread); + } + } + + if !check_threads.is_empty() { + let project_thread_ids = check_threads + .iter() + .filter(|x| x.type_ == ThreadType::Project) + .flat_map(|x| x.project_id.map(|x| x.0)) + .collect::>(); + + if !project_thread_ids.is_empty() { + sqlx::query!( + " + SELECT m.id FROM mods m + INNER JOIN team_members tm ON tm.team_id = m.team_id AND user_id = $2 + WHERE m.id = ANY($1) + ", + &*project_thread_ids, + user_id as database::models::ids::UserId, + ) + .fetch_many(&***pool) + .try_for_each(|e| { + if let Some(row) = e.right() { + check_threads.retain(|x| { + let bool = x.project_id.map(|x| x.0) == Some(row.id); + + if bool { + return_threads.push(x.clone()); + } + + !bool + }); + } + + futures::future::ready(Ok(())) + }) + .await?; + } + + let report_thread_ids = check_threads + .iter() + .filter(|x| x.type_ == ThreadType::Report) + .flat_map(|x| x.report_id.map(|x| x.0)) + .collect::>(); + + if !report_thread_ids.is_empty() { + sqlx::query!( + " + SELECT id FROM reports + WHERE id = ANY($1) AND reporter = $2 + ", + &*report_thread_ids, + user_id as database::models::ids::UserId, + ) + .fetch_many(&***pool) + .try_for_each(|e| { + if let Some(row) = e.right() { + check_threads.retain(|x| { + let bool = x.report_id.map(|x| x.0) == Some(row.id); + + if bool { + return_threads.push(x.clone()); + } + + !bool + }); + } + + 
futures::future::ready(Ok(())) + }) + .await?; + } + } + + let mut user_ids = return_threads + .iter() + .flat_map(|x| x.members.clone()) + .collect::>(); + user_ids.append( + &mut return_threads + .iter() + .flat_map(|x| { + x.messages + .iter() + .filter_map(|x| x.author_id) + .collect::>() + }) + .collect::>(), + ); + + let users: Vec = database::models::User::get_many_ids(&user_ids, &***pool, redis) + .await? + .into_iter() + .map(From::from) + .collect(); + + let mut final_threads = Vec::new(); + + for thread in return_threads { + let mut authors = thread.members.clone(); + + authors.append( + &mut thread + .messages + .iter() + .filter_map(|x| x.author_id) + .collect::>(), + ); + + final_threads.push(Thread::from( + thread, + users + .iter() + .filter(|x| authors.contains(&x.id.into())) + .cloned() + .collect(), + user, + )); + } + + Ok(final_threads) +} + +pub async fn thread_get( + req: HttpRequest, + info: web::Path<(ThreadId,)>, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let string = info.into_inner().0.into(); + + let thread_data = database::models::Thread::get(string, &**pool).await?; + + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::THREAD_READ]), + ) + .await? + .1; + + if let Some(mut data) = thread_data { + if is_authorized_thread(&data, &user, &pool).await? { + let authors = &mut data.members; + + authors.append( + &mut data + .messages + .iter() + .filter_map(|x| x.author_id) + .collect::>(), + ); + + let users: Vec = database::models::User::get_many_ids(authors, &**pool, &redis) + .await? 
+ .into_iter() + .map(From::from) + .collect(); + + return Ok(HttpResponse::Ok().json(Thread::from(data, users, &user))); + } + } + Ok(HttpResponse::NotFound().body("")) +} + +#[derive(Deserialize)] +pub struct ThreadIds { + pub ids: String, +} + +pub async fn threads_get( + req: HttpRequest, + web::Query(ids): web::Query, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::THREAD_READ]), + ) + .await? + .1; + + let thread_ids: Vec = + serde_json::from_str::>(&ids.ids)? + .into_iter() + .map(|x| x.into()) + .collect(); + + let threads_data = database::models::Thread::get_many(&thread_ids, &**pool).await?; + + let threads = filter_authorized_threads(threads_data, &user, &pool, &redis).await?; + + Ok(HttpResponse::Ok().json(threads)) +} + +#[derive(Deserialize)] +pub struct NewThreadMessage { + pub body: MessageBody, +} + +pub async fn thread_send_message( + req: HttpRequest, + info: web::Path<(ThreadId,)>, + pool: web::Data, + new_message: web::Json, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::THREAD_WRITE]), + ) + .await? + .1; + + let string: database::models::ThreadId = info.into_inner().0.into(); + + if let MessageBody::Text { + body, + replying_to, + private, + .. 
+ } = &new_message.body + { + if body.len() > 65536 { + return Err(ApiError::InvalidInput( + "Input body is too long!".to_string(), + )); + } + + if *private && !user.role.is_mod() { + return Err(ApiError::InvalidInput( + "You are not allowed to send private messages!".to_string(), + )); + } + + if let Some(replying_to) = replying_to { + let thread_message = + database::models::ThreadMessage::get((*replying_to).into(), &**pool).await?; + + if let Some(thread_message) = thread_message { + if thread_message.thread_id != string { + return Err(ApiError::InvalidInput( + "Message replied to is from another thread!".to_string(), + )); + } + } else { + return Err(ApiError::InvalidInput( + "Message replied to does not exist!".to_string(), + )); + } + } + } else { + return Err(ApiError::InvalidInput( + "You may only send text messages through this route!".to_string(), + )); + } + + let result = database::models::Thread::get(string, &**pool).await?; + + if let Some(thread) = result { + if !is_authorized_thread(&thread, &user, &pool).await? 
{ + return Ok(HttpResponse::NotFound().body("")); + } + + let mut transaction = pool.begin().await?; + + let id = ThreadMessageBuilder { + author_id: Some(user.id.into()), + body: new_message.body.clone(), + thread_id: thread.id, + } + .insert(&mut transaction) + .await?; + + let mod_notif = if let Some(project_id) = thread.project_id { + let project = database::models::Project::get_id(project_id, &**pool, &redis).await?; + + if let Some(project) = project { + if project.inner.status != ProjectStatus::Processing && user.role.is_mod() { + let members = database::models::TeamMember::get_from_team_full( + project.inner.team_id, + &**pool, + &redis, + ) + .await?; + + NotificationBuilder { + body: NotificationBody::ModeratorMessage { + thread_id: thread.id.into(), + message_id: id.into(), + project_id: Some(project.inner.id.into()), + report_id: None, + }, + } + .insert_many( + members.into_iter().map(|x| x.user_id).collect(), + &mut transaction, + &redis, + ) + .await?; + } + } + + !user.role.is_mod() + } else if let Some(report_id) = thread.report_id { + let report = database::models::report_item::Report::get(report_id, &**pool).await?; + + if let Some(report) = report { + if report.closed && !user.role.is_mod() { + return Err(ApiError::InvalidInput( + "You may not reply to a closed report".to_string(), + )); + } + + if user.id != report.reporter.into() { + NotificationBuilder { + body: NotificationBody::ModeratorMessage { + thread_id: thread.id.into(), + message_id: id.into(), + project_id: None, + report_id: Some(report.id.into()), + }, + } + .insert(report.reporter, &mut transaction, &redis) + .await?; + } + } + + !user.role.is_mod() + } else { + false + }; + + sqlx::query!( + " + UPDATE threads + SET show_in_mod_inbox = $1 + WHERE id = $2 + ", + mod_notif, + thread.id.0, + ) + .execute(&mut *transaction) + .await?; + + if let MessageBody::Text { + associated_images, .. 
+ } = &new_message.body + { + for image_id in associated_images { + if let Some(db_image) = + image_item::Image::get((*image_id).into(), &mut *transaction, &redis).await? + { + let image: Image = db_image.into(); + if !matches!(image.context, ImageContext::ThreadMessage { .. }) + || image.context.inner_id().is_some() + { + return Err(ApiError::InvalidInput(format!( + "Image {} is not unused and in the 'thread_message' context", + image_id + ))); + } + + sqlx::query!( + " + UPDATE uploaded_images + SET thread_message_id = $1 + WHERE id = $2 + ", + thread.id.0, + image_id.0 as i64 + ) + .execute(&mut *transaction) + .await?; + + image_item::Image::clear_cache(image.id.into(), &redis).await?; + } else { + return Err(ApiError::InvalidInput(format!( + "Image {} does not exist", + image_id + ))); + } + } + } + + transaction.commit().await?; + + Ok(HttpResponse::NoContent().body("")) + } else { + Ok(HttpResponse::NotFound().body("")) + } +} + +pub async fn moderation_inbox( + req: HttpRequest, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let user = check_is_moderator_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::THREAD_READ]), + ) + .await?; + + let ids = sqlx::query!( + " + SELECT id + FROM threads + WHERE show_in_mod_inbox = TRUE + " + ) + .fetch_many(&**pool) + .try_filter_map(|e| async { Ok(e.right().map(|m| database::models::ThreadId(m.id))) }) + .try_collect::>() + .await?; + + let threads_data = database::models::Thread::get_many(&ids, &**pool).await?; + let threads = filter_authorized_threads(threads_data, &user, &pool, &redis).await?; + Ok(HttpResponse::Ok().json(threads)) +} + +pub async fn thread_read( + req: HttpRequest, + info: web::Path<(ThreadId,)>, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + check_is_moderator_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::THREAD_READ]), + ) + .await?; + + let id = 
info.into_inner().0; + let mut transaction = pool.begin().await?; + + sqlx::query!( + " + UPDATE threads + SET show_in_mod_inbox = FALSE + WHERE id = $1 + ", + id.0 as i64, + ) + .execute(&mut *transaction) + .await?; + + transaction.commit().await?; + + Ok(HttpResponse::NoContent().body("")) +} + +pub async fn message_delete( + req: HttpRequest, + info: web::Path<(ThreadMessageId,)>, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, + file_host: web::Data>, +) -> Result { + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::THREAD_WRITE]), + ) + .await? + .1; + + let result = database::models::ThreadMessage::get(info.into_inner().0.into(), &**pool).await?; + + if let Some(thread) = result { + if !user.role.is_mod() && thread.author_id != Some(user.id.into()) { + return Err(ApiError::CustomAuthentication( + "You cannot delete this message!".to_string(), + )); + } + + let mut transaction = pool.begin().await?; + + let context = ImageContext::ThreadMessage { + thread_message_id: Some(thread.id.into()), + }; + let images = database::Image::get_many_contexted(context, &mut transaction).await?; + let cdn_url = dotenvy::var("CDN_URL")?; + for image in images { + let name = image.url.split(&format!("{cdn_url}/")).nth(1); + if let Some(icon_path) = name { + file_host.delete_file_version("", icon_path).await?; + } + database::Image::remove(image.id, &mut transaction, &redis).await?; + } + + database::models::ThreadMessage::remove_full(thread.id, &mut transaction).await?; + transaction.commit().await?; + + Ok(HttpResponse::NoContent().body("")) + } else { + Ok(HttpResponse::NotFound().body("")) + } +} diff --git a/src/routes/v3/users.rs b/src/routes/v3/users.rs new file mode 100644 index 00000000..96b666dd --- /dev/null +++ b/src/routes/v3/users.rs @@ -0,0 +1,913 @@ +use std::{collections::HashMap, sync::Arc}; + +use actix_web::{web, HttpRequest, HttpResponse}; +use lazy_static::lazy_static; +use regex::Regex; 
+use rust_decimal::Decimal; +use serde::{Deserialize, Serialize}; +use serde_json::json; +use sqlx::PgPool; +use tokio::sync::Mutex; +use validator::Validate; + +use crate::{ + auth::get_user_from_headers, + database::{models::User, redis::RedisPool}, + file_hosting::FileHost, + models::{ + collections::{Collection, CollectionStatus}, + ids::UserId, + notifications::Notification, + pats::Scopes, + projects::Project, + users::{Badges, Payout, PayoutStatus, RecipientStatus, Role, UserPayoutData}, + }, + queue::{payouts::PayoutsQueue, session::AuthQueue}, + util::{routes::read_from_payload, validate::validation_errors_to_string}, +}; + +use super::ApiError; + +pub fn config(cfg: &mut web::ServiceConfig) { + cfg.route("user", web::get().to(user_auth_get)); + cfg.route("users", web::get().to(users_get)); + + cfg.service( + web::scope("user") + .route("{user_id}/projects", web::get().to(projects_list)) + .route("{id}", web::get().to(user_get)) + .route("{user_id}/collections", web::get().to(collections_list)) + .route("{user_id}/organizations", web::get().to(orgs_list)) + .route("{id}", web::patch().to(user_edit)) + .route("{id}/icon", web::patch().to(user_icon_edit)) + .route("{id}", web::delete().to(user_delete)) + .route("{id}/follows", web::get().to(user_follows)) + .route("{id}/notifications", web::get().to(user_notifications)) + .route("{id}/payouts", web::get().to(user_payouts)) + .route("{id}/payouts_fees", web::get().to(user_payouts_fees)) + .route("{id}/payouts", web::post().to(user_payouts_request)), + ); +} + +pub async fn projects_list( + req: HttpRequest, + info: web::Path<(String,)>, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::PROJECT_READ]), + ) + .await + .map(|x| x.1) + .ok(); + + let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?; + + if let Some(id) = id_option.map(|x| x.id) { + let user_id: 
UserId = id.into(); + + let can_view_private = user + .map(|y| y.role.is_mod() || y.id == user_id) + .unwrap_or(false); + + let project_data = User::get_projects(id, &**pool, &redis).await?; + + let response: Vec<_> = + crate::database::Project::get_many_ids(&project_data, &**pool, &redis) + .await? + .into_iter() + .filter(|x| can_view_private || x.inner.status.is_searchable()) + .map(Project::from) + .collect(); + + Ok(HttpResponse::Ok().json(response)) + } else { + Ok(HttpResponse::NotFound().body("")) + } +} + +pub async fn user_auth_get( + req: HttpRequest, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let (scopes, mut user) = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::USER_READ]), + ) + .await?; + + if !scopes.contains(Scopes::USER_READ_EMAIL) { + user.email = None; + } + + if !scopes.contains(Scopes::PAYOUTS_READ) { + user.payout_data = None; + } + + Ok(HttpResponse::Ok().json(user)) +} + +#[derive(Serialize, Deserialize)] +pub struct UserIds { + pub ids: String, +} + +pub async fn users_get( + web::Query(ids): web::Query, + pool: web::Data, + redis: web::Data, +) -> Result { + let user_ids = serde_json::from_str::>(&ids.ids)?; + + let users_data = User::get_many(&user_ids, &**pool, &redis).await?; + + let users: Vec = users_data.into_iter().map(From::from).collect(); + + Ok(HttpResponse::Ok().json(users)) +} + +pub async fn user_get( + info: web::Path<(String,)>, + pool: web::Data, + redis: web::Data, +) -> Result { + let user_data = User::get(&info.into_inner().0, &**pool, &redis).await?; + + if let Some(data) = user_data { + let response: crate::models::users::User = data.into(); + Ok(HttpResponse::Ok().json(response)) + } else { + Ok(HttpResponse::NotFound().body("")) + } +} + +pub async fn collections_list( + req: HttpRequest, + info: web::Path<(String,)>, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let user = get_user_from_headers( 
+ &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::COLLECTION_READ]), + ) + .await + .map(|x| x.1) + .ok(); + + let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?; + + if let Some(id) = id_option.map(|x| x.id) { + let user_id: UserId = id.into(); + + let can_view_private = user + .map(|y| y.role.is_mod() || y.id == user_id) + .unwrap_or(false); + + let project_data = User::get_collections(id, &**pool).await?; + + let response: Vec<_> = + crate::database::models::Collection::get_many(&project_data, &**pool, &redis) + .await? + .into_iter() + .filter(|x| can_view_private || matches!(x.status, CollectionStatus::Listed)) + .map(Collection::from) + .collect(); + + Ok(HttpResponse::Ok().json(response)) + } else { + Ok(HttpResponse::NotFound().body("")) + } +} + +pub async fn orgs_list( + req: HttpRequest, + info: web::Path<(String,)>, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::PROJECT_READ]), + ) + .await + .map(|x| x.1) + .ok(); + + let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?; + + if let Some(id) = id_option.map(|x| x.id) { + let org_data = User::get_organizations(id, &**pool).await?; + + let organizations_data = + crate::database::models::organization_item::Organization::get_many_ids( + &org_data, &**pool, &redis, + ) + .await?; + + let team_ids = organizations_data + .iter() + .map(|x| x.team_id) + .collect::>(); + + let teams_data = crate::database::models::TeamMember::get_from_team_full_many( + &team_ids, &**pool, &redis, + ) + .await?; + let users = User::get_many_ids( + &teams_data.iter().map(|x| x.user_id).collect::>(), + &**pool, + &redis, + ) + .await?; + + let mut organizations = vec![]; + let mut team_groups = HashMap::new(); + for item in teams_data { + team_groups.entry(item.team_id).or_insert(vec![]).push(item); + } + + for data in organizations_data { + let 
members_data = team_groups.remove(&data.team_id).unwrap_or(vec![]); + let logged_in = user + .as_ref() + .and_then(|user| { + members_data + .iter() + .find(|x| x.user_id == user.id.into() && x.accepted) + }) + .is_some(); + + let team_members: Vec<_> = members_data + .into_iter() + .filter(|x| logged_in || x.accepted || id == x.user_id) + .flat_map(|data| { + users.iter().find(|x| x.id == data.user_id).map(|user| { + crate::models::teams::TeamMember::from(data, user.clone(), !logged_in) + }) + }) + .collect(); + + let organization = crate::models::organizations::Organization::from(data, team_members); + organizations.push(organization); + } + + Ok(HttpResponse::Ok().json(organizations)) + } else { + Ok(HttpResponse::NotFound().body("")) + } +} + +lazy_static! { + static ref RE_URL_SAFE: Regex = Regex::new(r"^[a-zA-Z0-9_-]*$").unwrap(); +} + +#[derive(Serialize, Deserialize, Validate)] +pub struct EditUser { + #[validate(length(min = 1, max = 39), regex = "RE_URL_SAFE")] + pub username: Option, + #[serde( + default, + skip_serializing_if = "Option::is_none", + with = "::serde_with::rust::double_option" + )] + #[validate(length(min = 1, max = 64), regex = "RE_URL_SAFE")] + pub name: Option>, + #[serde( + default, + skip_serializing_if = "Option::is_none", + with = "::serde_with::rust::double_option" + )] + #[validate(length(max = 160))] + pub bio: Option>, + pub role: Option, + pub badges: Option, +} + +pub async fn user_edit( + req: HttpRequest, + info: web::Path<(String,)>, + new_user: web::Json, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let (_scopes, user) = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::USER_WRITE]), + ) + .await?; + + new_user + .validate() + .map_err(|err| ApiError::Validation(validation_errors_to_string(err, None)))?; + + let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?; + + if let Some(actual_user) = id_option { + let id = actual_user.id; 
+ let user_id: UserId = id.into(); + + if user.id == user_id || user.role.is_mod() { + let mut transaction = pool.begin().await?; + + if let Some(username) = &new_user.username { + let existing_user_id_option = User::get(username, &**pool, &redis).await?; + + if existing_user_id_option + .map(|x| UserId::from(x.id)) + .map(|id| id == user.id) + .unwrap_or(true) + { + sqlx::query!( + " + UPDATE users + SET username = $1 + WHERE (id = $2) + ", + username, + id as crate::database::models::ids::UserId, + ) + .execute(&mut *transaction) + .await?; + } else { + return Err(ApiError::InvalidInput(format!( + "Username {username} is taken!" + ))); + } + } + + if let Some(name) = &new_user.name { + sqlx::query!( + " + UPDATE users + SET name = $1 + WHERE (id = $2) + ", + name.as_deref(), + id as crate::database::models::ids::UserId, + ) + .execute(&mut *transaction) + .await?; + } + + if let Some(bio) = &new_user.bio { + sqlx::query!( + " + UPDATE users + SET bio = $1 + WHERE (id = $2) + ", + bio.as_deref(), + id as crate::database::models::ids::UserId, + ) + .execute(&mut *transaction) + .await?; + } + + if let Some(role) = &new_user.role { + if !user.role.is_admin() { + return Err(ApiError::CustomAuthentication( + "You do not have the permissions to edit the role of this user!" + .to_string(), + )); + } + + let role = role.to_string(); + + sqlx::query!( + " + UPDATE users + SET role = $1 + WHERE (id = $2) + ", + role, + id as crate::database::models::ids::UserId, + ) + .execute(&mut *transaction) + .await?; + } + + if let Some(badges) = &new_user.badges { + if !user.role.is_admin() { + return Err(ApiError::CustomAuthentication( + "You do not have the permissions to edit the badges of this user!" 
+ .to_string(), + )); + } + + sqlx::query!( + " + UPDATE users + SET badges = $1 + WHERE (id = $2) + ", + badges.bits() as i64, + id as crate::database::models::ids::UserId, + ) + .execute(&mut *transaction) + .await?; + } + + User::clear_caches(&[(id, Some(actual_user.username))], &redis).await?; + transaction.commit().await?; + Ok(HttpResponse::NoContent().body("")) + } else { + Err(ApiError::CustomAuthentication( + "You do not have permission to edit this user!".to_string(), + )) + } + } else { + Ok(HttpResponse::NotFound().body("")) + } +} + +#[derive(Serialize, Deserialize)] +pub struct Extension { + pub ext: String, +} + +#[allow(clippy::too_many_arguments)] +pub async fn user_icon_edit( + web::Query(ext): web::Query, + req: HttpRequest, + info: web::Path<(String,)>, + pool: web::Data, + redis: web::Data, + file_host: web::Data>, + mut payload: web::Payload, + session_queue: web::Data, +) -> Result { + if let Some(content_type) = crate::util::ext::get_image_content_type(&ext.ext) { + let cdn_url = dotenvy::var("CDN_URL")?; + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::USER_WRITE]), + ) + .await? 
+ .1; + let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?; + + if let Some(actual_user) = id_option { + if user.id != actual_user.id.into() && !user.role.is_mod() { + return Err(ApiError::CustomAuthentication( + "You don't have permission to edit this user's icon.".to_string(), + )); + } + + let icon_url = actual_user.avatar_url; + let user_id: UserId = actual_user.id.into(); + + if let Some(icon) = icon_url { + let name = icon.split(&format!("{cdn_url}/")).nth(1); + + if let Some(icon_path) = name { + file_host.delete_file_version("", icon_path).await?; + } + } + + let bytes = + read_from_payload(&mut payload, 2097152, "Icons must be smaller than 2MiB").await?; + + let hash = sha1::Sha1::from(&bytes).hexdigest(); + let upload_data = file_host + .upload_file( + content_type, + &format!("user/{}/{}.{}", user_id, hash, ext.ext), + bytes.freeze(), + ) + .await?; + + sqlx::query!( + " + UPDATE users + SET avatar_url = $1 + WHERE (id = $2) + ", + format!("{}/{}", cdn_url, upload_data.file_name), + actual_user.id as crate::database::models::ids::UserId, + ) + .execute(&**pool) + .await?; + User::clear_caches(&[(actual_user.id, None)], &redis).await?; + + Ok(HttpResponse::NoContent().body("")) + } else { + Ok(HttpResponse::NotFound().body("")) + } + } else { + Err(ApiError::InvalidInput(format!( + "Invalid format for user icon: {}", + ext.ext + ))) + } +} + +#[derive(Deserialize)] +pub struct RemovalType { + #[serde(default = "default_removal")] + pub removal_type: String, +} + +fn default_removal() -> String { + "partial".into() +} + +pub async fn user_delete( + req: HttpRequest, + info: web::Path<(String,)>, + pool: web::Data, + removal_type: web::Query, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::USER_DELETE]), + ) + .await? 
+ .1; + let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?; + + if let Some(id) = id_option.map(|x| x.id) { + if !user.role.is_admin() && user.id != id.into() { + return Err(ApiError::CustomAuthentication( + "You do not have permission to delete this user!".to_string(), + )); + } + + let mut transaction = pool.begin().await?; + + let result = User::remove( + id, + removal_type.removal_type == "full", + &mut transaction, + &redis, + ) + .await?; + + transaction.commit().await?; + + if result.is_some() { + Ok(HttpResponse::NoContent().body("")) + } else { + Ok(HttpResponse::NotFound().body("")) + } + } else { + Ok(HttpResponse::NotFound().body("")) + } +} + +pub async fn user_follows( + req: HttpRequest, + info: web::Path<(String,)>, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::USER_READ]), + ) + .await? + .1; + let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?; + + if let Some(id) = id_option.map(|x| x.id) { + if !user.role.is_admin() && user.id != id.into() { + return Err(ApiError::CustomAuthentication( + "You do not have permission to see the projects this user follows!".to_string(), + )); + } + + use futures::TryStreamExt; + + let project_ids = sqlx::query!( + " + SELECT mf.mod_id FROM mod_follows mf + WHERE mf.follower_id = $1 + ", + id as crate::database::models::ids::UserId, + ) + .fetch_many(&**pool) + .try_filter_map(|e| async { + Ok(e.right() + .map(|m| crate::database::models::ProjectId(m.mod_id))) + }) + .try_collect::>() + .await?; + + let projects: Vec<_> = + crate::database::Project::get_many_ids(&project_ids, &**pool, &redis) + .await? 
+ .into_iter() + .map(Project::from) + .collect(); + + Ok(HttpResponse::Ok().json(projects)) + } else { + Ok(HttpResponse::NotFound().body("")) + } +} + +pub async fn user_notifications( + req: HttpRequest, + info: web::Path<(String,)>, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::NOTIFICATION_READ]), + ) + .await? + .1; + let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?; + + if let Some(id) = id_option.map(|x| x.id) { + if !user.role.is_admin() && user.id != id.into() { + return Err(ApiError::CustomAuthentication( + "You do not have permission to see the notifications of this user!".to_string(), + )); + } + + let mut notifications: Vec = + crate::database::models::notification_item::Notification::get_many_user( + id, &**pool, &redis, + ) + .await? + .into_iter() + .map(Into::into) + .collect(); + + notifications.sort_by(|a, b| b.created.cmp(&a.created)); + Ok(HttpResponse::Ok().json(notifications)) + } else { + Ok(HttpResponse::NotFound().body("")) + } +} + +pub async fn user_payouts( + req: HttpRequest, + info: web::Path<(String,)>, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::PAYOUTS_READ]), + ) + .await? 
+ .1; + let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?; + + if let Some(id) = id_option.map(|x| x.id) { + if !user.role.is_admin() && user.id != id.into() { + return Err(ApiError::CustomAuthentication( + "You do not have permission to see the payouts of this user!".to_string(), + )); + } + + let (all_time, last_month, payouts) = futures::future::try_join3( + sqlx::query!( + " + SELECT SUM(pv.amount) amount + FROM payouts_values pv + WHERE pv.user_id = $1 + ", + id as crate::database::models::UserId + ) + .fetch_one(&**pool), + sqlx::query!( + " + SELECT SUM(pv.amount) amount + FROM payouts_values pv + WHERE pv.user_id = $1 AND created > NOW() - '1 month'::interval + ", + id as crate::database::models::UserId + ) + .fetch_one(&**pool), + sqlx::query!( + " + SELECT hp.created, hp.amount, hp.status + FROM historical_payouts hp + WHERE hp.user_id = $1 + ORDER BY hp.created DESC + ", + id as crate::database::models::UserId + ) + .fetch_many(&**pool) + .try_filter_map(|e| async { + Ok(e.right().map(|row| Payout { + created: row.created, + amount: row.amount, + status: PayoutStatus::from_string(&row.status), + })) + }) + .try_collect::>(), + ) + .await?; + + use futures::TryStreamExt; + + Ok(HttpResponse::Ok().json(json!({ + "all_time": all_time.amount, + "last_month": last_month.amount, + "payouts": payouts, + }))) + } else { + Ok(HttpResponse::NotFound().body("")) + } +} + +#[derive(Deserialize)] +pub struct FeeEstimateAmount { + pub amount: Decimal, +} + +pub async fn user_payouts_fees( + req: HttpRequest, + info: web::Path<(String,)>, + web::Query(amount): web::Query, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, + payouts_queue: web::Data>, +) -> Result { + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::PAYOUTS_READ]), + ) + .await? 
+ .1; + let actual_user = User::get(&info.into_inner().0, &**pool, &redis).await?; + + if let Some(actual_user) = actual_user { + if !user.role.is_admin() && user.id != actual_user.id.into() { + return Err(ApiError::CustomAuthentication( + "You do not have permission to request payouts of this user!".to_string(), + )); + } + + if let Some(UserPayoutData { + trolley_id: Some(trolley_id), + .. + }) = user.payout_data + { + let payouts = payouts_queue + .lock() + .await + .get_estimated_fees(&trolley_id, amount.amount) + .await?; + + Ok(HttpResponse::Ok().json(payouts)) + } else { + Err(ApiError::InvalidInput( + "You must set up your trolley account first!".to_string(), + )) + } + } else { + Ok(HttpResponse::NotFound().body("")) + } +} + +#[derive(Deserialize)] +pub struct PayoutData { + pub amount: Decimal, +} + +pub async fn user_payouts_request( + req: HttpRequest, + info: web::Path<(String,)>, + pool: web::Data, + data: web::Json, + payouts_queue: web::Data>, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let mut payouts_queue = payouts_queue.lock().await; + + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::PAYOUTS_WRITE]), + ) + .await? + .1; + let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?; + + if let Some(id) = id_option.map(|x| x.id) { + if !user.role.is_admin() && user.id != id.into() { + return Err(ApiError::CustomAuthentication( + "You do not have permission to request payouts of this user!".to_string(), + )); + } + + if let Some(UserPayoutData { + trolley_id: Some(trolley_id), + trolley_status: Some(trolley_status), + balance, + .. 
+ }) = user.payout_data + { + if trolley_status == RecipientStatus::Active { + return if data.amount < balance { + let mut transaction = pool.begin().await?; + + let (batch_id, payment_id) = + payouts_queue.send_payout(&trolley_id, data.amount).await?; + + sqlx::query!( + " + INSERT INTO historical_payouts (user_id, amount, status, batch_id, payment_id) + VALUES ($1, $2, $3, $4, $5) + ", + id as crate::database::models::ids::UserId, + data.amount, + "processing", + batch_id, + payment_id, + ) + .execute(&mut *transaction) + .await?; + + sqlx::query!( + " + UPDATE users + SET balance = balance - $1 + WHERE id = $2 + ", + data.amount, + id as crate::database::models::ids::UserId + ) + .execute(&mut *transaction) + .await?; + + User::clear_caches(&[(id, None)], &redis).await?; + + transaction.commit().await?; + + Ok(HttpResponse::NoContent().body("")) + } else { + Err(ApiError::InvalidInput( + "You do not have enough funds to make this payout!".to_string(), + )) + }; + } else { + return Err(ApiError::InvalidInput( + "Please complete payout information via the trolley dashboard!".to_string(), + )); + } + } + + Err(ApiError::InvalidInput( + "You are not enrolled in the payouts program yet!".to_string(), + )) + } else { + Ok(HttpResponse::NotFound().body("")) + } +} diff --git a/src/routes/v3/version_creation.rs b/src/routes/v3/version_creation.rs new file mode 100644 index 00000000..524e0c45 --- /dev/null +++ b/src/routes/v3/version_creation.rs @@ -0,0 +1,969 @@ +use super::project_creation::{CreateError, UploadedFile}; +use crate::auth::get_user_from_headers; +use crate::database::models::loader_fields::{LoaderField, LoaderFieldEnumValue, VersionField}; +use crate::database::models::notification_item::NotificationBuilder; +use crate::database::models::version_item::{ + DependencyBuilder, VersionBuilder, VersionFileBuilder, +}; +use crate::database::models::{self, image_item, Organization}; +use crate::database::redis::RedisPool; +use crate::file_hosting::FileHost; +use 
crate::models::images::{Image, ImageContext, ImageId}; +use crate::models::notifications::NotificationBody; +use crate::models::pack::PackFileHash; +use crate::models::pats::Scopes; +use crate::models::projects::{skip_nulls, DependencyType}; +use crate::models::projects::{ + Dependency, FileType, Loader, ProjectId, Version, VersionFile, VersionId, VersionStatus, + VersionType, +}; +use crate::models::teams::ProjectPermissions; +use crate::queue::session::AuthQueue; +use crate::util::routes::read_from_field; +use crate::util::validate::validation_errors_to_string; +use crate::validate::{validate_file, ValidationResult}; +use actix_multipart::{Field, Multipart}; +use actix_web::web::Data; +use actix_web::{web, HttpRequest, HttpResponse}; +use chrono::Utc; +use futures::stream::StreamExt; +use itertools::Itertools; +use serde::{Deserialize, Serialize}; +use sqlx::postgres::PgPool; +use std::collections::HashMap; +use std::sync::Arc; +use validator::Validate; + +fn default_requested_status() -> VersionStatus { + VersionStatus::Listed +} + +#[derive(Serialize, Deserialize, Validate, Clone)] +pub struct InitialVersionData { + #[serde(alias = "mod_id")] + pub project_id: Option, + #[validate(length(min = 1, max = 256))] + pub file_parts: Vec, + #[validate( + length(min = 1, max = 32), + regex = "crate::util::validate::RE_URL_SAFE" + )] + pub version_number: String, + #[validate( + length(min = 1, max = 64), + custom(function = "crate::util::validate::validate_name") + )] + #[serde(alias = "name")] + pub version_title: String, + #[validate(length(max = 65536))] + #[serde(alias = "changelog")] + pub version_body: Option, + #[validate( + length(min = 0, max = 4096), + custom(function = "crate::util::validate::validate_deps") + )] + pub dependencies: Vec, + #[serde(alias = "version_type")] + pub release_channel: VersionType, + #[validate(length(min = 1))] + pub loaders: Vec, + pub featured: bool, + pub primary_file: Option, + #[serde(default = "default_requested_status")] + 
pub status: VersionStatus, + #[serde(default = "HashMap::new")] + pub file_types: HashMap>, + // Associations to uploaded images in changelog + #[validate(length(max = 10))] + #[serde(default)] + pub uploaded_images: Vec, + // The ordering relative to other versions + pub ordering: Option, + + // Flattened loader fields + // All other fields are loader-specific VersionFields + // These are flattened during serialization + #[serde(deserialize_with = "skip_nulls")] + #[serde(flatten)] + pub fields: HashMap, +} + +#[derive(Serialize, Deserialize, Clone)] +struct InitialFileData { + #[serde(default = "HashMap::new")] + pub file_types: HashMap>, +} + +// under `/api/v1/version` +pub async fn version_create( + req: HttpRequest, + mut payload: Multipart, + client: Data, + redis: Data, + file_host: Data>, + session_queue: Data, +) -> Result { + let mut transaction = client.begin().await?; + let mut uploaded_files = Vec::new(); + + let result = version_create_inner( + req, + &mut payload, + &mut transaction, + &redis, + &***file_host, + &mut uploaded_files, + &client, + &session_queue, + ) + .await; + + if result.is_err() { + let undo_result = + super::project_creation::undo_uploads(&***file_host, &uploaded_files).await; + let rollback_result = transaction.rollback().await; + + undo_result?; + if let Err(e) = rollback_result { + return Err(e.into()); + } + } else { + transaction.commit().await?; + } + + result +} + +#[allow(clippy::too_many_arguments)] +async fn version_create_inner( + req: HttpRequest, + payload: &mut Multipart, + transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, + redis: &RedisPool, + file_host: &dyn FileHost, + uploaded_files: &mut Vec, + pool: &PgPool, + session_queue: &AuthQueue, +) -> Result { + let cdn_url = dotenvy::var("CDN_URL")?; + + let mut initial_version_data = None; + let mut version_builder = None; + let mut selected_loaders = None; + + let user = get_user_from_headers( + &req, + pool, + redis, + session_queue, + 
Some(&[Scopes::VERSION_CREATE]), + ) + .await? + .1; + + let mut error = None; + while let Some(item) = payload.next().await { + let mut field: Field = item?; + + if error.is_some() { + continue; + } + + let result = async { + let content_disposition = field.content_disposition().clone(); + let name = content_disposition.get_name().ok_or_else(|| { + CreateError::MissingValueError("Missing content name".to_string()) + })?; + + if name == "data" { + let mut data = Vec::new(); + while let Some(chunk) = field.next().await { + data.extend_from_slice(&chunk?); + } + + let version_create_data: InitialVersionData = serde_json::from_slice(&data)?; + initial_version_data = Some(version_create_data); + let version_create_data = initial_version_data.as_ref().unwrap(); + if version_create_data.project_id.is_none() { + return Err(CreateError::MissingValueError( + "Missing project id".to_string(), + )); + } + + version_create_data.validate().map_err(|err| { + CreateError::ValidationError(validation_errors_to_string(err, None)) + })?; + + if !version_create_data.status.can_be_requested() { + return Err(CreateError::InvalidInput( + "Status specified cannot be requested".to_string(), + )); + } + + let project_id: models::ProjectId = version_create_data.project_id.unwrap().into(); + + // Ensure that the project this version is being added to exists + if models::Project::get_id(project_id, &mut **transaction, redis) + .await? + .is_none() + { + return Err(CreateError::InvalidInput( + "An invalid project id was supplied".to_string(), + )); + } + + // Check that the user creating this version is a team member + // of the project the version is being added to. 
+ let team_member = models::TeamMember::get_from_user_id_project( + project_id, + user.id.into(), + &mut **transaction, + ) + .await?; + + // Get organization attached, if exists, and the member project permissions + let organization = models::Organization::get_associated_organization_project_id( + project_id, + &mut **transaction, + ) + .await?; + + let organization_team_member = if let Some(organization) = &organization { + models::TeamMember::get_from_user_id( + organization.team_id, + user.id.into(), + &mut **transaction, + ) + .await? + } else { + None + }; + + let permissions = ProjectPermissions::get_permissions_by_role( + &user.role, + &team_member, + &organization_team_member, + ) + .unwrap_or_default(); + + if !permissions.contains(ProjectPermissions::UPLOAD_VERSION) { + return Err(CreateError::CustomAuthenticationError( + "You don't have permission to upload this version!".to_string(), + )); + } + + let version_id: VersionId = models::generate_version_id(transaction).await?.into(); + + let all_loaders = + models::loader_fields::Loader::list(&mut **transaction, redis).await?; + + let loader_fields = LoaderField::get_fields(&mut **transaction, redis).await?; + let mut version_fields = vec![]; + let mut loader_field_enum_values = LoaderFieldEnumValue::list_many_loader_fields( + &loader_fields, + &mut **transaction, + redis, + ) + .await?; + for (key, value) in version_create_data.fields.iter() { + let loader_field = loader_fields + .iter() + .find(|lf| &lf.field == key) + .ok_or_else(|| { + CreateError::InvalidInput(format!( + "Loader field '{key}' does not exist!" 
+ )) + })?; + let enum_variants = loader_field_enum_values + .remove(&loader_field.id) + .unwrap_or_default(); + let vf: VersionField = VersionField::check_parse( + version_id.into(), + loader_field.clone(), + value.clone(), + enum_variants, + ) + .map_err(CreateError::InvalidInput)?; + version_fields.push(vf); + } + + let loaders = version_create_data + .loaders + .iter() + .map(|x| { + all_loaders + .iter() + .find(|y| y.loader == x.0) + .cloned() + .ok_or_else(|| CreateError::InvalidLoader(x.0.clone())) + }) + .collect::, _>>()?; + selected_loaders = Some(loaders.clone()); + let loader_ids = loaders.iter().map(|y| y.id).collect_vec(); + + let dependencies = version_create_data + .dependencies + .iter() + .map(|d| models::version_item::DependencyBuilder { + version_id: d.version_id.map(|x| x.into()), + project_id: d.project_id.map(|x| x.into()), + dependency_type: d.dependency_type.to_string(), + file_name: None, + }) + .collect::>(); + + version_builder = Some(VersionBuilder { + version_id: version_id.into(), + project_id, + author_id: user.id.into(), + name: version_create_data.version_title.clone(), + version_number: version_create_data.version_number.clone(), + changelog: version_create_data.version_body.clone().unwrap_or_default(), + files: Vec::new(), + dependencies, + loaders: loader_ids, + version_fields, + version_type: version_create_data.release_channel.to_string(), + featured: version_create_data.featured, + status: version_create_data.status, + requested_status: None, + ordering: version_create_data.ordering, + }); + + return Ok(()); + } + + let version = version_builder.as_mut().ok_or_else(|| { + CreateError::InvalidInput(String::from("`data` field must come before file fields")) + })?; + let loaders = selected_loaders.as_ref().ok_or_else(|| { + CreateError::InvalidInput(String::from("`data` field must come before file fields")) + })?; + let loaders = loaders + .iter() + .map(|x| Loader(x.loader.clone())) + .collect::>(); + + let version_data = 
initial_version_data + .clone() + .ok_or_else(|| CreateError::InvalidInput("`data` field is required".to_string()))?; + + upload_file( + &mut field, + file_host, + version_data.file_parts.len(), + uploaded_files, + &mut version.files, + &mut version.dependencies, + &cdn_url, + &content_disposition, + version.project_id.into(), + version.version_id.into(), + &version.version_fields, + loaders, + version_data.primary_file.is_some(), + version_data.primary_file.as_deref() == Some(name), + version_data.file_types.get(name).copied().flatten(), + transaction, + redis, + ) + .await?; + + Ok(()) + } + .await; + + if result.is_err() { + error = result.err(); + } + } + + if let Some(error) = error { + return Err(error); + } + + let version_data = initial_version_data + .ok_or_else(|| CreateError::InvalidInput("`data` field is required".to_string()))?; + let builder = version_builder + .ok_or_else(|| CreateError::InvalidInput("`data` field is required".to_string()))?; + + if builder.files.is_empty() { + return Err(CreateError::InvalidInput( + "Versions must have at least one file uploaded to them".to_string(), + )); + } + + use futures::stream::TryStreamExt; + + let users = sqlx::query!( + " + SELECT follower_id FROM mod_follows + WHERE mod_id = $1 + ", + builder.project_id as crate::database::models::ids::ProjectId + ) + .fetch_many(&mut **transaction) + .try_filter_map(|e| async { Ok(e.right().map(|m| models::ids::UserId(m.follower_id))) }) + .try_collect::>() + .await?; + + let project_id: ProjectId = builder.project_id.into(); + let version_id: VersionId = builder.version_id.into(); + + NotificationBuilder { + body: NotificationBody::ProjectUpdate { + project_id, + version_id, + }, + } + .insert_many(users, &mut *transaction, redis) + .await?; + + let loader_structs = selected_loaders.unwrap_or_default(); + let (all_project_types, all_games): (Vec, Vec) = + loader_structs.iter().fold((vec![], vec![]), |mut acc, x| { + acc.0.extend_from_slice(&x.supported_project_types); + 
acc.1 + .extend(x.supported_games.iter().map(|x| x.name().to_string())); + acc + }); + + let response = Version { + id: builder.version_id.into(), + project_id: builder.project_id.into(), + author_id: user.id, + featured: builder.featured, + name: builder.name.clone(), + version_number: builder.version_number.clone(), + project_types: all_project_types, + games: all_games, + changelog: builder.changelog.clone(), + changelog_url: None, + date_published: Utc::now(), + downloads: 0, + version_type: version_data.release_channel, + status: builder.status, + requested_status: builder.requested_status, + ordering: builder.ordering, + files: builder + .files + .iter() + .map(|file| VersionFile { + hashes: file + .hashes + .iter() + .map(|hash| { + ( + hash.algorithm.clone(), + // This is a hack since the hashes are currently stored as ASCII + // in the database, but represented here as a Vec. At some + // point we need to change the hash to be the real bytes in the + // database and add more processing here. + String::from_utf8(hash.hash.clone()).unwrap(), + ) + }) + .collect(), + url: file.url.clone(), + filename: file.filename.clone(), + primary: file.primary, + size: file.size, + file_type: file.file_type, + }) + .collect::>(), + dependencies: version_data.dependencies, + loaders: version_data.loaders, + fields: version_data.fields, + }; + + let project_id = builder.project_id; + builder.insert(transaction).await?; + + for image_id in version_data.uploaded_images { + if let Some(db_image) = + image_item::Image::get(image_id.into(), &mut **transaction, redis).await? + { + let image: Image = db_image.into(); + if !matches!(image.context, ImageContext::Report { .. 
}) + || image.context.inner_id().is_some() + { + return Err(CreateError::InvalidInput(format!( + "Image {} is not unused and in the 'version' context", + image_id + ))); + } + + sqlx::query!( + " + UPDATE uploaded_images + SET version_id = $1 + WHERE id = $2 + ", + version_id.0 as i64, + image_id.0 as i64 + ) + .execute(&mut **transaction) + .await?; + + image_item::Image::clear_cache(image.id.into(), redis).await?; + } else { + return Err(CreateError::InvalidInput(format!( + "Image {} does not exist", + image_id + ))); + } + } + + models::Project::clear_cache(project_id, None, Some(true), redis).await?; + + Ok(HttpResponse::Ok().json(response)) +} + +pub async fn upload_file_to_version( + req: HttpRequest, + url_data: web::Path<(VersionId,)>, + mut payload: Multipart, + client: Data, + redis: Data, + file_host: Data>, + session_queue: web::Data, +) -> Result { + let mut transaction = client.begin().await?; + let mut uploaded_files = Vec::new(); + + let version_id = models::VersionId::from(url_data.into_inner().0); + + let result = upload_file_to_version_inner( + req, + &mut payload, + client, + &mut transaction, + redis, + &***file_host, + &mut uploaded_files, + version_id, + &session_queue, + ) + .await; + + if result.is_err() { + let undo_result = + super::project_creation::undo_uploads(&***file_host, &uploaded_files).await; + let rollback_result = transaction.rollback().await; + + undo_result?; + if let Err(e) = rollback_result { + return Err(e.into()); + } + } else { + transaction.commit().await?; + } + + result +} + +#[allow(clippy::too_many_arguments)] +async fn upload_file_to_version_inner( + req: HttpRequest, + payload: &mut Multipart, + client: Data, + transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, + redis: Data, + file_host: &dyn FileHost, + uploaded_files: &mut Vec, + version_id: models::VersionId, + session_queue: &AuthQueue, +) -> Result { + let cdn_url = dotenvy::var("CDN_URL")?; + + let mut initial_file_data: Option = None; + let mut 
file_builders: Vec = Vec::new(); + + let user = get_user_from_headers( + &req, + &**client, + &redis, + session_queue, + Some(&[Scopes::VERSION_WRITE]), + ) + .await? + .1; + + let result = models::Version::get(version_id, &**client, &redis).await?; + + let version = match result { + Some(v) => v, + None => { + return Err(CreateError::InvalidInput( + "An invalid version id was supplied".to_string(), + )); + } + }; + + let all_loaders = models::loader_fields::Loader::list(&mut **transaction, &redis).await?; + + // TODO: this coded is reused a lot, it should be refactored into a function + let selected_loaders = version + .loaders + .iter() + .map(|x| { + all_loaders + .iter() + .find(|y| &y.loader == x) + .cloned() + .ok_or_else(|| CreateError::InvalidLoader(x.clone())) + }) + .collect::, _>>()?; + + if models::Project::get_id(version.inner.project_id, &mut **transaction, &redis) + .await? + .is_none() + { + return Err(CreateError::InvalidInput( + "An invalid project id was supplied".to_string(), + )); + } + + if !user.role.is_admin() { + let team_member = models::TeamMember::get_from_user_id_project( + version.inner.project_id, + user.id.into(), + &mut **transaction, + ) + .await?; + + let organization = Organization::get_associated_organization_project_id( + version.inner.project_id, + &**client, + ) + .await?; + + let organization_team_member = if let Some(organization) = &organization { + models::TeamMember::get_from_user_id( + organization.team_id, + user.id.into(), + &mut **transaction, + ) + .await? 
+ } else { + None + }; + + let permissions = ProjectPermissions::get_permissions_by_role( + &user.role, + &team_member, + &organization_team_member, + ) + .unwrap_or_default(); + + if !permissions.contains(ProjectPermissions::UPLOAD_VERSION) { + return Err(CreateError::CustomAuthenticationError( + "You don't have permission to upload files to this version!".to_string(), + )); + } + } + + let project_id = ProjectId(version.inner.project_id.0 as u64); + let mut error = None; + while let Some(item) = payload.next().await { + let mut field: Field = item?; + + if error.is_some() { + continue; + } + + let result = async { + let content_disposition = field.content_disposition().clone(); + let name = content_disposition.get_name().ok_or_else(|| { + CreateError::MissingValueError("Missing content name".to_string()) + })?; + + if name == "data" { + let mut data = Vec::new(); + while let Some(chunk) = field.next().await { + data.extend_from_slice(&chunk?); + } + let file_data: InitialFileData = serde_json::from_slice(&data)?; + + initial_file_data = Some(file_data); + return Ok(()); + } + + let file_data = initial_file_data.as_ref().ok_or_else(|| { + CreateError::InvalidInput(String::from("`data` field must come before file fields")) + })?; + + let loaders = selected_loaders + .iter() + .map(|x| Loader(x.loader.clone())) + .collect::>(); + + let mut dependencies = version + .dependencies + .iter() + .map(|x| DependencyBuilder { + project_id: x.project_id, + version_id: x.version_id, + file_name: x.file_name.clone(), + dependency_type: x.dependency_type.clone(), + }) + .collect(); + + upload_file( + &mut field, + file_host, + 0, + uploaded_files, + &mut file_builders, + &mut dependencies, + &cdn_url, + &content_disposition, + project_id, + version_id.into(), + &version.version_fields, + loaders, + true, + false, + file_data.file_types.get(name).copied().flatten(), + transaction, + &redis, + ) + .await?; + + Ok(()) + } + .await; + + if result.is_err() { + error = result.err(); 
+ } + } + + if let Some(error) = error { + return Err(error); + } + + if file_builders.is_empty() { + return Err(CreateError::InvalidInput( + "At least one file must be specified".to_string(), + )); + } else { + VersionFileBuilder::insert_many(file_builders, version_id, &mut *transaction).await?; + } + + // Clear version cache + models::Version::clear_cache(&version, &redis).await?; + + Ok(HttpResponse::NoContent().body("")) +} + +// This function is used for adding a file to a version, uploading the initial +// files for a version, and for uploading the initial version files for a project +#[allow(clippy::too_many_arguments)] +pub async fn upload_file( + field: &mut Field, + file_host: &dyn FileHost, + total_files_len: usize, + uploaded_files: &mut Vec, + version_files: &mut Vec, + dependencies: &mut Vec, + cdn_url: &str, + content_disposition: &actix_web::http::header::ContentDisposition, + project_id: ProjectId, + version_id: VersionId, + version_fields: &[VersionField], + loaders: Vec, + ignore_primary: bool, + force_primary: bool, + file_type: Option, + transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, + redis: &RedisPool, +) -> Result<(), CreateError> { + let (file_name, file_extension) = get_name_ext(content_disposition)?; + + if file_name.contains('/') { + return Err(CreateError::InvalidInput( + "File names must not contain slashes!".to_string(), + )); + } + + let content_type = crate::util::ext::project_file_type(file_extension) + .ok_or_else(|| CreateError::InvalidFileType(file_extension.to_string()))?; + + let data = read_from_field( + field, 500 * (1 << 20), + "Project file exceeds the maximum of 500MiB. Contact a moderator or admin to request permission to upload larger files." 
+ ).await?; + + let hash = sha1::Sha1::from(&data).hexdigest(); + let exists = sqlx::query!( + " + SELECT EXISTS(SELECT 1 FROM hashes h + INNER JOIN files f ON f.id = h.file_id + INNER JOIN versions v ON v.id = f.version_id + WHERE h.algorithm = $2 AND h.hash = $1 AND v.mod_id != $3) + ", + hash.as_bytes(), + "sha1", + project_id.0 as i64 + ) + .fetch_one(&mut **transaction) + .await? + .exists + .unwrap_or(false); + + if exists { + return Err(CreateError::InvalidInput( + "Duplicate files are not allowed to be uploaded to Modrinth!".to_string(), + )); + } + + let validation_result = validate_file( + data.clone().into(), + file_extension.to_string(), + loaders.clone(), + file_type, + version_fields.to_vec(), + &mut *transaction, + redis, + ) + .await?; + + if let ValidationResult::PassWithPackDataAndFiles { + ref format, + ref files, + } = validation_result + { + if dependencies.is_empty() { + let hashes: Vec> = format + .files + .iter() + .filter_map(|x| x.hashes.get(&PackFileHash::Sha1)) + .map(|x| x.as_bytes().to_vec()) + .collect(); + + let res = sqlx::query!( + " + SELECT v.id version_id, v.mod_id project_id, h.hash hash FROM hashes h + INNER JOIN files f on h.file_id = f.id + INNER JOIN versions v on f.version_id = v.id + WHERE h.algorithm = 'sha1' AND h.hash = ANY($1) + ", + &*hashes + ) + .fetch_all(&mut **transaction) + .await?; + + for file in &format.files { + if let Some(dep) = res.iter().find(|x| { + Some(&*x.hash) == file.hashes.get(&PackFileHash::Sha1).map(|x| x.as_bytes()) + }) { + dependencies.push(DependencyBuilder { + project_id: Some(models::ProjectId(dep.project_id)), + version_id: Some(models::VersionId(dep.version_id)), + file_name: None, + dependency_type: DependencyType::Embedded.to_string(), + }); + } else if let Some(first_download) = file.downloads.first() { + dependencies.push(DependencyBuilder { + project_id: None, + version_id: None, + file_name: Some( + first_download + .rsplit('/') + .next() + .unwrap_or(first_download) + 
.to_string(), + ), + dependency_type: DependencyType::Embedded.to_string(), + }); + } + } + + for file in files { + if !file.is_empty() { + dependencies.push(DependencyBuilder { + project_id: None, + version_id: None, + file_name: Some(file.to_string()), + dependency_type: DependencyType::Embedded.to_string(), + }); + } + } + } + } + + let data = data.freeze(); + let primary = (version_files.iter().all(|x| !x.primary) && !ignore_primary) + || force_primary + || total_files_len == 1; + + let file_path_encode = format!( + "data/{}/versions/{}/{}", + project_id, + version_id, + urlencoding::encode(file_name) + ); + let file_path = format!("data/{}/versions/{}/{}", project_id, version_id, &file_name); + + let upload_data = file_host + .upload_file(content_type, &file_path, data) + .await?; + + uploaded_files.push(UploadedFile { + file_id: upload_data.file_id, + file_name: file_path, + }); + + let sha1_bytes = upload_data.content_sha1.into_bytes(); + let sha512_bytes = upload_data.content_sha512.into_bytes(); + + if version_files.iter().any(|x| { + x.hashes + .iter() + .any(|y| y.hash == sha1_bytes || y.hash == sha512_bytes) + }) { + return Err(CreateError::InvalidInput( + "Duplicate files are not allowed to be uploaded to Modrinth!".to_string(), + )); + } + + version_files.push(VersionFileBuilder { + filename: file_name.to_string(), + url: format!("{cdn_url}/{file_path_encode}"), + hashes: vec![ + models::version_item::HashBuilder { + algorithm: "sha1".to_string(), + // This is an invalid cast - the database expects the hash's + // bytes, but this is the string version. + hash: sha1_bytes, + }, + models::version_item::HashBuilder { + algorithm: "sha512".to_string(), + // This is an invalid cast - the database expects the hash's + // bytes, but this is the string version. 
+ hash: sha512_bytes, + }, + ], + primary, + size: upload_data.content_length, + file_type, + }); + + Ok(()) +} + +pub fn get_name_ext( + content_disposition: &actix_web::http::header::ContentDisposition, +) -> Result<(&str, &str), CreateError> { + let file_name = content_disposition + .get_filename() + .ok_or_else(|| CreateError::MissingValueError("Missing content file name".to_string()))?; + let file_extension = if let Some(last_period) = file_name.rfind('.') { + file_name.get((last_period + 1)..).unwrap_or("") + } else { + return Err(CreateError::MissingValueError( + "Missing content file extension".to_string(), + )); + }; + Ok((file_name, file_extension)) +} diff --git a/src/routes/v3/version_file.rs b/src/routes/v3/version_file.rs new file mode 100644 index 00000000..558cf5f9 --- /dev/null +++ b/src/routes/v3/version_file.rs @@ -0,0 +1,664 @@ +use super::ApiError; +use crate::auth::{ + filter_authorized_projects, filter_authorized_versions, get_user_from_headers, + is_authorized_version, +}; +use crate::database::redis::RedisPool; +use crate::models::ids::VersionId; +use crate::models::pats::Scopes; +use crate::models::projects::VersionType; +use crate::models::teams::ProjectPermissions; +use crate::queue::session::AuthQueue; +use crate::{database, models}; +use actix_web::{web, HttpRequest, HttpResponse}; +use itertools::Itertools; +use serde::{Deserialize, Serialize}; +use sqlx::PgPool; +use std::collections::HashMap; + +pub fn config(cfg: &mut web::ServiceConfig) { + cfg.service( + web::scope("version_file") + .route("version_id", web::get().to(get_version_from_hash)) + .route("{version_id}/update", web::post().to(get_update_from_hash)) + .route("project", web::post().to(get_projects_from_hashes)) + .route("{version_id}", web::delete().to(delete_file)) + .route("{version_id}/download", web::get().to(download_version)), + ); + cfg.service( + web::scope("version_files") + .route("update", web::post().to(update_files)) + .route("update_individual", 
web::post().to(update_individual_files)) + .route("", web::post().to(get_versions_from_hashes)), + ); +} + +pub async fn get_version_from_hash( + req: HttpRequest, + info: web::Path<(String,)>, + pool: web::Data, + redis: web::Data, + hash_query: web::Query, + session_queue: web::Data, +) -> Result { + let user_option = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::VERSION_READ]), + ) + .await + .map(|x| x.1) + .ok(); + let hash = info.into_inner().0.to_lowercase(); + let file = database::models::Version::get_file_from_hash( + hash_query.algorithm.clone(), + hash, + hash_query.version_id.map(|x| x.into()), + &**pool, + &redis, + ) + .await?; + if let Some(file) = file { + let version = database::models::Version::get(file.version_id, &**pool, &redis).await?; + if let Some(version) = version { + if !is_authorized_version(&version.inner, &user_option, &pool).await? { + return Ok(HttpResponse::NotFound().body("")); + } + + Ok(HttpResponse::Ok().json(models::projects::Version::from(version))) + } else { + Ok(HttpResponse::NotFound().body("")) + } + } else { + Ok(HttpResponse::NotFound().body("")) + } +} + +#[derive(Serialize, Deserialize)] +pub struct HashQuery { + #[serde(default = "default_algorithm")] + pub algorithm: String, + pub version_id: Option, +} + +pub fn default_algorithm() -> String { + "sha1".into() +} + +#[derive(Serialize, Deserialize)] +pub struct UpdateData { + pub loaders: Option>, + pub version_types: Option>, + /* + Loader fields to filter with: + "game_versions": ["1.16.5", "1.17"] + + Returns if it matches any of the values + */ + pub loader_fields: Option>>, +} + +pub async fn get_update_from_hash( + req: HttpRequest, + info: web::Path<(String,)>, + pool: web::Data, + redis: web::Data, + hash_query: web::Query, + update_data: web::Json, + session_queue: web::Data, +) -> Result { + let user_option = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::VERSION_READ]), + ) + 
.await + .map(|x| x.1) + .ok(); + let hash = info.into_inner().0.to_lowercase(); + + if let Some(file) = database::models::Version::get_file_from_hash( + hash_query.algorithm.clone(), + hash, + hash_query.version_id.map(|x| x.into()), + &**pool, + &redis, + ) + .await? + { + if let Some(project) = + database::models::Project::get_id(file.project_id, &**pool, &redis).await? + { + let versions = database::models::Version::get_many(&project.versions, &**pool, &redis) + .await? + .into_iter() + .filter(|x| { + let mut bool = true; + if let Some(version_types) = &update_data.version_types { + bool &= version_types + .iter() + .any(|y| y.as_str() == x.inner.version_type); + } + if let Some(loaders) = &update_data.loaders { + bool &= x.loaders.iter().any(|y| loaders.contains(y)); + } + if let Some(loader_fields) = &update_data.loader_fields { + for (key, values) in loader_fields { + bool &= if let Some(x_vf) = + x.version_fields.iter().find(|y| y.field_name == *key) + { + values.iter().any(|v| x_vf.value.contains_json_value(v)) + } else { + true + }; + } + } + bool + }) + .sorted(); + + if let Some(first) = versions.last() { + if !is_authorized_version(&first.inner, &user_option, &pool).await? 
{ + return Ok(HttpResponse::NotFound().body("")); + } + + return Ok(HttpResponse::Ok().json(models::projects::Version::from(first))); + } + } + } + + Ok(HttpResponse::NotFound().body("")) +} + +// Requests above with multiple versions below +#[derive(Deserialize)] +pub struct FileHashes { + #[serde(default = "default_algorithm")] + pub algorithm: String, + pub hashes: Vec, +} + +pub async fn get_versions_from_hashes( + req: HttpRequest, + pool: web::Data, + redis: web::Data, + file_data: web::Json, + session_queue: web::Data, +) -> Result { + let user_option = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::VERSION_READ]), + ) + .await + .map(|x| x.1) + .ok(); + + let files = database::models::Version::get_files_from_hash( + file_data.algorithm.clone(), + &file_data.hashes, + &**pool, + &redis, + ) + .await?; + + let version_ids = files.iter().map(|x| x.version_id).collect::>(); + let versions_data = filter_authorized_versions( + database::models::Version::get_many(&version_ids, &**pool, &redis).await?, + &user_option, + &pool, + ) + .await?; + + let mut response = HashMap::new(); + + for version in versions_data { + for file in files.iter().filter(|x| x.version_id == version.id.into()) { + if let Some(hash) = file.hashes.get(&file_data.algorithm) { + response.insert(hash.clone(), version.clone()); + } + } + } + + Ok(HttpResponse::Ok().json(response)) +} + +pub async fn get_projects_from_hashes( + req: HttpRequest, + pool: web::Data, + redis: web::Data, + file_data: web::Json, + session_queue: web::Data, +) -> Result { + let user_option = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::PROJECT_READ, Scopes::VERSION_READ]), + ) + .await + .map(|x| x.1) + .ok(); + + let files = database::models::Version::get_files_from_hash( + file_data.algorithm.clone(), + &file_data.hashes, + &**pool, + &redis, + ) + .await?; + + let project_ids = files.iter().map(|x| x.project_id).collect::>(); + + let 
projects_data = filter_authorized_projects( + database::models::Project::get_many_ids(&project_ids, &**pool, &redis).await?, + &user_option, + &pool, + ) + .await?; + + let mut response = HashMap::new(); + + for project in projects_data { + for file in files.iter().filter(|x| x.project_id == project.id.into()) { + if let Some(hash) = file.hashes.get(&file_data.algorithm) { + response.insert(hash.clone(), project.clone()); + } + } + } + + Ok(HttpResponse::Ok().json(response)) +} + +#[derive(Deserialize)] +pub struct ManyUpdateData { + #[serde(default = "default_algorithm")] + pub algorithm: String, + pub hashes: Vec, + pub loaders: Option>, + pub loader_fields: Option>>, + pub version_types: Option>, +} +pub async fn update_files( + req: HttpRequest, + pool: web::Data, + redis: web::Data, + update_data: web::Json, + session_queue: web::Data, +) -> Result { + let user_option = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::VERSION_READ]), + ) + .await + .map(|x| x.1) + .ok(); + + let files = database::models::Version::get_files_from_hash( + update_data.algorithm.clone(), + &update_data.hashes, + &**pool, + &redis, + ) + .await?; + + let projects = database::models::Project::get_many_ids( + &files.iter().map(|x| x.project_id).collect::>(), + &**pool, + &redis, + ) + .await?; + let all_versions = database::models::Version::get_many( + &projects + .iter() + .flat_map(|x| x.versions.clone()) + .collect::>(), + &**pool, + &redis, + ) + .await?; + + let mut response = HashMap::new(); + + for project in projects { + for file in files.iter().filter(|x| x.project_id == project.inner.id) { + let version = all_versions + .iter() + .filter(|x| x.inner.project_id == file.project_id) + .filter(|x| { + // TODO: Behaviour here is repeated in a few other filtering places, should be abstracted + let mut bool = true; + + if let Some(version_types) = &update_data.version_types { + bool &= version_types + .iter() + .any(|y| y.as_str() == 
x.inner.version_type); + } + if let Some(loaders) = &update_data.loaders { + bool &= x.loaders.iter().any(|y| loaders.contains(y)); + } + if let Some(loader_fields) = &update_data.loader_fields { + for (key, values) in loader_fields { + bool &= if let Some(x_vf) = + x.version_fields.iter().find(|y| y.field_name == *key) + { + values.iter().any(|v| x_vf.value.contains_json_value(v)) + } else { + true + }; + } + } + + bool + }) + .sorted() + .last(); + + if let Some(version) = version { + if is_authorized_version(&version.inner, &user_option, &pool).await? { + if let Some(hash) = file.hashes.get(&update_data.algorithm) { + response.insert( + hash.clone(), + models::projects::Version::from(version.clone()), + ); + } + } + } + } + } + + Ok(HttpResponse::Ok().json(response)) +} + +#[derive(Deserialize)] +pub struct FileUpdateData { + pub hash: String, + pub loaders: Option>, + pub loader_fields: Option>>, + pub version_types: Option>, +} + +#[derive(Deserialize)] +pub struct ManyFileUpdateData { + #[serde(default = "default_algorithm")] + pub algorithm: String, + pub hashes: Vec, +} + +pub async fn update_individual_files( + req: HttpRequest, + pool: web::Data, + redis: web::Data, + update_data: web::Json, + session_queue: web::Data, +) -> Result { + let user_option = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::VERSION_READ]), + ) + .await + .map(|x| x.1) + .ok(); + + let files = database::models::Version::get_files_from_hash( + update_data.algorithm.clone(), + &update_data + .hashes + .iter() + .map(|x| x.hash.clone()) + .collect::>(), + &**pool, + &redis, + ) + .await?; + + let projects = database::models::Project::get_many_ids( + &files.iter().map(|x| x.project_id).collect::>(), + &**pool, + &redis, + ) + .await?; + let all_versions = database::models::Version::get_many( + &projects + .iter() + .flat_map(|x| x.versions.clone()) + .collect::>(), + &**pool, + &redis, + ) + .await?; + + let mut response = HashMap::new(); + + 
for project in projects { + for file in files.iter().filter(|x| x.project_id == project.inner.id) { + if let Some(hash) = file.hashes.get(&update_data.algorithm) { + if let Some(query_file) = update_data.hashes.iter().find(|x| &x.hash == hash) { + let version = all_versions + .iter() + .filter(|x| x.inner.project_id == file.project_id) + .filter(|x| { + let mut bool = true; + + if let Some(version_types) = &query_file.version_types { + bool &= version_types + .iter() + .any(|y| y.as_str() == x.inner.version_type); + } + if let Some(loaders) = &query_file.loaders { + bool &= x.loaders.iter().any(|y| loaders.contains(y)); + } + if let Some(loader_fields) = &query_file.loader_fields { + for (key, values) in loader_fields { + bool &= if let Some(x_vf) = + x.version_fields.iter().find(|y| y.field_name == *key) + { + values.iter().any(|v| x_vf.value.contains_json_value(v)) + } else { + true + }; + } + } + + bool + }) + .sorted() + .last(); + + if let Some(version) = version { + if is_authorized_version(&version.inner, &user_option, &pool).await? { + response.insert( + hash.clone(), + models::projects::Version::from(version.clone()), + ); + } + } + } + } + } + } + + Ok(HttpResponse::Ok().json(response)) +} + +// under /api/v1/version_file/{hash} +pub async fn delete_file( + req: HttpRequest, + info: web::Path<(String,)>, + pool: web::Data, + redis: web::Data, + hash_query: web::Query, + session_queue: web::Data, +) -> Result { + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::VERSION_WRITE]), + ) + .await? 
+ .1; + + let hash = info.into_inner().0.to_lowercase(); + + let file = database::models::Version::get_file_from_hash( + hash_query.algorithm.clone(), + hash, + hash_query.version_id.map(|x| x.into()), + &**pool, + &redis, + ) + .await?; + + if let Some(row) = file { + if !user.role.is_admin() { + let team_member = database::models::TeamMember::get_from_user_id_version( + row.version_id, + user.id.into(), + &**pool, + ) + .await + .map_err(ApiError::Database)?; + + let organization = + database::models::Organization::get_associated_organization_project_id( + row.project_id, + &**pool, + ) + .await + .map_err(ApiError::Database)?; + + let organization_team_member = if let Some(organization) = &organization { + database::models::TeamMember::get_from_user_id_organization( + organization.id, + user.id.into(), + &**pool, + ) + .await + .map_err(ApiError::Database)? + } else { + None + }; + + let permissions = ProjectPermissions::get_permissions_by_role( + &user.role, + &team_member, + &organization_team_member, + ) + .unwrap_or_default(); + + if !permissions.contains(ProjectPermissions::DELETE_VERSION) { + return Err(ApiError::CustomAuthentication( + "You don't have permission to delete this file!".to_string(), + )); + } + } + + let version = database::models::Version::get(row.version_id, &**pool, &redis).await?; + if let Some(version) = version { + if version.files.len() < 2 { + return Err(ApiError::InvalidInput( + "Versions must have at least one file uploaded to them".to_string(), + )); + } + + database::models::Version::clear_cache(&version, &redis).await?; + } + + let mut transaction = pool.begin().await?; + + sqlx::query!( + " + DELETE FROM hashes + WHERE file_id = $1 + ", + row.id.0 + ) + .execute(&mut *transaction) + .await?; + + sqlx::query!( + " + DELETE FROM files + WHERE files.id = $1 + ", + row.id.0, + ) + .execute(&mut *transaction) + .await?; + + transaction.commit().await?; + + Ok(HttpResponse::NoContent().body("")) + } else { + 
Ok(HttpResponse::NotFound().body("")) + } +} + +#[derive(Serialize, Deserialize)] +pub struct DownloadRedirect { + pub url: String, +} + +// under /api/v1/version_file/{hash}/download +pub async fn download_version( + req: HttpRequest, + info: web::Path<(String,)>, + pool: web::Data, + redis: web::Data, + hash_query: web::Query, + session_queue: web::Data, +) -> Result { + let user_option = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::VERSION_READ]), + ) + .await + .map(|x| x.1) + .ok(); + + let hash = info.into_inner().0.to_lowercase(); + let file = database::models::Version::get_file_from_hash( + hash_query.algorithm.clone(), + hash, + hash_query.version_id.map(|x| x.into()), + &**pool, + &redis, + ) + .await?; + + if let Some(file) = file { + let version = database::models::Version::get(file.version_id, &**pool, &redis).await?; + + if let Some(version) = version { + if !is_authorized_version(&version.inner, &user_option, &pool).await? { + return Ok(HttpResponse::NotFound().body("")); + } + + Ok(HttpResponse::TemporaryRedirect() + .append_header(("Location", &*file.url)) + .json(DownloadRedirect { url: file.url })) + } else { + Ok(HttpResponse::NotFound().body("")) + } + } else { + Ok(HttpResponse::NotFound().body("")) + } +} diff --git a/src/routes/v3/versions.rs b/src/routes/v3/versions.rs new file mode 100644 index 00000000..b3504ee9 --- /dev/null +++ b/src/routes/v3/versions.rs @@ -0,0 +1,1005 @@ +use std::collections::HashMap; + +use super::ApiError; +use crate::auth::{ + filter_authorized_versions, get_user_from_headers, is_authorized, is_authorized_version, +}; +use crate::database; +use crate::database::models::loader_fields::{LoaderField, LoaderFieldEnumValue, VersionField}; +use crate::database::models::version_item::{DependencyBuilder, LoaderVersion}; +use crate::database::models::{image_item, Organization}; +use crate::database::redis::RedisPool; +use crate::models; +use 
crate::models::ids::base62_impl::parse_base62; +use crate::models::ids::VersionId; +use crate::models::images::ImageContext; +use crate::models::pats::Scopes; +use crate::models::projects::{skip_nulls, Loader}; +use crate::models::projects::{Dependency, FileType, VersionStatus, VersionType}; +use crate::models::teams::ProjectPermissions; +use crate::queue::session::AuthQueue; +use crate::util::img; +use crate::util::validate::validation_errors_to_string; +use actix_web::{web, HttpRequest, HttpResponse}; +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use sqlx::PgPool; +use validator::Validate; + +pub fn config(cfg: &mut web::ServiceConfig) { + cfg.route( + "version", + web::post().to(super::version_creation::version_create), + ); + cfg.route("versions", web::get().to(versions_get)); + + cfg.service( + web::scope("version") + .route("{id}", web::get().to(version_get)) + .route("{id}", web::patch().to(version_edit)) + .route("{id}", web::delete().to(version_delete)) + .route("{id}/schedule", web::post().to(version_schedule)) + .route( + "{version_id}/file", + web::post().to(super::version_creation::upload_file_to_version), + ), + ); +} + +// Given a project ID/slug and a version slug +pub async fn version_project_get( + req: HttpRequest, + info: web::Path<(String, String)>, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let info = info.into_inner(); + version_project_get_helper(req, info, pool, redis, session_queue).await +} +pub async fn version_project_get_helper( + req: HttpRequest, + id: (String, String), + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let result = database::models::Project::get(&id.0, &**pool, &redis).await?; + + let user_option = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::PROJECT_READ, Scopes::VERSION_READ]), + ) + .await + .map(|x| x.1) + .ok(); + + if let Some(project) = result { + if 
!is_authorized(&project.inner, &user_option, &pool).await? { + return Ok(HttpResponse::NotFound().body("")); + } + + let versions = + database::models::Version::get_many(&project.versions, &**pool, &redis).await?; + + let id_opt = parse_base62(&id.1).ok(); + let version = versions + .into_iter() + .find(|x| Some(x.inner.id.0 as u64) == id_opt || x.inner.version_number == id.1); + + if let Some(version) = version { + if is_authorized_version(&version.inner, &user_option, &pool).await? { + return Ok(HttpResponse::Ok().json(models::projects::Version::from(version))); + } + } + } + + Ok(HttpResponse::NotFound().body("")) +} + +#[derive(Serialize, Deserialize)] +pub struct VersionIds { + pub ids: String, +} + +pub async fn versions_get( + req: HttpRequest, + web::Query(ids): web::Query, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let version_ids = serde_json::from_str::>(&ids.ids)? + .into_iter() + .map(|x| x.into()) + .collect::>(); + let versions_data = database::models::Version::get_many(&version_ids, &**pool, &redis).await?; + + let user_option = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::VERSION_READ]), + ) + .await + .map(|x| x.1) + .ok(); + + let versions = filter_authorized_versions(versions_data, &user_option, &pool).await?; + + Ok(HttpResponse::Ok().json(versions)) +} + +pub async fn version_get( + req: HttpRequest, + info: web::Path<(models::ids::VersionId,)>, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let id = info.into_inner().0; + version_get_helper(req, id, pool, redis, session_queue).await +} + +pub async fn version_get_helper( + req: HttpRequest, + id: models::ids::VersionId, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let version_data = database::models::Version::get(id.into(), &**pool, &redis).await?; + + let user_option = get_user_from_headers( + &req, + &**pool, + &redis, + 
&session_queue, + Some(&[Scopes::VERSION_READ]), + ) + .await + .map(|x| x.1) + .ok(); + + if let Some(data) = version_data { + if is_authorized_version(&data.inner, &user_option, &pool).await? { + return Ok(HttpResponse::Ok().json(models::projects::Version::from(data))); + } + } + + Ok(HttpResponse::NotFound().body("")) +} + +#[derive(Serialize, Deserialize, Validate, Default, Debug)] +pub struct EditVersion { + #[validate( + length(min = 1, max = 64), + custom(function = "crate::util::validate::validate_name") + )] + pub name: Option, + #[validate( + length(min = 1, max = 32), + regex = "crate::util::validate::RE_URL_SAFE" + )] + pub version_number: Option, + #[validate(length(max = 65536))] + pub changelog: Option, + pub version_type: Option, + #[validate( + length(min = 0, max = 4096), + custom(function = "crate::util::validate::validate_deps") + )] + pub dependencies: Option>, + pub loaders: Option>, + pub featured: Option, + pub primary_file: Option<(String, String)>, + pub downloads: Option, + pub status: Option, + pub file_types: Option>, + + pub ordering: Option>, //TODO: How do you actually pass this in json? 
+ + // Flattened loader fields + // All other fields are loader-specific VersionFields + // These are flattened during serialization + #[serde(deserialize_with = "skip_nulls")] + #[serde(flatten)] + pub fields: HashMap, +} + +#[derive(Serialize, Deserialize, Debug)] +pub struct EditVersionFileType { + pub algorithm: String, + pub hash: String, + pub file_type: Option, +} + +// TODO: Avoid this 'helper' pattern here and similar fnunctoins- a macro might be the best bet here to ensure it's callable from both v2 and v3 +// (web::Path can't be recreated naturally) +pub async fn version_edit( + req: HttpRequest, + info: web::Path<(VersionId,)>, + pool: web::Data, + redis: web::Data, + new_version: web::Json, + session_queue: web::Data, +) -> Result { + let new_version: EditVersion = serde_json::from_value(new_version.into_inner())?; + version_edit_helper( + req, + info.into_inner(), + pool, + redis, + new_version, + session_queue, + ) + .await +} +pub async fn version_edit_helper( + req: HttpRequest, + info: (VersionId,), + pool: web::Data, + redis: web::Data, + new_version: EditVersion, + session_queue: web::Data, +) -> Result { + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::VERSION_WRITE]), + ) + .await? 
+ .1; + + new_version + .validate() + .map_err(|err| ApiError::Validation(validation_errors_to_string(err, None)))?; + + let version_id = info.0; + let id = version_id.into(); + + let result = database::models::Version::get(id, &**pool, &redis).await?; + + if let Some(version_item) = result { + let team_member = database::models::TeamMember::get_from_user_id_project( + version_item.inner.project_id, + user.id.into(), + &**pool, + ) + .await?; + + let organization = Organization::get_associated_organization_project_id( + version_item.inner.project_id, + &**pool, + ) + .await?; + + let organization_team_member = if let Some(organization) = &organization { + database::models::TeamMember::get_from_user_id( + organization.team_id, + user.id.into(), + &**pool, + ) + .await? + } else { + None + }; + + let permissions = ProjectPermissions::get_permissions_by_role( + &user.role, + &team_member, + &organization_team_member, + ); + + if let Some(perms) = permissions { + if !perms.contains(ProjectPermissions::UPLOAD_VERSION) { + return Err(ApiError::CustomAuthentication( + "You do not have the permissions to edit this version!".to_string(), + )); + } + + let mut transaction = pool.begin().await?; + + if let Some(name) = &new_version.name { + sqlx::query!( + " + UPDATE versions + SET name = $1 + WHERE (id = $2) + ", + name.trim(), + id as database::models::ids::VersionId, + ) + .execute(&mut *transaction) + .await?; + } + + if let Some(number) = &new_version.version_number { + sqlx::query!( + " + UPDATE versions + SET version_number = $1 + WHERE (id = $2) + ", + number, + id as database::models::ids::VersionId, + ) + .execute(&mut *transaction) + .await?; + } + + if let Some(version_type) = &new_version.version_type { + sqlx::query!( + " + UPDATE versions + SET version_type = $1 + WHERE (id = $2) + ", + version_type.as_str(), + id as database::models::ids::VersionId, + ) + .execute(&mut *transaction) + .await?; + } + + if let Some(dependencies) = &new_version.dependencies { + 
// TODO: Re-add this exclusions when modpack also has separate dependency retrieval that was removed from validators + // if let Some(project) = project_item { + // if project.project_type != "modpack" { + sqlx::query!( + " + DELETE FROM dependencies WHERE dependent_id = $1 + ", + id as database::models::ids::VersionId, + ) + .execute(&mut *transaction) + .await?; + + let builders = dependencies + .iter() + .map(|x| database::models::version_item::DependencyBuilder { + project_id: x.project_id.map(|x| x.into()), + version_id: x.version_id.map(|x| x.into()), + file_name: x.file_name.clone(), + dependency_type: x.dependency_type.to_string(), + }) + .collect::>(); + + DependencyBuilder::insert_many(builders, version_item.inner.id, &mut transaction) + .await?; + // } + // } + } + + if !new_version.fields.is_empty() { + let version_fields_names = new_version + .fields + .keys() + .map(|x| x.to_string()) + .collect::>(); + + let loader_fields = LoaderField::get_fields(&mut *transaction, &redis) + .await? + .into_iter() + .filter(|lf| version_fields_names.contains(&lf.field)) + .collect::>(); + + let loader_field_ids = loader_fields.iter().map(|lf| lf.id.0).collect::>(); + sqlx::query!( + " + DELETE FROM version_fields + WHERE version_id = $1 + AND field_id = ANY($2) + ", + id as database::models::ids::VersionId, + &loader_field_ids + ) + .execute(&mut *transaction) + .await?; + + let mut loader_field_enum_values = LoaderFieldEnumValue::list_many_loader_fields( + &loader_fields, + &mut *transaction, + &redis, + ) + .await?; + + let mut version_fields = Vec::new(); + for (vf_name, vf_value) in new_version.fields { + let loader_field = loader_fields + .iter() + .find(|lf| lf.field == vf_name) + .ok_or_else(|| { + ApiError::InvalidInput(format!( + "Loader field '{vf_name}' does not exist." 
+ )) + })?; + let enum_variants = loader_field_enum_values + .remove(&loader_field.id) + .unwrap_or_default(); + let vf: VersionField = VersionField::check_parse( + version_id.into(), + loader_field.clone(), + vf_value.clone(), + enum_variants, + ) + .map_err(ApiError::InvalidInput)?; + version_fields.push(vf); + } + VersionField::insert_many(version_fields, &mut transaction).await?; + } + + if let Some(loaders) = &new_version.loaders { + sqlx::query!( + " + DELETE FROM loaders_versions WHERE version_id = $1 + ", + id as database::models::ids::VersionId, + ) + .execute(&mut *transaction) + .await?; + + let mut loader_versions = Vec::new(); + for loader in loaders { + let loader_id = database::models::loader_fields::Loader::get_id( + &loader.0, + &mut *transaction, + &redis, + ) + .await? + .ok_or_else(|| { + ApiError::InvalidInput("No database entry for loader provided.".to_string()) + })?; + loader_versions.push(LoaderVersion::new(loader_id, id)); + } + LoaderVersion::insert_many(loader_versions, &mut transaction).await?; + + crate::database::models::Project::clear_cache( + version_item.inner.project_id, + None, + None, + &redis, + ) + .await?; + } + + if let Some(featured) = &new_version.featured { + sqlx::query!( + " + UPDATE versions + SET featured = $1 + WHERE (id = $2) + ", + featured, + id as database::models::ids::VersionId, + ) + .execute(&mut *transaction) + .await?; + } + + if let Some(primary_file) = &new_version.primary_file { + let result = sqlx::query!( + " + SELECT f.id id FROM hashes h + INNER JOIN files f ON h.file_id = f.id + WHERE h.algorithm = $2 AND h.hash = $1 + ", + primary_file.1.as_bytes(), + primary_file.0 + ) + .fetch_optional(&**pool) + .await? 
+ .ok_or_else(|| { + ApiError::InvalidInput(format!( + "Specified file with hash {} does not exist.", + primary_file.1.clone() + )) + })?; + + sqlx::query!( + " + UPDATE files + SET is_primary = FALSE + WHERE (version_id = $1) + ", + id as database::models::ids::VersionId, + ) + .execute(&mut *transaction) + .await?; + + sqlx::query!( + " + UPDATE files + SET is_primary = TRUE + WHERE (id = $1) + ", + result.id, + ) + .execute(&mut *transaction) + .await?; + } + + if let Some(body) = &new_version.changelog { + sqlx::query!( + " + UPDATE versions + SET changelog = $1 + WHERE (id = $2) + ", + body, + id as database::models::ids::VersionId, + ) + .execute(&mut *transaction) + .await?; + } + + if let Some(downloads) = &new_version.downloads { + if !user.role.is_mod() { + return Err(ApiError::CustomAuthentication( + "You don't have permission to set the downloads of this mod".to_string(), + )); + } + + sqlx::query!( + " + UPDATE versions + SET downloads = $1 + WHERE (id = $2) + ", + *downloads as i32, + id as database::models::ids::VersionId, + ) + .execute(&mut *transaction) + .await?; + + let diff = *downloads - (version_item.inner.downloads as u32); + + sqlx::query!( + " + UPDATE mods + SET downloads = downloads + $1 + WHERE (id = $2) + ", + diff as i32, + version_item.inner.project_id as database::models::ids::ProjectId, + ) + .execute(&mut *transaction) + .await?; + } + + if let Some(status) = &new_version.status { + if !status.can_be_requested() { + return Err(ApiError::InvalidInput( + "The requested status cannot be set!".to_string(), + )); + } + + sqlx::query!( + " + UPDATE versions + SET status = $1 + WHERE (id = $2) + ", + status.as_str(), + id as database::models::ids::VersionId, + ) + .execute(&mut *transaction) + .await?; + } + + if let Some(file_types) = &new_version.file_types { + for file_type in file_types { + let result = sqlx::query!( + " + SELECT f.id id FROM hashes h + INNER JOIN files f ON h.file_id = f.id + WHERE h.algorithm = $2 AND h.hash = $1 + 
", + file_type.hash.as_bytes(), + file_type.algorithm + ) + .fetch_optional(&**pool) + .await? + .ok_or_else(|| { + ApiError::InvalidInput(format!( + "Specified file with hash {} does not exist.", + file_type.algorithm.clone() + )) + })?; + + sqlx::query!( + " + UPDATE files + SET file_type = $2 + WHERE (id = $1) + ", + result.id, + file_type.file_type.as_ref().map(|x| x.as_str()), + ) + .execute(&mut *transaction) + .await?; + } + } + + if let Some(ordering) = &new_version.ordering { + sqlx::query!( + " + UPDATE versions + SET ordering = $1 + WHERE (id = $2) + ", + ordering.to_owned() as Option, + id as database::models::ids::VersionId, + ) + .execute(&mut *transaction) + .await?; + } + + // delete any images no longer in the changelog + let checkable_strings: Vec<&str> = vec![&new_version.changelog] + .into_iter() + .filter_map(|x| x.as_ref().map(|y| y.as_str())) + .collect(); + let context = ImageContext::Version { + version_id: Some(version_item.inner.id.into()), + }; + + img::delete_unused_images(context, checkable_strings, &mut transaction, &redis).await?; + + database::models::Version::clear_cache(&version_item, &redis).await?; + database::models::Project::clear_cache( + version_item.inner.project_id, + None, + Some(true), + &redis, + ) + .await?; + transaction.commit().await?; + Ok(HttpResponse::NoContent().body("")) + } else { + Err(ApiError::CustomAuthentication( + "You do not have permission to edit this version!".to_string(), + )) + } + } else { + Ok(HttpResponse::NotFound().body("")) + } +} + +#[derive(Serialize, Deserialize)] +pub struct VersionListFilters { + pub loaders: Option, + pub featured: Option, + pub version_type: Option, + pub limit: Option, + pub offset: Option, + /* + Loader fields to filter with: + "game_versions": ["1.16.5", "1.17"] + + Returns if it matches any of the values + */ + pub loader_fields: Option, +} + +pub async fn version_list( + req: HttpRequest, + info: web::Path<(String,)>, + web::Query(filters): web::Query, + pool: 
web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let string = info.into_inner().0; + + let result = database::models::Project::get(&string, &**pool, &redis).await?; + + let user_option = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::PROJECT_READ, Scopes::VERSION_READ]), + ) + .await + .map(|x| x.1) + .ok(); + + if let Some(project) = result { + if !is_authorized(&project.inner, &user_option, &pool).await? { + return Ok(HttpResponse::NotFound().body("")); + } + + let loader_field_filters = filters.loader_fields.as_ref().map(|x| { + serde_json::from_str::>>(x).unwrap_or_default() + }); + let loader_filters = filters + .loaders + .as_ref() + .map(|x| serde_json::from_str::>(x).unwrap_or_default()); + let mut versions = database::models::Version::get_many(&project.versions, &**pool, &redis) + .await? + .into_iter() + .skip(filters.offset.unwrap_or(0)) + .take(filters.limit.unwrap_or(usize::MAX)) + .filter(|x| { + let mut bool = true; + + if let Some(version_type) = filters.version_type { + bool &= &*x.inner.version_type == version_type.as_str(); + } + if let Some(loaders) = &loader_filters { + bool &= x.loaders.iter().any(|y| loaders.contains(y)); + } + if let Some(loader_fields) = &loader_field_filters { + for (key, values) in loader_fields { + bool &= if let Some(x_vf) = + x.version_fields.iter().find(|y| y.field_name == *key) + { + values.iter().any(|v| x_vf.value.contains_json_value(v)) + } else { + true + }; + } + } + bool + }) + .collect::>(); + + let mut response = versions + .iter() + .filter(|version| { + filters + .featured + .map(|featured| featured == version.inner.featured) + .unwrap_or(true) + }) + .cloned() + .collect::>(); + + versions.sort(); + + // Attempt to populate versions with "auto featured" versions + if response.is_empty() && !versions.is_empty() && filters.featured.unwrap_or(false) { + // TODO: Re-implement this + // let (loaders, game_versions) = futures::future::try_join( + 
// database::models::loader_fields::Loader::list(&**pool, &redis), + // database::models::loader_fields::GameVersion::list_filter( + // None, + // Some(true), + // &**pool, + // &redis, + // ), + // ) + // .await?; + + // let mut joined_filters = Vec::new(); + // for game_version in &game_versions { + // for loader in &loaders { + // joined_filters.push((game_version, loader)) + // } + // } + + // joined_filters.into_iter().for_each(|filter| { + // versions + // .iter() + // .find(|version| { + // // version.game_versions.contains(&filter.0.version) + // // && + // version.loaders.contains(&filter.1.loader) + // }) + // .map(|version| response.push(version.clone())) + // .unwrap_or(()); + // }); + + if response.is_empty() { + versions + .into_iter() + .for_each(|version| response.push(version)); + } + } + + response.sort(); + response.dedup_by(|a, b| a.inner.id == b.inner.id); + + let response = filter_authorized_versions(response, &user_option, &pool).await?; + + Ok(HttpResponse::Ok().json(response)) + } else { + Ok(HttpResponse::NotFound().body("")) + } +} + +#[derive(Deserialize)] +pub struct SchedulingData { + pub time: DateTime, + pub requested_status: VersionStatus, +} + +pub async fn version_schedule( + req: HttpRequest, + info: web::Path<(models::ids::VersionId,)>, + pool: web::Data, + redis: web::Data, + scheduling_data: web::Json, + session_queue: web::Data, +) -> Result { + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::VERSION_WRITE]), + ) + .await? 
+ .1; + + if scheduling_data.time < Utc::now() { + return Err(ApiError::InvalidInput( + "You cannot schedule a version to be released in the past!".to_string(), + )); + } + + if !scheduling_data.requested_status.can_be_requested() { + return Err(ApiError::InvalidInput( + "Specified requested status cannot be requested!".to_string(), + )); + } + + let string = info.into_inner().0; + let result = database::models::Version::get(string.into(), &**pool, &redis).await?; + + if let Some(version_item) = result { + let team_member = database::models::TeamMember::get_from_user_id_project( + version_item.inner.project_id, + user.id.into(), + &**pool, + ) + .await?; + + let organization_item = + database::models::Organization::get_associated_organization_project_id( + version_item.inner.project_id, + &**pool, + ) + .await + .map_err(ApiError::Database)?; + + let organization_team_member = if let Some(organization) = &organization_item { + database::models::TeamMember::get_from_user_id( + organization.team_id, + user.id.into(), + &**pool, + ) + .await? 
+ } else { + None + }; + + let permissions = ProjectPermissions::get_permissions_by_role( + &user.role, + &team_member, + &organization_team_member, + ) + .unwrap_or_default(); + + if !user.role.is_mod() && !permissions.contains(ProjectPermissions::EDIT_DETAILS) { + return Err(ApiError::CustomAuthentication( + "You do not have permission to edit this version's scheduling data!".to_string(), + )); + } + + let mut transaction = pool.begin().await?; + sqlx::query!( + " + UPDATE versions + SET status = $1, date_published = $2 + WHERE (id = $3) + ", + VersionStatus::Scheduled.as_str(), + scheduling_data.time, + version_item.inner.id as database::models::ids::VersionId, + ) + .execute(&mut *transaction) + .await?; + + database::models::Version::clear_cache(&version_item, &redis).await?; + transaction.commit().await?; + + Ok(HttpResponse::NoContent().body("")) + } else { + Ok(HttpResponse::NotFound().body("")) + } +} + +pub async fn version_delete( + req: HttpRequest, + info: web::Path<(models::ids::VersionId,)>, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + let user = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::VERSION_DELETE]), + ) + .await? + .1; + let id = info.into_inner().0; + + let version = database::models::Version::get(id.into(), &**pool, &redis) + .await? + .ok_or_else(|| { + ApiError::InvalidInput("The specified version does not exist!".to_string()) + })?; + + if !user.role.is_admin() { + let team_member = database::models::TeamMember::get_from_user_id_project( + version.inner.project_id, + user.id.into(), + &**pool, + ) + .await + .map_err(ApiError::Database)?; + + let organization = + Organization::get_associated_organization_project_id(version.inner.project_id, &**pool) + .await?; + + let organization_team_member = if let Some(organization) = &organization { + database::models::TeamMember::get_from_user_id( + organization.team_id, + user.id.into(), + &**pool, + ) + .await? 
+ } else { + None + }; + let permissions = ProjectPermissions::get_permissions_by_role( + &user.role, + &team_member, + &organization_team_member, + ) + .unwrap_or_default(); + + if !permissions.contains(ProjectPermissions::DELETE_VERSION) { + return Err(ApiError::CustomAuthentication( + "You do not have permission to delete versions in this team".to_string(), + )); + } + } + + let mut transaction = pool.begin().await?; + let context = ImageContext::Version { + version_id: Some(version.inner.id.into()), + }; + let uploaded_images = + database::models::Image::get_many_contexted(context, &mut transaction).await?; + for image in uploaded_images { + image_item::Image::remove(image.id, &mut transaction, &redis).await?; + } + + let result = + database::models::Version::remove_full(version.inner.id, &redis, &mut transaction).await?; + + database::models::Project::clear_cache(version.inner.project_id, None, Some(true), &redis) + .await?; + + transaction.commit().await?; + + if result.is_some() { + Ok(HttpResponse::NoContent().body("")) + } else { + Ok(HttpResponse::NotFound().body("")) + } +} diff --git a/src/scheduler.rs b/src/scheduler.rs index 055601c3..68cb593b 100644 --- a/src/scheduler.rs +++ b/src/scheduler.rs @@ -38,15 +38,20 @@ impl Drop for Scheduler { use log::{info, warn}; -pub fn schedule_versions(scheduler: &mut Scheduler, pool: sqlx::Pool) { +pub fn schedule_versions( + scheduler: &mut Scheduler, + pool: sqlx::Pool, + redis: RedisPool, +) { let version_index_interval = std::time::Duration::from_secs(parse_var("VERSION_INDEX_INTERVAL").unwrap_or(1800)); scheduler.run(version_index_interval, move || { let pool_ref = pool.clone(); + let redis = redis.clone(); async move { info!("Indexing game versions list from Mojang"); - let result = update_versions(&pool_ref).await; + let result = update_versions(&pool_ref, &redis).await; if let Err(e) = result { warn!("Version update failed: {}", e); } @@ -65,7 +70,10 @@ pub enum VersionIndexingError { DatabaseError(#[from] 
crate::database::models::DatabaseError), } -use crate::util::env::parse_var; +use crate::{ + database::{models::legacy_loader_fields::MinecraftGameVersion, redis::RedisPool}, + util::env::parse_var, +}; use chrono::{DateTime, Utc}; use serde::Deserialize; use tokio_stream::wrappers::IntervalStream; @@ -84,7 +92,10 @@ struct VersionFormat<'a> { release_time: DateTime, } -async fn update_versions(pool: &sqlx::Pool) -> Result<(), VersionIndexingError> { +async fn update_versions( + pool: &sqlx::Pool, + redis: &RedisPool, +) -> Result<(), VersionIndexingError> { let input = reqwest::get("https://piston-meta.mojang.com/mc/game/version_manifest_v2.json") .await? .json::() @@ -168,7 +179,7 @@ async fn update_versions(pool: &sqlx::Pool) -> Result<(), Versio _ => "other", }; - crate::database::models::categories::GameVersion::builder() + MinecraftGameVersion::builder() .version(&name)? .version_type(type_)? .created( @@ -180,7 +191,7 @@ async fn update_versions(pool: &sqlx::Pool) -> Result<(), Versio &version.release_time }, ) - .insert(pool) + .insert(pool, redis) .await?; } diff --git a/src/search/indexing/local_import.rs b/src/search/indexing/local_import.rs index 7c612bee..3b3c80f8 100644 --- a/src/search/indexing/local_import.rs +++ b/src/search/indexing/local_import.rs @@ -1,50 +1,94 @@ +use std::collections::HashMap; +use std::sync::Arc; + +use dashmap::DashSet; use futures::TryStreamExt; use log::info; use super::IndexingError; +use crate::database::models::loader_fields::VersionField; use crate::database::models::ProjectId; use crate::search::UploadSearchProject; use sqlx::postgres::PgPool; -pub async fn index_local(pool: PgPool) -> Result, IndexingError> { +pub async fn index_local( + pool: PgPool, +) -> Result<(Vec, Vec), IndexingError> { info!("Indexing local projects!"); - - Ok( + let loader_field_keys: Arc> = Arc::new(DashSet::new()); + let uploads = sqlx::query!( " - SELECT m.id id, m.project_type project_type, m.title title, m.description description, 
m.downloads downloads, m.follows follows, + SELECT m.id id, v.id version_id, m.title title, m.description description, m.downloads downloads, m.follows follows, m.icon_url icon_url, m.published published, m.approved approved, m.updated updated, m.team_id team_id, m.license license, m.slug slug, m.status status_name, m.color color, - cs.name client_side_type, ss.name server_side_type, pt.name project_type_name, u.username username, + pt.name project_type_name, u.username username, ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null and mc.is_additional is false) categories, ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null and mc.is_additional is true) additional_categories, ARRAY_AGG(DISTINCT lo.loader) filter (where lo.loader is not null) loaders, - ARRAY_AGG(DISTINCT gv.version) filter (where gv.version is not null) versions, + ARRAY_AGG(DISTINCT pt.name) filter (where pt.name is not null) project_types, + ARRAY_AGG(DISTINCT g.name) filter (where g.name is not null) games, ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null and mg.featured is false) gallery, - ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null and mg.featured is true) featured_gallery - FROM mods m + ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null and mg.featured is true) featured_gallery, + JSONB_AGG( + DISTINCT jsonb_build_object( + 'field_id', vf.field_id, + 'int_value', vf.int_value, + 'enum_value', vf.enum_value, + 'string_value', vf.string_value + ) + ) filter (where vf.field_id is not null) version_fields, + JSONB_AGG( + DISTINCT jsonb_build_object( + 'lf_id', lf.id, + 'loader_name', lo.loader, + 'field', lf.field, + 'field_type', lf.field_type, + 'enum_type', lf.enum_type, + 'min_val', lf.min_val, + 'max_val', lf.max_val, + 'optional', lf.optional + ) + ) filter (where lf.id is not null) loader_fields, + JSONB_AGG( + DISTINCT jsonb_build_object( + 'id', lfev.id, + 'enum_id', lfev.enum_id, + 
'value', lfev.value, + 'ordering', lfev.ordering, + 'created', lfev.created, + 'metadata', lfev.metadata + ) + ) filter (where lfev.id is not null) loader_field_enum_values + + FROM versions v + INNER JOIN mods m ON v.mod_id = m.id AND m.status = ANY($2) LEFT OUTER JOIN mods_categories mc ON joining_mod_id = m.id LEFT OUTER JOIN categories c ON mc.joining_category_id = c.id - LEFT OUTER JOIN versions v ON v.mod_id = m.id AND v.status != ALL($1) - LEFT OUTER JOIN game_versions_versions gvv ON gvv.joining_version_id = v.id - LEFT OUTER JOIN game_versions gv ON gvv.game_version_id = gv.id LEFT OUTER JOIN loaders_versions lv ON lv.version_id = v.id LEFT OUTER JOIN loaders lo ON lo.id = lv.loader_id + LEFT JOIN loaders_project_types lpt ON lpt.joining_loader_id = lo.id + LEFT JOIN project_types pt ON pt.id = lpt.joining_project_type_id + LEFT JOIN loaders_project_types_games lptg ON lptg.loader_id = lo.id AND lptg.project_type_id = pt.id + LEFT JOIN games g ON lptg.game_id = g.id LEFT OUTER JOIN mods_gallery mg ON mg.mod_id = m.id - INNER JOIN project_types pt ON pt.id = m.project_type - INNER JOIN side_types cs ON m.client_side = cs.id - INNER JOIN side_types ss ON m.server_side = ss.id INNER JOIN team_members tm ON tm.team_id = m.team_id AND tm.role = $3 AND tm.accepted = TRUE INNER JOIN users u ON tm.user_id = u.id - WHERE m.status = ANY($2) - GROUP BY m.id, cs.id, ss.id, pt.id, u.id; + LEFT OUTER JOIN version_fields vf on v.id = vf.version_id + LEFT OUTER JOIN loader_fields lf on vf.field_id = lf.id + LEFT OUTER JOIN loader_field_enums lfe on lf.enum_type = lfe.id + LEFT OUTER JOIN loader_field_enum_values lfev on lfev.enum_id = lfe.id + WHERE v.status != ANY($1) + GROUP BY v.id, m.id, pt.id, u.id; ", &*crate::models::projects::VersionStatus::iterator().filter(|x| x.is_hidden()).map(|x| x.to_string()).collect::>(), &*crate::models::projects::ProjectStatus::iterator().filter(|x| x.is_searchable()).map(|x| x.to_string()).collect::>(), crate::models::teams::OWNER_ROLE, 
) .fetch_many(&pool) - .try_filter_map(|e| async { + .try_filter_map(|e| { + let loader_field_keys = loader_field_keys.clone(); + async move { Ok(e.right().map(|m| { let mut additional_categories = m.additional_categories.unwrap_or_default(); let mut categories = m.categories.unwrap_or_default(); @@ -54,9 +98,18 @@ pub async fn index_local(pool: PgPool) -> Result, Index let display_categories = categories.clone(); categories.append(&mut additional_categories); - let versions = m.versions.unwrap_or_default(); + let version_fields = VersionField::from_query_json(m.id, m.loader_fields, m.version_fields, m.loader_field_enum_values); + + let loader_fields : HashMap> = version_fields.into_iter().map(|vf| { + (vf.field_name, vf.value.as_strings()) + }).collect(); + + for v in loader_fields.keys().cloned() { + loader_field_keys.insert(v); + } let project_id: crate::models::projects::ProjectId = ProjectId(m.id).into(); + let version_id: crate::models::projects::ProjectId = ProjectId(m.version_id).into(); let license = match m.license.split(' ').next() { Some(license) => license.to_string(), @@ -68,7 +121,17 @@ pub async fn index_local(pool: PgPool) -> Result, Index _ => false, }; + // SPECIAL BEHAVIOUR + // Todo: revisit. + // For consistency with v2 searching, we consider the loader field 'mrpack_loaders' to be a category. + // These were previously considered the loader, and in v2, the loader is a category for searching. + // So to avoid breakage or awkward conversions, we just consider those loader_fields to be categories. + // The loaders are kept in loader_fields as well, so that no information is lost on retrieval. 
+ let mrpack_loaders = loader_fields.get("mrpack_loaders").cloned().unwrap_or_default(); + categories.extend(mrpack_loaders); + UploadSearchProject { + version_id: version_id.to_string(), project_id: project_id.to_string(), title: m.title, description: m.description, @@ -81,11 +144,7 @@ pub async fn index_local(pool: PgPool) -> Result, Index created_timestamp: m.approved.unwrap_or(m.published).timestamp(), date_modified: m.updated, modified_timestamp: m.updated.timestamp(), - latest_version: versions.last().cloned().unwrap_or_else(|| "None".to_string()), - versions, license, - client_side: m.client_side_type, - server_side: m.server_side_type, slug: m.slug, project_type: m.project_type_name, gallery: m.gallery.unwrap_or_default(), @@ -93,10 +152,17 @@ pub async fn index_local(pool: PgPool) -> Result, Index open_source, color: m.color.map(|x| x as u32), featured_gallery: m.featured_gallery.unwrap_or_default().first().cloned(), + loader_fields } })) - }) +}}) .try_collect::>() - .await? - ) + .await?; + Ok(( + uploads, + Arc::try_unwrap(loader_field_keys) + .unwrap_or_default() + .into_iter() + .collect(), + )) } diff --git a/src/search/indexing/mod.rs b/src/search/indexing/mod.rs index d0cd58d7..e6e1f378 100644 --- a/src/search/indexing/mod.rs +++ b/src/search/indexing/mod.rs @@ -32,11 +32,14 @@ const MEILISEARCH_CHUNK_SIZE: usize = 10000; pub async fn index_projects(pool: PgPool, config: &SearchConfig) -> Result<(), IndexingError> { let mut docs_to_add: Vec = vec![]; + let mut additional_fields: Vec = vec![]; - docs_to_add.append(&mut index_local(pool.clone()).await?); + let (mut uploads, mut loader_fields) = index_local(pool.clone()).await?; + docs_to_add.append(&mut uploads); + additional_fields.append(&mut loader_fields); // Write Indices - add_projects(docs_to_add, config).await?; + add_projects(docs_to_add, additional_fields, config).await?; Ok(()) } @@ -69,7 +72,7 @@ async fn create_index( }, )) => { // Only create index and set settings if the index doesn't 
already exist - let task = client.create_index(name, Some("project_id")).await?; + let task = client.create_index(name, Some("version_id")).await?; let task = task.wait_for_completion(client, None, None).await?; let index = task .try_make_index(client) @@ -103,7 +106,7 @@ async fn add_to_index( ) -> Result<(), IndexingError> { for chunk in mods.chunks(MEILISEARCH_CHUNK_SIZE) { index - .add_documents(chunk, Some("project_id")) + .add_documents(chunk, Some("version_id")) .await? .wait_for_completion(client, None, None) .await?; @@ -114,25 +117,35 @@ async fn add_to_index( async fn create_and_add_to_index( client: &Client, projects: &[UploadSearchProject], + additional_fields: &[String], name: &'static str, custom_rules: Option<&'static [&'static str]>, ) -> Result<(), IndexingError> { let index = create_index(client, name, custom_rules).await?; + + let mut new_filterable_attributes = index.get_filterable_attributes().await?; + new_filterable_attributes.extend(additional_fields.iter().map(|s| s.to_string())); + index + .set_filterable_attributes(new_filterable_attributes) + .await?; + add_to_index(client, index, projects).await?; Ok(()) } pub async fn add_projects( projects: Vec, + additional_fields: Vec, config: &SearchConfig, ) -> Result<(), IndexingError> { let client = config.make_client(); - create_and_add_to_index(&client, &projects, "projects", None).await?; + create_and_add_to_index(&client, &projects, &additional_fields, "projects", None).await?; create_and_add_to_index( &client, &projects, + &additional_fields, "projects_filtered", Some(&[ "sort", @@ -150,6 +163,7 @@ pub async fn add_projects( fn default_settings() -> Settings { Settings::new() + .with_distinct_attribute("project_id") .with_displayed_attributes(DEFAULT_DISPLAYED_ATTRIBUTES) .with_searchable_attributes(DEFAULT_SEARCHABLE_ATTRIBUTES) .with_sortable_attributes(DEFAULT_SORTABLE_ATTRIBUTES) @@ -161,6 +175,7 @@ fn default_settings() -> Settings { const DEFAULT_DISPLAYED_ATTRIBUTES: &[&str] = &[ 
"project_id", + "version_id", "project_type", "slug", "author", @@ -168,7 +183,6 @@ const DEFAULT_DISPLAYED_ATTRIBUTES: &[&str] = &[ "description", "categories", "display_categories", - "versions", "downloads", "follows", "icon_url", @@ -176,8 +190,6 @@ const DEFAULT_DISPLAYED_ATTRIBUTES: &[&str] = &[ "date_modified", "latest_version", "license", - "client_side", - "server_side", "gallery", "featured_gallery", "color", @@ -187,10 +199,7 @@ const DEFAULT_SEARCHABLE_ATTRIBUTES: &[&str] = &["title", "description", "author const DEFAULT_ATTRIBUTES_FOR_FACETING: &[&str] = &[ "categories", - "versions", "license", - "client_side", - "server_side", "project_type", "downloads", "follows", diff --git a/src/search/mod.rs b/src/search/mod.rs index af8155a1..becf32aa 100644 --- a/src/search/mod.rs +++ b/src/search/mod.rs @@ -7,6 +7,7 @@ use meilisearch_sdk::client::Client; use serde::{Deserialize, Serialize}; use std::borrow::Cow; use std::cmp::min; +use std::collections::HashMap; use std::fmt::Write; use thiserror::Error; @@ -71,6 +72,7 @@ impl SearchConfig { /// This contains some extra data that is not returned by search results. 
#[derive(Serialize, Deserialize, Debug, Clone)] pub struct UploadSearchProject { + pub version_id: String, pub project_id: String, pub project_type: String, pub slug: Option, @@ -79,14 +81,10 @@ pub struct UploadSearchProject { pub description: String, pub categories: Vec, pub display_categories: Vec, - pub versions: Vec, pub follows: i32, pub downloads: i32, pub icon_url: String, - pub latest_version: String, pub license: String, - pub client_side: String, - pub server_side: String, pub gallery: Vec, pub featured_gallery: Option, /// RFC 3339 formatted creation date of the project @@ -99,6 +97,9 @@ pub struct UploadSearchProject { pub modified_timestamp: i64, pub open_source: bool, pub color: Option, + + #[serde(flatten)] + pub loader_fields: HashMap>, } #[derive(Serialize, Deserialize, Debug)] @@ -111,6 +112,7 @@ pub struct SearchResults { #[derive(Serialize, Deserialize, Debug, Clone)] pub struct ResultSearchProject { + pub version_id: String, pub project_id: String, pub project_type: String, pub slug: Option, @@ -119,7 +121,6 @@ pub struct ResultSearchProject { pub description: String, pub categories: Vec, pub display_categories: Vec, - pub versions: Vec, pub downloads: i32, pub follows: i32, pub icon_url: String, @@ -127,13 +128,24 @@ pub struct ResultSearchProject { pub date_created: String, /// RFC 3339 formatted modification date of the project pub date_modified: String, - pub latest_version: String, pub license: String, - pub client_side: String, - pub server_side: String, pub gallery: Vec, pub featured_gallery: Option, pub color: Option, + + #[serde(flatten)] + pub loader_fields: HashMap>, +} + +pub fn get_sort_index(index: &str) -> Result<(&str, [&str; 1]), SearchError> { + Ok(match index { + "relevance" => ("projects", ["downloads:desc"]), + "downloads" => ("projects_filtered", ["downloads:desc"]), + "follows" => ("projects", ["follows:desc"]), + "updated" => ("projects", ["date_modified:desc"]), + "newest" => ("projects", ["date_created:desc"]), + i => 
return Err(SearchError::InvalidIndex(i.to_string())), + }) } pub async fn search_for_project( @@ -146,14 +158,7 @@ pub async fn search_for_project( let index = info.index.as_deref().unwrap_or("relevance"); let limit = info.limit.as_deref().unwrap_or("10").parse()?; - let sort = match index { - "relevance" => ("projects", ["downloads:desc"]), - "downloads" => ("projects_filtered", ["downloads:desc"]), - "follows" => ("projects", ["follows:desc"]), - "updated" => ("projects", ["date_modified:desc"]), - "newest" => ("projects", ["date_created:desc"]), - i => return Err(SearchError::InvalidIndex(i.to_string())), - }; + let sort = get_sort_index(index)?; let meilisearch_index = client.get_index(sort.0).await?; diff --git a/tests/common/actix.rs b/src/util/actix.rs similarity index 87% rename from tests/common/actix.rs rename to src/util/actix.rs index 11759d7f..fc77e663 100644 --- a/tests/common/actix.rs +++ b/src/util/actix.rs @@ -1,7 +1,9 @@ use actix_web::test::TestRequest; use bytes::{Bytes, BytesMut}; -// Multipart functionality (actix-test does not innately support multipart) +// Multipart functionality for actix +// Primarily for testing or some implementations of route-redirection +// (actix-test does not innately support multipart) #[derive(Debug, Clone)] pub struct MultipartSegment { pub name: String, @@ -32,8 +34,8 @@ impl AppendsMultipart for TestRequest { } } -fn generate_multipart(data: impl IntoIterator) -> (String, Bytes) { - let mut boundary = String::from("----WebKitFormBoundary"); +pub fn generate_multipart(data: impl IntoIterator) -> (String, Bytes) { + let mut boundary: String = String::from("----WebKitFormBoundary"); boundary.push_str(&rand::random::().to_string()); boundary.push_str(&rand::random::().to_string()); boundary.push_str(&rand::random::().to_string()); diff --git a/src/util/mod.rs b/src/util/mod.rs index 74588dd7..5729d570 100644 --- a/src/util/mod.rs +++ b/src/util/mod.rs @@ -1,3 +1,4 @@ +pub mod actix; pub mod bitflag; pub mod 
captcha; pub mod cors; diff --git a/src/util/routes.rs b/src/util/routes.rs index 79512f72..00bb288e 100644 --- a/src/util/routes.rs +++ b/src/util/routes.rs @@ -1,4 +1,4 @@ -use crate::routes::v2::project_creation::CreateError; +use crate::routes::v3::project_creation::CreateError; use crate::routes::ApiError; use actix_multipart::Field; use actix_web::web::Payload; diff --git a/src/util/webhook.rs b/src/util/webhook.rs index 8b5b5a65..aa8c7480 100644 --- a/src/util/webhook.rs +++ b/src/util/webhook.rs @@ -1,4 +1,5 @@ -use crate::database::models::categories::GameVersion; +use crate::database::models::legacy_loader_fields::MinecraftGameVersion; +use crate::database::models::loader_fields::VersionField; use crate::database::redis::RedisPool; use crate::models::projects::ProjectId; use crate::routes::ApiError; @@ -77,35 +78,71 @@ pub async fn send_discord_webhook( webhook_url: String, message: Option, ) -> Result<(), ApiError> { - let all_game_versions = GameVersion::list(pool, redis).await?; + // TODO: this currently uses Minecraft as it is a v2 webhook, and requires 'game_versions', a minecraft-java loader field. 
+ // TODO: This should be updated to use the generic loader fields w/ discord from the project game + let all_game_versions = MinecraftGameVersion::list(pool, redis).await?; let row = sqlx::query!( " SELECT m.id id, m.title title, m.description description, m.color color, - m.icon_url icon_url, m.slug slug, cs.name client_side_type, ss.name server_side_type, + m.icon_url icon_url, m.slug slug, pt.name project_type, u.username username, u.avatar_url avatar_url, ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null) categories, ARRAY_AGG(DISTINCT lo.loader) filter (where lo.loader is not null) loaders, - JSONB_AGG(DISTINCT jsonb_build_object('id', gv.id, 'version', gv.version, 'type', gv.type, 'created', gv.created, 'major', gv.major)) filter (where gv.version is not null) versions, + ARRAY_AGG(DISTINCT pt.name) filter (where pt.name is not null) project_types, + ARRAY_AGG(DISTINCT g.name) filter (where g.name is not null) games, ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null and mg.featured is false) gallery, - ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null and mg.featured is true) featured_gallery + ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null and mg.featured is true) featured_gallery, + JSONB_AGG( + DISTINCT jsonb_build_object( + 'field_id', vf.field_id, + 'int_value', vf.int_value, + 'enum_value', vf.enum_value, + 'string_value', vf.string_value + ) + ) filter (where vf.field_id is not null) version_fields, + JSONB_AGG( + DISTINCT jsonb_build_object( + 'lf_id', lf.id, + 'loader_name', lo.loader, + 'field', lf.field, + 'field_type', lf.field_type, + 'enum_type', lf.enum_type, + 'min_val', lf.min_val, + 'max_val', lf.max_val, + 'optional', lf.optional + ) + ) filter (where lf.id is not null) loader_fields, + JSONB_AGG( + DISTINCT jsonb_build_object( + 'id', lfev.id, + 'enum_id', lfev.enum_id, + 'value', lfev.value, + 'ordering', lfev.ordering, + 'created', lfev.created, + 
'metadata', lfev.metadata + ) + ) filter (where lfev.id is not null) loader_field_enum_values FROM mods m LEFT OUTER JOIN mods_categories mc ON joining_mod_id = m.id AND mc.is_additional = FALSE LEFT OUTER JOIN categories c ON mc.joining_category_id = c.id LEFT OUTER JOIN versions v ON v.mod_id = m.id AND v.status != ALL($2) - LEFT OUTER JOIN game_versions_versions gvv ON gvv.joining_version_id = v.id - LEFT OUTER JOIN game_versions gv ON gvv.game_version_id = gv.id LEFT OUTER JOIN loaders_versions lv ON lv.version_id = v.id LEFT OUTER JOIN loaders lo ON lo.id = lv.loader_id + LEFT JOIN loaders_project_types lpt ON lpt.joining_loader_id = lo.id + LEFT JOIN project_types pt ON pt.id = lpt.joining_project_type_id + LEFT JOIN loaders_project_types_games lptg ON lptg.loader_id = lo.id AND lptg.project_type_id = pt.id + LEFT JOIN games g ON lptg.game_id = g.id LEFT OUTER JOIN mods_gallery mg ON mg.mod_id = m.id - INNER JOIN project_types pt ON pt.id = m.project_type - INNER JOIN side_types cs ON m.client_side = cs.id - INNER JOIN side_types ss ON m.server_side = ss.id INNER JOIN team_members tm ON tm.team_id = m.team_id AND tm.role = $3 AND tm.accepted = TRUE INNER JOIN users u ON tm.user_id = u.id + LEFT OUTER JOIN version_fields vf on v.id = vf.version_id + LEFT OUTER JOIN loader_fields lf on vf.field_id = lf.id + LEFT OUTER JOIN loader_field_enums lfe on lf.enum_type = lfe.id + LEFT OUTER JOIN loader_field_enum_values lfev on lfev.enum_id = lfe.id WHERE m.id = $1 - GROUP BY m.id, cs.id, ss.id, pt.id, u.id; + GROUP BY m.id, pt.id, u.id; ", project_id.0 as i64, &*crate::models::projects::VersionStatus::iterator().filter(|x| x.is_hidden()).map(|x| x.to_string()).collect::>(), @@ -120,10 +157,10 @@ pub async fn send_discord_webhook( let categories = project.categories.unwrap_or_default(); let loaders = project.loaders.unwrap_or_default(); - let versions: Vec = - serde_json::from_value(project.versions.unwrap_or_default()) - .ok() - .unwrap_or_default(); + // let 
versions: Vec = + // serde_json::from_value(project.versions.unwrap_or_default()) + // .ok() + // .unwrap_or_default(); if !categories.is_empty() { fields.push(DiscordEmbedField { @@ -187,9 +224,21 @@ pub async fn send_discord_webhook( }); } + // TODO: Modified to keep "Versions" as a field as it may be hardcoded. Ideally, this pushes all loader fields to the embed for v3 + // TODO: This might need some work to manually test + let version_fields = VersionField::from_query_json( + project.id, + project.loader_fields, + project.version_fields, + project.loader_field_enum_values, + ); + let versions = version_fields + .into_iter() + .find_map(|vf| MinecraftGameVersion::try_from_version_field(&vf).ok()) + .unwrap_or_default(); + if !versions.is_empty() { let formatted_game_versions: String = get_gv_range(versions, all_game_versions); - fields.push(DiscordEmbedField { name: "Versions", value: formatted_game_versions, @@ -270,8 +319,8 @@ pub async fn send_discord_webhook( } fn get_gv_range( - mut game_versions: Vec, - mut all_game_versions: Vec, + mut game_versions: Vec, + mut all_game_versions: Vec, ) -> String { // both -> least to greatest game_versions.sort_by(|a, b| a.created.cmp(&b.created)); diff --git a/src/validate/mod.rs b/src/validate/mod.rs index b0ed3394..90507927 100644 --- a/src/validate/mod.rs +++ b/src/validate/mod.rs @@ -1,5 +1,9 @@ +use crate::database::models::legacy_loader_fields::MinecraftGameVersion; +use crate::database::models::loader_fields::VersionField; +use crate::database::models::DatabaseError; +use crate::database::redis::RedisPool; use crate::models::pack::PackFormat; -use crate::models::projects::{FileType, GameVersion, Loader}; +use crate::models::projects::{FileType, Loader}; use crate::validate::datapack::DataPackValidator; use crate::validate::fabric::FabricValidator; use crate::validate::forge::{ForgeValidator, LegacyForgeValidator}; @@ -36,6 +40,8 @@ pub enum ValidationError { InvalidInput(std::borrow::Cow<'static, str>), 
#[error("Error while managing threads")] Blocking(#[from] actix_web::error::BlockingError), + #[error("Error while querying database")] + Database(#[from] DatabaseError), } #[derive(Eq, PartialEq)] @@ -66,7 +72,7 @@ pub enum SupportedGameVersions { PastDate(DateTime), Range(DateTime, DateTime), #[allow(dead_code)] - Custom(Vec), + Custom(Vec), } pub trait Validator: Sync { @@ -102,13 +108,53 @@ static VALIDATORS: &[&dyn Validator] = &[ ]; /// The return value is whether this file should be marked as primary or not, based on the analysis of the file +#[allow(clippy::too_many_arguments)] pub async fn validate_file( + data: bytes::Bytes, + file_extension: String, + loaders: Vec, + file_type: Option, + version_fields: Vec, + transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, + redis: &RedisPool, +) -> Result { + // TODO: This needs to be revisited or removed with v3. + // Currently, it checks if the loader is the modpack loader, and extracts the pack data from it. + // This (and the funnction that calls this) should be refactored such that + // - validators are removed (or altogether reworked) + // - if a mrpack is uploaded, the pack data is extracted and usable to extract dependencies automatically + + // TODO: A test needs to be written for this. 
+ match loaders { + loaders if loaders == vec![Loader("mrpack".to_string())] => { + let game_versions = version_fields + .into_iter() + .find_map(|v| MinecraftGameVersion::try_from_version_field(&v).ok()) + .unwrap_or_default(); + let all_game_versions = + MinecraftGameVersion::list_transaction(&mut *transaction, redis).await?; + validate_minecraft_file( + data, + file_extension, + "modpack".to_string(), + loaders, + game_versions, + all_game_versions, + file_type, + ) + .await + } + _ => Ok(ValidationResult::Pass), + } +} + +async fn validate_minecraft_file( data: bytes::Bytes, file_extension: String, mut project_type: String, mut loaders: Vec, - game_versions: Vec, - all_game_versions: Vec, + game_versions: Vec, + all_game_versions: Vec, file_type: Option, ) -> Result { actix_web::web::block(move || { @@ -162,9 +208,10 @@ pub async fn validate_file( .await? } +// Write tests for this fn game_version_supported( - game_versions: &[GameVersion], - all_game_versions: &[crate::database::models::categories::GameVersion], + game_versions: &[MinecraftGameVersion], + all_game_versions: &[MinecraftGameVersion], supported_game_versions: SupportedGameVersions, ) -> bool { match supported_game_versions { @@ -172,19 +219,21 @@ fn game_version_supported( SupportedGameVersions::PastDate(date) => game_versions.iter().any(|x| { all_game_versions .iter() - .find(|y| y.version == x.0) + .find(|y| y.version == x.version) .map(|x| x.created > date) .unwrap_or(false) }), SupportedGameVersions::Range(before, after) => game_versions.iter().any(|x| { all_game_versions .iter() - .find(|y| y.version == x.0) + .find(|y| y.version == x.version) .map(|x| x.created > before && x.created < after) .unwrap_or(false) }), SupportedGameVersions::Custom(versions) => { - versions.iter().any(|x| game_versions.contains(x)) + let version_ids = versions.iter().map(|gv| gv.id).collect::>(); + let game_version_ids: Vec<_> = game_versions.iter().map(|gv| gv.id).collect::>(); + version_ids.iter().any(|x| 
game_version_ids.contains(x)) } } } diff --git a/tests/common/api_v2/mod.rs b/tests/common/api_v2/mod.rs index 0f01b771..2a85bbb3 100644 --- a/tests/common/api_v2/mod.rs +++ b/tests/common/api_v2/mod.rs @@ -6,6 +6,7 @@ use std::rc::Rc; pub mod organization; pub mod project; +pub mod tags; pub mod team; pub mod version; @@ -18,4 +19,15 @@ impl ApiV2 { pub async fn call(&self, req: actix_http::Request) -> ServiceResponse { self.test_app.call(req).await.unwrap() } + + pub async fn reset_search_index(&self) -> ServiceResponse { + let req = actix_web::test::TestRequest::post() + .uri("/v2/admin/_force_reindex") + .append_header(( + "Modrinth-Admin", + dotenvy::var("LABRINTH_ADMIN_KEY").unwrap(), + )) + .to_request(); + self.call(req).await + } } diff --git a/tests/common/api_v2/organization.rs b/tests/common/api_v2/organization.rs index 31f0ea4c..5cfb214d 100644 --- a/tests/common/api_v2/organization.rs +++ b/tests/common/api_v2/organization.rs @@ -3,7 +3,7 @@ use actix_web::{ test::{self, TestRequest}, }; use bytes::Bytes; -use labrinth::models::{organizations::Organization, projects::Project}; +use labrinth::models::{organizations::Organization, v2::projects::LegacyProject}; use serde_json::json; use crate::common::request_data::ImageData; @@ -58,7 +58,7 @@ impl ApiV2 { &self, id_or_title: &str, pat: &str, - ) -> Vec { + ) -> Vec { let resp = self.get_organization_projects(id_or_title, pat).await; assert_eq!(resp.status(), 200); test::read_body_json(resp).await diff --git a/tests/common/api_v2/project.rs b/tests/common/api_v2/project.rs index 38287937..25075089 100644 --- a/tests/common/api_v2/project.rs +++ b/tests/common/api_v2/project.rs @@ -7,12 +7,15 @@ use actix_web::{ }; use bytes::Bytes; use chrono::{DateTime, Utc}; -use labrinth::models::projects::{Project, Version}; +use labrinth::{ + models::v2::projects::{LegacyProject, LegacyVersion}, + search::SearchResults, + util::actix::AppendsMultipart, +}; use rust_decimal::Decimal; use serde_json::json; use 
crate::common::{ - actix::AppendsMultipart, asserts::assert_status, database::MOD_USER_PAT, request_data::{ImageData, ProjectCreationRequestData}, @@ -25,7 +28,7 @@ impl ApiV2 { &self, creation_data: ProjectCreationRequestData, pat: &str, - ) -> (Project, Vec) { + ) -> (LegacyProject, Vec) { // Add a project. let req = TestRequest::post() .uri("/v2/project") @@ -58,7 +61,7 @@ impl ApiV2 { .append_header(("Authorization", pat)) .to_request(); let resp = self.call(req).await; - let versions: Vec = test::read_body_json(resp).await; + let versions: Vec = test::read_body_json(resp).await; (project, versions) } @@ -80,7 +83,7 @@ impl ApiV2 { .to_request(); self.call(req).await } - pub async fn get_project_deserialized(&self, id_or_slug: &str, pat: &str) -> Project { + pub async fn get_project_deserialized(&self, id_or_slug: &str, pat: &str) -> LegacyProject { let resp = self.get_project(id_or_slug, pat).await; assert_eq!(resp.status(), 200); test::read_body_json(resp).await @@ -98,36 +101,12 @@ impl ApiV2 { &self, user_id_or_username: &str, pat: &str, - ) -> Vec { + ) -> Vec { let resp = self.get_user_projects(user_id_or_username, pat).await; assert_eq!(resp.status(), 200); test::read_body_json(resp).await } - pub async fn get_version_from_hash( - &self, - hash: &str, - algorithm: &str, - pat: &str, - ) -> ServiceResponse { - let req = test::TestRequest::get() - .uri(&format!("/v2/version_file/{hash}?algorithm={algorithm}")) - .append_header(("Authorization", pat)) - .to_request(); - self.call(req).await - } - - pub async fn get_version_from_hash_deserialized( - &self, - hash: &str, - algorithm: &str, - pat: &str, - ) -> Version { - let resp = self.get_version_from_hash(hash, algorithm, pat).await; - assert_eq!(resp.status(), 200); - test::read_body_json(resp).await - } - pub async fn edit_project( &self, id_or_slug: &str, @@ -195,6 +174,34 @@ impl ApiV2 { } } + pub async fn search_deserialized( + &self, + query: Option<&str>, + facets: Option, + pat: &str, + ) -> 
SearchResults { + let query_field = if let Some(query) = query { + format!("&query={}", urlencoding::encode(query)) + } else { + "".to_string() + }; + + let facets_field = if let Some(facets) = facets { + format!("&facets={}", urlencoding::encode(&facets.to_string())) + } else { + "".to_string() + }; + + let req = test::TestRequest::get() + .uri(&format!("/v2/search?{}{}", query_field, facets_field)) + .append_header(("Authorization", pat)) + .to_request(); + let resp = self.call(req).await; + let status = resp.status(); + assert_eq!(status, 200); + test::read_body_json(resp).await + } + pub async fn get_analytics_revenue( &self, id_or_slugs: Vec<&str>, diff --git a/tests/common/api_v2/tags.rs b/tests/common/api_v2/tags.rs new file mode 100644 index 00000000..f220b17b --- /dev/null +++ b/tests/common/api_v2/tags.rs @@ -0,0 +1,69 @@ +use actix_web::{ + dev::ServiceResponse, + test::{self, TestRequest}, +}; +use labrinth::routes::v2::tags::{CategoryData, GameVersionQueryData, LoaderData}; + +use crate::common::database::ADMIN_USER_PAT; + +use super::ApiV2; + +impl ApiV2 { + // Tag gets do not include PAT, as they are public. 
+ + pub async fn get_side_types(&self) -> ServiceResponse { + let req = TestRequest::get() + .uri("/v2/tag/side_type") + .append_header(("Authorization", ADMIN_USER_PAT)) + .to_request(); + self.call(req).await + } + + pub async fn get_side_types_deserialized(&self) -> Vec { + let resp = self.get_side_types().await; + assert_eq!(resp.status(), 200); + test::read_body_json(resp).await + } + + pub async fn get_loaders(&self) -> ServiceResponse { + let req = TestRequest::get() + .uri("/v2/tag/loader") + .append_header(("Authorization", ADMIN_USER_PAT)) + .to_request(); + self.call(req).await + } + + pub async fn get_loaders_deserialized(&self) -> Vec { + let resp = self.get_loaders().await; + assert_eq!(resp.status(), 200); + test::read_body_json(resp).await + } + + pub async fn get_categories(&self) -> ServiceResponse { + let req = TestRequest::get() + .uri("/v2/tag/category") + .append_header(("Authorization", ADMIN_USER_PAT)) + .to_request(); + self.call(req).await + } + + pub async fn get_categories_deserialized(&self) -> Vec { + let resp = self.get_categories().await; + assert_eq!(resp.status(), 200); + test::read_body_json(resp).await + } + + pub async fn get_game_versions(&self) -> ServiceResponse { + let req = TestRequest::get() + .uri("/v2/tag/game_version") + .append_header(("Authorization", ADMIN_USER_PAT)) + .to_request(); + self.call(req).await + } + + pub async fn get_game_versions_deserialized(&self) -> Vec { + let resp = self.get_game_versions().await; + assert_eq!(resp.status(), 200); + test::read_body_json(resp).await + } +} diff --git a/tests/common/api_v2/version.rs b/tests/common/api_v2/version.rs index 306db36c..eafef956 100644 --- a/tests/common/api_v2/version.rs +++ b/tests/common/api_v2/version.rs @@ -1,9 +1,18 @@ +use std::collections::HashMap; + use actix_http::{header::AUTHORIZATION, StatusCode}; -use actix_web::{dev::ServiceResponse, test}; -use labrinth::models::projects::Version; +use actix_web::{ + dev::ServiceResponse, + test::{self, 
TestRequest}, +}; +use labrinth::{ + models::{projects::VersionType, v2::projects::LegacyVersion}, + routes::v2::version_file::FileUpdateData, + util::actix::AppendsMultipart, +}; use serde_json::json; -use crate::common::{self, actix::AppendsMultipart, asserts::assert_status}; +use crate::common::{asserts::assert_status, request_data::VersionCreationRequestData}; use super::ApiV2; @@ -13,12 +22,319 @@ pub fn url_encode_json_serialized_vec(elements: &[String]) -> String { } impl ApiV2 { + pub async fn add_public_version( + &self, + creation_data: VersionCreationRequestData, + pat: &str, + ) -> LegacyVersion { + // Add a project. + let req = TestRequest::post() + .uri("/v2/version") + .append_header(("Authorization", pat)) + .set_multipart(creation_data.segment_data) + .to_request(); + let resp = self.call(req).await; + assert_status(&resp, StatusCode::OK); + let value: serde_json::Value = test::read_body_json(resp).await; + let version_id = value["id"].as_str().unwrap(); + + // // Approve as a moderator. 
+ // let req = TestRequest::patch() + // .uri(&format!("/v2/project/{}", creation_data.slug)) + // .append_header(("Authorization", MOD_USER_PAT)) + // .set_json(json!( + // { + // "status": "approved" + // } + // )) + // .to_request(); + // let resp = self.call(req).await; + // assert_status(resp, StatusCode::NO_CONTENT); + + self.get_version_deserialized(version_id, pat).await + } + + pub async fn get_version(&self, id: &str, pat: &str) -> ServiceResponse { + let req = TestRequest::get() + .uri(&format!("/v2/version/{id}")) + .append_header(("Authorization", pat)) + .to_request(); + self.call(req).await + } + + pub async fn get_version_deserialized(&self, id: &str, pat: &str) -> LegacyVersion { + let resp = self.get_version(id, pat).await; + assert_eq!(resp.status(), 200); + test::read_body_json(resp).await + } + + pub async fn edit_version( + &self, + version_id: &str, + patch: serde_json::Value, + pat: &str, + ) -> ServiceResponse { + let req = test::TestRequest::patch() + .uri(&format!("/v2/version/{version_id}")) + .append_header(("Authorization", pat)) + .set_json(patch) + .to_request(); + + self.call(req).await + } + + pub async fn get_version_from_hash( + &self, + hash: &str, + algorithm: &str, + pat: &str, + ) -> ServiceResponse { + let req = test::TestRequest::get() + .uri(&format!("/v2/version_file/{hash}?algorithm={algorithm}")) + .append_header(("Authorization", pat)) + .to_request(); + self.call(req).await + } + + pub async fn get_version_from_hash_deserialized( + &self, + hash: &str, + algorithm: &str, + pat: &str, + ) -> LegacyVersion { + let resp = self.get_version_from_hash(hash, algorithm, pat).await; + assert_eq!(resp.status(), 200); + test::read_body_json(resp).await + } + + pub async fn get_versions_from_hashes( + &self, + hashes: &[&str], + algorithm: &str, + pat: &str, + ) -> ServiceResponse { + let req = TestRequest::post() + .uri("/v2/version_files") + .append_header(("Authorization", pat)) + .set_json(json!({ + "hashes": hashes, + 
"algorithm": algorithm, + })) + .to_request(); + self.call(req).await + } + + pub async fn get_versions_from_hashes_deserialized( + &self, + hashes: &[&str], + algorithm: &str, + pat: &str, + ) -> HashMap { + let resp = self.get_versions_from_hashes(hashes, algorithm, pat).await; + assert_eq!(resp.status(), 200); + test::read_body_json(resp).await + } + + pub async fn get_update_from_hash( + &self, + hash: &str, + algorithm: &str, + loaders: Option>, + game_versions: Option>, + version_types: Option>, + pat: &str, + ) -> ServiceResponse { + let req = test::TestRequest::post() + .uri(&format!( + "/v2/version_file/{hash}/update?algorithm={algorithm}" + )) + .append_header(("Authorization", pat)) + .set_json(json!({ + "loaders": loaders, + "game_versions": game_versions, + "version_types": version_types, + })) + .to_request(); + self.call(req).await + } + + pub async fn get_update_from_hash_deserialized( + &self, + hash: &str, + algorithm: &str, + loaders: Option>, + game_versions: Option>, + version_types: Option>, + pat: &str, + ) -> LegacyVersion { + let resp = self + .get_update_from_hash(hash, algorithm, loaders, game_versions, version_types, pat) + .await; + assert_eq!(resp.status(), 200); + test::read_body_json(resp).await + } + + pub async fn update_files( + &self, + algorithm: &str, + hashes: Vec, + loaders: Option>, + game_versions: Option>, + version_types: Option>, + pat: &str, + ) -> ServiceResponse { + let req = test::TestRequest::post() + .uri("/v2/version_files/update") + .append_header(("Authorization", pat)) + .set_json(json!({ + "algorithm": algorithm, + "hashes": hashes, + "loaders": loaders, + "game_versions": game_versions, + "version_types": version_types, + })) + .to_request(); + self.call(req).await + } + + pub async fn update_files_deserialized( + &self, + algorithm: &str, + hashes: Vec, + loaders: Option>, + game_versions: Option>, + version_types: Option>, + pat: &str, + ) -> HashMap { + let resp = self + .update_files( + algorithm, + 
hashes, + loaders, + game_versions, + version_types, + pat, + ) + .await; + assert_eq!(resp.status(), 200); + test::read_body_json(resp).await + } + + pub async fn update_individual_files( + &self, + algorithm: &str, + hashes: Vec, + pat: &str, + ) -> ServiceResponse { + let req = test::TestRequest::post() + .uri("/v2/version_files/update_individual") + .append_header(("Authorization", pat)) + .set_json(json!({ + "algorithm": algorithm, + "hashes": hashes + })) + .to_request(); + self.call(req).await + } + + pub async fn update_individual_files_deserialized( + &self, + algorithm: &str, + hashes: Vec, + pat: &str, + ) -> HashMap { + let resp = self.update_individual_files(algorithm, hashes, pat).await; + assert_eq!(resp.status(), 200); + test::read_body_json(resp).await + } + + // TODO: Not all fields are tested currently in the V2 tests, only the v2-v3 relevant ones are + #[allow(clippy::too_many_arguments)] + pub async fn get_project_versions( + &self, + project_id_slug: &str, + game_versions: Option>, + loaders: Option>, + featured: Option, + version_type: Option, + limit: Option, + offset: Option, + pat: &str, + ) -> ServiceResponse { + let mut query_string = String::new(); + if let Some(game_versions) = game_versions { + query_string.push_str(&format!( + "&game_versions={}", + urlencoding::encode(&serde_json::to_string(&game_versions).unwrap()) + )); + } + if let Some(loaders) = loaders { + query_string.push_str(&format!( + "&loaders={}", + urlencoding::encode(&serde_json::to_string(&loaders).unwrap()) + )); + } + if let Some(featured) = featured { + query_string.push_str(&format!("&featured={}", featured)); + } + if let Some(version_type) = version_type { + query_string.push_str(&format!("&version_type={}", version_type)); + } + if let Some(limit) = limit { + let limit = limit.to_string(); + query_string.push_str(&format!("&limit={}", limit)); + } + if let Some(offset) = offset { + let offset = offset.to_string(); + query_string.push_str(&format!("&offset={}", 
offset)); + } + + let req = test::TestRequest::get() + .uri(&format!( + "/v2/project/{project_id_slug}/version?{}", + query_string.trim_start_matches('&') + )) + .append_header(("Authorization", pat)) + .to_request(); + self.call(req).await + } + + #[allow(clippy::too_many_arguments)] + pub async fn get_project_versions_deserialized( + &self, + slug: &str, + game_versions: Option>, + loaders: Option>, + featured: Option, + version_type: Option, + limit: Option, + offset: Option, + pat: &str, + ) -> Vec { + let resp = self + .get_project_versions( + slug, + game_versions, + loaders, + featured, + version_type, + limit, + offset, + pat, + ) + .await; + assert_eq!(resp.status(), 200); + test::read_body_json(resp).await + } + + // TODO: remove redundancy in these functions + pub async fn create_default_version( &self, project_id: &str, ordering: Option, pat: &str, - ) -> Version { + ) -> LegacyVersion { let json_data = json!( { "project_id": project_id, @@ -33,19 +349,19 @@ impl ApiV2 { "ordering": ordering, } ); - let json_segment = common::actix::MultipartSegment { + let json_segment = labrinth::util::actix::MultipartSegment { name: "data".to_string(), filename: None, content_type: Some("application/json".to_string()), - data: common::actix::MultipartSegmentData::Text( + data: labrinth::util::actix::MultipartSegmentData::Text( serde_json::to_string(&json_data).unwrap(), ), }; - let file_segment = common::actix::MultipartSegment { + let file_segment = labrinth::util::actix::MultipartSegment { name: "basic-mod-different.jar".to_string(), filename: Some("basic-mod.jar".to_string()), content_type: Some("application/java-archive".to_string()), - data: common::actix::MultipartSegmentData::Binary( + data: labrinth::util::actix::MultipartSegmentData::Binary( include_bytes!("../../../tests/files/basic-mod-different.jar").to_vec(), ), }; @@ -60,7 +376,7 @@ impl ApiV2 { test::read_body_json(resp).await } - pub async fn get_versions(&self, version_ids: Vec, pat: &str) -> Vec { + 
pub async fn get_versions(&self, version_ids: Vec, pat: &str) -> Vec { let ids = url_encode_json_serialized_vec(&version_ids); let request = test::TestRequest::get() .uri(&format!("/v2/versions?ids={}", ids)) diff --git a/tests/common/api_v3/oauth.rs b/tests/common/api_v3/oauth.rs index ee78b5d9..1a6b35f4 100644 --- a/tests/common/api_v3/oauth.rs +++ b/tests/common/api_v3/oauth.rs @@ -114,7 +114,6 @@ pub fn generate_authorize_uri( optional_query_param("scope", scope), optional_query_param("state", state), ) - .to_string() } pub async fn get_authorize_accept_flow_id(response: ServiceResponse) -> String { diff --git a/tests/common/asserts.rs b/tests/common/asserts.rs index 97885b8a..0c0d5464 100644 --- a/tests/common/asserts.rs +++ b/tests/common/asserts.rs @@ -2,15 +2,13 @@ use crate::common::get_json_val_str; use itertools::Itertools; +use labrinth::models::v2::projects::LegacyVersion; pub fn assert_status(response: &actix_web::dev::ServiceResponse, status: actix_http::StatusCode) { assert_eq!(response.status(), status, "{:#?}", response.response()); } -pub fn assert_version_ids( - versions: &[labrinth::models::projects::Version], - expected_ids: Vec, -) { +pub fn assert_version_ids(versions: &[LegacyVersion], expected_ids: Vec) { let version_ids = versions .iter() .map(|v| get_json_val_str(v.id)) diff --git a/tests/common/dummy_data.rs b/tests/common/dummy_data.rs index ed3b7f08..619579a0 100644 --- a/tests/common/dummy_data.rs +++ b/tests/common/dummy_data.rs @@ -1,25 +1,24 @@ #![allow(dead_code)] +use std::io::{Cursor, Write}; + use actix_http::StatusCode; use actix_web::test::{self, TestRequest}; -use labrinth::{ - models::projects::Project, - models::{ - oauth_clients::OAuthClient, organizations::Organization, pats::Scopes, projects::Version, - }, +use labrinth::models::{ + oauth_clients::OAuthClient, + organizations::Organization, + pats::Scopes, + v2::projects::{LegacyProject, LegacyVersion}, }; use serde_json::json; use sqlx::Executor; +use 
zip::{write::FileOptions, CompressionMethod, ZipWriter}; -use crate::common::{actix::AppendsMultipart, database::USER_USER_PAT}; +use crate::common::database::USER_USER_PAT; +use labrinth::util::actix::{AppendsMultipart, MultipartSegment, MultipartSegmentData}; -use super::{ - actix::{MultipartSegment, MultipartSegmentData}, - asserts::assert_status, - database::USER_USER_ID, - environment::TestEnvironment, - get_json_val_str, - request_data::get_public_project_creation_data, -}; +use super::{environment::TestEnvironment, request_data::get_public_project_creation_data}; + +use super::{asserts::assert_status, database::USER_USER_ID, get_json_val_str}; pub const DUMMY_DATA_UPDATE: i64 = 3; @@ -37,13 +36,107 @@ pub const DUMMY_CATEGORIES: &[&str] = &[ pub const DUMMY_OAUTH_CLIENT_ALPHA_SECRET: &str = "abcdefghijklmnopqrstuvwxyz"; #[allow(dead_code)] -pub enum DummyJarFile { +pub enum TestFile { DummyProjectAlpha, DummyProjectBeta, BasicMod, BasicModDifferent, + // Randomly generates a valid .jar with a random hash. + // Unlike the other dummy jar files, this one is not a static file. + // and BasicModRandom.bytes() will return a different file each time. 
+ BasicModRandom { filename: String, bytes: Vec }, + BasicModpackRandom { filename: String, bytes: Vec }, +} + +impl TestFile { + pub fn build_random_jar() -> Self { + let filename = format!("random-mod-{}.jar", rand::random::()); + + let fabric_mod_json = serde_json::json!({ + "schemaVersion": 1, + "id": filename, + "version": "1.0.1", + + "name": filename, + "description": "Does nothing", + "authors": [ + "user" + ], + "contact": { + "homepage": "https://www.modrinth.com", + "sources": "https://www.modrinth.com", + "issues": "https://www.modrinth.com" + }, + + "license": "MIT", + "icon": "none.png", + + "environment": "client", + "entrypoints": { + "main": [ + "io.github.modrinth.Modrinth" + ] + }, + "depends": { + "minecraft": ">=1.20-" + } + } + ) + .to_string(); + + // Create a simulated zip file + let mut cursor = Cursor::new(Vec::new()); + { + let mut zip = ZipWriter::new(&mut cursor); + zip.start_file( + "fabric.mod.json", + FileOptions::default().compression_method(CompressionMethod::Stored), + ) + .unwrap(); + zip.write_all(fabric_mod_json.as_bytes()).unwrap(); + zip.finish().unwrap(); + } + let bytes = cursor.into_inner(); + + TestFile::BasicModRandom { filename, bytes } + } + + pub fn build_random_mrpack() -> Self { + let filename = format!("random-modpack-{}.mrpack", rand::random::()); + + let modrinth_index_json = serde_json::json!({ + "formatVersion": 1, + "game": "minecraft", + "versionId": "1.20.1-9.6", + "name": filename, + "files": [], + "dependencies": { + "fabric-loader": "0.14.22", + "minecraft": "1.20.1" + } + } + ) + .to_string(); + + // Create a simulated zip file + let mut cursor = Cursor::new(Vec::new()); + { + let mut zip = ZipWriter::new(&mut cursor); + zip.start_file( + "modrinth.index.json", + FileOptions::default().compression_method(CompressionMethod::Stored), + ) + .unwrap(); + zip.write_all(modrinth_index_json.as_bytes()).unwrap(); + zip.finish().unwrap(); + } + let bytes = cursor.into_inner(); + + TestFile::BasicModpackRandom { 
filename, bytes } + } } +#[derive(Clone)] #[allow(dead_code)] pub enum DummyImage { SmallIcon, // 200x200 @@ -77,10 +170,10 @@ pub struct DummyData { impl DummyData { pub fn new( - project_alpha: Project, - project_alpha_version: Version, - project_beta: Project, - project_beta_version: Version, + project_alpha: LegacyProject, + project_alpha_version: LegacyVersion, + project_beta: LegacyProject, + project_beta_version: LegacyVersion, organization_zeta: Organization, oauth_client_alpha: OAuthClient, ) -> Self { @@ -210,21 +303,21 @@ pub async fn get_dummy_data(test_env: &TestEnvironment) -> DummyData { ) } -pub async fn add_project_alpha(test_env: &TestEnvironment) -> (Project, Version) { +pub async fn add_project_alpha(test_env: &TestEnvironment) -> (LegacyProject, LegacyVersion) { let (project, versions) = test_env .v2 .add_public_project( - get_public_project_creation_data("alpha", Some(DummyJarFile::DummyProjectAlpha)), + get_public_project_creation_data("alpha", Some(TestFile::DummyProjectAlpha)), USER_USER_PAT, ) .await; (project, versions.into_iter().next().unwrap()) } -pub async fn add_project_beta(test_env: &TestEnvironment) -> (Project, Version) { +pub async fn add_project_beta(test_env: &TestEnvironment) -> (LegacyProject, LegacyVersion) { // Adds dummy data to the database with sqlx (projects, versions, threads) // Generate test project data. 
- let jar = DummyJarFile::DummyProjectBeta; + let jar = TestFile::DummyProjectBeta; let json_data = json!( { "title": "Test Project Beta", @@ -298,14 +391,14 @@ pub async fn add_organization_zeta(test_env: &TestEnvironment) -> Organization { get_organization_zeta(test_env).await } -pub async fn get_project_alpha(test_env: &TestEnvironment) -> (Project, Version) { +pub async fn get_project_alpha(test_env: &TestEnvironment) -> (LegacyProject, LegacyVersion) { // Get project let req = TestRequest::get() .uri("/v2/project/alpha") .append_header(("Authorization", USER_USER_PAT)) .to_request(); let resp = test_env.call(req).await; - let project: Project = test::read_body_json(resp).await; + let project: LegacyProject = test::read_body_json(resp).await; // Get project's versions let req = TestRequest::get() @@ -313,13 +406,13 @@ pub async fn get_project_alpha(test_env: &TestEnvironment) -> (Project, Version) .append_header(("Authorization", USER_USER_PAT)) .to_request(); let resp = test_env.call(req).await; - let versions: Vec = test::read_body_json(resp).await; + let versions: Vec = test::read_body_json(resp).await; let version = versions.into_iter().next().unwrap(); (project, version) } -pub async fn get_project_beta(test_env: &TestEnvironment) -> (Project, Version) { +pub async fn get_project_beta(test_env: &TestEnvironment) -> (LegacyProject, LegacyVersion) { // Get project let req = TestRequest::get() .uri("/v2/project/beta") @@ -327,7 +420,8 @@ pub async fn get_project_beta(test_env: &TestEnvironment) -> (Project, Version) .to_request(); let resp = test_env.call(req).await; assert_status(&resp, StatusCode::OK); - let project: Project = test::read_body_json(resp).await; + let project: serde_json::Value = test::read_body_json(resp).await; + let project: LegacyProject = serde_json::from_value(project).unwrap(); // Get project's versions let req = TestRequest::get() @@ -336,7 +430,7 @@ pub async fn get_project_beta(test_env: &TestEnvironment) -> (Project, Version) 
.to_request(); let resp = test_env.call(req).await; assert_status(&resp, StatusCode::OK); - let versions: Vec = test::read_body_json(resp).await; + let versions: Vec = test::read_body_json(resp).await; let version = versions.into_iter().next().unwrap(); (project, version) @@ -362,30 +456,47 @@ pub async fn get_oauth_client_alpha(test_env: &TestEnvironment) -> OAuthClient { oauth_clients.into_iter().next().unwrap() } -impl DummyJarFile { +impl TestFile { pub fn filename(&self) -> String { match self { - DummyJarFile::DummyProjectAlpha => "dummy-project-alpha.jar", - DummyJarFile::DummyProjectBeta => "dummy-project-beta.jar", - DummyJarFile::BasicMod => "basic-mod.jar", - DummyJarFile::BasicModDifferent => "basic-mod-different.jar", + TestFile::DummyProjectAlpha => "dummy-project-alpha.jar", + TestFile::DummyProjectBeta => "dummy-project-beta.jar", + TestFile::BasicMod => "basic-mod.jar", + TestFile::BasicModDifferent => "basic-mod-different.jar", + TestFile::BasicModRandom { filename, .. } => filename, + TestFile::BasicModpackRandom { filename, .. } => filename, } .to_string() } pub fn bytes(&self) -> Vec { match self { - DummyJarFile::DummyProjectAlpha => { + TestFile::DummyProjectAlpha => { include_bytes!("../../tests/files/dummy-project-alpha.jar").to_vec() } - DummyJarFile::DummyProjectBeta => { + TestFile::DummyProjectBeta => { include_bytes!("../../tests/files/dummy-project-beta.jar").to_vec() } - DummyJarFile::BasicMod => include_bytes!("../../tests/files/basic-mod.jar").to_vec(), - DummyJarFile::BasicModDifferent => { + TestFile::BasicMod => include_bytes!("../../tests/files/basic-mod.jar").to_vec(), + TestFile::BasicModDifferent => { include_bytes!("../../tests/files/basic-mod-different.jar").to_vec() } + TestFile::BasicModRandom { bytes, .. } => bytes.clone(), + TestFile::BasicModpackRandom { bytes, .. 
} => bytes.clone(), + } + } + + pub fn project_type(&self) -> String { + match self { + TestFile::DummyProjectAlpha => "mod", + TestFile::DummyProjectBeta => "mod", + TestFile::BasicMod => "mod", + TestFile::BasicModDifferent => "mod", + TestFile::BasicModRandom { .. } => "mod", + + TestFile::BasicModpackRandom { .. } => "modpack", } + .to_string() } } diff --git a/tests/common/mod.rs b/tests/common/mod.rs index b2a317bb..84ac23ab 100644 --- a/tests/common/mod.rs +++ b/tests/common/mod.rs @@ -2,9 +2,6 @@ use labrinth::{check_env_vars, clickhouse}; use labrinth::{file_hosting, queue, LabrinthConfig}; use std::sync::Arc; -use self::database::TemporaryDatabase; - -pub mod actix; pub mod api_v2; pub mod api_v3; pub mod asserts; @@ -18,7 +15,7 @@ pub mod scopes; // Testing equivalent to 'setup' function, producing a LabrinthConfig // If making a test, you should probably use environment::TestEnvironment::build() (which calls this) -pub async fn setup(db: &TemporaryDatabase) -> LabrinthConfig { +pub async fn setup(db: &database::TemporaryDatabase) -> LabrinthConfig { println!("Setting up labrinth config"); dotenvy::dotenv().ok(); @@ -40,7 +37,7 @@ pub async fn setup(db: &TemporaryDatabase) -> LabrinthConfig { redis_pool.clone(), &mut clickhouse, file_host.clone(), - maxmind_reader.clone(), + maxmind_reader, ) } diff --git a/tests/common/permissions.rs b/tests/common/permissions.rs index 4ab33900..22dbdd8a 100644 --- a/tests/common/permissions.rs +++ b/tests/common/permissions.rs @@ -175,6 +175,7 @@ impl<'a> PermissionsTest<'a> { let resp = test_env.call(request).await; if !self.allowed_failure_codes.contains(&resp.status().as_u16()) { + println!("Body: {:?}", resp.response().body()); return Err(format!( "Failure permissions test failed. 
Expected failure codes {} got {}", self.allowed_failure_codes @@ -206,6 +207,7 @@ impl<'a> PermissionsTest<'a> { let resp = test_env.call(request).await; if !resp.status().is_success() { + println!("Body: {:?}", resp.response().body()); return Err(format!( "Success permissions test failed. Expected success, got {}", resp.status().as_u16() @@ -673,8 +675,7 @@ impl<'a> PermissionsTest<'a> { Ok(()) }; - tokio::try_join!(test_1, test_2, test_3, test_4, test_5, test_6, test_7,) - .map_err(|e| e.to_string())?; + tokio::try_join!(test_1, test_2, test_3, test_4, test_5, test_6, test_7,).map_err(|e| e)?; Ok(()) } @@ -837,7 +838,7 @@ impl<'a> PermissionsTest<'a> { Ok(()) }; - tokio::try_join!(test_1, test_2, test_3,).map_err(|e| e.to_string())?; + tokio::try_join!(test_1, test_2, test_3,).map_err(|e| e)?; Ok(()) } diff --git a/tests/common/request_data.rs b/tests/common/request_data.rs index bd5eb284..1522ded2 100644 --- a/tests/common/request_data.rs +++ b/tests/common/request_data.rs @@ -1,15 +1,21 @@ #![allow(dead_code)] use serde_json::json; -use super::{ - actix::MultipartSegment, - dummy_data::{DummyImage, DummyJarFile}, +use super::dummy_data::{DummyImage, TestFile}; +use labrinth::{ + models::projects::ProjectId, + util::actix::{MultipartSegment, MultipartSegmentData}, }; -use crate::common::actix::MultipartSegmentData; pub struct ProjectCreationRequestData { pub slug: String, - pub jar: Option, + pub jar: Option, + pub segment_data: Vec, +} + +pub struct VersionCreationRequestData { + pub version: String, + pub jar: Option, pub segment_data: Vec, } @@ -21,29 +27,64 @@ pub struct ImageData { pub fn get_public_project_creation_data( slug: &str, - version_jar: Option, + version_jar: Option, ) -> ProjectCreationRequestData { - let initial_versions = if let Some(ref jar) = version_jar { - json!([{ - "file_parts": [jar.filename()], - "version_number": "1.2.3", - "version_title": "start", - "dependencies": [], - "game_versions": ["1.20.1"] , - "release_channel": "release", 
- "loaders": ["fabric"], - "featured": true - }]) + let json_data = get_public_project_creation_data_json(slug, version_jar.as_ref()); + let multipart_data = get_public_creation_data_multipart(&json_data, version_jar.as_ref()); + ProjectCreationRequestData { + slug: slug.to_string(), + jar: version_jar, + segment_data: multipart_data, + } +} + +pub fn get_public_version_creation_data( + project_id: ProjectId, + version_number: &str, + version_jar: TestFile, +) -> VersionCreationRequestData { + let mut json_data = get_public_version_creation_data_json(version_number, &version_jar); + json_data["project_id"] = json!(project_id); + let multipart_data = get_public_creation_data_multipart(&json_data, Some(&version_jar)); + VersionCreationRequestData { + version: version_number.to_string(), + jar: Some(version_jar), + segment_data: multipart_data, + } +} + +pub fn get_public_version_creation_data_json( + version_number: &str, + version_jar: &TestFile, +) -> serde_json::Value { + json!({ + "file_parts": [version_jar.filename()], + "version_number": version_number, + "version_title": "start", + "dependencies": [], + "game_versions": ["1.20.1"] , + "release_channel": "release", + "loaders": ["fabric"], + "featured": true + }) +} + +pub fn get_public_project_creation_data_json( + slug: &str, + version_jar: Option<&TestFile>, +) -> serde_json::Value { + let initial_versions = if let Some(jar) = version_jar { + json!([get_public_version_creation_data_json("1.2.3", jar)]) } else { json!([]) }; let is_draft = version_jar.is_none(); - - let json_data = json!( + json!( { "title": format!("Test Project {slug}"), "slug": slug, + "project_type": version_jar.as_ref().map(|f| f.project_type()).unwrap_or("mod".to_string()), "description": "A dummy project for testing with.", "body": "This project is approved, and versions are listed.", "client_side": "required", @@ -51,19 +92,24 @@ pub fn get_public_project_creation_data( "initial_versions": initial_versions, "is_draft": is_draft, 
"categories": [], - "license_id": "MIT" + "license_id": "MIT", } - ); + ) +} +pub fn get_public_creation_data_multipart( + json_data: &serde_json::Value, + version_jar: Option<&TestFile>, +) -> Vec { // Basic json let json_segment = MultipartSegment { name: "data".to_string(), filename: None, content_type: Some("application/json".to_string()), - data: MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()), + data: MultipartSegmentData::Text(serde_json::to_string(json_data).unwrap()), }; - let segment_data = if let Some(ref jar) = version_jar { + if let Some(jar) = version_jar { // Basic file let file_segment = MultipartSegment { name: jar.filename(), @@ -72,15 +118,9 @@ pub fn get_public_project_creation_data( data: MultipartSegmentData::Binary(jar.bytes()), }; - vec![json_segment.clone(), file_segment] + vec![json_segment, file_segment] } else { - vec![json_segment.clone()] - }; - - ProjectCreationRequestData { - slug: slug.to_string(), - jar: version_jar, - segment_data, + vec![json_segment] } } diff --git a/tests/files/dummy_data.sql b/tests/files/dummy_data.sql index aaa8c1b7..1a26659e 100644 --- a/tests/files/dummy_data.sql +++ b/tests/files/dummy_data.sql @@ -19,22 +19,43 @@ INSERT INTO pats (id, user_id, name, access_token, scopes, expires) VALUES (52, INSERT INTO pats (id, user_id, name, access_token, scopes, expires) VALUES (53, 4, 'friend-pat', 'mrp_patfriend', $1, '2030-08-18 15:48:58.435729+00'); INSERT INTO pats (id, user_id, name, access_token, scopes, expires) VALUES (54, 5, 'enemy-pat', 'mrp_patenemy', $1, '2030-08-18 15:48:58.435729+00'); --- -- Sample game versions, loaders, categories -INSERT INTO game_versions (id, version, type, created) -VALUES (20000, '1.20.1', 'release', timezone('utc', now())); +INSERT INTO loaders (id, loader) VALUES (5, 'fabric'); +INSERT INTO loaders_project_types (joining_loader_id, joining_project_type_id) VALUES (5,1); -INSERT INTO loaders (id, loader) VALUES (1, 'fabric'); -INSERT INTO 
loaders_project_types (joining_loader_id, joining_project_type_id) VALUES (1,1); -INSERT INTO loaders_project_types (joining_loader_id, joining_project_type_id) VALUES (1,2); +INSERT INTO loaders (id, loader) VALUES (6, 'forge'); +INSERT INTO loaders_project_types (joining_loader_id, joining_project_type_id) VALUES (6,1); + +-- Adds dummies to mrpack_loaders +INSERT INTO loader_field_enum_values (enum_id, value) SELECT id, 'fabric' FROM loader_field_enums WHERE enum_name = 'mrpack_loaders'; +INSERT INTO loader_field_enum_values (enum_id, value) SELECT id, 'forge' FROM loader_field_enums WHERE enum_name = 'mrpack_loaders'; + +INSERT INTO loaders_project_types_games (loader_id, project_type_id, game_id) SELECT joining_loader_id, joining_project_type_id, 1 FROM loaders_project_types WHERE joining_loader_id = 5; +INSERT INTO loaders_project_types_games (loader_id, project_type_id, game_id) SELECT joining_loader_id, joining_project_type_id, 1 FROM loaders_project_types WHERE joining_loader_id = 6; + +-- Sample game versions, loaders, categories +-- Game versions is '2' +INSERT INTO loader_field_enum_values(enum_id, value, metadata) +VALUES (2, '1.20.1', '{"type":"release","major":false}'); +INSERT INTO loader_field_enum_values(enum_id, value, metadata) +VALUES (2, '1.20.2', '{"type":"release","major":false}'); +INSERT INTO loader_field_enum_values(enum_id, value, metadata) +VALUES (2, '1.20.3', '{"type":"release","major":false}'); +INSERT INTO loader_field_enum_values(enum_id, value, metadata) +VALUES (2, '1.20.4', '{"type":"beta","major":false}'); +INSERT INTO loader_field_enum_values(enum_id, value, metadata) +VALUES (2, '1.20.5', '{"type":"release","major":true}'); + +INSERT INTO loader_fields_loaders(loader_id, loader_field_id) +SELECT l.id, lf.id FROM loaders l CROSS JOIN loader_fields lf WHERE lf.field = 'game_versions' OR lf.field = 'client_side' OR lf.field = 'server_side'; INSERT INTO categories (id, category, project_type) VALUES - (1, 'combat', 1), - (2, 
'decoration', 1), - (3, 'economy', 1), - (4, 'food', 1), - (5, 'magic', 1), - (6, 'mobs', 1), - (7, 'optimization', 1); + (51, 'combat', 1), + (52, 'decoration', 1), + (53, 'economy', 1), + (54, 'food', 1), + (55, 'magic', 1), + (56, 'mobs', 1), + (57, 'optimization', 1); INSERT INTO categories (id, category, project_type) VALUES (101, 'combat', 2), diff --git a/tests/project.rs b/tests/project.rs index 2a34dae1..5bdf5890 100644 --- a/tests/project.rs +++ b/tests/project.rs @@ -2,19 +2,19 @@ use actix_http::StatusCode; use actix_web::test; use bytes::Bytes; use chrono::{Duration, Utc}; -use common::actix::MultipartSegment; -use common::environment::with_test_environment; +use common::environment::{with_test_environment, TestEnvironment}; use common::permissions::{PermissionsTest, PermissionsTestContext}; use futures::StreamExt; +use itertools::Itertools; use labrinth::database::models::project_item::{PROJECTS_NAMESPACE, PROJECTS_SLUGS_NAMESPACE}; use labrinth::models::ids::base62_impl::parse_base62; use labrinth::models::teams::ProjectPermissions; +use labrinth::util::actix::{AppendsMultipart, MultipartSegment, MultipartSegmentData}; use serde_json::json; -use crate::common::database::*; +use crate::common::{database::*, request_data}; -use crate::common::dummy_data::DUMMY_CATEGORIES; -use crate::common::{actix::AppendsMultipart, environment::TestEnvironment}; +use crate::common::dummy_data::{TestFile, DUMMY_CATEGORIES}; // importing common module. 
mod common; @@ -130,54 +130,50 @@ async fn test_add_remove_project() { ); // Basic json - let json_segment = common::actix::MultipartSegment { + let json_segment = MultipartSegment { name: "data".to_string(), filename: None, content_type: Some("application/json".to_string()), - data: common::actix::MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()), + data: MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()), }; // Basic json, with a different file json_data["initial_versions"][0]["file_parts"][0] = json!("basic-mod-different.jar"); - let json_diff_file_segment = common::actix::MultipartSegment { - data: common::actix::MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()), + let json_diff_file_segment = MultipartSegment { + data: MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()), ..json_segment.clone() }; // Basic json, with a different file, and a different slug json_data["slug"] = json!("new_demo"); json_data["initial_versions"][0]["file_parts"][0] = json!("basic-mod-different.jar"); - let json_diff_slug_file_segment = common::actix::MultipartSegment { - data: common::actix::MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()), + let json_diff_slug_file_segment = MultipartSegment { + data: MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()), ..json_segment.clone() }; // Basic file - let file_segment = common::actix::MultipartSegment { + let file_segment = MultipartSegment { name: "basic-mod.jar".to_string(), filename: Some("basic-mod.jar".to_string()), content_type: Some("application/java-archive".to_string()), - data: common::actix::MultipartSegmentData::Binary( - include_bytes!("../tests/files/basic-mod.jar").to_vec(), - ), + data: MultipartSegmentData::Binary(include_bytes!("../tests/files/basic-mod.jar").to_vec()), }; // Differently named file, with the same content (for hash testing) - let file_diff_name_segment = common::actix::MultipartSegment 
{ + let file_diff_name_segment = MultipartSegment { name: "basic-mod-different.jar".to_string(), filename: Some("basic-mod-different.jar".to_string()), content_type: Some("application/java-archive".to_string()), - data: common::actix::MultipartSegmentData::Binary( - include_bytes!("../tests/files/basic-mod.jar").to_vec(), - ), + data: MultipartSegmentData::Binary(include_bytes!("../tests/files/basic-mod.jar").to_vec()), }; // Differently named file, with different content - let file_diff_name_content_segment = common::actix::MultipartSegment { + let file_diff_name_content_segment = MultipartSegment { name: "basic-mod-different.jar".to_string(), filename: Some("basic-mod-different.jar".to_string()), content_type: Some("application/java-archive".to_string()), - data: common::actix::MultipartSegmentData::Binary( + data: MultipartSegmentData::Binary( include_bytes!("../tests/files/basic-mod-different.jar").to_vec(), ), }; @@ -283,6 +279,55 @@ async fn test_add_remove_project() { test_env.cleanup().await; } +#[actix_rt::test] +async fn test_project_type_sanity() { + let test_env = TestEnvironment::build(None).await; + let api = &test_env.v2; + + // Perform all other patch tests on both 'mod' and 'modpack' + let test_creation_mod = request_data::get_public_project_creation_data( + "test-mod", + Some(TestFile::build_random_jar()), + ); + let test_creation_modpack = request_data::get_public_project_creation_data( + "test-modpack", + Some(TestFile::build_random_mrpack()), + ); + for (mod_or_modpack, test_creation_data) in [ + ("mod", test_creation_mod), + ("modpack", test_creation_modpack), + ] { + let (test_project, test_version) = api + .add_public_project(test_creation_data, USER_USER_PAT) + .await; + let test_project_slug = test_project.slug.as_ref().unwrap(); + + assert_eq!(test_project.project_type, mod_or_modpack); + assert_eq!(test_project.loaders, vec!["fabric"]); + assert_eq!( + test_version[0].loaders.iter().map(|x| &x.0).collect_vec(), + vec!["fabric"] + ); + + 
let project = api + .get_project_deserialized(test_project_slug, USER_USER_PAT) + .await; + assert_eq!(test_project.loaders, vec!["fabric"]); + assert_eq!(project.project_type, mod_or_modpack); + + let version = api + .get_version_deserialized(&test_version[0].id.to_string(), USER_USER_PAT) + .await; + assert_eq!( + version.loaders.iter().map(|x| &x.0).collect_vec(), + vec!["fabric"] + ); + } + + // TODO: as we get more complicated strucures with v3 testing, and alpha/beta get more complicated, we should add more tests here, + // to ensure that projects created with v3 routes are still valid and work with v2 routes. +} + #[actix_rt::test] pub async fn test_patch_project() { let test_env = TestEnvironment::build(None).await; @@ -426,19 +471,30 @@ pub async fn test_patch_project() { assert_eq!(resp.status(), 404); // New slug does work - let project = api.get_project_deserialized("newslug", USER_USER_PAT).await; - assert_eq!(project.slug, Some("newslug".to_string())); - assert_eq!(project.title, "New successful title"); - assert_eq!(project.description, "New successful description"); - assert_eq!(project.body, "New successful body"); - assert_eq!(project.categories, vec![DUMMY_CATEGORIES[0]]); - assert_eq!(project.license.id, "MIT"); - assert_eq!(project.issues_url, Some("https://github.com".to_string())); - assert_eq!(project.discord_url, Some("https://discord.gg".to_string())); - assert_eq!(project.wiki_url, Some("https://wiki.com".to_string())); - assert_eq!(project.client_side.to_string(), "optional"); - assert_eq!(project.server_side.to_string(), "required"); - assert_eq!(project.donation_urls.unwrap()[0].url, "https://patreon.com"); + let resp = api.get_project("newslug", USER_USER_PAT).await; + let project: serde_json::Value = test::read_body_json(resp).await; + + assert_eq!(project["slug"], json!(Some("newslug".to_string()))); + assert_eq!(project["title"], "New successful title"); + assert_eq!(project["description"], "New successful description"); + 
assert_eq!(project["body"], "New successful body"); + assert_eq!(project["categories"], json!(vec![DUMMY_CATEGORIES[0]])); + assert_eq!(project["license"]["id"], "MIT"); + assert_eq!( + project["issues_url"], + json!(Some("https://github.com".to_string())) + ); + assert_eq!( + project["discord_url"], + json!(Some("https://discord.gg".to_string())) + ); + assert_eq!( + project["wiki_url"], + json!(Some("https://wiki.com".to_string())) + ); + assert_eq!(project["client_side"], json!("optional")); + assert_eq!(project["server_side"], json!("required")); + assert_eq!(project["donation_urls"][0]["url"], "https://patreon.com"); // Cleanup test db test_env.cleanup().await; @@ -499,8 +555,8 @@ async fn permissions_patch_project() { ("title", json!("randomname")), ("description", json!("randomdescription")), ("categories", json!(["combat", "economy"])), - ("client_side", json!("unsupported")), - ("server_side", json!("unsupported")), + // ("client_side", json!("unsupported")), + // ("server_side", json!("unsupported")), ("additional_categories", json!(["decoration"])), ("issues_url", json!("https://issues.com")), ("source_url", json!("https://source.com")), @@ -532,11 +588,10 @@ async fn permissions_patch_project() { }, })) }; - PermissionsTest::new(&test_env) .simple_project_permissions_test(edit_details, req_gen) .await - .unwrap(); + .into_iter(); } }) .buffer_unordered(4) @@ -744,7 +799,7 @@ async fn permissions_upload_version() { name: "data".to_string(), filename: None, content_type: Some("application/json".to_string()), - data: common::actix::MultipartSegmentData::Text( + data: MultipartSegmentData::Text( serde_json::to_string(&json!({ "project_id": ctx.project_id.unwrap(), "file_parts": ["basic-mod.jar"], @@ -764,7 +819,7 @@ async fn permissions_upload_version() { name: "basic-mod.jar".to_string(), filename: Some("basic-mod.jar".to_string()), content_type: Some("application/java-archive".to_string()), - data: common::actix::MultipartSegmentData::Binary( + data: 
MultipartSegmentData::Binary( include_bytes!("../tests/files/basic-mod.jar").to_vec(), ), }, @@ -785,7 +840,7 @@ async fn permissions_upload_version() { name: "data".to_string(), filename: None, content_type: Some("application/json".to_string()), - data: common::actix::MultipartSegmentData::Text( + data: MultipartSegmentData::Text( serde_json::to_string(&json!({ "file_parts": ["basic-mod-different.jar"], })) @@ -796,7 +851,7 @@ async fn permissions_upload_version() { name: "basic-mod-different.jar".to_string(), filename: Some("basic-mod-different.jar".to_string()), content_type: Some("application/java-archive".to_string()), - data: common::actix::MultipartSegmentData::Binary( + data: MultipartSegmentData::Binary( include_bytes!("../tests/files/basic-mod-different.jar").to_vec(), ), }, diff --git a/tests/scopes.rs b/tests/scopes.rs index 7c11aa79..c0508e6b 100644 --- a/tests/scopes.rs +++ b/tests/scopes.rs @@ -1,8 +1,8 @@ use actix_web::test::{self, TestRequest}; use bytes::Bytes; use chrono::{Duration, Utc}; -use common::actix::AppendsMultipart; use labrinth::models::pats::Scopes; +use labrinth::util::actix::{AppendsMultipart, MultipartSegment, MultipartSegmentData}; use serde_json::json; use crate::common::{database::*, environment::TestEnvironment, scopes::ScopeTest}; @@ -225,19 +225,17 @@ pub async fn project_version_create_scopes() { "license_id": "MIT" } ); - let json_segment = common::actix::MultipartSegment { + let json_segment = MultipartSegment { name: "data".to_string(), filename: None, content_type: Some("application/json".to_string()), - data: common::actix::MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()), + data: MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()), }; - let file_segment = common::actix::MultipartSegment { + let file_segment = MultipartSegment { name: "basic-mod.jar".to_string(), filename: Some("basic-mod.jar".to_string()), content_type: Some("application/java-archive".to_string()), - data: 
common::actix::MultipartSegmentData::Binary( - include_bytes!("../tests/files/basic-mod.jar").to_vec(), - ), + data: MultipartSegmentData::Binary(include_bytes!("../tests/files/basic-mod.jar").to_vec()), }; let req_gen = || { @@ -266,17 +264,17 @@ pub async fn project_version_create_scopes() { "featured": true } ); - let json_segment = common::actix::MultipartSegment { + let json_segment = MultipartSegment { name: "data".to_string(), filename: None, content_type: Some("application/json".to_string()), - data: common::actix::MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()), + data: MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()), }; - let file_segment = common::actix::MultipartSegment { + let file_segment = MultipartSegment { name: "basic-mod-different.jar".to_string(), filename: Some("basic-mod.jar".to_string()), content_type: Some("application/java-archive".to_string()), - data: common::actix::MultipartSegmentData::Binary( + data: MultipartSegmentData::Binary( include_bytes!("../tests/files/basic-mod-different.jar").to_vec(), ), }; @@ -819,11 +817,11 @@ pub async fn version_write_scopes() { // Generate test project data. 
// Basic json - let json_segment = common::actix::MultipartSegment { + let json_segment = MultipartSegment { name: "data".to_string(), filename: None, content_type: Some("application/json".to_string()), - data: common::actix::MultipartSegmentData::Text( + data: MultipartSegmentData::Text( serde_json::to_string(&json!( { "file_types": { @@ -836,11 +834,11 @@ pub async fn version_write_scopes() { }; // Differently named file, with different content - let content_segment = common::actix::MultipartSegment { + let content_segment = MultipartSegment { name: "simple-zip.zip".to_string(), filename: Some("simple-zip.zip".to_string()), content_type: Some("application/zip".to_string()), - data: common::actix::MultipartSegmentData::Binary( + data: MultipartSegmentData::Binary( include_bytes!("../tests/files/simple-zip.zip").to_vec(), ), }; diff --git a/tests/search.rs b/tests/search.rs new file mode 100644 index 00000000..9e87ee18 --- /dev/null +++ b/tests/search.rs @@ -0,0 +1,298 @@ +use crate::common::database::*; +use crate::common::dummy_data::DUMMY_CATEGORIES; +use crate::common::environment::TestEnvironment; +use crate::common::request_data::{get_public_version_creation_data, ProjectCreationRequestData}; +use common::dummy_data::TestFile; +use common::request_data; +use futures::stream::StreamExt; +use labrinth::models::ids::base62_impl::parse_base62; +use serde_json::json; +use std::collections::HashMap; +use std::sync::Arc; + +// importing common module. 
+mod common; + +#[actix_rt::test] +async fn search_projects() { + // Test setup and dummy data + let test_env = TestEnvironment::build(Some(8)).await; + let api = &test_env.v2; + let test_name = test_env.db.database_name.clone(); + + // Add dummy projects of various categories for searchability + let mut project_creation_futures = vec![]; + + let create_async_future = + |id: u64, + pat: &'static str, + is_modpack: bool, + modify_json: Box| { + let slug = format!("{test_name}-searchable-project-{id}"); + + let jar = if is_modpack { + TestFile::build_random_mrpack() + } else { + TestFile::build_random_jar() + }; + let mut basic_project_json = + request_data::get_public_project_creation_data_json(&slug, Some(&jar)); + modify_json(&mut basic_project_json); + + let basic_project_multipart = + request_data::get_public_creation_data_multipart(&basic_project_json, Some(&jar)); + // Add a project- simple, should work. + let req = api.add_public_project( + ProjectCreationRequestData { + slug, + jar: Some(jar), + segment_data: basic_project_multipart, + }, + pat, + ); + async move { + let (project, _) = req.await; + + // Approve, so that the project is searchable + let resp = api + .edit_project( + &project.id.to_string(), + json!({ + "status": "approved" + }), + MOD_USER_PAT, + ) + .await; + assert_eq!(resp.status(), 204); + (project.id.0, id) + } + }; + + // Test project 0 + let id = 0; + let modify_json = |json: &mut serde_json::Value| { + json["categories"] = json!(DUMMY_CATEGORIES[4..6]); + json["server_side"] = json!("required"); + json["license_id"] = json!("LGPL-3.0-or-later"); + }; + project_creation_futures.push(create_async_future( + id, + USER_USER_PAT, + false, + Box::new(modify_json), + )); + + // Test project 1 + let id = 1; + let modify_json = |json: &mut serde_json::Value| { + json["categories"] = json!(DUMMY_CATEGORIES[0..2]); + json["client_side"] = json!("optional"); + }; + project_creation_futures.push(create_async_future( + id, + USER_USER_PAT, + false, 
+ Box::new(modify_json), + )); + + // Test project 2 + let id = 2; + let modify_json = |json: &mut serde_json::Value| { + json["categories"] = json!(DUMMY_CATEGORIES[0..2]); + json["server_side"] = json!("required"); + json["title"] = json!("Mysterious Project"); + }; + project_creation_futures.push(create_async_future( + id, + USER_USER_PAT, + false, + Box::new(modify_json), + )); + + // Test project 3 + let id = 3; + let modify_json = |json: &mut serde_json::Value| { + json["categories"] = json!(DUMMY_CATEGORIES[0..3]); + json["server_side"] = json!("required"); + json["initial_versions"][0]["game_versions"] = json!(["1.20.4"]); + json["title"] = json!("Mysterious Project"); + json["license_id"] = json!("LicenseRef-All-Rights-Reserved"); // closed source + }; + project_creation_futures.push(create_async_future( + id, + FRIEND_USER_PAT, + false, + Box::new(modify_json), + )); + + // Test project 4 + let id = 4; + let modify_json = |json: &mut serde_json::Value| { + json["categories"] = json!(DUMMY_CATEGORIES[0..3]); + json["client_side"] = json!("optional"); + json["initial_versions"][0]["game_versions"] = json!(["1.20.5"]); + }; + project_creation_futures.push(create_async_future( + id, + USER_USER_PAT, + true, + Box::new(modify_json), + )); + + // Test project 5 + let id = 5; + let modify_json = |json: &mut serde_json::Value| { + json["categories"] = json!(DUMMY_CATEGORIES[5..6]); + json["client_side"] = json!("optional"); + json["initial_versions"][0]["game_versions"] = json!(["1.20.5"]); + json["license_id"] = json!("LGPL-3.0-or-later"); + }; + project_creation_futures.push(create_async_future( + id, + USER_USER_PAT, + false, + Box::new(modify_json), + )); + + // Test project 6 + let id = 6; + let modify_json = |json: &mut serde_json::Value| { + json["categories"] = json!(DUMMY_CATEGORIES[5..6]); + json["client_side"] = json!("optional"); + json["server_side"] = json!("required"); + json["license_id"] = json!("LGPL-3.0-or-later"); + }; + 
project_creation_futures.push(create_async_future( + id, + FRIEND_USER_PAT, + false, + Box::new(modify_json), + )); + + // Test project 7 (testing the search bug) + // This project has an initial private forge version that is 1.20.3, and a fabric 1.20.5 version. + // This means that a search for fabric + 1.20.3 or forge + 1.20.5 should not return this project. + let id = 7; + let modify_json = |json: &mut serde_json::Value| { + json["categories"] = json!(DUMMY_CATEGORIES[5..6]); + json["client_side"] = json!("optional"); + json["server_side"] = json!("required"); + json["license_id"] = json!("LGPL-3.0-or-later"); + json["initial_versions"][0]["loaders"] = json!(["forge"]); + json["initial_versions"][0]["game_versions"] = json!(["1.20.2"]); + }; + project_creation_futures.push(create_async_future( + id, + USER_USER_PAT, + false, + Box::new(modify_json), + )); + + // Await all project creation + // Returns a mapping of: + // project id -> test id + let id_conversion: Arc> = Arc::new( + futures::future::join_all(project_creation_futures) + .await + .into_iter() + .collect(), + ); + + // Create a second version for project 7 + let project_7 = api + .get_project_deserialized(&format!("{test_name}-searchable-project-7"), USER_USER_PAT) + .await; + api.add_public_version( + get_public_version_creation_data(project_7.id, "1.0.0", TestFile::build_random_jar()), + USER_USER_PAT, + ) + .await; + + // Pairs of: + // 1. vec of search facets + // 2. 
expected project ids to be returned by this search + let pairs = vec![ + (json!([["categories:fabric"]]), vec![0, 1, 2, 3, 4, 5, 6, 7]), + (json!([["categories:forge"]]), vec![7]), + ( + json!([["categories:fabric", "categories:forge"]]), + vec![0, 1, 2, 3, 4, 5, 6, 7], + ), + (json!([["categories:fabric"], ["categories:forge"]]), vec![]), + ( + json!([ + ["categories:fabric"], + [&format!("categories:{}", DUMMY_CATEGORIES[0])], + ]), + vec![1, 2, 3, 4], + ), + (json!([["project_type:modpack"]]), vec![4]), + (json!([["client_side:required"]]), vec![0, 2, 3]), + (json!([["server_side:required"]]), vec![0, 2, 3, 6, 7]), + (json!([["open_source:true"]]), vec![0, 1, 2, 4, 5, 6, 7]), + (json!([["license:MIT"]]), vec![1, 2, 4]), + (json!([[r#"title:'Mysterious Project'"#]]), vec![2, 3]), + (json!([["author:user"]]), vec![0, 1, 2, 4, 5, 7]), + (json!([["versions:1.20.5"]]), vec![4, 5]), + // bug fix + ( + json!([ + // Only the forge one has 1.20.2, so its true that this project 'has' + // 1.20.2 and a fabric version, but not true that it has a 1.20.2 fabric version. 
+ ["categories:fabric"], + ["versions:1.20.2"] + ]), + vec![], + ), + // Project type change + // Modpack should still be able to search based on former loader, even though technically the loader is 'mrpack' + (json!([["categories:mrpack"]]), vec![4]), + ( + json!([["categories:mrpack"], ["categories:fabric"]]), + vec![4], + ), + ( + json!([ + ["categories:mrpack"], + ["categories:fabric"], + ["project_type:modpack"] + ]), + vec![4], + ), + ]; + // TODO: versions, game versions + // Untested: + // - downloads (not varied) + // - color (not varied) + // - created_timestamp (not varied) + // - modified_timestamp (not varied) + + // Forcibly reset the search index + let resp = api.reset_search_index().await; + assert_eq!(resp.status(), 204); + + // Test searches + let stream = futures::stream::iter(pairs); + stream + .for_each_concurrent(1, |(facets, mut expected_project_ids)| { + let id_conversion = id_conversion.clone(); + let test_name = test_name.clone(); + async move { + let projects = api + .search_deserialized(Some(&test_name), Some(facets.clone()), USER_USER_PAT) + .await; + let mut found_project_ids: Vec = projects + .hits + .into_iter() + .map(|p| id_conversion[&parse_base62(&p.project_id).unwrap()]) + .collect(); + expected_project_ids.sort(); + found_project_ids.sort(); + assert_eq!(found_project_ids, expected_project_ids); + } + }) + .await; + + // Cleanup test db + test_env.cleanup().await; +} diff --git a/tests/tags.rs b/tests/tags.rs new file mode 100644 index 00000000..5563b635 --- /dev/null +++ b/tests/tags.rs @@ -0,0 +1,68 @@ +use crate::common::environment::TestEnvironment; +use std::collections::HashSet; + +mod common; + +#[actix_rt::test] +async fn get_tags() { + let test_env = TestEnvironment::build(None).await; + let api = &test_env.v2; + + let game_versions = api.get_game_versions_deserialized().await; + let loaders = api.get_loaders_deserialized().await; + let side_types = api.get_side_types_deserialized().await; + let categories = 
api.get_categories_deserialized().await; + + // These tests match dummy data and will need to be updated if the dummy data changes; + let game_version_versions = game_versions + .into_iter() + .map(|x| x.version) + .collect::>(); + assert_eq!( + game_version_versions, + ["1.20.1", "1.20.2", "1.20.3", "1.20.4", "1.20.5"] + .iter() + .map(|s| s.to_string()) + .collect() + ); + + let loader_names = loaders.into_iter().map(|x| x.name).collect::>(); + assert_eq!( + loader_names, + ["fabric", "forge", "mrpack"] + .iter() + .map(|s| s.to_string()) + .collect() + ); + + let side_type_names = side_types.into_iter().collect::>(); + assert_eq!( + side_type_names, + ["unknown", "required", "optional", "unsupported"] + .iter() + .map(|s| s.to_string()) + .collect() + ); + + let category_names = categories + .into_iter() + .map(|x| x.name) + .collect::>(); + assert_eq!( + category_names, + [ + "combat", + "economy", + "food", + "optimization", + "decoration", + "mobs", + "magic" + ] + .iter() + .map(|s| s.to_string()) + .collect() + ); + + test_env.cleanup().await; +} diff --git a/tests/user.rs b/tests/user.rs index 664bbdc1..82fcc3f8 100644 --- a/tests/user.rs +++ b/tests/user.rs @@ -3,7 +3,7 @@ use common::{ environment::with_test_environment, }; -use crate::common::{dummy_data::DummyJarFile, request_data::get_public_project_creation_data}; +use crate::common::{dummy_data::TestFile, request_data::get_public_project_creation_data}; mod common; @@ -25,7 +25,7 @@ pub async fn get_user_projects_after_creating_project_returns_new_project() { let (project, _) = api .add_public_project( - get_public_project_creation_data("slug", Some(DummyJarFile::BasicMod)), + get_public_project_creation_data("slug", Some(TestFile::BasicMod)), USER_USER_PAT, ) .await; @@ -44,7 +44,7 @@ pub async fn get_user_projects_after_deleting_project_shows_removal() { let api = test_env.v2; let (project, _) = api .add_public_project( - get_public_project_creation_data("iota", Some(DummyJarFile::BasicMod)), + 
get_public_project_creation_data("iota", Some(TestFile::BasicMod)), USER_USER_PAT, ) .await; diff --git a/tests/version.rs b/tests/version.rs index 04198c5b..2edd400e 100644 --- a/tests/version.rs +++ b/tests/version.rs @@ -1,11 +1,497 @@ +use actix_web::test; +use common::environment::TestEnvironment; +use futures::StreamExt; +use labrinth::database::models::version_item::VERSIONS_NAMESPACE; +use labrinth::models::ids::base62_impl::parse_base62; +use labrinth::models::projects::{Loader, ProjectId, VersionId, VersionStatus, VersionType}; +use labrinth::routes::v2::version_file::FileUpdateData; +use serde_json::json; + +use crate::common::database::*; + +use crate::common::dummy_data::TestFile; +use crate::common::request_data::get_public_version_creation_data; + +// importing common module. +mod common; + +#[actix_rt::test] +async fn test_get_version() { + // Test setup and dummy data + let test_env = TestEnvironment::build(None).await; + let api = &test_env.v2; + let alpha_project_id: &String = &test_env.dummy.as_ref().unwrap().project_alpha.project_id; + let alpha_version_id = &test_env.dummy.as_ref().unwrap().project_alpha.version_id; + let beta_version_id = &test_env.dummy.as_ref().unwrap().project_beta.version_id; + + // Perform request on dummy data + let version = api + .get_version_deserialized(alpha_version_id, USER_USER_PAT) + .await; + assert_eq!(&version.project_id.to_string(), alpha_project_id); + assert_eq!(&version.id.to_string(), alpha_version_id); + + let cached_project = test_env + .db + .redis_pool + .get::(VERSIONS_NAMESPACE, parse_base62(alpha_version_id).unwrap()) + .await + .unwrap() + .unwrap(); + let cached_project: serde_json::Value = serde_json::from_str(&cached_project).unwrap(); + assert_eq!( + cached_project["inner"]["project_id"], + json!(parse_base62(alpha_project_id).unwrap()) + ); + + // Request should fail on non-existent version + let resp = api.get_version("false", USER_USER_PAT).await; + assert_eq!(resp.status(), 404); + + // 
Similarly, request should fail on non-authorized user, on a yet-to-be-approved or hidden project, with a 404 (hiding the existence of the project) + // TODO: beta version should already be draft in dummy data, but theres a bug in finding it that + api.edit_version( + beta_version_id, + json!({ + "status": "draft" + }), + USER_USER_PAT, + ) + .await; + let resp = api.get_version(beta_version_id, USER_USER_PAT).await; + assert_eq!(resp.status(), 200); + let resp = api.get_version(beta_version_id, ENEMY_USER_PAT).await; + assert_eq!(resp.status(), 404); + + // Cleanup test db + test_env.cleanup().await; +} + +#[actix_rt::test] + +async fn version_updates() { + // Test setup and dummy data + let test_env = TestEnvironment::build(None).await; + let api = &test_env.v2; + + let alpha_project_id: &String = &test_env.dummy.as_ref().unwrap().project_alpha.project_id; + let alpha_version_id = &test_env.dummy.as_ref().unwrap().project_alpha.version_id; + let beta_version_id = &test_env.dummy.as_ref().unwrap().project_beta.version_id; + let alpha_version_hash = &test_env.dummy.as_ref().unwrap().project_alpha.file_hash; + let beta_version_hash = &test_env.dummy.as_ref().unwrap().project_beta.file_hash; + + // Quick test, using get version from hash + let version = api + .get_version_from_hash_deserialized(alpha_version_hash, "sha1", USER_USER_PAT) + .await; + assert_eq!(&version.id.to_string(), alpha_version_id); + + // Get versions from hash + let versions = api + .get_versions_from_hashes_deserialized( + &[alpha_version_hash.as_str(), beta_version_hash.as_str()], + "sha1", + USER_USER_PAT, + ) + .await; + assert_eq!(versions.len(), 2); + assert_eq!( + &versions[alpha_version_hash].id.to_string(), + alpha_version_id + ); + assert_eq!(&versions[beta_version_hash].id.to_string(), beta_version_id); + + // When there is only the one version, there should be no updates + let version = api + .get_update_from_hash_deserialized( + alpha_version_hash, + "sha1", + None, + None, + None, + 
USER_USER_PAT, + ) + .await; + assert_eq!(&version.id.to_string(), alpha_version_id); + + let versions = api + .update_files_deserialized( + "sha1", + vec![alpha_version_hash.to_string()], + None, + None, + None, + USER_USER_PAT, + ) + .await; + assert_eq!(versions.len(), 1); + assert_eq!( + &versions[alpha_version_hash].id.to_string(), + alpha_version_id + ); + + // Add 3 new versions, 1 before, and 2 after, with differing game_version/version_types/loaders + let mut update_ids = vec![]; + for (version_number, patch_value) in [ + ( + "0.9.9", + json!({ + "game_versions": ["1.20.1"], + }), + ), + ( + "1.5.0", + json!({ + "game_versions": ["1.20.3"], + "loaders": ["fabric"], + }), + ), + ( + "1.5.1", + json!({ + "game_versions": ["1.20.4"], + "loaders": ["forge"], + "version_type": "beta" + }), + ), + ] + .iter() + { + let version = api + .add_public_version( + get_public_version_creation_data( + ProjectId(parse_base62(alpha_project_id).unwrap()), + version_number, + TestFile::build_random_jar(), + ), + USER_USER_PAT, + ) + .await; + update_ids.push(version.id); + + // Patch using json + api.edit_version(&version.id.to_string(), patch_value.clone(), USER_USER_PAT) + .await; + } + + let check_expected = |game_versions: Option>, + loaders: Option>, + version_types: Option>, + result_id: Option| async move { + let (success, result_id) = match result_id { + Some(id) => (true, id), + None => (false, VersionId(0)), + }; + // get_update_from_hash + let resp = api + .get_update_from_hash( + alpha_version_hash, + "sha1", + loaders.clone(), + game_versions.clone(), + version_types.clone(), + USER_USER_PAT, + ) + .await; + if success { + assert_eq!(resp.status(), 200); + let body: serde_json::Value = test::read_body_json(resp).await; + let id = body["id"].as_str().unwrap(); + assert_eq!(id, &result_id.to_string()); + } else { + assert_eq!(resp.status(), 404); + } + + // update_files + let versions = api + .update_files_deserialized( + "sha1", + 
vec![alpha_version_hash.to_string()], + loaders.clone(), + game_versions.clone(), + version_types.clone(), + USER_USER_PAT, + ) + .await; + if success { + assert_eq!(versions.len(), 1); + let first = versions.iter().next().unwrap(); + assert_eq!(first.1.id, result_id); + } else { + assert_eq!(versions.len(), 0); + } + + // update_individual_files + let hashes = vec![FileUpdateData { + hash: alpha_version_hash.to_string(), + loaders, + game_versions, + version_types: version_types.map(|v| { + v.into_iter() + .map(|v| serde_json::from_str(&format!("\"{v}\"")).unwrap()) + .collect() + }), + }]; + let versions = api + .update_individual_files_deserialized("sha1", hashes, USER_USER_PAT) + .await; + if success { + assert_eq!(versions.len(), 1); + let first = versions.iter().next().unwrap(); + assert_eq!(first.1.id, result_id); + } else { + assert_eq!(versions.len(), 0); + } + }; + + let tests = vec![ + check_expected( + Some(vec!["1.20.1".to_string()]), + None, + None, + Some(update_ids[0]), + ), + check_expected( + Some(vec!["1.20.3".to_string()]), + None, + None, + Some(update_ids[1]), + ), + check_expected( + Some(vec!["1.20.4".to_string()]), + None, + None, + Some(update_ids[2]), + ), + // Loader restrictions + check_expected( + None, + Some(vec!["fabric".to_string()]), + None, + Some(update_ids[1]), + ), + check_expected( + None, + Some(vec!["forge".to_string()]), + None, + Some(update_ids[2]), + ), + // Version type restrictions + check_expected( + None, + None, + Some(vec!["release".to_string()]), + Some(update_ids[1]), + ), + check_expected( + None, + None, + Some(vec!["beta".to_string()]), + Some(update_ids[2]), + ), + // Specific combination + check_expected( + None, + Some(vec!["fabric".to_string()]), + Some(vec!["release".to_string()]), + Some(update_ids[1]), + ), + // Impossible combination + check_expected( + None, + Some(vec!["fabric".to_string()]), + Some(vec!["beta".to_string()]), + None, + ), + // No restrictions, should do the last one + 
check_expected(None, None, None, Some(update_ids[2])), + ]; + + // Wait on all tests, 4 at a time + futures::stream::iter(tests) + .buffer_unordered(4) + .collect::>() + .await; + + // We do a couple small tests for get_project_versions_deserialized as well + // TODO: expand this more. + let versions = api + .get_project_versions_deserialized( + alpha_project_id, + None, + None, + None, + None, + None, + None, + USER_USER_PAT, + ) + .await; + assert_eq!(versions.len(), 4); + let versions = api + .get_project_versions_deserialized( + alpha_project_id, + None, + Some(vec!["forge".to_string()]), + None, + None, + None, + None, + USER_USER_PAT, + ) + .await; + assert_eq!(versions.len(), 1); + + // Cleanup test db + test_env.cleanup().await; +} + +#[actix_rt::test] +pub async fn test_patch_version() { + let test_env = TestEnvironment::build(None).await; + let api = &test_env.v2; + + let alpha_version_id = &test_env.dummy.as_ref().unwrap().project_alpha.version_id; + + // // First, we do some patch requests that should fail. + // // Failure because the user is not authorized. + let resp = api + .edit_version( + alpha_version_id, + json!({ + "name": "test 1", + }), + ENEMY_USER_PAT, + ) + .await; + assert_eq!(resp.status(), 401); + + // Failure because these are illegal requested statuses for a normal user. + for req in ["unknown", "scheduled"] { + let resp = api + .edit_version( + alpha_version_id, + json!({ + "status": req, + // requested status it not set here, but in /schedule + }), + USER_USER_PAT, + ) + .await; + assert_eq!(resp.status(), 400); + } + + // Sucessful request to patch many fields. 
+ let resp = api + .edit_version( + alpha_version_id, + json!({ + "name": "new version name", + "version_number": "1.3.0", + "changelog": "new changelog", + "version_type": "beta", + // // "dependencies": [], TODO: test this + "game_versions": ["1.20.5"], + "loaders": ["forge"], + "featured": false, + // "primary_file": [], TODO: test this + // // "downloads": 0, TODO: moderator exclusive + "status": "draft", + // // "filetypes": ["jar"], TODO: test this + }), + USER_USER_PAT, + ) + .await; + assert_eq!(resp.status(), 204); + + let version = api + .get_version_deserialized(alpha_version_id, USER_USER_PAT) + .await; + assert_eq!(version.name, "new version name"); + assert_eq!(version.version_number, "1.3.0"); + assert_eq!(version.changelog, "new changelog"); + assert_eq!( + version.version_type, + serde_json::from_str::("\"beta\"").unwrap() + ); + assert_eq!(version.game_versions, vec!["1.20.5"]); + assert_eq!(version.loaders, vec![Loader("forge".to_string())]); + assert!(!version.featured); + assert_eq!(version.status, VersionStatus::from_string("draft")); + + // These ones are checking the v2-v3 rerouting, we eneusre that only 'game_versions' + // works as expected, as well as only 'loaders' + let resp = api + .edit_version( + alpha_version_id, + json!({ + "game_versions": ["1.20.1", "1.20.2", "1.20.4"], + }), + USER_USER_PAT, + ) + .await; + assert_eq!(resp.status(), 204); + + let version = api + .get_version_deserialized(alpha_version_id, USER_USER_PAT) + .await; + assert_eq!(version.game_versions, vec!["1.20.1", "1.20.2", "1.20.4"]); + assert_eq!(version.loaders, vec![Loader("forge".to_string())]); // From last patch + + let resp = api + .edit_version( + alpha_version_id, + json!({ + "loaders": ["fabric"], + }), + USER_USER_PAT, + ) + .await; + assert_eq!(resp.status(), 204); + + let version = api + .get_version_deserialized(alpha_version_id, USER_USER_PAT) + .await; + assert_eq!(version.game_versions, vec!["1.20.1", "1.20.2", "1.20.4"]); // From last patch + 
assert_eq!(version.loaders, vec![Loader("fabric".to_string())]); + + // Cleanup test db + test_env.cleanup().await; +} + +#[actix_rt::test] +pub async fn test_project_versions() { + let test_env = TestEnvironment::build(None).await; + let api = &test_env.v2; + let alpha_project_id: &String = &test_env.dummy.as_ref().unwrap().project_alpha.project_id; + let alpha_version_id = &test_env.dummy.as_ref().unwrap().project_alpha.version_id; + let _beta_version_id = &test_env.dummy.as_ref().unwrap().project_beta.version_id; + let _alpha_version_hash = &test_env.dummy.as_ref().unwrap().project_alpha.file_hash; + let _beta_version_hash = &test_env.dummy.as_ref().unwrap().project_beta.file_hash; + + let versions = api + .get_project_versions_deserialized( + alpha_project_id, + None, + None, + None, + None, + None, + None, + USER_USER_PAT, + ) + .await; + assert_eq!(versions.len(), 1); + assert_eq!(&versions[0].id.to_string(), alpha_version_id); + + test_env.cleanup().await; +} use crate::common::{asserts::assert_status, get_json_val_str}; use actix_http::StatusCode; use common::{ asserts::assert_version_ids, database::USER_USER_PAT, environment::with_test_environment, }; -mod common; - #[actix_rt::test] async fn can_create_version_with_ordering() { with_test_environment(|env| async move {