diff --git a/CLAUDE.md b/CLAUDE.md index 2f6a226977..0ab19bc52c 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -56,7 +56,7 @@ Use `docker exec labrinth-clickhouse clickhouse-client` to access the Clickhouse ### Postgres -Use `docker exec labrinth-postgres psql -U postgres` to access the PostgreSQL instance. +Use `docker exec labrinth-postgres psql -U labrinth -d labrinth -c "SELECT 1"` to access the PostgreSQL instance, replacing the `SELECT 1` with your query. # Guidelines diff --git a/Cargo.lock b/Cargo.lock index 4fb3b0b077..68e577e318 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4535,6 +4535,7 @@ dependencies = [ "hyper-util", "iana-time-zone", "image", + "indexmap 2.11.4", "itertools 0.14.0", "jemalloc_pprof", "json-patch 4.1.0", diff --git a/apps/frontend/nuxt.config.ts b/apps/frontend/nuxt.config.ts index beaf5a9982..5e2f726cfe 100644 --- a/apps/frontend/nuxt.config.ts +++ b/apps/frontend/nuxt.config.ts @@ -154,7 +154,7 @@ export default defineNuxtConfig({ (state.errors ?? []).length === 0 ) { console.log( - 'Tags already recently generated. Delete apps/frontend/generated/state.json to force regeneration.', + 'Tags already recently generated. 
Delete apps/frontend/src/generated/state.json to force regeneration.', ) return } diff --git a/apps/labrinth/.env.docker-compose b/apps/labrinth/.env.docker-compose index 6271bd3cb1..7da2b7f764 100644 --- a/apps/labrinth/.env.docker-compose +++ b/apps/labrinth/.env.docker-compose @@ -129,7 +129,7 @@ PYRO_API_KEY=none BREX_API_URL=https://platform.brexapis.com/v2/ BREX_API_KEY=none -DELPHI_URL=none +DELPHI_URL=http://labrinth-delphi:59999 DELPHI_SLACK_WEBHOOK=none AVALARA_1099_API_URL=https://www.track1099.com/api diff --git a/apps/labrinth/.sqlx/query-0ed2e6e3149352d12a673fddc50f9530c311eef084abb6fce35de5f37d79bcea.json b/apps/labrinth/.sqlx/query-0ed2e6e3149352d12a673fddc50f9530c311eef084abb6fce35de5f37d79bcea.json new file mode 100644 index 0000000000..6f7b991949 --- /dev/null +++ b/apps/labrinth/.sqlx/query-0ed2e6e3149352d12a673fddc50f9530c311eef084abb6fce35de5f37d79bcea.json @@ -0,0 +1,34 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n version_id AS \"version_id: crate::database::models::DBVersionId\",\n versions.mod_id AS \"project_id: crate::database::models::DBProjectId\",\n files.url AS \"url\"\n FROM files INNER JOIN versions ON files.version_id = versions.id\n WHERE files.id = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "version_id: crate::database::models::DBVersionId", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "project_id: crate::database::models::DBProjectId", + "type_info": "Int8" + }, + { + "ordinal": 2, + "name": "url", + "type_info": "Varchar" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + false, + false, + false + ] + }, + "hash": "0ed2e6e3149352d12a673fddc50f9530c311eef084abb6fce35de5f37d79bcea" +} diff --git a/apps/labrinth/.sqlx/query-10a332091be118f580d50ceb7a8724e9a4d5b9765d52305f99f859f939c2e854.json b/apps/labrinth/.sqlx/query-10a332091be118f580d50ceb7a8724e9a4d5b9765d52305f99f859f939c2e854.json new file mode 100644 index 0000000000..7e30ece2ec --- /dev/null +++ 
b/apps/labrinth/.sqlx/query-10a332091be118f580d50ceb7a8724e9a4d5b9765d52305f99f859f939c2e854.json @@ -0,0 +1,35 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO delphi_report_issues (report_id, issue_type, status)\n VALUES ($1, $2, $3)\n ON CONFLICT (report_id, issue_type) DO UPDATE SET status = $3\n RETURNING id\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Int8", + "Text", + { + "Custom": { + "name": "delphi_report_issue_status", + "kind": { + "Enum": [ + "pending", + "safe", + "unsafe" + ] + } + } + } + ] + }, + "nullable": [ + false + ] + }, + "hash": "10a332091be118f580d50ceb7a8724e9a4d5b9765d52305f99f859f939c2e854" +} diff --git a/apps/labrinth/.sqlx/query-7099e3a96324aadd3e7e0fbcc5e7023c1becd6d30e74a6110c14c0049d156118.json b/apps/labrinth/.sqlx/query-7099e3a96324aadd3e7e0fbcc5e7023c1becd6d30e74a6110c14c0049d156118.json new file mode 100644 index 0000000000..0b88c91314 --- /dev/null +++ b/apps/labrinth/.sqlx/query-7099e3a96324aadd3e7e0fbcc5e7023c1becd6d30e74a6110c14c0049d156118.json @@ -0,0 +1,22 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n to_jsonb(dr)\n || jsonb_build_object(\n 'file_id', f.id,\n 'version_id', v.id,\n 'project_id', v.mod_id,\n 'file_name', f.filename,\n 'file_size', f.size,\n 'flag_reason', 'delphi',\n 'issues', json_array(\n SELECT\n to_jsonb(dri)\n || jsonb_build_object(\n 'details', json_array(\n SELECT to_jsonb(drid)\n FROM delphi_report_issue_details drid\n WHERE drid.issue_id = dri.id\n )\n )\n FROM delphi_report_issues dri\n WHERE dri.report_id = dr.id\n )\n ) AS \"data!: sqlx::types::Json\"\n FROM delphi_reports dr\n INNER JOIN files f ON f.id = dr.file_id\n INNER JOIN versions v ON v.id = f.version_id\n INNER JOIN delphi_report_issues dri ON dri.report_id = dr.id\n LEFT JOIN delphi_report_issue_details drid ON drid.issue_id = dri.id\n WHERE dr.id = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": 
"data!: sqlx::types::Json", + "type_info": "Jsonb" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + null + ] + }, + "hash": "7099e3a96324aadd3e7e0fbcc5e7023c1becd6d30e74a6110c14c0049d156118" +} diff --git a/apps/labrinth/.sqlx/query-749fc694a88b419d30f820b7563aad9db6ed1b17ae7002172b4172b959bd7710.json b/apps/labrinth/.sqlx/query-749fc694a88b419d30f820b7563aad9db6ed1b17ae7002172b4172b959bd7710.json new file mode 100644 index 0000000000..8c1c8e1f3a --- /dev/null +++ b/apps/labrinth/.sqlx/query-749fc694a88b419d30f820b7563aad9db6ed1b17ae7002172b4172b959bd7710.json @@ -0,0 +1,126 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n delphi_report_issues.id AS \"id\", report_id,\n issue_type,\n delphi_report_issues.status AS \"status: DelphiReportIssueStatus\",\n\n file_id, delphi_version, artifact_url, created, severity AS \"severity: DelphiSeverity\",\n json_array(SELECT to_jsonb(delphi_report_issue_details)\n FROM delphi_report_issue_details\n WHERE issue_id = delphi_report_issues.id\n ) AS \"details: sqlx::types::Json>\",\n versions.mod_id AS \"project_id?\", mods.published AS \"project_published?\"\n FROM delphi_report_issues\n INNER JOIN delphi_reports ON delphi_reports.id = report_id\n LEFT OUTER JOIN files ON files.id = file_id\n LEFT OUTER JOIN versions ON versions.id = files.version_id\n LEFT OUTER JOIN mods ON mods.id = versions.mod_id\n WHERE\n (issue_type = $1 OR $1 IS NULL)\n AND (delphi_report_issues.status = $2 OR $2 IS NULL)\n ORDER BY\n CASE WHEN $3 = 'created_asc' THEN delphi_reports.created ELSE TO_TIMESTAMP(0) END ASC,\n CASE WHEN $3 = 'created_desc' THEN delphi_reports.created ELSE TO_TIMESTAMP(0) END DESC,\n CASE WHEN $3 = 'pending_status_first' THEN delphi_report_issues.status ELSE 'pending'::delphi_report_issue_status END ASC,\n CASE WHEN $3 = 'severity_asc' THEN delphi_reports.severity ELSE 'low'::delphi_severity END ASC,\n CASE WHEN $3 = 'severity_desc' THEN delphi_reports.severity ELSE 'low'::delphi_severity END 
DESC\n OFFSET $5\n LIMIT $4\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "report_id", + "type_info": "Int8" + }, + { + "ordinal": 2, + "name": "issue_type", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "status: DelphiReportIssueStatus", + "type_info": { + "Custom": { + "name": "delphi_report_issue_status", + "kind": { + "Enum": [ + "pending", + "safe", + "unsafe" + ] + } + } + } + }, + { + "ordinal": 4, + "name": "file_id", + "type_info": "Int8" + }, + { + "ordinal": 5, + "name": "delphi_version", + "type_info": "Int4" + }, + { + "ordinal": 6, + "name": "artifact_url", + "type_info": "Varchar" + }, + { + "ordinal": 7, + "name": "created", + "type_info": "Timestamptz" + }, + { + "ordinal": 8, + "name": "severity: DelphiSeverity", + "type_info": { + "Custom": { + "name": "delphi_severity", + "kind": { + "Enum": [ + "low", + "medium", + "high", + "severe" + ] + } + } + } + }, + { + "ordinal": 9, + "name": "details: sqlx::types::Json>", + "type_info": "Jsonb" + }, + { + "ordinal": 10, + "name": "project_id?", + "type_info": "Int8" + }, + { + "ordinal": 11, + "name": "project_published?", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Text", + { + "Custom": { + "name": "delphi_report_issue_status", + "kind": { + "Enum": [ + "pending", + "safe", + "unsafe" + ] + } + } + }, + "Text", + "Int8", + "Int8" + ] + }, + "nullable": [ + false, + false, + false, + false, + true, + false, + false, + false, + false, + null, + true, + true + ] + }, + "hash": "749fc694a88b419d30f820b7563aad9db6ed1b17ae7002172b4172b959bd7710" +} diff --git a/apps/labrinth/.sqlx/query-b1df83f4592701f8aa03f6d16bac9e2bd27ac9a87987eafd79b06f1c4ecdb659.json b/apps/labrinth/.sqlx/query-b1df83f4592701f8aa03f6d16bac9e2bd27ac9a87987eafd79b06f1c4ecdb659.json new file mode 100644 index 0000000000..216435cf2b --- /dev/null +++ 
b/apps/labrinth/.sqlx/query-b1df83f4592701f8aa03f6d16bac9e2bd27ac9a87987eafd79b06f1c4ecdb659.json @@ -0,0 +1,26 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE delphi_report_issues\n SET status = $1\n WHERE id = $2\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + { + "Custom": { + "name": "delphi_report_issue_status", + "kind": { + "Enum": [ + "pending", + "safe", + "unsafe" + ] + } + } + }, + "Int8" + ] + }, + "nullable": [] + }, + "hash": "b1df83f4592701f8aa03f6d16bac9e2bd27ac9a87987eafd79b06f1c4ecdb659" +} diff --git a/apps/labrinth/.sqlx/query-b65094517546487e43b65a76aa38efd9e422151b683d9897a071ee0c4bac1cd4.json b/apps/labrinth/.sqlx/query-b65094517546487e43b65a76aa38efd9e422151b683d9897a071ee0c4bac1cd4.json new file mode 100644 index 0000000000..a0ea4442ee --- /dev/null +++ b/apps/labrinth/.sqlx/query-b65094517546487e43b65a76aa38efd9e422151b683d9897a071ee0c4bac1cd4.json @@ -0,0 +1,39 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO delphi_report_issue_details (issue_id, key, file_path, decompiled_source, data, severity)\n VALUES ($1, $2, $3, $4, $5, $6)\n RETURNING id\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Int8", + "Text", + "Text", + "Text", + "Jsonb", + { + "Custom": { + "name": "delphi_severity", + "kind": { + "Enum": [ + "low", + "medium", + "high", + "severe" + ] + } + } + } + ] + }, + "nullable": [ + false + ] + }, + "hash": "b65094517546487e43b65a76aa38efd9e422151b683d9897a071ee0c4bac1cd4" +} diff --git a/apps/labrinth/.sqlx/query-c7c72cf1f98cbc2b647ab840bdfadf1de8aaf214b32a2aab299a0d87fd2dc453.json b/apps/labrinth/.sqlx/query-c7c72cf1f98cbc2b647ab840bdfadf1de8aaf214b32a2aab299a0d87fd2dc453.json new file mode 100644 index 0000000000..db0d075672 --- /dev/null +++ b/apps/labrinth/.sqlx/query-c7c72cf1f98cbc2b647ab840bdfadf1de8aaf214b32a2aab299a0d87fd2dc453.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": 
"DELETE FROM delphi_report_issue_details WHERE issue_id = $1", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [] + }, + "hash": "c7c72cf1f98cbc2b647ab840bdfadf1de8aaf214b32a2aab299a0d87fd2dc453" +} diff --git a/apps/labrinth/.sqlx/query-d8e3e59bce087a32d3475bac8d38fe0dbf285367b8051070bedbb075c4006c8d.json b/apps/labrinth/.sqlx/query-d8e3e59bce087a32d3475bac8d38fe0dbf285367b8051070bedbb075c4006c8d.json new file mode 100644 index 0000000000..e27ec02523 --- /dev/null +++ b/apps/labrinth/.sqlx/query-d8e3e59bce087a32d3475bac8d38fe0dbf285367b8051070bedbb075c4006c8d.json @@ -0,0 +1,37 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n m.id AS \"project_id: DBProjectId\",\n t.id AS \"project_thread_id: DBThreadId\",\n to_jsonb(dr)\n || jsonb_build_object(\n 'file_id', f.id,\n 'version_id', v.id,\n 'project_id', v.mod_id,\n 'file_name', f.filename,\n 'file_size', f.size,\n 'flag_reason', 'delphi',\n 'issues', json_array(\n SELECT\n to_jsonb(dri)\n || jsonb_build_object(\n 'details', json_array(\n SELECT jsonb_build_object(\n 'id', drid.id,\n 'issue_id', drid.issue_id,\n 'key', drid.key,\n 'file_path', drid.file_path,\n -- ignore `decompiled_source`\n 'data', drid.data,\n 'severity', drid.severity\n )\n FROM delphi_report_issue_details drid\n WHERE drid.issue_id = dri.id\n )\n )\n FROM delphi_report_issues dri\n WHERE dri.report_id = dr.id\n )\n ) AS \"report!: sqlx::types::Json\"\n FROM delphi_reports dr\n INNER JOIN files f ON f.id = dr.file_id\n INNER JOIN versions v ON v.id = f.version_id\n INNER JOIN mods m ON m.id = v.mod_id\n INNER JOIN threads t ON t.mod_id = m.id\n INNER JOIN delphi_report_issues dri ON dri.report_id = dr.id\n LEFT JOIN delphi_report_issue_details drid ON drid.issue_id = dri.id\n\n -- filtering\n LEFT JOIN mods_categories mc ON mc.joining_mod_id = m.id\n LEFT JOIN categories c ON c.id = mc.joining_category_id\n WHERE\n -- project type\n (cardinality($1::int[]) = 0 OR c.project_type = 
ANY($1::int[]))\n\n -- sorting\n ORDER BY\n CASE WHEN $2 = 'created_asc' THEN created ELSE TO_TIMESTAMP(0) END ASC,\n CASE WHEN $2 = 'created_desc' THEN created ELSE TO_TIMESTAMP(0) END DESC,\n CASE WHEN $2 = 'severity_asc' THEN dr.severity ELSE 'low'::delphi_severity END ASC,\n CASE WHEN $2 = 'severity_desc' THEN dr.severity ELSE 'low'::delphi_severity END DESC\n\n -- pagination\n LIMIT $3\n OFFSET $4\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "project_id: DBProjectId", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "project_thread_id: DBThreadId", + "type_info": "Int8" + }, + { + "ordinal": 2, + "name": "report!: sqlx::types::Json", + "type_info": "Jsonb" + } + ], + "parameters": { + "Left": [ + "Int4Array", + "Text", + "Int8", + "Int8" + ] + }, + "nullable": [ + false, + false, + null + ] + }, + "hash": "d8e3e59bce087a32d3475bac8d38fe0dbf285367b8051070bedbb075c4006c8d" +} diff --git a/apps/labrinth/.sqlx/query-e70536fc2d4e45e1075258f618bd00483f055231f07ad5f39ce716a25ec2c6ad.json b/apps/labrinth/.sqlx/query-e70536fc2d4e45e1075258f618bd00483f055231f07ad5f39ce716a25ec2c6ad.json new file mode 100644 index 0000000000..b36f5b1197 --- /dev/null +++ b/apps/labrinth/.sqlx/query-e70536fc2d4e45e1075258f618bd00483f055231f07ad5f39ce716a25ec2c6ad.json @@ -0,0 +1,22 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n to_jsonb(dri)\n || jsonb_build_object(\n 'details', json_array(\n SELECT to_jsonb(drid)\n FROM delphi_report_issue_details drid\n WHERE drid.issue_id = dri.id\n )\n ) AS \"data!: sqlx::types::Json\"\n FROM delphi_report_issues dri\n LEFT JOIN delphi_report_issue_details drid ON dri.id = drid.issue_id\n WHERE dri.id = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "data!: sqlx::types::Json", + "type_info": "Jsonb" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + null + ] + }, + "hash": "e70536fc2d4e45e1075258f618bd00483f055231f07ad5f39ce716a25ec2c6ad" +} diff --git 
a/apps/labrinth/.sqlx/query-f2054ae7dcc89b21ed6b2f04526de1e7cddd68ac956143bef994104280a8dc07.json b/apps/labrinth/.sqlx/query-f2054ae7dcc89b21ed6b2f04526de1e7cddd68ac956143bef994104280a8dc07.json new file mode 100644 index 0000000000..8cbe94abd5 --- /dev/null +++ b/apps/labrinth/.sqlx/query-f2054ae7dcc89b21ed6b2f04526de1e7cddd68ac956143bef994104280a8dc07.json @@ -0,0 +1,37 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO delphi_reports (file_id, delphi_version, artifact_url, severity)\n VALUES ($1, $2, $3, $4)\n ON CONFLICT (file_id, delphi_version) DO UPDATE SET\n delphi_version = $2, artifact_url = $3, created = CURRENT_TIMESTAMP, severity = $4\n RETURNING id\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Int8", + "Int4", + "Varchar", + { + "Custom": { + "name": "delphi_severity", + "kind": { + "Enum": [ + "low", + "medium", + "high", + "severe" + ] + } + } + } + ] + }, + "nullable": [ + false + ] + }, + "hash": "f2054ae7dcc89b21ed6b2f04526de1e7cddd68ac956143bef994104280a8dc07" +} diff --git a/apps/labrinth/.sqlx/query-fe571872262fe7d119b4b6eb1e55d818fde0499d8e5a08e9e22bee42014877f3.json b/apps/labrinth/.sqlx/query-fe571872262fe7d119b4b6eb1e55d818fde0499d8e5a08e9e22bee42014877f3.json new file mode 100644 index 0000000000..38db606828 --- /dev/null +++ b/apps/labrinth/.sqlx/query-fe571872262fe7d119b4b6eb1e55d818fde0499d8e5a08e9e22bee42014877f3.json @@ -0,0 +1,20 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT MAX(delphi_version) FROM delphi_reports", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "max", + "type_info": "Int4" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + null + ] + }, + "hash": "fe571872262fe7d119b4b6eb1e55d818fde0499d8e5a08e9e22bee42014877f3" +} diff --git a/apps/labrinth/Cargo.toml b/apps/labrinth/Cargo.toml index 227a8b23e7..bb7147bfaa 100644 --- a/apps/labrinth/Cargo.toml +++ b/apps/labrinth/Cargo.toml @@ -66,6 
+66,7 @@ image = { workspace = true, features = [ "tiff", "webp", ] } +indexmap = { workspace = true } itertools = { workspace = true } json-patch = { workspace = true } lettre = { workspace = true } diff --git a/apps/labrinth/fixtures/delphi-report-2025-11-15.sql b/apps/labrinth/fixtures/delphi-report-2025-11-15.sql new file mode 100644 index 0000000000..389159672a --- /dev/null +++ b/apps/labrinth/fixtures/delphi-report-2025-11-15.sql @@ -0,0 +1,90 @@ +-- +-- PostgreSQL database dump +-- + +\restrict RGysBmMc8KFBQ9AssusGyNPozUiB43hdmIPxlv5KSWbX7tdW7XVMPpMginvod9K + +-- Dumped from database version 17.6 +-- Dumped by pg_dump version 17.6 + +SET statement_timeout = 0; +SET lock_timeout = 0; +SET idle_in_transaction_session_timeout = 0; +SET transaction_timeout = 0; +SET client_encoding = 'UTF8'; +SET standard_conforming_strings = on; +SELECT pg_catalog.set_config('search_path', '', false); +SET check_function_bodies = false; +SET xmloption = content; +SET client_min_messages = warning; +SET row_security = off; + +-- +-- Data for Name: delphi_reports; Type: TABLE DATA; Schema: public; Owner: labrinth +-- + +COPY public.delphi_reports (id, file_id, delphi_version, artifact_url, created, severity) FROM stdin; +1 157529403422109 3 file:///tmp/modrinth/data/CaG4Mr66/versions/fMzLDsVA/sodium.jar 2025-11-15 23:01:30.012526+00 high +\. + + +-- +-- Data for Name: delphi_report_issues; Type: TABLE DATA; Schema: public; Owner: labrinth +-- + +COPY public.delphi_report_issues (id, report_id, issue_type, status) FROM stdin; +1 1 runtime_exec_usage pending +2 1 hardcoded_url pending +3 1 classloader_usage pending +4 1 obfuscated_names pending +5 1 main_method pending +\. 
+ + +-- +-- Data for Name: delphi_report_issue_details; Type: TABLE DATA; Schema: public; Owner: labrinth +-- + +COPY public.delphi_report_issue_details (id, issue_id, key, file_path, decompiled_source, data, severity) FROM stdin; +1 1 d670186a0e5210fc2b9332a2163849740f19bec59a99d890bef0ae9e6608f83d net/caffeinemc/mods/sodium/desktop/utils/browse/XDGImpl package net.caffeinemc.mods.sodium.desktop.utils.browse;\n\nimport java.io.IOException;\nimport java.util.Locale;\n\nclass XDGImpl implements BrowseUrlHandler {\n public static boolean isSupported() {\n String os = System.getProperty("os.name").toLowerCase(Locale.ROOT);\n return os.equals("linux");\n }\n\n @Override\n public void browseTo(String url) throws IOException {\n Process process = Runtime.getRuntime().exec(new String[]{"xdg-open", url});\n\n try {\n int result = process.waitFor();\n if (result != 0) {\n throw new IOException("xdg-open exited with code: %d".formatted(result));\n }\n } catch (InterruptedException var4) {\n throw new RuntimeException(var4);\n }\n }\n}\n {} medium +2 1 317dd815f60f04f1cef5d855e30f6a2719570c583ef49ae94ca2b563179fc1fa net/caffeinemc/mods/sodium/client/compatibility/environment/probe/GraphicsAdapterProbe package net.caffeinemc.mods.sodium.client.compatibility.environment.probe;\n\nimport java.io.BufferedReader;\nimport java.io.IOException;\nimport java.io.InputStreamReader;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.List;\nimport java.util.Set;\nimport java.util.stream.Stream;\nimport net.caffeinemc.mods.sodium.client.compatibility.environment.OsUtils;\nimport net.caffeinemc.mods.sodium.client.platform.windows.api.d3dkmt.D3DKMT;\nimport org.jetbrains.annotations.Nullable;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class GraphicsAdapterProbe {\n private static final Logger LOGGER = 
LoggerFactory.getLogger("Sodium-GraphicsAdapterProbe");\n private static final Set LINUX_PCI_CLASSES = Set.of("0x030000", "0x030001", "0x030200", "0x038000");\n private static List ADAPTERS = List.of();\n\n public static void findAdapters() {\n LOGGER.info("Searching for graphics cards...");\n\n List adapters;\n try {\n adapters = switch (OsUtils.getOs()) {\n case WIN -> findAdapters$Windows();\n case LINUX -> findAdapters$Linux();\n default -> null;\n };\n } catch (Exception var3) {\n LOGGER.error("Failed to find graphics adapters!", var3);\n return;\n }\n\n if (adapters != null) {\n if (adapters.isEmpty()) {\n LOGGER.warn(\n "Could not find any graphics adapters! Probably the device is not on a bus we can probe, or there are no devices supporting 3D acceleration."\n );\n } else {\n for (GraphicsAdapterInfo adapter : adapters) {\n LOGGER.info("Found graphics adapter: {}", adapter);\n }\n }\n\n ADAPTERS = adapters;\n }\n }\n\n private static List findAdapters$Windows() {\n return D3DKMT.findGraphicsAdapters();\n }\n\n private static List findAdapters$Linux() {\n ArrayList results = new ArrayList();\n\n try {\n Stream devices = Files.list(Path.of("/sys/bus/pci/devices/"));\n\n try {\n for (Path devicePath : devices::iterator) {\n String deviceClass = Files.readString(devicePath.resolve("class")).trim();\n if (LINUX_PCI_CLASSES.contains(deviceClass)) {\n String pciVendorId = Files.readString(devicePath.resolve("vendor")).trim();\n String pciDeviceId = Files.readString(devicePath.resolve("device")).trim();\n GraphicsAdapterVendor adapterVendor = GraphicsAdapterVendor.fromPciVendorId(pciVendorId);\n String adapterName = getPciDeviceName$Linux(pciVendorId, pciDeviceId);\n if (adapterName == null) {\n adapterName = "";\n }\n\n GraphicsAdapterInfo.LinuxPciAdapterInfo info = new GraphicsAdapterInfo.LinuxPciAdapterInfo(\n adapterVendor, adapterName, pciVendorId, pciDeviceId\n );\n results.add(info);\n }\n }\n } catch (Throwable var12) {\n if (devices != null) {\n try {\n 
devices.close();\n } catch (Throwable var11) {\n var12.addSuppressed(var11);\n }\n }\n\n throw var12;\n }\n\n if (devices != null) {\n devices.close();\n }\n } catch (IOException var13) {\n }\n\n return results;\n }\n\n @Nullable\n private static String getPciDeviceName$Linux(String vendorId, String deviceId) {\n String deviceFilter = vendorId.substring(2) + ":" + deviceId.substring(2);\n\n try {\n Process process = Runtime.getRuntime().exec(new String[]{"lspci", "-vmm", "-d", deviceFilter});\n int result = process.waitFor();\n if (result != 0) {\n throw new IOException("lspci exited with error code: %s".formatted(result));\n } else {\n BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()));\n\n String var7;\n label40: {\n String line;\n try {\n while ((line = reader.readLine()) != null) {\n if (line.startsWith("Device:")) {\n var7 = line.substring("Device:".length()).trim();\n break label40;\n }\n }\n } catch (Throwable var9) {\n try {\n reader.close();\n } catch (Throwable var8) {\n var9.addSuppressed(var8);\n }\n\n throw var9;\n }\n\n reader.close();\n throw new IOException("lspci did not return a device name");\n }\n\n reader.close();\n return var7;\n }\n } catch (Throwable var10) {\n LOGGER.warn("Failed to query PCI device name for %s:%s".formatted(vendorId, deviceId), var10);\n return null;\n }\n }\n\n public static Collection getAdapters() {\n if (ADAPTERS == null) {\n LOGGER.error("Graphics adapters not probed yet; returning an empty list.");\n return Collections.emptyList();\n } else {\n return ADAPTERS;\n }\n }\n}\n {} medium +3 2 5ba58b7f9dcc59f14c8a0fd9b78c23a19723791bd9006ed408d43557ea24abb4 net/caffeinemc/mods/sodium/desktop/LaunchWarn package net.caffeinemc.mods.sodium.desktop;\n\nimport java.awt.GraphicsEnvironment;\nimport java.io.IOException;\nimport javax.swing.JDialog;\nimport javax.swing.JOptionPane;\nimport javax.swing.UIManager;\nimport javax.swing.UnsupportedLookAndFeelException;\nimport 
net.caffeinemc.mods.sodium.desktop.utils.browse.BrowseUrlHandler;\n\npublic class LaunchWarn {\n private static final String HELP_URL = "https://link.caffeinemc.net/guides/sodium/installation";\n private static final String RICH_MESSAGE = "

You have tried to launch Sodium (a Minecraft mod) directly, but it is not an executable program or mod installer. Instead, you must install Fabric Loader for Minecraft, and then place this file in your mods directory.

If this is your first time installing mods with Fabric Loader, then click the \\"Help\\" button for an installation guide.

";\n private static final String FALLBACK_MESSAGE = "

You have tried to launch Sodium (a Minecraft mod) directly, but it is not an executable program or mod installer. Instead, you must install Fabric Loader for Minecraft, and then place this file in your mods directory.

If this is your first time installing mods with Fabric Loader, then visit https://link.caffeinemc.net/guides/sodium/installation for an installation guide.

";\n private static final String FAILED_TO_BROWSE_MESSAGE = "

Failed to open the default browser! Your system may be misconfigured. Please open the URL https://link.caffeinemc.net/guides/sodium/installation manually.

";\n public static final String WINDOW_TITLE = "Sodium";\n\n public static void main(String[] args) {\n if (GraphicsEnvironment.isHeadless()) {\n showHeadlessError();\n } else {\n showGraphicalError();\n }\n }\n\n private static void showHeadlessError() {\n System.err\n .println(\n "

You have tried to launch Sodium (a Minecraft mod) directly, but it is not an executable program or mod installer. Instead, you must install Fabric Loader for Minecraft, and then place this file in your mods directory.

If this is your first time installing mods with Fabric Loader, then visit https://link.caffeinemc.net/guides/sodium/installation for an installation guide.

"\n );\n }\n\n private static void showGraphicalError() {\n trySetSystemLookAndFeel();\n trySetSystemFontPreferences();\n BrowseUrlHandler browseUrlHandler = BrowseUrlHandler.createImplementation();\n if (browseUrlHandler != null) {\n showRichGraphicalDialog(browseUrlHandler);\n } else {\n showFallbackGraphicalDialog();\n }\n\n System.exit(0);\n }\n\n private static void showRichGraphicalDialog(BrowseUrlHandler browseUrlHandler) {\n int selectedOption = showDialogBox(\n "

You have tried to launch Sodium (a Minecraft mod) directly, but it is not an executable program or mod installer. Instead, you must install Fabric Loader for Minecraft, and then place this file in your mods directory.

If this is your first time installing mods with Fabric Loader, then click the \\"Help\\" button for an installation guide.

",\n "Sodium",\n 0,\n 1,\n new String[]{"Help", "Close"},\n 0\n );\n if (selectedOption == 0) {\n log("Opening URL: https://link.caffeinemc.net/guides/sodium/installation");\n\n try {\n browseUrlHandler.browseTo("https://link.caffeinemc.net/guides/sodium/installation");\n } catch (IOException var3) {\n log("Failed to open default web browser!", var3);\n showDialogBox(\n "

Failed to open the default browser! Your system may be misconfigured. Please open the URL https://link.caffeinemc.net/guides/sodium/installation manually.

",\n "Sodium",\n -1,\n 2,\n null,\n -1\n );\n }\n }\n }\n\n private static void showFallbackGraphicalDialog() {\n showDialogBox(\n "

You have tried to launch Sodium (a Minecraft mod) directly, but it is not an executable program or mod installer. Instead, you must install Fabric Loader for Minecraft, and then place this file in your mods directory.

If this is your first time installing mods with Fabric Loader, then visit https://link.caffeinemc.net/guides/sodium/installation for an installation guide.

",\n "Sodium",\n -1,\n 1,\n null,\n null\n );\n }\n\n private static int showDialogBox(String message, String title, int optionType, int messageType, String[] options, Object initialValue) {\n JOptionPane pane = new JOptionPane(message, messageType, optionType, null, options, initialValue);\n JDialog dialog = pane.createDialog(title);\n dialog.setVisible(true);\n Object selectedValue = pane.getValue();\n if (selectedValue == null) {\n return -1;\n } else if (options == null) {\n return selectedValue instanceof Integer ? (Integer)selectedValue : -1;\n } else {\n for (int counter = 0; counter < options.length; counter++) {\n String option = options[counter];\n if (option.equals(selectedValue)) {\n return counter;\n }\n }\n\n return -1;\n }\n }\n\n private static void trySetSystemLookAndFeel() {\n try {\n UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());\n } catch (UnsupportedLookAndFeelException | ReflectiveOperationException var1) {\n }\n }\n\n private static void trySetSystemFontPreferences() {\n System.setProperty("awt.useSystemAAFontSettings", "on");\n }\n\n private static void log(String message) {\n System.err.println(message);\n }\n\n private static void log(String message, Throwable exception) {\n System.err.println(message);\n exception.printStackTrace(System.err);\n }\n}\n {"url": "https://link.caffeinemc.net/guides/sodium/installation"} low +4 2 34a4ceb119311f669d4b3b036dfef9f93c1e86f765582ebf556b92486766f861 net/caffeinemc/mods/sodium/client/gui/SodiumOptionsGUI package net.caffeinemc.mods.sodium.client.gui;\n\nimport com.google.common.collect.UnmodifiableIterator;\nimport java.io.IOException;\nimport java.time.Instant;\nimport java.time.temporal.ChronoUnit;\nimport java.util.ArrayList;\nimport java.util.EnumSet;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.stream.Stream;\nimport net.caffeinemc.mods.sodium.client.SodiumClientMod;\nimport net.caffeinemc.mods.sodium.client.console.Console;\nimport 
net.caffeinemc.mods.sodium.client.console.message.MessageLevel;\nimport net.caffeinemc.mods.sodium.client.data.fingerprint.HashedFingerprint;\nimport net.caffeinemc.mods.sodium.client.gui.options.Option;\nimport net.caffeinemc.mods.sodium.client.gui.options.OptionFlag;\nimport net.caffeinemc.mods.sodium.client.gui.options.OptionGroup;\nimport net.caffeinemc.mods.sodium.client.gui.options.OptionImpact;\nimport net.caffeinemc.mods.sodium.client.gui.options.OptionPage;\nimport net.caffeinemc.mods.sodium.client.gui.options.control.Control;\nimport net.caffeinemc.mods.sodium.client.gui.options.control.ControlElement;\nimport net.caffeinemc.mods.sodium.client.gui.options.storage.OptionStorage;\nimport net.caffeinemc.mods.sodium.client.gui.prompt.ScreenPrompt;\nimport net.caffeinemc.mods.sodium.client.gui.prompt.ScreenPromptable;\nimport net.caffeinemc.mods.sodium.client.gui.screen.ConfigCorruptedScreen;\nimport net.caffeinemc.mods.sodium.client.gui.widgets.AbstractWidget;\nimport net.caffeinemc.mods.sodium.client.gui.widgets.FlatButtonWidget;\nimport net.caffeinemc.mods.sodium.client.services.PlatformRuntimeInformation;\nimport net.caffeinemc.mods.sodium.client.util.Dim2i;\nimport net.minecraft.class_124;\nimport net.minecraft.class_156;\nimport net.minecraft.class_2561;\nimport net.minecraft.class_310;\nimport net.minecraft.class_332;\nimport net.minecraft.class_364;\nimport net.minecraft.class_437;\nimport net.minecraft.class_446;\nimport net.minecraft.class_5250;\nimport net.minecraft.class_5348;\nimport net.minecraft.class_5481;\nimport org.jetbrains.annotations.Nullable;\n\npublic class SodiumOptionsGUI extends class_437 implements ScreenPromptable {\n private final List pages = new ArrayList();\n private final List> controls = new ArrayList();\n private final class_437 prevScreen;\n private OptionPage currentPage;\n private FlatButtonWidget applyButton;\n private FlatButtonWidget closeButton;\n private FlatButtonWidget undoButton;\n private FlatButtonWidget 
donateButton;\n private FlatButtonWidget hideDonateButton;\n private boolean hasPendingChanges;\n private ControlElement hoveredElement;\n @Nullable\n private ScreenPrompt prompt;\n private static final List DONATION_PROMPT_MESSAGE = List.of(\n class_5348.method_29433(new class_5348[]{class_2561.method_43470("Hello!")}),\n class_5348.method_29433(\n new class_5348[]{\n class_2561.method_43470("It seems that you've been enjoying "),\n class_2561.method_43470("Sodium").method_54663(2616210),\n class_2561.method_43470(", the powerful and open rendering optimization mod for Minecraft.")\n }\n ),\n class_5348.method_29433(\n new class_5348[]{\n class_2561.method_43470("Mods like these are complex. They require "),\n class_2561.method_43470("thousands of hours").method_54663(16739840),\n class_2561.method_43470(" of development, debugging, and tuning to create the experience that players have come to expect.")\n }\n ),\n class_5348.method_29433(\n new class_5348[]{\n class_2561.method_43470("If you'd like to show your token of appreciation, and support the development of our mod in the process, then consider "),\n class_2561.method_43470("buying us a coffee").method_54663(15550926),\n class_2561.method_43470(".")\n }\n ),\n class_5348.method_29433(new class_5348[]{class_2561.method_43470("And thanks again for using our mod! 
We hope it helps you (and your computer.)")})\n );\n\n private SodiumOptionsGUI(class_437 prevScreen) {\n super(class_2561.method_43470("Sodium Renderer Settings"));\n this.prevScreen = prevScreen;\n this.pages.add(SodiumGameOptionPages.general());\n this.pages.add(SodiumGameOptionPages.quality());\n this.pages.add(SodiumGameOptionPages.performance());\n this.pages.add(SodiumGameOptionPages.advanced());\n this.checkPromptTimers();\n }\n\n private void checkPromptTimers() {\n if (!PlatformRuntimeInformation.getInstance().isDevelopmentEnvironment()) {\n SodiumGameOptions options = SodiumClientMod.options();\n if (!options.notifications.hasSeenDonationPrompt) {\n HashedFingerprint fingerprint = null;\n\n try {\n fingerprint = HashedFingerprint.loadFromDisk();\n } catch (Throwable var5) {\n SodiumClientMod.logger().error("Failed to read the fingerprint from disk", var5);\n }\n\n if (fingerprint != null) {\n Instant now = Instant.now();\n Instant threshold = Instant.ofEpochSecond(fingerprint.timestamp()).plus(3L, ChronoUnit.DAYS);\n if (now.isAfter(threshold)) {\n this.openDonationPrompt(options);\n }\n }\n }\n }\n }\n\n private void openDonationPrompt(SodiumGameOptions options) {\n ScreenPrompt prompt = new ScreenPrompt(\n this, DONATION_PROMPT_MESSAGE, 320, 190, new ScreenPrompt.Action(class_2561.method_43470("Buy us a coffee"), this::openDonationPage)\n );\n prompt.method_25365(true);\n options.notifications.hasSeenDonationPrompt = true;\n\n try {\n SodiumGameOptions.writeToDisk(options);\n } catch (IOException var4) {\n SodiumClientMod.logger().error("Failed to update config file", var4);\n }\n }\n\n public static class_437 createScreen(class_437 currentScreen) {\n return (class_437)(SodiumClientMod.options().isReadOnly()\n ? 
new ConfigCorruptedScreen(currentScreen, SodiumOptionsGUI::new)\n : new SodiumOptionsGUI(currentScreen));\n }\n\n public void setPage(OptionPage page) {\n this.currentPage = page;\n this.rebuildGUI();\n }\n\n protected void method_25426() {\n super.method_25426();\n this.rebuildGUI();\n if (this.prompt != null) {\n this.prompt.init();\n }\n }\n\n private void rebuildGUI() {\n this.controls.clear();\n this.method_37067();\n if (this.currentPage == null) {\n if (this.pages.isEmpty()) {\n throw new IllegalStateException("No pages are available?!");\n }\n\n this.currentPage = (OptionPage)this.pages.get(0);\n }\n\n this.rebuildGUIPages();\n this.rebuildGUIOptions();\n this.undoButton = new FlatButtonWidget(\n new Dim2i(this.field_22789 - 211, this.field_22790 - 30, 65, 20), class_2561.method_43471("sodium.options.buttons.undo"), this::undoChanges\n );\n this.applyButton = new FlatButtonWidget(\n new Dim2i(this.field_22789 - 142, this.field_22790 - 30, 65, 20), class_2561.method_43471("sodium.options.buttons.apply"), this::applyChanges\n );\n this.closeButton = new FlatButtonWidget(\n new Dim2i(this.field_22789 - 73, this.field_22790 - 30, 65, 20), class_2561.method_43471("gui.done"), this::method_25419\n );\n this.donateButton = new FlatButtonWidget(\n new Dim2i(this.field_22789 - 128, 6, 100, 20), class_2561.method_43471("sodium.options.buttons.donate"), this::openDonationPage\n );\n this.hideDonateButton = new FlatButtonWidget(new Dim2i(this.field_22789 - 26, 6, 20, 20), class_2561.method_43470("x"), this::hideDonationButton);\n if (SodiumClientMod.options().notifications.hasClearedDonationButton) {\n this.setDonationButtonVisibility(false);\n }\n\n this.method_37063(this.undoButton);\n this.method_37063(this.applyButton);\n this.method_37063(this.closeButton);\n this.method_37063(this.donateButton);\n this.method_37063(this.hideDonateButton);\n }\n\n private void setDonationButtonVisibility(boolean value) {\n this.donateButton.setVisible(value);\n 
this.hideDonateButton.setVisible(value);\n }\n\n private void hideDonationButton() {\n SodiumGameOptions options = SodiumClientMod.options();\n options.notifications.hasClearedDonationButton = true;\n\n try {\n SodiumGameOptions.writeToDisk(options);\n } catch (IOException var3) {\n throw new RuntimeException("Failed to save configuration", var3);\n }\n\n this.setDonationButtonVisibility(false);\n }\n\n private void rebuildGUIPages() {\n int x = 6;\n int y = 6;\n\n for (OptionPage page : this.pages) {\n int width = 12 + this.field_22793.method_27525(page.getName());\n FlatButtonWidget button = new FlatButtonWidget(new Dim2i(x, y, width, 18), page.getName(), () -> this.setPage(page));\n button.setSelected(this.currentPage == page);\n x += width + 6;\n this.method_37063(button);\n }\n }\n\n private void rebuildGUIOptions() {\n int x = 6;\n int y = 28;\n\n for (UnmodifiableIterator var3 = this.currentPage.getGroups().iterator(); var3.hasNext(); y += 4) {\n OptionGroup group = (OptionGroup)var3.next();\n\n for (UnmodifiableIterator var5 = group.getOptions().iterator(); var5.hasNext(); y += 18) {\n Option option = (Option)var5.next();\n Control control = option.getControl();\n ControlElement element = control.createElement(new Dim2i(x, y, 240, 18));\n this.method_37063(element);\n this.controls.add(element);\n }\n }\n }\n\n public void method_25394(class_332 graphics, int mouseX, int mouseY, float delta) {\n this.updateControls();\n super.method_25394(graphics, this.prompt != null ? -1 : mouseX, this.prompt != null ? 
-1 : mouseY, delta);\n if (this.hoveredElement != null) {\n this.renderOptionTooltip(graphics, this.hoveredElement);\n }\n\n if (this.prompt != null) {\n this.prompt.method_25394(graphics, mouseX, mouseY, delta);\n }\n }\n\n private void updateControls() {\n ControlElement hovered = (ControlElement)this.getActiveControls()\n .filter(AbstractWidget::isHovered)\n .findFirst()\n .orElse((ControlElement)this.getActiveControls().filter(AbstractWidget::method_25370).findFirst().orElse(null));\n boolean hasChanges = this.getAllOptions().anyMatch(Option::hasChanged);\n\n for (OptionPage page : this.pages) {\n UnmodifiableIterator var5 = page.getOptions().iterator();\n\n while (var5.hasNext()) {\n Option option = (Option)var5.next();\n if (option.hasChanged()) {\n hasChanges = true;\n }\n }\n }\n\n this.applyButton.setEnabled(hasChanges);\n this.undoButton.setVisible(hasChanges);\n this.closeButton.setEnabled(!hasChanges);\n this.hasPendingChanges = hasChanges;\n this.hoveredElement = hovered;\n }\n\n private Stream> getAllOptions() {\n return this.pages.stream().flatMap(s -> s.getOptions().stream());\n }\n\n private Stream> getActiveControls() {\n return this.controls.stream();\n }\n\n private void renderOptionTooltip(class_332 graphics, ControlElement element) {\n Dim2i dim = element.getDimensions();\n int textPadding = 3;\n int boxPadding = 3;\n int boxY = dim.y();\n int boxX = dim.getLimitX() + boxPadding;\n int boxWidth = Math.min(200, this.field_22789 - boxX - boxPadding);\n Option option = element.getOption();\n int splitWidth = boxWidth - textPadding * 2;\n List tooltip = new ArrayList(this.field_22793.method_1728(option.getTooltip(), splitWidth));\n OptionImpact impact = option.getImpact();\n if (impact != null) {\n class_5250 impactText = class_2561.method_43469("sodium.options.performance_impact_string", new Object[]{impact.getLocalizedName()});\n tooltip.addAll(this.field_22793.method_1728(impactText.method_27692(class_124.field_1080), splitWidth));\n }\n\n int 
boxHeight = tooltip.size() * 12 + boxPadding;\n int boxYLimit = boxY + boxHeight;\n int boxYCutoff = this.field_22790 - 40;\n if (boxYLimit > boxYCutoff) {\n boxY -= boxYLimit - boxYCutoff;\n }\n\n graphics.method_25296(boxX, boxY, boxX + boxWidth, boxY + boxHeight, -536870912, -536870912);\n\n for (int i = 0; i < tooltip.size(); i++) {\n graphics.method_35720(this.field_22793, (class_5481)tooltip.get(i), boxX + textPadding, boxY + textPadding + i * 12, -1);\n }\n }\n\n private void applyChanges() {\n HashSet> dirtyStorages = new HashSet();\n EnumSet flags = EnumSet.noneOf(OptionFlag.class);\n this.getAllOptions().forEach(option -> {\n if (option.hasChanged()) {\n option.applyChanges();\n flags.addAll(option.getFlags());\n dirtyStorages.add(option.getStorage());\n }\n });\n class_310 client = class_310.method_1551();\n if (client.field_1687 != null) {\n if (flags.contains(OptionFlag.REQUIRES_RENDERER_RELOAD)) {\n client.field_1769.method_3279();\n } else if (flags.contains(OptionFlag.REQUIRES_RENDERER_UPDATE)) {\n client.field_1769.method_3292();\n }\n }\n\n if (flags.contains(OptionFlag.REQUIRES_ASSET_RELOAD)) {\n client.method_24041((Integer)client.field_1690.method_42563().method_41753());\n client.method_1513();\n }\n\n if (flags.contains(OptionFlag.REQUIRES_VIDEOMODE_RELOAD)) {\n client.method_22683().method_4475();\n }\n\n if (flags.contains(OptionFlag.REQUIRES_GAME_RESTART)) {\n Console.instance().logMessage(MessageLevel.WARN, "sodium.console.game_restart", true, 10.0);\n }\n\n for (OptionStorage storage : dirtyStorages) {\n storage.save();\n }\n }\n\n private void undoChanges() {\n this.getAllOptions().forEach(Option::reset);\n }\n\n private void openDonationPage() {\n class_156.method_668().method_670("https://caffeinemc.net/donate");\n }\n\n public boolean method_25404(int keyCode, int scanCode, int modifiers) {\n if (this.prompt != null && this.prompt.method_25404(keyCode, scanCode, modifiers)) {\n return true;\n } else if (this.prompt == null && keyCode 
== 80 && (modifiers & 1) != 0) {\n class_310.method_1551().method_1507(new class_446(this.prevScreen, class_310.method_1551(), class_310.method_1551().field_1690));\n return true;\n } else {\n return super.method_25404(keyCode, scanCode, modifiers);\n }\n }\n\n public boolean method_25402(double mouseX, double mouseY, int button) {\n if (this.prompt != null) {\n return this.prompt.method_25402(mouseX, mouseY, button);\n } else {\n boolean clicked = super.method_25402(mouseX, mouseY, button);\n if (!clicked) {\n this.method_25395(null);\n return true;\n } else {\n return clicked;\n }\n }\n }\n\n public boolean method_25422() {\n return !this.hasPendingChanges;\n }\n\n public void method_25419() {\n this.field_22787.method_1507(this.prevScreen);\n }\n\n public List method_25396() {\n return this.prompt == null ? super.method_25396() : this.prompt.getWidgets();\n }\n\n @Override\n public void setPrompt(@Nullable ScreenPrompt prompt) {\n this.prompt = prompt;\n }\n\n @Nullable\n @Override\n public ScreenPrompt getPrompt() {\n return this.prompt;\n }\n\n @Override\n public Dim2i getDimensions() {\n return new Dim2i(0, 0, this.field_22789, this.field_22790);\n }\n}\n {"url": "https://caffeinemc.net/donate"} low +5 2 2048cee1aed753e10480183673ffb5e685de2ce414e99e93f7d1dd11a87a19af net/caffeinemc/mods/sodium/client/compatibility/checks/PreLaunchChecks package net.caffeinemc.mods.sodium.client.compatibility.checks;\n\nimport net.caffeinemc.mods.sodium.client.platform.PlatformHelper;\nimport org.lwjgl.Version;\n\npublic class PreLaunchChecks {\n private static final String REQUIRED_LWJGL_VERSION = "3.3.3";\n\n public static void checkEnvironment() {\n if (BugChecks.ISSUE_2561) {\n checkLwjglRuntimeVersion();\n }\n }\n\n private static void checkLwjglRuntimeVersion() {\n if (!isUsingKnownCompatibleLwjglVersion()) {\n String advice;\n if (isUsingPrismLauncher()) {\n advice = "It appears you are using Prism Launcher to start the game. 
You can likely fix this problem by opening your instance settings and navigating to the Versionsection in the sidebar.";\n } else {\n advice = "You must change the LWJGL version in your launcher to continue. This is usually controlled by the settings for a profile or instance in your launcher.";\n }\n\n String message = "The game failed to start because the currently active LWJGL version is not compatible.\\n\\nInstalled version: ###CURRENT_VERSION###\\nRequired version: ###REQUIRED_VERSION###\\n\\n###ADVICE_STRING###"\n .replace("###CURRENT_VERSION###", Version.getVersion())\n .replace("###REQUIRED_VERSION###", "3.3.3")\n .replace("###ADVICE_STRING###", advice);\n PlatformHelper.showCriticalErrorAndClose(\n null, "Sodium Renderer - Unsupported LWJGL", message, "https://link.caffeinemc.net/help/sodium/runtime-issue/lwjgl3/gh-2561"\n );\n }\n }\n\n private static boolean isUsingKnownCompatibleLwjglVersion() {\n return Version.getVersion().startsWith("3.3.3");\n }\n\n private static boolean isUsingPrismLauncher() {\n return getLauncherBrand().equalsIgnoreCase("PrismLauncher");\n }\n\n private static String getLauncherBrand() {\n return System.getProperty("minecraft.launcher.brand", "unknown");\n }\n}\n {"url": "https://link.caffeinemc.net/help/sodium/runtime-issue/lwjgl3/gh-2561"} low +6 2 0b9d53bc482f11c0d8c71a9689645132f0a50249838091b3e6f95fefbc279075 net/caffeinemc/mods/sodium/client/compatibility/checks/ModuleScanner package net.caffeinemc.mods.sodium.client.compatibility.checks;\n\nimport com.sun.jna.Platform;\nimport com.sun.jna.platform.win32.Kernel32;\nimport com.sun.jna.platform.win32.Kernel32Util;\nimport com.sun.jna.platform.win32.Tlhelp32.MODULEENTRY32W;\nimport java.nio.file.Files;\nimport java.nio.file.LinkOption;\nimport java.nio.file.Path;\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.List;\nimport net.caffeinemc.mods.sodium.client.platform.MessageBox;\nimport 
net.caffeinemc.mods.sodium.client.platform.NativeWindowHandle;\nimport net.caffeinemc.mods.sodium.client.platform.windows.WindowsFileVersion;\nimport net.caffeinemc.mods.sodium.client.platform.windows.api.version.Version;\nimport net.caffeinemc.mods.sodium.client.platform.windows.api.version.VersionFixedFileInfoStruct;\nimport net.caffeinemc.mods.sodium.client.platform.windows.api.version.VersionInfo;\nimport org.jetbrains.annotations.Nullable;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class ModuleScanner {\n private static final Logger LOGGER = LoggerFactory.getLogger("Sodium-Win32ModuleChecks");\n private static final String[] RTSS_HOOKS_MODULE_NAMES = new String[]{"RTSSHooks64.dll", "RTSSHooks.dll"};\n private static final String[] ASUS_GPU_TWEAK_MODULE_NAMES = new String[]{\n "GTIII-OSD64-GL.dll", "GTIII-OSD-GL.dll", "GTIII-OSD64-VK.dll", "GTIII-OSD-VK.dll", "GTIII-OSD64.dll", "GTIII-OSD.dll"\n };\n\n public static void checkModules(NativeWindowHandle window) {\n List modules;\n try {\n modules = listModules();\n } catch (Throwable var3) {\n LOGGER.warn("Failed to scan the currently loaded modules", var3);\n return;\n }\n\n if (!modules.isEmpty()) {\n if (BugChecks.ISSUE_2048 && isModuleLoaded(modules, RTSS_HOOKS_MODULE_NAMES)) {\n checkRTSSModules(window);\n }\n\n if (BugChecks.ISSUE_2637 && isModuleLoaded(modules, ASUS_GPU_TWEAK_MODULE_NAMES)) {\n checkASUSGpuTweakIII(window);\n }\n }\n }\n\n private static List listModules() {\n if (!Platform.isWindows()) {\n return List.of();\n } else {\n int pid = Kernel32.INSTANCE.GetCurrentProcessId();\n ArrayList modules = new ArrayList();\n\n for (MODULEENTRY32W module : Kernel32Util.getModules(pid)) {\n modules.add(module.szModule());\n }\n\n return Collections.unmodifiableList(modules);\n }\n }\n\n private static void checkRTSSModules(NativeWindowHandle window) {\n LOGGER.warn("RivaTuner Statistics Server (RTSS) has injected into the process! 
Attempting to apply workarounds for compatibility...");\n WindowsFileVersion version = null;\n\n try {\n version = findRTSSModuleVersion();\n } catch (Throwable var3) {\n LOGGER.warn("Exception thrown while reading file version", var3);\n }\n\n if (version == null) {\n LOGGER.warn("Could not determine version of RivaTuner Statistics Server");\n } else {\n LOGGER.info("Detected RivaTuner Statistics Server version: {}", version);\n }\n\n if (version == null || !isRTSSCompatible(version)) {\n MessageBox.showMessageBox(\n window,\n MessageBox.IconType.ERROR,\n "Sodium Renderer",\n "You appear to be using an older version of RivaTuner Statistics Server (RTSS) which is not compatible with Sodium.\\n\\nYou must either update to a newer version (7.3.4 and later) or close the RivaTuner Statistics Server application.\\n\\nFor more information on how to solve this problem, click the 'Help' button.",\n "https://link.caffeinemc.net/help/sodium/incompatible-software/rivatuner-statistics-server/gh-2048"\n );\n throw new RuntimeException(\n "The installed version of RivaTuner Statistics Server (RTSS) is not compatible with Sodium, see here for more details: https://link.caffeinemc.net/help/sodium/incompatible-software/rivatuner-statistics-server/gh-2048"\n );\n }\n }\n\n private static boolean isRTSSCompatible(WindowsFileVersion version) {\n int x = version.x();\n int y = version.y();\n int z = version.z();\n return x > 7 || x == 7 && y > 3 || x == 7 && y == 3 && z >= 4;\n }\n\n private static void checkASUSGpuTweakIII(NativeWindowHandle window) {\n MessageBox.showMessageBox(\n window,\n MessageBox.IconType.ERROR,\n "Sodium Renderer",\n "ASUS GPU Tweak III is not compatible with Minecraft, and causes extreme performance issues and severe graphical corruption when used with Minecraft.\\n\\nYou *must* do one of the following things to continue:\\n\\na) Open the settings of ASUS GPU Tweak III, enable the Blacklist option, click \\"Browse from file...\\", and select the Java runtime 
(javaw.exe) which is used by Minecraft.\\n\\nb) Completely uninstall the ASUS GPU Tweak III application.\\n\\nFor more information on how to solve this problem, click the 'Help' button.",\n "https://link.caffeinemc.net/help/sodium/incompatible-software/asus-gtiii/gh-2637"\n );\n throw new RuntimeException(\n "ASUS GPU Tweak III is not compatible with Minecraft, see here for more details: https://link.caffeinemc.net/help/sodium/incompatible-software/asus-gtiii/gh-2637"\n );\n }\n\n @Nullable\n private static WindowsFileVersion findRTSSModuleVersion() {\n long module;\n try {\n module = net.caffeinemc.mods.sodium.client.platform.windows.api.Kernel32.getModuleHandleByNames(RTSS_HOOKS_MODULE_NAMES);\n } catch (Throwable var9) {\n LOGGER.warn("Failed to locate module", var9);\n return null;\n }\n\n String moduleFileName;\n try {\n moduleFileName = net.caffeinemc.mods.sodium.client.platform.windows.api.Kernel32.getModuleFileName(module);\n } catch (Throwable var8) {\n LOGGER.warn("Failed to get path of module", var8);\n return null;\n }\n\n Path modulePath = Path.of(moduleFileName);\n Path moduleDirectory = modulePath.getParent();\n LOGGER.info("Searching directory: {}", moduleDirectory);\n Path executablePath = moduleDirectory.resolve("RTSS.exe");\n if (!Files.exists(executablePath, new LinkOption[0])) {\n LOGGER.warn("Could not find executable: {}", executablePath);\n return null;\n } else {\n LOGGER.info("Parsing file: {}", executablePath);\n VersionInfo version = Version.getModuleFileVersion(executablePath.toAbsolutePath().toString());\n if (version == null) {\n LOGGER.warn("Couldn't find version structure");\n return null;\n } else {\n VersionFixedFileInfoStruct fileVersion = version.queryFixedFileInfo();\n if (fileVersion == null) {\n LOGGER.warn("Couldn't query file version");\n return null;\n } else {\n return WindowsFileVersion.fromFileVersion(fileVersion);\n }\n }\n }\n }\n\n private static boolean isModuleLoaded(List modules, String[] names) {\n for (String 
name : names) {\n for (String module : modules) {\n if (module.equalsIgnoreCase(name)) {\n return true;\n }\n }\n }\n\n return false;\n }\n}\n {"url": "https://link.caffeinemc.net/help/sodium/incompatible-software/asus-gtiii/gh-2637"} low +7 2 c7ef03b142e9371c5b10fd54ee7a22060872bd3addb96c6fa21ab10ccdc4b481 net/caffeinemc/mods/sodium/client/compatibility/checks/ModuleScanner package net.caffeinemc.mods.sodium.client.compatibility.checks;\n\nimport com.sun.jna.Platform;\nimport com.sun.jna.platform.win32.Kernel32;\nimport com.sun.jna.platform.win32.Kernel32Util;\nimport com.sun.jna.platform.win32.Tlhelp32.MODULEENTRY32W;\nimport java.nio.file.Files;\nimport java.nio.file.LinkOption;\nimport java.nio.file.Path;\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.List;\nimport net.caffeinemc.mods.sodium.client.platform.MessageBox;\nimport net.caffeinemc.mods.sodium.client.platform.NativeWindowHandle;\nimport net.caffeinemc.mods.sodium.client.platform.windows.WindowsFileVersion;\nimport net.caffeinemc.mods.sodium.client.platform.windows.api.version.Version;\nimport net.caffeinemc.mods.sodium.client.platform.windows.api.version.VersionFixedFileInfoStruct;\nimport net.caffeinemc.mods.sodium.client.platform.windows.api.version.VersionInfo;\nimport org.jetbrains.annotations.Nullable;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class ModuleScanner {\n private static final Logger LOGGER = LoggerFactory.getLogger("Sodium-Win32ModuleChecks");\n private static final String[] RTSS_HOOKS_MODULE_NAMES = new String[]{"RTSSHooks64.dll", "RTSSHooks.dll"};\n private static final String[] ASUS_GPU_TWEAK_MODULE_NAMES = new String[]{\n "GTIII-OSD64-GL.dll", "GTIII-OSD-GL.dll", "GTIII-OSD64-VK.dll", "GTIII-OSD-VK.dll", "GTIII-OSD64.dll", "GTIII-OSD.dll"\n };\n\n public static void checkModules(NativeWindowHandle window) {\n List modules;\n try {\n modules = listModules();\n } catch (Throwable var3) {\n LOGGER.warn("Failed to scan the 
currently loaded modules", var3);\n return;\n }\n\n if (!modules.isEmpty()) {\n if (BugChecks.ISSUE_2048 && isModuleLoaded(modules, RTSS_HOOKS_MODULE_NAMES)) {\n checkRTSSModules(window);\n }\n\n if (BugChecks.ISSUE_2637 && isModuleLoaded(modules, ASUS_GPU_TWEAK_MODULE_NAMES)) {\n checkASUSGpuTweakIII(window);\n }\n }\n }\n\n private static List listModules() {\n if (!Platform.isWindows()) {\n return List.of();\n } else {\n int pid = Kernel32.INSTANCE.GetCurrentProcessId();\n ArrayList modules = new ArrayList();\n\n for (MODULEENTRY32W module : Kernel32Util.getModules(pid)) {\n modules.add(module.szModule());\n }\n\n return Collections.unmodifiableList(modules);\n }\n }\n\n private static void checkRTSSModules(NativeWindowHandle window) {\n LOGGER.warn("RivaTuner Statistics Server (RTSS) has injected into the process! Attempting to apply workarounds for compatibility...");\n WindowsFileVersion version = null;\n\n try {\n version = findRTSSModuleVersion();\n } catch (Throwable var3) {\n LOGGER.warn("Exception thrown while reading file version", var3);\n }\n\n if (version == null) {\n LOGGER.warn("Could not determine version of RivaTuner Statistics Server");\n } else {\n LOGGER.info("Detected RivaTuner Statistics Server version: {}", version);\n }\n\n if (version == null || !isRTSSCompatible(version)) {\n MessageBox.showMessageBox(\n window,\n MessageBox.IconType.ERROR,\n "Sodium Renderer",\n "You appear to be using an older version of RivaTuner Statistics Server (RTSS) which is not compatible with Sodium.\\n\\nYou must either update to a newer version (7.3.4 and later) or close the RivaTuner Statistics Server application.\\n\\nFor more information on how to solve this problem, click the 'Help' button.",\n "https://link.caffeinemc.net/help/sodium/incompatible-software/rivatuner-statistics-server/gh-2048"\n );\n throw new RuntimeException(\n "The installed version of RivaTuner Statistics Server (RTSS) is not compatible with Sodium, see here for more details: 
https://link.caffeinemc.net/help/sodium/incompatible-software/rivatuner-statistics-server/gh-2048"\n );\n }\n }\n\n private static boolean isRTSSCompatible(WindowsFileVersion version) {\n int x = version.x();\n int y = version.y();\n int z = version.z();\n return x > 7 || x == 7 && y > 3 || x == 7 && y == 3 && z >= 4;\n }\n\n private static void checkASUSGpuTweakIII(NativeWindowHandle window) {\n MessageBox.showMessageBox(\n window,\n MessageBox.IconType.ERROR,\n "Sodium Renderer",\n "ASUS GPU Tweak III is not compatible with Minecraft, and causes extreme performance issues and severe graphical corruption when used with Minecraft.\\n\\nYou *must* do one of the following things to continue:\\n\\na) Open the settings of ASUS GPU Tweak III, enable the Blacklist option, click \\"Browse from file...\\", and select the Java runtime (javaw.exe) which is used by Minecraft.\\n\\nb) Completely uninstall the ASUS GPU Tweak III application.\\n\\nFor more information on how to solve this problem, click the 'Help' button.",\n "https://link.caffeinemc.net/help/sodium/incompatible-software/asus-gtiii/gh-2637"\n );\n throw new RuntimeException(\n "ASUS GPU Tweak III is not compatible with Minecraft, see here for more details: https://link.caffeinemc.net/help/sodium/incompatible-software/asus-gtiii/gh-2637"\n );\n }\n\n @Nullable\n private static WindowsFileVersion findRTSSModuleVersion() {\n long module;\n try {\n module = net.caffeinemc.mods.sodium.client.platform.windows.api.Kernel32.getModuleHandleByNames(RTSS_HOOKS_MODULE_NAMES);\n } catch (Throwable var9) {\n LOGGER.warn("Failed to locate module", var9);\n return null;\n }\n\n String moduleFileName;\n try {\n moduleFileName = net.caffeinemc.mods.sodium.client.platform.windows.api.Kernel32.getModuleFileName(module);\n } catch (Throwable var8) {\n LOGGER.warn("Failed to get path of module", var8);\n return null;\n }\n\n Path modulePath = Path.of(moduleFileName);\n Path moduleDirectory = modulePath.getParent();\n 
LOGGER.info("Searching directory: {}", moduleDirectory);\n Path executablePath = moduleDirectory.resolve("RTSS.exe");\n if (!Files.exists(executablePath, new LinkOption[0])) {\n LOGGER.warn("Could not find executable: {}", executablePath);\n return null;\n } else {\n LOGGER.info("Parsing file: {}", executablePath);\n VersionInfo version = Version.getModuleFileVersion(executablePath.toAbsolutePath().toString());\n if (version == null) {\n LOGGER.warn("Couldn't find version structure");\n return null;\n } else {\n VersionFixedFileInfoStruct fileVersion = version.queryFixedFileInfo();\n if (fileVersion == null) {\n LOGGER.warn("Couldn't query file version");\n return null;\n } else {\n return WindowsFileVersion.fromFileVersion(fileVersion);\n }\n }\n }\n }\n\n private static boolean isModuleLoaded(List modules, String[] names) {\n for (String name : names) {\n for (String module : modules) {\n if (module.equalsIgnoreCase(name)) {\n return true;\n }\n }\n }\n\n return false;\n }\n}\n {"url": "https://link.caffeinemc.net/help/sodium/incompatible-software/rivatuner-statistics-server/gh-2048"} low +8 2 54aa6d079497c9fc459f84c660a303496d18fa17c35c0e22cbe2160924de212e net/caffeinemc/mods/sodium/client/compatibility/checks/GraphicsDriverChecks package net.caffeinemc.mods.sodium.client.compatibility.checks;\n\nimport net.caffeinemc.mods.sodium.client.compatibility.environment.GlContextInfo;\nimport net.caffeinemc.mods.sodium.client.compatibility.environment.probe.GraphicsAdapterVendor;\nimport net.caffeinemc.mods.sodium.client.compatibility.workarounds.intel.IntelWorkarounds;\nimport net.caffeinemc.mods.sodium.client.compatibility.workarounds.nvidia.NvidiaDriverVersion;\nimport net.caffeinemc.mods.sodium.client.compatibility.workarounds.nvidia.NvidiaWorkarounds;\nimport net.caffeinemc.mods.sodium.client.platform.NativeWindowHandle;\nimport net.caffeinemc.mods.sodium.client.platform.PlatformHelper;\nimport 
net.caffeinemc.mods.sodium.client.platform.windows.WindowsFileVersion;\n\nclass GraphicsDriverChecks {\n static void postContextInit(NativeWindowHandle window, GlContextInfo context) {\n GraphicsAdapterVendor vendor = GraphicsAdapterVendor.fromContext(context);\n if (vendor != GraphicsAdapterVendor.UNKNOWN) {\n if (vendor == GraphicsAdapterVendor.INTEL && BugChecks.ISSUE_899) {\n WindowsFileVersion installedVersion = IntelWorkarounds.findIntelDriverMatchingBug899();\n if (installedVersion != null) {\n String installedVersionString = installedVersion.toString();\n PlatformHelper.showCriticalErrorAndClose(\n window,\n "Sodium Renderer - Unsupported Driver",\n "The game failed to start because the currently installed Intel Graphics Driver is not compatible.\\n\\nInstalled version: ###CURRENT_DRIVER###\\nRequired version: 10.18.10.5161 (or newer)\\n\\nPlease click the 'Help' button to read more about how to fix this problem."\n .replace("###CURRENT_DRIVER###", installedVersionString),\n "https://link.caffeinemc.net/help/sodium/graphics-driver/windows/intel/gh-899"\n );\n }\n }\n\n if (vendor == GraphicsAdapterVendor.NVIDIA && BugChecks.ISSUE_1486) {\n WindowsFileVersion installedVersion = NvidiaWorkarounds.findNvidiaDriverMatchingBug1486();\n if (installedVersion != null) {\n String installedVersionString = NvidiaDriverVersion.parse(installedVersion).toString();\n PlatformHelper.showCriticalErrorAndClose(\n window,\n "Sodium Renderer - Unsupported Driver",\n "The game failed to start because the currently installed NVIDIA Graphics Driver is not compatible.\\n\\nInstalled version: ###CURRENT_DRIVER###\\nRequired version: 536.23 (or newer)\\n\\nPlease click the 'Help' button to read more about how to fix this problem."\n .replace("###CURRENT_DRIVER###", installedVersionString),\n "https://link.caffeinemc.net/help/sodium/graphics-driver/windows/nvidia/gh-1486"\n );\n }\n }\n }\n }\n}\n {"url": 
"https://link.caffeinemc.net/help/sodium/graphics-driver/windows/nvidia/gh-1486"} low +9 2 5ff865ff6c2e250096fb15b9a943c645deb8558eddc72fa1b492748eb2c78b32 net/caffeinemc/mods/sodium/client/compatibility/checks/GraphicsDriverChecks package net.caffeinemc.mods.sodium.client.compatibility.checks;\n\nimport net.caffeinemc.mods.sodium.client.compatibility.environment.GlContextInfo;\nimport net.caffeinemc.mods.sodium.client.compatibility.environment.probe.GraphicsAdapterVendor;\nimport net.caffeinemc.mods.sodium.client.compatibility.workarounds.intel.IntelWorkarounds;\nimport net.caffeinemc.mods.sodium.client.compatibility.workarounds.nvidia.NvidiaDriverVersion;\nimport net.caffeinemc.mods.sodium.client.compatibility.workarounds.nvidia.NvidiaWorkarounds;\nimport net.caffeinemc.mods.sodium.client.platform.NativeWindowHandle;\nimport net.caffeinemc.mods.sodium.client.platform.PlatformHelper;\nimport net.caffeinemc.mods.sodium.client.platform.windows.WindowsFileVersion;\n\nclass GraphicsDriverChecks {\n static void postContextInit(NativeWindowHandle window, GlContextInfo context) {\n GraphicsAdapterVendor vendor = GraphicsAdapterVendor.fromContext(context);\n if (vendor != GraphicsAdapterVendor.UNKNOWN) {\n if (vendor == GraphicsAdapterVendor.INTEL && BugChecks.ISSUE_899) {\n WindowsFileVersion installedVersion = IntelWorkarounds.findIntelDriverMatchingBug899();\n if (installedVersion != null) {\n String installedVersionString = installedVersion.toString();\n PlatformHelper.showCriticalErrorAndClose(\n window,\n "Sodium Renderer - Unsupported Driver",\n "The game failed to start because the currently installed Intel Graphics Driver is not compatible.\\n\\nInstalled version: ###CURRENT_DRIVER###\\nRequired version: 10.18.10.5161 (or newer)\\n\\nPlease click the 'Help' button to read more about how to fix this problem."\n .replace("###CURRENT_DRIVER###", installedVersionString),\n "https://link.caffeinemc.net/help/sodium/graphics-driver/windows/intel/gh-899"\n );\n }\n }\n\n 
if (vendor == GraphicsAdapterVendor.NVIDIA && BugChecks.ISSUE_1486) {\n WindowsFileVersion installedVersion = NvidiaWorkarounds.findNvidiaDriverMatchingBug1486();\n if (installedVersion != null) {\n String installedVersionString = NvidiaDriverVersion.parse(installedVersion).toString();\n PlatformHelper.showCriticalErrorAndClose(\n window,\n "Sodium Renderer - Unsupported Driver",\n "The game failed to start because the currently installed NVIDIA Graphics Driver is not compatible.\\n\\nInstalled version: ###CURRENT_DRIVER###\\nRequired version: 536.23 (or newer)\\n\\nPlease click the 'Help' button to read more about how to fix this problem."\n .replace("###CURRENT_DRIVER###", installedVersionString),\n "https://link.caffeinemc.net/help/sodium/graphics-driver/windows/nvidia/gh-1486"\n );\n }\n }\n }\n }\n}\n {"url": "https://link.caffeinemc.net/help/sodium/graphics-driver/windows/intel/gh-899"} low +10 2 dcf24bb91e7861b7a382958053c3efc201684e93df7ce058e849effa3f947fb0 net/caffeinemc/mods/sodium/client/checks/ResourcePackScanner package net.caffeinemc.mods.sodium.client.checks;\n\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.List;\nimport java.util.Set;\nimport net.caffeinemc.mods.sodium.client.console.Console;\nimport net.caffeinemc.mods.sodium.client.console.message.MessageLevel;\nimport net.minecraft.class_3258;\nimport net.minecraft.class_3259;\nimport net.minecraft.class_3262;\nimport net.minecraft.class_3264;\nimport net.minecraft.class_3300;\nimport org.jetbrains.annotations.NotNull;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class ResourcePackScanner {\n private static final Logger LOGGER = LoggerFactory.getLogger("Sodium-ResourcePackScanner");\n private static final Set SHADER_PROGRAM_BLACKLIST = Set.of(\n "rendertype_solid.vsh",\n "rendertype_solid.fsh",\n "rendertype_solid.json",\n "rendertype_cutout_mipped.vsh",\n "rendertype_cutout_mipped.fsh",\n 
"rendertype_cutout_mipped.json",\n "rendertype_cutout.vsh",\n "rendertype_cutout.fsh",\n "rendertype_cutout.json",\n "rendertype_translucent.vsh",\n "rendertype_translucent.fsh",\n "rendertype_translucent.json",\n "rendertype_tripwire.vsh",\n "rendertype_tripwire.fsh",\n "rendertype_tripwire.json",\n "rendertype_clouds.vsh",\n "rendertype_clouds.fsh",\n "rendertype_clouds.json"\n );\n private static final Set SHADER_INCLUDE_BLACKLIST = Set.of("light.glsl", "fog.glsl");\n\n public static void checkIfCoreShaderLoaded(class_3300 manager) {\n List outputs = manager.method_29213()\n .filter(ResourcePackScanner::isExternalResourcePack)\n .map(ResourcePackScanner::scanResources)\n .toList();\n printToasts(outputs);\n printCompatibilityReport(outputs);\n }\n\n private static void printToasts(Collection resourcePacks) {\n List incompatibleResourcePacks = resourcePacks.stream().filter(pack -> !pack.shaderPrograms.isEmpty()).toList();\n List likelyIncompatibleResourcePacks = resourcePacks.stream()\n .filter(pack -> !pack.shaderIncludes.isEmpty())\n .filter(pack -> !incompatibleResourcePacks.contains(pack))\n .toList();\n boolean shown = false;\n if (!incompatibleResourcePacks.isEmpty()) {\n showConsoleMessage("sodium.console.core_shaders_error", true, MessageLevel.SEVERE);\n\n for (ResourcePackScanner.ScannedResourcePack entry : incompatibleResourcePacks) {\n showConsoleMessage(getResourcePackName(entry.resourcePack), false, MessageLevel.SEVERE);\n }\n\n shown = true;\n }\n\n if (!likelyIncompatibleResourcePacks.isEmpty()) {\n showConsoleMessage("sodium.console.core_shaders_warn", true, MessageLevel.WARN);\n\n for (ResourcePackScanner.ScannedResourcePack entry : likelyIncompatibleResourcePacks) {\n showConsoleMessage(getResourcePackName(entry.resourcePack), false, MessageLevel.WARN);\n }\n\n shown = true;\n }\n\n if (shown) {\n showConsoleMessage("sodium.console.core_shaders_info", true, MessageLevel.INFO);\n }\n }\n\n private static void printCompatibilityReport(Collection 
scanResults) {\n StringBuilder builder = new StringBuilder();\n\n for (ResourcePackScanner.ScannedResourcePack entry : scanResults) {\n if (!entry.shaderPrograms.isEmpty() || !entry.shaderIncludes.isEmpty()) {\n builder.append("- Resource pack: ").append(getResourcePackName(entry.resourcePack)).append("\\n");\n if (!entry.shaderPrograms.isEmpty()) {\n emitProblem(\n builder,\n "The resource pack replaces terrain shaders, which are not supported",\n "https://github.com/CaffeineMC/sodium/wiki/Resource-Packs",\n entry.shaderPrograms\n );\n }\n\n if (!entry.shaderIncludes.isEmpty()) {\n emitProblem(\n builder,\n "The resource pack modifies shader include files, which are not fully supported",\n "https://github.com/CaffeineMC/sodium/wiki/Resource-Packs",\n entry.shaderIncludes\n );\n }\n }\n }\n\n if (!builder.isEmpty()) {\n LOGGER.error("The following compatibility issues were found with installed resource packs:\\n{}", builder);\n }\n }\n\n private static void emitProblem(StringBuilder builder, String description, String url, List resources) {\n builder.append("\\t- Problem found: ").append("\\n");\n builder.append("\\t\\t- Description:\\n\\t\\t\\t").append(description).append("\\n");\n builder.append("\\t\\t- More information: ").append(url).append("\\n");\n builder.append("\\t\\t- Files: ").append("\\n");\n\n for (String resource : resources) {\n builder.append("\\t\\t\\t- ").append(resource).append("\\n");\n }\n }\n\n @NotNull\n private static ResourcePackScanner.ScannedResourcePack scanResources(class_3262 resourcePack) {\n List ignoredShaders = determineIgnoredShaders(resourcePack);\n if (!ignoredShaders.isEmpty()) {\n LOGGER.warn(\n "Resource pack '{}' indicates the following shaders should be ignored: {}", getResourcePackName(resourcePack), String.join(", ", ignoredShaders)\n );\n }\n\n ArrayList unsupportedShaderPrograms = new ArrayList();\n ArrayList unsupportedShaderIncludes = new ArrayList();\n resourcePack.method_14408(class_3264.field_14188, "minecraft", 
"shaders", (identifier, supplier) -> {\n String path = identifier.method_12832();\n String name = path.substring(path.lastIndexOf(47) + 1);\n if (!ignoredShaders.contains(name)) {\n if (SHADER_PROGRAM_BLACKLIST.contains(name)) {\n unsupportedShaderPrograms.add(path);\n } else if (SHADER_INCLUDE_BLACKLIST.contains(name)) {\n unsupportedShaderIncludes.add(path);\n }\n }\n });\n return new ResourcePackScanner.ScannedResourcePack(resourcePack, unsupportedShaderPrograms, unsupportedShaderIncludes);\n }\n\n private static boolean isExternalResourcePack(class_3262 pack) {\n return pack instanceof class_3259 || pack instanceof class_3258;\n }\n\n private static String getResourcePackName(class_3262 pack) {\n String path = pack.method_14409();\n return path.startsWith("file/") ? path.substring(5) : path;\n }\n\n private static List determineIgnoredShaders(class_3262 resourcePack) {\n ArrayList ignoredShaders = new ArrayList();\n\n try {\n SodiumResourcePackMetadata meta = (SodiumResourcePackMetadata)resourcePack.method_14407(SodiumResourcePackMetadata.SERIALIZER);\n if (meta != null) {\n ignoredShaders.addAll(meta.ignoredShaders());\n }\n } catch (IOException var3) {\n LOGGER.error("Failed to load pack.mcmeta file for resource pack '{}'", resourcePack.method_14409());\n }\n\n return ignoredShaders;\n }\n\n private static void showConsoleMessage(String message, boolean translatable, MessageLevel messageLevel) {\n Console.instance().logMessage(messageLevel, message, translatable, 12.5);\n }\n\n private record ScannedResourcePack(class_3262 resourcePack, ArrayList shaderPrograms, ArrayList shaderIncludes) {\n }\n}\n {"url": "https://github.com/CaffeineMC/sodium/wiki/Resource-Packs"} low +11 3 fc7e089f517eab447befde28ce1b5b2438bc5a08131eb338adeaaacbdef7d6cf net/fabricmc/fabric/impl/base/event/EventFactoryImpl \N {} low +12 4 967302d02a45f4cfa29af6604a50d12097295caa1aabff33b1a3d8e7638f9962 net/caffeinemc/mods/sodium/client/platform/windows/WindowsFileVersion package 
net.caffeinemc.mods.sodium.client.platform.windows;\n\nimport net.caffeinemc.mods.sodium.client.platform.windows.api.version.VersionFixedFileInfoStruct;\nimport org.jetbrains.annotations.NotNull;\n\npublic record WindowsFileVersion(int x, int y, int z, int w) {\n @NotNull\n public static WindowsFileVersion fromFileVersion(VersionFixedFileInfoStruct fileVersion) {\n int x = fileVersion.getFileVersionMostSignificantBits() >>> 16 & 65535;\n int y = fileVersion.getFileVersionMostSignificantBits() >>> 0 & 65535;\n int z = fileVersion.getFileVersionLeastSignificantBits() >>> 16 & 65535;\n int w = fileVersion.getFileVersionLeastSignificantBits() >>> 0 & 65535;\n return new WindowsFileVersion(x, y, z, w);\n }\n\n public String toString() {\n return "%s.%s.%s.%s".formatted(this.x, this.y, this.z, this.w);\n }\n}\n {} high +13 5 7005a5b5d443c84d758eceb963351a357d93b5c15eeb19caf20aae99d65a623b net/caffeinemc/mods/sodium/desktop/LaunchWarn package net.caffeinemc.mods.sodium.desktop;\n\nimport java.awt.GraphicsEnvironment;\nimport java.io.IOException;\nimport javax.swing.JDialog;\nimport javax.swing.JOptionPane;\nimport javax.swing.UIManager;\nimport javax.swing.UnsupportedLookAndFeelException;\nimport net.caffeinemc.mods.sodium.desktop.utils.browse.BrowseUrlHandler;\n\npublic class LaunchWarn {\n private static final String HELP_URL = "https://link.caffeinemc.net/guides/sodium/installation";\n private static final String RICH_MESSAGE = "

You have tried to launch Sodium (a Minecraft mod) directly, but it is not an executable program or mod installer. Instead, you must install Fabric Loader for Minecraft, and then place this file in your mods directory.

If this is your first time installing mods with Fabric Loader, then click the \\"Help\\" button for an installation guide.

";\n private static final String FALLBACK_MESSAGE = "

You have tried to launch Sodium (a Minecraft mod) directly, but it is not an executable program or mod installer. Instead, you must install Fabric Loader for Minecraft, and then place this file in your mods directory.

If this is your first time installing mods with Fabric Loader, then visit https://link.caffeinemc.net/guides/sodium/installation for an installation guide.

";\n private static final String FAILED_TO_BROWSE_MESSAGE = "

Failed to open the default browser! Your system may be misconfigured. Please open the URL https://link.caffeinemc.net/guides/sodium/installation manually.

";\n public static final String WINDOW_TITLE = "Sodium";\n\n public static void main(String[] args) {\n if (GraphicsEnvironment.isHeadless()) {\n showHeadlessError();\n } else {\n showGraphicalError();\n }\n }\n\n private static void showHeadlessError() {\n System.err\n .println(\n "

You have tried to launch Sodium (a Minecraft mod) directly, but it is not an executable program or mod installer. Instead, you must install Fabric Loader for Minecraft, and then place this file in your mods directory.

If this is your first time installing mods with Fabric Loader, then visit https://link.caffeinemc.net/guides/sodium/installation for an installation guide.

"\n );\n }\n\n private static void showGraphicalError() {\n trySetSystemLookAndFeel();\n trySetSystemFontPreferences();\n BrowseUrlHandler browseUrlHandler = BrowseUrlHandler.createImplementation();\n if (browseUrlHandler != null) {\n showRichGraphicalDialog(browseUrlHandler);\n } else {\n showFallbackGraphicalDialog();\n }\n\n System.exit(0);\n }\n\n private static void showRichGraphicalDialog(BrowseUrlHandler browseUrlHandler) {\n int selectedOption = showDialogBox(\n "

You have tried to launch Sodium (a Minecraft mod) directly, but it is not an executable program or mod installer. Instead, you must install Fabric Loader for Minecraft, and then place this file in your mods directory.

If this is your first time installing mods with Fabric Loader, then click the \\"Help\\" button for an installation guide.

",\n "Sodium",\n 0,\n 1,\n new String[]{"Help", "Close"},\n 0\n );\n if (selectedOption == 0) {\n log("Opening URL: https://link.caffeinemc.net/guides/sodium/installation");\n\n try {\n browseUrlHandler.browseTo("https://link.caffeinemc.net/guides/sodium/installation");\n } catch (IOException var3) {\n log("Failed to open default web browser!", var3);\n showDialogBox(\n "

Failed to open the default browser! Your system may be misconfigured. Please open the URL https://link.caffeinemc.net/guides/sodium/installation manually.

",\n "Sodium",\n -1,\n 2,\n null,\n -1\n );\n }\n }\n }\n\n private static void showFallbackGraphicalDialog() {\n showDialogBox(\n "

You have tried to launch Sodium (a Minecraft mod) directly, but it is not an executable program or mod installer. Instead, you must install Fabric Loader for Minecraft, and then place this file in your mods directory.

If this is your first time installing mods with Fabric Loader, then visit https://link.caffeinemc.net/guides/sodium/installation for an installation guide.

",\n "Sodium",\n -1,\n 1,\n null,\n null\n );\n }\n\n private static int showDialogBox(String message, String title, int optionType, int messageType, String[] options, Object initialValue) {\n JOptionPane pane = new JOptionPane(message, messageType, optionType, null, options, initialValue);\n JDialog dialog = pane.createDialog(title);\n dialog.setVisible(true);\n Object selectedValue = pane.getValue();\n if (selectedValue == null) {\n return -1;\n } else if (options == null) {\n return selectedValue instanceof Integer ? (Integer)selectedValue : -1;\n } else {\n for (int counter = 0; counter < options.length; counter++) {\n String option = options[counter];\n if (option.equals(selectedValue)) {\n return counter;\n }\n }\n\n return -1;\n }\n }\n\n private static void trySetSystemLookAndFeel() {\n try {\n UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());\n } catch (UnsupportedLookAndFeelException | ReflectiveOperationException var1) {\n }\n }\n\n private static void trySetSystemFontPreferences() {\n System.setProperty("awt.useSystemAAFontSettings", "on");\n }\n\n private static void log(String message) {\n System.err.println(message);\n }\n\n private static void log(String message, Throwable exception) {\n System.err.println(message);\n exception.printStackTrace(System.err);\n }\n}\n {} low +\. 
+ + +-- +-- Name: delphi_report_issue_details_id_seq; Type: SEQUENCE SET; Schema: public; Owner: labrinth +-- + +SELECT pg_catalog.setval('public.delphi_report_issue_details_id_seq', 13, true); + + +-- +-- Name: delphi_report_issues_id_seq; Type: SEQUENCE SET; Schema: public; Owner: labrinth +-- + +SELECT pg_catalog.setval('public.delphi_report_issues_id_seq', 5, true); + + +-- +-- Name: delphi_reports_id_seq; Type: SEQUENCE SET; Schema: public; Owner: labrinth +-- + +SELECT pg_catalog.setval('public.delphi_reports_id_seq', 1, true); + + +-- +-- PostgreSQL database dump complete +-- + +\unrestrict RGysBmMc8KFBQ9AssusGyNPozUiB43hdmIPxlv5KSWbX7tdW7XVMPpMginvod9K diff --git a/apps/labrinth/migrations/20250810155316_delphi-reports.sql b/apps/labrinth/migrations/20250810155316_delphi-reports.sql new file mode 100644 index 0000000000..2aa7a75913 --- /dev/null +++ b/apps/labrinth/migrations/20250810155316_delphi-reports.sql @@ -0,0 +1,47 @@ +CREATE TYPE delphi_severity AS ENUM ('low', 'medium', 'high', 'severe'); + +CREATE TYPE delphi_report_issue_status AS ENUM ('pending', 'safe', 'unsafe'); + +-- A Delphi analysis report for a project version +CREATE TABLE delphi_reports ( + id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + file_id BIGINT REFERENCES files (id) + ON DELETE SET NULL + ON UPDATE CASCADE, + delphi_version INTEGER NOT NULL, + artifact_url VARCHAR(2048) NOT NULL, + created TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL, + severity DELPHI_SEVERITY NOT NULL, + UNIQUE (file_id, delphi_version) +); +CREATE INDEX delphi_version ON delphi_reports (delphi_version); + +-- An issue found in a Delphi report. 
Every issue belongs to a report, +-- and a report can have zero, one, or more issues attached to it +CREATE TABLE delphi_report_issues ( + id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + report_id BIGINT NOT NULL REFERENCES delphi_reports (id) + ON DELETE CASCADE + ON UPDATE CASCADE, + issue_type TEXT NOT NULL, + status DELPHI_REPORT_ISSUE_STATUS NOT NULL, + UNIQUE (report_id, issue_type) +); +CREATE INDEX delphi_report_issue_by_status_and_type ON delphi_report_issues (status, issue_type); + +-- The details of a Delphi report issue, which contain data about a +-- Java class affected by it. Every Delphi report issue details object +-- belongs to a specific issue, and an issue can have zero, one, or +-- more details attached to it. (Some issues may be artifact-wide, +-- or otherwise not really specific to any particular class.) +CREATE TABLE delphi_report_issue_details ( + id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + issue_id BIGINT NOT NULL REFERENCES delphi_report_issues (id) + ON DELETE CASCADE + ON UPDATE CASCADE, + key TEXT NOT NULL, + file_path TEXT NOT NULL, + decompiled_source TEXT, + data JSONB NOT NULL, + severity DELPHI_SEVERITY NOT NULL +); diff --git a/apps/labrinth/src/auth/mod.rs b/apps/labrinth/src/auth/mod.rs index 953d978c52..8131c77f5f 100644 --- a/apps/labrinth/src/auth/mod.rs +++ b/apps/labrinth/src/auth/mod.rs @@ -112,7 +112,15 @@ impl AuthenticationError { } #[derive( - Serialize, Deserialize, Default, Eq, PartialEq, Clone, Copy, Debug, + Debug, + Clone, + Copy, + PartialEq, + Eq, + Default, + Serialize, + Deserialize, + utoipa::ToSchema, )] #[serde(rename_all = "lowercase")] pub enum AuthProvider { diff --git a/apps/labrinth/src/database/models/delphi_report_item.rs b/apps/labrinth/src/database/models/delphi_report_item.rs new file mode 100644 index 0000000000..8bcaf4d938 --- /dev/null +++ b/apps/labrinth/src/database/models/delphi_report_item.rs @@ -0,0 +1,314 @@ +use std::{ + collections::HashMap, + fmt::{self, Display, 
Formatter}, +}; + +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use sqlx::types::Json; + +use crate::database::models::{ + DBFileId, DBProjectId, DatabaseError, DelphiReportId, + DelphiReportIssueDetailsId, DelphiReportIssueId, +}; + +/// A Delphi malware analysis report for a project version file. +/// +/// Malware analysis reports usually belong to a specific project file, +/// but they can get orphaned if the versions they belong to are deleted. +/// Thus, deleting versions does not delete these reports. +#[derive(Serialize)] +pub struct DBDelphiReport { + pub id: DelphiReportId, + pub file_id: Option, + /// A sequential, monotonically increasing version number for the + /// Delphi version that generated this report. + pub delphi_version: i32, + pub artifact_url: String, + pub created: DateTime, + pub severity: DelphiSeverity, +} + +impl DBDelphiReport { + pub async fn upsert( + &self, + transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, + ) -> Result { + Ok(DelphiReportId(sqlx::query_scalar!( + " + INSERT INTO delphi_reports (file_id, delphi_version, artifact_url, severity) + VALUES ($1, $2, $3, $4) + ON CONFLICT (file_id, delphi_version) DO UPDATE SET + delphi_version = $2, artifact_url = $3, created = CURRENT_TIMESTAMP, severity = $4 + RETURNING id + ", + self.file_id as Option, + self.delphi_version, + self.artifact_url, + self.severity as DelphiSeverity, + ) + .fetch_one(&mut **transaction) + .await?)) + } +} + +/// A severity level reported by Delphi. +#[derive( + Deserialize, + Serialize, + Debug, + Clone, + Copy, + PartialEq, + Eq, + Hash, + sqlx::Type, + utoipa::ToSchema, +)] +// The canonical serialized form of this enum is the snake_case representation. +// We add `alias`es so we can deserialize it from how Delphi sends it, +// which follows the Java conventions of `SCREAMING_SNAKE_CASE`. 
+#[serde(rename_all = "snake_case")] +#[sqlx(type_name = "delphi_severity", rename_all = "snake_case")] +pub enum DelphiSeverity { + #[serde(alias = "LOW")] + Low, + #[serde(alias = "MEDIUM")] + Medium, + #[serde(alias = "HIGH")] + High, + #[serde(alias = "SEVERE")] + Severe, +} + +/// An issue found in a Delphi report. Every issue belongs to a report, +/// and a report can have zero, one, or more issues attached to it. +#[derive(Deserialize, Serialize)] +pub struct DBDelphiReportIssue { + pub id: DelphiReportIssueId, + pub report_id: DelphiReportId, + pub issue_type: String, + pub status: DelphiReportIssueStatus, +} + +/// An status a Delphi report issue can have. +#[derive( + Deserialize, + Serialize, + Debug, + Clone, + Copy, + PartialEq, + Eq, + Hash, + sqlx::Type, + utoipa::ToSchema, +)] +#[serde(rename_all = "snake_case")] +#[sqlx(type_name = "delphi_report_issue_status", rename_all = "snake_case")] +pub enum DelphiReportIssueStatus { + /// The issue is pending review by the moderation team. + Pending, + /// The issue has been rejected (i.e., reviewed as a false positive). + /// The affected artifact has thus been verified to be clean, other issues + /// with it notwithstanding. + Safe, + /// The issue has been approved (i.e., reviewed as a valid, true positive). + /// The affected artifact has thus been verified to be potentially malicious. + Unsafe, +} + +impl Display for DelphiReportIssueStatus { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + self.serialize(f) + } +} + +/// An order in which Delphi report issues can be sorted during queries. 
+#[derive(Deserialize, Serialize, Debug, Clone, Copy, PartialEq, Eq, Hash)] +#[serde(rename_all = "snake_case")] +pub enum DelphiReportListOrder { + CreatedAsc, + CreatedDesc, + PendingStatusFirst, + SeverityAsc, + SeverityDesc, +} + +impl Display for DelphiReportListOrder { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + self.serialize(f) + } +} + +/// A result returned from a Delphi report issue query, slightly +/// denormalized with related entity information for ease of +/// consumption by clients. +#[derive(Serialize)] +pub struct DelphiReportIssueResult { + pub issue: DBDelphiReportIssue, + pub report: DBDelphiReport, + pub details: Vec, + pub project_id: Option, + pub project_published: Option>, +} + +impl DBDelphiReportIssue { + pub async fn upsert( + &self, + transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, + ) -> Result { + Ok(DelphiReportIssueId( + sqlx::query_scalar!( + " + INSERT INTO delphi_report_issues (report_id, issue_type, status) + VALUES ($1, $2, $3) + ON CONFLICT (report_id, issue_type) DO UPDATE SET status = $3 + RETURNING id + ", + self.report_id as DelphiReportId, + self.issue_type, + self.status as DelphiReportIssueStatus, + ) + .fetch_one(&mut **transaction) + .await?, + )) + } + + pub async fn find_all_by( + ty: Option, + status: Option, + order_by: Option, + count: Option, + offset: Option, + exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>, + ) -> Result, DatabaseError> { + Ok(sqlx::query!( + r#" + SELECT + delphi_report_issues.id AS "id", report_id, + issue_type, + delphi_report_issues.status AS "status: DelphiReportIssueStatus", + + file_id, delphi_version, artifact_url, created, severity AS "severity: DelphiSeverity", + json_array(SELECT to_jsonb(delphi_report_issue_details) + FROM delphi_report_issue_details + WHERE issue_id = delphi_report_issues.id + ) AS "details: sqlx::types::Json>", + versions.mod_id AS "project_id?", mods.published AS "project_published?" 
+ FROM delphi_report_issues + INNER JOIN delphi_reports ON delphi_reports.id = report_id + LEFT OUTER JOIN files ON files.id = file_id + LEFT OUTER JOIN versions ON versions.id = files.version_id + LEFT OUTER JOIN mods ON mods.id = versions.mod_id + WHERE + (issue_type = $1 OR $1 IS NULL) + AND (delphi_report_issues.status = $2 OR $2 IS NULL) + ORDER BY + CASE WHEN $3 = 'created_asc' THEN delphi_reports.created ELSE TO_TIMESTAMP(0) END ASC, + CASE WHEN $3 = 'created_desc' THEN delphi_reports.created ELSE TO_TIMESTAMP(0) END DESC, + CASE WHEN $3 = 'pending_status_first' THEN delphi_report_issues.status ELSE 'pending'::delphi_report_issue_status END ASC, + CASE WHEN $3 = 'severity_asc' THEN delphi_reports.severity ELSE 'low'::delphi_severity END ASC, + CASE WHEN $3 = 'severity_desc' THEN delphi_reports.severity ELSE 'low'::delphi_severity END DESC + OFFSET $5 + LIMIT $4 + "#, + ty, + status as Option, + order_by.map(|order_by| order_by.to_string()), + count.map(|count| count as i64), + offset, + ) + .map(|row| DelphiReportIssueResult { + issue: DBDelphiReportIssue { + id: DelphiReportIssueId(row.id), + report_id: DelphiReportId(row.report_id), + issue_type: row.issue_type, + status: row.status, + }, + report: DBDelphiReport { + id: DelphiReportId(row.report_id), + file_id: row.file_id.map(DBFileId), + delphi_version: row.delphi_version, + artifact_url: row.artifact_url, + created: row.created, + severity: row.severity, + }, + details: row + .details + .into_iter() + .flat_map(|details_list| details_list.0) + .collect(), + project_id: row.project_id.map(DBProjectId), + project_published: row.project_published, + }) + .fetch_all(exec) + .await?) + } +} + +/// The details of a Delphi report issue, which contain data about a +/// Java class affected by it. Every Delphi report issue details object +/// belongs to a specific issue, and an issue can have zero, one, or +/// more details attached to it. 
(Some issues may be artifact-wide, +/// or otherwise not really specific to any particular class.) +#[derive( + Debug, Clone, Deserialize, Serialize, utoipa::ToSchema, sqlx::FromRow, +)] +pub struct ReportIssueDetail { + /// ID of this issue detail. + pub id: DelphiReportIssueDetailsId, + /// ID of the issue this detail belongs to. + pub issue_id: DelphiReportIssueId, + /// Opaque identifier for where this issue detail is located, relative to + /// the file scanned. + /// + /// This acts as a stable identifier for an issue detail, even across + /// different versions of the same file. + pub key: String, + /// Name of the Java class path in which this issue was found. + pub file_path: String, + /// Decompiled, pretty-printed source of the Java class. + pub decompiled_source: Option, + /// Extra detail-specific info for this detail. + #[sqlx(json)] + pub data: HashMap, + /// How important is this issue, as flagged by Delphi? + pub severity: DelphiSeverity, +} + +impl ReportIssueDetail { + pub async fn insert( + &self, + transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, + ) -> Result { + Ok(DelphiReportIssueDetailsId(sqlx::query_scalar!( + " + INSERT INTO delphi_report_issue_details (issue_id, key, file_path, decompiled_source, data, severity) + VALUES ($1, $2, $3, $4, $5, $6) + RETURNING id + ", + self.issue_id as DelphiReportIssueId, + self.key, + self.file_path, + self.decompiled_source, + sqlx::types::Json(&self.data) as Json<&HashMap>, + self.severity as DelphiSeverity, + ) + .fetch_one(&mut **transaction) + .await?)) + } + + pub async fn remove_all_by_issue_id( + issue_id: DelphiReportIssueId, + transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, + ) -> Result { + Ok(sqlx::query!( + "DELETE FROM delphi_report_issue_details WHERE issue_id = $1", + issue_id as DelphiReportIssueId, + ) + .execute(&mut **transaction) + .await? 
+ .rows_affected()) + } +} diff --git a/apps/labrinth/src/database/models/ids.rs b/apps/labrinth/src/database/models/ids.rs index 483f6841e5..10f8cc8ae9 100644 --- a/apps/labrinth/src/database/models/ids.rs +++ b/apps/labrinth/src/database/models/ids.rs @@ -94,7 +94,7 @@ macro_rules! generate_bulk_ids { macro_rules! impl_db_id_interface { ($id_struct:ident, $db_id_struct:ident, $(, generator: $generator_function:ident @ $db_table:expr, $(bulk_generator: $bulk_generator_function:ident,)?)?) => { - #[derive(Copy, Clone, Debug, Type, Serialize, Deserialize, PartialEq, Eq, Hash)] + #[derive(Copy, Clone, Debug, Type, Serialize, Deserialize, PartialEq, Eq, Hash, utoipa::ToSchema)] #[sqlx(transparent)] pub struct $db_id_struct(pub i64); @@ -140,8 +140,8 @@ macro_rules! db_id_interface { }; } -macro_rules! short_id_type { - ($name:ident) => { +macro_rules! id_type { + ($name:ident as $type:ty) => { #[derive( Copy, Clone, @@ -152,9 +152,10 @@ macro_rules! short_id_type { Eq, PartialEq, Hash, + utoipa::ToSchema, )] #[sqlx(transparent)] - pub struct $name(pub i32); + pub struct $name(pub $type); }; } @@ -268,14 +269,17 @@ db_id_interface!( generator: generate_affiliate_code_id @ "affiliate_codes", ); -short_id_type!(CategoryId); -short_id_type!(GameId); -short_id_type!(LinkPlatformId); -short_id_type!(LoaderFieldEnumId); -short_id_type!(LoaderFieldEnumValueId); -short_id_type!(LoaderFieldId); -short_id_type!(LoaderId); -short_id_type!(NotificationActionId); -short_id_type!(ProjectTypeId); -short_id_type!(ReportTypeId); -short_id_type!(StatusId); +id_type!(CategoryId as i32); +id_type!(GameId as i32); +id_type!(LinkPlatformId as i32); +id_type!(LoaderFieldEnumId as i32); +id_type!(LoaderFieldEnumValueId as i32); +id_type!(LoaderFieldId as i32); +id_type!(LoaderId as i32); +id_type!(NotificationActionId as i32); +id_type!(ProjectTypeId as i32); +id_type!(ReportTypeId as i32); +id_type!(StatusId as i32); +id_type!(DelphiReportId as i64); +id_type!(DelphiReportIssueId as i64); 
+id_type!(DelphiReportIssueDetailsId as i64); diff --git a/apps/labrinth/src/database/models/mod.rs b/apps/labrinth/src/database/models/mod.rs index 0e5f31cdf8..0d78310cee 100644 --- a/apps/labrinth/src/database/models/mod.rs +++ b/apps/labrinth/src/database/models/mod.rs @@ -4,6 +4,7 @@ pub mod affiliate_code_item; pub mod categories; pub mod charge_item; pub mod collection_item; +pub mod delphi_report_item; pub mod flow_item; pub mod friend_item; pub mod ids; diff --git a/apps/labrinth/src/database/models/thread_item.rs b/apps/labrinth/src/database/models/thread_item.rs index 04275d62e1..d01a1b3842 100644 --- a/apps/labrinth/src/database/models/thread_item.rs +++ b/apps/labrinth/src/database/models/thread_item.rs @@ -11,7 +11,7 @@ pub struct ThreadBuilder { pub report_id: Option, } -#[derive(Clone, Serialize)] +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] pub struct DBThread { pub id: DBThreadId, @@ -30,7 +30,7 @@ pub struct ThreadMessageBuilder { pub hide_identity: bool, } -#[derive(Serialize, Deserialize, Clone)] +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] pub struct DBThreadMessage { pub id: DBThreadMessageId, pub thread_id: DBThreadId, diff --git a/apps/labrinth/src/database/models/version_item.rs b/apps/labrinth/src/database/models/version_item.rs index c97a43e50c..d0ba9c0b97 100644 --- a/apps/labrinth/src/database/models/version_item.rs +++ b/apps/labrinth/src/database/models/version_item.rs @@ -6,6 +6,7 @@ use crate::database::models::loader_fields::{ }; use crate::database::redis::RedisPool; use crate::models::projects::{FileType, VersionStatus}; +use crate::routes::internal::delphi::DelphiRunParameters; use chrono::{DateTime, Utc}; use dashmap::{DashMap, DashSet}; use futures::TryStreamExt; @@ -164,6 +165,17 @@ impl VersionFileBuilder { .await?; } + if let Err(err) = crate::routes::internal::delphi::run( + &mut **transaction, + DelphiRunParameters { + file_id: file_id.into(), + }, + ) + .await + { + 
tracing::error!("Error submitting new file to Delphi: {err}"); + } + Ok(file_id) } } diff --git a/apps/labrinth/src/file_hosting/mock.rs b/apps/labrinth/src/file_hosting/mock.rs index c04f92420f..2565bd287c 100644 --- a/apps/labrinth/src/file_hosting/mock.rs +++ b/apps/labrinth/src/file_hosting/mock.rs @@ -27,7 +27,9 @@ impl FileHost for MockHost { file_publicity: FileHostPublicity, file_bytes: Bytes, ) -> Result { - let path = get_file_path(file_name, file_publicity); + let file_name = urlencoding::decode(file_name) + .map_err(|_| FileHostingError::InvalidFilename)?; + let path = get_file_path(&file_name, file_publicity); std::fs::create_dir_all( path.parent().ok_or(FileHostingError::InvalidFilename)?, )?; diff --git a/apps/labrinth/src/main.rs b/apps/labrinth/src/main.rs index f3cab342f0..9484d62224 100644 --- a/apps/labrinth/src/main.rs +++ b/apps/labrinth/src/main.rs @@ -21,6 +21,7 @@ use std::sync::Arc; use tracing::{Instrument, error, info, info_span}; use tracing_actix_web::TracingLogger; use utoipa::OpenApi; +use utoipa::openapi::security::{ApiKey, ApiKeyValue, SecurityScheme}; use utoipa_actix_web::AppExt; use utoipa_swagger_ui::SwaggerUi; @@ -262,9 +263,23 @@ async fn main() -> std::io::Result<()> { } #[derive(utoipa::OpenApi)] -#[openapi(info(title = "Labrinth"))] +#[openapi(info(title = "Labrinth"), modifiers(&SecurityAddon))] struct ApiDoc; +struct SecurityAddon; + +impl utoipa::Modify for SecurityAddon { + fn modify(&self, openapi: &mut utoipa::openapi::OpenApi) { + let components = openapi.components.as_mut().unwrap(); + components.add_security_scheme( + "bearer_auth", + SecurityScheme::ApiKey(ApiKey::Header(ApiKeyValue::new( + "authorization", + ))), + ); + } +} + fn log_error(err: &actix_web::Error) { if err.as_response_error().status_code().is_client_error() { tracing::debug!( diff --git a/apps/labrinth/src/models/v3/projects.rs b/apps/labrinth/src/models/v3/projects.rs index f70d806af1..c6c1101ffe 100644 --- 
a/apps/labrinth/src/models/v3/projects.rs +++ b/apps/labrinth/src/models/v3/projects.rs @@ -5,7 +5,7 @@ use crate::database::models::loader_fields::VersionField; use crate::database::models::project_item::{LinkUrl, ProjectQueryResult}; use crate::database::models::version_item::VersionQueryResult; use crate::models::ids::{ - OrganizationId, ProjectId, TeamId, ThreadId, VersionId, + FileId, OrganizationId, ProjectId, TeamId, ThreadId, VersionId, }; use ariadne::ids::UserId; use chrono::{DateTime, Utc}; @@ -731,6 +731,7 @@ impl From for Version { .files .into_iter() .map(|f| VersionFile { + id: Some(FileId(f.id.0 as u64)), url: f.url, filename: f.filename, hashes: f.hashes, @@ -855,6 +856,10 @@ impl VersionStatus { /// A single project file, with a url for the file and the file's hash #[derive(Serialize, Deserialize, Clone)] pub struct VersionFile { + /// The ID of the file. Every file has an ID once created, but it + /// is not known until it indeed has been created. + #[serde(default, skip_serializing_if = "Option::is_none")] + pub id: Option, /// A map of hashes of the file. The key is the hashing algorithm /// and the value is the string version of the hash. 
pub hashes: std::collections::HashMap, diff --git a/apps/labrinth/src/models/v3/threads.rs b/apps/labrinth/src/models/v3/threads.rs index a1a32214bd..5b918899d2 100644 --- a/apps/labrinth/src/models/v3/threads.rs +++ b/apps/labrinth/src/models/v3/threads.rs @@ -7,7 +7,7 @@ use ariadne::ids::UserId; use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; -#[derive(Serialize, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] pub struct Thread { pub id: ThreadId, #[serde(rename = "type")] @@ -18,7 +18,7 @@ pub struct Thread { pub members: Vec, } -#[derive(Serialize, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] pub struct ThreadMessage { pub id: ThreadMessageId, pub author_id: Option, @@ -27,7 +27,7 @@ pub struct ThreadMessage { pub hide_identity: bool, } -#[derive(Serialize, Deserialize, Clone)] +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] #[serde(tag = "type", rename_all = "snake_case")] pub enum MessageBody { Text { @@ -50,7 +50,9 @@ pub enum MessageBody { }, } -#[derive(Serialize, Deserialize, Eq, PartialEq, Copy, Clone)] +#[derive( + Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, utoipa::ToSchema, +)] #[serde(rename_all = "snake_case")] pub enum ThreadType { Report, diff --git a/apps/labrinth/src/models/v3/users.rs b/apps/labrinth/src/models/v3/users.rs index d8b0a2e822..0f276fc8fa 100644 --- a/apps/labrinth/src/models/v3/users.rs +++ b/apps/labrinth/src/models/v3/users.rs @@ -8,7 +8,7 @@ use serde::{Deserialize, Serialize}; pub const DELETED_USER: UserId = UserId(127155982985829); bitflags::bitflags! { - #[derive(Copy, Clone, Debug)] + #[derive(Debug, Clone, Copy)] pub struct Badges: u64 { const MIDAS = 1 << 0; const EARLY_MODPACK_ADOPTER = 1 << 1; @@ -21,6 +21,23 @@ bitflags::bitflags! 
{ } } +impl utoipa::PartialSchema for Badges { + fn schema() -> utoipa::openapi::RefOr { + u64::schema() + } +} + +impl utoipa::ToSchema for Badges { + fn schemas( + schemas: &mut Vec<( + String, + utoipa::openapi::RefOr, + )>, + ) { + u64::schemas(schemas); + } +} + bitflags_serde_impl!(Badges, u64); impl Default for Badges { @@ -29,7 +46,7 @@ impl Default for Badges { } } -#[derive(Serialize, Deserialize, Clone, Debug)] +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] pub struct User { pub id: UserId, pub username: String, @@ -52,7 +69,7 @@ pub struct User { pub github_id: Option, } -#[derive(Serialize, Deserialize, Clone, Debug)] +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] pub struct UserPayoutData { pub paypal_address: Option, pub paypal_country: Option, @@ -137,7 +154,9 @@ impl User { } } -#[derive(Serialize, Deserialize, PartialEq, Eq, Clone, Debug)] +#[derive( + Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, utoipa::ToSchema, +)] #[serde(rename_all = "lowercase")] pub enum Role { Developer, diff --git a/apps/labrinth/src/routes/internal/admin.rs b/apps/labrinth/src/routes/internal/admin.rs index 518a315aa8..c4c64b5bf6 100644 --- a/apps/labrinth/src/routes/internal/admin.rs +++ b/apps/labrinth/src/routes/internal/admin.rs @@ -1,12 +1,9 @@ use crate::auth::validate::get_user_record_from_bearer_token; -use crate::database::models::thread_item::ThreadMessageBuilder; use crate::database::redis::RedisPool; use crate::models::analytics::Download; use crate::models::ids::ProjectId; use crate::models::pats::Scopes; -use crate::models::threads::MessageBody; use crate::queue::analytics::AnalyticsQueue; -use crate::queue::moderation::AUTOMOD_ID; use crate::queue::session::AuthQueue; use crate::routes::ApiError; use crate::search::SearchConfig; @@ -17,17 +14,14 @@ use modrinth_maxmind::MaxMind; use serde::Deserialize; use sqlx::PgPool; use std::collections::HashMap; -use std::fmt::Write; use std::net::Ipv4Addr; use 
std::sync::Arc; -use tracing::info; pub fn config(cfg: &mut web::ServiceConfig) { cfg.service( web::scope("admin") .service(count_download) - .service(force_reindex) - .service(delphi_result_ingest), + .service(force_reindex), ); } @@ -163,98 +157,3 @@ pub async fn force_reindex( index_projects(pool.as_ref().clone(), redis.clone(), &config).await?; Ok(HttpResponse::NoContent().finish()) } - -#[derive(Deserialize)] -pub struct DelphiIngest { - pub url: String, - pub project_id: crate::models::ids::ProjectId, - pub version_id: crate::models::ids::VersionId, - pub issues: HashMap>, -} - -#[post("/_delphi", guard = "admin_key_guard")] -pub async fn delphi_result_ingest( - pool: web::Data, - redis: web::Data, - body: web::Json, -) -> Result { - if body.issues.is_empty() { - info!("No issues found for file {}", body.url); - return Ok(HttpResponse::NoContent().finish()); - } - - let webhook_url = dotenvy::var("DELPHI_SLACK_WEBHOOK")?; - - let project = crate::database::models::DBProject::get_id( - body.project_id.into(), - &**pool, - &redis, - ) - .await? 
- .ok_or_else(|| { - ApiError::InvalidInput(format!( - "Project {} does not exist", - body.project_id - )) - })?; - - let mut header = format!("Suspicious traces found at {}", body.url); - - for (issue, trace) in &body.issues { - for (path, code) in trace { - write!( - &mut header, - "\n issue {issue} found at file {path}: \n ```\n{code}\n```" - ) - .unwrap(); - } - } - - crate::util::webhook::send_slack_project_webhook( - body.project_id, - &pool, - &redis, - webhook_url, - Some(header), - ) - .await - .ok(); - - let mut thread_header = format!( - "Suspicious traces found at [version {}](https://modrinth.com/project/{}/version/{})", - body.version_id, body.project_id, body.version_id - ); - - for (issue, trace) in &body.issues { - for path in trace.keys() { - write!( - &mut thread_header, - "\n\n- issue {issue} found at file {path}" - ) - .unwrap(); - } - - if trace.is_empty() { - write!(&mut thread_header, "\n\n- issue {issue} found").unwrap(); - } - } - - let mut transaction = pool.begin().await?; - ThreadMessageBuilder { - author_id: Some(crate::database::models::DBUserId(AUTOMOD_ID)), - body: MessageBody::Text { - body: thread_header, - private: true, - replying_to: None, - associated_images: vec![], - }, - thread_id: project.thread_id, - hide_identity: false, - } - .insert(&mut transaction) - .await?; - - transaction.commit().await?; - - Ok(HttpResponse::NoContent().finish()) -} diff --git a/apps/labrinth/src/routes/internal/delphi.rs b/apps/labrinth/src/routes/internal/delphi.rs new file mode 100644 index 0000000000..0c690634ac --- /dev/null +++ b/apps/labrinth/src/routes/internal/delphi.rs @@ -0,0 +1,443 @@ +use std::{collections::HashMap, fmt::Write, sync::LazyLock, time::Instant}; + +use actix_web::{HttpRequest, HttpResponse, get, post, put, web}; +use chrono::{DateTime, Utc}; +use eyre::eyre; +use reqwest::header::{HeaderMap, HeaderValue, USER_AGENT}; +use serde::Deserialize; +use sqlx::PgPool; +use tokio::sync::Mutex; +use tracing::info; + +use crate::{ 
+ auth::check_is_moderator_from_headers, + database::{ + models::{ + DBFileId, DelphiReportId, DelphiReportIssueDetailsId, + DelphiReportIssueId, + delphi_report_item::{ + DBDelphiReport, DBDelphiReportIssue, DelphiReportIssueStatus, + DelphiReportListOrder, DelphiSeverity, ReportIssueDetail, + }, + }, + redis::RedisPool, + }, + models::{ + ids::{ProjectId, VersionId}, + pats::Scopes, + }, + queue::session::AuthQueue, + routes::ApiError, + util::{error::Context, guards::admin_key_guard}, +}; + +pub fn config(cfg: &mut web::ServiceConfig) { + cfg.service( + web::scope("delphi") + .service(ingest_report) + .service(_run) + .service(version) + .service(issues) + .service(update_issue) + .service(issue_type_schema), + ); +} + +static DELPHI_CLIENT: LazyLock = LazyLock::new(|| { + reqwest::Client::builder() + .default_headers({ + HeaderMap::from_iter([( + USER_AGENT, + HeaderValue::from_static(concat!( + "Labrinth/", + env!("COMPILATION_DATE") + )), + )]) + }) + .build() + .unwrap() +}); + +#[derive(Deserialize)] +struct DelphiReportIssueDetails { + pub file: String, + pub key: String, + pub data: HashMap, + pub severity: DelphiSeverity, +} + +#[derive(Deserialize)] +struct DelphiReport { + pub url: String, + pub project_id: crate::models::ids::ProjectId, + #[serde(rename = "version_id")] + pub version_id: crate::models::ids::VersionId, + pub file_id: crate::models::ids::FileId, + /// A sequential, monotonically increasing version number for the + /// Delphi version that generated this report. + pub delphi_version: i32, + pub issues: HashMap>, + pub severity: DelphiSeverity, + /// Map of [`DelphiReportIssueDetails::file`] to the decompiled Java source + /// code. 
+ pub decompiled_sources: HashMap>, +} + +impl DelphiReport { + async fn send_to_slack( + &self, + pool: &PgPool, + redis: &RedisPool, + ) -> Result<(), ApiError> { + let webhook_url = dotenvy::var("DELPHI_SLACK_WEBHOOK")?; + + let mut message_header = + format!("⚠️ Suspicious traces found at {}", self.url); + + for (issue, trace) in &self.issues { + for DelphiReportIssueDetails { file, .. } in trace { + let decompiled_source = + self.decompiled_sources.get(file).and_then(|o| o.as_ref()); + + write!( + &mut message_header, + "\n issue {issue} found at class `{file}`:\n```\n{}\n```", + decompiled_source.as_ref().map_or( + "No decompiled source available", + |decompiled_source| &**decompiled_source + ) + ) + .ok(); + } + } + + crate::util::webhook::send_slack_project_webhook( + self.project_id, + pool, + redis, + webhook_url, + Some(message_header), + ) + .await + } +} + +#[derive(Deserialize)] +pub struct DelphiRunParameters { + pub file_id: crate::models::ids::FileId, +} + +#[post("ingest", guard = "admin_key_guard")] +async fn ingest_report( + pool: web::Data, + redis: web::Data, + web::Json(report): web::Json, +) -> Result<(), ApiError> { + // treat this as an internal error, since it's not a bad request from the + // client's side - it's *our* fault for handling the Delphi schema wrong + // this could happen if Delphi updates and Labrinth doesn't + let report = serde_json::from_value::(report.clone()) + .wrap_internal_err_with(|| { + eyre!( + "Delphi sent a response which does not match our schema\n\n{}", + serde_json::to_string_pretty(&report).unwrap() + ) + })?; + + ingest_report_deserialized(pool, redis, report).await +} + +#[tracing::instrument( + level = "info", + skip_all, + fields( + %report.url, + %report.file_id, + %report.project_id, + %report.version_id, + ) +)] +async fn ingest_report_deserialized( + pool: web::Data, + redis: web::Data, + report: DelphiReport, +) -> Result<(), ApiError> { + if report.issues.is_empty() { + info!("No issues found for 
file"); + return Ok(()); + } + + report.send_to_slack(&pool, &redis).await.ok(); + + let mut transaction = pool.begin().await?; + + let report_id = DBDelphiReport { + id: DelphiReportId(0), // This will be set by the database + file_id: Some(DBFileId(report.file_id.0 as i64)), + delphi_version: report.delphi_version, + artifact_url: report.url.clone(), + created: DateTime::::MIN_UTC, // This will be set by the database + severity: report.severity, + } + .upsert(&mut transaction) + .await?; + + info!( + num_issues = %report.issues.len(), + "Delphi found issues in file", + ); + + for (issue_type, issue_details) in report.issues { + let issue_id = DBDelphiReportIssue { + id: DelphiReportIssueId(0), // This will be set by the database + report_id, + issue_type, + status: DelphiReportIssueStatus::Pending, + } + .upsert(&mut transaction) + .await?; + + // This is required to handle the case where the same Delphi version is re-run on the same file + ReportIssueDetail::remove_all_by_issue_id(issue_id, &mut transaction) + .await?; + + for issue_detail in issue_details { + let decompiled_source = + report.decompiled_sources.get(&issue_detail.file); + + ReportIssueDetail { + id: DelphiReportIssueDetailsId(0), // This will be set by the database + issue_id, + key: issue_detail.key, + file_path: issue_detail.file, + decompiled_source: decompiled_source.cloned().flatten(), + data: issue_detail.data, + severity: issue_detail.severity, + } + .insert(&mut transaction) + .await?; + } + } + + transaction.commit().await?; + + Ok(()) +} + +pub async fn run( + exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>, + run_parameters: DelphiRunParameters, +) -> Result { + let file_data = sqlx::query!( + r#" + SELECT + version_id AS "version_id: crate::database::models::DBVersionId", + versions.mod_id AS "project_id: crate::database::models::DBProjectId", + files.url AS "url" + FROM files INNER JOIN versions ON files.version_id = versions.id + WHERE files.id = $1 + "#, + 
run_parameters.file_id.0 as i64 + ) + .fetch_one(exec) + .await?; + + tracing::debug!( + "Running Delphi for project {}, version {}, file {}", + file_data.project_id.0, + file_data.version_id.0, + run_parameters.file_id.0 + ); + + DELPHI_CLIENT + .post(dotenvy::var("DELPHI_URL")?) + .json(&serde_json::json!({ + "url": file_data.url, + "project_id": ProjectId(file_data.project_id.0 as u64), + "version_id": VersionId(file_data.version_id.0 as u64), + "file_id": run_parameters.file_id, + })) + .send() + .await + .and_then(|res| res.error_for_status()) + .map_err(ApiError::Delphi)?; + + Ok(HttpResponse::NoContent().finish()) +} + +#[post("run")] +async fn _run( + req: HttpRequest, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, + run_parameters: web::Query, +) -> Result { + check_is_moderator_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Scopes::PROJECT_READ, + ) + .await?; + + run(&**pool, run_parameters.into_inner()).await +} + +#[get("version")] +async fn version( + req: HttpRequest, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + check_is_moderator_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Scopes::PROJECT_READ, + ) + .await?; + + Ok(HttpResponse::Ok().json( + sqlx::query_scalar!("SELECT MAX(delphi_version) FROM delphi_reports") + .fetch_one(&**pool) + .await?, + )) +} + +#[derive(Deserialize)] +struct DelphiIssuesSearchOptions { + #[serde(rename = "type")] + ty: Option, + status: Option, + order_by: Option, + count: Option, + offset: Option, +} + +#[get("issues")] +async fn issues( + req: HttpRequest, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, + web::Query(search_options): web::Query, +) -> Result { + check_is_moderator_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Scopes::PROJECT_READ, + ) + .await?; + + Ok(HttpResponse::Ok().json( + DBDelphiReportIssue::find_all_by( + search_options.ty, + search_options.status, + 
search_options.order_by, + search_options.count, + search_options + .offset + .map(|offset| offset.try_into()) + .transpose() + .map_err(|err| { + ApiError::InvalidInput(format!("Invalid offset: {err}")) + })?, + &**pool, + ) + .await?, + )) +} + +#[put("issue/{issue_id}")] +async fn update_issue( + req: HttpRequest, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, + issue_id: web::Path, + web::Json(update_data): web::Json, +) -> Result { + check_is_moderator_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Scopes::PROJECT_READ, + ) + .await?; + + let new_id = issue_id.into_inner(); + + let mut transaction = pool.begin().await?; + + let modified_same_issue = (DBDelphiReportIssue { + id: new_id, // Doesn't matter, upsert done for values of other fields + report_id: update_data.report_id, + issue_type: update_data.issue_type, + status: update_data.status, + }) + .upsert(&mut transaction) + .await? + == new_id; + + transaction.commit().await?; + + if modified_same_issue { + Ok(HttpResponse::NoContent().finish()) + } else { + Ok(HttpResponse::Created().finish()) + } +} + +#[get("issue_type/schema")] +async fn issue_type_schema( + req: HttpRequest, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + check_is_moderator_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Scopes::PROJECT_READ, + ) + .await?; + + // This route is expected to be called often by the frontend, and Delphi is not necessarily + // built to scale beyond malware analysis, so cache the result of its quasi-constant-valued + // schema route to alleviate the load on it + + static CACHED_ISSUE_TYPE_SCHEMA: Mutex< + Option<(serde_json::Map, Instant)>, + > = Mutex::const_new(None); + + match &mut *CACHED_ISSUE_TYPE_SCHEMA.lock().await { + Some((schema, last_fetch)) if last_fetch.elapsed().as_secs() < 60 => { + Ok(HttpResponse::Ok().json(schema)) + } + cache_entry => Ok(HttpResponse::Ok().json( + &cache_entry + .insert(( + 
DELPHI_CLIENT + .get(format!("{}/schema", dotenvy::var("DELPHI_URL")?)) + .send() + .await + .and_then(|res| res.error_for_status()) + .map_err(ApiError::Delphi)? + .json::>() + .await + .map_err(ApiError::Delphi)?, + Instant::now(), + )) + .0, + )), + } +} diff --git a/apps/labrinth/src/routes/internal/mod.rs b/apps/labrinth/src/routes/internal/mod.rs index f15da09f0b..af79ed3408 100644 --- a/apps/labrinth/src/routes/internal/mod.rs +++ b/apps/labrinth/src/routes/internal/mod.rs @@ -1,6 +1,7 @@ pub(crate) mod admin; pub mod affiliate; pub mod billing; +pub mod delphi; pub mod external_notifications; pub mod flows; pub mod gdpr; @@ -32,7 +33,8 @@ pub fn config(cfg: &mut actix_web::web::ServiceConfig) { .configure(medal::config) .configure(external_notifications::config) .configure(affiliate::config) - .configure(mural::config), + .configure(mural::config) + .configure(delphi::config), ); } diff --git a/apps/labrinth/src/routes/internal/moderation.rs b/apps/labrinth/src/routes/internal/moderation/mod.rs similarity index 80% rename from apps/labrinth/src/routes/internal/moderation.rs rename to apps/labrinth/src/routes/internal/moderation/mod.rs index 973183820b..29b7331253 100644 --- a/apps/labrinth/src/routes/internal/moderation.rs +++ b/apps/labrinth/src/routes/internal/moderation/mod.rs @@ -1,8 +1,7 @@ use super::ApiError; use crate::database; -use crate::database::models::{DBOrganization, DBTeamId, DBTeamMember, DBUser}; use crate::database::redis::RedisPool; -use crate::models::ids::{OrganizationId, TeamId}; +use crate::models::ids::OrganizationId; use crate::models::projects::{Project, ProjectStatus}; use crate::queue::moderation::{ApprovalType, IdentifiedFile, MissingMetadata}; use crate::queue::session::AuthQueue; @@ -10,15 +9,22 @@ use crate::util::error::Context; use crate::{auth::check_is_moderator_from_headers, models::pats::Scopes}; use actix_web::{HttpRequest, get, post, web}; use ariadne::ids::{UserId, random_base62}; -use eyre::eyre; +use 
ownership::get_projects_ownership; use serde::{Deserialize, Serialize}; use sqlx::PgPool; use std::collections::HashMap; +mod ownership; +mod tech_review; + pub fn config(cfg: &mut utoipa_actix_web::service_config::ServiceConfig) { cfg.service(get_projects) .service(get_project_meta) - .service(set_project_meta); + .service(set_project_meta) + .service( + utoipa_actix_web::scope("/tech-review") + .configure(tech_review::config), + ); } #[derive(Deserialize, utoipa::ToSchema)] @@ -47,7 +53,7 @@ pub struct FetchedProject { } /// Fetched information on who owns a project. -#[derive(Debug, Serialize, Deserialize, utoipa::ToSchema)] +#[derive(Debug, Serialize, Deserialize, utoipa::ToSchema, Clone)] #[serde(tag = "kind", rename_all = "snake_case")] pub enum Ownership { /// Project is owned by a team, and this is the team owner. @@ -129,73 +135,20 @@ pub async fn get_projects_internal( .map(crate::models::projects::Project::from) .collect::>(); - let team_ids = projects - .iter() - .map(|project| project.team_id) - .map(DBTeamId::from) - .collect::>(); - let org_ids = projects - .iter() - .filter_map(|project| project.organization) - .collect::>(); - - let team_members = - DBTeamMember::get_from_team_full_many(&team_ids, &**pool, &redis) - .await - .wrap_internal_err("failed to fetch team members")?; - let users = DBUser::get_many_ids( - &team_members - .iter() - .map(|member| member.user_id) - .collect::>(), - &**pool, - &redis, - ) - .await - .wrap_internal_err("failed to fetch user data of team members")?; - let orgs = DBOrganization::get_many(&org_ids, &**pool, &redis) + let ownerships = get_projects_ownership(&projects, &pool, &redis) .await - .wrap_internal_err("failed to fetch organizations")?; - - let map_project = |project: Project| -> Result { - let project_id = project.id; - let ownership = if let Some(org_id) = project.organization { - let org = orgs - .iter() - .find(|org| OrganizationId::from(org.id) == org_id) - .wrap_internal_err_with(|| { - eyre!( - 
"project {project_id} is owned by an invalid organization {org_id}" - ) - })?; - - Ownership::Organization { - id: OrganizationId::from(org.id), - name: org.name.clone(), - icon_url: org.icon_url.clone(), - } - } else { - let team_id = project.team_id; - let team_owner = team_members.iter().find(|member| TeamId::from(member.team_id) == team_id && member.is_owner) - .wrap_internal_err_with(|| eyre!("project {project_id} is owned by a team {team_id} which has no valid owner"))?; - let team_owner_id = team_owner.user_id; - let user = users.iter().find(|user| user.id == team_owner_id) - .wrap_internal_err_with(|| eyre!("project {project_id} is owned by a team {team_id} which has owner {} which does not exist", UserId::from(team_owner_id)))?; - - Ownership::User { - id: UserId::from(user.id), - name: user.username.clone(), - icon_url: user.avatar_url.clone(), - } - }; + .wrap_internal_err("failed to fetch project ownerships")?; - Ok(FetchedProject { ownership, project }) - }; + let map_project = + |(project, ownership): (Project, Ownership)| -> FetchedProject { + FetchedProject { ownership, project } + }; let projects = projects .into_iter() + .zip(ownerships) .map(map_project) - .collect::, _>>()?; + .collect::>(); Ok(web::Json(projects)) } diff --git a/apps/labrinth/src/routes/internal/moderation/ownership.rs b/apps/labrinth/src/routes/internal/moderation/ownership.rs new file mode 100644 index 0000000000..5979811cee --- /dev/null +++ b/apps/labrinth/src/routes/internal/moderation/ownership.rs @@ -0,0 +1,84 @@ +use crate::database::models::{DBOrganization, DBTeamId, DBTeamMember, DBUser}; +use crate::database::redis::RedisPool; +use crate::models::ids::OrganizationId; +use crate::routes::internal::moderation::Ownership; +use crate::util::error::Context; +use ariadne::ids::UserId; +use eyre::eyre; +use sqlx::PgPool; + +/// Fetches ownership information for multiple projects efficiently +pub async fn get_projects_ownership( + projects: 
&[crate::models::projects::Project], + pool: &PgPool, + redis: &RedisPool, +) -> Result, crate::routes::ApiError> { + let team_ids = projects + .iter() + .map(|project| project.team_id) + .map(DBTeamId::from) + .collect::>(); + let org_ids = projects + .iter() + .filter_map(|project| project.organization) + .collect::>(); + + let team_members = + DBTeamMember::get_from_team_full_many(&team_ids, pool, redis) + .await + .wrap_internal_err("failed to fetch team members")?; + let users = DBUser::get_many_ids( + &team_members + .iter() + .map(|member| member.user_id) + .collect::>(), + pool, + redis, + ) + .await + .wrap_internal_err("failed to fetch user data of team members")?; + let orgs = DBOrganization::get_many(&org_ids, pool, redis) + .await + .wrap_internal_err("failed to fetch organizations")?; + + let mut ownerships = Vec::with_capacity(projects.len()); + + for project in projects { + let project_id = project.id; + let ownership = if let Some(org_id) = project.organization { + let org = orgs + .iter() + .find(|org| OrganizationId::from(org.id) == org_id) + .wrap_internal_err_with(|| { + eyre!( + "project {project_id} is owned by an invalid organization {org_id}" + ) + })?; + + Ownership::Organization { + id: OrganizationId::from(org.id), + name: org.name.clone(), + icon_url: org.icon_url.clone(), + } + } else { + let team_id = project.team_id; + let team_owner = team_members.iter().find(|member| { + crate::models::ids::TeamId::from(member.team_id) == team_id && member.is_owner + }) + .wrap_internal_err_with(|| eyre!("project {project_id} is owned by a team {team_id} which has no valid owner"))?; + let team_owner_id = team_owner.user_id; + let user = users.iter().find(|user| user.id == team_owner_id) + .wrap_internal_err_with(|| eyre!("project {project_id} is owned by a team {team_id} which has owner {} which does not exist", UserId::from(team_owner_id)))?; + + Ownership::User { + id: ariadne::ids::UserId::from(user.id), + name: user.username.clone(), + 
icon_url: user.avatar_url.clone(), + } + }; + + ownerships.push(ownership); + } + + Ok(ownerships) +} diff --git a/apps/labrinth/src/routes/internal/moderation/tech_review.rs b/apps/labrinth/src/routes/internal/moderation/tech_review.rs new file mode 100644 index 0000000000..eb2d82a854 --- /dev/null +++ b/apps/labrinth/src/routes/internal/moderation/tech_review.rs @@ -0,0 +1,537 @@ +use std::{collections::HashMap, fmt}; + +use actix_web::{HttpRequest, get, post, web}; +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use sqlx::PgPool; +use tokio_stream::StreamExt; + +use super::ownership::get_projects_ownership; +use crate::{ + auth::check_is_moderator_from_headers, + database::{ + DBProject, + models::{ + DBProjectId, DBThread, DBThreadId, DBUser, DelphiReportId, + DelphiReportIssueId, ProjectTypeId, + delphi_report_item::{ + DelphiReportIssueStatus, DelphiSeverity, ReportIssueDetail, + }, + }, + redis::RedisPool, + }, + models::{ + ids::{FileId, ProjectId, ThreadId, VersionId}, + pats::Scopes, + projects::Project, + threads::Thread, + }, + queue::session::AuthQueue, + routes::{ApiError, internal::moderation::Ownership}, + util::error::Context, +}; + +pub fn config(cfg: &mut utoipa_actix_web::service_config::ServiceConfig) { + cfg.service(search_projects) + .service(get_report) + .service(get_issue) + .service(update_issue); +} + +/// Arguments for searching project technical reviews. 
+#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] +pub struct SearchProjects { + #[serde(default = "default_limit")] + #[schema(default = 20)] + pub limit: u64, + #[serde(default)] + #[schema(default = 0)] + pub page: u64, + #[serde(default)] + pub filter: SearchProjectsFilter, + #[serde(default = "default_sort_by")] + pub sort_by: SearchProjectsSort, +} + +fn default_limit() -> u64 { + 20 +} + +fn default_sort_by() -> SearchProjectsSort { + SearchProjectsSort::CreatedAsc +} + +#[derive(Debug, Clone, Default, Serialize, Deserialize, utoipa::ToSchema)] +pub struct SearchProjectsFilter { + pub project_type: Vec, +} + +#[derive( + Debug, + Clone, + Copy, + PartialEq, + Eq, + Hash, + Serialize, + Deserialize, + utoipa::ToSchema, +)] +#[serde(rename_all = "snake_case")] +pub enum SearchProjectsSort { + CreatedAsc, + CreatedDesc, + SeverityAsc, + SeverityDesc, +} + +impl fmt::Display for SearchProjectsSort { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let s = serde_json::to_value(*self).unwrap(); + let s = s.as_str().unwrap(); + write!(f, "{s}") + } +} + +#[derive(Debug, Serialize, Deserialize, utoipa::ToSchema)] +pub struct FileReport { + /// ID of this report. + pub id: DelphiReportId, + /// ID of the file that was scanned. + pub file_id: FileId, + /// ID of the project version this report is for. + pub version_id: VersionId, + /// ID of the project this report is for. + pub project_id: ProjectId, + /// When the report for this file was created. + pub created: DateTime, + /// Why this project was flagged. + pub flag_reason: FlagReason, + /// According to this report, how likely is the project malicious? + pub severity: DelphiSeverity, + /// Name of the flagged file. + pub file_name: String, + /// Size of the flagged file, in bytes. + pub file_size: i32, + /// URL to download the flagged file. + pub download_url: String, + /// What issues appeared in the file. + pub issues: Vec, +} + +/// Issue raised by Delphi in a flagged file. 
+/// +/// The issue is scoped to the JAR, not any specific class, but issues can be +/// raised because they appeared in a class - see [`FileIssueDetails`]. +#[derive(Debug, Serialize, Deserialize, utoipa::ToSchema)] +pub struct FileIssue { + /// ID of the issue. + pub id: DelphiReportIssueId, + /// ID of the report this issue is a part of. + pub report_id: DelphiReportId, + /// Delphi-determined kind of issue that this is, e.g. `OBFUSCATED_NAMES`. + /// + /// Labrinth does not know the full set of kinds of issues, so this is kept + /// as a string. + pub issue_type: String, + /// Is this issue valid (malicious) or a false positive (safe)? + pub status: DelphiReportIssueStatus, + /// Details of why this issue might have been raised, such as what file it + /// was found in. + pub details: Vec, +} + +/// Why a project was flagged for technical review. +#[derive( + Debug, + Clone, + Copy, + PartialEq, + Eq, + Hash, + Serialize, + Deserialize, + utoipa::ToSchema, +)] +#[serde(rename_all = "snake_case")] +pub enum FlagReason { + /// Delphi anti-malware scanner flagged a file in the project. + Delphi, +} + +/// Get info on an issue in a Delphi report. 
+#[utoipa::path( + security(("bearer_auth" = [])), + responses((status = OK, body = inline(FileIssue))) +)] +#[get("/issue/{issue_id}")] +async fn get_issue( + req: HttpRequest, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, + path: web::Path<(DelphiReportIssueId,)>, +) -> Result, ApiError> { + check_is_moderator_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Scopes::PROJECT_READ, + ) + .await?; + + let (issue_id,) = path.into_inner(); + let row = sqlx::query!( + r#" + SELECT + to_jsonb(dri) + || jsonb_build_object( + 'details', json_array( + SELECT to_jsonb(drid) + FROM delphi_report_issue_details drid + WHERE drid.issue_id = dri.id + ) + ) AS "data!: sqlx::types::Json" + FROM delphi_report_issues dri + WHERE dri.id = $1 + "#, + issue_id as DelphiReportIssueId, + ) + .fetch_optional(&**pool) + .await + .wrap_internal_err("failed to fetch issue from database")? + .ok_or(ApiError::NotFound)?; + + Ok(web::Json(row.data.0)) +} + +/// Get info on a specific report for a project. 
+#[utoipa::path( + security(("bearer_auth" = [])), + responses((status = OK, body = inline(FileReport))) +)] +#[get("/report/{id}")] +async fn get_report( + req: HttpRequest, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, + path: web::Path<(DelphiReportId,)>, +) -> Result, ApiError> { + check_is_moderator_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Scopes::PROJECT_READ, + ) + .await?; + + let (report_id,) = path.into_inner(); + + let row = sqlx::query!( + r#" + SELECT DISTINCT ON (dr.id) + to_jsonb(dr) + || jsonb_build_object( + 'file_id', to_base62(f.id), + 'version_id', to_base62(v.id), + 'project_id', to_base62(v.mod_id), + 'file_name', f.filename, + 'file_size', f.size, + 'flag_reason', 'delphi', + 'download_url', f.url, + 'issues', json_array( + SELECT + to_jsonb(dri) + || jsonb_build_object( + 'details', json_array( + SELECT to_jsonb(drid) + FROM delphi_report_issue_details drid + WHERE drid.issue_id = dri.id + ) + ) + FROM delphi_report_issues dri + WHERE dri.report_id = dr.id + ) + ) AS "data!: sqlx::types::Json" + FROM delphi_reports dr + INNER JOIN files f ON f.id = dr.file_id + INNER JOIN versions v ON v.id = f.version_id + WHERE dr.id = $1 + "#, + report_id as DelphiReportId, + ) + .fetch_optional(&**pool) + .await + .wrap_internal_err("failed to fetch report from database")? + .ok_or(ApiError::NotFound)?; + + Ok(web::Json(row.data.0)) +} + +/// See [`search_projects`]. +#[derive(Debug, Serialize, Deserialize, utoipa::ToSchema)] +pub struct SearchResponse { + /// List of reports returned. + pub reports: Vec, + /// Fetched project information for projects in the returned reports. + pub projects: HashMap, + /// Fetched moderation threads for projects in the returned reports. + pub threads: HashMap, + /// Fetched owner information for projects. + pub ownership: HashMap, +} + +/// Limited set of project information returned by [`search_projects`]. 
+#[derive(Debug, Serialize, Deserialize, utoipa::ToSchema)] +pub struct ProjectModerationInfo { + /// Project ID. + pub id: ProjectId, + /// Project moderation thread ID. + pub thread_id: ThreadId, + /// Project name. + pub name: String, + /// The aggregated project types of the versions of this project + pub project_types: Vec, + /// The URL of the icon of the project + pub icon_url: Option, +} + +/// Searches all projects which are awaiting technical review. +#[utoipa::path( + security(("bearer_auth" = [])), + responses((status = OK, body = inline(Vec))) +)] +#[post("/search")] +async fn search_projects( + req: HttpRequest, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, + search_req: web::Json, +) -> Result, ApiError> { + let user = check_is_moderator_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Scopes::PROJECT_READ, + ) + .await?; + + let sort_by = search_req.sort_by.to_string(); + let limit = search_req.limit.max(50); + let offset = limit.saturating_mul(search_req.page); + + let limit = + i64::try_from(limit).wrap_request_err("limit cannot fit into `i64`")?; + let offset = i64::try_from(offset) + .wrap_request_err("offset cannot fit into `i64`")?; + + let mut reports = Vec::::new(); + let mut project_ids = Vec::::new(); + let mut thread_ids = Vec::::new(); + let mut rows = sqlx::query!( + r#" + SELECT + project_id AS "project_id: DBProjectId", + project_thread_id AS "project_thread_id: DBThreadId", + report AS "report!: sqlx::types::Json" + FROM ( + SELECT DISTINCT ON (dr.id) + dr.id AS report_id, + dr.created AS report_created, + dr.severity AS report_severity, + m.id AS project_id, + t.id AS project_thread_id, + + to_jsonb(dr) + || jsonb_build_object( + 'file_id', to_base62(f.id), + 'version_id', to_base62(v.id), + 'project_id', to_base62(v.mod_id), + 'file_name', f.filename, + 'file_size', f.size, + 'flag_reason', 'delphi', + 'download_url', f.url, + 'issues', json_array( + SELECT + to_jsonb(dri) + || 
jsonb_build_object( + 'details', json_array( + SELECT jsonb_build_object( + 'id', drid.id, + 'issue_id', drid.issue_id, + 'key', drid.key, + 'file_path', drid.file_path, + -- ignore `decompiled_source` + 'data', drid.data, + 'severity', drid.severity + ) + FROM delphi_report_issue_details drid + WHERE drid.issue_id = dri.id + ) + ) + FROM delphi_report_issues dri + WHERE dri.report_id = dr.id + ) + ) AS report + FROM delphi_reports dr + INNER JOIN files f ON f.id = dr.file_id + INNER JOIN versions v ON v.id = f.version_id + INNER JOIN mods m ON m.id = v.mod_id + INNER JOIN threads t ON t.mod_id = m.id + + -- filtering + LEFT JOIN mods_categories mc ON mc.joining_mod_id = m.id + LEFT JOIN categories c ON c.id = mc.joining_category_id + WHERE + -- project type + (cardinality($1::int[]) = 0 OR c.project_type = ANY($1::int[])) + ) t + + -- sorting + ORDER BY + CASE WHEN $2 = 'created_asc' THEN t.report_created ELSE TO_TIMESTAMP(0) END ASC, + CASE WHEN $2 = 'created_desc' THEN t.report_created ELSE TO_TIMESTAMP(0) END DESC, + CASE WHEN $2 = 'severity_asc' THEN t.report_severity ELSE 'low'::delphi_severity END ASC, + CASE WHEN $2 = 'severity_desc' THEN t.report_severity ELSE 'low'::delphi_severity END DESC + + -- pagination + LIMIT $3 + OFFSET $4 + "#, + &search_req + .filter + .project_type + .iter() + .map(|ty| ty.0) + .collect::>(), + &sort_by, + limit, + offset, + ) + .fetch(&**pool); + + while let Some(row) = rows + .next() + .await + .transpose() + .wrap_internal_err("failed to fetch reports")? + { + reports.push(row.report.0); + project_ids.push(row.project_id); + thread_ids.push(row.project_thread_id); + } + + let projects = DBProject::get_many_ids(&project_ids, &**pool, &redis) + .await + .wrap_internal_err("failed to fetch projects")? 
+ .into_iter() + .map(|project| { + (ProjectId::from(project.inner.id), Project::from(project)) + }) + .collect::>(); + let db_threads = DBThread::get_many(&thread_ids, &**pool) + .await + .wrap_internal_err("failed to fetch threads")?; + let thread_author_ids = db_threads + .iter() + .flat_map(|thread| thread.members.clone()) + .collect::>(); + let thread_authors = + DBUser::get_many_ids(&thread_author_ids, &**pool, &redis) + .await + .wrap_internal_err("failed to fetch thread authors")? + .into_iter() + .map(From::from) + .collect::>(); + let threads = db_threads + .into_iter() + .map(|thread| { + let thread = Thread::from(thread, thread_authors.clone(), &user); + (thread.id, thread) + }) + .collect::>(); + + let project_list: Vec = projects.values().cloned().collect(); + + let ownerships = get_projects_ownership(&project_list, &pool, &redis) + .await + .wrap_internal_err("failed to fetch project ownerships")?; + let ownership = projects + .keys() + .copied() + .zip(ownerships) + .collect::>(); + + Ok(web::Json(SearchResponse { + reports, + projects: projects + .into_iter() + .map(|(id, project)| { + ( + id, + ProjectModerationInfo { + id, + thread_id: project.thread_id, + name: project.name, + project_types: project.project_types, + icon_url: project.icon_url, + }, + ) + }) + .collect(), + threads, + ownership, + })) +} + +/// See [`update_issue`]. +#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)] +pub struct UpdateIssue { + /// Status to set the issue to. + pub status: DelphiReportIssueStatus, +} + +/// Updates the state of a technical review issue. 
+#[utoipa::path( + security(("bearer_auth" = [])), + responses((status = NO_CONTENT)) +)] +#[post("/issue/{id}")] +async fn update_issue( + req: HttpRequest, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, + update_req: web::Json, + path: web::Path<(DelphiReportIssueId,)>, +) -> Result<(), ApiError> { + check_is_moderator_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Scopes::PROJECT_WRITE, + ) + .await?; + let (issue_id,) = path.into_inner(); + + sqlx::query!( + " + UPDATE delphi_report_issues + SET status = $1 + WHERE id = $2 + ", + update_req.status as DelphiReportIssueStatus, + issue_id as DelphiReportIssueId, + ) + .execute(&**pool) + .await + .wrap_internal_err("failed to update issue")?; + + Ok(()) +} diff --git a/apps/labrinth/src/routes/mod.rs b/apps/labrinth/src/routes/mod.rs index ca55240b8e..2ba78fa632 100644 --- a/apps/labrinth/src/routes/mod.rs +++ b/apps/labrinth/src/routes/mod.rs @@ -161,6 +161,8 @@ pub enum ApiError { RateLimitError(u128, u32), #[error("Error while interacting with payment processor: {0}")] Stripe(#[from] stripe::StripeError), + #[error("Error while interacting with Delphi: {0}")] + Delphi(reqwest::Error), } impl ApiError { @@ -201,6 +203,7 @@ impl ApiError { Self::Stripe(..) => "stripe_error", Self::TaxProcessor(..) => "tax_processor_error", Self::Slack(..) => "slack_error", + Self::Delphi(..) => "delphi_error", }, description: match self { Self::Internal(e) => format!("{e:#?}"), @@ -249,6 +252,7 @@ impl actix_web::ResponseError for ApiError { Self::Stripe(..) => StatusCode::FAILED_DEPENDENCY, Self::TaxProcessor(..) => StatusCode::INTERNAL_SERVER_ERROR, Self::Slack(..) => StatusCode::INTERNAL_SERVER_ERROR, + Self::Delphi(..) 
=> StatusCode::INTERNAL_SERVER_ERROR, } } diff --git a/apps/labrinth/src/routes/v3/project_creation.rs b/apps/labrinth/src/routes/v3/project_creation.rs index e03d2dd58e..dad60fec85 100644 --- a/apps/labrinth/src/routes/v3/project_creation.rs +++ b/apps/labrinth/src/routes/v3/project_creation.rs @@ -339,9 +339,6 @@ async fn project_create_inner( redis: &RedisPool, session_queue: &AuthQueue, ) -> Result { - // The base URL for files uploaded to S3 - let cdn_url = dotenvy::var("CDN_URL")?; - // The currently logged in user let (_, current_user) = get_user_from_headers( &req, @@ -577,7 +574,6 @@ async fn project_create_inner( uploaded_files, &mut created_version.files, &mut created_version.dependencies, - &cdn_url, &content_disposition, project_id, created_version.version_id.into(), diff --git a/apps/labrinth/src/routes/v3/version_creation.rs b/apps/labrinth/src/routes/v3/version_creation.rs index 396395e69c..03ebb81283 100644 --- a/apps/labrinth/src/routes/v3/version_creation.rs +++ b/apps/labrinth/src/routes/v3/version_creation.rs @@ -38,7 +38,6 @@ use sha1::Digest; use sqlx::postgres::PgPool; use std::collections::{HashMap, HashSet}; use std::sync::Arc; -use tracing::error; use validator::Validate; fn default_requested_status() -> VersionStatus { @@ -158,8 +157,6 @@ async fn version_create_inner( session_queue: &AuthQueue, moderation_queue: &AutomatedModerationQueue, ) -> Result { - let cdn_url = dotenvy::var("CDN_URL")?; - let mut initial_version_data = None; let mut version_builder = None; let mut selected_loaders = None; @@ -355,7 +352,6 @@ async fn version_create_inner( uploaded_files, &mut version.files, &mut version.dependencies, - &cdn_url, &content_disposition, version.project_id.into(), version.version_id.into(), @@ -451,6 +447,7 @@ async fn version_create_inner( .files .iter() .map(|file| VersionFile { + id: None, hashes: file .hashes .iter() @@ -590,8 +587,6 @@ async fn upload_file_to_version_inner( version_id: models::DBVersionId, session_queue: 
&AuthQueue, ) -> Result { - let cdn_url = dotenvy::var("CDN_URL")?; - let mut initial_file_data: Option = None; let mut file_builders: Vec = Vec::new(); @@ -741,7 +736,6 @@ async fn upload_file_to_version_inner( uploaded_files, &mut file_builders, &mut dependencies, - &cdn_url, &content_disposition, project_id, version_id.into(), @@ -795,7 +789,6 @@ pub async fn upload_file( uploaded_files: &mut Vec, version_files: &mut Vec, dependencies: &mut Vec, - cdn_url: &str, content_disposition: &actix_web::http::header::ContentDisposition, project_id: ProjectId, version_id: VersionId, @@ -942,21 +935,17 @@ pub async fn upload_file( || force_primary || total_files_len == 1; - let file_path_encode = format!( - "data/{}/versions/{}/{}", - project_id, - version_id, + let file_path = format!( + "data/{project_id}/versions/{version_id}/{}", urlencoding::encode(file_name) ); - let file_path = - format!("data/{}/versions/{}/{}", project_id, version_id, &file_name); let upload_data = file_host .upload_file(content_type, &file_path, FileHostPublicity::Public, data) .await?; uploaded_files.push(UploadedFile { - name: file_path, + name: file_path.clone(), publicity: FileHostPublicity::Public, }); @@ -980,33 +969,9 @@ pub async fn upload_file( return Err(CreateError::InvalidInput(msg.to_string())); } - let url = format!("{cdn_url}/{file_path_encode}"); - - let client = reqwest::Client::new(); - let delphi_url = dotenvy::var("DELPHI_URL")?; - match client - .post(delphi_url) - .json(&serde_json::json!({ - "url": url, - "project_id": project_id, - "version_id": version_id, - })) - .send() - .await - { - Ok(res) => { - if !res.status().is_success() { - error!("Failed to upload file to Delphi: {url}"); - } - } - Err(e) => { - error!("Failed to upload file to Delphi: {url}: {e}"); - } - } - version_files.push(VersionFileBuilder { filename: file_name.to_string(), - url: format!("{cdn_url}/{file_path_encode}"), + url: format!("{}/{file_path}", dotenvy::var("CDN_URL")?), hashes: vec![ 
models::version_item::HashBuilder { algorithm: "sha1".to_string(), diff --git a/apps/labrinth/src/util/error.rs b/apps/labrinth/src/util/error.rs index 5f9ff343c2..cba9c21971 100644 --- a/apps/labrinth/src/util/error.rs +++ b/apps/labrinth/src/util/error.rs @@ -19,6 +19,7 @@ pub trait Context: Sized { /// Maps the error variant into an [`eyre::Report`] with the given message. #[inline] + #[track_caller] fn wrap_err(self, msg: D) -> Result where D: Send + Sync + Debug + Display + 'static, @@ -28,6 +29,7 @@ pub trait Context: Sized { /// Maps the error variant into an [`ApiError::Internal`] using the closure to create the message. #[inline] + #[track_caller] fn wrap_internal_err_with( self, f: impl FnOnce() -> D, @@ -40,6 +42,7 @@ pub trait Context: Sized { /// Maps the error variant into an [`ApiError::Internal`] with the given message. #[inline] + #[track_caller] fn wrap_internal_err(self, msg: D) -> Result where D: Send + Sync + Debug + Display + 'static, @@ -49,6 +52,7 @@ pub trait Context: Sized { /// Maps the error variant into an [`ApiError::Request`] using the closure to create the message. #[inline] + #[track_caller] fn wrap_request_err_with( self, f: impl FnOnce() -> D, @@ -61,6 +65,7 @@ pub trait Context: Sized { /// Maps the error variant into an [`ApiError::Request`] with the given message. #[inline] + #[track_caller] fn wrap_request_err(self, msg: D) -> Result where D: Send + Sync + Debug + Display + 'static, @@ -70,6 +75,7 @@ pub trait Context: Sized { /// Maps the error variant into an [`ApiError::Auth`] using the closure to create the message. #[inline] + #[track_caller] fn wrap_auth_err_with(self, f: impl FnOnce() -> D) -> Result where D: Send + Sync + Debug + Display + 'static, @@ -79,6 +85,7 @@ pub trait Context: Sized { /// Maps the error variant into an [`ApiError::Auth`] with the given message. 
#[inline] + #[track_caller] fn wrap_auth_err(self, msg: D) -> Result where D: Send + Sync + Debug + Display + 'static, diff --git a/docker-compose.yml b/docker-compose.yml index 9ded6c6311..777eeef4d1 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -12,7 +12,7 @@ services: POSTGRES_PASSWORD: labrinth POSTGRES_HOST_AUTH_METHOD: trust healthcheck: - test: ['CMD', 'pg_isready'] + test: ['CMD', 'pg_isready', '-U', 'labrinth'] interval: 3s timeout: 5s retries: 3 @@ -114,6 +114,30 @@ services: watch: - path: ./apps/labrinth action: rebuild + delphi: + profiles: + - with-delphi + image: ghcr.io/modrinth/delphi:feature-schema-rework + container_name: labrinth-delphi + ports: + - '59999:59999' + environment: + LABRINTH_ENDPOINT: http://host.docker.internal:8000/_internal/delphi/ingest + LABRINTH_ADMIN_KEY: feedbeef + healthcheck: + test: + ['CMD', 'wget', '-q', '-O/dev/null', 'http://localhost:59999/health'] + interval: 3s + timeout: 5s + retries: 3 + volumes: + # Labrinth deposits version files here; + # Delphi reads them from here + - /tmp/modrinth:/tmp/modrinth:ro + extra_hosts: + # Delphi must send a message on a webhook to our backend, + # so it must have access to our local network + - 'host.docker.internal:host-gateway' volumes: meilisearch-data: db-data: