Mirror of https://github.com/PCSX2/web-api.git (synced 2026-01-31 01:15:16 +01:00)
API Rewrite and Dockerization (#78)
* workers: init
* workers: d1 initial work
* workers: resuming work, simplified schema
* api: flesh out the majority of critical features
* api: get rid of the old implementation
* db: seed database with current releases
* db: break seed files up, too much for a single stdout buffer
* api: support version diff'ing
* d1: debugging insert issue
* api: fix insert issue (missing `await`s) and explicitly cache to avoid invocations
* api: append CORS headers for requests originating from `pcsx2.net`
* api: update seed data and fix response data
* api: optimize DB indexes and add caching
* api: update page rule cache when a release is added/deleted/modified
* api: most functionality ported over to rocket.rs
* api: finish off core implementation
* api: dockerize
* api: cleaning up TODOs
* v1: remove some of the old implementation
* v2: small script to pull release data, update DB seed
* v2: minor cleanup
* v2: finalize v1 -> v2 transition
* v2: synchronize db on startup
* sqlx: commit sql query metadata
* v2: handful of bug fixes and v1 parity adjustments
* v2: some repo house cleaning
* ci: add CI workflows
* ci: finalize ci implementation
8  .dockerignore  Normal file
@@ -0,0 +1,8 @@
# Added by cargo

/target
db.sqlite3
db.sqlite3-journal
.env
.sqlx/
*.log
9  .env.template  Normal file
@@ -0,0 +1,9 @@
# Fill in this file and rename to `.env`
GITHUB_API_TOKEN=TODO
GITHUB_WEBHOOK_SECRET=TODO
ADMIN_API_KEY=TODO
# The following parameters will likely be fine
DATABASE_URL=sqlite://db.sqlite3
ERROR_LOG_PATH=./error.log
APP_LOG_PATH=./app.log
VERBOSE_LOGGING=true
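The Rust rewrite pulls in `dotenvy` (see Cargo.toml further down), so these variables are presumably read at startup roughly like the sketch below; the function name and exact fallback behavior are assumptions, not the repository's actual code:

```rust
// A minimal sketch, assuming dotenvy is used as in Cargo.toml.
// Variable names mirror .env.template; `load_config` is hypothetical.
use std::env;

fn load_config() -> Result<(String, String, String), env::VarError> {
    // Reads `.env` from the working directory if present; variables
    // already set on the process take precedence.
    dotenvy::dotenv().ok();
    let github_token = env::var("GITHUB_API_TOKEN")?;
    let webhook_secret = env::var("GITHUB_WEBHOOK_SECRET")?;
    let database_url =
        env::var("DATABASE_URL").unwrap_or_else(|_| "sqlite://db.sqlite3".to_string());
    Ok((github_token, webhook_secret, database_url))
}
```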
@@ -1,3 +0,0 @@
dist/
node_modules/
tsconfig.json
@@ -1,14 +0,0 @@
{
  "env": {
    "browser": true,
    "es2021": true
  },
  "extends": ["eslint:recommended", "plugin:@typescript-eslint/recommended"],
  "parser": "@typescript-eslint/parser",
  "parserOptions": {
    "ecmaVersion": 13,
    "sourceType": "module"
  },
  "plugins": ["@typescript-eslint"],
  "rules": {}
}
4  .gitattributes  vendored  Normal file
@@ -0,0 +1,4 @@
# Ensure line endings are consistently 'LF'
* text=auto

.sqlx/**/* linguist-generated
2  .github/dependabot.yml  vendored
@@ -4,7 +4,7 @@ updates:
     directory: "/"
     schedule:
       interval: "monthly"
-  - package-ecosystem: "npm"
+  - package-ecosystem: "cargo"
     directory: "/"
     schedule:
       interval: "monthly"
24  .github/workflows/build-backend.yml  vendored
@@ -1,24 +0,0 @@
name: Build

on:
  push:
    branches:
      - main
  pull_request:
    branches:
      - main

jobs:
  lint:
    name: Build Backend
    runs-on: ubuntu-latest
    timeout-minutes: 10
    steps:
      - name: Checkout Repository
        uses: actions/checkout@v2

      - name: Get Dependencies
        run: npm ci

      - name: Build App
        run: npm run build
56  .github/workflows/build.yml  vendored  Normal file
@@ -0,0 +1,56 @@
name: 🔨 Build

on:
  push:
    branches:
      - main
  pull_request:
    branches:
      - main

env:
  REGISTRY: ghcr.io
  IMAGE_NAME: ${{ github.repository }}

jobs:
  build-server:
    name: Server
    runs-on: ubuntu-latest
    timeout-minutes: 30
    steps:
      - uses: actions/checkout@v4

      - name: Install Rust Stable
        uses: actions-rs/toolchain@v1
        with:
          toolchain: stable

      - uses: Swatinem/rust-cache@v2
        name: Cache Rust Build
        with:
          shared-key: web-api-build-${{ matrix.platform }}

      - name: Build Server
        run: |
          cargo install --path .

  build-docker:
    name: Docker Image
    runs-on: ubuntu-latest
    timeout-minutes: 30
    steps:
      - uses: actions/checkout@v4

      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}

      - name: Build Docker image
        uses: docker/build-push-action@v6
        with:
          context: .
          push: false
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
27  .github/workflows/lint-backend.yml  vendored
@@ -1,27 +0,0 @@
name: Linter

on:
  push:
    branches:
      - main
  pull_request:
    branches:
      - main

jobs:
  lint:
    name: Linting & Formatting
    runs-on: ubuntu-latest
    timeout-minutes: 10
    steps:
      - name: Checkout Repository
        uses: actions/checkout@v2

      - name: Get Dependencies
        run: npm ci

      - name: Check Formatting
        run: npx prettier --check ./

      - name: Check Linting
        run: npx eslint ./
48  .github/workflows/lint.yml  vendored  Normal file
@@ -0,0 +1,48 @@
name: 📝 Linter

on:
  push:
    branches:
      - main
  pull_request:
    branches:
      - main

jobs:
  formatting:
    name: Formatting
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Install Rust Stable
        uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          components: clippy

      - uses: Swatinem/rust-cache@v2
        name: Cache Rust Build
        with:
          shared-key: web-api-build-ubuntu-latest

      - name: Check Rust formatting
        run: cargo fmt --all --check

  linter:
    name: Linter
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - uses: Swatinem/rust-cache@v2
        name: Cache Rust Build
        with:
          shared-key: web-api-build-${{ matrix.platform }}

      - uses: actions-rs/clippy-check@v1
        name: Rust Linting - Clippy
        continue-on-error: true
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          args: --all-features --manifest-path Cargo.toml
95  .github/workflows/release.yml  vendored  Normal file
@@ -0,0 +1,95 @@
name: 🏭 Draft Release

on:
  workflow_dispatch:
    inputs:
      bump:
        description: 'Semver Bump Type'
        required: true
        default: 'patch'
        type: choice
        options:
          - patch
          - minor
          - major

env:
  REGISTRY: ghcr.io
  IMAGE_NAME: ${{ github.repository }}

permissions:
  contents: write

jobs:
  cut_release:
    name: Cut Release
    runs-on: ubuntu-latest
    outputs:
      new_tag: ${{ steps.set_tag.outputs.new_tag }}
    steps:
      # Docs - https://github.com/mathieudutour/github-tag-action
      - name: Bump Version and Push Tag
        if: github.repository == 'PCSX2/web-api'
        id: tag_version
        uses: mathieudutour/github-tag-action@v6.2
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          tag_prefix: v
          default_bump: ${{ github.event.inputs.bump }}

      - name: Create Release
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: gh release create ${{ steps.tag_version.outputs.new_tag }} --generate-notes --draft --repo ${{ github.repository }}

      - name: Output new tag
        id: set_tag
        run: |
          echo "new_tag=${{ steps.tag_version.outputs.new_tag }}" >> $GITHUB_OUTPUT

  build_image:
    if: github.repository == 'PCSX2/web-api'
    needs:
      - cut_release
    name: "Build and Publish Image"
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          ref: ${{ needs.cut_release.outputs.new_tag }}

      - name: Log in to the Container registry
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}

      - name: Build and push Docker image
        id: push
        uses: docker/build-push-action@v6
        with:
          context: .
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}

      - name: Generate artifact attestation
        uses: actions/attest-build-provenance@v2
        with:
          subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
          subject-digest: ${{ steps.push.outputs.digest }}
          push-to-registry: true

      - name: Publish Release
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          TAG_VAL=${{ needs.cut_release.outputs.new_tag }}
          gh release edit ${TAG_VAL} --draft=false --repo ${{ github.repository }}
16  .gitignore  vendored
@@ -1,4 +1,12 @@
-node_modules/
-.env
-dist/
-certs/
+# Added by cargo
+
+/target
+*.sqlite3
+*.sqlite3-journal
+*.sqlite3-shm
+*.sqlite3-wal
+.env
+*.log
+*.tar.gz
+TODO.md
+temp-scripts/
@@ -1,3 +0,0 @@
dist/
node_modules/
tsconfig.json
20  .sqlx/query-13f66c14a27476857730238c8d4e70d6b7a9c1c85a226cb84f0cca0ef90f5392.json  generated  Normal file
@@ -0,0 +1,20 @@
{
  "db_name": "SQLite",
  "query": "SELECT COUNT(*) as count FROM releases WHERE release_type = ?;",
  "describe": {
    "columns": [
      { "name": "count", "ordinal": 0, "type_info": "Int" }
    ],
    "parameters": { "Right": 1 },
    "nullable": [false]
  },
  "hash": "13f66c14a27476857730238c8d4e70d6b7a9c1c85a226cb84f0cca0ef90f5392"
}
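These `.sqlx/query-*.json` files are sqlx's offline query metadata, produced by `cargo sqlx prepare` (which both the Dockerfile and README run). They let the `sqlx::query!` macro type-check queries at build time without a live database. A hypothetical call site for the query above could look like this sketch; the function name and pool handling are assumptions:

```rust
// Sketch only: with the committed .sqlx metadata, this compiles in
// offline mode (SQLX_OFFLINE=1) with no database connection.
use sqlx::SqlitePool;

async fn count_releases(pool: &SqlitePool, release_type: &str) -> Result<i64, sqlx::Error> {
    let row = sqlx::query!(
        "SELECT COUNT(*) as count FROM releases WHERE release_type = ?;",
        release_type
    )
    .fetch_one(pool)
    .await?;
    // SQLite reports COUNT(*) as a plain Int, hence the widening cast.
    Ok(row.count as i64)
}
```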
20  .sqlx/query-2dd2301f84c890ffe8af5abc5822cf6a712213bf06aa1bf744d4ebc69636a2c2.json  generated  Normal file
@@ -0,0 +1,20 @@
{
  "db_name": "SQLite",
  "query": "\n SELECT version FROM releases;\n ",
  "describe": {
    "columns": [
      { "name": "version", "ordinal": 0, "type_info": "Text" }
    ],
    "parameters": { "Right": 0 },
    "nullable": [false]
  },
  "hash": "2dd2301f84c890ffe8af5abc5822cf6a712213bf06aa1bf744d4ebc69636a2c2"
}
92  .sqlx/query-362ffd0aaca76ea3ba4ab894b763bab3db1e7b6d54db8e3d34c4b10be5eff745.json  generated  Normal file
@@ -0,0 +1,92 @@
{
  "db_name": "SQLite",
  "query": "\n SELECT * FROM releases WHERE release_type = ? AND archived = 0 ORDER BY version_integral DESC LIMIT ?;\n ",
  "describe": {
    "columns": [
      { "name": "id", "ordinal": 0, "type_info": "Int64" },
      { "name": "version", "ordinal": 1, "type_info": "Text" },
      { "name": "version_integral", "ordinal": 2, "type_info": "Int64" },
      { "name": "published_timestamp", "ordinal": 3, "type_info": "Text" },
      { "name": "created_timestamp", "ordinal": 4, "type_info": "Text" },
      { "name": "github_release_id", "ordinal": 5, "type_info": "Int64" },
      { "name": "github_url", "ordinal": 6, "type_info": "Text" },
      { "name": "release_type", "ordinal": 7, "type_info": "Text" },
      { "name": "next_audit", "ordinal": 8, "type_info": "Text" },
      { "name": "next_audit_days", "ordinal": 9, "type_info": "Int64" },
      { "name": "archived", "ordinal": 10, "type_info": "Int64" },
      { "name": "notes", "ordinal": 11, "type_info": "Text" },
      { "name": "assets", "ordinal": 12, "type_info": "Text" }
    ],
    "parameters": { "Right": 2 },
    "nullable": [false, false, false, false, false, false, false, false, false, false, false, true, false]
  },
  "hash": "362ffd0aaca76ea3ba4ab894b763bab3db1e7b6d54db8e3d34c4b10be5eff745"
}
12  .sqlx/query-55c07d68995ecee7259be9bc1f87225c60b58dfc50b77940b65015372a94aea3.json  generated  Normal file
@@ -0,0 +1,12 @@
{
  "db_name": "SQLite",
  "query": "UPDATE releases SET archived = 1 WHERE version = ?;",
  "describe": {
    "columns": [],
    "parameters": { "Right": 1 },
    "nullable": []
  },
  "hash": "55c07d68995ecee7259be9bc1f87225c60b58dfc50b77940b65015372a94aea3"
}
12  .sqlx/query-8a1a00a0c4fad4cc725ace58c4350f523f03043352e8f12bddea227190702049.json  generated  Normal file
@@ -0,0 +1,12 @@
{
  "db_name": "SQLite",
  "query": "INSERT OR IGNORE INTO releases (version, version_integral, published_timestamp, created_timestamp, github_release_id, github_url, release_type, next_audit, next_audit_days, archived, notes, assets) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);",
  "describe": {
    "columns": [],
    "parameters": { "Right": 12 },
    "nullable": []
  },
  "hash": "8a1a00a0c4fad4cc725ace58c4350f523f03043352e8f12bddea227190702049"
}
92  .sqlx/query-9177b2a134f884daed2affb270401a6a42653170e69a81a29f995a409a2c928d.json  generated  Normal file
@@ -0,0 +1,92 @@
{
  "db_name": "SQLite",
  "query": "\n SELECT * FROM releases WHERE release_type = ? AND archived = 0 ORDER BY version_integral DESC LIMIT ? OFFSET ?;\n ",
  "describe": {
    "columns": [
      { "name": "id", "ordinal": 0, "type_info": "Int64" },
      { "name": "version", "ordinal": 1, "type_info": "Text" },
      { "name": "version_integral", "ordinal": 2, "type_info": "Int64" },
      { "name": "published_timestamp", "ordinal": 3, "type_info": "Text" },
      { "name": "created_timestamp", "ordinal": 4, "type_info": "Text" },
      { "name": "github_release_id", "ordinal": 5, "type_info": "Int64" },
      { "name": "github_url", "ordinal": 6, "type_info": "Text" },
      { "name": "release_type", "ordinal": 7, "type_info": "Text" },
      { "name": "next_audit", "ordinal": 8, "type_info": "Text" },
      { "name": "next_audit_days", "ordinal": 9, "type_info": "Int64" },
      { "name": "archived", "ordinal": 10, "type_info": "Int64" },
      { "name": "notes", "ordinal": 11, "type_info": "Text" },
      { "name": "assets", "ordinal": 12, "type_info": "Text" }
    ],
    "parameters": { "Right": 3 },
    "nullable": [false, false, false, false, false, false, false, false, false, false, false, true, false]
  },
  "hash": "9177b2a134f884daed2affb270401a6a42653170e69a81a29f995a409a2c928d"
}
12  .sqlx/query-a9cc05704770e8e024c56384078daadd7ef88071719b05ade9f6b0290609cfec.json  generated  Normal file
@@ -0,0 +1,12 @@
{
  "db_name": "SQLite",
  "query": "UPDATE releases SET notes = ?, assets = ? WHERE version = ?;",
  "describe": {
    "columns": [],
    "parameters": { "Right": 3 },
    "nullable": []
  },
  "hash": "a9cc05704770e8e024c56384078daadd7ef88071719b05ade9f6b0290609cfec"
}
92  .sqlx/query-aeb714ba8fea0875403dfd7167616f53df3a0eda8d9695ccd0e0e1882cd672b2.json  generated  Normal file
@@ -0,0 +1,92 @@
{
  "db_name": "SQLite",
  "query": "\n SELECT * FROM releases WHERE release_type = ? AND version_integral < ? AND archived = 0 ORDER BY version_integral DESC LIMIT ?;\n ",
  "describe": {
    "columns": [
      { "name": "id", "ordinal": 0, "type_info": "Int64" },
      { "name": "version", "ordinal": 1, "type_info": "Text" },
      { "name": "version_integral", "ordinal": 2, "type_info": "Int64" },
      { "name": "published_timestamp", "ordinal": 3, "type_info": "Text" },
      { "name": "created_timestamp", "ordinal": 4, "type_info": "Text" },
      { "name": "github_release_id", "ordinal": 5, "type_info": "Int64" },
      { "name": "github_url", "ordinal": 6, "type_info": "Text" },
      { "name": "release_type", "ordinal": 7, "type_info": "Text" },
      { "name": "next_audit", "ordinal": 8, "type_info": "Text" },
      { "name": "next_audit_days", "ordinal": 9, "type_info": "Int64" },
      { "name": "archived", "ordinal": 10, "type_info": "Int64" },
      { "name": "notes", "ordinal": 11, "type_info": "Text" },
      { "name": "assets", "ordinal": 12, "type_info": "Text" }
    ],
    "parameters": { "Right": 3 },
    "nullable": [false, false, false, false, false, false, false, false, false, false, false, true, false]
  },
  "hash": "aeb714ba8fea0875403dfd7167616f53df3a0eda8d9695ccd0e0e1882cd672b2"
}
92  .sqlx/query-bcd98dc280b0f507d884f3c1d6c1bb82edf69954361f92be9894173d755c8d84.json  generated  Normal file
@@ -0,0 +1,92 @@
{
  "db_name": "SQLite",
  "query": "\n SELECT * FROM releases WHERE release_type = 'stable' AND archived = 0 ORDER BY version_integral DESC LIMIT 1;\n ",
  "describe": {
    "columns": [
      { "name": "id", "ordinal": 0, "type_info": "Int64" },
      { "name": "version", "ordinal": 1, "type_info": "Text" },
      { "name": "version_integral", "ordinal": 2, "type_info": "Int64" },
      { "name": "published_timestamp", "ordinal": 3, "type_info": "Text" },
      { "name": "created_timestamp", "ordinal": 4, "type_info": "Text" },
      { "name": "github_release_id", "ordinal": 5, "type_info": "Int64" },
      { "name": "github_url", "ordinal": 6, "type_info": "Text" },
      { "name": "release_type", "ordinal": 7, "type_info": "Text" },
      { "name": "next_audit", "ordinal": 8, "type_info": "Text" },
      { "name": "next_audit_days", "ordinal": 9, "type_info": "Int64" },
      { "name": "archived", "ordinal": 10, "type_info": "Int64" },
      { "name": "notes", "ordinal": 11, "type_info": "Text" },
      { "name": "assets", "ordinal": 12, "type_info": "Text" }
    ],
    "parameters": { "Right": 0 },
    "nullable": [false, false, false, false, false, false, false, false, false, false, false, true, false]
  },
  "hash": "bcd98dc280b0f507d884f3c1d6c1bb82edf69954361f92be9894173d755c8d84"
}
92  .sqlx/query-ce8f031a54e0644e26f615e0a1c5b4889d31b37a36cce44c4ddc90ee99c0e643.json  generated  Normal file
@@ -0,0 +1,92 @@
{
  "db_name": "SQLite",
  "query": "\n SELECT * FROM releases WHERE release_type = 'stable' AND archived = 0 ORDER BY version_integral DESC LIMIT 200;\n ",
  "describe": {
    "columns": [
      { "name": "id", "ordinal": 0, "type_info": "Int64" },
      { "name": "version", "ordinal": 1, "type_info": "Text" },
      { "name": "version_integral", "ordinal": 2, "type_info": "Int64" },
      { "name": "published_timestamp", "ordinal": 3, "type_info": "Text" },
      { "name": "created_timestamp", "ordinal": 4, "type_info": "Text" },
      { "name": "github_release_id", "ordinal": 5, "type_info": "Int64" },
      { "name": "github_url", "ordinal": 6, "type_info": "Text" },
      { "name": "release_type", "ordinal": 7, "type_info": "Text" },
      { "name": "next_audit", "ordinal": 8, "type_info": "Text" },
      { "name": "next_audit_days", "ordinal": 9, "type_info": "Int64" },
      { "name": "archived", "ordinal": 10, "type_info": "Int64" },
      { "name": "notes", "ordinal": 11, "type_info": "Text" },
      { "name": "assets", "ordinal": 12, "type_info": "Text" }
    ],
    "parameters": { "Right": 0 },
    "nullable": [false, false, false, false, false, false, false, false, false, false, false, true, false]
  },
  "hash": "ce8f031a54e0644e26f615e0a1c5b4889d31b37a36cce44c4ddc90ee99c0e643"
}
92  .sqlx/query-e0cc761d842dbe684c2644de3ccc43db596361b929117a14ac1ac35f7b7a73f4.json  generated  Normal file
@@ -0,0 +1,92 @@
{
  "db_name": "SQLite",
  "query": "\n SELECT * FROM releases WHERE release_type = 'nightly' AND archived = 0 ORDER BY version_integral DESC LIMIT 200;\n ",
  "describe": {
    "columns": [
      { "name": "id", "ordinal": 0, "type_info": "Int64" },
      { "name": "version", "ordinal": 1, "type_info": "Text" },
      { "name": "version_integral", "ordinal": 2, "type_info": "Int64" },
      { "name": "published_timestamp", "ordinal": 3, "type_info": "Text" },
      { "name": "created_timestamp", "ordinal": 4, "type_info": "Text" },
      { "name": "github_release_id", "ordinal": 5, "type_info": "Int64" },
      { "name": "github_url", "ordinal": 6, "type_info": "Text" },
      { "name": "release_type", "ordinal": 7, "type_info": "Text" },
      { "name": "next_audit", "ordinal": 8, "type_info": "Text" },
      { "name": "next_audit_days", "ordinal": 9, "type_info": "Int64" },
      { "name": "archived", "ordinal": 10, "type_info": "Int64" },
      { "name": "notes", "ordinal": 11, "type_info": "Text" },
      { "name": "assets", "ordinal": 12, "type_info": "Text" }
    ],
    "parameters": { "Right": 0 },
    "nullable": [false, false, false, false, false, false, false, false, false, false, false, true, false]
  },
  "hash": "e0cc761d842dbe684c2644de3ccc43db596361b929117a14ac1ac35f7b7a73f4"
}
20  .sqlx/query-e434cf8182e15e56fa32637828d9187842239b1d1496582ab4e7cb2000f7cd23.json  generated  Normal file
@@ -0,0 +1,20 @@
{
  "db_name": "SQLite",
  "query": "\n SELECT notes FROM releases WHERE archived = 0 AND version_integral >= ? AND version_integral <= ? ORDER BY version_integral DESC;\n ",
  "describe": {
    "columns": [
      { "name": "notes", "ordinal": 0, "type_info": "Text" }
    ],
    "parameters": { "Right": 2 },
    "nullable": [true]
  },
  "hash": "e434cf8182e15e56fa32637828d9187842239b1d1496582ab4e7cb2000f7cd23"
}
26  .sqlx/query-ee0e20e536083fc3287053ebbfaa73561a65fad4ecf074985b995398d7098054.json  generated  Normal file
@@ -0,0 +1,26 @@
{
  "db_name": "SQLite",
  "query": "\n SELECT * FROM api_keys WHERE api_key = ?;\n ",
  "describe": {
    "columns": [
      { "name": "api_key", "ordinal": 0, "type_info": "Text" },
      { "name": "metadata_json", "ordinal": 1, "type_info": "Text" }
    ],
    "parameters": { "Right": 1 },
    "nullable": [false, false]
  },
  "hash": "ee0e20e536083fc3287053ebbfaa73561a65fad4ecf074985b995398d7098054"
}
12  .sqlx/query-ee7e0c30cc8c93d4262846c64b49c93e5a323dff187b2f52fe8586302d1da8bf.json  generated  Normal file
@@ -0,0 +1,12 @@
{
  "db_name": "SQLite",
  "query": "\n INSERT OR IGNORE INTO api_keys (api_key, metadata_json) VALUES (?, ?);\n ",
  "describe": {
    "columns": [],
    "parameters": { "Right": 2 },
    "nullable": []
  },
  "hash": "ee7e0c30cc8c93d4262846c64b49c93e5a323dff187b2f52fe8586302d1da8bf"
}
92  .sqlx/query-f36897873f3ed6a0e64708b5d50618d79464b907626ccdb3701fd3bb8f5f5d1c.json  generated  Normal file
@@ -0,0 +1,92 @@
{
  "db_name": "SQLite",
  "query": "\n SELECT * FROM releases WHERE release_type = 'nightly' AND archived = 0 ORDER BY version_integral DESC LIMIT 1;\n ",
  "describe": {
    "columns": [
      { "name": "id", "ordinal": 0, "type_info": "Int64" },
      { "name": "version", "ordinal": 1, "type_info": "Text" },
      { "name": "version_integral", "ordinal": 2, "type_info": "Int64" },
      { "name": "published_timestamp", "ordinal": 3, "type_info": "Text" },
      { "name": "created_timestamp", "ordinal": 4, "type_info": "Text" },
      { "name": "github_release_id", "ordinal": 5, "type_info": "Int64" },
      { "name": "github_url", "ordinal": 6, "type_info": "Text" },
      { "name": "release_type", "ordinal": 7, "type_info": "Text" },
      { "name": "next_audit", "ordinal": 8, "type_info": "Text" },
      { "name": "next_audit_days", "ordinal": 9, "type_info": "Int64" },
      { "name": "archived", "ordinal": 10, "type_info": "Int64" },
      { "name": "notes", "ordinal": 11, "type_info": "Text" },
      { "name": "assets", "ordinal": 12, "type_info": "Text" }
    ],
    "parameters": { "Right": 0 },
    "nullable": [false, false, false, false, false, false, false, false, false, false, false, true, false]
  },
  "hash": "f36897873f3ed6a0e64708b5d50618d79464b907626ccdb3701fd3bb8f5f5d1c"
}
3032  Cargo.lock  generated  Normal file
File diff suppressed because it is too large.
21  Cargo.toml  Normal file
@@ -0,0 +1,21 @@
[package]
name = "pcsx2-api"
version = "2.0.0"
edition = "2021"

[dependencies]
hex = "0.4.3"
rocket = { version = "0.5.0", features = ["json"] }
sqlx = { version = "0.7.3", features = ["runtime-tokio", "sqlite"] }
dotenvy = "0.15"
regex = "1.5"
lazy_static = "1.4"
sha2 = "0.10.8"
hmac = "0.12.1"
octocrab = { version = "0.32.0", features = ["stream"] }
chrono = "0.4.31"
fern = { version = "0.6.2", features = ["date-based", "colored"] }
log = "0.4.20"

[profile.release]
strip = true # Automatically strip symbols from the binary.
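The `fern` and `log` dependencies line up with the `APP_LOG_PATH` / `ERROR_LOG_PATH` variables in `.env.template`. How the logger is actually wired isn't shown in this diff; a minimal sketch, assuming info-level output goes to the app log and errors are additionally copied to the error log:

```rust
// Hypothetical logging setup; `setup_logging` and the split between the
// two files are assumptions based on .env.template, not the real code.
fn setup_logging(app_log: &str, error_log: &str) -> Result<(), fern::InitError> {
    fern::Dispatch::new()
        .format(|out, message, record| {
            out.finish(format_args!(
                "[{}] [{}] {}",
                chrono::Utc::now().to_rfc3339(),
                record.level(),
                message
            ))
        })
        // Everything at `info` and above goes to the app log...
        .chain(
            fern::Dispatch::new()
                .level(log::LevelFilter::Info)
                .chain(fern::log_file(app_log)?),
        )
        // ...while errors are also written to the dedicated error log.
        .chain(
            fern::Dispatch::new()
                .level(log::LevelFilter::Error)
                .chain(fern::log_file(error_log)?),
        )
        .apply()?;
    Ok(())
}
```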
36  Dockerfile  Normal file
@@ -0,0 +1,36 @@
FROM rust:1.81-slim-bullseye as base

RUN apt-get update
RUN apt-get install -y libssl-dev pkg-config

FROM base as builder

WORKDIR /usr/src/pcsx2-api
COPY . .

# SQLX prep
RUN cargo install sqlx-cli
ENV DATABASE_URL="sqlite://db.sqlite3"
RUN sqlx database create
RUN sqlx migrate run --source ./db/migrations
RUN cargo sqlx prepare

# Build the binary
RUN cargo install --path .
RUN chmod +x ./target/release/pcsx2-api

FROM debian:bullseye-slim as final

RUN mkdir /app && chown nobody:nogroup /app && chmod 700 /app
# Install latest package updates
RUN apt update -y && apt upgrade -y
# Install CA Certificates
RUN apt-get install -y ca-certificates && update-ca-certificates
# Copy in Binary
COPY --from=builder /usr/src/pcsx2-api/target/release/pcsx2-api /app/pcsx2-api

# Run container as non-root user
USER nobody
WORKDIR /app

ENTRYPOINT ["/app/pcsx2-api"]
35  README.md
@@ -1 +1,34 @@
-web-api
+# PCSX2 API
+
+TODO
+
+## Development
+
+### Running Locally
+
+#### SQLite Setup
+
+- `cargo install sqlx-cli`
+- `sqlx database create`
+- `sqlx migrate run --source ./db/migrations`
+- `cargo sqlx prepare`
+
+#### Running the App
+
+- `cargo run`
+
+### Docker
+
+#### Building Docker Container
+
+- Ensure Docker is running
+- `docker build . --tag pcsx2-api:local`
+
+#### Running Local Docker Container
+
+- `docker-compose -f ./docker-compose.local.yaml up`
+
+#### Package Docker Container
+
+- `docker save -o $PWD/pcsx2-api.tar.gz pcsx2-api:local`
+- `docker load -i pcsx2-api.tar.gz`
2  Rocket.toml  Normal file
@@ -0,0 +1,2 @@
[default]
address = "0.0.0.0"
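Binding to `0.0.0.0` is what makes the server reachable from outside the Docker container. Rocket 0.5 picks this file up automatically at launch, so nothing in the code has to reference it; a minimal sketch (the route and handler are illustrative, not the API's real surface):

```rust
// Rocket merges Rocket.toml with ROCKET_* environment variables via its
// default figment, so `address = "0.0.0.0"` applies without extra code.
#[macro_use]
extern crate rocket;

// Hypothetical route, only here to make the sketch runnable.
#[get("/healthcheck")]
fn healthcheck() -> &'static str {
    "ok"
}

#[launch]
fn rocket() -> _ {
    rocket::build().mount("/", routes![healthcheck])
}
```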
@@ -1,67 +0,0 @@
import { v4 as uuidv4 } from "uuid";
import { ReleaseCache } from "../models/ReleaseCache";
import { LogFactory } from "../utils/LogFactory";
import { Request, Response } from "express";
import crypto from "crypto";

export class GithubController {
  private releaseCache: ReleaseCache;
  private log = new LogFactory("gh-listener").getLogger();
  private readonly webhookSecret;

  constructor(releaseCache: ReleaseCache) {
    this.releaseCache = releaseCache;
    const secret = process.env.GH_WEBHOOK_SECRET;
    if (secret == undefined) {
      this.log.error("GH_WEBHOOK_SECRET isn't set. Aborting");
      throw new Error("GH_WEBHOOK_SECRET isn't set. Aborting");
    } else {
      this.webhookSecret = secret;
    }
  }

  // in the future, might change it from instead of listing all releases it just uses the content of the webhook to evict the cache
  // for the foreseeable future though, this is fine
  webhookHandler(req: Request, resp: Response) {
    const cid = uuidv4();
    this.log.info("Received webhook request");
    const ghDigestRaw = req.header("x-hub-signature-256");
    if (ghDigestRaw == undefined) {
      this.log.warn("Webhook lacked digest signature, ignoring");
      resp.send(403);
      return;
    }
    const ghDigest = Buffer.from(ghDigestRaw, "utf8");
    const digest = Buffer.from(
      `sha256=${crypto
        .createHmac("sha256", this.webhookSecret)
        .update(JSON.stringify(req.body))
        .digest("hex")}`,
      "utf8"
    );
    if (crypto.timingSafeEqual(digest, ghDigest)) {
      // Valid webhook from github, proceed
      const body = req.body;
      if (body?.action === "published" && body?.release?.draft == false) {
        // Release event
        if (body?.repository?.full_name == "PCSX2/pcsx2") {
          this.log.info("Webhook was a release event from PCSX2!");
          this.releaseCache.refreshReleaseCache(cid);
        } else if (body?.repository?.full_name == "PCSX2/archive") {
          this.releaseCache.refreshLegacyReleaseCache(cid);
        }
      } else if (
        body?.action == "completed" &&
        body?.check_suite?.status == "completed" &&
        body?.check_suite?.conclusion == "success"
      ) {
        this.releaseCache.refreshPullRequestBuildCache(cid);
      }
    } else {
      this.log.warn("Webhook digest signature was invalid, ignoring");
      resp.send(403);
      return;
    }
    resp.send(204);
  }
}
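The deleted controller above verifies GitHub's `X-Hub-Signature-256` header with a timing-safe HMAC comparison. The `hmac`, `sha2`, and `hex` crates in Cargo.toml suggest the Rust port performs the same check; a minimal sketch, assuming a free function rather than the real handler:

```rust
// Sketch only: the function name and signature are assumptions; the
// crates and their APIs match what Cargo.toml declares.
use hmac::{Hmac, Mac};
use sha2::Sha256;

type HmacSha256 = Hmac<Sha256>;

/// `signature_header` is the raw `X-Hub-Signature-256` value,
/// e.g. "sha256=abcdef...".
fn verify_github_signature(secret: &[u8], body: &[u8], signature_header: &str) -> bool {
    let Some(hex_digest) = signature_header.strip_prefix("sha256=") else {
        return false;
    };
    let Ok(expected) = hex::decode(hex_digest) else {
        return false;
    };
    let mut mac = HmacSha256::new_from_slice(secret).expect("HMAC accepts any key length");
    mac.update(body);
    // verify_slice is constant-time, mirroring crypto.timingSafeEqual.
    mac.verify_slice(&expected).is_ok()
}
```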
@@ -1,116 +0,0 @@
import { v4 as uuidv4 } from "uuid";
import { ReleaseCache } from "../models/ReleaseCache";
import { LogFactory } from "../utils/LogFactory";
import { Request, Response } from "express";

export class ReleaseCacheControllerV1 {
  private releaseCache: ReleaseCache;
  private log = new LogFactory("release-cache").getLogger();
  private maxPageSize = 100;

  constructor(releaseCache: ReleaseCache) {
    this.releaseCache = releaseCache;
  }

  getLatestReleasesAndPullRequests(req: Request, resp: Response) {
    const cid = uuidv4();
    this.log.info("Fetching latest releases");
    resp.status(200).send(this.releaseCache.getLatestReleases(cid));
  }

  getStableReleases(req: Request, resp: Response) {
    const cid = uuidv4();
    const offset = Number(req.query.offset) || 0;
    const pageSize = Number(req.query.pageSize) || 30;
    if (offset < 0) {
      this.log.info("API error occurred - invalid offset", {
        cid: cid,
        offset: offset,
        pageSize: pageSize,
      });
      resp.status(400).send("Invalid offset value");
      return;
    }
    if (pageSize > this.maxPageSize) {
      this.log.info("API error occurred - pageSize exceeded", {
        cid: cid,
        offset: offset,
        pageSize: pageSize,
      });
      resp.status(400).send("pageSize exceeded maximum allowed '100'");
      return;
    }
    this.log.info("Fetching stable releases", {
      cid: cid,
      offset: offset,
      pageSize: pageSize,
    });
    resp
      .status(200)
      .send(this.releaseCache.getStableReleases(cid, offset, pageSize));
  }

  getNightlyReleases(req: Request, resp: Response) {
    const cid = uuidv4();
    const offset = Number(req.query.offset) || 0;
    const pageSize = Number(req.query.pageSize) || 30;
    if (offset < 0) {
      this.log.info("API error occurred - invalid offset", {
        cid: cid,
        offset: offset,
        pageSize: pageSize,
      });
      resp.status(400).send("Invalid offset value");
      return;
    }
    if (pageSize > this.maxPageSize) {
      this.log.info("API error occurred - pageSize exceeded", {
        cid: cid,
        offset: offset,
        pageSize: pageSize,
      });
      resp.status(400).send("pageSize exceeded maximum allowed '100'");
      return;
    }
    this.log.info("Fetching nightly releases", {
      cid: cid,
      offset: offset,
      pageSize: pageSize,
    });
    resp
      .status(200)
      .send(this.releaseCache.getNightlyReleases(cid, offset, pageSize));
  }

  getPullRequests(req: Request, resp: Response) {
    const cid = uuidv4();
    const offset = Number(req.query.offset) || 0;
    const pageSize = Number(req.query.pageSize) || 30;
    if (offset < 0) {
      this.log.info("API error occurred - invalid offset", {
        cid: cid,
        offset: offset,
        pageSize: pageSize,
      });
      resp.status(400).send("Invalid offset value");
      return;
    }
    if (pageSize > this.maxPageSize) {
      this.log.info("API error occurred - pageSize exceeded", {
        cid: cid,
        offset: offset,
        pageSize: pageSize,
      });
      resp.status(400).send("pageSize exceeded maximum allowed '100'");
      return;
    }
    this.log.info("Fetching current pull requests", {
      cid: cid,
      offset: offset,
      pageSize: pageSize,
    });
    resp
      .status(200)
      .send(this.releaseCache.getPullRequestBuilds(cid, offset, pageSize));
  }
}
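This v1 controller rejects negative offsets and caps `pageSize` at 100; the commit's "v1 parity adjustments" note suggests v2 keeps the same rules. A hedged Rocket sketch of equivalent guard rails (route path, parameter names, and limits are carried over from the deleted controller, not taken from the new Rust source):

```rust
// Hypothetical v2 handler; only the validation logic mirrors v1.
use rocket::get;
use rocket::http::Status;

const MAX_PAGE_SIZE: i64 = 100;

#[get("/stable?<offset>&<page_size>")]
fn stable_releases(offset: Option<i64>, page_size: Option<i64>) -> Result<String, Status> {
    let offset = offset.unwrap_or(0);
    let page_size = page_size.unwrap_or(30);
    if offset < 0 {
        return Err(Status::BadRequest);
    }
    if page_size > MAX_PAGE_SIZE {
        return Err(Status::BadRequest);
    }
    // ...query `releases` with LIMIT ? OFFSET ?, as in the .sqlx metadata.
    Ok(format!("offset={offset}, page_size={page_size}"))
}
```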
27  db/migrations/20231128032010_create-release-table.sql  Normal file
@@ -0,0 +1,27 @@
-- Add migration script here
CREATE TABLE IF NOT EXISTS `releases` (
    `id` integer not null primary key autoincrement,
    `version` TEXT not null,
    `version_integral` INTEGER not null,
    `published_timestamp` TEXT not null,
    `created_timestamp` TEXT not null,
    `github_release_id` INTEGER not null,
    `github_url` TEXT not null,
    `release_type` TEXT not null,
    `next_audit` TEXT not null,
    `next_audit_days` INTEGER not null,
    `archived` INTEGER DEFAULT 0 not null,
    `notes` TEXT null,
    `assets` TEXT DEFAULT "[]" not null
    -- JSON
    -- `download_url` TEXT not null,
    -- `platform` TEXT not null,
    -- `tags` TEXT null, /* JSON array */
    -- `download_count` integer null,
    -- `download_size_bytes` integer null
);
CREATE UNIQUE INDEX IF NOT EXISTS releases_index_version ON releases (version);
-- For list query optimization
CREATE INDEX IF NOT EXISTS idx_releases_type_archived_version_integral ON releases (release_type, archived, version_integral DESC);
-- For changelog query optimization
CREATE INDEX IF NOT EXISTS idx_releases_archived_version_integral ON releases (archived, version_integral DESC);
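Every list query orders by `version_integral`, an integer encoding of the semver that lets the indexes above do the sorting. The actual encoding is not shown in this diff, so the following packing is purely hypothetical; any scheme works as long as integer order matches semver order:

```rust
// Purely hypothetical encoding, not the repository's real scheme.
// Bit-packing major/minor/patch into one i64 preserves semver ordering
// under plain integer comparison, which is all the DESC indexes need.
fn version_integral(major: i64, minor: i64, patch: i64) -> i64 {
    (major << 40) | (minor << 20) | patch
}

#[cfg(test)]
mod tests {
    use super::version_integral;

    #[test]
    fn ordering_matches_semver() {
        assert!(version_integral(2, 0, 0) > version_integral(1, 9, 9));
        assert!(version_integral(1, 10, 0) > version_integral(1, 9, 9));
    }
}
```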
5  db/migrations/20231208000429_create-api-key-table.sql  Normal file
@@ -0,0 +1,5 @@
-- Add migration script here
CREATE TABLE IF NOT EXISTS `api_keys` (
    `api_key` TEXT not null primary key,
    `metadata_json` TEXT not null
);
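The Dockerfile and README apply these migrations with `sqlx-cli`, and the commit note "v2: synchronize db on startup" suggests the app also runs them at boot. A sketch of doing that with sqlx's embedded migrator; whether the app does exactly this is an assumption:

```rust
// Hypothetical startup routine built on documented sqlx 0.7 APIs.
use sqlx::sqlite::SqlitePoolOptions;

async fn init_db(database_url: &str) -> Result<sqlx::SqlitePool, Box<dyn std::error::Error>> {
    let pool = SqlitePoolOptions::new()
        .max_connections(5)
        .connect(database_url)
        .await?;
    // The path is resolved at compile time, relative to Cargo.toml, and
    // the migration files are embedded into the binary.
    sqlx::migrate!("./db/migrations").run(&pool).await?;
    Ok(pool)
}
```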
14  docker-compose.local.yaml  Normal file
@@ -0,0 +1,14 @@
version: '3'

services:
  pcsx2-api:
    container_name: api
    image: pcsx2-api:local
    ports:
      - "8000:8000"
    volumes:
      - ./.env:/app/.env
      - ./Rocket.toml:/app/Rocket.toml
      - ./db.sqlite3:/app/db.sqlite3
      - ./app.log:/app/app.log
      - ./error.log:/app/error.log
14  docker-compose.yaml  Normal file
@@ -0,0 +1,14 @@
version: '3'

services:
  pcsx2-api:
    container_name: api
    image: ghcr.io/PCSX2/web-api:latest
    ports:
      - "8000:8000"
    volumes:
      - ./.env:/app/.env
      - ./Rocket.toml:/app/Rocket.toml
      - ./db.sqlite3:/app/db.sqlite3
      - ./app.log:/app/app.log
      - ./error.log:/app/error.log
134  index.ts
@@ -1,134 +0,0 @@
import { v4 as uuidv4 } from "uuid";
import express from "express";
import cors from "cors";
import compression from "compression";
import { ReleaseCache } from "./models/ReleaseCache";
import { exit } from "process";
import { LogFactory } from "./utils/LogFactory";
import { RoutesV1 } from "./routes/RoutesV1";
import fs from "fs";
import https from "https";

const log = new LogFactory("app").getLogger();

const devEnv = process.env.NODE_ENV !== "production";

const ghWebhookSecret = process.env.GH_WEBHOOK_SECRET;
if (ghWebhookSecret == undefined) {
  log.warn("GH_WEBHOOK_SECRET isn't set. Aborting");
  exit(1);
}

// explicit list of origins to allow
let corsAllowedOriginWhitelist: string[] = [];
if (process.env.CORS_ALLOWED_ORIGINS != undefined) {
  corsAllowedOriginWhitelist = process.env.CORS_ALLOWED_ORIGINS.split(",");
}

// allowed origins via regex patterns
let corsAllowedOriginPatterns: string[] = [];
if (process.env.CORS_ALLOWED_ORIGIN_PATTERNS != undefined) {
  corsAllowedOriginPatterns =
    process.env.CORS_ALLOWED_ORIGIN_PATTERNS.split(",");
}

// if we are in a dev environment, allow local origins
if (devEnv) {
  corsAllowedOriginPatterns.push("^https?:\\/\\/localhost:\\d+");
}

const corsOptions = {
  // @typescript-eslint/no-explicit-any
  origin: function (origin: any, callback: any) {
    if (origin == undefined) {
      // Request did not originate from a browser, allow it
      callback(null, true);
    } else if (corsAllowedOriginWhitelist.indexOf(origin) !== -1) {
      callback(null, true);
    } else {
      // check the regex's, this is to support things like cloudflare pages that subdomain with the commit sha
      for (let i = 0; i < corsAllowedOriginPatterns.length; i++) {
        if (origin.match(corsAllowedOriginPatterns[i]) != null) {
          callback(null, true);
          return;
        }
      }
      callback(new Error(`'${origin}' not matched by CORS whitelist`));
    }
  },
  methods: "GET,POST,OPTIONS",
  optionsSuccessStatus: 200, // some legacy browsers (IE11, various SmartTVs) choke on 204
};

// eslint-disable-next-line @typescript-eslint/no-var-requires
const rateLimit = require("express-rate-limit");

const app = express();
app.use(cors(corsOptions));
app.use(express.json());
app.use(compression());

// Enable if you're behind a reverse proxy (Heroku, Bluemix, AWS ELB, Nginx, etc)
// see https://expressjs.com/en/guide/behind-proxies.html
app.set("trust proxy", 1);

const limiter = rateLimit({
  windowMs: 1 * 60 * 1000, // 1 minutes
  max: 30, // limit each IP to 30 requests per minute
  // eslint-disable-next-line @typescript-eslint/no-unused-vars, @typescript-eslint/no-explicit-any
  onLimitReached: function (req: any, res: any, options: any) {
    log.warn("rate limit hit", {
      ip: req.ip,
      url: req.url,
    });
  },
});

// apply to all requests. Commented out to avoid rate-limit conflicts. See issue #137
// app.use(limiter);

const releaseCache = new ReleaseCache();

(async function () {
  const cid = uuidv4();
  log.info("Initializing Server Cache", { cid: cid });
  await releaseCache.refreshReleaseCache(cid);
  await releaseCache.refreshPullRequestBuildCache(cid);
  // build up legacy releases in the background
  releaseCache.refreshLegacyReleaseCache(cid);
  log.info("Initializing Server Cache", { cid: cid });
})();

// Init Routes
const v1Router = new RoutesV1(releaseCache);
app.use("/v1", v1Router.router);

// Default Route
app.use(function (req, res) {
  log.warn("invalid route accessed", {
    url: req.originalUrl,
  });
  res.send(404);
});

const useHttps = process.env.USE_HTTPS === "true" || false;

if (useHttps) {
  const key = fs.readFileSync(__dirname + "/../certs/ssl.key");
  const cert = fs.readFileSync(__dirname + "/../certs/ssl.crt");
  const sslOptions = { key: key, cert: cert };
  const httpsServer = https.createServer(sslOptions, app);
  httpsServer.listen(Number(process.env.PORT), async () => {
    log.info("Cache Initialized, Serving...", {
      protocol: "https",
      port: Number(process.env.PORT),
    });
  });
} else {
  app.listen(Number(process.env.PORT), async () => {
    log.info("Cache Initialized, Serving...", {
      protocol: "http",
      port: Number(process.env.PORT),
    });
  });
}
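The Express entrypoint above implements the old CORS whitelist, and the commit note "append CORS headers for requests originating from `pcsx2.net`" implies the Rust side does something similar; in Rocket 0.5 that is conventionally an `on_response` fairing. A sketch under that assumption (the allowed-origin check and header set are illustrative, not the real implementation):

```rust
// Hypothetical fairing; attach with `rocket::build().attach(Cors)`.
use rocket::fairing::{Fairing, Info, Kind};
use rocket::http::Header;
use rocket::{Request, Response};

pub struct Cors;

#[rocket::async_trait]
impl Fairing for Cors {
    fn info(&self) -> Info {
        Info {
            name: "Append CORS headers",
            kind: Kind::Response,
        }
    }

    async fn on_response<'r>(&self, req: &'r Request<'_>, res: &mut Response<'r>) {
        if let Some(origin) = req.headers().get_one("Origin") {
            // Naive suffix check for the sketch; a real implementation
            // would parse the origin rather than string-match it.
            if origin.ends_with("pcsx2.net") {
                res.set_header(Header::new("Access-Control-Allow-Origin", origin.to_string()));
                res.set_header(Header::new("Access-Control-Allow-Methods", "GET, POST, OPTIONS"));
            }
        }
    }
}
```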
@@ -1,617 +0,0 @@
import { Octokit } from "@octokit/rest";
import { throttling } from "@octokit/plugin-throttling";
import { retry } from "@octokit/plugin-retry";
import striptags from "striptags";
import * as path from "path";
import { LogFactory } from "../utils/LogFactory";

enum ReleaseType {
  Stable = 1,
  Nightly,
  PullRequest,
}

enum ReleasePlatform {
  Windows = "Windows",
  Linux = "Linux",
  MacOS = "MacOS",
}

class ReleaseAsset {
  constructor(
    readonly url: string,
    readonly displayName: string,
    readonly additionalTags: string[], // things like 32bit, AppImage, distro names, etc
    readonly downloadCount: number,
    readonly size: number,
  ) {}
}

class Release {
  constructor(
    readonly version: string,
    readonly url: string,
    readonly semverMajor: number,
    readonly semverMinor: number,
    readonly semverPatch: number,
    readonly description: string | undefined | null,
    readonly assets: Record<ReleasePlatform, ReleaseAsset[]>,
    readonly type: ReleaseType,
    readonly prerelease: boolean,
    readonly createdAt: Date,
    readonly publishedAt: Date | undefined | null
  ) {}
}

class PullRequest {
  constructor(
    readonly number: number,
    readonly link: string,
    readonly githubUser: string,
    readonly updatedAt: Date,
    readonly body: string,
    readonly title: string,
    readonly additions: number,
    readonly deletions: number
  ) {}
}

Octokit.plugin(throttling);
Octokit.plugin(retry);

const log = new LogFactory("release-cache").getLogger();

const semverRegex = /v?(\d+)\.(\d+)\.(\d+)/;
const semverNoPatchRegex = /v?(\d+)\.(\d+)/;

const octokit = new Octokit({
  auth: process.env.GH_TOKEN,
  userAgent: "PCSX2/PCSX2.github.io",
  throttle: {
    onRateLimit: (retryAfter: any, options: any) => {
      log.warn(
        `Request quota exhausted for request ${options.method} ${options.url}`
      );

      // Retry twice after hitting a rate limit error, then give up
      if (options.request.retryCount <= 2) {
        log.warn(`Retrying after ${retryAfter} seconds!`);
        return true;
      }
    },
    onAbuseLimit: (retryAfter: any, options: any) => {
      // does not retry, only logs a warning
      log.warn(`Abuse detected for request ${options.method} ${options.url}`);
    },
  },
});

// NOTE - Depends on asset naming convention:
// pcsx2-<version>-windows-<arch>-<additional tags>.whatever
// In the case of macOS:
// pcsx2-<version>-macOS-<additional tags>.whatever
// In the case of linux:
// pcsx2-<version>-linux-<distro OR appimage>-<arch>-<additional tags>.whatever
function gatherReleaseAssets(
  release: any,
  legacy: boolean
): Record<ReleasePlatform, ReleaseAsset[]> {
  const assets: Record<ReleasePlatform, ReleaseAsset[]> = {
    Windows: [],
    Linux: [],
    MacOS: [],
  };

  if (!("assets" in release)) {
    return assets;
  }

  // NOTE - pre-releases are assumed to be from the old nightly build system
  // there names do not conform to a standard, and therefore they are hacked around
  if (legacy && release.prerelease) {
    for (let i = 0; i < release.assets.length; i++) {
      const asset = release.assets[i];
      if (asset.name.includes("windows")) {
        assets.Windows.push(
          new ReleaseAsset(
            asset.browser_download_url,
            `Windows 32bit`,
            [],
            asset.download_count,
            asset.size
          )
        );
      }
    }
    return assets;
  } else if (legacy) {
    for (let i = 0; i < release.assets.length; i++) {
      const asset = release.assets[i];
      const assetComponents = path
        .parse(asset.name)
        .name.split("-")
        .map((s) => {
          return s.replace(".tar", "");
        });
      if (asset.name.includes("windows")) {
        assets.Windows.push(
          new ReleaseAsset(
            asset.browser_download_url,
            `Windows`,
            assetComponents.slice(3),
            asset.download_count,
            asset.size
          )
        );
      } else if (asset.name.includes("linux")) {
        assets.Linux.push(
          new ReleaseAsset(
            asset.browser_download_url,
            `Linux`,
            assetComponents.slice(3),
            asset.download_count,
            asset.size
          )
        );
      }
    }
    return assets;
  }

  for (let i = 0; i < release.assets.length; i++) {
    const asset = release.assets[i];
    const assetComponents = path.parse(asset.name).name.split("-");
    if (assetComponents.length < 3) {
      log.warn("invalid release asset naming", {
        isLegacy: legacy,
        semver: release.tag_name,
        assetName: asset.name,
      });
      continue;
    }
    let platform = assetComponents[2].toLowerCase();
    if (assetComponents[2].toLowerCase().startsWith("macos")) {
      platform = "macos";
    } else if (assetComponents.length < 4) {
      log.warn("invalid release asset naming", {
        isLegacy: legacy,
        semver: release.tag_name,
        assetName: asset.name,
      });
      continue;
    }

    if (platform == "windows") {
      const arch = assetComponents[3];
      const additionalTags = assetComponents.slice(4);
      assets.Windows.push(
        new ReleaseAsset(
          asset.browser_download_url,
          `Windows ${arch}`,
          additionalTags,
          asset.download_count,
          asset.size
        )
      );
    } else if (platform == "linux") {
      const distroOrAppImage = assetComponents[3];
      const additionalTags = assetComponents.slice(4);
      assets.Linux.push(
        new ReleaseAsset(
          asset.browser_download_url,
          `Linux ${distroOrAppImage}`,
          additionalTags,
          asset.download_count,
          asset.size
        )
      );
    } else if (platform == "macos") {
      const additionalTags = assetComponents.slice(3);
      assets.MacOS.push(
        new ReleaseAsset(
          asset.browser_download_url,
          `MacOS`,
          additionalTags,
          asset.download_count,
          asset.size
        )
      );
    }
  }
  return assets;
}

export class ReleaseCache {
  private combinedStableReleases: Release[] = [];
  private stableReleases: Release[] = [];
  private legacyStableReleases: Release[] = [];

  private combinedNightlyReleases: Release[] = [];
  private nightlyReleases: Release[] = [];
  private legacyNightlyReleases: Release[] = [];

  private pullRequestBuilds: PullRequest[] = [];

  private initialized: boolean;

  constructor() {
    this.initialized = false;
  }

  public isInitialized(cid: string): boolean {
    return this.initialized;
  }

  public async refreshReleaseCache(cid: string): Promise<void> {
    log.info("refreshing main release cache", { cid: cid, cacheType: "main" });
    const releases = await octokit.paginate(octokit.rest.repos.listReleases, {
      owner: "PCSX2",
      repo: "pcsx2",
      per_page: 100,
    });

    const newStableReleases: Release[] = [];
    const newNightlyReleases: Release[] = [];
    for (let i = 0; i < releases.length; i++) {
      const release = releases[i];
      if (release.draft) {
        continue;
      }
      const releaseAssets = gatherReleaseAssets(release, false);
      let semverGroups = release.tag_name.match(semverRegex);
      // work-around an old improper stable release semver (missing patch)
      if (semverGroups == null || semverGroups.length != 4) {
        const tempGroups = release.tag_name.match(semverNoPatchRegex);
        if (tempGroups != null && tempGroups.length == 3) {
          semverGroups = [tempGroups[0], tempGroups[1], tempGroups[2], "0"];
        }
      }
      if (semverGroups != null && semverGroups.length == 4) {
        const newRelease = new Release(
          release.tag_name,
          release.html_url,
          Number(semverGroups[1]),
          Number(semverGroups[2]),
          Number(semverGroups[3]),
          release.body == undefined || release.body == null
            ? release.body
            : striptags(release.body),
          releaseAssets,
          release.prerelease ? ReleaseType.Nightly : ReleaseType.Stable,
          release.prerelease,
          new Date(release.created_at),
          release.published_at == null
            ? undefined
            : new Date(release.published_at)
        );
        if (newRelease.type == ReleaseType.Nightly) {
          newNightlyReleases.push(newRelease);
        } else {
          newStableReleases.push(newRelease);
        }
      } else {
        log.warn("invalid semantic version", {
          cid: cid,
          cacheType: "main",
          semver: release.tag_name,
          matches: semverGroups,
        });
      }
    }
    this.stableReleases = newStableReleases;
    this.combinedStableReleases = this.stableReleases.concat(
      this.legacyStableReleases
    );
    // Releases returned from github are not sorted by semantic version, but by published date -- this ensures consistency
    this.combinedStableReleases.sort(
      (a, b) =>
        b.semverMajor - a.semverMajor ||
        b.semverMinor - a.semverMinor ||
        b.semverPatch - a.semverPatch
    );

    this.nightlyReleases = newNightlyReleases;
    this.combinedNightlyReleases = this.nightlyReleases.concat(
      this.legacyNightlyReleases
    );
    this.combinedNightlyReleases.sort(
      (a, b) =>
        b.semverMajor - a.semverMajor ||
        b.semverMinor - a.semverMinor ||
        b.semverPatch - a.semverPatch
    );
    log.info("main release cache refreshed", { cid: cid, cacheType: "main" });
  }

  public async refreshLegacyReleaseCache(cid: string): Promise<void> {
    log.info("refreshing legacy release cache", {
      cid: cid,
      cacheType: "legacy",
    });
    // First pull down the legacy releases, these are OLD nightlys
    const legacyReleases = await octokit.paginate(
      octokit.rest.repos.listReleases,
      {
        owner: "PCSX2",
        repo: "archive",
        per_page: 100,
      }
    );

    const newLegacyNightlyReleases: Release[] = [];
    const newStableStableReleases: Release[] = [];
    for (let i = 0; i < legacyReleases.length; i++) {
      const release = legacyReleases[i];
      if (release.draft) {
        continue;
      }
      const releaseAssets = gatherReleaseAssets(release, true);
      const semverGroups = release.tag_name.match(semverRegex);
      if (semverGroups != null && semverGroups.length == 4) {
        let createdAt = release.created_at;
        // Allow the creation date to be overridden
        if (release.body !== undefined && release.body !== null) {
          if (release.body.includes("DATE_OVERRIDE")) {
            const regexp = /DATE_OVERRIDE:\s?(\d{4}-\d{2}-\d{2})/g;
            const match = Array.from(
              release.body.matchAll(regexp),
              (m) => m[1]
            );
            if (match.length > 0) {
              createdAt = `${match[0]}T12:00:00.000Z`;
            }
          }
        }
        const newRelease = new Release(
          release.tag_name,
          release.html_url,
          Number(semverGroups[1]),
          Number(semverGroups[2]),
          Number(semverGroups[3]),
          release.body == undefined || release.body == null
            ? release.body
            : striptags(release.body),
          releaseAssets,
          ReleaseType.Nightly,
          release.prerelease,
          new Date(createdAt),
          release.published_at == null
            ? undefined
            : new Date(release.published_at)
        );
        if (newRelease.prerelease) {
          newLegacyNightlyReleases.push(newRelease);
        } else {
          newStableStableReleases.push(newRelease);
        }
      } else {
        log.warn("invalid semantic version", {
          cid: cid,
          cacheType: "main",
          semver: release.tag_name,
          matches: semverGroups,
        });
      }
    }
    this.legacyStableReleases = newStableStableReleases;
    this.combinedStableReleases = this.stableReleases.concat(
      this.legacyStableReleases
    );
    this.combinedStableReleases.sort(
      (a, b) =>
        b.semverMajor - a.semverMajor ||
        b.semverMinor - a.semverMinor ||
        b.semverPatch - a.semverPatch
    );

    this.legacyNightlyReleases = newLegacyNightlyReleases;
    this.combinedNightlyReleases = this.nightlyReleases.concat(
      this.legacyNightlyReleases
    );
    this.combinedNightlyReleases.sort(
      (a, b) =>
        b.semverMajor - a.semverMajor ||
        b.semverMinor - a.semverMinor ||
        b.semverPatch - a.semverPatch
    );
    log.info("legacy release cache refreshed", {
|
||||
cid: cid,
|
||||
cacheType: "legacy",
|
||||
});
|
||||
}
|
||||
|
||||
private async grabPullRequestInfo(cursor: string | null): Promise<any> {
|
||||
const response: any = await octokit.graphql(
|
||||
`
|
||||
fragment pr on PullRequest {
|
||||
number
|
||||
author {
|
||||
login
|
||||
}
|
||||
updatedAt
|
||||
body
|
||||
title
|
||||
additions
|
||||
deletions
|
||||
isDraft
|
||||
permalink
|
||||
commits(last: 1) {
|
||||
nodes {
|
||||
commit {
|
||||
statusCheckRollup {
|
||||
state
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
query ($owner: String!, $repo: String!, $states: [PullRequestState!], $baseRefName: String, $headRefName: String, $orderField: IssueOrderField = UPDATED_AT, $orderDirection: OrderDirection = DESC, $perPage: Int!, $endCursor: String) {
|
||||
repository(owner: $owner, name: $repo) {
|
||||
pullRequests(states: $states, orderBy: {field: $orderField, direction: $orderDirection}, baseRefName: $baseRefName, headRefName: $headRefName, first: $perPage, after: $endCursor) {
|
||||
nodes {
|
||||
...pr
|
||||
}
|
||||
pageInfo {
|
||||
hasNextPage
|
||||
endCursor
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`,
|
||||
{
|
||||
owner: "PCSX2",
|
||||
repo: "pcsx2",
|
||||
states: "OPEN",
|
||||
baseRefName: "master",
|
||||
perPage: 100,
|
||||
endCursor: cursor,
|
||||
}
|
||||
);
|
||||
return response;
|
||||
}
|
||||
|
||||
public async refreshPullRequestBuildCache(cid: string): Promise<void> {
|
||||
log.info("refreshing pull request cache", {
|
||||
cid: cid,
|
||||
cacheType: "pullRequests",
|
||||
});
|
||||
|
||||
try {
|
||||
let paginate = true;
|
||||
let cursor: string | null = null;
|
||||
const newPullRequestCache: PullRequest[] = [];
|
||||
while (paginate) {
|
||||
const resp: any = await this.grabPullRequestInfo(cursor);
|
||||
if (resp.repository.pullRequests.pageInfo.hasNextPage) {
|
||||
cursor = resp.repository.pullRequests.pageInfo.endCursor;
|
||||
} else {
|
||||
paginate = false;
|
||||
}
|
||||
for (let i = 0; i < resp.repository.pullRequests.nodes.length; i++) {
|
||||
// We only care about non-draft / successfully building PRs
|
||||
const pr = resp.repository.pullRequests.nodes[i];
|
||||
if (pr.isDraft) {
|
||||
continue;
|
||||
}
|
||||
if (pr.commits.nodes[0].commit.statusCheckRollup.state == "SUCCESS") {
|
||||
newPullRequestCache.push(
|
||||
new PullRequest(
|
||||
pr.number,
|
||||
pr.permalink,
|
||||
pr.author.login,
|
||||
new Date(pr.updatedAt),
|
||||
pr.body == undefined || pr.body == null
|
||||
? pr.body
|
||||
: striptags(pr.body),
|
||||
pr.title == undefined || pr.title == null
|
||||
? pr.title
|
||||
: striptags(pr.title),
|
||||
pr.additions,
|
||||
pr.deletions
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
this.pullRequestBuilds = newPullRequestCache;
|
||||
log.info("finished refreshing pull request cache", {
|
||||
cid: cid,
|
||||
cacheType: "pullRequests",
|
||||
});
|
||||
} catch (error) {
|
||||
log.error("error occurred when refreshing main release cache", error);
|
||||
}
|
||||
}
|
||||
|
||||
// Returns the first page of each release type in a single response
|
||||
public getLatestReleases(cid: string) {
|
||||
return {
|
||||
stableReleases: this.getStableReleases(cid, 0, 30),
|
||||
nightlyReleases: this.getNightlyReleases(cid, 0, 30),
|
||||
pullRequestBuilds: this.getPullRequestBuilds(cid, 0, 30),
|
||||
};
|
||||
}
|
||||
|
||||
public getStableReleases(cid: string, offset: number, pageSize: number) {
|
||||
if (offset >= this.combinedStableReleases.length) {
|
||||
return {
|
||||
data: [],
|
||||
pageInfo: {
|
||||
total: 0,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
const ret = [];
|
||||
for (
|
||||
let i = 0;
|
||||
i < pageSize && i + offset < this.combinedStableReleases.length;
|
||||
i++
|
||||
) {
|
||||
ret.push(this.combinedStableReleases[i + offset]);
|
||||
}
|
||||
|
||||
return {
|
||||
data: ret,
|
||||
pageInfo: {
|
||||
total: this.combinedStableReleases.length,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
public getNightlyReleases(cid: string, offset: number, pageSize: number) {
|
||||
if (offset >= this.combinedNightlyReleases.length) {
|
||||
return {
|
||||
data: [],
|
||||
pageInfo: {
|
||||
total: 0,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
const ret = [];
|
||||
for (
|
||||
let i = 0;
|
||||
i < pageSize && i + offset < this.combinedNightlyReleases.length;
|
||||
i++
|
||||
) {
|
||||
ret.push(this.combinedNightlyReleases[i + offset]);
|
||||
}
|
||||
|
||||
return {
|
||||
data: ret,
|
||||
pageInfo: {
|
||||
total: this.combinedNightlyReleases.length,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
public getPullRequestBuilds(cid: string, offset: number, pageSize: number) {
|
||||
if (offset >= this.pullRequestBuilds.length) {
|
||||
return {
|
||||
data: [],
|
||||
pageInfo: {
|
||||
total: 0,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
const ret = [];
|
||||
for (
|
||||
let i = 0;
|
||||
i < pageSize && i + offset < this.pullRequestBuilds.length;
|
||||
i++
|
||||
) {
|
||||
ret.push(this.pullRequestBuilds[i + offset]);
|
||||
}
|
||||
|
||||
return {
|
||||
data: ret,
|
||||
pageInfo: {
|
||||
total: this.pullRequestBuilds.length,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
6459
package-lock.json
generated
File diff suppressed because it is too large
48
package.json
@@ -1,48 +0,0 @@
{
  "name": "pcsx2-webapi",
  "version": "1.0.0",
  "description": "",
  "main": "index.js",
  "scripts": {
    "serve": "ts-node -r dotenv/config index.ts",
    "build": "tsc -p .",
    "start": "node -r dotenv/config ./dist/index.js dotenv_config_path=./.env",
    "format": "npx prettier --write .",
    "lint": "npx eslint ./"
  },
  "engines": {
    "node": "16.x"
  },
  "keywords": [],
  "author": "",
  "license": "ISC",
  "dependencies": {
    "@octokit/graphql": "^4.8.0",
    "@octokit/plugin-retry": "^3.0.9",
    "@octokit/plugin-throttling": "^3.5.2",
    "@octokit/rest": "^18.12.0",
    "@octokit/types": "^6.31.3",
    "compression": "^1.7.4",
    "cors": "^2.8.5",
    "dotenv": "^10.0.0",
    "express": "^4.17.1",
    "express-rate-limit": "^5.5.1",
    "striptags": "^3.2.0",
    "uuid": "^8.3.2",
    "winston": "^3.3.3",
    "winston-loki": "^6.0.3"
  },
  "devDependencies": {
    "@types/compression": "^1.7.2",
    "@types/cors": "^2.8.12",
    "@types/express": "^4.17.13",
    "@types/node": "^16.11.7",
    "@types/uuid": "^8.3.1",
    "@typescript-eslint/eslint-plugin": "^5.3.1",
    "@typescript-eslint/parser": "^5.3.1",
    "eslint": "^8.2.0",
    "prettier": "2.4.1",
    "ts-node": "^10.4.0",
    "typescript": "^4.4.4"
  }
}
@@ -1,46 +0,0 @@
import express from "express";
import { GithubController } from "../controllers/GithubController";
import { ReleaseCacheControllerV1 } from "../controllers/ReleaseCacheControllerV1";
import { ReleaseCache } from "../models/ReleaseCache";

export class RoutesV1 {
  router: express.Router;
  private githubController: GithubController;
  private releaseCacheControllerV1: ReleaseCacheControllerV1;

  constructor(releaseCache: ReleaseCache) {
    this.router = express.Router();
    this.githubController = new GithubController(releaseCache);
    this.releaseCacheControllerV1 = new ReleaseCacheControllerV1(releaseCache);

    // Init Routes
    this.router
      .route("/latestReleasesAndPullRequests")
      .get((req, resp) =>
        this.releaseCacheControllerV1.getLatestReleasesAndPullRequests(
          req,
          resp
        )
      );
    this.router
      .route("/stableReleases")
      .get((req, resp) =>
        this.releaseCacheControllerV1.getStableReleases(req, resp)
      );
    this.router
      .route("/nightlyReleases")
      .get((req, resp) =>
        this.releaseCacheControllerV1.getNightlyReleases(req, resp)
      );
    this.router
      .route("/pullRequests")
      .get((req, resp) =>
        this.releaseCacheControllerV1.getPullRequests(req, resp)
      );

    // Other Routes
    this.router
      .route("/github-webhook")
      .post((req, resp) => this.githubController.webhookHandler(req, resp));
  }
}
3
src/api/mod.rs
Normal file
@@ -0,0 +1,3 @@
pub mod models;
pub mod v1;
pub mod v2;
57
src/api/models.rs
Normal file
@@ -0,0 +1,57 @@
use std::collections::HashMap;

use crate::storage::models::ReleaseRow;
use lazy_static::lazy_static;
use regex::Regex;
use rocket::serde::json::serde_json;
use rocket::serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug)]
#[serde(crate = "rocket::serde")]
#[serde(rename_all = "camelCase")]
pub struct ReleaseAsset {
    pub download_url: String,
    pub tags: Vec<String>,
    pub download_count: i64,
    pub download_size_bytes: i64,
}

#[derive(Serialize, Debug)]
#[serde(crate = "rocket::serde")]
#[serde(rename_all = "camelCase")]
pub struct Release {
    pub version: String,
    pub published_timestamp: Option<String>,
    pub created_timestamp: Option<String>,
    pub github_release_id: i64,
    pub github_url: String,
    pub release_type: String,
    pub notes: Option<String>,
    pub assets: HashMap<String, Vec<ReleaseAsset>>,
}

lazy_static! {
    static ref VALID_ASSETS_REGEX: Regex =
        Regex::new(r".*pcsx2-v(\d+\.?){1,3}-(windows|linux|macos)").unwrap();
}

impl Release {
    pub fn from_database(db_row: &ReleaseRow) -> Self {
        let assets: Result<HashMap<String, Vec<ReleaseAsset>>, serde_json::Error> =
            serde_json::from_str(db_row.assets.as_str());
        let mut db_assets = assets.unwrap(); // TODO - handle error
        db_assets.iter_mut().for_each(|(_, assets)| {
            assets.retain(|asset| VALID_ASSETS_REGEX.is_match(&asset.download_url.to_lowercase()));
        });
        Self {
            version: db_row.version.clone(),
            published_timestamp: db_row.published_timestamp.clone(),
            created_timestamp: db_row.created_timestamp.clone(),
            github_release_id: db_row.github_release_id,
            github_url: db_row.github_url.clone(),
            release_type: db_row.release_type.clone(),
            notes: db_row.notes.clone(),
            assets: db_assets,
        }
    }
}
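Editor's note: a minimal round-trip sketch (not part of the commit) of the camelCase wire format that `Release::from_database` expects in the `assets` column; the URL and numbers below are invented example values.

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn release_asset_uses_camel_case_keys() {
        // Field names follow #[serde(rename_all = "camelCase")] on ReleaseAsset.
        let json = r#"{
            "downloadUrl": "https://example.invalid/pcsx2-v1.7.0-windows-64bit.7z",
            "tags": ["64bit"],
            "downloadCount": 10,
            "downloadSizeBytes": 1024
        }"#;
        let asset: ReleaseAsset = serde_json::from_str(json).unwrap();
        assert_eq!(asset.tags, vec!["64bit"]);
    }
}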
331
src/api/v1.rs
Normal file
@@ -0,0 +1,331 @@
// TODO V1 - to be removed asap

use std::collections::HashMap;
use std::vec;

use lazy_static::lazy_static;
use log::info;
use regex::Regex;
use rocket::serde::json::serde_json;
use rocket::{
    http::Status,
    serde::{json::Json, Deserialize, Serialize},
    State,
};
use sqlx::{Pool, Sqlite};

use crate::storage::v1::{get_total_count_of_release_type, list_releases_with_offset};
use crate::util::Semver;
use crate::{
    guards::RateLimiter,
    responders::CachedResponse,
    storage::{models::ReleaseRow, sqlite},
};

use super::models::ReleaseAsset;

#[derive(Serialize, Deserialize, Debug)]
#[serde(crate = "rocket::serde")]
#[serde(rename_all = "camelCase")]
pub struct ReleaseAssetV1 {
    pub url: String,
    pub display_name: String,
    pub additional_tags: Vec<String>,
    pub download_count: i64,
    pub size: i64,
}

#[derive(Serialize, Debug)]
#[serde(crate = "rocket::serde")]
#[serde(rename_all = "camelCase")]
pub struct ReleaseV1 {
    pub version: String,
    pub url: String,
    pub semver_major: i64,
    pub semver_minor: i64,
    pub semver_patch: i64,
    pub description: Option<String>,
    pub assets: HashMap<String, Vec<ReleaseAssetV1>>,
    #[serde(rename = "type")]
    pub release_type: i64,
    pub prerelease: bool,
    pub created_at: Option<String>,
    pub published_at: Option<String>,
}

lazy_static! {
    static ref VALID_ASSETS_REGEX: Regex =
        Regex::new(r".*pcsx2-v(\d+\.?){1,3}-(windows|linux|macos)").unwrap();
}

impl ReleaseV1 {
    fn from_v2(db_row: &ReleaseRow) -> Self {
        let assets_v2: Result<HashMap<String, Vec<ReleaseAsset>>, serde_json::Error> =
            serde_json::from_str(db_row.assets.as_str());
        let semver = Semver::new(db_row.version.as_str());
        let mut release_type = 1;
        let mut prerelease = false;
        if db_row.release_type == "nightly" {
            release_type = 2;
            prerelease = true;
        }
        let mut assets_v1: HashMap<String, Vec<ReleaseAssetV1>> = HashMap::new();
        if let Ok(assets) = assets_v2 {
            for (k, v) in assets {
                assets_v1.insert(
                    k.clone(),
                    v.into_iter()
                        .filter(|asset| {
                            VALID_ASSETS_REGEX.is_match(&asset.download_url.to_lowercase())
                        })
                        .map(|asset| {
                            // Derive the display name
                            let mut cleaned_tags = asset.tags.clone();
                            let mut display_name: String = "".to_owned();
                            if k.clone().to_lowercase().contains("macos") {
                                display_name = "MacOS".to_owned();
                                cleaned_tags = cleaned_tags
                                    .into_iter()
                                    .filter(|tag| !tag.to_lowercase().contains("qt"))
                                    .collect();
                            } else if k.clone().to_lowercase().contains("windows") {
                                display_name = "Windows".to_owned();
                                if asset.download_url.to_lowercase().contains("x64") {
                                    display_name = format!("{} x64", display_name);
                                } else {
                                    display_name = format!("{} 32bit", display_name);
                                }
                                cleaned_tags = cleaned_tags
                                    .into_iter()
                                    .filter(|tag| {
                                        !tag.to_lowercase().contains("32bit")
                                            && !tag.to_lowercase().contains("64")
                                    })
                                    .collect();
                            } else if k.clone().to_lowercase().contains("linux") {
                                display_name = "Linux".to_owned();
                                if asset.download_url.to_lowercase().contains("appimage") {
                                    display_name = format!("{} appimage", display_name);
                                } else if asset.download_url.to_lowercase().contains("flatpak") {
                                    display_name = format!("{} flatpak", display_name);
                                }
                                cleaned_tags = cleaned_tags
                                    .into_iter()
                                    .filter(|tag| {
                                        !tag.to_lowercase().contains("appimage")
                                            && !tag.to_lowercase().contains("flatpak")
                                    })
                                    .collect();
                            }

                            ReleaseAssetV1 {
                                url: asset.download_url,
                                display_name: display_name.to_owned(),
                                additional_tags: cleaned_tags,
                                download_count: asset.download_count,
                                size: asset.download_size_bytes,
                            }
                        })
                        .collect(),
                );
            }
        }
        if let Some(v) = assets_v1.remove("macOS") {
            assets_v1.insert("MacOS".to_string(), v);
        }
        if !assets_v1.contains_key("MacOS") {
            assets_v1.insert("MacOS".to_string(), vec![]);
        }
        if !assets_v1.contains_key("Linux") {
            assets_v1.insert("Linux".to_string(), vec![]);
        }
        if !assets_v1.contains_key("Windows") {
            assets_v1.insert("Windows".to_string(), vec![]);
        }

        let mut created_at_timestamp = db_row.created_timestamp.clone();
        let mut description = db_row.notes.clone();

        if let Some(v) = &description {
            if v.starts_with("<!-- DATE_OVERRIDE: ") {
                let re = Regex::new(r"<!-- DATE_OVERRIDE: (\d{4}-\d{2}-\d{2}) -->\r\n").unwrap();
                if let Some(time) = re
                    .captures(&v)
                    .and_then(|caps| caps.get(1).map(|m| m.as_str().to_string()))
                {
                    created_at_timestamp = Some(format!("{}T12:00:00.000Z", time));
                }
                let cleaned_description = re.replace(v.as_str(), "").to_string();
                description = Some(cleaned_description);
            }
        }
        Self {
            version: db_row.version.clone(),
            url: db_row.github_url.clone(),
            semver_major: semver.major,
            semver_minor: semver.minor,
            semver_patch: semver.patch,
            description,
            assets: assets_v1,
            release_type,
            prerelease,
            created_at: created_at_timestamp,
            published_at: db_row.published_timestamp.clone(),
        }
    }
}

#[derive(Serialize, Debug)]
#[serde(crate = "rocket::serde")]
#[serde(rename_all = "camelCase")]
struct PageInfo {
    total: i64,
}

#[derive(Serialize, Debug)]
#[serde(crate = "rocket::serde")]
#[serde(rename_all = "camelCase")]
struct LatestReleasesAndPullRequestsResponseData {
    data: Vec<ReleaseV1>,
    page_info: PageInfo,
}

#[derive(Serialize, Debug)]
#[serde(crate = "rocket::serde")]
#[serde(rename_all = "camelCase")]
pub struct LatestReleasesAndPullRequestsResponse {
    stable_releases: LatestReleasesAndPullRequestsResponseData,
    nightly_releases: LatestReleasesAndPullRequestsResponseData,
}

#[get("/latestReleasesAndPullRequests")]
pub async fn get_latest_releases_and_pull_requests(
    _rate_limiter: RateLimiter,
    db: &State<Pool<Sqlite>>,
) -> Result<CachedResponse<Json<LatestReleasesAndPullRequestsResponse>>, Status> {
    let db_nightly_releases = sqlite::get_recent_nightly_releases(db).await;
    let db_stable_releases = sqlite::get_recent_stable_releases(db).await;
    let total_nightly_release_count = get_total_count_of_release_type(db, "nightly").await;
    let total_stable_release_count = get_total_count_of_release_type(db, "stable").await;

    if db_nightly_releases.is_err() || db_stable_releases.is_err() {
        return Err(Status::InternalServerError);
    }

    let nightly_releases = db_nightly_releases
        .unwrap()
        .iter()
        .take(30)
        .map(|db_release| ReleaseV1::from_v2(db_release))
        .collect();
    let stable_releases = db_stable_releases
        .unwrap()
        .iter()
        .take(30)
        .map(|db_release| ReleaseV1::from_v2(db_release))
        .collect();

    let response = LatestReleasesAndPullRequestsResponse {
        stable_releases: LatestReleasesAndPullRequestsResponseData {
            data: stable_releases,
            page_info: PageInfo {
                total: total_stable_release_count.expect("to retrieve a count successfully"),
            },
        },
        nightly_releases: LatestReleasesAndPullRequestsResponseData {
            data: nightly_releases,
            page_info: PageInfo {
                total: total_nightly_release_count.expect("to retrieve a count successfully"),
            },
        },
    };
    Ok(CachedResponse::new(
        Json(response),
        "public, max-age=300".to_owned(),
    ))
}

#[derive(Serialize, Debug)]
#[serde(crate = "rocket::serde")]
#[serde(rename_all = "camelCase")]
pub struct StableReleasesResponse {
    data: Vec<ReleaseV1>,
    page_info: PageInfo,
}

#[get("/stableReleases?<offset>&<pageSize>")]
pub async fn list_stable_releases(
    _rate_limiter: RateLimiter,
    db: &State<Pool<Sqlite>>,
    offset: Option<i32>,
    pageSize: Option<i32>,
) -> Result<CachedResponse<Json<StableReleasesResponse>>, Status> {
    let mut final_page_size = 25;
    if let Some(size) = pageSize {
        final_page_size = size.clamp(1, 100);
    }
    let mut final_offset = 0;
    if let Some(offset) = offset {
        final_offset = offset.max(0);
    }
    info!("page size - {}", final_page_size);

    let db_releases = list_releases_with_offset(db, final_offset, "stable", final_page_size).await;
    let total_release_count = get_total_count_of_release_type(db, "stable").await;
    match db_releases {
        Ok(db_releases) => {
            let releases = db_releases
                .iter()
                .map(|db_release| ReleaseV1::from_v2(db_release))
                .collect();
            Ok(CachedResponse::new(
                Json(StableReleasesResponse {
                    data: releases,
                    page_info: PageInfo {
                        total: total_release_count.expect("to retrieve a count successfully"),
                    },
                }),
                "public, max-age=300".to_owned(),
            ))
        }
        Err(_) => Err(Status::InternalServerError),
    }
}

#[get("/nightlyReleases?<offset>&<pageSize>")]
pub async fn list_nightly_releases(
    _rate_limiter: RateLimiter,
    db: &State<Pool<Sqlite>>,
    offset: Option<i32>,
    pageSize: Option<i32>,
) -> Result<CachedResponse<Json<StableReleasesResponse>>, Status> {
    let mut final_page_size = 25;
    if let Some(size) = pageSize {
        final_page_size = size.clamp(1, 100);
    }
    let mut final_offset = 0;
    if let Some(offset) = offset {
        final_offset = offset.max(0);
    }

    let db_releases = list_releases_with_offset(db, final_offset, "nightly", final_page_size).await;
    let total_release_count = get_total_count_of_release_type(db, "nightly").await;
    match db_releases {
        Ok(db_releases) => {
            let releases = db_releases
                .iter()
                .map(|db_release| ReleaseV1::from_v2(db_release))
                .collect();
            Ok(CachedResponse::new(
                Json(StableReleasesResponse {
                    data: releases,
                    page_info: PageInfo {
                        total: total_release_count.expect("to retrieve a count successfully"),
                    },
                }),
                "public, max-age=300".to_owned(),
            ))
        }
        Err(_) => Err(Status::InternalServerError),
    }
}
246
src/api/v2.rs
Normal file
@@ -0,0 +1,246 @@
use std::collections::HashMap;

use crate::{
    api::models::Release,
    guards::{AdminAccess, GithubWebhookEvent, RateLimiter},
    responders::CachedResponse,
    storage::{
        models::ReleaseRow,
        sqlite::{self, insert_new_api_key},
    },
    util::semver_tag_to_integral,
};

use log::debug;
use octocrab::models::webhook_events::{payload::ReleaseWebhookEventAction, WebhookEventPayload};
use rocket::State;
use rocket::{
    http::Status,
    serde::{json::Json, Deserialize},
};
use sqlx::{Pool, Sqlite};

#[get("/releases/latest")]
pub async fn get_latest_releases(
    _rate_limiter: RateLimiter,
    db: &State<Pool<Sqlite>>,
) -> Result<CachedResponse<Json<HashMap<String, Release>>>, Status> {
    let latest_nightly_release = sqlite::get_latest_nightly_release(db).await;
    let latest_stable_release = sqlite::get_latest_stable_release(db).await;

    if latest_nightly_release.is_err() || latest_stable_release.is_err() {
        return Err(Status::InternalServerError);
    }

    let response = HashMap::from([
        (
            "nightly".to_owned(),
            Release::from_database(&latest_nightly_release.unwrap()),
        ),
        (
            "stable".to_owned(),
            Release::from_database(&latest_stable_release.unwrap()),
        ),
    ]);
    Ok(CachedResponse::new(
        Json(response),
        "public, max-age=300".to_owned(),
    ))
}

#[get("/releases/recent")]
pub async fn get_recent_releases(
    _rate_limiter: RateLimiter,
    db: &State<Pool<Sqlite>>,
) -> Result<CachedResponse<Json<HashMap<String, Vec<Release>>>>, Status> {
    let db_nightly_releases = sqlite::get_recent_nightly_releases(db).await;
    let db_stable_releases = sqlite::get_recent_stable_releases(db).await;

    if db_nightly_releases.is_err() || db_stable_releases.is_err() {
        return Err(Status::InternalServerError);
    }

    let nightly_releases = db_nightly_releases
        .unwrap()
        .iter()
        .map(|db_release| Release::from_database(db_release))
        .collect();
    let stable_releases = db_stable_releases
        .unwrap()
        .iter()
        .map(|db_release| Release::from_database(db_release))
        .collect();
    let response = HashMap::from([
        ("nightly".to_owned(), nightly_releases),
        ("stable".to_owned(), stable_releases),
    ]);
    Ok(CachedResponse::new(
        Json(response),
        "public, max-age=300".to_owned(),
    ))
}

#[get("/releases/changelog?<base>&<compare>")]
pub async fn get_release_changelog(
    _rate_limiter: RateLimiter,
    db: &State<Pool<Sqlite>>,
    base: &str,
    compare: &str,
) -> Result<CachedResponse<Json<String>>, Status> {
    let base_integral = semver_tag_to_integral(base);
    let compare_integral = semver_tag_to_integral(compare);
    if base_integral.is_some() && compare_integral.is_some() {
        let release_notes = sqlite::get_release_notes_for_range(
            db,
            base_integral.unwrap(),
            compare_integral.unwrap(),
        )
        .await;
        match release_notes {
            Ok(release_notes) => {
                let mut combined_notes = "".to_string();
                for note in release_notes.iter() {
                    if let Some(content) = &note.notes {
                        combined_notes = combined_notes + content.as_str();
                    }
                }
                Ok(CachedResponse::new(
                    Json(combined_notes),
                    "public, max-age=3600".to_owned(),
                ))
            }
            Err(_) => Err(Status::InternalServerError),
        }
    } else {
        Err(Status::BadRequest)
    }
}

#[get("/releases/<release_type>?<version_cursor>&<page_size>")]
pub async fn get_release_list(
    _rate_limiter: RateLimiter,
    db: &State<Pool<Sqlite>>,
    release_type: &str,
    version_cursor: Option<String>,
    page_size: Option<i32>,
) -> Result<CachedResponse<Json<Vec<Release>>>, Status> {
    let mut final_page_size = 100;
    if let Some(size) = page_size {
        final_page_size = size.clamp(1, 200);
    }

    let version_cursor_integral = match version_cursor {
        Some(cursor) => semver_tag_to_integral(&cursor),
        None => None,
    };

    debug!("version_cursor_integral: {:?}", version_cursor_integral);

    let db_releases =
        sqlite::list_releases(db, version_cursor_integral, release_type, final_page_size).await;
    match db_releases {
        Ok(db_releases) => {
            let releases = db_releases
                .iter()
                .map(|db_release| Release::from_database(db_release))
                .collect();
            Ok(CachedResponse::new(
                Json(releases),
                "public, max-age=300".to_owned(),
            ))
        }
        Err(_) => Err(Status::InternalServerError),
    }
}

// TODO - add searching capabilities alongside new frontend features (no point doing it yet)
// #[post("/releases/search")]
// pub async fn post_search_releases(db: &State<Pool<Sqlite>>) -> sqlite::DBResult<Json<Release>> {
//     let release = sqlite::get_version(db).await?;
//     Ok(Json(release))
// }

#[post("/webhooks/githubReleaseEvent", format = "json", data = "<event>")]
pub async fn handle_github_webhook_release_event(
    _rate_limiter: RateLimiter,
    event: GithubWebhookEvent,
    db: &State<Pool<Sqlite>>,
) -> Status {
    // The GithubWebhookEvent guard validates that it's a signed webhook payload
    match event.0.specific {
        WebhookEventPayload::Release(payload) => match payload.action {
            ReleaseWebhookEventAction::Published => {
                let release_info =
                    <octocrab::models::repos::Release as Deserialize>::deserialize(payload.release)
                        .unwrap();
                let db_release = ReleaseRow::from_github(&release_info);
                if db_release.is_none() {
                    log::error!("Unable to parse release, ignoring");
                    return Status::InternalServerError;
                }
                let db_result = sqlite::insert_new_release(db, &db_release.unwrap()).await;
                if db_result.is_err() {
                    log::error!("Error occurred when inserting new release: {:?}", db_result);
                    return Status::InternalServerError;
                }
            }
            ReleaseWebhookEventAction::Edited => {
                let release_info =
                    <octocrab::models::repos::Release as Deserialize>::deserialize(payload.release)
                        .unwrap();
                let db_release = ReleaseRow::from_github(&release_info);
                if db_release.is_none() {
                    log::error!("Unable to parse release, ignoring");
                    return Status::InternalServerError;
                }
                let db_result = sqlite::update_existing_release(db, &db_release.unwrap()).await;
                if db_result.is_err() {
                    log::error!("Error occurred when updating existing release: {:?}", db_result);
                    return Status::InternalServerError;
                }
            }
            ReleaseWebhookEventAction::Deleted => {
                let release_info =
                    <octocrab::models::repos::Release as Deserialize>::deserialize(payload.release)
                        .unwrap();
                let db_release = ReleaseRow::from_github(&release_info);
                if db_release.is_none() {
                    log::error!("Unable to parse release, ignoring");
                    return Status::InternalServerError;
                }
                let db_result = sqlite::archive_release(db, &db_release.unwrap()).await;
                if db_result.is_err() {
                    log::error!("Error occurred when archiving release: {:?}", db_result);
                    return Status::InternalServerError;
                }
            }
            _ => {
                // do nothing
                log::warn!("Unexpected event type: {:?}", payload.action);
            }
        },
        _ => {
            log::warn!("Unexpected event type");
        }
    }
    Status::Accepted
}

#[derive(Deserialize)]
#[serde(crate = "rocket::serde")]
pub struct AddAPIKeyRequest {
    api_key: String,
    metadata: String,
}

#[post("/admin/addNewAPIKey", data = "<payload>")]
pub async fn admin_add_new_api_key(
    _admin_access: AdminAccess,
    db: &State<Pool<Sqlite>>,
    payload: Json<AddAPIKeyRequest>,
) -> Status {
    match insert_new_api_key(db, &payload.api_key, &payload.metadata).await {
        Ok(_) => Status::Accepted,
        Err(_) => Status::InternalServerError,
    }
}
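Editor's note: main.rs holds V2 back pending unit tests; the sketch below (not part of the commit) shows the kind of local-client test that could gate it. `test_rocket()` is a hypothetical helper that would build a Rocket instance over a seeded in-memory database.

#[cfg(test)]
mod tests {
    use rocket::http::Status;
    use rocket::local::blocking::Client;
    use rocket::serde::json::Value;

    #[test]
    fn latest_releases_returns_both_channels() {
        // test_rocket() is assumed, not defined in this commit.
        let client = Client::tracked(test_rocket()).expect("valid rocket instance");
        let response = client.get("/v2/releases/latest").dispatch();
        assert_eq!(response.status(), Status::Ok);
        let body: Value = response.into_json().expect("json body");
        assert!(body.get("stable").is_some() && body.get("nightly").is_some());
    }
}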
25
src/external/github.rs
vendored
Normal file
@@ -0,0 +1,25 @@
pub async fn get_latest_official_version() -> Result<String, octocrab::Error> {
    let octocrab = octocrab::instance();
    // TODO - probably handle potential errors
    let release = octocrab
        .repos("PCSX2", "pcsx2")
        .releases()
        .list()
        .per_page(1)
        .send()
        .await?;
    Ok(release.items.first().unwrap().tag_name.clone())
}

pub async fn get_latest_archive_version() -> Result<String, octocrab::Error> {
    let octocrab = octocrab::instance();
    // TODO - probably handle potential errors
    let release = octocrab
        .repos("PCSX2", "archive")
        .releases()
        .list()
        .per_page(1)
        .send()
        .await?;
    Ok(release.items.first().unwrap().tag_name.clone())
}
1
src/external/mod.rs
vendored
Normal file
@@ -0,0 +1 @@
pub mod github;
47
src/fairings.rs
Normal file
@@ -0,0 +1,47 @@
use lazy_static::lazy_static;
use log::info;
use regex::Regex;
use rocket::{
    fairing::{Fairing, Info, Kind},
    Request, Response,
};

#[derive(Default, Clone)]
pub struct CORSHeaderFairing {}

lazy_static! {
    static ref CF_PAGES_REGEX: Regex =
        Regex::new(r"https:\/\/[^\.]*\.pcsx2-net-www.pages.dev").unwrap();
}

#[rocket::async_trait]
impl Fairing for CORSHeaderFairing {
    // A response fairing that appends CORS headers for approved origins.
    fn info(&self) -> Info {
        Info {
            name: "CORS Header Middleware",
            kind: Kind::Response,
        }
    }

    async fn on_response<'r>(&self, request: &'r Request<'_>, response: &mut Response<'r>) {
        if let Some(origin) = request.headers().get_one("Origin") {
            if origin == "https://pcsx2.net"
                || origin.starts_with("http://localhost")
                || origin.starts_with("https://localhost")
                || CF_PAGES_REGEX.is_match(origin)
            {
                response.set_raw_header("Access-Control-Allow-Origin", "*");
            } else {
                info!("Rejecting request from origin: {}", origin);
            }
        } else {
            // Allow localhost requests (no origin) or requests outside of browsers (they can spoof the Origin header anyway)
            response.set_raw_header("Access-Control-Allow-Origin", "*");
        }
        response.set_raw_header("Access-Control-Allow-Headers", "*"); // TODO limit this eventually
        response.set_raw_header("Access-Control-Allow-Methods", "GET, POST, OPTIONS");
    }
}

// TODO - great spot for a logging middleware!
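Editor's note: a minimal sketch (not part of the commit) of exercising the fairing with Rocket's local client; the bare rocket instance is illustrative, no routes are mounted.

#[cfg(test)]
mod tests {
    use super::CORSHeaderFairing;
    use rocket::http::Header;
    use rocket::local::blocking::Client;

    #[test]
    fn allowed_origin_gets_cors_header() {
        let rocket = rocket::build().attach(CORSHeaderFairing::default());
        let client = Client::tracked(rocket).expect("valid rocket instance");
        let response = client
            .get("/")
            .header(Header::new("Origin", "https://pcsx2.net"))
            .dispatch();
        // A 404 is fine here -- the response fairing still runs.
        assert_eq!(
            response.headers().get_one("Access-Control-Allow-Origin"),
            Some("*")
        );
    }
}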
160
src/guards.rs
Normal file
@@ -0,0 +1,160 @@
use std::error::Error;
use std::sync::Mutex;

use hmac::{Hmac, Mac};
use octocrab::models::webhook_events::WebhookEvent;
use rocket::data::{FromData, ToByteUnit};
use rocket::http::Status;
use rocket::outcome::Outcome;
use rocket::request::FromRequest;
use rocket::{Data, Request};
use sha2::Sha256;
use sqlx::{Pool, Sqlite};

use crate::storage::sqlite::get_api_key_metadata;
use crate::RateLimiterCache;

pub struct GithubWebhookEvent(pub WebhookEvent);

#[rocket::async_trait]
impl<'r> FromData<'r> for GithubWebhookEvent {
    type Error = String;

    async fn from_data(
        request: &'r Request<'_>,
        data: Data<'r>,
    ) -> rocket::data::Outcome<'r, Self> {
        match GithubWebhookEvent::from_data_impl(request, data).await {
            Ok(result) => Outcome::Success(result),
            Err(err) => {
                let message = format!("{}", err);
                Outcome::Error((Status::Forbidden, message))
            }
        }
    }
}

impl GithubWebhookEvent {
    async fn from_data_impl<'r>(
        request: &Request<'_>,
        data: Data<'r>,
    ) -> Result<Self, Box<dyn Error>> {
        let event_type = request
            .headers()
            .get_one("X-Github-Event")
            .ok_or("No X-Github-Event header")?;

        let signature = request
            .headers()
            .get_one("X-Hub-Signature-256")
            .and_then(|header| parse_signature(header))
            .ok_or("Missing or invalid X-Hub-Signature-256 header")?;

        rocket::info!("Signature: {}", hex::encode(&signature));

        let limit = request.limits().get("json").unwrap_or(1.mebibytes());
        let mut content = Vec::new();
        data.open(limit).stream_to(&mut content).await?;

        verify_signature(&signature, &content)?;

        let event = WebhookEvent::try_from_header_and_body(event_type, &content)?;
        Ok(GithubWebhookEvent(event))
    }
}

fn verify_signature(signature: &[u8], content: &[u8]) -> Result<(), impl Error> {
    let secret = dotenvy::var("GITHUB_WEBHOOK_SECRET").unwrap();
    let mut mac =
        Hmac::<Sha256>::new_from_slice(secret.as_bytes()).expect("HMAC can take key of any size");
    mac.update(&content);
    mac.verify_slice(signature)
}

fn parse_signature(header: &str) -> Option<Vec<u8>> {
    let header = header.trim();
    let Some(digest) = header.strip_prefix("sha256=") else {
        return None;
    };
    hex::decode(digest).ok()
}

#[derive(Debug)]
pub struct RateLimiter;

#[rocket::async_trait]
impl<'r> FromRequest<'r> for RateLimiter {
    type Error = std::convert::Infallible;

    async fn from_request(request: &'r Request<'_>) -> rocket::request::Outcome<Self, Self::Error> {
        // If the request has an API-key, we'll potentially short-circuit and disregard rate-limiting
        if let Some(api_key) = request.headers().get_one("X-PCSX2-API-Key") {
            // Check that the API Key is valid (right now, does it exist)
            let db = request
                .rocket()
                .state::<Pool<Sqlite>>()
                .expect("Database managed by Rocket");
            let api_key_metadata = get_api_key_metadata(db, api_key).await;
            match api_key_metadata {
                Ok(_) => return Outcome::Success(RateLimiter),
                Err(_) => {
                    error!("Invalid API Key provided");
                    return Outcome::Forward(Status::Unauthorized);
                }
            }
        }

        // Prefer the cloudflare proxied IP if available, otherwise we error out
        // https://developers.cloudflare.com/support/troubleshooting/restoring-visitor-ips/restoring-original-visitor-ips/
        let origin_ip = match request.headers().get_one("CF-Connecting-IP") {
            Some(ip) => ip.to_owned(),
            None => match request.client_ip() {
                Some(ip) => ip.to_string(),
                None => {
                    error!("Unable to determine origin IP");
                    return Outcome::Forward(Status::InternalServerError);
                }
            },
        };
        debug!("RateLimiter - Origin IP: {}", origin_ip);
        let rate_limiter_lock = request
            .rocket()
            .state::<Mutex<RateLimiterCache>>()
            .expect("Rate limiter managed by Rocket");
        let mut rate_limiter = rate_limiter_lock
            .lock()
            .expect("Rate limiter can be unlocked");
        let cache_entry = rate_limiter.get_or_insert(origin_ip);
        debug!("num requests: {:?}", cache_entry.requests_handled);
        cache_entry.requests_handled += 1;
        if cache_entry.requests_handled > 100 {
            // 100 requests per minute
            return Outcome::Forward(Status::TooManyRequests);
        }
        Outcome::Success(Self)
    }
}

#[derive(Debug)]
pub struct AdminAccess;

#[rocket::async_trait]
impl<'r> FromRequest<'r> for AdminAccess {
    type Error = std::convert::Infallible;

    async fn from_request(request: &'r Request<'_>) -> rocket::request::Outcome<Self, Self::Error> {
        let admin_key = dotenvy::var("ADMIN_API_KEY").expect("ADMIN_API_KEY env var");
        match request.headers().get_one("X-PCSX2-API-Key") {
            Some(api_key) => {
                if api_key == admin_key {
                    return Outcome::Success(AdminAccess);
                } else {
                    return Outcome::Forward(Status::Unauthorized);
                }
            }
            None => {
                return Outcome::Forward(Status::Unauthorized);
            }
        }
    }
}
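Editor's note: a round-trip sketch (not part of the commit) of how a sender produces the X-Hub-Signature-256 value that verify_signature checks; this mirrors GitHub's webhook signing scheme. The secret and body are example values.

#[cfg(test)]
mod tests {
    use hmac::{Hmac, Mac};
    use sha2::Sha256;

    #[test]
    fn signature_round_trip() {
        let secret = b"example-webhook-secret"; // example value
        let body = br#"{"action":"published"}"#; // example value
        // Sender side: HMAC-SHA256 over the raw body, hex-encoded, "sha256=" prefixed.
        let mut mac = Hmac::<Sha256>::new_from_slice(secret).expect("any key size works");
        mac.update(body);
        let header_value = format!("sha256={}", hex::encode(mac.finalize().into_bytes()));
        // Receiver side: parse_signature strips the prefix and hex-decodes.
        let parsed = super::parse_signature(&header_value).expect("parseable");
        let mut verify = Hmac::<Sha256>::new_from_slice(secret).unwrap();
        verify.update(body);
        assert!(verify.verify_slice(&parsed).is_ok());
    }
}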
166
src/main.rs
Normal file
@@ -0,0 +1,166 @@
mod api;
mod external;
mod fairings;
mod guards;
mod responders;
mod storage;
mod util;
use fern::colors::{Color, ColoredLevelConfig};

#[macro_use]
extern crate rocket;

use std::{collections::HashMap, sync::Mutex};

use chrono::{DateTime, SecondsFormat, Utc};
use sqlx::{sqlite::SqliteConnectOptions, SqlitePool};

// TODO - eventually we probably want a rate limit per endpoint
struct RateLimitMetadata {
    timestamp_start: DateTime<Utc>,
    requests_handled: usize,
}

struct RateLimiterCache {
    rate_limit_cache: HashMap<String, RateLimitMetadata>,
    last_time_flushed: Option<DateTime<Utc>>,
}

impl RateLimiterCache {
    fn new() -> Self {
        Self {
            rate_limit_cache: HashMap::new(),
            last_time_flushed: None,
        }
    }

    pub fn get_or_insert(&mut self, key: String) -> &mut RateLimitMetadata {
        self.flush();
        self.rate_limit_cache
            .entry(key)
            .or_insert_with(|| RateLimitMetadata {
                timestamp_start: Utc::now(),
                requests_handled: 0,
            })
    }

    pub fn flush(&mut self) {
        // Only flush at most every minute
        if let Some(last_time_flushed) = self.last_time_flushed {
            if last_time_flushed + chrono::Duration::minutes(1) > Utc::now() {
                return;
            }
        }
        // Remove any items that are older than 60 seconds
        self.rate_limit_cache
            .retain(|_, v| v.timestamp_start > Utc::now() - chrono::Duration::seconds(60));
        self.last_time_flushed = Some(Utc::now());
    }
}

fn setup_logging() {
    let verbose_logging =
        dotenvy::var("VERBOSE_LOGGING").map_or(false, |val| val.to_lowercase().eq("true"));
    let error_log_path = dotenvy::var("ERROR_LOG_PATH").expect("ERROR_LOG_PATH must be set");
    let app_log_path = dotenvy::var("APP_LOG_PATH").expect("APP_LOG_PATH must be set");
    let mut log_level = log::LevelFilter::Warn;
    if verbose_logging {
        log_level = log::LevelFilter::Debug;
    }
    let colors_line = ColoredLevelConfig::new()
        .error(Color::Red)
        .warn(Color::Yellow)
        .info(Color::Cyan)
        .debug(Color::Green)
        .trace(Color::White);

    fern::Dispatch::new()
        .chain(std::io::stdout())
        .chain(
            fern::log_file(&app_log_path)
                .unwrap_or_else(|_| panic!("Can't use this app_log_path: {}", &app_log_path)),
        )
        .level(log_level)
        .format(move |out, message, record| {
            out.finish(format_args!(
                "{color_line}[{date}] [{level}][{target}] [{message}]",
                color_line = format_args!(
                    "\x1B[{}m",
                    colors_line.get_color(&record.level()).to_fg_str()
                ),
                date = chrono::Utc::now().to_rfc3339_opts(SecondsFormat::Millis, true),
                level = record.level(),
                target = record.target(),
                message = message
            ))
        })
        .chain(
            fern::Dispatch::new().level(log::LevelFilter::Error).chain(
                fern::log_file(&error_log_path).unwrap_or_else(|_| {
                    panic!("Can't use this error_log_path: {}", &error_log_path)
                }),
            ),
        )
        .apply()
        .unwrap()
}

#[rocket::main]
async fn main() -> Result<(), rocket::Error> {
    setup_logging();

    let rate_limiter = Mutex::new(RateLimiterCache::new());

    let db = SqlitePool::connect_with(
        SqliteConnectOptions::new()
            .filename("db.sqlite3")
            .create_if_missing(true),
    )
    .await
    .expect("Couldn't connect to sqlite database");

    sqlx::migrate!("db/migrations")
        .run(&db)
        .await
        .expect("Couldn't migrate the database tables");

    // Check to see if the database is out of date (pull latest releases)
    // do this only if we have the github api credential set
    if dotenvy::var("GITHUB_API_TOKEN").is_ok() {
        let octocrab = octocrab::Octocrab::builder()
            .personal_token(dotenvy::var("GITHUB_API_TOKEN").unwrap())
            .build();
        octocrab::initialise(octocrab.unwrap());
        storage::sync::sync_database(&db).await;
    }

    let _rocket = rocket::build()
        // LEGACY - V1 - potentially remove eventually, the blocker would be the updater code in the emulator itself
        // it might be best to just never remove this
        .mount(
            "/v1",
            routes![
                api::v1::get_latest_releases_and_pull_requests,
                api::v1::list_nightly_releases,
                api::v1::list_stable_releases
            ],
        )
        // TODO - not enabling V2 yet, want to write unit-tests and such before potentially people start using them
        // .mount(
        //     "/v2",
        //     routes![
        //         api::v2::get_latest_releases,
        //         api::v2::get_recent_releases,
        //         api::v2::get_release_changelog,
        //         api::v2::get_release_list,
        //         api::v2::handle_github_webhook_release_event,
        //         api::v2::admin_add_new_api_key,
        //     ],
        // )
        .attach(fairings::CORSHeaderFairing::default())
        .manage(db)
        .manage(rate_limiter)
        .launch()
        .await?;
    Ok(())
}
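Editor's note: a small sketch (not part of the commit) of the RateLimiterCache behavior that the RateLimiter guard relies on -- counters are kept per key, so one client hitting its limit does not affect another.

#[cfg(test)]
mod tests {
    use super::RateLimiterCache;

    #[test]
    fn limit_is_tracked_per_key() {
        let mut cache = RateLimiterCache::new();
        // Simulate 100 requests from one origin IP within the flush window.
        for _ in 0..100 {
            cache.get_or_insert("1.2.3.4".to_owned()).requests_handled += 1;
        }
        assert_eq!(cache.get_or_insert("1.2.3.4".to_owned()).requests_handled, 100);
        // A different origin IP starts from a fresh counter.
        assert_eq!(cache.get_or_insert("5.6.7.8".to_owned()).requests_handled, 0);
    }
}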
22
src/responders.rs
Normal file
@@ -0,0 +1,22 @@
use rocket::http::Header;
use rocket::response::Responder;

struct CacheControlHeader(String);
impl From<CacheControlHeader> for Header<'static> {
    fn from(CacheControlHeader(s): CacheControlHeader) -> Self {
        Header::new("Cache-Control", s)
    }
}
#[derive(Responder)]
pub struct CachedResponse<T> {
    inner: T,
    cache_control_header: CacheControlHeader,
}
impl<'r, 'o: 'r, T: Responder<'r, 'o>> CachedResponse<T> {
    pub fn new(inner: T, header_value: String) -> Self {
        CachedResponse {
            inner,
            cache_control_header: CacheControlHeader(header_value),
        }
    }
}
4
src/storage/mod.rs
Normal file
@@ -0,0 +1,4 @@
pub mod models;
pub mod sqlite;
pub mod sync;
pub mod v1;
148
src/storage/models.rs
Normal file
@@ -0,0 +1,148 @@
use std::collections::HashMap;

use chrono::{Duration, SecondsFormat, Utc};
use octocrab::models::repos::Release;
use regex::Regex;
use rocket::form::validate::Contains;
use rocket::serde::json::serde_json;
use rocket::serde::Serialize;
use sqlx::FromRow;

use crate::api::models::ReleaseAsset;
use crate::util::semver_tag_to_integral;

#[derive(Serialize, FromRow, Debug)]
#[serde(crate = "rocket::serde")]
pub struct ReleaseVersion {
    pub version: String,
}

#[derive(Serialize, FromRow, Debug)]
#[serde(crate = "rocket::serde")]
pub struct ReleaseRow {
    pub id: i64,
    pub version: String,
    pub version_integral: i64,
    pub published_timestamp: Option<String>,
    pub created_timestamp: Option<String>,
    pub github_release_id: i64,
    pub github_url: String,
    pub release_type: String,
    pub next_audit: String,
    pub next_audit_days: i64,
    pub archived: i64,
    pub notes: Option<String>,
    pub assets: String,
}

impl ReleaseRow {
    pub fn from_github(github_release: &Release) -> Option<Self> {
        let mut assets: HashMap<String, Vec<ReleaseAsset>> = HashMap::new();
        github_release.assets.iter().for_each(|asset| {
            let mut platform = "Windows";
            if asset.name.to_lowercase().contains("linux") {
                platform = "Linux";
            } else if asset.name.to_lowercase().contains("macos") {
                platform = "macOS";
            }

            let file_name_regex = Regex::new(r"(.+v\d+\.\d+\.\d+[^.]*)\.").unwrap();
            let filename_matches: Vec<_> = file_name_regex.captures_iter(&asset.name).collect();

            // Initialize tags as an empty vector
            let mut tags: Vec<String> = Vec::new();

            // Check if there is at least one match
            if let Some(captures) = filename_matches.get(0) {
                // Get the first capture group from the match
                if let Some(match_str) = captures.get(1) {
                    // Split the match by "-" and slice from the fourth element onward
                    tags = match_str
                        .as_str()
                        .split('-')
                        .skip(3)
                        .map(String::from)
                        .collect();
                }
            }

            assets
                .entry(platform.to_owned())
                .or_insert_with(Vec::new)
                .push(ReleaseAsset {
                    download_url: asset.browser_download_url.to_string(),
                    tags,
                    download_count: asset.download_count,
                    download_size_bytes: asset.size,
                });
        });

        // Date override support
        let mut release_date_override = None;
        if github_release.body.is_some() && github_release.body.contains("DATE_OVERRIDE") {
            let regexp = Regex::new(r"DATE_OVERRIDE:\s?(\d{4}-\d{2}-\d{2})").unwrap();
            let release_body = github_release.body.clone().unwrap_or("".to_string());
            let matches: Vec<&str> = regexp
                .captures_iter(&release_body)
                .filter_map(|cap| cap.get(1).map(|m| m.as_str()))
                .collect();
            if let Some(first_match) = matches.first() {
                release_date_override = Some(format!("{}T12:00:00.000Z", first_match));
            }
        }

        let semver_integral = semver_tag_to_integral(github_release.tag_name.as_str());
        if semver_integral.is_none() {
            log::error!("Unable to parse tag into semver integral");
            return None;
        }

        Some(Self {
            id: -1,
            version: github_release.tag_name.clone(),
            version_integral: semver_integral.unwrap(),
            published_timestamp: match &github_release.published_at {
                Some(published_at) => {
                    Some(published_at.to_rfc3339_opts(SecondsFormat::Millis, true))
                }
                None => None,
            },
            created_timestamp: match &github_release.created_at {
                Some(created_at) => {
                    if release_date_override.is_some() {
                        release_date_override
                    } else {
                        Some(created_at.to_rfc3339_opts(SecondsFormat::Millis, true))
                    }
                }
                None => None,
            },
            github_release_id: github_release.id.0 as i64,
            github_url: github_release.html_url.to_string(),
            release_type: if github_release.prerelease {
                "nightly".to_owned()
            } else {
                "stable".to_owned()
            },
            next_audit: (Utc::now() + Duration::days(7))
                .to_rfc3339_opts(SecondsFormat::Millis, true),
            next_audit_days: 7,
            archived: 0,
            notes: github_release.body.clone(),
            assets: serde_json::to_string(&assets).unwrap(),
        })
    }
}

#[derive(Serialize, FromRow, Debug)]
#[serde(crate = "rocket::serde")]
pub struct ReleaseNotesColumn {
    pub notes: Option<String>,
}

#[derive(Serialize, FromRow, Debug)]
#[serde(crate = "rocket::serde")]
pub struct APIKeyMetadataRow {
    pub api_key: String,
    pub metadata_json: String,
}
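Editor's note: a sketch (not part of the commit) checking the tag derivation in from_github against one invented asset file name; skip(3) drops the "pcsx2" prefix, the version, and the OS segment, leaving the trailing tags.

#[cfg(test)]
mod tests {
    use regex::Regex;

    #[test]
    fn tags_come_after_the_third_dash() {
        let name = "pcsx2-v1.7.5000-windows-64bit-AVX2.7z"; // example name
        let re = Regex::new(r"(.+v\d+\.\d+\.\d+[^.]*)\.").unwrap();
        let captures = re.captures(name).expect("matches");
        let tags: Vec<String> = captures[1].split('-').skip(3).map(String::from).collect();
        assert_eq!(tags, vec!["64bit", "AVX2"]);
    }
}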
192
src/storage/sqlite.rs
Normal file
@@ -0,0 +1,192 @@
use log::info;
use sqlx::SqlitePool;

use super::models::{APIKeyMetadataRow, ReleaseNotesColumn, ReleaseRow, ReleaseVersion};

pub type DBResult<T, E = rocket::response::Debug<sqlx::Error>> = std::result::Result<T, E>;

pub async fn insert_new_release(db: &SqlitePool, release: &ReleaseRow) -> DBResult<()> {
    info!("inserting release {} into database", release.version);
    sqlx::query!(
        r#"INSERT OR IGNORE INTO releases (version, version_integral, published_timestamp, created_timestamp, github_release_id, github_url, release_type, next_audit, next_audit_days, archived, notes, assets) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);"#,
        release.version,
        release.version_integral,
        release.published_timestamp,
        release.created_timestamp,
        release.github_release_id,
        release.github_url,
        release.release_type,
        release.next_audit,
        release.next_audit_days,
        0,
        release.notes,
        release.assets
    )
    .execute(db)
    .await?;
    Ok(())
}

pub async fn update_existing_release(db: &SqlitePool, release: &ReleaseRow) -> DBResult<()> {
    sqlx::query!(
        r#"UPDATE releases SET notes = ?, assets = ? WHERE version = ?;"#,
        release.notes,
        release.assets,
        release.version
    )
    .execute(db)
    .await?;
    Ok(())
}

pub async fn archive_release(db: &SqlitePool, release: &ReleaseRow) -> DBResult<()> {
    // GitHub tags may carry a leading 'v'; the database stores bare versions.
    let mut sanitized_tag = release.version.clone();
    if sanitized_tag.starts_with('v') {
        sanitized_tag.remove(0);
    }
    sqlx::query!(
        r#"UPDATE releases SET archived = 1 WHERE version = ?;"#,
        sanitized_tag
    )
    .execute(db)
    .await?;
    Ok(())
}

// TODO - move away from '*' usages
pub async fn get_latest_nightly_release(db: &SqlitePool) -> DBResult<ReleaseRow> {
    let latest_nightly = sqlx::query_as!(
        ReleaseRow,
        r#"SELECT * FROM releases WHERE release_type = 'nightly' AND archived = 0 ORDER BY version_integral DESC LIMIT 1;"#
    )
    .fetch_one(db)
    .await?;
    Ok(latest_nightly)
}

pub async fn get_latest_stable_release(db: &SqlitePool) -> DBResult<ReleaseRow> {
    let latest_stable = sqlx::query_as!(
        ReleaseRow,
        r#"SELECT * FROM releases WHERE release_type = 'stable' AND archived = 0 ORDER BY version_integral DESC LIMIT 1;"#
    )
    .fetch_one(db)
    .await?;
    Ok(latest_stable)
}

pub async fn get_recent_nightly_releases(db: &SqlitePool) -> DBResult<Vec<ReleaseRow>> {
    let nightly_releases = sqlx::query_as!(
        ReleaseRow,
        r#"SELECT * FROM releases WHERE release_type = 'nightly' AND archived = 0 ORDER BY version_integral DESC LIMIT 200;"#
    )
    .fetch_all(db)
    .await?;
    Ok(nightly_releases)
}

pub async fn get_recent_stable_releases(db: &SqlitePool) -> DBResult<Vec<ReleaseRow>> {
    let stable_releases = sqlx::query_as!(
        ReleaseRow,
        r#"SELECT * FROM releases WHERE release_type = 'stable' AND archived = 0 ORDER BY version_integral DESC LIMIT 200;"#
    )
    .fetch_all(db)
    .await?;
    Ok(stable_releases)
}

pub async fn get_release_notes_for_range(
    db: &SqlitePool,
    base_version_integral: i64,
    compare_version_integral: i64,
) -> DBResult<Vec<ReleaseNotesColumn>> {
    // The range is inclusive on both ends: compare <= version_integral <= base.
    let releases = sqlx::query_as!(
        ReleaseNotesColumn,
        r#"SELECT notes FROM releases WHERE archived = 0 AND version_integral >= ? AND version_integral <= ? ORDER BY version_integral DESC;"#,
        compare_version_integral,
        base_version_integral
    )
    .fetch_all(db)
    .await?;
    Ok(releases)
}

pub async fn list_releases(
    db: &SqlitePool,
    start_cursor_integral: Option<i64>,
    release_type: &str,
    page_size: i32,
) -> DBResult<Vec<ReleaseRow>> {
    if start_cursor_integral.is_none() {
        let releases = sqlx::query_as!(
            ReleaseRow,
            r#"SELECT * FROM releases WHERE release_type = ? AND archived = 0 ORDER BY version_integral DESC LIMIT ?;"#,
            release_type,
            page_size
        )
        .fetch_all(db)
        .await?;
        Ok(releases)
    } else {
        let releases = sqlx::query_as!(
            ReleaseRow,
            r#"SELECT * FROM releases WHERE release_type = ? AND version_integral < ? AND archived = 0 ORDER BY version_integral DESC LIMIT ?;"#,
            release_type,
            start_cursor_integral,
            page_size
        )
        .fetch_all(db)
        .await?;
        Ok(releases)
    }
}

pub async fn list_all_release_tags(db: &SqlitePool) -> DBResult<Vec<ReleaseVersion>> {
    let versions = sqlx::query_as!(ReleaseVersion, r#"SELECT version FROM releases;"#)
        .fetch_all(db)
        .await?;
    Ok(versions)
}

// TODO - search releases

pub async fn get_api_key_metadata(db: &SqlitePool, api_key: &str) -> DBResult<APIKeyMetadataRow> {
    let api_key_metadata = sqlx::query_as!(
        APIKeyMetadataRow,
        r#"SELECT * FROM api_keys WHERE api_key = ?;"#,
        api_key
    )
    .fetch_one(db)
    .await?;
    Ok(api_key_metadata)
}

pub async fn insert_new_api_key(
    db: &SqlitePool,
    api_key: &String,
    key_metadata: &String,
) -> DBResult<()> {
    sqlx::query!(
        r#"INSERT OR IGNORE INTO api_keys (api_key, metadata_json) VALUES (?, ?);"#,
        api_key,
        key_metadata
    )
    .execute(db)
    .await?;
    Ok(())
}
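list_releases above implements keyset (cursor) pagination: the first call passes None, and each following call feeds the smallest version_integral seen so far back in as the cursor, so every page is an index seek rather than an OFFSET scan. A minimal consumer sketch, not part of the diff -- the function name and page size are illustrative, and a connected pool is assumed:

// Sketch: page through all non-archived nightly releases via the cursor.
async fn walk_nightlies(db: &sqlx::SqlitePool) -> Result<(), rocket::response::Debug<sqlx::Error>> {
    let mut cursor: Option<i64> = None;
    loop {
        let page = crate::storage::sqlite::list_releases(db, cursor, "nightly", 30).await?;
        let Some(last) = page.last() else { break };
        // Rows come back ordered by version_integral DESC, so the last row's
        // integral is the exclusive upper bound for the next page.
        cursor = Some(last.version_integral);
        for release in &page {
            println!("{} ({})", release.version, release.release_type);
        }
    }
    Ok(())
}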
130
src/storage/sync.rs
Normal file
@@ -0,0 +1,130 @@
use log::info;
use rocket::tokio::pin;
use sqlx::SqlitePool;

use crate::{
    external,
    storage::{self, models::ReleaseRow, sqlite},
};

// Ensures the database contains all of the releases.
// This does not update existing releases; version numbers that are already inserted are ignored.
pub async fn sync_database(db: &SqlitePool) -> bool {
    info!("Checking to see if the current database is up to date");
    // 0. Get a list of all current version numbers (tags)
    let current_version_data = storage::sqlite::list_all_release_tags(db).await;
    if current_version_data.is_err() {
        log::error!(
            "unable to fetch current version data: {:?}",
            current_version_data.err()
        );
        return false;
    }
    // Collect into a Vec so the membership checks below are exact tag matches
    // (collecting into a single String would make `contains` a substring test).
    let current_versions = current_version_data
        .unwrap()
        .iter()
        .map(|release| release.version.clone())
        .collect::<Vec<String>>();
    // 1. Pull GitHub's latest release on PCSX2/pcsx2 and see if we already have it in the database
    let latest_version = match external::github::get_latest_official_version().await {
        Ok(latest_version) => latest_version,
        Err(err) => {
            log::error!("unable to fetch latest PCSX2/pcsx2 version: {:?}", err);
            return false;
        }
    };

    let latest_archive_version = match external::github::get_latest_archive_version().await {
        Ok(latest_version) => latest_version,
        Err(err) => {
            log::error!("unable to fetch latest PCSX2/archive version: {:?}", err);
            return false;
        }
    };
    // 2. If not, do a full scrape of both repos, inserting whatever has not already been added
    if !current_versions.contains(&latest_version)
        || !current_versions.contains(&latest_archive_version)
    {
        info!("DB is missing latest version ({}) or latest archived version ({}), syncing with GH's API", latest_version, latest_archive_version);
    }

    let octocrab = octocrab::instance();
    // Process main repository
    if !current_versions.contains(&latest_version) {
        let main_release_stream_req = octocrab
            .repos("PCSX2", "pcsx2")
            .releases()
            .list()
            .per_page(100)
            .send()
            .await;
        if main_release_stream_req.is_err() {
            log::error!(
                "unable to retrieve PCSX2/pcsx2 releases: {:?}",
                main_release_stream_req.err()
            );
            return false;
        }
        let main_release_stream = main_release_stream_req.unwrap().into_stream(&octocrab);
        pin!(main_release_stream);
        while let Some(release) = rocket::futures::TryStreamExt::try_next(&mut main_release_stream)
            .await
            .unwrap_or_else(|_| None)
        {
            if !current_versions.contains(&release.tag_name) {
                info!("Adding to DB: {}", &release.tag_name);
                let db_release = ReleaseRow::from_github(&release);
                if db_release.is_none() {
                    log::error!("Unable to parse release, ignoring");
                    continue;
                }
                let db_result = sqlite::insert_new_release(db, &db_release.unwrap()).await;
                if db_result.is_err() {
                    log::error!("Error occurred when inserting new release: {:?}", db_result);
                    continue;
                }
            }
        }
    }

    // Process archive repository
    if !current_versions.contains(&latest_archive_version) {
        let archive_release_stream_req = octocrab
            .repos("PCSX2", "archive")
            .releases()
            .list()
            .per_page(100)
            .send()
            .await;
        if archive_release_stream_req.is_err() {
            log::error!(
                "unable to retrieve PCSX2/archive releases: {:?}",
                archive_release_stream_req.err()
            );
            return false;
        }
        let archive_release_stream = archive_release_stream_req.unwrap().into_stream(&octocrab);
        pin!(archive_release_stream);
        while let Some(release) =
            rocket::futures::TryStreamExt::try_next(&mut archive_release_stream)
                .await
                .unwrap_or_else(|_| None)
        {
            if !current_versions.contains(&release.tag_name) {
                info!("Adding to DB: {}", &release.tag_name);
                let db_release = ReleaseRow::from_github(&release);
                if db_release.is_none() {
                    log::error!("Unable to parse release, ignoring");
                    continue;
                }
                let db_result = sqlite::insert_new_release(db, &db_release.unwrap()).await;
                if db_result.is_err() {
                    log::error!("Error occurred when inserting new release: {:?}", db_result);
                    continue;
                }
            }
        }
    }

    // Sync finished; every failure path above returned early.
    true
}
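Given the startup synchronization mentioned in the commit list, a plausible call site for sync_database looks like the sketch below; the main function shape, module paths, and connection string are assumptions, not part of this file:

// Sketch: run one sync pass before Rocket starts serving (paths assumed).
#[rocket::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let pool = sqlx::SqlitePool::connect("sqlite://db.sqlite3").await?;
    if !crate::storage::sync::sync_database(&pool).await {
        // Not fatal: the API can still serve whatever the seeded database contains.
        log::warn!("initial release sync failed; continuing with existing data");
    }
    // ...build and launch the Rocket instance here...
    Ok(())
}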
30
src/storage/v1.rs
Normal file
@@ -0,0 +1,30 @@
use sqlx::SqlitePool;

use super::{models::ReleaseRow, sqlite::DBResult};

pub async fn list_releases_with_offset(
    db: &SqlitePool,
    offset: i32,
    release_type: &str,
    page_size: i32,
) -> DBResult<Vec<ReleaseRow>> {
    let releases = sqlx::query_as!(
        ReleaseRow,
        r#"SELECT * FROM releases WHERE release_type = ? AND archived = 0 ORDER BY version_integral DESC LIMIT ? OFFSET ?;"#,
        release_type,
        page_size,
        offset
    )
    .fetch_all(db)
    .await?;
    Ok(releases)
}

pub async fn get_total_count_of_release_type(db: &SqlitePool, release_type: &str) -> DBResult<i64> {
    let release_count = sqlx::query!(
        r#"SELECT COUNT(*) as count FROM releases WHERE release_type = ?;"#,
        release_type
    )
    .fetch_one(db)
    .await?;
    Ok(release_count.count.into())
}
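These v1 helpers keep the legacy OFFSET-based paging for endpoint parity; unlike the keyset variant in sqlite.rs, SQLite must walk and discard every skipped row, so deep pages get slower. A sketch of how a v1 handler would consume them -- the function name, release type, and page size are illustrative:

// Sketch: translate a page number into an OFFSET plus a total for the pager.
async fn v1_list_page(
    db: &sqlx::SqlitePool,
    page: i32,
) -> Result<(), rocket::response::Debug<sqlx::Error>> {
    let page_size = 30;
    let releases =
        crate::storage::v1::list_releases_with_offset(db, page * page_size, "stable", page_size)
            .await?;
    let total = crate::storage::v1::get_total_count_of_release_type(db, "stable").await?;
    println!(
        "page {} of {} ({} rows)",
        page + 1,
        (total + i64::from(page_size) - 1) / i64::from(page_size),
        releases.len()
    );
    Ok(())
}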
65
src/util.rs
Normal file
@@ -0,0 +1,65 @@
fn pad_start(input: &str, pad_length: usize, pad_char: char) -> String {
    let padding = pad_char
        .to_string()
        .repeat(pad_length.saturating_sub(input.len()));
    format!("{}{}", padding, input)
}

pub fn semver_tag_to_integral(version: &str) -> Option<i64> {
    let valid_semver = version.strip_prefix('v').unwrap_or(version);

    // "1.2.3" becomes "000001" + "000002" + "000003" (each part zero-padded to six digits)
    let parts: Vec<&str> = valid_semver.split('.').collect();
    if parts.len() < 2 || parts.len() > 3 {
        return None;
    }

    let mut integral_string = String::new();
    for part in &parts {
        if part.parse::<i64>().is_err() {
            return None;
        }
        integral_string += pad_start(part, 6, '0').as_str();
    }
    // A slight caveat -- some releases were unfortunately tagged with only two
    // parts (e.g. 1.0), so pad them out to a full three-part version.
    if parts.len() == 2 {
        integral_string += "000000";
    }
    Some(integral_string.parse().unwrap())
}

pub struct Semver {
    pub major: i64,
    pub minor: i64,
    pub patch: i64,
}

impl Semver {
    pub fn new(version: &str) -> Semver {
        let valid_semver = version.strip_prefix('v').unwrap_or(version);
        // TODO - some releases did not have 3 parts!
        let parts: Vec<&str> = valid_semver.split('.').collect();
        if parts.len() < 3 {
            Semver {
                major: parts[0].parse().unwrap(),
                minor: parts[1].parse().unwrap(),
                patch: 0,
            }
        } else {
            Semver {
                major: parts[0].parse().unwrap(),
                minor: parts[1].parse().unwrap(),
                patch: parts[2].parse().unwrap(),
            }
        }
    }
}
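The integral encoding is what lets ORDER BY version_integral sort semver tags as plain integers: each component is zero-padded to six digits and concatenated, with two-part legacy tags treated as x.y.0. A quick sanity check -- the test module below is an illustrative addition, not part of the diff:

#[cfg(test)]
mod tests {
    use super::semver_tag_to_integral;

    #[test]
    fn encodes_three_part_tags() {
        // "v1.2.3" -> "000001" + "000002" + "000003" -> 1_000_002_000_003
        assert_eq!(semver_tag_to_integral("v1.2.3"), Some(1_000_002_000_003));
        // 000100 > 000099 in the low block, so v1.7.100 sorts above v1.7.99.
        assert!(semver_tag_to_integral("v1.7.100") > semver_tag_to_integral("v1.7.99"));
    }

    #[test]
    fn pads_two_part_legacy_tags() {
        // "1.0" is treated as 1.0.0.
        assert_eq!(semver_tag_to_integral("1.0"), Some(1_000_000_000_000));
    }

    #[test]
    fn rejects_non_numeric_parts() {
        assert_eq!(semver_tag_to_integral("v1.2.3-rc1"), None);
    }
}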
100
tsconfig.json
@@ -1,100 +0,0 @@
{
  "compilerOptions": {
    /* Visit https://aka.ms/tsconfig.json to read more about this file */

    /* Projects */
    // "incremental": true, /* Enable incremental compilation */
    // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */
    // "tsBuildInfoFile": "./", /* Specify the folder for .tsbuildinfo incremental compilation files. */
    // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects */
    // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */
    // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */

    /* Language and Environment */
    "target": "es5", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */
    // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */
    // "jsx": "preserve", /* Specify what JSX code is generated. */
    // "experimentalDecorators": true, /* Enable experimental support for TC39 stage 2 draft decorators. */
    // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */
    // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h' */
    // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */
    // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using `jsx: react-jsx*`.` */
    // "reactNamespace": "", /* Specify the object invoked for `createElement`. This only applies when targeting `react` JSX emit. */
    // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */
    // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */

    /* Modules */
    "module": "commonjs", /* Specify what module code is generated. */
    "rootDir": "./", /* Specify the root folder within your source files. */
    // "moduleResolution": "node", /* Specify how TypeScript looks up a file from a given module specifier. */
    // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */
    // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */
    // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */
    // "typeRoots": [], /* Specify multiple folders that act like `./node_modules/@types`. */
    // "types": [], /* Specify type package names to be included without being referenced in a source file. */
    // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */
    // "resolveJsonModule": true, /* Enable importing .json files */
    // "noResolve": true, /* Disallow `import`s, `require`s or `<reference>`s from expanding the number of files TypeScript should add to a project. */

    /* JavaScript Support */
    // "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the `checkJS` option to get errors from these files. */
    // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */
    // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from `node_modules`. Only applicable with `allowJs`. */

    /* Emit */
    // "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */
    // "declarationMap": true, /* Create sourcemaps for d.ts files. */
    // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */
    // "sourceMap": true, /* Create source map files for emitted JavaScript files. */
    // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If `declaration` is true, also designates a file that bundles all .d.ts output. */
    "outDir": "./dist", /* Specify an output folder for all emitted files. */
    // "removeComments": true, /* Disable emitting comments. */
    // "noEmit": true, /* Disable emitting files from a compilation. */
    // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */
    // "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types */
    // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */
    // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */
    // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */
    // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */
    // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */
    // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */
    // "newLine": "crlf", /* Set the newline character for emitting files. */
    // "stripInternal": true, /* Disable emitting declarations that have `@internal` in their JSDoc comments. */
    // "noEmitHelpers": true, /* Disable generating custom helper functions like `__extends` in compiled output. */
    // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */
    // "preserveConstEnums": true, /* Disable erasing `const enum` declarations in generated code. */
    // "declarationDir": "./", /* Specify the output directory for generated declaration files. */

    /* Interop Constraints */
    // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */
    // "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */
    "esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables `allowSyntheticDefaultImports` for type compatibility. */
    // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */
    "forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */

    /* Type Checking */
    "strict": true, /* Enable all strict type-checking options. */
    // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied `any` type.. */
    // "strictNullChecks": true, /* When type checking, take into account `null` and `undefined`. */
    // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */
    // "strictBindCallApply": true, /* Check that the arguments for `bind`, `call`, and `apply` methods match the original function. */
    // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */
    // "noImplicitThis": true, /* Enable error reporting when `this` is given the type `any`. */
    // "useUnknownInCatchVariables": true, /* Type catch clause variables as 'unknown' instead of 'any'. */
    // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */
    // "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */
    // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read */
    // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */
    // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */
    // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */
    // "noUncheckedIndexedAccess": true, /* Include 'undefined' in index signature results */
    // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */
    // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type */
    // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */
    // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */

    /* Completeness */
    // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */
    "skipLibCheck": true /* Skip type checking all .d.ts files. */
  }
}
@@ -1,37 +0,0 @@
import LokiTransport from "winston-loki";
import winston from "winston";

export class LogFactory {
  private devEnv = process.env.NODE_ENV !== "production";
  private log: winston.Logger;

  constructor(scope: string) {
    this.log = winston.createLogger({
      defaultMeta: { service: "pcsx2-api", scope: scope },
    });
    this.log.add(
      new winston.transports.Console({
        format: winston.format.simple(),
      })
    );
    if (!this.devEnv) {
      console.log("Piping logs to Grafana as well");
      const lokiTransport = new LokiTransport({
        host: `https://logs-prod-us-central1.grafana.net`,
        batching: true,
        basicAuth: `${process.env.GRAFANA_LOKI_USER}:${process.env.GRAFANA_LOKI_PASS}`,
        labels: { app: "pcsx2-backend", env: this.devEnv ? "dev" : "prod" },
        // remove color from log level label - loki really doesn't like it
        format: winston.format.uncolorize({
          message: false,
          raw: false,
        }),
      });
      this.log.add(lokiTransport);
    }
  }

  public getLogger(): winston.Logger {
    return this.log;
  }
}
3
v1/README.md
Normal file
@@ -0,0 +1,3 @@
# TODO

- Back up the legacy version of the app, mostly just to produce fixture data for unit tests on the v2 version (which replicates the old endpoints for compatibility's sake)