diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index 4a2f4497..db836022 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -12,71 +12,70 @@ name: "CodeQL"
 on:
-  push:
-    branches: [ "main" ]
-  pull_request:
-    # The branches below must be a subset of the branches above
-    branches: [ "main" ]
-  schedule:
-    - cron: '18 19 * * 4'
+    push:
+        branches: ["main"]
+    pull_request:
+        # The branches below must be a subset of the branches above
+        branches: ["main"]
+    schedule:
+        - cron: "18 19 * * 4"
 jobs:
-  analyze:
-    name: Analyze
-    # Runner size impacts CodeQL analysis time. To learn more, please see:
-    #   - https://gh.io/recommended-hardware-resources-for-running-codeql
-    #   - https://gh.io/supported-runners-and-hardware-resources
-    #   - https://gh.io/using-larger-runners
-    # Consider using larger runners for possible analysis time improvements.
-    runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }}
-    timeout-minutes: ${{ (matrix.language == 'swift' && 120) || 360 }}
-    permissions:
-      actions: read
-      contents: read
-      security-events: write
+    analyze:
+        name: Analyze
+        # Runner size impacts CodeQL analysis time. To learn more, please see:
+        #   - https://gh.io/recommended-hardware-resources-for-running-codeql
+        #   - https://gh.io/supported-runners-and-hardware-resources
+        #   - https://gh.io/using-larger-runners
+        # Consider using larger runners for possible analysis time improvements.
+        runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }}
+        timeout-minutes: ${{ (matrix.language == 'swift' && 120) || 360 }}
+        permissions:
+            actions: read
+            contents: read
+            security-events: write
-    strategy:
-      fail-fast: false
-      matrix:
-        language: [ 'javascript-typescript' ]
-        # CodeQL supports [ 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift' ]
-        # Use only 'java-kotlin' to analyze code written in Java, Kotlin or both
-        # Use only 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both
-        # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
+        strategy:
+            fail-fast: false
+            matrix:
+                language: ["javascript-typescript"]
+                # CodeQL supports [ 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift' ]
+                # Use only 'java-kotlin' to analyze code written in Java, Kotlin or both
+                # Use only 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both
+                # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
-    steps:
-    - name: Checkout repository
-      uses: actions/checkout@v3
+        steps:
+            - name: Checkout repository
+              uses: actions/checkout@v3
-    # Initializes the CodeQL tools for scanning.
-    - name: Initialize CodeQL
-      uses: github/codeql-action/init@v2
-      with:
-        languages: ${{ matrix.language }}
-        # If you wish to specify custom queries, you can do so here or in a config file.
-        # By default, queries listed here will override any specified in a config file.
-        # Prefix the list here with "+" to use these queries and those in the config file.
+            # Initializes the CodeQL tools for scanning.
+            - name: Initialize CodeQL
+              uses: github/codeql-action/init@v2
+              with:
+                  languages: ${{ matrix.language }}
+                  # If you wish to specify custom queries, you can do so here or in a config file.
+                  # By default, queries listed here will override any specified in a config file.
+                  # Prefix the list here with "+" to use these queries and those in the config file.
-        # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
-        # queries: security-extended,security-and-quality
+                  # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
+                  # queries: security-extended,security-and-quality
+            # Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift).
+            # If this step fails, then you should remove it and run the build manually (see below)
+            - name: Autobuild
+              uses: github/codeql-action/autobuild@v2
-    # Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift).
-    # If this step fails, then you should remove it and run the build manually (see below)
-    - name: Autobuild
-      uses: github/codeql-action/autobuild@v2
+            # ℹī¸ Command-line programs to run using the OS shell.
+            # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
-    # ℹī¸ Command-line programs to run using the OS shell.
-    # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
+            # If the Autobuild fails above, remove it and uncomment the following three lines.
+            # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
-    # If the Autobuild fails above, remove it and uncomment the following three lines.
-    # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
+            # - run: |
+            #     echo "Run, Build Application using script"
+            #     ./location_of_script_within_repo/buildscript.sh
-    # - run: |
-    #   echo "Run, Build Application using script"
-    #   ./location_of_script_within_repo/buildscript.sh
-
-    - name: Perform CodeQL Analysis
-      uses: github/codeql-action/analyze@v2
-      with:
-        category: "/language:${{matrix.language}}"
+            - name: Perform CodeQL Analysis
+              uses: github/codeql-action/analyze@v2
+              with:
+                  category: "/language:${{matrix.language}}"
diff --git a/.github/workflows/docker-publish.yml b/.github/workflows/docker-publish.yml
new file mode 100644
index 00000000..fc8026cc
--- /dev/null
+++ b/.github/workflows/docker-publish.yml
@@ -0,0 +1,96 @@
+name: Build and publish Docker image
+
+# This workflow uses actions that are not certified by GitHub.
+# They are provided by a third-party and are governed by
+# separate terms of service, privacy policy, and support
+# documentation.
+
+on:
+    schedule:
+        - cron: "22 5 * * *"
+    push:
+        branches: ["main"]
+        # Publish semver tags as releases.
+        tags: ["v*.*.*"]
+    pull_request:
+        branches: ["main"]
+
+env:
+    # Use docker.io for Docker Hub if empty
+    REGISTRY: ghcr.io
+    # github.repository as <account>/<repo>
+    IMAGE_NAME: ${{ github.repository }}
+
+jobs:
+    build:
+        runs-on: ubuntu-latest
+        permissions:
+            contents: read
+            packages: write
+            # This is used to complete the identity challenge
+            # with sigstore/fulcio when running outside of PRs.
+            id-token: write
+
+        steps:
+            - name: Checkout repository
+              uses: actions/checkout@v3
+
+            # Install the cosign tool except on PR
+            # https://github.com/sigstore/cosign-installer
+            - name: Install cosign
+              if: github.event_name != 'pull_request'
+              uses: sigstore/cosign-installer@6e04d228eb30da1757ee4e1dd75a0ec73a653e06 #v3.1.1
+              with:
+                  cosign-release: "v2.1.1"
+
+            # Set up BuildKit Docker container builder to be able to build
+            # multi-platform images and export cache
+            # https://github.com/docker/setup-buildx-action
+            - name: Set up Docker Buildx
+              uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226 # v3.0.0
+
+            # Login against a Docker registry except on PR
+            # https://github.com/docker/login-action
+            - name: Log into registry ${{ env.REGISTRY }}
+              if: github.event_name != 'pull_request'
+              uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0
+              with:
+                  registry: ${{ env.REGISTRY }}
+                  username: ${{ github.actor }}
+                  password: ${{ secrets.GITHUB_TOKEN }}
+
+            # Extract metadata (tags, labels) for Docker
+            # https://github.com/docker/metadata-action
+            - name: Extract Docker metadata
+              id: meta
+              uses: docker/metadata-action@96383f45573cb7f253c731d3b3ab81c87ef81934 # v5.0.0
+              with:
+                  images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
+
+            # Build and push Docker image with Buildx (don't push on PR)
+            # https://github.com/docker/build-push-action
+            - name: Build and push Docker image
+              id: build-and-push
+              uses: docker/build-push-action@0565240e2d4ab88bba5387d719585280857ece09 # v5.0.0
+              with:
+                  context: .
+                  push: ${{ github.event_name != 'pull_request' }}
+                  tags: ${{ steps.meta.outputs.tags }}
+                  labels: ${{ steps.meta.outputs.labels }}
+                  cache-from: type=gha
+                  cache-to: type=gha,mode=max
+
+            # Sign the resulting Docker image digest except on PRs.
+            # This will only write to the public Rekor transparency log when the Docker
+            # repository is public to avoid leaking data. If you would like to publish
+            # transparency data even for private images, pass --force to cosign below.
+            # https://github.com/sigstore/cosign
+            - name: Sign the published Docker image
+              if: ${{ github.event_name != 'pull_request' }}
+              env:
+                  # https://docs.github.com/en/actions/security-guides/security-hardening-for-github-actions#using-an-intermediate-environment-variable
+                  TAGS: ${{ steps.meta.outputs.tags }}
+                  DIGEST: ${{ steps.build-and-push.outputs.digest }}
+              # This step uses the identity token to provision an ephemeral certificate
+              # against the sigstore community Fulcio instance.
+              run: echo "${TAGS}" | xargs -I {} cosign sign --yes {}@${DIGEST}
diff --git a/.github/workflows/eslint.yml b/.github/workflows/eslint.yml
index 2f0cedf8..68ffac6d 100644
--- a/.github/workflows/eslint.yml
+++ b/.github/workflows/eslint.yml
@@ -10,43 +10,43 @@ name: ESLint Checks
 on:
-  push:
-    branches: [ "main" ]
-  pull_request:
-    # The branches below must be a subset of the branches above
-    branches: [ "main" ]
-  schedule:
-    - cron: '35 17 * * 3'
+    push:
+        branches: ["main"]
+    pull_request:
+        # The branches below must be a subset of the branches above
+        branches: ["main"]
+    schedule:
+        - cron: "35 17 * * 3"
 jobs:
-  eslint:
-    name: Run eslint scanning
-    runs-on: ubuntu-latest
-    permissions:
-      contents: read
-      security-events: write
-      actions: read # only required for a private repository by github/codeql-action/upload-sarif to get the Action run status
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v3
-      - name: Setup Bun
-        uses: oven-sh/setup-bun@v1
+    eslint:
+        name: Run eslint scanning
+        runs-on: ubuntu-latest
+        permissions:
+            contents: read
+            security-events: write
+            actions: read # only required for a private repository by github/codeql-action/upload-sarif to get the Action run status
+        steps:
+            - name: Checkout code
+              uses: actions/checkout@v3
+            - name: Setup Bun
+              uses: oven-sh/setup-bun@v1
-      - name: Install NPM packages
-        run: |
-          bun install
-
-      - name: Generate Prisma Client
-        run: |
-          bun prisma generate
+            - name: Install NPM packages
+              run: |
+                  bun install
-      - name: Run ESLint
-        run: |
-          bunx eslint . --config .eslintrc.cjs --ext .js,.jsx,.ts,.tsx --format @microsoft/eslint-formatter-sarif --output-file eslint-results.sarif
-        continue-on-error: true
+            - name: Generate Prisma Client
+              run: |
+                  bunx prisma generate
-      - name: Upload analysis results to GitHub
-        uses: github/codeql-action/upload-sarif@v2
-        with:
-          sarif_file: eslint-results.sarif
-          wait-for-processing: true
\ No newline at end of file
+            - name: Run ESLint
+              run: |
+                  bunx eslint . --config .eslintrc.cjs --ext .js,.jsx,.ts,.tsx --format @microsoft/eslint-formatter-sarif --output-file eslint-results.sarif
+              continue-on-error: true
+
+            - name: Upload analysis results to GitHub
+              uses: github/codeql-action/upload-sarif@v2
+              with:
+                  sarif_file: eslint-results.sarif
+                  wait-for-processing: true
diff --git a/Dockerfile b/Dockerfile
index 0bfaaf18..edb38027 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -34,11 +34,11 @@ LABEL org.opencontainers.image.licenses "AGPL-3.0"
 LABEL org.opencontainers.image.title "Lysand Server"
 LABEL org.opencontainers.image.description "Lysand Server docker image"
-# run the app
-USER bun
+# CD to app
+WORKDIR /app
 RUN bunx prisma generate
-# Remove Node
-USER root
-RUN rm /usr/local/bin/node
-USER bun
-ENTRYPOINT [ "bun", "run", "index.ts" ]
+# CD to app
+WORKDIR /app
+ENV NODE_ENV=production
+# Run migrations and start the server
+ENTRYPOINT [ "bun", "migrate", "&&", "bun", "run", "index.ts" ]
diff --git a/README.md b/README.md
index ea144cab..2cbc5774 100644
--- a/README.md
+++ b/README.md
@@ -95,6 +95,58 @@ To run the server, simply run the following command:
 bun start
 ```
+### Using the CLI
+
+Lysand includes a built-in CLI for managing the server. To use it, simply run the following command:
+
+```bash
+bun cli
+```
+
+You can use the `help` command to see a list of available commands. These include creating users, deleting users and more.
+
+### Using Database Commands
+
+The `bun prisma` command allows you to use Prisma commands without needing to add in environment variables for the database config. Just run Prisma commands as you would normally, replacing `bunx prisma` with `bun prisma`.
+
+## With Docker
+
+> **Note**: Docker is currently broken, as Bun with Prisma does not work well with Docker yet for unknown reasons. The following instructions are for when this is fixed.
+>
+> These instructions will probably also work with Podman and other container runtimes.
+
+You can also run Lysand using Docker. To do so, you can:
+
+1. Acquire the Postgres Dockerfile from above
+2. Use this repository's [`docker-compose.yml`](docker-compose.yml) file
+3. Create the `lysand-net` docker network:
+```bash
+docker network create lysand-net
+```
+1. Fill in the config file (see [Installation](#installation))
+2. Run the following command:
+```bash
+docker-compose up -d
+```
+
+You may need root privileges to run Docker commands.
+
+### Running CLI commands inside Docker
+
+You can run CLI commands inside Docker using the following command:
+
+```bash
+sudo docker exec -it lysand bun cli ...
+```
+
+### Running migrations inside Docker
+
+You can run migrations inside Docker using the following command (if needed):
+
+```bash
+sudo docker exec -it lysand bun migrate
+```
+
 ## Contributing
 Contributions are welcome! Please see the [CONTRIBUTING.md](CONTRIBUTING.md) file for more information.
diff --git a/bun.lockb b/bun.lockb
index 19ad00f3..43fc0ec3 100755
Binary files a/bun.lockb and b/bun.lockb differ
diff --git a/database/entities/Relationship.ts b/database/entities/Relationship.ts
index 5f290c5d..d9d8b384 100644
--- a/database/entities/Relationship.ts
+++ b/database/entities/Relationship.ts
@@ -37,6 +37,33 @@ export const createNewRelationship = async (
     });
 };
+export const checkForBidirectionalRelationships = async (
+    user1: User,
+    user2: User,
+    createIfNotExists = true
+): Promise<boolean> => {
+    const relationship1 = await client.relationship.findFirst({
+        where: {
+            ownerId: user1.id,
+            subjectId: user2.id,
+        },
+    });
+
+    const relationship2 = await client.relationship.findFirst({
+        where: {
+            ownerId: user2.id,
+            subjectId: user1.id,
+        },
+    });
+
+    if (!relationship1 && !relationship2 && createIfNotExists) {
+        await createNewRelationship(user1, user2);
+        await createNewRelationship(user2, user1);
+    }
+
+    return !!relationship1 && !!relationship2;
+};
+
 /**
  * Converts the relationship to an API-friendly format.
  * @returns The API-friendly relationship.
diff --git a/database/entities/Status.ts b/database/entities/Status.ts
index 7731c7f3..4b1cd2a8 100644
--- a/database/entities/Status.ts
+++ b/database/entities/Status.ts
@@ -22,6 +22,8 @@ import {
 import { emojiToAPI, emojiToLysand, parseEmojis } from "./Emoji";
 import type { APIStatus } from "~types/entities/status";
 import { applicationToAPI } from "./Application";
+import { attachmentToAPI } from "./Attachment";
+import type { APIAttachment } from "~types/entities/attachment";
 const config = getConfig();
@@ -53,9 +55,12 @@ export const statusAndUserRelations: Prisma.StatusInclude = {
             },
         },
     },
+    reblogs: true,
     attachments: true,
     instance: true,
-    mentions: true,
+    mentions: {
+        include: userRelations,
+    },
     pinnedBy: true,
     _count: {
         select: {
@@ -77,7 +82,9 @@
             },
         },
             instance: true,
-            mentions: true,
+            mentions: {
+                include: userRelations,
+            },
             pinnedBy: true,
             _count: {
                 select: {
@@ -307,12 +314,9 @@ export const createNewStatus = async (data: {
     };
     quote?: Status;
 }) => {
-    // Get people mentioned in the content
-    const mentionedPeople = [...data.content.matchAll(/@([a-zA-Z0-9_]+)/g)].map(
-        match => {
-            return `${config.http.base_url}/users/${match[1]}`;
-        }
-    );
+    // Get people mentioned in the content (match @username or @username@domain.com mentions)
+    const mentionedPeople =
+        data.content.match(/@[a-zA-Z0-9_]+(@[a-zA-Z0-9_]+)?/g) ?? [];
     let mentions = data.mentions || [];
@@ -437,8 +441,12 @@ export const statusToAPI = async (
         ),
         // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
         favourites_count: (status.likes ?? []).length,
-        media_attachments: [],
-        mentions: [],
+        // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
+        media_attachments: (status.attachments ?? []).map(
+            a => attachmentToAPI(a) as APIAttachment
+        ),
+        // @ts-expect-error Prisma TypeScript types dont include relations
+        mentions: status.mentions.map(mention => userToAPI(mention)),
         language: null,
         muted: user
            ? user.relationships.find(r => r.subjectId == status.authorId)
@@ -456,11 +464,7 @@
                 reblogId: status.id,
             },
         })),
-        reblogs_count: await client.status.count({
-            where: {
-                reblogId: status.id,
-            },
-        }),
+        reblogs_count: status._count.reblogs,
         replies_count: status._count.replies,
         sensitive: status.sensitive,
         spoiler_text: status.spoilerText,
diff --git a/docker-compose.yml b/docker-compose.yml
index a03827a5..f8f28d75 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,12 +1,40 @@
-version: '3'
+---
+# Run `docker network create lysand-net` before running docker-compose up
+version: "3"
 services:
-  lysand:
-    build:
-      context: .
-      dockerfile: Dockerfile
-    ports:
-      - 8080:8080
-    container_name: lysand
-    volumes:
-      - ./logs:/app/logs
-      - ./config:/app/config
+    lysand:
+        image: ghcr.io/lysand-org/lysand:main
+        volumes:
+            #- ./logs:/app/logs
+            - ./config:/app/config
+            - ./.env:/app/.env
+        restart: unless-stopped
+        container_name: lysand
+        networks:
+            - lysand-net
+    db:
+        build:
+            context: .
+            dockerfile: Postgres.Dockerfile
+        container_name: lysand-db
+        restart: unless-stopped
+        environment:
+            POSTGRES_DB: lysand
+            POSTGRES_USER: lysand
+            POSTGRES_PASSWORD: lysand
+        networks:
+            - lysand-net
+        volumes:
+            - ./db-data:/var/lib/postgresql/data
+    redis:
+        image: "redis:latest"
+        container_name: lysand-redis
+        volumes:
+            - ./redis-data:/data
+        restart: unless-stopped
+        networks:
+            - lysand-net
+
+networks:
+    lysand-net:
+        external: true
diff --git a/package.json b/package.json
index e521f599..a8cbc3f1 100644
--- a/package.json
+++ b/package.json
@@ -10,9 +10,9 @@
         "url": "https://cpluspatch.com"
     },
     "bugs": {
-        "url": "https://github.com/CPlusPatch/lysand/issues"
+        "url": "https://github.com/lysand-org/lysand/issues"
     },
-    "icon": "https://github.com/CPlusPatch/lysand",
+    "icon": "https://github.com/lysand-org/lysand",
     "license": "AGPL-3.0",
     "keywords": [
         "federated",
@@ -28,16 +28,17 @@
     ],
     "repository": {
         "type": "git",
-        "url": "git+https://github.com/CPlusPatch/lysand.git"
+        "url": "git+https://github.com/lysand-org/lysand.git"
    },
    "private": true,
    "scripts": {
        "dev": "bun run index.ts",
        "start": "bun run index.ts",
-        "migrate-dev": "bunx prisma migrate dev",
-        "migrate": "bunx prisma migrate deploy",
+        "migrate-dev": "bun prisma migrate dev",
+        "migrate": "bun prisma migrate deploy",
        "lint": "eslint --config .eslintrc.cjs --ext .ts .",
-        "generate": "bunx prisma generate",
+        "prisma": "bun run prisma.ts",
+        "generate": "bun prisma generate",
        "cli": "bun run cli.ts"
    },
    "trustedDependencies": [
        "@prisma/client"
    ],
    "devDependencies": {
-        "@julr/unocss-preset-forms": "^0.0.5",
+        "@julr/unocss-preset-forms": "^0.1.0",
        "@microsoft/eslint-formatter-sarif": "^3.0.0",
-        "@types/html-to-text": "^9.0.3",
-        "@types/jsonld": "^1.5.9",
-        "@typescript-eslint/eslint-plugin": "^6.6.0",
-        "@typescript-eslint/parser": "^6.6.0",
-        "@unocss/cli": "^0.55.7",
+        "@types/html-to-text": "^9.0.4",
+        "@types/jsonld": "^1.5.13",
+        "@typescript-eslint/eslint-plugin": "^6.13.1",
+        "@typescript-eslint/parser": "^6.13.1",
+        "@unocss/cli": "^0.57.7",
        "activitypub-types": "^1.0.3",
        "bun-types": "latest",
-        "eslint": "^8.49.0",
+        "eslint": "^8.54.0",
        "eslint-config-prettier": "^9.0.0",
-        "eslint-formatter-pretty": "^5.0.0",
+        "eslint-formatter-pretty": "^6.0.0",
        "eslint-formatter-summary": "^1.1.0",
-        "eslint-plugin-prettier": "^5.0.0",
-        "prettier": "^3.0.3",
-        "typescript": "^5.2.2",
-        "unocss": "^0.55.7"
+        "eslint-plugin-prettier": "^5.0.1",
+        "prettier": "^3.1.0",
+        "typescript": "^5.3.2",
+        "unocss": "^0.57.7"
    },
    "peerDependencies": {
-        "typescript": "^5.0.0"
+        "typescript": "^5.3.2"
    },
    "dependencies": {
-        "@aws-sdk/client-s3": "^3.429.0",
+        "@aws-sdk/client-s3": "^3.461.0",
        "@prisma/client": "^5.6.0",
        "blurhash": "^2.0.5",
-        "bullmq": "^4.14.2",
+        "bullmq": "^4.14.4",
        "chalk": "^5.3.0",
        "eventemitter3": "^5.0.1",
        "html-to-text": "^9.0.5",
        "ip-matching": "^2.1.2",
        "iso-639-1": "^3.1.0",
-        "isomorphic-dompurify": "^1.9.0",
+        "isomorphic-dompurify": "^1.10.0",
        "jsonld": "^8.3.1",
        "marked": "^9.1.2",
-        "prisma": "latest",
+        "prisma": "^5.6.0",
        "semver": "^7.5.4",
        "sharp": "^0.33.0-rc.2"
    }
diff --git a/prisma.ts b/prisma.ts
new file mode 100644
index 00000000..82d8c1d5
--- /dev/null
+++ b/prisma.ts
@@ -0,0 +1,17 @@
+// Proxies all `bunx prisma` commands with an environment variable
+
+import { getConfig } from "@config";
+
+const args = process.argv.slice(2);
+const config = getConfig();
+
+const { stdout } = Bun.spawn(["bunx", "prisma", ...args], {
+    env: {
+        ...process.env,
+        DATABASE_URL: `postgresql://${config.database.username}:${config.database.password}@${config.database.host}:${config.database.port}/${config.database.database}`,
+    },
+});
+
+// Show stdout
+const text = await new Response(stdout).text();
+console.log(text);
diff --git a/server/api/api/v1/accounts/search/index.ts b/server/api/api/v1/accounts/search/index.ts
new file mode 100644
index 00000000..5ec36528
--- /dev/null
+++ b/server/api/api/v1/accounts/search/index.ts
@@ -0,0 +1,72 @@
+import { errorResponse, jsonResponse } from "@response";
+import {
+    getFromRequest,
+    userRelations,
+    userToAPI,
+} from "~database/entities/User";
+import { applyConfig } from "@api";
+import { parseRequest } from "@request";
+import { client } from "~database/datasource";
+
+export const meta = applyConfig({
+    allowedMethods: ["GET"],
+    route: "/api/v1/accounts/search",
+    ratelimits: {
+        max: 100,
+        duration: 60,
+    },
+    auth: {
+        required: true,
+    },
+});
+
+export default async (req: Request): Promise<Response> => {
+    // TODO: Add checks for disabled or not email verified accounts
+
+    const { user } = await getFromRequest(req);
+
+    if (!user) return errorResponse("Unauthorized", 401);
+
+    const {
+        following = false,
+        limit = 40,
+        offset,
+        q,
+    } = await parseRequest<{
+        q?: string;
+        limit?: number;
+        offset?: number;
+        resolve?: boolean;
+        following?: boolean;
+    }>(req);
+
+    if (limit < 1 || limit > 80) {
+        return errorResponse("Limit must be between 1 and 80", 400);
+    }
+
+    // TODO: Add WebFinger resolve
+
+    const accounts = await client.user.findMany({
+        where: {
+            displayName: {
+                contains: q,
+            },
+            username: {
+                contains: q,
+            },
+            relationshipSubjects: following
+                ? {
+                      some: {
+                          ownerId: user.id,
+                          following,
+                      },
+                  }
+                : undefined,
+        },
+        take: Number(limit),
+        skip: Number(offset || 0),
+        include: userRelations,
+    });
+
+    return jsonResponse(accounts.map(acct => userToAPI(acct)));
+};
diff --git a/server/api/api/v1/follow_requests/[account_id]/authorize.ts b/server/api/api/v1/follow_requests/[account_id]/authorize.ts
new file mode 100644
index 00000000..074aae92
--- /dev/null
+++ b/server/api/api/v1/follow_requests/[account_id]/authorize.ts
@@ -0,0 +1,79 @@
+import { errorResponse, jsonResponse } from "@response";
+import { getFromRequest, userRelations } from "~database/entities/User";
+import { applyConfig } from "@api";
+import { client } from "~database/datasource";
+import type { MatchedRoute } from "bun";
+import {
+    checkForBidirectionalRelationships,
+    relationshipToAPI,
+} from "~database/entities/Relationship";
+
+export const meta = applyConfig({
+    allowedMethods: ["POST"],
+    route: "/api/v1/follow_requests/:account_id/authorize",
+    ratelimits: {
+        max: 100,
+        duration: 60,
+    },
+    auth: {
+        required: true,
+    },
+});
+
+export default async (
+    req: Request,
+    matchedRoute: MatchedRoute
+): Promise<Response> => {
+    const { user } = await getFromRequest(req);
+
+    if (!user) return errorResponse("Unauthorized", 401);
+
+    const { account_id } = matchedRoute.params;
+
+    const account = await client.user.findUnique({
+        where: {
+            id: account_id,
+        },
+        include: userRelations,
+    });
+
+    if (!account) return errorResponse("Account not found", 404);
+
+    // Check if there is a relationship on both sides
+    await checkForBidirectionalRelationships(user, account);
+
+    // Authorize follow request
+    await client.relationship.updateMany({
+        where: {
+            subjectId: user.id,
+            ownerId: account.id,
+            requested: true,
+        },
+        data: {
+            requested: false,
+            following: true,
+        },
+    });
+
+    // Update followedBy for other user
+    await client.relationship.updateMany({
+        where: {
+            subjectId: account.id,
+            ownerId: user.id,
+        },
+        data: {
+            followedBy: true,
+        },
+    });
+
+    const relationship = await client.relationship.findFirst({
+        where: {
+            subjectId: account.id,
+            ownerId: user.id,
+        },
+    });
+
+    if (!relationship) return errorResponse("Relationship not found", 404);
+
+    return jsonResponse(relationshipToAPI(relationship));
+};
diff --git a/server/api/api/v1/follow_requests/[account_id]/reject.ts b/server/api/api/v1/follow_requests/[account_id]/reject.ts
new file mode 100644
index 00000000..6a667e79
--- /dev/null
+++ b/server/api/api/v1/follow_requests/[account_id]/reject.ts
@@ -0,0 +1,67 @@
+import { errorResponse, jsonResponse } from "@response";
+import { getFromRequest, userRelations } from "~database/entities/User";
+import { applyConfig } from "@api";
+import { client } from "~database/datasource";
+import type { MatchedRoute } from "bun";
+import {
+    checkForBidirectionalRelationships,
+    relationshipToAPI,
+} from "~database/entities/Relationship";
+
+export const meta = applyConfig({
+    allowedMethods: ["POST"],
+    route: "/api/v1/follow_requests/:account_id/reject",
+    ratelimits: {
+        max: 100,
+        duration: 60,
+    },
+    auth: {
+        required: true,
+    },
+});
+
+export default async (
+    req: Request,
+    matchedRoute: MatchedRoute
+): Promise<Response> => {
+    const { user } = await getFromRequest(req);
+
+    if (!user) return errorResponse("Unauthorized", 401);
+
+    const { account_id } = matchedRoute.params;
+
+    const account = await client.user.findUnique({
+        where: {
+            id: account_id,
+        },
+        include: userRelations,
+    });
+
+    if (!account) return errorResponse("Account not found", 404);
+
+    // Check if there is a relationship on both sides
+    await checkForBidirectionalRelationships(user, account);
+
+    // Reject follow request
+    await client.relationship.updateMany({
+        where: {
+            subjectId: user.id,
+            ownerId: account.id,
+            requested: true,
+        },
+        data: {
+            requested: false,
+        },
+    });
+
+    const relationship = await client.relationship.findFirst({
+        where: {
+            subjectId: account.id,
+            ownerId: user.id,
+        },
+    });
+
+    if (!relationship) return errorResponse("Relationship not found", 404);
+
+    return jsonResponse(relationshipToAPI(relationship));
+};
diff --git a/server/api/api/v1/follow_requests/index.ts b/server/api/api/v1/follow_requests/index.ts
new file mode 100644
index 00000000..d465c61d
--- /dev/null
+++ b/server/api/api/v1/follow_requests/index.ts
@@ -0,0 +1,82 @@
+import { errorResponse, jsonResponse } from "@response";
+import {
+    getFromRequest,
+    userRelations,
+    userToAPI,
+} from "~database/entities/User";
+import { applyConfig } from "@api";
+import { client } from "~database/datasource";
+import { parseRequest } from "@request";
+
+export const meta = applyConfig({
+    allowedMethods: ["GET"],
+    route: "/api/v1/follow_requests",
+    ratelimits: {
+        max: 100,
+        duration: 60,
+    },
+    auth: {
+        required: true,
+    },
+});
+
+export default async (req: Request): Promise<Response> => {
+    const { user } = await getFromRequest(req);
+
+    const {
+        limit = 20,
+        max_id,
+        min_id,
+        since_id,
+    } = await parseRequest<{
+        max_id?: string;
+        since_id?: string;
+        min_id?: string;
+        limit?: number;
+    }>(req);
+
+    if (limit < 1 || limit > 40) {
+        return errorResponse("Limit must be between 1 and 40", 400);
+    }
+
+    if (!user) return errorResponse("Unauthorized", 401);
+
+    const objects = await client.user.findMany({
+        where: {
+            id: {
+                lt: max_id ?? undefined,
+                gte: since_id ?? undefined,
+                gt: min_id ?? undefined,
+            },
+            relationships: {
+                some: {
+                    subjectId: user.id,
+                    requested: true,
+                },
+            },
+        },
+        include: userRelations,
+        take: limit,
+        orderBy: {
+            id: "desc",
+        },
+    });
+
+    // Construct HTTP Link header (next and prev)
+    const linkHeader = [];
+    if (objects.length > 0) {
+        const urlWithoutQuery = req.url.split("?")[0];
+        linkHeader.push(
+            `<${urlWithoutQuery}?max_id=${objects.at(-1)?.id}>; rel="next"`,
+            `<${urlWithoutQuery}?min_id=${objects[0].id}>; rel="prev"`
+        );
+    }
+
+    return jsonResponse(
+        objects.map(user => userToAPI(user)),
+        200,
+        {
+            Link: linkHeader.join(", "),
+        }
+    );
+};
diff --git a/server/api/api/v1/media/index.ts b/server/api/api/v1/media/index.ts
new file mode 100644
index 00000000..da3019ef
--- /dev/null
+++ b/server/api/api/v1/media/index.ts
@@ -0,0 +1,123 @@
+import { applyConfig } from "@api";
+import { errorResponse, jsonResponse } from "@response";
+import { client } from "~database/datasource";
+import { encode } from "blurhash";
+import { getFromRequest } from "~database/entities/User";
+import type { APIRouteMeta } from "~types/api";
+import sharp from "sharp";
+import { uploadFile } from "~classes/media";
+import { getConfig } from "@config";
+import { attachmentToAPI, getUrl } from "~database/entities/Attachment";
+
+export const meta: APIRouteMeta = applyConfig({
+    allowedMethods: ["POST"],
+    ratelimits: {
+        max: 10,
+        duration: 60,
+    },
+    route: "/api/v1/media",
+    auth: {
+        required: true,
+        oauthPermissions: ["write:media"],
+    },
+});
+
+/**
+ * Upload new media
+ */
+export default async (req: Request): Promise<Response> => {
+    const { user } = await getFromRequest(req);
+
+    if (!user) {
+        return errorResponse("Unauthorized", 401);
+    }
+
+    const form = await req.formData();
+
+    const file = form.get("file") as unknown as File | undefined;
+    const thumbnail = form.get("thumbnail");
+    const description = form.get("description") as string | undefined;
+
+    // Floating point numbers from -1.0 to 1.0, comma delimited
+    // const focus = form.get("focus");
+
+    if (!file) {
+        return errorResponse("No file provided", 400);
+    }
+
+    const config = getConfig();
+
+    if (file.size > config.validation.max_media_size) {
+        return errorResponse(
+            `File too large, max size is ${config.validation.max_media_size} bytes`,
+            413
+        );
+    }
+
+    if (
+        config.validation.enforce_mime_types &&
+        !config.validation.allowed_mime_types.includes(file.type)
+    ) {
+        return errorResponse("Invalid file type", 415);
+    }
+
+    if (
+        description &&
+        description.length > config.validation.max_media_description_size
+    ) {
+        return errorResponse(
+            `Description too long, max length is ${config.validation.max_media_description_size} characters`,
+            413
+        );
+    }
+
+    const sha256 = new Bun.SHA256();
+
+    const isImage = file.type.startsWith("image/");
+
+    const metadata = isImage
+        ? await sharp(await file.arrayBuffer()).metadata()
+        : null;
+
+    const blurhash = isImage
+        ? encode(
+              new Uint8ClampedArray(await file.arrayBuffer()),
+              metadata?.width ?? 0,
+              metadata?.height ?? 0,
+              4,
+              4
+          )
+        : null;
+
+    let url = "";
+
+    const hash = await uploadFile(file, config);
+
+    url = hash ? getUrl(hash, config) : "";
+
+    let thumbnailUrl = "";
+
+    if (thumbnail) {
+        const hash = await uploadFile(thumbnail as unknown as File, config);
+
+        thumbnailUrl = hash ? getUrl(hash, config) : "";
+    }
+
+    const newAttachment = await client.attachment.create({
+        data: {
+            url,
+            thumbnail_url: thumbnailUrl,
+            sha256: sha256.update(await file.arrayBuffer()).digest("hex"),
+            mime_type: file.type,
+            description: description ?? "",
+            size: file.size,
+            blurhash: blurhash ?? undefined,
+            width: metadata?.width ?? undefined,
+            height: metadata?.height ?? undefined,
+        },
+    });
+
+    // TODO: Add job to process videos and other media
+
+    return jsonResponse(attachmentToAPI(newAttachment));
+};
diff --git a/server/api/api/v1/statuses/[id]/reblog.ts b/server/api/api/v1/statuses/[id]/reblog.ts
index 32096db3..dc67ee8d 100644
--- a/server/api/api/v1/statuses/[id]/reblog.ts
+++ b/server/api/api/v1/statuses/[id]/reblog.ts
@@ -10,7 +10,10 @@ import {
     statusAndUserRelations,
     statusToAPI,
 } from "~database/entities/Status";
-import { getFromRequest } from "~database/entities/User";
+import {
+    getFromRequest,
+    type UserWithRelations,
+} from "~database/entities/User";
 import type { APIRouteMeta } from "~types/api";
 export const meta: APIRouteMeta = applyConfig({
@@ -84,10 +87,15 @@ export default async (
     });
     // Create notification for reblog if reblogged user is on the same instance
-    if (status.reblog?.author.instanceId === user.instanceId) {
+    if (
+        // @ts-expect-error Prisma relations not showing in types
+        (status.reblog?.author as UserWithRelations).instanceId ===
+            user.instanceId
+    ) {
         await client.notification.create({
             data: {
                 accountId: user.id,
+                // @ts-expect-error Prisma relations not showing in types
                 notifiedId: status.reblog.authorId,
                 type: "reblog",
                 statusId: status.reblogId,
diff --git a/server/api/api/v1/statuses/index.ts b/server/api/api/v1/statuses/index.ts
index c5946f82..86275366 100644
--- a/server/api/api/v1/statuses/index.ts
+++ b/server/api/api/v1/statuses/index.ts
@@ -192,6 +192,7 @@ export default async (
             return errorResponse("Reply status not found", 404);
         }
+        // @ts-expect-error Prisma Typescript doesn't include relations
         replyUser = replyStatus.author;
     }
diff --git a/server/api/api/v1/timelines/home.ts b/server/api/api/v1/timelines/home.ts
index ea54886c..cb1374c4 100644
--- a/server/api/api/v1/timelines/home.ts
+++ b/server/api/api/v1/timelines/home.ts
@@ -50,21 +50,33 @@ export default async (req: Request): Promise<Response> => {
                 gte: since_id ?? undefined,
                 gt: min_id ?? undefined,
             },
-            author: {
-                OR: [
-                    {
-                        relationships: {
-                            some: {
-                                subjectId: user.id,
-                                following: true,
+            OR: [
+                {
+                    author: {
+                        OR: [
+                            {
+                                relationshipSubjects: {
+                                    some: {
+                                        ownerId: user.id,
+                                        following: true,
+                                    },
+                                },
                             },
-                        },
+                            {
+                                id: user.id,
+                            },
+                        ],
                     },
-                    {
-                        id: user.id,
+                },
+                {
+                    // Include posts where the user is mentioned in addition to posts by followed users
+                    mentions: {
+                        some: {
+                            id: user.id,
+                        },
-                    ],
-                },
+                    },
+                ],
             },
         include: statusAndUserRelations,
         take: limit,