Compare commits

...

70 Commits

Author SHA1 Message Date
Claire
fc4e2eca9f Bump version to v4.1.12 2024-01-24 15:31:06 +01:00
Claire
2e8943aecd Add rate-limit of TOTP authentication attempts at controller level (#28801) 2024-01-24 15:31:06 +01:00
Claire
e6072a8d13 Fix error when processing remote files with unusually long names (#28823) 2024-01-24 15:31:06 +01:00
Claire
460e4fbdd6 Fix processing of compacted single-item JSON-LD collections (#28816) 2024-01-24 15:31:06 +01:00
Jonathan de Jong
de60322711 Retry 401 errors on replies fetching (#28788)
Co-authored-by: Claire <claire.github-309c@sitedethib.com>
2024-01-24 15:31:06 +01:00
Jeong Arm
90bb870680 Ignore RecordNotUnique errors in LinkCrawlWorker (#28748) 2024-01-24 15:31:06 +01:00
Claire
9292d998fe Fix Mastodon not correctly processing HTTP Signatures with query strings (#28476) 2024-01-24 15:31:06 +01:00
Claire
92643f48de Convert signature verification specs to request specs (#28443) 2024-01-24 15:31:06 +01:00
Claire
458620bdd4 Fix potential redirection loop of streaming endpoint (#28665) 2024-01-24 15:31:06 +01:00
Claire
a1a71263e0 Fix streaming API redirection ignoring the port of streaming_api_base_url (#28558) 2024-01-24 15:31:06 +01:00
MitarashiDango
4c5575e8e0 Fix Undo Announce activity is not sent, when not followed by the reblogged post author (#18482)
Co-authored-by: Claire <claire.github-309c@sitedethib.com>
2024-01-24 15:31:06 +01:00
Claire
a2ddd849e2 Fix LinkCrawlWorker error when encountering empty OEmbed response (#28268) 2024-01-24 15:31:06 +01:00
Claire
2e4d43933d Fix SQL query in /api/v1/directory (#28412) 2023-12-18 11:03:20 +01:00
Claire
363bedd050 Bump version to v4.1.11 2023-12-04 15:28:02 +01:00
Claire
cc94c70970 Clamp dates when serializing to Elasticsearch API (#28081) 2023-12-04 15:28:02 +01:00
Claire
613d00706c Change GIF max matrix size error to explicitly mention GIF files (#27927) 2023-12-04 15:28:02 +01:00
Jonathan de Jong
8bbe2b970f Have Follow activities bypass availability (#27586)
Co-authored-by: Claire <claire.github-309c@sitedethib.com>
2023-12-04 15:28:02 +01:00
Claire
803e15a3cf Fix incoming status creation date not being restricted to standard ISO8601 (#27655) 2023-12-04 15:28:02 +01:00
Claire
1d835c9423 Fix posts from force-sensitized accounts being able to trend (#27620) 2023-12-04 15:28:02 +01:00
Claire
ab68df9af0 Fix hashtag matching pattern matching some URLs (#27584) 2023-12-04 15:28:02 +01:00
Claire
a89a25714d Fix some link anchors being recognized as hashtags (#27271) 2023-12-04 15:28:02 +01:00
Claire
1210524a3d Fix processing LDSigned activities from actors with unknown public keys (#27474) 2023-12-04 15:28:02 +01:00
Claire
ff3a9dad0d Fix error and incorrect URLs in /api/v1/accounts/:id/featured_tags for remote accounts (#27459) 2023-12-04 15:28:02 +01:00
Claire
3ef0a19bac Fix report processing notice not mentioning the report number when performing a custom action (#27442) 2023-12-04 15:28:02 +01:00
Claire
78e457614c Change Content-Security-Policy to be tighter on media paths (#26889) 2023-12-04 15:28:02 +01:00
Claire
1e896e99d2 Update dependencies (#27354) 2023-10-10 15:32:42 +02:00
Claire
df60d04dc1 Bump version to v4.1.10 2023-10-10 13:51:56 +02:00
Matt Jankowski
335982325e Dont match mention in url query string (#25656)
Co-authored-by: Claire <claire.github-309c@sitedethib.com>
2023-10-10 13:51:56 +02:00
Claire
15c5727f71 Add a short-lived lock to trend refresh scheduler (#27253) 2023-10-10 13:51:56 +02:00
David Aaron
f8154cf732 Change min age of backup policy from 1 week to 6 days (#27200) 2023-10-10 13:51:56 +02:00
Jakob Gillich
45669ac5e6 Fix importer returning negative row estimates (#27258) 2023-10-10 13:51:56 +02:00
Claire
8d73fbee87 Change some worker lock TTLs (#27246) 2023-10-10 13:51:56 +02:00
Claire
f1d3eda159 Fix filtering audit log for entries about disabling 2FA (#27186) 2023-10-10 13:51:56 +02:00
Essem
c97fbabb61 Properly remove tIME chunk from PNG uploads (#27111) 2023-10-10 13:51:56 +02:00
Claire
f2fff6be66 Fix crash when filtering for “dormant” relationships (#27306) 2023-10-10 13:51:56 +02:00
Claire
b40c42fd1e Fix inefficient queries in “Follows and followers” as well as several admin pages (#27116) 2023-10-10 13:51:56 +02:00
Claire
9950e59578 Disable setting the latest tag for 4.1 docker builds (#27023) 2023-09-21 18:14:24 +02:00
Claire
e4c0aaf626 Bump version to v4.1.9 (#26997) 2023-09-20 17:25:05 +02:00
Claire
5d93c5f019 Fix post translation erroring out (v4.1.x) (#26990) 2023-09-20 15:59:57 +02:00
Claire
af0ee12908 Disable ruby linting for 4.1.x branch (#26993) 2023-09-20 12:54:08 +02:00
Claire
46bd58f74d Bump version to v4.1.8 2023-09-19 17:01:44 +02:00
Claire
d6c0ae995c Fix post edits not being forwarded as expected (#26936) 2023-09-19 17:01:44 +02:00
Claire
5fd89e53d2 Fix moderator rights inconsistencies (#26729) 2023-09-19 17:01:44 +02:00
Claire
5caade9fb0 Fix crash when encountering invalid URL (#26814) 2023-09-19 17:01:44 +02:00
Claire
34959eccd2 Fix cached posts including stale stats (#26409) 2023-09-19 17:01:44 +02:00
Nicolai Søborg
21bf42bca1 Fix frame_rate for videos where ffprobe reports 0/0 (#26500) 2023-09-19 17:01:44 +02:00
yufushiro
7802837885 Fix unexpected audio stream transcoding when uploaded video is eligible to passthrough (#26608)
Co-authored-by: Claire <claire.github-309c@sitedethib.com>
2023-09-19 17:01:44 +02:00
Claire
48ee3ae13d Merge pull request from GHSA-v3xf-c9qf-j667 2023-09-19 16:53:58 +02:00
Claire
5f9511c389 Merge pull request from GHSA-2693-xr3m-jhqr 2023-09-19 16:53:21 +02:00
Claire
38a5d92f38 Change Dockerfile to upgrade packages when building (#26929)
Co-authored-by: Renaud Chaput <renchap@gmail.com>
2023-09-18 08:32:04 +02:00
Claire
7f7e068975 Update actions for stable-4.1 (#26815)
Co-authored-by: Renaud Chaput <renchap@gmail.com>
2023-09-06 12:19:02 +02:00
Claire
5f88a2d70b Bump version to v4.1.7 2023-09-05 19:16:09 +02:00
Emelia Smith
cf80d54cba Allow reports with long comments from remote instances, but truncate (#25028) 2023-09-05 19:16:09 +02:00
Daniel M Brasil
ea7fa048f3 Fix /api/v1/timelines/tag/:hashtag allowing for unauthenticated access when public preview is disabled (#26237) 2023-09-05 19:16:09 +02:00
Claire
6339806f05 Fix blocking subdomains of an already-blocked domain (#26392) 2023-09-05 19:16:09 +02:00
Claire
86afbf25d0 Change text extraction in PlainTextFormatter to be faster (#26727) 2023-09-05 19:16:09 +02:00
Claire
1ad64b5557 Backport container build changes to the stable-4.1 branch (#26738)
Co-authored-by: Renaud Chaput <renchap@gmail.com>
2023-08-31 19:54:10 +02:00
Claire
ac7d40b561 Bump version to v4.1.6 2023-07-31 14:33:06 +02:00
Renaud Chaput
2fc6117d1b Fix missing return values in streaming (#26233) 2023-07-31 14:33:06 +02:00
Emelia Smith
2eb1a5b7b6 Fix: Streaming server memory leak in HTTP EventSource cleanup (#26228) 2023-07-31 14:33:06 +02:00
Claire
6c321bb5e1 Fix incorrect connect timeout in outgoing requests (#26116) 2023-07-31 14:33:06 +02:00
Emelia Smith
da230600ac Refactor streaming's filtering logic & improve documentation (#26213) 2023-07-31 14:33:06 +02:00
Claire
1792be342a Fix wrong filters sometimes applying in streaming (#26159) 2023-07-31 14:33:06 +02:00
Claire
ebf4f034c2 Bump version to v4.1.5 2023-07-21 16:07:43 +02:00
Claire
889102013f Fix CSP headers being unintendedly wide (#26105) 2023-07-21 16:07:43 +02:00
Claire
d94a2c8aca Change request timeout handling to use a longer deadline (#26055) 2023-07-21 16:07:43 +02:00
Claire
efd066670d Fix moderation interface for remote instances with a .zip TLD (#25885) 2023-07-21 16:07:43 +02:00
Claire
13ec425b72 Fix remote accounts being possibly persisted to database with incomplete protocol values (#25886) 2023-07-21 16:07:43 +02:00
Michael Stanclift
7a99f0744d Fix trending publishers table not rendering correctly on narrow screens (#25945) 2023-07-21 16:07:43 +02:00
Claire
69c8f26946 Add check preventing Sidekiq workers from running with Makara configured (#25850)
Co-authored-by: Eugen Rochko <eugen@zeonfederated.com>
2023-07-21 14:18:04 +02:00
115 changed files with 1784 additions and 1015 deletions

View File

@@ -1,225 +0,0 @@
version: 2.1
orbs:
ruby: circleci/ruby@2.0.0
node: circleci/node@5.0.3
executors:
default:
parameters:
ruby-version:
type: string
docker:
- image: cimg/ruby:<< parameters.ruby-version >>
environment:
BUNDLE_JOBS: 3
BUNDLE_RETRY: 3
CONTINUOUS_INTEGRATION: true
DB_HOST: localhost
DB_USER: root
DISABLE_SIMPLECOV: true
RAILS_ENV: test
- image: cimg/postgres:14.5
environment:
POSTGRES_USER: root
POSTGRES_HOST_AUTH_METHOD: trust
- image: cimg/redis:7.0
commands:
install-system-dependencies:
steps:
- run:
name: Install system dependencies
command: |
sudo apt-get update
sudo apt-get install -y libicu-dev libidn11-dev
install-ruby-dependencies:
parameters:
ruby-version:
type: string
steps:
- run:
command: |
bundle config clean 'true'
bundle config frozen 'true'
bundle config without 'development production'
name: Set bundler settings
- ruby/install-deps:
bundler-version: '2.3.26'
key: ruby<< parameters.ruby-version >>-gems-v1
wait-db:
steps:
- run:
command: dockerize -wait tcp://localhost:5432 -wait tcp://localhost:6379 -timeout 1m
name: Wait for PostgreSQL and Redis
jobs:
build:
docker:
- image: cimg/ruby:3.0-node
environment:
RAILS_ENV: test
steps:
- checkout
- install-system-dependencies
- install-ruby-dependencies:
ruby-version: '3.0'
- node/install-packages:
cache-version: v1
pkg-manager: yarn
- run:
command: |
export NODE_OPTIONS=--openssl-legacy-provider
./bin/rails assets:precompile
name: Precompile assets
- persist_to_workspace:
paths:
- public/assets
- public/packs-test
root: .
test:
parameters:
ruby-version:
type: string
executor:
name: default
ruby-version: << parameters.ruby-version >>
environment:
ALLOW_NOPAM: true
PAM_ENABLED: true
PAM_DEFAULT_SERVICE: pam_test
PAM_CONTROLLED_SERVICE: pam_test_controlled
parallelism: 4
steps:
- checkout
- install-system-dependencies
- run:
command: sudo apt-get install -y ffmpeg imagemagick libpam-dev
name: Install additional system dependencies
- run:
command: bundle config with 'pam_authentication'
name: Enable PAM authentication
- install-ruby-dependencies:
ruby-version: << parameters.ruby-version >>
- attach_workspace:
at: .
- wait-db
- run:
command: ./bin/rails db:create db:schema:load db:seed
name: Load database schema
- ruby/rspec-test
test-migrations:
executor:
name: default
ruby-version: '3.0'
steps:
- checkout
- install-system-dependencies
- install-ruby-dependencies:
ruby-version: '3.0'
- wait-db
- run:
command: ./bin/rails db:create
name: Create database
- run:
command: ./bin/rails db:migrate VERSION=20171010025614
name: Run migrations up to v2.0.0
- run:
command: ./bin/rails tests:migrations:populate_v2
name: Populate database with test data
- run:
command: ./bin/rails db:migrate VERSION=20180514140000
name: Run migrations up to v2.4.0
- run:
command: ./bin/rails tests:migrations:populate_v2_4
name: Populate database with test data
- run:
command: ./bin/rails db:migrate VERSION=20180707154237
name: Run migrations up to v2.4.3
- run:
command: ./bin/rails tests:migrations:populate_v2_4_3
name: Populate database with test data
- run:
command: ./bin/rails db:migrate
name: Run all remaining migrations
- run:
command: ./bin/rails tests:migrations:check_database
name: Check migration result
test-two-step-migrations:
executor:
name: default
ruby-version: '3.0'
steps:
- checkout
- install-system-dependencies
- install-ruby-dependencies:
ruby-version: '3.0'
- wait-db
- run:
command: ./bin/rails db:create
name: Create database
- run:
command: ./bin/rails db:migrate VERSION=20171010025614
name: Run migrations up to v2.0.0
- run:
command: ./bin/rails tests:migrations:populate_v2
name: Populate database with test data
- run:
command: ./bin/rails db:migrate VERSION=20180514140000
name: Run pre-deployment migrations up to v2.4.0
environment:
SKIP_POST_DEPLOYMENT_MIGRATIONS: true
- run:
command: ./bin/rails tests:migrations:populate_v2_4
name: Populate database with test data
- run:
command: ./bin/rails db:migrate VERSION=20180707154237
name: Run migrations up to v2.4.3
environment:
SKIP_POST_DEPLOYMENT_MIGRATIONS: true
- run:
command: ./bin/rails tests:migrations:populate_v2_4_3
name: Populate database with test data
- run:
command: ./bin/rails db:migrate
name: Run all remaining pre-deployment migrations
environment:
SKIP_POST_DEPLOYMENT_MIGRATIONS: true
- run:
command: ./bin/rails db:migrate
name: Run all post-deployment migrations
- run:
command: ./bin/rails tests:migrations:check_database
name: Check migration result
workflows:
version: 2
build-and-test:
jobs:
- build
- test:
matrix:
parameters:
ruby-version:
- '2.7'
- '3.0'
name: test-ruby<< matrix.ruby-version >>
requires:
- build
- test-migrations:
requires:
- build
- test-two-step-migrations:
requires:
- build
- node/run:
cache-version: v1
name: test-webui
pkg-manager: yarn
requires:
- build
version: '16.18'
yarn-run: test:jest

View File

@@ -0,0 +1,92 @@
on:
  workflow_call:
    inputs:
      platforms:
        required: true
        type: string
      cache:
        type: boolean
        default: true
      use_native_arm64_builder:
        type: boolean
      push_to_images:
        type: string
      flavor:
        type: string
      tags:
        type: string
      labels:
        type: string

jobs:
  build-image:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3

      - uses: docker/setup-qemu-action@v2
        if: contains(inputs.platforms, 'linux/arm64') && !inputs.use_native_arm64_builder

      - uses: docker/setup-buildx-action@v2
        id: buildx
        if: ${{ !(inputs.use_native_arm64_builder && contains(inputs.platforms, 'linux/arm64')) }}

      - name: Start a local Docker Builder
        if: inputs.use_native_arm64_builder && contains(inputs.platforms, 'linux/arm64')
        run: |
          docker run --rm -d --name buildkitd -p 1234:1234 --privileged moby/buildkit:latest --addr tcp://0.0.0.0:1234

      - uses: docker/setup-buildx-action@v2
        id: buildx-native
        if: inputs.use_native_arm64_builder && contains(inputs.platforms, 'linux/arm64')
        with:
          driver: remote
          endpoint: tcp://localhost:1234
          platforms: linux/amd64
          append: |
            - endpoint: tcp://${{ vars.DOCKER_BUILDER_HETZNER_ARM64_01_HOST }}:13865
              platforms: linux/arm64
              name: mastodon-docker-builder-arm64-01
              driver-opts:
                - servername=mastodon-docker-builder-arm64-01
        env:
          BUILDER_NODE_1_AUTH_TLS_CACERT: ${{ secrets.DOCKER_BUILDER_HETZNER_ARM64_01_CACERT }}
          BUILDER_NODE_1_AUTH_TLS_CERT: ${{ secrets.DOCKER_BUILDER_HETZNER_ARM64_01_CERT }}
          BUILDER_NODE_1_AUTH_TLS_KEY: ${{ secrets.DOCKER_BUILDER_HETZNER_ARM64_01_KEY }}

      - name: Log in to Docker Hub
        if: contains(inputs.push_to_images, 'tootsuite')
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Log in to the Github Container registry
        if: contains(inputs.push_to_images, 'ghcr.io')
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - uses: docker/metadata-action@v4
        id: meta
        if: ${{ inputs.push_to_images != '' }}
        with:
          images: ${{ inputs.push_to_images }}
          flavor: ${{ inputs.flavor }}
          tags: ${{ inputs.tags }}
          labels: ${{ inputs.labels }}

      - uses: docker/build-push-action@v4
        with:
          context: .
          platforms: ${{ inputs.platforms }}
          provenance: false
          builder: ${{ steps.buildx.outputs.name || steps.buildx-native.outputs.name }}
          push: ${{ inputs.push_to_images != '' }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: ${{ inputs.cache && 'type=gha' || '' }}
          cache-to: ${{ inputs.cache && 'type=gha,mode=max' || '' }}

View File

@@ -1,70 +0,0 @@
name: Build container image
on:
workflow_dispatch:
push:
branches:
- 'main'
tags:
- '*'
pull_request:
paths:
- .github/workflows/build-image.yml
- Dockerfile
permissions:
contents: read
packages: write
jobs:
build-image:
runs-on: ubuntu-latest
concurrency:
group: ${{ github.ref }}
cancel-in-progress: true
steps:
- uses: actions/checkout@v3
- uses: hadolint/hadolint-action@v3.1.0
- uses: docker/setup-qemu-action@v2
- uses: docker/setup-buildx-action@v2
- name: Log in to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
if: github.repository == 'mastodon/mastodon' && github.event_name != 'pull_request'
- name: Log in to the Github Container registry
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
if: github.repository == 'mastodon/mastodon' && github.event_name != 'pull_request'
- uses: docker/metadata-action@v4
id: meta
with:
images: |
tootsuite/mastodon
ghcr.io/mastodon/mastodon
flavor: |
latest=auto
tags: |
type=edge,branch=main
type=pep440,pattern={{raw}}
type=pep440,pattern=v{{major}}.{{minor}}
type=ref,event=pr
- uses: docker/build-push-action@v4
with:
context: .
platforms: linux/amd64,linux/arm64
provenance: false
builder: ${{ steps.buildx.outputs.name }}
push: ${{ github.repository == 'mastodon/mastodon' && github.event_name != 'pull_request' }}
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
cache-from: type=gha
cache-to: type=gha,mode=max

.github/workflows/build-releases.yml (new file)
View File

@@ -0,0 +1,27 @@
name: Build container release images
on:
  push:
    tags:
      - '*'

permissions:
  contents: read
  packages: write

jobs:
  build-image:
    uses: ./.github/workflows/build-container-image.yml
    with:
      platforms: linux/amd64,linux/arm64
      use_native_arm64_builder: true
      push_to_images: |
        tootsuite/mastodon
        ghcr.io/mastodon/mastodon
      # Do not use cache when building releases, so apt update is always ran and the release always contain the latest packages
      cache: false
      flavor: |
        latest=false
      tags: |
        type=pep440,pattern={{raw}}
        type=pep440,pattern=v{{major}}.{{minor}}
    secrets: inherit

View File

@@ -1,41 +0,0 @@
name: Ruby Linting
on:
push:
branches-ignore:
- 'dependabot/**'
paths:
- 'Gemfile*'
- '.rubocop.yml'
- '**/*.rb'
- '**/*.rake'
- '.github/workflows/lint-ruby.yml'
pull_request:
paths:
- 'Gemfile*'
- '.rubocop.yml'
- '**/*.rb'
- '**/*.rake'
- '.github/workflows/lint-ruby.yml'
jobs:
lint:
runs-on: ubuntu-latest
steps:
- name: Checkout Code
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Set-up RuboCop Problem Matcher
uses: r7kamura/rubocop-problem-matchers-action@v1
- name: Run rubocop
uses: github/super-linter@v4
env:
DEFAULT_BRANCH: main
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
LINTER_RULES_PATH: .
RUBY_CONFIG_FILE: .rubocop.yml
VALIDATE_ALL_CODEBASE: false
VALIDATE_RUBY: true

.github/workflows/test-image-build.yml (new file)
View File

@@ -0,0 +1,15 @@
name: Test container image build
on:
  pull_request:

permissions:
  contents: read

jobs:
  build-image:
    concurrency:
      group: ${{ github.workflow }}-${{ github.ref }}
      cancel-in-progress: true
    uses: ./.github/workflows/build-container-image.yml
    with:
      platforms: linux/amd64 # Testing only on native platform so it is performant

View File

@@ -3,6 +3,119 @@ Changelog
All notable changes to this project will be documented in this file.
## [4.1.12] - 2024-01-24
### Fixed
- Fix error when processing remote files with unusually long names ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/28823))
- Fix processing of compacted single-item JSON-LD collections ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/28816))
- Retry 401 errors on replies fetching ([ShadowJonathan](https://github.com/mastodon/mastodon/pull/28788))
- Fix `RecordNotUnique` errors in LinkCrawlWorker ([tribela](https://github.com/mastodon/mastodon/pull/28748))
- Fix Mastodon not correctly processing HTTP Signatures with query strings ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/28443), [ClearlyClaire](https://github.com/mastodon/mastodon/pull/28476))
- Fix potential redirection loop of streaming endpoint ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/28665))
- Fix streaming API redirection ignoring the port of `streaming_api_base_url` ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/28558))
- Fix `Undo Announce` activity not being sent to non-follower authors ([MitarashiDango](https://github.com/mastodon/mastodon/pull/18482))
- Fix `LinkCrawlWorker` error when encountering empty OEmbed response ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/28268))
### Security
- Add rate-limit of TOTP authentication attempts at controller level ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/28801))
## [4.1.11] - 2023-12-04
### Changed
- Change GIF max matrix size error to explicitly mention GIF files ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/27927))
- Change `Follow` activities delivery to bypass availability check ([ShadowJonathan](https://github.com/mastodon/mastodon/pull/27586))
- Change Content-Security-Policy to be tighter on media paths ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26889))
### Fixed
- Fix incoming status creation date not being restricted to standard ISO8601 ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/27655), [ClearlyClaire](https://github.com/mastodon/mastodon/pull/28081))
- Fix posts from force-sensitized accounts being able to trend ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/27620))
- Fix processing LDSigned activities from actors with unknown public keys ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/27474))
- Fix error and incorrect URLs in `/api/v1/accounts/:id/featured_tags` for remote accounts ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/27459))
- Fix report processing notice not mentioning the report number when performing a custom action ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/27442))
- Fix some link anchors being recognized as hashtags ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/27271), [ClearlyClaire](https://github.com/mastodon/mastodon/pull/27584))
## [4.1.10] - 2023-10-10
### Changed
- Change some worker lock TTLs to be shorter-lived ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/27246))
- Change user archive export allowed period from 7 days to 6 days ([suddjian](https://github.com/mastodon/mastodon/pull/27200))
### Fixed
- Fix mentions being matched in some URL query strings ([mjankowski](https://github.com/mastodon/mastodon/pull/25656))
- Fix multiple instances of the trend refresh scheduler sometimes running at once ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/27253))
- Fix importer returning negative row estimates ([jgillich](https://github.com/mastodon/mastodon/pull/27258))
- Fix filtering audit log for entries about disabling 2FA ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/27186))
- Fix tIME chunk not being properly removed from PNG uploads ([TheEssem](https://github.com/mastodon/mastodon/pull/27111))
- Fix inefficient queries in “Follows and followers” as well as several admin pages ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/27116), [ClearlyClaire](https://github.com/mastodon/mastodon/pull/27306))
## [4.1.9] - 2023-09-20
### Fixed
- Fix post translation erroring out ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26990))
## [4.1.8] - 2023-09-19
### Fixed
- Fix post edits not being forwarded as expected ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26936))
- Fix moderator rights inconsistencies ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26729))
- Fix crash when encountering invalid URL ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26814))
- Fix cached posts including stale stats ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26409))
- Fix uploading of video files for which `ffprobe` reports `0/0` average framerate ([NicolaiSoeborg](https://github.com/mastodon/mastodon/pull/26500))
- Fix unexpected audio stream transcoding when uploaded video is eligible to passthrough ([yufushiro](https://github.com/mastodon/mastodon/pull/26608))
### Security
- Fix missing HTML sanitization in translation API (CVE-2023-42452)
- Fix incorrect domain name normalization (CVE-2023-42451)
## [4.1.7] - 2023-09-05
### Changed
- Change remote report processing to accept reports with long comments, but truncate them ([ThisIsMissEm](https://github.com/mastodon/mastodon/pull/25028))
### Fixed
- **Fix blocking subdomains of an already-blocked domain** ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26392))
- Fix `/api/v1/timelines/tag/:hashtag` allowing for unauthenticated access when public preview is disabled ([danielmbrasil](https://github.com/mastodon/mastodon/pull/26237))
- Fix inefficiencies in `PlainTextFormatter` ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26727))
## [4.1.6] - 2023-07-31
### Fixed
- Fix memory leak in streaming server ([ThisIsMissEm](https://github.com/mastodon/mastodon/pull/26228))
- Fix wrong filters sometimes applying in streaming ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26159), [ThisIsMissEm](https://github.com/mastodon/mastodon/pull/26213), [renchap](https://github.com/mastodon/mastodon/pull/26233))
- Fix incorrect connect timeout in outgoing requests ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26116))
## [4.1.5] - 2023-07-21
### Added
- Add check preventing Sidekiq workers from running with Makara configured ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/25850))
### Changed
- Change request timeout handling to use a longer deadline ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26055))
### Fixed
- Fix moderation interface for remote instances with a .zip TLD ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/25886))
- Fix remote accounts being possibly persisted to database with incomplete protocol values ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/25886))
- Fix trending publishers table not rendering correctly on narrow screens ([vmstan](https://github.com/mastodon/mastodon/pull/25945))
### Security
- Fix CSP headers being unintentionally wide ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26105))
## [4.1.4] - 2023-07-07
### Fixed

View File

@@ -17,6 +17,7 @@ COPY Gemfile* package.json yarn.lock /opt/mastodon/
# hadolint ignore=DL3008
RUN apt-get update && \
    apt-get -yq dist-upgrade && \
    apt-get install -y --no-install-recommends build-essential \
        ca-certificates \
        git \

View File

@@ -10,40 +10,40 @@ GIT
GEM GEM
remote: https://rubygems.org/ remote: https://rubygems.org/
specs: specs:
actioncable (6.1.7.4) actioncable (6.1.7.6)
actionpack (= 6.1.7.4) actionpack (= 6.1.7.6)
activesupport (= 6.1.7.4) activesupport (= 6.1.7.6)
nio4r (~> 2.0) nio4r (~> 2.0)
websocket-driver (>= 0.6.1) websocket-driver (>= 0.6.1)
actionmailbox (6.1.7.4) actionmailbox (6.1.7.6)
actionpack (= 6.1.7.4) actionpack (= 6.1.7.6)
activejob (= 6.1.7.4) activejob (= 6.1.7.6)
activerecord (= 6.1.7.4) activerecord (= 6.1.7.6)
activestorage (= 6.1.7.4) activestorage (= 6.1.7.6)
activesupport (= 6.1.7.4) activesupport (= 6.1.7.6)
mail (>= 2.7.1) mail (>= 2.7.1)
actionmailer (6.1.7.4) actionmailer (6.1.7.6)
actionpack (= 6.1.7.4) actionpack (= 6.1.7.6)
actionview (= 6.1.7.4) actionview (= 6.1.7.6)
activejob (= 6.1.7.4) activejob (= 6.1.7.6)
activesupport (= 6.1.7.4) activesupport (= 6.1.7.6)
mail (~> 2.5, >= 2.5.4) mail (~> 2.5, >= 2.5.4)
rails-dom-testing (~> 2.0) rails-dom-testing (~> 2.0)
actionpack (6.1.7.4) actionpack (6.1.7.6)
actionview (= 6.1.7.4) actionview (= 6.1.7.6)
activesupport (= 6.1.7.4) activesupport (= 6.1.7.6)
rack (~> 2.0, >= 2.0.9) rack (~> 2.0, >= 2.0.9)
rack-test (>= 0.6.3) rack-test (>= 0.6.3)
rails-dom-testing (~> 2.0) rails-dom-testing (~> 2.0)
rails-html-sanitizer (~> 1.0, >= 1.2.0) rails-html-sanitizer (~> 1.0, >= 1.2.0)
actiontext (6.1.7.4) actiontext (6.1.7.6)
actionpack (= 6.1.7.4) actionpack (= 6.1.7.6)
activerecord (= 6.1.7.4) activerecord (= 6.1.7.6)
activestorage (= 6.1.7.4) activestorage (= 6.1.7.6)
activesupport (= 6.1.7.4) activesupport (= 6.1.7.6)
nokogiri (>= 1.8.5) nokogiri (>= 1.8.5)
actionview (6.1.7.4) actionview (6.1.7.6)
activesupport (= 6.1.7.4) activesupport (= 6.1.7.6)
builder (~> 3.1) builder (~> 3.1)
erubi (~> 1.4) erubi (~> 1.4)
rails-dom-testing (~> 2.0) rails-dom-testing (~> 2.0)
@@ -54,22 +54,22 @@ GEM
case_transform (>= 0.2) case_transform (>= 0.2)
jsonapi-renderer (>= 0.1.1.beta1, < 0.3) jsonapi-renderer (>= 0.1.1.beta1, < 0.3)
active_record_query_trace (1.8) active_record_query_trace (1.8)
activejob (6.1.7.4) activejob (6.1.7.6)
activesupport (= 6.1.7.4) activesupport (= 6.1.7.6)
globalid (>= 0.3.6) globalid (>= 0.3.6)
activemodel (6.1.7.4) activemodel (6.1.7.6)
activesupport (= 6.1.7.4) activesupport (= 6.1.7.6)
activerecord (6.1.7.4) activerecord (6.1.7.6)
activemodel (= 6.1.7.4) activemodel (= 6.1.7.6)
activesupport (= 6.1.7.4) activesupport (= 6.1.7.6)
activestorage (6.1.7.4) activestorage (6.1.7.6)
actionpack (= 6.1.7.4) actionpack (= 6.1.7.6)
activejob (= 6.1.7.4) activejob (= 6.1.7.6)
activerecord (= 6.1.7.4) activerecord (= 6.1.7.6)
activesupport (= 6.1.7.4) activesupport (= 6.1.7.6)
marcel (~> 1.0) marcel (~> 1.0)
mini_mime (>= 1.1.0) mini_mime (>= 1.1.0)
activesupport (6.1.7.4) activesupport (6.1.7.6)
concurrent-ruby (~> 1.0, >= 1.0.2) concurrent-ruby (~> 1.0, >= 1.0.2)
i18n (>= 1.6, < 2) i18n (>= 1.6, < 2)
minitest (>= 5.1) minitest (>= 5.1)
@@ -404,13 +404,13 @@ GEM
mime-types (3.4.1) mime-types (3.4.1)
mime-types-data (~> 3.2015) mime-types-data (~> 3.2015)
mime-types-data (3.2022.0105) mime-types-data (3.2022.0105)
mini_mime (1.1.2) mini_mime (1.1.5)
mini_portile2 (2.8.2) mini_portile2 (2.8.4)
minitest (5.17.0) minitest (5.17.0)
msgpack (1.6.0) msgpack (1.6.0)
multi_json (1.15.0) multi_json (1.15.0)
multipart-post (2.1.1) multipart-post (2.1.1)
net-imap (0.3.6) net-imap (0.3.7)
date date
net-protocol net-protocol
net-ldap (0.17.1) net-ldap (0.17.1)
@@ -491,13 +491,13 @@ GEM
pry-rails (0.3.9) pry-rails (0.3.9)
pry (>= 0.10.4) pry (>= 0.10.4)
public_suffix (5.0.1) public_suffix (5.0.1)
puma (5.6.5) puma (5.6.7)
nio4r (~> 2.0) nio4r (~> 2.0)
pundit (2.3.0) pundit (2.3.0)
activesupport (>= 3.0.0) activesupport (>= 3.0.0)
raabro (1.4.0) raabro (1.4.0)
racc (1.6.2) racc (1.6.2)
rack (2.2.7) rack (2.2.8)
rack-attack (6.6.1) rack-attack (6.6.1)
rack (>= 1.0, < 3) rack (>= 1.0, < 3)
rack-cors (1.1.1) rack-cors (1.1.1)
@@ -512,20 +512,20 @@ GEM
rack rack
rack-test (2.0.2) rack-test (2.0.2)
rack (>= 1.3) rack (>= 1.3)
rails (6.1.7.4) rails (6.1.7.6)
actioncable (= 6.1.7.4) actioncable (= 6.1.7.6)
actionmailbox (= 6.1.7.4) actionmailbox (= 6.1.7.6)
actionmailer (= 6.1.7.4) actionmailer (= 6.1.7.6)
actionpack (= 6.1.7.4) actionpack (= 6.1.7.6)
actiontext (= 6.1.7.4) actiontext (= 6.1.7.6)
actionview (= 6.1.7.4) actionview (= 6.1.7.6)
activejob (= 6.1.7.4) activejob (= 6.1.7.6)
activemodel (= 6.1.7.4) activemodel (= 6.1.7.6)
activerecord (= 6.1.7.4) activerecord (= 6.1.7.6)
activestorage (= 6.1.7.4) activestorage (= 6.1.7.6)
activesupport (= 6.1.7.4) activesupport (= 6.1.7.6)
bundler (>= 1.15.0) bundler (>= 1.15.0)
railties (= 6.1.7.4) railties (= 6.1.7.6)
sprockets-rails (>= 2.0.0) sprockets-rails (>= 2.0.0)
rails-controller-testing (1.0.5) rails-controller-testing (1.0.5)
actionpack (>= 5.0.1.rc1) actionpack (>= 5.0.1.rc1)
@@ -541,9 +541,9 @@ GEM
railties (>= 6.0.0, < 7) railties (>= 6.0.0, < 7)
rails-settings-cached (0.6.6) rails-settings-cached (0.6.6)
rails (>= 4.2.0) rails (>= 4.2.0)
railties (6.1.7.4) railties (6.1.7.6)
actionpack (= 6.1.7.4) actionpack (= 6.1.7.6)
activesupport (= 6.1.7.4) activesupport (= 6.1.7.6)
method_source method_source
rake (>= 12.2) rake (>= 12.2)
thor (~> 1.0) thor (~> 1.0)
@@ -634,7 +634,7 @@ GEM
activerecord (>= 4.0.0) activerecord (>= 4.0.0)
railties (>= 4.0.0) railties (>= 4.0.0)
semantic_range (3.0.0) semantic_range (3.0.0)
sidekiq (6.5.8) sidekiq (6.5.11)
connection_pool (>= 2.2.5, < 3) connection_pool (>= 2.2.5, < 3)
rack (~> 2.0) rack (~> 2.0)
redis (>= 4.5.0, < 5) redis (>= 4.5.0, < 5)
@@ -746,14 +746,14 @@ GEM
rack-proxy (>= 0.6.1) rack-proxy (>= 0.6.1)
railties (>= 5.2) railties (>= 5.2)
semantic_range (>= 2.3.0) semantic_range (>= 2.3.0)
websocket-driver (0.7.5) websocket-driver (0.7.6)
websocket-extensions (>= 0.1.0) websocket-extensions (>= 0.1.0)
websocket-extensions (0.1.5) websocket-extensions (0.1.5)
wisper (2.0.1) wisper (2.0.1)
xorcist (1.1.3) xorcist (1.1.3)
xpath (3.2.0) xpath (3.2.0)
nokogiri (~> 1.8) nokogiri (~> 1.8)
zeitwerk (2.6.8) zeitwerk (2.6.12)
PLATFORMS PLATFORMS
ruby ruby

View File

@@ -11,7 +11,9 @@ A "vulnerability in Mastodon" is a vulnerability in the code distributed through
## Supported Versions

| Version | Supported |
-| ------- | ----------|
-| 4.0.x   | Yes       |
-| 3.5.x   | Yes       |
+| ------- | ---------------- |
+| 4.2.x   | Yes              |
+| 4.1.x   | Yes              |
+| 4.0.x   | No               |
+| 3.5.x   | Until 2023-12-31 |
| < 3.5   | No               |

View File

@@ -1,6 +1,8 @@
# frozen_string_literal: true

class AccountsIndex < Chewy::Index
  include DatetimeClampingConcern

  settings index: { refresh_interval: '30s' }, analysis: {
    analyzer: {
      content: {
@@ -38,6 +40,6 @@ class AccountsIndex < Chewy::Index
    field :following_count, type: 'long', value: ->(account) { account.following_count }
    field :followers_count, type: 'long', value: ->(account) { account.followers_count }
-    field :last_status_at, type: 'date', value: ->(account) { account.last_status_at || account.created_at }
+    field :last_status_at, type: 'date', value: ->(account) { clamp_date(account.last_status_at || account.created_at) }
  end
end

View File

@@ -0,0 +1,14 @@
# frozen_string_literal: true

module DatetimeClampingConcern
  extend ActiveSupport::Concern

  MIN_ISO8601_DATETIME = '0000-01-01T00:00:00Z'.to_datetime.freeze
  MAX_ISO8601_DATETIME = '9999-12-31T23:59:59Z'.to_datetime.freeze

  class_methods do
    def clamp_date(datetime)
      datetime.clamp(MIN_ISO8601_DATETIME, MAX_ISO8601_DATETIME)
    end
  end
end
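For context, the clamping above simply pins any timestamp into the year range that Elasticsearch's default `date` mapping accepts, so a bogus remote timestamp can no longer break indexing. A minimal standalone sketch of the same idea, using only Ruby's `date` standard library (no Rails/ActiveSupport):

```ruby
require 'date'

# Bounds mirroring MIN_ISO8601_DATETIME / MAX_ISO8601_DATETIME above.
MIN = DateTime.new(0, 1, 1)
MAX = DateTime.new(9999, 12, 31, 23, 59, 59)

# A far-future date from a misbehaving remote server is pulled back into range
# instead of being handed to Elasticsearch as-is.
bogus = DateTime.new(10_000, 1, 1)
puts bogus.clamp(MIN, MAX) # => 9999-12-31T23:59:59+00:00
```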

View File

@@ -1,6 +1,8 @@
# frozen_string_literal: true

class TagsIndex < Chewy::Index
  include DatetimeClampingConcern

  settings index: { refresh_interval: '30s' }, analysis: {
    analyzer: {
      content: {
@@ -36,6 +38,6 @@ class TagsIndex < Chewy::Index
    field :reviewed, type: 'boolean', value: ->(tag) { tag.reviewed? }
    field :usage, type: 'long', value: ->(tag, crutches) { tag.history.aggregate(crutches.time_period).accounts }
-    field :last_status_at, type: 'date', value: ->(tag) { tag.last_status_at || tag.created_at }
+    field :last_status_at, type: 'date', value: ->(tag) { clamp_date(tag.last_status_at || tag.created_at) }
  end
end

View File

@@ -21,7 +21,7 @@ module Admin
account_action.save! account_action.save!
if account_action.with_report? if account_action.with_report?
redirect_to admin_reports_path, notice: I18n.t('admin.reports.processed_msg', id: params[:report_id]) redirect_to admin_reports_path, notice: I18n.t('admin.reports.processed_msg', id: resource_params[:report_id])
else else
redirect_to admin_account_path(@account.id) redirect_to admin_account_path(@account.id)
end end

View File

@@ -37,7 +37,7 @@ module Admin
@domain_block.errors.delete(:domain) @domain_block.errors.delete(:domain)
render :new render :new
else else
if existing_domain_block.present? if existing_domain_block.present? && existing_domain_block.domain == TagManager.instance.normalize_domain(@domain_block.domain.strip)
@domain_block = existing_domain_block @domain_block = existing_domain_block
@domain_block.update(resource_params) @domain_block.update(resource_params)
end end

View File

@@ -2,7 +2,7 @@
class Api::V1::StreamingController < Api::BaseController
  def index
-    if Rails.configuration.x.streaming_api_base_url == request.host
+    if same_host?
      not_found
    else
      redirect_to streaming_api_url, status: 301
@@ -11,9 +11,16 @@ class Api::V1::StreamingController < Api::BaseController
  private

  def same_host?
    base_url = Addressable::URI.parse(Rails.configuration.x.streaming_api_base_url)
    request.host == base_url.host && request.port == (base_url.port || 80)
  end

  def streaming_api_url
    Addressable::URI.parse(request.url).tap do |uri|
-      uri.host = Addressable::URI.parse(Rails.configuration.x.streaming_api_base_url).host
+      base_url = Addressable::URI.parse(Rails.configuration.x.streaming_api_base_url)
+      uri.host = base_url.host
+      uri.port = base_url.port
    end.to_s
  end
end
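As a side note on the redirect fix above: `Addressable::URI#port` returns `nil` when the configured `streaming_api_base_url` carries no explicit port, which is why `same_host?` falls back to 80 and why the redirect now copies the port across. A small illustration (the example hostname is made up):

```ruby
require 'addressable/uri'

base = Addressable::URI.parse('https://streaming.example.com:4000')
base.host # => "streaming.example.com"
base.port # => 4000

# With no explicit port in the URL, #port is nil, hence the `|| 80` fallback.
Addressable::URI.parse('https://streaming.example.com').port # => nil
```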

View File

@@ -1,6 +1,7 @@
# frozen_string_literal: true # frozen_string_literal: true
class Api::V1::Timelines::TagController < Api::BaseController class Api::V1::Timelines::TagController < Api::BaseController
before_action -> { doorkeeper_authorize! :read, :'read:statuses' }, only: :show, if: :require_auth?
before_action :load_tag before_action :load_tag
after_action :insert_pagination_headers, unless: -> { @statuses.empty? } after_action :insert_pagination_headers, unless: -> { @statuses.empty? }
@@ -11,6 +12,10 @@ class Api::V1::Timelines::TagController < Api::BaseController
private private
def require_auth?
!Setting.timeline_preview
end
def load_tag def load_tag
@tag = Tag.find_normalized(params[:id]) @tag = Tag.find_normalized(params[:id])
end end

View File

@@ -1,6 +1,10 @@
# frozen_string_literal: true

class Auth::SessionsController < Devise::SessionsController
  include Redisable

  MAX_2FA_ATTEMPTS_PER_HOUR = 10

  layout 'auth'

  skip_before_action :require_no_authentication, only: [:create]
@@ -136,9 +140,23 @@ class Auth::SessionsController < Devise::SessionsController
    session.delete(:attempt_user_updated_at)
  end

  def clear_2fa_attempt_from_user(user)
    redis.del(second_factor_attempts_key(user))
  end

  def check_second_factor_rate_limits(user)
    attempts, = redis.multi do |multi|
      multi.incr(second_factor_attempts_key(user))
      multi.expire(second_factor_attempts_key(user), 1.hour)
    end

    attempts >= MAX_2FA_ATTEMPTS_PER_HOUR
  end

  def on_authentication_success(user, security_measure)
    @on_authentication_success_called = true

    clear_2fa_attempt_from_user(user)
    clear_attempt_from_session

    user.update_sign_in!(new_sign_in: true)
@@ -170,4 +188,8 @@ class Auth::SessionsController < Devise::SessionsController
      user_agent: request.user_agent
    )
  end

  def second_factor_attempts_key(user)
    "2fa_auth_attempts:#{user.id}:#{Time.now.utc.hour}"
  end
end
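The rate limit above is bucketed per user and per clock hour: each OTP submission increments a short-lived Redis counter, and once it reaches `MAX_2FA_ATTEMPTS_PER_HOUR` further attempts are refused until the bucket rolls over or expires. A sketch of the key layout (the user id is made up):

```ruby
MAX_2FA_ATTEMPTS_PER_HOUR = 10

user_id = 42
key = "2fa_auth_attempts:#{user_id}:#{Time.now.utc.hour}"
# => e.g. "2fa_auth_attempts:42:15" for any attempt between 15:00 and 15:59 UTC;
#    the key is INCRed on each attempt and expires after an hour.
```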

View File

@@ -91,14 +91,23 @@ module SignatureVerification
raise SignatureVerificationError, "Public key not found for key #{signature_params['keyId']}" if actor.nil? raise SignatureVerificationError, "Public key not found for key #{signature_params['keyId']}" if actor.nil?
signature = Base64.decode64(signature_params['signature']) signature = Base64.decode64(signature_params['signature'])
compare_signed_string = build_signed_string compare_signed_string = build_signed_string(include_query_string: true)
return actor unless verify_signature(actor, signature, compare_signed_string).nil? return actor unless verify_signature(actor, signature, compare_signed_string).nil?
# Compatibility quirk with older Mastodon versions
compare_signed_string = build_signed_string(include_query_string: false)
return actor unless verify_signature(actor, signature, compare_signed_string).nil?
actor = stoplight_wrap_request { actor_refresh_key!(actor) } actor = stoplight_wrap_request { actor_refresh_key!(actor) }
raise SignatureVerificationError, "Could not refresh public key #{signature_params['keyId']}" if actor.nil? raise SignatureVerificationError, "Could not refresh public key #{signature_params['keyId']}" if actor.nil?
compare_signed_string = build_signed_string(include_query_string: true)
return actor unless verify_signature(actor, signature, compare_signed_string).nil?
# Compatibility quirk with older Mastodon versions
compare_signed_string = build_signed_string(include_query_string: false)
return actor unless verify_signature(actor, signature, compare_signed_string).nil? return actor unless verify_signature(actor, signature, compare_signed_string).nil?
fail_with! "Verification failed for #{actor.to_log_human_identifier} #{actor.uri} using rsa-sha256 (RSASSA-PKCS1-v1_5 with SHA-256)", signed_string: compare_signed_string, signature: signature_params['signature'] fail_with! "Verification failed for #{actor.to_log_human_identifier} #{actor.uri} using rsa-sha256 (RSASSA-PKCS1-v1_5 with SHA-256)", signed_string: compare_signed_string, signature: signature_params['signature']
@@ -177,16 +186,24 @@ module SignatureVerification
nil nil
end end
def build_signed_string def build_signed_string(include_query_string: true)
signed_headers.map do |signed_header| signed_headers.map do |signed_header|
if signed_header == Request::REQUEST_TARGET case signed_header
when Request::REQUEST_TARGET
if include_query_string
"#{Request::REQUEST_TARGET}: #{request.method.downcase} #{request.original_fullpath}"
else
# Current versions of Mastodon incorrectly omit the query string from the (request-target) pseudo-header.
# Therefore, temporarily support such incorrect signatures for compatibility.
# TODO: remove eventually some time after release of the fixed version
"#{Request::REQUEST_TARGET}: #{request.method.downcase} #{request.path}" "#{Request::REQUEST_TARGET}: #{request.method.downcase} #{request.path}"
elsif signed_header == '(created)' end
when '(created)'
raise SignatureVerificationError, 'Invalid pseudo-header (created) for rsa-sha256' unless signature_algorithm == 'hs2019' raise SignatureVerificationError, 'Invalid pseudo-header (created) for rsa-sha256' unless signature_algorithm == 'hs2019'
raise SignatureVerificationError, 'Pseudo-header (created) used but corresponding argument missing' if signature_params['created'].blank? raise SignatureVerificationError, 'Pseudo-header (created) used but corresponding argument missing' if signature_params['created'].blank?
"(created): #{signature_params['created']}" "(created): #{signature_params['created']}"
elsif signed_header == '(expires)' when '(expires)'
raise SignatureVerificationError, 'Invalid pseudo-header (expires) for rsa-sha256' unless signature_algorithm == 'hs2019' raise SignatureVerificationError, 'Invalid pseudo-header (expires) for rsa-sha256' unless signature_algorithm == 'hs2019'
raise SignatureVerificationError, 'Pseudo-header (expires) used but corresponding argument missing' if signature_params['expires'].blank? raise SignatureVerificationError, 'Pseudo-header (expires) used but corresponding argument missing' if signature_params['expires'].blank?
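Concretely, the two signed-string variants tried above differ only in the `(request-target)` pseudo-header; a hypothetical GET to `/inbox?page=true` is checked against both forms, so signatures from older Mastodon versions that drop the query string keep validating:

```ruby
# include_query_string: true (the spec-compliant form)
"(request-target): get /inbox?page=true"

# include_query_string: false (compatibility with signatures that omit the query string)
"(request-target): get /inbox"
```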

View File

@@ -65,6 +65,11 @@ module TwoFactorAuthenticationConcern
end end
def authenticate_with_two_factor_via_otp(user) def authenticate_with_two_factor_via_otp(user)
if check_second_factor_rate_limits(user)
flash.now[:alert] = I18n.t('users.rate_limited')
return prompt_for_two_factor(user)
end
if valid_otp_attempt?(user) if valid_otp_attempt?(user)
on_authentication_success(user, :otp) on_authentication_success(user, :otp)
else else

View File

@@ -157,7 +157,7 @@ module JsonLdHelper
end end
end end
def fetch_resource(uri, id, on_behalf_of = nil) def fetch_resource(uri, id, on_behalf_of = nil, request_options: {})
unless id unless id
json = fetch_resource_without_id_validation(uri, on_behalf_of) json = fetch_resource_without_id_validation(uri, on_behalf_of)
@@ -166,14 +166,14 @@ module JsonLdHelper
uri = json['id'] uri = json['id']
end end
json = fetch_resource_without_id_validation(uri, on_behalf_of) json = fetch_resource_without_id_validation(uri, on_behalf_of, request_options: request_options)
json.present? && json['id'] == uri ? json : nil json.present? && json['id'] == uri ? json : nil
end end
def fetch_resource_without_id_validation(uri, on_behalf_of = nil, raise_on_temporary_error = false) def fetch_resource_without_id_validation(uri, on_behalf_of = nil, raise_on_temporary_error = false, request_options: {})
on_behalf_of ||= Account.representative on_behalf_of ||= Account.representative
build_request(uri, on_behalf_of).perform do |response| build_request(uri, on_behalf_of, options: request_options).perform do |response|
raise Mastodon::UnexpectedResponseError, response unless response_successful?(response) || response_error_unsalvageable?(response) || !raise_on_temporary_error raise Mastodon::UnexpectedResponseError, response unless response_successful?(response) || response_error_unsalvageable?(response) || !raise_on_temporary_error
body_to_json(response.body_with_limit) if response.code == 200 body_to_json(response.body_with_limit) if response.code == 200
@@ -206,8 +206,8 @@ module JsonLdHelper
response.code == 501 || ((400...500).cover?(response.code) && ![401, 408, 429].include?(response.code)) response.code == 501 || ((400...500).cover?(response.code) && ![401, 408, 429].include?(response.code))
end end
def build_request(uri, on_behalf_of = nil) def build_request(uri, on_behalf_of = nil, options: {})
Request.new(:get, uri).tap do |request| Request.new(:get, uri, **options).tap do |request|
request.on_behalf_of(on_behalf_of) if on_behalf_of request.on_behalf_of(on_behalf_of) if on_behalf_of
request.add_headers('Accept' => 'application/activity+json, application/ld+json') request.add_headers('Accept' => 'application/activity+json, application/ld+json')
end end

View File

@@ -16,7 +16,7 @@ class ActivityPub::Activity::Flag < ActivityPub::Activity
@account, @account,
target_account, target_account,
status_ids: target_statuses.nil? ? [] : target_statuses.map(&:id), status_ids: target_statuses.nil? ? [] : target_statuses.map(&:id),
comment: @json['content'] || '', comment: report_comment,
uri: report_uri uri: report_uri
) )
end end
@@ -35,4 +35,8 @@ class ActivityPub::Activity::Flag < ActivityPub::Activity
def report_uri def report_uri
@json['id'] unless @json['id'].nil? || invalid_origin?(@json['id']) @json['id'] unless @json['id'].nil? || invalid_origin?(@json['id'])
end end
def report_comment
(@json['content'] || '')[0...5000]
end
end end

View File

@@ -28,6 +28,6 @@ class ActivityPub::Activity::Update < ActivityPub::Activity
return if @status.nil? return if @status.nil?
ActivityPub::ProcessStatusUpdateService.new.call(@status, @object, request_id: @options[:request_id]) ActivityPub::ProcessStatusUpdateService.new.call(@status, @json, @object, request_id: @options[:request_id])
end end
end end

View File

@@ -19,7 +19,7 @@ class ActivityPub::LinkedDataSignature
return unless type == 'RsaSignature2017' return unless type == 'RsaSignature2017'
creator = ActivityPub::TagManager.instance.uri_to_actor(creator_uri) creator = ActivityPub::TagManager.instance.uri_to_actor(creator_uri)
creator ||= ActivityPub::FetchRemoteKeyService.new.call(creator_uri, id: false) creator = ActivityPub::FetchRemoteKeyService.new.call(creator_uri, id: false) if creator&.public_key.blank?
return if creator.nil? return if creator.nil?
@@ -27,9 +27,9 @@ class ActivityPub::LinkedDataSignature
document_hash = hash(@json.without('signature')) document_hash = hash(@json.without('signature'))
to_be_verified = options_hash + document_hash to_be_verified = options_hash + document_hash
if creator.keypair.public_key.verify(OpenSSL::Digest.new('SHA256'), Base64.decode64(signature), to_be_verified) creator if creator.keypair.public_key.verify(OpenSSL::Digest.new('SHA256'), Base64.decode64(signature), to_be_verified)
creator rescue OpenSSL::PKey::RSAError
end false
end end
def sign!(creator, sign_with: nil) def sign!(creator, sign_with: nil)

View File

@@ -53,7 +53,8 @@ class ActivityPub::Parser::StatusParser
end end
def created_at def created_at
@object['published']&.to_datetime datetime = @object['published']&.to_datetime
datetime if datetime.present? && (0..9999).cover?(datetime.year)
rescue ArgumentError rescue ArgumentError
nil nil
end end
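In other words, a remote `published` date is now only honoured when its year fits in the four digits ISO 8601 allows; anything outside that range is treated as absent. A quick check of the guard, using the plain `date` library:

```ruby
require 'date'

datetime = DateTime.new(99_999, 12, 31)
(0..9999).cover?(datetime.year) # => false, so such a `published` value is ignored
```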

View File

@@ -27,6 +27,8 @@ class ActivityPub::TagManager
when :note, :comment, :activity when :note, :comment, :activity
return activity_account_status_url(target.account, target) if target.reblog? return activity_account_status_url(target.account, target) if target.reblog?
short_account_status_url(target.account, target) short_account_status_url(target.account, target)
when :flag
target.uri
end end
end end
@@ -41,6 +43,8 @@ class ActivityPub::TagManager
account_status_url(target.account, target) account_status_url(target.account, target)
when :emoji when :emoji
emoji_url(target) emoji_url(target)
when :flag
target.uri
end end
end end

View File

@@ -0,0 +1,9 @@
# frozen_string_literal: true

class Admin::AccountStatusesFilter < AccountStatusesFilter
  private

  def blocked?
    false
  end
end

View File

@@ -34,7 +34,9 @@ class Importer::BaseImporter
# Estimate the amount of documents that would be indexed. Not exact! # Estimate the amount of documents that would be indexed. Not exact!
# @returns [Integer] # @returns [Integer]
def estimate! def estimate!
ActiveRecord::Base.connection_pool.with_connection { |connection| connection.select_one("SELECT reltuples AS estimate FROM pg_class WHERE relname = '#{index.adapter.target.table_name}'")['estimate'].to_i } reltuples = ActiveRecord::Base.connection_pool.with_connection { |connection| connection.select_one("SELECT reltuples FROM pg_class WHERE relname = '#{index.adapter.target.table_name}'")['reltuples'].to_i }
# If the table has never yet been vacuumed or analyzed, reltuples contains -1
[reltuples, 0].max
end end
# Import data from the database into the index # Import data from the database into the index

View File

@@ -1,9 +1,7 @@
# frozen_string_literal: true # frozen_string_literal: true
class PlainTextFormatter class PlainTextFormatter
include ActionView::Helpers::TextHelper NEWLINE_TAGS_RE = %r{(<br />|<br>|</p>)+}
NEWLINE_TAGS_RE = /(<br \/>|<br>|<\/p>)+/.freeze
attr_reader :text, :local attr_reader :text, :local
@@ -18,7 +16,10 @@ class PlainTextFormatter
if local? if local?
text text
else else
html_entities.decode(strip_tags(insert_newlines)).chomp node = Nokogiri::HTML.fragment(insert_newlines)
# Elements that are entirely removed with our Sanitize config
node.xpath('.//iframe|.//math|.//noembed|.//noframes|.//noscript|.//plaintext|.//script|.//style|.//svg|.//xmp').remove
node.text.chomp
end end
end end
@@ -27,8 +28,4 @@ class PlainTextFormatter
def insert_newlines def insert_newlines
text.gsub(NEWLINE_TAGS_RE) { |match| "#{match}\n" } text.gsub(NEWLINE_TAGS_RE) { |match| "#{match}\n" }
end end
def html_entities
HTMLEntities.new
end
end end
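The rewritten formatter leans on Nokogiri instead of `strip_tags`/`HTMLEntities`: elements that Mastodon's sanitizer would drop entirely are removed, then the remaining fragment's text is taken. A rough standalone approximation (the sample HTML is invented):

```ruby
require 'nokogiri'

html = '<p>Hello <a href="https://example.com">world</a></p><script>alert(1)</script>'
# Append a newline after paragraph/line-break tags, as insert_newlines does above.
fragment = Nokogiri::HTML.fragment(html.gsub(%r{(<br />|<br>|</p>)+}) { |match| "#{match}\n" })
fragment.xpath('.//script|.//style').remove
puts fragment.text.chomp # => "Hello world"
```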

View File

@@ -4,14 +4,22 @@ require 'ipaddr'
require 'socket' require 'socket'
require 'resolv' require 'resolv'
# Monkey-patch the HTTP.rb timeout class to avoid using a timeout block # Use our own timeout class to avoid using HTTP.rb's timeout block
# around the Socket#open method, since we use our own timeout blocks inside # around the Socket#open method, since we use our own timeout blocks inside
# that method # that method
# #
# Also changes how the read timeout behaves so that it is cumulative (closer # Also changes how the read timeout behaves so that it is cumulative (closer
# to HTTP::Timeout::Global, but still having distinct timeouts for other # to HTTP::Timeout::Global, but still having distinct timeouts for other
# operation types) # operation types)
class HTTP::Timeout::PerOperation class PerOperationWithDeadline < HTTP::Timeout::PerOperation
READ_DEADLINE = 30
def initialize(*args)
super
@read_deadline = options.fetch(:read_deadline, READ_DEADLINE)
end
def connect(socket_class, host, port, nodelay = false) def connect(socket_class, host, port, nodelay = false)
@socket = socket_class.open(host, port) @socket = socket_class.open(host, port)
@socket.setsockopt(Socket::IPPROTO_TCP, Socket::TCP_NODELAY, 1) if nodelay @socket.setsockopt(Socket::IPPROTO_TCP, Socket::TCP_NODELAY, 1) if nodelay
@@ -24,7 +32,7 @@ class HTTP::Timeout::PerOperation
# Read data from the socket # Read data from the socket
def readpartial(size, buffer = nil) def readpartial(size, buffer = nil)
@deadline ||= Process.clock_gettime(Process::CLOCK_MONOTONIC) + @read_timeout @deadline ||= Process.clock_gettime(Process::CLOCK_MONOTONIC) + @read_deadline
timeout = false timeout = false
loop do loop do
@@ -33,7 +41,8 @@ class HTTP::Timeout::PerOperation
return :eof if result.nil?
remaining_time = @deadline - Process.clock_gettime(Process::CLOCK_MONOTONIC)
- raise HTTP::TimeoutError, "Read timed out after #{@read_timeout} seconds" if timeout || remaining_time <= 0
+ raise HTTP::TimeoutError, "Read timed out after #{@read_timeout} seconds" if timeout
+ raise HTTP::TimeoutError, "Read timed out after a total of #{@read_deadline} seconds" if remaining_time <= 0
return result if result != :wait_readable
# marking the socket for timeout. Why is this not being raised immediately?
@@ -46,7 +55,7 @@ class HTTP::Timeout::PerOperation
# timeout. Else, the first timeout was a proper timeout.
# This hack has to be done because io/wait#wait_readable doesn't provide a value for when
# the socket is closed by the server, and HTTP::Parser doesn't provide the limit for the chunks.
- timeout = true unless @socket.to_io.wait_readable(remaining_time)
+ timeout = true unless @socket.to_io.wait_readable([remaining_time, @read_timeout].min)
end
end
end
@@ -57,7 +66,7 @@ class Request
# We enforce a 5s timeout on DNS resolving, 5s timeout on socket opening
# and 5s timeout on the TLS handshake, meaning the worst case should take
# about 15s in total
- TIMEOUT = { connect: 5, read: 10, write: 10 }.freeze
+ TIMEOUT = { connect_timeout: 5, read_timeout: 10, write_timeout: 10, read_deadline: 30 }.freeze
include RoutingHelper
@@ -68,7 +77,9 @@ class Request
@url = Addressable::URI.parse(url).normalize
@http_client = options.delete(:http_client)
@allow_local = options.delete(:allow_local)
+ @full_path = options.delete(:with_query_string)
@options = options.merge(socket_class: use_proxy? || @allow_local ? ProxySocket : Socket)
+ @options = @options.merge(timeout_class: PerOperationWithDeadline, timeout_options: TIMEOUT)
@options = @options.merge(proxy_url) if use_proxy?
@headers = {}
@@ -129,14 +140,14 @@ class Request
end
def http_client
- HTTP.use(:auto_inflate).timeout(TIMEOUT.dup).follow(max_hops: 3)
+ HTTP.use(:auto_inflate).follow(max_hops: 3)
end
end
private
def set_common_headers!
- @headers[REQUEST_TARGET] = "#{@verb} #{@url.path}"
+ @headers[REQUEST_TARGET] = request_target
@headers['User-Agent'] = Mastodon::Version.user_agent
@headers['Host'] = @url.host
@headers['Date'] = Time.now.utc.httpdate
@@ -147,6 +158,14 @@ class Request
@headers['Digest'] = "SHA-256=#{Digest::SHA256.base64digest(@options[:body])}"
end
+ def request_target
+ if @url.query.nil? || !@full_path
+ "#{@verb} #{@url.path}"
+ else
+ "#{@verb} #{@url.path}?#{@url.query}"
+ end
+ end
def signature
algorithm = 'rsa-sha256'
signature = Base64.strict_encode64(@keypair.sign(OpenSSL::Digest.new('SHA256'), signed_string))
@@ -275,11 +294,11 @@ class Request
end
until socks.empty?
- _, available_socks, = IO.select(nil, socks, nil, Request::TIMEOUT[:connect])
+ _, available_socks, = IO.select(nil, socks, nil, Request::TIMEOUT[:connect_timeout])
if available_socks.nil?
socks.each(&:close)
- raise HTTP::TimeoutError, "Connect timed out after #{Request::TIMEOUT[:connect]} seconds"
+ raise HTTP::TimeoutError, "Connect timed out after #{Request::TIMEOUT[:connect_timeout]} seconds"
end
available_socks.each do |sock|
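Note: the changes above split the old single read/connect/write timeout into named per-operation timeouts plus an overall read_deadline, so a peer that trickles one byte at a time can no longer hold a connection open indefinitely. A minimal, self-contained sketch of the same idea in plain Ruby (method and variable names here are illustrative, not Mastodon's API):

require 'io/wait'

# Sketch: bound each individual read AND the total time spent reading a response.
def read_with_deadline(socket, size, per_read_timeout: 10, read_deadline: 30)
  deadline = Process.clock_gettime(Process::CLOCK_MONOTONIC) + read_deadline

  loop do
    chunk = socket.read_nonblock(size, exception: false)
    return chunk unless chunk == :wait_readable # data, or nil at EOF

    remaining = deadline - Process.clock_gettime(Process::CLOCK_MONOTONIC)
    raise IOError, "read deadline of #{read_deadline}s exceeded" if remaining <= 0

    # Never wait longer than the per-read timeout or what is left of the overall deadline.
    raise IOError, "read timed out after #{per_read_timeout}s" unless socket.wait_readable([per_read_timeout, remaining].min)
  end
end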

@@ -16,17 +16,16 @@ class StatusReachFinder
private
def reached_account_inboxes
+ Account.where(id: reached_account_ids).inboxes
+ end
+ def reached_account_ids
# When the status is a reblog, there are no interactions with it
# directly, we assume all interactions are with the original one
if @status.reblog?
- []
+ [reblog_of_account_id]
else
- Account.where(id: reached_account_ids).inboxes
- end
- end
- def reached_account_ids
[
replied_to_account_id,
reblog_of_account_id,
@@ -40,6 +39,7 @@ class StatusReachFinder
arr.uniq!
end
end
+ end
def replied_to_account_id
@status.in_reply_to_account_id if distributable?

@@ -7,18 +7,18 @@ class TagManager
include RoutingHelper
def web_domain?(domain)
- domain.nil? || domain.gsub(/[\/]/, '').casecmp(Rails.configuration.x.web_domain).zero?
+ domain.nil? || domain.delete_suffix('/').casecmp(Rails.configuration.x.web_domain).zero?
end
def local_domain?(domain)
- domain.nil? || domain.gsub(/[\/]/, '').casecmp(Rails.configuration.x.local_domain).zero?
+ domain.nil? || domain.delete_suffix('/').casecmp(Rails.configuration.x.local_domain).zero?
end
def normalize_domain(domain)
return if domain.nil?
uri = Addressable::URI.new
- uri.host = domain.gsub(/[\/]/, '')
+ uri.host = domain.delete_suffix('/')
uri.normalized_host
end
@@ -28,7 +28,7 @@ class TagManager
domain = uri.host + (uri.port ? ":#{uri.port}" : '')
TagManager.instance.web_domain?(domain)
- rescue Addressable::URI::InvalidURIError
+ rescue Addressable::URI::InvalidURIError, IDN::Idna::IdnaError
false
end
end
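Note: delete_suffix('/') only strips a single trailing slash, while the old gsub(/[\/]/, '') removed every slash anywhere in the value. A plain-Ruby comparison:

'example.com/'.delete_suffix('/')      # => "example.com"
'example.com/'.gsub(%r{/}, '')         # => "example.com"
'example.com/path'.delete_suffix('/')  # => "example.com/path" (unchanged)
'example.com/path'.gsub(%r{/}, '')     # => "example.compath"  (inner slash also removed)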

@@ -46,7 +46,7 @@ class TranslationService::DeepL < TranslationService
raise UnexpectedResponseError unless json.is_a?(Hash)
- Translation.new(text: json.dig('translations', 0, 'text'), detected_source_language: json.dig('translations', 0, 'detected_source_language')&.downcase, provider: 'DeepL.com')
+ Translation.new(text: Sanitize.fragment(json.dig('translations', 0, 'text'), Sanitize::Config::MASTODON_STRICT), detected_source_language: json.dig('translations', 0, 'detected_source_language')&.downcase, provider: 'DeepL.com')
rescue Oj::ParseError
raise UnexpectedResponseError
end

@@ -37,7 +37,7 @@ class TranslationService::LibreTranslate < TranslationService
raise UnexpectedResponseError unless json.is_a?(Hash)
- Translation.new(text: json['translatedText'], detected_source_language: source_language, provider: 'LibreTranslate')
+ Translation.new(text: Sanitize.fragment(json['translatedText'], Sanitize::Config::MASTODON_STRICT), detected_source_language: source_language, provider: 'LibreTranslate')
rescue Oj::ParseError
raise UnexpectedResponseError
end
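Note: both translation backends now pass the provider's HTML through the sanitizer before building the Translation, so markup returned by an external service cannot reach clients unfiltered. A rough sketch with the sanitize gem; MASTODON_STRICT is Mastodon's own config, the allowlist below is only an illustration:

require 'sanitize'

# Illustrative strict allowlist (not Mastodon's actual MASTODON_STRICT config).
STRICT_EXAMPLE = Sanitize::Config.freeze_config(
  elements: %w(p br b i a),
  attributes: { 'a' => %w(href rel) },
  protocols: { 'a' => { 'href' => %w(http https) } }
)

Sanitize.fragment('<b>Hi</b> <img src="x" onerror="alert(1)">', STRICT_EXAMPLE)
# => "<b>Hi</b> "  (disallowed elements and attributes are stripped)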

@@ -43,6 +43,9 @@ class VideoMetadataExtractor
@height = video_stream[:height]
@frame_rate = video_stream[:avg_frame_rate] == '0/0' ? nil : Rational(video_stream[:avg_frame_rate])
@r_frame_rate = video_stream[:r_frame_rate] == '0/0' ? nil : Rational(video_stream[:r_frame_rate])
+ # For some video streams the frame_rate reported by `ffprobe` will be 0/0, but for these streams we
+ # should use `r_frame_rate` instead. Video screencast generated by Gnome Screencast have this issue.
+ @frame_rate ||= @r_frame_rate
end
if (audio_stream = audio_streams.first)
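Note: when ffprobe reports an average frame rate of 0/0 (as some screencast files do), the extractor now falls back to the stream's r_frame_rate. A tiny illustration of that parsing:

avg_frame_rate = '0/0'   # as reported by ffprobe for the affected files
r_frame_rate   = '30/1'

frame_rate   = avg_frame_rate == '0/0' ? nil : Rational(avg_frame_rate)
frame_rate ||= r_frame_rate == '0/0' ? nil : Rational(r_frame_rate)
frame_rate # => (30/1)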

@@ -61,9 +61,9 @@ class Account < ApplicationRecord
trust_level
)
- USERNAME_RE = /[a-z0-9_]+([a-z0-9_\.-]+[a-z0-9_]+)?/i
+ USERNAME_RE = /[a-z0-9_]+([a-z0-9_.-]+[a-z0-9_]+)?/i
- MENTION_RE = /(?<=^|[^\/[:word:]])@((#{USERNAME_RE})(?:@[[:word:]\.\-]+[[:word:]]+)?)/i
+ MENTION_RE = %r{(?<![=/[:word:]])@((#{USERNAME_RE})(?:@[[:word:].-]+[[:word:]]+)?)}i
- URL_PREFIX_RE = /\Ahttp(s?):\/\/[^\/]+/
+ URL_PREFIX_RE = %r{\Ahttp(s?)://[^/]+}
USERNAME_ONLY_RE = /\A#{USERNAME_RE}\z/i
include Attachmentable
@@ -112,10 +112,10 @@ class Account < ApplicationRecord
scope :matches_domain, ->(value) { where(arel_table[:domain].matches("%#{value}%")) }
scope :without_unapproved, -> { left_outer_joins(:user).remote.or(left_outer_joins(:user).merge(User.approved.confirmed)) }
scope :searchable, -> { without_unapproved.without_suspended.where(moved_to_account_id: nil) }
- scope :discoverable, -> { searchable.without_silenced.where(discoverable: true).left_outer_joins(:account_stat) }
+ scope :discoverable, -> { searchable.without_silenced.where(discoverable: true).joins(:account_stat) }
scope :followable_by, ->(account) { joins(arel_table.join(Follow.arel_table, Arel::Nodes::OuterJoin).on(arel_table[:id].eq(Follow.arel_table[:target_account_id]).and(Follow.arel_table[:account_id].eq(account.id))).join_sources).where(Follow.arel_table[:id].eq(nil)).joins(arel_table.join(FollowRequest.arel_table, Arel::Nodes::OuterJoin).on(arel_table[:id].eq(FollowRequest.arel_table[:target_account_id]).and(FollowRequest.arel_table[:account_id].eq(account.id))).join_sources).where(FollowRequest.arel_table[:id].eq(nil)) }
- scope :by_recent_status, -> { order(Arel.sql('(case when account_stats.last_status_at is null then 1 else 0 end) asc, account_stats.last_status_at desc, accounts.id desc')) }
+ scope :by_recent_status, -> { includes(:account_stat).merge(AccountStat.order('last_status_at DESC NULLS LAST')).references(:account_stat) }
- scope :by_recent_sign_in, -> { order(Arel.sql('(case when users.current_sign_in_at is null then 1 else 0 end) asc, users.current_sign_in_at desc, accounts.id desc')) }
+ scope :by_recent_sign_in, -> { order(Arel.sql('users.current_sign_in_at DESC NULLS LAST')) }
scope :popular, -> { order('account_stats.followers_count desc') }
scope :by_domain_and_subdomains, ->(domain) { where(domain: domain).or(where(arel_table[:domain].matches("%.#{domain}"))) }
scope :not_excluded_by_account, ->(account) { where.not(id: account.excluded_from_timeline_account_ids) }

@@ -38,7 +38,7 @@ class Admin::ActionLogFilter
destroy_status: { target_type: 'Status', action: 'destroy' }.freeze,
destroy_user_role: { target_type: 'UserRole', action: 'destroy' }.freeze,
destroy_canonical_email_block: { target_type: 'CanonicalEmailBlock', action: 'destroy' }.freeze,
- disable_2fa_user: { target_type: 'User', action: 'disable' }.freeze,
+ disable_2fa_user: { target_type: 'User', action: 'disable_2fa' }.freeze,
disable_custom_emoji: { target_type: 'CustomEmoji', action: 'disable' }.freeze,
disable_user: { target_type: 'User', action: 'disable' }.freeze,
enable_custom_emoji: { target_type: 'CustomEmoji', action: 'enable' }.freeze,

@@ -140,6 +140,6 @@ class Admin::StatusBatchAction
end
def allowed_status_ids
- AccountStatusesFilter.new(@report.target_account, current_account).results.with_discarded.where(id: status_ids).pluck(:id)
+ Admin::AccountStatusesFilter.new(@report.target_account, current_account).results.with_discarded.where(id: status_ids).pluck(:id)
end
end

@@ -18,7 +18,7 @@ module AccountAvatar
included do
# Avatar upload
- has_attached_file :avatar, styles: ->(f) { avatar_styles(f) }, convert_options: { all: '+profile "!icc,*" +set modify-date +set create-date' }, processors: [:lazy_thumbnail]
+ has_attached_file :avatar, styles: ->(f) { avatar_styles(f) }, convert_options: { all: '+profile "!icc,*" +set date:modify +set date:create +set date:timestamp' }, processors: [:lazy_thumbnail]
validates_attachment_content_type :avatar, content_type: IMAGE_MIME_TYPES
validates_attachment_size :avatar, less_than: LIMIT
remotable_attachment :avatar, LIMIT, suppress_errors: false

@@ -19,7 +19,7 @@ module AccountHeader
included do
# Header upload
- has_attached_file :header, styles: ->(f) { header_styles(f) }, convert_options: { all: '+profile "!icc,*" +set modify-date +set create-date' }, processors: [:lazy_thumbnail]
+ has_attached_file :header, styles: ->(f) { header_styles(f) }, convert_options: { all: '+profile "!icc,*" +set date:modify +set date:create +set date:timestamp' }, processors: [:lazy_thumbnail]
validates_attachment_content_type :header, content_type: IMAGE_MIME_TYPES
validates_attachment_size :header, less_than: LIMIT
remotable_attachment :header, LIMIT, suppress_errors: false

@@ -52,9 +52,13 @@ module Attachmentable
return if attachment.blank? || !/image.*/.match?(attachment.content_type) || attachment.queued_for_write[:original].blank?
width, height = FastImage.size(attachment.queued_for_write[:original].path)
- matrix_limit = attachment.content_type == 'image/gif' ? GIF_MATRIX_LIMIT : MAX_MATRIX_LIMIT
- raise Mastodon::DimensionsValidationError, "#{width}x#{height} images are not supported" if width.present? && height.present? && (width * height > matrix_limit)
+ return unless width.present? && height.present?
+ if attachment.content_type == 'image/gif' && width * height > GIF_MATRIX_LIMIT
+ raise Mastodon::DimensionsValidationError, "#{width}x#{height} GIF files are not supported"
+ elsif width * height > MAX_MATRIX_LIMIT
+ raise Mastodon::DimensionsValidationError, "#{width}x#{height} images are not supported"
+ end
end
def appropriate_extension(attachment)
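Note: the rewritten guard distinguishes GIFs from other images so the error message names the right limit. A rough illustration of the branching, with made-up limits standing in for GIF_MATRIX_LIMIT and MAX_MATRIX_LIMIT:

# Hypothetical limits for illustration only.
GIF_MATRIX_LIMIT = 2_073_600   # ~1920x1080 pixels
MAX_MATRIX_LIMIT = 16_777_216  # ~4096x4096 pixels

def dimensions_error(content_type, width, height)
  return if width.nil? || height.nil?

  if content_type == 'image/gif' && width * height > GIF_MATRIX_LIMIT
    "#{width}x#{height} GIF files are not supported"
  elsif width * height > MAX_MATRIX_LIMIT
    "#{width}x#{height} images are not supported"
  end
end

dimensions_error('image/gif', 4000, 4000) # => "4000x4000 GIF files are not supported"
dimensions_error('image/png', 4000, 4000) # => nil (within the generic limit)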

@@ -37,7 +37,7 @@ class CustomEmoji < ApplicationRecord
belongs_to :category, class_name: 'CustomEmojiCategory', optional: true
has_one :local_counterpart, -> { where(domain: nil) }, class_name: 'CustomEmoji', primary_key: :shortcode, foreign_key: :shortcode
- has_attached_file :image, styles: { static: { format: 'png', convert_options: '-coalesce +profile "!icc,*" +set modify-date +set create-date' } }, validate_media_type: false
+ has_attached_file :image, styles: { static: { format: 'png', convert_options: '-coalesce +profile "!icc,*" +set date:modify +set date:create +set date:timestamp' } }, validate_media_type: false
before_validation :downcase_domain

@@ -169,7 +169,7 @@ class MediaAttachment < ApplicationRecord
}.freeze
GLOBAL_CONVERT_OPTIONS = {
- all: '-quality 90 +profile "!icc,*" +set modify-date +set create-date',
+ all: '-quality 90 +profile "!icc,*" +set date:modify +set date:create +set date:timestamp',
}.freeze
belongs_to :account, inverse_of: :media_attachments, optional: true

@@ -50,7 +50,7 @@ class PreviewCard < ApplicationRecord
has_and_belongs_to_many :statuses
has_one :trend, class_name: 'PreviewCardTrend', inverse_of: :preview_card, dependent: :destroy
- has_attached_file :image, processors: [:thumbnail, :blurhash_transcoder], styles: ->(f) { image_styles(f) }, convert_options: { all: '-quality 90 +profile "!icc,*" +set modify-date +set create-date' }, validate_media_type: false
+ has_attached_file :image, processors: [:thumbnail, :blurhash_transcoder], styles: ->(f) { image_styles(f) }, convert_options: { all: '-quality 90 +profile "!icc,*" +set date:modify +set date:create +set date:timestamp' }, validate_media_type: false
validates :url, presence: true, uniqueness: true
validates_attachment_content_type :image, content_type: IMAGE_MIME_TYPES

@@ -25,7 +25,7 @@ class PreviewCardProvider < ApplicationRecord
validates :domain, presence: true, uniqueness: true, domain: true
- has_attached_file :icon, styles: { static: { format: 'png', convert_options: '-coalesce +profile "!icc,*" +set modify-date +set create-date' } }, validate_media_type: false
+ has_attached_file :icon, styles: { static: { format: 'png', convert_options: '-coalesce +profile "!icc,*" +set date:modify +set date:create +set date:timestamp' } }, validate_media_type: false
validates_attachment :icon, content_type: { content_type: ICON_MIME_TYPES }, size: { less_than: LIMIT }
remotable_attachment :icon, LIMIT

@@ -60,13 +60,13 @@ class RelationshipFilter
def relationship_scope(value)
case value
when 'following'
- account.following.eager_load(:account_stat).reorder(nil)
+ account.following.includes(:account_stat).reorder(nil)
when 'followed_by'
- account.followers.eager_load(:account_stat).reorder(nil)
+ account.followers.includes(:account_stat).reorder(nil)
when 'mutual'
- account.followers.eager_load(:account_stat).reorder(nil).merge(Account.where(id: account.following))
+ account.followers.includes(:account_stat).reorder(nil).merge(Account.where(id: account.following))
when 'invited'
- Account.joins(user: :invite).merge(Invite.where(user: account.user)).eager_load(:account_stat).reorder(nil)
+ Account.joins(user: :invite).merge(Invite.where(user: account.user)).includes(:account_stat).reorder(nil)
else
raise Mastodon::InvalidParameterError, "Unknown relationship: #{value}"
end
@@ -112,7 +112,7 @@ class RelationshipFilter
def activity_scope(value)
case value
when 'dormant'
- AccountStat.where(last_status_at: nil).or(AccountStat.where(AccountStat.arel_table[:last_status_at].lt(1.month.ago)))
+ Account.joins(:account_stat).where(account_stat: { last_status_at: [nil, ...1.month.ago] })
else
raise Mastodon::InvalidParameterError, "Unknown activity: #{value}"
end

@@ -39,7 +39,10 @@ class Report < ApplicationRecord
scope :resolved, -> { where.not(action_taken_at: nil) }
scope :with_accounts, -> { includes([:account, :target_account, :action_taken_by_account, :assigned_account].index_with({ user: [:invite_request, :invite] })) }
- validates :comment, length: { maximum: 1_000 }
+ # A report is considered local if the reporter is local
+ delegate :local?, to: :account
+ validates :comment, length: { maximum: 1_000 }, if: :local?
validates :rule_ids, absence: true, unless: :violation?
validate :validate_rule_ids
@@ -50,10 +53,6 @@ class Report < ApplicationRecord
violation: 2_000,
}
- def local?
- false # Force uri_for to use uri attribute
- end
before_validation :set_uri, only: :create
after_create_commit :trigger_webhooks

@@ -40,7 +40,7 @@ class SiteUpload < ApplicationRecord
mascot: {}.freeze,
}.freeze
- has_attached_file :file, styles: ->(file) { STYLES[file.instance.var.to_sym] }, convert_options: { all: '-coalesce +profile "!icc,*" +set modify-date +set create-date' }, processors: [:lazy_thumbnail, :blurhash_transcoder, :type_corrector]
+ has_attached_file :file, styles: ->(file) { STYLES[file.instance.var.to_sym] }, convert_options: { all: '-coalesce +profile "!icc,*" +set date:modify +set date:create +set date:timestamp' }, processors: [:lazy_thumbnail, :blurhash_transcoder, :type_corrector]
validates_attachment_content_type :file, content_type: /\Aimage\/.*\z/
validates :file, presence: true

@@ -354,13 +354,25 @@ class Status < ApplicationRecord
account_ids.uniq!
+ status_ids = cached_items.map { |item| item.reblog? ? item.reblog_of_id : item.id }.uniq
return if account_ids.empty?
accounts = Account.where(id: account_ids).includes(:account_stat, :user).index_by(&:id)
+ status_stats = StatusStat.where(status_id: status_ids).index_by(&:status_id)
cached_items.each do |item|
item.account = accounts[item.account_id]
item.reblog.account = accounts[item.reblog.account_id] if item.reblog?
+ if item.reblog?
+ status_stat = status_stats[item.reblog.id]
+ item.reblog.status_stat = status_stat if status_stat.present?
+ else
+ status_stat = status_stats[item.id]
+ item.status_stat = status_stat if status_stat.present?
+ end
end
end
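Note: the added lookup matters because cache_collection rehydrates statuses from a cache; without re-attaching a freshly loaded StatusStat, a cached entry keeps whatever favourite/reblog counters it had when it was written. A simplified, runnable sketch of the pattern with hypothetical stand-ins rather than Mastodon's models:

require 'ostruct'

# Hypothetical stand-ins for cached statuses and their freshly loaded counter rows.
cached_posts  = [OpenStruct.new(id: 1, favourites_count: 0)]                    # stale copy from the cache
live_counters = { 1 => OpenStruct.new(post_id: 1, favourites_count: 3) }        # just loaded from the database

cached_posts.each do |post|
  counter = live_counters[post.id]
  post.favourites_count = counter.favourites_count if counter # overwrite the stale value
end

cached_posts.first.favourites_count # => 3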

@@ -33,7 +33,7 @@ class Tag < ApplicationRecord
HASTAG_LAST_SEQUENCE = '([[:word:]_]*[[:alpha:]][[:word:]_]*)'
HASHTAG_NAME_PAT = "#{HASHTAG_FIRST_SEQUENCE}|#{HASTAG_LAST_SEQUENCE}"
- HASHTAG_RE = /(?:^|[^\/\)\w])#(#{HASHTAG_NAME_PAT})/i
+ HASHTAG_RE = %r{(?<![=/)\w])#(#{HASHTAG_NAME_PAT})}i
HASHTAG_NAME_RE = /\A(#{HASHTAG_NAME_PAT})\z/i
HASHTAG_INVALID_CHARS_RE = /[^[:alnum:]#{HASHTAG_SEPARATORS}]/

@@ -91,7 +91,7 @@ class Trends::Statuses < Trends::Base
private
def eligible?(status)
- status.public_visibility? && status.account.discoverable? && !status.account.silenced? && status.spoiler_text.blank? && !status.sensitive? && !status.reply? && valid_locale?(status.language)
+ status.public_visibility? && status.account.discoverable? && !status.account.silenced? && !status.account.sensitized? && status.spoiler_text.blank? && !status.sensitive? && !status.reply? && valid_locale?(status.language)
end
def calculate_scores(statuses, at_time)

@@ -12,7 +12,7 @@ class Admin::StatusPolicy < ApplicationPolicy
end
def show?
- role.can?(:manage_reports, :manage_users) && (record.public_visibility? || record.unlisted_visibility? || record.reported?)
+ role.can?(:manage_reports, :manage_users) && (record.public_visibility? || record.unlisted_visibility? || record.reported? || viewable_through_normal_policy?)
end
def destroy?
@@ -26,4 +26,10 @@ class Admin::StatusPolicy < ApplicationPolicy
def review?
role.can?(:manage_taxonomies)
end
+ private
+ def viewable_through_normal_policy?
+ StatusPolicy.new(current_account, record, @preloaded_relations).show?
+ end
end

@@ -1,7 +1,7 @@
# frozen_string_literal: true
class BackupPolicy < ApplicationPolicy
- MIN_AGE = 1.week
+ MIN_AGE = 6.days
def create?
user_signed_in? && current_user.backups.where('created_at >= ?', MIN_AGE.ago).count.zero?

@@ -10,7 +10,9 @@ class REST::FeaturedTagSerializer < ActiveModel::Serializer
end
def url
- short_account_tag_url(object.account, object.tag)
+ # The path is hardcoded because we have to deal with both local and
+ # remote users, which are different routes
+ account_with_domain_url(object.account, "tagged/#{object.tag.to_param}")
end
def name

@@ -23,9 +23,9 @@ class ActivityPub::FetchFeaturedCollectionService < BaseService
case collection['type']
when 'Collection', 'CollectionPage'
- collection['items']
+ as_array(collection['items'])
when 'OrderedCollection', 'OrderedCollectionPage'
- collection['orderedItems']
+ as_array(collection['orderedItems'])
end
end
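Note: the switch to as_array (here and in the replies/followers services below) guards against compacted JSON-LD, where a single-item collection may be serialized as a bare object rather than a one-element array. A helper along these lines makes both shapes safe to iterate (a sketch; Mastodon's own as_array lives in its JSON-LD helpers):

# Normalize a JSON-LD value so callers can always iterate over it.
def as_array(value)
  if value.nil?
    []
  elsif value.is_a?(Array)
    value
  else
    [value]
  end
end

as_array(nil)                                  # => []
as_array({ 'id' => 'https://example.com/1' })  # => [{"id"=>"https://example.com/1"}]
as_array([1, 2, 3])                            # => [1, 2, 3]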

@@ -8,6 +8,6 @@ class ActivityPub::FetchRemotePollService < BaseService
return unless supported_context?(json)
- ActivityPub::ProcessStatusUpdateService.new.call(poll.status, json)
+ ActivityPub::ProcessStatusUpdateService.new.call(poll.status, json, json)
end
end

@@ -26,9 +26,9 @@ class ActivityPub::FetchRepliesService < BaseService
case collection['type']
when 'Collection', 'CollectionPage'
- collection['items']
+ as_array(collection['items'])
when 'OrderedCollection', 'OrderedCollectionPage'
- collection['orderedItems']
+ as_array(collection['orderedItems'])
end
end
@@ -36,7 +36,21 @@ class ActivityPub::FetchRepliesService < BaseService
return collection_or_uri if collection_or_uri.is_a?(Hash)
return unless @allow_synchronous_requests
return if invalid_origin?(collection_or_uri)
+ # NOTE: For backward compatibility reasons, Mastodon signs outgoing
+ # queries incorrectly by default.
+ #
+ # While this is relevant for all URLs with query strings, this is
+ # the only code path where this happens in practice.
+ #
+ # Therefore, retry with correct signatures if this fails.
+ begin
fetch_resource_without_id_validation(collection_or_uri, nil, true)
+ rescue Mastodon::UnexpectedResponseError => e
+ raise unless e.response && e.response.code == 401 && Addressable::URI.parse(collection_or_uri).query.present?
+ fetch_resource_without_id_validation(collection_or_uri, nil, true, request_options: { with_query_string: true })
+ end
end
def filtered_replies
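Note: this retry ties back to the request_target change at the top of this diff. By default the signed (request-target) pseudo-header omits the query string, so a remote server that verifies the signature against the full target answers 401, and the request is retried once with with_query_string: true. Roughly, the signed value differs like this (illustrative URL):

# (request-target) for GET https://example.com/replies?page=true
verb, path, query = 'get', '/replies', 'page=true'

default_target = "#{verb} #{path}"           # => "get /replies"
full_target    = "#{verb} #{path}?#{query}"  # => "get /replies?page=true"
# The first attempt signs default_target; on a 401 for a URL with a query,
# the second attempt signs full_target instead.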

@@ -76,6 +76,9 @@ class ActivityPub::ProcessAccountService < BaseService
@account.suspended_at = domain_block.created_at if auto_suspend?
@account.suspension_origin = :local if auto_suspend?
@account.silenced_at = domain_block.created_at if auto_silence?
+ set_immediate_protocol_attributes!
@account.save
end

@@ -5,10 +5,11 @@ class ActivityPub::ProcessStatusUpdateService < BaseService
include Redisable
include Lockable
- def call(status, json, request_id: nil)
+ def call(status, activity_json, object_json, request_id: nil)
raise ArgumentError, 'Status has unsaved changes' if status.changed?
- @json = json
+ @activity_json = activity_json
+ @json = object_json
@status_parser = ActivityPub::Parser::StatusParser.new(@json)
@uri = @status_parser.uri
@status = status
@@ -308,6 +309,6 @@ class ActivityPub::ProcessStatusUpdateService < BaseService
end
def forwarder
- @forwarder ||= ActivityPub::Forwarder.new(@account, @json, @status)
+ @forwarder ||= ActivityPub::Forwarder.new(@account, @activity_json, @status)
end
end

@@ -59,9 +59,9 @@ class ActivityPub::SynchronizeFollowersService < BaseService
case collection['type']
when 'Collection', 'CollectionPage'
- collection['items']
+ as_array(collection['items'])
when 'OrderedCollection', 'OrderedCollectionPage'
- collection['orderedItems']
+ as_array(collection['orderedItems'])
end
end

@@ -100,7 +100,7 @@ class FetchOEmbedService
end
def validate(oembed)
- oembed if oembed[:version].to_s == '1.0' && oembed[:type].present?
+ oembed if oembed.present? && oembed[:version].to_s == '1.0' && oembed[:type].present?
end
def html

@@ -71,7 +71,7 @@ class FollowService < BaseService
if @target_account.local?
LocalNotificationWorker.perform_async(@target_account.id, follow_request.id, follow_request.class.name, 'follow_request')
elsif @target_account.activitypub?
- ActivityPub::DeliveryWorker.perform_async(build_json(follow_request), @source_account.id, @target_account.inbox_url)
+ ActivityPub::DeliveryWorker.perform_async(build_json(follow_request), @source_account.id, @target_account.inbox_url, { 'bypass_availability' => true })
end
follow_request

@@ -69,7 +69,7 @@ class Keys::QueryService < BaseService
return if json['items'].blank?
- @devices = json['items'].map do |device|
+ @devices = as_array(json['items']).map do |device|
Device.new(device_id: device['id'], name: device['name'], identity_key: device.dig('identityKey', 'publicKeyBase64'), fingerprint_key: device.dig('fingerprintKey', 'publicKeyBase64'), claim_url: device['claim'])
end
rescue HTTP::Error, OpenSSL::SSL::SSLError, Mastodon::Error => e

@@ -45,11 +45,7 @@ class ReblogService < BaseService
def create_notification(reblog)
reblogged_status = reblog.reblog
- if reblogged_status.account.local?
- LocalNotificationWorker.perform_async(reblogged_status.account_id, reblog.id, reblog.class.name, 'reblog')
- elsif reblogged_status.account.activitypub? && !reblogged_status.account.following?(reblog.account)
- ActivityPub::DeliveryWorker.perform_async(build_json(reblog), reblog.account_id, reblogged_status.account.inbox_url)
- end
+ LocalNotificationWorker.perform_async(reblogged_status.account_id, reblog.id, reblog.class.name, 'reblog') if reblogged_status.account.local?
end
def bump_potential_friendship(account, reblog)

@@ -12,7 +12,9 @@ class TranslateStatusService < BaseService
@content = status_content_format(@status)
@target_language = target_language
- Rails.cache.fetch("translations/#{@status.language}/#{@target_language}/#{content_hash}", expires_in: CACHE_TTL) { translation_backend.translate(@content, @status.language, @target_language) }
+ Rails.cache.fetch("translations:v2/#{@status.language}/#{@target_language}/#{content_hash}", expires_in: CACHE_TTL) do
+ translation_backend.translate(@content, @status.language, @target_language)
+ end
end
private

@@ -29,7 +29,7 @@
- Trends::PreviewCardProviderFilter::KEYS.each do |key|
= hidden_field_tag key, params[key] if params[key].present?
- .batch-table.optional
+ .batch-table
.batch-table__toolbar
%label.batch-table__toolbar__select.batch-checkbox-all
= check_box_tag :batch_checkbox_all, nil, false

@@ -3,7 +3,7 @@
class AccountDeletionWorker
include Sidekiq::Worker
- sidekiq_options queue: 'pull', lock: :until_executed
+ sidekiq_options queue: 'pull', lock: :until_executed, lock_ttl: 1.week.to_i
def perform(account_id, options = {})
reserve_username = options.with_indifferent_access.fetch(:reserve_username, true)

@@ -23,9 +23,10 @@ class ActivityPub::DeliveryWorker
HEADERS = { 'Content-Type' => 'application/activity+json' }.freeze
def perform(json, source_account_id, inbox_url, options = {})
- return unless DeliveryFailureTracker.available?(inbox_url)
@options = options.with_indifferent_access
+ return unless @options[:bypass_availability] || DeliveryFailureTracker.available?(inbox_url)
@json = json
@source_account = Account.find(source_account_id)
@inbox_url = inbox_url

@@ -3,7 +3,7 @@
class ActivityPub::SynchronizeFeaturedCollectionWorker
include Sidekiq::Worker
- sidekiq_options queue: 'pull', lock: :until_executed
+ sidekiq_options queue: 'pull', lock: :until_executed, lock_ttl: 1.day.to_i
def perform(account_id, options = {})
options = { note: true, hashtag: false }.deep_merge(options.deep_symbolize_keys)

@@ -3,7 +3,7 @@
class ActivityPub::SynchronizeFeaturedTagsCollectionWorker
include Sidekiq::Worker
- sidekiq_options queue: 'pull', lock: :until_executed
+ sidekiq_options queue: 'pull', lock: :until_executed, lock_ttl: 1.day.to_i
def perform(account_id, url)
ActivityPub::FetchFeaturedTagsCollectionService.new.call(Account.find(account_id), url)

@@ -1,7 +1,7 @@
# frozen_string_literal: true
class ActivityPub::UpdateDistributionWorker < ActivityPub::RawDistributionWorker
- sidekiq_options queue: 'push', lock: :until_executed
+ sidekiq_options queue: 'push', lock: :until_executed, lock_ttl: 1.day.to_i
# Distribute an profile update to servers that might have a copy
# of the account in question

@@ -3,7 +3,7 @@
class Admin::AccountDeletionWorker
include Sidekiq::Worker
- sidekiq_options queue: 'pull', lock: :until_executed
+ sidekiq_options queue: 'pull', lock: :until_executed, lock_ttl: 1.week.to_i
def perform(account_id)
DeleteAccountService.new.call(Account.find(account_id), reserve_username: true, reserve_email: true)

@@ -3,7 +3,7 @@
class Admin::DomainPurgeWorker
include Sidekiq::Worker
- sidekiq_options queue: 'pull', lock: :until_executed
+ sidekiq_options queue: 'pull', lock: :until_executed, lock_ttl: 1.week.to_i
def perform(domain)
PurgeDomainService.new.call(domain)

@@ -7,7 +7,7 @@ class LinkCrawlWorker
def perform(status_id)
FetchLinkCardService.new.call(Status.find(status_id))
- rescue ActiveRecord::RecordNotFound
+ rescue ActiveRecord::RecordNotFound, ActiveRecord::RecordNotUnique
true
end
end

@@ -3,7 +3,7 @@
class PublishScheduledStatusWorker
include Sidekiq::Worker
- sidekiq_options lock: :until_executed
+ sidekiq_options lock: :until_executed, lock_ttl: 1.hour.to_i
def perform(scheduled_status_id)
scheduled_status = ScheduledStatus.find(scheduled_status_id)

@@ -3,7 +3,7 @@
class ResolveAccountWorker
include Sidekiq::Worker
- sidekiq_options queue: 'pull', lock: :until_executed
+ sidekiq_options queue: 'pull', lock: :until_executed, lock_ttl: 1.day.to_i
def perform(uri)
ResolveAccountService.new.call(uri)

@@ -4,7 +4,7 @@ class Scheduler::IndexingScheduler
include Sidekiq::Worker
include Redisable
- sidekiq_options retry: 0
+ sidekiq_options retry: 0, lock: :until_executed, lock_ttl: 30.minutes.to_i
IMPORT_BATCH_SIZE = 1000
SCAN_BATCH_SIZE = 10 * IMPORT_BATCH_SIZE

@@ -3,7 +3,7 @@
class Scheduler::ScheduledStatusesScheduler
include Sidekiq::Worker
- sidekiq_options retry: 0
+ sidekiq_options retry: 0, lock: :until_executed, lock_ttl: 1.hour.to_i
def perform
publish_scheduled_statuses!

@@ -3,7 +3,7 @@
class Scheduler::Trends::RefreshScheduler
include Sidekiq::Worker
- sidekiq_options retry: 0
+ sidekiq_options retry: 0, lock: :until_executed, lock_ttl: 30.minutes.to_i
def perform
Trends.refresh!

@@ -3,7 +3,7 @@
class VerifyAccountLinksWorker
include Sidekiq::Worker
- sidekiq_options queue: 'default', retry: false, lock: :until_executed
+ sidekiq_options queue: 'default', retry: false, lock: :until_executed, lock_ttl: 1.hour.to_i
def perform(account_id)
account = Account.find(account_id)

@@ -3,7 +3,11 @@
# https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Security-Policy
def host_to_url(str)
- "http#{Rails.configuration.x.use_https ? 's' : ''}://#{str}".split('/').first if str.present?
+ return if str.blank?
+ uri = Addressable::URI.parse("http#{Rails.configuration.x.use_https ? 's' : ''}://#{str}")
+ uri.path += '/' unless uri.path.blank? || uri.path.end_with?('/')
+ uri.to_s
end
base_host = Rails.configuration.x.web_domain
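Note: the old helper split the rebuilt URL on '/' and kept the first segment, which for "https://host/path" is just the scheme, so a bare "https:" source (matching any HTTPS origin) could end up in the Content-Security-Policy; the new version keeps host and path and terminates the path with a slash, which CSP treats as a prefix match. A quick before/after for an asset host that includes a path (illustrative value, HTTPS assumed):

require 'addressable/uri'

str = 'files.example.com/media'

"https://#{str}".split('/').first
# => "https:"  (a bare scheme source: any HTTPS origin satisfies it)

uri = Addressable::URI.parse("https://#{str}")
uri.path += '/' unless uri.path.empty? || uri.path.end_with?('/')
uri.to_s
# => "https://files.example.com/media/"  (scoped to that host and path prefix)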

@@ -3,6 +3,11 @@
require_relative '../../lib/mastodon/sidekiq_middleware'
Sidekiq.configure_server do |config|
+ if Rails.configuration.database_configuration.dig('production', 'adapter') == 'postgresql_makara'
+ STDERR.puts 'ERROR: Database replication is not currently supported in Sidekiq workers. Check your configuration.'
+ exit 1
+ end
config.redis = REDIS_SIDEKIQ_PARAMS
config.server_middleware do |chain|

@@ -1684,6 +1684,7 @@ en:
follow_limit_reached: You cannot follow more than %{limit} people
invalid_otp_token: Invalid two-factor code
otp_lost_help_html: If you lost access to both, you may get in touch with %{email}
+ rate_limited: Too many authentication attempts, try again later.
seamless_external_login: You are logged in via an external service, so password and e-mail settings are not available.
signed_in_as: 'Signed in as:'
verification:

@@ -126,7 +126,7 @@ Rails.application.routes.draw do
get '/@:account_username/:id/embed', to: 'statuses#embed', as: :embed_short_account_status
end
- get '/@:username_with_domain/(*any)', to: 'home#index', constraints: { username_with_domain: /([^\/])+?/ }, format: false
+ get '/@:username_with_domain/(*any)', to: 'home#index', constraints: { username_with_domain: %r{([^/])+?} }, as: :account_with_domain, format: false
get '/settings', to: redirect('/settings/profile')
namespace :settings do
@@ -292,7 +292,7 @@ Rails.application.routes.draw do
end
end
- resources :instances, only: [:index, :show, :destroy], constraints: { id: /[^\/]+/ } do
+ resources :instances, only: [:index, :show, :destroy], constraints: { id: /[^\/]+/ }, format: 'html' do
member do
post :clear_delivery_errors
post :restart_delivery

@@ -56,7 +56,7 @@ services:
web:
build: .
- image: ghcr.io/mastodon/mastodon
+ image: ghcr.io/mastodon/mastodon:v4.1.12
restart: always
env_file: .env.production
command: bash -c "rm -f /mastodon/tmp/pids/server.pid; bundle exec rails s -p 3000"
@@ -77,7 +77,7 @@ services:
streaming:
build: .
- image: ghcr.io/mastodon/mastodon
+ image: ghcr.io/mastodon/mastodon:v4.1.12
restart: always
env_file: .env.production
command: node ./streaming
@@ -95,7 +95,7 @@ services:
sidekiq:
build: .
- image: ghcr.io/mastodon/mastodon
+ image: ghcr.io/mastodon/mastodon:v4.1.12
restart: always
env_file: .env.production
command: bundle exec sidekiq

@@ -13,7 +13,7 @@ module Mastodon
end
def patch
- 4
+ 12
end
def flags

@@ -16,7 +16,7 @@ module Paperclip
private
def cache_current_values
- @original_filename = filename_from_content_disposition.presence || filename_from_path.presence || 'data'
+ @original_filename = truncated_filename
@tempfile = copy_to_tempfile(@target)
@content_type = ContentTypeDetector.new(@tempfile.path).detect
@size = File.size(@tempfile)
@@ -43,6 +43,13 @@ module Paperclip
source.response.connection.close
end
+ def truncated_filename
+ filename = filename_from_content_disposition.presence || filename_from_path.presence || 'data'
+ extension = File.extname(filename)
+ basename = File.basename(filename, extension)
+ [basename[...20], extension[..4]].compact_blank.join
+ end
def filename_from_content_disposition
disposition = @target.response.headers['content-disposition']
disposition&.match(/filename="([^"]*)"/)&.captures&.first
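Note: the truncation keeps remote uploads with unusually long names from exceeding filename limits: the base name is cut to 20 characters and the extension to at most 5 (including the dot), with compact_blank dropping the extension part when there is none. A worked example of the same slicing:

filename  = 'a-very-long-remote-filename-from-some-server.jpeg'
extension = File.extname(filename)              # => ".jpeg"
basename  = File.basename(filename, extension)  # => "a-very-long-remote-filename-from-some-server"

[basename[...20], extension[..4]].join          # => "a-very-long-remote-f.jpeg"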

@@ -37,14 +37,16 @@ module Paperclip
@output_options['f'] = 'image2'
@output_options['vframes'] = 1
when 'mp4'
+ unless eligible_to_passthrough?(metadata)
@output_options['acodec'] = 'aac'
@output_options['strict'] = 'experimental'
- if high_vfr?(metadata) && !eligible_to_passthrough?(metadata)
+ if high_vfr?(metadata)
@output_options['vsync'] = 'vfr'
@output_options['r'] = @vfr_threshold
end
end
+ end
command_arguments, interpolations = prepare_command(destination)

@@ -40,24 +40,36 @@ describe Admin::StatusesController do
end
describe 'POST #batch' do
- before do
- post :batch, params: { account_id: account.id, action => '', admin_status_batch_action: { status_ids: status_ids } }
- end
+ subject { post :batch, params: { :account_id => account.id, action => '', :admin_status_batch_action => { status_ids: status_ids } } }
let(:status_ids) { [media_attached_status.id] }
- context 'when action is report' do
+ shared_examples 'when action is report' do
let(:action) { 'report' }
it 'creates a report' do
+ subject
report = Report.last
expect(report.target_account_id).to eq account.id
expect(report.status_ids).to eq status_ids
end
it 'redirects to report page' do
+ subject
expect(response).to redirect_to(admin_report_path(Report.last.id))
end
end
+ it_behaves_like 'when action is report'
+ context 'when the moderator is blocked by the author' do
+ before do
+ account.block!(user.account)
+ end
+ it_behaves_like 'when action is report'
+ end
end
end

@@ -5,7 +5,7 @@ require 'rails_helper'
describe Api::V1::StreamingController do
around(:each) do |example|
before = Rails.configuration.x.streaming_api_base_url
- Rails.configuration.x.streaming_api_base_url = Rails.configuration.x.web_domain
+ Rails.configuration.x.streaming_api_base_url = "wss://#{Rails.configuration.x.web_domain}"
example.run
Rails.configuration.x.streaming_api_base_url = before
end

@@ -6,35 +6,65 @@ describe Api::V1::Timelines::TagController do
render_views
let(:user) { Fabricate(:user) }
+ let(:token) { Fabricate(:accessible_access_token, resource_owner_id: user.id, scopes: 'read:statuses') }
before do
allow(controller).to receive(:doorkeeper_token) { token }
end
- context 'with a user context' do
- let(:token) { Fabricate(:accessible_access_token, resource_owner_id: user.id) }
describe 'GET #show' do
+ subject do
+ get :show, params: { id: 'test' }
+ end
before do
PostStatusService.new.call(user.account, text: 'It is a #test')
end
- it 'returns http success' do
- get :show, params: { id: 'test' }
+ context 'when the instance allows public preview' do
+ context 'when the user is not authenticated' do
+ let(:token) { nil }
+ it 'returns http success', :aggregate_failures do
+ subject
+ expect(response).to have_http_status(200)
+ expect(response.headers['Link'].links.size).to eq(2)
+ end
+ end
+ context 'when the user is authenticated' do
+ it 'returns http success', :aggregate_failures do
+ subject
expect(response).to have_http_status(200)
expect(response.headers['Link'].links.size).to eq(2)
end
end
end
- context 'without a user context' do
- let(:token) { Fabricate(:accessible_access_token, resource_owner_id: nil) }
+ context 'when the instance does not allow public preview' do
+ before do
+ Form::AdminSettings.new(timeline_preview: false).save
+ end
+ context 'when the user is not authenticated' do
+ let(:token) { nil }
+ it 'returns http unauthorized' do
+ subject
+ expect(response).to have_http_status(401)
+ end
+ end
+ context 'when the user is authenticated' do
+ it 'returns http success', :aggregate_failures do
+ subject
- describe 'GET #show' do
- it 'returns http success' do
- get :show, params: { id: 'test' }
expect(response).to have_http_status(200)
- expect(response.headers['Link']).to be_nil
+ expect(response.headers['Link'].links.size).to eq(2)
+ end
end
end
end

@@ -262,7 +262,27 @@ RSpec.describe Auth::SessionsController, type: :controller do
end
end
- context 'using a valid OTP' do
+ context 'when repeatedly using an invalid TOTP code before using a valid code' do
+ before do
+ stub_const('Auth::SessionsController::MAX_2FA_ATTEMPTS_PER_HOUR', 2)
+ end
+ it 'does not log the user in' do
+ # Travel to the beginning of an hour to avoid crossing rate-limit buckets
+ travel_to '2023-12-20T10:00:00Z'
+ Auth::SessionsController::MAX_2FA_ATTEMPTS_PER_HOUR.times do
+ post :create, params: { user: { otp_attempt: '1234' } }, session: { attempt_user_id: user.id, attempt_user_updated_at: user.updated_at.to_s }
+ expect(controller.current_user).to be_nil
+ end
+ post :create, params: { user: { otp_attempt: user.current_otp } }, session: { attempt_user_id: user.id, attempt_user_updated_at: user.updated_at.to_s }
+ expect(controller.current_user).to be_nil
+ expect(flash[:alert]).to match I18n.t('users.rate_limited')
+ end
+ end
+ context 'when using a valid OTP' do
before do
post :create, params: { user: { otp_attempt: user.current_otp } }, session: { attempt_user_id: user.id, attempt_user_updated_at: user.updated_at.to_s }
end

@@ -13,12 +13,17 @@ RSpec.describe CacheConcern, type: :controller do
def empty_relation
render plain: cache_collection(Status.none, Status).size
end
+ def account_statuses_favourites
+ render plain: cache_collection(Status.where(account_id: params[:id]), Status).map(&:favourites_count)
+ end
end
before do
routes.draw do
get 'empty_array' => 'anonymous#empty_array'
- post 'empty_relation' => 'anonymous#empty_relation'
+ get 'empty_relation' => 'anonymous#empty_relation'
+ get 'account_statuses_favourites' => 'anonymous#account_statuses_favourites'
end
end
@@ -36,5 +41,20 @@ RSpec.describe CacheConcern, type: :controller do
expect(response.body).to eq '0'
end
end
+ context 'when given a collection of statuses' do
+ let!(:account) { Fabricate(:account) }
+ let!(:status) { Fabricate(:status, account: account) }
+ it 'correctly updates with new interactions' do
+ get :account_statuses_favourites, params: { id: account.id }
+ expect(response.body).to eq '[0]'
+ FavouriteService.new.call(account, status)
+ get :account_statuses_favourites, params: { id: account.id }
+ expect(response.body).to eq '[1]'
+ end
+ end
end
end

@@ -1,303 +0,0 @@
# frozen_string_literal: true
require 'rails_helper'
describe ApplicationController, type: :controller do
class WrappedActor
attr_reader :wrapped_account
def initialize(wrapped_account)
@wrapped_account = wrapped_account
end
delegate :uri, :keypair, to: :wrapped_account
end
controller do
include SignatureVerification
before_action :require_actor_signature!, only: [:signature_required]
def success
head 200
end
def alternative_success
head 200
end
def signature_required
head 200
end
end
before do
routes.draw do
match via: [:get, :post], 'success' => 'anonymous#success'
match via: [:get, :post], 'signature_required' => 'anonymous#signature_required'
end
end
context 'without signature header' do
before do
get :success
end
describe '#signed_request?' do
it 'returns false' do
expect(controller.signed_request?).to be false
end
end
describe '#signed_request_account' do
it 'returns nil' do
expect(controller.signed_request_account).to be_nil
end
end
end
context 'with signature header' do
let!(:author) { Fabricate(:account, domain: 'example.com', uri: 'https://example.com/actor') }
context 'without body' do
before do
get :success
fake_request = Request.new(:get, request.url)
fake_request.on_behalf_of(author)
request.headers.merge!(fake_request.headers)
end
describe '#signed_request?' do
it 'returns true' do
expect(controller.signed_request?).to be true
end
end
describe '#signed_request_account' do
it 'returns an account' do
expect(controller.signed_request_account).to eq author
end
it 'returns nil when path does not match' do
request.path = '/alternative-path'
expect(controller.signed_request_account).to be_nil
end
it 'returns nil when method does not match' do
post :success
expect(controller.signed_request_account).to be_nil
end
end
end
context 'with a valid actor that is not an Account' do
let(:actor) { WrappedActor.new(author) }
before do
get :success
fake_request = Request.new(:get, request.url)
fake_request.on_behalf_of(author)
request.headers.merge!(fake_request.headers)
allow(ActivityPub::TagManager.instance).to receive(:uri_to_actor).with(anything) do
actor
end
end
describe '#signed_request?' do
        it 'returns true' do
          expect(controller.signed_request?).to be true
        end
      end

      describe '#signed_request_account' do
        it 'returns nil' do
          expect(controller.signed_request_account).to be_nil
        end
      end

      describe '#signed_request_actor' do
        it 'returns the expected actor' do
          expect(controller.signed_request_actor).to eq actor
        end
      end
    end

    context 'with request with unparseable Date header' do
      before do
        get :success
        fake_request = Request.new(:get, request.url)
        fake_request.add_headers({ 'Date' => 'wrong date' })
        fake_request.on_behalf_of(author)
        request.headers.merge!(fake_request.headers)
      end

      describe '#signed_request?' do
        it 'returns true' do
          expect(controller.signed_request?).to be true
        end
      end

      describe '#signed_request_account' do
        it 'returns nil' do
          expect(controller.signed_request_account).to be_nil
        end
      end

      describe '#signature_verification_failure_reason' do
        it 'contains an error description' do
          controller.signed_request_account
          expect(controller.signature_verification_failure_reason[:error]).to eq 'Invalid Date header: not RFC 2616 compliant date: "wrong date"'
        end
      end
    end

    context 'with request older than a day' do
      before do
        get :success
        fake_request = Request.new(:get, request.url)
        fake_request.add_headers({ 'Date' => 2.days.ago.utc.httpdate })
        fake_request.on_behalf_of(author)
        request.headers.merge!(fake_request.headers)
      end

      describe '#signed_request?' do
        it 'returns true' do
          expect(controller.signed_request?).to be true
        end
      end

      describe '#signed_request_account' do
        it 'returns nil' do
          expect(controller.signed_request_account).to be_nil
        end
      end

      describe '#signature_verification_failure_reason' do
        it 'contains an error description' do
          controller.signed_request_account
          expect(controller.signature_verification_failure_reason[:error]).to eq 'Signed request date outside acceptable time window'
        end
      end
    end

    context 'with inaccessible key' do
      before do
        get :success
        author = Fabricate(:account, domain: 'localhost:5000', uri: 'http://localhost:5000/actor')
        fake_request = Request.new(:get, request.url)
        fake_request.on_behalf_of(author)
        author.destroy
        request.headers.merge!(fake_request.headers)
        stub_request(:get, 'http://localhost:5000/actor#main-key').to_raise(Mastodon::HostValidationError)
      end

      describe '#signed_request?' do
        it 'returns true' do
          expect(controller.signed_request?).to be true
        end
      end

      describe '#signed_request_account' do
        it 'returns nil' do
          expect(controller.signed_request_account).to be_nil
        end
      end
    end

    context 'with body' do
      before do
        allow(controller).to receive(:actor_refresh_key!).and_return(author)
        post :success, body: 'Hello world'
        fake_request = Request.new(:post, request.url, body: 'Hello world')
        fake_request.on_behalf_of(author)
        request.headers.merge!(fake_request.headers)
      end

      describe '#signed_request?' do
        it 'returns true' do
          expect(controller.signed_request?).to be true
        end
      end

      describe '#signed_request_account' do
        it 'returns an account' do
          expect(controller.signed_request_account).to eq author
        end
      end

      context 'when path does not match' do
        before do
          request.path = '/alternative-path'
        end

        describe '#signed_request_account' do
          it 'returns nil' do
            expect(controller.signed_request_account).to be_nil
          end
        end

        describe '#signature_verification_failure_reason' do
          it 'contains an error description' do
            controller.signed_request_account
            expect(controller.signature_verification_failure_reason[:error]).to include('using rsa-sha256 (RSASSA-PKCS1-v1_5 with SHA-256)')
            expect(controller.signature_verification_failure_reason[:signed_string]).to include("(request-target): post /alternative-path\n")
          end
        end
      end

      context 'when method does not match' do
        before do
          get :success
        end

        describe '#signed_request_account' do
          it 'returns nil' do
            expect(controller.signed_request_account).to be_nil
          end
        end
      end

      context 'when body has been tampered' do
        before do
          post :success, body: 'doo doo doo'
        end

        describe '#signed_request_account' do
          it 'returns nil when body has been tampered' do
            expect(controller.signed_request_account).to be_nil
          end
        end
      end
    end
  end

  context 'when a signature is required' do
    before do
      get :signature_required
    end

    context 'without signature header' do
      it 'returns HTTP 401' do
        expect(response).to have_http_status(401)
      end

      it 'returns an error' do
        expect(Oj.load(response.body)['error']).to eq 'Request not signed'
      end
    end
  end
end

View File

@@ -1,6 +1,8 @@
# frozen_string_literal: true
Fabricator(:account_stat) do
-  account nil
-  statuses_count ""
-  following_count ""
-  followers_count ""
+  account { Fabricate.build(:account) }
+  statuses_count '123'
+  following_count '456'
+  followers_count '789'
end

View File

@@ -29,7 +29,47 @@ RSpec.describe ActivityPub::Activity::Create do
        subject.perform
      end

-      context 'object has been edited' do
+      context 'when object publication date is below ISO8601 range' do
+        let(:object_json) do
+          {
+            id: [ActivityPub::TagManager.instance.uri_for(sender), '#bar'].join,
+            type: 'Note',
+            content: 'Lorem ipsum',
+            published: '-0977-11-03T08:31:22Z',
+          }
+        end
+
+        it 'creates status with a valid creation date', :aggregate_failures do
+          status = sender.statuses.first
+
+          expect(status).to_not be_nil
+          expect(status.text).to eq 'Lorem ipsum'
+
+          expect(status.created_at).to be_within(30).of(Time.now.utc)
+        end
+      end
+
+      context 'when object publication date is above ISO8601 range' do
+        let(:object_json) do
+          {
+            id: [ActivityPub::TagManager.instance.uri_for(sender), '#bar'].join,
+            type: 'Note',
+            content: 'Lorem ipsum',
+            published: '10000-11-03T08:31:22Z',
+          }
+        end
+
+        it 'creates status with a valid creation date', :aggregate_failures do
+          status = sender.statuses.first
+
+          expect(status).to_not be_nil
+          expect(status.text).to eq 'Lorem ipsum'
+
+          expect(status.created_at).to be_within(30).of(Time.now.utc)
+        end
+      end
+
+      context 'when object has been edited' do
        let(:object_json) do
          {
            id: [ActivityPub::TagManager.instance.uri_for(sender), '#bar'].join,
@@ -40,18 +80,16 @@ RSpec.describe ActivityPub::Activity::Create do
          }
        end

-        it 'creates status' do
+        it 'creates status with appropriate creation and edition dates', :aggregate_failures do
          status = sender.statuses.first

          expect(status).to_not be_nil
          expect(status.text).to eq 'Lorem ipsum'
-        end
-
-        it 'marks status as edited' do
-          status = sender.statuses.first
-
-          expect(status).to_not be_nil
-          expect(status.edited?).to eq true
+
+          expect(status.created_at).to eq '2022-01-22T15:00:00Z'.to_datetime
+
+          expect(status.edited?).to be true
+          expect(status.edited_at).to eq '2022-01-22T16:00:00Z'.to_datetime
        end
      end

View File

@@ -37,6 +37,37 @@ RSpec.describe ActivityPub::Activity::Flag do
      end
    end

+    context 'when the report comment is excessively long' do
+      subject do
+        described_class.new({
+          '@context': 'https://www.w3.org/ns/activitystreams',
+          id: flag_id,
+          type: 'Flag',
+          content: long_comment,
+          actor: ActivityPub::TagManager.instance.uri_for(sender),
+          object: [
+            ActivityPub::TagManager.instance.uri_for(flagged),
+            ActivityPub::TagManager.instance.uri_for(status),
+          ],
+        }.with_indifferent_access, sender)
+      end
+
+      let(:long_comment) { Faker::Lorem.characters(number: 6000) }
+
+      before do
+        subject.perform
+      end
+
+      it 'creates a report but with a truncated comment' do
+        report = Report.find_by(account: sender, target_account: flagged)
+
+        expect(report).to_not be_nil
+        expect(report.comment.length).to eq 5000
+        expect(report.comment).to eq long_comment[0...5000]
+        expect(report.status_ids).to eq [status.id]
+      end
+    end
+
    context 'when the reported status is private and should not be visible to the remote server' do
      let(:status) { Fabricate(:status, account: flagged, uri: 'foobar', visibility: :private) }

View File

@@ -36,6 +36,40 @@ RSpec.describe ActivityPub::LinkedDataSignature do
      end
    end

+    context 'when local account record is missing a public key' do
+      let(:raw_signature) do
+        {
+          'creator' => 'http://example.com/alice',
+          'created' => '2017-09-23T20:21:34Z',
+        }
+      end
+
+      let(:signature) { raw_signature.merge('type' => 'RsaSignature2017', 'signatureValue' => sign(sender, raw_signature, raw_json)) }
+
+      let(:service_stub) { instance_double(ActivityPub::FetchRemoteKeyService) }
+
+      before do
+        # Ensure signature is computed with the old key
+        signature
+
+        # Unset key
+        old_key = sender.public_key
+        sender.update!(private_key: '', public_key: '')
+
+        allow(ActivityPub::FetchRemoteKeyService).to receive(:new).and_return(service_stub)
+
+        allow(service_stub).to receive(:call).with('http://example.com/alice', id: false) do
+          sender.update!(public_key: old_key)
+          sender
+        end
+      end
+
+      it 'fetches key and returns creator' do
+        expect(subject.verify_actor!).to eq sender
+        expect(service_stub).to have_received(:call).with('http://example.com/alice', id: false).once
+      end
+    end
+
    context 'when signature is missing' do
      let(:signature) { nil }

View File

@@ -110,6 +110,14 @@ RSpec.describe ActivityPub::TagManager do
      expect(subject.cc(status)).to include(subject.uri_for(foo))
      expect(subject.cc(status)).to_not include(subject.uri_for(alice))
    end

+    it 'returns poster of reblogged post, if reblog' do
+      bob = Fabricate(:account, username: 'bob', domain: 'example.com', inbox_url: 'http://example.com/bob')
+      alice = Fabricate(:account, username: 'alice')
+      status = Fabricate(:status, visibility: :public, account: bob)
+      reblog = Fabricate(:status, visibility: :public, account: alice, reblog: status)
+
+      expect(subject.cc(reblog)).to include(subject.uri_for(bob))
+    end
  end

  describe '#local_uri?' do

Some files were not shown because too many files have changed in this diff